From f688dc2e8616f914040746de845646abb158125d Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Fri, 8 May 2015 18:06:59 +0200
Subject: introduce backend for distance function

isi- and spike distances over complete intervals are now computed
without obtaining the profile first. This gives a more than 2x
performance improvement.
---
 pyspike/cython/cython_distances.pyx | 328 ++++++++++++++++++++++++++++++++++++
 1 file changed, 328 insertions(+)
 create mode 100644 pyspike/cython/cython_distances.pyx

diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx
new file mode 100644
index 0000000..65c2872
--- /dev/null
+++ b/pyspike/cython/cython_distances.pyx
@@ -0,0 +1,328 @@
+#cython: boundscheck=False
+#cython: wraparound=False
+#cython: cdivision=True
+
+"""
+cython_distances.pyx
+
+cython implementation of the isi-, spike- and spike-sync distances
+
+Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects
+improves the performance of spike_distance by a factor of 10!
+
+Copyright 2015, Mario Mulansky
+
+Distributed under the BSD License
+
+"""
+
+"""
+To test whether things can be optimized: remove all yellow stuff
+in the html output::
+
+  cython -a cython_distances.pyx
+
+which gives::
+
+  cython_distances.html
+
+"""
+
+import numpy as np
+cimport numpy as np
+
+from libc.math cimport fabs
+from libc.math cimport fmax
+from libc.math cimport fmin
+
+DTYPE = np.float
+ctypedef np.float_t DTYPE_t
+
+
+############################################################
+# isi_distance_cython
+############################################################
+def isi_distance_cython(double[:] s1, double[:] s2,
+                        double t_start, double t_end):
+
+    cdef double isi_value
+    cdef int index1, index2, index
+    cdef int N1, N2
+    cdef double nu1, nu2
+    cdef double last_t, curr_t, curr_isi
+    isi_value = 0.0
+    N1 = len(s1)
+    N2 = len(s2)
+
+    # first interspike interval - check if a spike exists at the start time
+    if s1[0] > t_start:
+        # edge correction
+        nu1 = fmax(s1[0]-t_start, s1[1]-s1[0])
+        index1 = -1
+    else:
+        nu1 = s1[1]-s1[0]
+        index1 = 0
+
+    if s2[0] > t_start:
+        # edge correction
+        nu2 = fmax(s2[0]-t_start, s2[1]-s2[0])
+        index2 = -1
+    else:
+        nu2 = s2[1]-s2[0]
+        index2 = 0
+
+    last_t = t_start
+    curr_isi = fabs(nu1-nu2)/fmax(nu1, nu2)
+    index = 1
+
+    with nogil: # release the interpreter to allow multithreading
+        while index1+index2 < N1+N2-2:
+            # check which spike is next, only if there are spikes left in 1
+            # next spike in 1 is earlier, or there are no spikes left in 2
+            if (index1 < N1-1) and ((index2 == N2-1) or
+                                    (s1[index1+1] < s2[index2+1])):
+                index1 += 1
+                curr_t = s1[index1]
+                if index1 < N1-1:
+                    nu1 = s1[index1+1]-s1[index1]
+                else:
+                    # edge correction
+                    nu1 = fmax(t_end-s1[index1], nu1)
+            elif (index2 < N2-1) and ((index1 == N1-1) or
+                                      (s1[index1+1] > s2[index2+1])):
+                index2 += 1
+                curr_t = s2[index2]
+                if index2 < N2-1:
+                    nu2 = s2[index2+1]-s2[index2]
+                else:
+                    # edge correction
+                    nu2 = fmax(t_end-s2[index2], nu2)
+            else: # s1[index1+1] == s2[index2+1]
+                index1 += 1
+                index2 += 1
+                curr_t = s1[index1]
+                if index1 < N1-1:
+                    nu1 = s1[index1+1]-s1[index1]
+                else:
+                    # edge correction
+                    nu1 = fmax(t_end-s1[index1], nu1)
+                if index2 < N2-1:
+                    nu2 = s2[index2+1]-s2[index2]
+                else:
+                    # edge correction
+                    nu2 = fmax(t_end-s2[index2], nu2)
+            # compute the corresponding isi-distance
+            isi_value += curr_isi * (curr_t - last_t)
+            curr_isi = fabs(nu1 - nu2) / fmax(nu1, nu2)
+            last_t = curr_t
+            index += 1
+
+        isi_value += curr_isi * (t_end - last_t)
+    # end nogil
+
+    return isi_value / (t_end-t_start)
+
+
+############################################################
+# get_min_dist_cython
+############################################################
+cdef inline double get_min_dist_cython(double spike_time,
+                                       double[:] spike_train,
+                                       # use memory view to ensure inlining
+                                       # np.ndarray[DTYPE_t,ndim=1] spike_train,
+                                       int N,
+                                       int start_index,
+                                       double t_start, double t_end) nogil:
+    """ Returns the minimal distance |spike_time - spike_train[i]|
+    with i>=start_index.
+    """
+    cdef double d, d_temp
+    # start with the distance to the start time
+    d = fabs(spike_time - t_start)
+    if start_index < 0:
+        start_index = 0
+    while start_index < N:
+        d_temp = fabs(spike_time - spike_train[start_index])
+        if d_temp > d:
+            return d
+        else:
+            d = d_temp
+        start_index += 1
+
+    # finally, check the distance to end time
+    d_temp = fabs(t_end - spike_time)
+    if d_temp > d:
+        return d
+    else:
+        return d_temp
+
+
+############################################################
+# isi_avrg_cython
+############################################################
+cdef inline double isi_avrg_cython(double isi1, double isi2) nogil:
+    return 0.5*(isi1+isi2)*(isi1+isi2)
+    # alternative definition to obtain ~ 0.5 for Poisson spikes
+    # return 0.5*(isi1*isi1+isi2*isi2)
+
+
+############################################################
+# spike_distance_cython
+############################################################
+def spike_distance_cython(double[:] t1, double[:] t2,
+                          double t_start, double t_end):
+
+    cdef int N1, N2, index1, index2, index
+    cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2
+    cdef double isi1, isi2, s1, s2
+    cdef double y_start, y_end, t_last, t_curr, spike_value
+
+    spike_value = 0.0
+
+    N1 = len(t1)
+    N2 = len(t2)
+
+    with nogil: # release the interpreter to allow multithreading
+        t_last = t_start
+        t_p1 = t_start
+        t_p2 = t_start
+        if t1[0] > t_start:
+            # dt_p1 = t2[0]-t_start
+            t_f1 = t1[0]
+            dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
+            isi1 = fmax(t_f1-t_start, t1[1]-t1[0])
+            dt_p1 = dt_f1
+            s1 = dt_p1*(t_f1-t_start)/isi1
+            index1 = -1
+        else:
+            t_f1 = t1[1]
+            dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
+            dt_p1 = 0.0
+            isi1 = t1[1]-t1[0]
+            s1 = dt_p1
+            index1 = 0
+        if t2[0] > t_start:
+            # dt_p1 = t2[0]-t_start
+            t_f2 = t2[0]
+            dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
+            dt_p2 = dt_f2
+            isi2 = fmax(t_f2-t_start, t2[1]-t2[0])
+            s2 = dt_p2*(t_f2-t_start)/isi2
+            index2 = -1
+        else:
+            t_f2 = t2[1]
+            dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
+            dt_p2 = 0.0
+            isi2 = t2[1]-t2[0]
+            s2 = dt_p2
+            index2 = 0
+
+        y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+        index = 1
+
+        while index1+index2 < N1+N2-2:
+            # print(index, index1, index2)
+            if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1):
+                index1 += 1
+                # first calculate the previous interval end value
+                s1 = dt_f1*(t_f1-t_p1) / isi1
+                # the previous time now was the following time before:
+                dt_p1 = dt_f1
+                t_p1 = t_f1    # t_p1 contains the current time point
+                # get the next time
+                if index1 < N1-1:
+                    t_f1 = t1[index1+1]
+                else:
+                    t_f1 = t_end
+                t_curr = t_p1
+                s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
+                y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+
+                spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+
+                # now the next interval start value
+                if index1 < N1-1:
+                    dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
+                                                t_start, t_end)
+                    isi1 = t_f1-t_p1
+                    s1 = dt_p1
+                else:
+                    dt_f1 = dt_p1
+                    isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+                    # s1 needs adjustment due to change of isi1
+                    s1 = dt_p1*(t_end-t1[N1-1])/isi1
+                # s2 is the same as above, thus we can compute y2 immediately
+                y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+            elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1):
+                index2 += 1
+                # first calculate the previous interval end value
+                s2 = dt_f2*(t_f2-t_p2) / isi2
+                # the previous time now was the following time before:
+                dt_p2 = dt_f2
+                t_p2 = t_f2    # t_p2 contains the current time point
+                # get the next time
+                if index2 < N2-1:
+                    t_f2 = t2[index2+1]
+                else:
+                    t_f2 = t_end
+                t_curr = t_p2
+                s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
+                y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+
+                spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+
+                # now the next interval start value
+                if index2 < N2-1:
+                    dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
+                                                t_start, t_end)
+                    isi2 = t_f2-t_p2
+                    s2 = dt_p2
+                else:
+                    dt_f2 = dt_p2
+                    isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+                    # s2 needs adjustment due to change of isi2
+                    s2 = dt_p2*(t_end-t2[N2-1])/isi2
+                # s1 is the same as above, thus we can compute y2 immediately
+                y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+            else: # t_f1 == t_f2 - generate only one event
+                index1 += 1
+                index2 += 1
+                t_p1 = t_f1
+                t_p2 = t_f2
+                dt_p1 = 0.0
+                dt_p2 = 0.0
+                t_curr = t_f1
+                y_end = 0.0
+                spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+                y_start = 0.0
+                if index1 < N1-1:
+                    t_f1 = t1[index1+1]
+                    dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
+                                                t_start, t_end)
+                    isi1 = t_f1 - t_p1
+                else:
+                    t_f1 = t_end
+                    dt_f1 = dt_p1
+                    isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+                if index2 < N2-1:
+                    t_f2 = t2[index2+1]
+                    dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
+                                                t_start, t_end)
+                    isi2 = t_f2 - t_p2
+                else:
+                    t_f2 = t_end
+                    dt_f2 = dt_p2
+                    isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+            index += 1
+            t_last = t_curr
+        # isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+        # isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+        s1 = dt_f1*(t_end-t1[N1-1])/isi1
+        s2 = dt_f2*(t_end-t2[N2-1])/isi2
+        y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+        spike_value += 0.5*(y_start + y_end) * (t_end - t_last)
+    # end nogil
+
+    # use only the data added above
+    # could be less than original length due to equal spike times
+    return spike_value / (t_end-t_start)

From ad7961c9f73d77b87ee23349169618d128418a51 Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Mon, 11 May 2015 12:06:19 +0200
Subject: performance improvement for spike sync

Additional cython implementation for overall spike sync values. It is
no longer necessary to compute the profile if only the spike sync value
is required, which yields a 3x performance gain.
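For orientation, the change is transparent to the public API: `spike_sync`
now obtains the overall value directly from the new
`coincidence_value_cython` backend (added below) instead of averaging a
full profile. A minimal sketch of the equivalence -- the spike times here
are made up for illustration, and the comparison assumes the Cython
backend is available::

    import pyspike as spk
    from pyspike import SpikeTrain

    st1 = SpikeTrain([0.2, 0.4, 0.6], edges=(0.0, 1.0))
    st2 = SpikeTrain([0.25, 0.45, 0.8], edges=(0.0, 1.0))

    # old path: compute the full profile, then average it
    d_profile = spk.spike_sync_profile(st1, st2).avrg()

    # new path: spike_sync returns the same value without the profile
    d_direct = spk.spike_sync(st1, st2)

    assert abs(d_profile - d_direct) < 1e-12

With the ImportError fallback in `_spike_sync_values` below, the same call
also works without the Cython backend, only slower.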
---
 pyspike/cython/cython_distances.pyx | 64 +++++++++++++++++++++++++++++++++++
 pyspike/spike_sync.py               | 46 ++++++++++++++++++++++----
 2 files changed, 104 insertions(+), 6 deletions(-)

diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx
index 65c2872..bf90638 100644
--- a/pyspike/cython/cython_distances.pyx
+++ b/pyspike/cython/cython_distances.pyx
@@ -326,3 +326,67 @@ def spike_distance_cython(double[:] t1, double[:] t2,
     # use only the data added above
     # could be less than original length due to equal spike times
     return spike_value / (t_end-t_start)
+
+
+
+############################################################
+# get_tau
+############################################################
+cdef inline double get_tau(double[:] spikes1, double[:] spikes2,
+                           int i, int j, double max_tau):
+    cdef double m = 1E100   # some huge number
+    cdef int N1 = spikes1.shape[0]-1  # len(spikes1)-1
+    cdef int N2 = spikes2.shape[0]-1  # len(spikes2)-1
+    if i < N1 and i > -1:
+        m = fmin(m, spikes1[i+1]-spikes1[i])
+    if j < N2 and j > -1:
+        m = fmin(m, spikes2[j+1]-spikes2[j])
+    if i > 0:
+        m = fmin(m, spikes1[i]-spikes1[i-1])
+    if j > 0:
+        m = fmin(m, spikes2[j]-spikes2[j-1])
+    m *= 0.5
+    if max_tau > 0.0:
+        m = fmin(m, max_tau)
+    return m
+
+
+############################################################
+# coincidence_value_cython
+############################################################
+def coincidence_value_cython(double[:] spikes1, double[:] spikes2,
+                             double t_start, double t_end, double max_tau):
+
+    cdef int N1 = len(spikes1)
+    cdef int N2 = len(spikes2)
+    cdef int i = -1
+    cdef int j = -1
+    cdef double coinc = 0.0
+    cdef double mp = 0.0
+    cdef double tau
+    while i + j < N1 + N2 - 2:
+        if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+            i += 1
+            mp += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            if j > -1 and spikes1[i]-spikes2[j] < tau:
+                # coincidence between the current spike and the previous spike
+                # both get marked with 1
+                coinc += 2
+        elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+            j += 1
+            mp += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            if i > -1 and spikes2[j]-spikes1[i] < tau:
+                # coincidence between the current spike and the previous spike
+                # both get marked with 1
+                coinc += 2
+        else:   # spikes1[i+1] = spikes2[j+1]
+            # advance in both spike trains
+            j += 1
+            i += 1
+            # add only one event, but with coincidence 2 and multiplicity 2
+            mp += 2
+            coinc += 2
+
+    return coinc, mp
diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py
index 107734d..40d98d2 100644
--- a/pyspike/spike_sync.py
+++ b/pyspike/spike_sync.py
@@ -57,6 +57,40 @@ Falling back to slow python backend.")
     return DiscreteFunc(times, coincidences, multiplicity)
 
 
+############################################################
+# _spike_sync_values
+############################################################
+def _spike_sync_values(spike_train1, spike_train2, interval, max_tau):
+    """ Internal function. Computes the summed coincidences and multiplicity
+    for spike synchronization of the two given spike trains.
+
+    Do not call this function directly, use `spike_sync` or `spike_sync_multi`
+    instead.
+ """ + if interval is None: + # distance over the whole interval is requested: use specific function + # for optimal performance + try: + from cython.cython_distances import coincidence_value_cython \ + as coincidence_value_impl + if max_tau is None: + max_tau = 0.0 + c, mp = coincidence_value_impl(spike_train1.spikes, + spike_train2.spikes, + spike_train1.t_start, + spike_train1.t_end, + max_tau) + return c, mp + except ImportError: + # Cython backend not available: fall back to profile averaging + return spike_sync_profile(spike_train1, spike_train2, + max_tau).integral(interval) + else: + # some specific interval is provided: use profile + return spike_sync_profile(spike_train1, spike_train2, + max_tau).integral(interval) + + ############################################################ # spike_sync ############################################################ @@ -80,8 +114,8 @@ def spike_sync(spike_train1, spike_train2, interval=None, max_tau=None): :rtype: `double` """ - return spike_sync_profile(spike_train1, spike_train2, - max_tau).avrg(interval) + c, mp = _spike_sync_values(spike_train1, spike_train2, interval, max_tau) + return 1.0*c/mp ############################################################ @@ -144,10 +178,10 @@ def spike_sync_multi(spike_trains, indices=None, interval=None, max_tau=None): coincidence = 0.0 mp = 0.0 for (i, j) in pairs: - profile = spike_sync_profile(spike_trains[i], spike_trains[j]) - summed_vals = profile.integral(interval) - coincidence += summed_vals[0] - mp += summed_vals[1] + c, m = _spike_sync_values(spike_trains[i], spike_trains[j], + interval, max_tau) + coincidence += c + mp += m return coincidence/mp -- cgit v1.2.3 From bec2529367f1bdd5dac6d6fbaec560a30feec3c7 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 11 May 2015 17:41:08 +0200 Subject: treatment of empty spike trains in spike sync --- pyspike/DiscreteFunc.py | 4 ++++ pyspike/cython/cython_distances.pyx | 13 +++++++++---- pyspike/cython/cython_profiles.pyx | 9 +++++---- pyspike/cython/python_backend.py | 2 +- test/test_empty.py | 39 +++++++++++++++++++++++++++++++++++++ 5 files changed, 58 insertions(+), 9 deletions(-) (limited to 'pyspike/cython/cython_distances.pyx') diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py index dfe2cab..6ade87e 100644 --- a/pyspike/DiscreteFunc.py +++ b/pyspike/DiscreteFunc.py @@ -136,6 +136,10 @@ class DiscreteFunc(object): :rtype: pair of float """ + if len(self.y) <= 2: + # no actual values in the profile, return spike sync of 0 + return 0.0, 1.0 + def get_indices(ival): """ Retuns the indeces surrounding the given interval""" start_ind = np.searchsorted(self.x, ival[0], side='right') diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index bf90638..16780f2 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -333,8 +333,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, # get_tau ############################################################ cdef inline double get_tau(double[:] spikes1, double[:] spikes2, - int i, int j, double max_tau): - cdef double m = 1E100 # some huge number + int i, int j, double interval, double max_tau): + cdef double m = interval # use interval length as initial tau cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1 cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1 if i < N1 and i > -1: @@ -363,12 +363,13 @@ def coincidence_value_cython(double[:] spikes1, double[:] spikes2, cdef int j = -1 cdef double coinc = 0.0 cdef double mp = 
0.0 + cdef double interval = t_end - t_start cdef double tau while i + j < N1 + N2 - 2: if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): i += 1 mp += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) if j > -1 and spikes1[i]-spikes2[j] < tau: # coincidence between the current spike and the previous spike # both get marked with 1 @@ -376,7 +377,7 @@ def coincidence_value_cython(double[:] spikes1, double[:] spikes2, elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): j += 1 mp += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) if i > -1 and spikes2[j]-spikes1[i] < tau: # coincidence between the current spike and the previous spike # both get marked with 1 @@ -389,4 +390,8 @@ def coincidence_value_cython(double[:] spikes1, double[:] spikes2, mp += 2 coinc += 2 + if coinc == 0 and mp == 0: + # empty spike trains -> set mp to one to avoid 0/0 + mp = 1 + return coinc, mp diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index 3690091..d937a02 100644 --- a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -345,8 +345,8 @@ def spike_profile_cython(double[:] t1, double[:] t2, # get_tau ############################################################ cdef inline double get_tau(double[:] spikes1, double[:] spikes2, - int i, int j, double max_tau): - cdef double m = 1E100 # some huge number + int i, int j, double interval, double max_tau): + cdef double m = interval # use interval as initial tau cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1 cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1 if i < N1 and i > -1: @@ -377,12 +377,13 @@ def coincidence_profile_cython(double[:] spikes1, double[:] spikes2, cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times cdef double[:] c = np.zeros(N1 + N2 + 2) # coincidences cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity + cdef double interval = t_end - t_start cdef double tau while i + j < N1 + N2 - 2: if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): i += 1 n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) st[n] = spikes1[i] if j > -1 and spikes1[i]-spikes2[j] < tau: # coincidence between the current spike and the previous spike @@ -392,7 +393,7 @@ def coincidence_profile_cython(double[:] spikes1, double[:] spikes2, elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): j += 1 n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) st[n] = spikes2[j] if i > -1 and spikes2[j]-spikes1[i] < tau: # coincidence between the current spike and the previous spike diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index 1fd8c42..830dc69 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -340,7 +340,7 @@ def cumulative_sync_python(spikes1, spikes2): def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): def get_tau(spikes1, spikes2, i, j, max_tau): - m = 1E100 # some huge number + m = t_end - t_start # use interval as initial tau if i < len(spikes1)-1 and i > -1: m = min(m, spikes1[i+1]-spikes1[i]) if j < len(spikes2)-1 and j > -1: diff --git a/test/test_empty.py b/test/test_empty.py index 42c3716..b31d8a4 100644 --- a/test/test_empty.py +++ b/test/test_empty.py @@ -102,7 +102,46 @@ def test_spike_empty(): 
assert_array_almost_equal(prof.y2, expected_y2, decimal=15) +def test_spike_sync_empty(): + st1 = SpikeTrain([], edges=(0.0, 1.0)) + st2 = SpikeTrain([], edges=(0.0, 1.0)) + d = spk.spike_sync(st1, st2) + assert_equal(d, 0.0) + prof = spk.spike_sync_profile(st1, st2) + assert_equal(d, prof.avrg()) + assert_array_equal(prof.x, [0.0, 1.0]) + assert_array_equal(prof.y, [0.0, 0.0]) + + st1 = SpikeTrain([], edges=(0.0, 1.0)) + st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0)) + d = spk.spike_sync(st1, st2) + assert_equal(d, 0.0) + prof = spk.spike_sync_profile(st1, st2) + assert_equal(d, prof.avrg()) + assert_array_equal(prof.x, [0.0, 0.4, 1.0]) + assert_array_equal(prof.y, [0.0, 0.0, 0.0]) + + st1 = SpikeTrain([0.6, ], edges=(0.0, 1.0)) + st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0)) + d = spk.spike_sync(st1, st2) + assert_almost_equal(d, 1.0, decimal=15) + prof = spk.spike_sync_profile(st1, st2) + assert_equal(d, prof.avrg()) + assert_array_almost_equal(prof.x, [0.0, 0.4, 0.6, 1.0], decimal=15) + assert_array_almost_equal(prof.y, [1.0, 1.0, 1.0, 1.0], decimal=15) + + st1 = SpikeTrain([0.2, ], edges=(0.0, 1.0)) + st2 = SpikeTrain([0.8, ], edges=(0.0, 1.0)) + d = spk.spike_sync(st1, st2) + assert_almost_equal(d, 0.0, decimal=15) + prof = spk.spike_sync_profile(st1, st2) + assert_equal(d, prof.avrg()) + assert_array_almost_equal(prof.x, [0.0, 0.2, 0.8, 1.0], decimal=15) + assert_array_almost_equal(prof.y, [0.0, 0.0, 0.0, 0.0], decimal=15) + + if __name__ == "__main__": test_get_non_empty() test_isi_empty() test_spike_empty() + test_spike_sync_empty() -- cgit v1.2.3 From a35402c208bd0ad31e5e60b6ddc55a3470e7bdde Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 11 May 2015 17:54:02 +0200 Subject: bugfix: spike_sync=1 for empty spike trains --- pyspike/DiscreteFunc.py | 4 ++-- pyspike/cython/cython_distances.pyx | 3 ++- pyspike/cython/cython_profiles.pyx | 12 ++++++++---- pyspike/cython/python_backend.py | 12 ++++++++---- test/test_empty.py | 4 ++-- 5 files changed, 22 insertions(+), 13 deletions(-) (limited to 'pyspike/cython/cython_distances.pyx') diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py index 6ade87e..17153ee 100644 --- a/pyspike/DiscreteFunc.py +++ b/pyspike/DiscreteFunc.py @@ -137,8 +137,8 @@ class DiscreteFunc(object): """ if len(self.y) <= 2: - # no actual values in the profile, return spike sync of 0 - return 0.0, 1.0 + # no actual values in the profile, return spike sync of 1 + return 1.0, 1.0 def get_indices(ival): """ Retuns the indeces surrounding the given interval""" diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index 16780f2..c4f2349 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -391,7 +391,8 @@ def coincidence_value_cython(double[:] spikes1, double[:] spikes2, coinc += 2 if coinc == 0 and mp == 0: - # empty spike trains -> set mp to one to avoid 0/0 + # empty spike trains -> spike sync = 1 by definition + coinc = 1 mp = 1 return coinc, mp diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index d937a02..f9893eb 100644 --- a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -416,9 +416,13 @@ def coincidence_profile_cython(double[:] spikes1, double[:] spikes2, st[0] = t_start st[len(st)-1] = t_end - c[0] = c[1] - c[len(c)-1] = c[len(c)-2] - mp[0] = mp[1] - mp[len(mp)-1] = mp[len(mp)-2] + if N1 + N2 > 0: + c[0] = c[1] + c[len(c)-1] = c[len(c)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + else: + c[0] = 1 + 
c[1] = 1 return st, c, mp diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index 830dc69..69a420f 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -399,10 +399,14 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): st[0] = t_start st[len(st)-1] = t_end - c[0] = c[1] - c[len(c)-1] = c[len(c)-2] - mp[0] = mp[1] - mp[len(mp)-1] = mp[len(mp)-2] + if N1 + N2 > 0: + c[0] = c[1] + c[len(c)-1] = c[len(c)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + else: + c[0] = 1 + c[1] = 1 return st, c, mp diff --git a/test/test_empty.py b/test/test_empty.py index b31d8a4..48be25d 100644 --- a/test/test_empty.py +++ b/test/test_empty.py @@ -106,11 +106,11 @@ def test_spike_sync_empty(): st1 = SpikeTrain([], edges=(0.0, 1.0)) st2 = SpikeTrain([], edges=(0.0, 1.0)) d = spk.spike_sync(st1, st2) - assert_equal(d, 0.0) + assert_equal(d, 1.0) prof = spk.spike_sync_profile(st1, st2) assert_equal(d, prof.avrg()) assert_array_equal(prof.x, [0.0, 1.0]) - assert_array_equal(prof.y, [0.0, 0.0]) + assert_array_equal(prof.y, [1.0, 1.0]) st1 = SpikeTrain([], edges=(0.0, 1.0)) st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0)) -- cgit v1.2.3 From b970055641b215d30b671ee810e29c6a55e6214a Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 14 Dec 2015 14:23:02 +0100 Subject: improved edge correction for spike distance Improvement following Eeros suggestions to use auxiliary spike at the edges consistently with the corresponding corrected ISI intervals. --- pyspike/cython/cython_distances.pyx | 55 +++++++++++++++++++++------------ pyspike/cython/cython_profiles.pyx | 61 +++++++++++++++++++++++++------------ test/test_distance.py | 50 ++++++++++++++++++++++++++---- test/test_empty.py | 4 +-- 4 files changed, 122 insertions(+), 48 deletions(-) (limited to 'pyspike/cython/cython_distances.pyx') diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index c4f2349..41baa60 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -176,6 +176,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2 cdef double isi1, isi2, s1, s2 cdef double y_start, y_end, t_last, t_current, spike_value + cdef double[:] t_aux1 = np.empty(2) + cdef double[:] t_aux2 = np.empty(2) spike_value = 0.0 @@ -184,19 +186,27 @@ def spike_distance_cython(double[:] t1, double[:] t2, with nogil: # release the interpreter to allow multithreading t_last = t_start - t_p1 = t_start - t_p2 = t_start + # t_p1 = t_start + # t_p2 = t_start + # auxiliary spikes for edge correction - consistent with first/last ISI + t_aux1[0] = fmin(t_start, t1[0]-(t1[1]-t1[0])) + t_aux1[1] = fmax(t_end, t1[N1-1]+(t1[N1-1]-t1[N1-2])) + t_aux2[0] = fmin(t_start, t2[0]-(t2[1]-t2[0])) + t_aux2[1] = fmax(t_end, t2[N2-1]+(t2[N2-1]-t2[N2-2])) + t_p1 = t_start if (t1[0] == t_start) else t_aux1[0] + t_p2 = t_start if (t2[0] == t_start) else t_aux2[0] if t1[0] > t_start: # dt_p1 = t2[0]-t_start t_f1 = t1[0] - dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1]) isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) dt_p1 = dt_f1 - s1 = dt_p1*(t_f1-t_start)/isi1 + # s1 = dt_p1*(t_f1-t_start)/isi1 + s1 = dt_p1 index1 = -1 else: t_f1 = t1[1] - dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1]) dt_p1 = 0.0 isi1 = t1[1]-t1[0] s1 = dt_p1 @@ 
-204,14 +214,15 @@ def spike_distance_cython(double[:] t1, double[:] t2, if t2[0] > t_start: # dt_p1 = t2[0]-t_start t_f2 = t2[0] - dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1]) dt_p2 = dt_f2 isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) - s2 = dt_p2*(t_f2-t_start)/isi2 + # s2 = dt_p2*(t_f2-t_start)/isi2 + s2 = dt_p2 index2 = -1 else: t_f2 = t2[1] - dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1]) dt_p2 = 0.0 isi2 = t2[1]-t2[0] s2 = dt_p2 @@ -233,7 +244,7 @@ def spike_distance_cython(double[:] t1, double[:] t2, if index1 < N1-1: t_f1 = t1[index1+1] else: - t_f1 = t_end + t_f1 = t_aux1[1] t_curr = t_p1 s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) @@ -243,14 +254,16 @@ def spike_distance_cython(double[:] t1, double[:] t2, # now the next interval start value if index1 < N1-1: dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, - t_start, t_end) + t_aux2[0], t_aux2[1]) isi1 = t_f1-t_p1 s1 = dt_p1 else: dt_f1 = dt_p1 isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) # s1 needs adjustment due to change of isi1 - s1 = dt_p1*(t_end-t1[N1-1])/isi1 + # s1 = dt_p1*(t_end-t1[N1-1])/isi1 + # Eero's correction: no adjustment + s1 = dt_p1 # s2 is the same as above, thus we can compute y2 immediately y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): @@ -264,7 +277,7 @@ def spike_distance_cython(double[:] t1, double[:] t2, if index2 < N2-1: t_f2 = t2[index2+1] else: - t_f2 = t_end + t_f2 = t_aux2[1] t_curr = t_p2 s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) @@ -274,14 +287,16 @@ def spike_distance_cython(double[:] t1, double[:] t2, # now the next interval start value if index2 < N2-1: dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, - t_start, t_end) + t_aux1[0], t_aux1[1]) isi2 = t_f2-t_p2 s2 = dt_p2 else: dt_f2 = dt_p2 isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) # s2 needs adjustment due to change of isi2 - s2 = dt_p2*(t_end-t2[N2-1])/isi2 + # s2 = dt_p2*(t_end-t2[N2-1])/isi2 + # Eero's correction: no adjustment + s2 = dt_p2 # s1 is the same as above, thus we can compute y2 immediately y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) else: # t_f1 == t_f2 - generate only one event @@ -298,27 +313,27 @@ def spike_distance_cython(double[:] t1, double[:] t2, if index1 < N1-1: t_f1 = t1[index1+1] dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, - t_start, t_end) + t_aux2[0], t_aux2[1]) isi1 = t_f1 - t_p1 else: - t_f1 = t_end + t_f1 = t_aux1[1] dt_f1 = dt_p1 isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if index2 < N2-1: t_f2 = t2[index2+1] dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, - t_start, t_end) + t_aux1[0], t_aux1[1]) isi2 = t_f2 - t_p2 else: - t_f2 = t_end + t_f2 = t_aux2[1] dt_f2 = dt_p2 isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) index += 1 t_last = t_curr # isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) # isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) - s1 = dt_f1*(t_end-t1[N1-1])/isi1 - s2 = dt_f2*(t_end-t2[N2-1])/isi2 + s1 = dt_f1 # *(t_end-t1[N1-1])/isi1 + s2 = dt_f2 # *(t_end-t2[N2-1])/isi2 y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) spike_value += 0.5*(y_start + y_end) * (t_end - t_last) # end nogil diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index f9893eb..f08de6e 100644 --- 
a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -181,6 +181,8 @@ def spike_profile_cython(double[:] t1, double[:] t2, cdef double[:] spike_events cdef double[:] y_starts cdef double[:] y_ends + cdef double[:] t_aux1 = np.empty(2) + cdef double[:] t_aux2 = np.empty(2) cdef int N1, N2, index1, index2, index cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2 @@ -189,6 +191,10 @@ def spike_profile_cython(double[:] t1, double[:] t2, N1 = len(t1) N2 = len(t2) + # we can assume at least two spikes per spike train + assert N1 > 1 + assert N2 > 1 + spike_events = np.empty(N1+N2+2) y_starts = np.empty(len(spike_events)-1) @@ -196,19 +202,27 @@ def spike_profile_cython(double[:] t1, double[:] t2, with nogil: # release the interpreter to allow multithreading spike_events[0] = t_start - t_p1 = t_start - t_p2 = t_start + # t_p1 = t_start + # t_p2 = t_start + # auxiliary spikes for edge correction - consistent with first/last ISI + t_aux1[0] = fmin(t_start, t1[0]-(t1[1]-t1[0])) + t_aux1[1] = fmax(t_end, t1[N1-1]+(t1[N1-1]-t1[N1-2])) + t_aux2[0] = fmin(t_start, t2[0]-(t2[1]-t2[0])) + t_aux2[1] = fmax(t_end, t2[N2-1]+(t2[N2-1]-t2[N2-2])) + t_p1 = t_start if (t1[0] == t_start) else t_aux1[0] + t_p2 = t_start if (t2[0] == t_start) else t_aux2[0] if t1[0] > t_start: # dt_p1 = t2[0]-t_start t_f1 = t1[0] - dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1]) isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) dt_p1 = dt_f1 - s1 = dt_p1*(t_f1-t_start)/isi1 + # s1 = dt_p1*(t_f1-t_start)/isi1 + s1 = dt_p1 index1 = -1 else: t_f1 = t1[1] - dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1]) dt_p1 = 0.0 isi1 = t1[1]-t1[0] s1 = dt_p1 @@ -216,14 +230,15 @@ def spike_profile_cython(double[:] t1, double[:] t2, if t2[0] > t_start: # dt_p1 = t2[0]-t_start t_f2 = t2[0] - dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1]) dt_p2 = dt_f2 isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) - s2 = dt_p2*(t_f2-t_start)/isi2 + # s2 = dt_p2*(t_f2-t_start)/isi2 + s2 = dt_p2 index2 = -1 else: t_f2 = t2[1] - dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1]) dt_p2 = 0.0 isi2 = t2[1]-t2[0] s2 = dt_p2 @@ -245,7 +260,7 @@ def spike_profile_cython(double[:] t1, double[:] t2, if index1 < N1-1: t_f1 = t1[index1+1] else: - t_f1 = t_end + t_f1 = t_aux1[1] spike_events[index] = t_p1 s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 y_ends[index-1] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, @@ -253,14 +268,16 @@ def spike_profile_cython(double[:] t1, double[:] t2, # now the next interval start value if index1 < N1-1: dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, - t_start, t_end) + t_aux2[0], t_aux2[1]) isi1 = t_f1-t_p1 s1 = dt_p1 else: dt_f1 = dt_p1 isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) # s1 needs adjustment due to change of isi1 - s1 = dt_p1*(t_end-t1[N1-1])/isi1 + # s1 = dt_p1*(t_end-t1[N1-1])/isi1 + # Eero's correction: no adjustment + s1 = dt_p1 # s2 is the same as above, thus we can compute y2 immediately y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) @@ -275,7 +292,7 @@ def spike_profile_cython(double[:] t1, double[:] t2, if index2 < N2-1: t_f2 = t2[index2+1] else: - t_f2 = t_end + t_f2 = t_aux2[1] spike_events[index] = t_p2 s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 
y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, @@ -283,14 +300,16 @@ def spike_profile_cython(double[:] t1, double[:] t2, # now the next interval start value if index2 < N2-1: dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, - t_start, t_end) + t_aux1[0], t_aux1[1]) isi2 = t_f2-t_p2 s2 = dt_p2 else: dt_f2 = dt_p2 isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) # s2 needs adjustment due to change of isi2 - s2 = dt_p2*(t_end-t2[N2-1])/isi2 + # s2 = dt_p2*(t_end-t2[N2-1])/isi2 + # Eero's correction: no adjustment + s2 = dt_p2 # s2 is the same as above, thus we can compute y2 immediately y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) else: # t_f1 == t_f2 - generate only one event @@ -306,19 +325,19 @@ def spike_profile_cython(double[:] t1, double[:] t2, if index1 < N1-1: t_f1 = t1[index1+1] dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, - t_start, t_end) + t_aux2[0], t_aux2[1]) isi1 = t_f1 - t_p1 else: - t_f1 = t_end + t_f1 = t_aux1[1] dt_f1 = dt_p1 isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if index2 < N2-1: t_f2 = t2[index2+1] dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, - t_start, t_end) + t_aux1[0], t_aux1[1]) isi2 = t_f2 - t_p2 else: - t_f2 = t_end + t_f2 = t_aux2[1] dt_f2 = dt_p2 isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) index += 1 @@ -330,8 +349,10 @@ def spike_profile_cython(double[:] t1, double[:] t2, # the ending value of the last interval isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) - s1 = dt_f1*(t_end-t1[N1-1])/isi1 - s2 = dt_f2*(t_end-t2[N2-1])/isi2 + # s1 = dt_f1*(t_end-t1[N1-1])/isi1 + # s2 = dt_f2*(t_end-t2[N2-1])/isi2 + s1 = dt_f1 + s2 = dt_f2 y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) # end nogil diff --git a/test/test_distance.py b/test/test_distance.py index e45ac16..626b438 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -36,6 +36,7 @@ def test_isi(): f = spk.isi_profile(t1, t2) # print("ISI: ", f.y) + print("ISI value:", expected_isi_val) assert_equal(f.x, expected_times) assert_array_almost_equal(f.y, expected_isi, decimal=15) @@ -73,8 +74,19 @@ def test_spike(): assert_equal(f.x, expected_times) - assert_almost_equal(f.avrg(), 1.6624149659863946e-01, decimal=15) - assert_almost_equal(f.y2[-1], 0.1394558, decimal=6) + # from SPIKY: + y_all = np.array([0.000000000000000000, 0.555555555555555580, + 0.222222222222222210, 0.305555555555555580, + 0.255102040816326536, 0.000000000000000000, + 0.000000000000000000, 0.255102040816326536, + 0.255102040816326536, 0.285714285714285698, + 0.285714285714285698, 0.285714285714285698]) + + #assert_array_almost_equal(f.y1, y_all[::2]) + assert_array_almost_equal(f.y2, y_all[1::2]) + + assert_almost_equal(f.avrg(), 0.186309523809523814, decimal=15) + assert_equal(spk.spike_distance(t1, t2), f.avrg()) t1 = SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0) t2 = SpikeTrain([0.3, 0.45, 0.8, 0.9, 0.95], 1.0) @@ -99,6 +111,8 @@ def test_spike(): (expected_y1+expected_y2)/2) expected_spike_val /= (expected_times[-1]-expected_times[0]) + print("SPIKE value:", expected_spike_val) + f = spk.spike_profile(t1, t2) assert_equal(f.x, expected_times) @@ -117,9 +131,14 @@ def test_spike(): # for left and right values s1_r = np.array([0.1, (0.1*0.1+0.1*0.1)/0.2, 0.1, 0.0, 0.0, 0.0, 0.0]) s1_l = np.array([0.1, (0.1*0.1+0.1*0.1)/0.2, 0.1, 0.0, 0.0, 0.0, 0.0]) - s2_r = np.array([0.1*0.1/0.3, 0.1*0.3/0.3, 0.1*0.2/0.3, + # s2_r = np.array([0.1*0.1/0.3, 0.1*0.3/0.3, 0.1*0.2/0.3, + # 0.0, 0.1, 0.0, 0.0]) + # s2_l = 
np.array([0.1*0.1/0.3, 0.1*0.1/0.3, 0.1*0.2/0.3, 0.0, + # 0.1, 0.0, 0.0]) + # eero's edge correction: + s2_r = np.array([0.1, 0.1*0.3/0.3, 0.1*0.2/0.3, 0.0, 0.1, 0.0, 0.0]) - s2_l = np.array([0.1*0.1/0.3, 0.1*0.1/0.3, 0.1*0.2/0.3, 0.0, + s2_l = np.array([0.1, 0.1*0.3/0.3, 0.1*0.2/0.3, 0.0, 0.1, 0.0, 0.0]) isi1 = np.array([0.2, 0.2, 0.2, 0.2, 0.2, 0.4]) isi2 = np.array([0.3, 0.3, 0.3, 0.1, 0.1, 0.4]) @@ -345,7 +364,7 @@ def test_regression_spiky(): assert_equal(isi_profile.y, 0.1/1.1 * np.ones_like(isi_profile.y)) spike_dist = spk.spike_distance(st1, st2) - assert_equal(spike_dist, 2.1105878248735391e-01) + assert_equal(spike_dist, 0.211058782487353908) spike_sync = spk.spike_sync(st1, st2) assert_equal(spike_sync, 8.6956521739130432e-01) @@ -363,12 +382,31 @@ def test_regression_spiky(): spike_dist = spk.spike_distance_multi(spike_trains) # get the full precision from SPIKY - assert_almost_equal(spike_dist, 2.4432433330596512e-01, decimal=15) + assert_almost_equal(spike_dist, 0.25188056475463755, decimal=15) spike_sync = spk.spike_sync_multi(spike_trains) # get the full precision from SPIKY assert_equal(spike_sync, 0.7183531505298066) + # Eero's edge correction example + st1 = SpikeTrain([0.5, 1.5, 2.5], 6.0) + st2 = SpikeTrain([3.5, 4.5, 5.5], 6.0) + + f = spk.spike_profile(st1, st2) + + expected_times = np.array([0.0, 0.5, 1.5, 2.5, 3.5, 4.5, 5.5, 6.0]) + y_all = np.array([0.271604938271605, 0.271604938271605, 0.271604938271605, + 0.617283950617284, 0.617283950617284, 0.444444444444444, + 0.285714285714286, 0.285714285714286, 0.444444444444444, + 0.617283950617284, 0.617283950617284, 0.271604938271605, + 0.271604938271605, 0.271604938271605]) + expected_y1 = y_all[::2] + expected_y2 = y_all[1::2] + + assert_equal(f.x, expected_times) + assert_array_almost_equal(f.y1, expected_y1, decimal=14) + assert_array_almost_equal(f.y2, expected_y2, decimal=14) + def test_multi_variate_subsets(): spike_trains = spk.load_spike_trains_from_txt("test/PySpike_testdata.txt", diff --git a/test/test_empty.py b/test/test_empty.py index af7fb36..5a0042f 100644 --- a/test/test_empty.py +++ b/test/test_empty.py @@ -70,8 +70,8 @@ def test_spike_empty(): st1 = SpikeTrain([], edges=(0.0, 1.0)) st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0)) d = spk.spike_distance(st1, st2) - assert_almost_equal(d, 0.4*0.4*1.0/(0.4+1.0)**2 + 0.6*0.4*1.0/(0.6+1.0)**2, - decimal=15) + d_expect = 0.4*0.4*1.0/(0.4+1.0)**2 + 0.6*0.4*1.0/(0.6+1.0)**2 + assert_almost_equal(d, d_expect, decimal=15) prof = spk.spike_profile(st1, st2) assert_equal(d, prof.avrg()) assert_array_equal(prof.x, [0.0, 0.4, 1.0]) -- cgit v1.2.3 From 619fdef014c44a89a7ecef9078905ee44e373b84 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Fri, 18 Dec 2015 14:37:45 +0100 Subject: bugfix for edge correction fixed bug within new edge correction (auxiliary spike was ignored in some cases) added regression test with 10000 random spike train sets --- pyspike/cython/cython_distances.pyx | 10 +- pyspike/cython/python_backend.py | 21 +++- test/test_distance.py | 1 + .../regression_random_results_cSPIKY.mat | Bin 0 -> 149130 bytes test/test_regression/regression_random_spikes.mat | Bin 0 -> 16241579 bytes .../test_regression_random_spikes.py | 113 +++++++++++++++++++++ 6 files changed, 140 insertions(+), 5 deletions(-) create mode 100644 test/test_regression/regression_random_results_cSPIKY.mat create mode 100644 test/test_regression/regression_random_spikes.mat create mode 100644 test/test_regression/test_regression_random_spikes.py (limited to 
'pyspike/cython/cython_distances.pyx') diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index 41baa60..6c6e7e5 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -184,7 +184,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, N1 = len(t1) N2 = len(t2) - with nogil: # release the interpreter to allow multithreading + # with nogil: # release the interpreter to allow multithreading + if True: t_last = t_start # t_p1 = t_start # t_p2 = t_start @@ -193,6 +194,7 @@ def spike_distance_cython(double[:] t1, double[:] t2, t_aux1[1] = fmax(t_end, t1[N1-1]+(t1[N1-1]-t1[N1-2])) t_aux2[0] = fmin(t_start, t2[0]-(t2[1]-t2[0])) t_aux2[1] = fmax(t_end, t2[N2-1]+(t2[N2-1]-t2[N2-2])) + # print "aux spikes %.15f, %.15f ; %.15f, %.15f" % (t_aux1[0], t_aux1[1], t_aux2[0], t_aux2[1]) t_p1 = t_start if (t1[0] == t_start) else t_aux1[0] t_p2 = t_start if (t2[0] == t_start) else t_aux2[0] if t1[0] > t_start: @@ -207,7 +209,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, else: t_f1 = t1[1] dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1]) - dt_p1 = 0.0 + # dt_p1 = t_start-t_p2 # 0.0 + dt_p1 = get_min_dist_cython(t_p1, t2, N2, 0, t_aux2[0], t_aux2[1]) isi1 = t1[1]-t1[0] s1 = dt_p1 index1 = 0 @@ -223,7 +226,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, else: t_f2 = t2[1] dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1]) - dt_p2 = 0.0 + # dt_p2 = t_start-t_p1 # 0.0 + dt_p2 = get_min_dist_cython(t_p2, t1, N1, 0, t_aux1[0], t_aux1[1]) isi2 = t2[1]-t2[0] s2 = dt_p2 index2 = 0 diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index a5f7ae4..a007a7f 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -153,6 +153,8 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): t_p1 = t_start if (t1[0] == t_start) else t_aux1[0] t_p2 = t_start if (t2[0] == t_start) else t_aux2[0] + print "t_aux1", t_aux1, ", t_aux2:", t_aux2 + spike_events[0] = t_start if t1[0] > t_start: t_f1 = t1[0] @@ -163,7 +165,8 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): s1 = dt_p1 index1 = -1 else: - dt_p1 = 0.0 + # dt_p1 = t_start-t_p2 + dt_p1 = get_min_dist(t_p1, t2, 0, t_aux2[0], t_aux2[1]) t_f1 = t1[1] dt_f1 = get_min_dist(t_f1, t2, 0, t_aux2[0], t_aux2[1]) isi1 = t1[1]-t1[0] @@ -179,13 +182,18 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): s2 = dt_p2 index2 = -1 else: - dt_p2 = 0.0 + dt_p2 = t_start-t_p1 + dt_p2 = get_min_dist(t_p2, t1, 0, t_aux1[0], t_aux1[1]) t_f2 = t2[1] dt_f2 = get_min_dist(t_f2, t1, 0, t_aux1[0], t_aux1[1]) isi2 = t2[1]-t2[0] s2 = dt_p2 index2 = 0 + print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) + print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) + print "s1: ", repr(s1), ", s2:", repr(s2) + y_starts[0] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) index = 1 @@ -276,6 +284,11 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): dt_f2 = dt_p2 isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) index += 1 + + print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) + print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) + print "s1: ", repr(s1), ", s2:", repr(s2) + # the last event is the interval end if spike_events[index-1] == t_end: index -= 1 @@ -288,6 +301,10 @@ def 
spike_distance_python(spikes1, spikes2, t_start, t_end): s2 = dt_f2 # *(t_end-t2[N2-1])/isi2 y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) + print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) + print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) + print "s1: ", repr(s1), ", s2:", repr(s2) + # use only the data added above # could be less than original length due to equal spike times return spike_events[:index+1], y_starts[:index], y_ends[:index] diff --git a/test/test_distance.py b/test/test_distance.py index 8cf81e2..083d8a3 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -20,6 +20,7 @@ from pyspike import SpikeTrain import os TEST_PATH = os.path.dirname(os.path.realpath(__file__)) + def test_isi(): # generate two spike trains: t1 = SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0) diff --git a/test/test_regression/regression_random_results_cSPIKY.mat b/test/test_regression/regression_random_results_cSPIKY.mat new file mode 100644 index 0000000..47c9b50 Binary files /dev/null and b/test/test_regression/regression_random_results_cSPIKY.mat differ diff --git a/test/test_regression/regression_random_spikes.mat b/test/test_regression/regression_random_spikes.mat new file mode 100644 index 0000000..e5ebeb1 Binary files /dev/null and b/test/test_regression/regression_random_spikes.mat differ diff --git a/test/test_regression/test_regression_random_spikes.py b/test/test_regression/test_regression_random_spikes.py new file mode 100644 index 0000000..757bab2 --- /dev/null +++ b/test/test_regression/test_regression_random_spikes.py @@ -0,0 +1,113 @@ +""" regression benchmark + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License +""" +import numpy as np +from scipy.io import loadmat +import pyspike as spk + +from numpy.testing import assert_almost_equal + +spk.disable_backend_warning = True + + +def test_regression_random(): + + spike_file = "test/test_regression/regression_random_spikes.mat" + spikes_name = "spikes" + result_name = "Distances" + result_file = "test/test_regression/regression_random_results_cSPIKY.mat" + + spike_train_sets = loadmat(spike_file)[spikes_name][0] + results_cSPIKY = loadmat(result_file)[result_name] + + for i, spike_train_data in enumerate(spike_train_sets): + spike_trains = [] + for spikes in spike_train_data[0]: + spike_trains.append(spk.SpikeTrain(spikes.flatten(), 100.0)) + + isi = spk.isi_distance_multi(spike_trains) + spike = spk.spike_distance_multi(spike_trains) + # spike_sync = spk.spike_sync_multi(spike_trains) + + assert_almost_equal(isi, results_cSPIKY[i][0], decimal=14, + err_msg="Index: %d, ISI" % i) + assert_almost_equal(spike, results_cSPIKY[i][1], decimal=14, + err_msg="Index: %d, SPIKE" % i) + + +def check_regression_dataset(spike_file="benchmark.mat", + spikes_name="spikes", + result_file="results_cSPIKY.mat", + result_name="Distances"): + """ Debuging function """ + np.set_printoptions(precision=15) + + spike_train_sets = loadmat(spike_file)[spikes_name][0] + + results_cSPIKY = loadmat(result_file)[result_name] + + err_max = 0.0 + err_max_ind = -1 + err_count = 0 + + for i, spike_train_data in enumerate(spike_train_sets): + spike_trains = [] + for spikes in spike_train_data[0]: + spike_trains.append(spk.SpikeTrain(spikes.flatten(), 100.0)) + + isi = spk.isi_distance_multi(spike_trains) + spike = spk.spike_distance_multi(spike_trains) + # spike_sync = spk.spike_sync_multi(spike_trains) + + if abs(isi - 
results_cSPIKY[i][0]) > 1E-14: + print "Error in ISI:", i, isi, results_cSPIKY[i][0] + print "Spike trains:" + for st in spike_trains: + print st.spikes + + err = abs(spike - results_cSPIKY[i][1]) + if err > 1E-14: + err_count += 1 + if err > err_max: + err_max = err + err_max_ind = i + + print "Total Errors:", err_count + + if err_max_ind > -1: + print "Max SPIKE distance error:", err_max, "at index:", err_max_ind + spike_train_data = spike_train_sets[err_max_ind] + for spikes in spike_train_data[0]: + print spikes.flatten() + + +def check_single_spike_train_set(index): + """ Debuging function """ + np.set_printoptions(precision=15) + + spike_train_sets = loadmat("benchmark.mat")['spikes'][0] + + results_cSPIKY = loadmat("results_cSPIKY.mat")['Distances'] + + spike_train_data = spike_train_sets[index] + + spike_trains = [] + for spikes in spike_train_data[0]: + print "Spikes:", spikes.flatten() + spike_trains.append(spk.SpikeTrain(spikes.flatten(), 100.0)) + + print spk.spike_distance_multi(spike_trains) + + print results_cSPIKY[index][1] + + print spike_trains[1].spikes + + +if __name__ == "__main__": + + test_regression_random() + # check_regression_dataset() + # check_single_spike_train_set(7633) -- cgit v1.2.3 From e32272f4540de347abcc548a94239b625458b3a6 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 22 Dec 2015 18:15:59 +0100 Subject: changed edge correction for single spikes Spike trains with single spikes now only get auxiliary spikes at the edges for the SPIKE distance instead of real spikes before. --- pyspike/SpikeTrain.py | 2 +- pyspike/cython/cython_distances.pyx | 89 ++++++++++++++++++++++--------------- pyspike/cython/cython_profiles.pyx | 67 +++++++++++++++------------- pyspike/cython/python_backend.py | 68 +++++++++++++--------------- test/test_empty.py | 16 ++++--- 5 files changed, 130 insertions(+), 112 deletions(-) (limited to 'pyspike/cython/cython_distances.pyx') diff --git a/pyspike/SpikeTrain.py b/pyspike/SpikeTrain.py index 4b59a5d..19f2419 100644 --- a/pyspike/SpikeTrain.py +++ b/pyspike/SpikeTrain.py @@ -68,7 +68,7 @@ class SpikeTrain(object): """Returns the spikes of this spike train with auxiliary spikes in case of empty spike trains. """ - if len(self.spikes) < 2: + if len(self.spikes) < 1: return np.unique(np.insert([self.t_start, self.t_end], 1, self.spikes)) else: diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index 6c6e7e5..7dc9cb9 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -55,20 +55,27 @@ def isi_distance_cython(double[:] s1, double[:] s2, N2 = len(s2) # first interspike interval - check if a spike exists at the start time + # and also account for spike trains with single spikes if s1[0] > t_start: - # edge correction - nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) + # edge correction for the first interspike interval: + # take the maximum of the distance from the beginning to the first + # spike and the interval between the first two spikes. + # if there is only one spike, take the its distance to the beginning + nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) if N1 > 1 else s1[0]-t_start index1 = -1 else: - nu1 = s1[1]-s1[0] + # if the first spike is exactly at the start, take the distance + # to the next spike. If this is the only spike, take the distance to + # the end. 
+ nu1 = s1[1]-s1[0] if N1 > 1 else t_end-s1[0] index1 = 0 if s2[0] > t_start: - # edge correction - nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) + # edge correction as above + nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) if N2 > 1 else s2[0]-t_start index2 = -1 else: - nu2 = s2[1]-s2[0] + nu2 = s2[1]-s2[0] if N2 > 1 else t_end-s2[0] index2 = 0 last_t = t_start @@ -86,8 +93,12 @@ def isi_distance_cython(double[:] s1, double[:] s2, if index1 < N1-1: nu1 = s1[index1+1]-s1[index1] else: - # edge correction - nu1 = fmax(t_end-s1[index1], nu1) + # edge correction for the last ISI: + # take the max of the distance of the last + # spike to the end and the previous ISI. If there was only + # one spike, always take the distance to the end. + nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \ + else t_end-s1[index1] elif (index2 < N2-1) and ((index1 == N1-1) or (s1[index1+1] > s2[index2+1])): index2 += 1 @@ -95,8 +106,9 @@ def isi_distance_cython(double[:] s1, double[:] s2, if index2 < N2-1: nu2 = s2[index2+1]-s2[index2] else: - # edge correction - nu2 = fmax(t_end-s2[index2], nu2) + # edge correction for the end as above + nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \ + else t_end-s2[index2] else: # s1[index1+1] == s2[index2+1] index1 += 1 index2 += 1 @@ -104,13 +116,15 @@ def isi_distance_cython(double[:] s1, double[:] s2, if index1 < N1-1: nu1 = s1[index1+1]-s1[index1] else: - # edge correction - nu1 = fmax(t_end-s1[index1], nu1) + # edge correction for the end as above + nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \ + else t_end-s1[index1] if index2 < N2-1: nu2 = s2[index2+1]-s2[index2] else: - # edge correction - nu2 = fmax(t_end-s2[index2], nu2) + # edge correction for the end as above + nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \ + else t_end-s2[index2] # compute the corresponding isi-distance isi_value += curr_isi * (curr_t - last_t) curr_isi = fabs(nu1 - nu2) / fmax(nu1, nu2) @@ -184,16 +198,18 @@ def spike_distance_cython(double[:] t1, double[:] t2, N1 = len(t1) N2 = len(t2) - # with nogil: # release the interpreter to allow multithreading - if True: + # we can assume at least one spikes per spike train + assert N1 > 0 + assert N2 > 0 + + + with nogil: # release the interpreter to allow multithreading t_last = t_start - # t_p1 = t_start - # t_p2 = t_start # auxiliary spikes for edge correction - consistent with first/last ISI - t_aux1[0] = fmin(t_start, t1[0]-(t1[1]-t1[0])) - t_aux1[1] = fmax(t_end, t1[N1-1]+(t1[N1-1]-t1[N1-2])) - t_aux2[0] = fmin(t_start, t2[0]-(t2[1]-t2[0])) - t_aux2[1] = fmax(t_end, t2[N2-1]+(t2[N2-1]-t2[N2-2])) + t_aux1[0] = fmin(t_start, 2*t1[0]-t1[1]) if N1 > 1 else t_start + t_aux1[1] = fmax(t_end, 2*t1[N1-1]-t1[N1-2]) if N1 > 1 else t_end + t_aux2[0] = fmin(t_start, 2*t2[0]-t2[1]) if N2 > 1 else t_start + t_aux2[1] = fmax(t_end, 2*t2[N2-1]+-t2[N2-2]) if N2 > 1 else t_end # print "aux spikes %.15f, %.15f ; %.15f, %.15f" % (t_aux1[0], t_aux1[1], t_aux2[0], t_aux2[1]) t_p1 = t_start if (t1[0] == t_start) else t_aux1[0] t_p2 = t_start if (t2[0] == t_start) else t_aux2[0] @@ -201,17 +217,16 @@ def spike_distance_cython(double[:] t1, double[:] t2, # dt_p1 = t2[0]-t_start t_f1 = t1[0] dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1]) - isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) + isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) if N1 > 1 else t_f1-t_start dt_p1 = dt_f1 # s1 = dt_p1*(t_f1-t_start)/isi1 s1 = dt_p1 index1 = -1 - else: - t_f1 = t1[1] + else: # t1[0] == t_start + t_f1 = t1[1] if N1 > 1 else t_end dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1]) - # dt_p1 
= t_start-t_p2 # 0.0 dt_p1 = get_min_dist_cython(t_p1, t2, N2, 0, t_aux2[0], t_aux2[1]) - isi1 = t1[1]-t1[0] + isi1 = t_f1-t1[0] s1 = dt_p1 index1 = 0 if t2[0] > t_start: @@ -219,16 +234,16 @@ def spike_distance_cython(double[:] t1, double[:] t2, t_f2 = t2[0] dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1]) dt_p2 = dt_f2 - isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) + isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) if N2 > 1 else t_f2-t_start # s2 = dt_p2*(t_f2-t_start)/isi2 s2 = dt_p2 index2 = -1 - else: - t_f2 = t2[1] + else: # t2[0] == t_start + t_f2 = t2[1] if N2 > 1 else t_end dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1]) # dt_p2 = t_start-t_p1 # 0.0 dt_p2 = get_min_dist_cython(t_p2, t1, N1, 0, t_aux1[0], t_aux1[1]) - isi2 = t2[1]-t2[0] + isi2 = t_f2-t2[0] s2 = dt_p2 index2 = 0 @@ -263,7 +278,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, s1 = dt_p1 else: dt_f1 = dt_p1 - isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \ + else t_end-t1[N1-1] # s1 needs adjustment due to change of isi1 # s1 = dt_p1*(t_end-t1[N1-1])/isi1 # Eero's correction: no adjustment @@ -296,7 +312,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, s2 = dt_p2 else: dt_f2 = dt_p2 - isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \ + else t_end-t2[N2-1] # s2 needs adjustment due to change of isi2 # s2 = dt_p2*(t_end-t2[N2-1])/isi2 # Eero's correction: no adjustment @@ -322,7 +339,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, else: t_f1 = t_aux1[1] dt_f1 = dt_p1 - isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \ + else t_end-t1[N1-1] if index2 < N2-1: t_f2 = t2[index2+1] dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, @@ -331,7 +349,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, else: t_f2 = t_aux2[1] dt_f2 = dt_p2 - isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \ + else t_end-t2[N2-1] index += 1 t_last = t_curr # isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index f08de6e..4a42cdb 100644 --- a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -63,18 +63,18 @@ def isi_profile_cython(double[:] s1, double[:] s2, # first interspike interval - check if a spike exists at the start time if s1[0] > t_start: # edge correction - nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) + nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) if N1 > 1 else s1[0]-t_start index1 = -1 else: - nu1 = s1[1]-s1[0] + nu1 = s1[1]-s1[0] if N1 > 1 else t_end-s1[0] index1 = 0 if s2[0] > t_start: # edge correction - nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) + nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) if N2 > 1 else s2[0]-t_start index2 = -1 else: - nu2 = s2[1]-s2[0] + nu2 = s2[1]-s2[0] if N2 > 1 else t_end-s2[0] index2 = 0 isi_values[0] = fabs(nu1-nu2)/fmax(nu1, nu2) @@ -92,7 +92,8 @@ def isi_profile_cython(double[:] s1, double[:] s2, nu1 = s1[index1+1]-s1[index1] else: # edge correction - nu1 = fmax(t_end-s1[index1], nu1) + nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \ + else t_end-s1[index1] elif (index2 < N2-1) and ((index1 == N1-1) or (s1[index1+1] > s2[index2+1])): index2 += 1 @@ -101,7 +102,8 @@ def isi_profile_cython(double[:] s1, double[:] s2, nu2 = s2[index2+1]-s2[index2] else: # edge correction - nu2 = fmax(t_end-s2[index2], nu2) + nu2 = 
fmax(t_end-s2[index2], nu2) if N2 > 1 \ + else t_end-s2[index2] else: # s1[index1+1] == s2[index2+1] index1 += 1 index2 += 1 @@ -110,12 +112,14 @@ def isi_profile_cython(double[:] s1, double[:] s2, nu1 = s1[index1+1]-s1[index1] else: # edge correction - nu1 = fmax(t_end-s1[index1], nu1) + nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \ + else t_end-s1[index1] if index2 < N2-1: nu2 = s2[index2+1]-s2[index2] else: # edge correction - nu2 = fmax(t_end-s2[index2], nu2) + nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \ + else t_end-s2[index2] # compute the corresponding isi-distance isi_values[index] = fabs(nu1 - nu2) / fmax(nu1, nu2) index += 1 @@ -191,9 +195,9 @@ def spike_profile_cython(double[:] t1, double[:] t2, N1 = len(t1) N2 = len(t2) - # we can assume at least two spikes per spike train - assert N1 > 1 - assert N2 > 1 + # we can assume at least one spike per spike train + assert N1 > 0 + assert N2 > 0 spike_events = np.empty(N1+N2+2) @@ -205,26 +209,26 @@ def spike_profile_cython(double[:] t1, double[:] t2, # t_p1 = t_start # t_p2 = t_start # auxiliary spikes for edge correction - consistent with first/last ISI - t_aux1[0] = fmin(t_start, t1[0]-(t1[1]-t1[0])) - t_aux1[1] = fmax(t_end, t1[N1-1]+(t1[N1-1]-t1[N1-2])) - t_aux2[0] = fmin(t_start, t2[0]-(t2[1]-t2[0])) - t_aux2[1] = fmax(t_end, t2[N2-1]+(t2[N2-1]-t2[N2-2])) + t_aux1[0] = fmin(t_start, 2*t1[0]-t1[1]) if N1 > 1 else t_start + t_aux1[1] = fmax(t_end, 2*t1[N1-1]-t1[N1-2]) if N1 > 1 else t_end + t_aux2[0] = fmin(t_start, 2*t2[0]-t2[1]) if N2 > 1 else t_start + t_aux2[1] = fmax(t_end, 2*t2[N2-1]-t2[N2-2]) if N2 > 1 else t_end t_p1 = t_start if (t1[0] == t_start) else t_aux1[0] t_p2 = t_start if (t2[0] == t_start) else t_aux2[0] if t1[0] > t_start: # dt_p1 = t2[0]-t_start t_f1 = t1[0] dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1]) - isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) + isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) if N1 > 1 else t_f1-t_start dt_p1 = dt_f1 # s1 = dt_p1*(t_f1-t_start)/isi1 s1 = dt_p1 index1 = -1 else: - t_f1 = t1[1] + t_f1 = t1[1] if N1 > 1 else t_end dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1]) - dt_p1 = 0.0 - isi1 = t1[1]-t1[0] + dt_p1 = get_min_dist_cython(t_p1, t2, N2, 0, t_aux2[0], t_aux2[1]) + isi1 = t_f1-t1[0] s1 = dt_p1 index1 = 0 if t2[0] > t_start: @@ -232,15 +236,15 @@ def spike_profile_cython(double[:] t1, double[:] t2, t_f2 = t2[0] dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1]) dt_p2 = dt_f2 - isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) + isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) if N2 > 1 else t_f2-t_start # s2 = dt_p2*(t_f2-t_start)/isi2 s2 = dt_p2 index2 = -1 else: - t_f2 = t2[1] + t_f2 = t2[1] if N2 > 1 else t_end dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1]) - dt_p2 = 0.0 - isi2 = t2[1]-t2[0] + dt_p2 = get_min_dist_cython(t_p2, t1, N1, 0, t_aux1[0], t_aux1[1]) + isi2 = t_f2-t2[0] s2 = dt_p2 index2 = 0 @@ -273,7 +277,8 @@ def spike_profile_cython(double[:] t1, double[:] t2, s1 = dt_p1 else: dt_f1 = dt_p1 - isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \ + else t_end-t1[N1-1] # s1 needs adjustment due to change of isi1 # s1 = dt_p1*(t_end-t1[N1-1])/isi1 # Eero's correction: no adjustment @@ -305,7 +310,8 @@ def spike_profile_cython(double[:] t1, double[:] t2, s2 = dt_p2 else: dt_f2 = dt_p2 - isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \ + else t_end-t2[N2-1] # s2 needs adjustment due to change of isi2 # s2 =
dt_p2*(t_end-t2[N2-1])/isi2 # Eero's correction: no adjustment @@ -330,7 +336,8 @@ def spike_profile_cython(double[:] t1, double[:] t2, else: t_f1 = t_aux1[1] dt_f1 = dt_p1 - isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \ + else t_end-t1[N1-1] if index2 < N2-1: t_f2 = t2[index2+1] dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, @@ -339,18 +346,14 @@ def spike_profile_cython(double[:] t1, double[:] t2, else: t_f2 = t_aux2[1] dt_f2 = dt_p2 - isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \ + else t_end-t2[N2-1] index += 1 # the last event is the interval end if spike_events[index-1] == t_end: index -= 1 else: spike_events[index] = t_end - # the ending value of the last interval - isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) - isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) - # s1 = dt_f1*(t_end-t1[N1-1])/isi1 - # s2 = dt_f2*(t_end-t2[N2-1])/isi2 s1 = dt_f1 s2 = dt_f2 y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index cf898d7..a1e3a65 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -28,17 +28,17 @@ def isi_distance_python(s1, s2, t_start, t_end): isi_values = np.empty(len(spike_events) - 1) if s1[0] > t_start: # edge correction - nu1 = max(s1[0] - t_start, s1[1] - s1[0]) + nu1 = max(s1[0] - t_start, s1[1] - s1[0]) if N1 > 1 else s1[0]-t_start index1 = -1 else: - nu1 = s1[1] - s1[0] + nu1 = s1[1] - s1[0] if N1 > 1 else t_end-s1[0] index1 = 0 if s2[0] > t_start: # edge correction - nu2 = max(s2[0] - t_start, s2[1] - s2[0]) + nu2 = max(s2[0] - t_start, s2[1] - s2[0]) if N2 > 1 else s2[0]-t_start index2 = -1 else: - nu2 = s2[1] - s2[0] + nu2 = s2[1] - s2[0] if N2 > 1 else t_end-s2[0] index2 = 0 isi_values[0] = abs(nu1 - nu2) / max(nu1, nu2) @@ -52,7 +52,8 @@ def isi_distance_python(s1, s2, t_start, t_end): nu1 = s1[index1+1]-s1[index1] else: # edge correction - nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2]) + nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2]) if N1 > 1 \ + else t_end-s1[N1-1] elif (index2 < N2-1) and (index1 == N1-1 or s1[index1+1] > s2[index2+1]): @@ -62,7 +63,8 @@ def isi_distance_python(s1, s2, t_start, t_end): nu2 = s2[index2+1]-s2[index2] else: # edge correction - nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2]) + nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2]) if N2 > 1 \ + else t_end-s2[N2-1] else: # s1[index1 + 1] == s2[index2 + 1] index1 += 1 @@ -72,12 +74,14 @@ def isi_distance_python(s1, s2, t_start, t_end): nu1 = s1[index1+1]-s1[index1] else: # edge correction - nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2]) + nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2]) if N1 > 1 \ + else t_end-s1[N1-1] if index2 < N2-1: nu2 = s2[index2+1]-s2[index2] else: # edge correction - nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2]) + nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2]) if N2 > 1 \ + else t_end-s2[N2-1] # compute the corresponding isi-distance isi_values[index] = abs(nu1 - nu2) / \ max(nu1, nu2) @@ -146,10 +150,10 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): t_aux1 = np.zeros(2) t_aux2 = np.zeros(2) - t_aux1[0] = min(t_start, t1[0]-(t1[1]-t1[0])) - t_aux1[1] = max(t_end, t1[N1-1]+(t1[N1-1]-t1[N1-2])) - t_aux2[0] = min(t_start, t2[0]-(t2[1]-t2[0])) - t_aux2[1] = max(t_end, t2[N2-1]+(t2[N2-1]-t2[N2-2])) + t_aux1[0] = min(t_start, t1[0]-(t1[1]-t1[0])) if N1 > 1 else t_start + t_aux1[1] = max(t_end, t1[N1-1]+(t1[N1-1]-t1[N1-2])) 
if N1 > 1 else t_end + t_aux2[0] = min(t_start, t2[0]-(t2[1]-t2[0])) if N2 > 1 else t_start + t_aux2[1] = max(t_end, t2[N2-1]+(t2[N2-1]-t2[N2-2])) if N2 > 1 else t_end t_p1 = t_start if (t1[0] == t_start) else t_aux1[0] t_p2 = t_start if (t2[0] == t_start) else t_aux2[0] @@ -160,16 +164,16 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): t_f1 = t1[0] dt_f1 = get_min_dist(t_f1, t2, 0, t_aux2[0], t_aux2[1]) dt_p1 = dt_f1 - isi1 = max(t_f1-t_start, t1[1]-t1[0]) + isi1 = max(t_f1-t_start, t1[1]-t1[0]) if N1 > 1 else t_f1-t_start # s1 = dt_p1*(t_f1-t_start)/isi1 s1 = dt_p1 index1 = -1 else: # dt_p1 = t_start-t_p2 + t_f1 = t1[1] if N1 > 1 else t_end dt_p1 = get_min_dist(t_p1, t2, 0, t_aux2[0], t_aux2[1]) - t_f1 = t1[1] dt_f1 = get_min_dist(t_f1, t2, 0, t_aux2[0], t_aux2[1]) - isi1 = t1[1]-t1[0] + isi1 = t_f1-t1[0] s1 = dt_p1 index1 = 0 if t2[0] > t_start: @@ -177,23 +181,18 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): t_f2 = t2[0] dt_f2 = get_min_dist(t_f2, t1, 0, t_aux1[0], t_aux1[1]) dt_p2 = dt_f2 - isi2 = max(t_f2-t_start, t2[1]-t2[0]) + isi2 = max(t_f2-t_start, t2[1]-t2[0]) if N2 > 1 else t_f2-t_start # s2 = dt_p2*(t_f2-t_start)/isi2 s2 = dt_p2 index2 = -1 else: - dt_p2 = t_start-t_p1 + t_f2 = t2[1] if N2 > 1 else t_end dt_p2 = get_min_dist(t_p2, t1, 0, t_aux1[0], t_aux1[1]) - t_f2 = t2[1] dt_f2 = get_min_dist(t_f2, t1, 0, t_aux1[0], t_aux1[1]) - isi2 = t2[1]-t2[0] + isi2 = t_f2-t2[0] s2 = dt_p2 index2 = 0 - # print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) - # print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) - # print "s1: ", repr(s1), ", s2:", repr(s2) - y_starts[0] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) index = 1 @@ -221,7 +220,8 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): s1 = dt_p1 else: dt_f1 = dt_p1 - isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \ + else t_end-t1[N1-1] # s1 needs adjustment due to change of isi1 # s1 = dt_p1*(t_end-t1[N1-1])/isi1 # Eero's correction: no adjustment @@ -250,7 +250,8 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): s2 = dt_p2 else: dt_f2 = dt_p2 - isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \ + else t_end-t2[N2-1] # s2 needs adjustment due to change of isi2 # s2 = dt_p2*(t_end-t2[N2-1])/isi2 # Eero's adjustment: no correction @@ -274,7 +275,8 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): else: t_f1 = t_aux1[1] dt_f1 = dt_p1 - isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \ + else t_end-t1[N1-1] if index2 < N2-1: t_f2 = t2[index2+1] dt_f2 = get_min_dist(t_f2, t1, index1, t_aux1[0], t_aux1[1]) @@ -282,29 +284,19 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): else: t_f2 = t_aux2[1] dt_f2 = dt_p2 - isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \ + else t_end-t2[N2-1] index += 1 - # print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) - # print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) - # print "s1: ", repr(s1), ", s2:", repr(s2) - # the last event is the interval end if spike_events[index-1] == t_end: index -= 1 else: spike_events[index] = t_end - # the ending value of the last interval - isi1 = max(t_end-t1[N1-1], 
t1[N1-1]-t1[N1-2]) - isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) s1 = dt_f1 # *(t_end-t1[N1-1])/isi1 s2 = dt_f2 # *(t_end-t2[N2-1])/isi2 y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) - # print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) - # print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) - # print "s1: ", repr(s1), ", s2:", repr(s2) - # use only the data added above # could be less than original length due to equal spike times return spike_events[:index+1], y_starts[:index], y_ends[:index] diff --git a/test/test_empty.py b/test/test_empty.py index 5a0042f..4d0a5cf 100644 --- a/test/test_empty.py +++ b/test/test_empty.py @@ -24,7 +24,9 @@ def test_get_non_empty(): st = SpikeTrain([0.5, ], edges=(0.0, 1.0)) spikes = st.get_spikes_non_empty() - assert_array_equal(spikes, [0.0, 0.5, 1.0]) + # assert_array_equal(spikes, [0.0, 0.5, 1.0]) + # spike trains with one spike don't get edge spikes anymore + assert_array_equal(spikes, [0.5, ]) def test_isi_empty(): @@ -70,21 +72,23 @@ def test_spike_empty(): st1 = SpikeTrain([], edges=(0.0, 1.0)) st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0)) d = spk.spike_distance(st1, st2) - d_expect = 0.4*0.4*1.0/(0.4+1.0)**2 + 0.6*0.4*1.0/(0.6+1.0)**2 + d_expect = 2*0.4*0.4*1.0/(0.4+1.0)**2 + 2*0.6*0.4*1.0/(0.6+1.0)**2 assert_almost_equal(d, d_expect, decimal=15) prof = spk.spike_profile(st1, st2) assert_equal(d, prof.avrg()) assert_array_equal(prof.x, [0.0, 0.4, 1.0]) - assert_array_almost_equal(prof.y1, [0.0, 2*0.4*1.0/(0.6+1.0)**2], + assert_array_almost_equal(prof.y1, [2*0.4*1.0/(0.4+1.0)**2, + 2*0.4*1.0/(0.6+1.0)**2], decimal=15) - assert_array_almost_equal(prof.y2, [2*0.4*1.0/(0.4+1.0)**2, 0.0], + assert_array_almost_equal(prof.y2, [2*0.4*1.0/(0.4+1.0)**2, + 2*0.4*1.0/(0.6+1.0)**2], decimal=15) st1 = SpikeTrain([0.6, ], edges=(0.0, 1.0)) st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0)) d = spk.spike_distance(st1, st2) - s1 = np.array([0.0, 0.4*0.2/0.6, 0.2, 0.0]) - s2 = np.array([0.0, 0.2, 0.2*0.4/0.6, 0.0]) + s1 = np.array([0.2, 0.2, 0.2, 0.2]) + s2 = np.array([0.2, 0.2, 0.2, 0.2]) isi1 = np.array([0.6, 0.6, 0.4]) isi2 = np.array([0.4, 0.6, 0.6]) expected_y1 = (s1[:-1]*isi2+s2[:-1]*isi1) / (0.5*(isi1+isi2)**2) -- cgit v1.2.3
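The edge corrections in this commit make spike trains with zero or one spike valid inputs for the ISI- and SPIKE-distance backends. A minimal usage sketch of the new behavior (not part of the patch itself; it assumes only the public API exercised in test_empty.py above):

.. code:: python

    import pyspike as spk
    from pyspike import SpikeTrain

    # single-spike trains: the first/last ISI now falls back to the plain
    # distance from the spike to the interval edge (the "if N1 > 1" /
    # "if N2 > 1" branches above) instead of an undefined neighboring ISI
    st1 = SpikeTrain([0.6, ], edges=(0.0, 1.0))
    st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0))
    print(spk.isi_distance(st1, st2))
    print(spk.spike_distance(st1, st2))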
From b09561705ab9c67c93a384248f7c3bc9ad5bdd32 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 3 Feb 2016 13:07:26 +0100 Subject: fixed spike-sync bug fixed ugly bugs in code for computing multi-variate spike sync profile and multi-variate spike sync value. --- pyspike/DiscreteFunc.py | 9 ++++---- pyspike/cython/cython_add.pyx | 33 ++++++++++++++------------- pyspike/cython/cython_distances.pyx | 5 ---- pyspike/cython/python_backend.py | 33 +++++++++++++++------------ pyspike/spike_sync.py | 10 ++++++-- test/numeric/test_regression_random_spikes.py | 27 +++++++++++++++------- test/test_distance.py | 33 +++++++++++++++++++++++++++ 7 files changed, 99 insertions(+), 51 deletions(-) (limited to 'pyspike/cython/cython_distances.pyx') diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py index fe97bc2..caad290 100644 --- a/pyspike/DiscreteFunc.py +++ b/pyspike/DiscreteFunc.py @@ -170,10 +170,6 @@ expected." start_ind, end_ind = get_indices(ival) value += np.sum(self.y[start_ind:end_ind]) multiplicity += np.sum(self.mp[start_ind:end_ind]) - if multiplicity == 0.0: - # empty profile, return spike sync of 1 - value = 1.0 - multiplicity = 1.0 return (value, multiplicity) def avrg(self, interval=None, normalize=True): @@ -190,7 +186,10 @@ expected." """ val, mp = self.integral(interval) if normalize: - return val/mp + if mp > 0: + return val/mp + else: + return 1.0 else: return val diff --git a/pyspike/cython/cython_add.pyx b/pyspike/cython/cython_add.pyx index 8da1e53..25f1181 100644 --- a/pyspike/cython/cython_add.pyx +++ b/pyspike/cython/cython_add.pyx @@ -182,8 +182,8 @@ def add_discrete_function_cython(double[:] x1, double[:] y1, double[:] mp1, cdef int index1 = 0 cdef int index2 = 0 cdef int index = 0 - cdef int N1 = len(y1) - cdef int N2 = len(y2) + cdef int N1 = len(y1)-1 + cdef int N2 = len(y2)-1 x_new[0] = x1[0] while (index1+1 < N1) and (index2+1 < N2): if x1[index1+1] < x2[index2+1]: @@ -206,20 +206,21 @@ def add_discrete_function_cython(double[:] x1, double[:] y1, double[:] mp1, y_new[index] = y1[index1] + y2[index2] mp_new[index] = mp1[index1] + mp2[index2] # one array reached the end -> copy the contents of the other to the end - if index1+1 < len(y1): - x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] - y_new[index+1:index+1+len(x1)-index1-1] = y1[index1+1:] - mp_new[index+1:index+1+len(x1)-index1-1] = mp1[index1+1:] - index += len(x1)-index1-1 - elif index2+1 < len(y2): - x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] - y_new[index+1:index+1+len(x2)-index2-1] = y2[index2+1:] - mp_new[index+1:index+1+len(x2)-index2-1] = mp2[index2+1:] - index += len(x2)-index2-1 - # else: # both arrays reached the end simultaneously - # x_new[index+1] = x1[-1] - # y_new[index+1] = y1[-1] + y2[-1] - # mp_new[index+1] = mp1[-1] + mp2[-1] + if index1+1 < N1: + x_new[index+1:index+1+N1-index1] = x1[index1+1:] + y_new[index+1:index+1+N1-index1] = y1[index1+1:] + mp_new[index+1:index+1+N1-index1] = mp1[index1+1:] + index += N1-index1 + elif index2+1 < N2: + x_new[index+1:index+1+N2-index2] = x2[index2+1:] + y_new[index+1:index+1+N2-index2] = y2[index2+1:] + mp_new[index+1:index+1+N2-index2] = mp2[index2+1:] + index += N2-index2 + else: # both arrays reached the end simultaneously + x_new[index+1] = x1[index1+1] + y_new[index+1] = y1[index1+1] + y2[index2+1] + mp_new[index+1] = mp1[index1+1] + mp2[index2+1] + index += 1 y_new[0] = y_new[1] mp_new[0] = mp_new[1] diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index 7dc9cb9..ac5f226 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -428,9 +428,4 @@ def coincidence_value_cython(double[:] spikes1, double[:] spikes2, mp += 2 coinc += 2 - if coinc == 0 and mp == 0: - # empty spike trains -> spike sync = 1 by definition - coinc = 1 - mp = 1 - return coinc, mp diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index a1e3a65..6b7209a 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -560,7 +560,9 @@ def add_discrete_function_python(x1, y1, mp1, x2, y2, mp2): index1 = 0 index2 = 0 index = 0 - while (index1+1 < len(y1)) and (index2+1 < len(y2)): + N1 = len(x1)-1 + N2 = len(x2)-1 + while (index1+1 < N1) and (index2+1 < N2): if x1[index1+1] < x2[index2+1]: index1 += 1 index += 1 @@ -581,20 +583,21 @@ def add_discrete_function_python(x1, y1, mp1, x2, y2, mp2): y_new[index] = y1[index1] + y2[index2]
mp_new[index] = mp1[index1] + mp2[index2] # one array reached the end -> copy the contents of the other to the end - if index1+1 < len(y1): - x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] - y_new[index+1:index+1+len(x1)-index1-1] = y1[index1+1:] - mp_new[index+1:index+1+len(x1)-index1-1] = mp1[index1+1:] - index += len(x1)-index1-1 - elif index2+1 < len(y2): - x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] - y_new[index+1:index+1+len(x2)-index2-1] = y2[index2+1:] - mp_new[index+1:index+1+len(x2)-index2-1] = mp2[index2+1:] - index += len(x2)-index2-1 - # else: # both arrays reached the end simultaneously - # x_new[index+1] = x1[-1] - # y_new[index+1] = y1[-1] + y2[-1] - # mp_new[index+1] = mp1[-1] + mp2[-1] + if index1+1 < N1: + x_new[index+1:index+1+N1-index1] = x1[index1+1:] + y_new[index+1:index+1+N1-index1] = y1[index1+1:] + mp_new[index+1:index+1+N1-index1] = mp1[index1+1:] + index += N1-index1 + elif index2+1 < N2: + x_new[index+1:index+1+N2-index2] = x2[index2+1:] + y_new[index+1:index+1+N2-index2] = y2[index2+1:] + mp_new[index+1:index+1+N2-index2] = mp2[index2+1:] + index += N2-index2 + else: # both arrays reached the end simultaneously + x_new[index+1] = x1[-1] + y_new[index+1] = y1[-1] + y2[-1] + mp_new[index+1] = mp1[-1] + mp2[-1] + index += 1 y_new[0] = y_new[1] mp_new[0] = mp_new[1] diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index 3dc29ff..802b98c 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -119,7 +119,10 @@ def spike_sync(spike_train1, spike_train2, interval=None, max_tau=None): """ c, mp = _spike_sync_values(spike_train1, spike_train2, interval, max_tau) - return 1.0*c/mp + if mp == 0: + return 1.0 + else: + return 1.0*c/mp ############################################################ @@ -187,7 +190,10 @@ def spike_sync_multi(spike_trains, indices=None, interval=None, max_tau=None): coincidence += c mp += m - return coincidence/mp + if mp == 0.0: + return 1.0 + else: + return coincidence/mp ############################################################ diff --git a/test/numeric/test_regression_random_spikes.py b/test/numeric/test_regression_random_spikes.py index 6156bb4..73d39a6 100644 --- a/test/numeric/test_regression_random_spikes.py +++ b/test/numeric/test_regression_random_spikes.py @@ -35,7 +35,9 @@ def test_regression_random(): spike = spk.spike_distance_multi(spike_trains) spike_prof = spk.spike_profile_multi(spike_trains).avrg() - # spike_sync = spk.spike_sync_multi(spike_trains) + + spike_sync = spk.spike_sync_multi(spike_trains) + spike_sync_prof = spk.spike_sync_profile_multi(spike_trains).avrg() assert_almost_equal(isi, results_cSPIKY[i][0], decimal=14, err_msg="Index: %d, ISI" % i) @@ -47,6 +49,9 @@ def test_regression_random(): assert_almost_equal(spike_prof, results_cSPIKY[i][1], decimal=14, err_msg="Index: %d, SPIKE" % i) + assert_almost_equal(spike_sync, spike_sync_prof, decimal=14, + err_msg="Index: %d, SPIKE-Sync" % i) + def check_regression_dataset(spike_file="benchmark.mat", spikes_name="spikes", @@ -109,19 +114,25 @@ def check_single_spike_train_set(index): spike_train_data = spike_train_sets[index] spike_trains = [] + N = 0 for spikes in spike_train_data[0]: - print("Spikes:", spikes.flatten()) - spike_trains.append(spk.SpikeTrain(spikes.flatten(), 100.0)) + N += len(spikes.flatten()) + print("Spikes:", len(spikes.flatten())) + spikes_array = spikes.flatten() + if (len(spikes_array) > 0) and (spikes_array[-1] > 100.0): + spikes_array[-1] = 100.0 + spike_trains.append(spk.SpikeTrain(spikes_array,
100.0)) + print(spike_trains[-1].spikes) - print(spk.spike_distance_multi(spike_trains)) + print(N) - print(results_cSPIKY[index][1]) + print(spk.spike_sync_multi(spike_trains)) - print(spike_trains[1].spikes) + print(spk.spike_sync_profile_multi(spike_trains).integral()) if __name__ == "__main__": - test_regression_random() + # test_regression_random() # check_regression_dataset() - # check_single_spike_train_set(7633) + check_single_spike_train_set(4) diff --git a/test/test_distance.py b/test/test_distance.py index 083d8a3..fe09f34 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -217,6 +217,28 @@ def test_spike_sync(): assert_almost_equal(spk.spike_sync(spikes1, spikes2), 0.4, decimal=16) + spikes1 = SpikeTrain([1.0, 2.0, 4.0], 4.0) + spikes2 = SpikeTrain([3.8], 4.0) + spikes3 = SpikeTrain([3.9, ], 4.0) + + expected_x = np.array([0.0, 1.0, 2.0, 3.8, 4.0, 4.0]) + expected_y = np.array([0.0, 0.0, 0.0, 1.0, 1.0, 1.0]) + + f = spk.spike_sync_profile(spikes1, spikes2) + + assert_array_almost_equal(f.x, expected_x, decimal=16) + assert_array_almost_equal(f.y, expected_y, decimal=16) + + f2 = spk.spike_sync_profile(spikes2, spikes3) + + i1 = f.integral() + i2 = f2.integral() + f.add(f2) + i12 = f.integral() + + assert_equal(i1[0]+i2[0], i12[0]) + assert_equal(i1[1]+i2[1], i12[1]) + def check_multi_profile(profile_func, profile_func_multi, dist_func_multi): # generate spike trains: @@ -313,6 +335,17 @@ def test_multi_spike_sync(): assert_equal(np.sum(f.y[1:-1]), 39932) assert_equal(np.sum(f.mp[1:-1]), 85554) + # example with 2 empty spike trains + sts = [] + sts.append(SpikeTrain([1, 9], [0, 10])) + sts.append(SpikeTrain([1, 3], [0, 10])) + sts.append(SpikeTrain([], [0, 10])) + sts.append(SpikeTrain([], [0, 10])) + + assert_almost_equal(spk.spike_sync_multi(sts), 1.0/6.0, decimal=15) + assert_almost_equal(spk.spike_sync_profile_multi(sts).avrg(), 1.0/6.0, + decimal=15) + def check_dist_matrix(dist_func, dist_matrix_func): # generate spike trains: -- cgit v1.2.3
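With the empty-profile special case removed from the backends above, the convention that empty inputs yield a SPIKE-synchronization of 1 is now enforced in spike_sync, spike_sync_multi and DiscreteFunc.avrg. A short sketch of the resulting behavior (not part of the patch; the expected values are taken from the tests above):

.. code:: python

    import pyspike as spk
    from pyspike import SpikeTrain

    # no spikes at all: multiplicity mp == 0, so the result is 1.0 by definition
    empty1 = SpikeTrain([], edges=(0.0, 1.0))
    empty2 = SpikeTrain([], edges=(0.0, 1.0))
    print(spk.spike_sync(empty1, empty2))  # 1.0

    # multi-variate case including two empty trains, cf. test_multi_spike_sync
    sts = [SpikeTrain([1, 9], [0, 10]), SpikeTrain([1, 3], [0, 10]),
           SpikeTrain([], [0, 10]), SpikeTrain([], [0, 10])]
    print(spk.spike_sync_multi(sts))  # 1.0/6.0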
From 34bd30415dd93a2425ce566627e24ee9483ada3e Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Thu, 20 Sep 2018 10:49:42 -0700 Subject: Spike Order support (#39) * reorganized directionality module * further refactoring of directionality * completed python directionality backend * added SPIKE-Sync based filtering new function filter_by_spike_sync removes spikes that have a multi-variate Spike Sync value below some threshold not yet fully tested, python backend missing. * spike sync filtering, cython sim ann Added function for filtering out events based on a threshold for the spike sync values. Useful for focusing on synchronous events during directionality analysis. Also added cython version of simulated annealing for performance. * added coincidence single profile to python backend missing function in python backend added, identified and fixed a bug in the implementation as well * updated test case to new spike sync behavior * python3 fixes * another python3 fix * Fix absolute imports in directionality measures * remove commented code * Add directionality to docs, bump version * Clean up directionality module, add doxy. * Remove debug print from tests * Fix bug in calling Python backend * Fix incorrect integrals in PieceWiseConstFunc (#36) * Add (some currently failing) tests for PieceWiseConstFunc.integral * Fix implementation of PieceWiseConstFunc.integral Just by adding a special condition for when we are only taking an integral "between" two edges of a PieceWiseConstFunc All tests now pass. Fixes #33. * Add PieceWiseConstFunc.integral tests for ValueError * Add testing bounds of integral * Raise ValueError in function implementation * Fix incorrect integrals in PieceWiseLinFunc (#38) Integrals of piece-wise linear functions were incorrect if the requested interval lies completely between two support points. This has been fixed, and a unit test exercising this behavior was added. Fixes #38 * Add Spike Order example and Tutorial section Adds an example computing spike order profile and the optimal spike train order. Also adds a section on spike train order to the tutorial. --- Changelog | 3 + Readme.rst | 9 +- doc/pyspike.rst | 6 + doc/tutorial.rst | 66 +++ examples/spike_train_order.py | 52 +++ pyspike/PieceWiseConstFunc.py | 32 +- pyspike/PieceWiseLinFunc.py | 42 +- pyspike/__init__.py | 16 +- pyspike/cython/cython_directionality.pyx | 262 ++++++++++++ pyspike/cython/cython_distances.pyx | 200 +++++++++ pyspike/cython/cython_profiles.pyx | 33 ++ pyspike/cython/cython_simulated_annealing.pyx | 82 ++++ pyspike/cython/directionality_python_backend.py | 144 +++++++ pyspike/cython/python_backend.py | 67 ++- pyspike/spike_directionality.py | 522 ++++++++++++++++++++++++ pyspike/spike_sync.py | 55 ++- setup.py | 28 +- test/test_directionality.py | 97 +++++ test/test_function.py | 62 +++ test/test_sync_filter.py | 95 +++++ 20 files changed, 1812 insertions(+), 61 deletions(-) create mode 100644 examples/spike_train_order.py create mode 100644 pyspike/cython/cython_directionality.pyx create mode 100644 pyspike/cython/cython_simulated_annealing.pyx create mode 100644 pyspike/cython/directionality_python_backend.py create mode 100644 pyspike/spike_directionality.py create mode 100644 test/test_directionality.py create mode 100644 test/test_sync_filter.py (limited to 'pyspike/cython/cython_distances.pyx') diff --git a/Changelog b/Changelog index 21b7cb0..88e16cc 100644 --- a/Changelog +++ b/Changelog @@ -1,3 +1,6 @@ +PySpike v0.6: + * Support for computing spike directionality and spike train order + PySpike v0.5: * First beta release * Python 2.6 support removed diff --git a/Readme.rst b/Readme.rst index 0422dad..74b014b 100644 --- a/Readme.rst +++ b/Readme.rst @@ -31,19 +31,14 @@ Additionally, depending on the used methods: ISI-distance [1], SPIKE-distance [2 Important Changelog ----------------------------- +With version 0.6.0, the spike directionality and spike train order functions have been added. + With version 0.5.0, the interfaces have been unified and the specific functions for multivariate computations have become deprecated. With version 0.2.0, the :code:`SpikeTrain` class has been introduced to represent spike trains.
This is a breaking change in the function interfaces. Hence, programs written for older versions of PySpike (0.1.x) will not run with newer versions. - -Upcoming Functionality ------------------------- - -In an upcoming release, new functionality for analyzing Synfire patterns based on the new measures SPIKE-Order and Spike-Train-Order method will become part of the PySpike library. -The new measures and algorithms are described in `this preprint `_. - Requirements and Installation ----------------------------- diff --git a/doc/pyspike.rst b/doc/pyspike.rst index 74ab439..3b10d2a 100644 --- a/doc/pyspike.rst +++ b/doc/pyspike.rst @@ -64,6 +64,12 @@ PSTH :undoc-members: :show-inheritance: +Directionality +........................................ +.. automodule:: pyspike.spike_directionality + :members: + :undoc-members: + :show-inheritance: Helper functions ........................................ diff --git a/doc/tutorial.rst b/doc/tutorial.rst index aff03a8..377c0a2 100644 --- a/doc/tutorial.rst +++ b/doc/tutorial.rst @@ -231,3 +231,69 @@ The following example computes and plots the ISI- and SPIKE-distance matrix as w plt.title("SPIKE-Sync") plt.show() + + +Quantifying Leaders and Followers: Spike Train Order +---------------------------------------------------- + +PySpike provides functionality to quantify how much a set of spike trains +resembles a synfire pattern (i.e. a perfect leader-follower pattern). For details +on the algorithms please see +`our article in NJP `_. + +The following example computes the Spike Order profile and Synfire Indicator +of two Poissonian spike trains. + +.. code:: python + + import numpy as np + from matplotlib import pyplot as plt + import pyspike as spk + + + st1 = spk.generate_poisson_spikes(1.0, [0, 20]) + st2 = spk.generate_poisson_spikes(1.0, [0, 20]) + + d = spk.spike_directionality(st1, st2) + + print "Spike Directionality of two Poissonian spike trains:", d + + E = spk.spike_train_order_profile(st1, st2) + + plt.figure() + x, y = E.get_plottable_data() + plt.plot(x, y, '-ob') + plt.ylim(-1.1, 1.1) + plt.xlabel("t") + plt.ylabel("E") + plt.title("Spike Train Order Profile") + + plt.show() + +Additionally, PySpike can compute the optimal ordering of the spike trains, +i.e. the ordering that most resembles a synfire pattern. The following example +computes the optimal order of a set of 20 Poissonian spike trains: + ..
code:: python + + M = 20 + spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100]) for m in xrange(M)] + + F_init = spk.spike_train_order(spike_trains) + print "Initial Synfire Indicator for 20 Poissonian spike trains:", F_init + + D_init = spk.spike_directionality_matrix(spike_trains) + phi, _ = spk.optimal_spike_train_sorting(spike_trains) + F_opt = spk.spike_train_order(spike_trains, indices=phi) + print "Synfire Indicator of optimized spike train sorting:", F_opt + + D_opt = spk.permutate_matrix(D_init, phi) + + plt.figure() + plt.imshow(D_init) + plt.title("Initial Directionality Matrix") + + plt.figure() + plt.imshow(D_opt) + plt.title("Optimized Directionality Matrix") + + plt.show() diff --git a/examples/spike_train_order.py b/examples/spike_train_order.py new file mode 100644 index 0000000..3a42472 --- /dev/null +++ b/examples/spike_train_order.py @@ -0,0 +1,52 @@ +import numpy as np +from matplotlib import pyplot as plt +import pyspike as spk + + +st1 = spk.generate_poisson_spikes(1.0, [0, 20]) +st2 = spk.generate_poisson_spikes(1.0, [0, 20]) + +d = spk.spike_directionality(st1, st2) + +print "Spike Directionality of two Poissonian spike trains:", d + +E = spk.spike_train_order_profile(st1, st2) + +plt.figure() +x, y = E.get_plottable_data() +plt.plot(x, y, '-ob') +plt.ylim(-1.1, 1.1) +plt.xlabel("t") +plt.ylabel("E") +plt.title("Spike Train Order Profile") + + +###### Optimize spike train order of 20 Random spike trains ####### + +M = 20 + +spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100]) for m in xrange(M)] + +F_init = spk.spike_train_order(spike_trains) + +print "Initial Synfire Indicator for 20 Poissonian spike trains:", F_init + +D_init = spk.spike_directionality_matrix(spike_trains) + +phi, _ = spk.optimal_spike_train_sorting(spike_trains) + +F_opt = spk.spike_train_order(spike_trains, indices=phi) + +print "Synfire Indicator of optimized spike train sorting:", F_opt + +D_opt = spk.permutate_matrix(D_init, phi) + +plt.figure() +plt.imshow(D_init) +plt.title("Initial Directionality Matrix") + +plt.figure() +plt.imshow(D_opt) +plt.title("Optimized Directionality Matrix") + +plt.show() diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py index 5ce5f27..17fdd3f 100644 --- a/pyspike/PieceWiseConstFunc.py +++ b/pyspike/PieceWiseConstFunc.py @@ -129,19 +129,31 @@ class PieceWiseConstFunc(object): # no interval given, integrate over the whole spike train a = np.sum((self.x[1:]-self.x[:-1]) * self.y) else: + if interval[0]>interval[1]: + raise ValueError("Invalid averaging interval: interval[0]>=interval[1]") + if interval[0]<self.x[0]: + raise ValueError("Invalid averaging interval: interval[0]<self.x[0]") + if interval[1]>self.x[-1]: + raise ValueError("Invalid averaging interval: interval[1]>self.x[-1]") # find the indices corresponding to the interval start_ind = np.searchsorted(self.x, interval[0], side='right') end_ind = np.searchsorted(self.x, interval[1], side='left')-1 - assert start_ind > 0 and end_ind < len(self.x), \ - "Invalid averaging interval" - # first the contribution from between the indices - a = np.sum((self.x[start_ind+1:end_ind+1] - - self.x[start_ind:end_ind]) * - self.y[start_ind:end_ind]) - # correction from start to first index - a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1] - # correction from last index to end - a += (interval[1]-self.x[end_ind]) * self.y[end_ind] + if start_ind > end_ind: + # contribution from between two closest edges + a = (self.x[start_ind]-self.x[end_ind]) * self.y[end_ind] + # minus the part that is not within the interval + a -= ((interval[0]-self.x[end_ind])+(self.x[start_ind]-interval[1])) * self.y[end_ind] + else: + assert start_ind > 0 and end_ind < len(self.x), \ + "Invalid averaging interval" + # first the contribution from between the indices + a =
np.sum((self.x[start_ind+1:end_ind+1] - + self.x[start_ind:end_ind]) * + self.y[start_ind:end_ind]) + # correction from start to first index + a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1] + # correction from last index to end + a += (interval[1]-self.x[end_ind]) * self.y[end_ind] return a def avrg(self, interval=None): diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py index 8145e63..8faaec4 100644 --- a/pyspike/PieceWiseLinFunc.py +++ b/pyspike/PieceWiseLinFunc.py @@ -146,31 +146,47 @@ class PieceWiseLinFunc: if interval is None: # no interval given, integrate over the whole spike train - integral = np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2)) + return np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2)) + + # find the indices corresponding to the interval + start_ind = np.searchsorted(self.x, interval[0], side='right') + end_ind = np.searchsorted(self.x, interval[1], side='left')-1 + assert start_ind > 0 and end_ind < len(self.x), \ + "Invalid averaging interval" + if start_ind > end_ind: + print(start_ind, end_ind, self.x[start_ind]) + # contribution from between two closest edges + y_x0 = intermediate_value(self.x[start_ind-1], + self.x[start_ind], + self.y1[start_ind-1], + self.y2[start_ind-1], + interval[0]) + y_x1 = intermediate_value(self.x[start_ind-1], + self.x[start_ind], + self.y1[start_ind-1], + self.y2[start_ind-1], + interval[1]) + print(y_x0, y_x1, interval[1] - interval[0]) + integral = (y_x0 + y_x1) * 0.5 * (interval[1] - interval[0]) + print(integral) else: - # find the indices corresponding to the interval - start_ind = np.searchsorted(self.x, interval[0], side='right') - end_ind = np.searchsorted(self.x, interval[1], side='left')-1 - assert start_ind > 0 and end_ind < len(self.x), \ - "Invalid averaging interval" # first the contribution from between the indices integral = np.sum((self.x[start_ind+1:end_ind+1] - - self.x[start_ind:end_ind]) * - 0.5*(self.y1[start_ind:end_ind] + - self.y2[start_ind:end_ind])) + self.x[start_ind:end_ind]) * + 0.5*(self.y1[start_ind:end_ind] + + self.y2[start_ind:end_ind])) # correction from start to first index integral += (self.x[start_ind]-interval[0]) * 0.5 * \ (self.y2[start_ind-1] + - intermediate_value(self.x[start_ind-1], + intermediate_value(self.x[start_ind-1], self.x[start_ind], self.y1[start_ind-1], self.y2[start_ind-1], - interval[0] - )) + interval[0])) # correction from last index to end integral += (interval[1]-self.x[end_ind]) * 0.5 * \ (self.y1[end_ind] + - intermediate_value(self.x[end_ind], self.x[end_ind+1], + intermediate_value(self.x[end_ind], self.x[end_ind+1], self.y1[end_ind], self.y2[end_ind], interval[1] )) diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 08253fb..3897d18 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -1,5 +1,5 @@ """ -Copyright 2014-2015, Mario Mulansky +Copyright 2014-2018, Mario Mulansky Distributed under the BSD License """ @@ -7,8 +7,8 @@ Distributed under the BSD License from __future__ import absolute_import __all__ = ["isi_distance", "spike_distance", "spike_sync", "psth", - "spikes", "SpikeTrain", "PieceWiseConstFunc", "PieceWiseLinFunc", - "DiscreteFunc", "directionality"] + "spikes", "spike_directionality", "SpikeTrain", + "PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc"] from .PieceWiseConstFunc import PieceWiseConstFunc from .PieceWiseLinFunc import PieceWiseLinFunc @@ -20,13 +20,21 @@ from .isi_distance import isi_profile, isi_distance, isi_profile_multi,\ from .spike_distance import 
spike_profile, spike_distance, spike_profile_multi,\ spike_distance_multi, spike_distance_matrix from .spike_sync import spike_sync_profile, spike_sync,\ - spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix + spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix,\ + filter_by_spike_sync from .psth import psth from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \ spike_train_from_string, import_spike_trains_from_time_series, \ merge_spike_trains, generate_poisson_spikes +from .spike_directionality import spike_directionality, \ + spike_directionality_values, spike_directionality_matrix, \ + spike_train_order_profile, spike_train_order_profile_bi, \ + spike_train_order_profile_multi, spike_train_order, \ + spike_train_order_bi, spike_train_order_multi, \ + optimal_spike_train_sorting, permutate_matrix + # define the __version__ following # http://stackoverflow.com/questions/17583443 from pkg_resources import get_distribution, DistributionNotFound diff --git a/pyspike/cython/cython_directionality.pyx b/pyspike/cython/cython_directionality.pyx new file mode 100644 index 0000000..ac37690 --- /dev/null +++ b/pyspike/cython/cython_directionality.pyx @@ -0,0 +1,262 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_directionality.pyx + +cython implementation of the spike delay asymmetry measures + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_directionality.pyx + +which gives:: + + cython_directionality.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport fabs +from libc.math cimport fmax +from libc.math cimport fmin + +# from pyspike.cython.cython_distances cimport get_tau + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +############################################################ +# get_tau +############################################################ +cdef inline double get_tau(double[:] spikes1, double[:] spikes2, + int i, int j, double interval, double max_tau): + cdef double m = interval # use interval length as initial tau + cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1 + cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1 + if i < N1 and i > -1: + m = fmin(m, spikes1[i+1]-spikes1[i]) + if j < N2 and j > -1: + m = fmin(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = fmin(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = fmin(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = fmin(m, max_tau) + return m + + +############################################################ +# spike_train_order_profile_cython +############################################################ +def spike_train_order_profile_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int n = 0 + cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times + cdef double[:] a = np.zeros(N1 + N2 + 2) # asymmetry values + cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + st[n] = spikes1[i] + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the 
previous spike + # spike from spike train 1 after spike train 2 + # both get marked with -1 + a[n] = -1 + a[n-1] = -1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + st[n] = spikes2[j] + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # both get marked with 1 + a[n] = 1 + a[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + n += 1 + # add only one event with zero asymmetry value and multiplicity 2 + st[n] = spikes1[i] + a[n] = 0 + mp[n] = 2 + + st = st[:n+2] + a = a[:n+2] + mp = mp[:n+2] + + st[0] = t_start + st[len(st)-1] = t_end + if N1 + N2 > 0: + a[0] = a[1] + a[len(a)-1] = a[len(a)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + else: + a[0] = 1 + a[1] = 1 + + return st, a, mp + + +############################################################ +# spike_train_order_cython +############################################################ +def spike_train_order_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int d = 0 + cdef int mp = 0 + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 2 appeared before spike in spike train 1 + # mark with -1 + d -= 2 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 1 appeared before spike in spike train 2 + # mark with +1 + d += 2 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + # add only one event with multiplicity 2, but no asymmetry counting + mp += 2 + + if d == 0 and mp == 0: + # empty spike trains -> spike sync = 1 by definition + d = 1 + mp = 1 + + return d, mp + + +############################################################ +# spike_directionality_profiles_cython +############################################################ +def spike_directionality_profiles_cython(double[:] spikes1, + double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef double[:] d1 = np.zeros(N1) # directionality values + cdef double[:] d2 = np.zeros(N2) # directionality values + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 after spike train 2 + # leading spike gets +1, following spike -1 + d1[i] = -1 + d2[j] = +1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and 
spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # leading spike gets +1, following spike -1 + d1[i] = +1 + d2[j] = -1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + # equal spike times: zero asymmetry value + d1[i] = 0 + d2[j] = 0 + + return d1, d2 + + +############################################################ +# spike_directionality_cython +############################################################ +def spike_directionality_cython(double[:] spikes1, + double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int d = 0 # directionality value + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 after spike train 2 + # leading spike gets +1, following spike -1 + d -= 1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # leading spike gets +1, following spike -1 + d += 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + + return d diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index ac5f226..d4070ae 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -178,6 +178,8 @@ cdef inline double isi_avrg_cython(double isi1, double isi2) nogil: return 0.5*(isi1+isi2)*(isi1+isi2) # alternative definition to obtain ~ 0.5 for Poisson spikes # return 0.5*(isi1*isi1+isi2*isi2) + # another alternative definition without second normalization + # return 0.5*(isi1+isi2) ############################################################ @@ -248,6 +250,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, index2 = 0 y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) index = 1 while index1+index2 < N1+N2-2: @@ -267,6 +271,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, t_curr = t_p1 s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) @@ -286,6 +292,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, s1 = dt_p1 # s2 is the same as above, thus we can compute y2 immediately y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): index2 += 1 # first calculate the previous interval end value @@ -301,6 +309,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, t_curr = t_p2 s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, 
isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) @@ -320,6 +330,9 @@ def spike_distance_cython(double[:] t1, double[:] t2, s2 = dt_p2 # s1 is the same as above, thus we can compute y2 immediately y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) + else: # t_f1 == t_f2 - generate only one event index1 += 1 index2 += 1 @@ -358,6 +371,193 @@ def spike_distance_cython(double[:] t1, double[:] t2, s1 = dt_f1 # *(t_end-t1[N1-1])/isi1 s2 = dt_f2 # *(t_end-t2[N2-1])/isi2 y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_end - t_last) + # end nogil + + # use only the data added above + # could be less than original length due to equal spike times + return spike_value / (t_end-t_start) + + +############################################################ +# isi_avrg_rf_cython +############################################################ +cdef inline double isi_avrg_rf_cython(double isi1, double isi2) nogil: + # rate free version + return (isi1+isi2) + + +############################################################ +# spike_distance_rf_cython +############################################################ +def spike_distance_rf_cython(double[:] t1, double[:] t2, + double t_start, double t_end): + + cdef int N1, N2, index1, index2, index + cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2 + cdef double isi1, isi2, s1, s2 + cdef double y_start, y_end, t_last, t_current, spike_value + + spike_value = 0.0 + + N1 = len(t1) + N2 = len(t2) + + with nogil: # release the interpreter to allow multithreading + t_last = t_start + t_p1 = t_start + t_p2 = t_start + if t1[0] > t_start: + # dt_p1 = t2[0]-t_start + t_f1 = t1[0] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) + dt_p1 = dt_f1 + s1 = dt_p1*(t_f1-t_start)/isi1 + index1 = -1 + else: + t_f1 = t1[1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + dt_p1 = 0.0 + isi1 = t1[1]-t1[0] + s1 = dt_p1 + index1 = 0 + if t2[0] > t_start: + # dt_p1 = t2[0]-t_start + t_f2 = t2[0] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = dt_f2 + isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) + s2 = dt_p2*(t_f2-t_start)/isi2 + index2 = -1 + else: + t_f2 = t2[1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = 0.0 + isi2 = t2[1]-t2[0] + s2 = dt_p2 + index2 = 0 + + # y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + index = 1 + + while index1+index2 < N1+N2-2: + # print(index, index1, index2) + if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1): + index1 += 1 + # first calculate the previous interval end value + s1 = dt_f1*(t_f1-t_p1) / isi1 + # the previous time now was the following time before: + dt_p1 = dt_f1 + t_p1 = t_f1 # t_p1 contains the current time point + # get the next time + if index1 < N1-1: + t_f1 = t1[index1+1] + else: + t_f1 = t_end + t_curr = t_p1 + s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 + # y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + 
s2) / isi_avrg_rf_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + + # now the next interval start value + if index1 < N1-1: + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1-t_p1 + s1 = dt_p1 + else: + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # s1 needs adjustment due to change of isi1 + s1 = dt_p1*(t_end-t1[N1-1])/isi1 + # s2 is the same as above, thus we can compute y2 immediately + # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): + index2 += 1 + # first calculate the previous interval end value + s2 = dt_f2*(t_f2-t_p2) / isi2 + # the previous time now was the following time before: + dt_p2 = dt_f2 + t_p2 = t_f2 # t_p2 contains the current time point + # get the next time + if index2 < N2-1: + t_f2 = t2[index2+1] + else: + t_f2 = t_end + t_curr = t_p2 + s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 + # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + + # now the next interval start value + if index2 < N2-1: + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2-t_p2 + s2 = dt_p2 + else: + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + # s2 needs adjustment due to change of isi2 + s2 = dt_p2*(t_end-t2[N2-1])/isi2 + # s1 is the same as above, thus we can compute y2 immediately + # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + else: # t_f1 == t_f2 - generate only one event + index1 += 1 + index2 += 1 + t_p1 = t_f1 + t_p2 = t_f2 + dt_p1 = 0.0 + dt_p2 = 0.0 + t_curr = t_f1 + y_end = 0.0 + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + y_start = 0.0 + if index1 < N1-1: + t_f1 = t1[index1+1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1 - t_p1 + else: + t_f1 = t_end + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + if index2 < N2-1: + t_f2 = t2[index2+1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2 - t_p2 + else: + t_f2 = t_end + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + index += 1 + t_last = t_curr + # isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + s1 = dt_f1*(t_end-t1[N1-1])/isi1 + s2 = dt_f2*(t_end-t2[N2-1])/isi2 + # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + spike_value += 0.5*(y_start + y_end) * (t_end - t_last) # end nogil diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index 4a42cdb..aa24db4 100644 --- a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -450,3 +450,36 @@ def coincidence_profile_cython(double[:] spikes1, double[:] spikes2, c[1] = 1 return st, c, mp + + +############################################################ +# coincidence_single_profile_cython +############################################################ +def coincidence_single_profile_cython(double[:] spikes1, double[:] 
spikes2, + double t_start, double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int j = -1 + cdef double[:] c = np.zeros(N1) # coincidences + cdef double interval = t_end - t_start + cdef double tau + for i in xrange(N1): + while j < N2-1 and spikes2[j+1] < spikes1[i]: + # move forward until spikes2[j] is the last spike before spikes1[i] + # note that if spikes2[j] is after spikes1[i] we dont do anything + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and fabs(spikes1[i]-spikes2[j]) < tau: + # current spike in st1 is coincident + c[i] = 1 + if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]): + # in case spikes2[j] is before spikes1[i] it has to be the one + # right before (see above), hence we move one forward and also + # check the next spike + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if fabs(spikes2[j]-spikes1[i]) < tau: + # current spike in st1 is coincident + c[i] = 1 + return c diff --git a/pyspike/cython/cython_simulated_annealing.pyx b/pyspike/cython/cython_simulated_annealing.pyx new file mode 100644 index 0000000..be9423c --- /dev/null +++ b/pyspike/cython/cython_simulated_annealing.pyx @@ -0,0 +1,82 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_simulated_annealing.pyx + +cython implementation of a simulated annealing algorithm to find the optimal +spike train order + +Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects +improves the performance of spike_distance by a factor of 10! + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_simulated_annealing.pyx + +which gives: + + cython_simulated_annealing.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport exp +from libc.math cimport fmod +from libc.stdlib cimport rand +from libc.stdlib cimport RAND_MAX + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +def sim_ann_cython(double[:, :] D, double T_start, double T_end, double alpha): + + cdef long N = len(D) + cdef double A = np.sum(np.triu(D, 0)) + cdef long[:] p = np.arange(N) + cdef double T = T_start + cdef long iterations + cdef long succ_iter + cdef long total_iter = 0 + cdef double delta_A + cdef long ind1 + cdef long ind2 + + while T > T_end: + iterations = 0 + succ_iter = 0 + # equilibrate for 100*N steps or 10*N successful steps + while iterations < 100*N and succ_iter < 10*N: + # exchange two rows and cols + # ind1 = np.random.randint(N-1) + ind1 = rand() % (N-1) + if ind1 < N-1: + ind2 = ind1+1 + else: # this can never happen! + ind2 = 0 + delta_A = -2*D[p[ind1], p[ind2]] + if delta_A > 0.0 or exp(delta_A/T) > ((1.0*rand()) / RAND_MAX): + # swap indices + p[ind1], p[ind2] = p[ind2], p[ind1] + A += delta_A + succ_iter += 1 + iterations += 1 + total_iter += iterations + T *= alpha # cool down + if succ_iter == 0: + # no successful step -> we believe we have converged + break + + return p, A, total_iter diff --git a/pyspike/cython/directionality_python_backend.py b/pyspike/cython/directionality_python_backend.py new file mode 100644 index 0000000..c1d820b --- /dev/null +++ b/pyspike/cython/directionality_python_backend.py @@ -0,0 +1,144 @@ +""" directionality_python_backend.py + +Collection of python functions that can be used instead of the cython +implementation. 
diff --git a/pyspike/cython/cython_simulated_annealing.pyx b/pyspike/cython/cython_simulated_annealing.pyx
new file mode 100644
index 0000000..be9423c
--- /dev/null
+++ b/pyspike/cython/cython_simulated_annealing.pyx
@@ -0,0 +1,82 @@
+#cython: boundscheck=False
+#cython: wraparound=False
+#cython: cdivision=True
+
+"""
+cython_simulated_annealing.pyx
+
+cython implementation of a simulated annealing algorithm to find the optimal
+spike train order
+
+Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects
+improves the performance by a factor of 10!
+
+Copyright 2015, Mario Mulansky
+
+Distributed under the BSD License
+
+"""
+
+"""
+To test whether things can be optimized: remove all yellow stuff
+in the html output::
+
+  cython -a cython_simulated_annealing.pyx
+
+which gives::
+
+  cython_simulated_annealing.html
+
+"""
+
+import numpy as np
+cimport numpy as np
+
+from libc.math cimport exp
+from libc.math cimport fmod
+from libc.stdlib cimport rand
+from libc.stdlib cimport RAND_MAX
+
+DTYPE = np.float
+ctypedef np.float_t DTYPE_t
+
+
+def sim_ann_cython(double[:, :] D, double T_start, double T_end, double alpha):
+
+    cdef long N = len(D)
+    cdef double A = np.sum(np.triu(D, 0))
+    cdef long[:] p = np.arange(N)
+    cdef double T = T_start
+    cdef long iterations
+    cdef long succ_iter
+    cdef long total_iter = 0
+    cdef double delta_A
+    cdef long ind1
+    cdef long ind2
+
+    while T > T_end:
+        iterations = 0
+        succ_iter = 0
+        # equilibrate for 100*N steps or 10*N successful steps
+        while iterations < 100*N and succ_iter < 10*N:
+            # exchange two rows and cols
+            # ind1 = np.random.randint(N-1)
+            ind1 = rand() % (N-1)
+            if ind1 < N-1:
+                ind2 = ind1+1
+            else:  # this can never happen, as rand() % (N-1) <= N-2
+                ind2 = 0
+            delta_A = -2*D[p[ind1], p[ind2]]
+            if delta_A > 0.0 or exp(delta_A/T) > ((1.0*rand()) / RAND_MAX):
+                # swap indices
+                p[ind1], p[ind2] = p[ind2], p[ind1]
+                A += delta_A
+                succ_iter += 1
+            iterations += 1
+        total_iter += iterations
+        T *= alpha  # cool down
+        if succ_iter == 0:
+            # no successful step -> we believe we have converged
+            break
+
+    return p, A, total_iter
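An illustrative call of the annealing backend (sketch only; requires the compiled extension, and the toy matrix below is hypothetical)::

    import numpy as np
    from pyspike.cython.cython_simulated_annealing import sim_ann_cython

    # antisymmetric toy directionality matrix: D[i, j] > 0 means
    # train i tends to lead train j
    D = np.array([[0.0,  2.0,  1.0],
                  [-2.0, 0.0, -1.0],
                  [-1.0, 1.0,  0.0]])

    T_start = 2*np.max(D)  # starting temperature
    p, A, n_iter = sim_ann_cython(D, T_start, 1e-5*T_start, 0.9)
    print(np.asarray(p), A, n_iter)  # permutation, synfire value, iterations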
diff --git a/pyspike/cython/directionality_python_backend.py b/pyspike/cython/directionality_python_backend.py
new file mode 100644
index 0000000..c1d820b
--- /dev/null
+++ b/pyspike/cython/directionality_python_backend.py
@@ -0,0 +1,144 @@
+""" directionality_python_backend.py
+
+Collection of python functions that can be used instead of the cython
+implementation.
+
+Copyright 2015, Mario Mulansky
+
+Distributed under the BSD License
+
+"""
+
+import numpy as np
+
+
+############################################################
+# spike_directionality_profile_python
+############################################################
+def spike_directionality_profile_python(spikes1, spikes2, t_start, t_end,
+                                        max_tau):
+
+    def get_tau(spikes1, spikes2, i, j, max_tau):
+        m = t_end - t_start   # use interval as initial tau
+        if i < len(spikes1)-1 and i > -1:
+            m = min(m, spikes1[i+1]-spikes1[i])
+        if j < len(spikes2)-1 and j > -1:
+            m = min(m, spikes2[j+1]-spikes2[j])
+        if i > 0:
+            m = min(m, spikes1[i]-spikes1[i-1])
+        if j > 0:
+            m = min(m, spikes2[j]-spikes2[j-1])
+        m *= 0.5
+        if max_tau > 0.0:
+            m = min(m, max_tau)
+        return m
+
+    N1 = len(spikes1)
+    N2 = len(spikes2)
+    i = -1
+    j = -1
+    d1 = np.zeros(N1)   # directionality values
+    d2 = np.zeros(N2)   # directionality values
+    while i + j < N1 + N2 - 2:
+        if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+            i += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            if j > -1 and spikes1[i]-spikes2[j] < tau:
+                # coincidence between the current spike and the previous spike
+                # spike in the first spike train occurs after the second
+                d1[i] = -1
+                d2[j] = +1
+        elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+            j += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            if i > -1 and spikes2[j]-spikes1[i] < tau:
+                # coincidence between the current spike and the previous spike
+                # spike in the second spike train occurs after the first
+                d1[i] = +1
+                d2[j] = -1
+        else:  # spikes1[i+1] == spikes2[j+1]
+            # advance in both spike trains
+            j += 1
+            i += 1
+            d1[i] = 0
+            d2[j] = 0
+
+    return d1, d2
+
+
+############################################################
+# spike_train_order_profile_python
+############################################################
+def spike_train_order_profile_python(spikes1, spikes2, t_start, t_end,
+                                     max_tau):
+
+    def get_tau(spikes1, spikes2, i, j, max_tau):
+        m = t_end - t_start   # use interval as initial tau
+        if i < len(spikes1)-1 and i > -1:
+            m = min(m, spikes1[i+1]-spikes1[i])
+        if j < len(spikes2)-1 and j > -1:
+            m = min(m, spikes2[j+1]-spikes2[j])
+        if i > 0:
+            m = min(m, spikes1[i]-spikes1[i-1])
+        if j > 0:
+            m = min(m, spikes2[j]-spikes2[j-1])
+        m *= 0.5
+        if max_tau > 0.0:
+            m = min(m, max_tau)
+        return m
+
+    N1 = len(spikes1)
+    N2 = len(spikes2)
+    i = -1
+    j = -1
+    n = 0
+    st = np.zeros(N1 + N2 + 2)  # spike times
+    a = np.zeros(N1 + N2 + 2)   # coincidences
+    mp = np.ones(N1 + N2 + 2)   # multiplicity
+    while i + j < N1 + N2 - 2:
+        if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+            i += 1
+            n += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            st[n] = spikes1[i]
+            if j > -1 and spikes1[i]-spikes2[j] < tau:
+                # coincidence between the current spike and the previous spike
+                # both get marked with -1
+                a[n] = -1
+                a[n-1] = -1
+        elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+            j += 1
+            n += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            st[n] = spikes2[j]
+            if i > -1 and spikes2[j]-spikes1[i] < tau:
+                # coincidence between the current spike and the previous spike
+                # both get marked with +1
+                a[n] = 1
+                a[n-1] = 1
+        else:  # spikes1[i+1] == spikes2[j+1]
+            # advance in both spike trains
+            j += 1
+            i += 1
+            n += 1
+            # add only one event with zero asymmetry value and multiplicity 2
+            st[n] = spikes1[i]
+            a[n] = 0
+            mp[n] = 2
+
+    st = st[:n+2]
+    a = a[:n+2]
+    mp = mp[:n+2]
+
+    st[0] = t_start
+    st[len(st)-1] = t_end
+    if N1 + N2 > 0:
+        a[0] = a[1]
+        a[len(a)-1] = a[len(a)-2]
+        mp[0] = mp[1]
+        mp[len(mp)-1] = mp[len(mp)-2]
+    else:
+        a[0] = 1
+        a[1] = 1
+
+    return st, a, mp
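The python backend can be exercised directly without compiling anything. For two trains where the first leads twice and the last spikes coincide, the directionality values come out as follows (a sketch; the trains are those used in test/test_directionality.py)::

    from pyspike.cython.directionality_python_backend import \
        spike_directionality_profile_python

    # st1 leads at 100 and 200, the spikes at 300 are simultaneous
    d1, d2 = spike_directionality_profile_python([100, 200, 300],
                                                 [105, 205, 300],
                                                 0, 1000, 0.0)
    # d1 == [ 1.,  1.,  0.],  d2 == [-1., -1.,  0.]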
diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py
index 6b7209a..e75f181 100644
--- a/pyspike/cython/python_backend.py
+++ b/pyspike/cython/python_backend.py
@@ -3,7 +3,7 @@
 Collection of python functions that can be used instead of the cython
 implementation.
 
-Copyright 2014, Mario Mulansky
+Copyright 2014-2015, Mario Mulansky
 
 Distributed under the BSD License
 
@@ -356,26 +356,27 @@ def cumulative_sync_python(spikes1, spikes2):
     return st, c
 
 
+def get_tau(spikes1, spikes2, i, j, max_tau, init_tau):
+    m = init_tau
+    if i < len(spikes1)-1 and i > -1:
+        m = min(m, spikes1[i+1]-spikes1[i])
+    if j < len(spikes2)-1 and j > -1:
+        m = min(m, spikes2[j+1]-spikes2[j])
+    if i > 0:
+        m = min(m, spikes1[i]-spikes1[i-1])
+    if j > 0:
+        m = min(m, spikes2[j]-spikes2[j-1])
+    m *= 0.5
+    if max_tau > 0.0:
+        m = min(m, max_tau)
+    return m
+
+
 ############################################################
 # coincidence_python
 ############################################################
 def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
 
-    def get_tau(spikes1, spikes2, i, j, max_tau):
-        m = t_end - t_start   # use interval as initial tau
-        if i < len(spikes1)-1 and i > -1:
-            m = min(m, spikes1[i+1]-spikes1[i])
-        if j < len(spikes2)-1 and j > -1:
-            m = min(m, spikes2[j+1]-spikes2[j])
-        if i > 0:
-            m = min(m, spikes1[i]-spikes1[i-1])
-        if j > 0:
-            m = min(m, spikes2[j]-spikes2[j-1])
-        m *= 0.5
-        if max_tau > 0.0:
-            m = min(m, max_tau)
-        return m
-
     N1 = len(spikes1)
     N2 = len(spikes2)
     i = -1
@@ -388,7 +389,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
         if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
             i += 1
             n += 1
-            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
             st[n] = spikes1[i]
             if j > -1 and spikes1[i]-spikes2[j] < tau:
                 # coincidence between the current spike and the previous spike
@@ -398,7 +399,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
         elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
             j += 1
             n += 1
-            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
             st[n] = spikes2[j]
             if i > -1 and spikes2[j]-spikes1[i] < tau:
                 # coincidence between the current spike and the previous spike
@@ -433,6 +434,36 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
     return st, c, mp
 
 
+############################################################
+# coincidence_single_python
+############################################################
+def coincidence_single_python(spikes1, spikes2, t_start, t_end, max_tau):
+
+    N1 = len(spikes1)
+    N2 = len(spikes2)
+    j = -1
+    c = np.zeros(N1)  # coincidences
+    for i in range(N1):
+        while j < N2-1 and spikes2[j+1] < spikes1[i]:
+            # move forward until spikes2[j] is the last spike before spikes1[i]
+            # note that if spikes2[j] is after spikes1[i] we don't do anything
+            j += 1
+        tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
+        if j > -1 and abs(spikes1[i]-spikes2[j]) < tau:
+            # current spike in st1 is coincident
+            c[i] = 1
+        if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]):
+            # in case spikes2[j] is before spikes1[i] it has to be the first or
+            # the one right before (see above), hence we move one forward and
+            # also check the next spike
+            j += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
+            if abs(spikes2[j]-spikes1[i]) < tau:
+                # current spike in st1 is coincident
+                c[i] = 1
+    return c
+
+
 ############################################################
 # add_piece_wise_const_python
 ############################################################
diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py
new file mode 100644
index 0000000..248862c
--- /dev/null
+++ b/pyspike/spike_directionality.py
@@ -0,0 +1,522 @@
+# Module containing functions to compute the SPIKE directionality and the
+# spike train order profile
+# Copyright 2015, Mario Mulansky
+# Distributed under the BSD License
+
+from __future__ import absolute_import
+
+import numpy as np
+import pyspike
+from pyspike import DiscreteFunc
+from functools import partial
+from pyspike.generic import _generic_profile_multi
+
+
+############################################################
+# spike_directionality_values
+############################################################
+def spike_directionality_values(*args, **kwargs):
+    """ Computes the spike directionality value for each spike in
+    each spike train. Returns a list containing an array of spike
+    directionality values for every given spike train.
+
+    Valid call structures::
+
+      spike_directionality_values(st1, st2)  # returns the bi-variate profile
+      spike_directionality_values(st1, st2, st3)  # multi-variate profile
+                                                  # of 3 spike trains
+
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_directionality_values(spike_trains)  # profile of the list of
+                                                 # spike trains
+      spike_directionality_values(spike_trains, indices=[0, 1])  # use only
+                                      # the spike trains given by the indices
+
+    Additional arguments:
+    :param max_tau: Upper bound for coincidence window (default=None).
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+
+    :returns: The spike directionality values :math:`D^n_i` as a list of
+              arrays.
+    """
+    if len(args) == 1:
+        return _spike_directionality_values_impl(args[0], **kwargs)
+    else:
+        return _spike_directionality_values_impl(args, **kwargs)
+
+
+def _spike_directionality_values_impl(spike_trains, indices=None,
+                                      interval=None, max_tau=None):
+    """ Computes the multi-variate spike directionality profile
+    of the given spike trains.
+
+    :param spike_trains: List of spike trains.
+    :type spike_trains: List of :class:`pyspike.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike-directionality values.
+    """
+    if interval is not None:
+        raise NotImplementedError("Parameter `interval` not supported.")
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # list of arrays for resulting asymmetry values
+    asymmetry_list = [np.zeros_like(spike_trains[n].spikes) for n in indices]
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    # cython implementation
+    try:
+        from .cython.cython_directionality import \
+            spike_directionality_profiles_cython as profile_impl
+    except ImportError:
+        if not(pyspike.disable_backend_warning):
+            print("Warning: cython_directionality not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+        # use python backend
+        from .cython.directionality_python_backend import \
+            spike_directionality_profile_python as profile_impl
+
+    if max_tau is None:
+        max_tau = 0.0
+
+    for i, j in pairs:
+        d1, d2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes,
+                              spike_trains[i].t_start, spike_trains[i].t_end,
+                              max_tau)
+        asymmetry_list[i] += d1
+        asymmetry_list[j] += d2
+    for a in asymmetry_list:
+        a /= len(spike_trains)-1
+    return asymmetry_list
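+
+# Usage sketch (illustrative only; trains and values taken from
+# test/test_directionality.py):
+#
+#   st1 = SpikeTrain([100, 200, 300], [0, 1000])
+#   st2 = SpikeTrain([105, 205, 300], [0, 1000])
+#   st3 = SpikeTrain([105, 195, 500], [0, 1000])
+#   d = spike_directionality_values([st1, st2, st3])
+#   # d[0] == [1.0, 0.0, 0.0],  d[1] == [-0.5, -1.0, 0.0]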
+
+
+############################################################
+# spike_directionality
+############################################################
+def spike_directionality(spike_train1, spike_train2, normalize=True,
+                         interval=None, max_tau=None):
+    """ Computes the overall spike directionality of the first spike train
+    with respect to the second spike train.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike directionality value.
+    """
+    if interval is None:
+        # distance over the whole interval is requested: use specific function
+        # for optimal performance
+        try:
+            from .cython.cython_directionality import \
+                spike_directionality_cython as spike_directionality_impl
+            if max_tau is None:
+                max_tau = 0.0
+            d = spike_directionality_impl(spike_train1.spikes,
+                                          spike_train2.spikes,
+                                          spike_train1.t_start,
+                                          spike_train1.t_end,
+                                          max_tau)
+            c = len(spike_train1.spikes)
+        except ImportError:
+            if not(pyspike.disable_backend_warning):
+                print("Warning: cython_directionality not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+            # fall back to computing the profile
+            d1, x = spike_directionality_values([spike_train1, spike_train2],
+                                                interval=interval,
+                                                max_tau=max_tau)
+            d = np.sum(d1)
+            c = len(spike_train1.spikes)
+        if normalize:
+            return 1.0*d/c
+        else:
+            return d
+    else:
+        # some specific interval is provided: not yet implemented
+        raise NotImplementedError("Parameter `interval` not supported.")
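+
+# Example values (cf. test/test_directionality.py): st1 = [100, 200, 300]
+# leads st2 = [105, 205, 300] at two of its three spikes, the third pair
+# is simultaneous:
+#
+#   spike_directionality(st1, st2)                   # 2.0/3.0
+#   spike_directionality(st1, st2, normalize=False)  # 2.0
+#   spike_directionality(st2, st1)                   # -2.0/3.0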
+
+
+############################################################
+# spike_directionality_matrix
+############################################################
+def spike_directionality_matrix(spike_trains, normalize=True, indices=None,
+                                interval=None, max_tau=None):
+    """ Computes the spike directionality matrix for the given spike trains.
+
+    :param spike_trains: List of spike trains.
+    :type spike_trains: List of :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike directionality matrix :math:`D` with
+              :math:`D[i, j] = -D[j, i]`.
+    """
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    distance_matrix = np.zeros((len(indices), len(indices)))
+    for i, j in pairs:
+        d = spike_directionality(spike_trains[i], spike_trains[j], normalize,
+                                 interval, max_tau=max_tau)
+        distance_matrix[i, j] = d
+        distance_matrix[j, i] = -d
+    return distance_matrix
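+
+# For the three example trains from test/test_directionality.py:
+#
+#   spike_directionality_matrix([st1, st2, st3], normalize=False)
+#   # -> [[ 0.,  2.,  0.],
+#   #     [-2.,  0., -1.],
+#   #     [ 0.,  1.,  0.]]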
+
+
+############################################################
+# spike_train_order_profile
+############################################################
+def spike_train_order_profile(*args, **kwargs):
+    """ Computes the spike train order profile :math:`E(t)` of the given
+    spike trains. Returns the profile as a DiscreteFunction object.
+
+    Valid call structures::
+
+      spike_train_order_profile(st1, st2)  # returns the bi-variate profile
+      spike_train_order_profile(st1, st2, st3)  # multi-variate profile
+                                                # of 3 spike trains
+
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_train_order_profile(spike_trains)  # profile of the list of
+                                               # spike trains
+      spike_train_order_profile(spike_trains, indices=[0, 1])  # use only the
+                                        # spike trains given by the indices
+
+    Additional arguments:
+    :param max_tau: Upper bound for coincidence window, `default=None`.
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+
+    :returns: The spike train order profile :math:`E(t)`
+    :rtype: :class:`.DiscreteFunction`
+    """
+    if len(args) == 1:
+        return spike_train_order_profile_multi(args[0], **kwargs)
+    elif len(args) == 2:
+        return spike_train_order_profile_bi(args[0], args[1], **kwargs)
+    else:
+        return spike_train_order_profile_multi(args, **kwargs)
+
+
+############################################################
+# spike_train_order_profile_bi
+############################################################
+def spike_train_order_profile_bi(spike_train1, spike_train2, max_tau=None):
+    """ Computes the spike train order profile :math:`E(t)` of the two given
+    spike trains. Returns the profile as a DiscreteFunction object.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order profile :math:`E(t)`.
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+    """
+    # check whether the spike trains are defined for the same interval
+    assert spike_train1.t_start == spike_train2.t_start, \
+        "Given spike trains are not defined on the same interval!"
+    assert spike_train1.t_end == spike_train2.t_end, \
+        "Given spike trains are not defined on the same interval!"
+
+    # cython implementation
+    try:
+        from .cython.cython_directionality import \
+            spike_train_order_profile_cython as \
+            spike_train_order_profile_impl
+    except ImportError:
+        if not(pyspike.disable_backend_warning):
+            print("Warning: cython_directionality not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+        # use python backend
+        from .cython.directionality_python_backend import \
+            spike_train_order_profile_python as spike_train_order_profile_impl
+
+    if max_tau is None:
+        max_tau = 0.0
+
+    times, coincidences, multiplicity \
+        = spike_train_order_profile_impl(spike_train1.spikes,
+                                         spike_train2.spikes,
+                                         spike_train1.t_start,
+                                         spike_train1.t_end,
+                                         max_tau)
+
+    return DiscreteFunc(times, coincidences, multiplicity)
+
+
+############################################################
+# spike_train_order_profile_multi
+############################################################
+def spike_train_order_profile_multi(spike_trains, indices=None,
+                                    max_tau=None):
+    """ Computes the multi-variate spike train order profile for a set of
+    spike trains. For each spike in the set of spike trains, the multi-variate
+    profile is defined as the sum of asymmetry values divided by the number of
+    spike train pairs involving the spike train containing this spike,
+    which is the number of spike trains minus one (N-1).
+
+    :param spike_trains: list of :class:`pyspike.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The multi-variate spike train order profile :math:`E(t)`
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+    """
+    prof_func = partial(spike_train_order_profile_bi, max_tau=max_tau)
+    average_prof, M = _generic_profile_multi(spike_trains, prof_func,
+                                             indices)
+    return average_prof
+
+
+############################################################
+# _spike_train_order_impl
+############################################################
+def _spike_train_order_impl(spike_train1, spike_train2,
+                            interval=None, max_tau=None):
+    """ Implementation of the bi-variate spike train order value
+    (Synfire Indicator).
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    if interval is None:
+        # distance over the whole interval is requested: use specific function
+        # for optimal performance
+        try:
+            from .cython.cython_directionality import \
+                spike_train_order_cython as spike_train_order_func
+            if max_tau is None:
+                max_tau = 0.0
+            c, mp = spike_train_order_func(spike_train1.spikes,
+                                           spike_train2.spikes,
+                                           spike_train1.t_start,
+                                           spike_train1.t_end,
+                                           max_tau)
+        except ImportError:
+            # Cython backend not available: fall back to profile averaging
+            c, mp = spike_train_order_profile(spike_train1, spike_train2,
+                                              max_tau=max_tau).integral(interval)
+        return c, mp
+    else:
+        # some specific interval is provided: not yet implemented
+        raise NotImplementedError("Parameter `interval` not supported.")
+
+
+############################################################
+# spike_train_order
+############################################################
+def spike_train_order(*args, **kwargs):
+    """ Computes the spike train order (Synfire Indicator) of the given
+    spike trains.
+
+    Valid call structures::
+
+      spike_train_order(st1, st2, normalize=True)  # normalized bi-variate
+                                                   # spike train order
+      spike_train_order(st1, st2, st3)  # multi-variate result of 3 spike
+                                        # trains
+
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_train_order(spike_trains)  # result for the list of spike trains
+      spike_train_order(spike_trains, indices=[0, 1])  # use only the spike
+                                            # trains given by the indices
+
+    Additional arguments:
+     - `max_tau` Upper bound for coincidence window, `default=None`.
+     - `normalize` Flag indicating if the result should be normalized by the
+       number of spikes, `default=True`.
+
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    if len(args) == 1:
+        return spike_train_order_multi(args[0], **kwargs)
+    elif len(args) == 2:
+        return spike_train_order_bi(args[0], args[1], **kwargs)
+    else:
+        return spike_train_order_multi(args, **kwargs)
+
+
+############################################################
+# spike_train_order_bi
+############################################################
+def spike_train_order_bi(spike_train1, spike_train2, normalize=True,
+                         interval=None, max_tau=None):
+    """ Computes the overall spike train order value (Synfire Indicator)
+    for two spike trains.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    c, mp = _spike_train_order_impl(spike_train1, spike_train2,
+                                    interval, max_tau)
+    if normalize:
+        return 1.0*c/mp
+    else:
+        return c
+
+
+############################################################
+# spike_train_order_multi
+############################################################
+def spike_train_order_multi(spike_trains, indices=None, normalize=True,
+                            interval=None, max_tau=None):
+    """ Computes the overall spike train order value (Synfire Indicator)
+    for many spike trains.
+
+    :param spike_trains: list of :class:`.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param interval: averaging interval given as a pair of floats, if None
+                     the average over the whole function is computed.
+    :type interval: Pair of floats or None.
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order value (Synfire Indicator) F for the
+              given spike trains.
+    :rtype: double
+    """
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    e_total = 0.0
+    m_total = 0.0
+    for (i, j) in pairs:
+        e, m = _spike_train_order_impl(spike_trains[i], spike_trains[j],
+                                       interval, max_tau)
+        e_total += e
+        m_total += m
+
+    if m_total == 0.0:
+        # no coincidences at all -> define the order as 1
+        return 1.0
+    else:
+        return e_total/m_total
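+
+# Example (cf. test/test_directionality.py):
+#
+#   spike_train_order(st1, st2)                   # 2.0/3.0
+#   spike_train_order(st1, st2, normalize=False)  # 4.0
+#   f = spike_train_order_profile(st1, st2)
+#   # f.x == [0, 100, 105, 200, 205, 300, 1000]
+#   # f.y == [1, 1, 1, 1, 1, 0, 0],  f.mp == [1, 1, 1, 1, 1, 2, 2]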
+
+
+############################################################
+# optimal_spike_train_sorting_from_matrix
+############################################################
+def _optimal_spike_train_sorting_from_matrix(D, full_output=False):
+    """ Finds the best sorting via simulated annealing.
+    Returns the optimal permutation p and A value.
+    Not for direct use, call :func:`.optimal_spike_train_sorting` instead.
+
+    :param D: The directionality (Spike-ORDER) matrix.
+    :param full_output: If true, then function will additionally return the
+                        number of performed iterations (default=False)
+    :return: (p, F) - tuple with the optimal permutation and synfire
+             indicator. If `full_output=True`, (p, F, iter) is returned.
+    """
+    N = len(D)
+    A = np.sum(np.triu(D, 0))
+
+    p = np.arange(N)
+
+    T_start = 2*np.max(D)    # starting temperature
+    T_end = 1E-5 * T_start   # final temperature
+    alpha = 0.9              # cooling factor
+
+    try:
+        from .cython.cython_simulated_annealing import sim_ann_cython as sim_ann
+    except ImportError:
+        raise NotImplementedError("PySpike with Cython required for computing"
+                                  " spike train sorting!")
+
+    p, A, total_iter = sim_ann(D, T_start, T_end, alpha)
+
+    if full_output:
+        return p, A, total_iter
+    else:
+        return p, A
+
+
+############################################################
+# optimal_spike_train_sorting
+############################################################
+def optimal_spike_train_sorting(spike_trains, indices=None, interval=None,
+                                max_tau=None, full_output=False):
+    """ Finds the best sorting of the given spike trains by computing the
+    spike directionality matrix and optimizing the order using simulated
+    annealing. For a detailed description of the algorithm see:
+    `http://iopscience.iop.org/article/10.1088/1367-2630/aa68c3/meta`
+
+    :param spike_trains: list of :class:`.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param interval: time interval filter given as a pair of floats, if None
+                     the full spike trains are used (default=None).
+    :type interval: Pair of floats or None.
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound (default=None).
+    :param full_output: If true, then function will additionally return the
+                        number of performed iterations (default=False)
+    :return: (p, F) - tuple with the optimal permutation and synfire
+             indicator. If `full_output=True`, (p, F, iter) is returned.
+    """
+    D = spike_directionality_matrix(spike_trains, normalize=False,
+                                    indices=indices, interval=interval,
+                                    max_tau=max_tau)
+    return _optimal_spike_train_sorting_from_matrix(D, full_output)
+
+
+############################################################
+# permutate_matrix
+############################################################
+def permutate_matrix(D, p):
+    """ Helper function that applies the permutation p to the columns and
+    rows of matrix D. Returns the permuted matrix
+    :math:`D'[n,m] = D[p[n], p[m]]`.
+
+    :param D: The matrix.
+    :param p: The permutation.
+    :return: The permuted matrix D', i.e. :math:`D'[n,m] = D[p[n], p[m]]`
+    """
+    N = len(D)
+    D_p = np.empty_like(D)
+    for n in range(N):
+        for m in range(N):
+            D_p[n, m] = D[p[n], p[m]]
+    return D_p
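Putting the pieces together, a sorting round-trip might look like the following sketch (illustrative only; it assumes the compiled cython backend is available and that `optimal_spike_train_sorting` is exported at the package level, while `permutate_matrix` is imported from the module directly)::

    import numpy as np
    import pyspike as spk
    from pyspike.spike_directionality import permutate_matrix

    # spike_trains: a list of spk.SpikeTrain objects (placeholder)
    p, F = spk.optimal_spike_train_sorting(spike_trains)
    D = spk.spike_directionality_matrix(spike_trains, normalize=False)
    D_sorted = permutate_matrix(D, p)
    # after sorting, the upper triangle sum np.sum(np.triu(D_sorted))
    # should be (near-)maximal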
diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py
index 80f7805..95ef454 100644
--- a/pyspike/spike_sync.py
+++ b/pyspike/spike_sync.py
@@ -8,7 +8,7 @@ from __future__ import absolute_import
 import numpy as np
 from functools import partial
 import pyspike
-from pyspike import DiscreteFunc
+from pyspike import DiscreteFunc, SpikeTrain
 from pyspike.generic import _generic_profile_multi, _generic_distance_matrix
 
 
@@ -45,9 +45,9 @@ def spike_sync_profile(*args, **kwargs):
     if len(args) == 1:
         return spike_sync_profile_multi(args[0], **kwargs)
     elif len(args) == 2:
-        return spike_sync_profile_bi(args[0], args[1])
+        return spike_sync_profile_bi(args[0], args[1], **kwargs)
     else:
-        return spike_sync_profile_multi(args)
+        return spike_sync_profile_multi(args, **kwargs)
 
 
 ############################################################
@@ -290,3 +290,52 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None):
     dist_func = partial(spike_sync_bi, max_tau=max_tau)
     return _generic_distance_matrix(spike_trains, dist_func,
                                     indices, interval)
+
+
+############################################################
+# filter_by_spike_sync
+############################################################
+def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None,
+                         return_removed_spikes=False):
+    """ Removes all spikes with a multi-variate spike sync value below the
+    given threshold. Returns the list of filtered spike trains and, if
+    `return_removed_spikes` is set, also the list of removed spikes.
+    """
+    N = len(spike_trains)
+    filtered_spike_trains = []
+    removed_spike_trains = []
+
+    # cython implementation
+    try:
+        from .cython.cython_profiles import coincidence_single_profile_cython \
+            as coincidence_impl
+    except ImportError:
+        if not(pyspike.disable_backend_warning):
+            print("Warning: coincidence_single_profile_cython not found. Make \
+sure that PySpike is installed by running\n \
+'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+        # use python backend
+        from .cython.python_backend import coincidence_single_python \
+            as coincidence_impl
+
+    if max_tau is None:
+        max_tau = 0.0
+
+    for i, st in enumerate(spike_trains):
+        coincidences = np.zeros_like(st)
+        for j in range(N):
+            if i == j:
+                continue
+            coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes,
+                                             st.t_start, st.t_end, max_tau)
+        filtered_spikes = st[coincidences > threshold*(N-1)]
+        filtered_spike_trains.append(SpikeTrain(filtered_spikes,
+                                                [st.t_start, st.t_end]))
+        if return_removed_spikes:
+            removed_spikes = st[coincidences <= threshold*(N-1)]
+            removed_spike_trains.append(SpikeTrain(removed_spikes,
+                                                   [st.t_start, st.t_end]))
+    if return_removed_spikes:
+        return [filtered_spike_trains, removed_spike_trains]
+    else:
+        return filtered_spike_trains
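With threshold 0.75 and three spike trains, a spike survives only if it is coincident in both of the other trains. Example taken from test/test_sync_filter.py below::

    import numpy as np
    import pyspike as spk

    st1 = spk.SpikeTrain(np.array([1.0, 2.0, 3.0, 4.0]), 5.0)
    st2 = spk.SpikeTrain(np.array([1.1, 2.1, 3.8]), 5.0)
    st3 = spk.SpikeTrain(np.array([0.9, 3.1, 4.1]), 5.0)

    filtered = spk.filter_by_spike_sync([st1, st2, st3], 0.75)
    # filtered[0].spikes == [1.0, 4.0]
    # filtered[1].spikes == [1.1, 3.8]
    # filtered[2].spikes == [0.9, 4.1]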
diff --git a/setup.py b/setup.py
index 5b9e677..b5b01a6 100644
--- a/setup.py
+++ b/setup.py
@@ -30,7 +30,9 @@ class numpy_include(object):
 
 if os.path.isfile("pyspike/cython/cython_add.c") and \
    os.path.isfile("pyspike/cython/cython_profiles.c") and \
-   os.path.isfile("pyspike/cython/cython_distances.c"):
+   os.path.isfile("pyspike/cython/cython_distances.c") and \
+   os.path.isfile("pyspike/cython/cython_directionality.c") and \
+   os.path.isfile("pyspike/cython/cython_simulated_annealing.c"):
     use_c = True
 else:
     use_c = False
@@ -45,7 +47,11 @@ if use_cython:  # Cython is available, compile .pyx -> .c
         Extension("pyspike.cython.cython_profiles",
                   ["pyspike/cython/cython_profiles.pyx"]),
         Extension("pyspike.cython.cython_distances",
-                  ["pyspike/cython/cython_distances.pyx"])
+                  ["pyspike/cython/cython_distances.pyx"]),
+        Extension("pyspike.cython.cython_directionality",
+                  ["pyspike/cython/cython_directionality.pyx"]),
+        Extension("pyspike.cython.cython_simulated_annealing",
+                  ["pyspike/cython/cython_simulated_annealing.pyx"])
     ]
     cmdclass.update({'build_ext': build_ext})
 elif use_c:  # c files are there, compile to binaries
@@ -55,14 +61,18 @@ elif use_c:  # c files are there, compile to binaries
         Extension("pyspike.cython.cython_profiles",
                   ["pyspike/cython/cython_profiles.c"]),
         Extension("pyspike.cython.cython_distances",
-                  ["pyspike/cython/cython_distances.c"])
+                  ["pyspike/cython/cython_distances.c"]),
+        Extension("pyspike.cython.cython_directionality",
+                  ["pyspike/cython/cython_directionality.c"]),
+        Extension("pyspike.cython.cython_simulated_annealing",
+                  ["pyspike/cython/cython_simulated_annealing.c"])
     ]
 # neither cython nor c files available -> automatic fall-back to python backend
 
 setup(
     name='pyspike',
     packages=find_packages(exclude=['doc']),
-    version='0.5.3',
+    version='0.6.0',
     cmdclass=cmdclass,
     ext_modules=ext_modules,
     include_dirs=[numpy_include()],
@@ -90,11 +100,17 @@ train similarity',
         'Programming Language :: Python :: 2',
         'Programming Language :: Python :: 2.7',
-        'Programming Language :: Python :: 3',
         'Programming Language :: Python :: 3.3',
         'Programming Language :: Python :: 3.4',
         'Programming Language :: Python :: 3.5',
         'Programming Language :: Python :: 3.6'
-    ]
+    ],
+    package_data={
+        'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c',
+                    'cython/cython_distances.c',
+                    'cython/cython_directionality.c',
+                    'cython/cython_simulated_annealing.c'],
+        'test': ['Spike_testdata.txt']
+    }
 )
diff --git a/test/test_directionality.py b/test/test_directionality.py
new file mode 100644
index 0000000..c2e9bfe
--- /dev/null
+++ b/test/test_directionality.py
@@ -0,0 +1,97 @@
+""" test_directionality.py
+
+Tests the directionality functions
+
+Copyright 2015, Mario Mulansky
+
+Distributed under the BSD License
+
+"""
+
+import numpy as np
+from numpy.testing import assert_equal, assert_almost_equal, \
+    assert_array_equal
+
+import pyspike as spk
+from pyspike import SpikeTrain, DiscreteFunc
+
+
+def test_spike_directionality():
+    st1 = SpikeTrain([100, 200, 300], [0, 1000])
+    st2 = SpikeTrain([105, 205, 300], [0, 1000])
+    assert_almost_equal(spk.spike_directionality(st1, st2), 2.0/3.0)
+    assert_almost_equal(spk.spike_directionality(st1, st2, normalize=False),
+                        2.0)
+
+    # exchanging the order of the spike trains should yield the exact
+    # negative value
+    assert_almost_equal(spk.spike_directionality(st2, st1), -2.0/3.0)
+    assert_almost_equal(spk.spike_directionality(st2, st1, normalize=False),
+                        -2.0)
+
+    st3 = SpikeTrain([105, 195, 500], [0, 1000])
+    assert_almost_equal(spk.spike_directionality(st1, st3), 0.0)
+    assert_almost_equal(spk.spike_directionality(st1, st3, normalize=False),
+                        0.0)
+    assert_almost_equal(spk.spike_directionality(st3, st1), 0.0)
+
+    D = spk.spike_directionality_matrix([st1, st2, st3], normalize=False)
+    D_expected = np.array([[0, 2.0, 0.0], [-2.0, 0.0, -1.0], [0.0, 1.0, 0.0]])
+    assert_array_equal(D, D_expected)
+
+    dir_profs = spk.spike_directionality_values([st1, st2, st3])
+    assert_array_equal(dir_profs[0], [1.0, 0.0, 0.0])
+    assert_array_equal(dir_profs[1], [-0.5, -1.0, 0.0])
+
+
+def test_spike_train_order():
+    st1 = SpikeTrain([100, 200, 300], [0, 1000])
+    st2 = SpikeTrain([105, 205, 300], [0, 1000])
+    st3 = SpikeTrain([105, 195, 500], [0, 1000])
+
+    expected_x12 = np.array([0, 100, 105, 200, 205, 300, 1000])
+    expected_y12 = np.array([1, 1, 1, 1, 1, 0, 0])
+    expected_mp12 = np.array([1, 1, 1, 1, 1, 2, 2])
+
+    f = spk.spike_train_order_profile(st1, st2)
+
+    assert f.almost_equal(DiscreteFunc(expected_x12, expected_y12,
+                                       expected_mp12))
+    assert_almost_equal(f.avrg(), 2.0/3.0)
+    assert_almost_equal(f.avrg(normalize=False), 4.0)
+    assert_almost_equal(spk.spike_train_order(st1, st2), 2.0/3.0)
+    assert_almost_equal(spk.spike_train_order(st1, st2, normalize=False), 4.0)
+
+    expected_x23 = np.array([0, 105, 195, 205, 300, 500, 1000])
+    expected_y23 = np.array([0, 0, -1, -1, 0, 0, 0])
+    expected_mp23 = np.array([2, 2, 1, 1, 1, 1, 1])
+
+    f = spk.spike_train_order_profile(st2, st3)
+
+    assert_array_equal(f.x, expected_x23)
+    assert_array_equal(f.y, expected_y23)
+    assert_array_equal(f.mp, expected_mp23)
+    assert f.almost_equal(DiscreteFunc(expected_x23, expected_y23,
+                                       expected_mp23))
+    assert_almost_equal(f.avrg(), -1.0/3.0)
+    assert_almost_equal(f.avrg(normalize=False), -2.0)
+    assert_almost_equal(spk.spike_train_order(st2, st3), -1.0/3.0)
+    assert_almost_equal(spk.spike_train_order(st2, st3, normalize=False),
+                        -2.0)
+
+    f = spk.spike_train_order_profile_multi([st1, st2, st3])
+
+    expected_x = np.array([0, 100, 105, 195, 200, 205, 300, 500, 1000])
+    expected_y = np.array([2, 2, 2, -2, 0, 0, 0, 0, 0])
+    expected_mp = np.array([2, 2, 4, 2, 2, 2, 4, 2, 2])
+
+    assert_array_equal(f.x, expected_x)
+    assert_array_equal(f.y, expected_y)
+    assert_array_equal(f.mp, expected_mp)
+
+    # averaging the profile should be the same as computing the synfire
+    # indicator directly
+    assert_almost_equal(f.avrg(), spk.spike_train_order([st1, st2, st3]))
+
+    # we can also compute the synfire indicator from the directionality matrix
+    D_matrix = spk.spike_directionality_matrix([st1, st2, st3],
+                                               normalize=False)
+    num_spikes = np.sum(len(st) for st in [st1, st2, st3])
+    syn_fire = np.sum(np.triu(D_matrix)) / num_spikes
+    assert_almost_equal(f.avrg(), syn_fire)
diff --git a/test/test_function.py b/test/test_function.py
index 92d378d..6c04839 100644
--- a/test/test_function.py
+++ b/test/test_function.py
@@ -10,6 +10,7 @@ Distributed under the BSD License
 from __future__ import print_function
 import numpy as np
 from copy import copy
+from nose.tools import raises
 from numpy.testing import assert_equal, assert_almost_equal, \
     assert_array_equal, assert_array_almost_equal
 
@@ -49,6 +50,8 @@ def test_pwc():
     assert_almost_equal(a, (0.5-0.5+0.5*1.5+1.0*0.75)/3.0, decimal=16)
     a = f.avrg([1.5, 3.5])
     assert_almost_equal(a, (-0.5*0.5+0.5*1.5+1.0*0.75)/2.0, decimal=16)
+    a = f.avrg([1.0, 2.0])
+    assert_almost_equal(a, (1.0*-0.5)/1.0, decimal=16)
     a = f.avrg([1.0, 3.5])
     assert_almost_equal(a, (-0.5*1.0+0.5*1.5+1.0*0.75)/2.5, decimal=16)
     a = f.avrg([1.0, 4.0])
@@ -120,6 +123,53 @@ def test_pwc_avrg():
     assert_array_almost_equal(f1.x, x_expected, decimal=16)
     assert_array_almost_equal(f1.y, y_expected, decimal=16)
 
+
+def test_pwc_integral():
+    # some random data
+    x = [0.0, 1.0, 2.0, 2.5, 4.0]
+    y = [1.0, -0.5, 1.5, 0.75]
+    f1 = spk.PieceWiseConstFunc(x, y)
+
+    # test full interval
+    full = 1.0*1.0 + 1.0*-0.5 + 0.5*1.5 + 1.5*0.75
+    assert_equal(f1.integral(), full)
+    assert_equal(f1.integral((np.min(x), np.max(x))), full)
+    # test part interval, spanning an edge
+    assert_equal(f1.integral((0.5, 1.5)), 0.5*1.0 + 0.5*-0.5)
+    # test part interval, just over two edges
+    assert_almost_equal(f1.integral((1.0-1e-16, 2+1e-16)), 1.0*-0.5,
+                        decimal=14)
+    # test part interval, between two edges
+    assert_equal(f1.integral((1.0, 2.0)), 1.0*-0.5)
+    assert_equal(f1.integral((1.2, 1.7)), (1.7-1.2)*-0.5)
+    # test part interval, start to before and after edge
+    assert_equal(f1.integral((0.0, 0.7)), 0.7*1.0)
+    assert_equal(f1.integral((0.0, 1.1)), 1.0*1.0+0.1*-0.5)
+    # test part interval, before and after edge till end
+    assert_equal(f1.integral((2.6, 4.0)), (4.0-2.6)*0.75)
+    assert_equal(f1.integral((2.4, 4.0)), (2.5-2.4)*1.5+(4-2.5)*0.75)
+
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_inv():
+    # some random data
+    x = [0.0, 1.0, 2.0, 2.5, 4.0]
+    y = [1.0, -0.5, 1.5, 0.75]
+    f1 = spk.PieceWiseConstFunc(x, y)
+    f1.integral((3, 2))
+
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_oob_1():
+    # some random data
+    x = [0.0, 1.0, 2.0, 2.5, 4.0]
+    y = [1.0, -0.5, 1.5, 0.75]
+    f1 = spk.PieceWiseConstFunc(x, y)
+    f1.integral((1, 6))
+
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_oob_2():
+    # some random data
+    x = [0.0, 1.0, 2.0, 2.5, 4.0]
+    y = [1.0, -0.5, 1.5, 0.75]
+    f1 = spk.PieceWiseConstFunc(x, y)
+    f1.integral((-1, 3))
+
 
 def test_pwl():
     x = [0.0, 1.0, 2.0, 2.5, 4.0]
@@ -162,6 +212,18 @@ def test_pwl():
     a = f.avrg([1.0, 4.0])
     assert_almost_equal(a, (-0.45 + 0.75 + 1.5*0.5) / 3.0, decimal=16)
 
+    # interval between support points
+    a = f.avrg([1.1, 1.5])
+    assert_almost_equal(a, (-0.5+0.1*0.1 - 0.45) * 0.5, decimal=14)
+
+    # starting at a support point
+    a = f.avrg([1.0, 1.5])
+    assert_almost_equal(a, (-0.5 - 0.45) * 0.5, decimal=14)
+
+    # start and end at support point
+    a = f.avrg([1.0, 2.0])
+    assert_almost_equal(a, (-0.5 - 0.4) * 0.5, decimal=14)
+
     # averaging over multiple intervals
     a = f.avrg([(0.5, 1.5), (1.5, 2.5)])
     assert_almost_equal(a, (1.375*0.5 - 0.45 + 0.75)/2.0, decimal=16)
diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py
new file mode 100644
index 0000000..e259903
--- /dev/null
+++ b/test/test_sync_filter.py
@@ -0,0 +1,95 @@
+""" test_sync_filter.py
+
+Tests the spike sync based filtering
+
+Copyright 2015, Mario Mulansky
+
+Distributed under the BSD License
+
+"""
+
+from __future__ import print_function
+import numpy as np
+from numpy.testing import assert_equal, assert_almost_equal, \
+    assert_array_almost_equal
+
+import pyspike as spk
+from pyspike import SpikeTrain
+
+
+def test_single_prof():
+    st1 = np.array([1.0, 2.0, 3.0, 4.0])
+    st2 = np.array([1.1, 2.1, 3.8])
+    st3 = np.array([0.9, 3.1, 4.1])
+
+    # cython implementation
+    try:
+        from pyspike.cython.cython_profiles import \
+            coincidence_single_profile_cython as coincidence_impl
+    except ImportError:
+        from pyspike.cython.python_backend import \
+            coincidence_single_python as coincidence_impl
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st2, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+    print(coincidences)
+    for i, t in enumerate(st1):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    coincidences = np.array(coincidence_impl(st2, st1, 0, 5.0, 0.0))
+    for i, t in enumerate(st2):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st3, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st3, 0, 5.0, 0.0))
+    for i, t in enumerate(st1):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    st1 = np.array([1.0, 2.0, 3.0, 4.0])
+    st2 = np.array([1.0, 2.0, 4.0])
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st2, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+    for i, t in enumerate(st1):
+        expected = sync_prof.y[sync_prof.x == t]/sync_prof.mp[sync_prof.x == t]
+        assert_equal(coincidences[i], expected,
+                     "At index %d" % i)
+
+
+def test_filter():
+    st1 = SpikeTrain(np.array([1.0, 2.0, 3.0, 4.0]), 5.0)
+    st2 = SpikeTrain(np.array([1.1, 2.1, 3.8]), 5.0)
+    st3 = SpikeTrain(np.array([0.9, 3.1, 4.1]), 5.0)
+
+    # filtered_spike_trains = spk.filter_by_spike_sync([st1, st2], 0.5)
+
+    # assert_equal(filtered_spike_trains[0].spikes, [1.0, 2.0, 4.0])
+    # assert_equal(filtered_spike_trains[1].spikes, [1.1, 2.1, 3.8])
+
+    # filtered_spike_trains = spk.filter_by_spike_sync([st2, st1], 0.5)
+
+    # assert_equal(filtered_spike_trains[0].spikes, [1.1, 2.1, 3.8])
+    # assert_equal(filtered_spike_trains[1].spikes, [1.0, 2.0, 4.0])
+
+    filtered_spike_trains = spk.filter_by_spike_sync([st1, st2, st3], 0.75)
+
+    for st in filtered_spike_trains:
+        print(st.spikes)
+
+    assert_equal(filtered_spike_trains[0].spikes, [1.0, 4.0])
+    assert_equal(filtered_spike_trains[1].spikes, [1.1, 3.8])
+    assert_equal(filtered_spike_trains[2].spikes, [0.9, 4.1])
+
+
+if __name__ == "__main__":
+    test_single_prof()
+    test_filter()
-- 
cgit v1.2.3