From 6eb6bc486027d3d5304a94cfb417a2257f2b6fd9 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 3 Feb 2015 12:19:53 +0100 Subject: moved cython functions to subdirectory --- pyspike/cython/python_backend.py | 485 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 485 insertions(+) create mode 100644 pyspike/cython/python_backend.py (limited to 'pyspike/cython/python_backend.py') diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py new file mode 100644 index 0000000..481daf9 --- /dev/null +++ b/pyspike/cython/python_backend.py @@ -0,0 +1,485 @@ +""" python_backend.py + +Collection of python functions that can be used instead of the cython +implementation. + +Copyright 2014, Mario Mulansky + +Distributed under the BSD License + +""" + +import numpy as np + + +############################################################ +# isi_distance_python +############################################################ +def isi_distance_python(s1, s2): + """ Plain Python implementation of the isi distance. + """ + # compute the interspike interval + nu1 = s1[1:] - s1[:-1] + nu2 = s2[1:] - s2[:-1] + + # compute the isi-distance + spike_events = np.empty(len(nu1) + len(nu2)) + spike_events[0] = s1[0] + # the values have one entry less - the number of intervals between events + isi_values = np.empty(len(spike_events) - 1) + # add the distance of the first events + # isi_values[0] = nu1[0]/nu2[0] - 1.0 if nu1[0] <= nu2[0] \ + # else 1.0 - nu2[0]/nu1[0] + isi_values[0] = abs(nu1[0] - nu2[0]) / max(nu1[0], nu2[0]) + index1 = 0 + index2 = 0 + index = 1 + while True: + # check which spike is next - from s1 or s2 + if s1[index1+1] < s2[index2+1]: + index1 += 1 + # break condition relies on existence of spikes at T_end + if index1 >= len(nu1): + break + spike_events[index] = s1[index1] + elif s1[index1+1] > s2[index2+1]: + index2 += 1 + if index2 >= len(nu2): + break + spike_events[index] = s2[index2] + else: # s1[index1 + 1] == s2[index2 + 1] + index1 += 1 + index2 += 1 + if (index1 >= len(nu1)) or (index2 >= len(nu2)): + break + spike_events[index] = s1[index1] + # compute the corresponding isi-distance + isi_values[index] = abs(nu1[index1] - nu2[index2]) / \ + max(nu1[index1], nu2[index2]) + index += 1 + # the last event is the interval end + spike_events[index] = s1[-1] + # use only the data added above + # could be less than original length due to equal spike times + return spike_events[:index + 1], isi_values[:index] + + +############################################################ +# get_min_dist +############################################################ +def get_min_dist(spike_time, spike_train, start_index=0): + """ Returns the minimal distance |spike_time - spike_train[i]| + with i>=start_index. + """ + d = abs(spike_time - spike_train[start_index]) + start_index += 1 + while start_index < len(spike_train): + d_temp = abs(spike_time - spike_train[start_index]) + if d_temp > d: + break + else: + d = d_temp + start_index += 1 + return d + + +############################################################ +# spike_distance_python +############################################################ +def spike_distance_python(spikes1, spikes2): + """ Computes the instantaneous spike-distance S_spike (t) of the two given + spike trains. The spike trains are expected to have auxiliary spikes at the + beginning and end of the interval. Use the function add_auxiliary_spikes to + add those spikes to the spike train. 
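+    A rough usage sketch (the spike times below are hypothetical;
+    add_auxiliary_spikes is the helper re-exported from pyspike):
+
+        t1 = add_auxiliary_spikes(np.array([0.2, 0.4, 0.6, 0.7]), 1.0)
+        t2 = add_auxiliary_spikes(np.array([0.3, 0.45, 0.8]), 1.0)
+        times, y_starts, y_ends = spike_distance_python(t1, t2)
+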
+ Args: + - spikes1, spikes2: ordered arrays of spike times with auxiliary spikes. + Returns: + - PieceWiseLinFunc describing the spike-distance. + """ + # check for auxiliary spikes - first and last spikes should be identical + assert spikes1[0] == spikes2[0], \ + "Given spike trains seems not to have auxiliary spikes!" + assert spikes1[-1] == spikes2[-1], \ + "Given spike trains seems not to have auxiliary spikes!" + # shorter variables + t1 = spikes1 + t2 = spikes2 + + spike_events = np.empty(len(t1) + len(t2) - 2) + spike_events[0] = t1[0] + y_starts = np.empty(len(spike_events) - 1) + y_ends = np.empty(len(spike_events) - 1) + + index1 = 0 + index2 = 0 + index = 1 + dt_p1 = 0.0 + dt_f1 = get_min_dist(t1[1], t2, 0) + dt_p2 = 0.0 + dt_f2 = get_min_dist(t2[1], t1, 0) + isi1 = max(t1[1]-t1[0], t1[2]-t1[1]) + isi2 = max(t2[1]-t2[0], t2[2]-t2[1]) + s1 = dt_f1*(t1[1]-t1[0])/isi1 + s2 = dt_f2*(t2[1]-t2[0])/isi2 + y_starts[0] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + while True: + # print(index, index1, index2) + if t1[index1+1] < t2[index2+1]: + index1 += 1 + # break condition relies on existence of spikes at T_end + if index1+1 >= len(t1): + break + spike_events[index] = t1[index1] + # first calculate the previous interval end value + dt_p1 = dt_f1 # the previous time was the following time before + s1 = dt_p1 + s2 = (dt_p2*(t2[index2+1]-t1[index1]) + + dt_f2*(t1[index1]-t2[index2])) / isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + # now the next interval start value + dt_f1 = get_min_dist(t1[index1+1], t2, index2) + isi1 = t1[index1+1]-t1[index1] + # s2 is the same as above, thus we can compute y2 immediately + y_starts[index] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + elif t1[index1+1] > t2[index2+1]: + index2 += 1 + if index2+1 >= len(t2): + break + spike_events[index] = t2[index2] + # first calculate the previous interval end value + dt_p2 = dt_f2 # the previous time was the following time before + s1 = (dt_p1*(t1[index1+1]-t2[index2]) + + dt_f1*(t2[index2]-t1[index1])) / isi1 + s2 = dt_p2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + # now the next interval start value + dt_f2 = get_min_dist(t2[index2+1], t1, index1) + #s2 = dt_f2 + isi2 = t2[index2+1]-t2[index2] + # s2 is the same as above, thus we can compute y2 immediately + y_starts[index] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + else: # t1[index1+1] == t2[index2+1] - generate only one event + index1 += 1 + index2 += 1 + if (index1+1 >= len(t1)) or (index2+1 >= len(t2)): + break + assert dt_f2 == 0.0 + assert dt_f1 == 0.0 + spike_events[index] = t1[index1] + y_ends[index-1] = 0.0 + y_starts[index] = 0.0 + dt_p1 = 0.0 + dt_p2 = 0.0 + dt_f1 = get_min_dist(t1[index1+1], t2, index2) + dt_f2 = get_min_dist(t2[index2+1], t1, index1) + isi1 = t1[index1+1]-t1[index1] + isi2 = t2[index2+1]-t2[index2] + index += 1 + # the last event is the interval end + spike_events[index] = t1[-1] + # the ending value of the last interval + isi1 = max(t1[-1]-t1[-2], t1[-2]-t1[-3]) + isi2 = max(t2[-1]-t2[-2], t2[-2]-t2[-3]) + s1 = dt_p1*(t1[-1]-t1[-2])/isi1 + s2 = dt_p2*(t2[-1]-t2[-2])/isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + # use only the data added above + # could be less than original length due to equal spike times + return spike_events[:index+1], y_starts[:index], y_ends[:index] + + +############################################################ +# cumulative_sync_python +############################################################ +def cumulative_sync_python(spikes1, spikes2): + + 
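+    """ Cumulative variant of SPIKE-Synchronization: returns the merged
+    spike times st and the running count c of coincident spikes, where
+    two spikes are counted as coincident if they fall within the
+    adaptive window tau computed by get_tau below.
+    """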
def get_tau(spikes1, spikes2, i, j): + return 0.5*min([spikes1[i]-spikes1[i-1], spikes1[i+1]-spikes1[i], + spikes2[j]-spikes2[j-1], spikes2[j+1]-spikes2[j]]) + N1 = len(spikes1) + N2 = len(spikes2) + i = 0 + j = 0 + n = 0 + st = np.zeros(N1 + N2 - 2) + c = np.zeros(N1 + N2 - 3) + c[0] = 0 + st[0] = 0 + while n < N1 + N2: + if spikes1[i+1] < spikes2[j+1]: + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + st[n] = spikes1[i] + if spikes1[i]-spikes2[j] > tau: + c[n] = c[n-1] + else: + c[n] = c[n-1]+1 + elif spikes1[i+1] > spikes2[j+1]: + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + st[n] = spikes2[j] + if spikes2[j]-spikes1[i] > tau: + c[n] = c[n-1] + else: + c[n] = c[n-1]+1 + else: # spikes1[i+1] = spikes2[j+1] + j += 1 + i += 1 + if i == N1-1 or j == N2-1: + break + n += 1 + st[n] = spikes1[i] + c[n] = c[n-1] + n += 1 + st[n] = spikes1[i] + c[n] = c[n-1]+1 + c[0] = 0 + st[0] = spikes1[0] + st[-1] = spikes1[-1] + + return st, c + + +############################################################ +# coincidence_python +############################################################ +def coincidence_python(spikes1, spikes2): + + def get_tau(spikes1, spikes2, i, j): + m = 1E100 # some huge number + if i < len(spikes1)-2: + m = min(m, spikes1[i+1]-spikes1[i]) + if j < len(spikes2)-2: + m = min(m, spikes2[j+1]-spikes2[j]) + if i > 1: + m = min(m, spikes1[i]-spikes1[i-1]) + if j > 1: + m = min(m, spikes2[j]-spikes2[j-1]) + return 0.5*m + N1 = len(spikes1) + N2 = len(spikes2) + i = 0 + j = 0 + n = 0 + st = np.zeros(N1 + N2 - 2) # spike times + c = np.zeros(N1 + N2 - 2) # coincidences + mp = np.ones(N1 + N2 - 2) # multiplicity + while n < N1 + N2 - 2: + if spikes1[i+1] < spikes2[j+1]: + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + st[n] = spikes1[i] + if j > 0 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + c[n] = 1 + c[n-1] = 1 + elif spikes1[i+1] > spikes2[j+1]: + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + st[n] = spikes2[j] + if i > 0 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + c[n] = 1 + c[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + if i == N1-1 or j == N2-1: + break + n += 1 + # add only one event, but with coincidence 2 and multiplicity 2 + st[n] = spikes1[i] + c[n] = 2 + mp[n] = 2 + + st = st[:n+2] + c = c[:n+2] + mp = mp[:n+2] + + st[0] = spikes1[0] + st[-1] = spikes1[-1] + c[0] = c[1] + c[-1] = c[-2] + mp[0] = mp[1] + mp[-1] = mp[-2] + + return st, c, mp + + +############################################################ +# add_piece_wise_const_python +############################################################ +def add_piece_wise_const_python(x1, y1, x2, y2): + x_new = np.empty(len(x1) + len(x2)) + y_new = np.empty(len(x_new)-1) + x_new[0] = x1[0] + y_new[0] = y1[0] + y2[0] + index1 = 0 + index2 = 0 + index = 0 + while (index1+1 < len(y1)) and (index2+1 < len(y2)): + index += 1 + # print(index1+1, x1[index1+1], y1[index1+1], x_new[index]) + if x1[index1+1] < x2[index2+1]: + index1 += 1 + x_new[index] = x1[index1] + elif x1[index1+1] > x2[index2+1]: + index2 += 1 + x_new[index] = x2[index2] + else: # x1[index1+1] == x2[index2+1]: + index1 += 1 + index2 += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + y2[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < len(y1): + 
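+        # x1/y1 still has entries left: copy its tail over, adding the
+        # final (constant) value of the exhausted function y2 to each piece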
x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] + y_new[index+1:index+1+len(y1)-index1-1] = y1[index1+1:] + y2[-1] + index += len(x1)-index1-2 + elif index2+1 < len(y2): + x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] + y_new[index+1:index+1+len(y2)-index2-1] = y2[index2+1:] + y1[-1] + index += len(x2)-index2-2 + else: # both arrays reached the end simultaneously + # only the last x-value missing + x_new[index+1] = x1[-1] + # the last value is again the end of the interval + # x_new[index+1] = x1[-1] + # only use the data that was actually filled + + return x_new[:index+2], y_new[:index+1] + + +############################################################ +# add_piece_lin_const_python +############################################################ +def add_piece_wise_lin_python(x1, y11, y12, x2, y21, y22): + x_new = np.empty(len(x1) + len(x2)) + y1_new = np.empty(len(x_new)-1) + y2_new = np.empty_like(y1_new) + x_new[0] = x1[0] + y1_new[0] = y11[0] + y21[0] + index1 = 0 # index for self + index2 = 0 # index for f + index = 0 # index for new + while (index1+1 < len(y11)) and (index2+1 < len(y21)): + # print(index1+1, x1[index1+1], self.y[index1+1], x_new[index]) + if x1[index1+1] < x2[index2+1]: + # first compute the end value of the previous interval + # linear interpolation of the interval + y = y21[index2] + (y22[index2]-y21[index2]) * \ + (x1[index1+1]-x2[index2]) / (x2[index2+1]-x2[index2]) + y2_new[index] = y12[index1] + y + index1 += 1 + index += 1 + x_new[index] = x1[index1] + # and the starting value for the next interval + y1_new[index] = y11[index1] + y + elif x1[index1+1] > x2[index2+1]: + # first compute the end value of the previous interval + # linear interpolation of the interval + y = y11[index1] + (y12[index1]-y11[index1]) * \ + (x2[index2+1]-x1[index1]) / \ + (x1[index1+1]-x1[index1]) + y2_new[index] = y22[index2] + y + index2 += 1 + index += 1 + x_new[index] = x2[index2] + # and the starting value for the next interval + y1_new[index] = y21[index2] + y + else: # x1[index1+1] == x2[index2+1]: + y2_new[index] = y12[index1] + y22[index2] + index1 += 1 + index2 += 1 + index += 1 + x_new[index] = x1[index1] + y1_new[index] = y11[index1] + y21[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < len(y11): + # compute the linear interpolations values + y = y21[index2] + (y22[index2]-y21[index2]) * \ + (x1[index1+1:-1]-x2[index2]) / (x2[index2+1]-x2[index2]) + x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] + y1_new[index+1:index+1+len(y11)-index1-1] = y11[index1+1:]+y + y2_new[index:index+len(y12)-index1-1] = y12[index1:-1] + y + index += len(x1)-index1-2 + elif index2+1 < len(y21): + # compute the linear interpolations values + y = y11[index1] + (y12[index1]-y11[index1]) * \ + (x2[index2+1:-1]-x1[index1]) / \ + (x1[index1+1]-x1[index1]) + x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] + y1_new[index+1:index+1+len(y21)-index2-1] = y21[index2+1:] + y + y2_new[index:index+len(y22)-index2-1] = y22[index2:-1] + y + index += len(x2)-index2-2 + else: # both arrays reached the end simultaneously + # only the last x-value missing + x_new[index+1] = x1[-1] + # finally, the end value for the last interval + y2_new[index] = y12[-1]+y22[-1] + # only use the data that was actually filled + return x_new[:index+2], y1_new[:index+1], y2_new[:index+1] + + +############################################################ +# add_discrete_function_python +############################################################ +def 
add_discrete_function_python(x1, y1, mp1, x2, y2, mp2): + + x_new = np.empty(len(x1) + len(x2)) + y_new = np.empty_like(x_new) + mp_new = np.empty_like(x_new) + x_new[0] = x1[0] + index1 = 0 + index2 = 0 + index = 0 + while (index1+1 < len(y1)) and (index2+1 < len(y2)): + if x1[index1+1] < x2[index2+1]: + index1 += 1 + index += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + mp_new[index] = mp1[index1] + elif x1[index1+1] > x2[index2+1]: + index2 += 1 + index += 1 + x_new[index] = x2[index2] + y_new[index] = y2[index2] + mp_new[index] = mp2[index2] + else: # x1[index1+1] == x2[index2+1] + index1 += 1 + index2 += 1 + index += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + y2[index2] + mp_new[index] = mp1[index1] + mp2[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < len(y1): + x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] + y_new[index+1:index+1+len(x1)-index1-1] = y1[index1+1:] + mp_new[index+1:index+1+len(x1)-index1-1] = mp1[index1+1:] + index += len(x1)-index1-1 + elif index2+1 < len(y2): + x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] + y_new[index+1:index+1+len(x2)-index2-1] = y2[index2+1:] + mp_new[index+1:index+1+len(x2)-index2-1] = mp2[index2+1:] + index += len(x2)-index2-1 + # else: # both arrays reached the end simultaneously + # x_new[index+1] = x1[-1] + # y_new[index+1] = y1[-1] + y2[-1] + # mp_new[index+1] = mp1[-1] + mp2[-1] + + y_new[0] = y_new[1] + mp_new[0] = mp_new[1] + + # the last value is again the end of the interval + # only use the data that was actually filled + return x_new[:index+1], y_new[:index+1], mp_new[:index+1] + -- cgit v1.2.3 From 7989b2d337a0e5d2e0223d7fdec73833ff47c7bb Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 3 Feb 2015 16:40:55 +0100 Subject: first version of psth profile --- examples/spike_sync.py | 8 ++++++++ pyspike/__init__.py | 3 ++- pyspike/cython/python_backend.py | 1 - pyspike/psth.py | 27 +++++++++++++++++++++++++++ 4 files changed, 37 insertions(+), 2 deletions(-) create mode 100644 pyspike/psth.py (limited to 'pyspike/cython/python_backend.py') diff --git a/examples/spike_sync.py b/examples/spike_sync.py index 7f9e762..9c5f75c 100644 --- a/examples/spike_sync.py +++ b/examples/spike_sync.py @@ -30,6 +30,8 @@ plt.legend(loc="center right") plt.figure() +plt.subplot(211) + f = spk.spike_sync_profile_multi(spike_trains) x, y = f.get_plottable_data() plt.plot(x, y, '-b', alpha=0.7, label="SPIKE-Sync profile") @@ -37,6 +39,12 @@ plt.plot(x, y, '-b', alpha=0.7, label="SPIKE-Sync profile") x1, y1 = f.get_plottable_data(averaging_window_size=50) plt.plot(x1, y1, '-k', lw=2.5, label="averaged SPIKE-Sync profile") +plt.subplot(212) + +f_psth = spk.psth(spike_trains, bin_size=5.0) +x, y = f_psth.get_plottable_data() +plt.plot(x, y, '-k', alpha=1.0, label="PSTH") + print("Average:", f.avrg()) diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 945dd4e..4d3f9f6 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -4,7 +4,7 @@ Copyright 2014-2015, Mario Mulansky Distributed under the BSD License """ -__all__ = ["isi_distance", "spike_distance", "spike_sync", +__all__ = ["isi_distance", "spike_distance", "spike_sync", "psth", "spikes", "PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc"] @@ -18,6 +18,7 @@ from spike_distance import spike_profile, spike_distance, spike_profile_multi,\ spike_distance_multi, spike_distance_matrix from spike_sync import spike_sync_profile, spike_sync,\ spike_sync_profile_multi, spike_sync_multi, 
spike_sync_matrix +from psth import psth from spikes import add_auxiliary_spikes, load_spike_trains_from_txt, \ spike_train_from_string, merge_spike_trains, generate_poisson_spikes diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index 481daf9..4efefc5 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -482,4 +482,3 @@ def add_discrete_function_python(x1, y1, mp1, x2, y2, mp2): # the last value is again the end of the interval # only use the data that was actually filled return x_new[:index+1], y_new[:index+1], mp_new[:index+1] - diff --git a/pyspike/psth.py b/pyspike/psth.py new file mode 100644 index 0000000..8516460 --- /dev/null +++ b/pyspike/psth.py @@ -0,0 +1,27 @@ +""" + +Module containing functions to compute the PSTH profile + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License +""" + +import numpy as np +from pyspike import PieceWiseConstFunc + + +# Computes the Peristimulus time histogram of a set of spike trains +def psth(spike_trains, bin_size): + + bins = int((spike_trains[0][-1] - spike_trains[0][0]) / bin_size) + + N = len(spike_trains) + combined_spike_train = spike_trains[0][1:-1] + for i in xrange(1, len(spike_trains)): + combined_spike_train = np.append(combined_spike_train, + spike_trains[i][1:-1]) + + vals, edges = np.histogram(combined_spike_train, bins, density=False) + bin_size = edges[1]-edges[0] + return PieceWiseConstFunc(edges, vals/(N*bin_size)) -- cgit v1.2.3 From 22f480fc5c53308779667beb24604d8fc019fd9e Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Thu, 2 Apr 2015 20:24:11 +0200 Subject: add max_tau to spike sync in python backend --- pyspike/cython/python_backend.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) (limited to 'pyspike/cython/python_backend.py') diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index 4efefc5..749507a 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -245,9 +245,9 @@ def cumulative_sync_python(spikes1, spikes2): ############################################################ # coincidence_python ############################################################ -def coincidence_python(spikes1, spikes2): +def coincidence_python(spikes1, spikes2, max_tau): - def get_tau(spikes1, spikes2, i, j): + def get_tau(spikes1, spikes2, i, j, max_tau): m = 1E100 # some huge number if i < len(spikes1)-2: m = min(m, spikes1[i+1]-spikes1[i]) @@ -257,7 +257,10 @@ def coincidence_python(spikes1, spikes2): m = min(m, spikes1[i]-spikes1[i-1]) if j > 1: m = min(m, spikes2[j]-spikes2[j-1]) - return 0.5*m + m *= 0.5 + if max_tau > 0.0: + m = min(m, max_tau) + return m N1 = len(spikes1) N2 = len(spikes2) i = 0 @@ -270,7 +273,7 @@ def coincidence_python(spikes1, spikes2): if spikes1[i+1] < spikes2[j+1]: i += 1 n += 1 - tau = get_tau(spikes1, spikes2, i, j) + tau = get_tau(spikes1, spikes2, i, j, max_tau) st[n] = spikes1[i] if j > 0 and spikes1[i]-spikes2[j] < tau: # coincidence between the current spike and the previous spike @@ -280,7 +283,7 @@ def coincidence_python(spikes1, spikes2): elif spikes1[i+1] > spikes2[j+1]: j += 1 n += 1 - tau = get_tau(spikes1, spikes2, i, j) + tau = get_tau(spikes1, spikes2, i, j, max_tau) st[n] = spikes2[j] if i > 0 and spikes2[j]-spikes1[i] < tau: # coincidence between the current spike and the previous spike -- cgit v1.2.3 From 27aa30d1fdb830a04b608c702cf7b616115eeb50 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 22 Apr 2015 18:18:30 
+0200 Subject: added SpikeTrain class, changed isi_distance spike trains are now represented as SpikeTrain objects consisting of the spike times and the interval edges. The implementation of the ISI-distance has been modified accordingly. The SPIKE-distance and SPIKE-Synchronization are still to be done. --- pyspike/SpikeTrain.py | 34 +++++++++++++++ pyspike/__init__.py | 3 +- pyspike/cython/cython_distance.pyx | 85 ++++++++++++++++++++++++-------------- pyspike/cython/python_backend.py | 72 +++++++++++++++++++++----------- pyspike/isi_distance.py | 17 ++++---- test/test_distance.py | 19 ++++----- 6 files changed, 156 insertions(+), 74 deletions(-) create mode 100644 pyspike/SpikeTrain.py (limited to 'pyspike/cython/python_backend.py') diff --git a/pyspike/SpikeTrain.py b/pyspike/SpikeTrain.py new file mode 100644 index 0000000..4760014 --- /dev/null +++ b/pyspike/SpikeTrain.py @@ -0,0 +1,34 @@ +""" Module containing the class representing spike trains for PySpike. + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License +""" + +import numpy as np +import collections + + +class SpikeTrain: + """ Class representing spike trains for the PySpike Module.""" + + def __init__(self, spike_times, interval): + """ Constructs the SpikeTrain + :param spike_times: ordered array of spike times. + :param interval: interval defining the edges of the spike train. + Given as a pair of floats (T0, T1) or a single float T1, where T0=0 is + assumed. + """ + + # TODO: sanity checks + self.spikes = np.array(spike_times) + + # check if interval is as sequence + if not isinstance(interval, collections.Sequence): + # treat value as end time and assume t_start = 0 + self.t_start = 0.0 + self.t_end = interval + else: + # extract times from sequence + self.t_start = interval[0] + self.t_end = interval[1] diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 4d3f9f6..76e58a1 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -5,12 +5,13 @@ Distributed under the BSD License """ __all__ = ["isi_distance", "spike_distance", "spike_sync", "psth", - "spikes", "PieceWiseConstFunc", "PieceWiseLinFunc", + "spikes", "SpikeTrain", "PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc"] from PieceWiseConstFunc import PieceWiseConstFunc from PieceWiseLinFunc import PieceWiseLinFunc from DiscreteFunc import DiscreteFunc +from SpikeTrain import SpikeTrain from isi_distance import isi_profile, isi_distance, isi_profile_multi,\ isi_distance_multi, isi_distance_matrix diff --git a/pyspike/cython/cython_distance.pyx b/pyspike/cython/cython_distance.pyx index 2834ca5..1d652ee 100644 --- a/pyspike/cython/cython_distance.pyx +++ b/pyspike/cython/cython_distance.pyx @@ -42,57 +42,82 @@ ctypedef np.float_t DTYPE_t ############################################################ # isi_distance_cython ############################################################ -def isi_distance_cython(double[:] s1, - double[:] s2): +def isi_distance_cython(double[:] s1, double[:] s2, + double t_start, double t_end): cdef double[:] spike_events cdef double[:] isi_values cdef int index1, index2, index cdef int N1, N2 cdef double nu1, nu2 - N1 = len(s1)-1 - N2 = len(s2)-1 + N1 = len(s1) + N2 = len(s2) + + spike_events = np.empty(N1+N2+2) + # the values have one entry less as they are defined at the intervals + isi_values = np.empty(N1+N2+1) + + # first x-value of the profile + spike_events[0] = t_start + + # first interspike interval - check if a spike exists at the start time + if s1[0] > t_start: + nu1 = s1[0] - t_start + index1 
= -1 + else: + nu1 = s1[1]-s1[0] + index1 = 0 + + if s2[0] > t_start: + nu2 = s2[0] - t_start + index2 = -1 + else: + nu2 = s2[1]-s2[0] + index2 = 0 - nu1 = s1[1]-s1[0] - nu2 = s2[1]-s2[0] - spike_events = np.empty(N1+N2) - spike_events[0] = s1[0] - # the values have one entry less - the number of intervals between events - isi_values = np.empty(N1+N2-1) + isi_values[0] = fabs(nu1-nu2)/fmax(nu1, nu2) + index = 1 with nogil: # release the interpreter to allow multithreading - isi_values[0] = fabs(nu1-nu2)/fmax(nu1, nu2) - index1 = 0 - index2 = 0 - index = 1 - while True: - # check which spike is next - from s1 or s2 - if s1[index1+1] < s2[index2+1]: + while index1+index2 < N1+N2-2: + # check which spike is next, only if there are spikes left in 1 + # next spike in 1 is earlier, or there are no spikes left in 2 + if (index1 < N1-1) and ((index2 == N2-1) or + (s1[index1+1] < s2[index2+1])): index1 += 1 - # break condition relies on existence of spikes at T_end - if index1 >= N1: - break spike_events[index] = s1[index1] - nu1 = s1[index1+1]-s1[index1] - elif s1[index1+1] > s2[index2+1]: + if index1 < N1-1: + nu1 = s1[index1+1]-s1[index1] + else: + nu1 = t_end-s1[index1] + elif (index2 < N2-1) and ((index1 == N1-1) or + (s1[index1+1] > s2[index2+1])): index2 += 1 - if index2 >= N2: - break spike_events[index] = s2[index2] - nu2 = s2[index2+1]-s2[index2] + if index2 < N2-1: + nu2 = s2[index2+1]-s2[index2] + else: + nu2 = t_end-s2[index2] else: # s1[index1+1] == s2[index2+1] index1 += 1 index2 += 1 - if (index1 >= N1) or (index2 >= N2): - break spike_events[index] = s1[index1] - nu1 = s1[index1+1]-s1[index1] - nu2 = s2[index2+1]-s2[index2] + if index1 < N1-1: + nu1 = s1[index1+1]-s1[index1] + else: + nu1 = t_end-s1[index1] + if index2 < N2-1: + nu2 = s2[index2+1]-s2[index2] + else: + nu2 = t_end-s2[index2] # compute the corresponding isi-distance isi_values[index] = fabs(nu1 - nu2) / fmax(nu1, nu2) index += 1 # the last event is the interval end - spike_events[index] = s1[N1] + if spike_events[index-1] == t_end: + index -= 1 + else: + spike_events[index] = t_end # end nogil return spike_events[:index+1], isi_values[:index] diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index 749507a..4c37236 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -15,50 +15,72 @@ import numpy as np ############################################################ # isi_distance_python ############################################################ -def isi_distance_python(s1, s2): +def isi_distance_python(s1, s2, t_start, t_end): """ Plain Python implementation of the isi distance. 
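+    t_start and t_end give the edges of the observation interval; when a
+    train has no spike at an edge, the edge itself bounds the first or
+    last interspike interval (the index1 = -1 / index2 = -1 branches).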
""" - # compute the interspike interval - nu1 = s1[1:] - s1[:-1] - nu2 = s2[1:] - s2[:-1] + N1 = len(s1) + N2 = len(s2) # compute the isi-distance - spike_events = np.empty(len(nu1) + len(nu2)) - spike_events[0] = s1[0] + spike_events = np.empty(N1+N2+2) + spike_events[0] = t_start # the values have one entry less - the number of intervals between events isi_values = np.empty(len(spike_events) - 1) - # add the distance of the first events - # isi_values[0] = nu1[0]/nu2[0] - 1.0 if nu1[0] <= nu2[0] \ - # else 1.0 - nu2[0]/nu1[0] - isi_values[0] = abs(nu1[0] - nu2[0]) / max(nu1[0], nu2[0]) - index1 = 0 - index2 = 0 + if s1[0] > t_start: + nu1 = s1[0] - t_start + index1 = -1 + else: + nu1 = s1[1] - s1[0] + index1 = 0 + if s2[0] > t_start: + nu2 = s2[0] - t_start + index2 = -1 + else: + nu2 = s2[1] - s2[0] + index2 = 0 + + isi_values[0] = abs(nu1 - nu2) / max(nu1, nu2) index = 1 - while True: + while index1+index2 < N1+N2-2: # check which spike is next - from s1 or s2 - if s1[index1+1] < s2[index2+1]: + if (index1 < N1-1) and (index2 == N2-1 or s1[index1+1] < s2[index2+1]): index1 += 1 - # break condition relies on existence of spikes at T_end - if index1 >= len(nu1): - break spike_events[index] = s1[index1] - elif s1[index1+1] > s2[index2+1]: + if index1 < N1-1: + nu1 = s1[index1+1]-s1[index1] + else: + nu1 = t_end-s1[index1] + + elif (index2 < N2-1) and (index1 == N1-1 or + s1[index1+1] > s2[index2+1]): index2 += 1 - if index2 >= len(nu2): - break spike_events[index] = s2[index2] + if index2 < N2-1: + nu2 = s2[index2+1]-s2[index2] + else: + nu2 = t_end-s2[index2] + else: # s1[index1 + 1] == s2[index2 + 1] index1 += 1 index2 += 1 - if (index1 >= len(nu1)) or (index2 >= len(nu2)): - break spike_events[index] = s1[index1] + if index1 < N1-1: + nu1 = s1[index1+1]-s1[index1] + else: + nu1 = t_end-s1[index1] + if index2 < N2-1: + nu2 = s2[index2+1]-s2[index2] + else: + nu2 = t_end-s2[index2] # compute the corresponding isi-distance - isi_values[index] = abs(nu1[index1] - nu2[index2]) / \ - max(nu1[index1], nu2[index2]) + isi_values[index] = abs(nu1 - nu2) / \ + max(nu1, nu2) index += 1 # the last event is the interval end - spike_events[index] = s1[-1] + if spike_events[index-1] == t_end: + index -= 1 + else: + spike_events[index] = t_end # use only the data added above # could be less than original length due to equal spike times return spike_events[:index + 1], isi_values[:index] diff --git a/pyspike/isi_distance.py b/pyspike/isi_distance.py index c2ef8e8..a34e135 100644 --- a/pyspike/isi_distance.py +++ b/pyspike/isi_distance.py @@ -14,23 +14,25 @@ from pyspike.generic import _generic_profile_multi, _generic_distance_matrix ############################################################ # isi_profile ############################################################ -def isi_profile(spikes1, spikes2): +def isi_profile(spike_train1, spike_train2): """ Computes the isi-distance profile :math:`S_{isi}(t)` of the two given spike trains. Retruns the profile as a PieceWiseConstFunc object. The S_isi values are defined positive S_isi(t)>=0. The spike trains are expected to have auxiliary spikes at the beginning and end of the interval. Use the function add_auxiliary_spikes to add those spikes to the spike train. - :param spikes1: ordered array of spike times with auxiliary spikes. - :param spikes2: ordered array of spike times with auxiliary spikes. + :param spike_train1: First spike train. + :type spike_train1: :class:`pyspike.SpikeTrain` + :param spike_train2: Second spike train. 
+ :type spike_train2: `SpikeTrain` :returns: The isi-distance profile :math:`S_{isi}(t)` :rtype: :class:`pyspike.function.PieceWiseConstFunc` """ - # check for auxiliary spikes - first and last spikes should be identical - assert spikes1[0] == spikes2[0], \ + # check whether the spike trains are defined for the same interval + assert spike_train1.t_start == spike_train2.t_start, \ "Given spike trains seems not to have auxiliary spikes!" - assert spikes1[-1] == spikes2[-1], \ + assert spike_train1.t_end == spike_train2.t_end, \ "Given spike trains seems not to have auxiliary spikes!" # load cython implementation @@ -45,7 +47,8 @@ Falling back to slow python backend.") from cython.python_backend import isi_distance_python \ as isi_distance_impl - times, values = isi_distance_impl(spikes1, spikes2) + times, values = isi_distance_impl(spike_train1.spikes, spike_train2.spikes, + spike_train1.t_start, spike_train1.t_end) return PieceWiseConstFunc(times, values) diff --git a/test/test_distance.py b/test/test_distance.py index ba19f5e..b54e908 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -15,12 +15,13 @@ from numpy.testing import assert_equal, assert_almost_equal, \ assert_array_almost_equal import pyspike as spk +from pyspike import SpikeTrain def test_isi(): # generate two spike trains: - t1 = np.array([0.2, 0.4, 0.6, 0.7]) - t2 = np.array([0.3, 0.45, 0.8, 0.9, 0.95]) + t1 = SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0) + t2 = SpikeTrain([0.3, 0.45, 0.8, 0.9, 0.95], 1.0) # pen&paper calculation of the isi distance expected_times = [0.0, 0.2, 0.3, 0.4, 0.45, 0.6, 0.7, 0.8, 0.9, 0.95, 1.0] @@ -32,8 +33,6 @@ def test_isi(): expected_isi_val = sum((expected_times[1:] - expected_times[:-1]) * expected_isi)/(expected_times[-1]-expected_times[0]) - t1 = spk.add_auxiliary_spikes(t1, 1.0) - t2 = spk.add_auxiliary_spikes(t2, 1.0) f = spk.isi_profile(t1, t2) # print("ISI: ", f.y) @@ -44,8 +43,8 @@ def test_isi(): assert_equal(spk.isi_distance(t1, t2), expected_isi_val) # check with some equal spike times - t1 = np.array([0.2, 0.4, 0.6]) - t2 = np.array([0.1, 0.4, 0.5, 0.6]) + t1 = SpikeTrain([0.2, 0.4, 0.6], [0.0, 1.0]) + t2 = SpikeTrain([0.1, 0.4, 0.5, 0.6], [0.0, 1.0]) expected_times = [0.0, 0.1, 0.2, 0.4, 0.5, 0.6, 1.0] expected_isi = [0.1/0.2, 0.1/0.3, 0.1/0.3, 0.1/0.2, 0.1/0.2, 0.0/0.5] @@ -55,8 +54,6 @@ def test_isi(): expected_isi_val = sum((expected_times[1:] - expected_times[:-1]) * expected_isi)/(expected_times[-1]-expected_times[0]) - t1 = spk.add_auxiliary_spikes(t1, 1.0) - t2 = spk.add_auxiliary_spikes(t2, 1.0) f = spk.isi_profile(t1, t2) assert_equal(f.x, expected_times) @@ -318,6 +315,6 @@ def test_multi_variate_subsets(): if __name__ == "__main__": test_isi() - test_spike() - test_multi_isi() - test_multi_spike() + # test_spike() + # test_multi_isi() + # test_multi_spike() -- cgit v1.2.3 From ed85a9b72edcb7bba6ae1105e213b3b0a2f78d3a Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Fri, 24 Apr 2015 00:49:16 +0200 Subject: changed spike distance to use new SpikeTrain class --- pyspike/cython/cython_distance.pyx | 199 +++++++++++++++++++++++++----------- pyspike/cython/python_backend.py | 203 ++++++++++++++++++++++++------------- pyspike/spike_distance.py | 37 ++++--- test/test_distance.py | 34 ++++--- 4 files changed, 313 insertions(+), 160 deletions(-) (limited to 'pyspike/cython/python_backend.py') diff --git a/pyspike/cython/cython_distance.pyx b/pyspike/cython/cython_distance.pyx index 1d652ee..7999e0a 100644 --- a/pyspike/cython/cython_distance.pyx +++ 
b/pyspike/cython/cython_distance.pyx @@ -131,21 +131,30 @@ cdef inline double get_min_dist_cython(double spike_time, # use memory view to ensure inlining # np.ndarray[DTYPE_t,ndim=1] spike_train, int N, - int start_index=0) nogil: + int start_index, + double t_start, double t_end) nogil: """ Returns the minimal distance |spike_time - spike_train[i]| with i>=start_index. """ cdef double d, d_temp - d = fabs(spike_time - spike_train[start_index]) - start_index += 1 + # start with the distance to the start time + d = fabs(spike_time - t_start) + if start_index < 0: + start_index = 0 while start_index < N: d_temp = fabs(spike_time - spike_train[start_index]) if d_temp > d: - break + return d else: d = d_temp start_index += 1 - return d + + # finally, check the distance to end time + d_temp = fabs(t_end - spike_time) + if d_temp > d: + return d + else: + return d_temp ############################################################ @@ -160,96 +169,162 @@ cdef inline double isi_avrg_cython(double isi1, double isi2) nogil: ############################################################ # spike_distance_cython ############################################################ -def spike_distance_cython(double[:] t1, - double[:] t2): +def spike_distance_cython(double[:] t1, double[:] t2, + double t_start, double t_end): cdef double[:] spike_events cdef double[:] y_starts cdef double[:] y_ends cdef int N1, N2, index1, index2, index - cdef double dt_p1, dt_p2, dt_f1, dt_f2, isi1, isi2, s1, s2 + cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2 + cdef double isi1, isi2, s1, s2 N1 = len(t1) N2 = len(t2) - spike_events = np.empty(N1+N2-2) - spike_events[0] = t1[0] + spike_events = np.empty(N1+N2+2) + y_starts = np.empty(len(spike_events)-1) y_ends = np.empty(len(spike_events)-1) with nogil: # release the interpreter to allow multithreading - index1 = 0 - index2 = 0 - index = 1 - dt_p1 = 0.0 - dt_f1 = get_min_dist_cython(t1[1], t2, N2, 0) - dt_p2 = 0.0 - dt_f2 = get_min_dist_cython(t2[1], t1, N1, 0) - isi1 = max(t1[1]-t1[0], t1[2]-t1[1]) - isi2 = max(t2[1]-t2[0], t2[2]-t2[1]) - s1 = dt_f1*(t1[1]-t1[0])/isi1 - s2 = dt_f2*(t2[1]-t2[0])/isi2 + spike_events[0] = t_start + t_p1 = t_start + t_p2 = t_start + if t1[0] > t_start: + # dt_p1 = t2[0]-t_start + dt_p1 = 0.0 + t_f1 = t1[0] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) + s1 = dt_f1*(t_f1-t_start)/isi1 + index1 = -1 + else: + dt_p1 = 0.0 + t_f1 = t1[1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + isi1 = t1[1]-t1[0] + s1 = dt_p1 + index1 = 0 + if t2[0] > t_start: + # dt_p1 = t2[0]-t_start + dt_p2 = 0.0 + t_f2 = t2[0] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) + s2 = dt_f2*(t_f2-t_start)/isi2 + index2 = -1 + else: + dt_p2 = 0.0 + t_f2 = t2[1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + isi2 = t2[1]-t2[0] + s2 = dt_p2 + index2 = 0 + y_starts[0] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) - while True: + index = 1 + + while index1+index2 < N1+N2-2: # print(index, index1, index2) - if t1[index1+1] < t2[index2+1]: + if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1): index1 += 1 - # break condition relies on existence of spikes at T_end - if index1+1 >= N1: - break - spike_events[index] = t1[index1] # first calculate the previous interval end value - dt_p1 = dt_f1 # the previous time now was the following time before + # the previous time now was the following time before: + dt_p1 = dt_f1 + 
t_p1 = t_f1 # t_p1 contains the current time point + # get the next time + if index1 < N1-1: + t_f1 = t1[index1+1] + else: + t_f1 = t_end + spike_events[index] = t_p1 s1 = dt_p1 - s2 = (dt_p2*(t2[index2+1]-t1[index1]) + - dt_f2*(t1[index1]-t2[index2])) / isi2 - y_ends[index-1] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, + isi2) # now the next interval start value - dt_f1 = get_min_dist_cython(t1[index1+1], t2, N2, index2) - isi1 = t1[index1+1]-t1[index1] + if index1 < N1-1: + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1-t_p1 + else: + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # s1 needs adjustment due to change of isi1 + s1 = dt_p1*(t_end-t1[N1-1])/isi1 # s2 is the same as above, thus we can compute y2 immediately - y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) - elif t1[index1+1] > t2[index2+1]: + y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, + isi2) + elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): index2 += 1 - if index2+1 >= N2: - break - spike_events[index] = t2[index2] # first calculate the previous interval end value - dt_p2 = dt_f2 # the previous time now was the following time before - s1 = (dt_p1*(t1[index1+1]-t2[index2]) + - dt_f1*(t2[index2]-t1[index1])) / isi1 + # the previous time now was the following time before: + dt_p2 = dt_f2 + t_p2 = t_f2 # t_p1 contains the current time point + # get the next time + if index2 < N2-1: + t_f2 = t2[index2+1] + else: + t_f2 = t_end + spike_events[index] = t_p2 + s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 s2 = dt_p2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, + isi2) # now the next interval start value - dt_f2 = get_min_dist_cython(t2[index2+1], t1, N1, index1) - #s2 = dt_f2 - isi2 = t2[index2+1]-t2[index2] + if index2 < N2-1: + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2-t_p2 + else: + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + # s2 needs adjustment due to change of isi2 + s2 = dt_p2*(t_end-t2[N2-1])/isi2 # s2 is the same as above, thus we can compute y2 immediately y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) - else: # t1[index1+1] == t2[index2+1] - generate only one event + else: # t_f1 == t_f2 - generate only one event index1 += 1 index2 += 1 - if (index1+1 >= N1) or (index2+1 >= N2): - break - spike_events[index] = t1[index1] - y_ends[index-1] = 0.0 - y_starts[index] = 0.0 + t_p1 = t_f1 + t_p2 = t_f2 dt_p1 = 0.0 dt_p2 = 0.0 - dt_f1 = get_min_dist_cython(t1[index1+1], t2, N2, index2) - dt_f2 = get_min_dist_cython(t2[index2+1], t1, N1, index1) - isi1 = t1[index1+1]-t1[index1] - isi2 = t2[index2+1]-t2[index2] + spike_events[index] = t_f1 + y_ends[index-1] = 0.0 + y_starts[index] = 0.0 + if index1 < N1-1: + t_f1 = t1[index1+1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1 - t_p1 + else: + t_f1 = t_end + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + if index2 < N2-1: + t_f2 = t2[index2+1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2 - t_p2 + else: + t_f2 = t_end + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) index += 1 # the last event is the interval end - spike_events[index] = t1[N1-1] - # the ending value of the last interval - 
isi1 = max(t1[N1-1]-t1[N1-2], t1[N1-2]-t1[N1-3]) - isi2 = max(t2[N2-1]-t2[N2-2], t2[N2-2]-t2[N2-3]) - s1 = dt_p1*(t1[N1-1]-t1[N1-2])/isi1 - s2 = dt_p2*(t2[N2-1]-t2[N2-2])/isi2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + if spike_events[index-1] == t_end: + index -= 1 + else: + spike_events[index] = t_end + # the ending value of the last interval + isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + s1 = dt_f1*(t_end-t1[N1-1])/isi1 + s2 = dt_f2*(t_end-t2[N2-1])/isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) # end nogil # use only the data added above diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index 4c37236..bcf9c30 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -89,122 +89,185 @@ def isi_distance_python(s1, s2, t_start, t_end): ############################################################ # get_min_dist ############################################################ -def get_min_dist(spike_time, spike_train, start_index=0): +def get_min_dist(spike_time, spike_train, start_index, t_start, t_end): """ Returns the minimal distance |spike_time - spike_train[i]| with i>=start_index. """ - d = abs(spike_time - spike_train[start_index]) - start_index += 1 + d = abs(spike_time - t_start) + if start_index < 0: + start_index = 0 while start_index < len(spike_train): d_temp = abs(spike_time - spike_train[start_index]) if d_temp > d: - break + return d else: d = d_temp start_index += 1 - return d + # finally, check the distance to end time + d_temp = abs(t_end - spike_time) + if d_temp > d: + return d + else: + return d_temp ############################################################ # spike_distance_python ############################################################ -def spike_distance_python(spikes1, spikes2): +def spike_distance_python(spikes1, spikes2, t_start, t_end): """ Computes the instantaneous spike-distance S_spike (t) of the two given spike trains. The spike trains are expected to have auxiliary spikes at the beginning and end of the interval. Use the function add_auxiliary_spikes to add those spikes to the spike train. Args: - spikes1, spikes2: ordered arrays of spike times with auxiliary spikes. + - t_start, t_end: edges of the spike train Returns: - PieceWiseLinFunc describing the spike-distance. """ - # check for auxiliary spikes - first and last spikes should be identical - assert spikes1[0] == spikes2[0], \ - "Given spike trains seems not to have auxiliary spikes!" - assert spikes1[-1] == spikes2[-1], \ - "Given spike trains seems not to have auxiliary spikes!" 
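+    # the observation interval is now given explicitly by t_start and
+    # t_end, so the auxiliary-spike asserts above are no longer needed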
+ # shorter variables t1 = spikes1 t2 = spikes2 - spike_events = np.empty(len(t1) + len(t2) - 2) - spike_events[0] = t1[0] - y_starts = np.empty(len(spike_events) - 1) - y_ends = np.empty(len(spike_events) - 1) + N1 = len(t1) + N2 = len(t2) - index1 = 0 - index2 = 0 + spike_events = np.empty(N1+N2+2) + + y_starts = np.empty(len(spike_events)-1) + y_ends = np.empty(len(spike_events)-1) + + spike_events[0] = t_start + t_p1 = t_start + t_p2 = t_start + if t1[0] > t_start: + # dt_p1 = t2[0]-t_start + dt_p1 = 0.0 + t_f1 = t1[0] + dt_f1 = get_min_dist(t_f1, t2, 0, t_start, t_end) + isi1 = max(t_f1-t_start, t1[1]-t1[0]) + s1 = dt_f1*(t_f1-t_start)/isi1 + index1 = -1 + else: + dt_p1 = 0.0 + t_f1 = t1[1] + dt_f1 = get_min_dist(t_f1, t2, 0, t_start, t_end) + isi1 = t1[1]-t1[0] + s1 = dt_p1 + index1 = 0 + if t2[0] > t_start: + # dt_p1 = t2[0]-t_start + dt_p2 = 0.0 + t_f2 = t2[0] + dt_f2 = get_min_dist(t_f2, t1, 0, t_start, t_end) + isi2 = max(t_f2-t_start, t2[1]-t2[0]) + s2 = dt_f2*(t_f2-t_start)/isi2 + index2 = -1 + else: + dt_p2 = 0.0 + t_f2 = t2[1] + dt_f2 = get_min_dist(t_f2, t1, 0, t_start, t_end) + isi2 = t2[1]-t2[0] + s2 = dt_p2 + index2 = 0 + + y_starts[0] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) index = 1 - dt_p1 = 0.0 - dt_f1 = get_min_dist(t1[1], t2, 0) - dt_p2 = 0.0 - dt_f2 = get_min_dist(t2[1], t1, 0) - isi1 = max(t1[1]-t1[0], t1[2]-t1[1]) - isi2 = max(t2[1]-t2[0], t2[2]-t2[1]) - s1 = dt_f1*(t1[1]-t1[0])/isi1 - s2 = dt_f2*(t2[1]-t2[0])/isi2 - y_starts[0] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) - while True: + + while index1+index2 < N1+N2-2: # print(index, index1, index2) - if t1[index1+1] < t2[index2+1]: + if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1): index1 += 1 - # break condition relies on existence of spikes at T_end - if index1+1 >= len(t1): - break - spike_events[index] = t1[index1] # first calculate the previous interval end value - dt_p1 = dt_f1 # the previous time was the following time before + # the previous time now was the following time before: + dt_p1 = dt_f1 + t_p1 = t_f1 # t_p1 contains the current time point + # get the next time + if index1 < N1-1: + t_f1 = t1[index1+1] + else: + t_f1 = t_end + spike_events[index] = t_p1 s1 = dt_p1 - s2 = (dt_p2*(t2[index2+1]-t1[index1]) + - dt_f2*(t1[index1]-t2[index2])) / isi2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) # now the next interval start value - dt_f1 = get_min_dist(t1[index1+1], t2, index2) - isi1 = t1[index1+1]-t1[index1] + if index1 < N1-1: + dt_f1 = get_min_dist(t_f1, t2, index2, t_start, t_end) + isi1 = t_f1-t_p1 + else: + dt_f1 = dt_p1 + isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # s1 needs adjustment due to change of isi1 + s1 = dt_p1*(t_end-t1[N1-1])/isi1 # s2 is the same as above, thus we can compute y2 immediately - y_starts[index] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) - elif t1[index1+1] > t2[index2+1]: + y_starts[index] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) + elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): index2 += 1 - if index2+1 >= len(t2): - break - spike_events[index] = t2[index2] # first calculate the previous interval end value - dt_p2 = dt_f2 # the previous time was the following time before - s1 = (dt_p1*(t1[index1+1]-t2[index2]) + - dt_f1*(t2[index2]-t1[index1])) / isi1 + # the previous time now was the following time before: + dt_p2 = dt_f2 + t_p2 = t_f2 # t_p1 contains the current time point + # get the next time + if 
index2 < N2-1: + t_f2 = t2[index2+1] + else: + t_f2 = t_end + spike_events[index] = t_p2 + s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 s2 = dt_p2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) # now the next interval start value - dt_f2 = get_min_dist(t2[index2+1], t1, index1) - #s2 = dt_f2 - isi2 = t2[index2+1]-t2[index2] + if index2 < N2-1: + dt_f2 = get_min_dist(t_f2, t1, index1, t_start, t_end) + isi2 = t_f2-t_p2 + else: + dt_f2 = dt_p2 + isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + # s2 needs adjustment due to change of isi2 + s2 = dt_p2*(t_end-t2[N2-1])/isi2 # s2 is the same as above, thus we can compute y2 immediately - y_starts[index] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) - else: # t1[index1+1] == t2[index2+1] - generate only one event + y_starts[index] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) + else: # t_f1 == t_f2 - generate only one event index1 += 1 index2 += 1 - if (index1+1 >= len(t1)) or (index2+1 >= len(t2)): - break - assert dt_f2 == 0.0 - assert dt_f1 == 0.0 - spike_events[index] = t1[index1] - y_ends[index-1] = 0.0 - y_starts[index] = 0.0 + t_p1 = t_f1 + t_p2 = t_f2 dt_p1 = 0.0 dt_p2 = 0.0 - dt_f1 = get_min_dist(t1[index1+1], t2, index2) - dt_f2 = get_min_dist(t2[index2+1], t1, index1) - isi1 = t1[index1+1]-t1[index1] - isi2 = t2[index2+1]-t2[index2] + spike_events[index] = t_f1 + y_ends[index-1] = 0.0 + y_starts[index] = 0.0 + if index1 < N1-1: + t_f1 = t1[index1+1] + dt_f1 = get_min_dist(t_f1, t2, index2, t_start, t_end) + isi1 = t_f1 - t_p1 + else: + t_f1 = t_end + dt_f1 = dt_p1 + isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + if index2 < N2-1: + t_f2 = t2[index2+1] + dt_f2 = get_min_dist(t_f2, t1, index1, t_start, t_end) + isi2 = t_f2 - t_p2 + else: + t_f2 = t_end + dt_f2 = dt_p2 + isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) index += 1 # the last event is the interval end - spike_events[index] = t1[-1] - # the ending value of the last interval - isi1 = max(t1[-1]-t1[-2], t1[-2]-t1[-3]) - isi2 = max(t2[-1]-t2[-2], t2[-2]-t2[-3]) - s1 = dt_p1*(t1[-1]-t1[-2])/isi1 - s2 = dt_p2*(t2[-1]-t2[-2])/isi2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + if spike_events[index-1] == t_end: + index -= 1 + else: + spike_events[index] = t_end + # the ending value of the last interval + isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + s1 = dt_f1*(t_end-t1[N1-1])/isi1 + s2 = dt_f2*(t_end-t2[N2-1])/isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) + # use only the data added above # could be less than original length due to equal spike times return spike_events[:index+1], y_starts[:index], y_ends[:index] diff --git a/pyspike/spike_distance.py b/pyspike/spike_distance.py index f721c86..8d03d70 100644 --- a/pyspike/spike_distance.py +++ b/pyspike/spike_distance.py @@ -14,23 +14,23 @@ from pyspike.generic import _generic_profile_multi, _generic_distance_matrix ############################################################ # spike_profile ############################################################ -def spike_profile(spikes1, spikes2): +def spike_profile(spike_train1, spike_train2): """ Computes the spike-distance profile S_spike(t) of the two given spike trains. Returns the profile as a PieceWiseLinFunc object. The S_spike - values are defined positive S_spike(t)>=0. The spike trains are expected to - have auxiliary spikes at the beginning and end of the interval. 
Use the - function add_auxiliary_spikes to add those spikes to the spike train. + values are defined positive S_spike(t)>=0. - :param spikes1: ordered array of spike times with auxiliary spikes. - :param spikes2: ordered array of spike times with auxiliary spikes. + :param spike_train1: First spike train. + :type spike_train1: :class:`pyspike.SpikeTrain` + :param spike_train2: Second spike train. + :type spike_train2: :class:`pyspike.SpikeTrain` :returns: The spike-distance profile :math:`S_{spike}(t)`. :rtype: :class:`pyspike.function.PieceWiseLinFunc` """ - # check for auxiliary spikes - first and last spikes should be identical - assert spikes1[0] == spikes2[0], \ + # check whether the spike trains are defined for the same interval + assert spike_train1.t_start == spike_train2.t_start, \ "Given spike trains seems not to have auxiliary spikes!" - assert spikes1[-1] == spikes2[-1], \ + assert spike_train1.t_end == spike_train2.t_end, \ "Given spike trains seems not to have auxiliary spikes!" # cython implementation @@ -45,21 +45,26 @@ Falling back to slow python backend.") from cython.python_backend import spike_distance_python \ as spike_distance_impl - times, y_starts, y_ends = spike_distance_impl(spikes1, spikes2) + times, y_starts, y_ends = spike_distance_impl(spike_train1.spikes, + spike_train2.spikes, + spike_train1.t_start, + spike_train1.t_end) return PieceWiseLinFunc(times, y_starts, y_ends) ############################################################ # spike_distance ############################################################ -def spike_distance(spikes1, spikes2, interval=None): +def spike_distance(spike_train1, spike_train2, interval=None): """ Computes the spike-distance S of the given spike trains. The spike-distance is the integral over the isi distance profile S_spike(t): .. math:: S = \int_{T_0}^{T_1} S_{spike}(t) dt. - :param spikes1: ordered array of spike times with auxiliary spikes. - :param spikes2: ordered array of spike times with auxiliary spikes. + :param spike_train1: First spike train. + :type spike_train1: :class:`pyspike.SpikeTrain` + :param spike_train2: Second spike train. + :type spike_train2: :class:`pyspike.SpikeTrain` :param interval: averaging interval given as a pair of floats (T0, T1), if None the average over the whole function is computed. :type interval: Pair of floats or None. @@ -67,7 +72,7 @@ def spike_distance(spikes1, spikes2, interval=None): :rtype: double """ - return spike_profile(spikes1, spikes2).avrg(interval) + return spike_profile(spike_train1, spike_train2).avrg(interval) ############################################################ @@ -102,7 +107,7 @@ def spike_distance_multi(spike_trains, indices=None, interval=None): S_{spike} = \int_0^T 2/((N(N-1)) sum_{} S_{spike}^{i, j} dt where the sum goes over all pairs - :param spike_trains: list of spike trains + :param spike_trains: list of :class:`pyspike.SpikeTrain` :param indices: list of indices defining which spike trains to use, if None all given spike trains are used (default=None) :type indices: list or None @@ -121,7 +126,7 @@ def spike_distance_multi(spike_trains, indices=None, interval=None): def spike_distance_matrix(spike_trains, indices=None, interval=None): """ Computes the time averaged spike-distance of all pairs of spike-trains. 
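+    A rough sketch of the intended use (the trains below are
+    hypothetical):
+
+        trains = [SpikeTrain([0.2, 0.4, 0.6], 1.0),
+                  SpikeTrain([0.3, 0.45], 1.0)]
+        M = spike_distance_matrix(trains)  # symmetric NxN matrix
+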
- :param spike_trains: list of spike trains + :param spike_trains: list of :class:`pyspike.SpikeTrain` :param indices: list of indices defining which spike trains to use, if None all given spike trains are used (default=None) :type indices: list or None diff --git a/test/test_distance.py b/test/test_distance.py index b54e908..4af0e63 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -64,13 +64,26 @@ def test_isi(): def test_spike(): # generate two spike trains: - t1 = np.array([0.2, 0.4, 0.6, 0.7]) - t2 = np.array([0.3, 0.45, 0.8, 0.9, 0.95]) + t1 = SpikeTrain([0.0, 2.0, 5.0, 8.0], 10.0) + t2 = SpikeTrain([0.0, 1.0, 5.0, 9.0], 10.0) + + expected_times = np.array([0.0, 1.0, 2.0, 5.0, 8.0, 9.0, 10.0]) + + f = spk.spike_profile(t1, t2) + + assert_equal(f.x, expected_times) + + assert_almost_equal(f.avrg(), 0.1662415, decimal=6) + assert_almost_equal(f.y2[-1], 0.1394558, decimal=6) + + t1 = SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0) + t2 = SpikeTrain([0.3, 0.45, 0.8, 0.9, 0.95], 1.0) # pen&paper calculation of the spike distance expected_times = [0.0, 0.2, 0.3, 0.4, 0.45, 0.6, 0.7, 0.8, 0.9, 0.95, 1.0] s1 = np.array([0.1, 0.1, (0.1*0.1+0.05*0.1)/0.2, 0.05, (0.05*0.15 * 2)/0.2, - 0.15, 0.1, 0.1*0.2/0.3, 0.1**2/0.3, 0.1*0.05/0.3, 0.1]) + 0.15, 0.1, (0.1*0.1+0.1*0.2)/0.3, (0.1*0.2+0.1*0.1)/0.3, + (0.1*0.05+0.1*0.25)/0.3, 0.1]) s2 = np.array([0.1, 0.1*0.2/0.3, 0.1, (0.1*0.05 * 2)/.15, 0.05, (0.05*0.2+0.1*0.15)/0.35, (0.05*0.1+0.1*0.25)/0.35, 0.1, 0.1, 0.05, 0.05]) @@ -86,19 +99,18 @@ def test_spike(): (expected_y1+expected_y2)/2) expected_spike_val /= (expected_times[-1]-expected_times[0]) - t1 = spk.add_auxiliary_spikes(t1, 1.0) - t2 = spk.add_auxiliary_spikes(t2, 1.0) f = spk.spike_profile(t1, t2) assert_equal(f.x, expected_times) assert_array_almost_equal(f.y1, expected_y1, decimal=15) assert_array_almost_equal(f.y2, expected_y2, decimal=15) - assert_equal(f.avrg(), expected_spike_val) - assert_equal(spk.spike_distance(t1, t2), expected_spike_val) + assert_almost_equal(f.avrg(), expected_spike_val, decimal=15) + assert_almost_equal(spk.spike_distance(t1, t2), expected_spike_val, + decimal=15) # check with some equal spike times - t1 = np.array([0.2, 0.4, 0.6]) - t2 = np.array([0.1, 0.4, 0.5, 0.6]) + t1 = SpikeTrain([0.2, 0.4, 0.6], [0.0, 1.0]) + t2 = SpikeTrain([0.1, 0.4, 0.5, 0.6], [0.0, 1.0]) expected_times = [0.0, 0.1, 0.2, 0.4, 0.5, 0.6, 1.0] s1 = np.array([0.1, 0.1*0.1/0.2, 0.1, 0.0, 0.0, 0.0, 0.0]) @@ -115,8 +127,6 @@ def test_spike(): (expected_y1+expected_y2)/2) expected_spike_val /= (expected_times[-1]-expected_times[0]) - t1 = spk.add_auxiliary_spikes(t1, 1.0) - t2 = spk.add_auxiliary_spikes(t2, 1.0) f = spk.spike_profile(t1, t2) assert_equal(f.x, expected_times) @@ -315,6 +325,6 @@ def test_multi_variate_subsets(): if __name__ == "__main__": test_isi() - # test_spike() + test_spike() # test_multi_isi() # test_multi_spike() -- cgit v1.2.3 From 36d80c9ec1d28488f9b5c97cd202c196efff694e Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Fri, 24 Apr 2015 15:58:35 +0200 Subject: distance tests now pass with new spike trains --- pyspike/cython/cython_distance.pyx | 8 ++++---- pyspike/cython/python_backend.py | 41 +++++++++++++++++++------------------- test/test_distance.py | 40 ++++++++++++++++++++++++++++++------- 3 files changed, 57 insertions(+), 32 deletions(-) (limited to 'pyspike/cython/python_backend.py') diff --git a/pyspike/cython/cython_distance.pyx b/pyspike/cython/cython_distance.pyx index 2841da8..dc2557f 100644 --- a/pyspike/cython/cython_distance.pyx +++ 
b/pyspike/cython/cython_distance.pyx @@ -345,9 +345,9 @@ cdef inline double get_tau(double[:] spikes1, double[:] spikes2, m = fmin(m, spikes1[i+1]-spikes1[i]) if j < N2 and j > -1: m = fmin(m, spikes2[j+1]-spikes2[j]) - if i > 1: + if i > 0: m = fmin(m, spikes1[i]-spikes1[i-1]) - if j > 1: + if j > 0: m = fmin(m, spikes2[j]-spikes2[j-1]) m *= 0.5 if max_tau > 0.0: @@ -371,7 +371,7 @@ def coincidence_cython(double[:] spikes1, double[:] spikes2, cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity cdef double tau while i + j < N1 + N2 - 2: - if (i < N1-1) and (spikes1[i+1] < spikes2[j+1] or j == N2-1): + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): i += 1 n += 1 tau = get_tau(spikes1, spikes2, i, j, max_tau) @@ -381,7 +381,7 @@ def coincidence_cython(double[:] spikes1, double[:] spikes2, # both get marked with 1 c[n] = 1 c[n-1] = 1 - elif (j < N2-1) and (spikes1[i+1] > spikes2[j+1] or i == N1-1): + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): j += 1 n += 1 tau = get_tau(spikes1, spikes2, i, j, max_tau) diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index bcf9c30..c65bfb0 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -330,47 +330,48 @@ def cumulative_sync_python(spikes1, spikes2): ############################################################ # coincidence_python ############################################################ -def coincidence_python(spikes1, spikes2, max_tau): +def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): def get_tau(spikes1, spikes2, i, j, max_tau): m = 1E100 # some huge number - if i < len(spikes1)-2: + if i < len(spikes1)-1 and i > -1: m = min(m, spikes1[i+1]-spikes1[i]) - if j < len(spikes2)-2: + if j < len(spikes2)-1 and j > -1: m = min(m, spikes2[j+1]-spikes2[j]) - if i > 1: + if i > 0: m = min(m, spikes1[i]-spikes1[i-1]) - if j > 1: + if j > 0: m = min(m, spikes2[j]-spikes2[j-1]) m *= 0.5 if max_tau > 0.0: m = min(m, max_tau) return m + N1 = len(spikes1) N2 = len(spikes2) - i = 0 - j = 0 + i = -1 + j = -1 n = 0 - st = np.zeros(N1 + N2 - 2) # spike times - c = np.zeros(N1 + N2 - 2) # coincidences - mp = np.ones(N1 + N2 - 2) # multiplicity - while n < N1 + N2 - 2: - if spikes1[i+1] < spikes2[j+1]: + st = np.zeros(N1 + N2 + 2) # spike times + c = np.zeros(N1 + N2 + 2) # coincidences + mp = np.ones(N1 + N2 + 2) # multiplicity + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): i += 1 n += 1 tau = get_tau(spikes1, spikes2, i, j, max_tau) st[n] = spikes1[i] - if j > 0 and spikes1[i]-spikes2[j] < tau: + if j > -1 and spikes1[i]-spikes2[j] < tau: # coincidence between the current spike and the previous spike # both get marked with 1 c[n] = 1 c[n-1] = 1 - elif spikes1[i+1] > spikes2[j+1]: + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): j += 1 n += 1 tau = get_tau(spikes1, spikes2, i, j, max_tau) st[n] = spikes2[j] - if i > 0 and spikes2[j]-spikes1[i] < tau: + if i > -1 and spikes2[j]-spikes1[i] < tau: # coincidence between the current spike and the previous spike # both get marked with 1 c[n] = 1 @@ -379,8 +380,6 @@ def coincidence_python(spikes1, spikes2, max_tau): # advance in both spike trains j += 1 i += 1 - if i == N1-1 or j == N2-1: - break n += 1 # add only one event, but with coincidence 2 and multiplicity 2 st[n] = spikes1[i] @@ -391,12 +390,12 @@ def coincidence_python(spikes1, spikes2, max_tau): c = c[:n+2] mp = mp[:n+2] - st[0] = spikes1[0] - st[-1] = spikes1[-1] + st[0] = 
t_start + st[len(st)-1] = t_end c[0] = c[1] - c[-1] = c[-2] + c[len(c)-1] = c[len(c)-2] mp[0] = mp[1] - mp[-1] = mp[-2] + mp[len(mp)-1] = mp[len(mp)-2] return st, c, mp diff --git a/test/test_distance.py b/test/test_distance.py index 0fff840..88cf40e 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -177,6 +177,18 @@ def test_spike_sync(): assert_almost_equal(spk.spike_sync(spikes1, spikes2), 0.5, decimal=16) + spikes2 = SpikeTrain([3.0], 4.0) + assert_almost_equal(spk.spike_sync(spikes1, spikes2), + 0.5, decimal=16) + + spikes2 = SpikeTrain([1.0], 4.0) + assert_almost_equal(spk.spike_sync(spikes1, spikes2), + 0.5, decimal=16) + + spikes2 = SpikeTrain([1.5, 3.0], 4.0) + assert_almost_equal(spk.spike_sync(spikes1, spikes2), + 0.4, decimal=16) + def check_multi_profile(profile_func, profile_func_multi): # generate spike trains: @@ -250,19 +262,28 @@ def test_multi_spike_sync(): # multivariate regression test spike_trains = spk.load_spike_trains_from_txt("test/SPIKE_Sync_Test.txt", - interval=(0, 4000)) - print(spike_trains[0].spikes) + interval=[0, 4000]) + # extract all spike times + spike_times = np.array([]) + for st in spike_trains: + spike_times = np.append(spike_times, st.spikes) + spike_times = np.unique(np.sort(spike_times)) + f = spk.spike_sync_profile_multi(spike_trains) + + assert_equal(spike_times, f.x[1:-1]) + assert_equal(len(f.x), len(f.y)) + assert_equal(np.sum(f.y[1:-1]), 39932) assert_equal(np.sum(f.mp[1:-1]), 85554) def check_dist_matrix(dist_func, dist_matrix_func): # generate spike trains: - t1 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6, 0.7]), 1.0) - t2 = spk.add_auxiliary_spikes(np.array([0.3, 0.45, 0.8, 0.9, 0.95]), 1.0) - t3 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6]), 1.0) - t4 = spk.add_auxiliary_spikes(np.array([0.1, 0.4, 0.5, 0.6]), 1.0) + t1 = SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0) + t2 = SpikeTrain([0.3, 0.45, 0.8, 0.9, 0.95], 1.0) + t3 = SpikeTrain([0.2, 0.4, 0.6], 1.0) + t4 = SpikeTrain([0.1, 0.4, 0.5, 0.6], 1.0) spike_trains = [t1, t2, t3, t4] f12 = dist_func(t1, t2) @@ -340,4 +361,9 @@ if __name__ == "__main__": test_spike_sync() test_multi_isi() test_multi_spike() - # test_multi_spike_sync() + test_multi_spike_sync() + test_isi_matrix() + test_spike_matrix() + test_spike_sync_matrix() + test_regression_spiky() + test_multi_variate_subsets() -- cgit v1.2.3 From 795e16ffe7afb469ef07a548c1f6a31d924196b3 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Fri, 24 Apr 2015 23:29:05 +0200 Subject: bugfixes for spike distance --- pyspike/cython/cython_distance.pyx | 24 +++++++++--------- pyspike/cython/python_backend.py | 15 ++++++------ test/test_distance.py | 50 ++++++++++++++++++++++++++++---------- 3 files changed, 58 insertions(+), 31 deletions(-) (limited to 'pyspike/cython/python_backend.py') diff --git a/pyspike/cython/cython_distance.pyx b/pyspike/cython/cython_distance.pyx index dc2557f..a41d8e8 100644 --- a/pyspike/cython/cython_distance.pyx +++ b/pyspike/cython/cython_distance.pyx @@ -194,31 +194,31 @@ def spike_distance_cython(double[:] t1, double[:] t2, t_p2 = t_start if t1[0] > t_start: # dt_p1 = t2[0]-t_start - dt_p1 = 0.0 t_f1 = t1[0] dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) - s1 = dt_f1*(t_f1-t_start)/isi1 + dt_p1 = dt_f1 + s1 = dt_p1*(t_f1-t_start)/isi1 index1 = -1 else: - dt_p1 = 0.0 t_f1 = t1[1] dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + dt_p1 = 0.0 isi1 = t1[1]-t1[0] s1 = dt_p1 index1 = 0 if t2[0] > t_start: # dt_p1 = t2[0]-t_start 
- dt_p2 = 0.0 t_f2 = t2[0] dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = dt_f2 isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) - s2 = dt_f2*(t_f2-t_start)/isi2 + s2 = dt_p2*(t_f2-t_start)/isi2 index2 = -1 else: - dt_p2 = 0.0 t_f2 = t2[1] dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = 0.0 isi2 = t2[1]-t2[0] s2 = dt_p2 index2 = 0 @@ -231,16 +231,16 @@ def spike_distance_cython(double[:] t1, double[:] t2, if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1): index1 += 1 # first calculate the previous interval end value + s1 = dt_f1*(t_f1-t_p1) / isi1 # the previous time now was the following time before: - dt_p1 = dt_f1 + dt_p1 = dt_f1 t_p1 = t_f1 # t_p1 contains the current time point - # get the next time + # get the next time if index1 < N1-1: t_f1 = t1[index1+1] else: t_f1 = t_end spike_events[index] = t_p1 - s1 = dt_p1 s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 y_ends[index-1] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) @@ -249,6 +249,7 @@ def spike_distance_cython(double[:] t1, double[:] t2, dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, t_start, t_end) isi1 = t_f1-t_p1 + s1 = dt_p1 else: dt_f1 = dt_p1 isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) @@ -260,9 +261,10 @@ def spike_distance_cython(double[:] t1, double[:] t2, elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): index2 += 1 # first calculate the previous interval end value + s2 = dt_f2*(t_f2-t_p2) / isi2 # the previous time now was the following time before: dt_p2 = dt_f2 - t_p2 = t_f2 # t_p1 contains the current time point + t_p2 = t_f2 # t_p2 contains the current time point # get the next time if index2 < N2-1: t_f2 = t2[index2+1] @@ -270,7 +272,6 @@ def spike_distance_cython(double[:] t1, double[:] t2, t_f2 = t_end spike_events[index] = t_p2 s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 - s2 = dt_p2 y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) # now the next interval start value @@ -278,6 +279,7 @@ def spike_distance_cython(double[:] t1, double[:] t2, dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, t_start, t_end) isi2 = t_f2-t_p2 + s2 = dt_p2 else: dt_f2 = dt_p2 isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index c65bfb0..317b568 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -142,12 +142,11 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): t_p1 = t_start t_p2 = t_start if t1[0] > t_start: - # dt_p1 = t2[0]-t_start - dt_p1 = 0.0 t_f1 = t1[0] dt_f1 = get_min_dist(t_f1, t2, 0, t_start, t_end) + dt_p1 = dt_f1 isi1 = max(t_f1-t_start, t1[1]-t1[0]) - s1 = dt_f1*(t_f1-t_start)/isi1 + s1 = dt_p1*(t_f1-t_start)/isi1 index1 = -1 else: dt_p1 = 0.0 @@ -158,11 +157,11 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): index1 = 0 if t2[0] > t_start: # dt_p1 = t2[0]-t_start - dt_p2 = 0.0 t_f2 = t2[0] dt_f2 = get_min_dist(t_f2, t1, 0, t_start, t_end) + dt_p2 = dt_f2 isi2 = max(t_f2-t_start, t2[1]-t2[0]) - s2 = dt_f2*(t_f2-t_start)/isi2 + s2 = dt_p2*(t_f2-t_start)/isi2 index2 = -1 else: dt_p2 = 0.0 @@ -180,6 +179,7 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1): index1 += 1 # first calculate the previous interval end value + s1 = dt_f1*(t_f1-t_p1) / isi1 # the previous time now was the following time before: dt_p1 = dt_f1 t_p1 = t_f1 # t_p1 contains the current time point @@ -189,13 +189,13 @@ def 
spike_distance_python(spikes1, spikes2, t_start, t_end): else: t_f1 = t_end spike_events[index] = t_p1 - s1 = dt_p1 s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) # now the next interval start value if index1 < N1-1: dt_f1 = get_min_dist(t_f1, t2, index2, t_start, t_end) isi1 = t_f1-t_p1 + s1 = dt_p1 else: dt_f1 = dt_p1 isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) @@ -206,6 +206,7 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): index2 += 1 # first calculate the previous interval end value + s2 = dt_f2*(t_f2-t_p2) / isi2 # the previous time now was the following time before: dt_p2 = dt_f2 t_p2 = t_f2 # t_p1 contains the current time point @@ -216,12 +217,12 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): t_f2 = t_end spike_events[index] = t_p2 s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 - s2 = dt_p2 y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) # now the next interval start value if index2 < N2-1: dt_f2 = get_min_dist(t_f2, t1, index1, t_start, t_end) isi2 = t_f2-t_p2 + s2 = dt_p2 else: dt_f2 = dt_p2 isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) diff --git a/test/test_distance.py b/test/test_distance.py index 0059001..20b52e8 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -73,7 +73,7 @@ def test_spike(): assert_equal(f.x, expected_times) - assert_almost_equal(f.avrg(), 0.1662415, decimal=6) + assert_almost_equal(f.avrg(), 1.6624149659863946e-01, decimal=15) assert_almost_equal(f.y2[-1], 0.1394558, decimal=6) t1 = SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0) @@ -84,7 +84,7 @@ def test_spike(): s1 = np.array([0.1, 0.1, (0.1*0.1+0.05*0.1)/0.2, 0.05, (0.05*0.15 * 2)/0.2, 0.15, 0.1, (0.1*0.1+0.1*0.2)/0.3, (0.1*0.2+0.1*0.1)/0.3, (0.1*0.05+0.1*0.25)/0.3, 0.1]) - s2 = np.array([0.1, 0.1*0.2/0.3, 0.1, (0.1*0.05 * 2)/.15, 0.05, + s2 = np.array([0.1, (0.1*0.2+0.1*0.1)/0.3, 0.1, (0.1*0.05 * 2)/.15, 0.05, (0.05*0.2+0.1*0.15)/0.35, (0.05*0.1+0.1*0.25)/0.35, 0.1, 0.1, 0.05, 0.05]) isi1 = np.array([0.2, 0.2, 0.2, 0.2, 0.2, 0.1, 0.3, 0.3, 0.3, 0.3]) @@ -113,12 +113,18 @@ def test_spike(): t2 = SpikeTrain([0.1, 0.4, 0.5, 0.6], [0.0, 1.0]) expected_times = [0.0, 0.1, 0.2, 0.4, 0.5, 0.6, 1.0] - s1 = np.array([0.1, 0.1*0.1/0.2, 0.1, 0.0, 0.0, 0.0, 0.0]) - s2 = np.array([0.1*0.1/0.3, 0.1, 0.1*0.2/0.3, 0.0, 0.1, 0.0, 0.0]) + # due to the edge correction in the beginning, s1 and s2 are different + # for left and right values + s1_r = np.array([0.1, (0.1*0.1+0.1*0.1)/0.2, 0.1, 0.0, 0.0, 0.0, 0.0]) + s1_l = np.array([0.1, (0.1*0.1+0.1*0.1)/0.2, 0.1, 0.0, 0.0, 0.0, 0.0]) + s2_r = np.array([0.1*0.1/0.3, 0.1*0.3/0.3, 0.1*0.2/0.3, + 0.0, 0.1, 0.0, 0.0]) + s2_l = np.array([0.1*0.1/0.3, 0.1*0.1/0.3, 0.1*0.2/0.3, 0.0, + 0.1, 0.0, 0.0]) isi1 = np.array([0.2, 0.2, 0.2, 0.2, 0.2, 0.4]) isi2 = np.array([0.3, 0.3, 0.3, 0.1, 0.1, 0.4]) - expected_y1 = (s1[:-1]*isi2+s2[:-1]*isi1) / (0.5*(isi1+isi2)**2) - expected_y2 = (s1[1:]*isi2+s2[1:]*isi1) / (0.5*(isi1+isi2)**2) + expected_y1 = (s1_r[:-1]*isi2+s2_r[:-1]*isi1) / (0.5*(isi1+isi2)**2) + expected_y2 = (s1_l[1:]*isi2+s2_l[1:]*isi1) / (0.5*(isi1+isi2)**2) expected_times = np.array(expected_times) expected_y1 = np.array(expected_y1) @@ -321,19 +327,37 @@ def test_spike_sync_matrix(): def test_regression_spiky(): + # standard example + st1 = SpikeTrain(np.arange(100, 1201, 100), 1300) + st2 = SpikeTrain(np.arange(100, 1201, 110), 1300) + + isi_dist = spk.isi_distance(st1, st2) + 
assert_almost_equal(isi_dist, 7.6923076923076941e-02, decimal=15) + + spike_dist = spk.spike_distance(st1, st2) + assert_equal(spike_dist, 2.1105878248735391e-01) + + spike_sync = spk.spike_sync(st1, st2) + assert_equal(spike_sync, 8.6956521739130432e-01) + + # multivariate check + spike_trains = spk.load_spike_trains_from_txt("test/PySpike_testdata.txt", (0.0, 4000.0)) - isi_profile = spk.isi_profile_multi(spike_trains) - isi_dist = isi_profile.avrg() - print(isi_dist) + isi_dist = spk.isi_distance_multi(spike_trains) # get the full precision from SPIKY - # assert_equal(isi_dist, 0.1832) + assert_almost_equal(isi_dist, 1.8318789829845508e-01, decimal=15) spike_profile = spk.spike_profile_multi(spike_trains) - spike_dist = spike_profile.avrg() - print(spike_dist) + assert_equal(len(spike_profile.y1)+len(spike_profile.y2), 1252) + + spike_dist = spk.spike_distance_multi(spike_trains) + # get the full precision from SPIKY + assert_almost_equal(spike_dist, 2.4432433330596512e-01, decimal=15) + + spike_sync = spk.spike_sync_multi(spike_trains) # get the full precision from SPIKY - # assert_equal(spike_dist, 0.2445) + assert_equal(spike_sync, 0.7183531505298066) def test_multi_variate_subsets(): -- cgit v1.2.3 From 2e7351393927ba9e9e0c3b7b59d05e8aeeb41d1f Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 28 Apr 2015 16:11:11 +0200 Subject: edge correction for the ISI-distance --- pyspike/cython/cython_distance.pyx | 18 ++++++++++++------ pyspike/cython/python_backend.py | 18 ++++++++++++------ test/test_distance.py | 8 +++++--- 3 files changed, 29 insertions(+), 15 deletions(-) (limited to 'pyspike/cython/python_backend.py') diff --git a/pyspike/cython/cython_distance.pyx b/pyspike/cython/cython_distance.pyx index a41d8e8..6ee0181 100644 --- a/pyspike/cython/cython_distance.pyx +++ b/pyspike/cython/cython_distance.pyx @@ -62,14 +62,16 @@ def isi_distance_cython(double[:] s1, double[:] s2, # first interspike interval - check if a spike exists at the start time if s1[0] > t_start: - nu1 = s1[0] - t_start + # edge correction + nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) index1 = -1 else: nu1 = s1[1]-s1[0] index1 = 0 if s2[0] > t_start: - nu2 = s2[0] - t_start + # edge correction + nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) index2 = -1 else: nu2 = s2[1]-s2[0] @@ -89,7 +91,8 @@ def isi_distance_cython(double[:] s1, double[:] s2, if index1 < N1-1: nu1 = s1[index1+1]-s1[index1] else: - nu1 = t_end-s1[index1] + # edge correction + nu1 = fmax(t_end-s1[index1], nu1) elif (index2 < N2-1) and ((index1 == N1-1) or (s1[index1+1] > s2[index2+1])): index2 += 1 @@ -97,7 +100,8 @@ def isi_distance_cython(double[:] s1, double[:] s2, if index2 < N2-1: nu2 = s2[index2+1]-s2[index2] else: - nu2 = t_end-s2[index2] + # edge correction + nu2 = fmax(t_end-s2[index2], nu2) else: # s1[index1+1] == s2[index2+1] index1 += 1 index2 += 1 @@ -105,11 +109,13 @@ def isi_distance_cython(double[:] s1, double[:] s2, if index1 < N1-1: nu1 = s1[index1+1]-s1[index1] else: - nu1 = t_end-s1[index1] + # edge correction + nu1 = fmax(t_end-s1[index1], nu1) if index2 < N2-1: nu2 = s2[index2+1]-s2[index2] else: - nu2 = t_end-s2[index2] + # edge correction + nu2 = fmax(t_end-s2[index2], nu2) # compute the corresponding isi-distance isi_values[index] = fabs(nu1 - nu2) / fmax(nu1, nu2) index += 1 diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index 317b568..1fd8c42 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -27,13 +27,15 @@ def isi_distance_python(s1, s2, t_start, 
t_end): # the values have one entry less - the number of intervals between events isi_values = np.empty(len(spike_events) - 1) if s1[0] > t_start: - nu1 = s1[0] - t_start + # edge correction + nu1 = max(s1[0] - t_start, s1[1] - s1[0]) index1 = -1 else: nu1 = s1[1] - s1[0] index1 = 0 if s2[0] > t_start: - nu2 = s2[0] - t_start + # edge correction + nu2 = max(s2[0] - t_start, s2[1] - s2[0]) index2 = -1 else: nu2 = s2[1] - s2[0] @@ -49,7 +51,8 @@ def isi_distance_python(s1, s2, t_start, t_end): if index1 < N1-1: nu1 = s1[index1+1]-s1[index1] else: - nu1 = t_end-s1[index1] + # edge correction + nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2]) elif (index2 < N2-1) and (index1 == N1-1 or s1[index1+1] > s2[index2+1]): @@ -58,7 +61,8 @@ def isi_distance_python(s1, s2, t_start, t_end): if index2 < N2-1: nu2 = s2[index2+1]-s2[index2] else: - nu2 = t_end-s2[index2] + # edge correction + nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2]) else: # s1[index1 + 1] == s2[index2 + 1] index1 += 1 @@ -67,11 +71,13 @@ def isi_distance_python(s1, s2, t_start, t_end): if index1 < N1-1: nu1 = s1[index1+1]-s1[index1] else: - nu1 = t_end-s1[index1] + # edge correction + nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2]) if index2 < N2-1: nu2 = s2[index2+1]-s2[index2] else: - nu2 = t_end-s2[index2] + # edge correction + nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2]) # compute the corresponding isi-distance isi_values[index] = abs(nu1 - nu2) / \ max(nu1, nu2) diff --git a/test/test_distance.py b/test/test_distance.py index 20b52e8..19da35f 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -47,7 +47,7 @@ def test_isi(): t2 = SpikeTrain([0.1, 0.4, 0.5, 0.6], [0.0, 1.0]) expected_times = [0.0, 0.1, 0.2, 0.4, 0.5, 0.6, 1.0] - expected_isi = [0.1/0.2, 0.1/0.3, 0.1/0.3, 0.1/0.2, 0.1/0.2, 0.0/0.5] + expected_isi = [0.1/0.3, 0.1/0.3, 0.1/0.3, 0.1/0.2, 0.1/0.2, 0.0/0.5] expected_times = np.array(expected_times) expected_isi = np.array(expected_isi) @@ -332,7 +332,9 @@ def test_regression_spiky(): st2 = SpikeTrain(np.arange(100, 1201, 110), 1300) isi_dist = spk.isi_distance(st1, st2) - assert_almost_equal(isi_dist, 7.6923076923076941e-02, decimal=15) + assert_almost_equal(isi_dist, 9.0909090909090939e-02, decimal=15) + isi_profile = spk.isi_profile(st1, st2) + assert_equal(isi_profile.y, 0.1/1.1 * np.ones_like(isi_profile.y)) spike_dist = spk.spike_distance(st1, st2) assert_equal(spike_dist, 2.1105878248735391e-01) @@ -346,7 +348,7 @@ def test_regression_spiky(): (0.0, 4000.0)) isi_dist = spk.isi_distance_multi(spike_trains) # get the full precision from SPIKY - assert_almost_equal(isi_dist, 1.8318789829845508e-01, decimal=15) + assert_almost_equal(isi_dist, 0.17051816816999129656, decimal=15) spike_profile = spk.spike_profile_multi(spike_trains) assert_equal(len(spike_profile.y1)+len(spike_profile.y2), 1252) -- cgit v1.2.3 From bec2529367f1bdd5dac6d6fbaec560a30feec3c7 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 11 May 2015 17:41:08 +0200 Subject: treatment of empty spike trains in spike sync --- pyspike/DiscreteFunc.py | 4 ++++ pyspike/cython/cython_distances.pyx | 13 +++++++++---- pyspike/cython/cython_profiles.pyx | 9 +++++---- pyspike/cython/python_backend.py | 2 +- test/test_empty.py | 39 +++++++++++++++++++++++++++++++++++++ 5 files changed, 58 insertions(+), 9 deletions(-) (limited to 'pyspike/cython/python_backend.py') diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py index dfe2cab..6ade87e 100644 --- a/pyspike/DiscreteFunc.py +++ b/pyspike/DiscreteFunc.py @@ -136,6 +136,10 @@ class 
DiscreteFunc(object): :rtype: pair of float """ + if len(self.y) <= 2: + # no actual values in the profile, return spike sync of 0 + return 0.0, 1.0 + def get_indices(ival): """ Returns the indices surrounding the given interval""" start_ind = np.searchsorted(self.x, ival[0], side='right') diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index bf90638..16780f2 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -333,8 +333,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, # get_tau ############################################################ cdef inline double get_tau(double[:] spikes1, double[:] spikes2, - int i, int j, double max_tau): - cdef double m = 1E100 # some huge number + int i, int j, double interval, double max_tau): + cdef double m = interval # use interval length as initial tau cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1 cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1 if i < N1 and i > -1: @@ -363,12 +363,13 @@ def coincidence_value_cython(double[:] spikes1, double[:] spikes2, cdef int j = -1 cdef double coinc = 0.0 cdef double mp = 0.0 + cdef double interval = t_end - t_start cdef double tau while i + j < N1 + N2 - 2: if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): i += 1 mp += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) if j > -1 and spikes1[i]-spikes2[j] < tau: # coincidence between the current spike and the previous spike # both get marked with 1 @@ -376,7 +377,7 @@ def coincidence_value_cython(double[:] spikes1, double[:] spikes2, elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): j += 1 mp += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) if i > -1 and spikes2[j]-spikes1[i] < tau: # coincidence between the current spike and the previous spike # both get marked with 1 @@ -389,4 +390,8 @@ def coincidence_value_cython(double[:] spikes1, double[:] spikes2, mp += 2 coinc += 2 + if coinc == 0 and mp == 0: + # empty spike trains -> set mp to one to avoid 0/0 + mp = 1 + return coinc, mp diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index 3690091..d937a02 100644 --- a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -345,8 +345,8 @@ def spike_profile_cython(double[:] t1, double[:] t2, # get_tau ############################################################ cdef inline double get_tau(double[:] spikes1, double[:] spikes2, - int i, int j, double max_tau): - cdef double m = 1E100 # some huge number + int i, int j, double interval, double max_tau): + cdef double m = interval # use interval as initial tau cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1 cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1 if i < N1 and i > -1: @@ -377,12 +377,13 @@ def coincidence_profile_cython(double[:] spikes1, double[:] spikes2, cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times cdef double[:] c = np.zeros(N1 + N2 + 2) # coincidences cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity + cdef double interval = t_end - t_start cdef double tau while i + j < N1 + N2 - 2: if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): i += 1 n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) st[n] = spikes1[i] if j > -1 and spikes1[i]-spikes2[j] < tau: # coincidence between the current spike and the previous spike @@
-392,7 +393,7 @@ def coincidence_profile_cython(double[:] spikes1, double[:] spikes2, elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): j += 1 n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) st[n] = spikes2[j] if i > -1 and spikes2[j]-spikes1[i] < tau: # coincidence between the current spike and the previous spike diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index 1fd8c42..830dc69 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -340,7 +340,7 @@ def cumulative_sync_python(spikes1, spikes2): def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): def get_tau(spikes1, spikes2, i, j, max_tau): - m = 1E100 # some huge number + m = t_end - t_start # use interval as initial tau if i < len(spikes1)-1 and i > -1: m = min(m, spikes1[i+1]-spikes1[i]) if j < len(spikes2)-1 and j > -1: diff --git a/test/test_empty.py b/test/test_empty.py index 42c3716..b31d8a4 100644 --- a/test/test_empty.py +++ b/test/test_empty.py @@ -102,7 +102,46 @@ def test_spike_empty(): assert_array_almost_equal(prof.y2, expected_y2, decimal=15) +def test_spike_sync_empty(): + st1 = SpikeTrain([], edges=(0.0, 1.0)) + st2 = SpikeTrain([], edges=(0.0, 1.0)) + d = spk.spike_sync(st1, st2) + assert_equal(d, 0.0) + prof = spk.spike_sync_profile(st1, st2) + assert_equal(d, prof.avrg()) + assert_array_equal(prof.x, [0.0, 1.0]) + assert_array_equal(prof.y, [0.0, 0.0]) + + st1 = SpikeTrain([], edges=(0.0, 1.0)) + st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0)) + d = spk.spike_sync(st1, st2) + assert_equal(d, 0.0) + prof = spk.spike_sync_profile(st1, st2) + assert_equal(d, prof.avrg()) + assert_array_equal(prof.x, [0.0, 0.4, 1.0]) + assert_array_equal(prof.y, [0.0, 0.0, 0.0]) + + st1 = SpikeTrain([0.6, ], edges=(0.0, 1.0)) + st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0)) + d = spk.spike_sync(st1, st2) + assert_almost_equal(d, 1.0, decimal=15) + prof = spk.spike_sync_profile(st1, st2) + assert_equal(d, prof.avrg()) + assert_array_almost_equal(prof.x, [0.0, 0.4, 0.6, 1.0], decimal=15) + assert_array_almost_equal(prof.y, [1.0, 1.0, 1.0, 1.0], decimal=15) + + st1 = SpikeTrain([0.2, ], edges=(0.0, 1.0)) + st2 = SpikeTrain([0.8, ], edges=(0.0, 1.0)) + d = spk.spike_sync(st1, st2) + assert_almost_equal(d, 0.0, decimal=15) + prof = spk.spike_sync_profile(st1, st2) + assert_equal(d, prof.avrg()) + assert_array_almost_equal(prof.x, [0.0, 0.2, 0.8, 1.0], decimal=15) + assert_array_almost_equal(prof.y, [0.0, 0.0, 0.0, 0.0], decimal=15) + + if __name__ == "__main__": test_get_non_empty() test_isi_empty() test_spike_empty() + test_spike_sync_empty() -- cgit v1.2.3 From a35402c208bd0ad31e5e60b6ddc55a3470e7bdde Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 11 May 2015 17:54:02 +0200 Subject: bugfix: spike_sync=1 for empty spike trains --- pyspike/DiscreteFunc.py | 4 ++-- pyspike/cython/cython_distances.pyx | 3 ++- pyspike/cython/cython_profiles.pyx | 12 ++++++++---- pyspike/cython/python_backend.py | 12 ++++++++---- test/test_empty.py | 4 ++-- 5 files changed, 22 insertions(+), 13 deletions(-) (limited to 'pyspike/cython/python_backend.py') diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py index 6ade87e..17153ee 100644 --- a/pyspike/DiscreteFunc.py +++ b/pyspike/DiscreteFunc.py @@ -137,8 +137,8 @@ class DiscreteFunc(object): """ if len(self.y) <= 2: - # no actual values in the profile, return spike sync of 0 - return 0.0, 1.0 + # no actual values in the profile, return 
spike sync of 1 + return 1.0, 1.0 def get_indices(ival): """ Returns the indices surrounding the given interval""" diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index 16780f2..c4f2349 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -391,7 +391,8 @@ def coincidence_value_cython(double[:] spikes1, double[:] spikes2, coinc += 2 if coinc == 0 and mp == 0: - # empty spike trains -> set mp to one to avoid 0/0 + # empty spike trains -> spike sync = 1 by definition + coinc = 1 mp = 1 return coinc, mp diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index d937a02..f9893eb 100644 --- a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -416,9 +416,13 @@ def coincidence_profile_cython(double[:] spikes1, double[:] spikes2, st[0] = t_start st[len(st)-1] = t_end - c[0] = c[1] - c[len(c)-1] = c[len(c)-2] - mp[0] = mp[1] - mp[len(mp)-1] = mp[len(mp)-2] + if N1 + N2 > 0: + c[0] = c[1] + c[len(c)-1] = c[len(c)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + else: + c[0] = 1 + c[1] = 1 return st, c, mp diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index 830dc69..69a420f 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -399,10 +399,14 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): st[0] = t_start st[len(st)-1] = t_end - c[0] = c[1] - c[len(c)-1] = c[len(c)-2] - mp[0] = mp[1] - mp[len(mp)-1] = mp[len(mp)-2] + if N1 + N2 > 0: + c[0] = c[1] + c[len(c)-1] = c[len(c)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + else: + c[0] = 1 + c[1] = 1 return st, c, mp diff --git a/test/test_empty.py b/test/test_empty.py index b31d8a4..48be25d 100644 --- a/test/test_empty.py +++ b/test/test_empty.py @@ -106,11 +106,11 @@ def test_spike_sync_empty(): st1 = SpikeTrain([], edges=(0.0, 1.0)) st2 = SpikeTrain([], edges=(0.0, 1.0)) d = spk.spike_sync(st1, st2) - assert_equal(d, 0.0) + assert_equal(d, 1.0) prof = spk.spike_sync_profile(st1, st2) assert_equal(d, prof.avrg()) assert_array_equal(prof.x, [0.0, 1.0]) - assert_array_equal(prof.y, [0.0, 0.0]) + assert_array_equal(prof.y, [1.0, 1.0]) st1 = SpikeTrain([], edges=(0.0, 1.0)) st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0)) -- cgit v1.2.3 From a37dc02a0f6c11a8773cba8a6137a2604222fa26 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 14 Dec 2015 16:17:28 +0100 Subject: python backend updated for new edge correction Eero's improved edge correction is now also implemented in the python backend.
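A note on the edge correction being ported here, before the diff: auxiliary spikes are obtained by mirroring the first and last inter-spike interval beyond the edges of the recording, so that spikes near the borders are compared against a plausible neighbouring spike rather than the bare interval limit. A minimal standalone sketch of that construction (the helper name is illustrative and not part of PySpike, but the two formulas are exactly the values assigned to t_aux1/t_aux2 in the patch below):

    import numpy as np

    def auxiliary_edges(spikes, t_start, t_end):
        # Mirror the first/last inter-spike interval outside the recording:
        # the auxiliary spike before the interval lies one ISI before the
        # first spike, the one after the interval lies one ISI after the last.
        t = np.asarray(spikes, dtype=float)
        aux_lo = min(t_start, t[0] - (t[1] - t[0]))
        aux_hi = max(t_end, t[-1] + (t[-1] - t[-2]))
        return aux_lo, aux_hi

    print(auxiliary_edges([0.1, 0.5, 0.9], t_start=0.0, t_end=1.0))
    # -> (-0.3, 1.3), i.e. both auxiliary spikes lie outside [0, 1]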
--- pyspike/cython/python_backend.py | 53 +++++++++++++++++++++++++--------------- 1 file changed, 33 insertions(+), 20 deletions(-) (limited to 'pyspike/cython/python_backend.py') diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index 69a420f..a5f7ae4 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -144,35 +144,44 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): y_starts = np.empty(len(spike_events)-1) y_ends = np.empty(len(spike_events)-1) + t_aux1 = np.zeros(2) + t_aux2 = np.zeros(2) + t_aux1[0] = min(t_start, t1[0]-(t1[1]-t1[0])) + t_aux1[1] = max(t_end, t1[N1-1]+(t1[N1-1]-t1[N1-2])) + t_aux2[0] = min(t_start, t2[0]-(t2[1]-t2[0])) + t_aux2[1] = max(t_end, t2[N2-1]+(t2[N2-1]-t2[N2-2])) + t_p1 = t_start if (t1[0] == t_start) else t_aux1[0] + t_p2 = t_start if (t2[0] == t_start) else t_aux2[0] + spike_events[0] = t_start - t_p1 = t_start - t_p2 = t_start if t1[0] > t_start: t_f1 = t1[0] - dt_f1 = get_min_dist(t_f1, t2, 0, t_start, t_end) + dt_f1 = get_min_dist(t_f1, t2, 0, t_aux2[0], t_aux2[1]) dt_p1 = dt_f1 isi1 = max(t_f1-t_start, t1[1]-t1[0]) - s1 = dt_p1*(t_f1-t_start)/isi1 + # s1 = dt_p1*(t_f1-t_start)/isi1 + s1 = dt_p1 index1 = -1 else: dt_p1 = 0.0 t_f1 = t1[1] - dt_f1 = get_min_dist(t_f1, t2, 0, t_start, t_end) + dt_f1 = get_min_dist(t_f1, t2, 0, t_aux2[0], t_aux2[1]) isi1 = t1[1]-t1[0] s1 = dt_p1 index1 = 0 if t2[0] > t_start: # dt_p1 = t2[0]-t_start t_f2 = t2[0] - dt_f2 = get_min_dist(t_f2, t1, 0, t_start, t_end) + dt_f2 = get_min_dist(t_f2, t1, 0, t_aux1[0], t_aux1[1]) dt_p2 = dt_f2 isi2 = max(t_f2-t_start, t2[1]-t2[0]) - s2 = dt_p2*(t_f2-t_start)/isi2 + # s2 = dt_p2*(t_f2-t_start)/isi2 + s2 = dt_p2 index2 = -1 else: dt_p2 = 0.0 t_f2 = t2[1] - dt_f2 = get_min_dist(t_f2, t1, 0, t_start, t_end) + dt_f2 = get_min_dist(t_f2, t1, 0, t_aux1[0], t_aux1[1]) isi2 = t2[1]-t2[0] s2 = dt_p2 index2 = 0 @@ -193,20 +202,22 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): if index1 < N1-1: t_f1 = t1[index1+1] else: - t_f1 = t_end + t_f1 = t_aux1[1] spike_events[index] = t_p1 s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) # now the next interval start value if index1 < N1-1: - dt_f1 = get_min_dist(t_f1, t2, index2, t_start, t_end) + dt_f1 = get_min_dist(t_f1, t2, index2, t_aux2[0], t_aux2[1]) isi1 = t_f1-t_p1 s1 = dt_p1 else: dt_f1 = dt_p1 isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) # s1 needs adjustment due to change of isi1 - s1 = dt_p1*(t_end-t1[N1-1])/isi1 + # s1 = dt_p1*(t_end-t1[N1-1])/isi1 + # Eero's correction: no adjustment + s1 = dt_p1 # s2 is the same as above, thus we can compute y2 immediately y_starts[index] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): @@ -220,20 +231,22 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): if index2 < N2-1: t_f2 = t2[index2+1] else: - t_f2 = t_end + t_f2 = t_aux2[1] spike_events[index] = t_p2 s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) # now the next interval start value if index2 < N2-1: - dt_f2 = get_min_dist(t_f2, t1, index1, t_start, t_end) + dt_f2 = get_min_dist(t_f2, t1, index1, t_aux1[0], t_aux1[1]) isi2 = t_f2-t_p2 s2 = dt_p2 else: dt_f2 = dt_p2 isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) # s2 needs adjustment due to change of isi2 - s2 = dt_p2*(t_end-t2[N2-1])/isi2 + # s2 = dt_p2*(t_end-t2[N2-1])/isi2 + # Eero's adjustment: no 
correction + s2 = dt_p2 # s2 is the same as above, thus we can compute y2 immediately y_starts[index] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) else: # t_f1 == t_f2 - generate only one event @@ -248,18 +261,18 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): y_starts[index] = 0.0 if index1 < N1-1: t_f1 = t1[index1+1] - dt_f1 = get_min_dist(t_f1, t2, index2, t_start, t_end) + dt_f1 = get_min_dist(t_f1, t2, index2, t_aux2[0], t_aux2[1]) isi1 = t_f1 - t_p1 else: - t_f1 = t_end + t_f1 = t_aux1[1] dt_f1 = dt_p1 isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if index2 < N2-1: t_f2 = t2[index2+1] - dt_f2 = get_min_dist(t_f2, t1, index1, t_start, t_end) + dt_f2 = get_min_dist(t_f2, t1, index1, t_aux1[0], t_aux1[1]) isi2 = t_f2 - t_p2 else: - t_f2 = t_end + t_f2 = t_aux2[1] dt_f2 = dt_p2 isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) index += 1 @@ -271,8 +284,8 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): # the ending value of the last interval isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) - s1 = dt_f1*(t_end-t1[N1-1])/isi1 - s2 = dt_f2*(t_end-t2[N2-1])/isi2 + s1 = dt_f1 # *(t_end-t1[N1-1])/isi1 + s2 = dt_f2 # *(t_end-t2[N2-1])/isi2 y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) # use only the data added above -- cgit v1.2.3 From 619fdef014c44a89a7ecef9078905ee44e373b84 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Fri, 18 Dec 2015 14:37:45 +0100 Subject: bugfix for edge correction fixed bug within new edge correction (auxiliary spike was ignored in some cases) added regression test with 10000 random spike train sets --- pyspike/cython/cython_distances.pyx | 10 +- pyspike/cython/python_backend.py | 21 +++- test/test_distance.py | 1 + .../regression_random_results_cSPIKY.mat | Bin 0 -> 149130 bytes test/test_regression/regression_random_spikes.mat | Bin 0 -> 16241579 bytes .../test_regression_random_spikes.py | 113 +++++++++++++++++++++ 6 files changed, 140 insertions(+), 5 deletions(-) create mode 100644 test/test_regression/regression_random_results_cSPIKY.mat create mode 100644 test/test_regression/regression_random_spikes.mat create mode 100644 test/test_regression/test_regression_random_spikes.py (limited to 'pyspike/cython/python_backend.py') diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index 41baa60..6c6e7e5 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -184,7 +184,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, N1 = len(t1) N2 = len(t2) - with nogil: # release the interpreter to allow multithreading + # with nogil: # release the interpreter to allow multithreading + if True: t_last = t_start # t_p1 = t_start # t_p2 = t_start @@ -193,6 +194,7 @@ def spike_distance_cython(double[:] t1, double[:] t2, t_aux1[1] = fmax(t_end, t1[N1-1]+(t1[N1-1]-t1[N1-2])) t_aux2[0] = fmin(t_start, t2[0]-(t2[1]-t2[0])) t_aux2[1] = fmax(t_end, t2[N2-1]+(t2[N2-1]-t2[N2-2])) + # print "aux spikes %.15f, %.15f ; %.15f, %.15f" % (t_aux1[0], t_aux1[1], t_aux2[0], t_aux2[1]) t_p1 = t_start if (t1[0] == t_start) else t_aux1[0] t_p2 = t_start if (t2[0] == t_start) else t_aux2[0] if t1[0] > t_start: @@ -207,7 +209,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, else: t_f1 = t1[1] dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1]) - dt_p1 = 0.0 + # dt_p1 = t_start-t_p2 # 0.0 + dt_p1 = get_min_dist_cython(t_p1, t2, N2, 0, t_aux2[0], t_aux2[1]) isi1 = t1[1]-t1[0] s1 = dt_p1 index1 = 0 @@ -223,7 +226,8 
@@ def spike_distance_cython(double[:] t1, double[:] t2, else: t_f2 = t2[1] dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1]) - dt_p2 = 0.0 + # dt_p2 = t_start-t_p1 # 0.0 + dt_p2 = get_min_dist_cython(t_p2, t1, N1, 0, t_aux1[0], t_aux1[1]) isi2 = t2[1]-t2[0] s2 = dt_p2 index2 = 0 diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index a5f7ae4..a007a7f 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -153,6 +153,8 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): t_p1 = t_start if (t1[0] == t_start) else t_aux1[0] t_p2 = t_start if (t2[0] == t_start) else t_aux2[0] + print "t_aux1", t_aux1, ", t_aux2:", t_aux2 + spike_events[0] = t_start if t1[0] > t_start: t_f1 = t1[0] @@ -163,7 +165,8 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): s1 = dt_p1 index1 = -1 else: - dt_p1 = 0.0 + # dt_p1 = t_start-t_p2 + dt_p1 = get_min_dist(t_p1, t2, 0, t_aux2[0], t_aux2[1]) t_f1 = t1[1] dt_f1 = get_min_dist(t_f1, t2, 0, t_aux2[0], t_aux2[1]) isi1 = t1[1]-t1[0] @@ -179,13 +182,18 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): s2 = dt_p2 index2 = -1 else: - dt_p2 = 0.0 + dt_p2 = t_start-t_p1 + dt_p2 = get_min_dist(t_p2, t1, 0, t_aux1[0], t_aux1[1]) t_f2 = t2[1] dt_f2 = get_min_dist(t_f2, t1, 0, t_aux1[0], t_aux1[1]) isi2 = t2[1]-t2[0] s2 = dt_p2 index2 = 0 + print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) + print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) + print "s1: ", repr(s1), ", s2:", repr(s2) + y_starts[0] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) index = 1 @@ -276,6 +284,11 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): dt_f2 = dt_p2 isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) index += 1 + + print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) + print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) + print "s1: ", repr(s1), ", s2:", repr(s2) + # the last event is the interval end if spike_events[index-1] == t_end: index -= 1 @@ -288,6 +301,10 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): s2 = dt_f2 # *(t_end-t2[N2-1])/isi2 y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) + print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) + print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) + print "s1: ", repr(s1), ", s2:", repr(s2) + # use only the data added above # could be less than original length due to equal spike times return spike_events[:index+1], y_starts[:index], y_ends[:index] diff --git a/test/test_distance.py b/test/test_distance.py index 8cf81e2..083d8a3 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -20,6 +20,7 @@ from pyspike import SpikeTrain import os TEST_PATH = os.path.dirname(os.path.realpath(__file__)) + def test_isi(): # generate two spike trains: t1 = SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0) diff --git a/test/test_regression/regression_random_results_cSPIKY.mat b/test/test_regression/regression_random_results_cSPIKY.mat new file mode 100644 index 0000000..47c9b50 Binary files /dev/null and b/test/test_regression/regression_random_results_cSPIKY.mat differ diff --git a/test/test_regression/regression_random_spikes.mat b/test/test_regression/regression_random_spikes.mat new file mode 100644 index 0000000..e5ebeb1 
Binary files /dev/null and b/test/test_regression/regression_random_spikes.mat differ diff --git a/test/test_regression/test_regression_random_spikes.py b/test/test_regression/test_regression_random_spikes.py new file mode 100644 index 0000000..757bab2 --- /dev/null +++ b/test/test_regression/test_regression_random_spikes.py @@ -0,0 +1,113 @@ +""" regression benchmark + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License +""" +import numpy as np +from scipy.io import loadmat +import pyspike as spk + +from numpy.testing import assert_almost_equal + +spk.disable_backend_warning = True + + +def test_regression_random(): + + spike_file = "test/test_regression/regression_random_spikes.mat" + spikes_name = "spikes" + result_name = "Distances" + result_file = "test/test_regression/regression_random_results_cSPIKY.mat" + + spike_train_sets = loadmat(spike_file)[spikes_name][0] + results_cSPIKY = loadmat(result_file)[result_name] + + for i, spike_train_data in enumerate(spike_train_sets): + spike_trains = [] + for spikes in spike_train_data[0]: + spike_trains.append(spk.SpikeTrain(spikes.flatten(), 100.0)) + + isi = spk.isi_distance_multi(spike_trains) + spike = spk.spike_distance_multi(spike_trains) + # spike_sync = spk.spike_sync_multi(spike_trains) + + assert_almost_equal(isi, results_cSPIKY[i][0], decimal=14, + err_msg="Index: %d, ISI" % i) + assert_almost_equal(spike, results_cSPIKY[i][1], decimal=14, + err_msg="Index: %d, SPIKE" % i) + + +def check_regression_dataset(spike_file="benchmark.mat", + spikes_name="spikes", + result_file="results_cSPIKY.mat", + result_name="Distances"): + """ Debugging function """ + np.set_printoptions(precision=15) + + spike_train_sets = loadmat(spike_file)[spikes_name][0] + + results_cSPIKY = loadmat(result_file)[result_name] + + err_max = 0.0 + err_max_ind = -1 + err_count = 0 + + for i, spike_train_data in enumerate(spike_train_sets): + spike_trains = [] + for spikes in spike_train_data[0]: + spike_trains.append(spk.SpikeTrain(spikes.flatten(), 100.0)) + + isi = spk.isi_distance_multi(spike_trains) + spike = spk.spike_distance_multi(spike_trains) + # spike_sync = spk.spike_sync_multi(spike_trains) + + if abs(isi - results_cSPIKY[i][0]) > 1E-14: + print "Error in ISI:", i, isi, results_cSPIKY[i][0] + print "Spike trains:" + for st in spike_trains: + print st.spikes + + err = abs(spike - results_cSPIKY[i][1]) + if err > 1E-14: + err_count += 1 + if err > err_max: + err_max = err + err_max_ind = i + + print "Total Errors:", err_count + + if err_max_ind > -1: + print "Max SPIKE distance error:", err_max, "at index:", err_max_ind + spike_train_data = spike_train_sets[err_max_ind] + for spikes in spike_train_data[0]: + print spikes.flatten() + + +def check_single_spike_train_set(index): + """ Debugging function """ + np.set_printoptions(precision=15) + + spike_train_sets = loadmat("benchmark.mat")['spikes'][0] + + results_cSPIKY = loadmat("results_cSPIKY.mat")['Distances'] + + spike_train_data = spike_train_sets[index] + + spike_trains = [] + for spikes in spike_train_data[0]: + print "Spikes:", spikes.flatten() + spike_trains.append(spk.SpikeTrain(spikes.flatten(), 100.0)) + + print spk.spike_distance_multi(spike_trains) + + print results_cSPIKY[index][1] + + print spike_trains[1].spikes + + +if __name__ == "__main__": + + test_regression_random() + # check_regression_dataset() + # check_single_spike_train_set(7633) -- cgit v1.2.3 From 1e581a1d384e3e0a9b136b962052f2d711d84c4d Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Fri, 18 Dec 2015
15:48:04 +0100 Subject: removed print outs from python backend --- pyspike/cython/python_backend.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) (limited to 'pyspike/cython/python_backend.py') diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index a007a7f..cf898d7 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -153,7 +153,7 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): t_p1 = t_start if (t1[0] == t_start) else t_aux1[0] t_p2 = t_start if (t2[0] == t_start) else t_aux2[0] - print "t_aux1", t_aux1, ", t_aux2:", t_aux2 + # print "t_aux1", t_aux1, ", t_aux2:", t_aux2 spike_events[0] = t_start if t1[0] > t_start: @@ -190,9 +190,9 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): s2 = dt_p2 index2 = 0 - print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) - print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) - print "s1: ", repr(s1), ", s2:", repr(s2) + # print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) + # print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) + # print "s1: ", repr(s1), ", s2:", repr(s2) y_starts[0] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) index = 1 @@ -285,9 +285,9 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) index += 1 - print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) - print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) - print "s1: ", repr(s1), ", s2:", repr(s2) + # print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) + # print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) + # print "s1: ", repr(s1), ", s2:", repr(s2) # the last event is the interval end if spike_events[index-1] == t_end: @@ -301,9 +301,9 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): s2 = dt_f2 # *(t_end-t2[N2-1])/isi2 y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) - print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) - print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) - print "s1: ", repr(s1), ", s2:", repr(s2) + # print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) + # print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) + # print "s1: ", repr(s1), ", s2:", repr(s2) # use only the data added above # could be less than original length due to equal spike times -- cgit v1.2.3 From e32272f4540de347abcc548a94239b625458b3a6 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 22 Dec 2015 18:15:59 +0100 Subject: changed edge correction for single spikes Spike trains with single spikes now only get auxiliary spikes at the edges for the SPIKE distance instead of real spikes before. 
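Before the diff, a sketch of the single-spike case this commit addresses: with two or more spikes the first and last ISI are mirrored outside the interval as before, but a lone spike has no ISI to mirror, so the auxiliary edges fall back to the interval borders. The conditionals below mirror the "if N1 > 1 else" expressions added in the patch; the helper name is illustrative only:

    def aux_edges_single_safe(spikes, t_start, t_end):
        # With >= 2 spikes, mirror the first/last ISI outside the interval
        # (note that 2*t[0]-t[1] equals t[0]-(t[1]-t[0])); with a single
        # spike there is no ISI, so the interval borders themselves are used.
        n = len(spikes)
        aux_lo = min(t_start, 2*spikes[0] - spikes[1]) if n > 1 else t_start
        aux_hi = max(t_end, 2*spikes[-1] - spikes[-2]) if n > 1 else t_end
        return aux_lo, aux_hi

    print(aux_edges_single_safe([0.4], 0.0, 1.0))       # (0.0, 1.0)
    print(aux_edges_single_safe([0.3, 0.9], 0.0, 1.0))  # (-0.3, 1.5)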
--- pyspike/SpikeTrain.py | 2 +- pyspike/cython/cython_distances.pyx | 89 ++++++++++++++++++++++--------------- pyspike/cython/cython_profiles.pyx | 67 +++++++++++++++------------- pyspike/cython/python_backend.py | 68 +++++++++++++--------------- test/test_empty.py | 16 ++++--- 5 files changed, 130 insertions(+), 112 deletions(-) (limited to 'pyspike/cython/python_backend.py') diff --git a/pyspike/SpikeTrain.py b/pyspike/SpikeTrain.py index 4b59a5d..19f2419 100644 --- a/pyspike/SpikeTrain.py +++ b/pyspike/SpikeTrain.py @@ -68,7 +68,7 @@ class SpikeTrain(object): """Returns the spikes of this spike train with auxiliary spikes in case of empty spike trains. """ - if len(self.spikes) < 2: + if len(self.spikes) < 1: return np.unique(np.insert([self.t_start, self.t_end], 1, self.spikes)) else: diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index 6c6e7e5..7dc9cb9 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -55,20 +55,27 @@ def isi_distance_cython(double[:] s1, double[:] s2, N2 = len(s2) # first interspike interval - check if a spike exists at the start time + # and also account for spike trains with single spikes if s1[0] > t_start: - # edge correction - nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) + # edge correction for the first interspike interval: + # take the maximum of the distance from the beginning to the first + # spike and the interval between the first two spikes. + # if there is only one spike, take its distance to the beginning + nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) if N1 > 1 else s1[0]-t_start index1 = -1 else: + # if the first spike is exactly at the start, take the distance + # to the next spike. If this is the only spike, take the distance to + # the end. + nu1 = s1[1]-s1[0] if N1 > 1 else t_end-s1[0] index1 = 0 if s2[0] > t_start: - # edge correction - nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) + # edge correction as above + nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) if N2 > 1 else s2[0]-t_start index2 = -1 else: - nu2 = s2[1]-s2[0] + nu2 = s2[1]-s2[0] if N2 > 1 else t_end-s2[0] index2 = 0 last_t = t_start @@ -86,8 +93,12 @@ def isi_distance_cython(double[:] s1, double[:] s2, if index1 < N1-1: nu1 = s1[index1+1]-s1[index1] else: - # edge correction - nu1 = fmax(t_end-s1[index1], nu1) + # edge correction for the last ISI: + # take the max of the distance of the last + # spike to the end and the previous ISI. If there was only + # one spike, always take the distance to the end.
+ nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \ + else t_end-s1[index1] elif (index2 < N2-1) and ((index1 == N1-1) or (s1[index1+1] > s2[index2+1])): index2 += 1 @@ -95,8 +106,9 @@ def isi_distance_cython(double[:] s1, double[:] s2, if index2 < N2-1: nu2 = s2[index2+1]-s2[index2] else: - # edge correction - nu2 = fmax(t_end-s2[index2], nu2) + # edge correction for the end as above + nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \ + else t_end-s2[index2] else: # s1[index1+1] == s2[index2+1] index1 += 1 index2 += 1 @@ -104,13 +116,15 @@ def isi_distance_cython(double[:] s1, double[:] s2, if index1 < N1-1: nu1 = s1[index1+1]-s1[index1] else: - # edge correction - nu1 = fmax(t_end-s1[index1], nu1) + # edge correction for the end as above + nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \ + else t_end-s1[index1] if index2 < N2-1: nu2 = s2[index2+1]-s2[index2] else: - # edge correction - nu2 = fmax(t_end-s2[index2], nu2) + # edge correction for the end as above + nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \ + else t_end-s2[index2] # compute the corresponding isi-distance isi_value += curr_isi * (curr_t - last_t) curr_isi = fabs(nu1 - nu2) / fmax(nu1, nu2) @@ -184,16 +198,18 @@ def spike_distance_cython(double[:] t1, double[:] t2, N1 = len(t1) N2 = len(t2) - # with nogil: # release the interpreter to allow multithreading - if True: + # we can assume at least one spike per spike train + assert N1 > 0 + assert N2 > 0 + + + with nogil: # release the interpreter to allow multithreading t_last = t_start - # t_p1 = t_start - # t_p2 = t_start # auxiliary spikes for edge correction - consistent with first/last ISI - t_aux1[0] = fmin(t_start, t1[0]-(t1[1]-t1[0])) - t_aux1[1] = fmax(t_end, t1[N1-1]+(t1[N1-1]-t1[N1-2])) - t_aux2[0] = fmin(t_start, t2[0]-(t2[1]-t2[0])) - t_aux2[1] = fmax(t_end, t2[N2-1]+(t2[N2-1]-t2[N2-2])) + t_aux1[0] = fmin(t_start, 2*t1[0]-t1[1]) if N1 > 1 else t_start + t_aux1[1] = fmax(t_end, 2*t1[N1-1]-t1[N1-2]) if N1 > 1 else t_end + t_aux2[0] = fmin(t_start, 2*t2[0]-t2[1]) if N2 > 1 else t_start + t_aux2[1] = fmax(t_end, 2*t2[N2-1]-t2[N2-2]) if N2 > 1 else t_end # print "aux spikes %.15f, %.15f ; %.15f, %.15f" % (t_aux1[0], t_aux1[1], t_aux2[0], t_aux2[1]) t_p1 = t_start if (t1[0] == t_start) else t_aux1[0] t_p2 = t_start if (t2[0] == t_start) else t_aux2[0] @@ -201,17 +217,16 @@ def spike_distance_cython(double[:] t1, double[:] t2, # dt_p1 = t2[0]-t_start t_f1 = t1[0] dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1]) - isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) + isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) if N1 > 1 else t_f1-t_start dt_p1 = dt_f1 # s1 = dt_p1*(t_f1-t_start)/isi1 s1 = dt_p1 index1 = -1 - else: - t_f1 = t1[1] + else: # t1[0] == t_start + t_f1 = t1[1] if N1 > 1 else t_end dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1]) - # dt_p1 = t_start-t_p2 # 0.0 dt_p1 = get_min_dist_cython(t_p1, t2, N2, 0, t_aux2[0], t_aux2[1]) - isi1 = t1[1]-t1[0] + isi1 = t_f1-t1[0] s1 = dt_p1 index1 = 0 if t2[0] > t_start: @@ -219,16 +234,16 @@ def spike_distance_cython(double[:] t1, double[:] t2, # dt_p1 = t2[0]-t_start t_f2 = t2[0] dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1]) dt_p2 = dt_f2 - isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) + isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) if N2 > 1 else t_f2-t_start # s2 = dt_p2*(t_f2-t_start)/isi2 s2 = dt_p2 index2 = -1 - else: - t_f2 = t2[1] + else: # t2[0] == t_start + t_f2 = t2[1] if N2 > 1 else t_end dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1]) # dt_p2 = t_start-t_p1 # 0.0 dt_p2 = get_min_dist_cython(t_p2, t1,
N1, 0, t_aux1[0], t_aux1[1]) - isi2 = t2[1]-t2[0] + isi2 = t_f2-t2[0] s2 = dt_p2 index2 = 0 @@ -263,7 +278,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, s1 = dt_p1 else: dt_f1 = dt_p1 - isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \ + else t_end-t1[N1-1] # s1 needs adjustment due to change of isi1 # s1 = dt_p1*(t_end-t1[N1-1])/isi1 # Eero's correction: no adjustment @@ -296,7 +312,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, s2 = dt_p2 else: dt_f2 = dt_p2 - isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \ + else t_end-t2[N2-1] # s2 needs adjustment due to change of isi2 # s2 = dt_p2*(t_end-t2[N2-1])/isi2 # Eero's correction: no adjustment @@ -322,7 +339,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, else: t_f1 = t_aux1[1] dt_f1 = dt_p1 - isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \ + else t_end-t1[N1-1] if index2 < N2-1: t_f2 = t2[index2+1] dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, @@ -331,7 +349,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, else: t_f2 = t_aux2[1] dt_f2 = dt_p2 - isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \ + else t_end-t2[N2-1] index += 1 t_last = t_curr # isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index f08de6e..4a42cdb 100644 --- a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -63,18 +63,18 @@ def isi_profile_cython(double[:] s1, double[:] s2, # first interspike interval - check if a spike exists at the start time if s1[0] > t_start: # edge correction - nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) + nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) if N1 > 1 else s1[0]-t_start index1 = -1 else: - nu1 = s1[1]-s1[0] + nu1 = s1[1]-s1[0] if N1 > 1 else t_end-s1[0] index1 = 0 if s2[0] > t_start: # edge correction - nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) + nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) if N2 > 1 else s2[0]-t_start index2 = -1 else: - nu2 = s2[1]-s2[0] + nu2 = s2[1]-s2[0] if N2 > 1 else t_end-s2[0] index2 = 0 isi_values[0] = fabs(nu1-nu2)/fmax(nu1, nu2) @@ -92,7 +92,8 @@ def isi_profile_cython(double[:] s1, double[:] s2, nu1 = s1[index1+1]-s1[index1] else: # edge correction - nu1 = fmax(t_end-s1[index1], nu1) + nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \ + else t_end-s1[index1] elif (index2 < N2-1) and ((index1 == N1-1) or (s1[index1+1] > s2[index2+1])): index2 += 1 @@ -101,7 +102,8 @@ def isi_profile_cython(double[:] s1, double[:] s2, nu2 = s2[index2+1]-s2[index2] else: # edge correction - nu2 = fmax(t_end-s2[index2], nu2) + nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \ + else t_end-s2[index2] else: # s1[index1+1] == s2[index2+1] index1 += 1 index2 += 1 @@ -110,12 +112,14 @@ def isi_profile_cython(double[:] s1, double[:] s2, nu1 = s1[index1+1]-s1[index1] else: # edge correction - nu1 = fmax(t_end-s1[index1], nu1) + nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \ + else t_end-s1[index1] if index2 < N2-1: nu2 = s2[index2+1]-s2[index2] else: # edge correction - nu2 = fmax(t_end-s2[index2], nu2) + nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \ + else t_end-s2[index2] # compute the corresponding isi-distance isi_values[index] = fabs(nu1 - nu2) / fmax(nu1, nu2) index += 1 @@ -191,9 +195,9 @@ def spike_profile_cython(double[:] t1, double[:] t2, N1 = len(t1) N2 = len(t2) - 
# we can assume at least two spikes per spike train - assert N1 > 1 - assert N2 > 1 + # we can assume at least one spikes per spike train + assert N1 > 0 + assert N2 > 0 spike_events = np.empty(N1+N2+2) @@ -205,26 +209,26 @@ def spike_profile_cython(double[:] t1, double[:] t2, # t_p1 = t_start # t_p2 = t_start # auxiliary spikes for edge correction - consistent with first/last ISI - t_aux1[0] = fmin(t_start, t1[0]-(t1[1]-t1[0])) - t_aux1[1] = fmax(t_end, t1[N1-1]+(t1[N1-1]-t1[N1-2])) - t_aux2[0] = fmin(t_start, t2[0]-(t2[1]-t2[0])) - t_aux2[1] = fmax(t_end, t2[N2-1]+(t2[N2-1]-t2[N2-2])) + t_aux1[0] = fmin(t_start, 2*t1[0]-t1[1]) if N1 > 1 else t_start + t_aux1[1] = fmax(t_end, 2*t1[N1-1]-t1[N1-2]) if N1 > 1 else t_end + t_aux2[0] = fmin(t_start, 2*t2[0]-t2[1]) if N2 > 1 else t_start + t_aux2[1] = fmax(t_end, 2*t2[N2-1]-t2[N2-2]) if N2 > 1 else t_end t_p1 = t_start if (t1[0] == t_start) else t_aux1[0] t_p2 = t_start if (t2[0] == t_start) else t_aux2[0] if t1[0] > t_start: # dt_p1 = t2[0]-t_start t_f1 = t1[0] dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1]) - isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) + isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) if N1 > 1 else t_f1-t_start dt_p1 = dt_f1 # s1 = dt_p1*(t_f1-t_start)/isi1 s1 = dt_p1 index1 = -1 else: - t_f1 = t1[1] + t_f1 = t1[1] if N1 > 1 else t_end dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1]) - dt_p1 = 0.0 - isi1 = t1[1]-t1[0] + dt_p1 = get_min_dist_cython(t_p1, t2, N2, 0, t_aux2[0], t_aux2[1]) + isi1 = t_f1-t1[0] s1 = dt_p1 index1 = 0 if t2[0] > t_start: @@ -232,15 +236,15 @@ def spike_profile_cython(double[:] t1, double[:] t2, t_f2 = t2[0] dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1]) dt_p2 = dt_f2 - isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) + isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) if N2 > 1 else t_f2-t_start # s2 = dt_p2*(t_f2-t_start)/isi2 s2 = dt_p2 index2 = -1 else: - t_f2 = t2[1] + t_f2 = t2[1] if N2 > 1 else t_end dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1]) - dt_p2 = 0.0 - isi2 = t2[1]-t2[0] + dt_p2 = get_min_dist_cython(t_p2, t1, N1, 0, t_aux1[0], t_aux1[1]) + isi2 = t_f2-t2[0] s2 = dt_p2 index2 = 0 @@ -273,7 +277,8 @@ def spike_profile_cython(double[:] t1, double[:] t2, s1 = dt_p1 else: dt_f1 = dt_p1 - isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \ + else t_end-t1[N1-1] # s1 needs adjustment due to change of isi1 # s1 = dt_p1*(t_end-t1[N1-1])/isi1 # Eero's correction: no adjustment @@ -305,7 +310,8 @@ def spike_profile_cython(double[:] t1, double[:] t2, s2 = dt_p2 else: dt_f2 = dt_p2 - isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \ + else t_end-t2[N2-1] # s2 needs adjustment due to change of isi2 # s2 = dt_p2*(t_end-t2[N2-1])/isi2 # Eero's correction: no adjustment @@ -330,7 +336,8 @@ def spike_profile_cython(double[:] t1, double[:] t2, else: t_f1 = t_aux1[1] dt_f1 = dt_p1 - isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \ + else t_end-t1[N1-1] if index2 < N2-1: t_f2 = t2[index2+1] dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, @@ -339,18 +346,14 @@ def spike_profile_cython(double[:] t1, double[:] t2, else: t_f2 = t_aux2[1] dt_f2 = dt_p2 - isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \ + else t_end-t2[N2-1] index += 1 # the last event is the interval end if spike_events[index-1] == t_end: index -= 1 else: 
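The auxiliary spikes set up above mirror the first and last ISI beyond the interval edges (note that 2*t[0]-t[1] equals t[0]-(t[1]-t[0])). A compact sketch of the same rule, assuming an illustrative helper name aux_spikes:

    def aux_spikes(t, t_start, t_end):
        # mirror the first/last ISI outside the interval; a single-spike
        # train falls back to the interval edges themselves
        if len(t) > 1:
            return (min(t_start, 2 * t[0] - t[1]),
                    max(t_end, 2 * t[-1] - t[-2]))
        return (t_start, t_end)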
spike_events[index] = t_end - # the ending value of the last interval - isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) - isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) - # s1 = dt_f1*(t_end-t1[N1-1])/isi1 - # s2 = dt_f2*(t_end-t2[N2-1])/isi2 s1 = dt_f1 s2 = dt_f2 y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index cf898d7..a1e3a65 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -28,17 +28,17 @@ def isi_distance_python(s1, s2, t_start, t_end): isi_values = np.empty(len(spike_events) - 1) if s1[0] > t_start: # edge correction - nu1 = max(s1[0] - t_start, s1[1] - s1[0]) + nu1 = max(s1[0] - t_start, s1[1] - s1[0]) if N1 > 1 else s1[0]-t_start index1 = -1 else: - nu1 = s1[1] - s1[0] + nu1 = s1[1] - s1[0] if N1 > 1 else t_end-s1[0] index1 = 0 if s2[0] > t_start: # edge correction - nu2 = max(s2[0] - t_start, s2[1] - s2[0]) + nu2 = max(s2[0] - t_start, s2[1] - s2[0]) if N2 > 1 else s2[0]-t_start index2 = -1 else: - nu2 = s2[1] - s2[0] + nu2 = s2[1] - s2[0] if N2 > 1 else t_end-s2[0] index2 = 0 isi_values[0] = abs(nu1 - nu2) / max(nu1, nu2) @@ -52,7 +52,8 @@ def isi_distance_python(s1, s2, t_start, t_end): nu1 = s1[index1+1]-s1[index1] else: # edge correction - nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2]) + nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2]) if N1 > 1 \ + else t_end-s1[N1-1] elif (index2 < N2-1) and (index1 == N1-1 or s1[index1+1] > s2[index2+1]): @@ -62,7 +63,8 @@ def isi_distance_python(s1, s2, t_start, t_end): nu2 = s2[index2+1]-s2[index2] else: # edge correction - nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2]) + nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2]) if N2 > 1 \ + else t_end-s2[N2-1] else: # s1[index1 + 1] == s2[index2 + 1] index1 += 1 @@ -72,12 +74,14 @@ def isi_distance_python(s1, s2, t_start, t_end): nu1 = s1[index1+1]-s1[index1] else: # edge correction - nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2]) + nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2]) if N1 > 1 \ + else t_end-s1[N1-1] if index2 < N2-1: nu2 = s2[index2+1]-s2[index2] else: # edge correction - nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2]) + nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2]) if N2 > 1 \ + else t_end-s2[N2-1] # compute the corresponding isi-distance isi_values[index] = abs(nu1 - nu2) / \ max(nu1, nu2) @@ -146,10 +150,10 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): t_aux1 = np.zeros(2) t_aux2 = np.zeros(2) - t_aux1[0] = min(t_start, t1[0]-(t1[1]-t1[0])) - t_aux1[1] = max(t_end, t1[N1-1]+(t1[N1-1]-t1[N1-2])) - t_aux2[0] = min(t_start, t2[0]-(t2[1]-t2[0])) - t_aux2[1] = max(t_end, t2[N2-1]+(t2[N2-1]-t2[N2-2])) + t_aux1[0] = min(t_start, t1[0]-(t1[1]-t1[0])) if N1 > 1 else t_start + t_aux1[1] = max(t_end, t1[N1-1]+(t1[N1-1]-t1[N1-2])) if N1 > 1 else t_end + t_aux2[0] = min(t_start, t2[0]-(t2[1]-t2[0])) if N2 > 1 else t_start + t_aux2[1] = max(t_end, t2[N2-1]+(t2[N2-1]-t2[N2-2])) if N2 > 1 else t_end t_p1 = t_start if (t1[0] == t_start) else t_aux1[0] t_p2 = t_start if (t2[0] == t_start) else t_aux2[0] @@ -160,16 +164,16 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): t_f1 = t1[0] dt_f1 = get_min_dist(t_f1, t2, 0, t_aux2[0], t_aux2[1]) dt_p1 = dt_f1 - isi1 = max(t_f1-t_start, t1[1]-t1[0]) + isi1 = max(t_f1-t_start, t1[1]-t1[0]) if N1 > 1 else t_f1-t_start # s1 = dt_p1*(t_f1-t_start)/isi1 s1 = dt_p1 index1 = -1 else: # dt_p1 = t_start-t_p2 + t_f1 = t1[1] if N1 > 1 else t_end dt_p1 = get_min_dist(t_p1, t2, 0, t_aux2[0], t_aux2[1]) - t_f1 = t1[1] 
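Note the changed initialization above: if the first spike lies exactly at t_start, the previous distance dt_p is no longer assumed to be zero but is the distance from t_start to the closest spike of the other train, auxiliary spikes included. A simplified equivalent of that lookup (without the early-exit optimization of get_min_dist):

    def min_dist(t, other_spikes, aux):
        # smallest |t - s| over the other train's spikes plus its two
        # auxiliary edge spikes
        candidates = [aux[0]] + list(other_spikes) + [aux[1]]
        return min(abs(t - s) for s in candidates)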
dt_f1 = get_min_dist(t_f1, t2, 0, t_aux2[0], t_aux2[1]) - isi1 = t1[1]-t1[0] + isi1 = t_f1-t1[0] s1 = dt_p1 index1 = 0 if t2[0] > t_start: @@ -177,23 +181,18 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): t_f2 = t2[0] dt_f2 = get_min_dist(t_f2, t1, 0, t_aux1[0], t_aux1[1]) dt_p2 = dt_f2 - isi2 = max(t_f2-t_start, t2[1]-t2[0]) + isi2 = max(t_f2-t_start, t2[1]-t2[0]) if N2 > 1 else t_f2-t_start # s2 = dt_p2*(t_f2-t_start)/isi2 s2 = dt_p2 index2 = -1 else: - dt_p2 = t_start-t_p1 + t_f2 = t2[1] if N2 > 1 else t_end dt_p2 = get_min_dist(t_p2, t1, 0, t_aux1[0], t_aux1[1]) - t_f2 = t2[1] dt_f2 = get_min_dist(t_f2, t1, 0, t_aux1[0], t_aux1[1]) - isi2 = t2[1]-t2[0] + isi2 = t_f2-t2[0] s2 = dt_p2 index2 = 0 - # print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) - # print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) - # print "s1: ", repr(s1), ", s2:", repr(s2) - y_starts[0] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) index = 1 @@ -221,7 +220,8 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): s1 = dt_p1 else: dt_f1 = dt_p1 - isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \ + else t_end-t1[N1-1] # s1 needs adjustment due to change of isi1 # s1 = dt_p1*(t_end-t1[N1-1])/isi1 # Eero's correction: no adjustment @@ -250,7 +250,8 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): s2 = dt_p2 else: dt_f2 = dt_p2 - isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \ + else t_end-t2[N2-1] # s2 needs adjustment due to change of isi2 # s2 = dt_p2*(t_end-t2[N2-1])/isi2 # Eero's adjustment: no correction @@ -274,7 +275,8 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): else: t_f1 = t_aux1[1] dt_f1 = dt_p1 - isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \ + else t_end-t1[N1-1] if index2 < N2-1: t_f2 = t2[index2+1] dt_f2 = get_min_dist(t_f2, t1, index1, t_aux1[0], t_aux1[1]) @@ -282,29 +284,19 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end): else: t_f2 = t_aux2[1] dt_f2 = dt_p2 - isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \ + else t_end-t2[N2-1] index += 1 - # print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) - # print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) - # print "s1: ", repr(s1), ", s2:", repr(s2) - # the last event is the interval end if spike_events[index-1] == t_end: index -= 1 else: spike_events[index] = t_end - # the ending value of the last interval - isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) - isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) s1 = dt_f1 # *(t_end-t1[N1-1])/isi1 s2 = dt_f2 # *(t_end-t2[N2-1])/isi2 y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2) - # print "t_p1:", repr(t_p1), ", t_f1:", repr(t_f1), ", dt_p1:", repr(dt_p1), ", dt_f1:", repr(dt_f1) - # print "t_p2:", repr(t_p2), ", t_f2:", repr(t_f2), ", dt_p2:", repr(dt_p2), ", dt_f2:", repr(dt_f2) - # print "s1: ", repr(s1), ", s2:", repr(s2) - # use only the data added above # could be less than original length due to equal spike times return spike_events[:index+1], y_starts[:index], y_ends[:index] diff --git a/test/test_empty.py b/test/test_empty.py index 5a0042f..4d0a5cf 100644 --- a/test/test_empty.py +++ b/test/test_empty.py @@ 
-24,7 +24,9 @@ def test_get_non_empty(): st = SpikeTrain([0.5, ], edges=(0.0, 1.0)) spikes = st.get_spikes_non_empty() - assert_array_equal(spikes, [0.0, 0.5, 1.0]) + # assert_array_equal(spikes, [0.0, 0.5, 1.0]) + # spike trains with one spike don't get edge spikes anymore + assert_array_equal(spikes, [0.5, ]) def test_isi_empty(): @@ -70,21 +72,23 @@ def test_spike_empty(): st1 = SpikeTrain([], edges=(0.0, 1.0)) st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0)) d = spk.spike_distance(st1, st2) - d_expect = 0.4*0.4*1.0/(0.4+1.0)**2 + 0.6*0.4*1.0/(0.6+1.0)**2 + d_expect = 2*0.4*0.4*1.0/(0.4+1.0)**2 + 2*0.6*0.4*1.0/(0.6+1.0)**2 assert_almost_equal(d, d_expect, decimal=15) prof = spk.spike_profile(st1, st2) assert_equal(d, prof.avrg()) assert_array_equal(prof.x, [0.0, 0.4, 1.0]) - assert_array_almost_equal(prof.y1, [0.0, 2*0.4*1.0/(0.6+1.0)**2], + assert_array_almost_equal(prof.y1, [2*0.4*1.0/(0.4+1.0)**2, + 2*0.4*1.0/(0.6+1.0)**2], decimal=15) - assert_array_almost_equal(prof.y2, [2*0.4*1.0/(0.4+1.0)**2, 0.0], + assert_array_almost_equal(prof.y2, [2*0.4*1.0/(0.4+1.0)**2, + 2*0.4*1.0/(0.6+1.0)**2], decimal=15) st1 = SpikeTrain([0.6, ], edges=(0.0, 1.0)) st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0)) d = spk.spike_distance(st1, st2) - s1 = np.array([0.0, 0.4*0.2/0.6, 0.2, 0.0]) - s2 = np.array([0.0, 0.2, 0.2*0.4/0.6, 0.0]) + s1 = np.array([0.2, 0.2, 0.2, 0.2]) + s2 = np.array([0.2, 0.2, 0.2, 0.2]) isi1 = np.array([0.6, 0.6, 0.4]) isi2 = np.array([0.4, 0.6, 0.6]) expected_y1 = (s1[:-1]*isi2+s2[:-1]*isi1) / (0.5*(isi1+isi2)**2) -- cgit v1.2.3 From b09561705ab9c67c93a384248f7c3bc9ad5bdd32 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 3 Feb 2016 13:07:26 +0100 Subject: fixed spike-sync bug fixed ugly bugs in code for computing multi-variate spike sync profile and multi-variate spike sync value. --- pyspike/DiscreteFunc.py | 9 ++++---- pyspike/cython/cython_add.pyx | 33 ++++++++++++++------------- pyspike/cython/cython_distances.pyx | 5 ---- pyspike/cython/python_backend.py | 33 +++++++++++++++------------ pyspike/spike_sync.py | 10 ++++++-- test/numeric/test_regression_random_spikes.py | 27 +++++++++++++++------- test/test_distance.py | 33 +++++++++++++++++++++++++++ 7 files changed, 99 insertions(+), 51 deletions(-) (limited to 'pyspike/cython/python_backend.py') diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py index fe97bc2..caad290 100644 --- a/pyspike/DiscreteFunc.py +++ b/pyspike/DiscreteFunc.py @@ -170,10 +170,6 @@ expected." start_ind, end_ind = get_indices(ival) value += np.sum(self.y[start_ind:end_ind]) multiplicity += np.sum(self.mp[start_ind:end_ind]) - if multiplicity == 0.0: - # empty profile, return spike sync of 1 - value = 1.0 - multiplicity = 1.0 return (value, multiplicity) def avrg(self, interval=None, normalize=True): @@ -190,7 +186,10 @@ expected." 
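As a quick sanity check of the updated expectation in test_spike_empty above, using the profile values the test itself asserts (variable names here are illustrative):

    # empty train vs. a single spike at 0.4 on the interval [0, 1]
    isi_empty = 1.0                                   # ISI of the empty train
    y_a = 2 * 0.4 * isi_empty / (0.4 + isi_empty)**2  # profile value on [0, 0.4]
    y_b = 2 * 0.4 * isi_empty / (0.6 + isi_empty)**2  # profile value on [0.4, 1]
    d_expect = 0.4 * y_a + 0.6 * y_b                  # time-weighted average, ~0.3508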
""" val, mp = self.integral(interval) if normalize: - return val/mp + if mp > 0: + return val/mp + else: + return 1.0 else: return val diff --git a/pyspike/cython/cython_add.pyx b/pyspike/cython/cython_add.pyx index 8da1e53..25f1181 100644 --- a/pyspike/cython/cython_add.pyx +++ b/pyspike/cython/cython_add.pyx @@ -182,8 +182,8 @@ def add_discrete_function_cython(double[:] x1, double[:] y1, double[:] mp1, cdef int index1 = 0 cdef int index2 = 0 cdef int index = 0 - cdef int N1 = len(y1) - cdef int N2 = len(y2) + cdef int N1 = len(y1)-1 + cdef int N2 = len(y2)-1 x_new[0] = x1[0] while (index1+1 < N1) and (index2+1 < N2): if x1[index1+1] < x2[index2+1]: @@ -206,20 +206,21 @@ def add_discrete_function_cython(double[:] x1, double[:] y1, double[:] mp1, y_new[index] = y1[index1] + y2[index2] mp_new[index] = mp1[index1] + mp2[index2] # one array reached the end -> copy the contents of the other to the end - if index1+1 < len(y1): - x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] - y_new[index+1:index+1+len(x1)-index1-1] = y1[index1+1:] - mp_new[index+1:index+1+len(x1)-index1-1] = mp1[index1+1:] - index += len(x1)-index1-1 - elif index2+1 < len(y2): - x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] - y_new[index+1:index+1+len(x2)-index2-1] = y2[index2+1:] - mp_new[index+1:index+1+len(x2)-index2-1] = mp2[index2+1:] - index += len(x2)-index2-1 - # else: # both arrays reached the end simultaneously - # x_new[index+1] = x1[-1] - # y_new[index+1] = y1[-1] + y2[-1] - # mp_new[index+1] = mp1[-1] + mp2[-1] + if index1+1 < N1: + x_new[index+1:index+1+N1-index1] = x1[index1+1:] + y_new[index+1:index+1+N1-index1] = y1[index1+1:] + mp_new[index+1:index+1+N1-index1] = mp1[index1+1:] + index += N1-index1 + elif index2+1 < N2: + x_new[index+1:index+1+N2-index2] = x2[index2+1:] + y_new[index+1:index+1+N2-index2] = y2[index2+1:] + mp_new[index+1:index+1+N2-index2] = mp2[index2+1:] + index += N2-index2 + else: # both arrays reached the end simultaneously + x_new[index+1] = x1[index1+1] + y_new[index+1] = y1[index1+1] + y2[index2+1] + mp_new[index+1] = mp1[index1+1] + mp2[index2+1] + index += 1 y_new[0] = y_new[1] mp_new[0] = mp_new[1] diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index 7dc9cb9..ac5f226 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -428,9 +428,4 @@ def coincidence_value_cython(double[:] spikes1, double[:] spikes2, mp += 2 coinc += 2 - if coinc == 0 and mp == 0: - # empty spike trains -> spike sync = 1 by definition - coinc = 1 - mp = 1 - return coinc, mp diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index a1e3a65..6b7209a 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -560,7 +560,9 @@ def add_discrete_function_python(x1, y1, mp1, x2, y2, mp2): index1 = 0 index2 = 0 index = 0 - while (index1+1 < len(y1)) and (index2+1 < len(y2)): + N1 = len(x1)-1 + N2 = len(x2)-1 + while (index1+1 < N1) and (index2+1 < N2): if x1[index1+1] < x2[index2+1]: index1 += 1 index += 1 @@ -581,20 +583,21 @@ def add_discrete_function_python(x1, y1, mp1, x2, y2, mp2): y_new[index] = y1[index1] + y2[index2] mp_new[index] = mp1[index1] + mp2[index2] # one array reached the end -> copy the contents of the other to the end - if index1+1 < len(y1): - x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] - y_new[index+1:index+1+len(x1)-index1-1] = y1[index1+1:] - mp_new[index+1:index+1+len(x1)-index1-1] = mp1[index1+1:] - index += len(x1)-index1-1 - elif 
index2+1 < len(y2): - x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] - y_new[index+1:index+1+len(x2)-index2-1] = y2[index2+1:] - mp_new[index+1:index+1+len(x2)-index2-1] = mp2[index2+1:] - index += len(x2)-index2-1 - # else: # both arrays reached the end simultaneously - # x_new[index+1] = x1[-1] - # y_new[index+1] = y1[-1] + y2[-1] - # mp_new[index+1] = mp1[-1] + mp2[-1] + if index1+1 < N1: + x_new[index+1:index+1+N1-index1] = x1[index1+1:] + y_new[index+1:index+1+N1-index1] = y1[index1+1:] + mp_new[index+1:index+1+N1-index1] = mp1[index1+1:] + index += N1-index1 + elif index2+1 < N2: + x_new[index+1:index+1+N2-index2] = x2[index2+1:] + y_new[index+1:index+1+N2-index2] = y2[index2+1:] + mp_new[index+1:index+1+N2-index2] = mp2[index2+1:] + index += N2-index2 + else: # both arrays reached the end simultaneously + x_new[index+1] = x1[-1] + y_new[index+1] = y1[-1] + y2[-1] + mp_new[index+1] = mp1[-1] + mp2[-1] + index += 1 y_new[0] = y_new[1] mp_new[0] = mp_new[1] diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index 3dc29ff..802b98c 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -119,7 +119,10 @@ def spike_sync(spike_train1, spike_train2, interval=None, max_tau=None): """ c, mp = _spike_sync_values(spike_train1, spike_train2, interval, max_tau) - return 1.0*c/mp + if mp == 0: + return 1.0 + else: + return 1.0*c/mp ############################################################ @@ -187,7 +190,10 @@ def spike_sync_multi(spike_trains, indices=None, interval=None, max_tau=None): coincidence += c mp += m - return coincidence/mp + if mp == 0.0: + return 1.0 + else: + return coincidence/mp ############################################################ diff --git a/test/numeric/test_regression_random_spikes.py b/test/numeric/test_regression_random_spikes.py index 6156bb4..73d39a6 100644 --- a/test/numeric/test_regression_random_spikes.py +++ b/test/numeric/test_regression_random_spikes.py @@ -35,7 +35,9 @@ def test_regression_random(): spike = spk.spike_distance_multi(spike_trains) spike_prof = spk.spike_profile_multi(spike_trains).avrg() - # spike_sync = spk.spike_sync_multi(spike_trains) + + spike_sync = spk.spike_sync_multi(spike_trains) + spike_sync_prof = spk.spike_sync_profile_multi(spike_trains).avrg() assert_almost_equal(isi, results_cSPIKY[i][0], decimal=14, err_msg="Index: %d, ISI" % i) @@ -47,6 +49,9 @@ def test_regression_random(): assert_almost_equal(spike_prof, results_cSPIKY[i][1], decimal=14, err_msg="Index: %d, SPIKE" % i) + assert_almost_equal(spike_sync, spike_sync_prof, decimal=14, + err_msg="Index: %d, SPIKE-Sync" % i) + def check_regression_dataset(spike_file="benchmark.mat", spikes_name="spikes", @@ -109,19 +114,25 @@ def check_single_spike_train_set(index): spike_train_data = spike_train_sets[index] spike_trains = [] + N = 0 for spikes in spike_train_data[0]: - print("Spikes:", spikes.flatten()) - spike_trains.append(spk.SpikeTrain(spikes.flatten(), 100.0)) + N += len(spikes.flatten()) + print("Spikes:", len(spikes.flatten())) + spikes_array = spikes.flatten() + if len(spikes_array > 0) and (spikes_array[-1] > 100.0): + spikes_array[-1] = 100.0 + spike_trains.append(spk.SpikeTrain(spikes_array, 100.0)) + print(spike_trains[-1].spikes) - print(spk.spike_distance_multi(spike_trains)) + print(N) - print(results_cSPIKY[index][1]) + print(spk.spike_sync_multi(spike_trains)) - print(spike_trains[1].spikes) + print(spk.spike_sync_profile_multi(spike_trains).integral()) if __name__ == "__main__": - test_regression_random() + # 
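The convention adopted above for empty profiles can be summarized in one helper (a sketch; sync_value is an illustrative name, not the library API):

    def sync_value(coincidences, multiplicity):
        # a profile with multiplicity 0 contains no spikes in the
        # interval and counts as perfectly synchronous by definition
        return coincidences / multiplicity if multiplicity > 0 else 1.0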
test_regression_random() # check_regression_dataset() - # check_single_spike_train_set(7633) + check_single_spike_train_set(4) diff --git a/test/test_distance.py b/test/test_distance.py index 083d8a3..fe09f34 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -217,6 +217,28 @@ def test_spike_sync(): assert_almost_equal(spk.spike_sync(spikes1, spikes2), 0.4, decimal=16) + spikes1 = SpikeTrain([1.0, 2.0, 4.0], 4.0) + spikes2 = SpikeTrain([3.8], 4.0) + spikes3 = SpikeTrain([3.9, ], 4.0) + + expected_x = np.array([0.0, 1.0, 2.0, 3.8, 4.0, 4.0]) + expected_y = np.array([0.0, 0.0, 0.0, 1.0, 1.0, 1.0]) + + f = spk.spike_sync_profile(spikes1, spikes2) + + assert_array_almost_equal(f.x, expected_x, decimal=16) + assert_array_almost_equal(f.y, expected_y, decimal=16) + + f2 = spk.spike_sync_profile(spikes2, spikes3) + + i1 = f.integral() + i2 = f2.integral() + f.add(f2) + i12 = f.integral() + + assert_equal(i1[0]+i2[0], i12[0]) + assert_equal(i1[1]+i2[1], i12[1]) + def check_multi_profile(profile_func, profile_func_multi, dist_func_multi): # generate spike trains: @@ -313,6 +335,17 @@ def test_multi_spike_sync(): assert_equal(np.sum(f.y[1:-1]), 39932) assert_equal(np.sum(f.mp[1:-1]), 85554) + # example with 2 empty spike trains + sts = [] + sts.append(SpikeTrain([1, 9], [0, 10])) + sts.append(SpikeTrain([1, 3], [0, 10])) + sts.append(SpikeTrain([], [0, 10])) + sts.append(SpikeTrain([], [0, 10])) + + assert_almost_equal(spk.spike_sync_multi(sts), 1.0/6.0, decimal=15) + assert_almost_equal(spk.spike_sync_profile_multi(sts).avrg(), 1.0/6.0, + decimal=15) + def check_dist_matrix(dist_func, dist_matrix_func): # generate spike trains: -- cgit v1.2.3 From 34bd30415dd93a2425ce566627e24ee9483ada3e Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Thu, 20 Sep 2018 10:49:42 -0700 Subject: Spike Order support (#39) * reorganized directionality module * further refactoring of directionality * completed python directionality backend * added SPIKE-Sync based filtering new function filter_by_spike_sync removes spikes that have a multi-variate Spike Sync value below some threshold not yet fully tested, python backend missing. * spike sync filtering, cython sim ann Added function for filtering out events based on a threshold for the spike sync values. Usefull for focusing on synchronous events during directionality analysis. Also added cython version of simulated annealing for performance. * added coincidence single profile to python backend missing function in python backend added, identified and fixed a bug in the implementation as well * updated test case to new spike sync behavior * python3 fixes * another python3 fix * reorganized directionality module * further refactoring of directionality * completed python directionality backend * added SPIKE-Sync based filtering new function filter_by_spike_sync removes spikes that have a multi-variate Spike Sync value below some threshold not yet fully tested, python backend missing. * spike sync filtering, cython sim ann Added function for filtering out events based on a threshold for the spike sync values. Usefull for focusing on synchronous events during directionality analysis. Also added cython version of simulated annealing for performance. 
* added coincidence single profile to python backend missing function in python backend added, identified and fixed a bug in the implementation as well * updated test case to new spike sync behavior * python3 fixes * another python3 fix * Fix absolute imports in directionality measures * remove commented code * Add directionality to docs, bump version * Clean up directionality module, add doxy. * Remove debug print from tests * Fix bug in calling Python backend * Fix incorrect integrals in PieceWiseConstFunc (#36) * Add (some currently failing) tests for PieceWiseConstFunc.integral * Fix implementation of PieceWiseConstFunc.integral Just by adding a special condition for when we are only taking an integral "between" two edges of a PieceWiseConstFunc All tests now pass. Fixes #33. * Add PieceWiseConstFunc.integral tests for ValueError * Add testing bounds of integral * Raise ValueError in function implementation * Fix incorrect integrals in PieceWiseLinFunc (#38) Integrals of piece-wise linear functions were incorrect if the requested interval lies completely between two support points. This has been fixed, and a unit test exercising this behavior was added. Fixes #38 * Add Spike Order example and Tutorial section Adds an example computing spike order profile and the optimal spike train order. Also adds a section on spike train order to the tutorial. --- Changelog | 3 + Readme.rst | 9 +- doc/pyspike.rst | 6 + doc/tutorial.rst | 66 +++ examples/spike_train_order.py | 52 +++ pyspike/PieceWiseConstFunc.py | 32 +- pyspike/PieceWiseLinFunc.py | 42 +- pyspike/__init__.py | 16 +- pyspike/cython/cython_directionality.pyx | 262 ++++++++++++ pyspike/cython/cython_distances.pyx | 200 +++++++++ pyspike/cython/cython_profiles.pyx | 33 ++ pyspike/cython/cython_simulated_annealing.pyx | 82 ++++ pyspike/cython/directionality_python_backend.py | 144 +++++++ pyspike/cython/python_backend.py | 67 ++- pyspike/spike_directionality.py | 522 ++++++++++++++++++++++++ pyspike/spike_sync.py | 55 ++- setup.py | 28 +- test/test_directionality.py | 97 +++++ test/test_function.py | 62 +++ test/test_sync_filter.py | 95 +++++ 20 files changed, 1812 insertions(+), 61 deletions(-) create mode 100644 examples/spike_train_order.py create mode 100644 pyspike/cython/cython_directionality.pyx create mode 100644 pyspike/cython/cython_simulated_annealing.pyx create mode 100644 pyspike/cython/directionality_python_backend.py create mode 100644 pyspike/spike_directionality.py create mode 100644 test/test_directionality.py create mode 100644 test/test_sync_filter.py (limited to 'pyspike/cython/python_backend.py') diff --git a/Changelog b/Changelog index 21b7cb0..88e16cc 100644 --- a/Changelog +++ b/Changelog @@ -1,3 +1,6 @@ +PySpike v0.6: + * Support for computing spike directionality and spike train order + PySpike v0.5: * First beta release * Python 2.6 support removed diff --git a/Readme.rst b/Readme.rst index 0422dad..74b014b 100644 --- a/Readme.rst +++ b/Readme.rst @@ -31,19 +31,14 @@ Additionally, depending on the used methods: ISI-distance [1], SPIKE-distance [2 Important Changelog ----------------------------- +With version 0.6.0, the spike directionality and spike train order function have been added. + With version 0.5.0, the interfaces have been unified and the specific functions for multivariate computations have become deprecated. With version 0.2.0, the :code:`SpikeTrain` class has been introduced to represent spike trains. This is a breaking change in the function interfaces. 
Hence, programs written for older versions of PySpike (0.1.x) will not run with newer versions. - -Upcoming Functionality ------------------------- - -In an upcoming release, new functionality for analyzing Synfire patterns based on the new measures SPIKE-Order and Spike-Train-Order method will become part of the PySpike library. The new measures and algorithms are described in `this preprint `_. - Requirements and Installation ----------------------------- diff --git a/doc/pyspike.rst b/doc/pyspike.rst index 74ab439..3b10d2a 100644 --- a/doc/pyspike.rst +++ b/doc/pyspike.rst @@ -64,6 +64,12 @@ PSTH :undoc-members: :show-inheritance: +Directionality +........................................ +.. automodule:: pyspike.spike_directionality + :members: + :undoc-members: + :show-inheritance: Helper functions ........................................ diff --git a/doc/tutorial.rst b/doc/tutorial.rst index aff03a8..377c0a2 100644 --- a/doc/tutorial.rst +++ b/doc/tutorial.rst @@ -231,3 +231,69 @@ The following example computes and plots the ISI- and SPIKE-distance matrix as w plt.title("SPIKE-Sync") plt.show() + + +Quantifying Leaders and Followers: Spike Train Order +---------------------------------------------------- + +PySpike provides functionality to quantify how much a set of spike trains +resembles a synfire pattern (i.e. a perfect leader-follower pattern). For details +on the algorithms, please see +`our article in NJP `_. + +The following example computes the Spike Order profile and Synfire Indicator +of two Poissonian spike trains. + +.. code:: python + + import numpy as np + from matplotlib import pyplot as plt + import pyspike as spk + + + st1 = spk.generate_poisson_spikes(1.0, [0, 20]) + st2 = spk.generate_poisson_spikes(1.0, [0, 20]) + + d = spk.spike_directionality(st1, st2) + + print "Spike Directionality of two Poissonian spike trains:", d + + E = spk.spike_train_order_profile(st1, st2) + + plt.figure() + x, y = E.get_plottable_data() + plt.plot(x, y, '-ob') + plt.ylim(-1.1, 1.1) + plt.xlabel("t") + plt.ylabel("E") + plt.title("Spike Train Order Profile") + + plt.show() + +Additionally, PySpike can compute the optimal ordering of the spike trains, +i.e. the ordering that most resembles a synfire pattern. The following example +computes the optimal order of a set of 20 Poissonian spike trains: + +.. 
code:: python + + M = 20 + spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100]) for m in xrange(M)] + + F_init = spk.spike_train_order(spike_trains) + print "Initial Synfire Indicator for 20 Poissonian spike trains:", F_init + + D_init = spk.spike_directionality_matrix(spike_trains) + phi, _ = spk.optimal_spike_train_sorting(spike_trains) + F_opt = spk.spike_train_order(spike_trains, indices=phi) + print "Synfire Indicator of optimized spike train sorting:", F_opt + + D_opt = spk.permutate_matrix(D_init, phi) + + plt.figure() + plt.imshow(D_init) + plt.title("Initial Directionality Matrix") + + plt.figure() + plt.imshow(D_opt) + plt.title("Optimized Directionality Matrix") + + plt.show() diff --git a/examples/spike_train_order.py b/examples/spike_train_order.py new file mode 100644 index 0000000..3a42472 --- /dev/null +++ b/examples/spike_train_order.py @@ -0,0 +1,52 @@ +import numpy as np +from matplotlib import pyplot as plt +import pyspike as spk + + +st1 = spk.generate_poisson_spikes(1.0, [0, 20]) +st2 = spk.generate_poisson_spikes(1.0, [0, 20]) + +d = spk.spike_directionality(st1, st2) + +print "Spike Directionality of two Poissonian spike trains:", d + +E = spk.spike_train_order_profile(st1, st2) + +plt.figure() +x, y = E.get_plottable_data() +plt.plot(x, y, '-ob') +plt.ylim(-1.1, 1.1) +plt.xlabel("t") +plt.ylabel("E") +plt.title("Spike Train Order Profile") + + +###### Optimize spike train order of 20 Random spike trains ####### + +M = 20 + +spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100]) for m in xrange(M)] + +F_init = spk.spike_train_order(spike_trains) + +print "Initial Synfire Indicator for 20 Poissonian spike trains:", F_init + +D_init = spk.spike_directionality_matrix(spike_trains) + +phi, _ = spk.optimal_spike_train_sorting(spike_trains) + +F_opt = spk.spike_train_order(spike_trains, indices=phi) + +print "Synfire Indicator of optimized spike train sorting:", F_opt + +D_opt = spk.permutate_matrix(D_init, phi) + +plt.figure() +plt.imshow(D_init) +plt.title("Initial Directionality Matrix") + +plt.figure() +plt.imshow(D_opt) +plt.title("Optimized Directionality Matrix") + +plt.show() diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py index 5ce5f27..17fdd3f 100644 --- a/pyspike/PieceWiseConstFunc.py +++ b/pyspike/PieceWiseConstFunc.py @@ -129,19 +129,31 @@ class PieceWiseConstFunc(object): # no interval given, integrate over the whole spike train a = np.sum((self.x[1:]-self.x[:-1]) * self.y) else: + if interval[0]>interval[1]: + raise ValueError("Invalid averaging interval: interval[0]>=interval[1]") + if interval[0]self.x[-1]: + raise ValueError("Invalid averaging interval: interval[0] 0 and end_ind < len(self.x), \ - "Invalid averaging interval" - # first the contribution from between the indices - a = np.sum((self.x[start_ind+1:end_ind+1] - - self.x[start_ind:end_ind]) * - self.y[start_ind:end_ind]) - # correction from start to first index - a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1] - # correction from last index to end - a += (interval[1]-self.x[end_ind]) * self.y[end_ind] + if start_ind > end_ind: + # contribution from between two closest edges + a = (self.x[start_ind]-self.x[end_ind]) * self.y[end_ind] + # minus the part that is not within the interval + a -= ((interval[0]-self.x[end_ind])+(self.x[start_ind]-interval[1])) * self.y[end_ind] + else: + assert start_ind > 0 and end_ind < len(self.x), \ + "Invalid averaging interval" + # first the contribution from between the indices + a = 
np.sum((self.x[start_ind+1:end_ind+1] - + self.x[start_ind:end_ind]) * + self.y[start_ind:end_ind]) + # correction from start to first index + a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1] + # correction from last index to end + a += (interval[1]-self.x[end_ind]) * self.y[end_ind] return a def avrg(self, interval=None): diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py index 8145e63..8faaec4 100644 --- a/pyspike/PieceWiseLinFunc.py +++ b/pyspike/PieceWiseLinFunc.py @@ -146,31 +146,47 @@ class PieceWiseLinFunc: if interval is None: # no interval given, integrate over the whole spike train - integral = np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2)) + return np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2)) + + # find the indices corresponding to the interval + start_ind = np.searchsorted(self.x, interval[0], side='right') + end_ind = np.searchsorted(self.x, interval[1], side='left')-1 + assert start_ind > 0 and end_ind < len(self.x), \ + "Invalid averaging interval" + if start_ind > end_ind: + print(start_ind, end_ind, self.x[start_ind]) + # contribution from between two closest edges + y_x0 = intermediate_value(self.x[start_ind-1], + self.x[start_ind], + self.y1[start_ind-1], + self.y2[start_ind-1], + interval[0]) + y_x1 = intermediate_value(self.x[start_ind-1], + self.x[start_ind], + self.y1[start_ind-1], + self.y2[start_ind-1], + interval[1]) + print(y_x0, y_x1, interval[1] - interval[0]) + integral = (y_x0 + y_x1) * 0.5 * (interval[1] - interval[0]) + print(integral) else: - # find the indices corresponding to the interval - start_ind = np.searchsorted(self.x, interval[0], side='right') - end_ind = np.searchsorted(self.x, interval[1], side='left')-1 - assert start_ind > 0 and end_ind < len(self.x), \ - "Invalid averaging interval" # first the contribution from between the indices integral = np.sum((self.x[start_ind+1:end_ind+1] - - self.x[start_ind:end_ind]) * - 0.5*(self.y1[start_ind:end_ind] + - self.y2[start_ind:end_ind])) + self.x[start_ind:end_ind]) * + 0.5*(self.y1[start_ind:end_ind] + + self.y2[start_ind:end_ind])) # correction from start to first index integral += (self.x[start_ind]-interval[0]) * 0.5 * \ (self.y2[start_ind-1] + - intermediate_value(self.x[start_ind-1], + intermediate_value(self.x[start_ind-1], self.x[start_ind], self.y1[start_ind-1], self.y2[start_ind-1], - interval[0] - )) + interval[0])) # correction from last index to end integral += (interval[1]-self.x[end_ind]) * 0.5 * \ (self.y1[end_ind] + - intermediate_value(self.x[end_ind], self.x[end_ind+1], + intermediate_value(self.x[end_ind], self.x[end_ind+1], self.y1[end_ind], self.y2[end_ind], interval[1] )) diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 08253fb..3897d18 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -1,5 +1,5 @@ """ -Copyright 2014-2015, Mario Mulansky +Copyright 2014-2018, Mario Mulansky Distributed under the BSD License """ @@ -7,8 +7,8 @@ Distributed under the BSD License from __future__ import absolute_import __all__ = ["isi_distance", "spike_distance", "spike_sync", "psth", - "spikes", "SpikeTrain", "PieceWiseConstFunc", "PieceWiseLinFunc", - "DiscreteFunc", "directionality"] + "spikes", "spike_directionality", "SpikeTrain", + "PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc"] from .PieceWiseConstFunc import PieceWiseConstFunc from .PieceWiseLinFunc import PieceWiseLinFunc @@ -20,13 +20,21 @@ from .isi_distance import isi_profile, isi_distance, isi_profile_multi,\ from .spike_distance import 
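Both integral fixes above address the same corner case: an averaging interval that lies entirely between two support points. Minimal sketches of the two branches, assuming a segment index i with x[i] <= a <= b <= x[i+1]:

    def pwc_between(y, a, b, i):
        # piece-wise constant: the function equals y[i] on the whole segment
        return (b - a) * y[i]

    def pwl_between(x, y1, y2, a, b, i):
        # piece-wise linear: interpolate the segment endpoints, then
        # apply the trapezoid rule
        def f(t):
            return y1[i] + (y2[i] - y1[i]) * (t - x[i]) / (x[i+1] - x[i])
        return 0.5 * (f(a) + f(b)) * (b - a)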
spike_profile, spike_distance, spike_profile_multi,\ spike_distance_multi, spike_distance_matrix from .spike_sync import spike_sync_profile, spike_sync,\ - spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix + spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix,\ + filter_by_spike_sync from .psth import psth from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \ spike_train_from_string, import_spike_trains_from_time_series, \ merge_spike_trains, generate_poisson_spikes +from .spike_directionality import spike_directionality, \ + spike_directionality_values, spike_directionality_matrix, \ + spike_train_order_profile, spike_train_order_profile_bi, \ + spike_train_order_profile_multi, spike_train_order, \ + spike_train_order_bi, spike_train_order_multi, \ + optimal_spike_train_sorting, permutate_matrix + # define the __version__ following # http://stackoverflow.com/questions/17583443 from pkg_resources import get_distribution, DistributionNotFound diff --git a/pyspike/cython/cython_directionality.pyx b/pyspike/cython/cython_directionality.pyx new file mode 100644 index 0000000..ac37690 --- /dev/null +++ b/pyspike/cython/cython_directionality.pyx @@ -0,0 +1,262 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_directionality.pyx + +cython implementation of the spike delay asymmetry measures + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_directionality.pyx + +which gives:: + + cython_directionality.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport fabs +from libc.math cimport fmax +from libc.math cimport fmin + +# from pyspike.cython.cython_distances cimport get_tau + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +############################################################ +# get_tau +############################################################ +cdef inline double get_tau(double[:] spikes1, double[:] spikes2, + int i, int j, double interval, double max_tau): + cdef double m = interval # use interval length as initial tau + cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1 + cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1 + if i < N1 and i > -1: + m = fmin(m, spikes1[i+1]-spikes1[i]) + if j < N2 and j > -1: + m = fmin(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = fmin(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = fmin(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = fmin(m, max_tau) + return m + + +############################################################ +# spike_train_order_profile_cython +############################################################ +def spike_train_order_profile_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int n = 0 + cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times + cdef double[:] a = np.zeros(N1 + N2 + 2) # asymmetry values + cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + st[n] = spikes1[i] + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the 
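The coincidence window computed by get_tau above is half the smallest interspike interval adjacent to the two candidate spikes, optionally capped by max_tau. The same rule in plain Python (coincidence_window is an illustrative name):

    def coincidence_window(s1, s2, i, j, default, max_tau=0.0):
        # half the minimum ISI around s1[i] and s2[j]; max_tau == 0 means no cap
        m = default
        if 0 <= i < len(s1) - 1:
            m = min(m, s1[i+1] - s1[i])
        if 0 <= j < len(s2) - 1:
            m = min(m, s2[j+1] - s2[j])
        if i > 0:
            m = min(m, s1[i] - s1[i-1])
        if j > 0:
            m = min(m, s2[j] - s2[j-1])
        m *= 0.5
        return min(m, max_tau) if max_tau > 0.0 else m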
previous spike + # spike from spike train 1 after spike train 2 + # both get marked with -1 + a[n] = -1 + a[n-1] = -1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + st[n] = spikes2[j] + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # both get marked with 1 + a[n] = 1 + a[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + n += 1 + # add only one event with zero asymmetry value and multiplicity 2 + st[n] = spikes1[i] + a[n] = 0 + mp[n] = 2 + + st = st[:n+2] + a = a[:n+2] + mp = mp[:n+2] + + st[0] = t_start + st[len(st)-1] = t_end + if N1 + N2 > 0: + a[0] = a[1] + a[len(a)-1] = a[len(a)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + else: + a[0] = 1 + a[1] = 1 + + return st, a, mp + + +############################################################ +# spike_train_order_cython +############################################################ +def spike_train_order_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int d = 0 + cdef int mp = 0 + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 2 appeared before spike in spike train 1 + # mark with -1 + d -= 2 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 1 appeared before spike in spike train 2 + # mark with +1 + d += 2 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + # add only one event with multiplicity 2, but no asymmetry counting + mp += 2 + + if d == 0 and mp == 0: + # empty spike trains -> spike sync = 1 by definition + d = 1 + mp = 1 + + return d, mp + + +############################################################ +# spike_directionality_profiles_cython +############################################################ +def spike_directionality_profiles_cython(double[:] spikes1, + double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef double[:] d1 = np.zeros(N1) # directionality values + cdef double[:] d2 = np.zeros(N2) # directionality values + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 after spike train 2 + # leading spike gets +1, following spike -1 + d1[i] = -1 + d2[j] = +1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and 
spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # leading spike gets +1, following spike -1 + d1[i] = +1 + d2[j] = -1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + # equal spike times: zero asymmetry value + d1[i] = 0 + d2[j] = 0 + + return d1, d2 + + +############################################################ +# spike_directionality_cython +############################################################ +def spike_directionality_cython(double[:] spikes1, + double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int d = 0 # directionality value + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 after spike train 2 + # leading spike gets +1, following spike -1 + d -= 1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # leading spike gets +1, following spike -1 + d += 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + + return d diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index ac5f226..d4070ae 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -178,6 +178,8 @@ cdef inline double isi_avrg_cython(double isi1, double isi2) nogil: return 0.5*(isi1+isi2)*(isi1+isi2) # alternative definition to obtain ~ 0.5 for Poisson spikes # return 0.5*(isi1*isi1+isi2*isi2) + # another alternative definition without second normalization + # return 0.5*(isi1+isi2) ############################################################ @@ -248,6 +250,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, index2 = 0 y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) index = 1 while index1+index2 < N1+N2-2: @@ -267,6 +271,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, t_curr = t_p1 s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) @@ -286,6 +292,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, s1 = dt_p1 # s2 is the same as above, thus we can compute y2 immediately y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): index2 += 1 # first calculate the previous interval end value @@ -301,6 +309,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, t_curr = t_p2 s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, 
isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) @@ -320,6 +330,9 @@ def spike_distance_cython(double[:] t1, double[:] t2, s2 = dt_p2 # s1 is the same as above, thus we can compute y2 immediately y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) + else: # t_f1 == t_f2 - generate only one event index1 += 1 index2 += 1 @@ -358,6 +371,193 @@ def spike_distance_cython(double[:] t1, double[:] t2, s1 = dt_f1 # *(t_end-t1[N1-1])/isi1 s2 = dt_f2 # *(t_end-t2[N2-1])/isi2 y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_end - t_last) + # end nogil + + # use only the data added above + # could be less than original length due to equal spike times + return spike_value / (t_end-t_start) + + +############################################################ +# isi_avrg_rf_cython +############################################################ +cdef inline double isi_avrg_rf_cython(double isi1, double isi2) nogil: + # rate free version + return (isi1+isi2) + + +############################################################ +# spike_distance_rf_cython +############################################################ +def spike_distance_rf_cython(double[:] t1, double[:] t2, + double t_start, double t_end): + + cdef int N1, N2, index1, index2, index + cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2 + cdef double isi1, isi2, s1, s2 + cdef double y_start, y_end, t_last, t_current, spike_value + + spike_value = 0.0 + + N1 = len(t1) + N2 = len(t2) + + with nogil: # release the interpreter to allow multithreading + t_last = t_start + t_p1 = t_start + t_p2 = t_start + if t1[0] > t_start: + # dt_p1 = t2[0]-t_start + t_f1 = t1[0] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) + dt_p1 = dt_f1 + s1 = dt_p1*(t_f1-t_start)/isi1 + index1 = -1 + else: + t_f1 = t1[1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + dt_p1 = 0.0 + isi1 = t1[1]-t1[0] + s1 = dt_p1 + index1 = 0 + if t2[0] > t_start: + # dt_p1 = t2[0]-t_start + t_f2 = t2[0] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = dt_f2 + isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) + s2 = dt_p2*(t_f2-t_start)/isi2 + index2 = -1 + else: + t_f2 = t2[1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = 0.0 + isi2 = t2[1]-t2[0] + s2 = dt_p2 + index2 = 0 + + # y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + index = 1 + + while index1+index2 < N1+N2-2: + # print(index, index1, index2) + if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1): + index1 += 1 + # first calculate the previous interval end value + s1 = dt_f1*(t_f1-t_p1) / isi1 + # the previous time now was the following time before: + dt_p1 = dt_f1 + t_p1 = t_f1 # t_p1 contains the current time point + # get the next time + if index1 < N1-1: + t_f1 = t1[index1+1] + else: + t_f1 = t_end + t_curr = t_p1 + s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 + # y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + 
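The two normalizations used above differ only in the denominator of the local SPIKE value; side by side as a sketch mirroring isi_avrg_cython and isi_avrg_rf_cython:

    def isi_avrg(isi1, isi2):
        # standard SPIKE-distance denominator
        return 0.5 * (isi1 + isi2) * (isi1 + isi2)

    def isi_avrg_rf(isi1, isi2):
        # "rate free" variant: no second normalization by the mean ISI
        return isi1 + isi2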
s2) / isi_avrg_rf_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + + # now the next interval start value + if index1 < N1-1: + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1-t_p1 + s1 = dt_p1 + else: + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # s1 needs adjustment due to change of isi1 + s1 = dt_p1*(t_end-t1[N1-1])/isi1 + # s2 is the same as above, thus we can compute y2 immediately + # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): + index2 += 1 + # first calculate the previous interval end value + s2 = dt_f2*(t_f2-t_p2) / isi2 + # the previous time now was the following time before: + dt_p2 = dt_f2 + t_p2 = t_f2 # t_p2 contains the current time point + # get the next time + if index2 < N2-1: + t_f2 = t2[index2+1] + else: + t_f2 = t_end + t_curr = t_p2 + s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 + # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + + # now the next interval start value + if index2 < N2-1: + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2-t_p2 + s2 = dt_p2 + else: + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + # s2 needs adjustment due to change of isi2 + s2 = dt_p2*(t_end-t2[N2-1])/isi2 + # s1 is the same as above, thus we can compute y2 immediately + # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + else: # t_f1 == t_f2 - generate only one event + index1 += 1 + index2 += 1 + t_p1 = t_f1 + t_p2 = t_f2 + dt_p1 = 0.0 + dt_p2 = 0.0 + t_curr = t_f1 + y_end = 0.0 + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + y_start = 0.0 + if index1 < N1-1: + t_f1 = t1[index1+1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1 - t_p1 + else: + t_f1 = t_end + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + if index2 < N2-1: + t_f2 = t2[index2+1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2 - t_p2 + else: + t_f2 = t_end + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + index += 1 + t_last = t_curr + # isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + s1 = dt_f1*(t_end-t1[N1-1])/isi1 + s2 = dt_f2*(t_end-t2[N2-1])/isi2 + # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + spike_value += 0.5*(y_start + y_end) * (t_end - t_last) # end nogil diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index 4a42cdb..aa24db4 100644 --- a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -450,3 +450,36 @@ def coincidence_profile_cython(double[:] spikes1, double[:] spikes2, c[1] = 1 return st, c, mp + + +############################################################ +# coincidence_single_profile_cython +############################################################ +def coincidence_single_profile_cython(double[:] spikes1, double[:] 
spikes2, + double t_start, double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int j = -1 + cdef double[:] c = np.zeros(N1) # coincidences + cdef double interval = t_end - t_start + cdef double tau + for i in xrange(N1): + while j < N2-1 and spikes2[j+1] < spikes1[i]: + # move forward until spikes2[j] is the last spike before spikes1[i] + # note that if spikes2[j] is after spikes1[i] we dont do anything + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and fabs(spikes1[i]-spikes2[j]) < tau: + # current spike in st1 is coincident + c[i] = 1 + if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]): + # in case spikes2[j] is before spikes1[i] it has to be the one + # right before (see above), hence we move one forward and also + # check the next spike + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if fabs(spikes2[j]-spikes1[i]) < tau: + # current spike in st1 is coincident + c[i] = 1 + return c diff --git a/pyspike/cython/cython_simulated_annealing.pyx b/pyspike/cython/cython_simulated_annealing.pyx new file mode 100644 index 0000000..be9423c --- /dev/null +++ b/pyspike/cython/cython_simulated_annealing.pyx @@ -0,0 +1,82 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_simulated_annealing.pyx + +cython implementation of a simulated annealing algorithm to find the optimal +spike train order + +Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects +improves the performance of spike_distance by a factor of 10! + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_simulated_annealing.pyx + +which gives: + + cython_simulated_annealing.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport exp +from libc.math cimport fmod +from libc.stdlib cimport rand +from libc.stdlib cimport RAND_MAX + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +def sim_ann_cython(double[:, :] D, double T_start, double T_end, double alpha): + + cdef long N = len(D) + cdef double A = np.sum(np.triu(D, 0)) + cdef long[:] p = np.arange(N) + cdef double T = T_start + cdef long iterations + cdef long succ_iter + cdef long total_iter = 0 + cdef double delta_A + cdef long ind1 + cdef long ind2 + + while T > T_end: + iterations = 0 + succ_iter = 0 + # equilibrate for 100*N steps or 10*N successful steps + while iterations < 100*N and succ_iter < 10*N: + # exchange two rows and cols + # ind1 = np.random.randint(N-1) + ind1 = rand() % (N-1) + if ind1 < N-1: + ind2 = ind1+1 + else: # this can never happen! + ind2 = 0 + delta_A = -2*D[p[ind1], p[ind2]] + if delta_A > 0.0 or exp(delta_A/T) > ((1.0*rand()) / RAND_MAX): + # swap indices + p[ind1], p[ind2] = p[ind2], p[ind1] + A += delta_A + succ_iter += 1 + iterations += 1 + total_iter += iterations + T *= alpha # cool down + if succ_iter == 0: + # no successful step -> we believe we have converged + break + + return p, A, total_iter diff --git a/pyspike/cython/directionality_python_backend.py b/pyspike/cython/directionality_python_backend.py new file mode 100644 index 0000000..c1d820b --- /dev/null +++ b/pyspike/cython/directionality_python_backend.py @@ -0,0 +1,144 @@ +""" directionality_python_backend.py + +Collection of python functions that can be used instead of the cython +implementation. 
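For reference, the annealing scheme above in plain NumPy: an illustrative sketch with the same adjacent-swap move and Metropolis acceptance, not part of the library API:

    import numpy as np

    def sim_ann(D, T_start, T_end, alpha, seed=None):
        # maximize A = sum of the upper triangle of D under a permutation p
        # by annealed swaps of adjacent entries, as in sim_ann_cython
        rng = np.random.default_rng(seed)
        N = len(D)
        p = np.arange(N)
        A = np.sum(np.triu(D, 0))
        T = T_start
        while T > T_end:
            iterations = succ = 0
            # equilibrate for 100*N steps or 10*N successful steps
            while iterations < 100 * N and succ < 10 * N:
                i = int(rng.integers(N - 1))
                dA = -2 * D[p[i], p[i + 1]]
                if dA > 0.0 or np.exp(dA / T) > rng.random():
                    p[i], p[i + 1] = p[i + 1], p[i]   # accept the swap
                    A += dA
                    succ += 1
                iterations += 1
            T *= alpha  # cool down
            if succ == 0:
                break  # no successful step -> converged
        return p, A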
+
+Copyright 2015, Mario Mulansky
+
+Distributed under the BSD License
+
+"""
+
+import numpy as np
+
+
+############################################################
+# spike_directionality_profile_python
+############################################################
+def spike_directionality_profile_python(spikes1, spikes2, t_start, t_end,
+                                        max_tau):
+
+    def get_tau(spikes1, spikes2, i, j, max_tau):
+        m = t_end - t_start   # use interval as initial tau
+        if i < len(spikes1)-1 and i > -1:
+            m = min(m, spikes1[i+1]-spikes1[i])
+        if j < len(spikes2)-1 and j > -1:
+            m = min(m, spikes2[j+1]-spikes2[j])
+        if i > 0:
+            m = min(m, spikes1[i]-spikes1[i-1])
+        if j > 0:
+            m = min(m, spikes2[j]-spikes2[j-1])
+        m *= 0.5
+        if max_tau > 0.0:
+            m = min(m, max_tau)
+        return m
+
+    N1 = len(spikes1)
+    N2 = len(spikes2)
+    i = -1
+    j = -1
+    d1 = np.zeros(N1)   # directionality values
+    d2 = np.zeros(N2)   # directionality values
+    while i + j < N1 + N2 - 2:
+        if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+            i += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            if j > -1 and spikes1[i]-spikes2[j] < tau:
+                # coincidence between the current spike and the previous spike
+                # spike in first spike train occurs after second
+                d1[i] = -1
+                d2[j] = +1
+        elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+            j += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            if i > -1 and spikes2[j]-spikes1[i] < tau:
+                # coincidence between the current spike and the previous spike
+                # spike in second spike train occurs after first
+                d1[i] = +1
+                d2[j] = -1
+        else:   # spikes1[i+1] == spikes2[j+1]
+            # advance in both spike trains
+            j += 1
+            i += 1
+            d1[i] = 0
+            d2[j] = 0
+
+    return d1, d2
+
+
+############################################################
+# spike_train_order_profile_python
+############################################################
+def spike_train_order_profile_python(spikes1, spikes2, t_start, t_end,
+                                     max_tau):
+
+    def get_tau(spikes1, spikes2, i, j, max_tau):
+        m = t_end - t_start   # use interval as initial tau
+        if i < len(spikes1)-1 and i > -1:
+            m = min(m, spikes1[i+1]-spikes1[i])
+        if j < len(spikes2)-1 and j > -1:
+            m = min(m, spikes2[j+1]-spikes2[j])
+        if i > 0:
+            m = min(m, spikes1[i]-spikes1[i-1])
+        if j > 0:
+            m = min(m, spikes2[j]-spikes2[j-1])
+        m *= 0.5
+        if max_tau > 0.0:
+            m = min(m, max_tau)
+        return m
+
+    N1 = len(spikes1)
+    N2 = len(spikes2)
+    i = -1
+    j = -1
+    n = 0
+    st = np.zeros(N1 + N2 + 2)  # spike times
+    a = np.zeros(N1 + N2 + 2)   # coincidences
+    mp = np.ones(N1 + N2 + 2)   # multiplicity
+    while i + j < N1 + N2 - 2:
+        if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+            i += 1
+            n += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            st[n] = spikes1[i]
+            if j > -1 and spikes1[i]-spikes2[j] < tau:
+                # coincidence between the current spike and the previous spike
+                # both get marked with -1
+                a[n] = -1
+                a[n-1] = -1
+        elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+            j += 1
+            n += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            st[n] = spikes2[j]
+            if i > -1 and spikes2[j]-spikes1[i] < tau:
+                # coincidence between the current spike and the previous spike
+                # both get marked with 1
+                a[n] = 1
+                a[n-1] = 1
+        else:   # spikes1[i+1] == spikes2[j+1]
+            # advance in both spike trains
+            j += 1
+            i += 1
+            n += 1
+            # add only one event with zero asymmetry value and multiplicity 2
+            st[n] = spikes1[i]
+            a[n] = 0
+            mp[n] = 2
+
+    st = st[:n+2]
+    a = a[:n+2]
+    mp = mp[:n+2]
+
+    st[0] = t_start
+    st[len(st)-1] = t_end
+    if N1 + N2 > 0:
+        a[0] = a[1]
+        a[len(a)-1] = a[len(a)-2]
+        mp[0] = mp[1]
+        mp[len(mp)-1] = mp[len(mp)-2]
+    else:
+        a[0] = 1
+        a[1] = 1
+
+    return st, a, mp
diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py
index 6b7209a..e75f181 100644
--- a/pyspike/cython/python_backend.py
+++ b/pyspike/cython/python_backend.py
@@ -3,7 +3,7 @@
 Collection of python functions that can be used instead of the cython
 implementation.
 
-Copyright 2014, Mario Mulansky
+Copyright 2014-2015, Mario Mulansky
 
 Distributed under the BSD License
 
@@ -356,26 +356,27 @@ def cumulative_sync_python(spikes1, spikes2):
     return st, c
 
 
+def get_tau(spikes1, spikes2, i, j, max_tau, init_tau):
+    m = init_tau
+    if i < len(spikes1)-1 and i > -1:
+        m = min(m, spikes1[i+1]-spikes1[i])
+    if j < len(spikes2)-1 and j > -1:
+        m = min(m, spikes2[j+1]-spikes2[j])
+    if i > 0:
+        m = min(m, spikes1[i]-spikes1[i-1])
+    if j > 0:
+        m = min(m, spikes2[j]-spikes2[j-1])
+    m *= 0.5
+    if max_tau > 0.0:
+        m = min(m, max_tau)
+    return m
+
+
 ############################################################
 # coincidence_python
 ############################################################
 def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
 
-    def get_tau(spikes1, spikes2, i, j, max_tau):
-        m = t_end - t_start   # use interval as initial tau
-        if i < len(spikes1)-1 and i > -1:
-            m = min(m, spikes1[i+1]-spikes1[i])
-        if j < len(spikes2)-1 and j > -1:
-            m = min(m, spikes2[j+1]-spikes2[j])
-        if i > 0:
-            m = min(m, spikes1[i]-spikes1[i-1])
-        if j > 0:
-            m = min(m, spikes2[j]-spikes2[j-1])
-        m *= 0.5
-        if max_tau > 0.0:
-            m = min(m, max_tau)
-        return m
-
     N1 = len(spikes1)
     N2 = len(spikes2)
     i = -1
@@ -388,7 +389,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
         if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
             i += 1
             n += 1
-            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
             st[n] = spikes1[i]
             if j > -1 and spikes1[i]-spikes2[j] < tau:
                 # coincidence between the current spike and the previous spike
@@ -398,7 +399,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
         elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
             j += 1
             n += 1
-            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
             st[n] = spikes2[j]
             if i > -1 and spikes2[j]-spikes1[i] < tau:
                 # coincidence between the current spike and the previous spike
@@ -433,6 +434,36 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
     return st, c, mp
 
 
+############################################################
+# coincidence_single_python
+############################################################
+def coincidence_single_python(spikes1, spikes2, t_start, t_end, max_tau):
+
+    N1 = len(spikes1)
+    N2 = len(spikes2)
+    j = -1
+    c = np.zeros(N1)  # coincidences
+    for i in range(N1):
+        while j < N2-1 and spikes2[j+1] < spikes1[i]:
+            # move forward until spikes2[j] is the last spike before spikes1[i]
+            # note that if spikes2[j] is after spikes1[i] we don't do anything
+            j += 1
+        tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
+        if j > -1 and abs(spikes1[i]-spikes2[j]) < tau:
+            # current spike in st1 is coincident
+            c[i] = 1
+        if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]):
+            # in case spikes2[j] is before spikes1[i] it has to be the first or
+            # the one right before (see above), hence we move one forward and
+            # also check the next spike
+            j += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
+            if abs(spikes2[j]-spikes1[i]) < tau:
+                # current spike in st1 is coincident
+                c[i] = 1
+    return c
+
+
 ############################################################
 # add_piece_wise_const_python
 ############################################################
diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py
new file mode 100644
index 0000000..248862c
--- /dev/null
+++ b/pyspike/spike_directionality.py
@@ -0,0 +1,522 @@
+# Module containing functions to compute the SPIKE directionality and the
+# spike train order profile
+# Copyright 2015, Mario Mulansky
+# Distributed under the BSD License
+
+from __future__ import absolute_import
+
+import numpy as np
+import pyspike
+from pyspike import DiscreteFunc
+from functools import partial
+from pyspike.generic import _generic_profile_multi
+
+
+############################################################
+# spike_directionality_values
+############################################################
+def spike_directionality_values(*args, **kwargs):
+    """ Computes the spike directionality value for each spike in
+    each spike train. Returns a list containing an array of spike
+    directionality values for every given spike train.
+
+    Valid call structures::
+
+      spike_directionality_values(st1, st2)  # returns the bi-variate profile
+      spike_directionality_values(st1, st2, st3)  # multi-variate profile of 3
+                                                  # spike trains
+
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_directionality_values(spike_trains)  # profile of the list of spike trains
+      spike_directionality_values(spike_trains, indices=[0, 1])  # use only the spike trains
+                                                                 # given by the indices
+
+    Additional arguments:
+    :param max_tau: Upper bound for coincidence window (default=None).
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+
+    :returns: The spike directionality values :math:`D^n_i` as a list of arrays.
+    """
+    if len(args) == 1:
+        return _spike_directionality_values_impl(args[0], **kwargs)
+    else:
+        return _spike_directionality_values_impl(args, **kwargs)
+
+
+def _spike_directionality_values_impl(spike_trains, indices=None,
+                                      interval=None, max_tau=None):
+    """ Computes the multi-variate spike directionality profile
+    of the given spike trains.
+
+    :param spike_trains: List of spike trains.
+    :type spike_trains: List of :class:`pyspike.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike-directionality values.
+    """
+    if interval is not None:
+        raise NotImplementedError("Parameter `interval` not supported.")
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # list of arrays for resulting asymmetry values
+    asymmetry_list = [np.zeros_like(spike_trains[n].spikes) for n in indices]
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    # cython implementation
+    try:
+        from .cython.cython_directionality import \
+            spike_directionality_profiles_cython as profile_impl
+    except ImportError:
+        if not(pyspike.disable_backend_warning):
+            print("Warning: cython_directionality not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+        # use python backend
+        from .cython.directionality_python_backend import \
+            spike_directionality_profile_python as profile_impl
+
+    if max_tau is None:
+        max_tau = 0.0
+
+    for i, j in pairs:
+        d1, d2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes,
+                              spike_trains[i].t_start, spike_trains[i].t_end,
+                              max_tau)
+        asymmetry_list[i] += d1
+        asymmetry_list[j] += d2
+    for a in asymmetry_list:
+        a /= len(spike_trains)-1
+    return asymmetry_list
+
+
+############################################################
+# spike_directionality
+############################################################
+def spike_directionality(spike_train1, spike_train2, normalize=True,
+                         interval=None, max_tau=None):
+    """ Computes the overall spike directionality of the first spike train with
+    respect to the second spike train.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The overall spike directionality value.
+    """
+    if interval is None:
+        # distance over the whole interval is requested: use specific function
+        # for optimal performance
+        try:
+            from .cython.cython_directionality import \
+                spike_directionality_cython as spike_directionality_impl
+            if max_tau is None:
+                max_tau = 0.0
+            d = spike_directionality_impl(spike_train1.spikes,
+                                          spike_train2.spikes,
+                                          spike_train1.t_start,
+                                          spike_train1.t_end,
+                                          max_tau)
+            c = len(spike_train1.spikes)
+        except ImportError:
+            if not(pyspike.disable_backend_warning):
+                print("Warning: cython_directionality not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+            # use profile.
+            d1, x = spike_directionality_values([spike_train1, spike_train2],
+                                                interval=interval,
+                                                max_tau=max_tau)
+            d = np.sum(d1)
+            c = len(spike_train1.spikes)
+        if normalize:
+            return 1.0*d/c
+        else:
+            return d
+    else:
+        # some specific interval is provided: not yet implemented
+        raise NotImplementedError("Parameter `interval` not supported.")
+
+
+############################################################
+# spike_directionality_matrix
+############################################################
+def spike_directionality_matrix(spike_trains, normalize=True, indices=None,
+                                interval=None, max_tau=None):
+    """ Computes the spike directionality matrix for the given spike trains.
+
+    :param spike_trains: List of spike trains.
+    :type spike_trains: List of :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike-directionality matrix.
+    """
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
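+    # note: the matrix is antisymmetric, D[j, i] = -D[i, j], so each
+    # unordered pair of spike trains only needs to be evaluated once below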
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    distance_matrix = np.zeros((len(indices), len(indices)))
+    for i, j in pairs:
+        d = spike_directionality(spike_trains[i], spike_trains[j], normalize,
+                                 interval, max_tau=max_tau)
+        distance_matrix[i, j] = d
+        distance_matrix[j, i] = -d
+    return distance_matrix
+
+
+############################################################
+# spike_train_order_profile
+############################################################
+def spike_train_order_profile(*args, **kwargs):
+    """ Computes the spike train order profile :math:`E(t)` of the given
+    spike trains. Returns the profile as a DiscreteFunction object.
+
+    Valid call structures::
+
+      spike_train_order_profile(st1, st2)  # returns the bi-variate profile
+      spike_train_order_profile(st1, st2, st3)  # multi-variate profile of 3
+                                                # spike trains
+
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_train_order_profile(spike_trains)  # profile of the list of spike trains
+      spike_train_order_profile(spike_trains, indices=[0, 1])  # use only the spike trains
+                                                               # given by the indices
+
+    Additional arguments:
+    :param max_tau: Upper bound for coincidence window, `default=None`.
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+
+    :returns: The spike train order profile :math:`E(t)`
+    :rtype: :class:`.DiscreteFunction`
+    """
+    if len(args) == 1:
+        return spike_train_order_profile_multi(args[0], **kwargs)
+    elif len(args) == 2:
+        return spike_train_order_profile_bi(args[0], args[1], **kwargs)
+    else:
+        return spike_train_order_profile_multi(args, **kwargs)
+
+
+############################################################
+# spike_train_order_profile_bi
+############################################################
+def spike_train_order_profile_bi(spike_train1, spike_train2, max_tau=None):
+    """ Computes the spike train order profile :math:`E(t)` of the two given
+    spike trains. Returns the profile as a DiscreteFunction object.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order profile :math:`E(t)`.
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+    """
+    # check whether the spike trains are defined for the same interval
+    assert spike_train1.t_start == spike_train2.t_start, \
+        "Given spike trains are not defined on the same interval!"
+    assert spike_train1.t_end == spike_train2.t_end, \
+        "Given spike trains are not defined on the same interval!"
+
+    # cython implementation
+    try:
+        from .cython.cython_directionality import \
+            spike_train_order_profile_cython as \
+            spike_train_order_profile_impl
+    except ImportError:
+        # raise NotImplementedError()
+        if not(pyspike.disable_backend_warning):
+            print("Warning: cython_directionality not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+        # use python backend
+        from .cython.directionality_python_backend import \
+            spike_train_order_profile_python as spike_train_order_profile_impl
+
+    if max_tau is None:
+        max_tau = 0.0
+
+    times, coincidences, multiplicity \
+        = spike_train_order_profile_impl(spike_train1.spikes,
+                                         spike_train2.spikes,
+                                         spike_train1.t_start,
+                                         spike_train1.t_end,
+                                         max_tau)
+
+    return DiscreteFunc(times, coincidences, multiplicity)
+
+
+############################################################
+# spike_train_order_profile_multi
+############################################################
+def spike_train_order_profile_multi(spike_trains, indices=None,
+                                    max_tau=None):
+    """ Computes the multi-variate spike train order profile for a set of
+    spike trains. For each spike in the set of spike trains, the multi-variate
+    profile is defined as the sum of asymmetry values divided by the number of
+    spike train pairs involving the spike train containing this spike,
+    which is the number of spike trains minus one (N-1).
+
+    :param spike_trains: list of :class:`pyspike.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The multi-variate spike train order profile :math:`E(t)`
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+    """
+    prof_func = partial(spike_train_order_profile_bi, max_tau=max_tau)
+    average_prof, M = _generic_profile_multi(spike_trains, prof_func,
+                                             indices)
+    return average_prof
+
+
+
+############################################################
+# _spike_train_order_impl
+############################################################
+def _spike_train_order_impl(spike_train1, spike_train2,
+                            interval=None, max_tau=None):
+    """ Implementation of the bi-variate spike train order value
+    (Synfire Indicator).
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    if interval is None:
+        # distance over the whole interval is requested: use specific function
+        # for optimal performance
+        try:
+            from .cython.cython_directionality import \
+                spike_train_order_cython as spike_train_order_func
+            if max_tau is None:
+                max_tau = 0.0
+            c, mp = spike_train_order_func(spike_train1.spikes,
+                                           spike_train2.spikes,
+                                           spike_train1.t_start,
+                                           spike_train1.t_end,
+                                           max_tau)
+        except ImportError:
+            # Cython backend not available: fall back to profile averaging
+            c, mp = spike_train_order_profile(spike_train1, spike_train2,
+                                              max_tau=max_tau).integral(interval)
+        return c, mp
+    else:
+        # some specific interval is provided: not yet implemented
+        raise NotImplementedError("Parameter `interval` not supported.")
+
+
+############################################################
+# spike_train_order
+############################################################
+def spike_train_order(*args, **kwargs):
+    """ Computes the spike train order (Synfire Indicator) of the given
+    spike trains.
+
+    Valid call structures::
+
+      spike_train_order(st1, st2, normalize=True)  # normalized bi-variate
+                                                   # spike train order
+      spike_train_order(st1, st2, st3)  # multi-variate result of 3 spike trains
+
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_train_order(spike_trains)  # result for the list of spike trains
+      spike_train_order(spike_trains, indices=[0, 1])  # use only the spike trains
+                                                       # given by the indices
+
+    Additional arguments:
+    - `max_tau` Upper bound for coincidence window, `default=None`.
+    - `normalize` Flag indicating if the result should be normalized by the
+      number of spikes (multiplicity), `default=True`.
+
+
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    if len(args) == 1:
+        return spike_train_order_multi(args[0], **kwargs)
+    elif len(args) == 2:
+        return spike_train_order_bi(args[0], args[1], **kwargs)
+    else:
+        return spike_train_order_multi(args, **kwargs)
+
+
+############################################################
+# spike_train_order_bi
+############################################################
+def spike_train_order_bi(spike_train1, spike_train2, normalize=True,
+                         interval=None, max_tau=None):
+    """ Computes the overall spike train order value (Synfire Indicator)
+    for two spike trains.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    c, mp = _spike_train_order_impl(spike_train1, spike_train2, interval, max_tau)
+    if normalize:
+        return 1.0*c/mp
+    else:
+        return c
+
+############################################################
+# spike_train_order_multi
+############################################################
+def spike_train_order_multi(spike_trains, indices=None, normalize=True,
+                            interval=None, max_tau=None):
+    """ Computes the overall spike train order value (Synfire Indicator)
+    for many spike trains.
+
+    :param spike_trains: list of :class:`.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param interval: averaging interval given as a pair of floats, if None
+                     the average over the whole function is computed.
+    :type interval: Pair of floats or None.
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order value (Synfire Indicator) F for the given spike trains.
+    :rtype: double
+    """
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
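+    # accumulate coincidence counts and multiplicities over all pairs below;
+    # the overall synfire indicator is the ratio of the two totals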
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    e_total = 0.0
+    m_total = 0.0
+    for (i, j) in pairs:
+        e, m = _spike_train_order_impl(spike_trains[i], spike_trains[j],
+                                       interval, max_tau)
+        e_total += e
+        m_total += m
+
+    if m_total == 0.0:
+        # no coincidences at all: return 1.0 to avoid division by zero
+        return 1.0
+    else:
+        return e_total/m_total
+
+
+
+############################################################
+# optimal_spike_train_sorting_from_matrix
+############################################################
+def _optimal_spike_train_sorting_from_matrix(D, full_output=False):
+    """ Finds the best sorting via simulated annealing.
+    Returns the optimal permutation p and the corresponding value A.
+    Not for direct use, call :func:`.optimal_spike_train_sorting` instead.
+
+    :param D: The directionality (Spike-ORDER) matrix.
+    :param full_output: If true, then function will additionally return the
+                        number of performed iterations (default=False)
+    :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+             if `full_output=True`, (p, F, iter) is returned.
+    """
+    N = len(D)
+    A = np.sum(np.triu(D, 0))
+
+    p = np.arange(N)
+
+    T_start = 2*np.max(D)    # starting temperature
+    T_end = 1E-5 * T_start   # final temperature
+    alpha = 0.9              # cooling factor
+
+    try:
+        from .cython.cython_simulated_annealing import sim_ann_cython as sim_ann
+    except ImportError:
+        raise NotImplementedError("PySpike with Cython required for computing spike train"
+                                  " sorting!")
+
+    p, A, total_iter = sim_ann(D, T_start, T_end, alpha)
+
+    if full_output:
+        return p, A, total_iter
+    else:
+        return p, A
+
+
+############################################################
+# optimal_spike_train_sorting
+############################################################
+def optimal_spike_train_sorting(spike_trains, indices=None, interval=None,
+                                max_tau=None, full_output=False):
+    """ Finds the best sorting of the given spike trains by computing the spike
+    directionality matrix and optimizing the order using simulated annealing.
+    For a detailed description of the algorithm see:
+    `http://iopscience.iop.org/article/10.1088/1367-2630/aa68c3/meta`
+
+    :param spike_trains: list of :class:`.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param interval: time interval filter given as a pair of floats, if None
+                     the full spike trains are used (default=None).
+    :type interval: Pair of floats or None.
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound (default=None).
+    :param full_output: If true, then function will additionally return the
+                        number of performed iterations (default=False)
+    :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+             if `full_output=True`, (p, F, iter) is returned.
+    """
+    D = spike_directionality_matrix(spike_trains, normalize=False,
+                                    indices=indices, interval=interval,
+                                    max_tau=max_tau)
+    return _optimal_spike_train_sorting_from_matrix(D, full_output)
+
+############################################################
+# permutate_matrix
+############################################################
+def permutate_matrix(D, p):
+    """ Helper function that applies the permutation p to the columns and rows
+    of matrix D. Returns the permuted matrix :math:`D'[n,m] = D[p[n], p[m]]`.
+
+    :param D: The matrix.
+    :param p: The permutation.
+    :return: The permuted matrix D', i.e. :math:`D'[n,m] = D[p[n], p[m]]`
+    """
+    N = len(D)
+    D_p = np.empty_like(D)
+    for n in range(N):
+        for m in range(N):
+            D_p[n, m] = D[p[n], p[m]]
+    return D_p
diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py
index 80f7805..95ef454 100644
--- a/pyspike/spike_sync.py
+++ b/pyspike/spike_sync.py
@@ -8,7 +8,7 @@ from __future__ import absolute_import
 import numpy as np
 from functools import partial
 import pyspike
-from pyspike import DiscreteFunc
+from pyspike import DiscreteFunc, SpikeTrain
 from pyspike.generic import _generic_profile_multi, _generic_distance_matrix
 
 
@@ -45,9 +45,9 @@ def spike_sync_profile(*args, **kwargs):
     if len(args) == 1:
         return spike_sync_profile_multi(args[0], **kwargs)
     elif len(args) == 2:
-        return spike_sync_profile_bi(args[0], args[1])
+        return spike_sync_profile_bi(args[0], args[1], **kwargs)
     else:
-        return spike_sync_profile_multi(args)
+        return spike_sync_profile_multi(args, **kwargs)
 
 
 @@ -290,3 +290,52 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None):
     dist_func = partial(spike_sync_bi, max_tau=max_tau)
     return _generic_distance_matrix(spike_trains, dist_func,
                                     indices, interval)
+
+
+############################################################
+# filter_by_spike_sync
+############################################################
+def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None,
+                         return_removed_spikes=False):
+    """ Removes all spikes with a multi-variate spike sync value below the
+    given threshold.
+    """
+    N = len(spike_trains)
+    filtered_spike_trains = []
+    removed_spike_trains = []
+
+    # cython implementation
+    try:
+        from .cython.cython_profiles import coincidence_single_profile_cython \
+            as coincidence_impl
+    except ImportError:
+        if not(pyspike.disable_backend_warning):
+            print("Warning: coincidence_single_profile_cython not found. 
Make \ +sure that PySpike is installed by running\n \ +'python setup.py build_ext --inplace'!\n \ +Falling back to slow python backend.") + # use python backend + from .cython.python_backend import coincidence_single_python \ + as coincidence_impl + + if max_tau is None: + max_tau = 0.0 + + for i, st in enumerate(spike_trains): + coincidences = np.zeros_like(st) + for j in range(N): + if i == j: + continue + coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes, + st.t_start, st.t_end, max_tau) + filtered_spikes = st[coincidences > threshold*(N-1)] + filtered_spike_trains.append(SpikeTrain(filtered_spikes, + [st.t_start, st.t_end])) + if return_removed_spikes: + removed_spikes = st[coincidences <= threshold*(N-1)] + removed_spike_trains.append(SpikeTrain(removed_spikes, + [st.t_start, st.t_end])) + if return_removed_spikes: + return [filtered_spike_trains, removed_spike_trains] + else: + return filtered_spike_trains diff --git a/setup.py b/setup.py index 5b9e677..b5b01a6 100644 --- a/setup.py +++ b/setup.py @@ -30,7 +30,9 @@ class numpy_include(object): if os.path.isfile("pyspike/cython/cython_add.c") and \ os.path.isfile("pyspike/cython/cython_profiles.c") and \ - os.path.isfile("pyspike/cython/cython_distances.c"): + os.path.isfile("pyspike/cython/cython_distances.c") and \ + os.path.isfile("pyspike/cython/cython_directionality.c") and \ + os.path.isfile("pyspike/cython/cython_simulated_annealing.c"): use_c = True else: use_c = False @@ -45,7 +47,11 @@ if use_cython: # Cython is available, compile .pyx -> .c Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.pyx"]), Extension("pyspike.cython.cython_distances", - ["pyspike/cython/cython_distances.pyx"]) + ["pyspike/cython/cython_distances.pyx"]), + Extension("pyspike.cython.cython_directionality", + ["pyspike/cython/cython_directionality.pyx"]), + Extension("pyspike.cython.cython_simulated_annealing", + ["pyspike/cython/cython_simulated_annealing.pyx"]) ] cmdclass.update({'build_ext': build_ext}) elif use_c: # c files are there, compile to binaries @@ -55,14 +61,18 @@ elif use_c: # c files are there, compile to binaries Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.c"]), Extension("pyspike.cython.cython_distances", - ["pyspike/cython/cython_distances.c"]) + ["pyspike/cython/cython_distances.c"]), + Extension("pyspike.cython.cython_directionality", + ["pyspike/cython/cython_directionality.c"]), + Extension("pyspike.cython.cython_simulated_annealing", + ["pyspike/cython/cython_simulated_annealing.c"]) ] # neither cython nor c files available -> automatic fall-back to python backend setup( name='pyspike', packages=find_packages(exclude=['doc']), - version='0.5.3', + version='0.6.0', cmdclass=cmdclass, ext_modules=ext_modules, include_dirs=[numpy_include()], @@ -90,11 +100,17 @@ train similarity', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6' - ] + ], + package_data={ + 'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c', + 'cython/cython_distances.c', + 'cython/cython_directionality.c', + 'cython/cython_simulated_annealing.c'], + 'test': ['Spike_testdata.txt'] + } ) diff --git a/test/test_directionality.py b/test/test_directionality.py new file mode 100644 index 0000000..c2e9bfe --- /dev/null +++ b/test/test_directionality.py @@ 
-0,0 +1,97 @@ +""" test_directionality.py + +Tests the directionality functions + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +import numpy as np +from numpy.testing import assert_equal, assert_almost_equal, \ + assert_array_equal + +import pyspike as spk +from pyspike import SpikeTrain, DiscreteFunc + + +def test_spike_directionality(): + st1 = SpikeTrain([100, 200, 300], [0, 1000]) + st2 = SpikeTrain([105, 205, 300], [0, 1000]) + assert_almost_equal(spk.spike_directionality(st1, st2), 2.0/3.0) + assert_almost_equal(spk.spike_directionality(st1, st2, normalize=False), + 2.0) + + # exchange order of spike trains should give exact negative profile + assert_almost_equal(spk.spike_directionality(st2, st1), -2.0/3.0) + assert_almost_equal(spk.spike_directionality(st2, st1, normalize=False), + -2.0) + + st3 = SpikeTrain([105, 195, 500], [0, 1000]) + assert_almost_equal(spk.spike_directionality(st1, st3), 0.0) + assert_almost_equal(spk.spike_directionality(st1, st3, normalize=False), + 0.0) + assert_almost_equal(spk.spike_directionality(st3, st1), 0.0) + + D = spk.spike_directionality_matrix([st1, st2, st3], normalize=False) + D_expected = np.array([[0, 2.0, 0.0], [-2.0, 0.0, -1.0], [0.0, 1.0, 0.0]]) + assert_array_equal(D, D_expected) + + dir_profs = spk.spike_directionality_values([st1, st2, st3]) + assert_array_equal(dir_profs[0], [1.0, 0.0, 0.0]) + assert_array_equal(dir_profs[1], [-0.5, -1.0, 0.0]) + + +def test_spike_train_order(): + st1 = SpikeTrain([100, 200, 300], [0, 1000]) + st2 = SpikeTrain([105, 205, 300], [0, 1000]) + st3 = SpikeTrain([105, 195, 500], [0, 1000]) + + expected_x12 = np.array([0, 100, 105, 200, 205, 300, 1000]) + expected_y12 = np.array([1, 1, 1, 1, 1, 0, 0]) + expected_mp12 = np.array([1, 1, 1, 1, 1, 2, 2]) + + f = spk.spike_train_order_profile(st1, st2) + + assert f.almost_equal(DiscreteFunc(expected_x12, expected_y12, + expected_mp12)) + assert_almost_equal(f.avrg(), 2.0/3.0) + assert_almost_equal(f.avrg(normalize=False), 4.0) + assert_almost_equal(spk.spike_train_order(st1, st2), 2.0/3.0) + assert_almost_equal(spk.spike_train_order(st1, st2, normalize=False), 4.0) + + expected_x23 = np.array([0, 105, 195, 205, 300, 500, 1000]) + expected_y23 = np.array([0, 0, -1, -1, 0, 0, 0]) + expected_mp23 = np.array([2, 2, 1, 1, 1, 1, 1]) + + f = spk.spike_train_order_profile(st2, st3) + + assert_array_equal(f.x, expected_x23) + assert_array_equal(f.y, expected_y23) + assert_array_equal(f.mp, expected_mp23) + assert f.almost_equal(DiscreteFunc(expected_x23, expected_y23, + expected_mp23)) + assert_almost_equal(f.avrg(), -1.0/3.0) + assert_almost_equal(f.avrg(normalize=False), -2.0) + assert_almost_equal(spk.spike_train_order(st2, st3), -1.0/3.0) + assert_almost_equal(spk.spike_train_order(st2, st3, normalize=False), -2.0) + + f = spk.spike_train_order_profile_multi([st1, st2, st3]) + + expected_x = np.array([0, 100, 105, 195, 200, 205, 300, 500, 1000]) + expected_y = np.array([2, 2, 2, -2, 0, 0, 0, 0, 0]) + expected_mp = np.array([2, 2, 4, 2, 2, 2, 4, 2, 2]) + + assert_array_equal(f.x, expected_x) + assert_array_equal(f.y, expected_y) + assert_array_equal(f.mp, expected_mp) + + # Averaging the profile should be the same as computing the synfire indicator directly. 
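+    # (avrg() of a DiscreteFunc is the multiplicity-weighted average of the
+    # coincidence values, so it should agree with the direct computation)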
+ assert_almost_equal(f.avrg(), spk.spike_train_order([st1, st2, st3])) + + # We can also compute the synfire indicator from the Directionality Matrix: + D_matrix = spk.spike_directionality_matrix([st1, st2, st3], normalize=False) + num_spikes = np.sum(len(st) for st in [st1, st2, st3]) + syn_fire = np.sum(np.triu(D_matrix)) / num_spikes + assert_almost_equal(f.avrg(), syn_fire) diff --git a/test/test_function.py b/test/test_function.py index 92d378d..6c04839 100644 --- a/test/test_function.py +++ b/test/test_function.py @@ -10,6 +10,7 @@ Distributed under the BSD License from __future__ import print_function import numpy as np from copy import copy +from nose.tools import raises from numpy.testing import assert_equal, assert_almost_equal, \ assert_array_equal, assert_array_almost_equal @@ -49,6 +50,8 @@ def test_pwc(): assert_almost_equal(a, (0.5-0.5+0.5*1.5+1.0*0.75)/3.0, decimal=16) a = f.avrg([1.5, 3.5]) assert_almost_equal(a, (-0.5*0.5+0.5*1.5+1.0*0.75)/2.0, decimal=16) + a = f.avrg([1.0, 2.0]) + assert_almost_equal(a, (1.0*-0.5)/1.0, decimal=16) a = f.avrg([1.0, 3.5]) assert_almost_equal(a, (-0.5*1.0+0.5*1.5+1.0*0.75)/2.5, decimal=16) a = f.avrg([1.0, 4.0]) @@ -120,6 +123,53 @@ def test_pwc_avrg(): assert_array_almost_equal(f1.x, x_expected, decimal=16) assert_array_almost_equal(f1.y, y_expected, decimal=16) +def test_pwc_integral(): + # some random data + x = [0.0, 1.0, 2.0, 2.5, 4.0] + y = [1.0, -0.5, 1.5, 0.75] + f1 = spk.PieceWiseConstFunc(x, y) + + # test full interval + full = 1.0*1.0 + 1.0*-0.5 + 0.5*1.5 + 1.5*0.75; + assert_equal(f1.integral(), full) + assert_equal(f1.integral((np.min(x),np.max(x))), full) + # test part interval, spanning an edge + assert_equal(f1.integral((0.5,1.5)), 0.5*1.0 + 0.5*-0.5) + # test part interval, just over two edges + assert_almost_equal(f1.integral((1.0-1e-16,2+1e-16)), 1.0*-0.5, decimal=14) + # test part interval, between two edges + assert_equal(f1.integral((1.0,2.0)), 1.0*-0.5) + assert_equal(f1.integral((1.2,1.7)), (1.7-1.2)*-0.5) + # test part interval, start to before and after edge + assert_equal(f1.integral((0.0,0.7)), 0.7*1.0) + assert_equal(f1.integral((0.0,1.1)), 1.0*1.0+0.1*-0.5) + # test part interval, before and after edge till end + assert_equal(f1.integral((2.6,4.0)), (4.0-2.6)*0.75) + assert_equal(f1.integral((2.4,4.0)), (2.5-2.4)*1.5+(4-2.5)*0.75) + +@raises(ValueError) +def test_pwc_integral_bad_bounds_inv(): + # some random data + x = [0.0, 1.0, 2.0, 2.5, 4.0] + y = [1.0, -0.5, 1.5, 0.75] + f1 = spk.PieceWiseConstFunc(x, y) + f1.integral((3,2)) + +@raises(ValueError) +def test_pwc_integral_bad_bounds_oob_1(): + # some random data + x = [0.0, 1.0, 2.0, 2.5, 4.0] + y = [1.0, -0.5, 1.5, 0.75] + f1 = spk.PieceWiseConstFunc(x, y) + f1.integral((1,6)) + +@raises(ValueError) +def test_pwc_integral_bad_bounds_oob_2(): + # some random data + x = [0.0, 1.0, 2.0, 2.5, 4.0] + y = [1.0, -0.5, 1.5, 0.75] + f1 = spk.PieceWiseConstFunc(x, y) + f1.integral((-1,3)) def test_pwl(): x = [0.0, 1.0, 2.0, 2.5, 4.0] @@ -162,6 +212,18 @@ def test_pwl(): a = f.avrg([1.0, 4.0]) assert_almost_equal(a, (-0.45 + 0.75 + 1.5*0.5) / 3.0, decimal=16) + # interval between support points + a = f.avrg([1.1, 1.5]) + assert_almost_equal(a, (-0.5+0.1*0.1 - 0.45) * 0.5, decimal=14) + + # starting at a support point + a = f.avrg([1.0, 1.5]) + assert_almost_equal(a, (-0.5 - 0.45) * 0.5, decimal=14) + + # start and end at support point + a = f.avrg([1.0, 2.0]) + assert_almost_equal(a, (-0.5 - 0.4) * 0.5, decimal=14) + # averaging over multiple intervals a = 
f.avrg([(0.5, 1.5), (1.5, 2.5)])
     assert_almost_equal(a, (1.375*0.5 - 0.45 + 0.75)/2.0, decimal=16)
diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py
new file mode 100644
index 0000000..e259903
--- /dev/null
+++ b/test/test_sync_filter.py
@@ -0,0 +1,95 @@
+""" test_sync_filter.py
+
+Tests the spike sync based filtering
+
+Copyright 2015, Mario Mulansky
+
+Distributed under the BSD License
+
+"""
+
+from __future__ import print_function
+import numpy as np
+from numpy.testing import assert_equal, assert_almost_equal, \
+    assert_array_almost_equal
+
+import pyspike as spk
+from pyspike import SpikeTrain
+
+
+def test_single_prof():
+    st1 = np.array([1.0, 2.0, 3.0, 4.0])
+    st2 = np.array([1.1, 2.1, 3.8])
+    st3 = np.array([0.9, 3.1, 4.1])
+
+    # cython implementation
+    try:
+        from pyspike.cython.cython_profiles import \
+            coincidence_single_profile_cython as coincidence_impl
+    except ImportError:
+        from pyspike.cython.python_backend import \
+            coincidence_single_python as coincidence_impl
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st2, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+    print(coincidences)
+    for i, t in enumerate(st1):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    coincidences = np.array(coincidence_impl(st2, st1, 0, 5.0, 0.0))
+    for i, t in enumerate(st2):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st3, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st3, 0, 5.0, 0.0))
+    for i, t in enumerate(st1):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    st1 = np.array([1.0, 2.0, 3.0, 4.0])
+    st2 = np.array([1.0, 2.0, 4.0])
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st2, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+    for i, t in enumerate(st1):
+        expected = sync_prof.y[sync_prof.x == t]/sync_prof.mp[sync_prof.x == t]
+        assert_equal(coincidences[i], expected,
+                     "At index %d" % i)
+
+
+def test_filter():
+    st1 = SpikeTrain(np.array([1.0, 2.0, 3.0, 4.0]), 5.0)
+    st2 = SpikeTrain(np.array([1.1, 2.1, 3.8]), 5.0)
+    st3 = SpikeTrain(np.array([0.9, 3.1, 4.1]), 5.0)
+
+    # filtered_spike_trains = spk.filter_by_spike_sync([st1, st2], 0.5)
+
+    # assert_equal(filtered_spike_trains[0].spikes, [1.0, 2.0, 4.0])
+    # assert_equal(filtered_spike_trains[1].spikes, [1.1, 2.1, 3.8])
+
+    # filtered_spike_trains = spk.filter_by_spike_sync([st2, st1], 0.5)
+
+    # assert_equal(filtered_spike_trains[0].spikes, [1.1, 2.1, 3.8])
+    # assert_equal(filtered_spike_trains[1].spikes, [1.0, 2.0, 4.0])
+
+    filtered_spike_trains = spk.filter_by_spike_sync([st1, st2, st3], 0.75)
+
+    for st in filtered_spike_trains:
+        print(st.spikes)
+
+    assert_equal(filtered_spike_trains[0].spikes, [1.0, 4.0])
+    assert_equal(filtered_spike_trains[1].spikes, [1.1, 3.8])
+    assert_equal(filtered_spike_trains[2].spikes, [0.9, 4.1])
+
+
+if __name__ == "__main__":
+    test_single_prof()
+    test_filter()
--
cgit v1.2.3