From f9529c78538882879a07cb67e342eade8d2153ab Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 15 Sep 2014 17:01:13 +0200 Subject: isi distance and basic example --- pyspike/__init__.py | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 pyspike/__init__.py (limited to 'pyspike/__init__.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py new file mode 100644 index 0000000..6651eb5 --- /dev/null +++ b/pyspike/__init__.py @@ -0,0 +1,5 @@ +__all__ = ["function", "distances", "spikes"] + +from function import PieceWiseConstFunc +from distances import isi_distance +from spikes import spike_train_from_string -- cgit v1.2.3 From 5ea0fc218bb3bb30b1c40dd20e2e35a8bd11151c Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 15 Sep 2014 17:31:55 +0200 Subject: +merge_spike_trains --- examples/test_merge.py | 23 +++++++++++++++++++++++ pyspike/__init__.py | 2 +- pyspike/distances.py | 10 ---------- pyspike/spikes.py | 26 +++++++++++++++++++++++++- 4 files changed, 49 insertions(+), 12 deletions(-) create mode 100644 examples/test_merge.py (limited to 'pyspike/__init__.py') diff --git a/examples/test_merge.py b/examples/test_merge.py new file mode 100644 index 0000000..1186062 --- /dev/null +++ b/examples/test_merge.py @@ -0,0 +1,23 @@ +# compute the isi distance of some test data +from __future__ import print_function + +import numpy as np +import matplotlib.pyplot as plt + +import pyspike as spk + +# first load the data +spike_trains = [] +spike_file = open("SPIKY_testdata.txt", 'r') +for line in spike_file: + spike_trains.append(spk.spike_train_from_string(line)) + +spikes = spk.merge_spike_trains([spike_trains[0], spike_trains[1]]) + +print(spikes) + +plt.plot(spike_trains[0], np.ones_like(spike_trains[0]), 'o') +plt.plot(spike_trains[1], np.ones_like(spike_trains[1]), 'x') +plt.plot(spikes, 2*np.ones_like(spikes), 'o') + +plt.show() diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 6651eb5..6895bd8 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -2,4 +2,4 @@ __all__ = ["function", "distances", "spikes"] from function import PieceWiseConstFunc from distances import isi_distance -from spikes import spike_train_from_string +from spikes import spike_train_from_string, merge_spike_trains diff --git a/pyspike/distances.py b/pyspike/distances.py index d9790dc..7044a52 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -9,16 +9,6 @@ import numpy as np from pyspike import PieceWiseConstFunc -def spike_train_from_string(s, sep=' '): - """ Converts a string of times into a SpikeTrain object. - Params: - - s: the string with (ordered) spike times - - sep: The separator between the time numbers. - Returns: - - array of spike times - """ - return np.fromstring(s, sep=sep) - def isi_distance(spikes1, spikes2, T_end, T_start=0.0): """ Computes the instantaneous isi-distance S_isi (t) of the two given spike trains. diff --git a/pyspike/spikes.py b/pyspike/spikes.py index 42b6501..66ef554 100644 --- a/pyspike/spikes.py +++ b/pyspike/spikes.py @@ -8,7 +8,7 @@ Copyright 2014, Mario Mulansky import numpy as np def spike_train_from_string(s, sep=' '): - """ Converts a string of times into a SpikeTrain object. + """ Converts a string of times into an array of spike times. Params: - s: the string with (ordered) spike times - sep: The separator between the time numbers. 
@@ -16,3 +16,27 @@ def spike_train_from_string(s, sep=' '): - array of spike times """ return np.fromstring(s, sep=sep) + + +def merge_spike_trains( spike_trains ): + """ Merges a number of spike trains into a single spike train. + Params: + - spike_trains: list of arrays of spike times + Returns: + - array with the merged spike times + """ + # get the lengths of the spike trains + lens = np.array([len(st) for st in spike_trains]) + merged_spikes = np.empty(np.sum(lens)) + index = 0 + indices = np.zeros_like(lens) + vals = [spike_trains[i][indices[i]] for i in xrange(len(indices))] + while len(indices) > 0: + i = np.argmin(vals) + merged_spikes[index] = vals[i] + index += 1 + indices[i] += 1 + if indices[i] >= lens[i]: + indices = np.delete(indices, i) + vals = [spike_trains[i][indices[i]] for i in xrange(len(indices))] + return merged_spikes -- cgit v1.2.3 From b20d416c4765b2280526c633ca62f43677b1d26a Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Thu, 18 Sep 2014 17:09:58 +0200 Subject: add spike-distance, PWL function (probably buggy) --- examples/test_data.py | 12 +++++-- pyspike/__init__.py | 4 +-- pyspike/distances.py | 99 +++++++++++++++++++++++++++++++++++++++++++++++++-- pyspike/function.py | 33 +++++++++++++++-- 4 files changed, 139 insertions(+), 9 deletions(-) (limited to 'pyspike/__init__.py') diff --git a/examples/test_data.py b/examples/test_data.py index bddcb15..94e7d51 100644 --- a/examples/test_data.py +++ b/examples/test_data.py @@ -16,13 +16,21 @@ for line in spike_file: for (i,spikes) in enumerate(spike_trains): plt.plot(spikes, i*np.ones_like(spikes), 'o') -f = spk.isi_distance(spike_trains[0], spike_trains[1], 4000) +f = spk.isi_distance(spike_trains[0], spike_trains[10], 4000) x, y = f.get_plottable_data() plt.figure() -plt.plot(x, y, '-k') +plt.plot(x, np.abs(y), '--k') print("Average: %.8f" % f.avrg()) print("Absolute average: %.8f" % f.abs_avrg()) + +f = spk.spike_distance(spike_trains[0], spike_trains[10], 4000) +x, y = f.get_plottable_data() +print(x) +print(y) +#plt.figure() +plt.plot(x, y, '-b') + plt.show() diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 6895bd8..a5f146a 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -1,5 +1,5 @@ __all__ = ["function", "distances", "spikes"] -from function import PieceWiseConstFunc -from distances import isi_distance +from function import PieceWiseConstFunc, PieceWiseLinFunc +from distances import isi_distance, spike_distance from spikes import spike_train_from_string, merge_spike_trains diff --git a/pyspike/distances.py b/pyspike/distances.py index f4989c8..2ea80e7 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -7,11 +7,11 @@ Copyright 2014, Mario Mulansky import numpy as np -from pyspike import PieceWiseConstFunc +from pyspike import PieceWiseConstFunc, PieceWiseLinFunc def isi_distance(spikes1, spikes2, T_end, T_start=0.0): - """ Computes the instantaneous isi-distance S_isi (t) of the two given spike - trains. + """ Computes the instantaneous isi-distance S_isi (t) of the two given + spike trains. Args: - spikes1, spikes2: ordered arrays of spike times. - T_end: end time of the observation interval. 
@@ -50,6 +50,7 @@ def isi_distance(spikes1, spikes2, T_end, T_start=0.0): # check which spike is next - from s1 or s2 if s1[index1+1] <= s2[index2+1]: index1 += 1 + # break condition relies on existence of spikes at T_end if index1 >= len(nu1): break spike_events[index] = s1[index1] @@ -63,3 +64,95 @@ def isi_distance(spikes1, spikes2, T_end, T_start=0.0): max(nu1[index1], nu2[index2]) index += 1 return PieceWiseConstFunc(spike_events, isi_values) + + +def get_min_dist(spike_time, spike_train, start_index=0): + """ Returns the minimal distance |spike_time - spike_train[i]| + with i>=start_index + """ + d = abs(spike_time - spike_train[start_index]) + start_index += 1 + while start_index < len(spike_train): + d_temp = abs(spike_time - spike_train[start_index]) + if d_temp > d: + break + else: + d = d_temp + start_index += 1 + return d + + +def spike_distance(spikes1, spikes2, T_end, T_start=0.0): + """ Computes the instantaneous spike-distance S_spike (t) of the two given + spike trains. + Args: + - spikes1, spikes2: ordered arrays of spike times. + - T_end: end time of the observation interval. + - T_start: begin of the observation interval (default=0.0). + Returns: + - PieceWiseLinFunc describing the spike-distance. + """ + # add spikes at the beginning and end of the interval + t1 = np.empty(len(spikes1)+2) + t1[0] = T_start + t1[-1] = T_end + t1[1:-1] = spikes1 + t2 = np.empty(len(spikes2)+2) + t2[0] = T_start + t2[-1] = T_end + t2[1:-1] = spikes2 + + spike_events = np.empty(len(t1)+len(t2)-2) + spike_events[0] = T_start + spike_events[-1] = T_end + y_starts = np.empty(len(spike_events)-1) + y_starts[0] = 0.0 + y_ends = np.empty(len(spike_events)-1) + + index1 = 0 + index2 = 0 + index = 1 + dt_p1 = 0.0 + dt_f1 = get_min_dist(t1[1], t2, 0) + dt_p2 = 0.0 + dt_f2 = get_min_dist(t2[1], t1, 0) + isi1 = t1[1]-t1[0] + isi2 = t2[1]-t2[0] + while True: + print(index, index1, index2) + if t1[index1+1] < t2[index2+1]: + index1 += 1 + # break condition relies on existence of spikes at T_end + if index1+1 >= len(t1): + break + spike_events[index] = t1[index1] + # first calculate the previous interval end value + dt_p1 = dt_f1 # the previous time now was the following time before + s1 = dt_p1 + s2 = (dt_p2*(t2[index2+1]-t1[index1]) + dt_f2*(t1[index1]-t2[index2])) / isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + # now the next interval start value + dt_f1 = get_min_dist(t1[index1+1], t2, index2) + s1 = dt_f1 + isi1 = t1[index1+1]-t1[index1] + # s2 is the same as above, thus we can compute y2 immediately + y_starts[index] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + else: + index2 += 1 + if index2+1 >= len(t2): + break + spike_events[index] = t2[index2] + # first calculate the previous interval end value + dt_p2 = dt_f2 # the previous time now was the following time before + s1 = (dt_p1*(t1[index1+1]-t2[index2]) + dt_f1*(t2[index2]-t1[index1])) / isi1 + s2 = dt_p2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + # now the next interval start value + dt_f2 = get_min_dist(t2[index2+1], t1, index1) + s2 = dt_f2 + isi2 = t2[index2+1]-t2[index2] + # s2 is the same as above, thus we can compute y2 immediately + y_starts[index] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + index += 1 + + return PieceWiseLinFunc(spike_events, y_starts, y_ends) diff --git a/pyspike/function.py b/pyspike/function.py index c1de9cb..adf4dbb 100644 --- a/pyspike/function.py +++ b/pyspike/function.py @@ -44,8 +44,6 @@ class PieceWiseConstFunc: Returns: - the average a. 
""" - print(self.x) - print(self.y) return np.sum((self.x[1:]-self.x[:-1]) * self.y) / \ (self.x[-1]-self.x[0]) @@ -58,3 +56,34 @@ class PieceWiseConstFunc: """ return np.sum((self.x[1:]-self.x[:-1]) * np.abs(self.y)) / \ (self.x[-1]-self.x[0]) + + +class PieceWiseLinFunc: + """ A class representing a piece-wise linear function. """ + + def __init__(self, x, y1, y2): + """ Constructs the piece-wise linear function. + Params: + - x: array of length N+1 defining the edges of the intervals of the pwc + function. + - y1: array of length N defining the function values at the left of the + intervals. + - y2: array of length N defining the function values at the right of the + intervals. + """ + self.x = x + self.y1 = y1 + self.y2 = y2 + + def get_plottable_data(self): + """ Returns two arrays containing x- and y-coordinates for immeditate + plotting of the piece-wise function. + """ + x_plot = np.empty(2*len(self.x)-2) + x_plot[0] = self.x[0] + x_plot[1::2] = self.x[1:] + x_plot[2::2] = self.x[1:-1] + y_plot = np.empty_like(x_plot) + y_plot[0::2] = self.y1 + y_plot[1::2] = self.y2 + return x_plot, y_plot -- cgit v1.2.3 From 375e210d2a54bcff345495d9bb6dc90534d94bfb Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Fri, 26 Sep 2014 16:17:18 +0200 Subject: + add_auxiliary_spikes function incl test --- pyspike/__init__.py | 2 +- pyspike/distances.py | 108 ++++++++++++++++++++++++++++++-------------------- test/test_distance.py | 43 ++++++++++++++------ 3 files changed, 98 insertions(+), 55 deletions(-) (limited to 'pyspike/__init__.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index a5f146a..1784037 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -1,5 +1,5 @@ __all__ = ["function", "distances", "spikes"] from function import PieceWiseConstFunc, PieceWiseLinFunc -from distances import isi_distance, spike_distance +from distances import add_auxiliary_spikes, isi_distance, spike_distance from spikes import spike_train_from_string, merge_spike_trains diff --git a/pyspike/distances.py b/pyspike/distances.py index a9a2cc8..10b1d3c 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -9,25 +9,45 @@ import numpy as np from pyspike import PieceWiseConstFunc, PieceWiseLinFunc -def isi_distance(spikes1, spikes2, T_end, T_start=0.0): +def add_auxiliary_spikes( spike_train, T_end , T_start=0.0): + """ Adds spikes at the beginning (T_start) and end (T_end) of the + observation interval. + Args: + - spike_train: ordered array of spike times + - T_end: end time of the observation interval + - T_start: start time of the observation interval (default 0.0) + Returns: + - spike train with additional spikes at T_start and T_end. + """ + assert spike_train[0] >= T_start, \ + "Spike train has events before the given start time" + assert spike_train[-1] <= T_end, \ + "Spike train has events after the given end time" + if spike_train[0] != T_start: + spike_train = np.insert(spike_train, 0, T_start) + if spike_train[-1] != T_end: + spike_train = np.append(spike_train, T_end) + return spike_train + +def isi_distance(spikes1, spikes2): """ Computes the instantaneous isi-distance S_isi (t) of the two given - spike trains. + spike trains. The spike trains are expected to have auxiliary spikes at the + beginning and end of the interval. Use the function add_auxiliary_spikes to + add those spikes to the spike train. Args: - - spikes1, spikes2: ordered arrays of spike times. - - T_end: end time of the observation interval. - - T_start: begin of the observation interval (default=0.0). 
+ - spikes1, spikes2: ordered arrays of spike times with auxiliary spikes. Returns: - PieceWiseConstFunc describing the isi-distance. """ - # add spikes at the beginning and end of the interval - s1 = np.empty(len(spikes1)+2) - s1[0] = T_start - s1[-1] = T_end - s1[1:-1] = spikes1 - s2 = np.empty(len(spikes2)+2) - s2[0] = T_start - s2[-1] = T_end - s2[1:-1] = spikes2 + # check for auxiliary spikes - first and last spikes should be identical + assert spikes1[0]==spikes2[0], \ + "Given spike trains seems not to have auxiliary spikes!" + assert spikes1[-1]==spikes2[-1], \ + "Given spike trains seems not to have auxiliary spikes!" + + # shorter names + s1 = spikes1 + s2 = spikes2 # compute the interspike interval nu1 = s1[1:]-s1[:-1] @@ -35,7 +55,7 @@ def isi_distance(spikes1, spikes2, T_end, T_start=0.0): # compute the isi-distance spike_events = np.empty(len(nu1)+len(nu2)) - spike_events[0] = T_start + spike_events[0] = s1[0] # the values have one entry less - the number of intervals between events isi_values = np.empty(len(spike_events)-1) # add the distance of the first events @@ -69,7 +89,7 @@ def isi_distance(spikes1, spikes2, T_end, T_start=0.0): max(nu1[index1], nu2[index2]) index += 1 # the last event is the interval end - spike_events[index] = T_end + spike_events[index] = s1[-1] # use only the data added above # could be less than original length due to equal spike times return PieceWiseConstFunc(spike_events[:index+1], isi_values[:index]) @@ -77,7 +97,7 @@ def isi_distance(spikes1, spikes2, T_end, T_start=0.0): def get_min_dist(spike_time, spike_train, start_index=0): """ Returns the minimal distance |spike_time - spike_train[i]| - with i>=start_index + with i>=start_index. """ d = abs(spike_time - spike_train[start_index]) start_index += 1 @@ -91,31 +111,28 @@ def get_min_dist(spike_time, spike_train, start_index=0): return d -def spike_distance(spikes1, spikes2, T_end, T_start=0.0): +def spike_distance(spikes1, spikes2): """ Computes the instantaneous spike-distance S_spike (t) of the two given - spike trains. + spike trains. The spike trains are expected to have auxiliary spikes at the + beginning and end of the interval. Use the function add_auxiliary_spikes to + add those spikes to the spike train. Args: - - spikes1, spikes2: ordered arrays of spike times. - - T_end: end time of the observation interval. - - T_start: begin of the observation interval (default=0.0). + - spikes1, spikes2: ordered arrays of spike times with auxiliary spikes. Returns: - PieceWiseLinFunc describing the spike-distance. """ - # add spikes at the beginning and end of the interval - t1 = np.empty(len(spikes1)+2) - t1[0] = T_start - t1[-1] = T_end - t1[1:-1] = spikes1 - t2 = np.empty(len(spikes2)+2) - t2[0] = T_start - t2[-1] = T_end - t2[1:-1] = spikes2 + # check for auxiliary spikes - first and last spikes should be identical + assert spikes1[0]==spikes2[0], \ + "Given spike trains seems not to have auxiliary spikes!" + assert spikes1[-1]==spikes2[-1], \ + "Given spike trains seems not to have auxiliary spikes!" 
+ # shorter variables + t1 = spikes1 + t2 = spikes2 spike_events = np.empty(len(t1)+len(t2)-2) - spike_events[0] = T_start - spike_events[-1] = T_end + spike_events[0] = t1[0] y_starts = np.empty(len(spike_events)-1) - y_starts[0] = 0.0 y_ends = np.empty(len(spike_events)-1) index1 = 0 @@ -125,10 +142,13 @@ def spike_distance(spikes1, spikes2, T_end, T_start=0.0): dt_f1 = get_min_dist(t1[1], t2, 0) dt_p2 = 0.0 dt_f2 = get_min_dist(t2[1], t1, 0) - isi1 = t1[1]-t1[0] - isi2 = t2[1]-t2[0] + isi1 = max(t1[1]-t1[0], t1[2]-t1[1]) + isi2 = max(t2[1]-t2[0], t2[2]-t2[1]) + s1 = dt_f1*(t1[1]-t1[0])/isi1 + s2 = dt_f2*(t2[1]-t2[0])/isi2 + y_starts[0] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) while True: - print(index, index1, index2) + # print(index, index1, index2) if t1[index1+1] < t2[index2+1]: index1 += 1 # break condition relies on existence of spikes at T_end @@ -166,8 +186,8 @@ def spike_distance(spikes1, spikes2, T_end, T_start=0.0): index2 += 1 if (index1+1 >= len(t1)) or (index2+1 >= len(t2)): break - assert( dt_f2 == 0.0 ) - assert( dt_f1 == 0.0 ) + assert dt_f2 == 0.0 + assert dt_f1 == 0.0 spike_events[index] = t1[index1] y_ends[index-1] = 0.0 y_starts[index] = 0.0 @@ -179,9 +199,13 @@ def spike_distance(spikes1, spikes2, T_end, T_start=0.0): isi2 = t2[index2+1]-t2[index2] index += 1 # the last event is the interval end - spike_events[index] = T_end - # the ending value of the last interval is 0 - y_ends[index-1] = 0.0 + spike_events[index] = t1[-1] + # the ending value of the last interval + isi1 = max(t1[-1]-t1[-2], t1[-2]-t1[-3]) + isi2 = max(t2[-1]-t2[-2], t2[-2]-t2[-3]) + s1 = dt_p1*(t1[-1]-t1[-2])/isi1 + s2 = dt_p2*(t2[-1]-t2[-2])/isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) # use only the data added above # could be less than original length due to equal spike times return PieceWiseLinFunc(spike_events[:index+1], diff --git a/test/test_distance.py b/test/test_distance.py index 17ca14a..35bdf85 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -11,6 +11,13 @@ from numpy.testing import assert_equal, assert_array_almost_equal import pyspike as spk +def test_auxiliary_spikes(): + t = np.array([0.2, 0.4, 0.6, 0.7]) + t_aux = spk.add_auxiliary_spikes(t, T_end=1.0, T_start=0.1) + assert_equal(t_aux, [0.1, 0.2, 0.4, 0.6, 0.7, 1.0]) + t_aux = spk.add_auxiliary_spikes(t_aux, 1.0) + assert_equal(t_aux, [0.0, 0.1, 0.2, 0.4, 0.6, 0.7, 1.0]) + def test_isi(): # generate two spike trains: t1 = np.array([0.2, 0.4, 0.6, 0.7]) @@ -21,7 +28,11 @@ def test_isi(): expected_isi = [-0.1/0.3, -0.1/0.3, 0.05/0.2, 0.05/0.2, -0.15/0.35, -0.25/0.35, -0.05/0.35, 0.2/0.3, 0.25/0.3, 0.25/0.3] - f = spk.isi_distance(t1, t2, 1.0) + t1 = spk.add_auxiliary_spikes(t1, 1.0) + t2 = spk.add_auxiliary_spikes(t2, 1.0) + f = spk.isi_distance(t1, t2) + + print("ISI: ", f.y) assert_equal(f.x, expected_times) assert_array_almost_equal(f.y, expected_isi, decimal=14) @@ -33,7 +44,9 @@ def test_isi(): expected_times = [0.0,0.1,0.2,0.4,0.5,0.6,1.0] expected_isi = [0.1/0.2, -0.1/0.3, -0.1/0.3, 0.1/0.2, 0.1/0.2, -0.0/0.5] - f = spk.isi_distance(t1, t2, 1.0) + t1 = spk.add_auxiliary_spikes(t1, 1.0) + t2 = spk.add_auxiliary_spikes(t2, 1.0) + f = spk.isi_distance(t1, t2) assert_equal(f.x, expected_times) assert_array_almost_equal(f.y, expected_isi, decimal=14) @@ -46,16 +59,19 @@ def test_spike(): # pen&paper calculation of the spike distance expected_times = [0.0,0.2,0.3,0.4,0.45,0.6,0.7,0.8,0.9,0.95,1.0] - s1 = np.array([0.0, 0.1, (0.1*0.1+0.05*0.1)/0.2, 0.05, (0.05*0.15 * 2)/0.2, - 0.15, 0.1, 
0.1*0.2/0.3, 0.1**2/0.3, 0.1*0.05/0.3, 0.0]) - s2 = np.array([0.0, 0.1*0.2/0.3, 0.1, (0.1*0.05 * 2)/.15, 0.05, - (0.05*0.2+0.1*0.15)/0.35, (0.05*0.1+0.1*0.25)/0.35, 0.1,0.1,0.05,0.0]) + s1 = np.array([0.1, 0.1, (0.1*0.1+0.05*0.1)/0.2, 0.05, (0.05*0.15 * 2)/0.2, + 0.15, 0.1, 0.1*0.2/0.3, 0.1**2/0.3, 0.1*0.05/0.3, 0.1]) + s2 = np.array([0.1, 0.1*0.2/0.3, 0.1, (0.1*0.05 * 2)/.15, 0.05, + (0.05*0.2+0.1*0.15)/0.35, (0.05*0.1+0.1*0.25)/0.35, + 0.1, 0.1, 0.05, 0.05]) isi1 = np.array([0.2, 0.2, 0.2, 0.2, 0.2, 0.1, 0.3, 0.3, 0.3, 0.3]) isi2 = np.array([0.3, 0.3, 0.15, 0.15, 0.35, 0.35, 0.35, 0.1, 0.05, 0.05]) expected_y1 = (s1[:-1]*isi2+s2[:-1]*isi1) / (0.5*(isi1+isi2)**2) expected_y2 = (s1[1:]*isi2+s2[1:]*isi1) / (0.5*(isi1+isi2)**2) - f = spk.spike_distance(t1, t2, 1.0) + t1 = spk.add_auxiliary_spikes(t1, 1.0) + t2 = spk.add_auxiliary_spikes(t2, 1.0) + f = spk.spike_distance(t1, t2) assert_equal(f.x, expected_times) assert_array_almost_equal(f.y1, expected_y1, decimal=14) @@ -66,20 +82,23 @@ def test_spike(): t2 = np.array([0.1,0.4,0.5,0.6]) expected_times = [0.0,0.1,0.2,0.4,0.5,0.6,1.0] - s1 = np.array([0.0, 0.1*0.1/0.2, 0.1, 0.0, 0.0, 0.0, 0.0]) - s2 = np.array([0.0, 0.1, 0.1*0.2/0.3, 0.0, 0.1, 0.0, 0.0]) + s1 = np.array([0.1, 0.1*0.1/0.2, 0.1, 0.0, 0.0, 0.0, 0.0]) + s2 = np.array([0.1*0.1/0.3, 0.1, 0.1*0.2/0.3, 0.0, 0.1, 0.0, 0.0]) isi1 = np.array([0.2, 0.2, 0.2, 0.2, 0.2, 0.4]) - isi2 = np.array([0.1, 0.3, 0.3, 0.1, 0.1, 0.4]) + isi2 = np.array([0.3, 0.3, 0.3, 0.1, 0.1, 0.4]) expected_y1 = (s1[:-1]*isi2+s2[:-1]*isi1) / (0.5*(isi1+isi2)**2) expected_y2 = (s1[1:]*isi2+s2[1:]*isi1) / (0.5*(isi1+isi2)**2) - f = spk.spike_distance(t1, t2, 1.0) + t1 = spk.add_auxiliary_spikes(t1, 1.0) + t2 = spk.add_auxiliary_spikes(t2, 1.0) + f = spk.spike_distance(t1, t2) assert_equal(f.x, expected_times) assert_array_almost_equal(f.y1, expected_y1, decimal=14) assert_array_almost_equal(f.y2, expected_y2, decimal=14) -if __name__ == "main": +if __name__ == "__main__": + test_auxiliary_spikes() test_isi() test_spike() -- cgit v1.2.3 From b726773a29f85d465ff71867fab4fa5b8e5bcfe1 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 29 Sep 2014 16:08:45 +0200 Subject: + multivariate distances --- pyspike/__init__.py | 3 +- pyspike/distances.py | 76 +++++++++++++++++++++++++++++++++++++++++++++++++++ pyspike/function.py | 40 +++++++++++++++++++++++---- test/test_distance.py | 47 ++++++++++++++++++++++++++++++- 4 files changed, 158 insertions(+), 8 deletions(-) (limited to 'pyspike/__init__.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 1784037..2143bdc 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -1,5 +1,6 @@ __all__ = ["function", "distances", "spikes"] from function import PieceWiseConstFunc, PieceWiseLinFunc -from distances import add_auxiliary_spikes, isi_distance, spike_distance +from distances import add_auxiliary_spikes, isi_distance, spike_distance, \ + isi_distance_multi, spike_distance_multi from spikes import spike_train_from_string, merge_spike_trains diff --git a/pyspike/distances.py b/pyspike/distances.py index f4be625..52c6640 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -224,3 +224,79 @@ def spike_distance(spikes1, spikes2): # could be less than original length due to equal spike times return PieceWiseLinFunc(spike_events[:index+1], y_starts[:index], y_ends[:index]) + + + + +############################################################ +# multi_distance +############################################################ +def 
multi_distance(spike_trains, pair_distance_func, indices=None): + """ Internal implementation detail, use isi_distance_multi or + spike_distance_multi. + + Computes the multi-variate distance for a set of spike-trains using the + pair_distance_func to compute pair-wise distances. That is, it computes the + average distance of all pairs of spike-trains: + S(t) = 2/(N(N-1)) sum_{<i,j>} S_{i,j}, + where the sum goes over all pairs <i,j>. + Args: + - spike_trains: list of spike trains + - pair_distance_func: function computing the distance of two spike trains + - indices: list of indices defining which spike trains to use, + if None all given spike trains are used (default=None) + Returns: + - The averaged multi-variate distance of all pairs + """ + if indices==None: + indices = np.arange(len(spike_trains)) + indices = np.array(indices) + # check validity of indices + assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \ + "Invalid index list." + # generate a list of possible index pairs + pairs = [(i,j) for i in indices for j in indices[i+1:]] + # start with first pair + (i,j) = pairs[0] + average_dist = pair_distance_func(spike_trains[i], spike_trains[j]) + for (i,j) in pairs[1:]: + current_dist = pair_distance_func(spike_trains[i], spike_trains[j]) + average_dist.add(current_dist) # add to the average + average_dist.mul_scalar(1.0/len(pairs)) # normalize + return average_dist + + +############################################################ +# isi_distance_multi +############################################################ +def isi_distance_multi(spike_trains, indices=None): + """ computes the multi-variate isi-distance for a set of spike-trains. That + is the average isi-distance of all pairs of spike-trains: + S(t) = 2/(N(N-1)) sum_{<i,j>} S_{i,j}, + where the sum goes over all pairs <i,j> + Args: + - spike_trains: list of spike trains + - indices: list of indices defining which spike trains to use, + if None all given spike trains are used (default=None) + Returns: + - A PieceWiseConstFunc representing the averaged isi distance S + """ + return multi_distance(spike_trains, isi_distance, indices) + + +############################################################ +# spike_distance_multi +############################################################ +def spike_distance_multi(spike_trains, indices=None): + """ computes the multi-variate spike-distance for a set of spike-trains. + That is the average spike-distance of all pairs of spike-trains: + S(t) = 2/(N(N-1)) sum_{<i,j>} S_{i,j}, + where the sum goes over all pairs <i,j> + Args: + - spike_trains: list of spike trains + - indices: list of indices defining which spike trains to use, + if None all given spike trains are used (default=None) + Returns: + - A PieceWiseLinFunc representing the averaged spike distance S + """ + return multi_distance(spike_trains, spike_distance, indices) diff --git a/pyspike/function.py b/pyspike/function.py index 3a5a01c..26ca4b2 100644 --- a/pyspike/function.py +++ b/pyspike/function.py @@ -10,6 +10,7 @@ from __future__ import print_function import numpy as np + ############################################################## # PieceWiseConstFunc ############################################################## @@ -18,7 +19,7 @@ class PieceWiseConstFunc: def __init__(self, x, y): """ Constructs the piece-wise const function. - Params: + Args: - x: array of length N+1 defining the edges of the intervals of the pwc function. - y: array of length N defining the function values at the intervals.
@@ -26,6 +27,19 @@ class PieceWiseConstFunc: self.x = np.array(x) self.y = np.array(y) + def almost_equal(self, other, decimal=14): + """ Checks if the function is equal to another function up to `decimal` + precision. + Args: + - other: another PieceWiseConstFunc object + Returns: + True if the two functions are equal up to `decimal` decimals, + False otherwise + """ + eps = 10.0**(-decimal) + return np.allclose(self.x, other.x, atol=eps, rtol=0.0) and \ + np.allclose(self.y, other.y, atol=eps, rtol=0.0) + def get_plottable_data(self): """ Returns two arrays containing x- and y-coordinates for immeditate plotting of the piece-wise function. @@ -63,7 +77,7 @@ class PieceWiseConstFunc: def add(self, f): """ Adds another PieceWiseConst function to this function. Note: only functions defined on the same interval can be summed. - Params: + Args: - f: PieceWiseConst function to be added. """ assert self.x[0] == f.x[0], "The functions have different intervals" @@ -111,7 +125,7 @@ class PieceWiseConstFunc: def mul_scalar(self, fac): """ Multiplies the function with a scalar value - Params: + Args: - fac: Value to multiply """ self.y *= fac @@ -125,7 +139,7 @@ class PieceWiseLinFunc: def __init__(self, x, y1, y2): """ Constructs the piece-wise linear function. - Params: + Args: - x: array of length N+1 defining the edges of the intervals of the pwc function. - y1: array of length N defining the function values at the left of the @@ -137,6 +151,20 @@ class PieceWiseLinFunc: self.y1 = np.array(y1) self.y2 = np.array(y2) + def almost_equal(self, other, decimal=14): + """ Checks if the function is equal to another function up to `decimal` + precision. + Args: + - other: another PieceWiseLinFunc object + Returns: + True if the two functions are equal up to `decimal` decimals, + False otherwise + """ + eps = 10.0**(-decimal) + return np.allclose(self.x, other.x, atol=eps, rtol=0.0) and \ + np.allclose(self.y1, other.y1, atol=eps, rtol=0.0) and \ + np.allclose(self.y2, other.y2, atol=eps, rtol=0.0) + def get_plottable_data(self): """ Returns two arrays containing x- and y-coordinates for immeditate plotting of the piece-wise function. @@ -171,7 +199,7 @@ class PieceWiseLinFunc: def add(self, f): """ Adds another PieceWiseLin function to this function. Note: only functions defined on the same interval can be summed. - Params: + Args: - f: PieceWiseLin function to be added. 
""" assert self.x[0] == f.x[0], "The functions have different intervals" @@ -246,7 +274,7 @@ class PieceWiseLinFunc: def mul_scalar(self, fac): """ Multiplies the function with a scalar value - Params: + Args: - fac: Value to multiply """ self.y1 *= fac diff --git a/test/test_distance.py b/test/test_distance.py index 35bdf85..c43f0b3 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -7,10 +7,12 @@ Copyright 2014, Mario Mulansky from __future__ import print_function import numpy as np +from copy import copy from numpy.testing import assert_equal, assert_array_almost_equal import pyspike as spk + def test_auxiliary_spikes(): t = np.array([0.2, 0.4, 0.6, 0.7]) t_aux = spk.add_auxiliary_spikes(t, T_end=1.0, T_start=0.1) @@ -18,6 +20,7 @@ def test_auxiliary_spikes(): t_aux = spk.add_auxiliary_spikes(t_aux, 1.0) assert_equal(t_aux, [0.0, 0.1, 0.2, 0.4, 0.6, 0.7, 1.0]) + def test_isi(): # generate two spike trains: t1 = np.array([0.2, 0.4, 0.6, 0.7]) @@ -32,7 +35,7 @@ def test_isi(): t2 = spk.add_auxiliary_spikes(t2, 1.0) f = spk.isi_distance(t1, t2) - print("ISI: ", f.y) + # print("ISI: ", f.y) assert_equal(f.x, expected_times) assert_array_almost_equal(f.y, expected_isi, decimal=14) @@ -98,6 +101,48 @@ def test_spike(): assert_array_almost_equal(f.y2, expected_y2, decimal=14) +def check_multi_distance(dist_func, dist_func_multi): + # generate spike trains: + t1 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6, 0.7]), 1.0) + t2 = spk.add_auxiliary_spikes(np.array([0.3, 0.45, 0.8, 0.9, 0.95]), 1.0) + t3 = spk.add_auxiliary_spikes(np.array([0.2,0.4,0.6]), 1.0) + t4 = spk.add_auxiliary_spikes(np.array([0.1,0.4,0.5,0.6]), 1.0) + spike_trains = [t1, t2, t3, t4] + + f12 = dist_func(t1, t2) + f13 = dist_func(t1, t3) + f14 = dist_func(t1, t4) + f23 = dist_func(t2, t3) + f24 = dist_func(t2, t4) + f34 = dist_func(t3, t4) + + f_multi = dist_func_multi(spike_trains, [0,1]) + assert f_multi.almost_equal(f12, decimal=14) + + f = copy(f12) + f.add(f13) + f.add(f23) + f.mul_scalar(1.0/3) + f_multi = dist_func_multi(spike_trains, [0,1,2]) + assert f_multi.almost_equal(f, decimal=14) + + f.mul_scalar(3) # revert above normalization + f.add(f14) + f.add(f24) + f.add(f34) + f.mul_scalar(1.0/6) + f_multi = dist_func_multi(spike_trains) + assert f_multi.almost_equal(f, decimal=14) + + +def test_multi_isi(): + check_multi_distance(spk.isi_distance, spk.isi_distance_multi) + + +def test_multi_spike(): + check_multi_distance(spk.spike_distance, spk.spike_distance_multi) + + if __name__ == "__main__": test_auxiliary_spikes() test_isi() -- cgit v1.2.3 From 7bbbf06c23e8eb727f45dc47d6613fb7d03f4c8f Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Thu, 2 Oct 2014 14:30:02 +0200 Subject: +isi distance matrix with examples --- examples/isi_matrix.py | 21 +++++++++++++++++++++ pyspike/__init__.py | 2 +- pyspike/cython_distance.pyx | 5 +++-- pyspike/distances.py | 28 +++++++++++++++++++++++++++- 4 files changed, 52 insertions(+), 4 deletions(-) create mode 100644 examples/isi_matrix.py (limited to 'pyspike/__init__.py') diff --git a/examples/isi_matrix.py b/examples/isi_matrix.py new file mode 100644 index 0000000..a149cd6 --- /dev/null +++ b/examples/isi_matrix.py @@ -0,0 +1,21 @@ +from __future__ import print_function + +import numpy as np +import matplotlib.pyplot as plt + +import pyspike as spk + +# first load the data +spike_trains = [] +spike_file = open("SPIKY_testdata.txt", 'r') +for line in spike_file: + spike_trains.append(spk.add_auxiliary_spikes( + spk.spike_train_from_string(line), 4000)) + 
+print(len(spike_trains)) + +m = spk.isi_distance_matrix(spike_trains) + +plt.imshow(m, interpolation='none') +plt.show() + diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 2143bdc..21005e9 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -2,5 +2,5 @@ __all__ = ["function", "distances", "spikes"] from function import PieceWiseConstFunc, PieceWiseLinFunc from distances import add_auxiliary_spikes, isi_distance, spike_distance, \ - isi_distance_multi, spike_distance_multi + isi_distance_multi, spike_distance_multi, isi_distance_matrix from spikes import spike_train_from_string, merge_spike_trains diff --git a/pyspike/cython_distance.pyx b/pyspike/cython_distance.pyx index 23ffc37..2be8525 100644 --- a/pyspike/cython_distance.pyx +++ b/pyspike/cython_distance.pyx @@ -29,6 +29,7 @@ import numpy as np cimport numpy as np from libc.math cimport fabs +from libc.math cimport fmax DTYPE = np.float ctypedef np.float_t DTYPE_t @@ -56,7 +57,7 @@ def isi_distance_cython(double[:] s1, isi_values = np.empty(N1+N2-1) with nogil: # release the interpreter to allow multithreading - isi_values[0] = (nu1-nu2)/max(nu1,nu2) + isi_values[0] = (nu1-nu2)/fmax(nu1,nu2) index1 = 0 index2 = 0 index = 1 @@ -84,7 +85,7 @@ def isi_distance_cython(double[:] s1, nu1 = s1[index1+1]-s1[index1] nu2 = s2[index2+1]-s2[index2] # compute the corresponding isi-distance - isi_values[index] = (nu1 - nu2) / max(nu1, nu2) + isi_values[index] = (nu1 - nu2) / fmax(nu1, nu2) index += 1 # the last event is the interval end spike_events[index] = s1[N1] diff --git a/pyspike/distances.py b/pyspike/distances.py index 35650f7..f78c0d4 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -206,10 +206,36 @@ def spike_distance_multi(spike_trains, indices=None): where the sum goes over all pairs <i,j> Args: - spike_trains: list of spike trains - - indices: list of indices defining which spike trains to use, + - indices: list of indices defining which spike-trains to use, if None all given spike trains are used (default=None) Returns: - A PieceWiseLinFunc representing the averaged spike distance S """ return multi_distance(spike_trains, spike_distance, indices) + +def isi_distance_matrix(spike_trains, indices=None): + """ Computes the average isi-distance of all pairs of spike-trains. + Args: + - spike_trains: list of spike trains + - indices: list of indices defining which spike-trains to use + if None all given spike-trains are used (default=None) + Returns: + - a 2D array of size len(indices)*len(indices) containing the average + pair-wise isi-distance + """ + if indices==None: + indices = np.arange(len(spike_trains)) + indices = np.array(indices) + # check validity of indices + assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \ + "Invalid index list."
+ # generate a list of possible index pairs + pairs = [(i,j) for i in indices for j in indices[i+1:]] + + distance_matrix = np.zeros((len(indices), len(indices))) + for i,j in pairs: + d = isi_distance(spike_trains[i], spike_trains[j]).abs_avrg() + distance_matrix[i,j] = d + distance_matrix[j,i] = d + return distance_matrix -- cgit v1.2.3 From a769a03d089ac0c61e2155239a28665c9316e14a Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Fri, 10 Oct 2014 17:04:04 +0200 Subject: added load_txt function, some restructuring --- Readme.md | 2 +- examples/SPIKY_testdata.txt | 3 ++ examples/test_data.py | 11 +++--- pyspike/__init__.py | 5 +-- pyspike/distances.py | 25 -------------- pyspike/spikes.py | 72 ++++++++++++++++++++++++++++++++++++-- test/SPIKY_testdata.txt | 3 ++ test/test_distance.py | 24 +++++-------- test/test_merge_spikes.py | 49 -------------------------- test/test_spikes.py | 84 +++++++++++++++++++++++++++++++++++++++++++++ 10 files changed, 176 insertions(+), 102 deletions(-) delete mode 100644 test/test_merge_spikes.py create mode 100644 test/test_spikes.py (limited to 'pyspike/__init__.py') diff --git a/Readme.md b/Readme.md index 368eef4..8b84ebd 100644 --- a/Readme.md +++ b/Readme.md @@ -1,7 +1,7 @@ # PySpike PySpike is a Python library for numerical analysis of spike train similarity. -Its core functionality are the implementation of the bivariate [ISI and SPIKE distance](http://www.scholarpedia.org/article/Measures_of_spike_train_synchrony). +Its core functionality is the implementation of the bivariate [ISI and SPIKE distance](http://www.scholarpedia.org/article/Measures_of_spike_train_synchrony). Additionally, it allows to compute multi-variate spike train distances, averaging and general spike train processing. All source codes are published under the liberal [MIT License](http://opensource.org/licenses/MIT). 
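To illustrate the functionality summarized in the Readme hunk above, a minimal usage sketch of the API as of this point in the series (a sketch only, not part of the patches; it assumes the SPIKY_testdata.txt file shipped in the examples directory and the observation interval (0, 4000) used throughout the examples):

    import matplotlib.pyplot as plt
    import pyspike as spk

    # passing a time_interval adds the auxiliary spikes at t=0 and t=4000
    # that isi_distance and spike_distance expect
    spike_trains = spk.load_spike_trains_from_txt("SPIKY_testdata.txt",
                                                  time_interval=(0, 4000))

    # bivariate isi-distance profile of the first two trains and its average
    f = spk.isi_distance(spike_trains[0], spike_trains[1])
    print("Average: %.8f" % f.avrg())

    # pair-wise averaged isi-distances of all trains as a symmetric matrix
    m = spk.isi_distance_matrix(spike_trains)
    plt.imshow(m, interpolation='none')
    plt.show()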
diff --git a/examples/SPIKY_testdata.txt b/examples/SPIKY_testdata.txt index 8fa3fcf..c8bea67 100755 --- a/examples/SPIKY_testdata.txt +++ b/examples/SPIKY_testdata.txt @@ -1,7 +1,10 @@ 64.886 305.81 696 937.77 1059.7 1322.2 1576.1 1808.1 2121.5 2381.1 2728.6 2966.9 3223.7 3473.7 3644.3 3936.3 65.553 307.49 696.63 948.66 1070.4 1312.2 1712.7 1934.3 2117.6 2356.9 2727.3 2980.6 3226.9 3475.7 3726.4 3944 +# test comment 69.064 319.1 688.32 947.85 1071.8 1300.8 1697.2 1930.6 2139.4 2354.2 2723.7 2963.6 3221.3 3470.1 59.955 313.83 692.23 955.95 1070.4 1319.6 1681.9 1963.5 2151.4 2373.8 2729.4 2971.2 3220.2 3475.5 3632.3 3788.9 +# empty line + 59.977 306.84 686.09 935.08 1059.9 1325.9 1543.4 1821.9 2150.2 2390.4 2724.5 2969.6 3222.5 3471.5 3576 3913.9 66.415 313.41 688.83 931.43 1051.8 1304.6 1555.6 1820.2 2150.5 2383.1 2723.4 2947.7 3196.6 3443.5 3575 3804.9 66.449 311.02 689.26 947.12 1058.9 1286.6 1708.2 1957.3 2124.8 2375.7 2709.4 2977.6 3191.1 3449.6 3590.4 3831.2 diff --git a/examples/test_data.py b/examples/test_data.py index ff7b510..dcd0f20 100644 --- a/examples/test_data.py +++ b/examples/test_data.py @@ -7,17 +7,14 @@ import matplotlib.pyplot as plt import pyspike as spk -# first load the data -spike_trains = [] -spike_file = open("SPIKY_testdata.txt", 'r') -for line in spike_file: - spike_trains.append(spk.spike_train_from_string(line)) +spike_trains = spk.load_spike_trains_from_txt("SPIKY_testdata.txt", + time_interval=(0,4000)) # plot the spike time for (i,spikes) in enumerate(spike_trains): plt.plot(spikes, i*np.ones_like(spikes), 'o') -f = spk.isi_distance(spike_trains[0], spike_trains[1], 4000) +f = spk.isi_distance(spike_trains[0], spike_trains[1]) x, y = f.get_plottable_data() plt.figure() @@ -27,7 +24,7 @@ print("Average: %.8f" % f.avrg()) print("Absolute average: %.8f" % f.abs_avrg()) -f = spk.spike_distance(spike_trains[0], spike_trains[1], 4000) +f = spk.spike_distance(spike_trains[0], spike_trains[1]) x, y = f.get_plottable_data() print(x) print(y) diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 21005e9..2703f65 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -1,6 +1,7 @@ __all__ = ["function", "distances", "spikes"] from function import PieceWiseConstFunc, PieceWiseLinFunc -from distances import add_auxiliary_spikes, isi_distance, spike_distance, \ +from distances import isi_distance, spike_distance, \ isi_distance_multi, spike_distance_multi, isi_distance_matrix -from spikes import spike_train_from_string, merge_spike_trains +from spikes import add_auxiliary_spikes, load_spike_trains_from_txt, \ + spike_train_from_string, merge_spike_trains diff --git a/pyspike/distances.py b/pyspike/distances.py index f78c0d4..da603ad 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -11,31 +11,6 @@ import threading from pyspike import PieceWiseConstFunc, PieceWiseLinFunc -############################################################ -# add_auxiliary_spikes -############################################################ -def add_auxiliary_spikes( spike_train, T_end , T_start=0.0): - """ Adds spikes at the beginning (T_start) and end (T_end) of the - observation interval. - Args: - - spike_train: ordered array of spike times - - T_end: end time of the observation interval - - T_start: start time of the observation interval (default 0.0) - Returns: - - spike train with additional spikes at T_start and T_end. 
- - """ - assert spike_train[0] >= T_start, \ - "Spike train has events before the given start time" - assert spike_train[-1] <= T_end, \ - "Spike train has events after the given end time" - if spike_train[0] != T_start: - spike_train = np.insert(spike_train, 0, T_start) - if spike_train[-1] != T_end: - spike_train = np.append(spike_train, T_end) - return spike_train - - ############################################################ # isi_distance ############################################################ diff --git a/pyspike/spikes.py b/pyspike/spikes.py index 70b48ff..502c460 100644 --- a/pyspike/spikes.py +++ b/pyspike/spikes.py @@ -7,12 +7,46 @@ Copyright 2014, Mario Mulansky import numpy as np + +############################################################ +# add_auxiliary_spikes +############################################################ +def add_auxiliary_spikes(spike_train, time_interval): + """ Adds spikes at the beginning and end of the given time interval. + Args: + - spike_train: ordered array of spike times + - time_interval: A pair (T_start, T_end) of values representing the start + and end time of the spike train measurement or a single value representing + the end time, the T_start is then assumed as 0. Auxiliary spikes will be + added to the spike train at the beginning and end of this interval. + Returns: + - spike train with additional spikes at T_start and T_end. + + """ + try: + T_start = time_interval[0] + T_end = time_interval[1] + except: + T_start = 0 + T_end = time_interval + + assert spike_train[0] >= T_start, \ + "Spike train has events before the given start time" + assert spike_train[-1] <= T_end, \ + "Spike train has events after the given end time" + if spike_train[0] != T_start: + spike_train = np.insert(spike_train, 0, T_start) + if spike_train[-1] != T_end: + spike_train = np.append(spike_train, T_end) + return spike_train + + ############################################################ # spike_train_from_string ############################################################ def spike_train_from_string(s, sep=' '): """ Converts a string of times into an array of spike times. - Params: + Args: - s: the string with (ordered) spike times - sep: The separator between the time numbers. Returns: @@ -21,12 +55,46 @@ def spike_train_from_string(s, sep=' '): return np.fromstring(s, sep=sep) + +############################################################ +# load_spike_trains_txt +############################################################ +def load_spike_trains_from_txt(file_name, time_interval=None, + separator=' ', comment='#'): + """ Loads a number of spike trains from a text file. Each line of the text + file should contain one spike train as a sequence of spike times separated + by `separator`. Empty lines as well as lines starting with `comment` are + ignored. The `time_interval` represents the start and the end of the spike + trains and it is used to add auxiliary spikes at the beginning and end of + each spike train. However, if `time_interval == None`, no auxiliary spikes + are added, but note that the Spike and ISI distance both require auxiliary + spikes. + Args: + - file_name: The name of the text file. + - time_interval: A pair (T_start, T_end) of values representing the start + and end time of the spike train measurement or a single value representing + the end time, the T_start is then assumed as 0. Auxiliary spikes will be + added to the spike train at the beginning and end of this interval.
+ - separator: The character used to seprate the values in the text file. + - comment: Lines starting with this character are ignored. + """ + spike_trains = [] + spike_file = open(file_name, 'r') + for line in spike_file: + if len(line) > 1 and not line.startswith(comment): + # use only the lines with actual data and not commented + spike_train = spike_train_from_string(line) + if not time_interval == None: # add auxiliary spikes if times given + spike_train = add_auxiliary_spikes(spike_train, time_interval) + spike_trains.append(spike_train) + return spike_trains + + ############################################################ # merge_spike_trains ############################################################ def merge_spike_trains(spike_trains): """ Merges a number of spike trains into a single spike train. - Params: + Args: - spike_trains: list of arrays of spike times Returns: - array with the merged spike times diff --git a/test/SPIKY_testdata.txt b/test/SPIKY_testdata.txt index 8fa3fcf..c8bea67 100755 --- a/test/SPIKY_testdata.txt +++ b/test/SPIKY_testdata.txt @@ -1,7 +1,10 @@ 64.886 305.81 696 937.77 1059.7 1322.2 1576.1 1808.1 2121.5 2381.1 2728.6 2966.9 3223.7 3473.7 3644.3 3936.3 65.553 307.49 696.63 948.66 1070.4 1312.2 1712.7 1934.3 2117.6 2356.9 2727.3 2980.6 3226.9 3475.7 3726.4 3944 +# test comment 69.064 319.1 688.32 947.85 1071.8 1300.8 1697.2 1930.6 2139.4 2354.2 2723.7 2963.6 3221.3 3470.1 59.955 313.83 692.23 955.95 1070.4 1319.6 1681.9 1963.5 2151.4 2373.8 2729.4 2971.2 3220.2 3475.5 3632.3 3788.9 +# empty line + 59.977 306.84 686.09 935.08 1059.9 1325.9 1543.4 1821.9 2150.2 2390.4 2724.5 2969.6 3222.5 3471.5 3576 3913.9 66.415 313.41 688.83 931.43 1051.8 1304.6 1555.6 1820.2 2150.5 2383.1 2723.4 2947.7 3196.6 3443.5 3575 3804.9 66.449 311.02 689.26 947.12 1058.9 1286.6 1708.2 1957.3 2124.8 2375.7 2709.4 2977.6 3191.1 3449.6 3590.4 3831.2 diff --git a/test/test_distance.py b/test/test_distance.py index c43f0b3..92b99ae 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -13,14 +13,6 @@ from numpy.testing import assert_equal, assert_array_almost_equal import pyspike as spk -def test_auxiliary_spikes(): - t = np.array([0.2, 0.4, 0.6, 0.7]) - t_aux = spk.add_auxiliary_spikes(t, T_end=1.0, T_start=0.1) - assert_equal(t_aux, [0.1, 0.2, 0.4, 0.6, 0.7, 1.0]) - t_aux = spk.add_auxiliary_spikes(t_aux, 1.0) - assert_equal(t_aux, [0.0, 0.1, 0.2, 0.4, 0.6, 0.7, 1.0]) - - def test_isi(): # generate two spike trains: t1 = np.array([0.2, 0.4, 0.6, 0.7]) @@ -31,8 +23,8 @@ def test_isi(): expected_isi = [-0.1/0.3, -0.1/0.3, 0.05/0.2, 0.05/0.2, -0.15/0.35, -0.25/0.35, -0.05/0.35, 0.2/0.3, 0.25/0.3, 0.25/0.3] - t1 = spk.add_auxiliary_spikes(t1, 1.0) - t2 = spk.add_auxiliary_spikes(t2, 1.0) + t1 = spk.add_auxiliary_spikes(t1, (0.0,1.0)) + t2 = spk.add_auxiliary_spikes(t2, (0.0,1.0)) f = spk.isi_distance(t1, t2) # print("ISI: ", f.y) @@ -47,8 +39,8 @@ def test_isi(): expected_times = [0.0,0.1,0.2,0.4,0.5,0.6,1.0] expected_isi = [0.1/0.2, -0.1/0.3, -0.1/0.3, 0.1/0.2, 0.1/0.2, -0.0/0.5] - t1 = spk.add_auxiliary_spikes(t1, 1.0) - t2 = spk.add_auxiliary_spikes(t2, 1.0) + t1 = spk.add_auxiliary_spikes(t1, (0.0,1.0)) + t2 = spk.add_auxiliary_spikes(t2, (0.0,1.0)) f = spk.isi_distance(t1, t2) assert_equal(f.x, expected_times) @@ -72,8 +64,8 @@ def test_spike(): expected_y1 = (s1[:-1]*isi2+s2[:-1]*isi1) / (0.5*(isi1+isi2)**2) expected_y2 = (s1[1:]*isi2+s2[1:]*isi1) / (0.5*(isi1+isi2)**2) - t1 = spk.add_auxiliary_spikes(t1, 1.0) - t2 = spk.add_auxiliary_spikes(t2, 1.0) + t1 = 
spk.add_auxiliary_spikes(t1, (0.0,1.0)) + t2 = spk.add_auxiliary_spikes(t2, (0.0,1.0)) f = spk.spike_distance(t1, t2) assert_equal(f.x, expected_times) @@ -92,8 +84,8 @@ def test_spike(): expected_y1 = (s1[:-1]*isi2+s2[:-1]*isi1) / (0.5*(isi1+isi2)**2) expected_y2 = (s1[1:]*isi2+s2[1:]*isi1) / (0.5*(isi1+isi2)**2) - t1 = spk.add_auxiliary_spikes(t1, 1.0) - t2 = spk.add_auxiliary_spikes(t2, 1.0) + t1 = spk.add_auxiliary_spikes(t1, (0.0,1.0)) + t2 = spk.add_auxiliary_spikes(t2, (0.0,1.0)) f = spk.spike_distance(t1, t2) assert_equal(f.x, expected_times) diff --git a/test/test_merge_spikes.py b/test/test_merge_spikes.py deleted file mode 100644 index 3162700..0000000 --- a/test/test_merge_spikes.py +++ /dev/null @@ -1,49 +0,0 @@ -""" test_merge_spikes.py - -Tests merging spikes - -Copyright 2014, Mario Mulansky -""" -from __future__ import print_function -import numpy as np - -import pyspike as spk - -def check_merged_spikes( merged_spikes, spike_trains ): - # create a flat array with all spike events - all_spikes = np.array([]) - for spike_train in spike_trains: - all_spikes = np.append(all_spikes, spike_train) - indices = np.zeros_like(all_spikes, dtype='bool') - # check if we find all the spike events in the original spike trains - for x in merged_spikes: - i = np.where(all_spikes == x)[0][0] # the first axis and the first entry - # change to something impossible so we dont find this event again - all_spikes[i] = -1.0 - indices[i] = True - assert( indices.all() ) - -def test_merge_spike_trains(): - - # first load the data - spike_trains = [] - spike_file = open("SPIKY_testdata.txt", 'r') - for line in spike_file: - spike_trains.append(spk.spike_train_from_string(line)) - - spikes = spk.merge_spike_trains([spike_trains[0], spike_trains[1]]) - # test if result is sorted - assert((spikes == np.sort(spikes)).all()) - # check merging - check_merged_spikes( spikes, [spike_trains[0], spike_trains[1]] ) - - spikes = spk.merge_spike_trains(spike_trains) - # test if result is sorted - assert((spikes == np.sort(spikes)).all()) - # check merging - check_merged_spikes( spikes, spike_trains ) - - -if __name__ == "main": - test_merge_spike_trains() - diff --git a/test/test_spikes.py b/test/test_spikes.py new file mode 100644 index 0000000..dca580f --- /dev/null +++ b/test/test_spikes.py @@ -0,0 +1,84 @@ +""" test_load.py + +Test loading of spike trains from text files + +Copyright 2014, Mario Mulansky +""" + +from __future__ import print_function +import numpy as np +from numpy.testing import assert_equal + +import pyspike as spk + + +def test_auxiliary_spikes(): + t = np.array([0.2, 0.4, 0.6, 0.7]) + t_aux = spk.add_auxiliary_spikes(t, time_interval=(0.1, 1.0)) + assert_equal(t_aux, [0.1, 0.2, 0.4, 0.6, 0.7, 1.0]) + t_aux = spk.add_auxiliary_spikes(t_aux, time_interval=(0.0, 1.0)) + assert_equal(t_aux, [0.0, 0.1, 0.2, 0.4, 0.6, 0.7, 1.0]) + + +def test_load_from_txt(): + spike_trains = spk.load_spike_trains_from_txt("SPIKY_testdata.txt", + time_interval=(0,4000)) + assert len(spike_trains) == 40 + + # check the first spike train + spike_times = [0, 64.886, 305.81, 696, 937.77, 1059.7, 1322.2, 1576.1, + 1808.1, 2121.5, 2381.1, 2728.6, 2966.9, 3223.7, 3473.7, + 3644.3, 3936.3, 4000] + assert_equal(spike_times, spike_trains[0]) + + # check auxiliary spikes + for spike_train in spike_trains: + assert spike_train[0] == 0.0 + assert spike_train[-1] == 4000 + + # load without adding auxiliary spikes + spike_trains2 = spk.load_spike_trains_from_txt("SPIKY_testdata.txt", + time_interval=None) + assert 
len(spike_trains2) == 40 + # check auxiliary spikes + for i in xrange(len(spike_trains)): + assert len(spike_trains[i]) == len(spike_trains2[i])+2 # two spikes less + + +def check_merged_spikes( merged_spikes, spike_trains ): + # create a flat array with all spike events + all_spikes = np.array([]) + for spike_train in spike_trains: + all_spikes = np.append(all_spikes, spike_train) + indices = np.zeros_like(all_spikes, dtype='bool') + # check if we find all the spike events in the original spike trains + for x in merged_spikes: + i = np.where(all_spikes == x)[0][0] # the first axis and the first entry + # change to something impossible so we dont find this event again + all_spikes[i] = -1.0 + indices[i] = True + assert( indices.all() ) + + +def test_merge_spike_trains(): + # first load the data + spike_trains = spk.load_spike_trains_from_txt("SPIKY_testdata.txt", + time_interval=(0,4000)) + + spikes = spk.merge_spike_trains([spike_trains[0], spike_trains[1]]) + # test if result is sorted + assert((spikes == np.sort(spikes)).all()) + # check merging + check_merged_spikes( spikes, [spike_trains[0], spike_trains[1]] ) + + spikes = spk.merge_spike_trains(spike_trains) + # test if result is sorted + assert((spikes == np.sort(spikes)).all()) + # check merging + check_merged_spikes( spikes, spike_trains ) + +if __name__ == "main": + test_auxiliary_spikes() + test_load_from_txt() + test_merge_spike_trains() + -- cgit v1.2.3 From c1c5403b8274bd19aa1e71933cfaefe1ba622e59 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Fri, 10 Oct 2014 17:23:28 +0200 Subject: added License note in headers --- examples/isi_matrix.py | 11 +++++++++++ examples/merge.py | 28 ++++++++++++++++++++++++++++ examples/plot.py | 42 ++++++++++++++++++++++++++++++++++++++++++ examples/test_data.py | 34 ---------------------------------- examples/test_merge.py | 20 -------------------- pyspike/__init__.py | 6 ++++++ pyspike/cython_distance.pyx | 3 +++ pyspike/distances.py | 2 ++ pyspike/function.py | 2 ++ pyspike/python_backend.py | 4 +++- pyspike/spikes.py | 2 ++ test/test_distance.py | 3 +++ test/test_function.py | 2 ++ 13 files changed, 104 insertions(+), 55 deletions(-) create mode 100644 examples/merge.py create mode 100644 examples/plot.py delete mode 100644 examples/test_data.py delete mode 100644 examples/test_merge.py (limited to 'pyspike/__init__.py') diff --git a/examples/isi_matrix.py b/examples/isi_matrix.py index 0d6e185..3297d3d 100644 --- a/examples/isi_matrix.py +++ b/examples/isi_matrix.py @@ -1,3 +1,14 @@ +""" isi_matrix.py + +Simple example showing how to compute the isi distance matrix of a set of spike +trains. + +Copyright 2014, Mario Mulansky + +Distributed under the MIT License (MIT) +""" + + from __future__ import print_function import numpy as np diff --git a/examples/merge.py b/examples/merge.py new file mode 100644 index 0000000..55c7f0a --- /dev/null +++ b/examples/merge.py @@ -0,0 +1,28 @@ +""" merge.py + +Simple example showing the merging of two spike trains. 
+ +Copyright 2014, Mario Mulansky + +Distributed under the MIT License (MIT) +""" + +from __future__ import print_function + +import numpy as np +import matplotlib.pyplot as plt + +import pyspike as spk + +# first load the data, ending time = 4000 +spike_trains = spk.load_spike_trains_from_txt("SPIKY_testdata.txt", 4000) + +spikes = spk.merge_spike_trains([spike_trains[0], spike_trains[1]]) + +print(spikes) + +plt.plot(spike_trains[0], np.ones_like(spike_trains[0]), 'o') +plt.plot(spike_trains[1], np.ones_like(spike_trains[1]), 'x') +plt.plot(spikes, 2*np.ones_like(spikes), 'o') + +plt.show() diff --git a/examples/plot.py b/examples/plot.py new file mode 100644 index 0000000..d7e2173 --- /dev/null +++ b/examples/plot.py @@ -0,0 +1,42 @@ +""" plot.py + +Simple example showing how to load and plot spike trains and their distances. + +Copyright 2014, Mario Mulansky + +Distributed under the MIT License (MIT) +""" + + +from __future__ import print_function + +import numpy as np +import matplotlib.pyplot as plt + +import pyspike as spk + +spike_trains = spk.load_spike_trains_from_txt("SPIKY_testdata.txt", + time_interval=(0,4000)) + +# plot the spike time +for (i,spikes) in enumerate(spike_trains): + plt.plot(spikes, i*np.ones_like(spikes), 'o') + +f = spk.isi_distance(spike_trains[0], spike_trains[1]) +x, y = f.get_plottable_data() + +plt.figure() +plt.plot(x, np.abs(y), '--k') + +print("Average: %.8f" % f.avrg()) +print("Absolute average: %.8f" % f.abs_avrg()) + + +f = spk.spike_distance(spike_trains[0], spike_trains[1]) +x, y = f.get_plottable_data() +print(x) +print(y) +#plt.figure() +plt.plot(x, y, '-b') + +plt.show() diff --git a/examples/test_data.py b/examples/test_data.py deleted file mode 100644 index dcd0f20..0000000 --- a/examples/test_data.py +++ /dev/null @@ -1,34 +0,0 @@ -# compute the isi distance of some test data - -from __future__ import print_function - -import numpy as np -import matplotlib.pyplot as plt - -import pyspike as spk - -spike_trains = spk.load_spike_trains_from_txt("SPIKY_testdata.txt", - time_interval=(0,4000)) - -# plot the spike time -for (i,spikes) in enumerate(spike_trains): - plt.plot(spikes, i*np.ones_like(spikes), 'o') - -f = spk.isi_distance(spike_trains[0], spike_trains[1]) -x, y = f.get_plottable_data() - -plt.figure() -plt.plot(x, np.abs(y), '--k') - -print("Average: %.8f" % f.avrg()) -print("Absolute average: %.8f" % f.abs_avrg()) - - -f = spk.spike_distance(spike_trains[0], spike_trains[1]) -x, y = f.get_plottable_data() -print(x) -print(y) -#plt.figure() -plt.plot(x, y, '-b') - -plt.show() diff --git a/examples/test_merge.py b/examples/test_merge.py deleted file mode 100644 index 0c34608..0000000 --- a/examples/test_merge.py +++ /dev/null @@ -1,20 +0,0 @@ -# compute the isi distance of some test data -from __future__ import print_function - -import numpy as np -import matplotlib.pyplot as plt - -import pyspike as spk - -# first load the data, ending time = 4000 -spike_trains = spk.load_spike_trains_from_txt("SPIKY_testdata.txt", 4000) - -spikes = spk.merge_spike_trains([spike_trains[0], spike_trains[1]]) - -print(spikes) - -plt.plot(spike_trains[0], np.ones_like(spike_trains[0]), 'o') -plt.plot(spike_trains[1], np.ones_like(spike_trains[1]), 'x') -plt.plot(spikes, 2*np.ones_like(spikes), 'o') - -plt.show() diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 2703f65..3867e6e 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -1,3 +1,9 @@ +""" +Copyright 2014, Mario Mulansky + +Distributed under the MIT License (MIT) +""" + 
__all__ = ["function", "distances", "spikes"] from function import PieceWiseConstFunc, PieceWiseLinFunc diff --git a/pyspike/cython_distance.pyx b/pyspike/cython_distance.pyx index 2be8525..4ab4381 100644 --- a/pyspike/cython_distance.pyx +++ b/pyspike/cython_distance.pyx @@ -11,6 +11,9 @@ Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects improves the performance of spike_distance by a factor of 10! Copyright 2014, Mario Mulansky + +Distributed under the MIT License (MIT) + """ """ diff --git a/pyspike/distances.py b/pyspike/distances.py index da603ad..db04c4e 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -3,6 +3,8 @@ Module containing several functions to compute spike distances Copyright 2014, Mario Mulansky + +Distributed under the MIT License (MIT) """ import numpy as np diff --git a/pyspike/function.py b/pyspike/function.py index 5444c36..243ef67 100644 --- a/pyspike/function.py +++ b/pyspike/function.py @@ -5,6 +5,8 @@ functions. Copyright 2014, Mario Mulansky +Distributed under the MIT License (MIT) + """ from __future__ import print_function diff --git a/pyspike/python_backend.py b/pyspike/python_backend.py index 9134149..e5b74e9 100644 --- a/pyspike/python_backend.py +++ b/pyspike/python_backend.py @@ -3,7 +3,9 @@ Collection of python functions that can be used instead of the cython implementation. -Copyright 2014, Mario Mulanksy +Copyright 2014, Mario Mulansky + +Distributed under the MIT License (MIT) """ diff --git a/pyspike/spikes.py b/pyspike/spikes.py index 502c460..9375e30 100644 --- a/pyspike/spikes.py +++ b/pyspike/spikes.py @@ -3,6 +3,8 @@ Module containing several function to load and transform spike trains Copyright 2014, Mario Mulansky + +Distributed under the MIT License (MIT) """ import numpy as np diff --git a/test/test_distance.py b/test/test_distance.py index 84d0af9..dafe693 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -3,6 +3,9 @@ Tests the isi- and spike-distance computation Copyright 2014, Mario Mulansky + +Distributed under the MIT License (MIT) + """ from __future__ import print_function diff --git a/test/test_function.py b/test/test_function.py index 7420011..c0fb3fd 100644 --- a/test/test_function.py +++ b/test/test_function.py @@ -3,6 +3,8 @@ Tests the PieceWiseConst and PieceWiseLinear functions Copyright 2014, Mario Mulansky + +Distributed under the MIT License (MIT) """ from __future__ import print_function -- cgit v1.2.3 From 5ce807943fab2ba233cff661e34e4d6a83397b99 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 13 Oct 2014 11:03:42 +0200 Subject: changed to BSD license --- License | 25 ++++++++----------------- examples/isi_matrix.py | 2 +- examples/merge.py | 2 +- examples/plot.py | 2 +- pyspike/__init__.py | 2 +- pyspike/cython_distance.pyx | 2 +- pyspike/distances.py | 2 +- pyspike/function.py | 2 +- pyspike/python_backend.py | 2 +- pyspike/spikes.py | 2 +- test/test_distance.py | 2 +- test/test_function.py | 2 +- test/test_spikes.py | 2 +- 13 files changed, 20 insertions(+), 29 deletions(-) (limited to 'pyspike/__init__.py') diff --git a/License b/License index 95d0405..472deac 100644 --- a/License +++ b/License @@ -1,21 +1,12 @@ -The MIT License (MIT) +BSD License -Copyright (c) 2014 Mario Mulansky, +Copyright (c) 2014, Mario Mulansky +All rights reserved. 
-Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. \ No newline at end of file +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/examples/isi_matrix.py b/examples/isi_matrix.py index db740dd..7bf1cf9 100644 --- a/examples/isi_matrix.py +++ b/examples/isi_matrix.py @@ -5,7 +5,7 @@ trains. Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ diff --git a/examples/merge.py b/examples/merge.py index 726d32b..2550cdb 100644 --- a/examples/merge.py +++ b/examples/merge.py @@ -4,7 +4,7 @@ Simple example showing the merging of two spike trains. Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ from __future__ import print_function diff --git a/examples/plot.py b/examples/plot.py index 5c3ad4a..da53670 100644 --- a/examples/plot.py +++ b/examples/plot.py @@ -4,7 +4,7 @@ Simple example showing how to load and plot spike trains and their distances. 
Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 3867e6e..c58a6b1 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -1,7 +1,7 @@ """ Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ __all__ = ["function", "distances", "spikes"] diff --git a/pyspike/cython_distance.pyx b/pyspike/cython_distance.pyx index 4ab4381..ccf8060 100644 --- a/pyspike/cython_distance.pyx +++ b/pyspike/cython_distance.pyx @@ -12,7 +12,7 @@ improves the performance of spike_distance by a factor of 10! Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ diff --git a/pyspike/distances.py b/pyspike/distances.py index b2eec92..3b9fe1f 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -4,7 +4,7 @@ Module containing several functions to compute spike distances Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ import numpy as np diff --git a/pyspike/function.py b/pyspike/function.py index 8107538..7722cc3 100644 --- a/pyspike/function.py +++ b/pyspike/function.py @@ -5,7 +5,7 @@ linear functions. Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ from __future__ import print_function diff --git a/pyspike/python_backend.py b/pyspike/python_backend.py index cf1a92f..a1f5ea2 100644 --- a/pyspike/python_backend.py +++ b/pyspike/python_backend.py @@ -5,7 +5,7 @@ implementation. Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ diff --git a/pyspike/spikes.py b/pyspike/spikes.py index c496ab8..d390222 100644 --- a/pyspike/spikes.py +++ b/pyspike/spikes.py @@ -4,7 +4,7 @@ Module containing several function to load and transform spike trains Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ import numpy as np diff --git a/test/test_distance.py b/test/test_distance.py index 3371cbd..b500b2c 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -4,7 +4,7 @@ Tests the isi- and spike-distance computation Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ diff --git a/test/test_function.py b/test/test_function.py index ed7d6bc..a579796 100644 --- a/test/test_function.py +++ b/test/test_function.py @@ -4,7 +4,7 @@ Tests the PieceWiseConst and PieceWiseLinear functions Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ from __future__ import print_function diff --git a/test/test_spikes.py b/test/test_spikes.py index 349e0bf..bf914c0 100644 --- a/test/test_spikes.py +++ b/test/test_spikes.py @@ -4,7 +4,7 @@ Test loading of spike trains from text files Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ from __future__ import print_function -- cgit v1.2.3 From 2a99e3bf2c575efc9abbc1cf262810d223f2cad0 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 15 Oct 2014 12:32:09 +0200 Subject: +average_profile function --- pyspike/__init__.py | 2 +- pyspike/function.py | 35 +++++++++++++++++++++++++++++++++++ test/test_function.py | 42 ++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 78 insertions(+), 1 deletion(-) (limited to 
'pyspike/__init__.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index c58a6b1..1bfa7fc 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -6,7 +6,7 @@ Distributed under the BSD License __all__ = ["function", "distances", "spikes"] -from function import PieceWiseConstFunc, PieceWiseLinFunc +from function import PieceWiseConstFunc, PieceWiseLinFunc, average_profile from distances import isi_distance, spike_distance, \ isi_distance_multi, spike_distance_multi, isi_distance_matrix from spikes import add_auxiliary_spikes, load_spike_trains_from_txt, \ diff --git a/pyspike/function.py b/pyspike/function.py index bd3e2d5..46fdea2 100644 --- a/pyspike/function.py +++ b/pyspike/function.py @@ -26,9 +26,17 @@ class PieceWiseConstFunc: function. - y: array of length N defining the function values at the intervals. """ + # convert parameters to arrays, also ensures copying self.x = np.array(x) self.y = np.array(y) + def copy(self): + """ Returns a copy of itself + Returns: + - PieceWiseConstFunc copy + """ + return PieceWiseConstFunc(self.x, self.y) + def almost_equal(self, other, decimal=14): """ Checks if the function is equal to another function up to `decimal` precision. @@ -108,10 +116,18 @@ class PieceWiseLinFunc: - y2: array of length N defining the function values at the right of the intervals. """ + # convert to array, which also ensures copying self.x = np.array(x) self.y1 = np.array(y1) self.y2 = np.array(y2) + def copy(self): + """ Returns a copy of itself + Returns: + - PieceWiseLinFunc copy + """ + return PieceWiseLinFunc(self.x, self.y1, self.y2) + def almost_equal(self, other, decimal=14): """ Checks if the function is equal to another function up to `decimal` precision. @@ -183,3 +199,22 @@ class PieceWiseLinFunc: """ self.y1 *= fac self.y2 *= fac + + +def average_profile(profiles): + """ Computes the average profile from the given ISI- or SPIKE-profiles. + Args: + - profiles: list of PieceWiseConstFunc or PieceWiseLinFunc representing + ISI- or SPIKE-profiles to be averaged + Returns: + - avrg_profile: PieceWiseConstFunc or PieceWiseLinFunc containing the + average profile. 
+ """ + assert len(profiles) > 1 + + avrg_profile = profiles[0].copy() + for i in xrange(1, len(profiles)): + avrg_profile.add(profiles[i]) + avrg_profile.mul_scalar(1.0/len(profiles)) # normalize + + return avrg_profile diff --git a/test/test_function.py b/test/test_function.py index c5caa5a..ed70180 100644 --- a/test/test_function.py +++ b/test/test_function.py @@ -68,6 +68,23 @@ def test_pwc_mul(): assert_array_almost_equal(f.y, 1.5/5.0*np.array(y), decimal=16) +def test_pwc_avrg(): + # some random data + x = [0.0, 1.0, 2.0, 2.5, 4.0] + y = [1.0, -0.5, 1.5, 0.75] + f1 = spk.PieceWiseConstFunc(x, y) + + x = [0.0, 0.75, 2.0, 2.5, 2.7, 4.0] + y = [0.5, 1.0, -0.25, 0.0, 1.5] + f2 = spk.PieceWiseConstFunc(x, y) + + f_avrg = spk.average_profile([f1, f2]) + x_expected = [0.0, 0.75, 1.0, 2.0, 2.5, 2.7, 4.0] + y_expected = [0.75, 1.0, 0.25, 0.625, 0.375, 1.125] + assert_array_almost_equal(f_avrg.x, x_expected, decimal=16) + assert_array_almost_equal(f_avrg.y, y_expected, decimal=16) + + def test_pwl(): x = [0.0, 1.0, 2.0, 2.5, 4.0] y1 = [1.0, -0.5, 1.5, 0.75] @@ -134,6 +151,31 @@ def test_pwl_mul(): assert_array_almost_equal(f.y1, 1.5/5.0*np.array(y1), decimal=16) assert_array_almost_equal(f.y2, 1.5/5.0*np.array(y2), decimal=16) + +def test_pwl_avrg(): + x = [0.0, 1.0, 2.0, 2.5, 4.0] + y1 = [1.0, -0.5, 1.5, 0.75] + y2 = [1.5, -0.4, 1.5, 0.25] + f1 = spk.PieceWiseLinFunc(x, y1, y2) + + x = [0.0, 0.75, 2.0, 2.5, 2.7, 4.0] + y1 = [0.5, 1.0, -0.25, 0.0, 1.5] + y2 = [0.8, 0.2, -1.0, 0.0, 2.0] + f2 = spk.PieceWiseLinFunc(x, y1, y2) + + x_expected = [0.0, 0.75, 1.0, 2.0, 2.5, 2.7, 4.0] + y1_expected = np.array([1.5, 1.0+1.0+0.5*0.75, -0.5+1.0-0.8*0.25/1.25, + 1.5-0.25, 0.75, 1.5+0.75-0.5*0.2/1.5]) / 2 + y2_expected = np.array([0.8+1.0+0.5*0.75, 1.5+1.0-0.8*0.25/1.25, -0.4+0.2, + 1.5-1.0, 0.75-0.5*0.2/1.5, 2.25]) / 2 + + f_avrg = spk.average_profile([f1, f2]) + + assert_array_almost_equal(f_avrg.x, x_expected, decimal=16) + assert_array_almost_equal(f_avrg.y1, y1_expected, decimal=16) + assert_array_almost_equal(f_avrg.y2, y2_expected, decimal=16) + + if __name__ == "__main__": test_pwc() test_pwc_add() -- cgit v1.2.3 From 4249dd363e992fe1178c9d76db3f74c5005afb0a Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Thu, 16 Oct 2014 12:41:43 +0200 Subject: changed function names distance -> profiles, added distance functions --- pyspike/__init__.py | 6 ++- pyspike/distances.py | 105 +++++++++++++++++++++++++++++++++++++++++--------- test/test_distance.py | 18 ++++----- test/test_spikes.py | 7 ++-- 4 files changed, 103 insertions(+), 33 deletions(-) (limited to 'pyspike/__init__.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 1bfa7fc..5146507 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -7,7 +7,9 @@ Distributed under the BSD License __all__ = ["function", "distances", "spikes"] from function import PieceWiseConstFunc, PieceWiseLinFunc, average_profile -from distances import isi_distance, spike_distance, \ - isi_distance_multi, spike_distance_multi, isi_distance_matrix +from distances import isi_profile, isi_distance, \ + spike_profile, spike_distance, \ + isi_profile_multi, isi_distance_multi, isi_distance_matrix, \ + spike_profile_multi, spike_distance_multi from spikes import add_auxiliary_spikes, load_spike_trains_from_txt, \ spike_train_from_string, merge_spike_trains diff --git a/pyspike/distances.py b/pyspike/distances.py index 4ba2bd3..e50772f 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -14,9 +14,9 @@ from pyspike import PieceWiseConstFunc, 
PieceWiseLinFunc ############################################################ -# isi_distance +# isi_profile ############################################################ -def isi_distance(spikes1, spikes2): +def isi_profile(spikes1, spikes2): """ Computes the isi-distance profile S_isi(t) of the two given spike trains. Returns the profile as a PieceWiseConstFunc object. The S_isi values are defined positive S_isi(t)>=0. The spike trains are expected @@ -41,9 +41,24 @@ ############################################################ -# spike_distance +# isi_distance ############################################################ -def spike_distance(spikes1, spikes2): +def isi_distance(spikes1, spikes2): + """ Computes the isi-distance I of the given spike trains. The + isi-distance is the integral over the isi distance profile S_isi(t): + I = \int_0^T S_isi(t) dt. + Args: + - spikes1, spikes2: ordered arrays of spike times with auxiliary spikes. + Returns: + - double value: The isi-distance I. + """ + return isi_profile(spikes1, spikes2).avrg() + + +############################################################ +# spike_profile +############################################################ +def spike_profile(spikes1, spikes2): """ Computes the spike-distance profile S_spike(t) of the two given spike trains. Returns the profile as a PieceWiseLinFunc object. The S_spike values are defined positive S_spike(t)>=0. The spike trains are expected to @@ -69,11 +84,26 @@ ############################################################ -# multi_distance +# spike_distance +############################################################ +def spike_distance(spikes1, spikes2): + """ Computes the spike-distance S of the given spike trains. The + spike-distance is the integral over the spike distance profile S_spike(t): + S = \int_0^T S_spike(t) dt. + Args: + - spikes1, spikes2: ordered arrays of spike times with auxiliary spikes. + Returns: + - double value: The spike-distance S. + """ + return spike_profile(spikes1, spikes2).avrg() + + +############################################################ +# multi_profile ############################################################ -def multi_distance(spike_trains, pair_distance_func, indices=None): +def multi_profile(spike_trains, pair_distance_func, indices=None): """ Internal implementation detail, don't call this function directly, - use isi_distance_multi or spike_distance_multi instead. + use isi_profile_multi or spike_profile_multi instead. Computes the multi-variate distance for a set of spike-trains using the pair_dist_func to compute pair-wise distances. That is, it computes the @@ -158,42 +188,81 @@ return average_dist +############################################################ +# isi_profile_multi +############################################################ +def isi_profile_multi(spike_trains, indices=None): + """ computes the multi-variate isi distance profile for a set of spike + trains. 
That is the average isi-distance of all pairs of spike-trains: + S_isi(t) = 2/(N(N-1)) sum_{<i,j>} S_{isi}^{i,j}, + where the sum goes over all pairs <i,j> + Args: + - spike_trains: list of spike trains + - indices: list of indices defining which spike trains to use, + if None all given spike trains are used (default=None) + Returns: + - A PieceWiseConstFunc representing the averaged isi distance S_isi(t) + """ + return multi_profile(spike_trains, isi_profile, indices) + + ############################################################ # isi_distance_multi ############################################################ def isi_distance_multi(spike_trains, indices=None): - """ computes the multi-variate isi-distance for a set of spike-trains. That - is the average isi-distance of all pairs of spike-trains: - S(t) = 2/((N(N-1)) sum_{<i,j>} S_{i,j}, + """ computes the multi-variate isi-distance for a set of spike-trains. + That is the time average of the multi-variate spike profile: + S_isi = \int_0^T 2/(N(N-1)) sum_{<i,j>} S_{isi}^{i,j}, where the sum goes over all pairs <i,j> Args: - spike_trains: list of spike trains - indices: list of indices defining which spike trains to use, if None all given spike trains are used (default=None) Returns: - - A PieceWiseConstFunc representing the averaged isi distance S + - A double value representing the averaged isi distance S_isi """ - return multi_distance(spike_trains, isi_distance, indices) + return isi_profile_multi(spike_trains, indices).avrg() + + +############################################################ +# spike_profile_multi +############################################################ +def spike_profile_multi(spike_trains, indices=None): + """ Computes the multi-variate spike distance profile for a set of spike + trains. That is the average spike-distance of all pairs of spike-trains: + S_spike(t) = 2/(N(N-1)) sum_{<i,j>} S_{spike}^{i, j}, + where the sum goes over all pairs <i,j> + Args: + - spike_trains: list of spike trains + - indices: list of indices defining which spike-trains to use, + if None all given spike trains are used (default=None) + Returns: + - A PieceWiseLinFunc representing the averaged spike distance S(t) + """ + return multi_profile(spike_trains, spike_profile, indices) ############################################################ # spike_distance_multi ############################################################ def spike_distance_multi(spike_trains, indices=None): - """ computes the multi-variate spike-distance for a set of spike-trains. - That is the average spike-distance of all pairs of spike-trains: - S(t) = 2/((N(N-1)) sum_{<i,j>} S_{i, j}, + """ Computes the multi-variate spike distance for a set of spike trains. + That is the time average of the multi-variate spike profile: + S_{spike} = \int_0^T 2/(N(N-1)) sum_{<i,j>} S_{spike}^{i, j} dt where the sum goes over all pairs <i,j> Args: - spike_trains: list of spike trains - indices: list of indices defining which spike-trains to use, if None all given spike trains are used (default=None) Returns: - - A PieceWiseLinFunc representing the averaged spike distance S + - A double value representing the averaged spike distance S """ - return multi_distance(spike_trains, spike_distance, indices) + return spike_profile_multi(spike_trains, indices).avrg() +############################################################ +# isi_distance_matrix +############################################################ def isi_distance_matrix(spike_trains, indices=None): """ Computes the average isi-distance of all pairs of spike-trains. 
Args: @@ -212,7 +281,7 @@ def isi_distance_matrix(spike_trains, indices=None): "Invalid index list." # generate a list of possible index pairs pairs = [(i, j) for i in indices for j in indices[i+1:]] - + distance_matrix = np.zeros((len(indices), len(indices))) for i, j in pairs: d = isi_distance(spike_trains[i], spike_trains[j]).avrg() diff --git a/test/test_distance.py b/test/test_distance.py index 0695701..81ffe09 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -28,7 +28,7 @@ def test_isi(): t1 = spk.add_auxiliary_spikes(t1, 1.0) t2 = spk.add_auxiliary_spikes(t2, 1.0) - f = spk.isi_distance(t1, t2) + f = spk.isi_profile(t1, t2) # print("ISI: ", f.y) @@ -44,7 +44,7 @@ def test_isi(): t1 = spk.add_auxiliary_spikes(t1, 1.0) t2 = spk.add_auxiliary_spikes(t2, 1.0) - f = spk.isi_distance(t1, t2) + f = spk.isi_profile(t1, t2) assert_equal(f.x, expected_times) assert_array_almost_equal(f.y, expected_isi, decimal=14) @@ -69,7 +69,7 @@ def test_spike(): t1 = spk.add_auxiliary_spikes(t1, 1.0) t2 = spk.add_auxiliary_spikes(t2, 1.0) - f = spk.spike_distance(t1, t2) + f = spk.spike_profile(t1, t2) assert_equal(f.x, expected_times) assert_array_almost_equal(f.y1, expected_y1, decimal=14) @@ -89,7 +89,7 @@ def test_spike(): t1 = spk.add_auxiliary_spikes(t1, 1.0) t2 = spk.add_auxiliary_spikes(t2, 1.0) - f = spk.spike_distance(t1, t2) + f = spk.spike_profile(t1, t2) assert_equal(f.x, expected_times) assert_array_almost_equal(f.y1, expected_y1, decimal=14) @@ -131,23 +131,23 @@ def check_multi_distance(dist_func, dist_func_multi): def test_multi_isi(): - check_multi_distance(spk.isi_distance, spk.isi_distance_multi) + check_multi_distance(spk.isi_profile, spk.isi_profile_multi) def test_multi_spike(): - check_multi_distance(spk.spike_distance, spk.spike_distance_multi) + check_multi_distance(spk.spike_profile, spk.spike_profile_multi) def test_regression_spiky(): - spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", + spike_trains = spk.load_spike_trains_from_txt("test/PySpike_testdata.txt", (0.0, 4000.0)) - isi_profile = spk.isi_distance_multi(spike_trains) + isi_profile = spk.isi_profile_multi(spike_trains) isi_dist = isi_profile.avrg() print(isi_dist) # get the full precision from SPIKY # assert_equal(isi_dist, 0.1832) - spike_profile = spk.spike_distance_multi(spike_trains) + spike_profile = spk.spike_profile_multi(spike_trains) spike_dist = spike_profile.avrg() print(spike_dist) # get the full precision from SPIKY diff --git a/test/test_spikes.py b/test/test_spikes.py index bf914c0..d650d5d 100644 --- a/test/test_spikes.py +++ b/test/test_spikes.py @@ -23,7 +23,7 @@ def test_auxiliary_spikes(): def test_load_from_txt(): - spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", + spike_trains = spk.load_spike_trains_from_txt("test/PySpike_testdata.txt", time_interval=(0, 4000)) assert len(spike_trains) == 40 @@ -39,7 +39,7 @@ def test_load_from_txt(): assert spike_train[-1] == 4000 # load without adding auxiliary spikes - spike_trains2 = spk.load_spike_trains_from_txt("PySpike_testdata.txt", + spike_trains2 = spk.load_spike_trains_from_txt("test/PySpike_testdata.txt", time_interval=None) assert len(spike_trains2) == 40 # check auxiliary spikes @@ -64,9 +64,8 @@ def check_merged_spikes(merged_spikes, spike_trains): def test_merge_spike_trains(): # first load the data - spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", + spike_trains = spk.load_spike_trains_from_txt("test/PySpike_testdata.txt", time_interval=(0, 4000)) - spikes = 
spk.merge_spike_trains([spike_trains[0], spike_trains[1]]) # test if result is sorted assert((spikes == np.sort(spikes)).all()) -- cgit v1.2.3 From 5970a9cfdbecc1af232b7ffe485bdc057591a2b8 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Thu, 16 Oct 2014 14:50:26 +0200 Subject: added spike_matrix, refactoring dist matrix functs --- pyspike/__init__.py | 2 +- pyspike/distances.py | 52 ++++++++++++++++++++++++++++++++++-------- test/test_distance.py | 62 +++++++++++++++++++++++++++++++++++++++++---------- test/test_spikes.py | 1 + 4 files changed, 95 insertions(+), 22 deletions(-) (limited to 'pyspike/__init__.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 5146507..d2d5b57 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -10,6 +10,6 @@ from function import PieceWiseConstFunc, PieceWiseLinFunc, average_profile from distances import isi_profile, isi_distance, \ spike_profile, spike_distance, \ isi_profile_multi, isi_distance_multi, isi_distance_matrix, \ - spike_profile_multi, spike_distance_multi + spike_profile_multi, spike_distance_multi, spike_distance_matrix from spikes import add_auxiliary_spikes, load_spike_trains_from_txt, \ spike_train_from_string, merge_spike_trains diff --git a/pyspike/distances.py b/pyspike/distances.py index 9056863..7d7044b 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -99,9 +99,9 @@ def spike_distance(spikes1, spikes2): ############################################################ -# multi_profile +# generic_profile_multi ############################################################ -def multi_profile(spike_trains, pair_distance_func, indices=None): +def generic_profile_multi(spike_trains, pair_distance_func, indices=None): """ Internal implementation detail, don't call this function directly, use isi_profile_multi or spike_profile_multi instead. @@ -203,7 +203,7 @@ def isi_profile_multi(spike_trains, indices=None): Returns: - A PieceWiseConstFunc representing the averaged isi distance S_isi(t) """ - return multi_profile(spike_trains, isi_profile, indices) + return generic_profile_multi(spike_trains, isi_profile, indices) ############################################################ @@ -239,7 +239,7 @@ def spike_profile_multi(spike_trains, indices=None): Returns: - A PieceWiseLinFunc representing the averaged spike distance S(t) """ - return multi_profile(spike_trains, spike_profile, indices) + return generic_profile_multi(spike_trains, spike_profile, indices) ############################################################ @@ -261,17 +261,19 @@ def spike_distance_multi(spike_trains, indices=None): ############################################################ -# isi_distance_matrix +# generic_distance_matrix ############################################################ -def isi_distance_matrix(spike_trains, indices=None): - """ Computes the average isi-distance of all pairs of spike-trains. +def generic_distance_matrix(spike_trains, dist_function, indices=None): + """ Internal implementation detail. Don't use this function directly. + Instead use isi_distance_matrix or spike_distance_matrix. + Computes the time averaged distance of all pairs of spike-trains. 
Args: - spike_trains: list of spike trains - indices: list of indices defining which spike-trains to use if None all given spike-trains are used (default=None) Return: - a 2D array of size len(indices)*len(indices) containing the average - pair-wise isi-distance + pair-wise distance """ if indices is None: indices = np.arange(len(spike_trains)) @@ -284,7 +286,39 @@ def isi_distance_matrix(spike_trains, indices=None): distance_matrix = np.zeros((len(indices), len(indices))) for i, j in pairs: - d = isi_distance(spike_trains[i], spike_trains[j]) + d = dist_function(spike_trains[i], spike_trains[j]) distance_matrix[i, j] = d distance_matrix[j, i] = d return distance_matrix + + +############################################################ +# isi_distance_matrix +############################################################ +def isi_distance_matrix(spike_trains, indices=None): + """ Computes the time averaged isi-distance of all pairs of spike-trains. + Args: + - spike_trains: list of spike trains + - indices: list of indices defining which spike-trains to use + if None all given spike-trains are used (default=None) + Return: + - a 2D array of size len(indices)*len(indices) containing the average + pair-wise isi-distance + """ + return generic_distance_matrix(spike_trains, isi_distance, indices) + + +############################################################ +# spike_distance_matrix +############################################################ +def spike_distance_matrix(spike_trains, indices=None): + """ Computes the time averaged spike-distance of all pairs of spike-trains. + Args: + - spike_trains: list of spike trains + - indices: list of indices defining which spike-trains to use + if None all given spike-trains are used (default=None) + Return: + - a 2D array of size len(indices)*len(indices) containing the average + pair-wise spike-distance + """ + return generic_distance_matrix(spike_trains, spike_distance, indices) diff --git a/test/test_distance.py b/test/test_distance.py index 2a6bf4e..7be0d9b 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -130,7 +130,7 @@ def test_spike(): decimal=16) -def check_multi_distance(dist_func, dist_func_multi): +def check_multi_profile(profile_func, profile_func_multi): # generate spike trains: t1 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6, 0.7]), 1.0) t2 = spk.add_auxiliary_spikes(np.array([0.3, 0.45, 0.8, 0.9, 0.95]), 1.0) @@ -138,21 +138,21 @@ def check_multi_distance(dist_func, dist_func_multi): t4 = spk.add_auxiliary_spikes(np.array([0.1, 0.4, 0.5, 0.6]), 1.0) spike_trains = [t1, t2, t3, t4] - f12 = dist_func(t1, t2) - f13 = dist_func(t1, t3) - f14 = dist_func(t1, t4) - f23 = dist_func(t2, t3) - f24 = dist_func(t2, t4) - f34 = dist_func(t3, t4) + f12 = profile_func(t1, t2) + f13 = profile_func(t1, t3) + f14 = profile_func(t1, t4) + f23 = profile_func(t2, t3) + f24 = profile_func(t2, t4) + f34 = profile_func(t3, t4) - f_multi = dist_func_multi(spike_trains, [0, 1]) + f_multi = profile_func_multi(spike_trains, [0, 1]) assert f_multi.almost_equal(f12, decimal=14) f = copy(f12) f.add(f13) f.add(f23) f.mul_scalar(1.0/3) - f_multi = dist_func_multi(spike_trains, [0, 1, 2]) + f_multi = profile_func_multi(spike_trains, [0, 1, 2]) assert f_multi.almost_equal(f, decimal=14) f.mul_scalar(3) # revert above normalization @@ -160,16 +160,54 @@ def check_multi_distance(dist_func, dist_func_multi): f.add(f24) f.add(f34) f.mul_scalar(1.0/6) - f_multi = dist_func_multi(spike_trains) + f_multi = profile_func_multi(spike_trains) assert 
f_multi.almost_equal(f, decimal=14) def test_multi_isi(): - check_multi_distance(spk.isi_profile, spk.isi_profile_multi) + check_multi_profile(spk.isi_profile, spk.isi_profile_multi) def test_multi_spike(): - check_multi_distance(spk.spike_profile, spk.spike_profile_multi) + check_multi_profile(spk.spike_profile, spk.spike_profile_multi) + + +def check_dist_matrix(dist_func, dist_matrix_func): + # generate spike trains: + t1 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6, 0.7]), 1.0) + t2 = spk.add_auxiliary_spikes(np.array([0.3, 0.45, 0.8, 0.9, 0.95]), 1.0) + t3 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6]), 1.0) + t4 = spk.add_auxiliary_spikes(np.array([0.1, 0.4, 0.5, 0.6]), 1.0) + spike_trains = [t1, t2, t3, t4] + + f12 = dist_func(t1, t2) + f13 = dist_func(t1, t3) + f14 = dist_func(t1, t4) + f23 = dist_func(t2, t3) + f24 = dist_func(t2, t4) + f34 = dist_func(t3, t4) + + f_matrix = dist_matrix_func(spike_trains) + # check zero diagonal + for i in xrange(4): + assert_equal(0.0, f_matrix[i, i]) + for i in xrange(4): + for j in xrange(i+1, 4): + assert_equal(f_matrix[i, j], f_matrix[j, i]) + assert_equal(f12, f_matrix[1, 0]) + assert_equal(f13, f_matrix[2, 0]) + assert_equal(f14, f_matrix[3, 0]) + assert_equal(f23, f_matrix[2, 1]) + assert_equal(f24, f_matrix[3, 1]) + assert_equal(f34, f_matrix[3, 2]) + + +def test_isi_matrix(): + check_dist_matrix(spk.isi_distance, spk.isi_distance_matrix) + + +def test_spike_matrix(): + check_dist_matrix(spk.spike_distance, spk.spike_distance_matrix) def test_regression_spiky(): diff --git a/test/test_spikes.py b/test/test_spikes.py index d650d5d..b12099e 100644 --- a/test/test_spikes.py +++ b/test/test_spikes.py @@ -66,6 +66,7 @@ def test_merge_spike_trains(): # first load the data spike_trains = spk.load_spike_trains_from_txt("test/PySpike_testdata.txt", time_interval=(0, 4000)) + spikes = spk.merge_spike_trains([spike_trains[0], spike_trains[1]]) # test if result is sorted assert((spikes == np.sort(spikes)).all()) -- cgit v1.2.3 From 1b2aa84e7d642c7a5f4b99ca83b5ca25d6905960 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Fri, 21 Nov 2014 17:40:10 +0100 Subject: added spike generation function --- pyspike/__init__.py | 2 +- pyspike/cython_distance.pyx | 21 +++++++++++++++------ pyspike/spikes.py | 40 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 56 insertions(+), 7 deletions(-) (limited to 'pyspike/__init__.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index d2d5b57..d700e7a 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -12,4 +12,4 @@ from distances import isi_profile, isi_distance, \ isi_profile_multi, isi_distance_multi, isi_distance_matrix, \ spike_profile_multi, spike_distance_multi, spike_distance_matrix from spikes import add_auxiliary_spikes, load_spike_trains_from_txt, \ - spike_train_from_string, merge_spike_trains + spike_train_from_string, merge_spike_trains, generate_poisson_spikes diff --git a/pyspike/cython_distance.pyx b/pyspike/cython_distance.pyx index 178fcba..779ff94 100644 --- a/pyspike/cython_distance.pyx +++ b/pyspike/cython_distance.pyx @@ -122,6 +122,15 @@ cdef inline double get_min_dist_cython(double spike_time, return d +############################################################ +# isi_avrg_cython +############################################################ +cdef inline double isi_avrg_cython(double isi1, double isi2) nogil: + return 0.5*(isi1+isi2)*(isi1+isi2) + # alternative definition to obtain ~ 0.5 for Poisson spikes + # return 0.5*(isi1*isi1+isi2*isi2) + + 
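# [Editor's note, not part of the patch] A hedged pure-Python sketch of the
# normalization computed by isi_avrg_cython above: with the mean interspike
# interval <isi> = (isi1+isi2)/2, the expression 0.5*(isi1+isi2)*(isi1+isi2)
# equals 2*<isi>**2, i.e. twice the squared mean ISI, which is the usual
# normalization of the SPIKE-distance. The helper name isi_avrg_python is
# made up for illustration.
def isi_avrg_python(isi1, isi2):
    # same quantity as isi_avrg_cython, written out in plain Python
    mean_isi = 0.5 * (isi1 + isi2)
    return 2.0 * mean_isi ** 2  # == 0.5*(isi1+isi2)**2

# quick numerical check: 0.5*(0.3+0.5)**2 = 0.32
assert abs(isi_avrg_python(0.3, 0.5) - 0.32) < 1e-12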
############################################################ # spike_distance_cython ############################################################ @@ -155,7 +164,7 @@ def spike_distance_cython(double[:] t1, isi2 = max(t2[1]-t2[0], t2[2]-t2[1]) s1 = dt_f1*(t1[1]-t1[0])/isi1 s2 = dt_f2*(t2[1]-t2[0])/isi2 - y_starts[0] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + y_starts[0] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) while True: # print(index, index1, index2) if t1[index1+1] < t2[index2+1]: index1 += 1 if index1+1 >= N1: break spike_events[index] = t1[index1] s1 = dt_p1 s2 = (dt_p2*(t2[index2+1]-t1[index1]) + dt_f2*(t1[index1]-t2[index2])) / isi2 - y_ends[index-1] = (s1*isi2 + s2*isi1)/(0.5*(isi1+isi2)*(isi1+isi2)) + y_ends[index-1] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) # now the next interval start value dt_f1 = get_min_dist_cython(t1[index1+1], t2, N2, index2) isi1 = t1[index1+1]-t1[index1] # s2 is the same as above, thus we can compute y2 immediately - y_starts[index] = (s1*isi2 + s2*isi1)/(0.5*(isi1+isi2)*(isi1+isi2)) + y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) elif t1[index1+1] > t2[index2+1]: index2 += 1 if index2+1 >= N2: break spike_events[index] = t2[index2] s1 = (dt_p1*(t1[index1+1]-t2[index2]) + dt_f1*(t2[index2]-t1[index1])) / isi1 s2 = dt_p2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)*(isi1+isi2)) + y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) # now the next interval start value dt_f2 = get_min_dist_cython(t2[index2+1], t1, N1, index1) #s2 = dt_f2 isi2 = t2[index2+1]-t2[index2] # s2 is the same as above, thus we can compute y2 immediately - y_starts[index] = (s1*isi2 + s2*isi1)/(0.5*(isi1+isi2)*(isi1+isi2)) + y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) else: # t1[index1+1] == t2[index2+1] - generate only one event index1 += 1 index2 += 1 @@ -214,7 +223,7 @@ def spike_distance_cython(double[:] t1, isi2 = max(t2[N2-1]-t2[N2-2], t2[N2-2]-t2[N2-3]) s1 = dt_p1*(t1[N1-1]-t1[N1-2])/isi1 s2 = dt_p2*(t2[N2-1]-t2[N2-2])/isi2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)*(isi1+isi2)) + y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) # end nogil # use only the data added above diff --git a/pyspike/spikes.py b/pyspike/spikes.py index f7172c9..aa25c48 100644 --- a/pyspike/spikes.py +++ b/pyspike/spikes.py @@ -129,3 +129,43 @@ def merge_spike_trains(spike_trains): index_list = index_list[index_list != i] vals = [spike_trains[n][indices[n]] for n in index_list] return merged_spikes + + +############################################################ +# generate_poisson_spikes +############################################################ +def generate_poisson_spikes(rate, time_interval, add_aux_spikes=True): + """ Generates a Poisson spike train with the given rate in the given time + interval + + :param rate: The rate of the spike trains + :param time_interval: A pair (T_start, T_end) of values representing the + start and end time of the spike train measurement or + a single value representing the end time, the T_start + is then assumed to be 0. Auxiliary spikes will be added + to the spike train at the beginning and end of this + interval, if they are not yet present. 
+ + :type time_interval: pair of doubles or double + :returns: Poisson spike train + """ + try: + T_start = time_interval[0] + T_end = time_interval[1] + except: + T_start = 0 + T_end = time_interval + # roughly how many spikes are required to fill the interval + N = max(1, int(1.2 * rate * (T_end-T_start))) + N_append = max(1, int(0.1 * rate * (T_end-T_start))) + intervals = np.random.exponential(1.0/rate, N) + # make sure we have enough spikes + while T_start + sum(intervals) < T_end: + intervals = np.append(intervals, + np.random.exponential(1.0/rate, N_append)) + spikes = T_start + np.cumsum(intervals) + spikes = spikes[spikes < T_end] + if add_aux_spikes: + return add_auxiliary_spikes(spikes, time_interval) + else: + return spikes -- cgit v1.2.3 From 7c8391298843e3ead55896e075fe28d5fe5bf795 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Fri, 12 Dec 2014 16:44:25 +0100 Subject: +spike synchronization, python impl only --- pyspike/__init__.py | 4 +++- pyspike/distances.py | 59 +++++++++++++++++++++++++++++++++++++++++++++++ pyspike/function.py | 9 ++++++++ pyspike/python_backend.py | 54 +++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 125 insertions(+), 1 deletion(-) (limited to 'pyspike/__init__.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index d700e7a..dca5722 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -9,7 +9,9 @@ __all__ = ["function", "distances", "spikes"] from function import PieceWiseConstFunc, PieceWiseLinFunc, average_profile from distances import isi_profile, isi_distance, \ spike_profile, spike_distance, \ + spike_sync_profile, \ isi_profile_multi, isi_distance_multi, isi_distance_matrix, \ spike_profile_multi, spike_distance_multi, spike_distance_matrix, \ + spike_sync_profile_multi from spikes import add_auxiliary_spikes, load_spike_trains_from_txt, \ spike_train_from_string, merge_spike_trains, generate_poisson_spikes diff --git a/pyspike/distances.py b/pyspike/distances.py index 34f7d78..fbedce5 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -9,6 +9,7 @@ Distributed under the BSD License import numpy as np import threading +from functools import partial from pyspike import PieceWiseConstFunc, PieceWiseLinFunc @@ -128,6 +129,42 @@ def spike_distance(spikes1, spikes2, interval=None): return spike_profile(spikes1, spikes2).avrg(interval) +############################################################ +# spike_sync_profile +############################################################ +def spike_sync_profile(spikes1, spikes2, k=3): + + assert k > 0 + + # cython implementation + try: + from cython_distance import cumulative_sync_cython \ + as cumulative_sync_impl + except ImportError: +# print("Warning: spike_distance_cython not found. 
Make sure that \ # PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ # Falling back to slow python backend.") + # use python backend + from python_backend import cumulative_sync_python \ + as cumulative_sync_impl + + st, c = cumulative_sync_impl(spikes1, spikes2) + + # print c + # print 2*(c[-1]-c[0])/(len(spikes1)+len(spikes2)-2) + + dc = np.zeros(len(c)) + dc[k:-k] = (c[2*k:] - c[:-2*k]) / k + for n in xrange(1, k): + dc[n] = (c[2*n] - c[0]) / k + dc[-n-1] = (c[-1]-c[-2*n-1]) / k + dc[0] = dc[1] + dc[-1] = dc[-2] + # dc[-1] = (c[-1]-c[-2])/k + # print dc + return PieceWiseConstFunc(st, dc) + + ############################################################ # _generic_profile_multi ############################################################ @@ -278,6 +315,28 @@ def spike_profile_multi(spike_trains, indices=None): return _generic_profile_multi(spike_trains, spike_profile, indices) +############################################################ +# spike_sync_profile_multi +############################################################ +def spike_sync_profile_multi(spike_trains, indices=None, k=3): + """ Computes the multi-variate spike synchronization profile for a set of + spike trains. That is the average spike synchronization of all pairs of + spike trains: + :math:`S_ss(t) = 2/(N(N-1)) sum_{<i,j>} S_{ss}^{i, j}`, + where the sum goes over all pairs <i,j> + + :param spike_trains: list of spike trains + :param indices: list of indices defining which spike trains to use, + if None all given spike trains are used (default=None) + :type indices: list or None + :returns: The averaged spike profile :math:`<S_ss>(t)` + :rtype: :class:`pyspike.function.PieceWiseConstFunc` + + """ + prof_func = partial(spike_sync_profile, k=k) + return _generic_profile_multi(spike_trains, prof_func, indices) + + ############################################################ # spike_distance_multi ############################################################ diff --git a/pyspike/function.py b/pyspike/function.py index 0c0a391..662606c 100644 --- a/pyspike/function.py +++ b/pyspike/function.py @@ -136,6 +136,15 @@ class PieceWiseConstFunc(object): a /= int_length return a + def avrg_function_value(self): + """ Computes the average function value of the piece-wise const + function: :math:`a = 1/N sum_i f_i` where N is the number of intervals. + + :returns: the average a. + :rtype: float + """ + return sum(self.y)/(len(self.y)) + def add(self, f): """ Adds another PieceWiseConst function to this function. Note: only functions defined on the same interval can be summed. 
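[Editor's note, not part of the patch] A minimal usage sketch for the spike synchronization profile introduced in this commit; hedged, since the commit itself is marked "python impl only" and the backend may still be incomplete. The spike times below are made up for illustration; add_auxiliary_spikes, spike_sync_profile and PieceWiseConstFunc.avrg() all exist at this point in the history.

import numpy as np
import pyspike as spk

# two short spike trains on [0, 1] with auxiliary edge spikes added
spikes1 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6, 0.7]), 1.0)
spikes2 = spk.add_auxiliary_spikes(np.array([0.3, 0.45, 0.8, 0.9]), 1.0)
# moving average over 2*k coincidence indicators around each spike
f = spk.spike_sync_profile(spikes1, spikes2, k=3)
x, y = f.get_plottable_data()
print("average spike synchronization: %.8f" % f.avrg())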
diff --git a/pyspike/python_backend.py b/pyspike/python_backend.py index 874c689..b85262d 100644 --- a/pyspike/python_backend.py +++ b/pyspike/python_backend.py @@ -188,6 +188,60 @@ def spike_distance_python(spikes1, spikes2): return spike_events[:index+1], y_starts[:index], y_ends[:index] +############################################################ +# cumulative_sync_python +############################################################ +def cumulative_sync_python(spikes1, spikes2): + + def get_tau(spikes1, spikes2, i, j): + return 0.5*min([spikes1[i]-spikes1[i-1], spikes1[i+1]-spikes1[i], + spikes2[j]-spikes2[j-1], spikes2[j+1]-spikes2[j]]) + N1 = len(spikes1) + N2 = len(spikes2) + i = 0 + j = 0 + n = 0 + st = np.zeros(N1 + N2 - 2) + c = np.zeros(N1 + N2 - 3) + c[0] = 0 + st[0] = 0 + while n < N1 + N2: + if spikes1[i+1] < spikes2[j+1]: + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + st[n] = spikes1[i] + if spikes1[i]-spikes2[j] > tau: + c[n] = c[n-1] + else: + c[n] = c[n-1]+1 + elif spikes1[i+1] > spikes2[j+1]: + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + st[n] = spikes2[j] + if spikes2[j]-spikes1[i] > tau: + c[n] = c[n-1] + else: + c[n] = c[n-1]+1 + else: # spikes1[i+1] = spikes2[j+1] + j += 1 + i += 1 + if i == N1-1 or j == N2-1: + break + n += 1 + st[n] = spikes1[i] + c[n] = c[n-1] + n += 1 + st[n] = spikes1[i] + c[n] = c[n-1]+1 + c[0] = 0 + st[0] = spikes1[0] + st[-1] = spikes1[-1] + + return st, c + + ############################################################ # add_piece_wise_const_python ############################################################ -- cgit v1.2.3 From 8c98f0043fa785b8352b3c685615da24b30e6149 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Fri, 26 Dec 2014 15:31:15 -0600 Subject: spike sync --- pyspike/__init__.py | 5 +- pyspike/distances.py | 57 ++++++++++++------ pyspike/function.py | 149 +++++++++++++++++++++++++++++++++++++++++++--- pyspike/python_backend.py | 82 ++++++++++++++++++++++++- test/test_distance.py | 54 +++++++++++++++++ 5 files changed, 316 insertions(+), 31 deletions(-) (limited to 'pyspike/__init__.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index dca5722..fa90d99 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -6,10 +6,11 @@ Distributed under the BSD License __all__ = ["function", "distances", "spikes"] -from function import PieceWiseConstFunc, PieceWiseLinFunc, average_profile +from function import PieceWiseConstFunc, PieceWiseLinFunc, IntervalSequence,\ + average_profile from distances import isi_profile, isi_distance, \ spike_profile, spike_distance, \ - spike_sync_profile, \ + spike_sync_profile, spike_sync_distance, \ isi_profile_multi, isi_distance_multi, isi_distance_matrix, \ spike_profile_multi, spike_distance_multi, spike_distance_matrix, \ spike_sync_profile_multi diff --git a/pyspike/distances.py b/pyspike/distances.py index c28fd7a..38c5cc2 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -11,7 +11,7 @@ import numpy as np import threading from functools import partial -from pyspike import PieceWiseConstFunc, PieceWiseLinFunc +from pyspike import PieceWiseConstFunc, PieceWiseLinFunc, IntervalSequence ############################################################ @@ -148,23 +148,34 @@ def spike_sync_profile(spikes1, spikes2, k=3): from python_backend import coincidence_python \ as coincidence_impl - st, c = coincidence_impl(spikes1, spikes2) + st, J = coincidence_impl(spikes1, spikes2) - dc = np.zeros(len(c)) - for i in xrange(2*k): - dc[k:-k] += c[i:-2*k+i] + N 
= len(J) - for n in xrange(0, k): - for i in xrange(n+k): - dc[n] += c[i] - dc[-n-1] += c[-i-1] - for i in xrange(k-n-1): - dc[n] += c[i] - dc[-n-1] += c[-i-1] + # compute the cumulative sum, include some extra values for boundary + # conditions + c = np.zeros(N + 2*k) + c[k:-k] = np.cumsum(J) + # set the boundary values + # on the left: c_0 = -c_1, c_{-1} = -c_2, ..., c_{-k+1} = -c_k + # on the right: c_{N+1} = c_N, c_{N+2} = 2*c_N - c_{N-1}, + # c_{N+3} = 2*c_N - c_{N-2}, ..., c_{N+k} = 2*c_N - c_{N-k+1} + for n in xrange(k): + c[k-n-1] = -c[k+n] + c[-k+n] = 2*c[-k-1] - c[-k-1-n] + # with the right boundary values, the differences become trivial + J_w = c[2*k:] - c[:-2*k] + # normalize to half the interval width + J_w *= 1.0/k - dc *= 1.0/k + return IntervalSequence(st, J_w) - return PieceWiseConstFunc(st, dc) + +############################################################ +# spike_sync_distance +############################################################ +def spike_sync_distance(spikes1, spikes2, k=3): + return spike_sync_profile(spikes1, spikes2, k).avrg() ############################################################ @@ -201,8 +212,7 @@ def _generic_profile_multi(spike_trains, pair_distance_func, indices=None): for (i, j) in pairs[1:]: current_dist = pair_distance_func(spike_trains[i], spike_trains[j]) average_dist.add(current_dist) # add to the average - average_dist.mul_scalar(1.0/len(pairs)) # normalize - return average_dist + return average_dist, len(pairs) ############################################################ @@ -273,7 +283,10 @@ def isi_profile_multi(spike_trains, indices=None): :returns: The averaged isi profile :math:`<S_isi>(t)` :rtype: :class:`pyspike.function.PieceWiseConstFunc` """ - return _generic_profile_multi(spike_trains, isi_profile, indices) + average_dist, M = _generic_profile_multi(spike_trains, isi_profile, + indices) + average_dist.mul_scalar(1.0/M) # normalize + return average_dist ############################################################ @@ -314,7 +327,10 @@ def spike_profile_multi(spike_trains, indices=None): :rtype: :class:`pyspike.function.PieceWiseLinFunc` """ - return _generic_profile_multi(spike_trains, spike_profile, indices) + average_dist, M = _generic_profile_multi(spike_trains, spike_profile, + indices) + average_dist.mul_scalar(1.0/M) # normalize + return average_dist ############################################################ @@ -336,7 +352,10 @@ def spike_sync_profile_multi(spike_trains, indices=None, k=3): """ prof_func = partial(spike_sync_profile, k=k) - return _generic_profile_multi(spike_trains, prof_func, indices) + average_dist, M = _generic_profile_multi(spike_trains, prof_func, + indices) + # average_dist.mul_scalar(1.0/M) # no normalization here! + return average_dist ############################################################ diff --git a/pyspike/function.py b/pyspike/function.py index 662606c..f5a1133 100644 --- a/pyspike/function.py +++ b/pyspike/function.py @@ -136,15 +136,6 @@ class PieceWiseConstFunc(object): a /= int_length return a - def avrg_function_value(self): - """ Computes the average function value of the piece-wise const - function: :math:`a = 1/N sum_i f_i` where N is the number of intervals. - - :returns: the average a. - :rtype: float - """ - return sum(self.y)/(len(self.y)) - def add(self, f): """ Adds another PieceWiseConst function to this function. Note: only functions defined on the same interval can be summed. @@ -179,6 +170,146 @@ that PySpike is installed by running\n 'python setup.py build_ext --inplace'! 
\ self.y *= fac + +############################################################## +# IntervalSequence +############################################################## +class IntervalSequence(object): + """ A class representing a sequence of values defined in some interval. + This is very similar to a `PieceWiseConstFunc`, but with different + averaging and addition. + """ + + def __init__(self, x, y): + """ Constructs the interval sequence. + + :param x: array of length N+1 defining the edges of the intervals. + :param y: array of length N defining the values at the intervals. + """ + # convert parameters to arrays, also ensures copying + self.x = np.array(x) + self.y = np.array(y) + self.extra_zero_intervals = 0 + + def copy(self): + """ Returns a copy of itself + + :rtype: :class:`IntervalSequence` + """ + return IntervalSequence(self.x, self.y) + + def almost_equal(self, other, decimal=14): + """ Checks if the function is equal to another function up to `decimal` + precision. + + :param other: another :class:`IntervalSequence` + :returns: True if the two functions are equal up to `decimal` decimals, + False otherwise + :rtype: bool + """ + eps = 10.0**(-decimal) + return np.allclose(self.x, other.x, atol=eps, rtol=0.0) and \ + np.allclose(self.y, other.y, atol=eps, rtol=0.0) + + def get_plottable_data(self): + """ Returns two arrays containing x- and y-coordinates for immediate + plotting of the interval sequence. + + :returns: (x_plot, y_plot) containing plottable data + :rtype: pair of np.array + + Example:: + + x, y = f.get_plottable_data() + plt.plot(x, y, '-o', label="Piece-wise const function") + """ + + x_plot = np.empty(2*len(self.x)-2) + x_plot[0] = self.x[0] + x_plot[1::2] = self.x[1:] + x_plot[2::2] = self.x[1:-1] + y_plot = np.empty(2*len(self.y)) + y_plot[::2] = self.y + normalization = 1.0 * (len(self.y)-1) / (len(self.y) + + self.extra_zero_intervals-1) + y_plot[1::2] = self.y + + return x_plot, y_plot * normalization + + def integral(self, interval=None): + """ Returns the integral over the given interval. For the interval + sequence this amounts to the sum over all values. + + :param interval: integration interval given as a pair of floats, if + None the integral over the whole function is computed. + :type interval: Pair of floats or None. + :returns: the integral + :rtype: float + """ + if interval is None: + # no interval given, integrate over the whole spike train + # don't count the first value, which is zero by definition + a = np.sum(self.y) + else: + raise NotImplementedError() + return a + + def avrg(self, interval=None): + """ Computes the average of the interval sequence: + :math:`a = 1/N sum f_n ` where N is the number of intervals. + + :param interval: averaging interval given as a pair of floats, a + sequence of pairs for averaging multiple intervals, or + None, if None the average over the whole function is + computed. + :type interval: Pair, sequence of pairs, or None. + :returns: the average a. + :rtype: float + """ + if interval is None: + # no interval given, average over the whole spike train + # don't count the first interval for normalization + return self.integral() / (len(self.y)-1+self.extra_zero_intervals) + else: + raise NotImplementedError() + + def add(self, f): + """ Adds another `IntervalSequence` function to this function. + Note: only functions defined on the same interval can be summed. + + :param f: :class:`IntervalSequence` function to be added. 
+ :rtype: None + """ + assert self.x[0] == f.x[0], "The functions have different intervals" + assert self.x[-1] == f.x[-1], "The functions have different intervals" + + # cython version + try: + from cython_add import add_interval_sequence_cython as \ + add_interval_sequence_impl + except ImportError: +# print("Warning: add_piece_wise_const_cython not found. Make sure \ +# that PySpike is installed by running\n 'python setup.py build_ext --inplace'! \ +# \n Falling back to slow python backend.") + # use python backend + from python_backend import add_interval_sequence_python as \ + add_interval_sequence_impl + + self.x, self.y, extra_intervals = \ + add_interval_sequence_impl(self.x, self.y, f.x, f.y) + self.extra_zero_intervals += extra_intervals + + def mul_scalar(self, fac): + """ Multiplies the function with a scalar value + + :param fac: Value to multiply + :type fac: double + :rtype: None + """ + self.y *= fac + + ############################################################## # PieceWiseLinFunc ############################################################## diff --git a/pyspike/python_backend.py b/pyspike/python_backend.py index 7f8ea8c..154d250 100644 --- a/pyspike/python_backend.py +++ b/pyspike/python_backend.py @@ -289,7 +289,7 @@ def coincidence_python(spikes1, spikes2): n += 1 st[n] = spikes1[i] c[n] = 1 - c[0] = c[2] + #c[0] = c[2] st[0] = spikes1[0] st[-1] = spikes1[-1] @@ -340,6 +340,86 @@ def add_piece_wise_const_python(x1, y1, x2, y2): return x_new[:index+2], y_new[:index+1] +############################################################ +# add_interval_sequence_python +############################################################ +def add_interval_sequence_python(x1, y1, x2, y2): + yscale1 = np.empty_like(y1) + index2 = 1 + # s1 = (len(y1)+len(y2)-2.0) / (len(y1)-1.0) + # s2 = (len(y1)+len(y2)-2.0) / (len(y2)-1.0) + s1 = 1.0 + s2 = 1.0 + for i in xrange(len(y1)): + c = 1 + while index2 < len(x2)-1 and x2[index2] < x1[i+1]: + index2 += 1 + c += 1 + if index2 < len(x2)-1 and x2[index2] == x1[i+1]: + index2 += 1 + # c += 1 + yscale1[i] = s1/c + + yscale2 = np.empty_like(y2) + index1 = 1 + for i in xrange(len(y2)): + c = 1 + while index1 < len(x1)-1 and x1[index1] < x2[i+1]: + index1 += 1 + c += 1 + if index1 < len(x1)-1 and x1[index1] == x2[i+1]: + index1 += 1 + # c += 1 + yscale2[i] = s2/c + + x_new = np.empty(len(x1) + len(x2)) + y_new = np.empty(len(x_new)-1) + x_new[0] = x1[0] + index1 = 0 + index2 = 0 + index = 0 + additional_intervals = 0 + while (index1+1 < len(y1)) and (index2+1 < len(y2)): + y_new[index] = y1[index1]*yscale1[index1] + y2[index2]*yscale2[index2] + index += 1 + # print(index1+1, x1[index1+1], y1[index1+1], x_new[index]) + if x1[index1+1] < x2[index2+1]: + index1 += 1 + x_new[index] = x1[index1] + elif x1[index1+1] > x2[index2+1]: + index2 += 1 + x_new[index] = x2[index2] + else: # x1[index1+1] == x2[index2+1] + # y_new[index] = y1[index1]*yscale1[index1] + \ + # y2[index2]*yscale2[index2] + index1 += 1 + # x_new[index] = x1[index1] + index2 += 1 + # index += 1 + x_new[index] = x1[index1] + additional_intervals += 1 + y_new[index] = y1[index1]*yscale1[index1] + y2[index2]*yscale2[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < len(y1): + x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] + y_new[index+1:index+1+len(y1)-index1-1] = \ + y1[index1+1:]*yscale1[index1+1:] + y2[-1]*yscale2[-1] + index += len(x1)-index1-2 + elif index2+1 < len(y2): + x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] + 
y_new[index+1:index+1+len(y2)-index2-1] = \ + y2[index2+1:]*yscale2[index2+1:] + y1[-1]*yscale1[-1] + index += len(x2)-index2-2 + else: # both arrays reached the end simultaneously + # only the last x-value missing + x_new[index+1] = x1[-1] + # the last value is again the end of the interval + # x_new[index+1] = x1[-1] + # only use the data that was actually filled + + return x_new[:index+2], y_new[:index+1], additional_intervals + + ############################################################ # add_piece_lin_const_python ############################################################ diff --git a/test/test_distance.py b/test/test_distance.py index 7be0d9b..d98069d 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -130,6 +130,60 @@ def test_spike(): decimal=16) +def test_spike_sync(): + spikes1 = np.array([1.0, 2.0, 3.0]) + spikes2 = np.array([2.1]) + spikes1 = spk.add_auxiliary_spikes(spikes1, 4.0) + spikes2 = spk.add_auxiliary_spikes(spikes2, 4.0) + for k in xrange(1, 3): + assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2, k=k), + 0.5, decimal=16) + + spikes2 = np.array([3.1]) + spikes2 = spk.add_auxiliary_spikes(spikes2, 4.0) + for k in xrange(1, 3): + assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2, k=k), + 0.5, decimal=16) + + spikes2 = np.array([1.1]) + spikes2 = spk.add_auxiliary_spikes(spikes2, 4.0) + for k in xrange(1, 3): + assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2, k=k), + 0.5, decimal=16) + + spikes2 = np.array([0.9]) + spikes2 = spk.add_auxiliary_spikes(spikes2, 4.0) + for k in xrange(1, 3): + assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2, k=k), + 0.5, decimal=16) + + spikes1 = np.array([100, 300, 400, 405, 410, 500, 700, 800, + 805, 810, 815, 900]) + spikes2 = np.array([100, 200, 205, 210, 295, 350, 400, 510, + 600, 605, 700, 910]) + spikes3 = np.array([100, 180, 198, 295, 412, 420, 510, 640, + 695, 795, 820, 920]) + spikes1 = spk.add_auxiliary_spikes(spikes1, 1000) + spikes2 = spk.add_auxiliary_spikes(spikes2, 1000) + spikes3 = spk.add_auxiliary_spikes(spikes3, 1000) + for k in xrange(1, 10): + assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2, k=k), + 0.5, decimal=15) + assert_almost_equal(spk.spike_sync_distance(spikes1, spikes3, k=k), + 0.5, decimal=15) + assert_almost_equal(spk.spike_sync_distance(spikes2, spikes3, k=k), + 0.5, decimal=15) + + f1 = spk.spike_sync_profile(spikes1, spikes2, k=1) + f2 = spk.spike_sync_profile(spikes1, spikes3, k=1) + f3 = spk.spike_sync_profile(spikes2, spikes3, k=1) + f = spk.spike_sync_profile_multi([spikes1, spikes2, spikes3], k=1) + # hands on definition of the average multivariate spike synchronization + expected = (f1.integral() + f2.integral() + f3.integral()) / \ + (len(f1.y)+len(f2.y)+len(f3.y)-3) + assert_almost_equal(f.avrg(), expected, decimal=15) + + def check_multi_profile(profile_func, profile_func_multi): # generate spike trains: t1 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6, 0.7]), 1.0) -- cgit v1.2.3 From fed0ceec753fc1a7e5a1e20632de5a9800fe4fb1 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 19 Jan 2015 16:39:17 +0100 Subject: final version for spike sync --- examples/spike_sync.py | 30 +++++++---- pyspike/__init__.py | 4 +- pyspike/distances.py | 36 +++---------- pyspike/function.py | 58 +++++++++----------- pyspike/python_backend.py | 135 ++++++++++++++++++++++------------------------ pyspike/spikes.py | 4 +- test/SPIKE_Sync_Test.txt | 100 ++++++++++++++++++++++++++++++++++ test/test_distance.py | 79 
++++++++++++++------------- 8 files changed, 262 insertions(+), 184 deletions(-) create mode 100644 test/SPIKE_Sync_Test.txt (limited to 'pyspike/__init__.py') diff --git a/examples/spike_sync.py b/examples/spike_sync.py index 464dbb0..535f19f 100644 --- a/examples/spike_sync.py +++ b/examples/spike_sync.py @@ -5,28 +5,36 @@ import matplotlib.pyplot as plt import pyspike as spk -spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", +spike_trains = spk.load_spike_trains_from_txt("../test/SPIKE_Sync_Test.txt", time_interval=(0, 4000)) -print(spike_trains[0]) -print(spike_trains[1]) - -# plt.plot(spike_trains[0], np.ones_like(spike_trains[0]), 'o') -# plt.plot(spike_trains[1], np.zeros_like(spike_trains[1]), 'o') - plt.figure() f = spk.spike_sync_profile(spike_trains[0], spike_trains[1]) +# f = spk.spike_sync_profile(spikes1, spikes2) x, y = f.get_plottable_data() -plt.plot(x, y, '--k', label="SPIKE-SYNC profile") -print(x) -print(y) +plt.plot(x, y, '--ok', label="SPIKE-SYNC profile") +print(f.x) +print(f.y) +print(f.mp) + +print("Average:", f.avrg()) + f = spk.spike_profile(spike_trains[0], spike_trains[1]) x, y = f.get_plottable_data() plt.plot(x, y, '-b', label="SPIKE-profile") -plt.legend(loc="upper left") +plt.axis([0, 4000, -0.1, 1.1]) +plt.legend(loc="center right") + +plt.figure() + +f = spk.spike_sync_profile_multi(spike_trains) +x, y = f.get_plottable_data() +plt.plot(x, y, '-k', label="SPIKE-SYNC profile") + +print("Average:", f.avrg()) plt.show() diff --git a/pyspike/__init__.py b/pyspike/__init__.py index fa90d99..74d52c5 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -6,8 +6,8 @@ Distributed under the BSD License __all__ = ["function", "distances", "spikes"] -from function import PieceWiseConstFunc, PieceWiseLinFunc, IntervalSequence,\ - average_profile +from function import PieceWiseConstFunc, PieceWiseLinFunc, \ + MultipleValueSequence, average_profile from distances import isi_profile, isi_distance, \ spike_profile, spike_distance, \ spike_sync_profile, spike_sync_distance, \ diff --git a/pyspike/distances.py b/pyspike/distances.py index 38c5cc2..5ee8261 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -11,7 +11,7 @@ import numpy as np import threading from functools import partial -from pyspike import PieceWiseConstFunc, PieceWiseLinFunc, IntervalSequence +from pyspike import PieceWiseConstFunc, PieceWiseLinFunc, MultipleValueSequence ############################################################ @@ -132,9 +132,7 @@ def spike_distance(spikes1, spikes2, interval=None): ############################################################ # spike_sync_profile ############################################################ -def spike_sync_profile(spikes1, spikes2, k=3): - - assert k > 0 +def spike_sync_profile(spikes1, spikes2): # cython implementation try: @@ -148,34 +146,16 @@ def spike_sync_profile(spikes1, spikes2, k=3): from python_backend import coincidence_python \ as coincidence_impl - st, J = coincidence_impl(spikes1, spikes2) - - N = len(J) - - # compute the cumulative sum, include some extra values for boundary - # conditions - c = np.zeros(N + 2*k) - c[k:-k] = np.cumsum(J) - # set the boundary values - # on the left: c_0 = -c_1, c_{-1} = -c_2, ..., c{-k+1} = c_k - # on the right: c_{N+1} = c_N, c_{N+2} = 2*c_N - c_{N-1}, - # c_{N+2} = 2*c_N - c_{N-2}, ..., c_{N+k} = 2*c_N - c_{N-k+1} - for n in xrange(k): - c[k-n-1] = -c[k+n] - c[-k+n] = 2*c[-k-1] - c[-k-1-n] - # with the right boundary values, the differences become trivial - J_w = 
c[2*k:] - c[:-2*k]
-    # normalize to half the interval width
-    J_w *= 1.0/k
+    times, coincidences, multiplicity = coincidence_impl(spikes1, spikes2)

-    return IntervalSequence(st, J_w)
+    return MultipleValueSequence(times, coincidences, multiplicity)


 ############################################################
 # spike_sync_distance
 ############################################################
-def spike_sync_distance(spikes1, spikes2, k=3):
-    return spike_sync_profile(spikes1, spikes2, k).avrg()
+def spike_sync_distance(spikes1, spikes2):
+    return spike_sync_profile(spikes1, spikes2).avrg()


 ############################################################
@@ -336,7 +316,7 @@ def spike_profile_multi(spike_trains, indices=None):
 ############################################################
 # spike_profile_multi
 ############################################################
-def spike_sync_profile_multi(spike_trains, indices=None, k=3):
+def spike_sync_profile_multi(spike_trains, indices=None):
     """ Computes the multi-variate spike synchronization profile for a set
     of spike trains. That is the average spike synchronization of all pairs
     of spike trains:
@@ -351,7 +331,7 @@ def spike_sync_profile_multi(spike_trains, indices=None, k=3):
     :rtype: :class:`pyspike.function.PieceWiseConstFunc`
     """
-    prof_func = partial(spike_sync_profile, k=k)
+    prof_func = partial(spike_sync_profile)
     average_dist, M = _generic_profile_multi(spike_trains, prof_func,
                                              indices)
     # average_dist.mul_scalar(1.0/M)  # no normalization here!
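
Before the profile class itself is reworked below, a minimal usage sketch of the now parameter-free spike-sync API at this point in the series (toy spike times; names as exported by pyspike/__init__.py above):

    import numpy as np
    import pyspike as spk

    # toy trains with auxiliary edge spikes at 0 and T=4.0, as in the tests
    t1 = spk.add_auxiliary_spikes(np.array([1.0, 2.0, 3.0]), 4.0)
    t2 = spk.add_auxiliary_spikes(np.array([0.9, 3.1]), 4.0)

    f = spk.spike_sync_profile(t1, t2)
    # the scalar measure is the multiplicity-weighted average of the profile;
    # spike_sync_distance(t1, t2) returns exactly this value
    print(f.avrg() == np.sum(f.y[1:-1]) / np.sum(f.mp[1:-1]))  # -> True
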
""" # convert parameters to arrays, also ensures copying self.x = np.array(x) self.y = np.array(y) - self.extra_zero_intervals = 0 + self.mp = np.array(multiplicity) def copy(self): """ Returns a copy of itself :rtype: :class:`IntervalSequence` """ - return IntervalSequence(self.x, self.y) + return MultipleValueSequence(self.x, self.y, self.mp) def almost_equal(self, other, decimal=14): """ Checks if the function is equal to another function up to `decimal` @@ -209,9 +209,10 @@ class IntervalSequence(object): """ eps = 10.0**(-decimal) return np.allclose(self.x, other.x, atol=eps, rtol=0.0) and \ - np.allclose(self.y, other.y, atol=eps, rtol=0.0) + np.allclose(self.y, other.y, atol=eps, rtol=0.0) and \ + np.allclose(self.mp, other.mp, atol=eps, rtol=0.0) - def get_plottable_data(self): + def get_plottable_data(self, k=0): """ Returns two arrays containing x- and y-coordinates for immeditate plotting of the interval sequence. @@ -224,17 +225,10 @@ class IntervalSequence(object): plt.plot(x, y, '-o', label="Piece-wise const function") """ - x_plot = np.empty(2*len(self.x)-2) - x_plot[0] = self.x[0] - x_plot[1::2] = self.x[1:] - x_plot[2::2] = self.x[1:-1] - y_plot = np.empty(2*len(self.y)) - y_plot[::2] = self.y - normalization = 1.0 * (len(self.y)-1) / (len(self.y) + - self.extra_zero_intervals-1) - y_plot[1::2] = self.y + if k > 0: + raise NotImplementedError() - return x_plot, y_plot * normalization + return 1.0*self.x, 1.0*self.y/self.mp def integral(self, interval=None): """ Returns the integral over the given interval. For the interval @@ -250,7 +244,7 @@ class IntervalSequence(object): if interval is None: # no interval given, integrate over the whole spike train # don't count the first value, which is zero by definition - a = np.sum(self.y) + a = 1.0*np.sum(self.y[1:-1]) else: raise NotImplementedError() return a @@ -270,15 +264,15 @@ class IntervalSequence(object): if interval is None: # no interval given, average over the whole spike train # don't count the first interval for normalization - return self.integral() / (len(self.y)-1+self.extra_zero_intervals) + return self.integral() / np.sum(self.mp[1:-1]) else: raise NotImplementedError() def add(self, f): - """ Adds another `IntervalSequence` function to this function. + """ Adds another `MultipleValueSequence` function to this function. Note: only functions defined on the same interval can be summed. - :param f: :class:`IntervalSequence` function to be added. + :param f: :class:`MultipleValueSequence` function to be added. :rtype: None """ assert self.x[0] == f.x[0], "The functions have different intervals" @@ -293,12 +287,12 @@ class IntervalSequence(object): # that PySpike is installed by running\n 'python setup.py build_ext --inplace'! 
\ # \n Falling back to slow python backend.") # use python backend - from python_backend import add_interval_sequence_python as \ - add_interval_sequence_impl + from python_backend import add_multiple_value_sequence_python as \ + add_multiple_value_sequence_impl - self.x, self.y, extra_intervals = \ - add_interval_sequence_impl(self.x, self.y, f.x, f.y) - self.extra_zero_intervals += extra_intervals + self.x, self.y, self.mp = \ + add_multiple_value_sequence_impl(self.x, self.y, self.mp, + f.x, f.y, f.mp) def mul_scalar(self, fac): """ Multiplies the function with a scalar value diff --git a/pyspike/python_backend.py b/pyspike/python_backend.py index 154d250..bbbd572 100644 --- a/pyspike/python_backend.py +++ b/pyspike/python_backend.py @@ -248,52 +248,69 @@ def cumulative_sync_python(spikes1, spikes2): def coincidence_python(spikes1, spikes2): def get_tau(spikes1, spikes2, i, j): - return 0.5*min([spikes1[i]-spikes1[i-1], spikes1[i+1]-spikes1[i], - spikes2[j]-spikes2[j-1], spikes2[j+1]-spikes2[j]]) + m = 1E100 # some huge number + if i < len(spikes1)-2: + m = min(m, spikes1[i+1]-spikes1[i]) + if j < len(spikes2)-2: + m = min(m, spikes2[j+1]-spikes2[j]) + if i > 1: + m = min(m, spikes1[i]-spikes1[i-1]) + if j > 1: + m = min(m, spikes2[j]-spikes2[j-1]) + return 0.5*m N1 = len(spikes1) N2 = len(spikes2) i = 0 j = 0 n = 0 - st = np.zeros(N1 + N2 - 2) - c = np.zeros(N1 + N2 - 3) - c[0] = 0 - st[0] = 0 - while n < N1 + N2: + st = np.zeros(N1 + N2 - 2) # spike times + c = np.zeros(N1 + N2 - 2) # coincidences + mp = np.ones(N1 + N2 - 2) # multiplicity + while n < N1 + N2 - 2: if spikes1[i+1] < spikes2[j+1]: i += 1 n += 1 tau = get_tau(spikes1, spikes2, i, j) st[n] = spikes1[i] - if spikes1[i]-spikes2[j] > tau: - c[n] = 0 - else: + if j > 0 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 c[n] = 1 + c[n-1] = 1 elif spikes1[i+1] > spikes2[j+1]: j += 1 n += 1 tau = get_tau(spikes1, spikes2, i, j) st[n] = spikes2[j] - if spikes2[j]-spikes1[i] > tau: - c[n] = 0 - else: + if i > 0 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 c[n] = 1 + c[n-1] = 1 else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains j += 1 i += 1 if i == N1-1 or j == N2-1: break n += 1 + # add only one event, but with coincidence 2 and multiplicity 2 st[n] = spikes1[i] - c[n] = 0 - n += 1 - st[n] = spikes1[i] - c[n] = 1 - #c[0] = c[2] + c[n] = 2 + mp[n] = 2 + + st = st[:n+2] + c = c[:n+2] + mp = mp[:n+2] + st[0] = spikes1[0] st[-1] = spikes1[-1] + c[0] = c[1] + c[-1] = c[-2] + mp[0] = mp[1] + mp[-1] = mp[-2] - return st, c + return st, c, mp ############################################################ @@ -341,83 +358,59 @@ def add_piece_wise_const_python(x1, y1, x2, y2): ############################################################ -# add_interval_sequence_python +# add_multiple_value_sequence_python ############################################################ -def add_interval_sequence_python(x1, y1, x2, y2): - yscale1 = np.empty_like(y1) - index2 = 1 - # s1 = (len(y1)+len(y2)-2.0) / (len(y1)-1.0) - # s2 = (len(y1)+len(y2)-2.0) / (len(y2)-1.0) - s1 = 1.0 - s2 = 1.0 - for i in xrange(len(y1)): - c = 1 - while index2 < len(x2)-1 and x2[index2] < x1[i+1]: - index2 += 1 - c += 1 - if index2 < len(x2)-1 and x2[index2] == x1[i+1]: - index2 += 1 - # c += 1 - yscale1[i] = s1/c - - yscale2 = np.empty_like(y2) - index1 = 1 - for i in xrange(len(y2)): - c = 1 - while index1 < 
len(x1)-1 and x1[index1] < x2[i+1]: - index1 += 1 - c += 1 - if index1 < len(x1)-1 and x1[index1] == x2[i+1]: - index1 += 1 - # c += 1 - yscale2[i] = s2/c +def add_multiple_value_sequence_python(x1, y1, mp1, x2, y2, mp2): x_new = np.empty(len(x1) + len(x2)) - y_new = np.empty(len(x_new)-1) + y_new = np.empty_like(x_new) + mp_new = np.empty_like(x_new) x_new[0] = x1[0] index1 = 0 index2 = 0 index = 0 - additional_intervals = 0 while (index1+1 < len(y1)) and (index2+1 < len(y2)): - y_new[index] = y1[index1]*yscale1[index1] + y2[index2]*yscale2[index2] - index += 1 - # print(index1+1, x1[index1+1], y1[index1+1], x_new[index]) if x1[index1+1] < x2[index2+1]: index1 += 1 + index += 1 x_new[index] = x1[index1] + y_new[index] = y1[index1] + mp_new[index] = mp1[index1] elif x1[index1+1] > x2[index2+1]: index2 += 1 + index += 1 x_new[index] = x2[index2] + y_new[index] = y2[index2] + mp_new[index] = mp2[index2] else: # x1[index1+1] == x2[index2+1] - # y_new[index] = y1[index1]*yscale1[index1] + \ - # y2[index2]*yscale2[index2] index1 += 1 - # x_new[index] = x1[index1] index2 += 1 - # index += 1 + index += 1 x_new[index] = x1[index1] - additional_intervals += 1 - y_new[index] = y1[index1]*yscale1[index1] + y2[index2]*yscale2[index2] + y_new[index] = y1[index1] + y2[index2] + mp_new[index] = mp1[index1] + mp2[index2] # one array reached the end -> copy the contents of the other to the end if index1+1 < len(y1): x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] - y_new[index+1:index+1+len(y1)-index1-1] = \ - y1[index1+1:]*yscale1[index1+1:] + y2[-1]*yscale2[-1] - index += len(x1)-index1-2 + y_new[index+1:index+1+len(x1)-index1-1] = y1[index1+1:] + mp_new[index+1:index+1+len(x1)-index1-1] = mp1[index1+1:] + index += len(x1)-index1-1 elif index2+1 < len(y2): x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] - y_new[index+1:index+1+len(y2)-index2-1] = \ - y2[index2+1:]*yscale2[index2+1:] + y1[-1]*yscale1[-1] - index += len(x2)-index2-2 - else: # both arrays reached the end simultaneously - # only the last x-value missing - x_new[index+1] = x1[-1] + y_new[index+1:index+1+len(x2)-index2-1] = y2[index2+1:] + mp_new[index+1:index+1+len(x2)-index2-1] = mp2[index2+1:] + index += len(x2)-index2-1 + # else: # both arrays reached the end simultaneously + # x_new[index+1] = x1[-1] + # y_new[index+1] = y1[-1] + y2[-1] + # mp_new[index+1] = mp1[-1] + mp2[-1] + + y_new[0] = y_new[1] + mp_new[0] = mp_new[1] + # the last value is again the end of the interval - # x_new[index+1] = x1[-1] # only use the data that was actually filled - - return x_new[:index+2], y_new[:index+1], additional_intervals + return x_new[:index+1], y_new[:index+1], mp_new[:index+1] ############################################################ diff --git a/pyspike/spikes.py b/pyspike/spikes.py index aa25c48..6a3353e 100644 --- a/pyspike/spikes.py +++ b/pyspike/spikes.py @@ -67,7 +67,7 @@ def spike_train_from_string(s, sep=' ', is_sorted=False): # load_spike_trains_txt ############################################################ def load_spike_trains_from_txt(file_name, time_interval=None, - separator=' ', comment='#', sort=True): + separator=' ', comment='#', is_sorted=False): """ Loads a number of spike trains from a text file. Each line of the text file should contain one spike train as a sequence of spike times separated by `separator`. 
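
In practice this loader is the only entry point most scripts need; a minimal sketch (file name from the test data added below; is_sorted is written out only to show the renamed flag, which is passed through to spike_train_from_string):

    import pyspike as spk

    # appends the auxiliary edge spikes 0 and 4000 to every train,
    # because a time_interval is given
    spike_trains = spk.load_spike_trains_from_txt("SPIKE_Sync_Test.txt",
                                                  time_interval=(0, 4000),
                                                  is_sorted=False)
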
Empty lines as well as lines starting with `comment` are @@ -94,7 +94,7 @@ def load_spike_trains_from_txt(file_name, time_interval=None, for line in spike_file: if len(line) > 1 and not line.startswith(comment): # use only the lines with actual data and not commented - spike_train = spike_train_from_string(line, separator, sort) + spike_train = spike_train_from_string(line, separator, is_sorted) if time_interval is not None: # add auxil. spikes if times given spike_train = add_auxiliary_spikes(spike_train, time_interval) spike_trains.append(spike_train) diff --git a/test/SPIKE_Sync_Test.txt b/test/SPIKE_Sync_Test.txt new file mode 100644 index 0000000..b97f777 --- /dev/null +++ b/test/SPIKE_Sync_Test.txt @@ -0,0 +1,100 @@ +61.000000 171.000000 268.000000 278.000000 326.000000 373.000000 400.000000 577.000000 793.000000 796.000000 798.000000 936.000000 994.000000 1026.000000 1083.000000 1097.000000 1187.000000 1228.000000 1400.000000 1522.000000 1554.000000 1579.000000 1661.000000 1895.000000 2040.000000 2082.000000 2264.000000 2502.000000 2689.000000 2922.000000 3093.000000 3276.000000 3495.000000 3693.000000 3900.000000 + +195.000000 400.000000 518.000000 522.000000 565.000000 569.000000 572.000000 630.000000 802.000000 938.000000 1095.000000 1198.000000 1222.000000 1316.000000 1319.000000 1328.000000 1382.000000 1505.000000 1631.000000 1662.000000 1676.000000 1708.000000 1803.000000 1947.000000 1999.000000 2129.000000 2332.000000 2466.000000 2726.000000 2896.000000 3102.000000 3316.000000 3505.000000 3707.000000 3900.000000 + +45.000000 111.000000 282.000000 319.000000 366.000000 400.000000 570.000000 633.000000 673.000000 750.000000 796.000000 1014.000000 1096.000000 1167.000000 1180.000000 1237.000000 1341.000000 1524.000000 1571.000000 1574.000000 1590.000000 1610.000000 1832.000000 1869.000000 1949.000000 1968.000000 2353.000000 2497.000000 2713.000000 2868.000000 3095.000000 3302.000000 3525.000000 3704.000000 3900.000000 + +60.000000 135.000000 204.000000 260.000000 297.000000 361.000000 364.000000 400.000000 438.000000 631.000000 787.000000 794.000000 908.000000 927.000000 1205.000000 1251.000000 1315.000000 1463.000000 1546.000000 1548.000000 1569.000000 1604.000000 1705.000000 1733.000000 1994.000000 2146.000000 2306.000000 2554.000000 2691.000000 2905.000000 3090.000000 3296.000000 3508.000000 3702.000000 3900.000000 + +159.000000 186.000000 308.000000 331.000000 400.000000 624.000000 758.000000 805.000000 811.000000 876.000000 1018.000000 1122.000000 1193.000000 1308.000000 1354.000000 1524.000000 1550.000000 1672.000000 1728.000000 1738.000000 1899.000000 1919.000000 1980.000000 1991.000000 2050.000000 2124.000000 2308.000000 2450.000000 2703.000000 2876.000000 3096.000000 3288.000000 3494.000000 3709.000000 3900.000000 + +23.000000 134.000000 278.000000 400.000000 443.000000 555.000000 589.000000 597.000000 750.000000 754.000000 807.000000 1207.000000 1302.000000 1386.000000 1390.000000 1391.000000 1411.000000 1467.000000 1564.000000 1621.000000 1672.000000 1707.000000 1752.000000 1889.000000 1983.000000 2026.000000 2277.000000 2527.000000 2685.000000 2903.000000 3124.000000 3326.000000 3499.000000 3695.000000 3900.000000 + +65.000000 120.000000 142.000000 200.000000 331.000000 400.000000 683.000000 701.000000 707.000000 788.000000 795.000000 880.000000 948.000000 972.000000 1197.000000 1282.000000 1354.000000 1451.000000 1543.000000 1549.000000 1555.000000 1624.000000 1723.000000 1766.000000 2023.000000 2071.000000 2285.000000 2478.000000 2766.000000 2883.000000 3100.000000 
3292.000000 3502.000000 3700.000000 3900.000000 + +83.000000 122.000000 214.000000 354.000000 400.000000 505.000000 614.000000 621.000000 697.000000 788.000000 846.000000 871.000000 878.000000 1174.000000 1204.000000 1215.000000 1317.000000 1353.000000 1365.000000 1453.000000 1463.000000 1540.000000 1832.000000 2016.000000 2023.000000 2290.000000 2449.000000 2708.000000 2881.000000 3090.000000 3329.000000 3489.000000 3705.000000 3900.000000 + +56.000000 62.000000 143.000000 226.000000 259.000000 400.000000 439.000000 441.000000 569.000000 572.000000 639.000000 697.000000 808.000000 1162.000000 1178.000000 1250.000000 1360.000000 1427.000000 1598.000000 1667.000000 1671.000000 1780.000000 1865.000000 1902.000000 1972.000000 2092.000000 2318.000000 2548.000000 2741.000000 2888.000000 3096.000000 3304.000000 3518.000000 3705.000000 3900.000000 + +110.000000 116.000000 215.000000 400.000000 462.000000 542.000000 602.000000 614.000000 619.000000 795.000000 1166.000000 1196.000000 1240.000000 1252.000000 1268.000000 1295.000000 1405.000000 1561.000000 1597.000000 1725.000000 1750.000000 1759.000000 1877.000000 1948.000000 2053.000000 2119.000000 2339.000000 2527.000000 2672.000000 2874.000000 3137.000000 3312.000000 3488.000000 3698.000000 3900.000000 + +141.000000 159.000000 332.000000 333.000000 400.000000 756.000000 801.000000 843.000000 1161.000000 1208.000000 1225.000000 1246.000000 1418.000000 1448.000000 1501.000000 1559.000000 1578.000000 1684.000000 1751.000000 1797.000000 1815.000000 1818.000000 1948.000000 1975.000000 1989.000000 2110.000000 2360.000000 2453.000000 2704.000000 2906.000000 3106.000000 3286.000000 3491.000000 3697.000000 3900.000000 + +61.000000 145.000000 151.000000 340.000000 400.000000 642.000000 741.000000 801.000000 901.000000 912.000000 939.000000 1072.000000 1180.000000 1216.000000 1271.000000 1336.000000 1344.000000 1584.000000 1608.000000 1617.000000 1648.000000 1695.000000 1789.000000 1835.000000 2053.000000 2089.000000 2223.000000 2531.000000 2688.000000 2901.000000 3114.000000 3268.000000 3496.000000 3703.000000 3900.000000 + +313.000000 378.000000 400.000000 548.000000 657.000000 691.000000 715.000000 728.000000 802.000000 813.000000 1092.000000 1203.000000 1237.000000 1388.000000 1562.000000 1566.000000 1573.000000 1659.000000 1781.000000 1788.000000 1821.000000 1825.000000 1835.000000 1985.000000 1993.000000 2152.000000 2308.000000 2492.000000 2681.000000 2890.000000 3101.000000 3305.000000 3492.000000 3694.000000 3900.000000 + +244.000000 311.000000 392.000000 400.000000 431.000000 441.000000 562.000000 577.000000 632.000000 791.000000 818.000000 875.000000 1020.000000 1059.000000 1134.000000 1164.000000 1201.000000 1238.000000 1273.000000 1387.000000 1562.000000 1609.000000 1831.000000 1949.000000 1961.000000 2088.000000 2329.000000 2509.000000 2691.000000 2902.000000 3096.000000 3279.000000 3506.000000 3704.000000 3900.000000 + +11.000000 111.000000 159.000000 277.000000 334.000000 400.000000 480.000000 646.000000 804.000000 1122.000000 1129.000000 1178.000000 1198.000000 1233.000000 1359.000000 1374.000000 1411.000000 1476.000000 1477.000000 1571.000000 1582.000000 1622.000000 1706.000000 1867.000000 1988.000000 2094.000000 2233.000000 2512.000000 2671.000000 2931.000000 3111.000000 3292.000000 3488.000000 3691.000000 3900.000000 + +57.000000 114.000000 328.000000 400.000000 442.000000 582.000000 662.000000 752.000000 766.000000 795.000000 1035.000000 1115.000000 1204.000000 1242.000000 1261.000000 1277.000000 1295.000000 1300.000000 1333.000000 
1398.000000 1571.000000 1594.000000 1743.000000 1765.000000 2076.000000 2094.000000 2319.000000 2518.000000 2683.000000 2933.000000 3109.000000 3317.000000 3492.000000 3696.000000 3900.000000 + +92.000000 102.000000 111.000000 190.000000 400.000000 446.000000 478.000000 630.000000 631.000000 805.000000 823.000000 918.000000 985.000000 1199.000000 1209.000000 1217.000000 1355.000000 1466.000000 1503.000000 1563.000000 1582.000000 1636.000000 1819.000000 1944.000000 1977.000000 2014.000000 2359.000000 2428.000000 2728.000000 2868.000000 3101.000000 3296.000000 3509.000000 3708.000000 3900.000000 + +34.000000 66.000000 70.000000 113.000000 135.000000 238.000000 284.000000 400.000000 528.000000 766.000000 805.000000 921.000000 994.000000 1045.000000 1137.000000 1180.000000 1193.000000 1481.000000 1625.000000 1660.000000 1699.000000 1764.000000 1809.000000 1861.000000 1967.000000 2095.000000 2267.000000 2518.000000 2719.000000 2885.000000 3081.000000 3252.000000 3484.000000 3705.000000 3900.000000 + +65.000000 90.000000 123.000000 199.000000 330.000000 400.000000 805.000000 1005.000000 1035.000000 1044.000000 1064.000000 1138.000000 1155.000000 1205.000000 1217.000000 1248.000000 1318.000000 1345.000000 1403.000000 1567.000000 1609.000000 1781.000000 1875.000000 1929.000000 2024.000000 2140.000000 2258.000000 2477.000000 2747.000000 2890.000000 3120.000000 3325.000000 3510.000000 3708.000000 3900.000000 + +70.000000 221.000000 280.000000 400.000000 489.000000 786.000000 1016.000000 1027.000000 1029.000000 1145.000000 1186.000000 1195.000000 1256.000000 1304.000000 1314.000000 1476.000000 1618.000000 1657.000000 1730.000000 1748.000000 1802.000000 1812.000000 1832.000000 1947.000000 1999.000000 2027.000000 2288.000000 2532.000000 2679.000000 2919.000000 3077.000000 3316.000000 3516.000000 3705.000000 3900.000000 + +153.000000 400.000000 474.000000 532.000000 568.000000 693.000000 738.000000 798.000000 806.000000 949.000000 1077.000000 1083.000000 1098.000000 1169.000000 1172.000000 1192.000000 1517.000000 1530.000000 1538.000000 1560.000000 1582.000000 1699.000000 1981.000000 1982.000000 2171.000000 2312.000000 2475.000000 2680.000000 2887.000000 3119.000000 3300.000000 3502.000000 3701.000000 3900.000000 + +92.000000 152.000000 164.000000 400.000000 520.000000 619.000000 621.000000 647.000000 648.000000 808.000000 853.000000 865.000000 920.000000 949.000000 1148.000000 1225.000000 1231.000000 1348.000000 1375.000000 1635.000000 1646.000000 1686.000000 1711.000000 2004.000000 2079.000000 2347.000000 2501.000000 2709.000000 2930.000000 3061.000000 3319.000000 3494.000000 3690.000000 3900.000000 + +74.000000 103.000000 247.000000 265.000000 400.000000 495.000000 501.000000 534.000000 552.000000 557.000000 601.000000 604.000000 792.000000 1003.000000 1138.000000 1195.000000 1252.000000 1325.000000 1336.000000 1425.000000 1646.000000 1657.000000 1795.000000 1990.000000 1992.000000 2062.000000 2300.000000 2509.000000 2690.000000 2913.000000 3066.000000 3276.000000 3460.000000 3700.000000 3900.000000 + +45.000000 90.000000 156.000000 400.000000 468.000000 523.000000 577.000000 583.000000 708.000000 797.000000 815.000000 1052.000000 1063.000000 1189.000000 1215.000000 1218.000000 1266.000000 1288.000000 1299.000000 1512.000000 1519.000000 1584.000000 1769.000000 1791.000000 1964.000000 2082.000000 2348.000000 2530.000000 2703.000000 2893.000000 3031.000000 3290.000000 3504.000000 3702.000000 3900.000000 + +140.000000 269.000000 400.000000 475.000000 492.000000 520.000000 569.000000 645.000000 
727.000000 794.000000 819.000000 834.000000 957.000000 1122.000000 1210.000000 1374.000000 1471.000000 1485.000000 1515.000000 1574.000000 1668.000000 1732.000000 1743.000000 1917.000000 2041.000000 2104.000000 2294.000000 2453.000000 2662.000000 2894.000000 3128.000000 3301.000000 3489.000000 3705.000000 3900.000000 + +28.000000 96.000000 112.000000 400.000000 426.000000 477.000000 584.000000 763.000000 804.000000 815.000000 1089.000000 1175.000000 1218.000000 1366.000000 1394.000000 1506.000000 1553.000000 1564.000000 1592.000000 1712.000000 1755.000000 1788.000000 1814.000000 1816.000000 1997.000000 2072.000000 2345.000000 2487.000000 2741.000000 2881.000000 3074.000000 3310.000000 3521.000000 3707.000000 3900.000000 + +215.000000 286.000000 400.000000 461.000000 488.000000 489.000000 768.000000 796.000000 885.000000 919.000000 1188.000000 1253.000000 1432.000000 1476.000000 1521.000000 1524.000000 1566.000000 1590.000000 1684.000000 1714.000000 1733.000000 1776.000000 1816.000000 1943.000000 2016.000000 2031.000000 2308.000000 2488.000000 2642.000000 2832.000000 3120.000000 3293.000000 3507.000000 3702.000000 3900.000000 + +77.000000 229.000000 302.000000 369.000000 400.000000 401.000000 404.000000 418.000000 804.000000 1026.000000 1110.000000 1179.000000 1187.000000 1227.000000 1456.000000 1458.000000 1476.000000 1629.000000 1630.000000 1640.000000 1697.000000 1734.000000 1785.000000 1919.000000 1956.000000 2057.000000 2324.000000 2416.000000 2656.000000 2889.000000 3126.000000 3323.000000 3491.000000 3696.000000 3900.000000 + +244.000000 302.000000 400.000000 455.000000 533.000000 562.000000 673.000000 748.000000 791.000000 1120.000000 1136.000000 1191.000000 1235.000000 1238.000000 1296.000000 1336.000000 1447.000000 1466.000000 1551.000000 1594.000000 1691.000000 1744.000000 1897.000000 1959.000000 2060.000000 2109.000000 2230.000000 2564.000000 2717.000000 2900.000000 3089.000000 3320.000000 3491.000000 3712.000000 3900.000000 + +3.000000 196.000000 199.000000 320.000000 339.000000 358.000000 400.000000 495.000000 690.000000 737.000000 760.000000 791.000000 849.000000 1027.000000 1194.000000 1220.000000 1242.000000 1313.000000 1354.000000 1435.000000 1523.000000 1621.000000 1775.000000 1788.000000 1999.000000 2074.000000 2245.000000 2478.000000 2750.000000 2893.000000 3113.000000 3302.000000 3485.000000 3690.000000 3900.000000 + +206.000000 234.000000 261.000000 277.000000 341.000000 374.000000 400.000000 465.000000 613.000000 672.000000 745.000000 793.000000 799.000000 917.000000 954.000000 1144.000000 1180.000000 1283.000000 1484.000000 1574.000000 1575.000000 1795.000000 1965.000000 1984.000000 2086.000000 2093.000000 2312.000000 2501.000000 2738.000000 2879.000000 3084.000000 3270.000000 3483.000000 3701.000000 3900.000000 + +154.000000 314.000000 400.000000 611.000000 615.000000 795.000000 823.000000 869.000000 908.000000 938.000000 960.000000 1024.000000 1049.000000 1068.000000 1185.000000 1420.000000 1441.000000 1496.000000 1610.000000 1709.000000 1712.000000 1740.000000 1885.000000 1917.000000 1992.000000 2079.000000 2224.000000 2508.000000 2713.000000 2861.000000 3096.000000 3300.000000 3509.000000 3696.000000 3900.000000 + +26.000000 51.000000 83.000000 121.000000 343.000000 400.000000 625.000000 695.000000 697.000000 783.000000 803.000000 933.000000 1014.000000 1135.000000 1158.000000 1210.000000 1548.000000 1589.000000 1662.000000 1663.000000 1674.000000 1677.000000 1733.000000 1801.000000 1978.000000 2027.000000 2276.000000 2477.000000 2687.000000 2946.000000 
3108.000000 3293.000000 3503.000000 3702.000000 3900.000000 + +21.000000 39.000000 125.000000 198.000000 254.000000 400.000000 456.000000 510.000000 806.000000 881.000000 920.000000 1000.000000 1046.000000 1067.000000 1129.000000 1143.000000 1188.000000 1438.000000 1552.000000 1603.000000 1754.000000 1761.000000 1943.000000 1960.000000 1980.000000 2068.000000 2246.000000 2544.000000 2731.000000 2923.000000 3060.000000 3271.000000 3517.000000 3700.000000 3900.000000 + +166.000000 237.000000 295.000000 300.000000 319.000000 369.000000 400.000000 407.000000 413.000000 428.000000 439.000000 804.000000 831.000000 899.000000 971.000000 1164.000000 1199.000000 1259.000000 1331.000000 1497.000000 1564.000000 1832.000000 1881.000000 1915.000000 1970.000000 2189.000000 2271.000000 2482.000000 2742.000000 2863.000000 3116.000000 3293.000000 3492.000000 3705.000000 3900.000000 + +298.000000 323.000000 400.000000 423.000000 526.000000 662.000000 799.000000 821.000000 830.000000 933.000000 989.000000 1190.000000 1200.000000 1227.000000 1251.000000 1306.000000 1543.000000 1574.000000 1589.000000 1690.000000 1697.000000 1849.000000 1938.000000 1951.000000 2027.000000 2059.000000 2315.000000 2456.000000 2703.000000 2944.000000 3103.000000 3307.000000 3497.000000 3693.000000 3900.000000 + +60.000000 172.000000 400.000000 413.000000 420.000000 600.000000 660.000000 690.000000 752.000000 789.000000 951.000000 1056.000000 1176.000000 1201.000000 1290.000000 1440.000000 1450.000000 1456.000000 1638.000000 1653.000000 1703.000000 1710.000000 1730.000000 1856.000000 2006.000000 2082.000000 2296.000000 2383.000000 2693.000000 2887.000000 3091.000000 3299.000000 3485.000000 3691.000000 3900.000000 + +20.000000 127.000000 326.000000 369.000000 400.000000 521.000000 588.000000 595.000000 700.000000 798.000000 799.000000 858.000000 913.000000 1101.000000 1193.000000 1379.000000 1432.000000 1440.000000 1482.000000 1486.000000 1575.000000 1577.000000 1792.000000 1820.000000 1957.000000 2097.000000 2309.000000 2493.000000 2639.000000 2854.000000 3109.000000 3294.000000 3488.000000 3713.000000 3900.000000 + +65.000000 119.000000 362.000000 400.000000 779.000000 803.000000 804.000000 897.000000 938.000000 984.000000 1147.000000 1207.000000 1266.000000 1319.000000 1373.000000 1579.000000 1596.000000 1626.000000 1644.000000 1650.000000 1725.000000 1776.000000 1851.000000 1965.000000 2023.000000 2116.000000 2331.000000 2552.000000 2727.000000 2855.000000 3081.000000 3268.000000 3521.000000 3698.000000 3900.000000 + +4.000000 10.000000 50.000000 124.000000 151.000000 169.000000 314.000000 317.000000 400.000000 474.000000 549.000000 630.000000 704.000000 798.000000 1030.000000 1144.000000 1155.000000 1188.000000 1345.000000 1390.000000 1428.000000 1603.000000 1867.000000 1902.000000 1922.000000 1995.000000 2290.000000 2431.000000 2679.000000 2886.000000 3092.000000 3305.000000 3501.000000 3704.000000 3900.000000 + +31.000000 37.000000 44.000000 211.000000 400.000000 445.000000 454.000000 602.000000 641.000000 760.000000 802.000000 850.000000 945.000000 1079.000000 1104.000000 1149.000000 1201.000000 1305.000000 1537.000000 1568.000000 1613.000000 1702.000000 1805.000000 1958.000000 1969.000000 2112.000000 2300.000000 2532.000000 2680.000000 2952.000000 3124.000000 3303.000000 3500.000000 3695.000000 3900.000000 + +43.000000 259.000000 276.000000 342.000000 362.000000 375.000000 380.000000 400.000000 674.000000 800.000000 804.000000 809.000000 882.000000 947.000000 952.000000 1219.000000 1351.000000 1504.000000 1568.000000 
1593.000000 1720.000000 1752.000000 1871.000000 1961.000000 2022.000000 2046.000000 2254.000000 2486.000000 2651.000000 2868.000000 3103.000000 3278.000000 3482.000000 3708.000000 3900.000000 + +1.000000 219.000000 227.000000 235.000000 241.000000 400.000000 606.000000 618.000000 645.000000 738.000000 797.000000 943.000000 1217.000000 1343.000000 1424.000000 1448.000000 1578.000000 1661.000000 1706.000000 1765.000000 1903.000000 1915.000000 1975.000000 1987.000000 2084.000000 2324.000000 2490.000000 2671.000000 2865.000000 3063.000000 3331.000000 3505.000000 3702.000000 3900.000000 + +103.000000 109.000000 356.000000 357.000000 400.000000 501.000000 714.000000 788.000000 793.000000 810.000000 859.000000 974.000000 1109.000000 1172.000000 1238.000000 1252.000000 1291.000000 1319.000000 1479.000000 1559.000000 1598.000000 1678.000000 1753.000000 1768.000000 1940.000000 2100.000000 2331.000000 2600.000000 2758.000000 2889.000000 3073.000000 3292.000000 3487.000000 3707.000000 3900.000000 + +234.000000 362.000000 388.000000 399.000000 400.000000 407.000000 452.000000 483.000000 692.000000 721.000000 797.000000 809.000000 863.000000 1216.000000 1227.000000 1338.000000 1445.000000 1473.000000 1536.000000 1596.000000 1608.000000 1619.000000 1914.000000 1990.000000 2052.000000 2117.000000 2316.000000 2488.000000 2682.000000 2918.000000 3104.000000 3299.000000 3506.000000 3696.000000 3900.000000 + +31.000000 91.000000 400.000000 422.000000 545.000000 587.000000 751.000000 794.000000 828.000000 962.000000 963.000000 1032.000000 1073.000000 1166.000000 1174.000000 1188.000000 1320.000000 1423.000000 1462.000000 1589.000000 1625.000000 1677.000000 1706.000000 1939.000000 2023.000000 2103.000000 2292.000000 2507.000000 2745.000000 2921.000000 3088.000000 3297.000000 3506.000000 3698.000000 3900.000000 + +35.000000 92.000000 237.000000 296.000000 400.000000 515.000000 601.000000 613.000000 798.000000 852.000000 1201.000000 1248.000000 1257.000000 1286.000000 1429.000000 1616.000000 1633.000000 1656.000000 1778.000000 1819.000000 1838.000000 1864.000000 1903.000000 1918.000000 1991.000000 2106.000000 2315.000000 2455.000000 2690.000000 2891.000000 3084.000000 3280.000000 3488.000000 3698.000000 3900.000000 + +20.000000 25.000000 172.000000 223.000000 274.000000 295.000000 368.000000 372.000000 400.000000 493.000000 717.000000 775.000000 795.000000 1015.000000 1200.000000 1319.000000 1444.000000 1559.000000 1592.000000 1694.000000 1743.000000 1757.000000 1841.000000 1859.000000 2043.000000 2075.000000 2336.000000 2461.000000 2764.000000 2905.000000 3099.000000 3293.000000 3494.000000 3697.000000 3900.000000 + +76.000000 120.000000 130.000000 209.000000 396.000000 400.000000 559.000000 572.000000 671.000000 726.000000 803.000000 907.000000 1011.000000 1128.000000 1208.000000 1232.000000 1321.000000 1337.000000 1531.000000 1600.000000 1702.000000 1777.000000 1824.000000 1862.000000 1988.000000 1999.000000 2352.000000 2537.000000 2750.000000 2957.000000 3102.000000 3291.000000 3503.000000 3701.000000 3900.000000 + +135.000000 250.000000 302.000000 310.000000 393.000000 400.000000 417.000000 684.000000 730.000000 804.000000 981.000000 982.000000 1081.000000 1197.000000 1240.000000 1313.000000 1409.000000 1431.000000 1473.000000 1498.000000 1561.000000 1615.000000 1782.000000 1925.000000 1979.000000 2149.000000 2293.000000 2490.000000 2676.000000 2932.000000 3117.000000 3315.000000 3502.000000 3690.000000 3900.000000 + diff --git a/test/test_distance.py b/test/test_distance.py index d98069d..4f8f6e8 100644 --- 
a/test/test_distance.py +++ b/test/test_distance.py @@ -135,53 +135,23 @@ def test_spike_sync(): spikes2 = np.array([2.1]) spikes1 = spk.add_auxiliary_spikes(spikes1, 4.0) spikes2 = spk.add_auxiliary_spikes(spikes2, 4.0) - for k in xrange(1, 3): - assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2, k=k), - 0.5, decimal=16) + assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2), + 0.5, decimal=16) spikes2 = np.array([3.1]) spikes2 = spk.add_auxiliary_spikes(spikes2, 4.0) - for k in xrange(1, 3): - assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2, k=k), - 0.5, decimal=16) + assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2), + 0.5, decimal=16) spikes2 = np.array([1.1]) spikes2 = spk.add_auxiliary_spikes(spikes2, 4.0) - for k in xrange(1, 3): - assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2, k=k), - 0.5, decimal=16) + assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2), + 0.5, decimal=16) spikes2 = np.array([0.9]) spikes2 = spk.add_auxiliary_spikes(spikes2, 4.0) - for k in xrange(1, 3): - assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2, k=k), - 0.5, decimal=16) - - spikes1 = np.array([100, 300, 400, 405, 410, 500, 700, 800, - 805, 810, 815, 900]) - spikes2 = np.array([100, 200, 205, 210, 295, 350, 400, 510, - 600, 605, 700, 910]) - spikes3 = np.array([100, 180, 198, 295, 412, 420, 510, 640, - 695, 795, 820, 920]) - spikes1 = spk.add_auxiliary_spikes(spikes1, 1000) - spikes2 = spk.add_auxiliary_spikes(spikes2, 1000) - spikes3 = spk.add_auxiliary_spikes(spikes3, 1000) - for k in xrange(1, 10): - assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2, k=k), - 0.5, decimal=15) - assert_almost_equal(spk.spike_sync_distance(spikes1, spikes3, k=k), - 0.5, decimal=15) - assert_almost_equal(spk.spike_sync_distance(spikes2, spikes3, k=k), - 0.5, decimal=15) - - f1 = spk.spike_sync_profile(spikes1, spikes2, k=1) - f2 = spk.spike_sync_profile(spikes1, spikes3, k=1) - f3 = spk.spike_sync_profile(spikes2, spikes3, k=1) - f = spk.spike_sync_profile_multi([spikes1, spikes2, spikes3], k=1) - # hands on definition of the average multivariate spike synchronization - expected = (f1.integral() + f2.integral() + f3.integral()) / \ - (len(f1.y)+len(f2.y)+len(f3.y)-3) - assert_almost_equal(f.avrg(), expected, decimal=15) + assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2), + 0.5, decimal=16) def check_multi_profile(profile_func, profile_func_multi): @@ -226,6 +196,39 @@ def test_multi_spike(): check_multi_profile(spk.spike_profile, spk.spike_profile_multi) +def test_multi_spike_sync(): + # some basic multivariate check + spikes1 = np.array([100, 300, 400, 405, 410, 500, 700, 800, + 805, 810, 815, 900]) + spikes2 = np.array([100, 200, 205, 210, 295, 350, 400, 510, + 600, 605, 700, 910]) + spikes3 = np.array([100, 180, 198, 295, 412, 420, 510, 640, + 695, 795, 820, 920]) + spikes1 = spk.add_auxiliary_spikes(spikes1, 1000) + spikes2 = spk.add_auxiliary_spikes(spikes2, 1000) + spikes3 = spk.add_auxiliary_spikes(spikes3, 1000) + assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2), + 0.5, decimal=15) + assert_almost_equal(spk.spike_sync_distance(spikes1, spikes3), + 0.5, decimal=15) + assert_almost_equal(spk.spike_sync_distance(spikes2, spikes3), + 0.5, decimal=15) + + f = spk.spike_sync_profile_multi([spikes1, spikes2, spikes3]) + # hands on definition of the average multivariate spike synchronization + # expected = (f1.integral() + f2.integral() + f3.integral()) / \ + # 
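
Why all of these small cases give exactly 0.5 can be checked by hand; my bookkeeping for the first one, following coincidence_python above:

    # spikes1 = [0, 1, 2, 3, 4] and spikes2 = [0, 2.1, 4] after the auxiliary
    # spikes are added. The only coincidence is 2 <-> 2.1:
    #   tau = 0.5 * min(1, 1, 2.1, 1.9) = 0.5 > |2.0 - 2.1|
    # so both spikes get marked with c = 1. Over the interior events this gives
    # sum(c) = 2 and sum(mp) = 4 (one per real spike), i.e. avrg = 2/4 = 0.5.
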
(np.sum(f1.mp[1:-1])+np.sum(f2.mp[1:-1])+np.sum(f3.mp[1:-1])) + expected = 0.5 + assert_almost_equal(f.avrg(), expected, decimal=15) + + # multivariate regression test + spike_trains = spk.load_spike_trains_from_txt("test/SPIKE_Sync_Test.txt", + time_interval=(0, 4000)) + f = spk.spike_sync_profile_multi(spike_trains) + assert_equal(np.sum(f.y[1:-1]), 39932) + assert_equal(np.sum(f.mp[1:-1]), 85554) + + def check_dist_matrix(dist_func, dist_matrix_func): # generate spike trains: t1 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6, 0.7]), 1.0) -- cgit v1.2.3 From 66968eedd276eb5d661b25d92775203546a3d646 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 19 Jan 2015 17:58:04 +0100 Subject: renamed IntervalSequence -> DiscreteFunction, cython implementation of add --- pyspike/__init__.py | 2 +- pyspike/cython_add.pyx | 59 ++++++++++ pyspike/distances.py | 6 +- pyspike/function.py | 269 +++++++++++++++++++++++----------------------- pyspike/python_backend.py | 113 +++++++++---------- 5 files changed, 255 insertions(+), 194 deletions(-) (limited to 'pyspike/__init__.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 74d52c5..1b18569 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -7,7 +7,7 @@ Distributed under the BSD License __all__ = ["function", "distances", "spikes"] from function import PieceWiseConstFunc, PieceWiseLinFunc, \ - MultipleValueSequence, average_profile + DiscreteFunction, average_profile from distances import isi_profile, isi_distance, \ spike_profile, spike_distance, \ spike_sync_profile, spike_sync_distance, \ diff --git a/pyspike/cython_add.pyx b/pyspike/cython_add.pyx index bfbe208..817799e 100644 --- a/pyspike/cython_add.pyx +++ b/pyspike/cython_add.pyx @@ -172,3 +172,62 @@ def add_piece_wise_lin_cython(double[:] x1, double[:] y11, double[:] y12, return (np.array(x_new[:index+2]), np.array(y1_new[:index+1]), np.array(y2_new[:index+1])) + + +############################################################ +# add_discrete_function_cython +############################################################ +def add_discrete_function_cython(double[:] x1, double[:] y1, double[:] mp1, + double[:] x2, double[:] y2, double[:] mp2): + + cdef double[:] x_new = np.empty(len(x1) + len(x2)) + cdef double[:] y_new = np.empty_like(x_new) + cdef double[:] mp_new = np.empty_like(x_new) + cdef int index1 = 0 + cdef int index2 = 0 + cdef int index = 0 + cdef int N1 = len(y1) + cdef int N2 = len(y2) + x_new[0] = x1[0] + while (index1+1 < N1) and (index2+1 < N2): + if x1[index1+1] < x2[index2+1]: + index1 += 1 + index += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + mp_new[index] = mp1[index1] + elif x1[index1+1] > x2[index2+1]: + index2 += 1 + index += 1 + x_new[index] = x2[index2] + y_new[index] = y2[index2] + mp_new[index] = mp2[index2] + else: # x1[index1+1] == x2[index2+1] + index1 += 1 + index2 += 1 + index += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + y2[index2] + mp_new[index] = mp1[index1] + mp2[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < len(y1): + x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] + y_new[index+1:index+1+len(x1)-index1-1] = y1[index1+1:] + mp_new[index+1:index+1+len(x1)-index1-1] = mp1[index1+1:] + index += len(x1)-index1-1 + elif index2+1 < len(y2): + x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] + y_new[index+1:index+1+len(x2)-index2-1] = y2[index2+1:] + mp_new[index+1:index+1+len(x2)-index2-1] = mp2[index2+1:] + index += 
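
A quick sanity check on the regression numbers asserted in test_distance.py above (my arithmetic, using the avrg() definition from function.py):

    # avrg = sum(y[1:-1]) / sum(mp[1:-1]) = 39932 / 85554 ~= 0.467
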
len(x2)-index2-1 + # else: # both arrays reached the end simultaneously + # x_new[index+1] = x1[-1] + # y_new[index+1] = y1[-1] + y2[-1] + # mp_new[index+1] = mp1[-1] + mp2[-1] + + y_new[0] = y_new[1] + mp_new[0] = mp_new[1] + + # the last value is again the end of the interval + # only use the data that was actually filled + return x_new[:index+1], y_new[:index+1], mp_new[:index+1] diff --git a/pyspike/distances.py b/pyspike/distances.py index 8bde724..0f0efa9 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -11,7 +11,7 @@ import numpy as np import threading from functools import partial -from pyspike import PieceWiseConstFunc, PieceWiseLinFunc, MultipleValueSequence +from pyspike import PieceWiseConstFunc, PieceWiseLinFunc, DiscreteFunction ############################################################ @@ -148,7 +148,7 @@ Falling back to slow python backend.") times, coincidences, multiplicity = coincidence_impl(spikes1, spikes2) - return MultipleValueSequence(times, coincidences, multiplicity) + return DiscreteFunction(times, coincidences, multiplicity) ############################################################ @@ -328,7 +328,7 @@ def spike_sync_profile_multi(spike_trains, indices=None): if None all given spike trains are used (default=None) :type indices: list or None :returns: The averaged spike profile :math:`(t)` - :rtype: :class:`pyspike.function.PieceWiseConstFunc` + :rtype: :class:`pyspike.function.DiscreteFunction` """ prof_func = partial(spike_sync_profile) diff --git a/pyspike/function.py b/pyspike/function.py index f10c136..6fb7537 100644 --- a/pyspike/function.py +++ b/pyspike/function.py @@ -170,140 +170,6 @@ that PySpike is installed by running\n 'python setup.py build_ext --inplace'! \ self.y *= fac -############################################################## -# MultipleValueSequence -############################################################## -class MultipleValueSequence(object): - """ A class representing a sequence of values defined in some interval. - """ - - def __init__(self, x, y, multiplicity): - """ Constructs the value sequence. - - :param x: array of length N defining the points at which the values are - defined. - :param y: array of length N degining the values at the points x. - :param multiplicity: array of length N defining the multiplicity of the - values. - """ - # convert parameters to arrays, also ensures copying - self.x = np.array(x) - self.y = np.array(y) - self.mp = np.array(multiplicity) - - def copy(self): - """ Returns a copy of itself - - :rtype: :class:`IntervalSequence` - """ - return MultipleValueSequence(self.x, self.y, self.mp) - - def almost_equal(self, other, decimal=14): - """ Checks if the function is equal to another function up to `decimal` - precision. - - :param other: another :class:`IntervalSequence` - :returns: True if the two functions are equal up to `decimal` decimals, - False otherwise - :rtype: bool - """ - eps = 10.0**(-decimal) - return np.allclose(self.x, other.x, atol=eps, rtol=0.0) and \ - np.allclose(self.y, other.y, atol=eps, rtol=0.0) and \ - np.allclose(self.mp, other.mp, atol=eps, rtol=0.0) - - def get_plottable_data(self, k=0): - """ Returns two arrays containing x- and y-coordinates for immeditate - plotting of the interval sequence. 
-
-        :returns: (x_plot, y_plot) containing plottable data
-        :rtype: pair of np.array
-
-        Example::
-
-            x, y = f.get_plottable_data()
-            plt.plot(x, y, '-o', label="Piece-wise const function")
-        """
-
-        if k > 0:
-            raise NotImplementedError()
-
-        return 1.0*self.x, 1.0*self.y/self.mp
-
-    def integral(self, interval=None):
-        """ Returns the integral over the given interval. For the interval
-        sequence this amounts to the sum over all values divided by the count
-        of intervals.
-
-        :param interval: integration interval given as a pair of floats, if
-                         None the integral over the whole function is computed.
-        :type interval: Pair of floats or None.
-        :returns: the integral
-        :rtype: float
-        """
-        if interval is None:
-            # no interval given, integrate over the whole spike train
-            # don't count the first value, which is zero by definition
-            a = 1.0*np.sum(self.y[1:-1])
-        else:
-            raise NotImplementedError()
-        return a
-
-    def avrg(self, interval=None):
-        """ Computes the average of the interval sequence:
-        :math:`a = 1/N sum f_n ` where N is the number of intervals.
-
-        :param interval: averaging interval given as a pair of floats, a
-                         sequence of pairs for averaging multiple intervals, or
-                         None, if None the average over the whole function is
-                         computed.
-        :type interval: Pair, sequence of pairs, or None.
-        :returns: the average a.
-        :rtype: float
-        """
-        if interval is None:
-            # no interval given, average over the whole spike train
-            # don't count the first interval for normalization
-            return self.integral() / np.sum(self.mp[1:-1])
-        else:
-            raise NotImplementedError()
-
-    def add(self, f):
-        """ Adds another `MultipleValueSequence` function to this function.
-        Note: only functions defined on the same interval can be summed.
-
-        :param f: :class:`MultipleValueSequence` function to be added.
-        :rtype: None
-        """
-        assert self.x[0] == f.x[0], "The functions have different intervals"
-        assert self.x[-1] == f.x[-1], "The functions have different intervals"
-
-        # cython version
-        try:
-            from cython_add import add_interval_sequence_cython as \
-                add_interval_sequence_impl
-        except ImportError:
-# print("Warning: add_piece_wise_const_cython not found. Make sure \
-# that PySpike is installed by running\n 'python setup.py build_ext --inplace'! \
-# \n Falling back to slow python backend.")
-            # use python backend
-            from python_backend import add_multiple_value_sequence_python as \
-                add_multiple_value_sequence_impl
-
-        self.x, self.y, self.mp = \
-            add_multiple_value_sequence_impl(self.x, self.y, self.mp,
-                                             f.x, f.y, f.mp)
-
-    def mul_scalar(self, fac):
-        """ Multiplies the function with a scalar value
-
-        :param fac: Value to multiply
-        :type fac: double
-        :rtype: None
-        """
-        self.y *= fac
-
-
 ##############################################################
 # PieceWiseLinFunc
 ##############################################################
@@ -489,6 +355,141 @@ that PySpike is installed by running\n 'python setup.py build_ext --inplace'! \
         self.y2 *= fac


+##############################################################
+# DiscreteFunction
+##############################################################
+class DiscreteFunction(object):
+    """ A class representing values defined on a discrete set of points.
+    """
+
+    def __init__(self, x, y, multiplicity):
+        """ Constructs the discrete function.
+
+        :param x: array of length N defining the points at which the values are
+        defined.
+        :param y: array of length N defining the values at the points x.
+        :param multiplicity: array of length N defining the multiplicity of the
+        values.
+        """
+        # convert parameters to arrays, also ensures copying
+        self.x = np.array(x)
+        self.y = np.array(y)
+        self.mp = np.array(multiplicity)
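
As a sketch of how two such discrete profiles combine, hand-checked against the python backend further below (toy numbers; add() merges the sample points and sums y and mp where points coincide):

    import numpy as np
    from pyspike import DiscreteFunction

    f1 = DiscreteFunction(np.array([0., 1., 2., 4.]),
                          np.array([0., 1., 0., 0.]),
                          np.array([1., 1., 1., 1.]))
    f2 = DiscreteFunction(np.array([0., 2., 3., 4.]),
                          np.array([0., 1., 1., 0.]),
                          np.array([1., 1., 1., 1.]))
    f1.add(f2)
    # merged: x = [0, 1, 2, 3, 4], y = [1, 1, 1, 1, 0], mp = [1, 1, 2, 1, 2]
    print(f1.avrg())  # (1+1+1) / (1+2+1) = 0.75
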
+
+    def copy(self):
+        """ Returns a copy of itself
+
+        :rtype: :class:`DiscreteFunction`
+        """
+        return DiscreteFunction(self.x, self.y, self.mp)
+
+    def almost_equal(self, other, decimal=14):
+        """ Checks if the function is equal to another function up to `decimal`
+        precision.
+
+        :param other: another :class:`DiscreteFunction`
+        :returns: True if the two functions are equal up to `decimal` decimals,
+                  False otherwise
+        :rtype: bool
+        """
+        eps = 10.0**(-decimal)
+        return np.allclose(self.x, other.x, atol=eps, rtol=0.0) and \
+            np.allclose(self.y, other.y, atol=eps, rtol=0.0) and \
+            np.allclose(self.mp, other.mp, atol=eps, rtol=0.0)
+
+    def get_plottable_data(self, k=0):
+        """ Returns two arrays containing x- and y-coordinates for immediate
+        plotting of the interval sequence.
+
+        :returns: (x_plot, y_plot) containing plottable data
+        :rtype: pair of np.array
+
+        Example::
+
+            x, y = f.get_plottable_data()
+            plt.plot(x, y, '-o', label="Discrete function")
+        """
+
+        if k > 0:
+            raise NotImplementedError()
+
+        return 1.0*self.x, 1.0*self.y/self.mp
+
+    def integral(self, interval=None):
+        """ Returns the integral over the given interval. For the interval
+        sequence this amounts to the sum over all values divided by the count
+        of intervals.
+
+        :param interval: integration interval given as a pair of floats, if
+                         None the integral over the whole function is computed.
+        :type interval: Pair of floats or None.
+        :returns: the integral
+        :rtype: float
+        """
+        if interval is None:
+            # no interval given, integrate over the whole spike train
+            # don't count the first value, which is zero by definition
+            a = 1.0*np.sum(self.y[1:-1])
+        else:
+            raise NotImplementedError()
+        return a
+
+    def avrg(self, interval=None):
+        """ Computes the average of the interval sequence:
+        :math:`a = 1/N sum f_n ` where N is the number of intervals.
+
+        :param interval: averaging interval given as a pair of floats, a
+                         sequence of pairs for averaging multiple intervals, or
+                         None, if None the average over the whole function is
+                         computed.
+        :type interval: Pair, sequence of pairs, or None.
+        :returns: the average a.
+        :rtype: float
+        """
+        if interval is None:
+            # no interval given, average over the whole spike train
+            # don't count the first interval for normalization
+            return self.integral() / np.sum(self.mp[1:-1])
+        else:
+            raise NotImplementedError()
+
+    def add(self, f):
+        """ Adds another `DiscreteFunction` function to this function.
+        Note: only functions defined on the same interval can be summed.
+
+        :param f: :class:`DiscreteFunction` function to be added.
+        :rtype: None
+        """
+        assert self.x[0] == f.x[0], "The functions have different intervals"
+        assert self.x[-1] == f.x[-1], "The functions have different intervals"
+
+        # cython version
+        try:
+            from cython_add import add_discrete_function_cython as \
+                add_discrete_function_impl
+        except ImportError:
+            print("Warning: add_discrete_function_cython not found. Make \
+sure that PySpike is installed by running\n\
+'python setup.py build_ext --inplace'! 
\ +\n Falling back to slow python backend.") + # use python backend + from python_backend import add_discrete_function_python as \ + add_discrete_function_impl + + self.x, self.y, self.mp = \ + add_discrete_function_impl(self.x, self.y, self.mp, + f.x, f.y, f.mp) + + def mul_scalar(self, fac): + """ Multiplies the function with a scalar value + + :param fac: Value to multiply + :type fac: double + :rtype: None + """ + self.y *= fac + + def average_profile(profiles): """ Computes the average profile from the given ISI- or SPIKE-profiles. diff --git a/pyspike/python_backend.py b/pyspike/python_backend.py index bbbd572..481daf9 100644 --- a/pyspike/python_backend.py +++ b/pyspike/python_backend.py @@ -357,62 +357,6 @@ def add_piece_wise_const_python(x1, y1, x2, y2): return x_new[:index+2], y_new[:index+1] -############################################################ -# add_multiple_value_sequence_python -############################################################ -def add_multiple_value_sequence_python(x1, y1, mp1, x2, y2, mp2): - - x_new = np.empty(len(x1) + len(x2)) - y_new = np.empty_like(x_new) - mp_new = np.empty_like(x_new) - x_new[0] = x1[0] - index1 = 0 - index2 = 0 - index = 0 - while (index1+1 < len(y1)) and (index2+1 < len(y2)): - if x1[index1+1] < x2[index2+1]: - index1 += 1 - index += 1 - x_new[index] = x1[index1] - y_new[index] = y1[index1] - mp_new[index] = mp1[index1] - elif x1[index1+1] > x2[index2+1]: - index2 += 1 - index += 1 - x_new[index] = x2[index2] - y_new[index] = y2[index2] - mp_new[index] = mp2[index2] - else: # x1[index1+1] == x2[index2+1] - index1 += 1 - index2 += 1 - index += 1 - x_new[index] = x1[index1] - y_new[index] = y1[index1] + y2[index2] - mp_new[index] = mp1[index1] + mp2[index2] - # one array reached the end -> copy the contents of the other to the end - if index1+1 < len(y1): - x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] - y_new[index+1:index+1+len(x1)-index1-1] = y1[index1+1:] - mp_new[index+1:index+1+len(x1)-index1-1] = mp1[index1+1:] - index += len(x1)-index1-1 - elif index2+1 < len(y2): - x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] - y_new[index+1:index+1+len(x2)-index2-1] = y2[index2+1:] - mp_new[index+1:index+1+len(x2)-index2-1] = mp2[index2+1:] - index += len(x2)-index2-1 - # else: # both arrays reached the end simultaneously - # x_new[index+1] = x1[-1] - # y_new[index+1] = y1[-1] + y2[-1] - # mp_new[index+1] = mp1[-1] + mp2[-1] - - y_new[0] = y_new[1] - mp_new[0] = mp_new[1] - - # the last value is again the end of the interval - # only use the data that was actually filled - return x_new[:index+1], y_new[:index+1], mp_new[:index+1] - - ############################################################ # add_piece_lin_const_python ############################################################ @@ -482,3 +426,60 @@ def add_piece_wise_lin_python(x1, y11, y12, x2, y21, y22): y2_new[index] = y12[-1]+y22[-1] # only use the data that was actually filled return x_new[:index+2], y1_new[:index+1], y2_new[:index+1] + + +############################################################ +# add_discrete_function_python +############################################################ +def add_discrete_function_python(x1, y1, mp1, x2, y2, mp2): + + x_new = np.empty(len(x1) + len(x2)) + y_new = np.empty_like(x_new) + mp_new = np.empty_like(x_new) + x_new[0] = x1[0] + index1 = 0 + index2 = 0 + index = 0 + while (index1+1 < len(y1)) and (index2+1 < len(y2)): + if x1[index1+1] < x2[index2+1]: + index1 += 1 + index += 1 + x_new[index] = x1[index1] + 
y_new[index] = y1[index1] + mp_new[index] = mp1[index1] + elif x1[index1+1] > x2[index2+1]: + index2 += 1 + index += 1 + x_new[index] = x2[index2] + y_new[index] = y2[index2] + mp_new[index] = mp2[index2] + else: # x1[index1+1] == x2[index2+1] + index1 += 1 + index2 += 1 + index += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + y2[index2] + mp_new[index] = mp1[index1] + mp2[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < len(y1): + x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] + y_new[index+1:index+1+len(x1)-index1-1] = y1[index1+1:] + mp_new[index+1:index+1+len(x1)-index1-1] = mp1[index1+1:] + index += len(x1)-index1-1 + elif index2+1 < len(y2): + x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] + y_new[index+1:index+1+len(x2)-index2-1] = y2[index2+1:] + mp_new[index+1:index+1+len(x2)-index2-1] = mp2[index2+1:] + index += len(x2)-index2-1 + # else: # both arrays reached the end simultaneously + # x_new[index+1] = x1[-1] + # y_new[index+1] = y1[-1] + y2[-1] + # mp_new[index+1] = mp1[-1] + mp2[-1] + + y_new[0] = y_new[1] + mp_new[0] = mp_new[1] + + # the last value is again the end of the interval + # only use the data that was actually filled + return x_new[:index+1], y_new[:index+1], mp_new[:index+1] + -- cgit v1.2.3 From cce93ecbb1c961ab075a4924a42543483ffffb77 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 20 Jan 2015 10:47:27 +0100 Subject: added spike sync matrix --- pyspike/__init__.py | 4 ++-- pyspike/distances.py | 62 ++++++++++++++++++++++++++++++++++++++++++--------- pyspike/function.py | 1 + test/test_distance.py | 20 +++++++++++------ 4 files changed, 68 insertions(+), 19 deletions(-) (limited to 'pyspike/__init__.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 1b18569..55687e6 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -10,9 +10,9 @@ from function import PieceWiseConstFunc, PieceWiseLinFunc, \ DiscreteFunction, average_profile from distances import isi_profile, isi_distance, \ spike_profile, spike_distance, \ - spike_sync_profile, spike_sync_distance, \ + spike_sync_profile, spike_sync, \ isi_profile_multi, isi_distance_multi, isi_distance_matrix, \ spike_profile_multi, spike_distance_multi, spike_distance_matrix, \ - spike_sync_profile_multi + spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix from spikes import add_auxiliary_spikes, load_spike_trains_from_txt, \ spike_train_from_string, merge_spike_trains, generate_poisson_spikes diff --git a/pyspike/distances.py b/pyspike/distances.py index 0f0efa9..8a14a8d 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -152,10 +152,10 @@ Falling back to slow python backend.") ############################################################ -# spike_sync_distance +# spike_sync ############################################################ -def spike_sync_distance(spikes1, spikes2): - return spike_sync_profile(spikes1, spikes2).avrg() +def spike_sync(spikes1, spikes2, interval=None): + return spike_sync_profile(spikes1, spikes2).avrg(interval) ############################################################ @@ -313,6 +313,28 @@ def spike_profile_multi(spike_trains, indices=None): return average_dist +############################################################ +# spike_distance_multi +############################################################ +def spike_distance_multi(spike_trains, indices=None, interval=None): + """ Computes the multi-variate spike distance for a set of spike trains. 
+ That is the time average of the multi-variate spike profile: + S_{spike} = \int_0^T 2/(N(N-1)) sum_{<i,j>} S_{spike}^{i, j} dt + where the sum goes over all pairs <i,j> + + :param spike_trains: list of spike trains + :param indices: list of indices defining which spike trains to use, + if None all given spike trains are used (default=None) + :type indices: list or None + :param interval: averaging interval given as a pair of floats, if None + the average over the whole function is computed. + :type interval: Pair of floats or None. + :returns: The averaged spike distance S. + :rtype: double + """ + return spike_profile_multi(spike_trains, indices).avrg(interval) + + ############################################################ # spike_profile_multi ############################################################ @@ -341,11 +363,9 @@ def spike_sync_profile_multi(spike_trains, indices=None): ############################################################ # spike_distance_multi ############################################################ -def spike_distance_multi(spike_trains, indices=None, interval=None): - """ Computes the multi-variate spike distance for a set of spike trains. - That is the time average of the multi-variate spike profile: - S_{spike} = \int_0^T 2/(N(N-1)) sum_{<i,j>} S_{spike}^{i, j} dt - where the sum goes over all pairs <i,j> +def spike_sync_multi(spike_trains, indices=None, interval=None): + """ Computes the multi-variate spike synchronization value for a set of + spike trains. :param spike_trains: list of spike trains :param indices: list of indices defining which spike trains to use, @@ -354,10 +374,10 @@ def spike_distance_multi(spike_trains, indices=None, interval=None): :param interval: averaging interval given as a pair of floats, if None the average over the whole function is computed. :type interval: Pair of floats or None. - :returns: The averaged spike distance S. + :returns: The averaged spike synchronization value SYNC. :rtype: double """ - return spike_profile_multi(spike_trains, indices).avrg(interval) + return spike_sync_profile_multi(spike_trains, indices).avrg(interval) ############################################################ @@ -433,3 +453,25 @@ def spike_distance_matrix(spike_trains, indices=None, interval=None): """ return _generic_distance_matrix(spike_trains, spike_distance, indices, interval) + + +############################################################ +# spike_sync_matrix +############################################################ +def spike_sync_matrix(spike_trains, indices=None, interval=None): + """ Computes the time averaged spike-synchronization value of all pairs of + spike-trains. + + :param spike_trains: list of spike trains + :param indices: list of indices defining which spike trains to use, + if None all given spike trains are used (default=None) + :type indices: list or None + :param interval: averaging interval given as a pair of floats, if None + the average over the whole function is computed. + :type interval: Pair of floats or None.
+ :returns: 2D array with the pair wise time spike synchronization values + :math:`SYNC_{ij}` + :rtype: np.array + """ + return _generic_distance_matrix(spike_trains, spike_sync, + indices, interval) diff --git a/pyspike/function.py b/pyspike/function.py index 62b0e2c..e0dadf6 100644 --- a/pyspike/function.py +++ b/pyspike/function.py @@ -429,6 +429,7 @@ class DiscreteFunction(object): """ def get_indices(ival): + """ Returns the indices surrounding the given interval""" start_ind = np.searchsorted(self.x, ival[0], side='right') end_ind = np.searchsorted(self.x, ival[1], side='left') assert start_ind > 0 and end_ind < len(self.x), \ diff --git a/test/test_distance.py b/test/test_distance.py index 6bdb049..2650313 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -135,22 +135,22 @@ def test_spike_sync(): spikes2 = np.array([2.1]) spikes1 = spk.add_auxiliary_spikes(spikes1, 4.0) spikes2 = spk.add_auxiliary_spikes(spikes2, 4.0) - assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2), + assert_almost_equal(spk.spike_sync(spikes1, spikes2), 0.5, decimal=16) spikes2 = np.array([3.1]) spikes2 = spk.add_auxiliary_spikes(spikes2, 4.0) - assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2), + assert_almost_equal(spk.spike_sync(spikes1, spikes2), 0.5, decimal=16) spikes2 = np.array([1.1]) spikes2 = spk.add_auxiliary_spikes(spikes2, 4.0) - assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2), + assert_almost_equal(spk.spike_sync(spikes1, spikes2), 0.5, decimal=16) spikes2 = np.array([0.9]) spikes2 = spk.add_auxiliary_spikes(spikes2, 4.0) - assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2), + assert_almost_equal(spk.spike_sync(spikes1, spikes2), 0.5, decimal=16) @@ -207,11 +207,11 @@ def test_multi_spike_sync(): spikes1 = spk.add_auxiliary_spikes(spikes1, 1000) spikes2 = spk.add_auxiliary_spikes(spikes2, 1000) spikes3 = spk.add_auxiliary_spikes(spikes3, 1000) - assert_almost_equal(spk.spike_sync_distance(spikes1, spikes2), + assert_almost_equal(spk.spike_sync(spikes1, spikes2), 0.5, decimal=15) - assert_almost_equal(spk.spike_sync_distance(spikes1, spikes3), + assert_almost_equal(spk.spike_sync(spikes1, spikes3), 0.5, decimal=15) - assert_almost_equal(spk.spike_sync_distance(spikes2, spikes3), + assert_almost_equal(spk.spike_sync(spikes2, spikes3), 0.5, decimal=15) f = spk.spike_sync_profile_multi([spikes1, spikes2, spikes3]) @@ -220,6 +220,8 @@ def test_multi_spike_sync(): # (np.sum(f1.mp[1:-1])+np.sum(f2.mp[1:-1])+np.sum(f3.mp[1:-1])) expected = 0.5 assert_almost_equal(f.avrg(), expected, decimal=15) + assert_almost_equal(spk.spike_sync_multi([spikes1, spikes2, spikes3]), + expected, decimal=15) # multivariate regression test spike_trains = spk.load_spike_trains_from_txt("test/SPIKE_Sync_Test.txt", @@ -267,6 +269,10 @@ def test_spike_matrix(): check_dist_matrix(spk.spike_distance, spk.spike_distance_matrix) +def test_spike_sync_matrix(): + check_dist_matrix(spk.spike_sync, spk.spike_sync_matrix) + + def test_regression_spiky(): spike_trains = spk.load_spike_trains_from_txt("test/PySpike_testdata.txt", (0.0, 4000.0)) -- cgit v1.2.3 From ae90eb6bdd6afd716cb90a4aeeec8e6e77635d10 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 28 Jan 2015 17:42:19 +0100 Subject: each function class in separate source file --- pyspike/__init__.py | 9 +- pyspike/distances.py | 13 +- pyspike/function.py | 585 -------------------------------------------- test/test_function.py | 21 +- 4 files changed, 27 insertions(+), 601 deletions(-) delete mode 100644
pyspike/function.py (limited to 'pyspike/__init__.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 55687e6..f480964 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -4,10 +4,13 @@ Copyright 2014, Mario Mulansky Distributed under the BSD License """ -__all__ = ["function", "distances", "spikes"] +__all__ = ["distances", "spikes", "PieceWiseConstFunc", "PieceWiseLinFunc", + "DiscreteFunc"] + +from PieceWiseConstFunc import PieceWiseConstFunc +from PieceWiseLinFunc import PieceWiseLinFunc +from DiscreteFunc import DiscreteFunc -from function import PieceWiseConstFunc, PieceWiseLinFunc, \ - DiscreteFunction, average_profile from distances import isi_profile, isi_distance, \ spike_profile, spike_distance, \ spike_sync_profile, spike_sync, \ diff --git a/pyspike/distances.py b/pyspike/distances.py index 5476b6f..9077871 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -11,7 +11,7 @@ import numpy as np import threading from functools import partial -from pyspike import PieceWiseConstFunc, PieceWiseLinFunc, DiscreteFunction +from pyspike import PieceWiseConstFunc, PieceWiseLinFunc, DiscreteFunc ############################################################ @@ -161,7 +161,7 @@ Falling back to slow python backend.") times, coincidences, multiplicity = coincidence_impl(spikes1, spikes2) - return DiscreteFunction(times, coincidences, multiplicity) + return DiscreteFunc(times, coincidences, multiplicity) ############################################################ @@ -212,7 +212,8 @@ def _generic_profile_multi(spike_trains, pair_distance_func, indices=None): assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \ "Invalid index list." # generate a list of possible index pairs - pairs = [(indices[i], j) for i in range(len(indices)) for j in indices[i+1:]] + pairs = [(indices[i], j) for i in range(len(indices)) + for j in indices[i+1:]] # start with first pair (i, j) = pairs[0] average_dist = pair_distance_func(spike_trains[i], spike_trains[j]) @@ -251,7 +252,8 @@ def _multi_distance_par(spike_trains, pair_distance_func, indices=None): assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \ "Invalid index list." # generate a list of possible index pairs - pairs = [(indices[i], j) for i in range(len(indices)) for j in indices[i+1:]] + pairs = [(indices[i], j) for i in range(len(indices)) + for j in indices[i+1:]] num_pairs = len(pairs) # start with first pair @@ -430,7 +432,8 @@ def _generic_distance_matrix(spike_trains, dist_function, assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \ "Invalid index list." # generate a list of possible index pairs - pairs = [(indices[i], j) for i in range(len(indices)) for j in indices[i+1:]] + pairs = [(indices[i], j) for i in range(len(indices)) + for j in indices[i+1:]] distance_matrix = np.zeros((len(indices), len(indices))) for i, j in pairs: diff --git a/pyspike/function.py b/pyspike/function.py deleted file mode 100644 index 047c88a..0000000 --- a/pyspike/function.py +++ /dev/null @@ -1,585 +0,0 @@ -""" function.py - -Module containing classes representing piece-wise constant and piece-wise -linear functions. 
- -Copyright 2014, Mario Mulansky - -Distributed under the BSD License - -""" -from __future__ import print_function - -import numpy as np -import collections - - -############################################################## -# PieceWiseConstFunc -############################################################## -class PieceWiseConstFunc(object): - """ A class representing a piece-wise constant function. """ - - def __init__(self, x, y): - """ Constructs the piece-wise const function. - - :param x: array of length N+1 defining the edges of the intervals of - the pwc function. - :param y: array of length N defining the function values at the - intervals. - """ - # convert parameters to arrays, also ensures copying - self.x = np.array(x) - self.y = np.array(y) - - def copy(self): - """ Returns a copy of itself - - :rtype: :class:`PieceWiseConstFunc` - """ - return PieceWiseConstFunc(self.x, self.y) - - def almost_equal(self, other, decimal=14): - """ Checks if the function is equal to another function up to `decimal` - precision. - - :param other: another :class:`PieceWiseConstFunc` - :returns: True if the two functions are equal up to `decimal` decimals, - False otherwise - :rtype: bool - """ - eps = 10.0**(-decimal) - return np.allclose(self.x, other.x, atol=eps, rtol=0.0) and \ - np.allclose(self.y, other.y, atol=eps, rtol=0.0) - - def get_plottable_data(self): - """ Returns two arrays containing x- and y-coordinates for immeditate - plotting of the piece-wise function. - - :returns: (x_plot, y_plot) containing plottable data - :rtype: pair of np.array - - Example:: - - x, y = f.get_plottable_data() - plt.plot(x, y, '-o', label="Piece-wise const function") - """ - - x_plot = np.empty(2*len(self.x)-2) - x_plot[0] = self.x[0] - x_plot[1::2] = self.x[1:] - x_plot[2::2] = self.x[1:-1] - y_plot = np.empty(2*len(self.y)) - y_plot[::2] = self.y - y_plot[1::2] = self.y - - return x_plot, y_plot - - def integral(self, interval=None): - """ Returns the integral over the given interval. - - :param interval: integration interval given as a pair of floats, if - None the integral over the whole function is computed. - :type interval: Pair of floats or None. - :returns: the integral - :rtype: float - """ - if interval is None: - # no interval given, integrate over the whole spike train - a = np.sum((self.x[1:]-self.x[:-1]) * self.y) - else: - # find the indices corresponding to the interval - start_ind = np.searchsorted(self.x, interval[0], side='right') - end_ind = np.searchsorted(self.x, interval[1], side='left')-1 - assert start_ind > 0 and end_ind < len(self.x), \ - "Invalid averaging interval" - # first the contribution from between the indices - a = np.sum((self.x[start_ind+1:end_ind+1] - - self.x[start_ind:end_ind]) * - self.y[start_ind:end_ind]) - # correction from start to first index - a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1] - # correction from last index to end - a += (interval[1]-self.x[end_ind]) * self.y[end_ind] - return a - - def avrg(self, interval=None): - """ Computes the average of the piece-wise const function: - :math:`a = 1/T int_0^T f(x) dx` where T is the length of the interval. - - :param interval: averaging interval given as a pair of floats, a - sequence of pairs for averaging multiple intervals, or - None, if None the average over the whole function is - computed. - :type interval: Pair, sequence of pairs, or None. - :returns: the average a. 
- :rtype: float - """ - if interval is None: - # no interval given, average over the whole spike train - return self.integral() / (self.x[-1]-self.x[0]) - - # check if interval is as sequence - assert isinstance(interval, collections.Sequence), \ - "Invalid value for `interval`. None, Sequence or Tuple expected." - # check if interval is a sequence of intervals - if not isinstance(interval[0], collections.Sequence): - # just one interval - a = self.integral(interval) / (interval[1]-interval[0]) - else: - # several intervals - a = 0.0 - int_length = 0.0 - for ival in interval: - a += self.integral(ival) - int_length += ival[1] - ival[0] - a /= int_length - return a - - def add(self, f): - """ Adds another PieceWiseConst function to this function. - Note: only functions defined on the same interval can be summed. - - :param f: :class:`PieceWiseConstFunc` function to be added. - :rtype: None - """ - assert self.x[0] == f.x[0], "The functions have different intervals" - assert self.x[-1] == f.x[-1], "The functions have different intervals" - - # cython version - try: - from cython_add import add_piece_wise_const_cython as \ - add_piece_wise_const_impl - except ImportError: - print("Warning: add_piece_wise_const_cython not found. Make sure \ -that PySpike is installed by running\n 'python setup.py build_ext --inplace'! \ -\n Falling back to slow python backend.") - # use python backend - from python_backend import add_piece_wise_const_python as \ - add_piece_wise_const_impl - - self.x, self.y = add_piece_wise_const_impl(self.x, self.y, f.x, f.y) - - def mul_scalar(self, fac): - """ Multiplies the function with a scalar value - - :param fac: Value to multiply - :type fac: double - :rtype: None - """ - self.y *= fac - - -############################################################## -# PieceWiseLinFunc -############################################################## -class PieceWiseLinFunc: - """ A class representing a piece-wise linear function. """ - - def __init__(self, x, y1, y2): - """ Constructs the piece-wise linear function. - - :param x: array of length N+1 defining the edges of the intervals of - the pwc function. - :param y1: array of length N defining the function values at the left - of the intervals. - :param y2: array of length N defining the function values at the right - of the intervals. - """ - # convert to array, which also ensures copying - self.x = np.array(x) - self.y1 = np.array(y1) - self.y2 = np.array(y2) - - def copy(self): - """ Returns a copy of itself - - :rtype: :class:`PieceWiseLinFunc` - """ - return PieceWiseLinFunc(self.x, self.y1, self.y2) - - def almost_equal(self, other, decimal=14): - """ Checks if the function is equal to another function up to `decimal` - precision. - - :param other: another :class:`PieceWiseLinFunc` - :returns: True if the two functions are equal up to `decimal` decimals, - False otherwise - :rtype: bool - """ - eps = 10.0**(-decimal) - return np.allclose(self.x, other.x, atol=eps, rtol=0.0) and \ - np.allclose(self.y1, other.y1, atol=eps, rtol=0.0) and \ - np.allclose(self.y2, other.y2, atol=eps, rtol=0.0) - - def get_plottable_data(self): - """ Returns two arrays containing x- and y-coordinates for immeditate - plotting of the piece-wise function. 
- - :returns: (x_plot, y_plot) containing plottable data - :rtype: pair of np.array - - Example:: - - x, y = f.get_plottable_data() - plt.plot(x, y, '-o', label="Piece-wise const function") - """ - x_plot = np.empty(2*len(self.x)-2) - x_plot[0] = self.x[0] - x_plot[1::2] = self.x[1:] - x_plot[2::2] = self.x[1:-1] - y_plot = np.empty_like(x_plot) - y_plot[0::2] = self.y1 - y_plot[1::2] = self.y2 - return x_plot, y_plot - - def integral(self, interval=None): - """ Returns the integral over the given interval. - - :param interval: integration interval given as a pair of floats, if - None the integral over the whole function is computed. - :type interval: Pair of floats or None. - :returns: the integral - :rtype: float - """ - - def intermediate_value(x0, x1, y0, y1, x): - """ computes the intermediate value of a linear function """ - return y0 + (y1-y0)*(x-x0)/(x1-x0) - - if interval is None: - # no interval given, integrate over the whole spike train - integral = np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2)) - else: - # find the indices corresponding to the interval - start_ind = np.searchsorted(self.x, interval[0], side='right') - end_ind = np.searchsorted(self.x, interval[1], side='left')-1 - assert start_ind > 0 and end_ind < len(self.x), \ - "Invalid averaging interval" - # first the contribution from between the indices - integral = np.sum((self.x[start_ind+1:end_ind+1] - - self.x[start_ind:end_ind]) * - 0.5*(self.y1[start_ind:end_ind] + - self.y2[start_ind:end_ind])) - # correction from start to first index - integral += (self.x[start_ind]-interval[0]) * 0.5 * \ - (self.y2[start_ind-1] + - intermediate_value(self.x[start_ind-1], - self.x[start_ind], - self.y1[start_ind-1], - self.y2[start_ind-1], - interval[0] - )) - # correction from last index to end - integral += (interval[1]-self.x[end_ind]) * 0.5 * \ - (self.y1[end_ind] + - intermediate_value(self.x[end_ind], self.x[end_ind+1], - self.y1[end_ind], self.y2[end_ind], - interval[1] - )) - return integral - - def avrg(self, interval=None): - """ Computes the average of the piece-wise linear function: - :math:`a = 1/T int_0^T f(x) dx` where T is the length of the interval. - - :param interval: averaging interval given as a pair of floats, a - sequence of pairs for averaging multiple intervals, or - None, if None the average over the whole function is - computed. - :type interval: Pair, sequence of pairs, or None. - :returns: the average a. - :rtype: float - - """ - - if interval is None: - # no interval given, average over the whole spike train - return self.integral() / (self.x[-1]-self.x[0]) - - # check if interval is as sequence - assert isinstance(interval, collections.Sequence), \ - "Invalid value for `interval`. None, Sequence or Tuple expected." - # check if interval is a sequence of intervals - if not isinstance(interval[0], collections.Sequence): - # just one interval - a = self.integral(interval) / (interval[1]-interval[0]) - else: - # several intervals - a = 0.0 - int_length = 0.0 - for ival in interval: - a += self.integral(ival) - int_length += ival[1] - ival[0] - a /= int_length - return a - - def add(self, f): - """ Adds another PieceWiseLin function to this function. - Note: only functions defined on the same interval can be summed. - - :param f: :class:`PieceWiseLinFunc` function to be added. 
- :rtype: None - """ - assert self.x[0] == f.x[0], "The functions have different intervals" - assert self.x[-1] == f.x[-1], "The functions have different intervals" - - # python implementation - # from python_backend import add_piece_wise_lin_python - # self.x, self.y1, self.y2 = add_piece_wise_lin_python( - # self.x, self.y1, self.y2, f.x, f.y1, f.y2) - - # cython version - try: - from cython_add import add_piece_wise_lin_cython as \ - add_piece_wise_lin_impl - except ImportError: - print("Warning: add_piece_wise_lin_cython not found. Make sure \ -that PySpike is installed by running\n 'python setup.py build_ext --inplace'! \ -\n Falling back to slow python backend.") - # use python backend - from python_backend import add_piece_wise_lin_python as \ - add_piece_wise_lin_impl - - self.x, self.y1, self.y2 = add_piece_wise_lin_impl( - self.x, self.y1, self.y2, f.x, f.y1, f.y2) - - def mul_scalar(self, fac): - """ Multiplies the function with a scalar value - - :param fac: Value to multiply - :type fac: double - :rtype: None - """ - self.y1 *= fac - self.y2 *= fac - - -############################################################## -# DiscreteFunction -############################################################## -class DiscreteFunction(object): - """ A class representing values defined on a discrete set of points. - """ - - def __init__(self, x, y, multiplicity): - """ Constructs the discrete function. - - :param x: array of length N defining the points at which the values are - defined. - :param y: array of length N degining the values at the points x. - :param multiplicity: array of length N defining the multiplicity of the - values. - """ - # convert parameters to arrays, also ensures copying - self.x = np.array(x) - self.y = np.array(y) - self.mp = np.array(multiplicity) - - def copy(self): - """ Returns a copy of itself - - :rtype: :class:`DiscreteFunction` - """ - return DiscreteFunction(self.x, self.y, self.mp) - - def almost_equal(self, other, decimal=14): - """ Checks if the function is equal to another function up to `decimal` - precision. - - :param other: another :class:`DiscreteFunction` - :returns: True if the two functions are equal up to `decimal` decimals, - False otherwise - :rtype: bool - """ - eps = 10.0**(-decimal) - return np.allclose(self.x, other.x, atol=eps, rtol=0.0) and \ - np.allclose(self.y, other.y, atol=eps, rtol=0.0) and \ - np.allclose(self.mp, other.mp, atol=eps, rtol=0.0) - - def get_plottable_data(self, averaging_window_size=0): - """ Returns two arrays containing x- and y-coordinates for plotting - the interval sequence. The optional parameter `averaging_window_size` - determines the size of an averaging window to smoothen the profile. If - this value is 0, no averaging is performed. - - :param averaging_window_size: size of the averaging window, default=0. - :returns: (x_plot, y_plot) containing plottable data - :rtype: pair of np.array - - Example:: - - x, y = f.get_plottable_data() - plt.plot(x, y, '-o', label="Discrete function") - """ - - if averaging_window_size > 0: - # for the averaged profile we have to take the multiplicity into - # account. values with higher multiplicity should be consider as if - # they appeared several times. Hence we can not know how many - # entries we have to consider to the left and right. Rather, we - # will iterate until some wanted multiplicity is reached. 
- - # the first value in self.mp contains the number of averaged - # profiles without any possible extra multiplicities - # (by implementation) - expected_mp = (averaging_window_size+1) * int(self.mp[0]) - y_plot = np.zeros_like(self.y) - # compute the values in a loop, could be done in cython if required - for i in xrange(len(y_plot)): - - if self.mp[i] >= expected_mp: - # the current value contains already all the wanted - # multiplicity - y_plot[i] = self.y[i]/self.mp[i] - continue - - # first look to the right - y = self.y[i] - mp_r = self.mp[i] - j = i+1 - while j < len(y_plot): - if mp_r+self.mp[j] < expected_mp: - # if we still dont reach the required multiplicity - # we take the whole value - y += self.y[j] - mp_r += self.mp[j] - else: - # otherwise, just some fraction - y += self.y[j] * (expected_mp - mp_r)/self.mp[j] - mp_r += (expected_mp - mp_r) - break - j += 1 - - # same story to the left - mp_l = self.mp[i] - j = i-1 - while j >= 0: - if mp_l+self.mp[j] < expected_mp: - y += self.y[j] - mp_l += self.mp[j] - else: - y += self.y[j] * (expected_mp - mp_l)/self.mp[j] - mp_l += (expected_mp - mp_l) - break - j -= 1 - y_plot[i] = y/(mp_l+mp_r-self.mp[i]) - return 1.0*self.x, y_plot - - else: # k = 0 - - return 1.0*self.x, 1.0*self.y/self.mp - - def integral(self, interval=None): - """ Returns the integral over the given interval. For the discrete - function, this amounts to the sum over all values divided by the total - multiplicity. - - :param interval: integration interval given as a pair of floats, or a - sequence of pairs in case of multiple intervals, if - None the integral over the whole function is computed. - :type interval: Pair, sequence of pairs, or None. - :returns: the integral - :rtype: float - """ - - def get_indices(ival): - """ Retuns the indeces surrounding the given interval""" - start_ind = np.searchsorted(self.x, ival[0], side='right') - end_ind = np.searchsorted(self.x, ival[1], side='left') - assert start_ind > 0 and end_ind < len(self.x), \ - "Invalid averaging interval" - return start_ind, end_ind - - if interval is None: - # no interval given, integrate over the whole spike train - # don't count the first value, which is zero by definition - return 1.0 * np.sum(self.y[1:-1]) / np.sum(self.mp[1:-1]) - - # check if interval is as sequence - assert isinstance(interval, collections.Sequence), \ - "Invalid value for `interval`. None, Sequence or Tuple expected." - # check if interval is a sequence of intervals - if not isinstance(interval[0], collections.Sequence): - # find the indices corresponding to the interval - start_ind, end_ind = get_indices(interval) - return (np.sum(self.y[start_ind:end_ind]) / - np.sum(self.mp[start_ind:end_ind])) - else: - value = 0.0 - multiplicity = 0.0 - for ival in interval: - # find the indices corresponding to the interval - start_ind, end_ind = get_indices(ival) - value += np.sum(self.y[start_ind:end_ind]) - multiplicity += np.sum(self.mp[start_ind:end_ind]) - return value/multiplicity - - def avrg(self, interval=None): - """ Computes the average of the interval sequence: - :math:`a = 1/N sum f_n ` where N is the number of intervals. - - :param interval: averaging interval given as a pair of floats, a - sequence of pairs for averaging multiple intervals, or - None, if None the average over the whole function is - computed. - :type interval: Pair, sequence of pairs, or None. - :returns: the average a. 
- :rtype: float - """ - return self.integral(interval) - - def add(self, f): - """ Adds another `DiscreteFunction` function to this function. - Note: only functions defined on the same interval can be summed. - - :param f: :class:`DiscreteFunction` function to be added. - :rtype: None - """ - assert self.x[0] == f.x[0], "The functions have different intervals" - assert self.x[-1] == f.x[-1], "The functions have different intervals" - - # cython version - try: - from cython_add import add_discrete_function_cython as \ - add_discrete_function_impl - except ImportError: - print("Warning: add_discrete_function_cython not found. Make \ -sure that PySpike is installed by running\n\ -'python setup.py build_ext --inplace'! \ -\n Falling back to slow python backend.") - # use python backend - from python_backend import add_discrete_function_python as \ - add_discrete_function_impl - - self.x, self.y, self.mp = \ - add_discrete_function_impl(self.x, self.y, self.mp, - f.x, f.y, f.mp) - - def mul_scalar(self, fac): - """ Multiplies the function with a scalar value - - :param fac: Value to multiply - :type fac: double - :rtype: None - """ - self.y *= fac - - -def average_profile(profiles): - """ Computes the average profile from the given ISI- or SPIKE-profiles. - - :param profiles: list of :class:`PieceWiseConstFunc` or - :class:`PieceWiseLinFunc` representing ISI- or - SPIKE-profiles to be averaged. - :returns: the averages profile :math:`` or :math:``. - :rtype: :class:`PieceWiseConstFunc` or :class:`PieceWiseLinFunc` - """ - assert len(profiles) > 1 - - avrg_profile = profiles[0].copy() - for i in xrange(1, len(profiles)): - avrg_profile.add(profiles[i]) - avrg_profile.mul_scalar(1.0/len(profiles)) # normalize - - return avrg_profile diff --git a/test/test_function.py b/test/test_function.py index 933fd2e..d81b03a 100644 --- a/test/test_function.py +++ b/test/test_function.py @@ -92,11 +92,12 @@ def test_pwc_avrg(): y = [0.5, 1.0, -0.25, 0.0, 1.5] f2 = spk.PieceWiseConstFunc(x, y) - f_avrg = spk.average_profile([f1, f2]) + f1.add(f2) + f1.mul_scalar(0.5) x_expected = [0.0, 0.75, 1.0, 2.0, 2.5, 2.7, 4.0] y_expected = [0.75, 1.0, 0.25, 0.625, 0.375, 1.125] - assert_array_almost_equal(f_avrg.x, x_expected, decimal=16) - assert_array_almost_equal(f_avrg.y, y_expected, decimal=16) + assert_array_almost_equal(f1.x, x_expected, decimal=16) + assert_array_almost_equal(f1.y, y_expected, decimal=16) def test_pwl(): @@ -196,11 +197,12 @@ def test_pwl_avrg(): y2_expected = np.array([0.8+1.0+0.5*0.75, 1.5+1.0-0.8*0.25/1.25, -0.4+0.2, 1.5-1.0, 0.75-0.5*0.2/1.5, 2.25]) / 2 - f_avrg = spk.average_profile([f1, f2]) + f1.add(f2) + f1.mul_scalar(0.5) - assert_array_almost_equal(f_avrg.x, x_expected, decimal=16) - assert_array_almost_equal(f_avrg.y1, y1_expected, decimal=16) - assert_array_almost_equal(f_avrg.y2, y2_expected, decimal=16) + assert_array_almost_equal(f1.x, x_expected, decimal=16) + assert_array_almost_equal(f1.y1, y1_expected, decimal=16) + assert_array_almost_equal(f1.y2, y2_expected, decimal=16) def test_df(): @@ -208,7 +210,7 @@ def test_df(): x = [0.0, 1.0, 2.0, 2.5, 4.0] y = [0.0, 1.0, 1.0, 0.0, 1.0] mp = [1.0, 2.0, 1.0, 2.0, 1.0] - f = spk.DiscreteFunction(x, y, mp) + f = spk.DiscreteFunc(x, y, mp) xp, yp = f.get_plottable_data() xp_expected = [0.0, 1.0, 2.0, 2.5, 4.0] @@ -237,6 +239,9 @@ if __name__ == "__main__": test_pwc() test_pwc_add() test_pwc_mul() + test_pwc_avrg() test_pwl() test_pwl_add() test_pwl_mul() + test_pwl_avrg() + test_df() -- cgit v1.2.3 From 
be9eeb2e48115134b93ec0cd9035d97117bd019e Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 3 Feb 2015 10:45:32 +0100 Subject: split distance.py into 3 separate modules --- pyspike/__init__.py | 14 +- pyspike/distances.py | 507 ---------------------------------------------- pyspike/generic.py | 83 ++++++++ pyspike/isi_distance.py | 132 ++++++++++++ pyspike/spike_distance.py | 135 ++++++++++++ pyspike/spike_sync.py | 136 +++++++++++++ 6 files changed, 494 insertions(+), 513 deletions(-) delete mode 100644 pyspike/distances.py create mode 100644 pyspike/generic.py create mode 100644 pyspike/isi_distance.py create mode 100644 pyspike/spike_distance.py create mode 100644 pyspike/spike_sync.py (limited to 'pyspike/__init__.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index f480964..1c2efbc 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -4,18 +4,20 @@ Copyright 2014, Mario Mulansky Distributed under the BSD License """ -__all__ = ["distances", "spikes", "PieceWiseConstFunc", "PieceWiseLinFunc", +__all__ = ["isi_distance", "spike_distance", "spike_sync", + "spikes", "PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc"] from PieceWiseConstFunc import PieceWiseConstFunc from PieceWiseLinFunc import PieceWiseLinFunc from DiscreteFunc import DiscreteFunc -from distances import isi_profile, isi_distance, \ - spike_profile, spike_distance, \ - spike_sync_profile, spike_sync, \ - isi_profile_multi, isi_distance_multi, isi_distance_matrix, \ - spike_profile_multi, spike_distance_multi, spike_distance_matrix, \ +from isi_distance import isi_profile, isi_distance, isi_profile_multi,\ + isi_distance_multi, isi_distance_matrix +from spike_distance import spike_profile, spike_distance, spike_profile_multi,\ + spike_distance_multi, spike_distance_matrix +from spike_sync import spike_sync_profile, spike_sync,\ spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix + from spikes import add_auxiliary_spikes, load_spike_trains_from_txt, \ spike_train_from_string, merge_spike_trains, generate_poisson_spikes diff --git a/pyspike/distances.py b/pyspike/distances.py deleted file mode 100644 index 9077871..0000000 --- a/pyspike/distances.py +++ /dev/null @@ -1,507 +0,0 @@ -""" distances.py - -Module containing several functions to compute spike distances - -Copyright 2014, Mario Mulansky - -Distributed under the BSD License -""" - -import numpy as np -import threading -from functools import partial - -from pyspike import PieceWiseConstFunc, PieceWiseLinFunc, DiscreteFunc - - -############################################################ -# isi_profile -############################################################ -def isi_profile(spikes1, spikes2): - """ Computes the isi-distance profile :math:`S_{isi}(t)` of the two given - spike trains. Retruns the profile as a PieceWiseConstFunc object. The S_isi - values are defined positive S_isi(t)>=0. The spike trains are expected - to have auxiliary spikes at the beginning and end of the interval. Use the - function add_auxiliary_spikes to add those spikes to the spike train. - - :param spikes1: ordered array of spike times with auxiliary spikes. - :param spikes2: ordered array of spike times with auxiliary spikes. - :returns: The isi-distance profile :math:`S_{isi}(t)` - :rtype: :class:`pyspike.function.PieceWiseConstFunc` - - """ - # check for auxiliary spikes - first and last spikes should be identical - assert spikes1[0] == spikes2[0], \ - "Given spike trains seems not to have auxiliary spikes!" 
- assert spikes1[-1] == spikes2[-1], \ - "Given spike trains seems not to have auxiliary spikes!" - - # load cython implementation - try: - from cython_distance import isi_distance_cython as isi_distance_impl - except ImportError: - print("Warning: isi_distance_cython not found. Make sure that PySpike \ -is installed by running\n 'python setup.py build_ext --inplace'!\n \ -Falling back to slow python backend.") - # use python backend - from python_backend import isi_distance_python as isi_distance_impl - - times, values = isi_distance_impl(spikes1, spikes2) - return PieceWiseConstFunc(times, values) - - -############################################################ -# isi_distance -############################################################ -def isi_distance(spikes1, spikes2, interval=None): - """ Computes the isi-distance I of the given spike trains. The - isi-distance is the integral over the isi distance profile - :math:`S_{isi}(t)`: - - .. math:: I = \int_{T_0}^{T_1} S_{isi}(t) dt. - - :param spikes1: ordered array of spike times with auxiliary spikes. - :param spikes2: ordered array of spike times with auxiliary spikes. - :param interval: averaging interval given as a pair of floats (T0, T1), - if None the average over the whole function is computed. - :type interval: Pair of floats or None. - :returns: The isi-distance I. - :rtype: double - """ - return isi_profile(spikes1, spikes2).avrg(interval) - - -############################################################ -# spike_profile -############################################################ -def spike_profile(spikes1, spikes2): - """ Computes the spike-distance profile S_spike(t) of the two given spike - trains. Returns the profile as a PieceWiseLinFunc object. The S_spike - values are defined positive S_spike(t)>=0. The spike trains are expected to - have auxiliary spikes at the beginning and end of the interval. Use the - function add_auxiliary_spikes to add those spikes to the spike train. - - :param spikes1: ordered array of spike times with auxiliary spikes. - :param spikes2: ordered array of spike times with auxiliary spikes. - :returns: The spike-distance profile :math:`S_{spike}(t)`. - :rtype: :class:`pyspike.function.PieceWiseLinFunc` - - """ - # check for auxiliary spikes - first and last spikes should be identical - assert spikes1[0] == spikes2[0], \ - "Given spike trains seems not to have auxiliary spikes!" - assert spikes1[-1] == spikes2[-1], \ - "Given spike trains seems not to have auxiliary spikes!" - - # cython implementation - try: - from cython_distance import spike_distance_cython \ - as spike_distance_impl - except ImportError: - print("Warning: spike_distance_cython not found. Make sure that \ -PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ -Falling back to slow python backend.") - # use python backend - from python_backend import spike_distance_python as spike_distance_impl - - times, y_starts, y_ends = spike_distance_impl(spikes1, spikes2) - return PieceWiseLinFunc(times, y_starts, y_ends) - - -############################################################ -# spike_distance -############################################################ -def spike_distance(spikes1, spikes2, interval=None): - """ Computes the spike-distance S of the given spike trains. The - spike-distance is the integral over the isi distance profile S_spike(t): - - .. math:: S = \int_{T_0}^{T_1} S_{spike}(t) dt. - - :param spikes1: ordered array of spike times with auxiliary spikes. 
- :param spikes2: ordered array of spike times with auxiliary spikes. - :param interval: averaging interval given as a pair of floats (T0, T1), - if None the average over the whole function is computed. - :type interval: Pair of floats or None. - :returns: The spike-distance. - :rtype: double - - """ - return spike_profile(spikes1, spikes2).avrg(interval) - - -############################################################ -# spike_sync_profile -############################################################ -def spike_sync_profile(spikes1, spikes2): - """ Computes the spike-synchronization profile S_sync(t) of the two given - spike trains. Returns the profile as a DiscreteFunction object. The S_sync - values are either 1 or 0, indicating the presence or absence of a - coincidence. The spike trains are expected to have auxiliary spikes at the - beginning and end of the interval. Use the function add_auxiliary_spikes to - add those spikes to the spike train. - - :param spikes1: ordered array of spike times with auxiliary spikes. - :param spikes2: ordered array of spike times with auxiliary spikes. - :returns: The spike-distance profile :math:`S_{sync}(t)`. - :rtype: :class:`pyspike.function.DiscreteFunction` - - """ - - # cython implementation - try: - from cython_distance import coincidence_cython \ - as coincidence_impl - except ImportError: - print("Warning: spike_distance_cython not found. Make sure that \ -PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ -Falling back to slow python backend.") - # use python backend - from python_backend import coincidence_python \ - as coincidence_impl - - times, coincidences, multiplicity = coincidence_impl(spikes1, spikes2) - - return DiscreteFunc(times, coincidences, multiplicity) - - -############################################################ -# spike_sync -############################################################ -def spike_sync(spikes1, spikes2, interval=None): - """ Computes the spike synchronization value SYNC of the given spike - trains. The spike synchronization value is the computed as the total number - of coincidences divided by the total number of spikes: - - .. math:: SYNC = \sum_n C_n / N. - - :param spikes1: ordered array of spike times with auxiliary spikes. - :param spikes2: ordered array of spike times with auxiliary spikes. - :param interval: averaging interval given as a pair of floats (T0, T1), - if None the average over the whole function is computed. - :type interval: Pair of floats or None. - :returns: The spike synchronization value. - :rtype: double - """ - return spike_sync_profile(spikes1, spikes2).avrg(interval) - - -############################################################ -# _generic_profile_multi -############################################################ -def _generic_profile_multi(spike_trains, pair_distance_func, indices=None): - """ Internal implementation detail, don't call this function directly, - use isi_profile_multi or spike_profile_multi instead. - - Computes the multi-variate distance for a set of spike-trains using the - pair_dist_func to compute pair-wise distances. That is it computes the - average distance of all pairs of spike-trains: - :math:`S(t) = 2/((N(N-1)) sum_{} S_{i,j}`, - where the sum goes over all pairs . 
- Args: - - spike_trains: list of spike trains - - pair_distance_func: function computing the distance of two spike trains - - indices: list of indices defining which spike trains to use, - if None all given spike trains are used (default=None) - Returns: - - The averaged multi-variate distance of all pairs - """ - if indices is None: - indices = np.arange(len(spike_trains)) - indices = np.array(indices) - # check validity of indices - assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \ - "Invalid index list." - # generate a list of possible index pairs - pairs = [(indices[i], j) for i in range(len(indices)) - for j in indices[i+1:]] - # start with first pair - (i, j) = pairs[0] - average_dist = pair_distance_func(spike_trains[i], spike_trains[j]) - for (i, j) in pairs[1:]: - current_dist = pair_distance_func(spike_trains[i], spike_trains[j]) - average_dist.add(current_dist) # add to the average - return average_dist, len(pairs) - - -############################################################ -# multi_distance_par -############################################################ -def _multi_distance_par(spike_trains, pair_distance_func, indices=None): - """ parallel implementation of the multi-distance. Not currently used as - it does not improve the performance. - """ - - num_threads = 2 - lock = threading.Lock() - - def run(spike_trains, index_pairs, average_dist): - (i, j) = index_pairs[0] - # print(i,j) - this_avrg = pair_distance_func(spike_trains[i], spike_trains[j]) - for (i, j) in index_pairs[1:]: - # print(i,j) - current_dist = pair_distance_func(spike_trains[i], spike_trains[j]) - this_avrg.add(current_dist) - with lock: - average_dist.add(this_avrg) - - if indices is None: - indices = np.arange(len(spike_trains)) - indices = np.array(indices) - # check validity of indices - assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \ - "Invalid index list." - # generate a list of possible index pairs - pairs = [(indices[i], j) for i in range(len(indices)) - for j in indices[i+1:]] - num_pairs = len(pairs) - - # start with first pair - (i, j) = pairs[0] - average_dist = pair_distance_func(spike_trains[i], spike_trains[j]) - # remove the one we already computed - pairs = pairs[1:] - # distribute the rest into num_threads pieces - clustered_pairs = [pairs[n::num_threads] for n in xrange(num_threads)] - - threads = [] - for pairs in clustered_pairs: - t = threading.Thread(target=run, args=(spike_trains, pairs, - average_dist)) - threads.append(t) - t.start() - for t in threads: - t.join() - average_dist.mul_scalar(1.0/num_pairs) # normalize - return average_dist - - -############################################################ -# isi_profile_multi -############################################################ -def isi_profile_multi(spike_trains, indices=None): - """ computes the multi-variate isi distance profile for a set of spike - trains. 
That is the average isi-distance of all pairs of spike-trains: - S_isi(t) = 2/((N(N-1)) sum_{} S_{isi}^{i,j}, - where the sum goes over all pairs - - :param spike_trains: list of spike trains - :param indices: list of indices defining which spike trains to use, - if None all given spike trains are used (default=None) - :type state: list or None - :returns: The averaged isi profile :math:`(t)` - :rtype: :class:`pyspike.function.PieceWiseConstFunc` - """ - average_dist, M = _generic_profile_multi(spike_trains, isi_profile, - indices) - average_dist.mul_scalar(1.0/M) # normalize - return average_dist - - -############################################################ -# isi_distance_multi -############################################################ -def isi_distance_multi(spike_trains, indices=None, interval=None): - """ computes the multi-variate isi-distance for a set of spike-trains. - That is the time average of the multi-variate spike profile: - I = \int_0^T 2/((N(N-1)) sum_{} S_{isi}^{i,j}, - where the sum goes over all pairs - - :param spike_trains: list of spike trains - :param indices: list of indices defining which spike trains to use, - if None all given spike trains are used (default=None) - :param interval: averaging interval given as a pair of floats, if None - the average over the whole function is computed. - :type interval: Pair of floats or None. - :returns: The time-averaged isi distance :math:`I` - :rtype: double - """ - return isi_profile_multi(spike_trains, indices).avrg(interval) - - -############################################################ -# spike_profile_multi -############################################################ -def spike_profile_multi(spike_trains, indices=None): - """ Computes the multi-variate spike distance profile for a set of spike - trains. That is the average spike-distance of all pairs of spike-trains: - :math:`S_spike(t) = 2/((N(N-1)) sum_{} S_{spike}^{i, j}`, - where the sum goes over all pairs - - :param spike_trains: list of spike trains - :param indices: list of indices defining which spike trains to use, - if None all given spike trains are used (default=None) - :type indices: list or None - :returns: The averaged spike profile :math:`(t)` - :rtype: :class:`pyspike.function.PieceWiseLinFunc` - - """ - average_dist, M = _generic_profile_multi(spike_trains, spike_profile, - indices) - average_dist.mul_scalar(1.0/M) # normalize - return average_dist - - -############################################################ -# spike_distance_multi -############################################################ -def spike_distance_multi(spike_trains, indices=None, interval=None): - """ Computes the multi-variate spike distance for a set of spike trains. - That is the time average of the multi-variate spike profile: - S_{spike} = \int_0^T 2/((N(N-1)) sum_{} S_{spike}^{i, j} dt - where the sum goes over all pairs - - :param spike_trains: list of spike trains - :param indices: list of indices defining which spike trains to use, - if None all given spike trains are used (default=None) - :type indices: list or None - :param interval: averaging interval given as a pair of floats, if None - the average over the whole function is computed. - :type interval: Pair of floats or None. - :returns: The averaged spike distance S. 
- :rtype: double - """ - return spike_profile_multi(spike_trains, indices).avrg(interval) - - -############################################################ -# spike_profile_multi -############################################################ -def spike_sync_profile_multi(spike_trains, indices=None): - """ Computes the multi-variate spike synchronization profile for a set of - spike trains. For each spike in the set of spike trains, the multi-variate - profile is defined as the number of coincidences divided by the number of - spike trains pairs involving the spike train of containing this spike, - which is the number of spike trains minus one (N-1). - - :param spike_trains: list of spike trains - :param indices: list of indices defining which spike trains to use, - if None all given spike trains are used (default=None) - :type indices: list or None - :returns: The multi-variate spike sync profile :math:`(t)` - :rtype: :class:`pyspike.function.DiscreteFunction` - - """ - prof_func = partial(spike_sync_profile) - average_dist, M = _generic_profile_multi(spike_trains, prof_func, - indices) - # average_dist.mul_scalar(1.0/M) # no normalization here! - return average_dist - - -############################################################ -# spike_distance_multi -############################################################ -def spike_sync_multi(spike_trains, indices=None, interval=None): - """ Computes the multi-variate spike synchronization value for a set of - spike trains. - - :param spike_trains: list of spike trains - :param indices: list of indices defining which spike trains to use, - if None all given spike trains are used (default=None) - :type indices: list or None - :param interval: averaging interval given as a pair of floats, if None - the average over the whole function is computed. - :type interval: Pair of floats or None. - :returns: The multi-variate spike synchronization value SYNC. - :rtype: double - """ - return spike_sync_profile_multi(spike_trains, indices).avrg(interval) - - -############################################################ -# generic_distance_matrix -############################################################ -def _generic_distance_matrix(spike_trains, dist_function, - indices=None, interval=None): - """ Internal implementation detail. Don't use this function directly. - Instead use isi_distance_matrix or spike_distance_matrix. - Computes the time averaged distance of all pairs of spike-trains. - Args: - - spike_trains: list of spike trains - - indices: list of indices defining which spike-trains to use - if None all given spike-trains are used (default=None) - Return: - - a 2D array of size len(indices)*len(indices) containing the average - pair-wise distance - """ - if indices is None: - indices = np.arange(len(spike_trains)) - indices = np.array(indices) - # check validity of indices - assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \ - "Invalid index list." 
- # generate a list of possible index pairs - pairs = [(indices[i], j) for i in range(len(indices)) - for j in indices[i+1:]] - - distance_matrix = np.zeros((len(indices), len(indices))) - for i, j in pairs: - d = dist_function(spike_trains[i], spike_trains[j], interval) - distance_matrix[i, j] = d - distance_matrix[j, i] = d - return distance_matrix - - -############################################################ -# isi_distance_matrix -############################################################ -def isi_distance_matrix(spike_trains, indices=None, interval=None): - """ Computes the time averaged isi-distance of all pairs of spike-trains. - - :param spike_trains: list of spike trains - :param indices: list of indices defining which spike trains to use, - if None all given spike trains are used (default=None) - :type indices: list or None - :param interval: averaging interval given as a pair of floats, if None - the average over the whole function is computed. - :type interval: Pair of floats or None. - :returns: 2D array with the pair wise time average isi distances - :math:`I_{ij}` - :rtype: np.array - """ - return _generic_distance_matrix(spike_trains, isi_distance, - indices, interval) - - -############################################################ -# spike_distance_matrix -############################################################ -def spike_distance_matrix(spike_trains, indices=None, interval=None): - """ Computes the time averaged spike-distance of all pairs of spike-trains. - - :param spike_trains: list of spike trains - :param indices: list of indices defining which spike trains to use, - if None all given spike trains are used (default=None) - :type indices: list or None - :param interval: averaging interval given as a pair of floats, if None - the average over the whole function is computed. - :type interval: Pair of floats or None. - :returns: 2D array with the pair wise time average spike distances - :math:`S_{ij}` - :rtype: np.array - """ - return _generic_distance_matrix(spike_trains, spike_distance, - indices, interval) - - -############################################################ -# spike_sync_matrix -############################################################ -def spike_sync_matrix(spike_trains, indices=None, interval=None): - """ Computes the overall spike-synchronization value of all pairs of - spike-trains. - - :param spike_trains: list of spike trains - :param indices: list of indices defining which spike trains to use, - if None all given spike trains are used (default=None) - :type indices: list or None - :param interval: averaging interval given as a pair of floats, if None - the average over the whole function is computed. - :type interval: Pair of floats or None. - :returns: 2D array with the pair wise time spike synchronization values - :math:`SYNC_{ij}` - :rtype: np.array - """ - return _generic_distance_matrix(spike_trains, spike_sync, - indices, interval) diff --git a/pyspike/generic.py b/pyspike/generic.py new file mode 100644 index 0000000..4f278d2 --- /dev/null +++ b/pyspike/generic.py @@ -0,0 +1,83 @@ +""" + +Generic functions to compute multi-variate profiles and distance matrices. 
+
+Copyright 2015, Mario Mulansky
+
+Distributed under the BSD License
+"""
+
+
+import numpy as np
+
+
+############################################################
+# _generic_profile_multi
+############################################################
+def _generic_profile_multi(spike_trains, pair_distance_func, indices=None):
+    """ Internal implementation detail, don't call this function directly,
+    use isi_profile_multi or spike_profile_multi instead.
+
+    Computes the multi-variate distance for a set of spike-trains using the
+    pair_distance_func to compute pair-wise distances. That is, it computes
+    the average distance of all pairs of spike-trains:
+    :math:`S(t) = 2/(N(N-1)) \sum_{<i,j>} S_{i,j}`,
+    where the sum goes over all pairs <i,j>.
+    Args:
+    - spike_trains: list of spike trains
+    - pair_distance_func: function computing the distance of two spike trains
+    - indices: list of indices defining which spike trains to use,
+    if None all given spike trains are used (default=None)
+    Returns:
+    - The averaged multi-variate distance of all pairs
+    """
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+    # start with first pair
+    (i, j) = pairs[0]
+    average_dist = pair_distance_func(spike_trains[i], spike_trains[j])
+    for (i, j) in pairs[1:]:
+        current_dist = pair_distance_func(spike_trains[i], spike_trains[j])
+        average_dist.add(current_dist)      # add to the average
+    return average_dist, len(pairs)
+
+
+############################################################
+# _generic_distance_matrix
+############################################################
+def _generic_distance_matrix(spike_trains, dist_function,
+                             indices=None, interval=None):
+    """ Internal implementation detail. Don't use this function directly.
+    Instead use isi_distance_matrix or spike_distance_matrix.
+    Computes the time averaged distance of all pairs of spike-trains.
+    Args:
+    - spike_trains: list of spike trains
+    - indices: list of indices defining which spike-trains to use
+    if None all given spike-trains are used (default=None)
+    Return:
+    - a 2D array of size len(indices)*len(indices) containing the average
+    pair-wise distance
+    """
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
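+    # dist_function is evaluated once for every unordered pair (i < j)
+    # below; each value is mirrored into the lower triangle, so the
+    # resulting matrix is symmetric with zeros on the diagonal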
+ # generate a list of possible index pairs + pairs = [(indices[i], j) for i in range(len(indices)) + for j in indices[i+1:]] + + distance_matrix = np.zeros((len(indices), len(indices))) + for i, j in pairs: + d = dist_function(spike_trains[i], spike_trains[j], interval) + distance_matrix[i, j] = d + distance_matrix[j, i] = d + return distance_matrix diff --git a/pyspike/isi_distance.py b/pyspike/isi_distance.py new file mode 100644 index 0000000..745d280 --- /dev/null +++ b/pyspike/isi_distance.py @@ -0,0 +1,132 @@ +""" + +Module containing several functions to compute the ISI profiles and distances + +Copyright 2014-2015, Mario Mulansky + +Distributed under the BSD License +""" + +from pyspike import PieceWiseConstFunc +from pyspike.generic import _generic_profile_multi, _generic_distance_matrix + + +############################################################ +# isi_profile +############################################################ +def isi_profile(spikes1, spikes2): + """ Computes the isi-distance profile :math:`S_{isi}(t)` of the two given + spike trains. Retruns the profile as a PieceWiseConstFunc object. The S_isi + values are defined positive S_isi(t)>=0. The spike trains are expected + to have auxiliary spikes at the beginning and end of the interval. Use the + function add_auxiliary_spikes to add those spikes to the spike train. + + :param spikes1: ordered array of spike times with auxiliary spikes. + :param spikes2: ordered array of spike times with auxiliary spikes. + :returns: The isi-distance profile :math:`S_{isi}(t)` + :rtype: :class:`pyspike.function.PieceWiseConstFunc` + + """ + # check for auxiliary spikes - first and last spikes should be identical + assert spikes1[0] == spikes2[0], \ + "Given spike trains seems not to have auxiliary spikes!" + assert spikes1[-1] == spikes2[-1], \ + "Given spike trains seems not to have auxiliary spikes!" + + # load cython implementation + try: + from cython_distance import isi_distance_cython as isi_distance_impl + except ImportError: + print("Warning: isi_distance_cython not found. Make sure that PySpike \ +is installed by running\n 'python setup.py build_ext --inplace'!\n \ +Falling back to slow python backend.") + # use python backend + from python_backend import isi_distance_python as isi_distance_impl + + times, values = isi_distance_impl(spikes1, spikes2) + return PieceWiseConstFunc(times, values) + + +############################################################ +# isi_distance +############################################################ +def isi_distance(spikes1, spikes2, interval=None): + """ Computes the isi-distance I of the given spike trains. The + isi-distance is the integral over the isi distance profile + :math:`S_{isi}(t)`: + + .. math:: I = \int_{T_0}^{T_1} S_{isi}(t) dt. + + :param spikes1: ordered array of spike times with auxiliary spikes. + :param spikes2: ordered array of spike times with auxiliary spikes. + :param interval: averaging interval given as a pair of floats (T0, T1), + if None the average over the whole function is computed. + :type interval: Pair of floats or None. + :returns: The isi-distance I. + :rtype: double + """ + return isi_profile(spikes1, spikes2).avrg(interval) + + +############################################################ +# isi_profile_multi +############################################################ +def isi_profile_multi(spike_trains, indices=None): + """ computes the multi-variate isi distance profile for a set of spike + trains. 
That is the average isi-distance of all pairs of spike-trains:
+    S_isi(t) = 2/(N(N-1)) sum_{<i,j>} S_{isi}^{i,j},
+    where the sum goes over all pairs <i,j>
+
+    :param spike_trains: list of spike trains
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :returns: The averaged isi profile :math:`<S_{isi}>(t)`
+    :rtype: :class:`pyspike.function.PieceWiseConstFunc`
+    """
+    average_dist, M = _generic_profile_multi(spike_trains, isi_profile,
+                                             indices)
+    average_dist.mul_scalar(1.0/M)  # normalize
+    return average_dist
+
+
+############################################################
+# isi_distance_multi
+############################################################
+def isi_distance_multi(spike_trains, indices=None, interval=None):
+    """ computes the multi-variate isi-distance for a set of spike-trains.
+    That is the time average of the multi-variate spike profile:
+    I = \int_0^T 2/(N(N-1)) sum_{<i,j>} S_{isi}^{i,j} dt,
+    where the sum goes over all pairs <i,j>
+
+    :param spike_trains: list of spike trains
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :param interval: averaging interval given as a pair of floats, if None
+                     the average over the whole function is computed.
+    :type interval: Pair of floats or None.
+    :returns: The time-averaged isi distance :math:`I`
+    :rtype: double
+    """
+    return isi_profile_multi(spike_trains, indices).avrg(interval)
+
+
+############################################################
+# isi_distance_matrix
+############################################################
+def isi_distance_matrix(spike_trains, indices=None, interval=None):
+    """ Computes the time averaged isi-distance of all pairs of spike-trains.
+
+    :param spike_trains: list of spike trains
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param interval: averaging interval given as a pair of floats, if None
+                     the average over the whole function is computed.
+    :type interval: Pair of floats or None.
+    :returns: 2D array with the pair wise time average isi distances
+              :math:`I_{ij}`
+    :rtype: np.array
+    """
+    return _generic_distance_matrix(spike_trains, isi_distance,
+                                    indices, interval)
diff --git a/pyspike/spike_distance.py b/pyspike/spike_distance.py
new file mode 100644
index 0000000..2c989a4
--- /dev/null
+++ b/pyspike/spike_distance.py
@@ -0,0 +1,135 @@
+"""
+
+Module containing several functions to compute SPIKE profiles and distances
+
+Copyright 2014-2015, Mario Mulansky
+
+Distributed under the BSD License
+"""
+
+from pyspike import PieceWiseLinFunc
+from pyspike.generic import _generic_profile_multi, _generic_distance_matrix
+
+
+############################################################
+# spike_profile
+############################################################
+def spike_profile(spikes1, spikes2):
+    """ Computes the spike-distance profile S_spike(t) of the two given spike
+    trains. Returns the profile as a PieceWiseLinFunc object. The S_spike
+    values are defined positive S_spike(t)>=0. The spike trains are expected to
+    have auxiliary spikes at the beginning and end of the interval. Use the
+    function add_auxiliary_spikes to add those spikes to the spike train.
+
+    :param spikes1: ordered array of spike times with auxiliary spikes.
+    :param spikes2: ordered array of spike times with auxiliary spikes.
+    :returns: The spike-distance profile :math:`S_{spike}(t)`.
+    :rtype: :class:`pyspike.function.PieceWiseLinFunc`
+
+    """
+    # check for auxiliary spikes - first and last spikes should be identical
+    assert spikes1[0] == spikes2[0], \
+        "Given spike trains seems not to have auxiliary spikes!"
+    assert spikes1[-1] == spikes2[-1], \
+        "Given spike trains seems not to have auxiliary spikes!"
+
+    # cython implementation
+    try:
+        from cython_distance import spike_distance_cython \
+            as spike_distance_impl
+    except ImportError:
+        print("Warning: spike_distance_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+        # use python backend
+        from python_backend import spike_distance_python as spike_distance_impl
+
+    times, y_starts, y_ends = spike_distance_impl(spikes1, spikes2)
+    return PieceWiseLinFunc(times, y_starts, y_ends)
+
+
+############################################################
+# spike_distance
+############################################################
+def spike_distance(spikes1, spikes2, interval=None):
+    """ Computes the spike-distance S of the given spike trains. The
+    spike-distance is the integral over the spike distance profile S_spike(t):
+
+    .. math:: S = \int_{T_0}^{T_1} S_{spike}(t) dt.
+
+    :param spikes1: ordered array of spike times with auxiliary spikes.
+    :param spikes2: ordered array of spike times with auxiliary spikes.
+    :param interval: averaging interval given as a pair of floats (T0, T1),
+                     if None the average over the whole function is computed.
+    :type interval: Pair of floats or None.
+    :returns: The spike-distance.
+    :rtype: double
+
+    """
+    return spike_profile(spikes1, spikes2).avrg(interval)
+
+
+############################################################
+# spike_profile_multi
+############################################################
+def spike_profile_multi(spike_trains, indices=None):
+    """ Computes the multi-variate spike distance profile for a set of spike
+    trains. That is the average spike-distance of all pairs of spike-trains:
+    :math:`S_{spike}(t) = 2/(N(N-1)) \sum_{<i,j>} S_{spike}^{i, j}`,
+    where the sum goes over all pairs <i,j>
+
+    :param spike_trains: list of spike trains
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :returns: The averaged spike profile :math:`<S_{spike}>(t)`
+    :rtype: :class:`pyspike.function.PieceWiseLinFunc`
+
+    """
+    average_dist, M = _generic_profile_multi(spike_trains, spike_profile,
+                                             indices)
+    average_dist.mul_scalar(1.0/M)  # normalize
+    return average_dist
+
+
+############################################################
+# spike_distance_multi
+############################################################
+def spike_distance_multi(spike_trains, indices=None, interval=None):
+    """ Computes the multi-variate spike distance for a set of spike trains.
+    That is the time average of the multi-variate spike profile:
+    S_{spike} = \int_0^T 2/(N(N-1)) sum_{<i,j>} S_{spike}^{i, j} dt,
+    where the sum goes over all pairs <i,j>
+
+    :param spike_trains: list of spike trains
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param interval: averaging interval given as a pair of floats, if None
+                     the average over the whole function is computed.
+    :type interval: Pair of floats or None.
+    :returns: The averaged spike distance S.
+    :rtype: double
+    """
+    return spike_profile_multi(spike_trains, indices).avrg(interval)
+
+
+############################################################
+# spike_distance_matrix
+############################################################
+def spike_distance_matrix(spike_trains, indices=None, interval=None):
+    """ Computes the time averaged spike-distance of all pairs of spike-trains.
+
+    :param spike_trains: list of spike trains
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param interval: averaging interval given as a pair of floats, if None
+                     the average over the whole function is computed.
+    :type interval: Pair of floats or None.
+    :returns: 2D array with the pair wise time average spike distances
+              :math:`S_{ij}`
+    :rtype: np.array
+    """
+    return _generic_distance_matrix(spike_trains, spike_distance,
+                                    indices, interval)
diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py
new file mode 100644
index 0000000..f7cbe1d
--- /dev/null
+++ b/pyspike/spike_sync.py
@@ -0,0 +1,136 @@
+"""
+
+Module containing several functions to compute SPIKE-Synchronization profiles
+and distances
+
+Copyright 2014-2015, Mario Mulansky
+
+Distributed under the BSD License
+"""
+
+from functools import partial
+from pyspike import DiscreteFunc
+from pyspike.generic import _generic_profile_multi, _generic_distance_matrix
+
+
+############################################################
+# spike_sync_profile
+############################################################
+def spike_sync_profile(spikes1, spikes2):
+    """ Computes the spike-synchronization profile S_sync(t) of the two given
+    spike trains. Returns the profile as a DiscreteFunction object. The S_sync
+    values are either 1 or 0, indicating the presence or absence of a
+    coincidence. The spike trains are expected to have auxiliary spikes at the
+    beginning and end of the interval. Use the function add_auxiliary_spikes to
+    add those spikes to the spike train.
+
+    :param spikes1: ordered array of spike times with auxiliary spikes.
+    :param spikes2: ordered array of spike times with auxiliary spikes.
+    :returns: The spike-synchronization profile :math:`S_{sync}(t)`.
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+
+    """
+
+    # cython implementation
+    try:
+        from cython_distance import coincidence_cython \
+            as coincidence_impl
+    except ImportError:
+        print("Warning: spike_distance_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+        # use python backend
+        from python_backend import coincidence_python \
+            as coincidence_impl
+
+    times, coincidences, multiplicity = coincidence_impl(spikes1, spikes2)
+
+    return DiscreteFunc(times, coincidences, multiplicity)
+
+
+############################################################
+# spike_sync
+############################################################
+def spike_sync(spikes1, spikes2, interval=None):
+    """ Computes the spike synchronization value SYNC of the given spike
+    trains. The spike synchronization value is computed as the total number
+    of coincidences divided by the total number of spikes:
+
+    .. math:: SYNC = \sum_n C_n / N.
+
+    :param spikes1: ordered array of spike times with auxiliary spikes.
+    :param spikes2: ordered array of spike times with auxiliary spikes.
+    :param interval: averaging interval given as a pair of floats (T0, T1),
+                     if None the average over the whole function is computed.
+    :type interval: Pair of floats or None.
+    :returns: The spike synchronization value.
+    :rtype: double
+    """
+    return spike_sync_profile(spikes1, spikes2).avrg(interval)
+
+
+############################################################
+# spike_sync_profile_multi
+############################################################
+def spike_sync_profile_multi(spike_trains, indices=None):
+    """ Computes the multi-variate spike synchronization profile for a set of
+    spike trains. For each spike in the set of spike trains, the multi-variate
+    profile is defined as the number of coincidences divided by the number of
+    spike train pairs involving the spike train containing this spike,
+    which is the number of spike trains minus one (N-1).
+
+    :param spike_trains: list of spike trains
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :returns: The multi-variate spike sync profile :math:`<S_{sync}>(t)`
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+
+    """
+    prof_func = partial(spike_sync_profile)
+    average_dist, M = _generic_profile_multi(spike_trains, prof_func,
+                                             indices)
+    # average_dist.mul_scalar(1.0/M)  # no normalization here!
+    return average_dist
+
+
+############################################################
+# spike_distance_multi
+############################################################
+def spike_sync_multi(spike_trains, indices=None, interval=None):
+    """ Computes the multi-variate spike synchronization value for a set of
+    spike trains.
+
+    :param spike_trains: list of spike trains
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param interval: averaging interval given as a pair of floats, if None
+                     the average over the whole function is computed.
+    :type interval: Pair of floats or None.
+    :returns: The multi-variate spike synchronization value SYNC.
+    :rtype: double
+    """
+    return spike_sync_profile_multi(spike_trains, indices).avrg(interval)
+
+
+############################################################
+# spike_sync_matrix
+############################################################
+def spike_sync_matrix(spike_trains, indices=None, interval=None):
+    """ Computes the overall spike-synchronization value of all pairs of
+    spike-trains.
+
+    :param spike_trains: list of spike trains
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param interval: averaging interval given as a pair of floats, if None
+                     the average over the whole function is computed.
+    :type interval: Pair of floats or None.
+ :returns: 2D array with the pair wise time spike synchronization values + :math:`SYNC_{ij}` + :rtype: np.array + """ + return _generic_distance_matrix(spike_trains, spike_sync, + indices, interval) -- cgit v1.2.3 From 7f20d9a8076326c1800373a7f95f4871873f14b0 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 3 Feb 2015 10:46:51 +0100 Subject: copyright, docs --- pyspike/__init__.py | 2 +- pyspike/spike_sync.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) (limited to 'pyspike/__init__.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 1c2efbc..945dd4e 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -1,5 +1,5 @@ """ -Copyright 2014, Mario Mulansky +Copyright 2014-2015, Mario Mulansky Distributed under the BSD License """ diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index f7cbe1d..bded8da 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -95,7 +95,7 @@ def spike_sync_profile_multi(spike_trains, indices=None): ############################################################ -# spike_distance_multi +# spike_sync_multi ############################################################ def spike_sync_multi(spike_trains, indices=None, interval=None): """ Computes the multi-variate spike synchronization value for a set of -- cgit v1.2.3 From 7989b2d337a0e5d2e0223d7fdec73833ff47c7bb Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 3 Feb 2015 16:40:55 +0100 Subject: first version of psth profile --- examples/spike_sync.py | 8 ++++++++ pyspike/__init__.py | 3 ++- pyspike/cython/python_backend.py | 1 - pyspike/psth.py | 27 +++++++++++++++++++++++++++ 4 files changed, 37 insertions(+), 2 deletions(-) create mode 100644 pyspike/psth.py (limited to 'pyspike/__init__.py') diff --git a/examples/spike_sync.py b/examples/spike_sync.py index 7f9e762..9c5f75c 100644 --- a/examples/spike_sync.py +++ b/examples/spike_sync.py @@ -30,6 +30,8 @@ plt.legend(loc="center right") plt.figure() +plt.subplot(211) + f = spk.spike_sync_profile_multi(spike_trains) x, y = f.get_plottable_data() plt.plot(x, y, '-b', alpha=0.7, label="SPIKE-Sync profile") @@ -37,6 +39,12 @@ plt.plot(x, y, '-b', alpha=0.7, label="SPIKE-Sync profile") x1, y1 = f.get_plottable_data(averaging_window_size=50) plt.plot(x1, y1, '-k', lw=2.5, label="averaged SPIKE-Sync profile") +plt.subplot(212) + +f_psth = spk.psth(spike_trains, bin_size=5.0) +x, y = f_psth.get_plottable_data() +plt.plot(x, y, '-k', alpha=1.0, label="PSTH") + print("Average:", f.avrg()) diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 945dd4e..4d3f9f6 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -4,7 +4,7 @@ Copyright 2014-2015, Mario Mulansky Distributed under the BSD License """ -__all__ = ["isi_distance", "spike_distance", "spike_sync", +__all__ = ["isi_distance", "spike_distance", "spike_sync", "psth", "spikes", "PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc"] @@ -18,6 +18,7 @@ from spike_distance import spike_profile, spike_distance, spike_profile_multi,\ spike_distance_multi, spike_distance_matrix from spike_sync import spike_sync_profile, spike_sync,\ spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix +from psth import psth from spikes import add_auxiliary_spikes, load_spike_trains_from_txt, \ spike_train_from_string, merge_spike_trains, generate_poisson_spikes diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index 481daf9..4efefc5 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ 
-482,4 +482,3 @@ def add_discrete_function_python(x1, y1, mp1, x2, y2, mp2): # the last value is again the end of the interval # only use the data that was actually filled return x_new[:index+1], y_new[:index+1], mp_new[:index+1] - diff --git a/pyspike/psth.py b/pyspike/psth.py new file mode 100644 index 0000000..8516460 --- /dev/null +++ b/pyspike/psth.py @@ -0,0 +1,27 @@ +""" + +Module containing functions to compute the PSTH profile + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License +""" + +import numpy as np +from pyspike import PieceWiseConstFunc + + +# Computes the Peristimulus time histogram of a set of spike trains +def psth(spike_trains, bin_size): + + bins = int((spike_trains[0][-1] - spike_trains[0][0]) / bin_size) + + N = len(spike_trains) + combined_spike_train = spike_trains[0][1:-1] + for i in xrange(1, len(spike_trains)): + combined_spike_train = np.append(combined_spike_train, + spike_trains[i][1:-1]) + + vals, edges = np.histogram(combined_spike_train, bins, density=False) + bin_size = edges[1]-edges[0] + return PieceWiseConstFunc(edges, vals/(N*bin_size)) -- cgit v1.2.3 From 27aa30d1fdb830a04b608c702cf7b616115eeb50 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 22 Apr 2015 18:18:30 +0200 Subject: added SpikeTrain class, changed isi_distance spike trains are now represented as SpikeTrain objects consisting of the spike times and the interval edges. The implementation of the ISI-distance has been modified accordingly. The SPIKE-distance and SPIKE-Synchronization are still to be done. --- pyspike/SpikeTrain.py | 34 +++++++++++++++ pyspike/__init__.py | 3 +- pyspike/cython/cython_distance.pyx | 85 ++++++++++++++++++++++++-------------- pyspike/cython/python_backend.py | 72 +++++++++++++++++++++----------- pyspike/isi_distance.py | 17 ++++---- test/test_distance.py | 19 ++++----- 6 files changed, 156 insertions(+), 74 deletions(-) create mode 100644 pyspike/SpikeTrain.py (limited to 'pyspike/__init__.py') diff --git a/pyspike/SpikeTrain.py b/pyspike/SpikeTrain.py new file mode 100644 index 0000000..4760014 --- /dev/null +++ b/pyspike/SpikeTrain.py @@ -0,0 +1,34 @@ +""" Module containing the class representing spike trains for PySpike. + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License +""" + +import numpy as np +import collections + + +class SpikeTrain: + """ Class representing spike trains for the PySpike Module.""" + + def __init__(self, spike_times, interval): + """ Constructs the SpikeTrain + :param spike_times: ordered array of spike times. + :param interval: interval defining the edges of the spike train. + Given as a pair of floats (T0, T1) or a single float T1, where T0=0 is + assumed. 
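+
+        Example: `SpikeTrain([0.1, 0.5, 0.9], 1.0)` and
+        `SpikeTrain([0.1, 0.5, 0.9], (0.0, 1.0))` construct the same
+        spike train.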
+ """ + + # TODO: sanity checks + self.spikes = np.array(spike_times) + + # check if interval is as sequence + if not isinstance(interval, collections.Sequence): + # treat value as end time and assume t_start = 0 + self.t_start = 0.0 + self.t_end = interval + else: + # extract times from sequence + self.t_start = interval[0] + self.t_end = interval[1] diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 4d3f9f6..76e58a1 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -5,12 +5,13 @@ Distributed under the BSD License """ __all__ = ["isi_distance", "spike_distance", "spike_sync", "psth", - "spikes", "PieceWiseConstFunc", "PieceWiseLinFunc", + "spikes", "SpikeTrain", "PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc"] from PieceWiseConstFunc import PieceWiseConstFunc from PieceWiseLinFunc import PieceWiseLinFunc from DiscreteFunc import DiscreteFunc +from SpikeTrain import SpikeTrain from isi_distance import isi_profile, isi_distance, isi_profile_multi,\ isi_distance_multi, isi_distance_matrix diff --git a/pyspike/cython/cython_distance.pyx b/pyspike/cython/cython_distance.pyx index 2834ca5..1d652ee 100644 --- a/pyspike/cython/cython_distance.pyx +++ b/pyspike/cython/cython_distance.pyx @@ -42,57 +42,82 @@ ctypedef np.float_t DTYPE_t ############################################################ # isi_distance_cython ############################################################ -def isi_distance_cython(double[:] s1, - double[:] s2): +def isi_distance_cython(double[:] s1, double[:] s2, + double t_start, double t_end): cdef double[:] spike_events cdef double[:] isi_values cdef int index1, index2, index cdef int N1, N2 cdef double nu1, nu2 - N1 = len(s1)-1 - N2 = len(s2)-1 + N1 = len(s1) + N2 = len(s2) + + spike_events = np.empty(N1+N2+2) + # the values have one entry less as they are defined at the intervals + isi_values = np.empty(N1+N2+1) + + # first x-value of the profile + spike_events[0] = t_start + + # first interspike interval - check if a spike exists at the start time + if s1[0] > t_start: + nu1 = s1[0] - t_start + index1 = -1 + else: + nu1 = s1[1]-s1[0] + index1 = 0 + + if s2[0] > t_start: + nu2 = s2[0] - t_start + index2 = -1 + else: + nu2 = s2[1]-s2[0] + index2 = 0 - nu1 = s1[1]-s1[0] - nu2 = s2[1]-s2[0] - spike_events = np.empty(N1+N2) - spike_events[0] = s1[0] - # the values have one entry less - the number of intervals between events - isi_values = np.empty(N1+N2-1) + isi_values[0] = fabs(nu1-nu2)/fmax(nu1, nu2) + index = 1 with nogil: # release the interpreter to allow multithreading - isi_values[0] = fabs(nu1-nu2)/fmax(nu1, nu2) - index1 = 0 - index2 = 0 - index = 1 - while True: - # check which spike is next - from s1 or s2 - if s1[index1+1] < s2[index2+1]: + while index1+index2 < N1+N2-2: + # check which spike is next, only if there are spikes left in 1 + # next spike in 1 is earlier, or there are no spikes left in 2 + if (index1 < N1-1) and ((index2 == N2-1) or + (s1[index1+1] < s2[index2+1])): index1 += 1 - # break condition relies on existence of spikes at T_end - if index1 >= N1: - break spike_events[index] = s1[index1] - nu1 = s1[index1+1]-s1[index1] - elif s1[index1+1] > s2[index2+1]: + if index1 < N1-1: + nu1 = s1[index1+1]-s1[index1] + else: + nu1 = t_end-s1[index1] + elif (index2 < N2-1) and ((index1 == N1-1) or + (s1[index1+1] > s2[index2+1])): index2 += 1 - if index2 >= N2: - break spike_events[index] = s2[index2] - nu2 = s2[index2+1]-s2[index2] + if index2 < N2-1: + nu2 = s2[index2+1]-s2[index2] + else: + nu2 = t_end-s2[index2] else: # 
s1[index1+1] == s2[index2+1] index1 += 1 index2 += 1 - if (index1 >= N1) or (index2 >= N2): - break spike_events[index] = s1[index1] - nu1 = s1[index1+1]-s1[index1] - nu2 = s2[index2+1]-s2[index2] + if index1 < N1-1: + nu1 = s1[index1+1]-s1[index1] + else: + nu1 = t_end-s1[index1] + if index2 < N2-1: + nu2 = s2[index2+1]-s2[index2] + else: + nu2 = t_end-s2[index2] # compute the corresponding isi-distance isi_values[index] = fabs(nu1 - nu2) / fmax(nu1, nu2) index += 1 # the last event is the interval end - spike_events[index] = s1[N1] + if spike_events[index-1] == t_end: + index -= 1 + else: + spike_events[index] = t_end # end nogil return spike_events[:index+1], isi_values[:index] diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index 749507a..4c37236 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -15,50 +15,72 @@ import numpy as np ############################################################ # isi_distance_python ############################################################ -def isi_distance_python(s1, s2): +def isi_distance_python(s1, s2, t_start, t_end): """ Plain Python implementation of the isi distance. """ - # compute the interspike interval - nu1 = s1[1:] - s1[:-1] - nu2 = s2[1:] - s2[:-1] + N1 = len(s1) + N2 = len(s2) # compute the isi-distance - spike_events = np.empty(len(nu1) + len(nu2)) - spike_events[0] = s1[0] + spike_events = np.empty(N1+N2+2) + spike_events[0] = t_start # the values have one entry less - the number of intervals between events isi_values = np.empty(len(spike_events) - 1) - # add the distance of the first events - # isi_values[0] = nu1[0]/nu2[0] - 1.0 if nu1[0] <= nu2[0] \ - # else 1.0 - nu2[0]/nu1[0] - isi_values[0] = abs(nu1[0] - nu2[0]) / max(nu1[0], nu2[0]) - index1 = 0 - index2 = 0 + if s1[0] > t_start: + nu1 = s1[0] - t_start + index1 = -1 + else: + nu1 = s1[1] - s1[0] + index1 = 0 + if s2[0] > t_start: + nu2 = s2[0] - t_start + index2 = -1 + else: + nu2 = s2[1] - s2[0] + index2 = 0 + + isi_values[0] = abs(nu1 - nu2) / max(nu1, nu2) index = 1 - while True: + while index1+index2 < N1+N2-2: # check which spike is next - from s1 or s2 - if s1[index1+1] < s2[index2+1]: + if (index1 < N1-1) and (index2 == N2-1 or s1[index1+1] < s2[index2+1]): index1 += 1 - # break condition relies on existence of spikes at T_end - if index1 >= len(nu1): - break spike_events[index] = s1[index1] - elif s1[index1+1] > s2[index2+1]: + if index1 < N1-1: + nu1 = s1[index1+1]-s1[index1] + else: + nu1 = t_end-s1[index1] + + elif (index2 < N2-1) and (index1 == N1-1 or + s1[index1+1] > s2[index2+1]): index2 += 1 - if index2 >= len(nu2): - break spike_events[index] = s2[index2] + if index2 < N2-1: + nu2 = s2[index2+1]-s2[index2] + else: + nu2 = t_end-s2[index2] + else: # s1[index1 + 1] == s2[index2 + 1] index1 += 1 index2 += 1 - if (index1 >= len(nu1)) or (index2 >= len(nu2)): - break spike_events[index] = s1[index1] + if index1 < N1-1: + nu1 = s1[index1+1]-s1[index1] + else: + nu1 = t_end-s1[index1] + if index2 < N2-1: + nu2 = s2[index2+1]-s2[index2] + else: + nu2 = t_end-s2[index2] # compute the corresponding isi-distance - isi_values[index] = abs(nu1[index1] - nu2[index2]) / \ - max(nu1[index1], nu2[index2]) + isi_values[index] = abs(nu1 - nu2) / \ + max(nu1, nu2) index += 1 # the last event is the interval end - spike_events[index] = s1[-1] + if spike_events[index-1] == t_end: + index -= 1 + else: + spike_events[index] = t_end # use only the data added above # could be less than original length due to equal 
spike times return spike_events[:index + 1], isi_values[:index] diff --git a/pyspike/isi_distance.py b/pyspike/isi_distance.py index c2ef8e8..a34e135 100644 --- a/pyspike/isi_distance.py +++ b/pyspike/isi_distance.py @@ -14,23 +14,25 @@ from pyspike.generic import _generic_profile_multi, _generic_distance_matrix ############################################################ # isi_profile ############################################################ -def isi_profile(spikes1, spikes2): +def isi_profile(spike_train1, spike_train2): """ Computes the isi-distance profile :math:`S_{isi}(t)` of the two given spike trains. Retruns the profile as a PieceWiseConstFunc object. The S_isi values are defined positive S_isi(t)>=0. The spike trains are expected to have auxiliary spikes at the beginning and end of the interval. Use the function add_auxiliary_spikes to add those spikes to the spike train. - :param spikes1: ordered array of spike times with auxiliary spikes. - :param spikes2: ordered array of spike times with auxiliary spikes. + :param spike_train1: First spike train. + :type spike_train1: :class:`pyspike.SpikeTrain` + :param spike_train2: Second spike train. + :type spike_train2: `SpikeTrain` :returns: The isi-distance profile :math:`S_{isi}(t)` :rtype: :class:`pyspike.function.PieceWiseConstFunc` """ - # check for auxiliary spikes - first and last spikes should be identical - assert spikes1[0] == spikes2[0], \ + # check whether the spike trains are defined for the same interval + assert spike_train1.t_start == spike_train2.t_start, \ "Given spike trains seems not to have auxiliary spikes!" - assert spikes1[-1] == spikes2[-1], \ + assert spike_train1.t_end == spike_train2.t_end, \ "Given spike trains seems not to have auxiliary spikes!" # load cython implementation @@ -45,7 +47,8 @@ Falling back to slow python backend.") from cython.python_backend import isi_distance_python \ as isi_distance_impl - times, values = isi_distance_impl(spikes1, spikes2) + times, values = isi_distance_impl(spike_train1.spikes, spike_train2.spikes, + spike_train1.t_start, spike_train1.t_end) return PieceWiseConstFunc(times, values) diff --git a/test/test_distance.py b/test/test_distance.py index ba19f5e..b54e908 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -15,12 +15,13 @@ from numpy.testing import assert_equal, assert_almost_equal, \ assert_array_almost_equal import pyspike as spk +from pyspike import SpikeTrain def test_isi(): # generate two spike trains: - t1 = np.array([0.2, 0.4, 0.6, 0.7]) - t2 = np.array([0.3, 0.45, 0.8, 0.9, 0.95]) + t1 = SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0) + t2 = SpikeTrain([0.3, 0.45, 0.8, 0.9, 0.95], 1.0) # pen&paper calculation of the isi distance expected_times = [0.0, 0.2, 0.3, 0.4, 0.45, 0.6, 0.7, 0.8, 0.9, 0.95, 1.0] @@ -32,8 +33,6 @@ def test_isi(): expected_isi_val = sum((expected_times[1:] - expected_times[:-1]) * expected_isi)/(expected_times[-1]-expected_times[0]) - t1 = spk.add_auxiliary_spikes(t1, 1.0) - t2 = spk.add_auxiliary_spikes(t2, 1.0) f = spk.isi_profile(t1, t2) # print("ISI: ", f.y) @@ -44,8 +43,8 @@ def test_isi(): assert_equal(spk.isi_distance(t1, t2), expected_isi_val) # check with some equal spike times - t1 = np.array([0.2, 0.4, 0.6]) - t2 = np.array([0.1, 0.4, 0.5, 0.6]) + t1 = SpikeTrain([0.2, 0.4, 0.6], [0.0, 1.0]) + t2 = SpikeTrain([0.1, 0.4, 0.5, 0.6], [0.0, 1.0]) expected_times = [0.0, 0.1, 0.2, 0.4, 0.5, 0.6, 1.0] expected_isi = [0.1/0.2, 0.1/0.3, 0.1/0.3, 0.1/0.2, 0.1/0.2, 0.0/0.5] @@ -55,8 +54,6 @@ def test_isi(): expected_isi_val = 
sum((expected_times[1:] - expected_times[:-1]) * expected_isi)/(expected_times[-1]-expected_times[0]) - t1 = spk.add_auxiliary_spikes(t1, 1.0) - t2 = spk.add_auxiliary_spikes(t2, 1.0) f = spk.isi_profile(t1, t2) assert_equal(f.x, expected_times) @@ -318,6 +315,6 @@ def test_multi_variate_subsets(): if __name__ == "__main__": test_isi() - test_spike() - test_multi_isi() - test_multi_spike() + # test_spike() + # test_multi_isi() + # test_multi_spike() -- cgit v1.2.3 From 3bf9e12e6b5667fb1ea72c969848dacaff3cb470 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Fri, 24 Apr 2015 14:58:39 +0200 Subject: further adjustments in spike sync --- pyspike/SpikeTrain.py | 2 +- pyspike/__init__.py | 4 +- pyspike/cython/cython_distance.pyx | 6 +- pyspike/spike_sync.py | 6 +- pyspike/spikes.py | 113 ++++++++++++------------------------- test/test_distance.py | 5 +- 6 files changed, 49 insertions(+), 87 deletions(-) (limited to 'pyspike/__init__.py') diff --git a/pyspike/SpikeTrain.py b/pyspike/SpikeTrain.py index 041f897..89520c9 100644 --- a/pyspike/SpikeTrain.py +++ b/pyspike/SpikeTrain.py @@ -9,7 +9,7 @@ import numpy as np import collections -class SpikeTrain: +class SpikeTrain(object): """ Class representing spike trains for the PySpike Module.""" def __init__(self, spike_times, interval): diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 76e58a1..a5f9f0a 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -21,5 +21,5 @@ from spike_sync import spike_sync_profile, spike_sync,\ spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix from psth import psth -from spikes import add_auxiliary_spikes, load_spike_trains_from_txt, \ - spike_train_from_string, merge_spike_trains, generate_poisson_spikes +from spikes import load_spike_trains_from_txt, spike_train_from_string, \ + merge_spike_trains, generate_poisson_spikes diff --git a/pyspike/cython/cython_distance.pyx b/pyspike/cython/cython_distance.pyx index 6d998b9..2841da8 100644 --- a/pyspike/cython/cython_distance.pyx +++ b/pyspike/cython/cython_distance.pyx @@ -337,10 +337,10 @@ def spike_distance_cython(double[:] t1, double[:] t2, # coincidence_python ############################################################ cdef inline double get_tau(double[:] spikes1, double[:] spikes2, - int i, int j, max_tau): + int i, int j, double max_tau): cdef double m = 1E100 # some huge number - cdef int N1 = len(spikes1)-1 - cdef int N2 = len(spikes2)-1 + cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1 + cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1 if i < N1 and i > -1: m = fmin(m, spikes1[i+1]-spikes1[i]) if j < N2 and j > -1: diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index bca6f73..8ddd32c 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -109,10 +109,10 @@ def spike_sync_profile_multi(spike_trains, indices=None, max_tau=None): """ prof_func = partial(spike_sync_profile, max_tau=max_tau) - average_dist, M = _generic_profile_multi(spike_trains, prof_func, + average_prof, M = _generic_profile_multi(spike_trains, prof_func, indices) # average_dist.mul_scalar(1.0/M) # no normalization here! - return average_dist + return average_prof ############################################################ @@ -122,7 +122,7 @@ def spike_sync_multi(spike_trains, indices=None, interval=None, max_tau=None): """ Computes the multi-variate spike synchronization value for a set of spike trains. 
- :param spike_trains: list of spike trains + :param spike_trains: list of :class:`pyspike.SpikeTrain` :param indices: list of indices defining which spike trains to use, if None all given spike trains are used (default=None) :type indices: list or None diff --git a/pyspike/spikes.py b/pyspike/spikes.py index 9d7d6f4..128873d 100644 --- a/pyspike/spikes.py +++ b/pyspike/spikes.py @@ -8,82 +8,46 @@ Distributed under the BSD License """ import numpy as np - - -############################################################ -# add_auxiliary_spikes -############################################################ -def add_auxiliary_spikes(spike_train, time_interval): - """ Adds spikes at the beginning and end of the given time interval. - - :param spike_train: ordered array of spike times - :param time_interval: A pair (T_start, T_end) of values representing the - start and end time of the spike train measurement or - a single value representing the end time, the T_start - is then assuemd as 0. Auxiliary spikes will be added - to the spike train at the beginning and end of this - interval, if they are not yet present. - :type time_interval: pair of doubles or double - :returns: spike train with additional spikes at T_start and T_end. - - """ - try: - T_start = time_interval[0] - T_end = time_interval[1] - except: - T_start = 0 - T_end = time_interval - - assert spike_train[0] >= T_start, \ - "Spike train has events before the given start time" - assert spike_train[-1] <= T_end, \ - "Spike train has events after the given end time" - if spike_train[0] != T_start: - spike_train = np.insert(spike_train, 0, T_start) - if spike_train[-1] != T_end: - spike_train = np.append(spike_train, T_end) - return spike_train +from pyspike import SpikeTrain ############################################################ # spike_train_from_string ############################################################ -def spike_train_from_string(s, sep=' ', is_sorted=False): - """ Converts a string of times into an array of spike times. +def spike_train_from_string(s, interval, sep=' ', is_sorted=False): + """ Converts a string of times into a :class:`pyspike.SpikeTrain`. - :param s: the string with (ordered) spike times + :param s: the string with (ordered) spike times. + :param interval: interval defining the edges of the spike train. + Given as a pair of floats (T0, T1) or a single float T1, where T0=0 is + assumed. :param sep: The separator between the time numbers, default=' '. :param is_sorted: if True, the spike times are not sorted after loading, if False, spike times are sorted with `np.sort` - :returns: array of spike times + :returns: :class:`pyspike.SpikeTrain` """ if not(is_sorted): - return np.sort(np.fromstring(s, sep=sep)) + return SpikeTrain(np.sort(np.fromstring(s, sep=sep)), interval) else: - return np.fromstring(s, sep=sep) + return SpikeTrain(np.fromstring(s, sep=sep), interval) ############################################################ # load_spike_trains_txt ############################################################ -def load_spike_trains_from_txt(file_name, time_interval=None, +def load_spike_trains_from_txt(file_name, interval=None, separator=' ', comment='#', is_sorted=False): """ Loads a number of spike trains from a text file. Each line of the text file should contain one spike train as a sequence of spike times separated by `separator`. Empty lines as well as lines starting with `comment` are - neglected. 
The `time_interval` represents the start and the end of the
-    spike trains and it is used to add auxiliary spikes at the beginning and
-    end of each spike train. However, if `time_interval == None`, no auxiliary
-    spikes are added, but note that the Spike and ISI distance both require
-    auxiliary spikes.
+    neglected. The `interval` represents the start and the end of the
+    spike trains.
 
     :param file_name: The name of the text file.
-    :param time_interval: A pair (T_start, T_end) of values representing the
-                          start and end time of the spike train measurement
-                          or a single value representing the end time, the
-                          T_start is then assuemd as 0. Auxiliary spikes will
-                          be added to the spike train at the beginning and end
-                          of this interval.
+    :param interval: A pair (T_start, T_end) of values representing the
+                     start and end time of the spike train measurement
+                     or a single value representing the end time, the
+                     T_start is then assumed as 0.
     :param separator: The character used to seprate the values in the text file
     :param comment: Lines starting with this character are ignored.
     :param sort: If true, the spike times are order via `np.sort`, default=True
@@ -94,9 +58,8 @@ def load_spike_trains_from_txt(file_name, time_interval=None,
     for line in spike_file:
         if len(line) > 1 and not line.startswith(comment):
             # use only the lines with actual data and not commented
-            spike_train = spike_train_from_string(line, separator, is_sorted)
-            if time_interval is not None:  # add auxil. spikes if times given
-                spike_train = add_auxiliary_spikes(spike_train, time_interval)
+            spike_train = spike_train_from_string(line, interval,
+                                                  separator, is_sorted)
             spike_trains.append(spike_train)
     return spike_trains
 
@@ -111,14 +74,14 @@ def merge_spike_trains(spike_trains):
     :returns: spike train with the merged spike times
     """
     # get the lengths of the spike trains
-    lens = np.array([len(st) for st in spike_trains])
+    lens = np.array([len(st.spikes) for st in spike_trains])
     merged_spikes = np.empty(np.sum(lens))
     index = 0                             # the index for merged_spikes
     indices = np.zeros_like(lens)         # indices of the spike trains
     index_list = np.arange(len(indices))  # indices of indices of spike trains
                                           # that have not yet reached the end
     # list of the possible events in the spike trains
-    vals = [spike_trains[i][indices[i]] for i in index_list]
+    vals = [spike_trains[i].spikes[indices[i]] for i in index_list]
     while len(index_list) > 0:
         i = np.argmin(vals)      # the next spike is the minimum
         merged_spikes[index] = vals[i]   # put it to the merged spike train
@@ -127,33 +90,34 @@ def merge_spike_trains(spike_trains):
         indices[i] += 1                # next index for the chosen spike train
         if indices[i] >= lens[i]:      # remove spike train index if ended
             index_list = index_list[index_list != i]
-        vals = [spike_trains[n][indices[n]] for n in index_list]
-    return merged_spikes
+        vals = [spike_trains[n].spikes[indices[n]] for n in index_list]
+    return SpikeTrain(merged_spikes, [spike_trains[0].t_start,
+                                      spike_trains[0].t_end])
 
 
 ############################################################
 # generate_poisson_spikes
 ############################################################
-def generate_poisson_spikes(rate, time_interval, add_aux_spikes=True):
+def generate_poisson_spikes(rate, interval):
     """ Generates a Poisson spike train with the given rate in the given
     time interval
 
     :param rate: The rate of the spike trains
-    :param time_interval: A pair (T_start, T_end) of values representing the
-                          start and end time of the spike train measurement or
-                          a single value representing the end time, the T_start
-                          is then assuemd as 0. Auxiliary spikes will be added
-                          to the spike train at the beginning and end of this
-                          interval, if they are not yet present.
-    :type time_interval: pair of doubles or double
-    :returns: Poisson spike train
+    :param interval: A pair (T_start, T_end) of values representing the
+                     start and end time of the spike train measurement or
+                     a single value representing the end time, the T_start
+                     is then assumed as 0.
+    :type interval: pair of doubles or double
+    :returns: Poisson spike train as a :class:`pyspike.SpikeTrain`
     """
     try:
-        T_start = time_interval[0]
-        T_end = time_interval[1]
+        T_start = interval[0]
+        T_end = interval[1]
     except:
         T_start = 0
-        T_end = time_interval
+        T_end = interval
     # roughly how many spikes are required to fill the interval
     N = max(1, int(1.2 * rate * (T_end-T_start)))
     N_append = max(1, int(0.1 * rate * (T_end-T_start)))
@@ -165,7 +129,4 @@ def generate_poisson_spikes(rate, time_interval, add_aux_spikes=True):
                               np.random.exponential(1.0/rate, N_append))
     spikes = T_start + np.cumsum(intervals)
     spikes = spikes[spikes < T_end]
-    if add_aux_spikes:
-        return add_auxiliary_spikes(spikes, time_interval)
-    else:
-        return spikes
+    return SpikeTrain(spikes, interval)
diff --git a/test/test_distance.py b/test/test_distance.py
index dbb72f1..0fff840 100644
--- a/test/test_distance.py
+++ b/test/test_distance.py
@@ -250,7 +250,8 @@ def test_multi_spike_sync():
     # multivariate regression test
     spike_trains = spk.load_spike_trains_from_txt("test/SPIKE_Sync_Test.txt",
-                                                  time_interval=(0, 4000))
+                                                  interval=(0, 4000))
+    print(spike_trains[0].spikes)
     f = spk.spike_sync_profile_multi(spike_trains)
     assert_equal(np.sum(f.y[1:-1]), 39932)
     assert_equal(np.sum(f.mp[1:-1]), 85554)
@@ -339,4 +340,4 @@ if __name__ == "__main__":
     test_spike_sync()
     test_multi_isi()
     test_multi_spike()
-    test_multi_spike_sync()
+    # test_multi_spike_sync()
--
cgit v1.2.3


From 7757dc01e2d6020e0f6d6e44afdb734e61ef3c9c Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Thu, 30 Apr 2015 17:23:06 +0200
Subject: addresses #5 - added __version__ property

---
 pyspike/__init__.py | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

(limited to 'pyspike/__init__.py')

diff --git a/pyspike/__init__.py b/pyspike/__init__.py
index a5f9f0a..3e836bd 100644
--- a/pyspike/__init__.py
+++ b/pyspike/__init__.py
@@ -23,3 +23,22 @@ from psth import psth
 
 from spikes import load_spike_trains_from_txt, spike_train_from_string, \
     merge_spike_trains, generate_poisson_spikes
+
+
+# define the __version__ following
+# http://stackoverflow.com/questions/17583443
+from pkg_resources import get_distribution, DistributionNotFound
+import os.path
+
+try:
+    _dist = get_distribution('pyspike')
+    # Normalize case for Windows systems
+    dist_loc = os.path.normcase(_dist.location)
+    here = os.path.normcase(__file__)
+    if not here.startswith(os.path.join(dist_loc, 'pyspike')):
+        # not installed, but there is another version that *is*
+        raise DistributionNotFound
+except DistributionNotFound:
+    __version__ = 'Please install this project with setup.py'
+else:
+    __version__ = _dist.version
--
cgit v1.2.3


From 5119d47d0f00c3f7203cf94460730b59a7e473ec Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Tue, 7 Jul 2015 18:55:32 +0200
Subject: add disable_backend_warning property

Users can now disable the warning messages produced when the cython
backend is not available by writing
spk.disable_backend_warning = True in the beginning --- examples/performance.py | 3 +++ examples/plot.py | 1 + pyspike/DiscreteFunc.py | 4 +++- pyspike/PieceWiseConstFunc.py | 7 +++++-- pyspike/PieceWiseLinFunc.py | 9 ++++++--- pyspike/__init__.py | 2 ++ pyspike/isi_distance.py | 6 ++++-- pyspike/spike_distance.py | 4 +++- pyspike/spike_sync.py | 4 +++- 9 files changed, 30 insertions(+), 10 deletions(-) (limited to 'pyspike/__init__.py') diff --git a/examples/performance.py b/examples/performance.py index 1c31e8f..d0c3b91 100644 --- a/examples/performance.py +++ b/examples/performance.py @@ -14,6 +14,9 @@ from datetime import datetime import cProfile import pstats +# in case you dont have the cython backends, disable the warnings as follows: +# spk.disable_backend_warning = True + M = 100 # number of spike trains r = 1.0 # rate of Poisson spike times T = 1E3 # length of spike trains diff --git a/examples/plot.py b/examples/plot.py index c44afd1..1922939 100644 --- a/examples/plot.py +++ b/examples/plot.py @@ -16,6 +16,7 @@ import matplotlib.pyplot as plt import pyspike as spk + spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", edges=(0, 4000)) diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py index 17153ee..a8c054e 100644 --- a/pyspike/DiscreteFunc.py +++ b/pyspike/DiscreteFunc.py @@ -6,6 +6,7 @@ from __future__ import print_function import numpy as np import collections +import pyspike ############################################################## @@ -202,7 +203,8 @@ class DiscreteFunc(object): from cython.cython_add import add_discrete_function_cython as \ add_discrete_function_impl except ImportError: - print("Warning: add_discrete_function_cython not found. Make \ + if not(pyspike.disable_backend_warning): + print("Warning: add_discrete_function_cython not found. Make \ sure that PySpike is installed by running\n\ 'python setup.py build_ext --inplace'! \ \n Falling back to slow python backend.") diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py index 2705443..23ff536 100644 --- a/pyspike/PieceWiseConstFunc.py +++ b/pyspike/PieceWiseConstFunc.py @@ -6,6 +6,7 @@ from __future__ import print_function import numpy as np import collections +import pyspike ############################################################## @@ -191,8 +192,10 @@ class PieceWiseConstFunc(object): from cython.cython_add import add_piece_wise_const_cython as \ add_piece_wise_const_impl except ImportError: - print("Warning: add_piece_wise_const_cython not found. Make sure \ -that PySpike is installed by running\n 'python setup.py build_ext --inplace'! \ + if not(pyspike.disable_backend_warning): + print("Warning: add_piece_wise_const_cython not found. Make \ +sure that PySpike is installed by running\n \ +'python setup.py build_ext --inplace'! \ \n Falling back to slow python backend.") # use python backend from cython.python_backend import add_piece_wise_const_python as \ diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py index c0dd475..0d51c76 100644 --- a/pyspike/PieceWiseLinFunc.py +++ b/pyspike/PieceWiseLinFunc.py @@ -6,6 +6,7 @@ from __future__ import print_function import numpy as np import collections +import pyspike ############################################################## @@ -230,9 +231,11 @@ class PieceWiseLinFunc: from cython.cython_add import add_piece_wise_lin_cython as \ add_piece_wise_lin_impl except ImportError: - print("Warning: add_piece_wise_lin_cython not found. 
Make sure \ -that PySpike is installed by running\n 'python setup.py build_ext --inplace'! \ -\n Falling back to slow python backend.") + if not(pyspike.disable_backend_warning): + print("Warning: add_piece_wise_lin_cython not found. Make \ +sure that PySpike is installed by running\n \ +'python setup.py build_ext --inplace'! \n \ +Falling back to slow python backend.") # use python backend from cython.python_backend import add_piece_wise_lin_python as \ add_piece_wise_lin_impl diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 3e836bd..2060f73 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -42,3 +42,5 @@ except DistributionNotFound: __version__ = 'Please install this project with setup.py' else: __version__ = _dist.version + +disable_backend_warning = False diff --git a/pyspike/isi_distance.py b/pyspike/isi_distance.py index 5ea555d..e50f203 100644 --- a/pyspike/isi_distance.py +++ b/pyspike/isi_distance.py @@ -2,6 +2,7 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License +import pyspike from pyspike import PieceWiseConstFunc from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \ _generic_distance_matrix @@ -34,8 +35,9 @@ def isi_profile(spike_train1, spike_train2): from cython.cython_profiles import isi_profile_cython \ as isi_profile_impl except ImportError: - print("Warning: isi_distance_cython not found. Make sure that PySpike \ -is installed by running\n 'python setup.py build_ext --inplace'!\n \ + if not(pyspike.disable_backend_warning): + print("Warning: isi_profile_cython not found. Make sure that \ +PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend from cython.python_backend import isi_distance_python \ diff --git a/pyspike/spike_distance.py b/pyspike/spike_distance.py index dd6d4f8..feea0c1 100644 --- a/pyspike/spike_distance.py +++ b/pyspike/spike_distance.py @@ -2,6 +2,7 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License +import pyspike from pyspike import PieceWiseLinFunc from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \ _generic_distance_matrix @@ -34,7 +35,8 @@ def spike_profile(spike_train1, spike_train2): from cython.cython_profiles import spike_profile_cython \ as spike_profile_impl except ImportError: - print("Warning: spike_profile_cython not found. Make sure that \ + if not(pyspike.disable_backend_warning): + print("Warning: spike_profile_cython not found. Make sure that \ PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index 40d98d2..10ebdc7 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -5,6 +5,7 @@ import numpy as np from functools import partial +import pyspike from pyspike import DiscreteFunc from pyspike.generic import _generic_profile_multi, _generic_distance_matrix @@ -39,7 +40,8 @@ def spike_sync_profile(spike_train1, spike_train2, max_tau=None): from cython.cython_profiles import coincidence_profile_cython \ as coincidence_profile_impl except ImportError: - print("Warning: spike_distance_cython not found. Make sure that \ + if not(pyspike.disable_backend_warning): + print("Warning: spike_distance_cython not found. 
Make sure that \ PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend -- cgit v1.2.3 From 29e50478dcfc31ce04c4343fa585463abe96caae Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 12 Aug 2015 18:42:54 +0200 Subject: new spike delay asymmetry measures added first version of spike delay asymmetry functions. still incomplete and untested. --- pyspike/__init__.py | 3 +- pyspike/directionality/__init__.py | 12 ++ pyspike/directionality/cython/__init__.py | 0 .../cython/cython_directionality.pyx | 177 +++++++++++++++++ pyspike/directionality/spike_delay_asymmetry.py | 212 +++++++++++++++++++++ pyspike/generic.py | 6 +- setup.py | 28 ++- 7 files changed, 427 insertions(+), 11 deletions(-) create mode 100644 pyspike/directionality/__init__.py create mode 100644 pyspike/directionality/cython/__init__.py create mode 100644 pyspike/directionality/cython/cython_directionality.pyx create mode 100644 pyspike/directionality/spike_delay_asymmetry.py (limited to 'pyspike/__init__.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 2060f73..8d92ea4 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -6,7 +6,7 @@ Distributed under the BSD License __all__ = ["isi_distance", "spike_distance", "spike_sync", "psth", "spikes", "SpikeTrain", "PieceWiseConstFunc", "PieceWiseLinFunc", - "DiscreteFunc"] + "DiscreteFunc", "directionality"] from PieceWiseConstFunc import PieceWiseConstFunc from PieceWiseLinFunc import PieceWiseLinFunc @@ -24,6 +24,7 @@ from psth import psth from spikes import load_spike_trains_from_txt, spike_train_from_string, \ merge_spike_trains, generate_poisson_spikes +import directionality as drct # define the __version__ following # http://stackoverflow.com/questions/17583443 diff --git a/pyspike/directionality/__init__.py b/pyspike/directionality/__init__.py new file mode 100644 index 0000000..e6de1de --- /dev/null +++ b/pyspike/directionality/__init__.py @@ -0,0 +1,12 @@ +""" +Copyright 2015, Mario Mulansky + +Distributed under the BSD License +""" + +__all__ = ["spike_delay_asymmetry"] + +from spike_delay_asymmetry import spike_delay_asymmetry_profile, \ + spike_delay_asymmetry, spike_delay_asymmetry_profile_multi, \ + spike_delay_asymmetry_matrix, optimal_asymmetry_order, \ + optimal_asymmetry_order_from_D, reorder_asymmetry_matrix diff --git a/pyspike/directionality/cython/__init__.py b/pyspike/directionality/cython/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pyspike/directionality/cython/cython_directionality.pyx b/pyspike/directionality/cython/cython_directionality.pyx new file mode 100644 index 0000000..f5ea752 --- /dev/null +++ b/pyspike/directionality/cython/cython_directionality.pyx @@ -0,0 +1,177 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_directionality.pyx + +cython implementation of the spike delay asymmetry measures + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_directionality.pyx + +which gives:: + + cython_directionality.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport fabs +from libc.math cimport fmax +from libc.math cimport fmin + +# from pyspike.cython.cython_distances cimport get_tau + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +############################################################ +# get_tau 
+############################################################ +cdef inline double get_tau(double[:] spikes1, double[:] spikes2, + int i, int j, double interval, double max_tau): + cdef double m = interval # use interval length as initial tau + cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1 + cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1 + if i < N1 and i > -1: + m = fmin(m, spikes1[i+1]-spikes1[i]) + if j < N2 and j > -1: + m = fmin(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = fmin(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = fmin(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = fmin(m, max_tau) + return m + + +############################################################ +# spike_delay_asymmetry_profile_cython +############################################################ +def spike_delay_asymmetry_profile_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int n = 0 + cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times + cdef double[:] a = np.zeros(N1 + N2 + 2) # asymmetry values + cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + st[n] = spikes1[i] + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 after spike train 2 + # both get marked with -1 + a[n] = -1 + a[n-1] = -1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + st[n] = spikes2[j] + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # both get marked with 1 + a[n] = 1 + a[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + n += 1 + # add only one event with zero asymmetry value and multiplicity 2 + st[n] = spikes1[i] + a[n] = 0 + mp[n] = 2 + + st = st[:n+2] + a = a[:n+2] + mp = mp[:n+2] + + st[0] = t_start + st[len(st)-1] = t_end + if N1 + N2 > 0: + a[0] = a[1] + a[len(a)-1] = a[len(a)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + else: + a[0] = 1 + a[1] = 1 + + return st, a, mp + + + +############################################################ +# spike_delay_asymmetry_cython +############################################################ +def spike_delay_asymmetry_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int asym = 0 + cdef int mp = 0 + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 2 appeared before spike in spike train 1 + # mark with -1 + asym -= 1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and 
spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 1 appeared before spike in spike train 2 + # mark with +1 + asym += 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + # add only one event with multiplicity 2, but no asymmetry counting + mp += 2 + + if asym == 0 and mp == 0: + # empty spike trains -> spike sync = 1 by definition + asym = 1 + mp = 1 + + return asym, mp diff --git a/pyspike/directionality/spike_delay_asymmetry.py b/pyspike/directionality/spike_delay_asymmetry.py new file mode 100644 index 0000000..7d59601 --- /dev/null +++ b/pyspike/directionality/spike_delay_asymmetry.py @@ -0,0 +1,212 @@ +# Module containing functions to compute multivariate spike delay asymmetry +# Copyright 2015, Mario Mulansky +# Distributed under the BSD License + +import numpy as np +from math import exp +from functools import partial +# import pyspike +from pyspike import DiscreteFunc +from pyspike.generic import _generic_profile_multi + + +############################################################ +# spike_delay_asymmetry_profile +############################################################ +def spike_delay_asymmetry_profile(spike_train1, spike_train2, max_tau=None): + """ Computes the spike delay asymmetry profile A(t) of the two given + spike trains. Returns the profile as a DiscreteFunction object. + + :param spike_train1: First spike train. + :type spike_train1: :class:`pyspike.SpikeTrain` + :param spike_train2: Second spike train. + :type spike_train2: :class:`pyspike.SpikeTrain` + :param max_tau: Maximum coincidence window size. If 0 or `None`, the + coincidence window has no upper bound. + :returns: The spike-distance profile :math:`S_{sync}(t)`. + :rtype: :class:`pyspike.function.DiscreteFunction` + + """ + # check whether the spike trains are defined for the same interval + assert spike_train1.t_start == spike_train2.t_start, \ + "Given spike trains are not defined on the same interval!" + assert spike_train1.t_end == spike_train2.t_end, \ + "Given spike trains are not defined on the same interval!" + + # cython implementation + try: + from cython.cython_directionality import \ + spike_delay_asymmetry_profile_cython as \ + spike_delay_asymmetry_profile_impl + except ImportError: + raise NotImplementedError() +# if not(pyspike.disable_backend_warning): +# print("Warning: spike_distance_cython not found. Make sure that \ +# PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ +# Falling back to slow python backend.") +# # use python backend +# from cython.python_backend import coincidence_python \ +# as coincidence_profile_impl + + if max_tau is None: + max_tau = 0.0 + + times, coincidences, multiplicity \ + = spike_delay_asymmetry_profile_impl(spike_train1.spikes, + spike_train2.spikes, + spike_train1.t_start, + spike_train1.t_end, + max_tau) + + return DiscreteFunc(times, coincidences, multiplicity) + + +############################################################ +# spike_delay_asymmetry +############################################################ +def spike_delay_asymmetry(spike_train1, spike_train2, + interval=None, max_tau=None): + """ Computes the overall spike delay asymmetry value for two spike trains. 
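+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param interval: averaging interval; only `None`, i.e. the whole
+                     interval, is implemented so far.
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The overall spike delay asymmetry value.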
+    """
+    if interval is None:
+        # distance over the whole interval is requested: use specific function
+        # for optimal performance
+        try:
+            from cython.cython_directionality import \
+                spike_delay_asymmetry_cython as spike_delay_impl
+            if max_tau is None:
+                max_tau = 0.0
+            c, mp = spike_delay_impl(spike_train1.spikes,
+                                     spike_train2.spikes,
+                                     spike_train1.t_start,
+                                     spike_train1.t_end,
+                                     max_tau)
+            return c
+        except ImportError:
+            # Cython backend not available: fall back to profile averaging
+            raise NotImplementedError()
+        # return spike_sync_profile(spike_train1, spike_train2,
+        #                           max_tau).integral(interval)
+    else:
+        # some specific interval is provided: not yet implemented
+        raise NotImplementedError()
+
+
+############################################################
+# spike_delay_asymmetry_profile_multi
+############################################################
+def spike_delay_asymmetry_profile_multi(spike_trains, indices=None,
+                                        max_tau=None):
+    """ Computes the multi-variate spike delay asymmetry profile for a set of
+    spike trains. For each spike in the set of spike trains, the multi-variate
+    profile is defined as the sum of asymmetry values divided by the number of
+    spike train pairs involving the spike train containing this spike,
+    which is the number of spike trains minus one (N-1).
+
+    :param spike_trains: list of :class:`pyspike.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The multi-variate spike sync profile :math:`<S_{sync}>(t)`
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+
+    """
+    prof_func = partial(spike_delay_asymmetry_profile, max_tau=max_tau)
+    average_prof, M = _generic_profile_multi(spike_trains, prof_func,
+                                             indices)
+    # average_dist.mul_scalar(1.0/M)  # no normalization here!
+    return average_prof
+
+
+############################################################
+# spike_delay_asymmetry_matrix
+############################################################
+def spike_delay_asymmetry_matrix(spike_trains, indices=None,
+                                 interval=None, max_tau=None):
+    """ Computes the spike delay asymmetry matrix for the given spike trains.
+    """
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    distance_matrix = np.zeros((len(indices), len(indices)))
+    for i, j in pairs:
+        d = spike_delay_asymmetry(spike_trains[i], spike_trains[j],
+                                  interval, max_tau=max_tau)
+        distance_matrix[i, j] = d
+        distance_matrix[j, i] = -d
+    return distance_matrix
+
+
+############################################################
+# optimal_asymmetry_order_from_D
+############################################################
+def optimal_asymmetry_order_from_D(D, full_output=False):
+    """ finds the best sorting via simulated annealing.
+    Returns the optimal permutation p and A value.
+    Internal function, don't call directly! Use optimal_asymmetry_order
+    instead.
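+
+    :param D: Pairwise spike delay asymmetry matrix, as returned by
+              spike_delay_asymmetry_matrix.
+    :param full_output: If True, the total number of annealing iterations
+                        is returned as well.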
+ """ + N = len(D) + A = np.sum(np.triu(D, 0)) + + p = np.arange(N) + + T = 2*np.max(D) # starting temperature + T_end = 1E-5 * T # final temperature + alpha = 0.9 # cooling factor + total_iter = 0 + while T > T_end: + iterations = 0 + succ_iter = 0 + while iterations < 100*N and succ_iter < 10*N: + # exchange two rows and cols + ind1 = np.random.randint(N-1) + delta_A = -2*D[p[ind1], p[ind1+1]] + if delta_A > 0.0 or exp(delta_A/T) > np.random.random(): + # swap indices + p[ind1], p[ind1+1] = p[ind1+1], p[ind1] + A += delta_A + succ_iter += 1 + iterations += 1 + total_iter += iterations + T *= alpha # cool down + if succ_iter == 0: + break + if full_output: + return p, A, total_iter + else: + return p, A + + +############################################################ +# _optimal_asymmetry_order +############################################################ +def optimal_asymmetry_order(spike_trains, indices=None, interval=None, + max_tau=None, full_output=False): + """ finds the best sorting of the given spike trains via simulated + annealing. + Returns the optimal permutation p and A value. + """ + D = spike_delay_asymmetry_matrix(spike_trains, indices, interval, max_tau) + return optimal_asymmetry_order_from_D(D, full_output) + + +############################################################ +# reorder_asymmetry_matrix +############################################################ +def reorder_asymmetry_matrix(D, p): + N = len(D) + D_p = np.empty_like(D) + for n in xrange(N): + for m in xrange(N): + D_p[n, m] = D[p[n], p[m]] + return D_p diff --git a/pyspike/generic.py b/pyspike/generic.py index 2df34f1..515cbf4 100644 --- a/pyspike/generic.py +++ b/pyspike/generic.py @@ -37,13 +37,15 @@ def _generic_profile_multi(spike_trains, pair_distance_func, indices=None): """ L1 = len(pairs1) if L1 > 1: - dist_prof1 = divide_and_conquer(pairs1[:L1//2], pairs1[int(L1//2):]) + dist_prof1 = divide_and_conquer(pairs1[:L1//2], + pairs1[int(L1//2):]) else: dist_prof1 = pair_distance_func(spike_trains[pairs1[0][0]], spike_trains[pairs1[0][1]]) L2 = len(pairs2) if L2 > 1: - dist_prof2 = divide_and_conquer(pairs2[:L2//2], pairs2[int(L2//2):]) + dist_prof2 = divide_and_conquer(pairs2[:L2//2], + pairs2[int(L2//2):]) else: dist_prof2 = pair_distance_func(spike_trains[pairs2[0][0]], spike_trains[pairs2[0][1]]) diff --git a/setup.py b/setup.py index d853cdf..960c684 100644 --- a/setup.py +++ b/setup.py @@ -23,7 +23,8 @@ else: if os.path.isfile("pyspike/cython/cython_add.c") and \ os.path.isfile("pyspike/cython/cython_profiles.c") and \ - os.path.isfile("pyspike/cython/cython_distances.c"): + os.path.isfile("pyspike/cython/cython_distances.c") and \ + os.path.isfile("pyspike/directionality/cython/cython_directionality.c"): use_c = True else: use_c = False @@ -33,16 +34,26 @@ ext_modules = [] if use_cython: # Cython is available, compile .pyx -> .c ext_modules += [ - Extension("pyspike.cython.cython_add", ["pyspike/cython/cython_add.pyx"]), - Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.pyx"]), - Extension("pyspike.cython.cython_distances", ["pyspike/cython/cython_distances.pyx"]), + Extension("pyspike.cython.cython_add", + ["pyspike/cython/cython_add.pyx"]), + Extension("pyspike.cython.cython_profiles", + ["pyspike/cython/cython_profiles.pyx"]), + Extension("pyspike.cython.cython_distances", + ["pyspike/cython/cython_distances.pyx"]), + Extension("pyspike.directionality.cython.cython_directionality", + ["pyspike/directionality/cython/cython_directionality.pyx"]) ] 
cmdclass.update({'build_ext': build_ext}) elif use_c: # c files are there, compile to binaries ext_modules += [ - Extension("pyspike.cython.cython_add", ["pyspike/cython/cython_add.c"]), - Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.c"]), - Extension("pyspike.cython.cython_distances", ["pyspike/cython/cython_distances.c"]), + Extension("pyspike.cython.cython_add", + ["pyspike/cython/cython_add.c"]), + Extension("pyspike.cython.cython_profiles", + ["pyspike/cython/cython_profiles.c"]), + Extension("pyspike.cython.cython_distances", + ["pyspike/cython/cython_distances.c"]), + Extension("pyspike.directionality.cython.cython_directionality", + ["pyspike/directionality/cython/cython_directionality.c"]) ] # neither cython nor c files available -> automatic fall-back to python backend @@ -81,7 +92,8 @@ train similarity', ], package_data={ 'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c', - 'cython_distances.c'], + 'cython/cython_distances.c', + 'directionality/cython/cython_directionality.c'], 'test': ['Spike_testdata.txt'] } ) -- cgit v1.2.3 From eeb4918ec2181f136e85bce976ec46a35a74b8f1 Mon Sep 17 00:00:00 2001 From: Igor Gnatenko Date: Sun, 13 Dec 2015 10:55:30 +0100 Subject: py3: absolute_import Signed-off-by: Igor Gnatenko --- pyspike/DiscreteFunc.py | 6 +++--- pyspike/PieceWiseConstFunc.py | 6 +++--- pyspike/PieceWiseLinFunc.py | 8 ++++---- pyspike/__init__.py | 22 ++++++++++++---------- pyspike/directionality/__init__.py | 4 +++- pyspike/directionality/spike_train_order.py | 12 +++++++----- pyspike/isi_distance.py | 8 +++++--- pyspike/spike_distance.py | 8 +++++--- pyspike/spike_sync.py | 8 +++++--- 9 files changed, 47 insertions(+), 35 deletions(-) (limited to 'pyspike/__init__.py') diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py index 9cc7bd5..55c0bc8 100644 --- a/pyspike/DiscreteFunc.py +++ b/pyspike/DiscreteFunc.py @@ -2,7 +2,7 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License -from __future__ import print_function +from __future__ import absolute_import, print_function import numpy as np import collections @@ -206,7 +206,7 @@ expected." # cython version try: - from cython.cython_add import add_discrete_function_cython as \ + from .cython.cython_add import add_discrete_function_cython as \ add_discrete_function_impl except ImportError: if not(pyspike.disable_backend_warning): @@ -215,7 +215,7 @@ sure that PySpike is installed by running\n\ 'python setup.py build_ext --inplace'! 
\ \n Falling back to slow python backend.") # use python backend - from cython.python_backend import add_discrete_function_python as \ + from .cython.python_backend import add_discrete_function_python as \ add_discrete_function_impl self.x, self.y, self.mp = \ diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py index 23ff536..5ce5f27 100644 --- a/pyspike/PieceWiseConstFunc.py +++ b/pyspike/PieceWiseConstFunc.py @@ -2,7 +2,7 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License -from __future__ import print_function +from __future__ import absolute_import, print_function import numpy as np import collections @@ -189,7 +189,7 @@ class PieceWiseConstFunc(object): # cython version try: - from cython.cython_add import add_piece_wise_const_cython as \ + from .cython.cython_add import add_piece_wise_const_cython as \ add_piece_wise_const_impl except ImportError: if not(pyspike.disable_backend_warning): @@ -198,7 +198,7 @@ sure that PySpike is installed by running\n \ 'python setup.py build_ext --inplace'! \ \n Falling back to slow python backend.") # use python backend - from cython.python_backend import add_piece_wise_const_python as \ + from .cython.python_backend import add_piece_wise_const_python as \ add_piece_wise_const_impl self.x, self.y = add_piece_wise_const_impl(self.x, self.y, f.x, f.y) diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py index 0d51c76..8145e63 100644 --- a/pyspike/PieceWiseLinFunc.py +++ b/pyspike/PieceWiseLinFunc.py @@ -2,7 +2,7 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License -from __future__ import print_function +from __future__ import absolute_import, print_function import numpy as np import collections @@ -222,13 +222,13 @@ class PieceWiseLinFunc: assert self.x[-1] == f.x[-1], "The functions have different intervals" # python implementation - # from python_backend import add_piece_wise_lin_python + # from .python_backend import add_piece_wise_lin_python # self.x, self.y1, self.y2 = add_piece_wise_lin_python( # self.x, self.y1, self.y2, f.x, f.y1, f.y2) # cython version try: - from cython.cython_add import add_piece_wise_lin_cython as \ + from .cython.cython_add import add_piece_wise_lin_cython as \ add_piece_wise_lin_impl except ImportError: if not(pyspike.disable_backend_warning): @@ -237,7 +237,7 @@ sure that PySpike is installed by running\n \ 'python setup.py build_ext --inplace'! 
\n \ Falling back to slow python backend.") # use python backend - from cython.python_backend import add_piece_wise_lin_python as \ + from .cython.python_backend import add_piece_wise_lin_python as \ add_piece_wise_lin_impl self.x, self.y1, self.y2 = add_piece_wise_lin_impl( diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 8d92ea4..335b1d3 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -4,27 +4,29 @@ Copyright 2014-2015, Mario Mulansky Distributed under the BSD License """ +from __future__ import absolute_import + __all__ = ["isi_distance", "spike_distance", "spike_sync", "psth", "spikes", "SpikeTrain", "PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc", "directionality"] -from PieceWiseConstFunc import PieceWiseConstFunc -from PieceWiseLinFunc import PieceWiseLinFunc -from DiscreteFunc import DiscreteFunc -from SpikeTrain import SpikeTrain +from .PieceWiseConstFunc import PieceWiseConstFunc +from .PieceWiseLinFunc import PieceWiseLinFunc +from .DiscreteFunc import DiscreteFunc +from .SpikeTrain import SpikeTrain -from isi_distance import isi_profile, isi_distance, isi_profile_multi,\ +from .isi_distance import isi_profile, isi_distance, isi_profile_multi,\ isi_distance_multi, isi_distance_matrix -from spike_distance import spike_profile, spike_distance, spike_profile_multi,\ +from .spike_distance import spike_profile, spike_distance, spike_profile_multi,\ spike_distance_multi, spike_distance_matrix -from spike_sync import spike_sync_profile, spike_sync,\ +from .spike_sync import spike_sync_profile, spike_sync,\ spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix -from psth import psth +from .psth import psth -from spikes import load_spike_trains_from_txt, spike_train_from_string, \ +from .spikes import load_spike_trains_from_txt, spike_train_from_string, \ merge_spike_trains, generate_poisson_spikes -import directionality as drct +from . 
import directionality as drct # define the __version__ following # http://stackoverflow.com/questions/17583443 diff --git a/pyspike/directionality/__init__.py b/pyspike/directionality/__init__.py index 6f74c50..6ea38b2 100644 --- a/pyspike/directionality/__init__.py +++ b/pyspike/directionality/__init__.py @@ -4,9 +4,11 @@ Copyright 2015, Mario Mulansky Distributed under the BSD License """ +from __future__ import absolute_import + __all__ = ["spike_train_order"] -from spike_train_order import spike_train_order_profile, \ +from .spike_train_order import spike_train_order_profile, \ spike_train_order, spike_train_order_profile_multi, \ spike_train_order_matrix, spike_order_values, \ optimal_spike_train_order, optimal_spike_train_order_from_matrix, \ diff --git a/pyspike/directionality/spike_train_order.py b/pyspike/directionality/spike_train_order.py index 892ffd0..44d931d 100644 --- a/pyspike/directionality/spike_train_order.py +++ b/pyspike/directionality/spike_train_order.py @@ -2,6 +2,8 @@ # Copyright 2015, Mario Mulansky # Distributed under the BSD License +from __future__ import absolute_import + import numpy as np from math import exp from functools import partial @@ -35,7 +37,7 @@ def spike_train_order_profile(spike_train1, spike_train2, max_tau=None): # cython implementation try: - from cython.cython_directionality import \ + from .cython.cython_directionality import \ spike_train_order_profile_cython as \ spike_train_order_profile_impl except ImportError: @@ -45,7 +47,7 @@ def spike_train_order_profile(spike_train1, spike_train2, max_tau=None): PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from cython.directionality_python_backend import \ + from .cython.directionality_python_backend import \ spike_train_order_python as spike_train_order_profile_impl if max_tau is None: @@ -72,7 +74,7 @@ def spike_train_order(spike_train1, spike_train2, normalize=True, # distance over the whole interval is requested: use specific function # for optimal performance try: - from cython.cython_directionality import \ + from .cython.cython_directionality import \ spike_train_order_cython as spike_train_order_impl if max_tau is None: max_tau = 0.0 @@ -170,7 +172,7 @@ def spike_order_values(spike_trains, indices=None, # cython implementation try: - from cython.cython_directionality import \ + from .cython.cython_directionality import \ spike_order_values_cython as spike_order_values_impl except ImportError: raise NotImplementedError() @@ -179,7 +181,7 @@ def spike_order_values(spike_trains, indices=None, # PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ # Falling back to slow python backend.") # # use python backend -# from cython.python_backend import coincidence_python \ +# from .cython.python_backend import coincidence_python \ # as coincidence_profile_impl if max_tau is None: diff --git a/pyspike/isi_distance.py b/pyspike/isi_distance.py index e50f203..0ae7393 100644 --- a/pyspike/isi_distance.py +++ b/pyspike/isi_distance.py @@ -2,6 +2,8 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License +from __future__ import absolute_import + import pyspike from pyspike import PieceWiseConstFunc from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \ @@ -32,7 +34,7 @@ def isi_profile(spike_train1, spike_train2): # load cython implementation try: - from cython.cython_profiles import isi_profile_cython \ + from .cython.cython_profiles import 
isi_profile_cython \ as isi_profile_impl except ImportError: if not(pyspike.disable_backend_warning): @@ -40,7 +42,7 @@ def isi_profile(spike_train1, spike_train2): PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from cython.python_backend import isi_distance_python \ + from .cython.python_backend import isi_distance_python \ as isi_profile_impl times, values = isi_profile_impl(spike_train1.get_spikes_non_empty(), @@ -74,7 +76,7 @@ def isi_distance(spike_train1, spike_train2, interval=None): # distance over the whole interval is requested: use specific function # for optimal performance try: - from cython.cython_distances import isi_distance_cython \ + from .cython.cython_distances import isi_distance_cython \ as isi_distance_impl return isi_distance_impl(spike_train1.get_spikes_non_empty(), diff --git a/pyspike/spike_distance.py b/pyspike/spike_distance.py index feea0c1..e418283 100644 --- a/pyspike/spike_distance.py +++ b/pyspike/spike_distance.py @@ -2,6 +2,8 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License +from __future__ import absolute_import + import pyspike from pyspike import PieceWiseLinFunc from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \ @@ -32,7 +34,7 @@ def spike_profile(spike_train1, spike_train2): # cython implementation try: - from cython.cython_profiles import spike_profile_cython \ + from .cython.cython_profiles import spike_profile_cython \ as spike_profile_impl except ImportError: if not(pyspike.disable_backend_warning): @@ -40,7 +42,7 @@ def spike_profile(spike_train1, spike_train2): PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from cython.python_backend import spike_distance_python \ + from .cython.python_backend import spike_distance_python \ as spike_profile_impl times, y_starts, y_ends = spike_profile_impl( @@ -76,7 +78,7 @@ def spike_distance(spike_train1, spike_train2, interval=None): # distance over the whole interval is requested: use specific function # for optimal performance try: - from cython.cython_distances import spike_distance_cython \ + from .cython.cython_distances import spike_distance_cython \ as spike_distance_impl return spike_distance_impl(spike_train1.get_spikes_non_empty(), spike_train2.get_spikes_non_empty(), diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index 10ebdc7..3dc29ff 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -3,6 +3,8 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License +from __future__ import absolute_import + import numpy as np from functools import partial import pyspike @@ -37,7 +39,7 @@ def spike_sync_profile(spike_train1, spike_train2, max_tau=None): # cython implementation try: - from cython.cython_profiles import coincidence_profile_cython \ + from .cython.cython_profiles import coincidence_profile_cython \ as coincidence_profile_impl except ImportError: if not(pyspike.disable_backend_warning): @@ -45,7 +47,7 @@ def spike_sync_profile(spike_train1, spike_train2, max_tau=None): PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from cython.python_backend import coincidence_python \ + from .cython.python_backend import coincidence_python \ as coincidence_profile_impl if max_tau is None: @@ -73,7 +75,7 @@ def _spike_sync_values(spike_train1, 
spike_train2, interval, max_tau): # distance over the whole interval is requested: use specific function # for optimal performance try: - from cython.cython_distances import coincidence_value_cython \ + from .cython.cython_distances import coincidence_value_cython \ as coincidence_value_impl if max_tau is None: max_tau = 0.0 -- cgit v1.2.3 From 9061f2a0c13134e53f937d730295a421fd671ea3 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 14 Dec 2015 16:50:55 +0100 Subject: removed directionality from __init__ and setup.py --- pyspike/__init__.py | 2 -- setup.py | 11 +++-------- 2 files changed, 3 insertions(+), 10 deletions(-) (limited to 'pyspike/__init__.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 335b1d3..069090b 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -26,8 +26,6 @@ from .psth import psth from .spikes import load_spike_trains_from_txt, spike_train_from_string, \ merge_spike_trains, generate_poisson_spikes -from . import directionality as drct - # define the __version__ following # http://stackoverflow.com/questions/17583443 from pkg_resources import get_distribution, DistributionNotFound diff --git a/setup.py b/setup.py index a1ab122..8ef431a 100644 --- a/setup.py +++ b/setup.py @@ -23,8 +23,7 @@ else: if os.path.isfile("pyspike/cython/cython_add.c") and \ os.path.isfile("pyspike/cython/cython_profiles.c") and \ - os.path.isfile("pyspike/cython/cython_distances.c") and \ - os.path.isfile("pyspike/directionality/cython/cython_directionality.c"): + os.path.isfile("pyspike/cython/cython_distances.c"): use_c = True else: use_c = False @@ -39,9 +38,7 @@ if use_cython: # Cython is available, compile .pyx -> .c Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.pyx"]), Extension("pyspike.cython.cython_distances", - ["pyspike/cython/cython_distances.pyx"]), - Extension("pyspike.directionality.cython.cython_directionality", - ["pyspike/directionality/cython/cython_directionality.pyx"]) + ["pyspike/cython/cython_distances.pyx"]) ] cmdclass.update({'build_ext': build_ext}) elif use_c: # c files are there, compile to binaries @@ -51,9 +48,7 @@ elif use_c: # c files are there, compile to binaries Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.c"]), Extension("pyspike.cython.cython_distances", - ["pyspike/cython/cython_distances.c"]), - Extension("pyspike.directionality.cython.cython_directionality", - ["pyspike/directionality/cython/cython_directionality.c"]) + ["pyspike/cython/cython_distances.c"]) ] # neither cython nor c files available -> automatic fall-back to python backend -- cgit v1.2.3 From 0d8af2c97d766a4fa514f0232189bc17c31c67a0 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Thu, 24 Mar 2016 15:54:48 +0100 Subject: +function for saving spike trains to txt files save_spike_trains_to_txt allows to save spike train data into txt files which can then be loaded via load_spike_trains_from_txt again. 
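
A minimal round-trip sketch (the file name "my_trains.txt" and the
Poissonian test data are arbitrary example choices):

    import pyspike as spk

    # five random spike trains, saved with one spike train per line
    spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100])
                    for m in range(5)]
    spk.save_spike_trains_to_txt(spike_trains, "my_trains.txt")

    # the saved file can be read back with the existing loader
    spike_trains2 = spk.load_spike_trains_from_txt("my_trains.txt",
                                                   edges=(0, 100))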
--- pyspike/__init__.py | 4 ++-- pyspike/spikes.py | 37 +++++++++++++++++++++++++++++-------- 2 files changed, 31 insertions(+), 10 deletions(-) (limited to 'pyspike/__init__.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 069090b..4d75786 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -23,8 +23,8 @@ from .spike_sync import spike_sync_profile, spike_sync,\ spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix from .psth import psth -from .spikes import load_spike_trains_from_txt, spike_train_from_string, \ - merge_spike_trains, generate_poisson_spikes +from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \ + spike_train_from_string, merge_spike_trains, generate_poisson_spikes # define the __version__ following # http://stackoverflow.com/questions/17583443 diff --git a/pyspike/spikes.py b/pyspike/spikes.py index b18d7eb..966ad69 100644 --- a/pyspike/spikes.py +++ b/pyspike/spikes.py @@ -2,6 +2,7 @@ # Copyright 2014, Mario Mulansky # Distributed under the BSD License + import numpy as np from pyspike import SpikeTrain @@ -25,7 +26,7 @@ def spike_train_from_string(s, edges, sep=' ', is_sorted=False): ############################################################ -# load_spike_trains_txt +# load_spike_trains_from_txt ############################################################ def load_spike_trains_from_txt(file_name, edges, separator=' ', comment='#', is_sorted=False, @@ -47,16 +48,36 @@ def load_spike_trains_from_txt(file_name, edges, :returns: list of :class:`.SpikeTrain` """ spike_trains = [] - spike_file = open(file_name, 'r') - for line in spike_file: - if len(line) > 1 and not line.startswith(comment): - # use only the lines with actual data and not commented - spike_train = spike_train_from_string(line, edges, - separator, is_sorted) - spike_trains.append(spike_train) + with open(file_name, 'r') as spike_file: + for line in spike_file: + if len(line) > 1 and not line.startswith(comment): + # use only the lines with actual data and not commented + spike_train = spike_train_from_string(line, edges, + separator, is_sorted) + spike_trains.append(spike_train) return spike_trains +############################################################ +# save_spike_trains_to_txt +############################################################ +def save_spike_trains_to_txt(spike_trains, file_name, + separator=' ', precision=8): + """ Saves the given spike trains into a file with the given file name. + Each spike train will be stored in one line in the text file with the times + separated by `separator`. + + :param spike_trains: List of :class:`.SpikeTrain` objects + :param file_name: The name of the text file. + """ + # format string to print the spike times with given precision + format_str = "{:0.%de}" % precision + with open(file_name, 'w') as spike_file: + for st in spike_trains: + s = separator.join(map(format_str.format, st.spikes)) + spike_file.write(s+'\n') + + ############################################################ # merge_spike_trains ############################################################ -- cgit v1.2.3 From 4691d0e77a024fbc73d1098ee557d65f8f2ddc89 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Sat, 18 Jun 2016 16:27:51 -0700 Subject: added function to import time series new function import_spike_trains_from_time_series that loads spike trains from time series. 
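
A usage sketch mirroring the new test case (the file name and the binning
parameters are example values):

    import pyspike as spk

    # each line of the file is a binary time series; every bin holding
    # a 1 becomes a spike at that bin's time
    spike_trains = spk.import_spike_trains_from_time_series(
        "time_series.txt", start_time=0, time_bin=1)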
--- pyspike/__init__.py |  1 +
 pyspike/spikes.py | 25 +++++++++++++++++++++++++
 test/test_spikes.py | 19 +++++++++++++++++++
 3 files changed, 45 insertions(+)
(limited to 'pyspike/__init__.py')

diff --git a/pyspike/__init__.py b/pyspike/__init__.py
index 069090b..1e879c4 100644
--- a/pyspike/__init__.py
+++ b/pyspike/__init__.py
@@ -24,6 +24,7 @@ from .spike_sync import spike_sync_profile, spike_sync,\
 from .psth import psth

 from .spikes import load_spike_trains_from_txt, spike_train_from_string, \
+    import_spike_trains_from_time_series, \
    merge_spike_trains, generate_poisson_spikes

 # define the __version__ following
diff --git a/pyspike/spikes.py b/pyspike/spikes.py
index b18d7eb..1bf474c 100644
--- a/pyspike/spikes.py
+++ b/pyspike/spikes.py
@@ -57,6 +57,31 @@ def load_spike_trains_from_txt(file_name, edges,
     return spike_trains


+def import_spike_trains_from_time_series(file_name, start_time, time_bin,
+                                         separator=None, comment='#'):
+    """ Imports spike trains from time series consisting of 0s and 1s denoting
+    the absence or presence of a spike. Each line in the data file represents
+    one spike train.
+
+    :param file_name: The name of the data file containing the time series.
+    :param start_time: The start time of the time series; the spikes of bin i
+                       are assigned the time start_time + (i+1)*time_bin.
+    :param time_bin: The length of a single time bin in the series.
+    :param separator: The character used to separate the values in the text file
+    :param comment: Lines starting with this character are ignored.
+
+    """
+    data = np.loadtxt(file_name, comments=comment, delimiter=separator)
+    time_points = start_time + time_bin + np.arange(len(data[0, :]))*time_bin
+    spike_trains = []
+    for time_series in data:
+        spike_trains.append(SpikeTrain(time_points[time_series > 0],
+                                       edges=[start_time,
+                                              time_points[-1]]))
+    return spike_trains
+
+
 ############################################################
 # merge_spike_trains
 ############################################################
diff --git a/test/test_spikes.py b/test/test_spikes.py
index 609a819..bcface2 100644
--- a/test/test_spikes.py
+++ b/test/test_spikes.py
@@ -17,6 +17,10 @@ import os
 TEST_PATH = os.path.dirname(os.path.realpath(__file__))
 TEST_DATA = os.path.join(TEST_PATH, "PySpike_testdata.txt")

+TIME_SERIES_DATA = os.path.join(TEST_PATH, "time_series.txt")
+TIME_SERIES_SPIKES = os.path.join(TEST_PATH, "time_series_spike_trains.txt")
+
+
 def test_load_from_txt():
     spike_trains = spk.load_spike_trains_from_txt(TEST_DATA, edges=(0, 4000))
     assert len(spike_trains) == 40
@@ -33,6 +37,21 @@ def test_load_from_txt():
     assert spike_train.t_end == 4000


+def test_load_time_series():
+    spike_trains = spk.import_spike_trains_from_time_series(TIME_SERIES_DATA,
+                                                            start_time=0,
+                                                            time_bin=1)
+    assert len(spike_trains) == 40
+    spike_trains_check = spk.load_spike_trains_from_txt(TIME_SERIES_SPIKES,
+                                                        edges=(0, 4000))
+
+    # check spike trains
+    for n in range(len(spike_trains)):
+        assert_equal(spike_trains[n].spikes, spike_trains_check[n].spikes)
+        assert_equal(spike_trains[n].t_start, 0)
+        assert_equal(spike_trains[n].t_end, 4000)
+
+
 def check_merged_spikes(merged_spikes, spike_trains):
     # create a flat array with all spike events
     all_spikes = np.array([])
--
cgit v1.2.3


From 34bd30415dd93a2425ce566627e24ee9483ada3e Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Thu, 20 Sep 2018 10:49:42 -0700
Subject: Spike Order support (#39)

* reorganized directionality module

* further refactoring of directionality

* completed python directionality backend

* added SPIKE-Sync based filtering

new function filter_by_spike_sync removes spikes that have a multi-variate
Spike Sync value below some threshold; not yet fully tested, python backend
missing.

* spike sync filtering, cython sim ann

Added function for filtering out events based on a threshold for the spike
sync values. Useful for focusing on synchronous events during directionality
analysis. Also added cython version of simulated annealing for performance.

* added coincidence single profile to python backend

missing function in python backend added; identified and fixed a bug in the
implementation as well

* updated test case to new spike sync behavior

* python3 fixes

* another python3 fix

* Fix absolute imports in directionality measures

* remove commented code

* Add directionality to docs, bump version

* Clean up directionality module, add doxy.

* Remove debug print from tests

* Fix bug in calling Python backend

* Fix incorrect integrals in PieceWiseConstFunc (#36)

* Add (some currently failing) tests for PieceWiseConstFunc.integral

* Fix implementation of PieceWiseConstFunc.integral

Just by adding a special condition for when we are only taking an integral
"between" two edges of a PieceWiseConstFunc.

All tests now pass. Fixes #33.

* Add PieceWiseConstFunc.integral tests for ValueError

* Add testing bounds of integral

* Raise ValueError in function implementation

* Fix incorrect integrals in PieceWiseLinFunc (#38)

Integrals of piece-wise linear functions were incorrect if the requested
interval lies completely between two support points. This has been fixed,
and a unit test exercising this behavior was added. Fixes #38

* Add Spike Order example and Tutorial section

Adds an example computing spike order profile and the optimal spike train
order. Also adds a section on spike train order to the tutorial.
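
A sketch of the new SPIKE-Sync based filtering; the (spike_trains,
threshold) call signature is assumed here, and the threshold of 0.5 is an
arbitrary example value:

    import pyspike as spk

    spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
                                                  edges=(0, 4000))
    # discard all spikes whose multivariate SPIKE-Sync value lies below
    # the threshold, keeping only sufficiently synchronous events
    filtered_trains = spk.filter_by_spike_sync(spike_trains, 0.5)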
--- Changelog | 3 +
 Readme.rst | 9 +-
 doc/pyspike.rst | 6 +
 doc/tutorial.rst | 66 +++
 examples/spike_train_order.py | 52 +++
 pyspike/PieceWiseConstFunc.py | 32 +-
 pyspike/PieceWiseLinFunc.py | 42 +-
 pyspike/__init__.py | 16 +-
 pyspike/cython/cython_directionality.pyx | 262 ++++++
 pyspike/cython/cython_distances.pyx | 200 +++++
 pyspike/cython/cython_profiles.pyx | 33 ++
 pyspike/cython/cython_simulated_annealing.pyx | 82 ++++
 pyspike/cython/directionality_python_backend.py | 144 +++++++
 pyspike/cython/python_backend.py | 67 ++-
 pyspike/spike_directionality.py | 522 ++++++
 pyspike/spike_sync.py | 55 ++-
 setup.py | 28 +-
 test/test_directionality.py | 97 +++++
 test/test_function.py | 62 +++
 test/test_sync_filter.py | 95 +++++
 20 files changed, 1812 insertions(+), 61 deletions(-)
 create mode 100644 examples/spike_train_order.py
 create mode 100644 pyspike/cython/cython_directionality.pyx
 create mode 100644 pyspike/cython/cython_simulated_annealing.pyx
 create mode 100644 pyspike/cython/directionality_python_backend.py
 create mode 100644 pyspike/spike_directionality.py
 create mode 100644 test/test_directionality.py
 create mode 100644 test/test_sync_filter.py
(limited to 'pyspike/__init__.py')

diff --git a/Changelog b/Changelog
index 21b7cb0..88e16cc 100644
--- a/Changelog
+++ b/Changelog
@@ -1,3 +1,6 @@
+PySpike v0.6:
+ * Support for computing spike directionality and spike train order
+
 PySpike v0.5:
  * First beta release
  * Python 2.6 support removed
diff --git a/Readme.rst b/Readme.rst
index 0422dad..74b014b 100644
--- a/Readme.rst
+++ b/Readme.rst
@@ -31,19 +31,14 @@ Additionally, depending on the used methods: ISI-distance [1], SPIKE-distance [2
 Important Changelog
 -----------------------------

+With version 0.6.0, the spike directionality and spike train order functions have been added.
+
 With version 0.5.0, the interfaces have been unified and the specific
 functions for multivariate computations have become deprecated.

 With version 0.2.0, the :code:`SpikeTrain` class has been introduced to represent
 spike trains.
 This is a breaking change in the function interfaces.
 Hence, programs written for older versions of PySpike (0.1.x) will not run with newer versions.
-
-Upcoming Functionality
--------------------------
-
-In an upcoming release, new functionality for analyzing Synfire patterns based on the new measures SPIKE-Order and Spike-Train-Order method will become part of the PySpike library.
-The new measures and algorithms are described in `this preprint `_.
-
 Requirements and Installation
 -----------------------------
diff --git a/doc/pyspike.rst b/doc/pyspike.rst
index 74ab439..3b10d2a 100644
--- a/doc/pyspike.rst
+++ b/doc/pyspike.rst
@@ -64,6 +64,12 @@ PSTH
     :undoc-members:
     :show-inheritance:

+Directionality
+........................................
+.. automodule:: pyspike.spike_directionality
+    :members:
+    :undoc-members:
+    :show-inheritance:

 Helper functions
 ........................................
diff --git a/doc/tutorial.rst b/doc/tutorial.rst
index aff03a8..377c0a2 100644
--- a/doc/tutorial.rst
+++ b/doc/tutorial.rst
@@ -231,3 +231,69 @@ The following example computes and plots the ISI- and SPIKE-distance matrix as w
     plt.title("SPIKE-Sync")

     plt.show()
+
+
+Quantifying Leaders and Followers: Spike Train Order
+----------------------------------------------------
+
+PySpike provides functionality to quantify how much a set of spike trains
+resembles a synfire pattern (i.e. a perfect leader-follower pattern). For details
+on the algorithms please see
+`our article in NJP `_.
+ +The following example computes the Spike Order profile and Synfire Indicator +of two Poissonian spike trains. + +.. code:: python + import numpy as np + from matplotlib import pyplot as plt + import pyspike as spk + + + st1 = spk.generate_poisson_spikes(1.0, [0, 20]) + st2 = spk.generate_poisson_spikes(1.0, [0, 20]) + + d = spk.spike_directionality(st1, st2) + + print "Spike Directionality of two Poissonian spike trains:", d + + E = spk.spike_train_order_profile(st1, st2) + + plt.figure() + x, y = E.get_plottable_data() + plt.plot(x, y, '-ob') + plt.ylim(-1.1, 1.1) + plt.xlabel("t") + plt.ylabel("E") + plt.title("Spike Train Order Profile") + + plt.show() + +Additionally, PySpike can also compute the optimal ordering of the spike trains, +ie the ordering that most resembles a synfire pattern. The following example +computes the optimal order of a set of 20 Poissonian spike trains: + +.. code:: python + + M = 20 + spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100]) for m in xrange(M)] + + F_init = spk.spike_train_order(spike_trains) + print "Initial Synfire Indicator for 20 Poissonian spike trains:", F_init + + D_init = spk.spike_directionality_matrix(spike_trains) + phi, _ = spk.optimal_spike_train_sorting(spike_trains) + F_opt = spk.spike_train_order(spike_trains, indices=phi) + print "Synfire Indicator of optimized spike train sorting:", F_opt + + D_opt = spk.permutate_matrix(D_init, phi) + + plt.figure() + plt.imshow(D_init) + plt.title("Initial Directionality Matrix") + + plt.figure() + plt.imshow(D_opt) + plt.title("Optimized Directionality Matrix") + + plt.show() diff --git a/examples/spike_train_order.py b/examples/spike_train_order.py new file mode 100644 index 0000000..3a42472 --- /dev/null +++ b/examples/spike_train_order.py @@ -0,0 +1,52 @@ +import numpy as np +from matplotlib import pyplot as plt +import pyspike as spk + + +st1 = spk.generate_poisson_spikes(1.0, [0, 20]) +st2 = spk.generate_poisson_spikes(1.0, [0, 20]) + +d = spk.spike_directionality(st1, st2) + +print "Spike Directionality of two Poissonian spike trains:", d + +E = spk.spike_train_order_profile(st1, st2) + +plt.figure() +x, y = E.get_plottable_data() +plt.plot(x, y, '-ob') +plt.ylim(-1.1, 1.1) +plt.xlabel("t") +plt.ylabel("E") +plt.title("Spike Train Order Profile") + + +###### Optimize spike train order of 20 Random spike trains ####### + +M = 20 + +spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100]) for m in xrange(M)] + +F_init = spk.spike_train_order(spike_trains) + +print "Initial Synfire Indicator for 20 Poissonian spike trains:", F_init + +D_init = spk.spike_directionality_matrix(spike_trains) + +phi, _ = spk.optimal_spike_train_sorting(spike_trains) + +F_opt = spk.spike_train_order(spike_trains, indices=phi) + +print "Synfire Indicator of optimized spike train sorting:", F_opt + +D_opt = spk.permutate_matrix(D_init, phi) + +plt.figure() +plt.imshow(D_init) +plt.title("Initial Directionality Matrix") + +plt.figure() +plt.imshow(D_opt) +plt.title("Optimized Directionality Matrix") + +plt.show() diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py index 5ce5f27..17fdd3f 100644 --- a/pyspike/PieceWiseConstFunc.py +++ b/pyspike/PieceWiseConstFunc.py @@ -129,19 +129,31 @@ class PieceWiseConstFunc(object): # no interval given, integrate over the whole spike train a = np.sum((self.x[1:]-self.x[:-1]) * self.y) else: + if interval[0]>interval[1]: + raise ValueError("Invalid averaging interval: interval[0]>=interval[1]") + if interval[0]self.x[-1]: + raise 
ValueError("Invalid averaging interval: interval[0] 0 and end_ind < len(self.x), \ - "Invalid averaging interval" - # first the contribution from between the indices - a = np.sum((self.x[start_ind+1:end_ind+1] - - self.x[start_ind:end_ind]) * - self.y[start_ind:end_ind]) - # correction from start to first index - a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1] - # correction from last index to end - a += (interval[1]-self.x[end_ind]) * self.y[end_ind] + if start_ind > end_ind: + # contribution from between two closest edges + a = (self.x[start_ind]-self.x[end_ind]) * self.y[end_ind] + # minus the part that is not within the interval + a -= ((interval[0]-self.x[end_ind])+(self.x[start_ind]-interval[1])) * self.y[end_ind] + else: + assert start_ind > 0 and end_ind < len(self.x), \ + "Invalid averaging interval" + # first the contribution from between the indices + a = np.sum((self.x[start_ind+1:end_ind+1] - + self.x[start_ind:end_ind]) * + self.y[start_ind:end_ind]) + # correction from start to first index + a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1] + # correction from last index to end + a += (interval[1]-self.x[end_ind]) * self.y[end_ind] return a def avrg(self, interval=None): diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py index 8145e63..8faaec4 100644 --- a/pyspike/PieceWiseLinFunc.py +++ b/pyspike/PieceWiseLinFunc.py @@ -146,31 +146,47 @@ class PieceWiseLinFunc: if interval is None: # no interval given, integrate over the whole spike train - integral = np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2)) + return np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2)) + + # find the indices corresponding to the interval + start_ind = np.searchsorted(self.x, interval[0], side='right') + end_ind = np.searchsorted(self.x, interval[1], side='left')-1 + assert start_ind > 0 and end_ind < len(self.x), \ + "Invalid averaging interval" + if start_ind > end_ind: + print(start_ind, end_ind, self.x[start_ind]) + # contribution from between two closest edges + y_x0 = intermediate_value(self.x[start_ind-1], + self.x[start_ind], + self.y1[start_ind-1], + self.y2[start_ind-1], + interval[0]) + y_x1 = intermediate_value(self.x[start_ind-1], + self.x[start_ind], + self.y1[start_ind-1], + self.y2[start_ind-1], + interval[1]) + print(y_x0, y_x1, interval[1] - interval[0]) + integral = (y_x0 + y_x1) * 0.5 * (interval[1] - interval[0]) + print(integral) else: - # find the indices corresponding to the interval - start_ind = np.searchsorted(self.x, interval[0], side='right') - end_ind = np.searchsorted(self.x, interval[1], side='left')-1 - assert start_ind > 0 and end_ind < len(self.x), \ - "Invalid averaging interval" # first the contribution from between the indices integral = np.sum((self.x[start_ind+1:end_ind+1] - - self.x[start_ind:end_ind]) * - 0.5*(self.y1[start_ind:end_ind] + - self.y2[start_ind:end_ind])) + self.x[start_ind:end_ind]) * + 0.5*(self.y1[start_ind:end_ind] + + self.y2[start_ind:end_ind])) # correction from start to first index integral += (self.x[start_ind]-interval[0]) * 0.5 * \ (self.y2[start_ind-1] + - intermediate_value(self.x[start_ind-1], + intermediate_value(self.x[start_ind-1], self.x[start_ind], self.y1[start_ind-1], self.y2[start_ind-1], - interval[0] - )) + interval[0])) # correction from last index to end integral += (interval[1]-self.x[end_ind]) * 0.5 * \ (self.y1[end_ind] + - intermediate_value(self.x[end_ind], self.x[end_ind+1], + intermediate_value(self.x[end_ind], self.x[end_ind+1], self.y1[end_ind], 
self.y2[end_ind], interval[1] )) diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 08253fb..3897d18 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -1,5 +1,5 @@ """ -Copyright 2014-2015, Mario Mulansky +Copyright 2014-2018, Mario Mulansky Distributed under the BSD License """ @@ -7,8 +7,8 @@ Distributed under the BSD License from __future__ import absolute_import __all__ = ["isi_distance", "spike_distance", "spike_sync", "psth", - "spikes", "SpikeTrain", "PieceWiseConstFunc", "PieceWiseLinFunc", - "DiscreteFunc", "directionality"] + "spikes", "spike_directionality", "SpikeTrain", + "PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc"] from .PieceWiseConstFunc import PieceWiseConstFunc from .PieceWiseLinFunc import PieceWiseLinFunc @@ -20,13 +20,21 @@ from .isi_distance import isi_profile, isi_distance, isi_profile_multi,\ from .spike_distance import spike_profile, spike_distance, spike_profile_multi,\ spike_distance_multi, spike_distance_matrix from .spike_sync import spike_sync_profile, spike_sync,\ - spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix + spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix,\ + filter_by_spike_sync from .psth import psth from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \ spike_train_from_string, import_spike_trains_from_time_series, \ merge_spike_trains, generate_poisson_spikes +from .spike_directionality import spike_directionality, \ + spike_directionality_values, spike_directionality_matrix, \ + spike_train_order_profile, spike_train_order_profile_bi, \ + spike_train_order_profile_multi, spike_train_order, \ + spike_train_order_bi, spike_train_order_multi, \ + optimal_spike_train_sorting, permutate_matrix + # define the __version__ following # http://stackoverflow.com/questions/17583443 from pkg_resources import get_distribution, DistributionNotFound diff --git a/pyspike/cython/cython_directionality.pyx b/pyspike/cython/cython_directionality.pyx new file mode 100644 index 0000000..ac37690 --- /dev/null +++ b/pyspike/cython/cython_directionality.pyx @@ -0,0 +1,262 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_directionality.pyx + +cython implementation of the spike delay asymmetry measures + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_directionality.pyx + +which gives:: + + cython_directionality.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport fabs +from libc.math cimport fmax +from libc.math cimport fmin + +# from pyspike.cython.cython_distances cimport get_tau + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +############################################################ +# get_tau +############################################################ +cdef inline double get_tau(double[:] spikes1, double[:] spikes2, + int i, int j, double interval, double max_tau): + cdef double m = interval # use interval length as initial tau + cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1 + cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1 + if i < N1 and i > -1: + m = fmin(m, spikes1[i+1]-spikes1[i]) + if j < N2 and j > -1: + m = fmin(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = fmin(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = fmin(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = fmin(m, max_tau) + return m + + 
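+# Note: get_tau above yields the adaptive coincidence window, i.e. half of
+# the minimum of the up to four inter-spike intervals surrounding the pair
+# of spikes (i, j), additionally capped by max_tau whenever max_tau > 0.
+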
+############################################################ +# spike_train_order_profile_cython +############################################################ +def spike_train_order_profile_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int n = 0 + cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times + cdef double[:] a = np.zeros(N1 + N2 + 2) # asymmetry values + cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + st[n] = spikes1[i] + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 after spike train 2 + # both get marked with -1 + a[n] = -1 + a[n-1] = -1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + st[n] = spikes2[j] + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # both get marked with 1 + a[n] = 1 + a[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + n += 1 + # add only one event with zero asymmetry value and multiplicity 2 + st[n] = spikes1[i] + a[n] = 0 + mp[n] = 2 + + st = st[:n+2] + a = a[:n+2] + mp = mp[:n+2] + + st[0] = t_start + st[len(st)-1] = t_end + if N1 + N2 > 0: + a[0] = a[1] + a[len(a)-1] = a[len(a)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + else: + a[0] = 1 + a[1] = 1 + + return st, a, mp + + +############################################################ +# spike_train_order_cython +############################################################ +def spike_train_order_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int d = 0 + cdef int mp = 0 + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 2 appeared before spike in spike train 1 + # mark with -1 + d -= 2 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 1 appeared before spike in spike train 2 + # mark with +1 + d += 2 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + # add only one event with multiplicity 2, but no asymmetry counting + mp += 2 + + if d == 0 and mp == 0: + # empty spike trains -> spike sync = 1 by definition + d = 1 + mp = 1 + + return d, mp + + +############################################################ +# spike_directionality_profiles_cython +############################################################ +def 
spike_directionality_profiles_cython(double[:] spikes1, + double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef double[:] d1 = np.zeros(N1) # directionality values + cdef double[:] d2 = np.zeros(N2) # directionality values + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 after spike train 2 + # leading spike gets +1, following spike -1 + d1[i] = -1 + d2[j] = +1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # leading spike gets +1, following spike -1 + d1[i] = +1 + d2[j] = -1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + # equal spike times: zero asymmetry value + d1[i] = 0 + d2[j] = 0 + + return d1, d2 + + +############################################################ +# spike_directionality_cython +############################################################ +def spike_directionality_cython(double[:] spikes1, + double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int d = 0 # directionality value + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 after spike train 2 + # leading spike gets +1, following spike -1 + d -= 1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # leading spike gets +1, following spike -1 + d += 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + + return d diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index ac5f226..d4070ae 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -178,6 +178,8 @@ cdef inline double isi_avrg_cython(double isi1, double isi2) nogil: return 0.5*(isi1+isi2)*(isi1+isi2) # alternative definition to obtain ~ 0.5 for Poisson spikes # return 0.5*(isi1*isi1+isi2*isi2) + # another alternative definition without second normalization + # return 0.5*(isi1+isi2) ############################################################ @@ -248,6 +250,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, index2 = 0 y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) index = 1 while index1+index2 < N1+N2-2: @@ -267,6 +271,8 @@ def 
spike_distance_cython(double[:] t1, double[:] t2, t_curr = t_p1 s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) @@ -286,6 +292,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, s1 = dt_p1 # s2 is the same as above, thus we can compute y2 immediately y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): index2 += 1 # first calculate the previous interval end value @@ -301,6 +309,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, t_curr = t_p2 s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) @@ -320,6 +330,9 @@ def spike_distance_cython(double[:] t1, double[:] t2, s2 = dt_p2 # s1 is the same as above, thus we can compute y2 immediately y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) + else: # t_f1 == t_f2 - generate only one event index1 += 1 index2 += 1 @@ -358,6 +371,193 @@ def spike_distance_cython(double[:] t1, double[:] t2, s1 = dt_f1 # *(t_end-t1[N1-1])/isi1 s2 = dt_f2 # *(t_end-t2[N2-1])/isi2 y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_end - t_last) + # end nogil + + # use only the data added above + # could be less than original length due to equal spike times + return spike_value / (t_end-t_start) + + +############################################################ +# isi_avrg_rf_cython +############################################################ +cdef inline double isi_avrg_rf_cython(double isi1, double isi2) nogil: + # rate free version + return (isi1+isi2) + + +############################################################ +# spike_distance_rf_cython +############################################################ +def spike_distance_rf_cython(double[:] t1, double[:] t2, + double t_start, double t_end): + + cdef int N1, N2, index1, index2, index + cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2 + cdef double isi1, isi2, s1, s2 + cdef double y_start, y_end, t_last, t_current, spike_value + + spike_value = 0.0 + + N1 = len(t1) + N2 = len(t2) + + with nogil: # release the interpreter to allow multithreading + t_last = t_start + t_p1 = t_start + t_p2 = t_start + if t1[0] > t_start: + # dt_p1 = t2[0]-t_start + t_f1 = t1[0] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) + dt_p1 = dt_f1 + s1 = dt_p1*(t_f1-t_start)/isi1 + index1 = -1 + else: + t_f1 = t1[1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + dt_p1 = 0.0 + isi1 = t1[1]-t1[0] + s1 = dt_p1 + index1 = 0 + if t2[0] > t_start: + # dt_p1 = t2[0]-t_start + t_f2 = t2[0] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = dt_f2 + isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) + s2 = dt_p2*(t_f2-t_start)/isi2 + index2 = -1 + else: + 
t_f2 = t2[1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = 0.0 + isi2 = t2[1]-t2[0] + s2 = dt_p2 + index2 = 0 + + # y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + index = 1 + + while index1+index2 < N1+N2-2: + # print(index, index1, index2) + if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1): + index1 += 1 + # first calculate the previous interval end value + s1 = dt_f1*(t_f1-t_p1) / isi1 + # the previous time now was the following time before: + dt_p1 = dt_f1 + t_p1 = t_f1 # t_p1 contains the current time point + # get the next time + if index1 < N1-1: + t_f1 = t1[index1+1] + else: + t_f1 = t_end + t_curr = t_p1 + s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 + # y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + + # now the next interval start value + if index1 < N1-1: + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1-t_p1 + s1 = dt_p1 + else: + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # s1 needs adjustment due to change of isi1 + s1 = dt_p1*(t_end-t1[N1-1])/isi1 + # s2 is the same as above, thus we can compute y2 immediately + # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): + index2 += 1 + # first calculate the previous interval end value + s2 = dt_f2*(t_f2-t_p2) / isi2 + # the previous time now was the following time before: + dt_p2 = dt_f2 + t_p2 = t_f2 # t_p2 contains the current time point + # get the next time + if index2 < N2-1: + t_f2 = t2[index2+1] + else: + t_f2 = t_end + t_curr = t_p2 + s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 + # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + + # now the next interval start value + if index2 < N2-1: + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2-t_p2 + s2 = dt_p2 + else: + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + # s2 needs adjustment due to change of isi2 + s2 = dt_p2*(t_end-t2[N2-1])/isi2 + # s1 is the same as above, thus we can compute y2 immediately + # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + else: # t_f1 == t_f2 - generate only one event + index1 += 1 + index2 += 1 + t_p1 = t_f1 + t_p2 = t_f2 + dt_p1 = 0.0 + dt_p2 = 0.0 + t_curr = t_f1 + y_end = 0.0 + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + y_start = 0.0 + if index1 < N1-1: + t_f1 = t1[index1+1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1 - t_p1 + else: + t_f1 = t_end + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + if index2 < N2-1: + t_f2 = t2[index2+1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2 - t_p2 + else: + t_f2 = t_end + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + index += 1 + t_last = t_curr 
+ # isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + s1 = dt_f1*(t_end-t1[N1-1])/isi1 + s2 = dt_f2*(t_end-t2[N2-1])/isi2 + # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + spike_value += 0.5*(y_start + y_end) * (t_end - t_last) # end nogil diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index 4a42cdb..aa24db4 100644 --- a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -450,3 +450,36 @@ def coincidence_profile_cython(double[:] spikes1, double[:] spikes2, c[1] = 1 return st, c, mp + + +############################################################ +# coincidence_single_profile_cython +############################################################ +def coincidence_single_profile_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int j = -1 + cdef double[:] c = np.zeros(N1) # coincidences + cdef double interval = t_end - t_start + cdef double tau + for i in xrange(N1): + while j < N2-1 and spikes2[j+1] < spikes1[i]: + # move forward until spikes2[j] is the last spike before spikes1[i] + # note that if spikes2[j] is after spikes1[i] we dont do anything + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and fabs(spikes1[i]-spikes2[j]) < tau: + # current spike in st1 is coincident + c[i] = 1 + if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]): + # in case spikes2[j] is before spikes1[i] it has to be the one + # right before (see above), hence we move one forward and also + # check the next spike + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if fabs(spikes2[j]-spikes1[i]) < tau: + # current spike in st1 is coincident + c[i] = 1 + return c diff --git a/pyspike/cython/cython_simulated_annealing.pyx b/pyspike/cython/cython_simulated_annealing.pyx new file mode 100644 index 0000000..be9423c --- /dev/null +++ b/pyspike/cython/cython_simulated_annealing.pyx @@ -0,0 +1,82 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_simulated_annealing.pyx + +cython implementation of a simulated annealing algorithm to find the optimal +spike train order + +Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects +improves the performance of spike_distance by a factor of 10! 
+ +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_simulated_annealing.pyx + +which gives: + + cython_simulated_annealing.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport exp +from libc.math cimport fmod +from libc.stdlib cimport rand +from libc.stdlib cimport RAND_MAX + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +def sim_ann_cython(double[:, :] D, double T_start, double T_end, double alpha): + + cdef long N = len(D) + cdef double A = np.sum(np.triu(D, 0)) + cdef long[:] p = np.arange(N) + cdef double T = T_start + cdef long iterations + cdef long succ_iter + cdef long total_iter = 0 + cdef double delta_A + cdef long ind1 + cdef long ind2 + + while T > T_end: + iterations = 0 + succ_iter = 0 + # equilibrate for 100*N steps or 10*N successful steps + while iterations < 100*N and succ_iter < 10*N: + # exchange two rows and cols + # ind1 = np.random.randint(N-1) + ind1 = rand() % (N-1) + if ind1 < N-1: + ind2 = ind1+1 + else: # this can never happen! + ind2 = 0 + delta_A = -2*D[p[ind1], p[ind2]] + if delta_A > 0.0 or exp(delta_A/T) > ((1.0*rand()) / RAND_MAX): + # swap indices + p[ind1], p[ind2] = p[ind2], p[ind1] + A += delta_A + succ_iter += 1 + iterations += 1 + total_iter += iterations + T *= alpha # cool down + if succ_iter == 0: + # no successful step -> we believe we have converged + break + + return p, A, total_iter diff --git a/pyspike/cython/directionality_python_backend.py b/pyspike/cython/directionality_python_backend.py new file mode 100644 index 0000000..c1d820b --- /dev/null +++ b/pyspike/cython/directionality_python_backend.py @@ -0,0 +1,144 @@ +""" directionality_python_backend.py + +Collection of python functions that can be used instead of the cython +implementation. 
+ +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +import numpy as np + + +############################################################ +# spike_train_order_python +############################################################ +def spike_directionality_profile_python(spikes1, spikes2, t_start, t_end, + max_tau): + + def get_tau(spikes1, spikes2, i, j, max_tau): + m = t_end - t_start # use interval as initial tau + if i < len(spikes1)-1 and i > -1: + m = min(m, spikes1[i+1]-spikes1[i]) + if j < len(spikes2)-1 and j > -1: + m = min(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = min(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = min(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = min(m, max_tau) + return m + + N1 = len(spikes1) + N2 = len(spikes2) + i = -1 + j = -1 + d1 = np.zeros(N1) # directionality values + d2 = np.zeros(N2) # directionality values + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike in first spike train occurs after second + d1[i] = -1 + d2[j] = +1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike in second spike train occurs after first + d1[i] = +1 + d2[j] = -1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + d1[i] = 0 + d2[j] = 0 + + return d1, d2 + + +############################################################ +# spike_train_order_python +############################################################ +def spike_train_order_profile_python(spikes1, spikes2, t_start, t_end, + max_tau): + + def get_tau(spikes1, spikes2, i, j, max_tau): + m = t_end - t_start # use interval as initial tau + if i < len(spikes1)-1 and i > -1: + m = min(m, spikes1[i+1]-spikes1[i]) + if j < len(spikes2)-1 and j > -1: + m = min(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = min(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = min(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = min(m, max_tau) + return m + + N1 = len(spikes1) + N2 = len(spikes2) + i = -1 + j = -1 + n = 0 + st = np.zeros(N1 + N2 + 2) # spike times + a = np.zeros(N1 + N2 + 2) # coincidences + mp = np.ones(N1 + N2 + 2) # multiplicity + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + st[n] = spikes1[i] + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + a[n] = -1 + a[n-1] = -1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + st[n] = spikes2[j] + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + a[n] = 1 + a[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + n += 1 + # add only one event with zero asymmetry value and multiplicity 2 + st[n] = spikes1[i] + a[n] = 0 + mp[n] = 2 + + st = st[:n+2] + a = a[:n+2] + mp = mp[:n+2] + + st[0] = t_start + st[len(st)-1] = t_end + if N1 + N2 > 0: + a[0] = a[1] + a[len(a)-1] = 
a[len(a)-2]
+        mp[0] = mp[1]
+        mp[len(mp)-1] = mp[len(mp)-2]
+    else:
+        a[0] = 1
+        a[1] = 1
+
+    return st, a, mp
diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py
index 6b7209a..e75f181 100644
--- a/pyspike/cython/python_backend.py
+++ b/pyspike/cython/python_backend.py
@@ -3,7 +3,7 @@ Collection of python functions that can be used instead of the cython
 implementation.
 
-Copyright 2014, Mario Mulansky
+Copyright 2014-2015, Mario Mulansky
 
 Distributed under the BSD License
 
@@ -356,26 +356,27 @@ def cumulative_sync_python(spikes1, spikes2):
     return st, c
 
 
+def get_tau(spikes1, spikes2, i, j, max_tau, init_tau):
+    m = init_tau
+    if i < len(spikes1)-1 and i > -1:
+        m = min(m, spikes1[i+1]-spikes1[i])
+    if j < len(spikes2)-1 and j > -1:
+        m = min(m, spikes2[j+1]-spikes2[j])
+    if i > 0:
+        m = min(m, spikes1[i]-spikes1[i-1])
+    if j > 0:
+        m = min(m, spikes2[j]-spikes2[j-1])
+    m *= 0.5
+    if max_tau > 0.0:
+        m = min(m, max_tau)
+    return m
+
+
 ############################################################
 # coincidence_python
 ############################################################
 def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
 
-    def get_tau(spikes1, spikes2, i, j, max_tau):
-        m = t_end - t_start  # use interval as initial tau
-        if i < len(spikes1)-1 and i > -1:
-            m = min(m, spikes1[i+1]-spikes1[i])
-        if j < len(spikes2)-1 and j > -1:
-            m = min(m, spikes2[j+1]-spikes2[j])
-        if i > 0:
-            m = min(m, spikes1[i]-spikes1[i-1])
-        if j > 0:
-            m = min(m, spikes2[j]-spikes2[j-1])
-        m *= 0.5
-        if max_tau > 0.0:
-            m = min(m, max_tau)
-        return m
-
     N1 = len(spikes1)
     N2 = len(spikes2)
     i = -1
@@ -388,7 +389,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
         if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
             i += 1
             n += 1
-            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
             st[n] = spikes1[i]
             if j > -1 and spikes1[i]-spikes2[j] < tau:
                 # coincidence between the current spike and the previous spike
@@ -398,7 +399,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
         elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
             j += 1
             n += 1
-            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
             st[n] = spikes2[j]
             if i > -1 and spikes2[j]-spikes1[i] < tau:
                 # coincidence between the current spike and the previous spike
@@ -433,6 +434,36 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
     return st, c, mp
 
 
+############################################################
+# coincidence_single_python
+############################################################
+def coincidence_single_python(spikes1, spikes2, t_start, t_end, max_tau):
+
+    N1 = len(spikes1)
+    N2 = len(spikes2)
+    j = -1
+    c = np.zeros(N1)  # coincidences
+    for i in range(N1):
+        while j < N2-1 and spikes2[j+1] < spikes1[i]:
+            # move forward until spikes2[j] is the last spike before spikes1[i]
+            # note that if spikes2[j] is after spikes1[i] we don't do anything
+            j += 1
+        tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
+        if j > -1 and abs(spikes1[i]-spikes2[j]) < tau:
+            # current spike in st1 is coincident
+            c[i] = 1
+        if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]):
+            # in case spikes2[j] is before spikes1[i] it has to be the first or
+            # the one right before (see above), hence we move one forward and
+            # also check the next spike
+            j += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
+            if abs(spikes2[j]-spikes1[i]) < tau:
+                # current spike in st1 is coincident
+                c[i] = 1
+    return c
+
+
 ############################################################
 # add_piece_wise_const_python
 ############################################################
diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py
new file mode 100644
index 0000000..248862c
--- /dev/null
+++ b/pyspike/spike_directionality.py
@@ -0,0 +1,522 @@
+# Module containing functions to compute the SPIKE directionality and the
+# spike train order profile
+# Copyright 2015, Mario Mulansky
+# Distributed under the BSD License
+
+from __future__ import absolute_import
+
+import numpy as np
+import pyspike
+from pyspike import DiscreteFunc
+from functools import partial
+from pyspike.generic import _generic_profile_multi
+
+
+############################################################
+# spike_directionality_values
+############################################################
+def spike_directionality_values(*args, **kwargs):
+    """ Computes the spike directionality value for each spike in
+    each spike train. Returns a list containing an array of spike directionality
+    values for every given spike train.
+
+    Valid call structures::
+
+      spike_directionality_values(st1, st2)  # returns the bi-variate profile
+      spike_directionality_values(st1, st2, st3)  # multi-variate profile of 3
+                                                  # spike trains
+
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_directionality_values(spike_trains)  # profile of the list of spike trains
+      spike_directionality_values(spike_trains, indices=[0, 1])  # use only the spike trains
+                                                                 # given by the indices
+
+    Additional arguments:
+    :param max_tau: Upper bound for coincidence window (default=None).
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+
+    :returns: The spike directionality values :math:`D^n_i` as a list of arrays.
+    """
+    if len(args) == 1:
+        return _spike_directionality_values_impl(args[0], **kwargs)
+    else:
+        return _spike_directionality_values_impl(args, **kwargs)
+
+
+def _spike_directionality_values_impl(spike_trains, indices=None,
+                                      interval=None, max_tau=None):
+    """ Computes the multi-variate spike directionality profile
+    of the given spike trains.
+
+    :param spike_trains: List of spike trains.
+    :type spike_trains: List of :class:`pyspike.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike-directionality values.
+    """
+    if interval is not None:
+        raise NotImplementedError("Parameter `interval` not supported.")
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # list of arrays for resulting asymmetry values
+    asymmetry_list = [np.zeros_like(spike_trains[n].spikes) for n in indices]
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    # cython implementation
+    try:
+        from .cython.cython_directionality import \
+            spike_directionality_profiles_cython as profile_impl
+    except ImportError:
+        if not(pyspike.disable_backend_warning):
+            print("Warning: cython_directionality not found. Make sure that \
PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+        # use python backend
+        from .cython.directionality_python_backend import \
+            spike_directionality_profile_python as profile_impl
+
+    if max_tau is None:
+        max_tau = 0.0
+
+    for i, j in pairs:
+        d1, d2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes,
+                              spike_trains[i].t_start, spike_trains[i].t_end,
+                              max_tau)
+        asymmetry_list[i] += d1
+        asymmetry_list[j] += d2
+    for a in asymmetry_list:
+        a /= len(spike_trains)-1
+    return asymmetry_list
+
+
+############################################################
+# spike_directionality
+############################################################
+def spike_directionality(spike_train1, spike_train2, normalize=True,
+                         interval=None, max_tau=None):
+    """ Computes the overall spike directionality of the first spike train with
+    respect to the second spike train.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike directionality value.
+    """
+    if interval is None:
+        # distance over the whole interval is requested: use specific function
+        # for optimal performance
+        try:
+            from .cython.cython_directionality import \
+                spike_directionality_cython as spike_directionality_impl
+            if max_tau is None:
+                max_tau = 0.0
+            d = spike_directionality_impl(spike_train1.spikes,
+                                          spike_train2.spikes,
+                                          spike_train1.t_start,
+                                          spike_train1.t_end,
+                                          max_tau)
+            c = len(spike_train1.spikes)
+        except ImportError:
+            if not(pyspike.disable_backend_warning):
+                print("Warning: cython_directionality not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+            # use profile.
+            d1, x = spike_directionality_values([spike_train1, spike_train2],
+                                                interval=interval,
+                                                max_tau=max_tau)
+            d = np.sum(d1)
+            c = len(spike_train1.spikes)
+        if normalize:
+            return 1.0*d/c
+        else:
+            return d
+    else:
+        # some specific interval is provided: not yet implemented
+        raise NotImplementedError("Parameter `interval` not supported.")
+
+
+############################################################
+# spike_directionality_matrix
+############################################################
+def spike_directionality_matrix(spike_trains, normalize=True, indices=None,
+                                interval=None, max_tau=None):
+    """ Computes the spike directionality matrix for the given spike trains.
+
+    :param spike_trains: List of spike trains.
+    :type spike_trains: List of :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike-directionality values.
+    """
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    distance_matrix = np.zeros((len(indices), len(indices)))
+    for i, j in pairs:
+        d = spike_directionality(spike_trains[i], spike_trains[j], normalize,
+                                 interval, max_tau=max_tau)
+        distance_matrix[i, j] = d
+        distance_matrix[j, i] = -d
+    return distance_matrix
+
+
+############################################################
+# spike_train_order_profile
+############################################################
+def spike_train_order_profile(*args, **kwargs):
+    """ Computes the spike train order profile :math:`E(t)` of the given
+    spike trains. Returns the profile as a DiscreteFunction object.
+
+    Valid call structures::
+
+      spike_train_order_profile(st1, st2)  # returns the bi-variate profile
+      spike_train_order_profile(st1, st2, st3)  # multi-variate profile of 3
+                                                # spike trains
+
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_train_order_profile(spike_trains)  # profile of the list of spike trains
+      spike_train_order_profile(spike_trains, indices=[0, 1])  # use only the spike trains
+                                                               # given by the indices
+
+    Additional arguments:
+    :param max_tau: Upper bound for coincidence window, `default=None`.
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+
+    :returns: The spike train order profile :math:`E(t)`
+    :rtype: :class:`.DiscreteFunction`
+    """
+    if len(args) == 1:
+        return spike_train_order_profile_multi(args[0], **kwargs)
+    elif len(args) == 2:
+        return spike_train_order_profile_bi(args[0], args[1], **kwargs)
+    else:
+        return spike_train_order_profile_multi(args, **kwargs)
+
+
+############################################################
+# spike_train_order_profile_bi
+############################################################
+def spike_train_order_profile_bi(spike_train1, spike_train2, max_tau=None):
+    """ Computes the spike train order profile :math:`E(t)` of the two given
+    spike trains. Returns the profile as a DiscreteFunction object.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order profile :math:`E(t)`.
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+    """
+    # check whether the spike trains are defined for the same interval
+    assert spike_train1.t_start == spike_train2.t_start, \
+        "Given spike trains are not defined on the same interval!"
+    assert spike_train1.t_end == spike_train2.t_end, \
+        "Given spike trains are not defined on the same interval!"
+
+    # cython implementation
+    try:
+        from .cython.cython_directionality import \
+            spike_train_order_profile_cython as \
+            spike_train_order_profile_impl
+    except ImportError:
+        # raise NotImplementedError()
+        if not(pyspike.disable_backend_warning):
+            print("Warning: cython_directionality not found. Make sure that \
PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+        # use python backend
+        from .cython.directionality_python_backend import \
+            spike_train_order_profile_python as spike_train_order_profile_impl
+
+    if max_tau is None:
+        max_tau = 0.0
+
+    times, coincidences, multiplicity \
+        = spike_train_order_profile_impl(spike_train1.spikes,
+                                         spike_train2.spikes,
+                                         spike_train1.t_start,
+                                         spike_train1.t_end,
+                                         max_tau)
+
+    return DiscreteFunc(times, coincidences, multiplicity)
+
+
+############################################################
+# spike_train_order_profile_multi
+############################################################
+def spike_train_order_profile_multi(spike_trains, indices=None,
+                                    max_tau=None):
+    """ Computes the multi-variate spike train order profile for a set of
+    spike trains. For each spike in the set of spike trains, the multi-variate
+    profile is defined as the sum of asymmetry values divided by the number of
+    spike train pairs involving the spike train containing this spike,
+    which is the number of spike trains minus one (N-1).
+
+    :param spike_trains: list of :class:`pyspike.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The multi-variate spike train order profile :math:`E(t)`
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+    """
+    prof_func = partial(spike_train_order_profile_bi, max_tau=max_tau)
+    average_prof, M = _generic_profile_multi(spike_trains, prof_func,
+                                             indices)
+    return average_prof
+
+
+############################################################
+# _spike_train_order_impl
+############################################################
+def _spike_train_order_impl(spike_train1, spike_train2,
+                            interval=None, max_tau=None):
+    """ Implementation of bi-variate spike train order value (Synfire Indicator).
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    if interval is None:
+        # distance over the whole interval is requested: use specific function
+        # for optimal performance
+        try:
+            from .cython.cython_directionality import \
+                spike_train_order_cython as spike_train_order_func
+            if max_tau is None:
+                max_tau = 0.0
+            c, mp = spike_train_order_func(spike_train1.spikes,
+                                           spike_train2.spikes,
+                                           spike_train1.t_start,
+                                           spike_train1.t_end,
+                                           max_tau)
+        except ImportError:
+            # Cython backend not available: fall back to profile averaging
+            c, mp = spike_train_order_profile(spike_train1, spike_train2,
+                                              max_tau=max_tau).integral(interval)
+        return c, mp
+    else:
+        # some specific interval is provided: not yet implemented
+        raise NotImplementedError("Parameter `interval` not supported.")
+
+
+############################################################
+# spike_train_order
+############################################################
+def spike_train_order(*args, **kwargs):
+    """ Computes the spike train order (Synfire Indicator) of the given
+    spike trains.
+
+    Valid call structures::
+
+      spike_train_order(st1, st2, normalize=True)  # normalized bi-variate
+                                                   # spike train order
+      spike_train_order(st1, st2, st3)  # multi-variate result of 3 spike trains
+
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_train_order(spike_trains)  # result for the list of spike trains
+      spike_train_order(spike_trains, indices=[0, 1])  # use only the spike trains
+                                                       # given by the indices
+
+    Additional arguments:
+    - `max_tau` Upper bound for coincidence window, `default=None`.
+    - `normalize` Flag indicating if the result should be normalized by the
+      number of spikes, default=`False`
+
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    if len(args) == 1:
+        return spike_train_order_multi(args[0], **kwargs)
+    elif len(args) == 2:
+        return spike_train_order_bi(args[0], args[1], **kwargs)
+    else:
+        return spike_train_order_multi(args, **kwargs)
+
+
+############################################################
+# spike_train_order_bi
+############################################################
+def spike_train_order_bi(spike_train1, spike_train2, normalize=True,
+                         interval=None, max_tau=None):
+    """ Computes the overall spike train order value (Synfire Indicator)
+    for two spike trains.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    c, mp = _spike_train_order_impl(spike_train1, spike_train2, interval, max_tau)
+    if normalize:
+        return 1.0*c/mp
+    else:
+        return c
+
+
+############################################################
+# spike_train_order_multi
+############################################################
+def spike_train_order_multi(spike_trains, indices=None, normalize=True,
+                            interval=None, max_tau=None):
+    """ Computes the overall spike train order value (Synfire Indicator)
+    for many spike trains.
+
+    :param spike_trains: list of :class:`.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param interval: averaging interval given as a pair of floats, if None
+                     the average over the whole function is computed.
+    :type interval: Pair of floats or None.
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: Spike train order values (Synfire Indicator) F for the given spike trains.
+    :rtype: double
+    """
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    e_total = 0.0
+    m_total = 0.0
+    for (i, j) in pairs:
+        e, m = _spike_train_order_impl(spike_trains[i], spike_trains[j],
+                                       interval, max_tau)
+        e_total += e
+        m_total += m
+
+    if m_total == 0.0:
+        return 1.0
+    else:
+        return e_total/m_total
+
+
+############################################################
+# optimal_spike_train_sorting_from_matrix
+############################################################
+def _optimal_spike_train_sorting_from_matrix(D, full_output=False):
+    """ Finds the best sorting via simulated annealing.
+    Returns the optimal permutation p and A value.
+    Not for direct use, call :func:`.optimal_spike_train_sorting` instead.
+
+    :param D: The directionality (Spike-ORDER) matrix.
+    :param full_output: If True, the function will additionally return the
+                        number of performed iterations (default=False)
+    :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+             if `full_output=True`, (p, F, iter) is returned.
+    """
+    N = len(D)
+    A = np.sum(np.triu(D, 0))
+
+    p = np.arange(N)
+
+    T_start = 2*np.max(D)    # starting temperature
+    T_end = 1E-5 * T_start   # final temperature
+    alpha = 0.9              # cooling factor
+
+    try:
+        from .cython.cython_simulated_annealing import sim_ann_cython as sim_ann
+    except ImportError:
+        raise NotImplementedError("PySpike with Cython required for computing spike train"
+                                  " sorting!")
+
+    p, A, total_iter = sim_ann(D, T_start, T_end, alpha)
+
+    if full_output:
+        return p, A, total_iter
+    else:
+        return p, A
+
+
+############################################################
+# optimal_spike_train_sorting
+############################################################
+def optimal_spike_train_sorting(spike_trains, indices=None, interval=None,
+                                max_tau=None, full_output=False):
+    """ Finds the best sorting of the given spike trains by computing the spike
+    directionality matrix and optimizing the order using simulated annealing.
+    For a detailed description of the algorithm see:
+    `http://iopscience.iop.org/article/10.1088/1367-2630/aa68c3/meta`
+
+    :param spike_trains: list of :class:`.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param interval: time interval filter given as a pair of floats, if None
+                     the full spike trains are used (default=None).
+    :type interval: Pair of floats or None.
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound (default=None).
+    :param full_output: If True, the function will additionally return the
+                        number of performed iterations (default=False)
+    :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+             if `full_output=True`, (p, F, iter) is returned.
+    """
+    D = spike_directionality_matrix(spike_trains, normalize=False,
+                                    indices=indices, interval=interval,
+                                    max_tau=max_tau)
+    return _optimal_spike_train_sorting_from_matrix(D, full_output)
+
+
+############################################################
+# permutate_matrix
+############################################################
+def permutate_matrix(D, p):
+    """ Helper function that applies the permutation p to the columns and rows
+    of matrix D. Returns the permuted matrix :math:`D'[n,m] = D[p[n], p[m]]`.
+
+    :param D: The matrix.
+    :param p: The permutation.
+    :return: The permuted matrix D', i.e. :math:`D'[n,m] = D[p[n], p[m]]`
+    """
+    N = len(D)
+    D_p = np.empty_like(D)
+    for n in range(N):
+        for m in range(N):
+            D_p[n, m] = D[p[n], p[m]]
+    return D_p
diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py
index 80f7805..95ef454 100644
--- a/pyspike/spike_sync.py
+++ b/pyspike/spike_sync.py
@@ -8,7 +8,7 @@ from __future__ import absolute_import
 import numpy as np
 from functools import partial
 import pyspike
-from pyspike import DiscreteFunc
+from pyspike import DiscreteFunc, SpikeTrain
 from pyspike.generic import _generic_profile_multi, _generic_distance_matrix
 
 
@@ -45,9 +45,9 @@ def spike_sync_profile(*args, **kwargs):
     if len(args) == 1:
         return spike_sync_profile_multi(args[0], **kwargs)
     elif len(args) == 2:
-        return spike_sync_profile_bi(args[0], args[1])
+        return spike_sync_profile_bi(args[0], args[1], **kwargs)
     else:
-        return spike_sync_profile_multi(args)
+        return spike_sync_profile_multi(args, **kwargs)
 
 
@@ -290,3 +290,52 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None):
     dist_func = partial(spike_sync_bi, max_tau=max_tau)
     return _generic_distance_matrix(spike_trains, dist_func, indices, interval)
+
+
+############################################################
+# filter_by_spike_sync
+############################################################
+def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None,
+                         return_removed_spikes=False):
+    """ Removes the spikes with a multi-variate spike_sync value below
+    threshold.
+    """
+    N = len(spike_trains)
+    filtered_spike_trains = []
+    removed_spike_trains = []
+
+    # cython implementation
+    try:
+        from .cython.cython_profiles import coincidence_single_profile_cython \
+            as coincidence_impl
+    except ImportError:
+        if not(pyspike.disable_backend_warning):
+            print("Warning: coincidence_single_profile_cython not found.
Make \ +sure that PySpike is installed by running\n \ +'python setup.py build_ext --inplace'!\n \ +Falling back to slow python backend.") + # use python backend + from .cython.python_backend import coincidence_single_python \ + as coincidence_impl + + if max_tau is None: + max_tau = 0.0 + + for i, st in enumerate(spike_trains): + coincidences = np.zeros_like(st) + for j in range(N): + if i == j: + continue + coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes, + st.t_start, st.t_end, max_tau) + filtered_spikes = st[coincidences > threshold*(N-1)] + filtered_spike_trains.append(SpikeTrain(filtered_spikes, + [st.t_start, st.t_end])) + if return_removed_spikes: + removed_spikes = st[coincidences <= threshold*(N-1)] + removed_spike_trains.append(SpikeTrain(removed_spikes, + [st.t_start, st.t_end])) + if return_removed_spikes: + return [filtered_spike_trains, removed_spike_trains] + else: + return filtered_spike_trains diff --git a/setup.py b/setup.py index 5b9e677..b5b01a6 100644 --- a/setup.py +++ b/setup.py @@ -30,7 +30,9 @@ class numpy_include(object): if os.path.isfile("pyspike/cython/cython_add.c") and \ os.path.isfile("pyspike/cython/cython_profiles.c") and \ - os.path.isfile("pyspike/cython/cython_distances.c"): + os.path.isfile("pyspike/cython/cython_distances.c") and \ + os.path.isfile("pyspike/cython/cython_directionality.c") and \ + os.path.isfile("pyspike/cython/cython_simulated_annealing.c"): use_c = True else: use_c = False @@ -45,7 +47,11 @@ if use_cython: # Cython is available, compile .pyx -> .c Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.pyx"]), Extension("pyspike.cython.cython_distances", - ["pyspike/cython/cython_distances.pyx"]) + ["pyspike/cython/cython_distances.pyx"]), + Extension("pyspike.cython.cython_directionality", + ["pyspike/cython/cython_directionality.pyx"]), + Extension("pyspike.cython.cython_simulated_annealing", + ["pyspike/cython/cython_simulated_annealing.pyx"]) ] cmdclass.update({'build_ext': build_ext}) elif use_c: # c files are there, compile to binaries @@ -55,14 +61,18 @@ elif use_c: # c files are there, compile to binaries Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.c"]), Extension("pyspike.cython.cython_distances", - ["pyspike/cython/cython_distances.c"]) + ["pyspike/cython/cython_distances.c"]), + Extension("pyspike.cython.cython_directionality", + ["pyspike/cython/cython_directionality.c"]), + Extension("pyspike.cython.cython_simulated_annealing", + ["pyspike/cython/cython_simulated_annealing.c"]) ] # neither cython nor c files available -> automatic fall-back to python backend setup( name='pyspike', packages=find_packages(exclude=['doc']), - version='0.5.3', + version='0.6.0', cmdclass=cmdclass, ext_modules=ext_modules, include_dirs=[numpy_include()], @@ -90,11 +100,17 @@ train similarity', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6' - ] + ], + package_data={ + 'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c', + 'cython/cython_distances.c', + 'cython/cython_directionality.c', + 'cython/cython_simulated_annealing.c'], + 'test': ['Spike_testdata.txt'] + } ) diff --git a/test/test_directionality.py b/test/test_directionality.py new file mode 100644 index 0000000..c2e9bfe --- /dev/null +++ b/test/test_directionality.py @@ 
-0,0 +1,97 @@ +""" test_directionality.py + +Tests the directionality functions + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +import numpy as np +from numpy.testing import assert_equal, assert_almost_equal, \ + assert_array_equal + +import pyspike as spk +from pyspike import SpikeTrain, DiscreteFunc + + +def test_spike_directionality(): + st1 = SpikeTrain([100, 200, 300], [0, 1000]) + st2 = SpikeTrain([105, 205, 300], [0, 1000]) + assert_almost_equal(spk.spike_directionality(st1, st2), 2.0/3.0) + assert_almost_equal(spk.spike_directionality(st1, st2, normalize=False), + 2.0) + + # exchange order of spike trains should give exact negative profile + assert_almost_equal(spk.spike_directionality(st2, st1), -2.0/3.0) + assert_almost_equal(spk.spike_directionality(st2, st1, normalize=False), + -2.0) + + st3 = SpikeTrain([105, 195, 500], [0, 1000]) + assert_almost_equal(spk.spike_directionality(st1, st3), 0.0) + assert_almost_equal(spk.spike_directionality(st1, st3, normalize=False), + 0.0) + assert_almost_equal(spk.spike_directionality(st3, st1), 0.0) + + D = spk.spike_directionality_matrix([st1, st2, st3], normalize=False) + D_expected = np.array([[0, 2.0, 0.0], [-2.0, 0.0, -1.0], [0.0, 1.0, 0.0]]) + assert_array_equal(D, D_expected) + + dir_profs = spk.spike_directionality_values([st1, st2, st3]) + assert_array_equal(dir_profs[0], [1.0, 0.0, 0.0]) + assert_array_equal(dir_profs[1], [-0.5, -1.0, 0.0]) + + +def test_spike_train_order(): + st1 = SpikeTrain([100, 200, 300], [0, 1000]) + st2 = SpikeTrain([105, 205, 300], [0, 1000]) + st3 = SpikeTrain([105, 195, 500], [0, 1000]) + + expected_x12 = np.array([0, 100, 105, 200, 205, 300, 1000]) + expected_y12 = np.array([1, 1, 1, 1, 1, 0, 0]) + expected_mp12 = np.array([1, 1, 1, 1, 1, 2, 2]) + + f = spk.spike_train_order_profile(st1, st2) + + assert f.almost_equal(DiscreteFunc(expected_x12, expected_y12, + expected_mp12)) + assert_almost_equal(f.avrg(), 2.0/3.0) + assert_almost_equal(f.avrg(normalize=False), 4.0) + assert_almost_equal(spk.spike_train_order(st1, st2), 2.0/3.0) + assert_almost_equal(spk.spike_train_order(st1, st2, normalize=False), 4.0) + + expected_x23 = np.array([0, 105, 195, 205, 300, 500, 1000]) + expected_y23 = np.array([0, 0, -1, -1, 0, 0, 0]) + expected_mp23 = np.array([2, 2, 1, 1, 1, 1, 1]) + + f = spk.spike_train_order_profile(st2, st3) + + assert_array_equal(f.x, expected_x23) + assert_array_equal(f.y, expected_y23) + assert_array_equal(f.mp, expected_mp23) + assert f.almost_equal(DiscreteFunc(expected_x23, expected_y23, + expected_mp23)) + assert_almost_equal(f.avrg(), -1.0/3.0) + assert_almost_equal(f.avrg(normalize=False), -2.0) + assert_almost_equal(spk.spike_train_order(st2, st3), -1.0/3.0) + assert_almost_equal(spk.spike_train_order(st2, st3, normalize=False), -2.0) + + f = spk.spike_train_order_profile_multi([st1, st2, st3]) + + expected_x = np.array([0, 100, 105, 195, 200, 205, 300, 500, 1000]) + expected_y = np.array([2, 2, 2, -2, 0, 0, 0, 0, 0]) + expected_mp = np.array([2, 2, 4, 2, 2, 2, 4, 2, 2]) + + assert_array_equal(f.x, expected_x) + assert_array_equal(f.y, expected_y) + assert_array_equal(f.mp, expected_mp) + + # Averaging the profile should be the same as computing the synfire indicator directly. 
+ assert_almost_equal(f.avrg(), spk.spike_train_order([st1, st2, st3])) + + # We can also compute the synfire indicator from the Directionality Matrix: + D_matrix = spk.spike_directionality_matrix([st1, st2, st3], normalize=False) + num_spikes = np.sum(len(st) for st in [st1, st2, st3]) + syn_fire = np.sum(np.triu(D_matrix)) / num_spikes + assert_almost_equal(f.avrg(), syn_fire) diff --git a/test/test_function.py b/test/test_function.py index 92d378d..6c04839 100644 --- a/test/test_function.py +++ b/test/test_function.py @@ -10,6 +10,7 @@ Distributed under the BSD License from __future__ import print_function import numpy as np from copy import copy +from nose.tools import raises from numpy.testing import assert_equal, assert_almost_equal, \ assert_array_equal, assert_array_almost_equal @@ -49,6 +50,8 @@ def test_pwc(): assert_almost_equal(a, (0.5-0.5+0.5*1.5+1.0*0.75)/3.0, decimal=16) a = f.avrg([1.5, 3.5]) assert_almost_equal(a, (-0.5*0.5+0.5*1.5+1.0*0.75)/2.0, decimal=16) + a = f.avrg([1.0, 2.0]) + assert_almost_equal(a, (1.0*-0.5)/1.0, decimal=16) a = f.avrg([1.0, 3.5]) assert_almost_equal(a, (-0.5*1.0+0.5*1.5+1.0*0.75)/2.5, decimal=16) a = f.avrg([1.0, 4.0]) @@ -120,6 +123,53 @@ def test_pwc_avrg(): assert_array_almost_equal(f1.x, x_expected, decimal=16) assert_array_almost_equal(f1.y, y_expected, decimal=16) +def test_pwc_integral(): + # some random data + x = [0.0, 1.0, 2.0, 2.5, 4.0] + y = [1.0, -0.5, 1.5, 0.75] + f1 = spk.PieceWiseConstFunc(x, y) + + # test full interval + full = 1.0*1.0 + 1.0*-0.5 + 0.5*1.5 + 1.5*0.75; + assert_equal(f1.integral(), full) + assert_equal(f1.integral((np.min(x),np.max(x))), full) + # test part interval, spanning an edge + assert_equal(f1.integral((0.5,1.5)), 0.5*1.0 + 0.5*-0.5) + # test part interval, just over two edges + assert_almost_equal(f1.integral((1.0-1e-16,2+1e-16)), 1.0*-0.5, decimal=14) + # test part interval, between two edges + assert_equal(f1.integral((1.0,2.0)), 1.0*-0.5) + assert_equal(f1.integral((1.2,1.7)), (1.7-1.2)*-0.5) + # test part interval, start to before and after edge + assert_equal(f1.integral((0.0,0.7)), 0.7*1.0) + assert_equal(f1.integral((0.0,1.1)), 1.0*1.0+0.1*-0.5) + # test part interval, before and after edge till end + assert_equal(f1.integral((2.6,4.0)), (4.0-2.6)*0.75) + assert_equal(f1.integral((2.4,4.0)), (2.5-2.4)*1.5+(4-2.5)*0.75) + +@raises(ValueError) +def test_pwc_integral_bad_bounds_inv(): + # some random data + x = [0.0, 1.0, 2.0, 2.5, 4.0] + y = [1.0, -0.5, 1.5, 0.75] + f1 = spk.PieceWiseConstFunc(x, y) + f1.integral((3,2)) + +@raises(ValueError) +def test_pwc_integral_bad_bounds_oob_1(): + # some random data + x = [0.0, 1.0, 2.0, 2.5, 4.0] + y = [1.0, -0.5, 1.5, 0.75] + f1 = spk.PieceWiseConstFunc(x, y) + f1.integral((1,6)) + +@raises(ValueError) +def test_pwc_integral_bad_bounds_oob_2(): + # some random data + x = [0.0, 1.0, 2.0, 2.5, 4.0] + y = [1.0, -0.5, 1.5, 0.75] + f1 = spk.PieceWiseConstFunc(x, y) + f1.integral((-1,3)) def test_pwl(): x = [0.0, 1.0, 2.0, 2.5, 4.0] @@ -162,6 +212,18 @@ def test_pwl(): a = f.avrg([1.0, 4.0]) assert_almost_equal(a, (-0.45 + 0.75 + 1.5*0.5) / 3.0, decimal=16) + # interval between support points + a = f.avrg([1.1, 1.5]) + assert_almost_equal(a, (-0.5+0.1*0.1 - 0.45) * 0.5, decimal=14) + + # starting at a support point + a = f.avrg([1.0, 1.5]) + assert_almost_equal(a, (-0.5 - 0.45) * 0.5, decimal=14) + + # start and end at support point + a = f.avrg([1.0, 2.0]) + assert_almost_equal(a, (-0.5 - 0.4) * 0.5, decimal=14) + # averaging over multiple intervals a = 
f.avrg([(0.5, 1.5), (1.5, 2.5)])
     assert_almost_equal(a, (1.375*0.5 - 0.45 + 0.75)/2.0, decimal=16)
diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py
new file mode 100644
index 0000000..e259903
--- /dev/null
+++ b/test/test_sync_filter.py
@@ -0,0 +1,95 @@
+""" test_sync_filter.py
+
+Tests the spike sync based filtering
+
+Copyright 2015, Mario Mulansky
+
+Distributed under the BSD License
+
+"""
+
+from __future__ import print_function
+import numpy as np
+from numpy.testing import assert_equal, assert_almost_equal, \
+    assert_array_almost_equal
+
+import pyspike as spk
+from pyspike import SpikeTrain
+
+
+def test_single_prof():
+    st1 = np.array([1.0, 2.0, 3.0, 4.0])
+    st2 = np.array([1.1, 2.1, 3.8])
+    st3 = np.array([0.9, 3.1, 4.1])
+
+    # cython implementation
+    try:
+        from pyspike.cython.cython_profiles import \
+            coincidence_single_profile_cython as coincidence_impl
+    except ImportError:
+        from pyspike.cython.python_backend import \
+            coincidence_single_python as coincidence_impl
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st2, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+    print(coincidences)
+    for i, t in enumerate(st1):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    coincidences = np.array(coincidence_impl(st2, st1, 0, 5.0, 0.0))
+    for i, t in enumerate(st2):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st3, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st3, 0, 5.0, 0.0))
+    for i, t in enumerate(st1):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    st1 = np.array([1.0, 2.0, 3.0, 4.0])
+    st2 = np.array([1.0, 2.0, 4.0])
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st2, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+    for i, t in enumerate(st1):
+        expected = sync_prof.y[sync_prof.x == t]/sync_prof.mp[sync_prof.x == t]
+        assert_equal(coincidences[i], expected,
+                     "At index %d" % i)
+
+
+def test_filter():
+    st1 = SpikeTrain(np.array([1.0, 2.0, 3.0, 4.0]), 5.0)
+    st2 = SpikeTrain(np.array([1.1, 2.1, 3.8]), 5.0)
+    st3 = SpikeTrain(np.array([0.9, 3.1, 4.1]), 5.0)
+
+    # filtered_spike_trains = spk.filter_by_spike_sync([st1, st2], 0.5)
+
+    # assert_equal(filtered_spike_trains[0].spikes, [1.0, 2.0, 4.0])
+    # assert_equal(filtered_spike_trains[1].spikes, [1.1, 2.1, 3.8])
+
+    # filtered_spike_trains = spk.filter_by_spike_sync([st2, st1], 0.5)
+
+    # assert_equal(filtered_spike_trains[0].spikes, [1.1, 2.1, 3.8])
+    # assert_equal(filtered_spike_trains[1].spikes, [1.0, 2.0, 4.0])
+
+    filtered_spike_trains = spk.filter_by_spike_sync([st1, st2, st3], 0.75)
+
+    for st in filtered_spike_trains:
+        print(st.spikes)
+
+    assert_equal(filtered_spike_trains[0].spikes, [1.0, 4.0])
+    assert_equal(filtered_spike_trains[1].spikes, [1.1, 3.8])
+    assert_equal(filtered_spike_trains[2].spikes, [0.9, 4.1])
+
+
+if __name__ == "__main__":
+    test_single_prof()
+    test_filter()
-- cgit v1.2.3
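A minimal usage sketch of the directionality API added in this patch series (spike times are illustrative; optimal_spike_train_sorting requires the compiled Cython backend, e.g. via 'python setup.py build_ext --inplace'):

    import pyspike as spk
    from pyspike import SpikeTrain

    # three spike trains on the interval [0, 1000] (illustrative data)
    st1 = SpikeTrain([100, 200, 300], [0, 1000])
    st2 = SpikeTrain([105, 205, 300], [0, 1000])
    st3 = SpikeTrain([105, 195, 500], [0, 1000])

    # pairwise directionality: D[i, j] > 0 if train i tends to lead train j
    D = spk.spike_directionality_matrix([st1, st2, st3], normalize=False)

    # multivariate spike train order profile; its average is the Synfire Indicator
    E = spk.spike_train_order_profile([st1, st2, st3])
    print("Synfire Indicator:", E.avrg())

    # ordering that maximizes the Synfire Indicator (simulated annealing)
    p, F = spk.optimal_spike_train_sorting([st1, st2, st3])
    print("optimal order:", p, "optimized Synfire Indicator:", F)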