author    Mario Mulansky <mario.mulansky@gmx.net>  2018-09-20 10:49:42 -0700
committer GitHub <noreply@github.com>  2018-09-20 10:49:42 -0700
commit    34bd30415dd93a2425ce566627e24ee9483ada3e (patch)
tree      dcfa9164d46e3cf501a1e8dcf4970f350063561a
parent    44d23620d2faa78ca74437fbd3f1b95da722a853 (diff)

Spike Order support (#39) (tag: 0.6.0)
* reorganized directionality module
* further refactoring of directionality
* completed python directionality backend
* added SPIKE-Sync based filtering: the new function filter_by_spike_sync removes spikes that have a multi-variate SPIKE-Sync value below some threshold (not yet fully tested, python backend missing)
* spike sync filtering, cython sim ann: added a function for filtering out events based on a threshold for the spike sync values, useful for focusing on synchronous events during directionality analysis; also added a cython version of simulated annealing for performance
* added coincidence single profile to python backend: the missing function in the python backend was added; identified and fixed a bug in the implementation as well
* updated test case to new spike sync behavior
* python3 fixes
* another python3 fix
* fix absolute imports in directionality measures
* remove commented code
* add directionality to docs, bump version
* clean up directionality module, add doxy
* remove debug print from tests
* fix bug in calling Python backend
* fix incorrect integrals in PieceWiseConstFunc (#36): added (some currently failing) tests for PieceWiseConstFunc.integral, then fixed the implementation by adding a special condition for when the integral is taken "between" two edges of a PieceWiseConstFunc; all tests now pass; also added ValueError tests, bounds testing, and raising ValueError in the implementation. Fixes #33.
* fix incorrect integrals in PieceWiseLinFunc (#38): integrals of piece-wise linear functions were incorrect if the requested interval lies completely between two support points; this has been fixed, and a unit test exercising this behavior was added. Fixes #38.
* add Spike Order example and Tutorial section: adds an example computing the spike order profile and the optimal spike train order, and adds a section on spike train order to the tutorial
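For orientation, a minimal usage sketch of the new SPIKE-Sync based filtering introduced by this commit (the threshold 0.5 is an arbitrary example value; exact results depend on the random spike trains)::

    import pyspike as spk

    # five Poissonian spike trains on the interval [0, 20]
    spike_trains = [spk.generate_poisson_spikes(1.0, [0, 20]) for m in range(5)]

    # remove all spikes whose multi-variate SPIKE-Sync value is below 0.5
    filtered = spk.filter_by_spike_sync(spike_trains, 0.5)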
-rw-r--r--  Changelog                                          3
-rw-r--r--  Readme.rst                                         9
-rw-r--r--  doc/pyspike.rst                                    6
-rw-r--r--  doc/tutorial.rst                                  66
-rw-r--r--  examples/spike_train_order.py                     52
-rw-r--r--  pyspike/PieceWiseConstFunc.py                     32
-rw-r--r--  pyspike/PieceWiseLinFunc.py                       42
-rw-r--r--  pyspike/__init__.py                               16
-rw-r--r--  pyspike/cython/cython_directionality.pyx         262
-rw-r--r--  pyspike/cython/cython_distances.pyx              200
-rw-r--r--  pyspike/cython/cython_profiles.pyx                33
-rw-r--r--  pyspike/cython/cython_simulated_annealing.pyx     82
-rw-r--r--  pyspike/cython/directionality_python_backend.py  144
-rw-r--r--  pyspike/cython/python_backend.py                  67
-rw-r--r--  pyspike/spike_directionality.py                  522
-rw-r--r--  pyspike/spike_sync.py                             55
-rw-r--r--  setup.py                                          28
-rw-r--r--  test/test_directionality.py                       97
-rw-r--r--  test/test_function.py                             62
-rw-r--r--  test/test_sync_filter.py                          95

20 files changed, 1812 insertions, 61 deletions
diff --git a/Changelog b/Changelog
index 21b7cb0..88e16cc 100644
--- a/Changelog
+++ b/Changelog
@@ -1,3 +1,6 @@
+PySpike v0.6:
+ * Support for computing spike directionality and spike train order
+
PySpike v0.5:
* First beta release
* Python 2.6 support removed
diff --git a/Readme.rst b/Readme.rst
index 0422dad..74b014b 100644
--- a/Readme.rst
+++ b/Readme.rst
@@ -31,19 +31,14 @@ Additionally, depending on the used methods: ISI-distance [1], SPIKE-distance [2
Important Changelog
-----------------------------
+With version 0.6.0, the spike directionality and spike train order functions have been added.
+
With version 0.5.0, the interfaces have been unified and the specific functions for multivariate computations have become deprecated.
With version 0.2.0, the :code:`SpikeTrain` class has been introduced to represent spike trains.
This is a breaking change in the function interfaces.
Hence, programs written for older versions of PySpike (0.1.x) will not run with newer versions.
-
-Upcoming Functionality
--------------------------
-
-In an upcoming release, new functionality for analyzing Synfire patterns based on the new measures SPIKE-Order and Spike-Train-Order method will become part of the PySpike library.
-The new measures and algorithms are described in `this preprint <https://arxiv.org/abs/1610.07986>`_.
-
Requirements and Installation
-----------------------------
diff --git a/doc/pyspike.rst b/doc/pyspike.rst
index 74ab439..3b10d2a 100644
--- a/doc/pyspike.rst
+++ b/doc/pyspike.rst
@@ -64,6 +64,12 @@ PSTH
:undoc-members:
:show-inheritance:
+Directionality
+........................................
+.. automodule:: pyspike.spike_directionality
+ :members:
+ :undoc-members:
+ :show-inheritance:
Helper functions
........................................
diff --git a/doc/tutorial.rst b/doc/tutorial.rst
index aff03a8..377c0a2 100644
--- a/doc/tutorial.rst
+++ b/doc/tutorial.rst
@@ -231,3 +231,69 @@ The following example computes and plots the ISI- and SPIKE-distance matrix as w
plt.title("SPIKE-Sync")
plt.show()
+
+
+Quantifying Leaders and Followers: Spike Train Order
+----------------------------------------------------
+
+PySpike provides functionality to quantify how much a set of spike trains
+resembles a synfire pattern (i.e., a perfect leader-follower pattern). For details
+on the algorithms please see
+`our article in NJP <http://iopscience.iop.org/article/10.1088/1367-2630/aa68c3>`_.
+
+The following example computes the Spike Order profile and Synfire Indicator
+of two Poissonian spike trains.
+
+.. code:: python
+
+ import numpy as np
+ from matplotlib import pyplot as plt
+ import pyspike as spk
+
+
+ st1 = spk.generate_poisson_spikes(1.0, [0, 20])
+ st2 = spk.generate_poisson_spikes(1.0, [0, 20])
+
+ d = spk.spike_directionality(st1, st2)
+
+ print "Spike Directionality of two Poissonian spike trains:", d
+
+ E = spk.spike_train_order_profile(st1, st2)
+
+ plt.figure()
+ x, y = E.get_plottable_data()
+ plt.plot(x, y, '-ob')
+ plt.ylim(-1.1, 1.1)
+ plt.xlabel("t")
+ plt.ylabel("E")
+ plt.title("Spike Train Order Profile")
+
+ plt.show()
+
+Additionally, PySpike can also compute the optimal ordering of the spike trains,
+i.e., the ordering that most resembles a synfire pattern. The following example
+computes the optimal order of a set of 20 Poissonian spike trains:
+
+.. code:: python
+
+    M = 20
+    spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100]) for m in range(M)]
+
+    F_init = spk.spike_train_order(spike_trains)
+    print("Initial Synfire Indicator for 20 Poissonian spike trains:", F_init)
+
+    D_init = spk.spike_directionality_matrix(spike_trains)
+    phi, _ = spk.optimal_spike_train_sorting(spike_trains)
+    F_opt = spk.spike_train_order(spike_trains, indices=phi)
+    print("Synfire Indicator of optimized spike train sorting:", F_opt)
+
+ D_opt = spk.permutate_matrix(D_init, phi)
+
+ plt.figure()
+ plt.imshow(D_init)
+ plt.title("Initial Directionality Matrix")
+
+ plt.figure()
+ plt.imshow(D_opt)
+ plt.title("Optimized Directionality Matrix")
+
+ plt.show()
diff --git a/examples/spike_train_order.py b/examples/spike_train_order.py
new file mode 100644
index 0000000..3a42472
--- /dev/null
+++ b/examples/spike_train_order.py
@@ -0,0 +1,52 @@
+import numpy as np
+from matplotlib import pyplot as plt
+import pyspike as spk
+
+
+st1 = spk.generate_poisson_spikes(1.0, [0, 20])
+st2 = spk.generate_poisson_spikes(1.0, [0, 20])
+
+d = spk.spike_directionality(st1, st2)
+
+print "Spike Directionality of two Poissonian spike trains:", d
+
+E = spk.spike_train_order_profile(st1, st2)
+
+plt.figure()
+x, y = E.get_plottable_data()
+plt.plot(x, y, '-ob')
+plt.ylim(-1.1, 1.1)
+plt.xlabel("t")
+plt.ylabel("E")
+plt.title("Spike Train Order Profile")
+
+
+###### Optimize spike train order of 20 Random spike trains #######
+
+M = 20
+
+spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100]) for m in range(M)]
+
+F_init = spk.spike_train_order(spike_trains)
+
+print "Initial Synfire Indicator for 20 Poissonian spike trains:", F_init
+
+D_init = spk.spike_directionality_matrix(spike_trains)
+
+phi, _ = spk.optimal_spike_train_sorting(spike_trains)
+
+F_opt = spk.spike_train_order(spike_trains, indices=phi)
+
+print "Synfire Indicator of optimized spike train sorting:", F_opt
+
+D_opt = spk.permutate_matrix(D_init, phi)
+
+plt.figure()
+plt.imshow(D_init)
+plt.title("Initial Directionality Matrix")
+
+plt.figure()
+plt.imshow(D_opt)
+plt.title("Optimized Directionality Matrix")
+
+plt.show()
diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py
index 5ce5f27..17fdd3f 100644
--- a/pyspike/PieceWiseConstFunc.py
+++ b/pyspike/PieceWiseConstFunc.py
@@ -129,19 +129,31 @@ class PieceWiseConstFunc(object):
# no interval given, integrate over the whole spike train
a = np.sum((self.x[1:]-self.x[:-1]) * self.y)
else:
+            if interval[0]>interval[1]:
+                raise ValueError("Invalid averaging interval: interval[0]>interval[1]")
+            if interval[0]<self.x[0]:
+                raise ValueError("Invalid averaging interval: interval[0]<self.x[0]")
+            if interval[1]>self.x[-1]:
+                raise ValueError("Invalid averaging interval: interval[1]>self.x[-1]")
# find the indices corresponding to the interval
start_ind = np.searchsorted(self.x, interval[0], side='right')
end_ind = np.searchsorted(self.x, interval[1], side='left')-1
- assert start_ind > 0 and end_ind < len(self.x), \
- "Invalid averaging interval"
- # first the contribution from between the indices
- a = np.sum((self.x[start_ind+1:end_ind+1] -
- self.x[start_ind:end_ind]) *
- self.y[start_ind:end_ind])
- # correction from start to first index
- a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1]
- # correction from last index to end
- a += (interval[1]-self.x[end_ind]) * self.y[end_ind]
+ if start_ind > end_ind:
+ # contribution from between two closest edges
+ a = (self.x[start_ind]-self.x[end_ind]) * self.y[end_ind]
+ # minus the part that is not within the interval
+ a -= ((interval[0]-self.x[end_ind])+(self.x[start_ind]-interval[1])) * self.y[end_ind]
+ else:
+ assert start_ind > 0 and end_ind < len(self.x), \
+ "Invalid averaging interval"
+ # first the contribution from between the indices
+ a = np.sum((self.x[start_ind+1:end_ind+1] -
+ self.x[start_ind:end_ind]) *
+ self.y[start_ind:end_ind])
+ # correction from start to first index
+ a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1]
+ # correction from last index to end
+ a += (interval[1]-self.x[end_ind]) * self.y[end_ind]
return a
def avrg(self, interval=None):
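To make the fixed corner case concrete, a minimal sketch with example data: for an integration interval that lies strictly between two support points, the integral is simply the function value times the interval length::

    import numpy as np
    from pyspike import PieceWiseConstFunc

    # constant value 2.0 on [1, 2]; the interval (1.2, 1.6) lies
    # strictly between the edges 1.0 and 2.0
    f = PieceWiseConstFunc([0.0, 1.0, 2.0], [1.0, 2.0])
    assert np.allclose(f.integral((1.2, 1.6)), 2.0 * (1.6 - 1.2))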
diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py
index 8145e63..8faaec4 100644
--- a/pyspike/PieceWiseLinFunc.py
+++ b/pyspike/PieceWiseLinFunc.py
@@ -146,31 +146,47 @@ class PieceWiseLinFunc:
if interval is None:
# no interval given, integrate over the whole spike train
- integral = np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2))
+ return np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2))
+
+ # find the indices corresponding to the interval
+ start_ind = np.searchsorted(self.x, interval[0], side='right')
+ end_ind = np.searchsorted(self.x, interval[1], side='left')-1
+ assert start_ind > 0 and end_ind < len(self.x), \
+ "Invalid averaging interval"
+    if start_ind > end_ind:
+        # contribution from between two closest edges
+        y_x0 = intermediate_value(self.x[start_ind-1],
+                                  self.x[start_ind],
+                                  self.y1[start_ind-1],
+                                  self.y2[start_ind-1],
+                                  interval[0])
+        y_x1 = intermediate_value(self.x[start_ind-1],
+                                  self.x[start_ind],
+                                  self.y1[start_ind-1],
+                                  self.y2[start_ind-1],
+                                  interval[1])
+        integral = (y_x0 + y_x1) * 0.5 * (interval[1] - interval[0])
else:
- # find the indices corresponding to the interval
- start_ind = np.searchsorted(self.x, interval[0], side='right')
- end_ind = np.searchsorted(self.x, interval[1], side='left')-1
- assert start_ind > 0 and end_ind < len(self.x), \
- "Invalid averaging interval"
# first the contribution from between the indices
integral = np.sum((self.x[start_ind+1:end_ind+1] -
- self.x[start_ind:end_ind]) *
- 0.5*(self.y1[start_ind:end_ind] +
- self.y2[start_ind:end_ind]))
+ self.x[start_ind:end_ind]) *
+ 0.5*(self.y1[start_ind:end_ind] +
+ self.y2[start_ind:end_ind]))
# correction from start to first index
integral += (self.x[start_ind]-interval[0]) * 0.5 * \
(self.y2[start_ind-1] +
- intermediate_value(self.x[start_ind-1],
+ intermediate_value(self.x[start_ind-1],
self.x[start_ind],
self.y1[start_ind-1],
self.y2[start_ind-1],
- interval[0]
- ))
+ interval[0]))
# correction from last index to end
integral += (interval[1]-self.x[end_ind]) * 0.5 * \
(self.y1[end_ind] +
- intermediate_value(self.x[end_ind], self.x[end_ind+1],
+ intermediate_value(self.x[end_ind], self.x[end_ind+1],
self.y1[end_ind], self.y2[end_ind],
interval[1]
))
diff --git a/pyspike/__init__.py b/pyspike/__init__.py
index 08253fb..3897d18 100644
--- a/pyspike/__init__.py
+++ b/pyspike/__init__.py
@@ -1,5 +1,5 @@
"""
-Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
+Copyright 2014-2018, Mario Mulansky <mario.mulansky@gmx.net>
Distributed under the BSD License
"""
@@ -7,8 +7,8 @@ Distributed under the BSD License
from __future__ import absolute_import
__all__ = ["isi_distance", "spike_distance", "spike_sync", "psth",
- "spikes", "SpikeTrain", "PieceWiseConstFunc", "PieceWiseLinFunc",
- "DiscreteFunc", "directionality"]
+ "spikes", "spike_directionality", "SpikeTrain",
+ "PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc"]
from .PieceWiseConstFunc import PieceWiseConstFunc
from .PieceWiseLinFunc import PieceWiseLinFunc
@@ -20,13 +20,21 @@ from .isi_distance import isi_profile, isi_distance, isi_profile_multi,\
from .spike_distance import spike_profile, spike_distance, spike_profile_multi,\
spike_distance_multi, spike_distance_matrix
from .spike_sync import spike_sync_profile, spike_sync,\
- spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix
+ spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix,\
+ filter_by_spike_sync
from .psth import psth
from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \
spike_train_from_string, import_spike_trains_from_time_series, \
merge_spike_trains, generate_poisson_spikes
+from .spike_directionality import spike_directionality, \
+ spike_directionality_values, spike_directionality_matrix, \
+ spike_train_order_profile, spike_train_order_profile_bi, \
+ spike_train_order_profile_multi, spike_train_order, \
+ spike_train_order_bi, spike_train_order_multi, \
+ optimal_spike_train_sorting, permutate_matrix
+
# define the __version__ following
# http://stackoverflow.com/questions/17583443
from pkg_resources import get_distribution, DistributionNotFound
diff --git a/pyspike/cython/cython_directionality.pyx b/pyspike/cython/cython_directionality.pyx
new file mode 100644
index 0000000..ac37690
--- /dev/null
+++ b/pyspike/cython/cython_directionality.pyx
@@ -0,0 +1,262 @@
+#cython: boundscheck=False
+#cython: wraparound=False
+#cython: cdivision=True
+
+"""
+cython_directionality.pyx
+
+cython implementation of the spike delay asymmetry measures
+
+Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+"""
+To test whether things can be optimized: remove all yellow stuff
+in the html output::
+
+ cython -a cython_directionality.pyx
+
+which gives::
+
+ cython_directionality.html
+
+"""
+
+import numpy as np
+cimport numpy as np
+
+from libc.math cimport fabs
+from libc.math cimport fmax
+from libc.math cimport fmin
+
+# from pyspike.cython.cython_distances cimport get_tau
+
+DTYPE = np.float64
+ctypedef np.float_t DTYPE_t
+
+
+############################################################
+# get_tau
+############################################################
+cdef inline double get_tau(double[:] spikes1, double[:] spikes2,
+ int i, int j, double interval, double max_tau):
+ cdef double m = interval # use interval length as initial tau
+ cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1
+ cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1
+ if i < N1 and i > -1:
+ m = fmin(m, spikes1[i+1]-spikes1[i])
+ if j < N2 and j > -1:
+ m = fmin(m, spikes2[j+1]-spikes2[j])
+ if i > 0:
+ m = fmin(m, spikes1[i]-spikes1[i-1])
+ if j > 0:
+ m = fmin(m, spikes2[j]-spikes2[j-1])
+ m *= 0.5
+ if max_tau > 0.0:
+ m = fmin(m, max_tau)
+ return m
+
+
+############################################################
+# spike_train_order_profile_cython
+############################################################
+def spike_train_order_profile_cython(double[:] spikes1, double[:] spikes2,
+ double t_start, double t_end,
+ double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int i = -1
+ cdef int j = -1
+ cdef int n = 0
+ cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times
+ cdef double[:] a = np.zeros(N1 + N2 + 2) # asymmetry values
+ cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity
+ cdef double interval = t_end - t_start
+ cdef double tau
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ st[n] = spikes1[i]
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 after spike train 2
+ # both get marked with -1
+ a[n] = -1
+ a[n-1] = -1
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ st[n] = spikes2[j]
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 before spike train 2
+ # both get marked with 1
+ a[n] = 1
+ a[n-1] = 1
+ else: # spikes1[i+1] = spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ n += 1
+ # add only one event with zero asymmetry value and multiplicity 2
+ st[n] = spikes1[i]
+ a[n] = 0
+ mp[n] = 2
+
+ st = st[:n+2]
+ a = a[:n+2]
+ mp = mp[:n+2]
+
+ st[0] = t_start
+ st[len(st)-1] = t_end
+ if N1 + N2 > 0:
+ a[0] = a[1]
+ a[len(a)-1] = a[len(a)-2]
+ mp[0] = mp[1]
+ mp[len(mp)-1] = mp[len(mp)-2]
+ else:
+ a[0] = 1
+ a[1] = 1
+
+ return st, a, mp
+
+
+############################################################
+# spike_train_order_cython
+############################################################
+def spike_train_order_cython(double[:] spikes1, double[:] spikes2,
+ double t_start, double t_end, double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int i = -1
+ cdef int j = -1
+ cdef int d = 0
+ cdef int mp = 0
+ cdef double interval = t_end - t_start
+ cdef double tau
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ mp += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike in spike train 2 appeared before spike in spike train 1
+                # both spikes are marked with -1, contributing -2
+ d -= 2
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ mp += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike in spike train 1 appeared before spike in spike train 2
+                # both spikes are marked with +1, contributing +2
+ d += 2
+ else: # spikes1[i+1] = spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ # add only one event with multiplicity 2, but no asymmetry counting
+ mp += 2
+
+ if d == 0 and mp == 0:
+        # empty spike trains -> spike train order = 1 by definition
+ d = 1
+ mp = 1
+
+ return d, mp
+
+
+############################################################
+# spike_directionality_profiles_cython
+############################################################
+def spike_directionality_profiles_cython(double[:] spikes1,
+ double[:] spikes2,
+ double t_start, double t_end,
+ double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int i = -1
+ cdef int j = -1
+ cdef double[:] d1 = np.zeros(N1) # directionality values
+ cdef double[:] d2 = np.zeros(N2) # directionality values
+ cdef double interval = t_end - t_start
+ cdef double tau
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 after spike train 2
+ # leading spike gets +1, following spike -1
+ d1[i] = -1
+ d2[j] = +1
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 before spike train 2
+ # leading spike gets +1, following spike -1
+ d1[i] = +1
+ d2[j] = -1
+ else: # spikes1[i+1] = spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ # equal spike times: zero asymmetry value
+ d1[i] = 0
+ d2[j] = 0
+
+ return d1, d2
+
+
+############################################################
+# spike_directionality_cython
+############################################################
+def spike_directionality_cython(double[:] spikes1,
+ double[:] spikes2,
+ double t_start, double t_end,
+ double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int i = -1
+ cdef int j = -1
+ cdef int d = 0 # directionality value
+ cdef double interval = t_end - t_start
+ cdef double tau
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 after spike train 2
+ # leading spike gets +1, following spike -1
+ d -= 1
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 before spike train 2
+ # leading spike gets +1, following spike -1
+ d += 1
+ else: # spikes1[i+1] = spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+
+ return d
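These kernels are wrapped by the public API in pyspike/spike_directionality.py; a minimal sketch of the corresponding user-facing calls (results depend on the random spike trains)::

    import pyspike as spk

    st1 = spk.generate_poisson_spikes(1.0, [0, 20])
    st2 = spk.generate_poisson_spikes(1.0, [0, 20])

    # scalar directionality of st1 with respect to st2
    d = spk.spike_directionality(st1, st2)

    # per-spike directionality values, one array per spike train
    d1, d2 = spk.spike_directionality_values(st1, st2)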
diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx
index ac5f226..d4070ae 100644
--- a/pyspike/cython/cython_distances.pyx
+++ b/pyspike/cython/cython_distances.pyx
@@ -178,6 +178,8 @@ cdef inline double isi_avrg_cython(double isi1, double isi2) nogil:
return 0.5*(isi1+isi2)*(isi1+isi2)
# alternative definition to obtain <S> ~ 0.5 for Poisson spikes
# return 0.5*(isi1*isi1+isi2*isi2)
+ # another alternative definition without second normalization
+ # return 0.5*(isi1+isi2)
############################################################
@@ -248,6 +250,8 @@ def spike_distance_cython(double[:] t1, double[:] t2,
index2 = 0
y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2)
index = 1
while index1+index2 < N1+N2-2:
@@ -267,6 +271,8 @@ def spike_distance_cython(double[:] t1, double[:] t2,
t_curr = t_p1
s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2)
spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
@@ -286,6 +292,8 @@ def spike_distance_cython(double[:] t1, double[:] t2,
s1 = dt_p1
# s2 is the same as above, thus we can compute y2 immediately
y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2)
elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1):
index2 += 1
# first calculate the previous interval end value
@@ -301,6 +309,8 @@ def spike_distance_cython(double[:] t1, double[:] t2,
t_curr = t_p2
s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2)
spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
@@ -320,6 +330,9 @@ def spike_distance_cython(double[:] t1, double[:] t2,
s2 = dt_p2
# s1 is the same as above, thus we can compute y2 immediately
y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2)
+
else: # t_f1 == t_f2 - generate only one event
index1 += 1
index2 += 1
@@ -358,6 +371,193 @@ def spike_distance_cython(double[:] t1, double[:] t2,
s1 = dt_f1 # *(t_end-t1[N1-1])/isi1
s2 = dt_f2 # *(t_end-t2[N2-1])/isi2
y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2)
+
+ spike_value += 0.5*(y_start + y_end) * (t_end - t_last)
+ # end nogil
+
+ # use only the data added above
+ # could be less than original length due to equal spike times
+ return spike_value / (t_end-t_start)
+
+
+############################################################
+# isi_avrg_rf_cython
+############################################################
+cdef inline double isi_avrg_rf_cython(double isi1, double isi2) nogil:
+ # rate free version
+ return (isi1+isi2)
+
+
+############################################################
+# spike_distance_rf_cython
+############################################################
+def spike_distance_rf_cython(double[:] t1, double[:] t2,
+ double t_start, double t_end):
+
+ cdef int N1, N2, index1, index2, index
+ cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2
+ cdef double isi1, isi2, s1, s2
+ cdef double y_start, y_end, t_last, t_current, spike_value
+
+ spike_value = 0.0
+
+ N1 = len(t1)
+ N2 = len(t2)
+
+ with nogil: # release the interpreter to allow multithreading
+ t_last = t_start
+ t_p1 = t_start
+ t_p2 = t_start
+ if t1[0] > t_start:
+ # dt_p1 = t2[0]-t_start
+ t_f1 = t1[0]
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
+ isi1 = fmax(t_f1-t_start, t1[1]-t1[0])
+ dt_p1 = dt_f1
+ s1 = dt_p1*(t_f1-t_start)/isi1
+ index1 = -1
+ else:
+ t_f1 = t1[1]
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
+ dt_p1 = 0.0
+ isi1 = t1[1]-t1[0]
+ s1 = dt_p1
+ index1 = 0
+ if t2[0] > t_start:
+ # dt_p1 = t2[0]-t_start
+ t_f2 = t2[0]
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
+ dt_p2 = dt_f2
+ isi2 = fmax(t_f2-t_start, t2[1]-t2[0])
+ s2 = dt_p2*(t_f2-t_start)/isi2
+ index2 = -1
+ else:
+ t_f2 = t2[1]
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
+ dt_p2 = 0.0
+ isi2 = t2[1]-t2[0]
+ s2 = dt_p2
+ index2 = 0
+
+ # y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+ index = 1
+
+ while index1+index2 < N1+N2-2:
+ # print(index, index1, index2)
+ if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1):
+ index1 += 1
+ # first calculate the previous interval end value
+ s1 = dt_f1*(t_f1-t_p1) / isi1
+ # the previous time now was the following time before:
+ dt_p1 = dt_f1
+ t_p1 = t_f1 # t_p1 contains the current time point
+ # get the next time
+ if index1 < N1-1:
+ t_f1 = t1[index1+1]
+ else:
+ t_f1 = t_end
+ t_curr = t_p1
+ s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
+ # y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+
+ spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+
+ # now the next interval start value
+ if index1 < N1-1:
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
+ t_start, t_end)
+ isi1 = t_f1-t_p1
+ s1 = dt_p1
+ else:
+ dt_f1 = dt_p1
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ # s1 needs adjustment due to change of isi1
+ s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # s2 is the same as above, thus we can compute y2 immediately
+ # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+ elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1):
+ index2 += 1
+ # first calculate the previous interval end value
+ s2 = dt_f2*(t_f2-t_p2) / isi2
+ # the previous time now was the following time before:
+ dt_p2 = dt_f2
+ t_p2 = t_f2 # t_p2 contains the current time point
+ # get the next time
+ if index2 < N2-1:
+ t_f2 = t2[index2+1]
+ else:
+ t_f2 = t_end
+ t_curr = t_p2
+ s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
+ # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+
+ spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+
+ # now the next interval start value
+ if index2 < N2-1:
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
+ t_start, t_end)
+ isi2 = t_f2-t_p2
+ s2 = dt_p2
+ else:
+ dt_f2 = dt_p2
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ # s2 needs adjustment due to change of isi2
+ s2 = dt_p2*(t_end-t2[N2-1])/isi2
+ # s1 is the same as above, thus we can compute y2 immediately
+ # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+
+ else: # t_f1 == t_f2 - generate only one event
+ index1 += 1
+ index2 += 1
+ t_p1 = t_f1
+ t_p2 = t_f2
+ dt_p1 = 0.0
+ dt_p2 = 0.0
+ t_curr = t_f1
+ y_end = 0.0
+ spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+ y_start = 0.0
+ if index1 < N1-1:
+ t_f1 = t1[index1+1]
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
+ t_start, t_end)
+ isi1 = t_f1 - t_p1
+ else:
+ t_f1 = t_end
+ dt_f1 = dt_p1
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ if index2 < N2-1:
+ t_f2 = t2[index2+1]
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
+ t_start, t_end)
+ isi2 = t_f2 - t_p2
+ else:
+ t_f2 = t_end
+ dt_f2 = dt_p2
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ index += 1
+ t_last = t_curr
+ # isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ # isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ s1 = dt_f1*(t_end-t1[N1-1])/isi1
+ s2 = dt_f2*(t_end-t2[N2-1])/isi2
+ # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+
spike_value += 0.5*(y_start + y_end) * (t_end - t_last)
# end nogil
diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx
index 4a42cdb..aa24db4 100644
--- a/pyspike/cython/cython_profiles.pyx
+++ b/pyspike/cython/cython_profiles.pyx
@@ -450,3 +450,36 @@ def coincidence_profile_cython(double[:] spikes1, double[:] spikes2,
c[1] = 1
return st, c, mp
+
+
+############################################################
+# coincidence_single_profile_cython
+############################################################
+def coincidence_single_profile_cython(double[:] spikes1, double[:] spikes2,
+ double t_start, double t_end, double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int j = -1
+ cdef double[:] c = np.zeros(N1) # coincidences
+ cdef double interval = t_end - t_start
+ cdef double tau
+    for i in range(N1):
+ while j < N2-1 and spikes2[j+1] < spikes1[i]:
+ # move forward until spikes2[j] is the last spike before spikes1[i]
+            # note that if spikes2[j] is after spikes1[i] we don't do anything
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if j > -1 and fabs(spikes1[i]-spikes2[j]) < tau:
+ # current spike in st1 is coincident
+ c[i] = 1
+ if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]):
+        # in case spikes2[j] is before spikes1[i] it has to be the first or
+        # the one right before (see above), hence we move one forward and
+        # also check the next spike
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if fabs(spikes2[j]-spikes1[i]) < tau:
+ # current spike in st1 is coincident
+ c[i] = 1
+ return c
diff --git a/pyspike/cython/cython_simulated_annealing.pyx b/pyspike/cython/cython_simulated_annealing.pyx
new file mode 100644
index 0000000..be9423c
--- /dev/null
+++ b/pyspike/cython/cython_simulated_annealing.pyx
@@ -0,0 +1,82 @@
+#cython: boundscheck=False
+#cython: wraparound=False
+#cython: cdivision=True
+
+"""
+cython_simulated_annealing.pyx
+
+cython implementation of a simulated annealing algorithm to find the optimal
+spike train order
+
+Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects
+improves the performance of spike_distance by a factor of 10!
+
+Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+"""
+To test whether things can be optimized: remove all yellow stuff
+in the html output::
+
+ cython -a cython_simulated_annealing.pyx
+
+which gives:
+
+ cython_simulated_annealing.html
+
+"""
+
+import numpy as np
+cimport numpy as np
+
+from libc.math cimport exp
+from libc.math cimport fmod
+from libc.stdlib cimport rand
+from libc.stdlib cimport RAND_MAX
+
+DTYPE = np.float64
+ctypedef np.float_t DTYPE_t
+
+
+def sim_ann_cython(double[:, :] D, double T_start, double T_end, double alpha):
+
+ cdef long N = len(D)
+ cdef double A = np.sum(np.triu(D, 0))
+ cdef long[:] p = np.arange(N)
+ cdef double T = T_start
+ cdef long iterations
+ cdef long succ_iter
+ cdef long total_iter = 0
+ cdef double delta_A
+ cdef long ind1
+ cdef long ind2
+
+ while T > T_end:
+ iterations = 0
+ succ_iter = 0
+ # equilibrate for 100*N steps or 10*N successful steps
+ while iterations < 100*N and succ_iter < 10*N:
+ # exchange two rows and cols
+ # ind1 = np.random.randint(N-1)
+ ind1 = rand() % (N-1)
+ if ind1 < N-1:
+ ind2 = ind1+1
+ else: # this can never happen!
+ ind2 = 0
+ delta_A = -2*D[p[ind1], p[ind2]]
+ if delta_A > 0.0 or exp(delta_A/T) > ((1.0*rand()) / RAND_MAX):
+ # swap indices
+ p[ind1], p[ind2] = p[ind2], p[ind1]
+ A += delta_A
+ succ_iter += 1
+ iterations += 1
+ total_iter += iterations
+ T *= alpha # cool down
+ if succ_iter == 0:
+ # no successful step -> we believe we have converged
+ break
+
+ return p, A, total_iter
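Within PySpike this routine is normally driven through optimal_spike_train_sorting; a minimal direct-usage sketch (assuming the extension has been built via 'python setup.py build_ext --inplace'; the matrix and the temperature schedule are arbitrary example values)::

    import numpy as np
    from pyspike.cython.cython_simulated_annealing import sim_ann_cython

    # toy directionality matrix for three spike trains
    D = np.array([[0., -1., -1.],
                  [1.,  0., -1.],
                  [1.,  1.,  0.]])

    # anneal from T=2.0 down to T=1e-5 with cooling factor alpha=0.9
    p, A, total_iter = sim_ann_cython(D, 2.0, 1e-5, 0.9)
    print(np.asarray(p), A, total_iter)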
diff --git a/pyspike/cython/directionality_python_backend.py b/pyspike/cython/directionality_python_backend.py
new file mode 100644
index 0000000..c1d820b
--- /dev/null
+++ b/pyspike/cython/directionality_python_backend.py
@@ -0,0 +1,144 @@
+""" directionality_python_backend.py
+
+Collection of python functions that can be used instead of the cython
+implementation.
+
+Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+import numpy as np
+
+
+############################################################
+# spike_train_order_python
+############################################################
+def spike_directionality_profile_python(spikes1, spikes2, t_start, t_end,
+ max_tau):
+
+ def get_tau(spikes1, spikes2, i, j, max_tau):
+ m = t_end - t_start # use interval as initial tau
+ if i < len(spikes1)-1 and i > -1:
+ m = min(m, spikes1[i+1]-spikes1[i])
+ if j < len(spikes2)-1 and j > -1:
+ m = min(m, spikes2[j+1]-spikes2[j])
+ if i > 0:
+ m = min(m, spikes1[i]-spikes1[i-1])
+ if j > 0:
+ m = min(m, spikes2[j]-spikes2[j-1])
+ m *= 0.5
+ if max_tau > 0.0:
+ m = min(m, max_tau)
+ return m
+
+ N1 = len(spikes1)
+ N2 = len(spikes2)
+ i = -1
+ j = -1
+ d1 = np.zeros(N1) # directionality values
+ d2 = np.zeros(N2) # directionality values
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau)
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike in first spike train occurs after second
+ d1[i] = -1
+ d2[j] = +1
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau)
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike in second spike train occurs after first
+ d1[i] = +1
+ d2[j] = -1
+ else: # spikes1[i+1] = spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ d1[i] = 0
+ d2[j] = 0
+
+ return d1, d2
+
+
+############################################################
+# spike_train_order_python
+############################################################
+def spike_train_order_profile_python(spikes1, spikes2, t_start, t_end,
+ max_tau):
+
+ def get_tau(spikes1, spikes2, i, j, max_tau):
+ m = t_end - t_start # use interval as initial tau
+ if i < len(spikes1)-1 and i > -1:
+ m = min(m, spikes1[i+1]-spikes1[i])
+ if j < len(spikes2)-1 and j > -1:
+ m = min(m, spikes2[j+1]-spikes2[j])
+ if i > 0:
+ m = min(m, spikes1[i]-spikes1[i-1])
+ if j > 0:
+ m = min(m, spikes2[j]-spikes2[j-1])
+ m *= 0.5
+ if max_tau > 0.0:
+ m = min(m, max_tau)
+ return m
+
+ N1 = len(spikes1)
+ N2 = len(spikes2)
+ i = -1
+ j = -1
+ n = 0
+ st = np.zeros(N1 + N2 + 2) # spike times
+ a = np.zeros(N1 + N2 + 2) # coincidences
+ mp = np.ones(N1 + N2 + 2) # multiplicity
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau)
+ st[n] = spikes1[i]
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+                # both get marked with -1
+ a[n] = -1
+ a[n-1] = -1
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau)
+ st[n] = spikes2[j]
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # both get marked with 1
+ a[n] = 1
+ a[n-1] = 1
+ else: # spikes1[i+1] = spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ n += 1
+ # add only one event with zero asymmetry value and multiplicity 2
+ st[n] = spikes1[i]
+ a[n] = 0
+ mp[n] = 2
+
+ st = st[:n+2]
+ a = a[:n+2]
+ mp = mp[:n+2]
+
+ st[0] = t_start
+ st[len(st)-1] = t_end
+ if N1 + N2 > 0:
+ a[0] = a[1]
+ a[len(a)-1] = a[len(a)-2]
+ mp[0] = mp[1]
+ mp[len(mp)-1] = mp[len(mp)-2]
+ else:
+ a[0] = 1
+ a[1] = 1
+
+ return st, a, mp
diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py
index 6b7209a..e75f181 100644
--- a/pyspike/cython/python_backend.py
+++ b/pyspike/cython/python_backend.py
@@ -3,7 +3,7 @@
Collection of python functions that can be used instead of the cython
implementation.
-Copyright 2014, Mario Mulansky <mario.mulansky@gmx.net>
+Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
Distributed under the BSD License
@@ -356,26 +356,27 @@ def cumulative_sync_python(spikes1, spikes2):
return st, c
+def get_tau(spikes1, spikes2, i, j, max_tau, init_tau):
+ m = init_tau
+ if i < len(spikes1)-1 and i > -1:
+ m = min(m, spikes1[i+1]-spikes1[i])
+ if j < len(spikes2)-1 and j > -1:
+ m = min(m, spikes2[j+1]-spikes2[j])
+ if i > 0:
+ m = min(m, spikes1[i]-spikes1[i-1])
+ if j > 0:
+ m = min(m, spikes2[j]-spikes2[j-1])
+ m *= 0.5
+ if max_tau > 0.0:
+ m = min(m, max_tau)
+ return m
+
+
############################################################
# coincidence_python
############################################################
def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
- def get_tau(spikes1, spikes2, i, j, max_tau):
- m = t_end - t_start # use interval as initial tau
- if i < len(spikes1)-1 and i > -1:
- m = min(m, spikes1[i+1]-spikes1[i])
- if j < len(spikes2)-1 and j > -1:
- m = min(m, spikes2[j+1]-spikes2[j])
- if i > 0:
- m = min(m, spikes1[i]-spikes1[i-1])
- if j > 0:
- m = min(m, spikes2[j]-spikes2[j-1])
- m *= 0.5
- if max_tau > 0.0:
- m = min(m, max_tau)
- return m
-
N1 = len(spikes1)
N2 = len(spikes2)
i = -1
@@ -388,7 +389,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
i += 1
n += 1
- tau = get_tau(spikes1, spikes2, i, j, max_tau)
+ tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
st[n] = spikes1[i]
if j > -1 and spikes1[i]-spikes2[j] < tau:
# coincidence between the current spike and the previous spike
@@ -398,7 +399,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
j += 1
n += 1
- tau = get_tau(spikes1, spikes2, i, j, max_tau)
+ tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
st[n] = spikes2[j]
if i > -1 and spikes2[j]-spikes1[i] < tau:
# coincidence between the current spike and the previous spike
@@ -434,6 +435,36 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
############################################################
+# coincidence_single_python
+############################################################
+def coincidence_single_python(spikes1, spikes2, t_start, t_end, max_tau):
+
+ N1 = len(spikes1)
+ N2 = len(spikes2)
+ j = -1
+ c = np.zeros(N1) # coincidences
+ for i in range(N1):
+ while j < N2-1 and spikes2[j+1] < spikes1[i]:
+ # move forward until spikes2[j] is the last spike before spikes1[i]
+            # note that if spikes2[j] is after spikes1[i] we don't do anything
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
+ if j > -1 and abs(spikes1[i]-spikes2[j]) < tau:
+ # current spike in st1 is coincident
+ c[i] = 1
+ if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]):
+ # in case spikes2[j] is before spikes1[i] it has to be the first or
+ # the one right before (see above), hence we move one forward and
+ # also check the next spike
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
+ if abs(spikes2[j]-spikes1[i]) < tau:
+ # current spike in st1 is coincident
+ c[i] = 1
+ return c
+
+
+############################################################
# add_piece_wise_const_python
############################################################
def add_piece_wise_const_python(x1, y1, x2, y2):
diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py
new file mode 100644
index 0000000..248862c
--- /dev/null
+++ b/pyspike/spike_directionality.py
@@ -0,0 +1,522 @@
+# Module containing functions to compute the SPIKE directionality and the
+# spike train order profile
+# Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+# Distributed under the BSD License
+
+from __future__ import absolute_import
+
+import numpy as np
+import pyspike
+from pyspike import DiscreteFunc
+from functools import partial
+from pyspike.generic import _generic_profile_multi
+
+
+############################################################
+# spike_directionality_values
+############################################################
+def spike_directionality_values(*args, **kwargs):
+ """ Computes the spike directionality value for each spike in
+ each spike train. Returns a list containing an array of spike directionality
+ values for every given spike train.
+
+ Valid call structures::
+
+ spike_directionality_values(st1, st2) # returns the bi-variate profile
+ spike_directionality_values(st1, st2, st3) # multi-variate profile of 3
+ # spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_directionality_values(spike_trains) # profile of the list of spike trains
+ spike_directionality_values(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+    Additional arguments:
+ :param max_tau: Upper bound for coincidence window (default=None).
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+
+ :returns: The spike directionality values :math:`D^n_i` as a list of arrays.
+ """
+ if len(args) == 1:
+ return _spike_directionality_values_impl(args[0], **kwargs)
+ else:
+ return _spike_directionality_values_impl(args, **kwargs)
+
+
+def _spike_directionality_values_impl(spike_trains, indices=None,
+ interval=None, max_tau=None):
+ """ Computes the multi-variate spike directionality profile
+ of the given spike trains.
+
+ :param spike_trains: List of spike trains.
+ :type spike_trains: List of :class:`pyspike.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike-directionality values.
+ """
+ if interval is not None:
+ raise NotImplementedError("Parameter `interval` not supported.")
+ if indices is None:
+ indices = np.arange(len(spike_trains))
+ indices = np.array(indices)
+ # check validity of indices
+ assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+ "Invalid index list."
+ # list of arrays for resulting asymmetry values
+ asymmetry_list = [np.zeros_like(spike_trains[n].spikes) for n in indices]
+ # generate a list of possible index pairs
+ pairs = [(indices[i], j) for i in range(len(indices))
+ for j in indices[i+1:]]
+
+ # cython implementation
+ try:
+ from .cython.cython_directionality import \
+ spike_directionality_profiles_cython as profile_impl
+ except ImportError:
+ if not(pyspike.disable_backend_warning):
+ print("Warning: spike_distance_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+ # use python backend
+ from .cython.directionality_python_backend import \
+ spike_directionality_profile_python as profile_impl
+
+ if max_tau is None:
+ max_tau = 0.0
+
+ for i, j in pairs:
+ d1, d2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes,
+ spike_trains[i].t_start, spike_trains[i].t_end,
+ max_tau)
+ asymmetry_list[i] += d1
+ asymmetry_list[j] += d2
+ for a in asymmetry_list:
+ a /= len(spike_trains)-1
+ return asymmetry_list
+
+
+############################################################
+# spike_directionality
+############################################################
+def spike_directionality(spike_train1, spike_train2, normalize=True,
+ interval=None, max_tau=None):
+ """ Computes the overall spike directionality of the first spike train with
+ respect to the second spike train.
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`pyspike.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`pyspike.SpikeTrain`
+ :param normalize: Normalize by the number of spikes (multiplicity).
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+    :returns: The spike directionality value :math:`D`.
+ """
+ if interval is None:
+ # distance over the whole interval is requested: use specific function
+ # for optimal performance
+ try:
+ from .cython.cython_directionality import \
+ spike_directionality_cython as spike_directionality_impl
+ if max_tau is None:
+ max_tau = 0.0
+ d = spike_directionality_impl(spike_train1.spikes,
+ spike_train2.spikes,
+ spike_train1.t_start,
+ spike_train1.t_end,
+ max_tau)
+ c = len(spike_train1.spikes)
+ except ImportError:
+ if not(pyspike.disable_backend_warning):
+ print("Warning: spike_distance_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+ # use profile.
+ d1, x = spike_directionality_values([spike_train1, spike_train2],
+ interval=interval,
+ max_tau=max_tau)
+ d = np.sum(d1)
+ c = len(spike_train1.spikes)
+ if normalize:
+ return 1.0*d/c
+ else:
+ return d
+ else:
+ # some specific interval is provided: not yet implemented
+ raise NotImplementedError("Parameter `interval` not supported.")
+
+
+############################################################
+# spike_directionality_matrix
+############################################################
+def spike_directionality_matrix(spike_trains, normalize=True, indices=None,
+ interval=None, max_tau=None):
+ """ Computes the spike directionality matrix for the given spike trains.
+
+ :param spike_trains: List of spike trains.
+ :type spike_trains: List of :class:`pyspike.SpikeTrain`
+ :param normalize: Normalize by the number of spikes (multiplicity).
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+    :returns: The spike directionality matrix.
+ """
+ if indices is None:
+ indices = np.arange(len(spike_trains))
+ indices = np.array(indices)
+ # check validity of indices
+ assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+ "Invalid index list."
+ # generate a list of possible index pairs
+ pairs = [(indices[i], j) for i in range(len(indices))
+ for j in indices[i+1:]]
+
+ distance_matrix = np.zeros((len(indices), len(indices)))
+ for i, j in pairs:
+ d = spike_directionality(spike_trains[i], spike_trains[j], normalize,
+ interval, max_tau=max_tau)
+ distance_matrix[i, j] = d
+ distance_matrix[j, i] = -d
+ return distance_matrix
+
+
+############################################################
+# spike_train_order_profile
+############################################################
+def spike_train_order_profile(*args, **kwargs):
+ """ Computes the spike train order profile :math:`E(t)` of the given
+ spike trains. Returns the profile as a DiscreteFunction object.
+
+ Valid call structures::
+
+ spike_train_order_profile(st1, st2) # returns the bi-variate profile
+ spike_train_order_profile(st1, st2, st3) # multi-variate profile of 3
+ # spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_train_order_profile(spike_trains) # profile of the list of spike trains
+ spike_train_order_profile(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+    Additional arguments:
+ :param max_tau: Upper bound for coincidence window, `default=None`.
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+
+ :returns: The spike train order profile :math:`E(t)`
+ :rtype: :class:`.DiscreteFunction`
+ """
+ if len(args) == 1:
+ return spike_train_order_profile_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_train_order_profile_bi(args[0], args[1], **kwargs)
+ else:
+ return spike_train_order_profile_multi(args, **kwargs)
+
+
+############################################################
+# spike_train_order_profile_bi
+############################################################
+def spike_train_order_profile_bi(spike_train1, spike_train2, max_tau=None):
+ """ Computes the spike train order profile P(t) of the two given
+ spike trains. Returns the profile as a DiscreteFunction object.
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`pyspike.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`pyspike.SpikeTrain`
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike train order profile :math:`E(t)`.
+ :rtype: :class:`pyspike.function.DiscreteFunction`
+ """
+ # check whether the spike trains are defined for the same interval
+ assert spike_train1.t_start == spike_train2.t_start, \
+ "Given spike trains are not defined on the same interval!"
+ assert spike_train1.t_end == spike_train2.t_end, \
+ "Given spike trains are not defined on the same interval!"
+
+ # cython implementation
+ try:
+ from .cython.cython_directionality import \
+ spike_train_order_profile_cython as \
+ spike_train_order_profile_impl
+ except ImportError:
+ # raise NotImplementedError()
+ if not(pyspike.disable_backend_warning):
+ print("Warning: spike_distance_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+ # use python backend
+ from .cython.directionality_python_backend import \
+ spike_train_order_profile_python as spike_train_order_profile_impl
+
+ if max_tau is None:
+ max_tau = 0.0
+
+ times, coincidences, multiplicity \
+ = spike_train_order_profile_impl(spike_train1.spikes,
+ spike_train2.spikes,
+ spike_train1.t_start,
+ spike_train1.t_end,
+ max_tau)
+
+ return DiscreteFunc(times, coincidences, multiplicity)
+
+
+############################################################
+# spike_train_order_profile_multi
+############################################################
+def spike_train_order_profile_multi(spike_trains, indices=None,
+ max_tau=None):
+ """ Computes the multi-variate spike train order profile for a set of
+ spike trains. For each spike in the set of spike trains, the multi-variate
+ profile is defined as the sum of asymmetry values divided by the number of
+    spike train pairs involving the spike train containing this spike,
+ which is the number of spike trains minus one (N-1).
+
+ :param spike_trains: list of :class:`pyspike.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+    :returns: The multi-variate spike train order profile :math:`E(t)`
+ :rtype: :class:`pyspike.function.DiscreteFunction`
+ """
+ prof_func = partial(spike_train_order_profile_bi, max_tau=max_tau)
+ average_prof, M = _generic_profile_multi(spike_trains, prof_func,
+ indices)
+ return average_prof
+
+
+
+############################################################
+# _spike_train_order_impl
+############################################################
+def _spike_train_order_impl(spike_train1, spike_train2,
+ interval=None, max_tau=None):
+ """ Implementation of bi-variatae spike train order value (Synfire Indicator).
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`pyspike.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`pyspike.SpikeTrain`
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike train order value (Synfire Indicator)
+ """
+ if interval is None:
+ # distance over the whole interval is requested: use specific function
+ # for optimal performance
+ try:
+ from .cython.cython_directionality import \
+ spike_train_order_cython as spike_train_order_func
+ if max_tau is None:
+ max_tau = 0.0
+ c, mp = spike_train_order_func(spike_train1.spikes,
+ spike_train2.spikes,
+ spike_train1.t_start,
+ spike_train1.t_end,
+ max_tau)
+ except ImportError:
+ # Cython backend not available: fall back to profile averaging
+ c, mp = spike_train_order_profile(spike_train1, spike_train2,
+ max_tau=max_tau).integral(interval)
+ return c, mp
+ else:
+ # some specific interval is provided: not yet implemented
+ raise NotImplementedError("Parameter `interval` not supported.")
+
+
+############################################################
+# spike_train_order
+############################################################
+def spike_train_order(*args, **kwargs):
+ """ Computes the spike train order (Synfire Indicator) of the given
+ spike trains.
+
+ Valid call structures::
+
+ spike_train_order(st1, st2, normalize=True) # normalized bi-variate
+ # spike train order
+ spike_train_order(st1, st2, st3) # multi-variate result of 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_train_order(spike_trains) # result for the list of spike trains
+ spike_train_order(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ Additional arguments:
+ - `max_tau` Upper bound for coincidence window, `default=None`.
+ - `normalize` Flag indicating if the result should be normalized by the
+ number of spikes, `default=True`.
+
+
+ :returns: The spike train order value (Synfire Indicator)
+ """
+ if len(args) == 1:
+ return spike_train_order_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_train_order_bi(args[0], args[1], **kwargs)
+ else:
+ return spike_train_order_multi(args, **kwargs)
+
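+# Dispatch sketch for the call structures listed in the docstring above:
+#
+#     spike_train_order(st1, st2)          # -> spike_train_order_bi
+#     spike_train_order([st1, st2, st3])   # -> spike_train_order_multi
+#     spike_train_order(st1, st2, st3)     # -> spike_train_order_multi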
+
+############################################################
+# spike_train_order_bi
+############################################################
+def spike_train_order_bi(spike_train1, spike_train2, normalize=True,
+ interval=None, max_tau=None):
+ """ Computes the overall spike train order value (Synfire Indicator)
+ for two spike trains.
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`pyspike.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`pyspike.SpikeTrain`
+ :param normalize: Normalize by the number of spikes (multiplicity).
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike train order value (Synfire Indicator)
+ """
+ c, mp = _spike_train_order_impl(spike_train1, spike_train2, interval, max_tau)
+ if normalize:
+ return 1.0*c/mp
+ else:
+ return c
+
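+# Illustrative values (taken from test/test_directionality.py below; the
+# sign flip follows from the antisymmetry of the measure):
+#
+#     spike_train_order_bi(st1, st2)                   # 2/3
+#     spike_train_order_bi(st1, st2, normalize=False)  # 4.0
+#     spike_train_order_bi(st2, st1)                   # -2/3
+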
+############################################################
+# spike_train_order_multi
+############################################################
+def spike_train_order_multi(spike_trains, indices=None, normalize=True,
+ interval=None, max_tau=None):
+ """ Computes the overall spike train order value (Synfire Indicator)
+ for many spike trains.
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :param normalize: Normalize by the number of spikes (multiplicity).
+ :param interval: averaging interval given as a pair of floats, if None
+ the average over the whole function is computed.
+ :type interval: Pair of floats or None.
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: Spike train order value (Synfire Indicator) F for the given spike trains.
+ :rtype: double
+ """
+ if indices is None:
+ indices = np.arange(len(spike_trains))
+ indices = np.array(indices)
+ # check validity of indices
+ assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+ "Invalid index list."
+ # generate a list of possible index pairs
+ pairs = [(indices[i], j) for i in range(len(indices))
+ for j in indices[i+1:]]
+
+ e_total = 0.0
+ m_total = 0.0
+ for (i, j) in pairs:
+ e, m = _spike_train_order_impl(spike_trains[i], spike_trains[j],
+ interval, max_tau)
+ e_total += e
+ m_total += m
+
+ if m_total == 0.0:
+ return 1.0
+ elif normalize:
+ return e_total/m_total
+ else:
+ return e_total
+
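+# The same value can also be obtained from the unnormalized directionality
+# matrix (cf. test_spike_train_order in test/test_directionality.py below):
+#
+#     D = spike_directionality_matrix(spike_trains, normalize=False)
+#     F = np.sum(np.triu(D)) / sum(len(st) for st in spike_trains)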
+
+
+############################################################
+# optimal_spike_train_sorting_from_matrix
+############################################################
+def _optimal_spike_train_sorting_from_matrix(D, full_output=False):
+ """ Finds the best sorting via simulated annealing.
+ Returns the optimal permutation p and the optimized (unnormalized)
+ synfire indicator value A.
+ Not for direct use, call :func:`.optimal_spike_train_sorting` instead.
+
+ :param D: The directionality (Spike-ORDER) matrix.
+ :param full_output: If true, then function will additionally return the
+ number of performed iterations (default=False)
+ :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+ if `full_output=True` , (p, F, iter) is returned.
+ """
+
+ T_start = 2*np.max(D) # starting temperature
+ T_end = 1E-5 * T_start # final temperature
+ alpha = 0.9 # cooling factor
+
+ try:
+ from .cython.cython_simulated_annealing import sim_ann_cython as sim_ann
+ except ImportError:
+ raise NotImplementedError("The Cython backend of PySpike is required"
+ " for computing the optimal spike train sorting!")
+
+ p, A, total_iter = sim_ann(D, T_start, T_end, alpha)
+
+ if full_output:
+ return p, A, total_iter
+ else:
+ return p, A
+
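+# Minimal pure-Python sketch of the annealing loop performed by the Cython
+# kernel imported above; the neighbor-swap move set and the acceptance rule
+# are assumptions for illustration, the compiled implementation may differ:
+#
+#     def _sim_ann_sketch(D, T_start, T_end, alpha):
+#         N = len(D)
+#         p = np.arange(N)                  # start from identity permutation
+#         A = np.sum(np.triu(permutate_matrix(D, p)))
+#         T, total_iter = T_start, 0
+#         while T > T_end:                  # exponential cooling schedule
+#             n = np.random.randint(N - 1)  # try swapping two neighbors
+#             q = p.copy()
+#             q[n], q[n + 1] = q[n + 1], q[n]
+#             A_new = np.sum(np.triu(permutate_matrix(D, q)))
+#             # always accept improvements; accept worse orderings with
+#             # probability exp((A_new - A)/T)
+#             if A_new > A or np.exp((A_new - A)/T) > np.random.rand():
+#                 p, A = q, A_new
+#             T *= alpha
+#             total_iter += 1
+#         return p, A, total_iter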
+
+############################################################
+# optimal_spike_train_sorting
+############################################################
+def optimal_spike_train_sorting(spike_trains, indices=None, interval=None,
+ max_tau=None, full_output=False):
+ """ Finds the best sorting of the given spike trains by computing the spike
+ directionality matrix and optimize the order using simulated annealing.
+ For a detailed description of the algorithm see:
+ `http://iopscience.iop.org/article/10.1088/1367-2630/aa68c3/meta`
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param interval: time interval filter given as a pair of floats, if None
+ the full spike trains are used (default=None).
+ :type interval: Pair of floats or None.
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound (default=None).
+ :param full_output: If true, then function will additionally return the
+ number of performed iterations (default=False)
+ :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+ if `full_output=True` , (p, F, iter) is returned.
+ """
+ D = spike_directionality_matrix(spike_trains, normalize=False,
+ indices=indices, interval=interval,
+ max_tau=max_tau)
+ return _optimal_spike_train_sorting_from_matrix(D, full_output)
+
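+# Illustrative usage sketch: `p` is a permutation of indices into the given
+# list, so the sorted trains can be recovered as follows:
+#
+#     p, F = optimal_spike_train_sorting(spike_trains)
+#     sorted_trains = [spike_trains[i] for i in p]
+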
+############################################################
+# permutate_matrix
+############################################################
+def permutate_matrix(D, p):
+ """ Helper function that applies the permutation p to the columns and rows
+ of matrix D. Return the permutated matrix :math:`D'[n,m] = D[p[n], p[m]]`.
+
+ :param D: The matrix.
+ :param d: The permutation.
+ :return: The permuated matrix D', ie :math:`D'[n,m] = D[p[n], p[m]]`
+ """
+ N = len(D)
+ D_p = np.empty_like(D)
+ for n in range(N):
+ for m in range(N):
+ D_p[n, m] = D[p[n], p[m]]
+ return D_p
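+
+# Illustrative usage sketch: applying the optimal permutation to the matrix
+# itself; after sorting, the sum of the upper triangle (leader-follower
+# entries) is maximal by construction:
+#
+#     D = spike_directionality_matrix(spike_trains, normalize=False)
+#     p, F = _optimal_spike_train_sorting_from_matrix(D)
+#     D_sorted = permutate_matrix(D, p)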
diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py
index 80f7805..95ef454 100644
--- a/pyspike/spike_sync.py
+++ b/pyspike/spike_sync.py
@@ -8,7 +8,7 @@ from __future__ import absolute_import
import numpy as np
from functools import partial
import pyspike
-from pyspike import DiscreteFunc
+from pyspike import DiscreteFunc, SpikeTrain
from pyspike.generic import _generic_profile_multi, _generic_distance_matrix
@@ -45,9 +45,9 @@ def spike_sync_profile(*args, **kwargs):
if len(args) == 1:
return spike_sync_profile_multi(args[0], **kwargs)
elif len(args) == 2:
- return spike_sync_profile_bi(args[0], args[1])
+ return spike_sync_profile_bi(args[0], args[1], **kwargs)
else:
- return spike_sync_profile_multi(args)
+ return spike_sync_profile_multi(args, **kwargs)
############################################################
@@ -290,3 +290,52 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None):
dist_func = partial(spike_sync_bi, max_tau=max_tau)
return _generic_distance_matrix(spike_trains, dist_func,
indices, interval)
+
+
+############################################################
+# filter_by_spike_sync
+############################################################
+def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None,
+ return_removed_spikes=False):
+ """ Removes the spikes with a multi-variate spike_sync value below
+ threshold.
+ """
+ if indices is not None:
+ # restrict the computation to the selected spike trains
+ spike_trains = [spike_trains[i] for i in indices]
+ N = len(spike_trains)
+ filtered_spike_trains = []
+ removed_spike_trains = []
+
+ # cython implementation
+ try:
+ from .cython.cython_profiles import coincidence_single_profile_cython \
+ as coincidence_impl
+ except ImportError:
+ if not(pyspike.disable_backend_warning):
+ print("Warning: coincidence_single_profile_cython not found. Make \
+sure that PySpike is installed by running\n \
+'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+ # use python backend
+ from .cython.python_backend import coincidence_single_python \
+ as coincidence_impl
+
+ if max_tau is None:
+ max_tau = 0.0
+
+ for i, st in enumerate(spike_trains):
+ coincidences = np.zeros_like(st)
+ for j in range(N):
+ if i == j:
+ continue
+ coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes,
+ st.t_start, st.t_end, max_tau)
+ filtered_spikes = st[coincidences > threshold*(N-1)]
+ filtered_spike_trains.append(SpikeTrain(filtered_spikes,
+ [st.t_start, st.t_end]))
+ if return_removed_spikes:
+ removed_spikes = st[coincidences <= threshold*(N-1)]
+ removed_spike_trains.append(SpikeTrain(removed_spikes,
+ [st.t_start, st.t_end]))
+ if return_removed_spikes:
+ return [filtered_spike_trains, removed_spike_trains]
+ else:
+ return filtered_spike_trains
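+
+# Illustrative usage sketch (values taken from test/test_sync_filter.py
+# below):
+#
+#     st1 = SpikeTrain(np.array([1.0, 2.0, 3.0, 4.0]), 5.0)
+#     st2 = SpikeTrain(np.array([1.1, 2.1, 3.8]), 5.0)
+#     st3 = SpikeTrain(np.array([0.9, 3.1, 4.1]), 5.0)
+#     filtered = filter_by_spike_sync([st1, st2, st3], 0.75)
+#     filtered[0].spikes   # -> [1.0, 4.0]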
diff --git a/setup.py b/setup.py
index 5b9e677..b5b01a6 100644
--- a/setup.py
+++ b/setup.py
@@ -30,7 +30,9 @@ class numpy_include(object):
if os.path.isfile("pyspike/cython/cython_add.c") and \
os.path.isfile("pyspike/cython/cython_profiles.c") and \
- os.path.isfile("pyspike/cython/cython_distances.c"):
+ os.path.isfile("pyspike/cython/cython_distances.c") and \
+ os.path.isfile("pyspike/cython/cython_directionality.c") and \
+ os.path.isfile("pyspike/cython/cython_simulated_annealing.c"):
use_c = True
else:
use_c = False
@@ -45,7 +47,11 @@ if use_cython: # Cython is available, compile .pyx -> .c
Extension("pyspike.cython.cython_profiles",
["pyspike/cython/cython_profiles.pyx"]),
Extension("pyspike.cython.cython_distances",
- ["pyspike/cython/cython_distances.pyx"])
+ ["pyspike/cython/cython_distances.pyx"]),
+ Extension("pyspike.cython.cython_directionality",
+ ["pyspike/cython/cython_directionality.pyx"]),
+ Extension("pyspike.cython.cython_simulated_annealing",
+ ["pyspike/cython/cython_simulated_annealing.pyx"])
]
cmdclass.update({'build_ext': build_ext})
elif use_c: # c files are there, compile to binaries
@@ -55,14 +61,18 @@ elif use_c: # c files are there, compile to binaries
Extension("pyspike.cython.cython_profiles",
["pyspike/cython/cython_profiles.c"]),
Extension("pyspike.cython.cython_distances",
- ["pyspike/cython/cython_distances.c"])
+ ["pyspike/cython/cython_distances.c"]),
+ Extension("pyspike.cython.cython_directionality",
+ ["pyspike/cython/cython_directionality.c"]),
+ Extension("pyspike.cython.cython_simulated_annealing",
+ ["pyspike/cython/cython_simulated_annealing.c"])
]
# neither cython nor c files available -> automatic fall-back to python backend
setup(
name='pyspike',
packages=find_packages(exclude=['doc']),
- version='0.5.3',
+ version='0.6.0',
cmdclass=cmdclass,
ext_modules=ext_modules,
include_dirs=[numpy_include()],
@@ -90,11 +100,17 @@ train similarity',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
-
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
- ]
+ ],
+ package_data={
+ 'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c',
+ 'cython/cython_distances.c',
+ 'cython/cython_directionality.c',
+ 'cython/cython_simulated_annealing.c'],
+ 'test': ['Spike_testdata.txt']
+ }
)
diff --git a/test/test_directionality.py b/test/test_directionality.py
new file mode 100644
index 0000000..c2e9bfe
--- /dev/null
+++ b/test/test_directionality.py
@@ -0,0 +1,97 @@
+""" test_directionality.py
+
+Tests the directionality functions
+
+Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+import numpy as np
+from numpy.testing import assert_equal, assert_almost_equal, \
+ assert_array_equal
+
+import pyspike as spk
+from pyspike import SpikeTrain, DiscreteFunc
+
+
+def test_spike_directionality():
+ st1 = SpikeTrain([100, 200, 300], [0, 1000])
+ st2 = SpikeTrain([105, 205, 300], [0, 1000])
+ assert_almost_equal(spk.spike_directionality(st1, st2), 2.0/3.0)
+ assert_almost_equal(spk.spike_directionality(st1, st2, normalize=False),
+ 2.0)
+
+ # exchange order of spike trains should give exact negative profile
+ assert_almost_equal(spk.spike_directionality(st2, st1), -2.0/3.0)
+ assert_almost_equal(spk.spike_directionality(st2, st1, normalize=False),
+ -2.0)
+
+ st3 = SpikeTrain([105, 195, 500], [0, 1000])
+ assert_almost_equal(spk.spike_directionality(st1, st3), 0.0)
+ assert_almost_equal(spk.spike_directionality(st1, st3, normalize=False),
+ 0.0)
+ assert_almost_equal(spk.spike_directionality(st3, st1), 0.0)
+
+ D = spk.spike_directionality_matrix([st1, st2, st3], normalize=False)
+ D_expected = np.array([[0, 2.0, 0.0], [-2.0, 0.0, -1.0], [0.0, 1.0, 0.0]])
+ assert_array_equal(D, D_expected)
+
+ dir_profs = spk.spike_directionality_values([st1, st2, st3])
+ assert_array_equal(dir_profs[0], [1.0, 0.0, 0.0])
+ assert_array_equal(dir_profs[1], [-0.5, -1.0, 0.0])
+
+
+def test_spike_train_order():
+ st1 = SpikeTrain([100, 200, 300], [0, 1000])
+ st2 = SpikeTrain([105, 205, 300], [0, 1000])
+ st3 = SpikeTrain([105, 195, 500], [0, 1000])
+
+ expected_x12 = np.array([0, 100, 105, 200, 205, 300, 1000])
+ expected_y12 = np.array([1, 1, 1, 1, 1, 0, 0])
+ expected_mp12 = np.array([1, 1, 1, 1, 1, 2, 2])
+
+ f = spk.spike_train_order_profile(st1, st2)
+
+ assert f.almost_equal(DiscreteFunc(expected_x12, expected_y12,
+ expected_mp12))
+ assert_almost_equal(f.avrg(), 2.0/3.0)
+ assert_almost_equal(f.avrg(normalize=False), 4.0)
+ assert_almost_equal(spk.spike_train_order(st1, st2), 2.0/3.0)
+ assert_almost_equal(spk.spike_train_order(st1, st2, normalize=False), 4.0)
+
+ expected_x23 = np.array([0, 105, 195, 205, 300, 500, 1000])
+ expected_y23 = np.array([0, 0, -1, -1, 0, 0, 0])
+ expected_mp23 = np.array([2, 2, 1, 1, 1, 1, 1])
+
+ f = spk.spike_train_order_profile(st2, st3)
+
+ assert_array_equal(f.x, expected_x23)
+ assert_array_equal(f.y, expected_y23)
+ assert_array_equal(f.mp, expected_mp23)
+ assert f.almost_equal(DiscreteFunc(expected_x23, expected_y23,
+ expected_mp23))
+ assert_almost_equal(f.avrg(), -1.0/3.0)
+ assert_almost_equal(f.avrg(normalize=False), -2.0)
+ assert_almost_equal(spk.spike_train_order(st2, st3), -1.0/3.0)
+ assert_almost_equal(spk.spike_train_order(st2, st3, normalize=False), -2.0)
+
+ f = spk.spike_train_order_profile_multi([st1, st2, st3])
+
+ expected_x = np.array([0, 100, 105, 195, 200, 205, 300, 500, 1000])
+ expected_y = np.array([2, 2, 2, -2, 0, 0, 0, 0, 0])
+ expected_mp = np.array([2, 2, 4, 2, 2, 2, 4, 2, 2])
+
+ assert_array_equal(f.x, expected_x)
+ assert_array_equal(f.y, expected_y)
+ assert_array_equal(f.mp, expected_mp)
+
+ # Averaging the profile should be the same as computing the synfire indicator directly.
+ assert_almost_equal(f.avrg(), spk.spike_train_order([st1, st2, st3]))
+
+ # We can also compute the synfire indicator from the Directionality Matrix:
+ D_matrix = spk.spike_directionality_matrix([st1, st2, st3], normalize=False)
+ num_spikes = sum(len(st) for st in [st1, st2, st3])
+ syn_fire = np.sum(np.triu(D_matrix)) / num_spikes
+ assert_almost_equal(f.avrg(), syn_fire)
diff --git a/test/test_function.py b/test/test_function.py
index 92d378d..6c04839 100644
--- a/test/test_function.py
+++ b/test/test_function.py
@@ -10,6 +10,7 @@ Distributed under the BSD License
from __future__ import print_function
import numpy as np
from copy import copy
+from nose.tools import raises
from numpy.testing import assert_equal, assert_almost_equal, \
assert_array_equal, assert_array_almost_equal
@@ -49,6 +50,8 @@ def test_pwc():
assert_almost_equal(a, (0.5-0.5+0.5*1.5+1.0*0.75)/3.0, decimal=16)
a = f.avrg([1.5, 3.5])
assert_almost_equal(a, (-0.5*0.5+0.5*1.5+1.0*0.75)/2.0, decimal=16)
+ a = f.avrg([1.0, 2.0])
+ assert_almost_equal(a, (1.0*-0.5)/1.0, decimal=16)
a = f.avrg([1.0, 3.5])
assert_almost_equal(a, (-0.5*1.0+0.5*1.5+1.0*0.75)/2.5, decimal=16)
a = f.avrg([1.0, 4.0])
@@ -120,6 +123,53 @@ def test_pwc_avrg():
assert_array_almost_equal(f1.x, x_expected, decimal=16)
assert_array_almost_equal(f1.y, y_expected, decimal=16)
+def test_pwc_integral():
+ # some random data
+ x = [0.0, 1.0, 2.0, 2.5, 4.0]
+ y = [1.0, -0.5, 1.5, 0.75]
+ f1 = spk.PieceWiseConstFunc(x, y)
+
+ # test full interval
+ full = 1.0*1.0 + 1.0*-0.5 + 0.5*1.5 + 1.5*0.75;
+ assert_equal(f1.integral(), full)
+ assert_equal(f1.integral((np.min(x),np.max(x))), full)
+ # test part interval, spanning an edge
+ assert_equal(f1.integral((0.5,1.5)), 0.5*1.0 + 0.5*-0.5)
+ # test part interval, just over two edges
+ assert_almost_equal(f1.integral((1.0-1e-16,2+1e-16)), 1.0*-0.5, decimal=14)
+ # test part interval, between two edges
+ assert_equal(f1.integral((1.0,2.0)), 1.0*-0.5)
+ assert_equal(f1.integral((1.2,1.7)), (1.7-1.2)*-0.5)
+ # test part interval, start to before and after edge
+ assert_equal(f1.integral((0.0,0.7)), 0.7*1.0)
+ assert_equal(f1.integral((0.0,1.1)), 1.0*1.0+0.1*-0.5)
+ # test part interval, before and after edge till end
+ assert_equal(f1.integral((2.6,4.0)), (4.0-2.6)*0.75)
+ assert_equal(f1.integral((2.4,4.0)), (2.5-2.4)*1.5+(4-2.5)*0.75)
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_inv():
+ # some random data
+ x = [0.0, 1.0, 2.0, 2.5, 4.0]
+ y = [1.0, -0.5, 1.5, 0.75]
+ f1 = spk.PieceWiseConstFunc(x, y)
+ f1.integral((3,2))
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_oob_1():
+ # some random data
+ x = [0.0, 1.0, 2.0, 2.5, 4.0]
+ y = [1.0, -0.5, 1.5, 0.75]
+ f1 = spk.PieceWiseConstFunc(x, y)
+ f1.integral((1,6))
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_oob_2():
+ # some random data
+ x = [0.0, 1.0, 2.0, 2.5, 4.0]
+ y = [1.0, -0.5, 1.5, 0.75]
+ f1 = spk.PieceWiseConstFunc(x, y)
+ f1.integral((-1,3))
def test_pwl():
x = [0.0, 1.0, 2.0, 2.5, 4.0]
@@ -162,6 +212,18 @@ def test_pwl():
a = f.avrg([1.0, 4.0])
assert_almost_equal(a, (-0.45 + 0.75 + 1.5*0.5) / 3.0, decimal=16)
+ # interval between support points
+ a = f.avrg([1.1, 1.5])
+ assert_almost_equal(a, (-0.5+0.1*0.1 - 0.45) * 0.5, decimal=14)
+
+ # starting at a support point
+ a = f.avrg([1.0, 1.5])
+ assert_almost_equal(a, (-0.5 - 0.45) * 0.5, decimal=14)
+
+ # start and end at support point
+ a = f.avrg([1.0, 2.0])
+ assert_almost_equal(a, (-0.5 - 0.4) * 0.5, decimal=14)
+
# averaging over multiple intervals
a = f.avrg([(0.5, 1.5), (1.5, 2.5)])
assert_almost_equal(a, (1.375*0.5 - 0.45 + 0.75)/2.0, decimal=16)
diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py
new file mode 100644
index 0000000..e259903
--- /dev/null
+++ b/test/test_sync_filter.py
@@ -0,0 +1,95 @@
+""" test_sync_filter.py
+
+Tests the spike sync based filtering
+
+Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+from __future__ import print_function
+import numpy as np
+from numpy.testing import assert_equal, assert_almost_equal, \
+ assert_array_almost_equal
+
+import pyspike as spk
+from pyspike import SpikeTrain
+
+
+def test_single_prof():
+ st1 = np.array([1.0, 2.0, 3.0, 4.0])
+ st2 = np.array([1.1, 2.1, 3.8])
+ st3 = np.array([0.9, 3.1, 4.1])
+
+ # cython implementation
+ try:
+ from pyspike.cython.cython_profiles import \
+ coincidence_single_profile_cython as coincidence_impl
+ except ImportError:
+ from pyspike.cython.python_backend import \
+ coincidence_single_python as coincidence_impl
+
+ sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+ SpikeTrain(st2, 5.0))
+
+ coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+ print(coincidences)
+ for i, t in enumerate(st1):
+ assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+ "At index %d" % i)
+
+ coincidences = np.array(coincidence_impl(st2, st1, 0, 5.0, 0.0))
+ for i, t in enumerate(st2):
+ assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+ "At index %d" % i)
+
+ sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+ SpikeTrain(st3, 5.0))
+
+ coincidences = np.array(coincidence_impl(st1, st3, 0, 5.0, 0.0))
+ for i, t in enumerate(st1):
+ assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+ "At index %d" % i)
+
+ st1 = np.array([1.0, 2.0, 3.0, 4.0])
+ st2 = np.array([1.0, 2.0, 4.0])
+
+ sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+ SpikeTrain(st2, 5.0))
+
+ coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+ for i, t in enumerate(st1):
+ expected = sync_prof.y[sync_prof.x == t]/sync_prof.mp[sync_prof.x == t]
+ assert_equal(coincidences[i], expected,
+ "At index %d" % i)
+
+
+def test_filter():
+ st1 = SpikeTrain(np.array([1.0, 2.0, 3.0, 4.0]), 5.0)
+ st2 = SpikeTrain(np.array([1.1, 2.1, 3.8]), 5.0)
+ st3 = SpikeTrain(np.array([0.9, 3.1, 4.1]), 5.0)
+
+ # filtered_spike_trains = spk.filter_by_spike_sync([st1, st2], 0.5)
+
+ # assert_equal(filtered_spike_trains[0].spikes, [1.0, 2.0, 4.0])
+ # assert_equal(filtered_spike_trains[1].spikes, [1.1, 2.1, 3.8])
+
+ # filtered_spike_trains = spk.filter_by_spike_sync([st2, st1], 0.5)
+
+ # assert_equal(filtered_spike_trains[0].spikes, [1.1, 2.1, 3.8])
+ # assert_equal(filtered_spike_trains[1].spikes, [1.0, 2.0, 4.0])
+
+ filtered_spike_trains = spk.filter_by_spike_sync([st1, st2, st3], 0.75)
+
+ for st in filtered_spike_trains:
+ print(st.spikes)
+
+ assert_equal(filtered_spike_trains[0].spikes, [1.0, 4.0])
+ assert_equal(filtered_spike_trains[1].spikes, [1.1, 3.8])
+ assert_equal(filtered_spike_trains[2].spikes, [0.9, 4.1])
+
+
+if __name__ == "main":
+ test_single_prof()
+ test_filter()