path: root/pyspike
Diffstat (limited to 'pyspike')
-rw-r--r--   pyspike/DiscreteFunc.py                            250
-rw-r--r--   pyspike/PieceWiseConstFunc.py                      225
-rw-r--r--   pyspike/PieceWiseLinFunc.py                        270
-rw-r--r--   pyspike/SpikeTrain.py                               75
-rw-r--r--   pyspike/__init__.py                                 56
-rw-r--r--   pyspike/cython/__init__.py                           0
-rw-r--r--   pyspike/cython/cython_add.pyx                      232
-rw-r--r--   pyspike/cython/cython_directionality.pyx           262
-rw-r--r--   pyspike/cython/cython_distances.pyx                631
-rw-r--r--   pyspike/cython/cython_profiles.pyx                 485
-rw-r--r--   pyspike/cython/cython_simulated_annealing.pyx       82
-rw-r--r--   pyspike/cython/directionality_python_backend.py    144
-rw-r--r--   pyspike/cython/python_backend.py                   638
-rw-r--r--   pyspike/generic.py                                 150
-rw-r--r--   pyspike/isi_distance.py                            230
-rw-r--r--   pyspike/psth.py                                     34
-rw-r--r--   pyspike/spike_directionality.py                    522
-rw-r--r--   pyspike/spike_distance.py                          231
-rw-r--r--   pyspike/spike_sync.py                              341
-rw-r--r--   pyspike/spikes.py                                  161
20 files changed, 5019 insertions, 0 deletions
diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py
new file mode 100644
index 0000000..caad290
--- /dev/null
+++ b/pyspike/DiscreteFunc.py
@@ -0,0 +1,250 @@
+# Class representing discrete functions.
+# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
+# Distributed under the BSD License
+
+from __future__ import absolute_import, print_function
+
+import numpy as np
+import collections
+import pyspike
+
+
+##############################################################
+# DiscreteFunc
+##############################################################
+class DiscreteFunc(object):
+ """ A class representing values defined on a discrete set of points.
+ """
+
+ def __init__(self, x, y, multiplicity):
+ """ Constructs the discrete function.
+
+ :param x: array of length N defining the points at which the values are
+ defined.
+        :param y: array of length N defining the values at the points x.
+ :param multiplicity: array of length N defining the multiplicity of the
+ values.
+ """
+ # convert parameters to arrays, also ensures copying
+ self.x = np.array(x)
+ self.y = np.array(y)
+ self.mp = np.array(multiplicity)
+
+ def copy(self):
+ """ Returns a copy of itself
+
+ :rtype: :class:`DiscreteFunc`
+ """
+ return DiscreteFunc(self.x, self.y, self.mp)
+
+ def almost_equal(self, other, decimal=14):
+ """ Checks if the function is equal to another function up to `decimal`
+ precision.
+
+ :param other: another :class:`DiscreteFunc`
+ :returns: True if the two functions are equal up to `decimal` decimals,
+ False otherwise
+ :rtype: bool
+ """
+ eps = 10.0**(-decimal)
+ return np.allclose(self.x, other.x, atol=eps, rtol=0.0) and \
+ np.allclose(self.y, other.y, atol=eps, rtol=0.0) and \
+ np.allclose(self.mp, other.mp, atol=eps, rtol=0.0)
+
+ def get_plottable_data(self, averaging_window_size=0):
+ """ Returns two arrays containing x- and y-coordinates for plotting
+ the interval sequence. The optional parameter `averaging_window_size`
+ determines the size of an averaging window to smoothen the profile. If
+ this value is 0, no averaging is performed.
+
+ :param averaging_window_size: size of the averaging window, default=0.
+ :returns: (x_plot, y_plot) containing plottable data
+ :rtype: pair of np.array
+
+ Example::
+
+ x, y = f.get_plottable_data()
+ plt.plot(x, y, '-o', label="Discrete function")
+ """
+
+ if averaging_window_size > 0:
+ # for the averaged profile we have to take the multiplicity into
+            # account. Values with higher multiplicity should be considered
+            # as if they appeared several times. Hence we cannot know how many
+            # entries we have to consider to the left and right. Rather, we
+            # will iterate until the wanted multiplicity is reached.
+
+ # the first value in self.mp contains the number of averaged
+ # profiles without any possible extra multiplicities
+ # (by implementation)
+ expected_mp = (averaging_window_size+1) * int(self.mp[0])
+ y_plot = np.zeros_like(self.y)
+ # compute the values in a loop, could be done in cython if required
+ for i in range(len(y_plot)):
+
+ if self.mp[i] >= expected_mp:
+ # the current value contains already all the wanted
+ # multiplicity
+ y_plot[i] = self.y[i]/self.mp[i]
+ continue
+
+ # first look to the right
+ y = self.y[i]
+ mp_r = self.mp[i]
+ j = i+1
+ while j < len(y_plot):
+ if mp_r+self.mp[j] < expected_mp:
+                        # if we still don't reach the required multiplicity
+ # we take the whole value
+ y += self.y[j]
+ mp_r += self.mp[j]
+ else:
+ # otherwise, just some fraction
+ y += self.y[j] * (expected_mp - mp_r)/self.mp[j]
+ mp_r += (expected_mp - mp_r)
+ break
+ j += 1
+
+ # same story to the left
+ mp_l = self.mp[i]
+ j = i-1
+ while j >= 0:
+ if mp_l+self.mp[j] < expected_mp:
+ y += self.y[j]
+ mp_l += self.mp[j]
+ else:
+ y += self.y[j] * (expected_mp - mp_l)/self.mp[j]
+ mp_l += (expected_mp - mp_l)
+ break
+ j -= 1
+ y_plot[i] = y/(mp_l+mp_r-self.mp[i])
+ return 1.0*self.x, y_plot
+
+        else:  # no averaging (averaging_window_size == 0)
+
+ return 1.0*self.x, 1.0*self.y/self.mp
+
+ def integral(self, interval=None):
+ """ Returns the integral over the given interval. For the discrete
+ function, this amounts to two values: the sum over all values and the
+ sum over all multiplicities.
+
+ :param interval: integration interval given as a pair of floats, or a
+ sequence of pairs in case of multiple intervals, if
+ None the integral over the whole function is computed.
+ :type interval: Pair, sequence of pairs, or None.
+ :returns: the summed values and the summed multiplicity
+ :rtype: pair of float
+ """
+
+ value = 0.0
+ multiplicity = 0.0
+
+ def get_indices(ival):
+            """ Returns the indices surrounding the given interval """
+ start_ind = np.searchsorted(self.x, ival[0], side='right')
+ end_ind = np.searchsorted(self.x, ival[1], side='left')
+ assert start_ind > 0 and end_ind < len(self.x), \
+ "Invalid averaging interval"
+ return start_ind, end_ind
+
+ if interval is None:
+ # no interval given, integrate over the whole spike train
+            # don't count the first and last value, which are auxiliary by definition
+ value = 1.0 * np.sum(self.y[1:-1])
+ multiplicity = np.sum(self.mp[1:-1])
+ else:
+            # check if interval is a sequence
+ assert isinstance(interval, collections.Sequence), \
+ "Invalid value for `interval`. None, Sequence or Tuple \
+expected."
+ # check if interval is a sequence of intervals
+ if not isinstance(interval[0], collections.Sequence):
+ # find the indices corresponding to the interval
+ start_ind, end_ind = get_indices(interval)
+ value = np.sum(self.y[start_ind:end_ind])
+ multiplicity = np.sum(self.mp[start_ind:end_ind])
+ else:
+ for ival in interval:
+ # find the indices corresponding to the interval
+ start_ind, end_ind = get_indices(ival)
+ value += np.sum(self.y[start_ind:end_ind])
+ multiplicity += np.sum(self.mp[start_ind:end_ind])
+ return (value, multiplicity)
+
+ def avrg(self, interval=None, normalize=True):
+        """ Computes the average of the discrete function:
+        :math:`a = 1/N \\sum f_n`, where `N` is the summed multiplicity.
+
+ :param interval: averaging interval given as a pair of floats, a
+ sequence of pairs for averaging multiple intervals, or
+ None, if None the average over the whole function is
+ computed.
+ :type interval: Pair, sequence of pairs, or None.
+ :returns: the average a.
+ :rtype: float
+ """
+ val, mp = self.integral(interval)
+ if normalize:
+ if mp > 0:
+ return val/mp
+ else:
+ return 1.0
+ else:
+ return val
+
+ def add(self, f):
+ """ Adds another `DiscreteFunc` function to this function.
+ Note: only functions defined on the same interval can be summed.
+
+ :param f: :class:`DiscreteFunc` function to be added.
+ :rtype: None
+ """
+ assert self.x[0] == f.x[0], "The functions have different intervals"
+ assert self.x[-1] == f.x[-1], "The functions have different intervals"
+
+ # cython version
+ try:
+ from .cython.cython_add import add_discrete_function_cython as \
+ add_discrete_function_impl
+ except ImportError:
+ if not(pyspike.disable_backend_warning):
+ print("Warning: add_discrete_function_cython not found. Make \
+sure that PySpike is installed by running\n\
+'python setup.py build_ext --inplace'! \
+\n Falling back to slow python backend.")
+ # use python backend
+ from .cython.python_backend import add_discrete_function_python as \
+ add_discrete_function_impl
+
+ self.x, self.y, self.mp = \
+ add_discrete_function_impl(self.x, self.y, self.mp,
+ f.x, f.y, f.mp)
+
+ def mul_scalar(self, fac):
+ """ Multiplies the function with a scalar value
+
+ :param fac: Value to multiply
+ :type fac: double
+ :rtype: None
+ """
+ self.y *= fac
+
+
+def average_profile(profiles):
+ """ Computes the average profile from the given ISI- or SPIKE-profiles.
+
+ :param profiles: list of :class:`PieceWiseConstFunc` or
+ :class:`PieceWiseLinFunc` representing ISI- or
+ SPIKE-profiles to be averaged.
+    :returns: the averaged profile :math:`<S_{isi}>` or :math:`<S_{spike}>`.
+ :rtype: :class:`PieceWiseConstFunc` or :class:`PieceWiseLinFunc`
+ """
+ assert len(profiles) > 1
+
+ avrg_profile = profiles[0].copy()
+ for i in range(1, len(profiles)):
+ avrg_profile.add(profiles[i])
+ avrg_profile.mul_scalar(1.0/len(profiles)) # normalize
+
+ return avrg_profile
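How the value/multiplicity pair works becomes clearer in a small usage sketch (the numbers below are made up for illustration): `integral` sums values and multiplicities separately, skipping the auxiliary first and last points, and `avrg` divides the two sums::

    from pyspike import DiscreteFunc

    # the first and last points act as auxiliary edge values and are
    # skipped by integral()
    f = DiscreteFunc(x=[0.0, 0.3, 1.0], y=[0.0, 1.5, 0.5],
                     multiplicity=[1.0, 2.0, 1.0])
    print(f.integral())  # (1.5, 2.0): sums over y[1:-1] and mp[1:-1]
    print(f.avrg())      # 1.5/2.0 = 0.75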
diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py
new file mode 100644
index 0000000..17fdd3f
--- /dev/null
+++ b/pyspike/PieceWiseConstFunc.py
@@ -0,0 +1,225 @@
+# Class representing piece-wise constant functions.
+# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
+# Distributed under the BSD License
+
+from __future__ import absolute_import, print_function
+
+import numpy as np
+import collections
+import pyspike
+
+
+##############################################################
+# PieceWiseConstFunc
+##############################################################
+class PieceWiseConstFunc(object):
+ """ A class representing a piece-wise constant function. """
+
+ def __init__(self, x, y):
+ """ Constructs the piece-wise const function.
+
+ :param x: array of length N+1 defining the edges of the intervals of
+ the pwc function.
+ :param y: array of length N defining the function values at the
+ intervals.
+ """
+ # convert parameters to arrays, also ensures copying
+ self.x = np.array(x)
+ self.y = np.array(y)
+
+ def __call__(self, t):
+ """ Returns the function value for the given time t. If t is a list of
+ times, the corresponding list of values is returned.
+
+        :param t: time t, or list of times
+ :returns: function value(s) at that time(s).
+ """
+ assert np.all(t >= self.x[0]) and np.all(t <= self.x[-1]), \
+ "Invalid time: " + str(t)
+
+ ind = np.searchsorted(self.x, t, side='right')
+
+ if isinstance(t, collections.Sequence):
+ # t is a sequence of values
+ # correct the cases t == x[0], t == x[-1]
+ ind[ind == 0] = 1
+ ind[ind == len(self.x)] = len(self.x)-1
+ value = self.y[ind-1]
+ # correct the values at exact spike times: there the value should
+            # be halfway between the left and right limit
+ # obtain the 'left' side indices for t
+ ind_l = np.searchsorted(self.x, t, side='left')
+ # if left and right side indices differ, the time t has to appear
+ # in self.x
+ ind_at_spike = np.logical_and(np.logical_and(ind != ind_l,
+ ind > 1),
+ ind < len(self.x))
+ # get the corresponding indices for the resulting value array
+ val_ind = np.arange(len(ind))[ind_at_spike]
+ # and for the arrays self.x, y1, y2
+ xy_ind = ind[ind_at_spike]
+ # use the middle of the left and right ISI value
+ value[val_ind] = 0.5 * (self.y[xy_ind-1] + self.y[xy_ind-2])
+ return value
+ else: # t is a single value
+ # specific check for interval edges
+ if t == self.x[0]:
+ return self.y[0]
+ if t == self.x[-1]:
+ return self.y[-1]
+ # check if we are on any other exact spike time
+ if sum(self.x == t) > 0:
+ # use the middle of the left and right ISI value
+ return 0.5 * (self.y[ind-1] + self.y[ind-2])
+ return self.y[ind-1]
+
+ def copy(self):
+ """ Returns a copy of itself
+
+ :rtype: :class:`PieceWiseConstFunc`
+ """
+ return PieceWiseConstFunc(self.x, self.y)
+
+ def almost_equal(self, other, decimal=14):
+ """ Checks if the function is equal to another function up to `decimal`
+ precision.
+
+ :param other: another :class:`PieceWiseConstFunc`
+ :returns: True if the two functions are equal up to `decimal` decimals,
+ False otherwise
+ :rtype: bool
+ """
+ eps = 10.0**(-decimal)
+ return np.allclose(self.x, other.x, atol=eps, rtol=0.0) and \
+ np.allclose(self.y, other.y, atol=eps, rtol=0.0)
+
+ def get_plottable_data(self):
+        """ Returns two arrays containing x- and y-coordinates for immediate
+ plotting of the piece-wise function.
+
+ :returns: (x_plot, y_plot) containing plottable data
+ :rtype: pair of np.array
+
+ Example::
+
+ x, y = f.get_plottable_data()
+ plt.plot(x, y, '-o', label="Piece-wise const function")
+ """
+
+ x_plot = np.empty(2*len(self.x)-2)
+ x_plot[0] = self.x[0]
+ x_plot[1::2] = self.x[1:]
+ x_plot[2::2] = self.x[1:-1]
+ y_plot = np.empty(2*len(self.y))
+ y_plot[::2] = self.y
+ y_plot[1::2] = self.y
+
+ return x_plot, y_plot
+
+ def integral(self, interval=None):
+ """ Returns the integral over the given interval.
+
+ :param interval: integration interval given as a pair of floats, if
+ None the integral over the whole function is computed.
+ :type interval: Pair of floats or None.
+ :returns: the integral
+ :rtype: float
+ """
+ if interval is None:
+ # no interval given, integrate over the whole spike train
+ a = np.sum((self.x[1:]-self.x[:-1]) * self.y)
+ else:
+            if interval[0] > interval[1]:
+                raise ValueError("Invalid averaging interval: interval[0]>interval[1]")
+            if interval[0] < self.x[0]:
+                raise ValueError("Invalid averaging interval: interval[0]<self.x[0]")
+            if interval[1] > self.x[-1]:
+                raise ValueError("Invalid averaging interval: interval[1]>self.x[-1]")
+ # find the indices corresponding to the interval
+ start_ind = np.searchsorted(self.x, interval[0], side='right')
+ end_ind = np.searchsorted(self.x, interval[1], side='left')-1
+ if start_ind > end_ind:
+ # contribution from between two closest edges
+ a = (self.x[start_ind]-self.x[end_ind]) * self.y[end_ind]
+ # minus the part that is not within the interval
+ a -= ((interval[0]-self.x[end_ind])+(self.x[start_ind]-interval[1])) * self.y[end_ind]
+ else:
+ assert start_ind > 0 and end_ind < len(self.x), \
+ "Invalid averaging interval"
+ # first the contribution from between the indices
+ a = np.sum((self.x[start_ind+1:end_ind+1] -
+ self.x[start_ind:end_ind]) *
+ self.y[start_ind:end_ind])
+ # correction from start to first index
+ a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1]
+ # correction from last index to end
+ a += (interval[1]-self.x[end_ind]) * self.y[end_ind]
+ return a
+
+ def avrg(self, interval=None):
+ """ Computes the average of the piece-wise const function:
+ :math:`a = 1/T \int_0^T f(x) dx` where T is the length of the interval.
+
+ :param interval: averaging interval given as a pair of floats, a
+ sequence of pairs for averaging multiple intervals, or
+ None, if None the average over the whole function is
+ computed.
+ :type interval: Pair, sequence of pairs, or None.
+ :returns: the average a.
+ :rtype: float
+ """
+ if interval is None:
+ # no interval given, average over the whole spike train
+ return self.integral() / (self.x[-1]-self.x[0])
+
+        # check if interval is a sequence
+ assert isinstance(interval, collections.Sequence), \
+ "Invalid value for `interval`. None, Sequence or Tuple expected."
+ # check if interval is a sequence of intervals
+ if not isinstance(interval[0], collections.Sequence):
+ # just one interval
+ a = self.integral(interval) / (interval[1]-interval[0])
+ else:
+ # several intervals
+ a = 0.0
+ int_length = 0.0
+ for ival in interval:
+ a += self.integral(ival)
+ int_length += ival[1] - ival[0]
+ a /= int_length
+ return a
+
+ def add(self, f):
+ """ Adds another PieceWiseConst function to this function.
+ Note: only functions defined on the same interval can be summed.
+
+ :param f: :class:`PieceWiseConstFunc` function to be added.
+ :rtype: None
+ """
+ assert self.x[0] == f.x[0], "The functions have different intervals"
+ assert self.x[-1] == f.x[-1], "The functions have different intervals"
+
+ # cython version
+ try:
+ from .cython.cython_add import add_piece_wise_const_cython as \
+ add_piece_wise_const_impl
+ except ImportError:
+ if not(pyspike.disable_backend_warning):
+ print("Warning: add_piece_wise_const_cython not found. Make \
+sure that PySpike is installed by running\n \
+'python setup.py build_ext --inplace'! \
+\n Falling back to slow python backend.")
+ # use python backend
+ from .cython.python_backend import add_piece_wise_const_python as \
+ add_piece_wise_const_impl
+
+ self.x, self.y = add_piece_wise_const_impl(self.x, self.y, f.x, f.y)
+
+ def mul_scalar(self, fac):
+ """ Multiplies the function with a scalar value
+
+ :param fac: Value to multiply
+ :type fac: double
+ :rtype: None
+ """
+ self.y *= fac
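The evaluation rules above, in particular the halfway value at an inner edge, are easiest to see on a toy function (numbers made up for illustration)::

    from pyspike import PieceWiseConstFunc

    # 0.2 on [0, 1), 0.8 on [1, 3]
    f = PieceWiseConstFunc(x=[0.0, 1.0, 3.0], y=[0.2, 0.8])
    print(f(0.5))              # 0.2
    print(f(1.0))              # 0.5, halfway between the two pieces
    print(f.avrg())            # (1*0.2 + 2*0.8)/3 = 0.6
    print(f.avrg([0.5, 2.0]))  # (0.5*0.2 + 1.0*0.8)/1.5 = 0.6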
diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py
new file mode 100644
index 0000000..8faaec4
--- /dev/null
+++ b/pyspike/PieceWiseLinFunc.py
@@ -0,0 +1,270 @@
+# Class representing piece-wise linear functions.
+# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
+# Distributed under the BSD License
+
+from __future__ import absolute_import, print_function
+
+import numpy as np
+import collections
+import pyspike
+
+
+##############################################################
+# PieceWiseLinFunc
+##############################################################
+class PieceWiseLinFunc:
+ """ A class representing a piece-wise linear function. """
+
+ def __init__(self, x, y1, y2):
+ """ Constructs the piece-wise linear function.
+
+ :param x: array of length N+1 defining the edges of the intervals of
+ the pwc function.
+ :param y1: array of length N defining the function values at the left
+ of the intervals.
+ :param y2: array of length N defining the function values at the right
+ of the intervals.
+ """
+ # convert to array, which also ensures copying
+ self.x = np.array(x)
+ self.y1 = np.array(y1)
+ self.y2 = np.array(y2)
+
+ def __call__(self, t):
+ """ Returns the function value for the given time t. If t is a list of
+ times, the corresponding list of values is returned.
+
+        :param t: time t, or list of times
+ :returns: function value(s) at that time(s).
+ """
+ def intermediate_value(x0, x1, y0, y1, x):
+ """ computes the intermediate value of a linear function """
+ return y0 + (y1-y0)*(x-x0)/(x1-x0)
+
+ assert np.all(t >= self.x[0]) and np.all(t <= self.x[-1]), \
+ "Invalid time: " + str(t)
+
+ ind = np.searchsorted(self.x, t, side='right')
+
+ if isinstance(t, collections.Sequence):
+ # t is a sequence of values
+ # correct the cases t == x[0], t == x[-1]
+ ind[ind == 0] = 1
+ ind[ind == len(self.x)] = len(self.x)-1
+ value = intermediate_value(self.x[ind-1],
+ self.x[ind],
+ self.y1[ind-1],
+ self.y2[ind-1],
+ t)
+ # correct the values at exact spike times: there the value should
+            # be halfway between the left and right limit
+ # obtain the 'left' side indices for t
+ ind_l = np.searchsorted(self.x, t, side='left')
+ # if left and right side indices differ, the time t has to appear
+ # in self.x
+ ind_at_spike = np.logical_and(np.logical_and(ind != ind_l,
+ ind > 1),
+ ind < len(self.x))
+ # get the corresponding indices for the resulting value array
+ val_ind = np.arange(len(ind))[ind_at_spike]
+ # and for the values in self.x, y1, y2
+ xy_ind = ind[ind_at_spike]
+ # the values are defined as the average of the left and right limit
+ value[val_ind] = 0.5 * (self.y1[xy_ind-1] + self.y2[xy_ind-2])
+ return value
+ else: # t is a single value
+ # specific check for interval edges
+ if t == self.x[0]:
+ return self.y1[0]
+ if t == self.x[-1]:
+ return self.y2[-1]
+ # check if we are on any other exact spike time
+ if sum(self.x == t) > 0:
+ # use the middle of the left and right Spike value
+ return 0.5 * (self.y1[ind-1] + self.y2[ind-2])
+ return intermediate_value(self.x[ind-1],
+ self.x[ind],
+ self.y1[ind-1],
+ self.y2[ind-1],
+ t)
+
+ def copy(self):
+ """ Returns a copy of itself
+
+ :rtype: :class:`PieceWiseLinFunc`
+ """
+ return PieceWiseLinFunc(self.x, self.y1, self.y2)
+
+ def almost_equal(self, other, decimal=14):
+ """ Checks if the function is equal to another function up to `decimal`
+ precision.
+
+ :param other: another :class:`PieceWiseLinFunc`
+ :returns: True if the two functions are equal up to `decimal` decimals,
+ False otherwise
+ :rtype: bool
+ """
+ eps = 10.0**(-decimal)
+ return np.allclose(self.x, other.x, atol=eps, rtol=0.0) and \
+ np.allclose(self.y1, other.y1, atol=eps, rtol=0.0) and \
+ np.allclose(self.y2, other.y2, atol=eps, rtol=0.0)
+
+ def get_plottable_data(self):
+        """ Returns two arrays containing x- and y-coordinates for immediate
+ plotting of the piece-wise function.
+
+ :returns: (x_plot, y_plot) containing plottable data
+ :rtype: pair of np.array
+
+ Example::
+
+ x, y = f.get_plottable_data()
+            plt.plot(x, y, '-o', label="Piece-wise linear function")
+ """
+ x_plot = np.empty(2*len(self.x)-2)
+ x_plot[0] = self.x[0]
+ x_plot[1::2] = self.x[1:]
+ x_plot[2::2] = self.x[1:-1]
+ y_plot = np.empty_like(x_plot)
+ y_plot[0::2] = self.y1
+ y_plot[1::2] = self.y2
+ return x_plot, y_plot
+
+ def integral(self, interval=None):
+ """ Returns the integral over the given interval.
+
+ :param interval: integration interval given as a pair of floats, if
+ None the integral over the whole function is computed.
+ :type interval: Pair of floats or None.
+ :returns: the integral
+ :rtype: float
+ """
+
+ def intermediate_value(x0, x1, y0, y1, x):
+ """ computes the intermediate value of a linear function """
+ return y0 + (y1-y0)*(x-x0)/(x1-x0)
+
+ if interval is None:
+ # no interval given, integrate over the whole spike train
+ return np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2))
+
+ # find the indices corresponding to the interval
+ start_ind = np.searchsorted(self.x, interval[0], side='right')
+ end_ind = np.searchsorted(self.x, interval[1], side='left')-1
+ assert start_ind > 0 and end_ind < len(self.x), \
+ "Invalid averaging interval"
+ if start_ind > end_ind:
+ # contribution from between two closest edges
+ y_x0 = intermediate_value(self.x[start_ind-1],
+ self.x[start_ind],
+ self.y1[start_ind-1],
+ self.y2[start_ind-1],
+ interval[0])
+ y_x1 = intermediate_value(self.x[start_ind-1],
+ self.x[start_ind],
+ self.y1[start_ind-1],
+ self.y2[start_ind-1],
+ interval[1])
+            integral = (y_x0 + y_x1) * 0.5 * (interval[1] - interval[0])
+ else:
+ # first the contribution from between the indices
+ integral = np.sum((self.x[start_ind+1:end_ind+1] -
+ self.x[start_ind:end_ind]) *
+ 0.5*(self.y1[start_ind:end_ind] +
+ self.y2[start_ind:end_ind]))
+ # correction from start to first index
+ integral += (self.x[start_ind]-interval[0]) * 0.5 * \
+ (self.y2[start_ind-1] +
+ intermediate_value(self.x[start_ind-1],
+ self.x[start_ind],
+ self.y1[start_ind-1],
+ self.y2[start_ind-1],
+ interval[0]))
+ # correction from last index to end
+ integral += (interval[1]-self.x[end_ind]) * 0.5 * \
+ (self.y1[end_ind] +
+ intermediate_value(self.x[end_ind], self.x[end_ind+1],
+ self.y1[end_ind], self.y2[end_ind],
+ interval[1]
+ ))
+ return integral
+
+ def avrg(self, interval=None):
+ """ Computes the average of the piece-wise linear function:
+ :math:`a = 1/T \int_0^T f(x) dx` where T is the interval length.
+
+ :param interval: averaging interval given as a pair of floats, a
+ sequence of pairs for averaging multiple intervals, or
+ None, if None the average over the whole function is
+ computed.
+ :type interval: Pair, sequence of pairs, or None.
+ :returns: the average a.
+ :rtype: float
+
+ """
+
+ if interval is None:
+ # no interval given, average over the whole spike train
+ return self.integral() / (self.x[-1]-self.x[0])
+
+        # check if interval is a sequence
+ assert isinstance(interval, collections.Sequence), \
+ "Invalid value for `interval`. None, Sequence or Tuple expected."
+ # check if interval is a sequence of intervals
+ if not isinstance(interval[0], collections.Sequence):
+ # just one interval
+ a = self.integral(interval) / (interval[1]-interval[0])
+ else:
+ # several intervals
+ a = 0.0
+ int_length = 0.0
+ for ival in interval:
+ a += self.integral(ival)
+ int_length += ival[1] - ival[0]
+ a /= int_length
+ return a
+
+ def add(self, f):
+ """ Adds another PieceWiseLin function to this function.
+ Note: only functions defined on the same interval can be summed.
+
+ :param f: :class:`PieceWiseLinFunc` function to be added.
+ :rtype: None
+ """
+ assert self.x[0] == f.x[0], "The functions have different intervals"
+ assert self.x[-1] == f.x[-1], "The functions have different intervals"
+
+ # python implementation
+ # from .python_backend import add_piece_wise_lin_python
+ # self.x, self.y1, self.y2 = add_piece_wise_lin_python(
+ # self.x, self.y1, self.y2, f.x, f.y1, f.y2)
+
+ # cython version
+ try:
+ from .cython.cython_add import add_piece_wise_lin_cython as \
+ add_piece_wise_lin_impl
+ except ImportError:
+ if not(pyspike.disable_backend_warning):
+ print("Warning: add_piece_wise_lin_cython not found. Make \
+sure that PySpike is installed by running\n \
+'python setup.py build_ext --inplace'! \n \
+Falling back to slow python backend.")
+ # use python backend
+ from .cython.python_backend import add_piece_wise_lin_python as \
+ add_piece_wise_lin_impl
+
+ self.x, self.y1, self.y2 = add_piece_wise_lin_impl(
+ self.x, self.y1, self.y2, f.x, f.y1, f.y2)
+
+ def mul_scalar(self, fac):
+ """ Multiplies the function with a scalar value
+
+ :param fac: Value to multiply
+ :type fac: double
+ :rtype: None
+ """
+ self.y1 *= fac
+ self.y2 *= fac
diff --git a/pyspike/SpikeTrain.py b/pyspike/SpikeTrain.py
new file mode 100644
index 0000000..19f2419
--- /dev/null
+++ b/pyspike/SpikeTrain.py
@@ -0,0 +1,75 @@
+# Module containing the class representing spike trains for PySpike.
+# Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+# Distributed under the BSD License
+
+import numpy as np
+
+
+class SpikeTrain(object):
+ """ Class representing spike trains for the PySpike Module."""
+
+ def __init__(self, spike_times, edges, is_sorted=True):
+ """ Constructs the SpikeTrain.
+
+ :param spike_times: ordered array of spike times.
+ :param edges: The edges of the spike train. Given as a pair of floats
+ (T0, T1) or a single float T1, where then T0=0 is
+ assumed.
+        :param is_sorted: If `False`, the spike times will be sorted by
+                          `np.sort`.
+
+ """
+
+ # TODO: sanity checks
+ if is_sorted:
+ self.spikes = np.array(spike_times, dtype=float)
+ else:
+ self.spikes = np.sort(np.array(spike_times, dtype=float))
+
+ try:
+ self.t_start = float(edges[0])
+ self.t_end = float(edges[1])
+        except (TypeError, IndexError):  # edges given as a single number
+ self.t_start = 0.0
+ self.t_end = float(edges)
+
+ def __getitem__(self, index):
+ """ Returns the time of the spike given by index.
+
+ :param index: Index of the spike.
+ :return: spike time.
+ """
+ return self.spikes[index]
+
+ def __len__(self):
+ """ Returns the number of spikes.
+
+ :return: Number of spikes.
+ """
+ return len(self.spikes)
+
+ def sort(self):
+ """ Sorts the spike times of this spike train using `np.sort`
+ """
+ self.spikes = np.sort(self.spikes)
+
+ def copy(self):
+ """ Returns a copy of this spike train.
+ Use this function if you want to create a real (deep) copy of this
+ spike train. Simple assignment `t2 = t1` does not create a copy of the
+        spike train data, but only a reference, as a `numpy.array` is used
+        for storing the data.
+
+ :return: :class:`.SpikeTrain` copy of this spike train.
+
+ """
+ return SpikeTrain(self.spikes.copy(), [self.t_start, self.t_end])
+
+ def get_spikes_non_empty(self):
+ """Returns the spikes of this spike train with auxiliary spikes in case
+ of empty spike trains.
+ """
+ if len(self.spikes) < 1:
+ return np.unique(np.insert([self.t_start, self.t_end], 1,
+ self.spikes))
+ else:
+ return self.spikes
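A minimal construction sketch with arbitrary spike times::

    from pyspike import SpikeTrain

    st = SpikeTrain([3.1, 0.5, 2.0], edges=(0.0, 4.0), is_sorted=False)
    print(st.spikes)                 # [0.5 2.  3.1], sorted on construction
    print(len(st), st[0], st.t_end)  # 3 0.5 4.0
    st2 = st.copy()                  # real copy; `st2 = st` would only alias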
diff --git a/pyspike/__init__.py b/pyspike/__init__.py
new file mode 100644
index 0000000..3897d18
--- /dev/null
+++ b/pyspike/__init__.py
@@ -0,0 +1,56 @@
+"""
+Copyright 2014-2018, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+"""
+
+from __future__ import absolute_import
+
+__all__ = ["isi_distance", "spike_distance", "spike_sync", "psth",
+ "spikes", "spike_directionality", "SpikeTrain",
+ "PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc"]
+
+from .PieceWiseConstFunc import PieceWiseConstFunc
+from .PieceWiseLinFunc import PieceWiseLinFunc
+from .DiscreteFunc import DiscreteFunc
+from .SpikeTrain import SpikeTrain
+
+from .isi_distance import isi_profile, isi_distance, isi_profile_multi,\
+ isi_distance_multi, isi_distance_matrix
+from .spike_distance import spike_profile, spike_distance, spike_profile_multi,\
+ spike_distance_multi, spike_distance_matrix
+from .spike_sync import spike_sync_profile, spike_sync,\
+ spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix,\
+ filter_by_spike_sync
+from .psth import psth
+
+from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \
+ spike_train_from_string, import_spike_trains_from_time_series, \
+ merge_spike_trains, generate_poisson_spikes
+
+from .spike_directionality import spike_directionality, \
+ spike_directionality_values, spike_directionality_matrix, \
+ spike_train_order_profile, spike_train_order_profile_bi, \
+ spike_train_order_profile_multi, spike_train_order, \
+ spike_train_order_bi, spike_train_order_multi, \
+ optimal_spike_train_sorting, permutate_matrix
+
+# define the __version__ following
+# http://stackoverflow.com/questions/17583443
+from pkg_resources import get_distribution, DistributionNotFound
+import os.path
+
+try:
+ _dist = get_distribution('pyspike')
+ # Normalize case for Windows systems
+ dist_loc = os.path.normcase(_dist.location)
+ here = os.path.normcase(__file__)
+ if not here.startswith(os.path.join(dist_loc, 'pyspike')):
+ # not installed, but there is another version that *is*
+ raise DistributionNotFound
+except DistributionNotFound:
+ __version__ = 'Please install this project with setup.py'
+else:
+ __version__ = _dist.version
+
+disable_backend_warning = False
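The `disable_backend_warning` flag defined on the last line can be set by the user to silence the fallback warnings printed by the `add` methods when the cython extensions are not compiled, for example::

    import pyspike

    print(pyspike.__version__)
    pyspike.disable_backend_warning = True  # suppress backend warnings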
diff --git a/pyspike/cython/__init__.py b/pyspike/cython/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pyspike/cython/__init__.py
diff --git a/pyspike/cython/cython_add.pyx b/pyspike/cython/cython_add.pyx
new file mode 100644
index 0000000..25f1181
--- /dev/null
+++ b/pyspike/cython/cython_add.pyx
@@ -0,0 +1,232 @@
+#cython: boundscheck=False
+#cython: wraparound=False
+#cython: cdivision=True
+
+"""
+cython_add.pyx
+
+cython implementation of the add function for piece-wise const and
+piece-wise linear functions
+
+Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects
+improves the performance of spike_distance by a factor of 10!
+
+Copyright 2014, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+"""
+To test whether things can be optimized: remove all yellow stuff
+in the html output::
+
+ cython -a cython_add.pyx
+
+which gives::
+
+ cython_add.html
+
+"""
+
+import numpy as np
+cimport numpy as np
+
+from libc.math cimport fabs
+
+DTYPE = np.float
+ctypedef np.float_t DTYPE_t
+
+
+############################################################
+# add_piece_wise_const_cython
+############################################################
+def add_piece_wise_const_cython(double[:] x1, double[:] y1,
+ double[:] x2, double[:] y2):
+
+ cdef int N1 = len(x1)
+ cdef int N2 = len(x2)
+ cdef double[:] x_new = np.empty(N1+N2)
+ cdef double[:] y_new = np.empty(N1+N2-1)
+ cdef int index1 = 0
+ cdef int index2 = 0
+ cdef int index = 0
+ cdef int i
+ with nogil: # release the interpreter lock to allow multi-threading
+ x_new[0] = x1[0]
+ y_new[0] = y1[0] + y2[0]
+ while (index1+1 < N1-1) and (index2+1 < N2-1):
+ index += 1
+ # print(index1+1, x1[index1+1], y1[index1+1], x_new[index])
+ if x1[index1+1] < x2[index2+1]:
+ index1 += 1
+ x_new[index] = x1[index1]
+ elif x1[index1+1] > x2[index2+1]:
+ index2 += 1
+ x_new[index] = x2[index2]
+ else: # x1[index1+1] == x2[index2+1]:
+ index1 += 1
+ index2 += 1
+ x_new[index] = x1[index1]
+ y_new[index] = y1[index1] + y2[index2]
+ # one array reached the end -> copy the contents of the other to the end
+ if index1+1 < N1-1:
+ x_new[index+1:index+1+N1-index1-1] = x1[index1+1:]
+ for i in xrange(N1-index1-2):
+ y_new[index+1+i] = y1[index1+1+i] + y2[N2-2]
+ index += N1-index1-2
+ elif index2+1 < N2-1:
+ x_new[index+1:index+1+N2-index2-1] = x2[index2+1:]
+ for i in xrange(N2-index2-2):
+ y_new[index+1+i] = y2[index2+1+i] + y1[N1-2]
+ index += N2-index2-2
+ else: # both arrays reached the end simultaneously
+ # only the last x-value missing
+ x_new[index+1] = x1[N1-1]
+ # end nogil
+ return np.asarray(x_new[:index+2]), np.asarray(y_new[:index+1])
+
+
+############################################################
+# add_piece_wise_lin_cython
+############################################################
+def add_piece_wise_lin_cython(double[:] x1, double[:] y11, double[:] y12,
+ double[:] x2, double[:] y21, double[:] y22):
+ cdef int N1 = len(x1)
+ cdef int N2 = len(x2)
+ cdef double[:] x_new = np.empty(N1+N2)
+ cdef double[:] y1_new = np.empty(N1+N2-1)
+ cdef double[:] y2_new = np.empty_like(y1_new)
+ cdef int index1 = 0 # index for self
+ cdef int index2 = 0 # index for f
+ cdef int index = 0 # index for new
+ cdef int i
+ cdef double y
+ with nogil: # release the interpreter lock to allow multi-threading
+ x_new[0] = x1[0]
+ y1_new[0] = y11[0] + y21[0]
+ while (index1+1 < N1-1) and (index2+1 < N2-1):
+ # print(index1+1, x1[index1+1], self.y[index1+1], x_new[index])
+ if x1[index1+1] < x2[index2+1]:
+ # first compute the end value of the previous interval
+ # linear interpolation of the interval
+ y = y21[index2] + (y22[index2]-y21[index2]) * \
+ (x1[index1+1]-x2[index2]) / (x2[index2+1]-x2[index2])
+ y2_new[index] = y12[index1] + y
+ index1 += 1
+ index += 1
+ x_new[index] = x1[index1]
+ # and the starting value for the next interval
+ y1_new[index] = y11[index1] + y
+ elif x1[index1+1] > x2[index2+1]:
+ # first compute the end value of the previous interval
+ # linear interpolation of the interval
+ y = y11[index1] + (y12[index1]-y11[index1]) * \
+ (x2[index2+1]-x1[index1]) / \
+ (x1[index1+1]-x1[index1])
+ y2_new[index] = y22[index2] + y
+ index2 += 1
+ index += 1
+ x_new[index] = x2[index2]
+ # and the starting value for the next interval
+ y1_new[index] = y21[index2] + y
+ else: # x1[index1+1] == x2[index2+1]:
+ y2_new[index] = y12[index1] + y22[index2]
+ index1 += 1
+ index2 += 1
+ index += 1
+ x_new[index] = x1[index1]
+ y1_new[index] = y11[index1] + y21[index2]
+ # one array reached the end -> copy the contents of the other to the end
+ if index1+1 < N1-1:
+ x_new[index+1:index+1+N1-index1-1] = x1[index1+1:]
+ for i in xrange(N1-index1-2):
+ # compute the linear interpolations value
+ y = y21[index2] + (y22[index2]-y21[index2]) * \
+ (x1[index1+1+i]-x2[index2]) / (x2[index2+1]-x2[index2])
+ y1_new[index+1+i] = y11[index1+1+i] + y
+ y2_new[index+i] = y12[index1+i] + y
+ index += N1-index1-2
+ elif index2+1 < N2-1:
+ x_new[index+1:index+1+N2-index2-1] = x2[index2+1:]
+ # compute the linear interpolations values
+ for i in xrange(N2-index2-2):
+ y = y11[index1] + (y12[index1]-y11[index1]) * \
+ (x2[index2+1+i]-x1[index1]) / \
+ (x1[index1+1]-x1[index1])
+ y1_new[index+1+i] = y21[index2+1+i] + y
+ y2_new[index+i] = y22[index2+i] + y
+ index += N2-index2-2
+ else: # both arrays reached the end simultaneously
+ # only the last x-value missing
+ x_new[index+1] = x1[N1-1]
+ # finally, the end value for the last interval
+ y2_new[index] = y12[N1-2]+y22[N2-2]
+ # only use the data that was actually filled
+ # end nogil
+ return (np.asarray(x_new[:index+2]),
+ np.asarray(y1_new[:index+1]),
+ np.asarray(y2_new[:index+1]))
+
+
+############################################################
+# add_discrete_function_cython
+############################################################
+def add_discrete_function_cython(double[:] x1, double[:] y1, double[:] mp1,
+ double[:] x2, double[:] y2, double[:] mp2):
+
+ cdef double[:] x_new = np.empty(len(x1) + len(x2))
+ cdef double[:] y_new = np.empty_like(x_new)
+ cdef double[:] mp_new = np.empty_like(x_new)
+ cdef int index1 = 0
+ cdef int index2 = 0
+ cdef int index = 0
+ cdef int N1 = len(y1)-1
+ cdef int N2 = len(y2)-1
+ x_new[0] = x1[0]
+ while (index1+1 < N1) and (index2+1 < N2):
+ if x1[index1+1] < x2[index2+1]:
+ index1 += 1
+ index += 1
+ x_new[index] = x1[index1]
+ y_new[index] = y1[index1]
+ mp_new[index] = mp1[index1]
+ elif x1[index1+1] > x2[index2+1]:
+ index2 += 1
+ index += 1
+ x_new[index] = x2[index2]
+ y_new[index] = y2[index2]
+ mp_new[index] = mp2[index2]
+ else: # x1[index1+1] == x2[index2+1]
+ index1 += 1
+ index2 += 1
+ index += 1
+ x_new[index] = x1[index1]
+ y_new[index] = y1[index1] + y2[index2]
+ mp_new[index] = mp1[index1] + mp2[index2]
+ # one array reached the end -> copy the contents of the other to the end
+ if index1+1 < N1:
+ x_new[index+1:index+1+N1-index1] = x1[index1+1:]
+ y_new[index+1:index+1+N1-index1] = y1[index1+1:]
+ mp_new[index+1:index+1+N1-index1] = mp1[index1+1:]
+ index += N1-index1
+ elif index2+1 < N2:
+ x_new[index+1:index+1+N2-index2] = x2[index2+1:]
+ y_new[index+1:index+1+N2-index2] = y2[index2+1:]
+ mp_new[index+1:index+1+N2-index2] = mp2[index2+1:]
+ index += N2-index2
+ else: # both arrays reached the end simultaneously
+ x_new[index+1] = x1[index1+1]
+ y_new[index+1] = y1[index1+1] + y2[index2+1]
+ mp_new[index+1] = mp1[index1+1] + mp2[index2+1]
+ index += 1
+
+ y_new[0] = y_new[1]
+ mp_new[0] = mp_new[1]
+
+ # the last value is again the end of the interval
+ # only use the data that was actually filled
+ return (np.asarray(x_new[:index+1]),
+ np.asarray(y_new[:index+1]),
+ np.asarray(mp_new[:index+1]))
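What the merge loops produce is easiest to see on a small input. A usage sketch for the piece-wise constant case, assuming the extension has been compiled via `python setup.py build_ext --inplace` (otherwise the equivalent functions from `python_backend.py` apply)::

    import numpy as np
    from pyspike.cython.cython_add import add_piece_wise_const_cython

    x1 = np.array([0.0, 1.0, 2.0]); y1 = np.array([0.5, 1.0])
    x2 = np.array([0.0, 1.5, 2.0]); y2 = np.array([1.0, 2.0])
    x, y = add_piece_wise_const_cython(x1, y1, x2, y2)
    # edges of both functions are merged, values are summed:
    # x = [0.0, 1.0, 1.5, 2.0],  y = [1.5, 2.0, 3.0]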
diff --git a/pyspike/cython/cython_directionality.pyx b/pyspike/cython/cython_directionality.pyx
new file mode 100644
index 0000000..ac37690
--- /dev/null
+++ b/pyspike/cython/cython_directionality.pyx
@@ -0,0 +1,262 @@
+#cython: boundscheck=False
+#cython: wraparound=False
+#cython: cdivision=True
+
+"""
+cython_directionality.pyx
+
+cython implementation of the spike delay asymmetry measures
+
+Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+"""
+To test whether things can be optimized: remove all yellow stuff
+in the html output::
+
+ cython -a cython_directionality.pyx
+
+which gives::
+
+ cython_directionality.html
+
+"""
+
+import numpy as np
+cimport numpy as np
+
+from libc.math cimport fabs
+from libc.math cimport fmax
+from libc.math cimport fmin
+
+# from pyspike.cython.cython_distances cimport get_tau
+
+DTYPE = np.float
+ctypedef np.float_t DTYPE_t
+
+
+############################################################
+# get_tau
+############################################################
+cdef inline double get_tau(double[:] spikes1, double[:] spikes2,
+ int i, int j, double interval, double max_tau):
+ cdef double m = interval # use interval length as initial tau
+ cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1
+ cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1
+ if i < N1 and i > -1:
+ m = fmin(m, spikes1[i+1]-spikes1[i])
+ if j < N2 and j > -1:
+ m = fmin(m, spikes2[j+1]-spikes2[j])
+ if i > 0:
+ m = fmin(m, spikes1[i]-spikes1[i-1])
+ if j > 0:
+ m = fmin(m, spikes2[j]-spikes2[j-1])
+ m *= 0.5
+ if max_tau > 0.0:
+ m = fmin(m, max_tau)
+ return m
+
+
+############################################################
+# spike_train_order_profile_cython
+############################################################
+def spike_train_order_profile_cython(double[:] spikes1, double[:] spikes2,
+ double t_start, double t_end,
+ double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int i = -1
+ cdef int j = -1
+ cdef int n = 0
+ cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times
+ cdef double[:] a = np.zeros(N1 + N2 + 2) # asymmetry values
+ cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity
+ cdef double interval = t_end - t_start
+ cdef double tau
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ st[n] = spikes1[i]
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 after spike train 2
+ # both get marked with -1
+ a[n] = -1
+ a[n-1] = -1
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ st[n] = spikes2[j]
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 before spike train 2
+ # both get marked with 1
+ a[n] = 1
+ a[n-1] = 1
+        else:  # spikes1[i+1] == spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ n += 1
+ # add only one event with zero asymmetry value and multiplicity 2
+ st[n] = spikes1[i]
+ a[n] = 0
+ mp[n] = 2
+
+ st = st[:n+2]
+ a = a[:n+2]
+ mp = mp[:n+2]
+
+ st[0] = t_start
+ st[len(st)-1] = t_end
+ if N1 + N2 > 0:
+ a[0] = a[1]
+ a[len(a)-1] = a[len(a)-2]
+ mp[0] = mp[1]
+ mp[len(mp)-1] = mp[len(mp)-2]
+ else:
+ a[0] = 1
+ a[1] = 1
+
+ return st, a, mp
+
+
+############################################################
+# spike_train_order_cython
+############################################################
+def spike_train_order_cython(double[:] spikes1, double[:] spikes2,
+ double t_start, double t_end, double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int i = -1
+ cdef int j = -1
+ cdef int d = 0
+ cdef int mp = 0
+ cdef double interval = t_end - t_start
+ cdef double tau
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ mp += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike in spike train 2 appeared before spike in spike train 1
+                # both spikes are marked with -1
+ d -= 2
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ mp += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike in spike train 1 appeared before spike in spike train 2
+                # both spikes are marked with +1
+ d += 2
+        else:  # spikes1[i+1] == spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ # add only one event with multiplicity 2, but no asymmetry counting
+ mp += 2
+
+ if d == 0 and mp == 0:
+        # empty spike trains -> spike train order = 1 by definition
+ d = 1
+ mp = 1
+
+ return d, mp
+
+
+############################################################
+# spike_directionality_profiles_cython
+############################################################
+def spike_directionality_profiles_cython(double[:] spikes1,
+ double[:] spikes2,
+ double t_start, double t_end,
+ double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int i = -1
+ cdef int j = -1
+ cdef double[:] d1 = np.zeros(N1) # directionality values
+ cdef double[:] d2 = np.zeros(N2) # directionality values
+ cdef double interval = t_end - t_start
+ cdef double tau
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 after spike train 2
+ # leading spike gets +1, following spike -1
+ d1[i] = -1
+ d2[j] = +1
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 before spike train 2
+ # leading spike gets +1, following spike -1
+ d1[i] = +1
+ d2[j] = -1
+        else:  # spikes1[i+1] == spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ # equal spike times: zero asymmetry value
+ d1[i] = 0
+ d2[j] = 0
+
+ return d1, d2
+
+
+############################################################
+# spike_directionality_cython
+############################################################
+def spike_directionality_cython(double[:] spikes1,
+ double[:] spikes2,
+ double t_start, double t_end,
+ double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int i = -1
+ cdef int j = -1
+ cdef int d = 0 # directionality value
+ cdef double interval = t_end - t_start
+ cdef double tau
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 after spike train 2
+ # leading spike gets +1, following spike -1
+ d -= 1
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 before spike train 2
+ # leading spike gets +1, following spike -1
+ d += 1
+        else:  # spikes1[i+1] == spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+
+ return d
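All four functions above classify a pair of spikes as coincident when their time difference falls below the adaptive window computed by `get_tau`. A pure-Python transcription of that helper (called `get_tau_py` here purely for illustration)::

    def get_tau_py(spikes1, spikes2, i, j, interval, max_tau):
        # start from the whole interval length, then take half the minimum
        # of the inter-spike intervals surrounding spikes1[i] and spikes2[j]
        m = interval
        if -1 < i < len(spikes1) - 1:
            m = min(m, spikes1[i+1] - spikes1[i])
        if -1 < j < len(spikes2) - 1:
            m = min(m, spikes2[j+1] - spikes2[j])
        if i > 0:
            m = min(m, spikes1[i] - spikes1[i-1])
        if j > 0:
            m = min(m, spikes2[j] - spikes2[j-1])
        m *= 0.5
        if max_tau > 0.0:
            m = min(m, max_tau)  # optional hard cap on the window
        return m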
diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx
new file mode 100644
index 0000000..d4070ae
--- /dev/null
+++ b/pyspike/cython/cython_distances.pyx
@@ -0,0 +1,631 @@
+#cython: boundscheck=False
+#cython: wraparound=False
+#cython: cdivision=True
+
+"""
+cython_distances.pyx
+
+cython implementation of the isi-, spike- and spike-sync distances
+
+Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects
+improves the performance of spike_distance by a factor of 10!
+
+Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+"""
+To test whether things can be optimized: remove all yellow stuff
+in the html output::
+
+ cython -a cython_distances.pyx
+
+which gives::
+
+ cython_distances.html
+
+"""
+
+import numpy as np
+cimport numpy as np
+
+from libc.math cimport fabs
+from libc.math cimport fmax
+from libc.math cimport fmin
+
+DTYPE = np.float
+ctypedef np.float_t DTYPE_t
+
+
+############################################################
+# isi_distance_cython
+############################################################
+def isi_distance_cython(double[:] s1, double[:] s2,
+ double t_start, double t_end):
+
+ cdef double isi_value
+ cdef int index1, index2, index
+ cdef int N1, N2
+ cdef double nu1, nu2
+ cdef double last_t, curr_t, curr_isi
+ isi_value = 0.0
+ N1 = len(s1)
+ N2 = len(s2)
+
+ # first interspike interval - check if a spike exists at the start time
+ # and also account for spike trains with single spikes
+ if s1[0] > t_start:
+ # edge correction for the first interspike interval:
+ # take the maximum of the distance from the beginning to the first
+ # spike and the interval between the first two spikes.
+    # if there is only one spike, take its distance to the beginning
+ nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) if N1 > 1 else s1[0]-t_start
+ index1 = -1
+ else:
+ # if the first spike is exactly at the start, take the distance
+ # to the next spike. If this is the only spike, take the distance to
+ # the end.
+ nu1 = s1[1]-s1[0] if N1 > 1 else t_end-s1[0]
+ index1 = 0
+
+ if s2[0] > t_start:
+ # edge correction as above
+ nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) if N2 > 1 else s2[0]-t_start
+ index2 = -1
+ else:
+ nu2 = s2[1]-s2[0] if N2 > 1 else t_end-s2[0]
+ index2 = 0
+
+ last_t = t_start
+ curr_isi = fabs(nu1-nu2)/fmax(nu1, nu2)
+ index = 1
+
+ with nogil: # release the interpreter to allow multithreading
+ while index1+index2 < N1+N2-2:
+ # check which spike is next, only if there are spikes left in 1
+ # next spike in 1 is earlier, or there are no spikes left in 2
+ if (index1 < N1-1) and ((index2 == N2-1) or
+ (s1[index1+1] < s2[index2+1])):
+ index1 += 1
+ curr_t = s1[index1]
+ if index1 < N1-1:
+ nu1 = s1[index1+1]-s1[index1]
+ else:
+ # edge correction for the last ISI:
+ # take the max of the distance of the last
+ # spike to the end and the previous ISI. If there was only
+ # one spike, always take the distance to the end.
+ nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \
+ else t_end-s1[index1]
+ elif (index2 < N2-1) and ((index1 == N1-1) or
+ (s1[index1+1] > s2[index2+1])):
+ index2 += 1
+ curr_t = s2[index2]
+ if index2 < N2-1:
+ nu2 = s2[index2+1]-s2[index2]
+ else:
+ # edge correction for the end as above
+ nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \
+ else t_end-s2[index2]
+ else: # s1[index1+1] == s2[index2+1]
+ index1 += 1
+ index2 += 1
+ curr_t = s1[index1]
+ if index1 < N1-1:
+ nu1 = s1[index1+1]-s1[index1]
+ else:
+ # edge correction for the end as above
+ nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \
+ else t_end-s1[index1]
+ if index2 < N2-1:
+ nu2 = s2[index2+1]-s2[index2]
+ else:
+ # edge correction for the end as above
+ nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \
+ else t_end-s2[index2]
+ # compute the corresponding isi-distance
+ isi_value += curr_isi * (curr_t - last_t)
+ curr_isi = fabs(nu1 - nu2) / fmax(nu1, nu2)
+ last_t = curr_t
+ index += 1
+
+ isi_value += curr_isi * (t_end - last_t)
+ # end nogil
+
+ return isi_value / (t_end-t_start)
+
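This kernel is not meant to be called directly; it is reached through the `pyspike.isi_distance` front-end defined in `isi_distance.py` (see the diffstat above). A minimal usage sketch::

    from pyspike import SpikeTrain, isi_distance

    st1 = SpikeTrain([1.0, 2.0, 3.0], edges=(0.0, 4.0))
    st2 = SpikeTrain([1.5, 3.0], edges=(0.0, 4.0))
    d = isi_distance(st1, st2)  # time average of |nu1-nu2|/max(nu1, nu2)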
+
+############################################################
+# get_min_dist_cython
+############################################################
+cdef inline double get_min_dist_cython(double spike_time,
+ double[:] spike_train,
+ # use memory view to ensure inlining
+ # np.ndarray[DTYPE_t,ndim=1] spike_train,
+ int N,
+ int start_index,
+ double t_start, double t_end) nogil:
+ """ Returns the minimal distance |spike_time - spike_train[i]|
+ with i>=start_index.
+ """
+ cdef double d, d_temp
+ # start with the distance to the start time
+ d = fabs(spike_time - t_start)
+ if start_index < 0:
+ start_index = 0
+ while start_index < N:
+ d_temp = fabs(spike_time - spike_train[start_index])
+ if d_temp > d:
+ return d
+ else:
+ d = d_temp
+ start_index += 1
+
+ # finally, check the distance to end time
+ d_temp = fabs(t_end - spike_time)
+ if d_temp > d:
+ return d
+ else:
+ return d_temp
+
+
+############################################################
+# isi_avrg_cython
+############################################################
+cdef inline double isi_avrg_cython(double isi1, double isi2) nogil:
+ return 0.5*(isi1+isi2)*(isi1+isi2)
+ # alternative definition to obtain <S> ~ 0.5 for Poisson spikes
+ # return 0.5*(isi1*isi1+isi2*isi2)
+ # another alternative definition without second normalization
+ # return 0.5*(isi1+isi2)
+
+
+############################################################
+# spike_distance_cython
+############################################################
+def spike_distance_cython(double[:] t1, double[:] t2,
+ double t_start, double t_end):
+
+ cdef int N1, N2, index1, index2, index
+ cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2
+ cdef double isi1, isi2, s1, s2
+    cdef double y_start, y_end, t_last, t_curr, spike_value
+ cdef double[:] t_aux1 = np.empty(2)
+ cdef double[:] t_aux2 = np.empty(2)
+
+ spike_value = 0.0
+
+ N1 = len(t1)
+ N2 = len(t2)
+
+    # we can assume at least one spike per spike train
+ assert N1 > 0
+ assert N2 > 0
+
+
+ with nogil: # release the interpreter to allow multithreading
+ t_last = t_start
+ # auxiliary spikes for edge correction - consistent with first/last ISI
+ t_aux1[0] = fmin(t_start, 2*t1[0]-t1[1]) if N1 > 1 else t_start
+ t_aux1[1] = fmax(t_end, 2*t1[N1-1]-t1[N1-2]) if N1 > 1 else t_end
+ t_aux2[0] = fmin(t_start, 2*t2[0]-t2[1]) if N2 > 1 else t_start
+        t_aux2[1] = fmax(t_end, 2*t2[N2-1]-t2[N2-2]) if N2 > 1 else t_end
+ # print "aux spikes %.15f, %.15f ; %.15f, %.15f" % (t_aux1[0], t_aux1[1], t_aux2[0], t_aux2[1])
+ t_p1 = t_start if (t1[0] == t_start) else t_aux1[0]
+ t_p2 = t_start if (t2[0] == t_start) else t_aux2[0]
+ if t1[0] > t_start:
+ # dt_p1 = t2[0]-t_start
+ t_f1 = t1[0]
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) if N1 > 1 else t_f1-t_start
+ dt_p1 = dt_f1
+ # s1 = dt_p1*(t_f1-t_start)/isi1
+ s1 = dt_p1
+ index1 = -1
+ else: # t1[0] == t_start
+ t_f1 = t1[1] if N1 > 1 else t_end
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ dt_p1 = get_min_dist_cython(t_p1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ isi1 = t_f1-t1[0]
+ s1 = dt_p1
+ index1 = 0
+ if t2[0] > t_start:
+ # dt_p1 = t2[0]-t_start
+ t_f2 = t2[0]
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1])
+ dt_p2 = dt_f2
+ isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) if N2 > 1 else t_f2-t_start
+ # s2 = dt_p2*(t_f2-t_start)/isi2
+ s2 = dt_p2
+ index2 = -1
+ else: # t2[0] == t_start
+ t_f2 = t2[1] if N2 > 1 else t_end
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1])
+ # dt_p2 = t_start-t_p1 # 0.0
+ dt_p2 = get_min_dist_cython(t_p2, t1, N1, 0, t_aux1[0], t_aux1[1])
+ isi2 = t_f2-t2[0]
+ s2 = dt_p2
+ index2 = 0
+
+ y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2)
+ index = 1
+
+ while index1+index2 < N1+N2-2:
+ # print(index, index1, index2)
+ if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1):
+ index1 += 1
+ # first calculate the previous interval end value
+ s1 = dt_f1*(t_f1-t_p1) / isi1
+ # the previous time now was the following time before:
+ dt_p1 = dt_f1
+ t_p1 = t_f1 # t_p1 contains the current time point
+ # get the next time
+ if index1 < N1-1:
+ t_f1 = t1[index1+1]
+ else:
+ t_f1 = t_aux1[1]
+ t_curr = t_p1
+ s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
+ y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2)
+
+ spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+
+ # now the next interval start value
+ if index1 < N1-1:
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
+ t_aux2[0], t_aux2[1])
+ isi1 = t_f1-t_p1
+ s1 = dt_p1
+ else:
+ dt_f1 = dt_p1
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
+ # s1 needs adjustment due to change of isi1
+ # s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # Eero's correction: no adjustment
+ s1 = dt_p1
+ # s2 is the same as above, thus we can compute y2 immediately
+ y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2)
+ elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1):
+ index2 += 1
+ # first calculate the previous interval end value
+ s2 = dt_f2*(t_f2-t_p2) / isi2
+ # the previous time now was the following time before:
+ dt_p2 = dt_f2
+ t_p2 = t_f2 # t_p2 contains the current time point
+ # get the next time
+ if index2 < N2-1:
+ t_f2 = t2[index2+1]
+ else:
+ t_f2 = t_aux2[1]
+ t_curr = t_p2
+ s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
+ y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2)
+
+ spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+
+ # now the next interval start value
+ if index2 < N2-1:
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
+ t_aux1[0], t_aux1[1])
+ isi2 = t_f2-t_p2
+ s2 = dt_p2
+ else:
+ dt_f2 = dt_p2
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
+ # s2 needs adjustment due to change of isi2
+ # s2 = dt_p2*(t_end-t2[N2-1])/isi2
+ # Eero's correction: no adjustment
+ s2 = dt_p2
+ # s1 is the same as above, thus we can compute y2 immediately
+ y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2)
+
+ else: # t_f1 == t_f2 - generate only one event
+ index1 += 1
+ index2 += 1
+ t_p1 = t_f1
+ t_p2 = t_f2
+ dt_p1 = 0.0
+ dt_p2 = 0.0
+ t_curr = t_f1
+ y_end = 0.0
+ spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+ y_start = 0.0
+ if index1 < N1-1:
+ t_f1 = t1[index1+1]
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
+ t_aux2[0], t_aux2[1])
+ isi1 = t_f1 - t_p1
+ else:
+ t_f1 = t_aux1[1]
+ dt_f1 = dt_p1
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
+ if index2 < N2-1:
+ t_f2 = t2[index2+1]
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
+ t_aux1[0], t_aux1[1])
+ isi2 = t_f2 - t_p2
+ else:
+ t_f2 = t_aux2[1]
+ dt_f2 = dt_p2
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
+ index += 1
+ t_last = t_curr
+ # isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ # isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ s1 = dt_f1 # *(t_end-t1[N1-1])/isi1
+ s2 = dt_f2 # *(t_end-t2[N2-1])/isi2
+ y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2)
+
+ spike_value += 0.5*(y_start + y_end) * (t_end - t_last)
+ # end nogil
+
+ # use only the data added above
+ # could be less than original length due to equal spike times
+ return spike_value / (t_end-t_start)
+
+
+############################################################
+# isi_avrg_rf_cython
+############################################################
+cdef inline double isi_avrg_rf_cython(double isi1, double isi2) nogil:
+ # rate free version
+ return (isi1+isi2)
+
+
+############################################################
+# spike_distance_rf_cython
+############################################################
+def spike_distance_rf_cython(double[:] t1, double[:] t2,
+ double t_start, double t_end):
+
+ cdef int N1, N2, index1, index2, index
+ cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2
+ cdef double isi1, isi2, s1, s2
+    cdef double y_start, y_end, t_last, t_curr, spike_value
+
+ spike_value = 0.0
+
+ N1 = len(t1)
+ N2 = len(t2)
+
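+    # note: unlike the variant above, this rate-free version indexes
+    # t1[1] and t1[N1-2] without N > 1 guards, so it implicitly assumes
+    # at least two spikes per train; make that assumption explicit
+    assert N1 > 1 and N2 > 1, "rate-free variant needs >= 2 spikes per train"
+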
+ with nogil: # release the interpreter to allow multithreading
+ t_last = t_start
+ t_p1 = t_start
+ t_p2 = t_start
+ if t1[0] > t_start:
+ # dt_p1 = t2[0]-t_start
+ t_f1 = t1[0]
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
+ isi1 = fmax(t_f1-t_start, t1[1]-t1[0])
+ dt_p1 = dt_f1
+ s1 = dt_p1*(t_f1-t_start)/isi1
+ index1 = -1
+ else:
+ t_f1 = t1[1]
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
+ dt_p1 = 0.0
+ isi1 = t1[1]-t1[0]
+ s1 = dt_p1
+ index1 = 0
+ if t2[0] > t_start:
+ # dt_p1 = t2[0]-t_start
+ t_f2 = t2[0]
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
+ dt_p2 = dt_f2
+ isi2 = fmax(t_f2-t_start, t2[1]-t2[0])
+ s2 = dt_p2*(t_f2-t_start)/isi2
+ index2 = -1
+ else:
+ t_f2 = t2[1]
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
+ dt_p2 = 0.0
+ isi2 = t2[1]-t2[0]
+ s2 = dt_p2
+ index2 = 0
+
+ # y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+ index = 1
+
+ while index1+index2 < N1+N2-2:
+ # print(index, index1, index2)
+ if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1):
+ index1 += 1
+ # first calculate the previous interval end value
+ s1 = dt_f1*(t_f1-t_p1) / isi1
+ # the previous time now was the following time before:
+ dt_p1 = dt_f1
+ t_p1 = t_f1 # t_p1 contains the current time point
+ # get the next time
+ if index1 < N1-1:
+ t_f1 = t1[index1+1]
+ else:
+ t_f1 = t_end
+ t_curr = t_p1
+ s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
+ # y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+
+ spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+
+ # now the next interval start value
+ if index1 < N1-1:
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
+ t_start, t_end)
+ isi1 = t_f1-t_p1
+ s1 = dt_p1
+ else:
+ dt_f1 = dt_p1
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ # s1 needs adjustment due to change of isi1
+ s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # s2 is the same as above, thus we can compute y2 immediately
+ # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+ elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1):
+ index2 += 1
+ # first calculate the previous interval end value
+ s2 = dt_f2*(t_f2-t_p2) / isi2
+ # the previous time now was the following time before:
+ dt_p2 = dt_f2
+ t_p2 = t_f2 # t_p2 contains the current time point
+ # get the next time
+ if index2 < N2-1:
+ t_f2 = t2[index2+1]
+ else:
+ t_f2 = t_end
+ t_curr = t_p2
+ s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
+ # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+
+ spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+
+ # now the next interval start value
+ if index2 < N2-1:
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
+ t_start, t_end)
+ isi2 = t_f2-t_p2
+ s2 = dt_p2
+ else:
+ dt_f2 = dt_p2
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ # s2 needs adjustment due to change of isi2
+ s2 = dt_p2*(t_end-t2[N2-1])/isi2
+ # s1 is the same as above, thus we can compute y2 immediately
+ # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+
+ else: # t_f1 == t_f2 - generate only one event
+ index1 += 1
+ index2 += 1
+ t_p1 = t_f1
+ t_p2 = t_f2
+ dt_p1 = 0.0
+ dt_p2 = 0.0
+ t_curr = t_f1
+ y_end = 0.0
+ spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+ y_start = 0.0
+ if index1 < N1-1:
+ t_f1 = t1[index1+1]
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
+ t_start, t_end)
+ isi1 = t_f1 - t_p1
+ else:
+ t_f1 = t_end
+ dt_f1 = dt_p1
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ if index2 < N2-1:
+ t_f2 = t2[index2+1]
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
+ t_start, t_end)
+ isi2 = t_f2 - t_p2
+ else:
+ t_f2 = t_end
+ dt_f2 = dt_p2
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ index += 1
+ t_last = t_curr
+ # isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ # isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ s1 = dt_f1*(t_end-t1[N1-1])/isi1
+ s2 = dt_f2*(t_end-t2[N2-1])/isi2
+ # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+
+ spike_value += 0.5*(y_start + y_end) * (t_end - t_last)
+ # end nogil
+
+ # use only the data added above
+ # could be less than original length due to equal spike times
+ return spike_value / (t_end-t_start)
+
+
+
+############################################################
+# get_tau
+############################################################
+cdef inline double get_tau(double[:] spikes1, double[:] spikes2,
+ int i, int j, double interval, double max_tau):
+ cdef double m = interval # use interval length as initial tau
+ cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1
+ cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1
+ if i < N1 and i > -1:
+ m = fmin(m, spikes1[i+1]-spikes1[i])
+ if j < N2 and j > -1:
+ m = fmin(m, spikes2[j+1]-spikes2[j])
+ if i > 0:
+ m = fmin(m, spikes1[i]-spikes1[i-1])
+ if j > 0:
+ m = fmin(m, spikes2[j]-spikes2[j-1])
+ m *= 0.5
+ if max_tau > 0.0:
+ m = fmin(m, max_tau)
+ return m
+
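+# A worked illustration (hypothetical numbers): for spikes1 = [2, 4, 8]
+# and spikes2 = [3, 9] with i = 1, j = 0, the neighbouring inter-spike
+# intervals are 2, 4 and 6, so tau = 0.5 * min(2, 4, 6) = 1.0, assuming
+# the interval length exceeds 2 and max_tau does not cap the result.
+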
+
+############################################################
+# coincidence_value_cython
+############################################################
+def coincidence_value_cython(double[:] spikes1, double[:] spikes2,
+ double t_start, double t_end, double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int i = -1
+ cdef int j = -1
+ cdef double coinc = 0.0
+ cdef double mp = 0.0
+ cdef double interval = t_end - t_start
+ cdef double tau
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ mp += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # both get marked with 1
+ coinc += 2
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ mp += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # both get marked with 1
+ coinc += 2
+        else:  # spikes1[i+1] == spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ # add only one event, but with coincidence 2 and multiplicity 2
+ mp += 2
+ coinc += 2
+
+ return coinc, mp
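+
+
+# Sketch of how the returned pair is typically consumed (an assumption
+# based on the spike_sync wrapper of this package, not code from this
+# file):
+#
+#   coinc, mp = coincidence_value_cython(st1, st2, t_start, t_end, 0.0)
+#   sync = coinc/mp if mp > 0.0 else 1.0  # empty trains count as synchronous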
diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx
new file mode 100644
index 0000000..aa24db4
--- /dev/null
+++ b/pyspike/cython/cython_profiles.pyx
@@ -0,0 +1,485 @@
+#cython: boundscheck=False
+#cython: wraparound=False
+#cython: cdivision=True
+
+"""
+cython_profiles.pyx
+
+cython implementation of the isi-, spike- and spike-sync profiles
+
+Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects
+improves the performance of spike_distance by a factor of 10!
+
+Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+"""
+To test whether things can be optimized: remove all yellow stuff
+in the html output::
+
+ cython -a cython_profiles.pyx
+
+which gives::
+
+ cython_profiles.html
+
+"""
+
+import numpy as np
+cimport numpy as np
+
+from libc.math cimport fabs
+from libc.math cimport fmax
+from libc.math cimport fmin
+
+DTYPE = np.float64
+ctypedef np.float64_t DTYPE_t
+
+
+############################################################
+# isi_profile_cython
+############################################################
+def isi_profile_cython(double[:] s1, double[:] s2,
+ double t_start, double t_end):
+
+ cdef double[:] spike_events
+ cdef double[:] isi_values
+ cdef int index1, index2, index
+ cdef int N1, N2
+ cdef double nu1, nu2
+ N1 = len(s1)
+ N2 = len(s2)
+
+ spike_events = np.empty(N1+N2+2)
+ # the values have one entry less as they are defined at the intervals
+ isi_values = np.empty(N1+N2+1)
+
+ # first x-value of the profile
+ spike_events[0] = t_start
+
+ # first interspike interval - check if a spike exists at the start time
+ if s1[0] > t_start:
+ # edge correction
+ nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) if N1 > 1 else s1[0]-t_start
+ index1 = -1
+ else:
+ nu1 = s1[1]-s1[0] if N1 > 1 else t_end-s1[0]
+ index1 = 0
+
+ if s2[0] > t_start:
+ # edge correction
+ nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) if N2 > 1 else s2[0]-t_start
+ index2 = -1
+ else:
+ nu2 = s2[1]-s2[0] if N2 > 1 else t_end-s2[0]
+ index2 = 0
+
+ isi_values[0] = fabs(nu1-nu2)/fmax(nu1, nu2)
+ index = 1
+
+ with nogil: # release the interpreter to allow multithreading
+ while index1+index2 < N1+N2-2:
+ # check which spike is next, only if there are spikes left in 1
+ # next spike in 1 is earlier, or there are no spikes left in 2
+ if (index1 < N1-1) and ((index2 == N2-1) or
+ (s1[index1+1] < s2[index2+1])):
+ index1 += 1
+ spike_events[index] = s1[index1]
+ if index1 < N1-1:
+ nu1 = s1[index1+1]-s1[index1]
+ else:
+ # edge correction
+ nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \
+ else t_end-s1[index1]
+ elif (index2 < N2-1) and ((index1 == N1-1) or
+ (s1[index1+1] > s2[index2+1])):
+ index2 += 1
+ spike_events[index] = s2[index2]
+ if index2 < N2-1:
+ nu2 = s2[index2+1]-s2[index2]
+ else:
+ # edge correction
+ nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \
+ else t_end-s2[index2]
+ else: # s1[index1+1] == s2[index2+1]
+ index1 += 1
+ index2 += 1
+ spike_events[index] = s1[index1]
+ if index1 < N1-1:
+ nu1 = s1[index1+1]-s1[index1]
+ else:
+ # edge correction
+ nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \
+ else t_end-s1[index1]
+ if index2 < N2-1:
+ nu2 = s2[index2+1]-s2[index2]
+ else:
+ # edge correction
+ nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \
+ else t_end-s2[index2]
+ # compute the corresponding isi-distance
+ isi_values[index] = fabs(nu1 - nu2) / fmax(nu1, nu2)
+ index += 1
+ # the last event is the interval end
+ if spike_events[index-1] == t_end:
+ index -= 1
+ else:
+ spike_events[index] = t_end
+ # end nogil
+
+ return spike_events[:index+1], isi_values[:index]
+
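+# The returned pair feeds a piecewise-constant profile, as done by
+# isi_profile_bi in isi_distance.py:
+# PieceWiseConstFunc(spike_events, isi_values).
+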
+
+############################################################
+# get_min_dist_cython
+############################################################
+cdef inline double get_min_dist_cython(double spike_time,
+ double[:] spike_train,
+ # use memory view to ensure inlining
+ # np.ndarray[DTYPE_t,ndim=1] spike_train,
+ int N,
+ int start_index,
+ double t_start, double t_end) nogil:
+ """ Returns the minimal distance |spike_time - spike_train[i]|
+ with i>=start_index.
+ """
+ cdef double d, d_temp
+ # start with the distance to the start time
+ d = fabs(spike_time - t_start)
+ if start_index < 0:
+ start_index = 0
+ while start_index < N:
+ d_temp = fabs(spike_time - spike_train[start_index])
+ if d_temp > d:
+ return d
+ else:
+ d = d_temp
+ start_index += 1
+
+ # finally, check the distance to end time
+ d_temp = fabs(t_end - spike_time)
+ if d_temp > d:
+ return d
+ else:
+ return d_temp
+
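+# note: the early return above relies on spike_train being sorted, so the
+# distance to spike_time first decreases and then increases as
+# start_index advances.
+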
+
+############################################################
+# isi_avrg_cython
+############################################################
+cdef inline double isi_avrg_cython(double isi1, double isi2) nogil:
+ return 0.5*(isi1+isi2)*(isi1+isi2)
+ # alternative definition to obtain <S> ~ 0.5 for Poisson spikes
+ # return 0.5*(isi1*isi1+isi2*isi2)
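+    # note: the value equals 2*<isi>^2 with <isi> = 0.5*(isi1+isi2), i.e.
+    # the denominator of the weighted profile
+    # S(t) = (s1*isi2 + s2*isi1) / (2*<isi>^2) computed below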
+
+
+############################################################
+# spike_profile_cython
+############################################################
+def spike_profile_cython(double[:] t1, double[:] t2,
+ double t_start, double t_end):
+
+ cdef double[:] spike_events
+ cdef double[:] y_starts
+ cdef double[:] y_ends
+ cdef double[:] t_aux1 = np.empty(2)
+ cdef double[:] t_aux2 = np.empty(2)
+
+ cdef int N1, N2, index1, index2, index
+ cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2
+ cdef double isi1, isi2, s1, s2
+
+ N1 = len(t1)
+ N2 = len(t2)
+
+    # we can assume at least one spike per spike train
+ assert N1 > 0
+ assert N2 > 0
+
+ spike_events = np.empty(N1+N2+2)
+
+ y_starts = np.empty(len(spike_events)-1)
+ y_ends = np.empty(len(spike_events)-1)
+
+ with nogil: # release the interpreter to allow multithreading
+ spike_events[0] = t_start
+ # t_p1 = t_start
+ # t_p2 = t_start
+ # auxiliary spikes for edge correction - consistent with first/last ISI
+ t_aux1[0] = fmin(t_start, 2*t1[0]-t1[1]) if N1 > 1 else t_start
+ t_aux1[1] = fmax(t_end, 2*t1[N1-1]-t1[N1-2]) if N1 > 1 else t_end
+ t_aux2[0] = fmin(t_start, 2*t2[0]-t2[1]) if N2 > 1 else t_start
+ t_aux2[1] = fmax(t_end, 2*t2[N2-1]-t2[N2-2]) if N2 > 1 else t_end
+ t_p1 = t_start if (t1[0] == t_start) else t_aux1[0]
+ t_p2 = t_start if (t2[0] == t_start) else t_aux2[0]
+ if t1[0] > t_start:
+ # dt_p1 = t2[0]-t_start
+ t_f1 = t1[0]
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) if N1 > 1 else t_f1-t_start
+ dt_p1 = dt_f1
+ # s1 = dt_p1*(t_f1-t_start)/isi1
+ s1 = dt_p1
+ index1 = -1
+ else:
+ t_f1 = t1[1] if N1 > 1 else t_end
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ dt_p1 = get_min_dist_cython(t_p1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ isi1 = t_f1-t1[0]
+ s1 = dt_p1
+ index1 = 0
+ if t2[0] > t_start:
+ # dt_p1 = t2[0]-t_start
+ t_f2 = t2[0]
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1])
+ dt_p2 = dt_f2
+ isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) if N2 > 1 else t_f2-t_start
+ # s2 = dt_p2*(t_f2-t_start)/isi2
+ s2 = dt_p2
+ index2 = -1
+ else:
+ t_f2 = t2[1] if N2 > 1 else t_end
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1])
+ dt_p2 = get_min_dist_cython(t_p2, t1, N1, 0, t_aux1[0], t_aux1[1])
+ isi2 = t_f2-t2[0]
+ s2 = dt_p2
+ index2 = 0
+
+ y_starts[0] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ index = 1
+
+ while index1+index2 < N1+N2-2:
+ # print(index, index1, index2)
+ if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1):
+ index1 += 1
+ # first calculate the previous interval end value
+ s1 = dt_f1*(t_f1-t_p1) / isi1
+ # the previous time now was the following time before:
+ dt_p1 = dt_f1
+ t_p1 = t_f1 # t_p1 contains the current time point
+ # get the next time
+ if index1 < N1-1:
+ t_f1 = t1[index1+1]
+ else:
+ t_f1 = t_aux1[1]
+ spike_events[index] = t_p1
+ s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
+ y_ends[index-1] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1,
+ isi2)
+ # now the next interval start value
+ if index1 < N1-1:
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
+ t_aux2[0], t_aux2[1])
+ isi1 = t_f1-t_p1
+ s1 = dt_p1
+ else:
+ dt_f1 = dt_p1
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
+ # s1 needs adjustment due to change of isi1
+ # s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # Eero's correction: no adjustment
+ s1 = dt_p1
+ # s2 is the same as above, thus we can compute y2 immediately
+ y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1,
+ isi2)
+ elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1):
+ index2 += 1
+ # first calculate the previous interval end value
+ s2 = dt_f2*(t_f2-t_p2) / isi2
+ # the previous time now was the following time before:
+ dt_p2 = dt_f2
+ t_p2 = t_f2 # t_p2 contains the current time point
+ # get the next time
+ if index2 < N2-1:
+ t_f2 = t2[index2+1]
+ else:
+ t_f2 = t_aux2[1]
+ spike_events[index] = t_p2
+ s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
+ y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1,
+ isi2)
+ # now the next interval start value
+ if index2 < N2-1:
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
+ t_aux1[0], t_aux1[1])
+ isi2 = t_f2-t_p2
+ s2 = dt_p2
+ else:
+ dt_f2 = dt_p2
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
+ # s2 needs adjustment due to change of isi2
+ # s2 = dt_p2*(t_end-t2[N2-1])/isi2
+ # Eero's correction: no adjustment
+ s2 = dt_p2
+                # s1 is the same as above, thus we can compute y2 immediately
+ y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ else: # t_f1 == t_f2 - generate only one event
+ index1 += 1
+ index2 += 1
+ t_p1 = t_f1
+ t_p2 = t_f2
+ dt_p1 = 0.0
+ dt_p2 = 0.0
+ spike_events[index] = t_f1
+ y_ends[index-1] = 0.0
+ y_starts[index] = 0.0
+ if index1 < N1-1:
+ t_f1 = t1[index1+1]
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
+ t_aux2[0], t_aux2[1])
+ isi1 = t_f1 - t_p1
+ else:
+ t_f1 = t_aux1[1]
+ dt_f1 = dt_p1
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
+ if index2 < N2-1:
+ t_f2 = t2[index2+1]
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
+ t_aux1[0], t_aux1[1])
+ isi2 = t_f2 - t_p2
+ else:
+ t_f2 = t_aux2[1]
+ dt_f2 = dt_p2
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
+ index += 1
+ # the last event is the interval end
+ if spike_events[index-1] == t_end:
+ index -= 1
+ else:
+ spike_events[index] = t_end
+ s1 = dt_f1
+ s2 = dt_f2
+ y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # end nogil
+
+ # use only the data added above
+ # could be less than original length due to equal spike times
+ return spike_events[:index+1], y_starts[:index], y_ends[:index]
+
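+# Hedged sketch of how the three returned arrays are consumed (based on
+# the PieceWiseLinFunc class shipped with this package, not code from
+# this file):
+#
+#   times, y_starts, y_ends = spike_profile_cython(t1, t2, t_start, t_end)
+#   profile = PieceWiseLinFunc(times, y_starts, y_ends)
+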
+
+
+############################################################
+# get_tau
+############################################################
+cdef inline double get_tau(double[:] spikes1, double[:] spikes2,
+ int i, int j, double interval, double max_tau):
+ cdef double m = interval # use interval as initial tau
+ cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1
+ cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1
+ if i < N1 and i > -1:
+ m = fmin(m, spikes1[i+1]-spikes1[i])
+ if j < N2 and j > -1:
+ m = fmin(m, spikes2[j+1]-spikes2[j])
+ if i > 0:
+ m = fmin(m, spikes1[i]-spikes1[i-1])
+ if j > 0:
+ m = fmin(m, spikes2[j]-spikes2[j-1])
+ m *= 0.5
+ if max_tau > 0.0:
+ m = fmin(m, max_tau)
+ return m
+
+
+############################################################
+# coincidence_profile_cython
+############################################################
+def coincidence_profile_cython(double[:] spikes1, double[:] spikes2,
+ double t_start, double t_end, double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int i = -1
+ cdef int j = -1
+ cdef int n = 0
+ cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times
+ cdef double[:] c = np.zeros(N1 + N2 + 2) # coincidences
+ cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity
+ cdef double interval = t_end - t_start
+ cdef double tau
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ st[n] = spikes1[i]
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # both get marked with 1
+ c[n] = 1
+ c[n-1] = 1
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ st[n] = spikes2[j]
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # both get marked with 1
+ c[n] = 1
+ c[n-1] = 1
+        else:  # spikes1[i+1] == spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ n += 1
+ # add only one event, but with coincidence 2 and multiplicity 2
+ st[n] = spikes1[i]
+ c[n] = 2
+ mp[n] = 2
+
+ st = st[:n+2]
+ c = c[:n+2]
+ mp = mp[:n+2]
+
+ st[0] = t_start
+ st[len(st)-1] = t_end
+ if N1 + N2 > 0:
+ c[0] = c[1]
+ c[len(c)-1] = c[len(c)-2]
+ mp[0] = mp[1]
+ mp[len(mp)-1] = mp[len(mp)-2]
+ else:
+ c[0] = 1
+ c[1] = 1
+
+ return st, c, mp
+
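+# Presumed downstream use (mirrors the DiscreteFunc class of this
+# package): the triple (st, c, mp) becomes DiscreteFunc(st, c, mp), whose
+# multiplicity-weighted average over an interval gives the
+# SPIKE-synchronization value.
+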
+
+############################################################
+# coincidence_single_profile_cython
+############################################################
+def coincidence_single_profile_cython(double[:] spikes1, double[:] spikes2,
+ double t_start, double t_end, double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int j = -1
+ cdef double[:] c = np.zeros(N1) # coincidences
+ cdef double interval = t_end - t_start
+ cdef double tau
+    for i in range(N1):
+ while j < N2-1 and spikes2[j+1] < spikes1[i]:
+ # move forward until spikes2[j] is the last spike before spikes1[i]
+            # note that if spikes2[j] is after spikes1[i] we don't do anything
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if j > -1 and fabs(spikes1[i]-spikes2[j]) < tau:
+ # current spike in st1 is coincident
+ c[i] = 1
+ if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]):
+ # in case spikes2[j] is before spikes1[i] it has to be the one
+ # right before (see above), hence we move one forward and also
+ # check the next spike
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if fabs(spikes2[j]-spikes1[i]) < tau:
+ # current spike in st1 is coincident
+ c[i] = 1
+ return c
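+
+
+# Illustration (hypothetical numbers): for spikes1 = [1.0, 5.0] and
+# spikes2 = [1.1, 8.0] on [0, 10] with max_tau = 0, only the first spike
+# of train 1 finds a partner within the adaptive window, so c = [1, 0].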
diff --git a/pyspike/cython/cython_simulated_annealing.pyx b/pyspike/cython/cython_simulated_annealing.pyx
new file mode 100644
index 0000000..be9423c
--- /dev/null
+++ b/pyspike/cython/cython_simulated_annealing.pyx
@@ -0,0 +1,82 @@
+#cython: boundscheck=False
+#cython: wraparound=False
+#cython: cdivision=True
+
+"""
+cython_simulated_annealing.pyx
+
+cython implementation of a simulated annealing algorithm to find the optimal
+spike train order
+
+Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects
+considerably improves the performance of the annealing loop.
+
+Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+"""
+To test whether things can be optimized: remove all yellow stuff
+in the html output::
+
+ cython -a cython_simulated_annealing.pyx
+
+which gives::
+
+ cython_simulated_annealing.html
+
+"""
+
+import numpy as np
+cimport numpy as np
+
+from libc.math cimport exp
+from libc.math cimport fmod
+from libc.stdlib cimport rand
+from libc.stdlib cimport RAND_MAX
+
+DTYPE = np.float64
+ctypedef np.float64_t DTYPE_t
+
+
+def sim_ann_cython(double[:, :] D, double T_start, double T_end, double alpha):
+
+ cdef long N = len(D)
+ cdef double A = np.sum(np.triu(D, 0))
+ cdef long[:] p = np.arange(N)
+ cdef double T = T_start
+ cdef long iterations
+ cdef long succ_iter
+ cdef long total_iter = 0
+ cdef double delta_A
+ cdef long ind1
+ cdef long ind2
+
+ while T > T_end:
+ iterations = 0
+ succ_iter = 0
+ # equilibrate for 100*N steps or 10*N successful steps
+ while iterations < 100*N and succ_iter < 10*N:
+ # exchange two rows and cols
+ # ind1 = np.random.randint(N-1)
+ ind1 = rand() % (N-1)
+            # ind1 = rand() % (N-1) is always < N-1, so ind1+1 is valid
+            ind2 = ind1+1
+ delta_A = -2*D[p[ind1], p[ind2]]
+ if delta_A > 0.0 or exp(delta_A/T) > ((1.0*rand()) / RAND_MAX):
+ # swap indices
+ p[ind1], p[ind2] = p[ind2], p[ind1]
+ A += delta_A
+ succ_iter += 1
+ iterations += 1
+ total_iter += iterations
+ T *= alpha # cool down
+ if succ_iter == 0:
+ # no successful step -> we believe we have converged
+ break
+
+ return p, A, total_iter
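+
+
+# Hedged usage sketch: D is assumed to be the matrix of pairwise
+# spike-train-order values, so that A, the sum of its upper triangle,
+# measures how well the current permutation p agrees with D; the
+# temperatures and cooling factor below are made-up values:
+#
+#   p, A, iters = sim_ann_cython(D, T_start=2.0, T_end=1e-5, alpha=0.9)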
diff --git a/pyspike/cython/directionality_python_backend.py b/pyspike/cython/directionality_python_backend.py
new file mode 100644
index 0000000..c1d820b
--- /dev/null
+++ b/pyspike/cython/directionality_python_backend.py
@@ -0,0 +1,144 @@
+""" directionality_python_backend.py
+
+Collection of python functions that can be used instead of the cython
+implementation.
+
+Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+import numpy as np
+
+
+############################################################
+# spike_train_order_python
+############################################################
+def spike_directionality_profile_python(spikes1, spikes2, t_start, t_end,
+ max_tau):
+
+ def get_tau(spikes1, spikes2, i, j, max_tau):
+ m = t_end - t_start # use interval as initial tau
+ if i < len(spikes1)-1 and i > -1:
+ m = min(m, spikes1[i+1]-spikes1[i])
+ if j < len(spikes2)-1 and j > -1:
+ m = min(m, spikes2[j+1]-spikes2[j])
+ if i > 0:
+ m = min(m, spikes1[i]-spikes1[i-1])
+ if j > 0:
+ m = min(m, spikes2[j]-spikes2[j-1])
+ m *= 0.5
+ if max_tau > 0.0:
+ m = min(m, max_tau)
+ return m
+
+ N1 = len(spikes1)
+ N2 = len(spikes2)
+ i = -1
+ j = -1
+ d1 = np.zeros(N1) # directionality values
+ d2 = np.zeros(N2) # directionality values
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau)
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike in first spike train occurs after second
+ d1[i] = -1
+ d2[j] = +1
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau)
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike in second spike train occurs after first
+ d1[i] = +1
+ d2[j] = -1
+        else:  # spikes1[i+1] == spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ d1[i] = 0
+ d2[j] = 0
+
+ return d1, d2
+
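+# A minimal illustration (hypothetical numbers): for spikes1 = [100, 300]
+# and spikes2 = [105, 295] on a sufficiently large interval, both spike
+# pairs fall within the adaptive windows, giving d1 = [+1, -1] and
+# d2 = [-1, +1]: train 1 leads at t ~ 100 and follows at t ~ 300
+# (assuming max_tau does not cut the pairing off).
+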
+
+############################################################
+# spike_train_order_python
+############################################################
+def spike_train_order_profile_python(spikes1, spikes2, t_start, t_end,
+ max_tau):
+
+ def get_tau(spikes1, spikes2, i, j, max_tau):
+ m = t_end - t_start # use interval as initial tau
+ if i < len(spikes1)-1 and i > -1:
+ m = min(m, spikes1[i+1]-spikes1[i])
+ if j < len(spikes2)-1 and j > -1:
+ m = min(m, spikes2[j+1]-spikes2[j])
+ if i > 0:
+ m = min(m, spikes1[i]-spikes1[i-1])
+ if j > 0:
+ m = min(m, spikes2[j]-spikes2[j-1])
+ m *= 0.5
+ if max_tau > 0.0:
+ m = min(m, max_tau)
+ return m
+
+ N1 = len(spikes1)
+ N2 = len(spikes2)
+ i = -1
+ j = -1
+ n = 0
+ st = np.zeros(N1 + N2 + 2) # spike times
+ a = np.zeros(N1 + N2 + 2) # coincidences
+ mp = np.ones(N1 + N2 + 2) # multiplicity
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau)
+ st[n] = spikes1[i]
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+                # coincidence between the current spike and the previous
+                # spike: the spike in train 2 leads, asymmetry value -1
+ a[n] = -1
+ a[n-1] = -1
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau)
+ st[n] = spikes2[j]
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+                # coincidence between the current spike and the previous
+                # spike: the spike in train 1 leads, asymmetry value +1
+ a[n] = 1
+ a[n-1] = 1
+        else:  # spikes1[i+1] == spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ n += 1
+ # add only one event with zero asymmetry value and multiplicity 2
+ st[n] = spikes1[i]
+ a[n] = 0
+ mp[n] = 2
+
+ st = st[:n+2]
+ a = a[:n+2]
+ mp = mp[:n+2]
+
+ st[0] = t_start
+ st[len(st)-1] = t_end
+ if N1 + N2 > 0:
+ a[0] = a[1]
+ a[len(a)-1] = a[len(a)-2]
+ mp[0] = mp[1]
+ mp[len(mp)-1] = mp[len(mp)-2]
+ else:
+ a[0] = 1
+ a[1] = 1
+
+ return st, a, mp
diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py
new file mode 100644
index 0000000..e75f181
--- /dev/null
+++ b/pyspike/cython/python_backend.py
@@ -0,0 +1,638 @@
+""" python_backend.py
+
+Collection of python functions that can be used instead of the cython
+implementation.
+
+Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+import numpy as np
+
+
+############################################################
+# isi_distance_python
+############################################################
+def isi_distance_python(s1, s2, t_start, t_end):
+ """ Plain Python implementation of the isi distance.
+ """
+ N1 = len(s1)
+ N2 = len(s2)
+
+ # compute the isi-distance
+ spike_events = np.empty(N1+N2+2)
+ spike_events[0] = t_start
+ # the values have one entry less - the number of intervals between events
+ isi_values = np.empty(len(spike_events) - 1)
+ if s1[0] > t_start:
+ # edge correction
+ nu1 = max(s1[0] - t_start, s1[1] - s1[0]) if N1 > 1 else s1[0]-t_start
+ index1 = -1
+ else:
+ nu1 = s1[1] - s1[0] if N1 > 1 else t_end-s1[0]
+ index1 = 0
+ if s2[0] > t_start:
+ # edge correction
+ nu2 = max(s2[0] - t_start, s2[1] - s2[0]) if N2 > 1 else s2[0]-t_start
+ index2 = -1
+ else:
+ nu2 = s2[1] - s2[0] if N2 > 1 else t_end-s2[0]
+ index2 = 0
+
+ isi_values[0] = abs(nu1 - nu2) / max(nu1, nu2)
+ index = 1
+ while index1+index2 < N1+N2-2:
+ # check which spike is next - from s1 or s2
+ if (index1 < N1-1) and (index2 == N2-1 or s1[index1+1] < s2[index2+1]):
+ index1 += 1
+ spike_events[index] = s1[index1]
+ if index1 < N1-1:
+ nu1 = s1[index1+1]-s1[index1]
+ else:
+ # edge correction
+ nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2]) if N1 > 1 \
+ else t_end-s1[N1-1]
+
+ elif (index2 < N2-1) and (index1 == N1-1 or
+ s1[index1+1] > s2[index2+1]):
+ index2 += 1
+ spike_events[index] = s2[index2]
+ if index2 < N2-1:
+ nu2 = s2[index2+1]-s2[index2]
+ else:
+ # edge correction
+ nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2]) if N2 > 1 \
+ else t_end-s2[N2-1]
+
+ else: # s1[index1 + 1] == s2[index2 + 1]
+ index1 += 1
+ index2 += 1
+ spike_events[index] = s1[index1]
+ if index1 < N1-1:
+ nu1 = s1[index1+1]-s1[index1]
+ else:
+ # edge correction
+ nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2]) if N1 > 1 \
+ else t_end-s1[N1-1]
+ if index2 < N2-1:
+ nu2 = s2[index2+1]-s2[index2]
+ else:
+ # edge correction
+ nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2]) if N2 > 1 \
+ else t_end-s2[N2-1]
+ # compute the corresponding isi-distance
+ isi_values[index] = abs(nu1 - nu2) / \
+ max(nu1, nu2)
+ index += 1
+ # the last event is the interval end
+ if spike_events[index-1] == t_end:
+ index -= 1
+ else:
+ spike_events[index] = t_end
+ # use only the data added above
+ # could be less than original length due to equal spike times
+ return spike_events[:index + 1], isi_values[:index]
+
+
+############################################################
+# get_min_dist
+############################################################
+def get_min_dist(spike_time, spike_train, start_index, t_start, t_end):
+ """ Returns the minimal distance |spike_time - spike_train[i]|
+ with i>=start_index.
+ """
+ d = abs(spike_time - t_start)
+ if start_index < 0:
+ start_index = 0
+ while start_index < len(spike_train):
+ d_temp = abs(spike_time - spike_train[start_index])
+ if d_temp > d:
+ return d
+ else:
+ d = d_temp
+ start_index += 1
+ # finally, check the distance to end time
+ d_temp = abs(t_end - spike_time)
+ if d_temp > d:
+ return d
+ else:
+ return d_temp
+
+
+############################################################
+# spike_distance_python
+############################################################
+def spike_distance_python(spikes1, spikes2, t_start, t_end):
+ """ Computes the instantaneous spike-distance S_spike (t) of the two given
+ spike trains. The spike trains are expected to have auxiliary spikes at the
+ beginning and end of the interval. Use the function add_auxiliary_spikes to
+ add those spikes to the spike train.
+ Args:
+ - spikes1, spikes2: ordered arrays of spike times with auxiliary spikes.
+ - t_start, t_end: edges of the spike train
+ Returns:
+ - PieceWiseLinFunc describing the spike-distance.
+ """
+
+ # shorter variables
+ t1 = spikes1
+ t2 = spikes2
+
+ N1 = len(t1)
+ N2 = len(t2)
+
+ spike_events = np.empty(N1+N2+2)
+
+ y_starts = np.empty(len(spike_events)-1)
+ y_ends = np.empty(len(spike_events)-1)
+
+ t_aux1 = np.zeros(2)
+ t_aux2 = np.zeros(2)
+ t_aux1[0] = min(t_start, t1[0]-(t1[1]-t1[0])) if N1 > 1 else t_start
+ t_aux1[1] = max(t_end, t1[N1-1]+(t1[N1-1]-t1[N1-2])) if N1 > 1 else t_end
+ t_aux2[0] = min(t_start, t2[0]-(t2[1]-t2[0])) if N2 > 1 else t_start
+ t_aux2[1] = max(t_end, t2[N2-1]+(t2[N2-1]-t2[N2-2])) if N2 > 1 else t_end
+ t_p1 = t_start if (t1[0] == t_start) else t_aux1[0]
+ t_p2 = t_start if (t2[0] == t_start) else t_aux2[0]
+
+ # print "t_aux1", t_aux1, ", t_aux2:", t_aux2
+
+ spike_events[0] = t_start
+ if t1[0] > t_start:
+ t_f1 = t1[0]
+ dt_f1 = get_min_dist(t_f1, t2, 0, t_aux2[0], t_aux2[1])
+ dt_p1 = dt_f1
+ isi1 = max(t_f1-t_start, t1[1]-t1[0]) if N1 > 1 else t_f1-t_start
+ # s1 = dt_p1*(t_f1-t_start)/isi1
+ s1 = dt_p1
+ index1 = -1
+ else:
+ # dt_p1 = t_start-t_p2
+ t_f1 = t1[1] if N1 > 1 else t_end
+ dt_p1 = get_min_dist(t_p1, t2, 0, t_aux2[0], t_aux2[1])
+ dt_f1 = get_min_dist(t_f1, t2, 0, t_aux2[0], t_aux2[1])
+ isi1 = t_f1-t1[0]
+ s1 = dt_p1
+ index1 = 0
+ if t2[0] > t_start:
+ # dt_p1 = t2[0]-t_start
+ t_f2 = t2[0]
+ dt_f2 = get_min_dist(t_f2, t1, 0, t_aux1[0], t_aux1[1])
+ dt_p2 = dt_f2
+ isi2 = max(t_f2-t_start, t2[1]-t2[0]) if N2 > 1 else t_f2-t_start
+ # s2 = dt_p2*(t_f2-t_start)/isi2
+ s2 = dt_p2
+ index2 = -1
+ else:
+ t_f2 = t2[1] if N2 > 1 else t_end
+ dt_p2 = get_min_dist(t_p2, t1, 0, t_aux1[0], t_aux1[1])
+ dt_f2 = get_min_dist(t_f2, t1, 0, t_aux1[0], t_aux1[1])
+ isi2 = t_f2-t2[0]
+ s2 = dt_p2
+ index2 = 0
+
+ y_starts[0] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2)
+ index = 1
+
+ while index1+index2 < N1+N2-2:
+ # print(index, index1, index2)
+ if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1):
+ index1 += 1
+ # first calculate the previous interval end value
+ s1 = dt_f1*(t_f1-t_p1) / isi1
+ # the previous time now was the following time before:
+ dt_p1 = dt_f1
+ t_p1 = t_f1 # t_p1 contains the current time point
+ # get the next time
+ if index1 < N1-1:
+ t_f1 = t1[index1+1]
+ else:
+ t_f1 = t_aux1[1]
+ spike_events[index] = t_p1
+ s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
+ y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2)
+ # now the next interval start value
+ if index1 < N1-1:
+ dt_f1 = get_min_dist(t_f1, t2, index2, t_aux2[0], t_aux2[1])
+ isi1 = t_f1-t_p1
+ s1 = dt_p1
+ else:
+ dt_f1 = dt_p1
+ isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
+ # s1 needs adjustment due to change of isi1
+ # s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # Eero's correction: no adjustment
+ s1 = dt_p1
+ # s2 is the same as above, thus we can compute y2 immediately
+ y_starts[index] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2)
+ elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1):
+ index2 += 1
+ # first calculate the previous interval end value
+ s2 = dt_f2*(t_f2-t_p2) / isi2
+ # the previous time now was the following time before:
+ dt_p2 = dt_f2
+            t_p2 = t_f2  # t_p2 contains the current time point
+ # get the next time
+ if index2 < N2-1:
+ t_f2 = t2[index2+1]
+ else:
+ t_f2 = t_aux2[1]
+ spike_events[index] = t_p2
+ s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
+ y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2)
+ # now the next interval start value
+ if index2 < N2-1:
+ dt_f2 = get_min_dist(t_f2, t1, index1, t_aux1[0], t_aux1[1])
+ isi2 = t_f2-t_p2
+ s2 = dt_p2
+ else:
+ dt_f2 = dt_p2
+ isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
+ # s2 needs adjustment due to change of isi2
+ # s2 = dt_p2*(t_end-t2[N2-1])/isi2
+                # Eero's correction: no adjustment
+                s2 = dt_p2
+            # s1 is the same as above, thus we can compute y2 immediately
+ y_starts[index] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2)
+ else: # t_f1 == t_f2 - generate only one event
+ index1 += 1
+ index2 += 1
+ t_p1 = t_f1
+ t_p2 = t_f2
+ dt_p1 = 0.0
+ dt_p2 = 0.0
+ spike_events[index] = t_f1
+ y_ends[index-1] = 0.0
+ y_starts[index] = 0.0
+ if index1 < N1-1:
+ t_f1 = t1[index1+1]
+ dt_f1 = get_min_dist(t_f1, t2, index2, t_aux2[0], t_aux2[1])
+ isi1 = t_f1 - t_p1
+ else:
+ t_f1 = t_aux1[1]
+ dt_f1 = dt_p1
+ isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
+ if index2 < N2-1:
+ t_f2 = t2[index2+1]
+ dt_f2 = get_min_dist(t_f2, t1, index1, t_aux1[0], t_aux1[1])
+ isi2 = t_f2 - t_p2
+ else:
+ t_f2 = t_aux2[1]
+ dt_f2 = dt_p2
+ isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
+ index += 1
+
+ # the last event is the interval end
+ if spike_events[index-1] == t_end:
+ index -= 1
+ else:
+ spike_events[index] = t_end
+ s1 = dt_f1 # *(t_end-t1[N1-1])/isi1
+ s2 = dt_f2 # *(t_end-t2[N2-1])/isi2
+ y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2)
+
+ # use only the data added above
+ # could be less than original length due to equal spike times
+ return spike_events[:index+1], y_starts[:index], y_ends[:index]
+
+
+############################################################
+# cumulative_sync_python
+############################################################
+def cumulative_sync_python(spikes1, spikes2):
+
+ def get_tau(spikes1, spikes2, i, j):
+ return 0.5*min([spikes1[i]-spikes1[i-1], spikes1[i+1]-spikes1[i],
+ spikes2[j]-spikes2[j-1], spikes2[j+1]-spikes2[j]])
+ N1 = len(spikes1)
+ N2 = len(spikes2)
+ i = 0
+ j = 0
+ n = 0
+ st = np.zeros(N1 + N2 - 2)
+ c = np.zeros(N1 + N2 - 3)
+ c[0] = 0
+ st[0] = 0
+ while n < N1 + N2:
+ if spikes1[i+1] < spikes2[j+1]:
+ i += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j)
+ st[n] = spikes1[i]
+ if spikes1[i]-spikes2[j] > tau:
+ c[n] = c[n-1]
+ else:
+ c[n] = c[n-1]+1
+ elif spikes1[i+1] > spikes2[j+1]:
+ j += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j)
+ st[n] = spikes2[j]
+ if spikes2[j]-spikes1[i] > tau:
+ c[n] = c[n-1]
+ else:
+ c[n] = c[n-1]+1
+        else:  # spikes1[i+1] == spikes2[j+1]
+ j += 1
+ i += 1
+ if i == N1-1 or j == N2-1:
+ break
+ n += 1
+ st[n] = spikes1[i]
+ c[n] = c[n-1]
+ n += 1
+ st[n] = spikes1[i]
+ c[n] = c[n-1]+1
+ c[0] = 0
+ st[0] = spikes1[0]
+ st[-1] = spikes1[-1]
+
+ return st, c
+
+
+def get_tau(spikes1, spikes2, i, j, max_tau, init_tau):
+ m = init_tau
+ if i < len(spikes1)-1 and i > -1:
+ m = min(m, spikes1[i+1]-spikes1[i])
+ if j < len(spikes2)-1 and j > -1:
+ m = min(m, spikes2[j+1]-spikes2[j])
+ if i > 0:
+ m = min(m, spikes1[i]-spikes1[i-1])
+ if j > 0:
+ m = min(m, spikes2[j]-spikes2[j-1])
+ m *= 0.5
+ if max_tau > 0.0:
+ m = min(m, max_tau)
+ return m
+
+
+############################################################
+# coincidence_python
+############################################################
+def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
+
+ N1 = len(spikes1)
+ N2 = len(spikes2)
+ i = -1
+ j = -1
+ n = 0
+ st = np.zeros(N1 + N2 + 2) # spike times
+ c = np.zeros(N1 + N2 + 2) # coincidences
+ mp = np.ones(N1 + N2 + 2) # multiplicity
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
+ st[n] = spikes1[i]
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # both get marked with 1
+ c[n] = 1
+ c[n-1] = 1
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
+ st[n] = spikes2[j]
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # both get marked with 1
+ c[n] = 1
+ c[n-1] = 1
+        else:  # spikes1[i+1] == spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ n += 1
+ # add only one event, but with coincidence 2 and multiplicity 2
+ st[n] = spikes1[i]
+ c[n] = 2
+ mp[n] = 2
+
+ st = st[:n+2]
+ c = c[:n+2]
+ mp = mp[:n+2]
+
+ st[0] = t_start
+ st[len(st)-1] = t_end
+ if N1 + N2 > 0:
+ c[0] = c[1]
+ c[len(c)-1] = c[len(c)-2]
+ mp[0] = mp[1]
+ mp[len(mp)-1] = mp[len(mp)-2]
+ else:
+ c[0] = 1
+ c[1] = 1
+
+ return st, c, mp
+
+
+############################################################
+# coincidence_single_python
+############################################################
+def coincidence_single_python(spikes1, spikes2, t_start, t_end, max_tau):
+
+ N1 = len(spikes1)
+ N2 = len(spikes2)
+ j = -1
+ c = np.zeros(N1) # coincidences
+ for i in range(N1):
+ while j < N2-1 and spikes2[j+1] < spikes1[i]:
+ # move forward until spikes2[j] is the last spike before spikes1[i]
+            # note that if spikes2[j] is after spikes1[i] we don't do anything
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
+ if j > -1 and abs(spikes1[i]-spikes2[j]) < tau:
+ # current spike in st1 is coincident
+ c[i] = 1
+ if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]):
+ # in case spikes2[j] is before spikes1[i] it has to be the first or
+ # the one right before (see above), hence we move one forward and
+ # also check the next spike
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
+ if abs(spikes2[j]-spikes1[i]) < tau:
+ # current spike in st1 is coincident
+ c[i] = 1
+ return c
+
+
+############################################################
+# add_piece_wise_const_python
+############################################################
+def add_piece_wise_const_python(x1, y1, x2, y2):
+ x_new = np.empty(len(x1) + len(x2))
+ y_new = np.empty(len(x_new)-1)
+ x_new[0] = x1[0]
+ y_new[0] = y1[0] + y2[0]
+ index1 = 0
+ index2 = 0
+ index = 0
+ while (index1+1 < len(y1)) and (index2+1 < len(y2)):
+ index += 1
+ # print(index1+1, x1[index1+1], y1[index1+1], x_new[index])
+ if x1[index1+1] < x2[index2+1]:
+ index1 += 1
+ x_new[index] = x1[index1]
+ elif x1[index1+1] > x2[index2+1]:
+ index2 += 1
+ x_new[index] = x2[index2]
+ else: # x1[index1+1] == x2[index2+1]:
+ index1 += 1
+ index2 += 1
+ x_new[index] = x1[index1]
+ y_new[index] = y1[index1] + y2[index2]
+ # one array reached the end -> copy the contents of the other to the end
+ if index1+1 < len(y1):
+ x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:]
+ y_new[index+1:index+1+len(y1)-index1-1] = y1[index1+1:] + y2[-1]
+ index += len(x1)-index1-2
+ elif index2+1 < len(y2):
+ x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:]
+ y_new[index+1:index+1+len(y2)-index2-1] = y2[index2+1:] + y1[-1]
+ index += len(x2)-index2-2
+ else: # both arrays reached the end simultaneously
+ # only the last x-value missing
+ x_new[index+1] = x1[-1]
+ # the last value is again the end of the interval
+ # x_new[index+1] = x1[-1]
+ # only use the data that was actually filled
+
+ return x_new[:index+2], y_new[:index+1]
+
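+# Worked example (hypothetical inputs): merging two piecewise-constant
+# functions defined on [0, 2],
+#
+#   x1 = [0, 1, 2],   y1 = [1, 2]   # value 1 on [0, 1), 2 on [1, 2]
+#   x2 = [0, 0.5, 2], y2 = [5, 6]   # value 5 on [0, 0.5), 6 on [0.5, 2]
+#
+# yields x_new = [0, 0.5, 1, 2] and y_new = [6, 7, 8], the pointwise sum.
+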
+
+############################################################
+# add_piece_wise_lin_python
+############################################################
+def add_piece_wise_lin_python(x1, y11, y12, x2, y21, y22):
+ x_new = np.empty(len(x1) + len(x2))
+ y1_new = np.empty(len(x_new)-1)
+ y2_new = np.empty_like(y1_new)
+ x_new[0] = x1[0]
+ y1_new[0] = y11[0] + y21[0]
+ index1 = 0 # index for self
+ index2 = 0 # index for f
+ index = 0 # index for new
+ while (index1+1 < len(y11)) and (index2+1 < len(y21)):
+ # print(index1+1, x1[index1+1], self.y[index1+1], x_new[index])
+ if x1[index1+1] < x2[index2+1]:
+ # first compute the end value of the previous interval
+ # linear interpolation of the interval
+ y = y21[index2] + (y22[index2]-y21[index2]) * \
+ (x1[index1+1]-x2[index2]) / (x2[index2+1]-x2[index2])
+ y2_new[index] = y12[index1] + y
+ index1 += 1
+ index += 1
+ x_new[index] = x1[index1]
+ # and the starting value for the next interval
+ y1_new[index] = y11[index1] + y
+ elif x1[index1+1] > x2[index2+1]:
+ # first compute the end value of the previous interval
+ # linear interpolation of the interval
+ y = y11[index1] + (y12[index1]-y11[index1]) * \
+ (x2[index2+1]-x1[index1]) / \
+ (x1[index1+1]-x1[index1])
+ y2_new[index] = y22[index2] + y
+ index2 += 1
+ index += 1
+ x_new[index] = x2[index2]
+ # and the starting value for the next interval
+ y1_new[index] = y21[index2] + y
+ else: # x1[index1+1] == x2[index2+1]:
+ y2_new[index] = y12[index1] + y22[index2]
+ index1 += 1
+ index2 += 1
+ index += 1
+ x_new[index] = x1[index1]
+ y1_new[index] = y11[index1] + y21[index2]
+ # one array reached the end -> copy the contents of the other to the end
+ if index1+1 < len(y11):
+ # compute the linear interpolations values
+ y = y21[index2] + (y22[index2]-y21[index2]) * \
+ (x1[index1+1:-1]-x2[index2]) / (x2[index2+1]-x2[index2])
+ x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:]
+ y1_new[index+1:index+1+len(y11)-index1-1] = y11[index1+1:]+y
+ y2_new[index:index+len(y12)-index1-1] = y12[index1:-1] + y
+ index += len(x1)-index1-2
+ elif index2+1 < len(y21):
+ # compute the linear interpolations values
+ y = y11[index1] + (y12[index1]-y11[index1]) * \
+ (x2[index2+1:-1]-x1[index1]) / \
+ (x1[index1+1]-x1[index1])
+ x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:]
+ y1_new[index+1:index+1+len(y21)-index2-1] = y21[index2+1:] + y
+ y2_new[index:index+len(y22)-index2-1] = y22[index2:-1] + y
+ index += len(x2)-index2-2
+ else: # both arrays reached the end simultaneously
+ # only the last x-value missing
+ x_new[index+1] = x1[-1]
+ # finally, the end value for the last interval
+ y2_new[index] = y12[-1]+y22[-1]
+ # only use the data that was actually filled
+ return x_new[:index+2], y1_new[:index+1], y2_new[:index+1]
+
+
+############################################################
+# add_discrete_function_python
+############################################################
+def add_discrete_function_python(x1, y1, mp1, x2, y2, mp2):
+
+ x_new = np.empty(len(x1) + len(x2))
+ y_new = np.empty_like(x_new)
+ mp_new = np.empty_like(x_new)
+ x_new[0] = x1[0]
+ index1 = 0
+ index2 = 0
+ index = 0
+ N1 = len(x1)-1
+ N2 = len(x2)-1
+ while (index1+1 < N1) and (index2+1 < N2):
+ if x1[index1+1] < x2[index2+1]:
+ index1 += 1
+ index += 1
+ x_new[index] = x1[index1]
+ y_new[index] = y1[index1]
+ mp_new[index] = mp1[index1]
+ elif x1[index1+1] > x2[index2+1]:
+ index2 += 1
+ index += 1
+ x_new[index] = x2[index2]
+ y_new[index] = y2[index2]
+ mp_new[index] = mp2[index2]
+ else: # x1[index1+1] == x2[index2+1]
+ index1 += 1
+ index2 += 1
+ index += 1
+ x_new[index] = x1[index1]
+ y_new[index] = y1[index1] + y2[index2]
+ mp_new[index] = mp1[index1] + mp2[index2]
+ # one array reached the end -> copy the contents of the other to the end
+ if index1+1 < N1:
+ x_new[index+1:index+1+N1-index1] = x1[index1+1:]
+ y_new[index+1:index+1+N1-index1] = y1[index1+1:]
+ mp_new[index+1:index+1+N1-index1] = mp1[index1+1:]
+ index += N1-index1
+ elif index2+1 < N2:
+ x_new[index+1:index+1+N2-index2] = x2[index2+1:]
+ y_new[index+1:index+1+N2-index2] = y2[index2+1:]
+ mp_new[index+1:index+1+N2-index2] = mp2[index2+1:]
+ index += N2-index2
+ else: # both arrays reached the end simultaneously
+ x_new[index+1] = x1[-1]
+ y_new[index+1] = y1[-1] + y2[-1]
+ mp_new[index+1] = mp1[-1] + mp2[-1]
+ index += 1
+
+ y_new[0] = y_new[1]
+ mp_new[0] = mp_new[1]
+
+ # the last value is again the end of the interval
+ # only use the data that was actually filled
+ return x_new[:index+1], y_new[:index+1], mp_new[:index+1]
diff --git a/pyspike/generic.py b/pyspike/generic.py
new file mode 100644
index 0000000..5ad06f1
--- /dev/null
+++ b/pyspike/generic.py
@@ -0,0 +1,150 @@
+"""
+
+Generic functions to compute multi-variate profiles and distance matrices.
+
+Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+"""
+
+from __future__ import division
+
+import numpy as np
+
+
+############################################################
+# _generic_profile_multi
+############################################################
+def _generic_profile_multi(spike_trains, pair_distance_func, indices=None):
+ """ Internal implementation detail, don't call this function directly,
+ use isi_profile_multi or spike_profile_multi instead.
+
+ Computes the multi-variate distance for a set of spike-trains using the
+    pair_distance_func to compute pair-wise distances. That is, it computes
+    the average distance of all pairs of spike-trains:
+    :math:`S(t) = \\frac{2}{N(N-1)} \\sum_{<i,j>} S_{i,j}`,
+ where the sum goes over all pairs <i,j>.
+ Args:
+ - spike_trains: list of spike trains
+ - pair_distance_func: function computing the distance of two spike trains
+ - indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ Returns:
+ - The averaged multi-variate distance of all pairs
+ """
+
+ def divide_and_conquer(pairs1, pairs2):
+ """ recursive calls by splitting the two lists in half.
+ """
+ L1 = len(pairs1)
+ if L1 > 1:
+ dist_prof1 = divide_and_conquer(pairs1[:L1//2],
+ pairs1[L1//2:])
+ else:
+ dist_prof1 = pair_distance_func(spike_trains[pairs1[0][0]],
+ spike_trains[pairs1[0][1]])
+ L2 = len(pairs2)
+ if L2 > 1:
+ dist_prof2 = divide_and_conquer(pairs2[:L2//2],
+ pairs2[L2//2:])
+ else:
+ dist_prof2 = pair_distance_func(spike_trains[pairs2[0][0]],
+ spike_trains[pairs2[0][1]])
+ dist_prof1.add(dist_prof2)
+ return dist_prof1
+
+ if indices is None:
+ indices = np.arange(len(spike_trains))
+ indices = np.array(indices)
+ # check validity of indices
+ assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+ "Invalid index list."
+ # generate a list of possible index pairs
+ pairs = [(indices[i], j) for i in range(len(indices))
+ for j in indices[i+1:]]
+
+ L = len(pairs)
+ if L > 1:
+ # recursive iteration through the list of pairs to get average profile
+ avrg_dist = divide_and_conquer(pairs[:len(pairs)//2],
+ pairs[len(pairs)//2:])
+ else:
+ avrg_dist = pair_distance_func(spike_trains[pairs[0][0]],
+ spike_trains[pairs[0][1]])
+
+ return avrg_dist, L
+
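+# Hedged sketch of a typical call (isi_profile_bi from isi_distance.py
+# as the pair function; the normalization is done by the callers):
+#
+#   avrg_profile, n_pairs = _generic_profile_multi(spike_trains,
+#                                                  isi_profile_bi)
+#   avrg_profile.mul_scalar(1.0/n_pairs)  # turn the sum into the mean
+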
+
+############################################################
+# _generic_distance_multi
+############################################################
+def _generic_distance_multi(spike_trains, pair_distance_func,
+ indices=None, interval=None):
+ """ Internal implementation detail, don't call this function directly,
+ use isi_distance_multi or spike_distance_multi instead.
+
+ Computes the multi-variate distance for a set of spike-trains using the
+    pair_distance_func to compute pair-wise distances. That is, it computes
+    the average distance of all pairs of spike-trains:
+    :math:`D = \\frac{2}{N(N-1)} \\sum_{<i,j>} D_{i,j}`,
+ where the sum goes over all pairs <i,j>.
+ Args:
+ - spike_trains: list of spike trains
+ - pair_distance_func: function computing the distance of two spike trains
+ - indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ Returns:
+ - The averaged multi-variate distance of all pairs
+ """
+
+ if indices is None:
+ indices = np.arange(len(spike_trains))
+ indices = np.array(indices)
+ # check validity of indices
+ assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+ "Invalid index list."
+ # generate a list of possible index pairs
+ pairs = [(indices[i], j) for i in range(len(indices))
+ for j in indices[i+1:]]
+
+ avrg_dist = 0.0
+ for (i, j) in pairs:
+ avrg_dist += pair_distance_func(spike_trains[i], spike_trains[j],
+ interval)
+
+ return avrg_dist/len(pairs)
+
+
+############################################################
+# generic_distance_matrix
+############################################################
+def _generic_distance_matrix(spike_trains, dist_function,
+ indices=None, interval=None):
+ """ Internal implementation detail. Don't use this function directly.
+ Instead use isi_distance_matrix or spike_distance_matrix.
+ Computes the time averaged distance of all pairs of spike-trains.
+ Args:
+ - spike_trains: list of spike trains
+ - indices: list of indices defining which spike-trains to use
+ if None all given spike-trains are used (default=None)
+ Return:
+ - a 2D array of size len(indices)*len(indices) containing the average
+ pair-wise distance
+ """
+ if indices is None:
+ indices = np.arange(len(spike_trains))
+ indices = np.array(indices)
+ # check validity of indices
+ assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+ "Invalid index list."
+ # generate a list of possible index pairs
+ pairs = [(i, j) for i in range(len(indices))
+ for j in range(i+1, len(indices))]
+
+ distance_matrix = np.zeros((len(indices), len(indices)))
+ for i, j in pairs:
+ d = dist_function(spike_trains[indices[i]], spike_trains[indices[j]],
+ interval)
+ distance_matrix[i, j] = d
+ distance_matrix[j, i] = d
+ return distance_matrix
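+
+
+# Presumed call pattern (isi_distance_bi from isi_distance.py as the
+# distance function):
+#
+#   M = _generic_distance_matrix(spike_trains, isi_distance_bi)
+#   # M is symmetric with zeros on the diagonal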
diff --git a/pyspike/isi_distance.py b/pyspike/isi_distance.py
new file mode 100644
index 0000000..e91dce2
--- /dev/null
+++ b/pyspike/isi_distance.py
@@ -0,0 +1,230 @@
+# Module containing several functions to compute the ISI profiles and distances
+# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
+# Distributed under the BSD License
+
+from __future__ import absolute_import
+
+import pyspike
+from pyspike import PieceWiseConstFunc
+from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \
+ _generic_distance_matrix
+
+
+############################################################
+# isi_profile
+############################################################
+def isi_profile(*args, **kwargs):
+ """ Computes the isi-distance profile :math:`I(t)` of the given
+ spike trains. Returns the profile as a PieceWiseConstFunc object. The
+ ISI-values are defined positive :math:`I(t)>=0`.
+
+ Valid call structures::
+
+ isi_profile(st1, st2) # returns the bi-variate profile
+ isi_profile(st1, st2, st3) # multi-variate profile of 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ isi_profile(spike_trains) # profile of the list of spike trains
+ isi_profile(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ The multivariate ISI distance profile for a set of spike trains is defined
+ as the average ISI-profile of all pairs of spike-trains:
+
+ .. math:: <I(t)> = \\frac{2}{N(N-1)} \\sum_{<i,j>} I^{i,j},
+
+ where the sum goes over all pairs <i,j>
+
+
+ :returns: The isi-distance profile :math:`I(t)`
+ :rtype: :class:`.PieceWiseConstFunc`
+ """
+ if len(args) == 1:
+ return isi_profile_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return isi_profile_bi(args[0], args[1])
+ else:
+ return isi_profile_multi(args)
+
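+# For instance (a sketch; avrg() is provided by the profile classes
+# elsewhere in this package):
+#
+#   profile = isi_profile(st1, st2)
+#   d = profile.avrg()  # equals isi_distance(st1, st2)
+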
+
+############################################################
+# isi_profile_bi
+############################################################
+def isi_profile_bi(spike_train1, spike_train2):
+ """ Specific function to compute a bivariate ISI-profile. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.isi_profile` to compute ISI-profiles.
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`.SpikeTrain`
+ :returns: The isi-distance profile :math:`I(t)`
+ :rtype: :class:`.PieceWiseConstFunc`
+
+ """
+ # check whether the spike trains are defined for the same interval
+ assert spike_train1.t_start == spike_train2.t_start, \
+ "Given spike trains are not defined on the same interval!"
+ assert spike_train1.t_end == spike_train2.t_end, \
+ "Given spike trains are not defined on the same interval!"
+
+ # load cython implementation
+ try:
+ from .cython.cython_profiles import isi_profile_cython \
+ as isi_profile_impl
+ except ImportError:
+ if not(pyspike.disable_backend_warning):
+ print("Warning: isi_profile_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+ # use python backend
+ from .cython.python_backend import isi_distance_python \
+ as isi_profile_impl
+
+ times, values = isi_profile_impl(spike_train1.get_spikes_non_empty(),
+ spike_train2.get_spikes_non_empty(),
+ spike_train1.t_start, spike_train1.t_end)
+ return PieceWiseConstFunc(times, values)
+
+
+############################################################
+# isi_profile_multi
+############################################################
+def isi_profile_multi(spike_trains, indices=None):
+ """ Specific function to compute the multivariate ISI-profile for a set of
+ spike trains. This is a deprecated function and should not be called
+ directly. Use :func:`.isi_profile` to compute ISI-profiles.
+
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :returns: The averaged isi profile :math:`<I(t)>`
+ :rtype: :class:`.PieceWiseConstFunc`
+ """
+ average_dist, M = _generic_profile_multi(spike_trains, isi_profile_bi,
+ indices)
+ average_dist.mul_scalar(1.0/M) # normalize
+ return average_dist
+
+
+############################################################
+# isi_distance
+############################################################
+def isi_distance(*args, **kwargs):
+ """ Computes the ISI-distance :math:`D_I` of the given spike trains. The
+ isi-distance is the integral over the isi distance profile
+ :math:`I(t)`:
+
+ .. math:: D_I = \\int_{T_0}^{T_1} I(t) dt.
+
+ In the multivariate case it is the integral over the multivariate
+ ISI-profile, i.e. the average profile over all spike train pairs:
+
+ .. math:: D_I = \\int_0^T \\frac{2}{N(N-1)} \\sum_{<i,j>} I^{i,j} dt,
+
+ where the sum goes over all pairs <i,j>
+
+
+
+ Valid call structures::
+
+ isi_distance(st1, st2) # returns the bi-variate distance
+ isi_distance(st1, st2, st3) # multi-variate distance of 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ isi_distance(spike_trains) # distance of the list of spike trains
+ isi_distance(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ :returns: The isi-distance :math:`D_I`.
+ :rtype: double
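+
+ Example (a minimal sketch, assuming PySpike is importable as ``spk``)::
+
+ import pyspike as spk
+ st1 = spk.SpikeTrain([0.1, 0.3, 0.7], edges=(0.0, 1.0))
+ st2 = spk.SpikeTrain([0.2, 0.35, 0.8], edges=(0.0, 1.0))
+ d = spk.isi_distance(st1, st2) # scalar ISI-distance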
+ """
+
+ if len(args) == 1:
+ return isi_distance_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return isi_distance_bi(args[0], args[1], **kwargs)
+ else:
+ return isi_distance_multi(args, **kwargs)
+
+
+############################################################
+# _isi_distance_bi
+############################################################
+def isi_distance_bi(spike_train1, spike_train2, interval=None):
+ """ Specific function to compute the bivariate ISI-distance.
+ This is a deprecated function and should not be called directly. Use
+ :func:`.isi_distance` to compute ISI-distances.
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`.SpikeTrain`
+ :param interval: averaging interval given as a pair of floats (T0, T1),
+ if None the average over the whole function is computed.
+ :type interval: Pair of floats or None.
+ :returns: The isi-distance :math:`D_I`.
+ :rtype: double
+ """
+
+ if interval is None:
+ # distance over the whole interval is requested: use specific function
+ # for optimal performance
+ try:
+ from .cython.cython_distances import isi_distance_cython \
+ as isi_distance_impl
+
+ return isi_distance_impl(spike_train1.get_spikes_non_empty(),
+ spike_train2.get_spikes_non_empty(),
+ spike_train1.t_start, spike_train1.t_end)
+ except ImportError:
+ # Cython backend not available: fall back to profile averaging
+ return isi_profile_bi(spike_train1, spike_train2).avrg(interval)
+ else:
+ # some specific interval is provided: use profile
+ return isi_profile_bi(spike_train1, spike_train2).avrg(interval)
+
+
+############################################################
+# isi_distance_multi
+############################################################
+def isi_distance_multi(spike_trains, indices=None, interval=None):
+ """ Specific function to compute the multivariate ISI-distance.
+ This is a deprecated function and should not be called directly. Use
+ :func:`.isi_distance` to compute ISI-distances.
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :param interval: averaging interval given as a pair of floats, if None
+ the average over the whole function is computed.
+ :type interval: Pair of floats or None.
+ :returns: The time-averaged multivariate ISI distance :math:`D_I`
+ :rtype: double
+ """
+ return _generic_distance_multi(spike_trains, isi_distance_bi, indices,
+ interval)
+
+
+############################################################
+# isi_distance_matrix
+############################################################
+def isi_distance_matrix(spike_trains, indices=None, interval=None):
+ """ Computes the time averaged isi-distance of all pairs of spike-trains.
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param interval: averaging interval given as a pair of floats, if None
+ the average over the whole function is computed.
+ :type interval: Pair of floats or None.
+ :returns: 2D array with the pairwise time-averaged isi-distances
+ :math:`D_{I}^{ij}`
+ :rtype: np.array
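+
+ Example (a minimal sketch; ``spike_trains`` is assumed to be a list of
+ :class:`.SpikeTrain` objects defined on the same interval)::
+
+ import pyspike as spk
+ D_I = spk.isi_distance_matrix(spike_trains)
+ # D_I[i, j] == D_I[j, i] is the ISI-distance of trains i and j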
+ """
+ return _generic_distance_matrix(spike_trains, isi_distance_bi,
+ indices=indices, interval=interval)
diff --git a/pyspike/psth.py b/pyspike/psth.py
new file mode 100644
index 0000000..7cf1140
--- /dev/null
+++ b/pyspike/psth.py
@@ -0,0 +1,34 @@
+# Module containing functions to compute the PSTH profile
+# Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+# Distributed under the BSD License
+
+import numpy as np
+from pyspike import PieceWiseConstFunc
+
+
+# Computes the peri-stimulus time histogram of a set of spike trains
+def psth(spike_trains, bin_size):
+ """ Computes the peri-stimulus time histogram of a set of
+ :class:`.SpikeTrain`. The PSTH is simply the histogram of merged spike
+ events. The :code:`bin_size` defines the width of the histogram bins.
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :param bin_size: width of the histogram bins.
+ :return: The PSTH as a :class:`.PieceWiseConstFunc`
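+
+ Example (a minimal sketch; ``spike_trains`` is assumed to be a list of
+ :class:`.SpikeTrain` objects)::
+
+ import pyspike as spk
+ hist = spk.psth(spike_trains, bin_size=0.1)
+ x, y = hist.get_plottable_data() # histogram as step function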
+ """
+
+ bin_count = int((spike_trains[0].t_end - spike_trains[0].t_start) /
+ bin_size)
+ bins = np.linspace(spike_trains[0].t_start, spike_trains[0].t_end,
+ bin_count+1)
+
+ # merge all spike events into one array; np.histogram does not
+ # require the input to be sorted
+ combined_spike_train = np.concatenate([st.spikes
+ for st in spike_trains])
+
+ vals, edges = np.histogram(combined_spike_train, bins, density=False)
+
+ return PieceWiseConstFunc(edges, vals)
diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py
new file mode 100644
index 0000000..248862c
--- /dev/null
+++ b/pyspike/spike_directionality.py
@@ -0,0 +1,522 @@
+# Module containing functions to compute the SPIKE directionality and the
+# spike train order profile
+# Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+# Distributed under the BSD License
+
+from __future__ import absolute_import
+
+import numpy as np
+import pyspike
+from pyspike import DiscreteFunc
+from functools import partial
+from pyspike.generic import _generic_profile_multi
+
+
+############################################################
+# spike_directionality_values
+############################################################
+def spike_directionality_values(*args, **kwargs):
+ """ Computes the spike directionality value for each spike in
+ each spike train. Returns a list containing an array of spike directionality
+ values for every given spike train.
+
+ Valid call structures::
+
+ spike_directionality_values(st1, st2) # returns the bi-variate profile
+ spike_directionality_values(st1, st2, st3) # multi-variate profile of 3
+ # spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_directionality_values(spike_trains) # profile of the list of spike trains
+ spike_directionality_values(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ Additional arguments:
+ :param max_tau: Upper bound for coincidence window (default=None).
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+
+ :returns: The spike directionality values :math:`D^n_i` as a list of arrays.
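+
+ Example (a minimal sketch; assumes ``st1``, ``st2``, ``st3`` are
+ :class:`.SpikeTrain` objects defined on the same interval and that this
+ function is exposed at the package level)::
+
+ import pyspike as spk
+ D = spk.spike_directionality_values(st1, st2, st3)
+ # D[n][i] is the directionality value of spike i in spike train n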
+ """
+ if len(args) == 1:
+ return _spike_directionality_values_impl(args[0], **kwargs)
+ else:
+ return _spike_directionality_values_impl(args, **kwargs)
+
+
+def _spike_directionality_values_impl(spike_trains, indices=None,
+ interval=None, max_tau=None):
+ """ Computes the multi-variate spike directionality profile
+ of the given spike trains.
+
+ :param spike_trains: List of spike trains.
+ :type spike_trains: List of :class:`pyspike.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike-directionality values.
+ """
+ if interval is not None:
+ raise NotImplementedError("Parameter `interval` not supported.")
+ if indices is None:
+ indices = np.arange(len(spike_trains))
+ indices = np.array(indices)
+ # check validity of indices
+ assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+ "Invalid index list."
+ # list of arrays for resulting asymmetry values
+ asymmetry_list = [np.zeros_like(spike_trains[n].spikes) for n in indices]
+ # generate a list of possible index pairs (positions within `indices`)
+ pairs = [(i, j) for i in range(len(indices))
+ for j in range(i+1, len(indices))]
+
+ # cython implementation
+ try:
+ from .cython.cython_directionality import \
+ spike_directionality_profiles_cython as profile_impl
+ except ImportError:
+ if not(pyspike.disable_backend_warning):
+ print("Warning: spike_distance_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+ # use python backend
+ from .cython.directionality_python_backend import \
+ spike_directionality_profile_python as profile_impl
+
+ if max_tau is None:
+ max_tau = 0.0
+
+ for i, j in pairs:
+ st_i = spike_trains[indices[i]]
+ st_j = spike_trains[indices[j]]
+ d1, d2 = profile_impl(st_i.spikes, st_j.spikes,
+ st_i.t_start, st_i.t_end, max_tau)
+ asymmetry_list[i] += d1
+ asymmetry_list[j] += d2
+ for a in asymmetry_list:
+ a /= len(indices)-1
+ return asymmetry_list
+
+
+############################################################
+# spike_directionality
+############################################################
+def spike_directionality(spike_train1, spike_train2, normalize=True,
+ interval=None, max_tau=None):
+ """ Computes the overall spike directionality of the first spike train with
+ respect to the second spike train.
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`pyspike.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`pyspike.SpikeTrain`
+ :param normalize: Normalize by the number of spikes (multiplicity).
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike directionality value.
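+
+ Example (a minimal sketch; assumes ``st1`` and ``st2`` are
+ :class:`.SpikeTrain` objects defined on the same interval and that this
+ function is exposed at the package level)::
+
+ import pyspike as spk
+ d = spk.spike_directionality(st1, st2)
+ # roughly, d > 0 suggests that spikes of st1 tend to lead st2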
+ """
+ if interval is None:
+ # distance over the whole interval is requested: use specific function
+ # for optimal performance
+ try:
+ from .cython.cython_directionality import \
+ spike_directionality_cython as spike_directionality_impl
+ if max_tau is None:
+ max_tau = 0.0
+ d = spike_directionality_impl(spike_train1.spikes,
+ spike_train2.spikes,
+ spike_train1.t_start,
+ spike_train1.t_end,
+ max_tau)
+ c = len(spike_train1.spikes)
+ except ImportError:
+ if not(pyspike.disable_backend_warning):
+ print("Warning: spike_distance_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+ # use profile.
+ d1, x = spike_directionality_values([spike_train1, spike_train2],
+ interval=interval,
+ max_tau=max_tau)
+ d = np.sum(d1)
+ c = len(spike_train1.spikes)
+ if normalize:
+ return 1.0*d/c
+ else:
+ return d
+ else:
+ # some specific interval is provided: not yet implemented
+ raise NotImplementedError("Parameter `interval` not supported.")
+
+
+############################################################
+# spike_directionality_matrix
+############################################################
+def spike_directionality_matrix(spike_trains, normalize=True, indices=None,
+ interval=None, max_tau=None):
+ """ Computes the spike directionality matrix for the given spike trains.
+
+ :param spike_trains: List of spike trains.
+ :type spike_trains: List of :class:`pyspike.SpikeTrain`
+ :param normalize: Normalize by the number of spikes (multiplicity).
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike-directionality matrix.
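+
+ Example (a minimal sketch; ``spike_trains`` is assumed to be a list of
+ :class:`.SpikeTrain` objects and this function to be exposed at the
+ package level)::
+
+ import pyspike as spk
+ D = spk.spike_directionality_matrix(spike_trains)
+ # D is anti-symmetric: D[i, j] == -D[j, i]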
+ """
+ if indices is None:
+ indices = np.arange(len(spike_trains))
+ indices = np.array(indices)
+ # check validity of indices
+ assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+ "Invalid index list."
+ # generate a list of possible index pairs
+ pairs = [(i, j) for i in range(len(indices))
+ for j in range(i+1, len(indices))]
+
+ distance_matrix = np.zeros((len(indices), len(indices)))
+ for i, j in pairs:
+ d = spike_directionality(spike_trains[indices[i]],
+ spike_trains[indices[j]], normalize,
+ interval, max_tau=max_tau)
+ distance_matrix[i, j] = d
+ distance_matrix[j, i] = -d
+ return distance_matrix
+
+
+############################################################
+# spike_train_order_profile
+############################################################
+def spike_train_order_profile(*args, **kwargs):
+ """ Computes the spike train order profile :math:`E(t)` of the given
+ spike trains. Returns the profile as a DiscreteFunction object.
+
+ Valid call structures::
+
+ spike_train_order_profile(st1, st2) # returns the bi-variate profile
+ spike_train_order_profile(st1, st2, st3) # multi-variate profile of 3
+ # spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_train_order_profile(spike_trains) # profile of the list of spike trains
+ spike_train_order_profile(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ Additional arguments:
+ :param max_tau: Upper bound for coincidence window, `default=None`.
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+
+ :returns: The spike train order profile :math:`E(t)`
+ :rtype: :class:`.DiscreteFunction`
+ """
+ if len(args) == 1:
+ return spike_train_order_profile_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_train_order_profile_bi(args[0], args[1], **kwargs)
+ else:
+ return spike_train_order_profile_multi(args, **kwargs)
+
+
+############################################################
+# spike_train_order_profile_bi
+############################################################
+def spike_train_order_profile_bi(spike_train1, spike_train2, max_tau=None):
+ """ Computes the spike train order profile P(t) of the two given
+ spike trains. Returns the profile as a DiscreteFunction object.
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`pyspike.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`pyspike.SpikeTrain`
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike train order profile :math:`E(t)`.
+ :rtype: :class:`pyspike.function.DiscreteFunction`
+ """
+ # check whether the spike trains are defined for the same interval
+ assert spike_train1.t_start == spike_train2.t_start, \
+ "Given spike trains are not defined on the same interval!"
+ assert spike_train1.t_end == spike_train2.t_end, \
+ "Given spike trains are not defined on the same interval!"
+
+ # cython implementation
+ try:
+ from .cython.cython_directionality import \
+ spike_train_order_profile_cython as \
+ spike_train_order_profile_impl
+ except ImportError:
+ if not(pyspike.disable_backend_warning):
+ print("Warning: spike_train_order_profile_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+ # use python backend
+ from .cython.directionality_python_backend import \
+ spike_train_order_profile_python as spike_train_order_profile_impl
+
+ if max_tau is None:
+ max_tau = 0.0
+
+ times, coincidences, multiplicity \
+ = spike_train_order_profile_impl(spike_train1.spikes,
+ spike_train2.spikes,
+ spike_train1.t_start,
+ spike_train1.t_end,
+ max_tau)
+
+ return DiscreteFunc(times, coincidences, multiplicity)
+
+
+############################################################
+# spike_train_order_profile_multi
+############################################################
+def spike_train_order_profile_multi(spike_trains, indices=None,
+ max_tau=None):
+ """ Computes the multi-variate spike train order profile for a set of
+ spike trains. For each spike in the set of spike trains, the multi-variate
+ profile is defined as the sum of asymmetry values divided by the number of
+ spike train pairs involving the spike train containing this spike,
+ which is the number of spike trains minus one (N-1).
+
+ :param spike_trains: list of :class:`pyspike.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The multi-variate spike train order profile :math:`<E>(t)`
+ :rtype: :class:`pyspike.function.DiscreteFunction`
+ """
+ prof_func = partial(spike_train_order_profile_bi, max_tau=max_tau)
+ average_prof, M = _generic_profile_multi(spike_trains, prof_func,
+ indices)
+ return average_prof
+
+
+############################################################
+# _spike_train_order_impl
+############################################################
+def _spike_train_order_impl(spike_train1, spike_train2,
+ interval=None, max_tau=None):
+ """ Implementation of bi-variatae spike train order value (Synfire Indicator).
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`pyspike.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`pyspike.SpikeTrain`
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike train order value (Synfire Indicator)
+ """
+ if interval is None:
+ # distance over the whole interval is requested: use specific function
+ # for optimal performance
+ try:
+ from .cython.cython_directionality import \
+ spike_train_order_cython as spike_train_order_func
+ if max_tau is None:
+ max_tau = 0.0
+ c, mp = spike_train_order_func(spike_train1.spikes,
+ spike_train2.spikes,
+ spike_train1.t_start,
+ spike_train1.t_end,
+ max_tau)
+ except ImportError:
+ # Cython backend not available: fall back to profile averaging
+ c, mp = spike_train_order_profile(spike_train1, spike_train2,
+ max_tau=max_tau).integral(interval)
+ return c, mp
+ else:
+ # some specific interval is provided: not yet implemented
+ raise NotImplementedError("Parameter `interval` not supported.")
+
+
+############################################################
+# spike_train_order
+############################################################
+def spike_train_order(*args, **kwargs):
+ """ Computes the spike train order (Synfire Indicator) of the given
+ spike trains.
+
+ Valid call structures::
+
+ spike_train_order(st1, st2, normalize=True) # normalized bi-variate
+ # spike train order
+ spike_train_order(st1, st2, st3) # multi-variate result of 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_train_order(spike_trains) # result for the list of spike trains
+ spike_train_order(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ Additional arguments:
+ - `max_tau` Upper bound for coincidence window, `default=None`.
+ - `normalize` Flag indicating if the result should be normalized by the
+ number of spikes (multiplicity), `default=True`
+
+
+ :returns: The spike train order value (Synfire Indicator)
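+
+ Example (a minimal sketch; assumes ``st1`` and ``st2`` are
+ :class:`.SpikeTrain` objects defined on the same interval and that this
+ function is exposed at the package level)::
+
+ import pyspike as spk
+ F = spk.spike_train_order(st1, st2)
+ # F close to +1: st1 tends to lead st2; close to -1: st2 leads st1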
+ """
+ if len(args) == 1:
+ return spike_train_order_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_train_order_bi(args[0], args[1], **kwargs)
+ else:
+ return spike_train_order_multi(args, **kwargs)
+
+
+############################################################
+# spike_train_order_bi
+############################################################
+def spike_train_order_bi(spike_train1, spike_train2, normalize=True,
+ interval=None, max_tau=None):
+ """ Computes the overall spike train order value (Synfire Indicator)
+ for two spike trains.
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`pyspike.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`pyspike.SpikeTrain`
+ :param normalize: Normalize by the number of spikes (multiplicity).
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike train order value (Synfire Indicator)
+ """
+ c, mp = _spike_train_order_impl(spike_train1, spike_train2, interval, max_tau)
+ if normalize:
+ return 1.0*c/mp
+ else:
+ return c
+
+
+############################################################
+# spike_train_order_multi
+############################################################
+def spike_train_order_multi(spike_trains, indices=None, normalize=True,
+ interval=None, max_tau=None):
+ """ Computes the overall spike train order value (Synfire Indicator)
+ for many spike trains.
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :param normalize: Normalize by the number of spikes (multiplicity).
+ :param interval: averaging interval given as a pair of floats, if None
+ the average over the whole function is computed.
+ :type interval: Pair of floats or None.
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: Spike train order values (Synfire Indicator) F for the given spike trains.
+ :rtype: double
+ """
+ if indices is None:
+ indices = np.arange(len(spike_trains))
+ indices = np.array(indices)
+ # check validity of indices
+ assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+ "Invalid index list."
+ # generate a list of possible index pairs
+ pairs = [(indices[i], j) for i in range(len(indices))
+ for j in indices[i+1:]]
+
+ e_total = 0.0
+ m_total = 0.0
+ for (i, j) in pairs:
+ e, m = _spike_train_order_impl(spike_trains[i], spike_trains[j],
+ interval, max_tau)
+ e_total += e
+ m_total += m
+
+ if m_total == 0.0:
+ return 1.0
+ else:
+ return e_total/m_total
+
+
+############################################################
+# optimal_spike_train_sorting_from_matrix
+############################################################
+def _optimal_spike_train_sorting_from_matrix(D, full_output=False):
+ """ Finds the best sorting via simulated annealing.
+ Returns the optimal permutation p and A value.
+ Not for direct use, call :func:`.optimal_spike_train_sorting` instead.
+
+ :param D: The directionality (Spike-ORDER) matrix.
+ :param full_output: If true, then function will additionally return the
+ number of performed iterations (default=False)
+ :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+ if `full_output=True`, (p, F, iter) is returned.
+ """
+ # simulated annealing schedule
+ T_start = 2*np.max(D) # starting temperature
+ T_end = 1E-5 * T_start # final temperature
+ alpha = 0.9 # cooling factor
+
+ try:
+ from .cython.cython_simulated_annealing import sim_ann_cython as sim_ann
+ except ImportError:
+ raise NotImplementedError("PySpike with Cython required for computing spike train"
+ " sorting!")
+
+ p, A, total_iter = sim_ann(D, T_start, T_end, alpha)
+
+ if full_output:
+ return p, A, total_iter
+ else:
+ return p, A
+
+
+############################################################
+# optimal_spike_train_sorting
+############################################################
+def optimal_spike_train_sorting(spike_trains, indices=None, interval=None,
+ max_tau=None, full_output=False):
+ """ Finds the best sorting of the given spike trains by computing the spike
+ directionality matrix and optimizing the order using simulated annealing.
+ For a detailed description of the algorithm see:
+ `http://iopscience.iop.org/article/10.1088/1367-2630/aa68c3/meta`
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param interval: time interval filter given as a pair of floats, if None
+ the full spike trains are used (default=None).
+ :type interval: Pair of floats or None.
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound (default=None).
+ :param full_output: If true, then function will additionally return the
+ number of performed iterations (default=False)
+ :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+ if `full_output=True`, (p, F, iter) is returned.
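+
+ Example (a minimal sketch; ``spike_trains`` is assumed to be a list of
+ :class:`.SpikeTrain` objects and the Cython backend to be available)::
+
+ import pyspike as spk
+ p, F = spk.optimal_spike_train_sorting(spike_trains)
+ sorted_trains = [spike_trains[i] for i in p]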
+ """
+ D = spike_directionality_matrix(spike_trains, normalize=False,
+ indices=indices, interval=interval,
+ max_tau=max_tau)
+ return _optimal_spike_train_sorting_from_matrix(D, full_output)
+
+############################################################
+# permutate_matrix
+############################################################
+def permutate_matrix(D, p):
+ """ Helper function that applies the permutation p to the columns and rows
+ of matrix D. Returns the permuted matrix :math:`D'[n,m] = D[p[n], p[m]]`.
+
+ :param D: The matrix.
+ :param p: The permutation.
+ :return: The permuted matrix D', i.e. :math:`D'[n,m] = D[p[n], p[m]]`
+ """
+ N = len(D)
+ D_p = np.empty_like(D)
+ for n in range(N):
+ for m in range(N):
+ D_p[n, m] = D[p[n], p[m]]
+ return D_p
diff --git a/pyspike/spike_distance.py b/pyspike/spike_distance.py
new file mode 100644
index 0000000..0fd86c1
--- /dev/null
+++ b/pyspike/spike_distance.py
@@ -0,0 +1,231 @@
+# Module containing several functions to compute SPIKE profiles and distances
+# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
+# Distributed under the BSD License
+
+from __future__ import absolute_import
+
+import pyspike
+from pyspike import PieceWiseLinFunc
+from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \
+ _generic_distance_matrix
+
+
+############################################################
+# spike_profile
+############################################################
+def spike_profile(*args, **kwargs):
+ """ Computes the spike-distance profile :math:`S(t)` of the given
+ spike trains. Returns the profile as a PieceWiseLinFunc object. The
+ SPIKE-values are non-negative, :math:`S(t) \\geq 0`.
+
+ Valid call structures::
+
+ spike_profile(st1, st2) # returns the bi-variate profile
+ spike_profile(st1, st2, st3) # multi-variate profile of 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_profile(spike_trains) # profile of the list of spike trains
+ spike_profile(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ The multivariate spike-distance profile is defined as the average of all
+ pairs of spike-trains:
+
+ .. math:: <S(t)> = \\frac{2}{N(N-1)} \\sum_{<i,j>} S^{i, j},
+
+ where the sum goes over all pairs <i,j>
+
+ :returns: The spike-distance profile :math:`S(t)`
+ :rtype: :class:`.PieceWiseLinFunc`
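+
+ Example (a minimal sketch, assuming PySpike is importable as ``spk``)::
+
+ import pyspike as spk
+ st1 = spk.SpikeTrain([0.1, 0.3, 0.7], edges=(0.0, 1.0))
+ st2 = spk.SpikeTrain([0.2, 0.35, 0.8], edges=(0.0, 1.0))
+ S = spk.spike_profile(st1, st2)
+ x, y = S.get_plottable_data() # piecewise-linear coordinates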
+ """
+ if len(args) == 1:
+ return spike_profile_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_profile_bi(args[0], args[1])
+ else:
+ return spike_profile_multi(args)
+
+
+############################################################
+# spike_profile_bi
+############################################################
+def spike_profile_bi(spike_train1, spike_train2):
+ """ Specific function to compute a bivariate SPIKE-profile. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.spike_profile` to compute SPIKE-profiles.
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`.SpikeTrain`
+ :returns: The spike-distance profile :math:`S(t)`.
+ :rtype: :class:`.PieceWiseLinFunc`
+
+ """
+ # check whether the spike trains are defined for the same interval
+ assert spike_train1.t_start == spike_train2.t_start, \
+ "Given spike trains are not defined on the same interval!"
+ assert spike_train1.t_end == spike_train2.t_end, \
+ "Given spike trains are not defined on the same interval!"
+
+ # cython implementation
+ try:
+ from .cython.cython_profiles import spike_profile_cython \
+ as spike_profile_impl
+ except ImportError:
+ if not(pyspike.disable_backend_warning):
+ print("Warning: spike_profile_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+ # use python backend
+ from .cython.python_backend import spike_distance_python \
+ as spike_profile_impl
+
+ times, y_starts, y_ends = spike_profile_impl(
+ spike_train1.get_spikes_non_empty(),
+ spike_train2.get_spikes_non_empty(),
+ spike_train1.t_start, spike_train1.t_end)
+
+ return PieceWiseLinFunc(times, y_starts, y_ends)
+
+
+############################################################
+# spike_profile_multi
+############################################################
+def spike_profile_multi(spike_trains, indices=None):
+ """ Specific function to compute a multivariate SPIKE-profile. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.spike_profile` to compute SPIKE-profiles.
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :returns: The averaged spike profile :math:`<S>(t)`
+ :rtype: :class:`.PieceWiseLinFunc`
+
+ """
+ average_dist, M = _generic_profile_multi(spike_trains, spike_profile_bi,
+ indices)
+ average_dist.mul_scalar(1.0/M) # normalize
+ return average_dist
+
+
+############################################################
+# spike_distance
+############################################################
+def spike_distance(*args, **kwargs):
+ """ Computes the SPIKE-distance :math:`D_S` of the given spike trains. The
+ spike-distance is the integral over the spike distance profile
+ :math:`S(t)`:
+
+ .. math:: D_S = \\int_{T_0}^{T_1} S(t) dt.
+
+
+ Valid call structures::
+
+ spike_distance(st1, st2) # returns the bi-variate distance
+ spike_distance(st1, st2, st3) # multi-variate distance of 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_distance(spike_trains) # distance of the list of spike trains
+ spike_distance(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ In the multivariate case, the spike distance is given as the integral over
+ the multivariate profile, that is the average profile of all spike train
+ pairs:
+
+ .. math:: D_S = \\int_0^T \\frac{2}{N(N-1)} \\sum_{<i,j>}
+ S^{i, j} dt
+
+ :returns: The spike-distance :math:`D_S`.
+ :rtype: double
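+
+ Example (a minimal sketch, assuming PySpike is importable as ``spk``)::
+
+ import pyspike as spk
+ st1 = spk.SpikeTrain([0.1, 0.3, 0.7], edges=(0.0, 1.0))
+ st2 = spk.SpikeTrain([0.2, 0.35, 0.8], edges=(0.0, 1.0))
+ d = spk.spike_distance(st1, st2) # scalar SPIKE-distance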
+ """
+
+ if len(args) == 1:
+ return spike_distance_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_distance_bi(args[0], args[1], **kwargs)
+ else:
+ return spike_distance_multi(args, **kwargs)
+
+
+############################################################
+# spike_distance_bi
+############################################################
+def spike_distance_bi(spike_train1, spike_train2, interval=None):
+ """ Specific function to compute a bivariate SPIKE-distance. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.spike_distance` to compute SPIKE-distances.
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`.SpikeTrain`
+ :param interval: averaging interval given as a pair of floats (T0, T1),
+ if None the average over the whole function is computed.
+ :type interval: Pair of floats or None.
+ :returns: The spike-distance.
+ :rtype: double
+
+ """
+ if interval is None:
+ # distance over the whole interval is requested: use specific function
+ # for optimal performance
+ try:
+ from .cython.cython_distances import spike_distance_cython \
+ as spike_distance_impl
+ return spike_distance_impl(spike_train1.get_spikes_non_empty(),
+ spike_train2.get_spikes_non_empty(),
+ spike_train1.t_start,
+ spike_train1.t_end)
+ except ImportError:
+ # Cython backend not available: fall back to average profile
+ return spike_profile_bi(spike_train1, spike_train2).avrg(interval)
+ else:
+ # some specific interval is provided: compute the whole profile
+ return spike_profile_bi(spike_train1, spike_train2).avrg(interval)
+
+
+############################################################
+# spike_distance_multi
+############################################################
+def spike_distance_multi(spike_trains, indices=None, interval=None):
+ """ Specific function to compute a multivariate SPIKE-distance. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.spike_distance` to compute SPIKE-distances.
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param interval: averaging interval given as a pair of floats, if None
+ the average over the whole function is computed.
+ :type interval: Pair of floats or None.
+ :returns: The averaged multi-variate spike distance :math:`D_S`.
+ :rtype: double
+ """
+ return _generic_distance_multi(spike_trains, spike_distance_bi, indices,
+ interval)
+
+
+############################################################
+# spike_distance_matrix
+############################################################
+def spike_distance_matrix(spike_trains, indices=None, interval=None):
+ """ Computes the time averaged spike-distance of all pairs of spike-trains.
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param interval: averaging interval given as a pair of floats, if None
+ the average over the whole function is computed.
+ :type interval: Pair of floats or None.
+ :returns: 2D array with the pairwise time-averaged spike distances
+ :math:`D_S^{ij}`
+ :rtype: np.array
+ """
+ return _generic_distance_matrix(spike_trains, spike_distance_bi,
+ indices, interval)
diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py
new file mode 100644
index 0000000..95ef454
--- /dev/null
+++ b/pyspike/spike_sync.py
@@ -0,0 +1,341 @@
+# Module containing several functions to compute SPIKE-Synchronization profiles
+# and distances
+# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
+# Distributed under the BSD License
+
+from __future__ import absolute_import
+
+import numpy as np
+from functools import partial
+import pyspike
+from pyspike import DiscreteFunc, SpikeTrain
+from pyspike.generic import _generic_profile_multi, _generic_distance_matrix
+
+
+############################################################
+# spike_sync_profile
+############################################################
+def spike_sync_profile(*args, **kwargs):
+ """ Computes the spike-synchronization profile S_sync(t) of the given
+ spike trains. Returns the profile as a DiscreteFunction object. In the
+ bivariate case, the S_sync values are either 1 or 0, indicating the
+ presence or absence of a coincidence. In the multi-variate case, the
+ profile is defined for each spike as the number of coincidences divided
+ by the number of spike train pairs involving the spike train containing
+ this spike, which is the number of spike trains minus one (N-1).
+
+ Valid call structures::
+
+ spike_sync_profile(st1, st2) # returns the bi-variate profile
+ spike_sync_profile(st1, st2, st3) # multi-variate profile of 3 sts
+
+ sts = [st1, st2, st3, st4] # list of spike trains
+ spike_sync_profile(sts) # profile of the list of spike trains
+ spike_sync_profile(sts, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ :returns: The spike-sync profile :math:`S_{sync}(t)`.
+ :rtype: :class:`pyspike.function.DiscreteFunction`
+ """
+ if len(args) == 1:
+ return spike_sync_profile_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_sync_profile_bi(args[0], args[1], **kwargs)
+ else:
+ return spike_sync_profile_multi(args, **kwargs)
+
+
+############################################################
+# spike_sync_profile_bi
+############################################################
+def spike_sync_profile_bi(spike_train1, spike_train2, max_tau=None):
+ """ Specific function to compute a bivariate SPIKE-Sync-profile. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.spike_sync_profile` to compute SPIKE-Sync-profiles.
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`pyspike.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`pyspike.SpikeTrain`
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike-sync profile :math:`S_{sync}(t)`.
+ :rtype: :class:`pyspike.function.DiscreteFunction`
+
+ """
+ # check whether the spike trains are defined for the same interval
+ assert spike_train1.t_start == spike_train2.t_start, \
+ "Given spike trains are not defined on the same interval!"
+ assert spike_train1.t_end == spike_train2.t_end, \
+ "Given spike trains are not defined on the same interval!"
+
+ # cython implementation
+ try:
+ from .cython.cython_profiles import coincidence_profile_cython \
+ as coincidence_profile_impl
+ except ImportError:
+ if not(pyspike.disable_backend_warning):
+ print("Warning: spike_distance_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+ # use python backend
+ from .cython.python_backend import coincidence_python \
+ as coincidence_profile_impl
+
+ if max_tau is None:
+ max_tau = 0.0
+
+ times, coincidences, multiplicity \
+ = coincidence_profile_impl(spike_train1.spikes, spike_train2.spikes,
+ spike_train1.t_start, spike_train1.t_end,
+ max_tau)
+
+ return DiscreteFunc(times, coincidences, multiplicity)
+
+
+############################################################
+# spike_sync_profile_multi
+############################################################
+def spike_sync_profile_multi(spike_trains, indices=None, max_tau=None):
+ """ Specific function to compute a multivariate SPIKE-Sync-profile.
+ This is a deprecated function and should not be called directly. Use
+ :func:`.spike_sync_profile` to compute SPIKE-Sync-profiles.
+
+ :param spike_trains: list of :class:`pyspike.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The multi-variate spike sync profile :math:`<S_{sync}>(t)`
+ :rtype: :class:`pyspike.function.DiscreteFunction`
+
+ """
+ prof_func = partial(spike_sync_profile_bi, max_tau=max_tau)
+ average_prof, M = _generic_profile_multi(spike_trains, prof_func,
+ indices)
+ # average_dist.mul_scalar(1.0/M) # no normalization here!
+ return average_prof
+
+
+############################################################
+# _spike_sync_values
+############################################################
+def _spike_sync_values(spike_train1, spike_train2, interval, max_tau):
+ """" Internal function. Computes the summed coincidences and multiplicity
+ for spike synchronization of the two given spike trains.
+
+ Do not call this function directly, use `spike_sync` or `spike_sync_multi`
+ instead.
+ """
+ if interval is None:
+ # distance over the whole interval is requested: use specific function
+ # for optimal performance
+ try:
+ from .cython.cython_distances import coincidence_value_cython \
+ as coincidence_value_impl
+ if max_tau is None:
+ max_tau = 0.0
+ c, mp = coincidence_value_impl(spike_train1.spikes,
+ spike_train2.spikes,
+ spike_train1.t_start,
+ spike_train1.t_end,
+ max_tau)
+ return c, mp
+ except ImportError:
+ # Cython backend not available: fall back to profile averaging
+ return spike_sync_profile_bi(spike_train1, spike_train2,
+ max_tau).integral(interval)
+ else:
+ # some specific interval is provided: use profile
+ return spike_sync_profile_bi(spike_train1, spike_train2,
+ max_tau).integral(interval)
+
+
+############################################################
+# spike_sync
+############################################################
+def spike_sync(*args, **kwargs):
+ """ Computes the spike synchronization value SYNC of the given spike
+ trains. The spike synchronization value is computed as the total number
+ of coincidences divided by the total number of spikes:
+
+ .. math:: SYNC = \\sum_n C_n / N.
+
+
+ Valid call structures::
+
+ spike_sync(st1, st2) # returns the bi-variate spike synchronization
+ spike_sync(st1, st2, st3) # multi-variate result for 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_sync(spike_trains) # spike-sync of the list of spike trains
+ spike_sync(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ The multivariate SPIKE-Sync is again defined as the overall ratio of all
+ coincidence values divided by the total number of spikes.
+
+ :returns: The spike synchronization value.
+ :rtype: `double`
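+
+ Example (a minimal sketch, assuming PySpike is importable as ``spk``)::
+
+ import pyspike as spk
+ st1 = spk.SpikeTrain([0.1, 0.3, 0.7], edges=(0.0, 1.0))
+ st2 = spk.SpikeTrain([0.12, 0.4, 0.72], edges=(0.0, 1.0))
+ s = spk.spike_sync(st1, st2, max_tau=0.05) # value in [0, 1]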
+ """
+
+ if len(args) == 1:
+ return spike_sync_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_sync_bi(args[0], args[1], **kwargs)
+ else:
+ return spike_sync_multi(args, **kwargs)
+
+
+############################################################
+# spike_sync_bi
+############################################################
+def spike_sync_bi(spike_train1, spike_train2, interval=None, max_tau=None):
+ """ Specific function to compute a bivariate SPIKE-Sync value.
+ This is a deprecated function and should not be called directly. Use
+ :func:`.spike_sync` to compute SPIKE-Sync values.
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`pyspike.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`pyspike.SpikeTrain`
+ :param interval: averaging interval given as a pair of floats (T0, T1),
+ if `None` the average over the whole function is computed.
+ :type interval: Pair of floats or None.
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike synchronization value.
+ :rtype: `double`
+
+ """
+ c, mp = _spike_sync_values(spike_train1, spike_train2, interval, max_tau)
+ if mp == 0:
+ return 1.0
+ else:
+ return 1.0*c/mp
+
+
+############################################################
+# spike_sync_multi
+############################################################
+def spike_sync_multi(spike_trains, indices=None, interval=None, max_tau=None):
+ """ Specific function to compute a multivariate SPIKE-Sync value.
+ This is a deprecated function and should not be called directly. Use
+ :func:`.spike_sync` to compute SPIKE-Sync values.
+
+ :param spike_trains: list of :class:`pyspike.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param interval: averaging interval given as a pair of floats, if None
+ the average over the whole function is computed.
+ :type interval: Pair of floats or None.
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The multi-variate spike synchronization value SYNC.
+ :rtype: double
+
+ """
+ if indices is None:
+ indices = np.arange(len(spike_trains))
+ indices = np.array(indices)
+ # check validity of indices
+ assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+ "Invalid index list."
+ # generate a list of possible index pairs
+ pairs = [(indices[i], j) for i in range(len(indices))
+ for j in indices[i+1:]]
+
+ coincidence = 0.0
+ mp = 0.0
+ for (i, j) in pairs:
+ c, m = _spike_sync_values(spike_trains[i], spike_trains[j],
+ interval, max_tau)
+ coincidence += c
+ mp += m
+
+ if mp == 0.0:
+ return 1.0
+ else:
+ return coincidence/mp
+
+
+############################################################
+# spike_sync_matrix
+############################################################
+def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None):
+ """ Computes the overall spike-synchronization value of all pairs of
+ spike-trains.
+
+ :param spike_trains: list of :class:`pyspike.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param interval: averaging interval given as a pair of floats, if None
+ the average over the whole function is computed.
+ :type interval: Pair of floats or None.
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: 2D array with the pairwise spike synchronization values
+ :math:`SYNC_{ij}`
+ :rtype: np.array
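+
+ Example (a minimal sketch; ``spike_trains`` is assumed to be a list of
+ :class:`.SpikeTrain` objects)::
+
+ import pyspike as spk
+ M = spk.spike_sync_matrix(spike_trains, max_tau=0.05)
+ # M[i, j] is the SPIKE-Sync value of trains i and j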
+
+ """
+ dist_func = partial(spike_sync_bi, max_tau=max_tau)
+ return _generic_distance_matrix(spike_trains, dist_func,
+ indices, interval)
+
+
+############################################################
+# filter_by_spike_sync
+############################################################
+def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None,
+ return_removed_spikes=False):
+ """ Removes the spikes with a multi-variate spike_sync value below
+ threshold.
+ """
+ if indices is not None:
+ spike_trains = [spike_trains[i] for i in indices]
+ N = len(spike_trains)
+ filtered_spike_trains = []
+ removed_spike_trains = []
+
+ # cython implementation
+ try:
+ from .cython.cython_profiles import coincidence_single_profile_cython \
+ as coincidence_impl
+ except ImportError:
+ if not(pyspike.disable_backend_warning):
+ print("Warning: coincidence_single_profile_cython not found. Make \
+sure that PySpike is installed by running\n \
+'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+ # use python backend
+ from .cython.python_backend import coincidence_single_python \
+ as coincidence_impl
+
+ if max_tau is None:
+ max_tau = 0.0
+
+ for i, st in enumerate(spike_trains):
+ coincidences = np.zeros_like(st.spikes)
+ for j in range(N):
+ if i == j:
+ continue
+ coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes,
+ st.t_start, st.t_end, max_tau)
+ filtered_spikes = st.spikes[coincidences > threshold*(N-1)]
+ filtered_spike_trains.append(SpikeTrain(filtered_spikes,
+ [st.t_start, st.t_end]))
+ if return_removed_spikes:
+ removed_spikes = st.spikes[coincidences <= threshold*(N-1)]
+ removed_spike_trains.append(SpikeTrain(removed_spikes,
+ [st.t_start, st.t_end]))
+ if return_removed_spikes:
+ return [filtered_spike_trains, removed_spike_trains]
+ else:
+ return filtered_spike_trains
diff --git a/pyspike/spikes.py b/pyspike/spikes.py
new file mode 100644
index 0000000..cf47043
--- /dev/null
+++ b/pyspike/spikes.py
@@ -0,0 +1,161 @@
+# Module containing several function to load and transform spike trains
+# Copyright 2014, Mario Mulansky <mario.mulansky@gmx.net>
+# Distributed under the BSD License
+
+
+import numpy as np
+from pyspike import SpikeTrain
+
+
+############################################################
+# spike_train_from_string
+############################################################
+def spike_train_from_string(s, edges, sep=' ', is_sorted=False):
+ """ Converts a string of times into a :class:`.SpikeTrain`.
+
+ :param s: the string with (ordered) spike times.
+ :param edges: interval defining the edges of the spike train.
+ Given as a pair of floats (T0, T1) or a single float T1,
+ where T0=0 is assumed.
+ :param sep: The separator between the time numbers, default=' '.
+ :param is_sorted: if True, the spike times are not sorted after loading,
+ if False, spike times are sorted with `np.sort`
+ :returns: :class:`.SpikeTrain`
+ """
+ return SpikeTrain(np.fromstring(s, sep=sep), edges, is_sorted)
+
+
+############################################################
+# load_spike_trains_from_txt
+############################################################
+def load_spike_trains_from_txt(file_name, edges,
+ separator=' ', comment='#', is_sorted=False,
+ ignore_empty_lines=True):
+ """ Loads a number of spike trains from a text file. Each line of the text
+ file should contain one spike train as a sequence of spike times separated
+ by `separator`. Empty lines as well as lines starting with `comment` are
+ ignored. The `edges` parameter represents the start and the end of the
+ spike trains.
+
+ :param file_name: The name of the text file.
+ :param edges: A pair (T_start, T_end) of values representing the
+ start and end time of the spike train measurement
+ or a single value representing the end time, the
+ T_start is then assumed as 0.
+ :param separator: The character used to separate the values in the text
+ file (default=' ').
+ :param comment: Lines starting with this character are ignored.
+ :param is_sorted: If True, the spike times are assumed to be sorted
+ already; if False, they are sorted via `np.sort` (default=False)
+ :param ignore_empty_lines: If True, empty lines are skipped; if False,
+ they produce empty spike trains (default=True)
+ :returns: list of :class:`.SpikeTrain`
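+
+ Example (a minimal sketch; ``spikes.txt`` is a hypothetical file with
+ one spike train per line)::
+
+ import pyspike as spk
+ spike_trains = spk.load_spike_trains_from_txt("spikes.txt",
+ edges=(0, 4000))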
+ """
+ spike_trains = []
+ with open(file_name, 'r') as spike_file:
+ for line in spike_file:
+ if not line.startswith(comment): # ignore comments
+ if len(line) > 1:
+ # ignore empty lines
+ spike_train = spike_train_from_string(line, edges,
+ separator, is_sorted)
+ spike_trains.append(spike_train)
+ elif not(ignore_empty_lines):
+ # add empty spike train
+ spike_trains.append(SpikeTrain([], edges))
+ return spike_trains
+
+
+def import_spike_trains_from_time_series(file_name, start_time, time_bin,
+ separator=None, comment='#'):
+ """ Imports spike trains from time series consisting of 0 and 1 denoting
+ the absence or presence of a spike. Each line in the data file represents
+ one spike train.
+
+ :param file_name: The name of the data file containing the time series.
+ :param start_time: The time of the first bin; also used as the start
+ time of the resulting spike trains.
+ :param time_bin: The width of one time bin; a spike in bin i is placed
+ at time start_time + (i+1)*time_bin.
+ :param separator: The character used to separate the values in the text
+ file (default=None, i.e. any whitespace).
+ :param comment: Lines starting with this character are ignored.
+ :returns: list of :class:`.SpikeTrain`
+
+ """
+ data = np.loadtxt(file_name, comments=comment, delimiter=separator)
+ time_points = start_time + time_bin + np.arange(len(data[0, :]))*time_bin
+ spike_trains = []
+ for time_series in data:
+ spike_trains.append(SpikeTrain(time_points[time_series > 0],
+ edges=[start_time,
+ time_points[-1]]))
+ return spike_trains
+
+
+############################################################
+# save_spike_trains_to_txt
+############################################################
+def save_spike_trains_to_txt(spike_trains, file_name,
+ separator=' ', precision=8):
+ """ Saves the given spike trains into a file with the given file name.
+ Each spike train will be stored in one line in the text file with the times
+ separated by `separator`.
+
+ :param spike_trains: List of :class:`.SpikeTrain` objects
+ :param file_name: The name of the text file.
+ """
+ # format string to print the spike times with given precision
+ format_str = "{0:.%de}" % precision
+ with open(file_name, 'w') as spike_file:
+ for st in spike_trains:
+ s = separator.join(map(format_str.format, st.spikes))
+ spike_file.write(s+'\n')
+
+
+############################################################
+# merge_spike_trains
+############################################################
+def merge_spike_trains(spike_trains):
+ """ Merges a number of spike trains into a single spike train.
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :returns: spike train with the merged spike times
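+
+ Example (a minimal sketch, assuming PySpike is importable as ``spk``)::
+
+ import pyspike as spk
+ st1 = spk.SpikeTrain([0.1, 0.5], edges=(0.0, 1.0))
+ st2 = spk.SpikeTrain([0.3, 0.9], edges=(0.0, 1.0))
+ merged = spk.merge_spike_trains([st1, st2])
+ # merged.spikes -> array([0.1, 0.3, 0.5, 0.9])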
+ """
+ # concatenating and sorting with numpy is fast, it also means we can handle
+ # empty spike trains
+ merged_spikes = np.concatenate([st.spikes for st in spike_trains])
+ merged_spikes.sort()
+ return SpikeTrain(merged_spikes, [spike_trains[0].t_start,
+ spike_trains[0].t_end])
+
+
+############################################################
+# generate_poisson_spikes
+############################################################
+def generate_poisson_spikes(rate, interval):
+ """ Generates a Poisson spike train with the given rate in the given time
+ interval
+
+ :param rate: The rate of the spike trains
+ :param interval: A pair (T_start, T_end) of values representing the
+ start and end time of the spike train measurement or
+ a single value representing the end time, the T_start
+ is then assumed as 0.
+ :type interval: pair of doubles or double
+ :returns: Poisson spike train as a :class:`.SpikeTrain`
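+
+ Example (a minimal sketch, assuming PySpike is importable as ``spk``)::
+
+ import pyspike as spk
+ st = spk.generate_poisson_spikes(rate=10.0, interval=(0.0, 10.0))
+ # st contains on average ~100 spikes in [0, 10)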
+ """
+ try:
+ T_start = interval[0]
+ T_end = interval[1]
+ except (TypeError, IndexError): # interval is a single number
+ T_start = 0
+ T_end = interval
+ # roughly how many spikes are required to fill the interval
+ N = max(1, int(1.2 * rate * (T_end-T_start)))
+ N_append = max(1, int(0.1 * rate * (T_end-T_start)))
+ intervals = np.random.exponential(1.0/rate, N)
+ # make sure we have enough spikes
+ while T_start + sum(intervals) < T_end:
+ intervals = np.append(intervals,
+ np.random.exponential(1.0/rate, N_append))
+ spikes = T_start + np.cumsum(intervals)
+ spikes = spikes[spikes < T_end]
+ return SpikeTrain(spikes, interval)