-rw-r--r--  .github/workflows/python-package.yml              51
-rw-r--r--  .travis.yml                                       22
-rw-r--r--  Changelog                                          9
-rw-r--r--  Readme.rst                                        18
-rw-r--r--  doc/pyspike.rst                                    6
-rw-r--r--  doc/tutorial.rst                                  67
-rw-r--r--  examples/spike_train_order.py                     52
-rw-r--r--  pyspike/DiscreteFunc.py                            6
-rw-r--r--  pyspike/PieceWiseConstFunc.py                     40
-rw-r--r--  pyspike/PieceWiseLinFunc.py                       50
-rw-r--r--  pyspike/__init__.py                               16
-rw-r--r--  pyspike/cython/cython_add.pyx                      1
-rw-r--r--  pyspike/cython/cython_directionality.pyx         263
-rw-r--r--  pyspike/cython/cython_distances.pyx              201
-rw-r--r--  pyspike/cython/cython_profiles.pyx                34
-rw-r--r--  pyspike/cython/cython_simulated_annealing.pyx     83
-rw-r--r--  pyspike/cython/directionality_python_backend.py  144
-rw-r--r--  pyspike/cython/python_backend.py                  67
-rw-r--r--  pyspike/spike_directionality.py                  522
-rw-r--r--  pyspike/spike_sync.py                             55
-rw-r--r--  setup.py                                          38
-rw-r--r--  test/test_directionality.py                       97
-rw-r--r--  test/test_distance.py                             70
-rw-r--r--  test/test_empty.py                                36
-rw-r--r--  test/test_function.py                             96
-rw-r--r--  test/test_generic_interfaces.py                   18
-rw-r--r--  test/test_regression/test_regression_15.py        20
-rw-r--r--  test/test_spikes.py                               10
-rw-r--r--  test/test_sync_filter.py                          95
29 files changed, 1990 insertions(+), 197 deletions(-)
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
new file mode 100644
index 0000000..6585af4
--- /dev/null
+++ b/.github/workflows/python-package.yml
@@ -0,0 +1,51 @@
+# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
+# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
+
+name: Python package
+
+on:
+ push:
+ branches: [ master, develop ]
+ pull_request:
+ branches: [ master, develop ]
+
+jobs:
+ build:
+
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ python-version: ["3.7", "3.8", "3.9", "3.10"]
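+        # run the test suite once with the Cython backend installed and once
+        # with the pure-Python fallback (the second matrix entry installs nothing)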
+ cython: ['python -m pip install -q cython', 'echo "No Cython"']
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Install dependencies
+ run: |
+ sudo apt-get update
+ sudo apt-get install libblas-dev
+ sudo apt-get install liblapack-dev
+ sudo apt-get install gfortran
+ python -m pip install --upgrade pip
+ python -m pip install flake8 pytest nose numpy scipy
+ if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
+ - name: Install Cython
+ run: |
+ ${{ matrix.cython }}
+ - name: Install package
+ run: |
+ python setup.py build_ext --inplace
+ # - name: Lint with flake8
+ # run: |
+ # # stop the build if there are Python syntax errors or undefined names
+ # flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+ # # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
+ # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+ - name: Test with PyTest
+ run: |
+ python -m pytest
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 2877331..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-language: python
-python:
- - "2.7"
- - "3.4"
- - "3.5"
- - "3.6"
-env:
- - CYTHON_INSTALL="pip install -q cython"
- - CYTHON_INSTALL=""
-before_install:
- - sudo apt-get update
- - sudo apt-get install libblas-dev
- - sudo apt-get install liblapack-dev
- - sudo apt-get install gfortran
-install:
- - pip install scipy
- - $CYTHON_INSTALL
-
-script:
- - python setup.py build_ext --inplace
- - nosetests
- - travis_wait 30 nosetests test/numeric
diff --git a/Changelog b/Changelog
index 21b7cb0..3641458 100644
--- a/Changelog
+++ b/Changelog
@@ -1,3 +1,12 @@
+PySpike v0.7:
+ * Python 3.10 compatible
+ * Dropped support for Python2
+ * Small fixes in tests
+ * Fixed documentation links
+
+PySpike v0.6:
+ * Support for computing spike directionality and spike train order
+
PySpike v0.5:
* First beta release
* Python 2.6 support removed
diff --git a/Readme.rst b/Readme.rst
index 0422dad..bd91347 100644
--- a/Readme.rst
+++ b/Readme.rst
@@ -22,28 +22,26 @@ If you use PySpike in your research, please cite our SoftwareX publication on Py
Additionally, depending on the used methods: ISI-distance [1], SPIKE-distance [2] or SPIKE-Synchronization [3], please cite one or more of the following publications:
-.. [#] Kreuz T, Haas JS, Morelli A, Abarbanel HDI, Politi A, *Measuring spike train synchrony.* J Neurosci Methods 165, 151 (2007) `[pdf] <http://wwwold.fi.isc.cnr.it/users/thomas.kreuz/images/Kreuz_JNeurosciMethods_2007_Spike-Train-Synchrony.pdf>`_
+.. [#] Kreuz T, Haas JS, Morelli A, Abarbanel HDI, Politi A, *Measuring spike train synchrony.* J Neurosci Methods 165, 151 (2007) `[pdf] <https://drive.google.com/file/d/113cr1xUhKe0rMIiFc1vMoIQ7j9noobKW/view>`_
-.. [#] Kreuz T, Chicharro D, Houghton C, Andrzejak RG, Mormann F, *Monitoring spike train synchrony.* J Neurophysiol 109, 1457 (2013) `[pdf] <http://wwwold.fi.isc.cnr.it/users/thomas.kreuz/images/Kreuz_JNeurophysiol_2013_SPIKE-distance.pdf>`_
+.. [#] Kreuz T, Chicharro D, Houghton C, Andrzejak RG, Mormann F, *Monitoring spike train synchrony.* J Neurophysiol 109, 1457 (2013) `[pdf] <https://drive.google.com/file/d/1oppf86V4cBVakPiv6Mbn_WaoKoKWzmIl/view>`_
.. [#] Kreuz T, Mulansky M and Bozanic N, *SPIKY: A graphical user interface for monitoring spike train synchrony*, J Neurophysiol, JNeurophysiol 113, 3432 (2015)
Important Changelog
-----------------------------
+With version 0.7.0, support for Python 2 was dropped; PySpike now officially
+supports Python 3.7, 3.8, 3.9, and 3.10.
+
+With version 0.6.0, the spike directionality and spike train order functions have been added.
+
With version 0.5.0, the interfaces have been unified and the specific functions for multivariate computations have become deprecated.
With version 0.2.0, the :code:`SpikeTrain` class has been introduced to represent spike trains.
This is a breaking change in the function interfaces.
Hence, programs written for older versions of PySpike (0.1.x) will not run with newer versions.
-
-Upcoming Functionality
--------------------------
-
-In an upcoming release, new functionality for analyzing Synfire patterns based on the new measures SPIKE-Order and Spike-Train-Order method will become part of the PySpike library.
-The new measures and algorithms are described in `this preprint <https://arxiv.org/abs/1610.07986>`_.
-
Requirements and Installation
-----------------------------
@@ -140,5 +138,5 @@ Curie Initial Training Network* `Neural Engineering Transformative Technologies
.. _SPIKE: http://www.scholarpedia.org/article/SPIKE-distance
.. _SPIKE-Synchronization: http://www.scholarpedia.org/article/Measures_of_spike_train_synchrony#SPIKE_synchronization
.. _cython: http://www.cython.org
-.. _SPIKY: http://wwwold.fi.isc.cnr.it/users/thomas.kreuz/Source-Code/SPIKY.html
+.. _SPIKY: https://thomas-kreuz.complexworld.net/source-codes/spiky
.. _BSD_License: http://opensource.org/licenses/BSD-2-Clause
diff --git a/doc/pyspike.rst b/doc/pyspike.rst
index 74ab439..3b10d2a 100644
--- a/doc/pyspike.rst
+++ b/doc/pyspike.rst
@@ -64,6 +64,12 @@ PSTH
:undoc-members:
:show-inheritance:
+Directionality
+........................................
+.. automodule:: pyspike.spike_directionality
+ :members:
+ :undoc-members:
+ :show-inheritance:
Helper functions
........................................
diff --git a/doc/tutorial.rst b/doc/tutorial.rst
index aff03a8..7c3e9b6 100644
--- a/doc/tutorial.rst
+++ b/doc/tutorial.rst
@@ -231,3 +231,70 @@ The following example computes and plots the ISI- and SPIKE-distance matrix as w
plt.title("SPIKE-Sync")
plt.show()
+
+
+Quantifying Leaders and Followers: Spike Train Order
+----------------------------------------------------
+
+PySpike provides functionality to quantify how much a set of spike trains
+resembles a synfire pattern (i.e., a perfect leader-follower pattern). For
+details on the algorithms, please see
+`our article in NJP <http://iopscience.iop.org/article/10.1088/1367-2630/aa68c3>`_.
+
+The following example computes the spike directionality and the Spike Train
+Order profile of two Poissonian spike trains; the corresponding Synfire
+Indicator is computed in the short sketch after the code block.
+
+.. code:: python
+
+ import numpy as np
+ from matplotlib import pyplot as plt
+ import pyspike as spk
+
+
+ st1 = spk.generate_poisson_spikes(1.0, [0, 20])
+ st2 = spk.generate_poisson_spikes(1.0, [0, 20])
+
+ d = spk.spike_directionality(st1, st2)
+
+ print "Spike Directionality of two Poissonian spike trains:", d
+
+ E = spk.spike_train_order_profile(st1, st2)
+
+ plt.figure()
+ x, y = E.get_plottable_data()
+ plt.plot(x, y, '-ob')
+ plt.ylim(-1.1, 1.1)
+ plt.xlabel("t")
+ plt.ylabel("E")
+ plt.title("Spike Train Order Profile")
+
+ plt.show()
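+
+The overall Spike Train Order, the so-called Synfire Indicator, can also be
+computed directly from the two spike trains. A minimal sketch using the spike
+trains from above:
+
+.. code:: python
+
+ F = spk.spike_train_order(st1, st2)
+ print("Synfire Indicator of two Poissonian spike trains:", F)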
+
+Additionally, PySpike can also compute the optimal ordering of the spike trains,
+i.e., the ordering that most resembles a synfire pattern. The following example
+computes the optimal order of a set of 20 Poissonian spike trains:
+
+.. code:: python
+
+ M = 20
+ spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100]) for m in range(M)]
+
+ F_init = spk.spike_train_order(spike_trains)
+ print "Initial Synfire Indicator for 20 Poissonian spike trains:", F_init
+
+ D_init = spk.spike_directionality_matrix(spike_trains)
+ phi, _ = spk.optimal_spike_train_sorting(spike_trains)
+ F_opt = spk.spike_train_order(spike_trains, indices=phi)
+ print "Synfire Indicator of optimized spike train sorting:", F_opt
+
+ D_opt = spk.permutate_matrix(D_init, phi)
+
+ plt.figure()
+ plt.imshow(D_init)
+ plt.title("Initial Directionality Matrix")
+
+ plt.figure()
+ plt.imshow(D_opt)
+ plt.title("Optimized Directionality Matrix")
+
+ plt.show()
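+
+The entry :code:`D_init[i, j]` of the directionality matrix is positive if
+spike train :code:`i` tends to lead spike train :code:`j`. As a rough
+illustration (not part of the PySpike API), the row sums then give a
+leader-to-follower ranking:
+
+.. code:: python
+
+ ranking = np.argsort(-np.sum(D_init, axis=1))
+ print("Spike trains ranked from leader to follower:", ranking)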
diff --git a/examples/spike_train_order.py b/examples/spike_train_order.py
new file mode 100644
index 0000000..3a42472
--- /dev/null
+++ b/examples/spike_train_order.py
@@ -0,0 +1,52 @@
+import numpy as np
+from matplotlib import pyplot as plt
+import pyspike as spk
+
+
+st1 = spk.generate_poisson_spikes(1.0, [0, 20])
+st2 = spk.generate_poisson_spikes(1.0, [0, 20])
+
+d = spk.spike_directionality(st1, st2)
+
+print "Spike Directionality of two Poissonian spike trains:", d
+
+E = spk.spike_train_order_profile(st1, st2)
+
+plt.figure()
+x, y = E.get_plottable_data()
+plt.plot(x, y, '-ob')
+plt.ylim(-1.1, 1.1)
+plt.xlabel("t")
+plt.ylabel("E")
+plt.title("Spike Train Order Profile")
+
+
+###### Optimize spike train order of 20 Random spike trains #######
+
+M = 20
+
+spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100]) for m in range(M)]
+
+F_init = spk.spike_train_order(spike_trains)
+
+print "Initial Synfire Indicator for 20 Poissonian spike trains:", F_init
+
+D_init = spk.spike_directionality_matrix(spike_trains)
+
+phi, _ = spk.optimal_spike_train_sorting(spike_trains)
+
+F_opt = spk.spike_train_order(spike_trains, indices=phi)
+
+print "Synfire Indicator of optimized spike train sorting:", F_opt
+
+D_opt = spk.permutate_matrix(D_init, phi)
+
+plt.figure()
+plt.imshow(D_init)
+plt.title("Initial Directionality Matrix")
+
+plt.figure()
+plt.imshow(D_opt)
+plt.title("Optimized Directionality Matrix")
+
+plt.show()
diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py
index caad290..48bc787 100644
--- a/pyspike/DiscreteFunc.py
+++ b/pyspike/DiscreteFunc.py
@@ -5,7 +5,7 @@
from __future__ import absolute_import, print_function
import numpy as np
-import collections
+import collections.abc
import pyspike
@@ -155,11 +155,11 @@ class DiscreteFunc(object):
multiplicity = np.sum(self.mp[1:-1])
else:
# check if interval is as sequence
- assert isinstance(interval, collections.Sequence), \
+ assert isinstance(interval, collections.abc.Sequence), \
"Invalid value for `interval`. None, Sequence or Tuple \
expected."
# check if interval is a sequence of intervals
- if not isinstance(interval[0], collections.Sequence):
+ if not isinstance(interval[0], collections.abc.Sequence):
# find the indices corresponding to the interval
start_ind, end_ind = get_indices(interval)
value = np.sum(self.y[start_ind:end_ind])
diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py
index 5ce5f27..e33c61d 100644
--- a/pyspike/PieceWiseConstFunc.py
+++ b/pyspike/PieceWiseConstFunc.py
@@ -5,7 +5,7 @@
from __future__ import absolute_import, print_function
import numpy as np
-import collections
+import collections.abc
import pyspike
@@ -39,7 +39,7 @@ class PieceWiseConstFunc(object):
ind = np.searchsorted(self.x, t, side='right')
- if isinstance(t, collections.Sequence):
+ if isinstance(t, collections.abc.Sequence):
# t is a sequence of values
# correct the cases t == x[0], t == x[-1]
ind[ind == 0] = 1
@@ -129,19 +129,31 @@ class PieceWiseConstFunc(object):
# no interval given, integrate over the whole spike train
a = np.sum((self.x[1:]-self.x[:-1]) * self.y)
else:
+ if interval[0] > interval[1]:
+ raise ValueError("Invalid averaging interval: interval[0]>interval[1]")
+ if interval[0] < self.x[0]:
+ raise ValueError("Invalid averaging interval: interval[0]<self.x[0]")
+ if interval[1] > self.x[-1]:
+ raise ValueError("Invalid averaging interval: interval[1]>self.x[-1]")
# find the indices corresponding to the interval
start_ind = np.searchsorted(self.x, interval[0], side='right')
end_ind = np.searchsorted(self.x, interval[1], side='left')-1
- assert start_ind > 0 and end_ind < len(self.x), \
- "Invalid averaging interval"
- # first the contribution from between the indices
- a = np.sum((self.x[start_ind+1:end_ind+1] -
- self.x[start_ind:end_ind]) *
- self.y[start_ind:end_ind])
- # correction from start to first index
- a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1]
- # correction from last index to end
- a += (interval[1]-self.x[end_ind]) * self.y[end_ind]
+ if start_ind > end_ind:
+ # contribution from between two closest edges
+ a = (self.x[start_ind]-self.x[end_ind]) * self.y[end_ind]
+ # minus the part that is not within the interval
+ a -= ((interval[0]-self.x[end_ind])+(self.x[start_ind]-interval[1])) * self.y[end_ind]
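+ # (together this simplifies to
+ # self.y[end_ind] * (interval[1]-interval[0]),
+ # i.e. the constant value times the length of the interval)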
+ else:
+ assert start_ind > 0 and end_ind < len(self.x), \
+ "Invalid averaging interval"
+ # first the contribution from between the indices
+ a = np.sum((self.x[start_ind+1:end_ind+1] -
+ self.x[start_ind:end_ind]) *
+ self.y[start_ind:end_ind])
+ # correction from start to first index
+ a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1]
+ # correction from last index to end
+ a += (interval[1]-self.x[end_ind]) * self.y[end_ind]
return a
def avrg(self, interval=None):
@@ -161,10 +173,10 @@ class PieceWiseConstFunc(object):
return self.integral() / (self.x[-1]-self.x[0])
# check if interval is as sequence
- assert isinstance(interval, collections.Sequence), \
+ assert isinstance(interval, collections.abc.Sequence), \
"Invalid value for `interval`. None, Sequence or Tuple expected."
# check if interval is a sequence of intervals
- if not isinstance(interval[0], collections.Sequence):
+ if not isinstance(interval[0], collections.abc.Sequence):
# just one interval
a = self.integral(interval) / (interval[1]-interval[0])
else:
diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py
index 8145e63..b3b503b 100644
--- a/pyspike/PieceWiseLinFunc.py
+++ b/pyspike/PieceWiseLinFunc.py
@@ -5,7 +5,7 @@
from __future__ import absolute_import, print_function
import numpy as np
-import collections
+import collections.abc
import pyspike
@@ -46,7 +46,7 @@ class PieceWiseLinFunc:
ind = np.searchsorted(self.x, t, side='right')
- if isinstance(t, collections.Sequence):
+ if isinstance(t, collections.abc.Sequence):
# t is a sequence of values
# correct the cases t == x[0], t == x[-1]
ind[ind == 0] = 1
@@ -146,31 +146,47 @@ class PieceWiseLinFunc:
if interval is None:
# no interval given, integrate over the whole spike train
- integral = np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2))
+ return np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2))
+
+ # find the indices corresponding to the interval
+ start_ind = np.searchsorted(self.x, interval[0], side='right')
+ end_ind = np.searchsorted(self.x, interval[1], side='left')-1
+ assert start_ind > 0 and end_ind < len(self.x), \
+ "Invalid averaging interval"
+ if start_ind > end_ind:
+ # contribution from between two closest edges
+ y_x0 = intermediate_value(self.x[start_ind-1],
+ self.x[start_ind],
+ self.y1[start_ind-1],
+ self.y2[start_ind-1],
+ interval[0])
+ y_x1 = intermediate_value(self.x[start_ind-1],
+ self.x[start_ind],
+ self.y1[start_ind-1],
+ self.y2[start_ind-1],
+ interval[1])
+ integral = (y_x0 + y_x1) * 0.5 * (interval[1] - interval[0])
else:
- # find the indices corresponding to the interval
- start_ind = np.searchsorted(self.x, interval[0], side='right')
- end_ind = np.searchsorted(self.x, interval[1], side='left')-1
- assert start_ind > 0 and end_ind < len(self.x), \
- "Invalid averaging interval"
# first the contribution from between the indices
integral = np.sum((self.x[start_ind+1:end_ind+1] -
- self.x[start_ind:end_ind]) *
- 0.5*(self.y1[start_ind:end_ind] +
- self.y2[start_ind:end_ind]))
+ self.x[start_ind:end_ind]) *
+ 0.5*(self.y1[start_ind:end_ind] +
+ self.y2[start_ind:end_ind]))
# correction from start to first index
integral += (self.x[start_ind]-interval[0]) * 0.5 * \
(self.y2[start_ind-1] +
- intermediate_value(self.x[start_ind-1],
+ intermediate_value(self.x[start_ind-1],
self.x[start_ind],
self.y1[start_ind-1],
self.y2[start_ind-1],
- interval[0]
- ))
+ interval[0]))
# correction from last index to end
integral += (interval[1]-self.x[end_ind]) * 0.5 * \
(self.y1[end_ind] +
- intermediate_value(self.x[end_ind], self.x[end_ind+1],
+ intermediate_value(self.x[end_ind], self.x[end_ind+1],
self.y1[end_ind], self.y2[end_ind],
interval[1]
))
@@ -195,10 +211,10 @@ class PieceWiseLinFunc:
return self.integral() / (self.x[-1]-self.x[0])
# check if interval is as sequence
- assert isinstance(interval, collections.Sequence), \
+ assert isinstance(interval, collections.abc.Sequence), \
"Invalid value for `interval`. None, Sequence or Tuple expected."
# check if interval is a sequence of intervals
- if not isinstance(interval[0], collections.Sequence):
+ if not isinstance(interval[0], collections.abc.Sequence):
# just one interval
a = self.integral(interval) / (interval[1]-interval[0])
else:
diff --git a/pyspike/__init__.py b/pyspike/__init__.py
index 08253fb..3897d18 100644
--- a/pyspike/__init__.py
+++ b/pyspike/__init__.py
@@ -1,5 +1,5 @@
"""
-Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
+Copyright 2014-2018, Mario Mulansky <mario.mulansky@gmx.net>
Distributed under the BSD License
"""
@@ -7,8 +7,8 @@ Distributed under the BSD License
from __future__ import absolute_import
__all__ = ["isi_distance", "spike_distance", "spike_sync", "psth",
- "spikes", "SpikeTrain", "PieceWiseConstFunc", "PieceWiseLinFunc",
- "DiscreteFunc", "directionality"]
+ "spikes", "spike_directionality", "SpikeTrain",
+ "PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc"]
from .PieceWiseConstFunc import PieceWiseConstFunc
from .PieceWiseLinFunc import PieceWiseLinFunc
@@ -20,13 +20,21 @@ from .isi_distance import isi_profile, isi_distance, isi_profile_multi,\
from .spike_distance import spike_profile, spike_distance, spike_profile_multi,\
spike_distance_multi, spike_distance_matrix
from .spike_sync import spike_sync_profile, spike_sync,\
- spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix
+ spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix,\
+ filter_by_spike_sync
from .psth import psth
from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \
spike_train_from_string, import_spike_trains_from_time_series, \
merge_spike_trains, generate_poisson_spikes
+from .spike_directionality import spike_directionality, \
+ spike_directionality_values, spike_directionality_matrix, \
+ spike_train_order_profile, spike_train_order_profile_bi, \
+ spike_train_order_profile_multi, spike_train_order, \
+ spike_train_order_bi, spike_train_order_multi, \
+ optimal_spike_train_sorting, permutate_matrix
+
# define the __version__ following
# http://stackoverflow.com/questions/17583443
from pkg_resources import get_distribution, DistributionNotFound
diff --git a/pyspike/cython/cython_add.pyx b/pyspike/cython/cython_add.pyx
index 25f1181..f38406a 100644
--- a/pyspike/cython/cython_add.pyx
+++ b/pyspike/cython/cython_add.pyx
@@ -1,3 +1,4 @@
+#cython: language_level=3
#cython: boundscheck=False
#cython: wraparound=False
#cython: cdivision=True
diff --git a/pyspike/cython/cython_directionality.pyx b/pyspike/cython/cython_directionality.pyx
new file mode 100644
index 0000000..40450cd
--- /dev/null
+++ b/pyspike/cython/cython_directionality.pyx
@@ -0,0 +1,263 @@
+#cython: language_level=3
+#cython: boundscheck=False
+#cython: wraparound=False
+#cython: cdivision=True
+
+"""
+cython_directionality.pyx
+
+cython implementation of the spike delay asymmetry measures
+
+Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+"""
+To test whether things can be optimized: remove all yellow stuff
+in the html output::
+
+ cython -a cython_directionality.pyx
+
+which gives::
+
+ cython_directionality.html
+
+"""
+
+import numpy as np
+cimport numpy as np
+
+from libc.math cimport fabs
+from libc.math cimport fmax
+from libc.math cimport fmin
+
+# from pyspike.cython.cython_distances cimport get_tau
+
+DTYPE = np.float64
+ctypedef np.float64_t DTYPE_t
+
+
+############################################################
+# get_tau
+############################################################
+cdef inline double get_tau(double[:] spikes1, double[:] spikes2,
+ int i, int j, double interval, double max_tau):
+ cdef double m = interval # use interval length as initial tau
+ cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1
+ cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1
+ if i < N1 and i > -1:
+ m = fmin(m, spikes1[i+1]-spikes1[i])
+ if j < N2 and j > -1:
+ m = fmin(m, spikes2[j+1]-spikes2[j])
+ if i > 0:
+ m = fmin(m, spikes1[i]-spikes1[i-1])
+ if j > 0:
+ m = fmin(m, spikes2[j]-spikes2[j-1])
+ m *= 0.5
+ if max_tau > 0.0:
+ m = fmin(m, max_tau)
+ return m
+
+
+############################################################
+# spike_train_order_profile_cython
+############################################################
+def spike_train_order_profile_cython(double[:] spikes1, double[:] spikes2,
+ double t_start, double t_end,
+ double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int i = -1
+ cdef int j = -1
+ cdef int n = 0
+ cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times
+ cdef double[:] a = np.zeros(N1 + N2 + 2) # asymmetry values
+ cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity
+ cdef double interval = t_end - t_start
+ cdef double tau
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ st[n] = spikes1[i]
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 after spike train 2
+ # both get marked with -1
+ a[n] = -1
+ a[n-1] = -1
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ st[n] = spikes2[j]
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 before spike train 2
+ # both get marked with 1
+ a[n] = 1
+ a[n-1] = 1
+ else: # spikes1[i+1] = spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ n += 1
+ # add only one event with zero asymmetry value and multiplicity 2
+ st[n] = spikes1[i]
+ a[n] = 0
+ mp[n] = 2
+
+ st = st[:n+2]
+ a = a[:n+2]
+ mp = mp[:n+2]
+
+ st[0] = t_start
+ st[len(st)-1] = t_end
+ if N1 + N2 > 0:
+ a[0] = a[1]
+ a[len(a)-1] = a[len(a)-2]
+ mp[0] = mp[1]
+ mp[len(mp)-1] = mp[len(mp)-2]
+ else:
+ a[0] = 1
+ a[1] = 1
+
+ return st, a, mp
+
+
+############################################################
+# spike_train_order_cython
+############################################################
+def spike_train_order_cython(double[:] spikes1, double[:] spikes2,
+ double t_start, double t_end, double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int i = -1
+ cdef int j = -1
+ cdef int d = 0
+ cdef int mp = 0
+ cdef double interval = t_end - t_start
+ cdef double tau
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ mp += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike in spike train 2 appeared before spike in spike train 1
+ # the pair contributes -2 (each spike counts -1)
+ d -= 2
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ mp += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike in spike train 1 appeared before spike in spike train 2
+ # the pair contributes +2 (each spike counts +1)
+ d += 2
+ else: # spikes1[i+1] = spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ # add only one event with multiplicity 2, but no asymmetry counting
+ mp += 2
+
+ if d == 0 and mp == 0:
+ # empty spike trains -> spike train order = 1 by definition
+ d = 1
+ mp = 1
+
+ return d, mp
+
+
+############################################################
+# spike_directionality_profiles_cython
+############################################################
+def spike_directionality_profiles_cython(double[:] spikes1,
+ double[:] spikes2,
+ double t_start, double t_end,
+ double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int i = -1
+ cdef int j = -1
+ cdef double[:] d1 = np.zeros(N1) # directionality values
+ cdef double[:] d2 = np.zeros(N2) # directionality values
+ cdef double interval = t_end - t_start
+ cdef double tau
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 after spike train 2
+ # leading spike gets +1, following spike -1
+ d1[i] = -1
+ d2[j] = +1
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 before spike train 2
+ # leading spike gets +1, following spike -1
+ d1[i] = +1
+ d2[j] = -1
+ else: # spikes1[i+1] = spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ # equal spike times: zero asymmetry value
+ d1[i] = 0
+ d2[j] = 0
+
+ return d1, d2
+
+
+############################################################
+# spike_directionality_cython
+############################################################
+def spike_directionality_cython(double[:] spikes1,
+ double[:] spikes2,
+ double t_start, double t_end,
+ double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int i = -1
+ cdef int j = -1
+ cdef int d = 0 # directionality value
+ cdef double interval = t_end - t_start
+ cdef double tau
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 after spike train 2
+ # leading spike gets +1, following spike -1
+ d -= 1
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike from spike train 1 before spike train 2
+ # leading spike gets +1, following spike -1
+ d += 1
+ else: # spikes1[i+1] = spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+
+ return d
diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx
index ac5f226..f049718 100644
--- a/pyspike/cython/cython_distances.pyx
+++ b/pyspike/cython/cython_distances.pyx
@@ -1,3 +1,4 @@
+#cython: language_level=3
#cython: boundscheck=False
#cython: wraparound=False
#cython: cdivision=True
@@ -178,6 +179,8 @@ cdef inline double isi_avrg_cython(double isi1, double isi2) nogil:
return 0.5*(isi1+isi2)*(isi1+isi2)
# alternative definition to obtain <S> ~ 0.5 for Poisson spikes
# return 0.5*(isi1*isi1+isi2*isi2)
+ # another alternative definition without second normalization
+ # return 0.5*(isi1+isi2)
############################################################
@@ -248,6 +251,8 @@ def spike_distance_cython(double[:] t1, double[:] t2,
index2 = 0
y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2)
index = 1
while index1+index2 < N1+N2-2:
@@ -267,6 +272,8 @@ def spike_distance_cython(double[:] t1, double[:] t2,
t_curr = t_p1
s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2)
spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
@@ -286,6 +293,8 @@ def spike_distance_cython(double[:] t1, double[:] t2,
s1 = dt_p1
# s2 is the same as above, thus we can compute y2 immediately
y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2)
elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1):
index2 += 1
# first calculate the previous interval end value
@@ -301,6 +310,8 @@ def spike_distance_cython(double[:] t1, double[:] t2,
t_curr = t_p2
s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2)
spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
@@ -320,6 +331,9 @@ def spike_distance_cython(double[:] t1, double[:] t2,
s2 = dt_p2
# s1 is the same as above, thus we can compute y2 immediately
y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2)
+
else: # t_f1 == t_f2 - generate only one event
index1 += 1
index2 += 1
@@ -358,6 +372,193 @@ def spike_distance_cython(double[:] t1, double[:] t2,
s1 = dt_f1 # *(t_end-t1[N1-1])/isi1
s2 = dt_f2 # *(t_end-t2[N2-1])/isi2
y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2)
+
+ spike_value += 0.5*(y_start + y_end) * (t_end - t_last)
+ # end nogil
+
+ # use only the data added above
+ # could be less than original length due to equal spike times
+ return spike_value / (t_end-t_start)
+
+
+############################################################
+# isi_avrg_rf_cython
+############################################################
+cdef inline double isi_avrg_rf_cython(double isi1, double isi2) nogil:
+ # rate free version
+ return (isi1+isi2)
+
+
+############################################################
+# spike_distance_rf_cython
+############################################################
+def spike_distance_rf_cython(double[:] t1, double[:] t2,
+ double t_start, double t_end):
+
+ cdef int N1, N2, index1, index2, index
+ cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2
+ cdef double isi1, isi2, s1, s2
+ cdef double y_start, y_end, t_last, t_current, spike_value
+
+ spike_value = 0.0
+
+ N1 = len(t1)
+ N2 = len(t2)
+
+ with nogil: # release the interpreter to allow multithreading
+ t_last = t_start
+ t_p1 = t_start
+ t_p2 = t_start
+ if t1[0] > t_start:
+ # dt_p1 = t2[0]-t_start
+ t_f1 = t1[0]
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
+ isi1 = fmax(t_f1-t_start, t1[1]-t1[0])
+ dt_p1 = dt_f1
+ s1 = dt_p1*(t_f1-t_start)/isi1
+ index1 = -1
+ else:
+ t_f1 = t1[1]
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
+ dt_p1 = 0.0
+ isi1 = t1[1]-t1[0]
+ s1 = dt_p1
+ index1 = 0
+ if t2[0] > t_start:
+ # dt_p1 = t2[0]-t_start
+ t_f2 = t2[0]
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
+ dt_p2 = dt_f2
+ isi2 = fmax(t_f2-t_start, t2[1]-t2[0])
+ s2 = dt_p2*(t_f2-t_start)/isi2
+ index2 = -1
+ else:
+ t_f2 = t2[1]
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
+ dt_p2 = 0.0
+ isi2 = t2[1]-t2[0]
+ s2 = dt_p2
+ index2 = 0
+
+ # y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+ index = 1
+
+ while index1+index2 < N1+N2-2:
+ # print(index, index1, index2)
+ if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1):
+ index1 += 1
+ # first calculate the previous interval end value
+ s1 = dt_f1*(t_f1-t_p1) / isi1
+ # the previous time now was the following time before:
+ dt_p1 = dt_f1
+ t_p1 = t_f1 # t_p1 contains the current time point
+ # get the next time
+ if index1 < N1-1:
+ t_f1 = t1[index1+1]
+ else:
+ t_f1 = t_end
+ t_curr = t_p1
+ s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
+ # y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+
+ spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+
+ # now the next interval start value
+ if index1 < N1-1:
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
+ t_start, t_end)
+ isi1 = t_f1-t_p1
+ s1 = dt_p1
+ else:
+ dt_f1 = dt_p1
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ # s1 needs adjustment due to change of isi1
+ s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # s2 is the same as above, thus we can compute y2 immediately
+ # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+ elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1):
+ index2 += 1
+ # first calculate the previous interval end value
+ s2 = dt_f2*(t_f2-t_p2) / isi2
+ # the previous time now was the following time before:
+ dt_p2 = dt_f2
+ t_p2 = t_f2 # t_p2 contains the current time point
+ # get the next time
+ if index2 < N2-1:
+ t_f2 = t2[index2+1]
+ else:
+ t_f2 = t_end
+ t_curr = t_p2
+ s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
+ # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+
+ spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+
+ # now the next interval start value
+ if index2 < N2-1:
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
+ t_start, t_end)
+ isi2 = t_f2-t_p2
+ s2 = dt_p2
+ else:
+ dt_f2 = dt_p2
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ # s2 needs adjustment due to change of isi2
+ s2 = dt_p2*(t_end-t2[N2-1])/isi2
+ # s1 is the same as above, thus we can compute y2 immediately
+ # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+
+ else: # t_f1 == t_f2 - generate only one event
+ index1 += 1
+ index2 += 1
+ t_p1 = t_f1
+ t_p2 = t_f2
+ dt_p1 = 0.0
+ dt_p2 = 0.0
+ t_curr = t_f1
+ y_end = 0.0
+ spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+ y_start = 0.0
+ if index1 < N1-1:
+ t_f1 = t1[index1+1]
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
+ t_start, t_end)
+ isi1 = t_f1 - t_p1
+ else:
+ t_f1 = t_end
+ dt_f1 = dt_p1
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ if index2 < N2-1:
+ t_f2 = t2[index2+1]
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
+ t_start, t_end)
+ isi2 = t_f2 - t_p2
+ else:
+ t_f2 = t_end
+ dt_f2 = dt_p2
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ index += 1
+ t_last = t_curr
+ # isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ # isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ s1 = dt_f1*(t_end-t1[N1-1])/isi1
+ s2 = dt_f2*(t_end-t2[N2-1])/isi2
+ # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+ # alternative definition without second normalization
+ y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+
spike_value += 0.5*(y_start + y_end) * (t_end - t_last)
# end nogil
diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx
index 4a42cdb..a83e4f7 100644
--- a/pyspike/cython/cython_profiles.pyx
+++ b/pyspike/cython/cython_profiles.pyx
@@ -1,3 +1,4 @@
+#cython: language_level=3
#cython: boundscheck=False
#cython: wraparound=False
#cython: cdivision=True
@@ -450,3 +451,36 @@ def coincidence_profile_cython(double[:] spikes1, double[:] spikes2,
c[1] = 1
return st, c, mp
+
+
+############################################################
+# coincidence_single_profile_cython
+############################################################
+def coincidence_single_profile_cython(double[:] spikes1, double[:] spikes2,
+ double t_start, double t_end, double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int j = -1
+ cdef double[:] c = np.zeros(N1) # coincidences
+ cdef double interval = t_end - t_start
+ cdef double tau
+ for i in range(N1):
+ while j < N2-1 and spikes2[j+1] < spikes1[i]:
+ # move forward until spikes2[j] is the last spike before spikes1[i]
+ # note that if spikes2[j] is after spikes1[i] we don't do anything
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if j > -1 and fabs(spikes1[i]-spikes2[j]) < tau:
+ # current spike in st1 is coincident
+ c[i] = 1
+ if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]):
+ # in case spikes2[j] is before spikes1[i] it has to be the one
+ # right before (see above), hence we move one forward and also
+ # check the next spike
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if fabs(spikes2[j]-spikes1[i]) < tau:
+ # current spike in st1 is coincident
+ c[i] = 1
+ return c
diff --git a/pyspike/cython/cython_simulated_annealing.pyx b/pyspike/cython/cython_simulated_annealing.pyx
new file mode 100644
index 0000000..53ecde7
--- /dev/null
+++ b/pyspike/cython/cython_simulated_annealing.pyx
@@ -0,0 +1,83 @@
+#cython: language_level=3
+#cython: boundscheck=False
+#cython: wraparound=False
+#cython: cdivision=True
+
+"""
+cython_simulated_annealing.pyx
+
+cython implementation of a simulated annealing algorithm to find the optimal
+spike train order
+
+Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects
+improves the performance of spike_distance by a factor of 10!
+
+Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+"""
+To test whether things can be optimized: remove all yellow stuff
+in the html output::
+
+ cython -a cython_simulated_annealing.pyx
+
+which gives:
+
+ cython_simulated_annealing.html
+
+"""
+
+import numpy as np
+cimport numpy as np
+
+from libc.math cimport exp
+from libc.math cimport fmod
+from libc.stdlib cimport rand
+from libc.stdlib cimport RAND_MAX
+
+DTYPE = np.float64
+ctypedef np.float64_t DTYPE_t
+
+
+def sim_ann_cython(double[:, :] D, double T_start, double T_end, double alpha):
+
+ cdef long N = len(D)
+ cdef double A = np.sum(np.triu(D, 0))
+ cdef long[:] p = np.arange(N)
+ cdef double T = T_start
+ cdef long iterations
+ cdef long succ_iter
+ cdef long total_iter = 0
+ cdef double delta_A
+ cdef long ind1
+ cdef long ind2
+
+ while T > T_end:
+ iterations = 0
+ succ_iter = 0
+ # equilibrate for 100*N steps or 10*N successful steps
+ while iterations < 100*N and succ_iter < 10*N:
+ # exchange two rows and cols
+ # ind1 = np.random.randint(N-1)
+ ind1 = rand() % (N-1)
+ if ind1 < N-1:
+ ind2 = ind1+1
+ else: # this can never happen!
+ ind2 = 0
+ delta_A = -2*D[p[ind1], p[ind2]]
+ if delta_A > 0.0 or exp(delta_A/T) > ((1.0*rand()) / RAND_MAX):
+ # swap indices
+ p[ind1], p[ind2] = p[ind2], p[ind1]
+ A += delta_A
+ succ_iter += 1
+ iterations += 1
+ total_iter += iterations
+ T *= alpha # cool down
+ if succ_iter == 0:
+ # no successful step -> we believe we have converged
+ break
+
+ return p, A, total_iter
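+
+# Minimal usage sketch (the parameter values below are illustrative
+# assumptions, not the defaults used by pyspike):
+#
+# D = spike_directionality_matrix(spike_trains)
+# p, A, total_iter = sim_ann_cython(D, T_start=1.0, T_end=1e-5, alpha=0.9)
+# where p is the resulting permutation and A the optimized upper-triangle sum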
diff --git a/pyspike/cython/directionality_python_backend.py b/pyspike/cython/directionality_python_backend.py
new file mode 100644
index 0000000..c1d820b
--- /dev/null
+++ b/pyspike/cython/directionality_python_backend.py
@@ -0,0 +1,144 @@
+""" directionality_python_backend.py
+
+Collection of python functions that can be used instead of the cython
+implementation.
+
+Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+import numpy as np
+
+
+############################################################
+# spike_train_order_python
+############################################################
+def spike_directionality_profile_python(spikes1, spikes2, t_start, t_end,
+ max_tau):
+
+ def get_tau(spikes1, spikes2, i, j, max_tau):
+ m = t_end - t_start # use interval as initial tau
+ if i < len(spikes1)-1 and i > -1:
+ m = min(m, spikes1[i+1]-spikes1[i])
+ if j < len(spikes2)-1 and j > -1:
+ m = min(m, spikes2[j+1]-spikes2[j])
+ if i > 0:
+ m = min(m, spikes1[i]-spikes1[i-1])
+ if j > 0:
+ m = min(m, spikes2[j]-spikes2[j-1])
+ m *= 0.5
+ if max_tau > 0.0:
+ m = min(m, max_tau)
+ return m
+
+ N1 = len(spikes1)
+ N2 = len(spikes2)
+ i = -1
+ j = -1
+ d1 = np.zeros(N1) # directionality values
+ d2 = np.zeros(N2) # directionality values
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau)
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike in first spike train occurs after second
+ d1[i] = -1
+ d2[j] = +1
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau)
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike in second spike train occurs after first
+ d1[i] = +1
+ d2[j] = -1
+ else: # spikes1[i+1] = spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ d1[i] = 0
+ d2[j] = 0
+
+ return d1, d2
+
+
+############################################################
+# spike_train_order_python
+############################################################
+def spike_train_order_profile_python(spikes1, spikes2, t_start, t_end,
+ max_tau):
+
+ def get_tau(spikes1, spikes2, i, j, max_tau):
+ m = t_end - t_start # use interval as initial tau
+ if i < len(spikes1)-1 and i > -1:
+ m = min(m, spikes1[i+1]-spikes1[i])
+ if j < len(spikes2)-1 and j > -1:
+ m = min(m, spikes2[j+1]-spikes2[j])
+ if i > 0:
+ m = min(m, spikes1[i]-spikes1[i-1])
+ if j > 0:
+ m = min(m, spikes2[j]-spikes2[j-1])
+ m *= 0.5
+ if max_tau > 0.0:
+ m = min(m, max_tau)
+ return m
+
+ N1 = len(spikes1)
+ N2 = len(spikes2)
+ i = -1
+ j = -1
+ n = 0
+ st = np.zeros(N1 + N2 + 2) # spike times
+ a = np.zeros(N1 + N2 + 2) # coincidences
+ mp = np.ones(N1 + N2 + 2) # multiplicity
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau)
+ st[n] = spikes1[i]
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # both get marked with -1
+ a[n] = -1
+ a[n-1] = -1
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ n += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau)
+ st[n] = spikes2[j]
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # both get marked with 1
+ a[n] = 1
+ a[n-1] = 1
+ else: # spikes1[i+1] = spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ n += 1
+ # add only one event with zero asymmetry value and multiplicity 2
+ st[n] = spikes1[i]
+ a[n] = 0
+ mp[n] = 2
+
+ st = st[:n+2]
+ a = a[:n+2]
+ mp = mp[:n+2]
+
+ st[0] = t_start
+ st[len(st)-1] = t_end
+ if N1 + N2 > 0:
+ a[0] = a[1]
+ a[len(a)-1] = a[len(a)-2]
+ mp[0] = mp[1]
+ mp[len(mp)-1] = mp[len(mp)-2]
+ else:
+ a[0] = 1
+ a[1] = 1
+
+ return st, a, mp
diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py
index 6b7209a..e75f181 100644
--- a/pyspike/cython/python_backend.py
+++ b/pyspike/cython/python_backend.py
@@ -3,7 +3,7 @@
Collection of python functions that can be used instead of the cython
implementation.
-Copyright 2014, Mario Mulansky <mario.mulansky@gmx.net>
+Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
Distributed under the BSD License
@@ -356,26 +356,27 @@ def cumulative_sync_python(spikes1, spikes2):
return st, c
+def get_tau(spikes1, spikes2, i, j, max_tau, init_tau):
+ m = init_tau
+ if i < len(spikes1)-1 and i > -1:
+ m = min(m, spikes1[i+1]-spikes1[i])
+ if j < len(spikes2)-1 and j > -1:
+ m = min(m, spikes2[j+1]-spikes2[j])
+ if i > 0:
+ m = min(m, spikes1[i]-spikes1[i-1])
+ if j > 0:
+ m = min(m, spikes2[j]-spikes2[j-1])
+ m *= 0.5
+ if max_tau > 0.0:
+ m = min(m, max_tau)
+ return m
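+# worked example: for spikes1=[1.0, 2.0], spikes2=[1.1], i=0, j=0,
+# max_tau=0 and init_tau=10, the only relevant inter-spike interval is
+# spikes1[1]-spikes1[0] = 1.0, so tau = 0.5*1.0 = 0.5; the spikes at
+# 1.0 and 1.1 then count as coincident since |1.0-1.1| = 0.1 < 0.5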
+
+
############################################################
# coincidence_python
############################################################
def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
- def get_tau(spikes1, spikes2, i, j, max_tau):
- m = t_end - t_start # use interval as initial tau
- if i < len(spikes1)-1 and i > -1:
- m = min(m, spikes1[i+1]-spikes1[i])
- if j < len(spikes2)-1 and j > -1:
- m = min(m, spikes2[j+1]-spikes2[j])
- if i > 0:
- m = min(m, spikes1[i]-spikes1[i-1])
- if j > 0:
- m = min(m, spikes2[j]-spikes2[j-1])
- m *= 0.5
- if max_tau > 0.0:
- m = min(m, max_tau)
- return m
-
N1 = len(spikes1)
N2 = len(spikes2)
i = -1
@@ -388,7 +389,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
i += 1
n += 1
- tau = get_tau(spikes1, spikes2, i, j, max_tau)
+ tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
st[n] = spikes1[i]
if j > -1 and spikes1[i]-spikes2[j] < tau:
# coincidence between the current spike and the previous spike
@@ -398,7 +399,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
j += 1
n += 1
- tau = get_tau(spikes1, spikes2, i, j, max_tau)
+ tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
st[n] = spikes2[j]
if i > -1 and spikes2[j]-spikes1[i] < tau:
# coincidence between the current spike and the previous spike
@@ -434,6 +435,36 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
############################################################
+# coincidence_single_python
+############################################################
+def coincidence_single_python(spikes1, spikes2, t_start, t_end, max_tau):
+
+ N1 = len(spikes1)
+ N2 = len(spikes2)
+ j = -1
+ c = np.zeros(N1) # coincidences
+ for i in range(N1):
+ while j < N2-1 and spikes2[j+1] < spikes1[i]:
+ # move forward until spikes2[j] is the last spike before spikes1[i]
+ # note that if spikes2[j] is after spikes1[i] we don't do anything
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
+ if j > -1 and abs(spikes1[i]-spikes2[j]) < tau:
+ # current spike in st1 is coincident
+ c[i] = 1
+ if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]):
+ # in case spikes2[j] is before spikes1[i] it has to be the first or
+ # the one right before (see above), hence we move one forward and
+ # also check the next spike
+ j += 1
+ tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
+ if abs(spikes2[j]-spikes1[i]) < tau:
+ # current spike in st1 is coincident
+ c[i] = 1
+ return c
+
+
+############################################################
# add_piece_wise_const_python
############################################################
def add_piece_wise_const_python(x1, y1, x2, y2):
diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py
new file mode 100644
index 0000000..248862c
--- /dev/null
+++ b/pyspike/spike_directionality.py
@@ -0,0 +1,522 @@
+# Module containing functions to compute the SPIKE directionality and the
+# spike train order profile
+# Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+# Distributed under the BSD License
+
+from __future__ import absolute_import
+
+import numpy as np
+import pyspike
+from pyspike import DiscreteFunc
+from functools import partial
+from pyspike.generic import _generic_profile_multi
+
+
+############################################################
+# spike_directionality_values
+############################################################
+def spike_directionality_values(*args, **kwargs):
+ """ Computes the spike directionality value for each spike in
+ each spike train. Returns a list containing an array of spike directionality
+ values for every given spike train.
+
+ Valid call structures::
+
+ spike_directionality_values(st1, st2) # returns the bi-variate values
+ spike_directionality_values(st1, st2, st3) # multi-variate values of 3
+ # spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_directionality_values(spike_trains) # values for the list of spike trains
+ spike_directionality_values(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ Additional arguments:
+ :param max_tau: Upper bound for coincidence window (default=None).
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+
+ :returns: The spike directionality values :math:`D^n_i` as a list of arrays.
+ """
+ if len(args) == 1:
+ return _spike_directionality_values_impl(args[0], **kwargs)
+ else:
+ return _spike_directionality_values_impl(args, **kwargs)
+
+
+def _spike_directionality_values_impl(spike_trains, indices=None,
+ interval=None, max_tau=None):
+ """ Computes the multi-variate spike directionality profile
+ of the given spike trains.
+
+ :param spike_trains: List of spike trains.
+ :type spike_trains: List of :class:`pyspike.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike-directionality values.
+ """
+ if interval is not None:
+ raise NotImplementedError("Parameter `interval` not supported.")
+ if indices is None:
+ indices = np.arange(len(spike_trains))
+ indices = np.array(indices)
+ # check validity of indices
+ assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+ "Invalid index list."
+ # list of arrays for resulting asymmetry values
+ asymmetry_list = [np.zeros_like(spike_trains[n].spikes) for n in indices]
+ # generate a list of possible index pairs
+ pairs = [(indices[i], j) for i in range(len(indices))
+ for j in indices[i+1:]]
+
+ # cython implementation
+ try:
+ from .cython.cython_directionality import \
+ spike_directionality_profiles_cython as profile_impl
+ except ImportError:
+ if not(pyspike.disable_backend_warning):
+ print("Warning: spike_distance_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+ # use python backend
+ from .cython.directionality_python_backend import \
+ spike_directionality_profile_python as profile_impl
+
+ if max_tau is None:
+ max_tau = 0.0
+
+ for i, j in pairs:
+ d1, d2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes,
+ spike_trains[i].t_start, spike_trains[i].t_end,
+ max_tau)
+ asymmetry_list[i] += d1
+ asymmetry_list[j] += d2
+ for a in asymmetry_list:
+ a /= len(spike_trains)-1
+ return asymmetry_list
+
+
+############################################################
+# spike_directionality
+############################################################
+def spike_directionality(spike_train1, spike_train2, normalize=True,
+ interval=None, max_tau=None):
+ """ Computes the overall spike directionality of the first spike train with
+ respect to the second spike train.
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`pyspike.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`pyspike.SpikeTrain`
+ :param normalize: Normalize by the number of spikes (multiplicity).
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike directionality value of the first spike train.
+ """
+ if interval is None:
+ # distance over the whole interval is requested: use specific function
+ # for optimal performance
+ try:
+ from .cython.cython_directionality import \
+ spike_directionality_cython as spike_directionality_impl
+ if max_tau is None:
+ max_tau = 0.0
+ d = spike_directionality_impl(spike_train1.spikes,
+ spike_train2.spikes,
+ spike_train1.t_start,
+ spike_train1.t_end,
+ max_tau)
+ c = len(spike_train1.spikes)
+ except ImportError:
+ if not(pyspike.disable_backend_warning):
+ print("Warning: spike_distance_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+ # fall back to the profile-based computation
+ d1, _ = spike_directionality_values([spike_train1, spike_train2],
+ interval=interval,
+ max_tau=max_tau)
+ d = np.sum(d1)
+ c = len(spike_train1.spikes)
+ if normalize:
+ return 1.0*d/c
+ else:
+ return d
+ else:
+ # some specific interval is provided: not yet implemented
+ raise NotImplementedError("Parameter `interval` not supported.")
+
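+# Minimal usage sketch (st1 and st2 are pyspike.SpikeTrain objects defined
+# on the same interval):
+#
+# d = spike_directionality(st1, st2)
+# d > 0 means st1 tends to lead st2; with normalize=True the value
+# lies in [-1, 1]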
+
+############################################################
+# spike_directionality_matrix
+############################################################
+def spike_directionality_matrix(spike_trains, normalize=True, indices=None,
+ interval=None, max_tau=None):
+ """ Computes the spike directionality matrix for the given spike trains.
+
+ :param spike_trains: List of spike trains.
+ :type spike_trains: List of :class:`pyspike.SpikeTrain`
+ :param normalize: Normalize by the number of spikes (multiplicity).
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike-directionality values.
+ """
+ if indices is None:
+ indices = np.arange(len(spike_trains))
+ indices = np.array(indices)
+ # check validity of indices
+ assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+ "Invalid index list."
+ # generate a list of possible index pairs
+ pairs = [(indices[i], j) for i in range(len(indices))
+ for j in indices[i+1:]]
+
+ distance_matrix = np.zeros((len(indices), len(indices)))
+ for i, j in pairs:
+ d = spike_directionality(spike_trains[i], spike_trains[j], normalize,
+ interval, max_tau=max_tau)
+ distance_matrix[i, j] = d
+ distance_matrix[j, i] = -d
+ return distance_matrix
+
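+# Minimal usage sketch (spike_trains is a list of pyspike.SpikeTrain
+# objects):
+#
+# D = spike_directionality_matrix(spike_trains)
+# D[i, j] > 0 indicates that spike train i tends to lead spike train j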
+
+############################################################
+# spike_train_order_profile
+############################################################
+def spike_train_order_profile(*args, **kwargs):
+ """ Computes the spike train order profile :math:`E(t)` of the given
+ spike trains. Returns the profile as a DiscreteFunction object.
+
+ Valid call structures::
+
+ spike_train_order_profile(st1, st2) # returns the bi-variate profile
+ spike_train_order_profile(st1, st2, st3) # multi-variate profile of 3
+ # spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_train_order_profile(spike_trains) # profile of the list of spike trains
+ spike_train_order_profile(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ Additional arguments:
+ :param max_tau: Upper bound for coincidence window, `default=None`.
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+
+ :returns: The spike train order profile :math:`E(t)`
+ :rtype: :class:`.DiscreteFunction`
+ """
+ if len(args) == 1:
+ return spike_train_order_profile_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_train_order_profile_bi(args[0], args[1], **kwargs)
+ else:
+ return spike_train_order_profile_multi(args, **kwargs)
+
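A short sketch of the profile interface; averaging the discrete profile yields
the Synfire Indicator (values as asserted in `test/test_directionality.py`):

    import pyspike as spk
    from pyspike import SpikeTrain

    st1 = SpikeTrain([100, 200, 300], [0, 1000])
    st2 = SpikeTrain([105, 205, 300], [0, 1000])

    E = spk.spike_train_order_profile(st1, st2)
    E.avrg()                 # -> 2/3, the normalized Synfire Indicator
    E.avrg(normalize=False)  # -> 4.0, the raw sum of order values
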
+
+############################################################
+# spike_train_order_profile_bi
+############################################################
+def spike_train_order_profile_bi(spike_train1, spike_train2, max_tau=None):
+ """ Computes the spike train order profile P(t) of the two given
+ spike trains. Returns the profile as a DiscreteFunction object.
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`pyspike.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`pyspike.SpikeTrain`
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike train order profile :math:`E(t)`.
+ :rtype: :class:`pyspike.function.DiscreteFunction`
+ """
+ # check whether the spike trains are defined for the same interval
+ assert spike_train1.t_start == spike_train2.t_start, \
+ "Given spike trains are not defined on the same interval!"
+ assert spike_train1.t_end == spike_train2.t_end, \
+ "Given spike trains are not defined on the same interval!"
+
+ # cython implementation
+ try:
+ from .cython.cython_directionality import \
+ spike_train_order_profile_cython as \
+ spike_train_order_profile_impl
+ except ImportError:
+        if not(pyspike.disable_backend_warning):
+            print("Warning: spike_train_order_profile_cython not found. Make \
+sure that PySpike is installed by running\n \
+'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+ # use python backend
+ from .cython.directionality_python_backend import \
+ spike_train_order_profile_python as spike_train_order_profile_impl
+
+ if max_tau is None:
+ max_tau = 0.0
+
+ times, coincidences, multiplicity \
+ = spike_train_order_profile_impl(spike_train1.spikes,
+ spike_train2.spikes,
+ spike_train1.t_start,
+ spike_train1.t_end,
+ max_tau)
+
+ return DiscreteFunc(times, coincidences, multiplicity)
+
+
+############################################################
+# spike_train_order_profile_multi
+############################################################
+def spike_train_order_profile_multi(spike_trains, indices=None,
+ max_tau=None):
+ """ Computes the multi-variate spike train order profile for a set of
+ spike trains. For each spike in the set of spike trains, the multi-variate
+ profile is defined as the sum of asymmetry values divided by the number of
+ spike trains pairs involving the spike train of containing this spike,
+ which is the number of spike trains minus one (N-1).
+
+ :param spike_trains: list of :class:`pyspike.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+    :returns: The multi-variate spike train order profile :math:`E(t)`
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+ """
+ prof_func = partial(spike_train_order_profile_bi, max_tau=max_tau)
+ average_prof, M = _generic_profile_multi(spike_trains, prof_func,
+ indices)
+ return average_prof
+
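For the multi-variate profile, the average again equals the multi-variate
Synfire Indicator, as checked in `test/test_directionality.py` below:

    import pyspike as spk
    from pyspike import SpikeTrain

    st1 = SpikeTrain([100, 200, 300], [0, 1000])
    st2 = SpikeTrain([105, 205, 300], [0, 1000])
    st3 = SpikeTrain([105, 195, 500], [0, 1000])

    f = spk.spike_train_order_profile_multi([st1, st2, st3])
    # equals spk.spike_train_order([st1, st2, st3]) up to floating point precision
    print(f.avrg())
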
+
+
+############################################################
+# _spike_train_order_impl
+############################################################
+def _spike_train_order_impl(spike_train1, spike_train2,
+ interval=None, max_tau=None):
+ """ Implementation of bi-variatae spike train order value (Synfire Indicator).
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`pyspike.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`pyspike.SpikeTrain`
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike train order value (Synfire Indicator)
+ """
+ if interval is None:
+ # distance over the whole interval is requested: use specific function
+ # for optimal performance
+ try:
+ from .cython.cython_directionality import \
+ spike_train_order_cython as spike_train_order_func
+ if max_tau is None:
+ max_tau = 0.0
+ c, mp = spike_train_order_func(spike_train1.spikes,
+ spike_train2.spikes,
+ spike_train1.t_start,
+ spike_train1.t_end,
+ max_tau)
+ except ImportError:
+            # Cython backend not available: integrate the order profile instead
+ c, mp = spike_train_order_profile(spike_train1, spike_train2,
+ max_tau=max_tau).integral(interval)
+ return c, mp
+ else:
+ # some specific interval is provided: not yet implemented
+ raise NotImplementedError("Parameter `interval` not supported.")
+
+
+############################################################
+# spike_train_order
+############################################################
+def spike_train_order(*args, **kwargs):
+ """ Computes the spike train order (Synfire Indicator) of the given
+ spike trains.
+
+ Valid call structures::
+
+ spike_train_order(st1, st2, normalize=True) # normalized bi-variate
+ # spike train order
+ spike_train_order(st1, st2, st3) # multi-variate result of 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_train_order(spike_trains) # result for the list of spike trains
+ spike_train_order(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+    Additional arguments:
+    - `max_tau` Upper bound for coincidence window, `default=None`.
+    - `normalize` Flag indicating if the result should be normalized by the
+      number of spikes (multiplicity), default=`True`
+
+ :returns: The spike train order value (Synfire Indicator)
+ """
+ if len(args) == 1:
+ return spike_train_order_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_train_order_bi(args[0], args[1], **kwargs)
+ else:
+ return spike_train_order_multi(args, **kwargs)
+
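The dispatcher accepts the same call structures as the profile function; a
minimal sketch (expected values from `test/test_directionality.py`):

    import pyspike as spk
    from pyspike import SpikeTrain

    st1 = SpikeTrain([100, 200, 300], [0, 1000])
    st2 = SpikeTrain([105, 205, 300], [0, 1000])

    spk.spike_train_order(st1, st2)                   # -> 2/3 (normalized)
    spk.spike_train_order(st1, st2, normalize=False)  # -> 4.0
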
+
+############################################################
+# spike_train_order_bi
+############################################################
+def spike_train_order_bi(spike_train1, spike_train2, normalize=True,
+ interval=None, max_tau=None):
+ """ Computes the overall spike train order value (Synfire Indicator)
+ for two spike trains.
+
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`pyspike.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`pyspike.SpikeTrain`
+ :param normalize: Normalize by the number of spikes (multiplicity).
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The spike train order value (Synfire Indicator)
+ """
+ c, mp = _spike_train_order_impl(spike_train1, spike_train2, interval, max_tau)
+ if normalize:
+ return 1.0*c/mp
+ else:
+ return c
+
+############################################################
+# spike_train_order_multi
+############################################################
+def spike_train_order_multi(spike_trains, indices=None, normalize=True,
+ interval=None, max_tau=None):
+ """ Computes the overall spike train order value (Synfire Indicator)
+ for many spike trains.
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+    :param normalize: Normalize by the number of spikes (multiplicity).
+ :param interval: averaging interval given as a pair of floats, if None
+ the average over the whole function is computed.
+ :type interval: Pair of floats or None.
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: Spike train order values (Synfire Indicator) F for the given spike trains.
+ :rtype: double
+ """
+ if indices is None:
+ indices = np.arange(len(spike_trains))
+ indices = np.array(indices)
+ # check validity of indices
+ assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+ "Invalid index list."
+ # generate a list of possible index pairs
+ pairs = [(indices[i], j) for i in range(len(indices))
+ for j in indices[i+1:]]
+
+ e_total = 0.0
+ m_total = 0.0
+ for (i, j) in pairs:
+ e, m = _spike_train_order_impl(spike_trains[i], spike_trains[j],
+ interval, max_tau)
+ e_total += e
+ m_total += m
+
+    # no spikes contributed at all: return the maximal order value by convention
+    if m_total == 0.0:
+ return 1.0
+ else:
+ return e_total/m_total
+
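The multi-variate value can equivalently be computed from the unnormalized
directionality matrix, as `test/test_directionality.py` below verifies: the
Synfire Indicator F is the sum of the upper-triangular part of D divided by
the total number of spikes.

    import numpy as np
    import pyspike as spk
    from pyspike import SpikeTrain

    trains = [SpikeTrain([100, 200, 300], [0, 1000]),
              SpikeTrain([105, 205, 300], [0, 1000]),
              SpikeTrain([105, 195, 500], [0, 1000])]

    D = spk.spike_directionality_matrix(trains, normalize=False)
    F = np.sum(np.triu(D)) / sum(len(st) for st in trains)
    # F agrees with spk.spike_train_order(trains) up to floating point precision
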
+
+
+############################################################
+# optimal_spike_train_sorting_from_matrix
+############################################################
+def _optimal_spike_train_sorting_from_matrix(D, full_output=False):
+ """ Finds the best sorting via simulated annealing.
+    Returns the optimal permutation p and the corresponding synfire indicator F.
+ Not for direct use, call :func:`.optimal_spike_train_sorting` instead.
+
+ :param D: The directionality (Spike-ORDER) matrix.
+ :param full_output: If true, then function will additionally return the
+ number of performed iterations (default=False)
+    :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+             If `full_output=True`, (p, F, iter) is returned.
+ """
+    # simulated annealing schedule
+    T_start = 2*np.max(D)   # starting temperature
+    T_end = 1E-5 * T_start  # final temperature
+    alpha = 0.9             # cooling factor
+
+ try:
+ from .cython.cython_simulated_annealing import sim_ann_cython as sim_ann
+ except ImportError:
+ raise NotImplementedError("PySpike with Cython required for computing spike train"
+ " sorting!")
+
+ p, A, total_iter = sim_ann(D, T_start, T_end, alpha)
+
+ if full_output:
+ return p, A, total_iter
+ else:
+ return p, A
+
+
+############################################################
+# optimal_spike_train_sorting
+############################################################
+def optimal_spike_train_sorting(spike_trains, indices=None, interval=None,
+ max_tau=None, full_output=False):
+ """ Finds the best sorting of the given spike trains by computing the spike
+ directionality matrix and optimize the order using simulated annealing.
+ For a detailed description of the algorithm see:
+ `http://iopscience.iop.org/article/10.1088/1367-2630/aa68c3/meta`
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param interval: time interval filter given as a pair of floats, if None
+ the full spike trains are used (default=None).
+ :type interval: Pair of floats or None.
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound (default=None).
+ :param full_output: If true, then function will additionally return the
+ number of performed iterations (default=False)
+    :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+             If `full_output=True`, (p, F, iter) is returned.
+ """
+ D = spike_directionality_matrix(spike_trains, normalize=False,
+ indices=indices, interval=interval,
+ max_tau=max_tau)
+ return _optimal_spike_train_sorting_from_matrix(D, full_output)
+
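A usage sketch; note that the simulated annealing step requires the compiled
Cython backend, otherwise a NotImplementedError is raised. The spike trains
here are illustrative, their optimal order is not asserted anywhere in this diff:

    import pyspike as spk
    from pyspike import SpikeTrain

    # three trains with a consistent leader/follower structure
    trains = [SpikeTrain([110, 210, 310], [0, 1000]),
              SpikeTrain([100, 200, 300], [0, 1000]),
              SpikeTrain([105, 205, 305], [0, 1000])]

    p, F = spk.optimal_spike_train_sorting(trains)
    sorted_trains = [trains[i] for i in p]
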
+############################################################
+# permutate_matrix
+############################################################
+def permutate_matrix(D, p):
+ """ Helper function that applies the permutation p to the columns and rows
+ of matrix D. Return the permutated matrix :math:`D'[n,m] = D[p[n], p[m]]`.
+
+ :param D: The matrix.
+ :param d: The permutation.
+ :return: The permuated matrix D', ie :math:`D'[n,m] = D[p[n], p[m]]`
+ """
+ N = len(D)
+ D_p = np.empty_like(D)
+ for n in range(N):
+ for m in range(N):
+ D_p[n, m] = D[p[n], p[m]]
+ return D_p
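The explicit double loop above is equivalent to NumPy fancy indexing; a short
sketch of that equivalence (behavior, not the shipped implementation):

    import numpy as np

    D = np.arange(9.0).reshape(3, 3)
    p = [2, 0, 1]
    D_p = D[np.ix_(p, p)]  # same result as permutate_matrix(D, p)
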
diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py
index 80f7805..95ef454 100644
--- a/pyspike/spike_sync.py
+++ b/pyspike/spike_sync.py
@@ -8,7 +8,7 @@ from __future__ import absolute_import
import numpy as np
from functools import partial
import pyspike
-from pyspike import DiscreteFunc
+from pyspike import DiscreteFunc, SpikeTrain
from pyspike.generic import _generic_profile_multi, _generic_distance_matrix
@@ -45,9 +45,9 @@ def spike_sync_profile(*args, **kwargs):
if len(args) == 1:
return spike_sync_profile_multi(args[0], **kwargs)
elif len(args) == 2:
- return spike_sync_profile_bi(args[0], args[1])
+ return spike_sync_profile_bi(args[0], args[1], **kwargs)
else:
- return spike_sync_profile_multi(args)
+ return spike_sync_profile_multi(args, **kwargs)
############################################################
@@ -290,3 +290,52 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None):
dist_func = partial(spike_sync_bi, max_tau=max_tau)
return _generic_distance_matrix(spike_trains, dist_func,
indices, interval)
+
+
+############################################################
+# filter_by_spike_sync
+############################################################
+def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None,
+ return_removed_spikes=False):
+ """ Removes the spikes with a multi-variate spike_sync value below
+ threshold.
+ """
+ N = len(spike_trains)
+ filtered_spike_trains = []
+ removed_spike_trains = []
+
+ # cython implementation
+ try:
+ from .cython.cython_profiles import coincidence_single_profile_cython \
+ as coincidence_impl
+ except ImportError:
+ if not(pyspike.disable_backend_warning):
+ print("Warning: coincidence_single_profile_cython not found. Make \
+sure that PySpike is installed by running\n \
+'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+ # use python backend
+ from .cython.python_backend import coincidence_single_python \
+ as coincidence_impl
+
+ if max_tau is None:
+ max_tau = 0.0
+
+ for i, st in enumerate(spike_trains):
+ coincidences = np.zeros_like(st)
+ for j in range(N):
+ if i == j:
+ continue
+ coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes,
+ st.t_start, st.t_end, max_tau)
+ filtered_spikes = st[coincidences > threshold*(N-1)]
+ filtered_spike_trains.append(SpikeTrain(filtered_spikes,
+ [st.t_start, st.t_end]))
+ if return_removed_spikes:
+ removed_spikes = st[coincidences <= threshold*(N-1)]
+ removed_spike_trains.append(SpikeTrain(removed_spikes,
+ [st.t_start, st.t_end]))
+ if return_removed_spikes:
+ return [filtered_spike_trains, removed_spike_trains]
+ else:
+ return filtered_spike_trains
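A sketch of the filter, using the spike trains and expected result from
`test/test_sync_filter.py` below; only spikes whose summed coincidence values
exceed `threshold*(N-1)` survive:

    import pyspike as spk
    from pyspike import SpikeTrain

    st1 = SpikeTrain([1.0, 2.0, 3.0, 4.0], 5.0)
    st2 = SpikeTrain([1.1, 2.1, 3.8], 5.0)
    st3 = SpikeTrain([0.9, 3.1, 4.1], 5.0)

    filtered = spk.filter_by_spike_sync([st1, st2, st3], 0.75)
    # e.g. filtered[0].spikes -> [1.0, 4.0]
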
diff --git a/setup.py b/setup.py
index 5b9e677..0aea3a0 100644
--- a/setup.py
+++ b/setup.py
@@ -30,7 +30,9 @@ class numpy_include(object):
if os.path.isfile("pyspike/cython/cython_add.c") and \
os.path.isfile("pyspike/cython/cython_profiles.c") and \
- os.path.isfile("pyspike/cython/cython_distances.c"):
+ os.path.isfile("pyspike/cython/cython_distances.c") and \
+ os.path.isfile("pyspike/cython/cython_directionality.c") and \
+ os.path.isfile("pyspike/cython/cython_simulated_annealing.c"):
use_c = True
else:
use_c = False
@@ -45,7 +47,11 @@ if use_cython: # Cython is available, compile .pyx -> .c
Extension("pyspike.cython.cython_profiles",
["pyspike/cython/cython_profiles.pyx"]),
Extension("pyspike.cython.cython_distances",
- ["pyspike/cython/cython_distances.pyx"])
+ ["pyspike/cython/cython_distances.pyx"]),
+ Extension("pyspike.cython.cython_directionality",
+ ["pyspike/cython/cython_directionality.pyx"]),
+ Extension("pyspike.cython.cython_simulated_annealing",
+ ["pyspike/cython/cython_simulated_annealing.pyx"])
]
cmdclass.update({'build_ext': build_ext})
elif use_c: # c files are there, compile to binaries
@@ -55,14 +61,18 @@ elif use_c: # c files are there, compile to binaries
Extension("pyspike.cython.cython_profiles",
["pyspike/cython/cython_profiles.c"]),
Extension("pyspike.cython.cython_distances",
- ["pyspike/cython/cython_distances.c"])
+ ["pyspike/cython/cython_distances.c"]),
+ Extension("pyspike.cython.cython_directionality",
+ ["pyspike/cython/cython_directionality.c"]),
+ Extension("pyspike.cython.cython_simulated_annealing",
+ ["pyspike/cython/cython_simulated_annealing.c"])
]
# neither cython nor c files available -> automatic fall-back to python backend
setup(
name='pyspike',
packages=find_packages(exclude=['doc']),
- version='0.5.3',
+ version='0.7.0',
cmdclass=cmdclass,
ext_modules=ext_modules,
include_dirs=[numpy_include()],
@@ -88,13 +98,17 @@ train similarity',
'License :: OSI Approved :: BSD License',
- 'Programming Language :: Python :: 2',
- 'Programming Language :: Python :: 2.7',
-
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.3',
- 'Programming Language :: Python :: 3.4',
- 'Programming Language :: Python :: 3.5',
- 'Programming Language :: Python :: 3.6'
- ]
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ 'Programming Language :: Python :: 3.9',
+ 'Programming Language :: Python :: 3.10',
+ ],
+ package_data={
+ 'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c',
+ 'cython/cython_distances.c',
+ 'cython/cython_directionality.c',
+ 'cython/cython_simulated_annealing.c'],
+ 'test': ['Spike_testdata.txt']
+ }
)
diff --git a/test/test_directionality.py b/test/test_directionality.py
new file mode 100644
index 0000000..c2e9bfe
--- /dev/null
+++ b/test/test_directionality.py
@@ -0,0 +1,97 @@
+""" test_directionality.py
+
+Tests the directionality functions
+
+Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+import numpy as np
+from numpy.testing import assert_equal, assert_almost_equal, \
+ assert_array_equal
+
+import pyspike as spk
+from pyspike import SpikeTrain, DiscreteFunc
+
+
+def test_spike_directionality():
+ st1 = SpikeTrain([100, 200, 300], [0, 1000])
+ st2 = SpikeTrain([105, 205, 300], [0, 1000])
+ assert_almost_equal(spk.spike_directionality(st1, st2), 2.0/3.0)
+ assert_almost_equal(spk.spike_directionality(st1, st2, normalize=False),
+ 2.0)
+
+ # exchange order of spike trains should give exact negative profile
+ assert_almost_equal(spk.spike_directionality(st2, st1), -2.0/3.0)
+ assert_almost_equal(spk.spike_directionality(st2, st1, normalize=False),
+ -2.0)
+
+ st3 = SpikeTrain([105, 195, 500], [0, 1000])
+ assert_almost_equal(spk.spike_directionality(st1, st3), 0.0)
+ assert_almost_equal(spk.spike_directionality(st1, st3, normalize=False),
+ 0.0)
+ assert_almost_equal(spk.spike_directionality(st3, st1), 0.0)
+
+ D = spk.spike_directionality_matrix([st1, st2, st3], normalize=False)
+ D_expected = np.array([[0, 2.0, 0.0], [-2.0, 0.0, -1.0], [0.0, 1.0, 0.0]])
+ assert_array_equal(D, D_expected)
+
+ dir_profs = spk.spike_directionality_values([st1, st2, st3])
+ assert_array_equal(dir_profs[0], [1.0, 0.0, 0.0])
+ assert_array_equal(dir_profs[1], [-0.5, -1.0, 0.0])
+
+
+def test_spike_train_order():
+ st1 = SpikeTrain([100, 200, 300], [0, 1000])
+ st2 = SpikeTrain([105, 205, 300], [0, 1000])
+ st3 = SpikeTrain([105, 195, 500], [0, 1000])
+
+ expected_x12 = np.array([0, 100, 105, 200, 205, 300, 1000])
+ expected_y12 = np.array([1, 1, 1, 1, 1, 0, 0])
+ expected_mp12 = np.array([1, 1, 1, 1, 1, 2, 2])
+
+ f = spk.spike_train_order_profile(st1, st2)
+
+ assert f.almost_equal(DiscreteFunc(expected_x12, expected_y12,
+ expected_mp12))
+ assert_almost_equal(f.avrg(), 2.0/3.0)
+ assert_almost_equal(f.avrg(normalize=False), 4.0)
+ assert_almost_equal(spk.spike_train_order(st1, st2), 2.0/3.0)
+ assert_almost_equal(spk.spike_train_order(st1, st2, normalize=False), 4.0)
+
+ expected_x23 = np.array([0, 105, 195, 205, 300, 500, 1000])
+ expected_y23 = np.array([0, 0, -1, -1, 0, 0, 0])
+ expected_mp23 = np.array([2, 2, 1, 1, 1, 1, 1])
+
+ f = spk.spike_train_order_profile(st2, st3)
+
+ assert_array_equal(f.x, expected_x23)
+ assert_array_equal(f.y, expected_y23)
+ assert_array_equal(f.mp, expected_mp23)
+ assert f.almost_equal(DiscreteFunc(expected_x23, expected_y23,
+ expected_mp23))
+ assert_almost_equal(f.avrg(), -1.0/3.0)
+ assert_almost_equal(f.avrg(normalize=False), -2.0)
+ assert_almost_equal(spk.spike_train_order(st2, st3), -1.0/3.0)
+ assert_almost_equal(spk.spike_train_order(st2, st3, normalize=False), -2.0)
+
+ f = spk.spike_train_order_profile_multi([st1, st2, st3])
+
+ expected_x = np.array([0, 100, 105, 195, 200, 205, 300, 500, 1000])
+ expected_y = np.array([2, 2, 2, -2, 0, 0, 0, 0, 0])
+ expected_mp = np.array([2, 2, 4, 2, 2, 2, 4, 2, 2])
+
+ assert_array_equal(f.x, expected_x)
+ assert_array_equal(f.y, expected_y)
+ assert_array_equal(f.mp, expected_mp)
+
+ # Averaging the profile should be the same as computing the synfire indicator directly.
+ assert_almost_equal(f.avrg(), spk.spike_train_order([st1, st2, st3]))
+
+ # We can also compute the synfire indicator from the Directionality Matrix:
+ D_matrix = spk.spike_directionality_matrix([st1, st2, st3], normalize=False)
+    num_spikes = sum(len(st) for st in [st1, st2, st3])
+ syn_fire = np.sum(np.triu(D_matrix)) / num_spikes
+ assert_almost_equal(f.avrg(), syn_fire)
diff --git a/test/test_distance.py b/test/test_distance.py
index fe09f34..7ec3a72 100644
--- a/test/test_distance.py
+++ b/test/test_distance.py
@@ -11,7 +11,7 @@ Distributed under the BSD License
from __future__ import print_function
import numpy as np
from copy import copy
-from numpy.testing import assert_equal, assert_almost_equal, \
+from numpy.testing import assert_allclose, assert_almost_equal, \
assert_array_almost_equal
import pyspike as spk
@@ -41,10 +41,10 @@ def test_isi():
# print("ISI: ", f.y)
print("ISI value:", expected_isi_val)
- assert_equal(f.x, expected_times)
+ assert_allclose(f.x, expected_times)
assert_array_almost_equal(f.y, expected_isi, decimal=15)
- assert_equal(f.avrg(), expected_isi_val)
- assert_equal(spk.isi_distance(t1, t2), expected_isi_val)
+ assert_allclose(f.avrg(), expected_isi_val)
+ assert_allclose(spk.isi_distance(t1, t2), expected_isi_val)
# check with some equal spike times
t1 = SpikeTrain([0.2, 0.4, 0.6], [0.0, 1.0])
@@ -60,10 +60,10 @@ def test_isi():
f = spk.isi_profile(t1, t2)
- assert_equal(f.x, expected_times)
+ assert_allclose(f.x, expected_times)
assert_array_almost_equal(f.y, expected_isi, decimal=15)
- assert_equal(f.avrg(), expected_isi_val)
- assert_equal(spk.isi_distance(t1, t2), expected_isi_val)
+ assert_allclose(f.avrg(), expected_isi_val)
+ assert_allclose(spk.isi_distance(t1, t2), expected_isi_val)
def test_spike():
@@ -75,7 +75,7 @@ def test_spike():
f = spk.spike_profile(t1, t2)
- assert_equal(f.x, expected_times)
+ assert_allclose(f.x, expected_times)
# from SPIKY:
y_all = np.array([0.000000000000000000, 0.555555555555555580,
@@ -89,7 +89,7 @@ def test_spike():
assert_array_almost_equal(f.y2, y_all[1::2])
assert_almost_equal(f.avrg(), 0.186309523809523814, decimal=15)
- assert_equal(spk.spike_distance(t1, t2), f.avrg())
+ assert_allclose(spk.spike_distance(t1, t2), f.avrg())
t1 = SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0)
t2 = SpikeTrain([0.3, 0.45, 0.8, 0.9, 0.95], 1.0)
@@ -118,7 +118,7 @@ def test_spike():
f = spk.spike_profile(t1, t2)
- assert_equal(f.x, expected_times)
+ assert_allclose(f.x, expected_times)
assert_array_almost_equal(f.y1, expected_y1, decimal=15)
assert_array_almost_equal(f.y2, expected_y2, decimal=15)
assert_almost_equal(f.avrg(), expected_spike_val, decimal=15)
@@ -157,7 +157,7 @@ def test_spike():
f = spk.spike_profile(t1, t2)
- assert_equal(f.x, expected_times)
+ assert_allclose(f.x, expected_times)
assert_array_almost_equal(f.y1, expected_y1, decimal=14)
assert_array_almost_equal(f.y2, expected_y2, decimal=14)
assert_almost_equal(f.avrg(), expected_spike_val, decimal=16)
@@ -236,8 +236,8 @@ def test_spike_sync():
f.add(f2)
i12 = f.integral()
- assert_equal(i1[0]+i2[0], i12[0])
- assert_equal(i1[1]+i2[1], i12[1])
+ assert_allclose(i1[0]+i2[0], i12[0])
+ assert_allclose(i1[1]+i2[1], i12[1])
def check_multi_profile(profile_func, profile_func_multi, dist_func_multi):
@@ -258,7 +258,7 @@ def check_multi_profile(profile_func, profile_func_multi, dist_func_multi):
f_multi = profile_func_multi(spike_trains, [0, 1])
assert f_multi.almost_equal(f12, decimal=14)
d = dist_func_multi(spike_trains, [0, 1])
- assert_equal(f_multi.avrg(), d)
+ assert_allclose(f_multi.avrg(), d)
f_multi1 = profile_func_multi(spike_trains, [1, 2, 3])
f_multi2 = profile_func_multi(spike_trains[1:])
@@ -329,11 +329,11 @@ def test_multi_spike_sync():
f = spk.spike_sync_profile_multi(spike_trains)
- assert_equal(spike_times, f.x[1:-1])
- assert_equal(len(f.x), len(f.y))
+ assert_allclose(spike_times, f.x[1:-1])
+ assert_allclose(len(f.x), len(f.y))
- assert_equal(np.sum(f.y[1:-1]), 39932)
- assert_equal(np.sum(f.mp[1:-1]), 85554)
+ assert_allclose(np.sum(f.y[1:-1]), 39932)
+ assert_allclose(np.sum(f.mp[1:-1]), 85554)
# example with 2 empty spike trains
sts = []
@@ -365,16 +365,16 @@ def check_dist_matrix(dist_func, dist_matrix_func):
f_matrix = dist_matrix_func(spike_trains)
# check zero diagonal
for i in range(4):
- assert_equal(0.0, f_matrix[i, i])
+ assert_allclose(0.0, f_matrix[i, i])
for i in range(4):
for j in range(i+1, 4):
- assert_equal(f_matrix[i, j], f_matrix[j, i])
- assert_equal(f12, f_matrix[1, 0])
- assert_equal(f13, f_matrix[2, 0])
- assert_equal(f14, f_matrix[3, 0])
- assert_equal(f23, f_matrix[2, 1])
- assert_equal(f24, f_matrix[3, 1])
- assert_equal(f34, f_matrix[3, 2])
+ assert_allclose(f_matrix[i, j], f_matrix[j, i])
+ assert_allclose(f12, f_matrix[1, 0])
+ assert_allclose(f13, f_matrix[2, 0])
+ assert_allclose(f14, f_matrix[3, 0])
+ assert_allclose(f23, f_matrix[2, 1])
+ assert_allclose(f24, f_matrix[3, 1])
+ assert_allclose(f34, f_matrix[3, 2])
def test_isi_matrix():
@@ -397,13 +397,13 @@ def test_regression_spiky():
isi_dist = spk.isi_distance(st1, st2)
assert_almost_equal(isi_dist, 9.0909090909090939e-02, decimal=15)
isi_profile = spk.isi_profile(st1, st2)
- assert_equal(isi_profile.y, 0.1/1.1 * np.ones_like(isi_profile.y))
+ assert_allclose(isi_profile.y, 0.1/1.1 * np.ones_like(isi_profile.y))
spike_dist = spk.spike_distance(st1, st2)
- assert_equal(spike_dist, 0.211058782487353908)
+ assert_allclose(spike_dist, 0.211058782487353908)
spike_sync = spk.spike_sync(st1, st2)
- assert_equal(spike_sync, 8.6956521739130432e-01)
+ assert_allclose(spike_sync, 8.6956521739130432e-01)
# multivariate check
@@ -414,7 +414,7 @@ def test_regression_spiky():
assert_almost_equal(isi_dist, 0.17051816816999129656, decimal=15)
spike_profile = spk.spike_profile_multi(spike_trains)
- assert_equal(len(spike_profile.y1)+len(spike_profile.y2), 1252)
+ assert_allclose(len(spike_profile.y1)+len(spike_profile.y2), 1252)
spike_dist = spk.spike_distance_multi(spike_trains)
# get the full precision from SPIKY
@@ -422,7 +422,7 @@ def test_regression_spiky():
spike_sync = spk.spike_sync_multi(spike_trains)
# get the full precision from SPIKY
- assert_equal(spike_sync, 0.7183531505298066)
+ assert_allclose(spike_sync, 0.7183531505298066)
# Eero's edge correction example
st1 = SpikeTrain([0.5, 1.5, 2.5], 6.0)
@@ -439,7 +439,7 @@ def test_regression_spiky():
expected_y1 = y_all[::2]
expected_y2 = y_all[1::2]
- assert_equal(f.x, expected_times)
+ assert_allclose(f.x, expected_times)
assert_array_almost_equal(f.y1, expected_y1, decimal=14)
assert_array_almost_equal(f.y2, expected_y2, decimal=14)
@@ -452,15 +452,15 @@ def test_multi_variate_subsets():
v1 = spk.isi_distance_multi(spike_trains_sub_set)
v2 = spk.isi_distance_multi(spike_trains, sub_set)
- assert_equal(v1, v2)
+ assert_allclose(v1, v2)
v1 = spk.spike_distance_multi(spike_trains_sub_set)
v2 = spk.spike_distance_multi(spike_trains, sub_set)
- assert_equal(v1, v2)
+ assert_allclose(v1, v2)
v1 = spk.spike_sync_multi(spike_trains_sub_set)
v2 = spk.spike_sync_multi(spike_trains, sub_set)
- assert_equal(v1, v2)
+ assert_allclose(v1, v2)
if __name__ == "__main__":
diff --git a/test/test_empty.py b/test/test_empty.py
index 4d0a5cf..93fd2c1 100644
--- a/test/test_empty.py
+++ b/test/test_empty.py
@@ -10,7 +10,7 @@ Distributed under the BSD License
from __future__ import print_function
import numpy as np
-from numpy.testing import assert_equal, assert_almost_equal, \
+from numpy.testing import assert_allclose, assert_almost_equal, \
assert_array_equal, assert_array_almost_equal
import pyspike as spk
@@ -33,18 +33,18 @@ def test_isi_empty():
st1 = SpikeTrain([], edges=(0.0, 1.0))
st2 = SpikeTrain([], edges=(0.0, 1.0))
d = spk.isi_distance(st1, st2)
- assert_equal(d, 0.0)
+ assert_allclose(d, 0.0)
prof = spk.isi_profile(st1, st2)
- assert_equal(d, prof.avrg())
+ assert_allclose(d, prof.avrg())
assert_array_equal(prof.x, [0.0, 1.0])
assert_array_equal(prof.y, [0.0, ])
st1 = SpikeTrain([], edges=(0.0, 1.0))
st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0))
d = spk.isi_distance(st1, st2)
- assert_equal(d, 0.6*0.4+0.4*0.6)
+ assert_allclose(d, 0.6*0.4+0.4*0.6)
prof = spk.isi_profile(st1, st2)
- assert_equal(d, prof.avrg())
+ assert_allclose(d, prof.avrg())
assert_array_equal(prof.x, [0.0, 0.4, 1.0])
assert_array_equal(prof.y, [0.6, 0.4])
@@ -53,7 +53,7 @@ def test_isi_empty():
d = spk.isi_distance(st1, st2)
assert_almost_equal(d, 0.2/0.6*0.4 + 0.0 + 0.2/0.6*0.4, decimal=15)
prof = spk.isi_profile(st1, st2)
- assert_equal(d, prof.avrg())
+ assert_allclose(d, prof.avrg())
assert_array_almost_equal(prof.x, [0.0, 0.4, 0.6, 1.0], decimal=15)
assert_array_almost_equal(prof.y, [0.2/0.6, 0.0, 0.2/0.6], decimal=15)
@@ -62,9 +62,9 @@ def test_spike_empty():
st1 = SpikeTrain([], edges=(0.0, 1.0))
st2 = SpikeTrain([], edges=(0.0, 1.0))
d = spk.spike_distance(st1, st2)
- assert_equal(d, 0.0)
+ assert_allclose(d, 0.0)
prof = spk.spike_profile(st1, st2)
- assert_equal(d, prof.avrg())
+ assert_allclose(d, prof.avrg())
assert_array_equal(prof.x, [0.0, 1.0])
assert_array_equal(prof.y1, [0.0, ])
assert_array_equal(prof.y2, [0.0, ])
@@ -75,7 +75,7 @@ def test_spike_empty():
d_expect = 2*0.4*0.4*1.0/(0.4+1.0)**2 + 2*0.6*0.4*1.0/(0.6+1.0)**2
assert_almost_equal(d, d_expect, decimal=15)
prof = spk.spike_profile(st1, st2)
- assert_equal(d, prof.avrg())
+ assert_allclose(d, prof.avrg())
assert_array_equal(prof.x, [0.0, 0.4, 1.0])
assert_array_almost_equal(prof.y1, [2*0.4*1.0/(0.4+1.0)**2,
2*0.4*1.0/(0.6+1.0)**2],
@@ -100,7 +100,7 @@ def test_spike_empty():
assert_almost_equal(d, expected_spike_val, decimal=15)
prof = spk.spike_profile(st1, st2)
- assert_equal(d, prof.avrg())
+ assert_allclose(d, prof.avrg())
assert_array_almost_equal(prof.x, [0.0, 0.4, 0.6, 1.0], decimal=15)
assert_array_almost_equal(prof.y1, expected_y1, decimal=15)
assert_array_almost_equal(prof.y2, expected_y2, decimal=15)
@@ -110,18 +110,18 @@ def test_spike_sync_empty():
st1 = SpikeTrain([], edges=(0.0, 1.0))
st2 = SpikeTrain([], edges=(0.0, 1.0))
d = spk.spike_sync(st1, st2)
- assert_equal(d, 1.0)
+ assert_allclose(d, 1.0)
prof = spk.spike_sync_profile(st1, st2)
- assert_equal(d, prof.avrg())
+ assert_allclose(d, prof.avrg())
assert_array_equal(prof.x, [0.0, 1.0])
assert_array_equal(prof.y, [1.0, 1.0])
st1 = SpikeTrain([], edges=(0.0, 1.0))
st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0))
d = spk.spike_sync(st1, st2)
- assert_equal(d, 0.0)
+ assert_allclose(d, 0.0)
prof = spk.spike_sync_profile(st1, st2)
- assert_equal(d, prof.avrg())
+ assert_allclose(d, prof.avrg())
assert_array_equal(prof.x, [0.0, 0.4, 1.0])
assert_array_equal(prof.y, [0.0, 0.0, 0.0])
@@ -130,7 +130,7 @@ def test_spike_sync_empty():
d = spk.spike_sync(st1, st2)
assert_almost_equal(d, 1.0, decimal=15)
prof = spk.spike_sync_profile(st1, st2)
- assert_equal(d, prof.avrg())
+ assert_allclose(d, prof.avrg())
assert_array_almost_equal(prof.x, [0.0, 0.4, 0.6, 1.0], decimal=15)
assert_array_almost_equal(prof.y, [1.0, 1.0, 1.0, 1.0], decimal=15)
@@ -139,7 +139,7 @@ def test_spike_sync_empty():
d = spk.spike_sync(st1, st2)
assert_almost_equal(d, 0.0, decimal=15)
prof = spk.spike_sync_profile(st1, st2)
- assert_equal(d, prof.avrg())
+ assert_allclose(d, prof.avrg())
assert_array_almost_equal(prof.x, [0.0, 0.2, 0.8, 1.0], decimal=15)
assert_array_almost_equal(prof.y, [0.0, 0.0, 0.0, 0.0], decimal=15)
@@ -148,9 +148,9 @@ def test_spike_sync_empty():
st2 = SpikeTrain([2.1, 7.0], [0, 10.0])
st3 = SpikeTrain([5.1, 6.0], [0, 10.0])
res = spk.spike_sync_profile(st1, st2).avrg(interval=[3.0, 4.0])
- assert_equal(res, 1.0)
+ assert_allclose(res, 1.0)
res = spk.spike_sync(st1, st2, interval=[3.0, 4.0])
- assert_equal(res, 1.0)
+ assert_allclose(res, 1.0)
sync_matrix = spk.spike_sync_matrix([st1, st2, st3], interval=[3.0, 4.0])
assert_array_equal(sync_matrix, np.ones((3, 3)) - np.diag(np.ones(3)))
diff --git a/test/test_function.py b/test/test_function.py
index 92d378d..ba10ae7 100644
--- a/test/test_function.py
+++ b/test/test_function.py
@@ -10,7 +10,8 @@ Distributed under the BSD License
from __future__ import print_function
import numpy as np
from copy import copy
-from numpy.testing import assert_equal, assert_almost_equal, \
+from nose.tools import raises
+from numpy.testing import assert_allclose, assert_almost_equal, \
assert_array_equal, assert_array_almost_equal
import pyspike as spk
@@ -23,14 +24,14 @@ def test_pwc():
f = spk.PieceWiseConstFunc(x, y)
# function values
- assert_equal(f(0.0), 1.0)
- assert_equal(f(0.5), 1.0)
- assert_equal(f(1.0), 0.25)
- assert_equal(f(2.0), 0.5)
- assert_equal(f(2.25), 1.5)
- assert_equal(f(2.5), 2.25/2)
- assert_equal(f(3.5), 0.75)
- assert_equal(f(4.0), 0.75)
+ assert_allclose(f(0.0), 1.0)
+ assert_allclose(f(0.5), 1.0)
+ assert_allclose(f(1.0), 0.25)
+ assert_allclose(f(2.0), 0.5)
+ assert_allclose(f(2.25), 1.5)
+ assert_allclose(f(2.5), 2.25/2)
+ assert_allclose(f(3.5), 0.75)
+ assert_allclose(f(4.0), 0.75)
assert_array_equal(f([0.0, 0.5, 1.0, 2.0, 2.25, 2.5, 3.5, 4.0]),
[1.0, 1.0, 0.25, 0.5, 1.5, 2.25/2, 0.75, 0.75])
@@ -49,6 +50,8 @@ def test_pwc():
assert_almost_equal(a, (0.5-0.5+0.5*1.5+1.0*0.75)/3.0, decimal=16)
a = f.avrg([1.5, 3.5])
assert_almost_equal(a, (-0.5*0.5+0.5*1.5+1.0*0.75)/2.0, decimal=16)
+ a = f.avrg([1.0, 2.0])
+ assert_almost_equal(a, (1.0*-0.5)/1.0, decimal=16)
a = f.avrg([1.0, 3.5])
assert_almost_equal(a, (-0.5*1.0+0.5*1.5+1.0*0.75)/2.5, decimal=16)
a = f.avrg([1.0, 4.0])
@@ -120,6 +123,53 @@ def test_pwc_avrg():
assert_array_almost_equal(f1.x, x_expected, decimal=16)
assert_array_almost_equal(f1.y, y_expected, decimal=16)
+def test_pwc_integral():
+ # some random data
+ x = [0.0, 1.0, 2.0, 2.5, 4.0]
+ y = [1.0, -0.5, 1.5, 0.75]
+ f1 = spk.PieceWiseConstFunc(x, y)
+
+ # test full interval
+    full = 1.0*1.0 + 1.0*-0.5 + 0.5*1.5 + 1.5*0.75
+ assert_allclose(f1.integral(), full)
+ assert_allclose(f1.integral((np.min(x),np.max(x))), full)
+ # test part interval, spanning an edge
+ assert_allclose(f1.integral((0.5,1.5)), 0.5*1.0 + 0.5*-0.5)
+ # test part interval, just over two edges
+ assert_almost_equal(f1.integral((1.0-1e-16,2+1e-16)), 1.0*-0.5, decimal=14)
+ # test part interval, between two edges
+ assert_allclose(f1.integral((1.0,2.0)), 1.0*-0.5)
+ assert_allclose(f1.integral((1.2,1.7)), (1.7-1.2)*-0.5)
+ # test part interval, start to before and after edge
+ assert_allclose(f1.integral((0.0,0.7)), 0.7*1.0)
+ assert_allclose(f1.integral((0.0,1.1)), 1.0*1.0+0.1*-0.5)
+ # test part interval, before and after edge till end
+ assert_allclose(f1.integral((2.6,4.0)), (4.0-2.6)*0.75)
+ assert_allclose(f1.integral((2.4,4.0)), (2.5-2.4)*1.5+(4-2.5)*0.75)
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_inv():
+ # some random data
+ x = [0.0, 1.0, 2.0, 2.5, 4.0]
+ y = [1.0, -0.5, 1.5, 0.75]
+ f1 = spk.PieceWiseConstFunc(x, y)
+ f1.integral((3,2))
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_oob_1():
+ # some random data
+ x = [0.0, 1.0, 2.0, 2.5, 4.0]
+ y = [1.0, -0.5, 1.5, 0.75]
+ f1 = spk.PieceWiseConstFunc(x, y)
+ f1.integral((1,6))
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_oob_2():
+ # some random data
+ x = [0.0, 1.0, 2.0, 2.5, 4.0]
+ y = [1.0, -0.5, 1.5, 0.75]
+ f1 = spk.PieceWiseConstFunc(x, y)
+ f1.integral((-1,3))
def test_pwl():
x = [0.0, 1.0, 2.0, 2.5, 4.0]
@@ -128,14 +178,14 @@ def test_pwl():
f = spk.PieceWiseLinFunc(x, y1, y2)
# function values
- assert_equal(f(0.0), 1.0)
- assert_equal(f(0.5), 1.25)
- assert_equal(f(1.0), 0.5)
- assert_equal(f(2.0), 1.1/2)
- assert_equal(f(2.25), 1.5)
- assert_equal(f(2.5), 2.25/2)
- assert_equal(f(3.5), 0.75-0.5*1.0/1.5)
- assert_equal(f(4.0), 0.25)
+ assert_allclose(f(0.0), 1.0)
+ assert_allclose(f(0.5), 1.25)
+ assert_allclose(f(1.0), 0.5)
+ assert_allclose(f(2.0), 1.1/2)
+ assert_allclose(f(2.25), 1.5)
+ assert_allclose(f(2.5), 2.25/2)
+ assert_allclose(f(3.5), 0.75-0.5*1.0/1.5)
+ assert_allclose(f(4.0), 0.25)
assert_array_equal(f([0.0, 0.5, 1.0, 2.0, 2.25, 2.5, 3.5, 4.0]),
[1.0, 1.25, 0.5, 0.55, 1.5, 2.25/2, 0.75-0.5/1.5, 0.25])
@@ -162,6 +212,18 @@ def test_pwl():
a = f.avrg([1.0, 4.0])
assert_almost_equal(a, (-0.45 + 0.75 + 1.5*0.5) / 3.0, decimal=16)
+ # interval between support points
+ a = f.avrg([1.1, 1.5])
+ assert_almost_equal(a, (-0.5+0.1*0.1 - 0.45) * 0.5, decimal=14)
+
+ # starting at a support point
+ a = f.avrg([1.0, 1.5])
+ assert_almost_equal(a, (-0.5 - 0.45) * 0.5, decimal=14)
+
+ # start and end at support point
+ a = f.avrg([1.0, 2.0])
+ assert_almost_equal(a, (-0.5 - 0.4) * 0.5, decimal=14)
+
# averaging over multiple intervals
a = f.avrg([(0.5, 1.5), (1.5, 2.5)])
assert_almost_equal(a, (1.375*0.5 - 0.45 + 0.75)/2.0, decimal=16)
diff --git a/test/test_generic_interfaces.py b/test/test_generic_interfaces.py
index 7f08067..553f3f4 100644
--- a/test/test_generic_interfaces.py
+++ b/test/test_generic_interfaces.py
@@ -9,7 +9,7 @@ Distributed under the BSD License
"""
from __future__ import print_function
-from numpy.testing import assert_equal
+from numpy.testing import assert_allclose
import pyspike as spk
from pyspike import SpikeTrain
@@ -43,33 +43,33 @@ def check_func(dist_func):
isi12 = dist_func(t1, t2)
isi12_ = dist_func([t1, t2])
- assert_equal(isi12, isi12_)
+ assert_allclose(isi12, isi12_)
isi12_ = dist_func(spike_trains, indices=[0, 1])
- assert_equal(isi12, isi12_)
+ assert_allclose(isi12, isi12_)
isi123 = dist_func(t1, t2, t3)
isi123_ = dist_func([t1, t2, t3])
- assert_equal(isi123, isi123_)
+ assert_allclose(isi123, isi123_)
isi123_ = dist_func(spike_trains, indices=[0, 1, 2])
- assert_equal(isi123, isi123_)
+ assert_allclose(isi123, isi123_)
# run the same test with an additional interval parameter
isi12 = dist_func(t1, t2, interval=[0.0, 0.5])
isi12_ = dist_func([t1, t2], interval=[0.0, 0.5])
- assert_equal(isi12, isi12_)
+ assert_allclose(isi12, isi12_)
isi12_ = dist_func(spike_trains, indices=[0, 1], interval=[0.0, 0.5])
- assert_equal(isi12, isi12_)
+ assert_allclose(isi12, isi12_)
isi123 = dist_func(t1, t2, t3, interval=[0.0, 0.5])
isi123_ = dist_func([t1, t2, t3], interval=[0.0, 0.5])
- assert_equal(isi123, isi123_)
+ assert_allclose(isi123, isi123_)
isi123_ = dist_func(spike_trains, indices=[0, 1, 2], interval=[0.0, 0.5])
- assert_equal(isi123, isi123_)
+ assert_allclose(isi123, isi123_)
def test_isi_profile():
diff --git a/test/test_regression/test_regression_15.py b/test/test_regression/test_regression_15.py
index 54adf23..81b5bb0 100644
--- a/test/test_regression/test_regression_15.py
+++ b/test/test_regression/test_regression_15.py
@@ -11,7 +11,7 @@ Distributed under the BSD License
from __future__ import division
import numpy as np
-from numpy.testing import assert_equal, assert_almost_equal, \
+from numpy.testing import assert_allclose, assert_almost_equal, \
assert_array_almost_equal
import pyspike as spk
@@ -28,15 +28,15 @@ def test_regression_15_isi():
N = len(spike_trains)
dist_mat = spk.isi_distance_matrix(spike_trains)
- assert_equal(dist_mat.shape, (N, N))
+ assert_allclose(dist_mat.shape, (N, N))
ind = np.arange(N//2)
dist_mat = spk.isi_distance_matrix(spike_trains, ind)
- assert_equal(dist_mat.shape, (N//2, N//2))
+ assert_allclose(dist_mat.shape, (N//2, N//2))
ind = np.arange(N//2, N)
dist_mat = spk.isi_distance_matrix(spike_trains, ind)
- assert_equal(dist_mat.shape, (N//2, N//2))
+ assert_allclose(dist_mat.shape, (N//2, N//2))
def test_regression_15_spike():
@@ -46,15 +46,15 @@ def test_regression_15_spike():
N = len(spike_trains)
dist_mat = spk.spike_distance_matrix(spike_trains)
- assert_equal(dist_mat.shape, (N, N))
+ assert_allclose(dist_mat.shape, (N, N))
ind = np.arange(N//2)
dist_mat = spk.spike_distance_matrix(spike_trains, ind)
- assert_equal(dist_mat.shape, (N//2, N//2))
+ assert_allclose(dist_mat.shape, (N//2, N//2))
ind = np.arange(N//2, N)
dist_mat = spk.spike_distance_matrix(spike_trains, ind)
- assert_equal(dist_mat.shape, (N//2, N//2))
+ assert_allclose(dist_mat.shape, (N//2, N//2))
def test_regression_15_sync():
@@ -64,15 +64,15 @@ def test_regression_15_sync():
N = len(spike_trains)
dist_mat = spk.spike_sync_matrix(spike_trains)
- assert_equal(dist_mat.shape, (N, N))
+ assert_allclose(dist_mat.shape, (N, N))
ind = np.arange(N//2)
dist_mat = spk.spike_sync_matrix(spike_trains, ind)
- assert_equal(dist_mat.shape, (N//2, N//2))
+ assert_allclose(dist_mat.shape, (N//2, N//2))
ind = np.arange(N//2, N)
dist_mat = spk.spike_sync_matrix(spike_trains, ind)
- assert_equal(dist_mat.shape, (N//2, N//2))
+ assert_allclose(dist_mat.shape, (N//2, N//2))
if __name__ == "__main__":
diff --git a/test/test_spikes.py b/test/test_spikes.py
index ee505b5..579f8e1 100644
--- a/test/test_spikes.py
+++ b/test/test_spikes.py
@@ -9,7 +9,7 @@ Distributed under the BSD License
from __future__ import print_function
import numpy as np
-from numpy.testing import assert_equal
+from numpy.testing import assert_allclose
import pyspike as spk
@@ -29,7 +29,7 @@ def test_load_from_txt():
spike_times = [64.886, 305.81, 696, 937.77, 1059.7, 1322.2, 1576.1,
1808.1, 2121.5, 2381.1, 2728.6, 2966.9, 3223.7, 3473.7,
3644.3, 3936.3]
- assert_equal(spike_times, spike_trains[0].spikes)
+ assert_allclose(spike_times, spike_trains[0].spikes)
# check auxiliary spikes
for spike_train in spike_trains:
@@ -47,9 +47,9 @@ def test_load_time_series():
# check spike trains
for n in range(len(spike_trains)):
- assert_equal(spike_trains[n].spikes, spike_trains_check[n].spikes)
- assert_equal(spike_trains[n].t_start, 0)
- assert_equal(spike_trains[n].t_end, 4000)
+ assert_allclose(spike_trains[n].spikes, spike_trains_check[n].spikes)
+ assert_allclose(spike_trains[n].t_start, 0)
+ assert_allclose(spike_trains[n].t_end, 4000)
def check_merged_spikes(merged_spikes, spike_trains):
diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py
new file mode 100644
index 0000000..0b915db
--- /dev/null
+++ b/test/test_sync_filter.py
@@ -0,0 +1,95 @@
+""" test_sync_filter.py
+
+Tests the spike sync based filtering
+
+Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+from __future__ import print_function
+import numpy as np
+from numpy.testing import assert_allclose, assert_almost_equal, \
+ assert_array_almost_equal
+
+import pyspike as spk
+from pyspike import SpikeTrain
+
+
+def test_single_prof():
+ st1 = np.array([1.0, 2.0, 3.0, 4.0])
+ st2 = np.array([1.1, 2.1, 3.8])
+ st3 = np.array([0.9, 3.1, 4.1])
+
+ # cython implementation
+ try:
+ from pyspike.cython.cython_profiles import \
+ coincidence_single_profile_cython as coincidence_impl
+ except ImportError:
+ from pyspike.cython.python_backend import \
+ coincidence_single_python as coincidence_impl
+
+ sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+ SpikeTrain(st2, 5.0))
+
+ coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+ print(coincidences)
+ for i, t in enumerate(st1):
+ assert_allclose(coincidences[i], sync_prof.y[sync_prof.x == t],
+ err_msg="At index %d" % i)
+
+ coincidences = np.array(coincidence_impl(st2, st1, 0, 5.0, 0.0))
+ for i, t in enumerate(st2):
+ assert_allclose(coincidences[i], sync_prof.y[sync_prof.x == t],
+ err_msg="At index %d" % i)
+
+ sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+ SpikeTrain(st3, 5.0))
+
+ coincidences = np.array(coincidence_impl(st1, st3, 0, 5.0, 0.0))
+ for i, t in enumerate(st1):
+ assert_allclose(coincidences[i], sync_prof.y[sync_prof.x == t],
+ err_msg="At index %d" % i)
+
+ st1 = np.array([1.0, 2.0, 3.0, 4.0])
+ st2 = np.array([1.0, 2.0, 4.0])
+
+ sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+ SpikeTrain(st2, 5.0))
+
+ coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+ for i, t in enumerate(st1):
+ expected = sync_prof.y[sync_prof.x == t]/sync_prof.mp[sync_prof.x == t]
+ assert_allclose(coincidences[i], expected,
+ err_msg="At index %d" % i)
+
+
+def test_filter():
+ st1 = SpikeTrain(np.array([1.0, 2.0, 3.0, 4.0]), 5.0)
+ st2 = SpikeTrain(np.array([1.1, 2.1, 3.8]), 5.0)
+ st3 = SpikeTrain(np.array([0.9, 3.1, 4.1]), 5.0)
+
+ # filtered_spike_trains = spk.filter_by_spike_sync([st1, st2], 0.5)
+
+ # assert_allclose(filtered_spike_trains[0].spikes, [1.0, 2.0, 4.0])
+ # assert_allclose(filtered_spike_trains[1].spikes, [1.1, 2.1, 3.8])
+
+ # filtered_spike_trains = spk.filter_by_spike_sync([st2, st1], 0.5)
+
+ # assert_allclose(filtered_spike_trains[0].spikes, [1.1, 2.1, 3.8])
+ # assert_allclose(filtered_spike_trains[1].spikes, [1.0, 2.0, 4.0])
+
+ filtered_spike_trains = spk.filter_by_spike_sync([st1, st2, st3], 0.75)
+
+ for st in filtered_spike_trains:
+ print(st.spikes)
+
+ assert_allclose(filtered_spike_trains[0].spikes, [1.0, 4.0])
+ assert_allclose(filtered_spike_trains[1].spikes, [1.1, 3.8])
+ assert_allclose(filtered_spike_trains[2].spikes, [0.9, 4.1])
+
+
+if __name__ == "main":
+ test_single_prof()
+ test_filter()