summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMario Mulansky <mario.mulansky@gmx.net>2016-03-29 12:35:51 +0200
committerMario Mulansky <mario.mulansky@gmx.net>2016-03-29 12:35:51 +0200
commitad29154d8c152996d16c012dcc5798c5f1957aed (patch)
treef01575082a649061362178d4a9870cb5fe2c33f6
parent1fa8493ce15af8cd4c057eece155f1557fc241ea (diff)
parentc17cc8602414cec883c412008a4300b2c7ac7f80 (diff)
Merge branch 'master' into new_directionality
Conflicts: pyspike/__init__.py pyspike/cython/cython_directionality.pyx pyspike/cython/directionality_python_backend.py pyspike/spike_directionality.py setup.py
-rw-r--r--.travis.yml10
-rw-r--r--Changelog6
-rw-r--r--Contributors.txt1
-rw-r--r--MANIFEST.in7
-rw-r--r--Readme.rst16
-rw-r--r--doc/conf.py4
-rw-r--r--doc/tutorial.rst54
-rw-r--r--examples/averages.py2
-rw-r--r--examples/merge.py6
-rw-r--r--examples/multivariate.py6
-rw-r--r--examples/performance.py29
-rw-r--r--examples/plot.py5
-rw-r--r--examples/profiles.py4
-rw-r--r--examples/spike_sync.py2
-rw-r--r--pyspike/DiscreteFunc.py10
-rw-r--r--pyspike/PieceWiseConstFunc.py6
-rw-r--r--pyspike/PieceWiseLinFunc.py8
-rw-r--r--pyspike/SpikeTrain.py2
-rw-r--r--pyspike/__init__.py22
-rw-r--r--pyspike/cython/cython_distances.pyx134
-rw-r--r--pyspike/cython/cython_profiles.pyx110
-rw-r--r--pyspike/cython/python_backend.py108
-rw-r--r--pyspike/generic.py9
-rw-r--r--pyspike/isi_distance.py157
-rw-r--r--pyspike/psth.py2
-rw-r--r--pyspike/spike_distance.py163
-rw-r--r--pyspike/spike_sync.py164
-rw-r--r--setup.py17
-rw-r--r--test/numeric/regression_random_results_cSPIKY.matbin0 -> 149104 bytes
-rw-r--r--test/numeric/regression_random_spikes.matbin0 -> 16241579 bytes
-rw-r--r--test/numeric/test_regression_random_spikes.py127
-rw-r--r--test/test_distance.py71
-rw-r--r--test/test_empty.py18
-rw-r--r--test/test_generic_interfaces.py105
-rw-r--r--test/test_regression/test_regression_15.py39
-rw-r--r--test/test_spikes.py9
36 files changed, 1031 insertions, 402 deletions
diff --git a/.travis.yml b/.travis.yml
index 1035775..d23d865 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,12 +2,22 @@ language: python
python:
- "2.6"
- "2.7"
+ - "3.3"
+ - "3.4"
+ - "3.5"
env:
- CYTHON_INSTALL="pip install -q cython"
- CYTHON_INSTALL=""
+before_install:
+ - sudo apt-get update
+ - sudo apt-get install libblas-dev
+ - sudo apt-get install liblapack-dev
+ - sudo apt-get install gfortran
install:
+ - pip install scipy
- $CYTHON_INSTALL
script:
- python setup.py build_ext --inplace
- nosetests
+ - nosetests test/numeric
diff --git a/Changelog b/Changelog
index 519dd3b..2be5e52 100644
--- a/Changelog
+++ b/Changelog
@@ -1,3 +1,9 @@
+PySpike v0.4:
+ * Python 3 support (thanks to Igor Gnatenko)
+ * list interface to SpikeTrain class
+ * disable_backend_warning property
+ * several bugfixes
+
PySpike v0.3:
* addition of __version__ attribute
* restructured docs, Readme now only contains basic examples
diff --git a/Contributors.txt b/Contributors.txt
index 83563c7..512aa7f 100644
--- a/Contributors.txt
+++ b/Contributors.txt
@@ -1,5 +1,6 @@
Python/C Programming:
- Mario Mulansky
+- Igor Gnatenko
Scientific Methods:
- Thomas Kreuz
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..aed0ae0
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,7 @@
+include *.rst
+include *.txt
+include pyspike/cython/*.c
+include directionality/cython/*.c
+recursive-include examples *.py *.txt
+recursive-include test *.py *.txt
+recursive-include doc *
diff --git a/Readme.rst b/Readme.rst
index 69a86e8..542f4b3 100644
--- a/Readme.rst
+++ b/Readme.rst
@@ -7,8 +7,8 @@ PySpike
:target: https://travis-ci.org/mariomulansky/PySpike
PySpike is a Python library for the numerical analysis of spike train similarity.
-Its core functionality is the implementation of the bivariate ISI_ and SPIKE_ distance [#]_ [#]_ as well as SPIKE-Synchronization_ [#]_.
-Additionally, it provides functions to compute multivariate profiles, distance matrices, as well as averaging and general spike train processing.
+Its core functionality is the implementation of the ISI_ and SPIKE_ distance [#]_ [#]_ as well as SPIKE-Synchronization_ [#]_.
+It provides functions to compute multivariate profiles, distance matrices, as well as averaging and general spike train processing.
All computation intensive parts are implemented in C via cython_ to reach a competitive performance (factor 100-200 over plain Python).
PySpike provides the same fundamental functionality as the SPIKY_ framework for Matlab, which additionally contains spike-train generators, more spike train distance measures and many visualization routines.
@@ -24,6 +24,8 @@ All source codes are available on `Github <https://github.com/mariomulansky/PySp
Important Changelog
-----------------------------
+With version 0.5.0, the interfaces have been unified and the specific functions for multivariate computations have become deprecated.
+
With version 0.2.0, the :code:`SpikeTrain` class has been introduced to represent spike trains.
This is a breaking change in the function interfaces.
Hence, programs written for older versions of PySpike (0.1.x) will not run with newer versions.
@@ -76,7 +78,7 @@ Therefore, add your :code:`/path/to/PySpike` to the :code:`$PYTHONPATH` environm
Examples
-----------------------------
-The following code loads some exemplary spike trains, computes the dissimilarity profile of the ISI-distance of the first two :code:`SpikeTrain` s, and plots it with matplotlib:
+The following code loads some exemplary spike trains, computes the dissimilarity profile of the ISI-distance of the first two :code:`SpikeTrain` objects, and plots it with matplotlib:
.. code:: python
@@ -92,15 +94,15 @@ The following code loads some exemplary spike trains, computes the dissimilarity
plt.show()
-The following example computes the multivariate ISI-, SPIKE- and SPIKE-Sync-profile for a list of spike trains using the :code:`isi_profile_multi`, :code:`spike_profile_multi`, :code:`spike_sync_profile_multi` functions:
+The following example computes the multivariate ISI-, SPIKE- and SPIKE-Sync-profile for a list of spike trains loaded from a text file:
.. code:: python
spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
edges=(0, 4000))
- avrg_isi_profile = spk.isi_profile_multi(spike_trains)
- avrg_spike_profile = spk.spike_profile_multi(spike_trains)
- avrg_spike_sync_profile = spk.spike_sync_profile_multi(spike_trains)
+ avrg_isi_profile = spk.isi_profile(spike_trains)
+ avrg_spike_profile = spk.spike_profile(spike_trains)
+ avrg_spike_sync_profile = spk.spike_sync_profile(spike_trains)
More examples with detailed descriptions can be found in the `tutorial section <http://mariomulansky.github.io/PySpike/#tutorial>`_.
diff --git a/doc/conf.py b/doc/conf.py
index 8011ea9..807dec6 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -64,9 +64,9 @@ copyright = u'2014-2015, Mario Mulansky'
# built documents.
#
# The short X.Y version.
-version = '0.3'
+version = '0.5'
# The full version, including alpha/beta/rc tags.
-release = '0.3.0'
+release = '0.5.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/doc/tutorial.rst b/doc/tutorial.rst
index f7fc20b..aff03a8 100644
--- a/doc/tutorial.rst
+++ b/doc/tutorial.rst
@@ -88,10 +88,9 @@ If you are only interested in the scalar ISI-distance and not the profile, you c
.. code:: python
- isi_dist = spk.isi_distance(spike_trains[0], spike_trains[1], interval)
-
-where :code:`interval` is optional, as above, and if omitted the ISI-distance is computed for the complete spike trains.
+ isi_dist = spk.isi_distance(spike_trains[0], spike_trains[1], interval=(0, 1000))
+where :code:`interval` is optional, as above, and if omitted the ISI-distance is computed for the complete spike train.
SPIKE-distance
..............
@@ -113,19 +112,20 @@ But the general approach is very similar:
plt.show()
This short example computes and plots the SPIKE-profile of the first two spike trains in the file :code:`PySpike_testdata.txt`.
+
In contrast to the ISI-profile, a SPIKE-profile is a piece-wise *linear* function and is therefore represented by a :class:`.PieceWiseLinFunc` object.
Just like the :class:`.PieceWiseConstFunc` for the ISI-profile, the :class:`.PieceWiseLinFunc` provides a :meth:`.PieceWiseLinFunc.get_plottable_data` member function that returns arrays that can be used directly to plot the function.
Furthermore, the :meth:`.PieceWiseLinFunc.avrg` member function returns the average of the profile defined as the overall SPIKE distance.
As above, you can provide an interval as a pair of floats as well as a sequence of such pairs to :code:`avrg` to specify the averaging interval if required.
-Again, you can use
+Again, you can use:
.. code:: python
- spike_dist = spk.spike_distance(spike_trains[0], spike_trains[1], interval)
+ spike_dist = spk.spike_distance(spike_trains[0], spike_trains[1], interval=ival)
to compute the SPIKE distance directly, if you are not interested in the profile at all.
-The parameter :code:`interval` is optional and if neglected the whole spike train is used.
+The parameter :code:`interval` is optional and if neglected the whole time interval is used.
SPIKE synchronization
@@ -164,26 +164,47 @@ For the direct computation of the overall spike synchronization value within som
.. code:: python
- spike_sync = spk.spike_sync(spike_trains[0], spike_trains[1], interval)
-
+ spike_sync = spk.spike_sync(spike_trains[0], spike_trains[1], interval=ival)
Computing multivariate profiles and distances
----------------------------------------------
-To compute the multivariate ISI-profile, SPIKE-profile or SPIKE-Synchronization profile f a set of spike trains, PySpike provides multi-variate version of the profile function.
-The following example computes the multivariate ISI-, SPIKE- and SPIKE-Sync-profile for a list of spike trains using the :func:`.isi_profile_multi`, :func:`.spike_profile_multi`, :func:`.spike_sync_profile_multi` functions:
+To compute the multivariate ISI-profile, SPIKE-profile or SPIKE-Synchronization profile for a set of spike trains, simply provide a list of spike trains to the profile or distance functions.
+The following example computes the multivariate ISI-, SPIKE- and SPIKE-Sync-profile for a list of spike trains:
.. code:: python
spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
edges=(0, 4000))
- avrg_isi_profile = spk.isi_profile_multi(spike_trains)
- avrg_spike_profile = spk.spike_profile_multi(spike_trains)
- avrg_spike_sync_profile = spk.spike_sync_profile_multi(spike_trains)
+ avrg_isi_profile = spk.isi_profile(spike_trains)
+ avrg_spike_profile = spk.spike_profile(spike_trains)
+ avrg_spike_sync_profile = spk.spike_sync_profile(spike_trains)
+
+All functions also take an optional parameter :code:`indices`, a list of indices that allows to define the spike trains that should be used for the multivariate profile.
+As before, if you are only interested in the distance values, and not in the profile, you can call the functions: :func:`.isi_distance`, :func:`.spike_distance` and :func:`.spike_sync` with a list of spike trains.
+They return the scalar overall multivariate ISI-, SPIKE-distance or the SPIKE-Synchronization value.
+
+The following code is equivalent to the bivariate example above, computing the ISI-Distance between the first two spike trains in the given interval using the :code:`indices` parameter:
+
+.. code:: python
+
+ isi_dist = spk.isi_distance(spike_trains, indices=[0, 1], interval=(0, 1000))
+
+As you can see, the distance functions also accept an :code:`interval` parameter that can be used to specify the begin and end of the averaging interval as a pair of floats, if neglected the complete interval is used.
+
+**Note:**
+
+------------------------------
+
+ Instead of providing lists of spike trains to the profile or distance functions, you can also call those functions with many spike trains as (unnamed) parameters, e.g.:
+
+ .. code:: python
+
+ # st1, st2, st3, st4 are spike trains
+ spike_prof = spk.spike_profile(st1, st2, st3, st4)
+
+------------------------------
-All functions take an optional parameter :code:`indices`, a list of indices that allows to define the spike trains that should be used for the multivariate profile.
-As before, if you are only interested in the distance values, and not in the profile, PySpike offers the functions: :func:`.isi_distance_multi`, :func:`.spike_distance_multi` and :func:`.spike_sync_multi`, that return the scalar overall multivariate ISI- and SPIKE-distance as well as the SPIKE-Synchronization value.
-Those functions also accept an :code:`interval` parameter that can be used to specify the begin and end of the averaging interval as a pair of floats, if neglected the complete interval is used.
Another option to characterize large sets of spike trains are distance matrices.
Each entry in the distance matrix represents a bivariate distance (similarity for SPIKE-Synchronization) of two spike trains.
@@ -210,4 +231,3 @@ The following example computes and plots the ISI- and SPIKE-distance matrix as w
plt.title("SPIKE-Sync")
plt.show()
-
diff --git a/examples/averages.py b/examples/averages.py
index c3e81e2..8b405d0 100644
--- a/examples/averages.py
+++ b/examples/averages.py
@@ -12,7 +12,7 @@ from __future__ import print_function
import pyspike as spk
spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
- time_interval=(0, 4000))
+ edges=(0, 4000))
f = spk.isi_profile(spike_trains[0], spike_trains[1])
diff --git a/examples/merge.py b/examples/merge.py
index 2ea96ea..b4437a3 100644
--- a/examples/merge.py
+++ b/examples/merge.py
@@ -21,9 +21,9 @@ merged_spike_train = spk.merge_spike_trains([spike_trains[0], spike_trains[1]])
print(merged_spike_train.spikes)
-plt.plot(spike_trains[0].spikes, np.ones_like(spike_trains[0].spikes), 'o')
-plt.plot(spike_trains[1].spikes, np.ones_like(spike_trains[1].spikes), 'x')
+plt.plot(spike_trains[0], np.ones_like(spike_trains[0]), 'o')
+plt.plot(spike_trains[1], np.ones_like(spike_trains[1]), 'x')
plt.plot(merged_spike_train.spikes,
- 2*np.ones_like(merged_spike_train.spikes), 'o')
+ 2*np.ones_like(merged_spike_train), 'o')
plt.show()
diff --git a/examples/multivariate.py b/examples/multivariate.py
index 9a44758..e9579a5 100644
--- a/examples/multivariate.py
+++ b/examples/multivariate.py
@@ -24,11 +24,11 @@ t_loading = time.clock()
print("Number of spike trains: %d" % len(spike_trains))
num_of_spikes = sum([len(spike_trains[i])
- for i in xrange(len(spike_trains))])
+ for i in range(len(spike_trains))])
print("Number of spikes: %d" % num_of_spikes)
# calculate the multivariate spike distance
-f = spk.spike_profile_multi(spike_trains)
+f = spk.spike_profile(spike_trains)
t_spike = time.clock()
@@ -39,7 +39,7 @@ print("Spike distance from average: %.8f" % avrg)
t_avrg = time.clock()
# compute average distance directly, should give the same result as above
-spike_dist = spk.spike_distance_multi(spike_trains)
+spike_dist = spk.spike_distance(spike_trains)
print("Spike distance directly: %.8f" % spike_dist)
t_dist = time.clock()
diff --git a/examples/performance.py b/examples/performance.py
index d0c3b91..30691f8 100644
--- a/examples/performance.py
+++ b/examples/performance.py
@@ -26,43 +26,46 @@ print("%d spike trains with %d spikes" % (M, int(r*T)))
spike_trains = []
t_start = datetime.now()
-for i in xrange(M):
+for i in range(M):
spike_trains.append(spk.generate_poisson_spikes(r, T))
t_end = datetime.now()
runtime = (t_end-t_start).total_seconds()
+sort_by = 'tottime'
+# sort_by = 'cumtime'
+
print("Spike generation runtime: %.3fs" % runtime)
print()
print("================ ISI COMPUTATIONS ================")
print(" MULTIVARIATE DISTANCE")
-cProfile.run('spk.isi_distance_multi(spike_trains)', 'performance.stat')
+cProfile.run('spk.isi_distance(spike_trains)', 'performance.stat')
p = pstats.Stats('performance.stat')
-p.strip_dirs().sort_stats('tottime').print_stats(5)
+p.strip_dirs().sort_stats(sort_by).print_stats(5)
print(" MULTIVARIATE PROFILE")
-cProfile.run('spk.isi_profile_multi(spike_trains)', 'performance.stat')
+cProfile.run('spk.isi_profile(spike_trains)', 'performance.stat')
p = pstats.Stats('performance.stat')
-p.strip_dirs().sort_stats('tottime').print_stats(5)
+p.strip_dirs().sort_stats(sort_by).print_stats(5)
print("================ SPIKE COMPUTATIONS ================")
print(" MULTIVARIATE DISTANCE")
-cProfile.run('spk.spike_distance_multi(spike_trains)', 'performance.stat')
+cProfile.run('spk.spike_distance(spike_trains)', 'performance.stat')
p = pstats.Stats('performance.stat')
-p.strip_dirs().sort_stats('tottime').print_stats(5)
+p.strip_dirs().sort_stats(sort_by).print_stats(5)
print(" MULTIVARIATE PROFILE")
-cProfile.run('spk.spike_profile_multi(spike_trains)', 'performance.stat')
+cProfile.run('spk.spike_profile(spike_trains)', 'performance.stat')
p = pstats.Stats('performance.stat')
-p.strip_dirs().sort_stats('tottime').print_stats(5)
+p.strip_dirs().sort_stats(sort_by).print_stats(5)
print("================ SPIKE-SYNC COMPUTATIONS ================")
print(" MULTIVARIATE DISTANCE")
-cProfile.run('spk.spike_sync_multi(spike_trains)', 'performance.stat')
+cProfile.run('spk.spike_sync(spike_trains)', 'performance.stat')
p = pstats.Stats('performance.stat')
-p.strip_dirs().sort_stats('tottime').print_stats(5)
+p.strip_dirs().sort_stats(sort_by).print_stats(5)
print(" MULTIVARIATE PROFILE")
-cProfile.run('spk.spike_sync_profile_multi(spike_trains)', 'performance.stat')
+cProfile.run('spk.spike_sync_profile(spike_trains)', 'performance.stat')
p = pstats.Stats('performance.stat')
-p.strip_dirs().sort_stats('tottime').print_stats(5)
+p.strip_dirs().sort_stats(sort_by).print_stats(5)
diff --git a/examples/plot.py b/examples/plot.py
index 1922939..a0e04da 100644
--- a/examples/plot.py
+++ b/examples/plot.py
@@ -24,7 +24,8 @@ spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
for (i, spike_train) in enumerate(spike_trains):
plt.scatter(spike_train, i*np.ones_like(spike_train), marker='|')
-f = spk.isi_profile(spike_trains[0], spike_trains[1])
+# profile of the first two spike trains
+f = spk.isi_profile(spike_trains, indices=[0, 1])
x, y = f.get_plottable_data()
plt.figure()
@@ -32,7 +33,7 @@ plt.plot(x, np.abs(y), '--k', label="ISI-profile")
print("ISI-distance: %.8f" % f.avrg())
-f = spk.spike_profile(spike_trains[0], spike_trains[1])
+f = spk.spike_profile(spike_trains, indices=[0, 1])
x, y = f.get_plottable_data()
plt.plot(x, y, '-b', label="SPIKE-profile")
diff --git a/examples/profiles.py b/examples/profiles.py
index 05494bd..8412ffb 100644
--- a/examples/profiles.py
+++ b/examples/profiles.py
@@ -29,7 +29,7 @@ print("Average ISI distance:", f.avrg())
print()
# compute the multivariate ISI profile
-f = spk.isi_profile_multi(spike_trains)
+f = spk.isi_profile(spike_trains)
t = 1200
print("Multivariate ISI value at t =", t, ":", f(t))
@@ -56,7 +56,7 @@ print("Average SPIKE distance:", f.avrg())
print()
# compute the multivariate SPIKE profile
-f = spk.spike_profile_multi(spike_trains)
+f = spk.spike_profile(spike_trains)
# SPIKE values at certain points
t = 1200
diff --git a/examples/spike_sync.py b/examples/spike_sync.py
index 37dbff4..13ca0ce 100644
--- a/examples/spike_sync.py
+++ b/examples/spike_sync.py
@@ -31,7 +31,7 @@ plt.figure()
plt.subplot(211)
-f = spk.spike_sync_profile_multi(spike_trains)
+f = spk.spike_sync_profile(spike_trains)
x, y = f.get_plottable_data()
plt.plot(x, y, '-b', alpha=0.7, label="SPIKE-Sync profile")
diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py
index 9cc7bd5..fe97bc2 100644
--- a/pyspike/DiscreteFunc.py
+++ b/pyspike/DiscreteFunc.py
@@ -2,7 +2,7 @@
# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
# Distributed under the BSD License
-from __future__ import print_function
+from __future__ import absolute_import, print_function
import numpy as np
import collections
@@ -80,7 +80,7 @@ class DiscreteFunc(object):
expected_mp = (averaging_window_size+1) * int(self.mp[0])
y_plot = np.zeros_like(self.y)
# compute the values in a loop, could be done in cython if required
- for i in xrange(len(y_plot)):
+ for i in range(len(y_plot)):
if self.mp[i] >= expected_mp:
# the current value contains already all the wanted
@@ -206,7 +206,7 @@ expected."
# cython version
try:
- from cython.cython_add import add_discrete_function_cython as \
+ from .cython.cython_add import add_discrete_function_cython as \
add_discrete_function_impl
except ImportError:
if not(pyspike.disable_backend_warning):
@@ -215,7 +215,7 @@ sure that PySpike is installed by running\n\
'python setup.py build_ext --inplace'! \
\n Falling back to slow python backend.")
# use python backend
- from cython.python_backend import add_discrete_function_python as \
+ from .cython.python_backend import add_discrete_function_python as \
add_discrete_function_impl
self.x, self.y, self.mp = \
@@ -244,7 +244,7 @@ def average_profile(profiles):
assert len(profiles) > 1
avrg_profile = profiles[0].copy()
- for i in xrange(1, len(profiles)):
+ for i in range(1, len(profiles)):
avrg_profile.add(profiles[i])
avrg_profile.mul_scalar(1.0/len(profiles)) # normalize
diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py
index 23ff536..5ce5f27 100644
--- a/pyspike/PieceWiseConstFunc.py
+++ b/pyspike/PieceWiseConstFunc.py
@@ -2,7 +2,7 @@
# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
# Distributed under the BSD License
-from __future__ import print_function
+from __future__ import absolute_import, print_function
import numpy as np
import collections
@@ -189,7 +189,7 @@ class PieceWiseConstFunc(object):
# cython version
try:
- from cython.cython_add import add_piece_wise_const_cython as \
+ from .cython.cython_add import add_piece_wise_const_cython as \
add_piece_wise_const_impl
except ImportError:
if not(pyspike.disable_backend_warning):
@@ -198,7 +198,7 @@ sure that PySpike is installed by running\n \
'python setup.py build_ext --inplace'! \
\n Falling back to slow python backend.")
# use python backend
- from cython.python_backend import add_piece_wise_const_python as \
+ from .cython.python_backend import add_piece_wise_const_python as \
add_piece_wise_const_impl
self.x, self.y = add_piece_wise_const_impl(self.x, self.y, f.x, f.y)
diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py
index 0d51c76..8145e63 100644
--- a/pyspike/PieceWiseLinFunc.py
+++ b/pyspike/PieceWiseLinFunc.py
@@ -2,7 +2,7 @@
# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
# Distributed under the BSD License
-from __future__ import print_function
+from __future__ import absolute_import, print_function
import numpy as np
import collections
@@ -222,13 +222,13 @@ class PieceWiseLinFunc:
assert self.x[-1] == f.x[-1], "The functions have different intervals"
# python implementation
- # from python_backend import add_piece_wise_lin_python
+ # from .python_backend import add_piece_wise_lin_python
# self.x, self.y1, self.y2 = add_piece_wise_lin_python(
# self.x, self.y1, self.y2, f.x, f.y1, f.y2)
# cython version
try:
- from cython.cython_add import add_piece_wise_lin_cython as \
+ from .cython.cython_add import add_piece_wise_lin_cython as \
add_piece_wise_lin_impl
except ImportError:
if not(pyspike.disable_backend_warning):
@@ -237,7 +237,7 @@ sure that PySpike is installed by running\n \
'python setup.py build_ext --inplace'! \n \
Falling back to slow python backend.")
# use python backend
- from cython.python_backend import add_piece_wise_lin_python as \
+ from .cython.python_backend import add_piece_wise_lin_python as \
add_piece_wise_lin_impl
self.x, self.y1, self.y2 = add_piece_wise_lin_impl(
diff --git a/pyspike/SpikeTrain.py b/pyspike/SpikeTrain.py
index 4b59a5d..19f2419 100644
--- a/pyspike/SpikeTrain.py
+++ b/pyspike/SpikeTrain.py
@@ -68,7 +68,7 @@ class SpikeTrain(object):
"""Returns the spikes of this spike train with auxiliary spikes in case
of empty spike trains.
"""
- if len(self.spikes) < 2:
+ if len(self.spikes) < 1:
return np.unique(np.insert([self.t_start, self.t_end], 1,
self.spikes))
else:
diff --git a/pyspike/__init__.py b/pyspike/__init__.py
index 4c1e47e..7f578b0 100644
--- a/pyspike/__init__.py
+++ b/pyspike/__init__.py
@@ -4,28 +4,30 @@ Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
Distributed under the BSD License
"""
+from __future__ import absolute_import
+
__all__ = ["isi_distance", "spike_distance", "spike_sync", "psth",
"spikes", "spike_directionality", "SpikeTrain",
"PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc"]
-from PieceWiseConstFunc import PieceWiseConstFunc
-from PieceWiseLinFunc import PieceWiseLinFunc
-from DiscreteFunc import DiscreteFunc
-from SpikeTrain import SpikeTrain
+from .PieceWiseConstFunc import PieceWiseConstFunc
+from .PieceWiseLinFunc import PieceWiseLinFunc
+from .DiscreteFunc import DiscreteFunc
+from .SpikeTrain import SpikeTrain
-from isi_distance import isi_profile, isi_distance, isi_profile_multi,\
+from .isi_distance import isi_profile, isi_distance, isi_profile_multi,\
isi_distance_multi, isi_distance_matrix
-from spike_distance import spike_profile, spike_distance, spike_profile_multi,\
+from .spike_distance import spike_profile, spike_distance, spike_profile_multi,\
spike_distance_multi, spike_distance_matrix
-from spike_sync import spike_sync_profile, spike_sync,\
+from .spike_sync import spike_sync_profile, spike_sync,\
spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix,\
filter_by_spike_sync
-from psth import psth
+from .psth import psth
-from spikes import load_spike_trains_from_txt, spike_train_from_string, \
+from .spikes import load_spike_trains_from_txt, spike_train_from_string, \
merge_spike_trains, generate_poisson_spikes
-from spike_directionality import spike_directionality, \
+from .spike_directionality import spike_directionality, \
spike_directionality_profiles, spike_directionality_matrix, \
spike_train_order_profile, spike_train_order, \
spike_train_order_profile_multi, optimal_spike_train_order_from_matrix, \
diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx
index c017bf9..f50700f 100644
--- a/pyspike/cython/cython_distances.pyx
+++ b/pyspike/cython/cython_distances.pyx
@@ -55,20 +55,27 @@ def isi_distance_cython(double[:] s1, double[:] s2,
N2 = len(s2)
# first interspike interval - check if a spike exists at the start time
+ # and also account for spike trains with single spikes
if s1[0] > t_start:
- # edge correction
- nu1 = fmax(s1[0]-t_start, s1[1]-s1[0])
+ # edge correction for the first interspike interval:
+ # take the maximum of the distance from the beginning to the first
+ # spike and the interval between the first two spikes.
+    # if there is only one spike, take its distance to the beginning
+ nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) if N1 > 1 else s1[0]-t_start
index1 = -1
else:
- nu1 = s1[1]-s1[0]
+ # if the first spike is exactly at the start, take the distance
+ # to the next spike. If this is the only spike, take the distance to
+ # the end.
+ nu1 = s1[1]-s1[0] if N1 > 1 else t_end-s1[0]
index1 = 0
if s2[0] > t_start:
- # edge correction
- nu2 = fmax(s2[0]-t_start, s2[1]-s2[0])
+ # edge correction as above
+ nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) if N2 > 1 else s2[0]-t_start
index2 = -1
else:
- nu2 = s2[1]-s2[0]
+ nu2 = s2[1]-s2[0] if N2 > 1 else t_end-s2[0]
index2 = 0
last_t = t_start
@@ -86,8 +93,12 @@ def isi_distance_cython(double[:] s1, double[:] s2,
if index1 < N1-1:
nu1 = s1[index1+1]-s1[index1]
else:
- # edge correction
- nu1 = fmax(t_end-s1[index1], nu1)
+ # edge correction for the last ISI:
+ # take the max of the distance of the last
+ # spike to the end and the previous ISI. If there was only
+ # one spike, always take the distance to the end.
+ nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \
+ else t_end-s1[index1]
elif (index2 < N2-1) and ((index1 == N1-1) or
(s1[index1+1] > s2[index2+1])):
index2 += 1
@@ -95,8 +106,9 @@ def isi_distance_cython(double[:] s1, double[:] s2,
if index2 < N2-1:
nu2 = s2[index2+1]-s2[index2]
else:
- # edge correction
- nu2 = fmax(t_end-s2[index2], nu2)
+ # edge correction for the end as above
+ nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \
+ else t_end-s2[index2]
else: # s1[index1+1] == s2[index2+1]
index1 += 1
index2 += 1
@@ -104,13 +116,15 @@ def isi_distance_cython(double[:] s1, double[:] s2,
if index1 < N1-1:
nu1 = s1[index1+1]-s1[index1]
else:
- # edge correction
- nu1 = fmax(t_end-s1[index1], nu1)
+ # edge correction for the end as above
+ nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \
+ else t_end-s1[index1]
if index2 < N2-1:
nu2 = s2[index2+1]-s2[index2]
else:
- # edge correction
- nu2 = fmax(t_end-s2[index2], nu2)
+ # edge correction for the end as above
+ nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \
+ else t_end-s2[index2]
# compute the corresponding isi-distance
isi_value += curr_isi * (curr_t - last_t)
curr_isi = fabs(nu1 - nu2) / fmax(nu1, nu2)
@@ -178,44 +192,60 @@ def spike_distance_cython(double[:] t1, double[:] t2,
cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2
cdef double isi1, isi2, s1, s2
cdef double y_start, y_end, t_last, t_current, spike_value
+ cdef double[:] t_aux1 = np.empty(2)
+ cdef double[:] t_aux2 = np.empty(2)
spike_value = 0.0
N1 = len(t1)
N2 = len(t2)
+    # we can assume at least one spike per spike train
+ assert N1 > 0
+ assert N2 > 0
+
+
with nogil: # release the interpreter to allow multithreading
t_last = t_start
- t_p1 = t_start
- t_p2 = t_start
+ # auxiliary spikes for edge correction - consistent with first/last ISI
+ t_aux1[0] = fmin(t_start, 2*t1[0]-t1[1]) if N1 > 1 else t_start
+ t_aux1[1] = fmax(t_end, 2*t1[N1-1]-t1[N1-2]) if N1 > 1 else t_end
+ t_aux2[0] = fmin(t_start, 2*t2[0]-t2[1]) if N2 > 1 else t_start
+    t_aux2[1] = fmax(t_end, 2*t2[N2-1]-t2[N2-2]) if N2 > 1 else t_end
+ # print "aux spikes %.15f, %.15f ; %.15f, %.15f" % (t_aux1[0], t_aux1[1], t_aux2[0], t_aux2[1])
+ t_p1 = t_start if (t1[0] == t_start) else t_aux1[0]
+ t_p2 = t_start if (t2[0] == t_start) else t_aux2[0]
if t1[0] > t_start:
# dt_p1 = t2[0]-t_start
t_f1 = t1[0]
- dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
- isi1 = fmax(t_f1-t_start, t1[1]-t1[0])
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) if N1 > 1 else t_f1-t_start
dt_p1 = dt_f1
- s1 = dt_p1*(t_f1-t_start)/isi1
+ # s1 = dt_p1*(t_f1-t_start)/isi1
+ s1 = dt_p1
index1 = -1
- else:
- t_f1 = t1[1]
- dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
- dt_p1 = 0.0
- isi1 = t1[1]-t1[0]
+ else: # t1[0] == t_start
+ t_f1 = t1[1] if N1 > 1 else t_end
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ dt_p1 = get_min_dist_cython(t_p1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ isi1 = t_f1-t1[0]
s1 = dt_p1
index1 = 0
if t2[0] > t_start:
# dt_p1 = t2[0]-t_start
t_f2 = t2[0]
- dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1])
dt_p2 = dt_f2
- isi2 = fmax(t_f2-t_start, t2[1]-t2[0])
- s2 = dt_p2*(t_f2-t_start)/isi2
+ isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) if N2 > 1 else t_f2-t_start
+ # s2 = dt_p2*(t_f2-t_start)/isi2
+ s2 = dt_p2
index2 = -1
- else:
- t_f2 = t2[1]
- dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
- dt_p2 = 0.0
- isi2 = t2[1]-t2[0]
+ else: # t2[0] == t_start
+ t_f2 = t2[1] if N2 > 1 else t_end
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1])
+ # dt_p2 = t_start-t_p1 # 0.0
+ dt_p2 = get_min_dist_cython(t_p2, t1, N1, 0, t_aux1[0], t_aux1[1])
+ isi2 = t_f2-t2[0]
s2 = dt_p2
index2 = 0
@@ -237,7 +267,7 @@ def spike_distance_cython(double[:] t1, double[:] t2,
if index1 < N1-1:
t_f1 = t1[index1+1]
else:
- t_f1 = t_end
+ t_f1 = t_aux1[1]
t_curr = t_p1
s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
@@ -249,14 +279,17 @@ def spike_distance_cython(double[:] t1, double[:] t2,
# now the next interval start value
if index1 < N1-1:
dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
- t_start, t_end)
+ t_aux2[0], t_aux2[1])
isi1 = t_f1-t_p1
s1 = dt_p1
else:
dt_f1 = dt_p1
- isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
# s1 needs adjustment due to change of isi1
- s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # Eero's correction: no adjustment
+ s1 = dt_p1
# s2 is the same as above, thus we can compute y2 immediately
y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
# alternative definition without second normalization
@@ -272,7 +305,7 @@ def spike_distance_cython(double[:] t1, double[:] t2,
if index2 < N2-1:
t_f2 = t2[index2+1]
else:
- t_f2 = t_end
+ t_f2 = t_aux2[1]
t_curr = t_p2
s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
@@ -284,14 +317,17 @@ def spike_distance_cython(double[:] t1, double[:] t2,
# now the next interval start value
if index2 < N2-1:
dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
- t_start, t_end)
+ t_aux1[0], t_aux1[1])
isi2 = t_f2-t_p2
s2 = dt_p2
else:
dt_f2 = dt_p2
- isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
# s2 needs adjustment due to change of isi2
- s2 = dt_p2*(t_end-t2[N2-1])/isi2
+ # s2 = dt_p2*(t_end-t2[N2-1])/isi2
+ # Eero's correction: no adjustment
+ s2 = dt_p2
# s1 is the same as above, thus we can compute y2 immediately
y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
# alternative definition without second normalization
@@ -311,27 +347,29 @@ def spike_distance_cython(double[:] t1, double[:] t2,
if index1 < N1-1:
t_f1 = t1[index1+1]
dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
- t_start, t_end)
+ t_aux2[0], t_aux2[1])
isi1 = t_f1 - t_p1
else:
- t_f1 = t_end
+ t_f1 = t_aux1[1]
dt_f1 = dt_p1
- isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
if index2 < N2-1:
t_f2 = t2[index2+1]
dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
- t_start, t_end)
+ t_aux1[0], t_aux1[1])
isi2 = t_f2 - t_p2
else:
- t_f2 = t_end
+ t_f2 = t_aux2[1]
dt_f2 = dt_p2
- isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
index += 1
t_last = t_curr
# isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
# isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
- s1 = dt_f1*(t_end-t1[N1-1])/isi1
- s2 = dt_f2*(t_end-t2[N2-1])/isi2
+ s1 = dt_f1 # *(t_end-t1[N1-1])/isi1
+ s2 = dt_f2 # *(t_end-t2[N2-1])/isi2
y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
# alternative definition without second normalization
# y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2)
diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx
index 4663f2e..aa24db4 100644
--- a/pyspike/cython/cython_profiles.pyx
+++ b/pyspike/cython/cython_profiles.pyx
@@ -63,18 +63,18 @@ def isi_profile_cython(double[:] s1, double[:] s2,
# first interspike interval - check if a spike exists at the start time
if s1[0] > t_start:
# edge correction
- nu1 = fmax(s1[0]-t_start, s1[1]-s1[0])
+ nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) if N1 > 1 else s1[0]-t_start
index1 = -1
else:
- nu1 = s1[1]-s1[0]
+ nu1 = s1[1]-s1[0] if N1 > 1 else t_end-s1[0]
index1 = 0
if s2[0] > t_start:
# edge correction
- nu2 = fmax(s2[0]-t_start, s2[1]-s2[0])
+ nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) if N2 > 1 else s2[0]-t_start
index2 = -1
else:
- nu2 = s2[1]-s2[0]
+ nu2 = s2[1]-s2[0] if N2 > 1 else t_end-s2[0]
index2 = 0
isi_values[0] = fabs(nu1-nu2)/fmax(nu1, nu2)
@@ -92,7 +92,8 @@ def isi_profile_cython(double[:] s1, double[:] s2,
nu1 = s1[index1+1]-s1[index1]
else:
# edge correction
- nu1 = fmax(t_end-s1[index1], nu1)
+ nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \
+ else t_end-s1[index1]
elif (index2 < N2-1) and ((index1 == N1-1) or
(s1[index1+1] > s2[index2+1])):
index2 += 1
@@ -101,7 +102,8 @@ def isi_profile_cython(double[:] s1, double[:] s2,
nu2 = s2[index2+1]-s2[index2]
else:
# edge correction
- nu2 = fmax(t_end-s2[index2], nu2)
+ nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \
+ else t_end-s2[index2]
else: # s1[index1+1] == s2[index2+1]
index1 += 1
index2 += 1
@@ -110,12 +112,14 @@ def isi_profile_cython(double[:] s1, double[:] s2,
nu1 = s1[index1+1]-s1[index1]
else:
# edge correction
- nu1 = fmax(t_end-s1[index1], nu1)
+ nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \
+ else t_end-s1[index1]
if index2 < N2-1:
nu2 = s2[index2+1]-s2[index2]
else:
# edge correction
- nu2 = fmax(t_end-s2[index2], nu2)
+ nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \
+ else t_end-s2[index2]
# compute the corresponding isi-distance
isi_values[index] = fabs(nu1 - nu2) / fmax(nu1, nu2)
index += 1
@@ -181,6 +185,8 @@ def spike_profile_cython(double[:] t1, double[:] t2,
cdef double[:] spike_events
cdef double[:] y_starts
cdef double[:] y_ends
+ cdef double[:] t_aux1 = np.empty(2)
+ cdef double[:] t_aux2 = np.empty(2)
cdef int N1, N2, index1, index2, index
cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2
@@ -189,6 +195,10 @@ def spike_profile_cython(double[:] t1, double[:] t2,
N1 = len(t1)
N2 = len(t2)
+ # we can assume at least one spike per spike train
+ assert N1 > 0
+ assert N2 > 0
+
spike_events = np.empty(N1+N2+2)
y_starts = np.empty(len(spike_events)-1)
@@ -196,36 +206,45 @@ def spike_profile_cython(double[:] t1, double[:] t2,
with nogil: # release the interpreter to allow multithreading
spike_events[0] = t_start
- t_p1 = t_start
- t_p2 = t_start
+ # t_p1 = t_start
+ # t_p2 = t_start
+ # auxiliary spikes for edge correction - consistent with first/last ISI
+ t_aux1[0] = fmin(t_start, 2*t1[0]-t1[1]) if N1 > 1 else t_start
+ t_aux1[1] = fmax(t_end, 2*t1[N1-1]-t1[N1-2]) if N1 > 1 else t_end
+ t_aux2[0] = fmin(t_start, 2*t2[0]-t2[1]) if N2 > 1 else t_start
+ t_aux2[1] = fmax(t_end, 2*t2[N2-1]-t2[N2-2]) if N2 > 1 else t_end
+ t_p1 = t_start if (t1[0] == t_start) else t_aux1[0]
+ t_p2 = t_start if (t2[0] == t_start) else t_aux2[0]
if t1[0] > t_start:
# dt_p1 = t2[0]-t_start
t_f1 = t1[0]
- dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
- isi1 = fmax(t_f1-t_start, t1[1]-t1[0])
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) if N1 > 1 else t_f1-t_start
dt_p1 = dt_f1
- s1 = dt_p1*(t_f1-t_start)/isi1
+ # s1 = dt_p1*(t_f1-t_start)/isi1
+ s1 = dt_p1
index1 = -1
else:
- t_f1 = t1[1]
- dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
- dt_p1 = 0.0
- isi1 = t1[1]-t1[0]
+ t_f1 = t1[1] if N1 > 1 else t_end
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ dt_p1 = get_min_dist_cython(t_p1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ isi1 = t_f1-t1[0]
s1 = dt_p1
index1 = 0
if t2[0] > t_start:
# dt_p1 = t2[0]-t_start
t_f2 = t2[0]
- dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1])
dt_p2 = dt_f2
- isi2 = fmax(t_f2-t_start, t2[1]-t2[0])
- s2 = dt_p2*(t_f2-t_start)/isi2
+ isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) if N2 > 1 else t_f2-t_start
+ # s2 = dt_p2*(t_f2-t_start)/isi2
+ s2 = dt_p2
index2 = -1
else:
- t_f2 = t2[1]
- dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
- dt_p2 = 0.0
- isi2 = t2[1]-t2[0]
+ t_f2 = t2[1] if N2 > 1 else t_end
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1])
+ dt_p2 = get_min_dist_cython(t_p2, t1, N1, 0, t_aux1[0], t_aux1[1])
+ isi2 = t_f2-t2[0]
s2 = dt_p2
index2 = 0
@@ -245,7 +264,7 @@ def spike_profile_cython(double[:] t1, double[:] t2,
if index1 < N1-1:
t_f1 = t1[index1+1]
else:
- t_f1 = t_end
+ t_f1 = t_aux1[1]
spike_events[index] = t_p1
s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
y_ends[index-1] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1,
@@ -253,14 +272,17 @@ def spike_profile_cython(double[:] t1, double[:] t2,
# now the next interval start value
if index1 < N1-1:
dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
- t_start, t_end)
+ t_aux2[0], t_aux2[1])
isi1 = t_f1-t_p1
s1 = dt_p1
else:
dt_f1 = dt_p1
- isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
# s1 needs adjustment due to change of isi1
- s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # Eero's correction: no adjustment
+ s1 = dt_p1
# s2 is the same as above, thus we can compute y2 immediately
y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1,
isi2)
@@ -275,7 +297,7 @@ def spike_profile_cython(double[:] t1, double[:] t2,
if index2 < N2-1:
t_f2 = t2[index2+1]
else:
- t_f2 = t_end
+ t_f2 = t_aux2[1]
spike_events[index] = t_p2
s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1,
@@ -283,14 +305,17 @@ def spike_profile_cython(double[:] t1, double[:] t2,
# now the next interval start value
if index2 < N2-1:
dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
- t_start, t_end)
+ t_aux1[0], t_aux1[1])
isi2 = t_f2-t_p2
s2 = dt_p2
else:
dt_f2 = dt_p2
- isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
# s2 needs adjustment due to change of isi2
- s2 = dt_p2*(t_end-t2[N2-1])/isi2
+ # s2 = dt_p2*(t_end-t2[N2-1])/isi2
+ # Eero's correction: no adjustment
+ s2 = dt_p2
# s2 is the same as above, thus we can compute y2 immediately
y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
else: # t_f1 == t_f2 - generate only one event
@@ -306,32 +331,31 @@ def spike_profile_cython(double[:] t1, double[:] t2,
if index1 < N1-1:
t_f1 = t1[index1+1]
dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
- t_start, t_end)
+ t_aux2[0], t_aux2[1])
isi1 = t_f1 - t_p1
else:
- t_f1 = t_end
+ t_f1 = t_aux1[1]
dt_f1 = dt_p1
- isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
if index2 < N2-1:
t_f2 = t2[index2+1]
dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
- t_start, t_end)
+ t_aux1[0], t_aux1[1])
isi2 = t_f2 - t_p2
else:
- t_f2 = t_end
+ t_f2 = t_aux2[1]
dt_f2 = dt_p2
- isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
index += 1
# the last event is the interval end
if spike_events[index-1] == t_end:
index -= 1
else:
spike_events[index] = t_end
- # the ending value of the last interval
- isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
- isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
- s1 = dt_f1*(t_end-t1[N1-1])/isi1
- s2 = dt_f2*(t_end-t2[N2-1])/isi2
+ s1 = dt_f1
+ s2 = dt_f2
y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
# end nogil
diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py
index 5c4c75d..11fbe62 100644
--- a/pyspike/cython/python_backend.py
+++ b/pyspike/cython/python_backend.py
@@ -28,17 +28,17 @@ def isi_distance_python(s1, s2, t_start, t_end):
isi_values = np.empty(len(spike_events) - 1)
if s1[0] > t_start:
# edge correction
- nu1 = max(s1[0] - t_start, s1[1] - s1[0])
+ nu1 = max(s1[0] - t_start, s1[1] - s1[0]) if N1 > 1 else s1[0]-t_start
index1 = -1
else:
- nu1 = s1[1] - s1[0]
+ nu1 = s1[1] - s1[0] if N1 > 1 else t_end-s1[0]
index1 = 0
if s2[0] > t_start:
# edge correction
- nu2 = max(s2[0] - t_start, s2[1] - s2[0])
+ nu2 = max(s2[0] - t_start, s2[1] - s2[0]) if N2 > 1 else s2[0]-t_start
index2 = -1
else:
- nu2 = s2[1] - s2[0]
+ nu2 = s2[1] - s2[0] if N2 > 1 else t_end-s2[0]
index2 = 0
isi_values[0] = abs(nu1 - nu2) / max(nu1, nu2)
@@ -52,7 +52,8 @@ def isi_distance_python(s1, s2, t_start, t_end):
nu1 = s1[index1+1]-s1[index1]
else:
# edge correction
- nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2])
+ nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2]) if N1 > 1 \
+ else t_end-s1[N1-1]
elif (index2 < N2-1) and (index1 == N1-1 or
s1[index1+1] > s2[index2+1]):
@@ -62,7 +63,8 @@ def isi_distance_python(s1, s2, t_start, t_end):
nu2 = s2[index2+1]-s2[index2]
else:
# edge correction
- nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2])
+ nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2]) if N2 > 1 \
+ else t_end-s2[N2-1]
else: # s1[index1 + 1] == s2[index2 + 1]
index1 += 1
@@ -72,12 +74,14 @@ def isi_distance_python(s1, s2, t_start, t_end):
nu1 = s1[index1+1]-s1[index1]
else:
# edge correction
- nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2])
+ nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2]) if N1 > 1 \
+ else t_end-s1[N1-1]
if index2 < N2-1:
nu2 = s2[index2+1]-s2[index2]
else:
# edge correction
- nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2])
+ nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2]) if N2 > 1 \
+ else t_end-s2[N2-1]
# compute the corresponding isi-distance
isi_values[index] = abs(nu1 - nu2) / \
max(nu1, nu2)
@@ -144,36 +148,48 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end):
y_starts = np.empty(len(spike_events)-1)
y_ends = np.empty(len(spike_events)-1)
+ t_aux1 = np.zeros(2)
+ t_aux2 = np.zeros(2)
+ t_aux1[0] = min(t_start, t1[0]-(t1[1]-t1[0])) if N1 > 1 else t_start
+ t_aux1[1] = max(t_end, t1[N1-1]+(t1[N1-1]-t1[N1-2])) if N1 > 1 else t_end
+ t_aux2[0] = min(t_start, t2[0]-(t2[1]-t2[0])) if N2 > 1 else t_start
+ t_aux2[1] = max(t_end, t2[N2-1]+(t2[N2-1]-t2[N2-2])) if N2 > 1 else t_end
+ t_p1 = t_start if (t1[0] == t_start) else t_aux1[0]
+ t_p2 = t_start if (t2[0] == t_start) else t_aux2[0]
+
+ # print "t_aux1", t_aux1, ", t_aux2:", t_aux2
+
spike_events[0] = t_start
- t_p1 = t_start
- t_p2 = t_start
if t1[0] > t_start:
t_f1 = t1[0]
- dt_f1 = get_min_dist(t_f1, t2, 0, t_start, t_end)
+ dt_f1 = get_min_dist(t_f1, t2, 0, t_aux2[0], t_aux2[1])
dt_p1 = dt_f1
- isi1 = max(t_f1-t_start, t1[1]-t1[0])
- s1 = dt_p1*(t_f1-t_start)/isi1
+ isi1 = max(t_f1-t_start, t1[1]-t1[0]) if N1 > 1 else t_f1-t_start
+ # s1 = dt_p1*(t_f1-t_start)/isi1
+ s1 = dt_p1
index1 = -1
else:
- dt_p1 = 0.0
- t_f1 = t1[1]
- dt_f1 = get_min_dist(t_f1, t2, 0, t_start, t_end)
- isi1 = t1[1]-t1[0]
+ # dt_p1 = t_start-t_p2
+ t_f1 = t1[1] if N1 > 1 else t_end
+ dt_p1 = get_min_dist(t_p1, t2, 0, t_aux2[0], t_aux2[1])
+ dt_f1 = get_min_dist(t_f1, t2, 0, t_aux2[0], t_aux2[1])
+ isi1 = t_f1-t1[0]
s1 = dt_p1
index1 = 0
if t2[0] > t_start:
# dt_p1 = t2[0]-t_start
t_f2 = t2[0]
- dt_f2 = get_min_dist(t_f2, t1, 0, t_start, t_end)
+ dt_f2 = get_min_dist(t_f2, t1, 0, t_aux1[0], t_aux1[1])
dt_p2 = dt_f2
- isi2 = max(t_f2-t_start, t2[1]-t2[0])
- s2 = dt_p2*(t_f2-t_start)/isi2
+ isi2 = max(t_f2-t_start, t2[1]-t2[0]) if N2 > 1 else t_f2-t_start
+ # s2 = dt_p2*(t_f2-t_start)/isi2
+ s2 = dt_p2
index2 = -1
else:
- dt_p2 = 0.0
- t_f2 = t2[1]
- dt_f2 = get_min_dist(t_f2, t1, 0, t_start, t_end)
- isi2 = t2[1]-t2[0]
+ t_f2 = t2[1] if N2 > 1 else t_end
+ dt_p2 = get_min_dist(t_p2, t1, 0, t_aux1[0], t_aux1[1])
+ dt_f2 = get_min_dist(t_f2, t1, 0, t_aux1[0], t_aux1[1])
+ isi2 = t_f2-t2[0]
s2 = dt_p2
index2 = 0
@@ -193,20 +209,23 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end):
if index1 < N1-1:
t_f1 = t1[index1+1]
else:
- t_f1 = t_end
+ t_f1 = t_aux1[1]
spike_events[index] = t_p1
s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2)
# now the next interval start value
if index1 < N1-1:
- dt_f1 = get_min_dist(t_f1, t2, index2, t_start, t_end)
+ dt_f1 = get_min_dist(t_f1, t2, index2, t_aux2[0], t_aux2[1])
isi1 = t_f1-t_p1
s1 = dt_p1
else:
dt_f1 = dt_p1
- isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
# s1 needs adjustment due to change of isi1
- s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # Eero's correction: no adjustment
+ s1 = dt_p1
# s2 is the same as above, thus we can compute y2 immediately
y_starts[index] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2)
elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1):
@@ -220,20 +239,23 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end):
if index2 < N2-1:
t_f2 = t2[index2+1]
else:
- t_f2 = t_end
+ t_f2 = t_aux2[1]
spike_events[index] = t_p2
s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2)
# now the next interval start value
if index2 < N2-1:
- dt_f2 = get_min_dist(t_f2, t1, index1, t_start, t_end)
+ dt_f2 = get_min_dist(t_f2, t1, index1, t_aux1[0], t_aux1[1])
isi2 = t_f2-t_p2
s2 = dt_p2
else:
dt_f2 = dt_p2
- isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
# s2 needs adjustment due to change of isi2
- s2 = dt_p2*(t_end-t2[N2-1])/isi2
+ # s2 = dt_p2*(t_end-t2[N2-1])/isi2
+ # Eero's correction: no adjustment
+ s2 = dt_p2
# s2 is the same as above, thus we can compute y2 immediately
y_starts[index] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2)
else: # t_f1 == t_f2 - generate only one event
@@ -248,31 +270,31 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end):
y_starts[index] = 0.0
if index1 < N1-1:
t_f1 = t1[index1+1]
- dt_f1 = get_min_dist(t_f1, t2, index2, t_start, t_end)
+ dt_f1 = get_min_dist(t_f1, t2, index2, t_aux2[0], t_aux2[1])
isi1 = t_f1 - t_p1
else:
- t_f1 = t_end
+ t_f1 = t_aux1[1]
dt_f1 = dt_p1
- isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
if index2 < N2-1:
t_f2 = t2[index2+1]
- dt_f2 = get_min_dist(t_f2, t1, index1, t_start, t_end)
+ dt_f2 = get_min_dist(t_f2, t1, index1, t_aux1[0], t_aux1[1])
isi2 = t_f2 - t_p2
else:
- t_f2 = t_end
+ t_f2 = t_aux2[1]
dt_f2 = dt_p2
- isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
index += 1
+
# the last event is the interval end
if spike_events[index-1] == t_end:
index -= 1
else:
spike_events[index] = t_end
- # the ending value of the last interval
- isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
- isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
- s1 = dt_f1*(t_end-t1[N1-1])/isi1
- s2 = dt_f2*(t_end-t2[N2-1])/isi2
+ s1 = dt_f1 # *(t_end-t1[N1-1])/isi1
+ s2 = dt_f2 # *(t_end-t2[N2-1])/isi2
y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2)
# use only the data added above
diff --git a/pyspike/generic.py b/pyspike/generic.py
index 904c3c2..5ad06f1 100644
--- a/pyspike/generic.py
+++ b/pyspike/generic.py
@@ -7,6 +7,7 @@ Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
Distributed under the BSD License
"""
+from __future__ import division
import numpy as np
@@ -38,14 +39,14 @@ def _generic_profile_multi(spike_trains, pair_distance_func, indices=None):
L1 = len(pairs1)
if L1 > 1:
dist_prof1 = divide_and_conquer(pairs1[:L1//2],
- pairs1[int(L1//2):])
+ pairs1[L1//2:])
else:
dist_prof1 = pair_distance_func(spike_trains[pairs1[0][0]],
spike_trains[pairs1[0][1]])
L2 = len(pairs2)
if L2 > 1:
dist_prof2 = divide_and_conquer(pairs2[:L2//2],
- pairs2[int(L2//2):])
+ pairs2[L2//2:])
else:
dist_prof2 = pair_distance_func(spike_trains[pairs2[0][0]],
spike_trains[pairs2[0][1]])
@@ -137,8 +138,8 @@ def _generic_distance_matrix(spike_trains, dist_function,
assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
"Invalid index list."
# generate a list of possible index pairs
- pairs = [(i, j) for i in xrange(len(indices))
- for j in xrange(i+1, len(indices))]
+ pairs = [(i, j) for i in range(len(indices))
+ for j in range(i+1, len(indices))]
distance_matrix = np.zeros((len(indices), len(indices)))
for i, j in pairs:
diff --git a/pyspike/isi_distance.py b/pyspike/isi_distance.py
index e50f203..e91dce2 100644
--- a/pyspike/isi_distance.py
+++ b/pyspike/isi_distance.py
@@ -2,6 +2,8 @@
# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
# Distributed under the BSD License
+from __future__ import absolute_import
+
import pyspike
from pyspike import PieceWiseConstFunc
from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \
@@ -11,11 +13,48 @@ from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \
############################################################
# isi_profile
############################################################
-def isi_profile(spike_train1, spike_train2):
- """ Computes the isi-distance profile :math:`I(t)` of the two given
- spike trains. Retruns the profile as a PieceWiseConstFunc object. The
+def isi_profile(*args, **kwargs):
+ """ Computes the isi-distance profile :math:`I(t)` of the given
+ spike trains. Returns the profile as a PieceWiseConstFunc object. The
ISI-values are defined positive :math:`I(t)>=0`.
+ Valid call structures::
+
+ isi_profile(st1, st2) # returns the bi-variate profile
+ isi_profile(st1, st2, st3) # multi-variate profile of 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ isi_profile(spike_trains) # profile of the list of spike trains
+ isi_profile(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ The multivariate ISI distance profile for a set of spike trains is defined
+ as the average ISI-profile of all pairs of spike-trains:
+
+ .. math:: <I(t)> = \\frac{2}{N(N-1)} \\sum_{<i,j>} I^{i,j},
+
+ where the sum goes over all pairs <i,j>
+
+
+ :returns: The isi-distance profile :math:`I(t)`
+ :rtype: :class:`.PieceWiseConstFunc`
+ """
+ if len(args) == 1:
+ return isi_profile_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return isi_profile_bi(args[0], args[1])
+ else:
+ return isi_profile_multi(args)
+
+
+############################################################
+# isi_profile_bi
+############################################################
+def isi_profile_bi(spike_train1, spike_train2):
+ """ Specific function to compute a bivariate ISI-profile. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.isi_profile` to compute ISI-profiles.
+
:param spike_train1: First spike train.
:type spike_train1: :class:`.SpikeTrain`
:param spike_train2: Second spike train.
@@ -32,7 +71,7 @@ def isi_profile(spike_train1, spike_train2):
# load cython implementation
try:
- from cython.cython_profiles import isi_profile_cython \
+ from .cython.cython_profiles import isi_profile_cython \
as isi_profile_impl
except ImportError:
if not(pyspike.disable_backend_warning):
@@ -40,7 +79,7 @@ def isi_profile(spike_train1, spike_train2):
PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
Falling back to slow python backend.")
# use python backend
- from cython.python_backend import isi_distance_python \
+ from .cython.python_backend import isi_distance_python \
as isi_profile_impl
times, values = isi_profile_impl(spike_train1.get_spikes_non_empty(),
@@ -50,15 +89,76 @@ Falling back to slow python backend.")
############################################################
+# isi_profile_multi
+############################################################
+def isi_profile_multi(spike_trains, indices=None):
+ """ Specific function to compute the multivariate ISI-profile for a set of
+ spike trains. This is a deprecated function and should not be called
+ directly. Use :func:`.isi_profile` to compute ISI-profiles.
+
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type state: list or None
+ :returns: The averaged isi profile :math:`<I(t)>`
+ :rtype: :class:`.PieceWiseConstFunc`
+ """
+ average_dist, M = _generic_profile_multi(spike_trains, isi_profile_bi,
+ indices)
+ average_dist.mul_scalar(1.0/M) # normalize
+ return average_dist
+
+
+############################################################
# isi_distance
############################################################
-def isi_distance(spike_train1, spike_train2, interval=None):
+def isi_distance(*args, **kwargs):
""" Computes the ISI-distance :math:`D_I` of the given spike trains. The
isi-distance is the integral over the isi distance profile
:math:`I(t)`:
.. math:: D_I = \\int_{T_0}^{T_1} I(t) dt.
+ In the multivariate case it is the integral over the multivariate
+ ISI-profile, i.e. the average profile over all spike train pairs:
+
+ .. math:: D_I = \\int_0^T \\frac{2}{N(N-1)} \\sum_{<i,j>} I^{i,j},
+
+ where the sum goes over all pairs <i,j>
+
+
+
+ Valid call structures::
+
+ isi_distance(st1, st2) # returns the bi-variate distance
+ isi_distance(st1, st2, st3) # multi-variate distance of 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ isi_distance(spike_trains) # distance of the list of spike trains
+ isi_distance(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ :returns: The isi-distance :math:`D_I`.
+ :rtype: double
+ """
+
+ if len(args) == 1:
+ return isi_distance_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return isi_distance_bi(args[0], args[1], **kwargs)
+ else:
+ return isi_distance_multi(args, **kwargs)
+
+
+############################################################
+# _isi_distance_bi
+############################################################
+def isi_distance_bi(spike_train1, spike_train2, interval=None):
+ """ Specific function to compute the bivariate ISI-distance.
+ This is a deprecated function and should not be called directly. Use
+ :func:`.isi_distance` to compute ISI-distances.
+
:param spike_train1: First spike train.
:type spike_train1: :class:`.SpikeTrain`
:param spike_train2: Second spike train.
@@ -74,7 +174,7 @@ def isi_distance(spike_train1, spike_train2, interval=None):
# distance over the whole interval is requested: use specific function
# for optimal performance
try:
- from cython.cython_distances import isi_distance_cython \
+ from .cython.cython_distances import isi_distance_cython \
as isi_distance_impl
return isi_distance_impl(spike_train1.get_spikes_non_empty(),
@@ -82,46 +182,19 @@ def isi_distance(spike_train1, spike_train2, interval=None):
spike_train1.t_start, spike_train1.t_end)
except ImportError:
# Cython backend not available: fall back to profile averaging
- return isi_profile(spike_train1, spike_train2).avrg(interval)
+ return isi_profile_bi(spike_train1, spike_train2).avrg(interval)
else:
# some specific interval is provided: use profile
- return isi_profile(spike_train1, spike_train2).avrg(interval)
-
-
-############################################################
-# isi_profile_multi
-############################################################
-def isi_profile_multi(spike_trains, indices=None):
- """ computes the multi-variate isi distance profile for a set of spike
- trains. That is the average isi-distance of all pairs of spike-trains:
-
- .. math:: <I(t)> = \\frac{2}{N(N-1)} \\sum_{<i,j>} I^{i,j},
-
- where the sum goes over all pairs <i,j>
-
- :param spike_trains: list of :class:`.SpikeTrain`
- :param indices: list of indices defining which spike trains to use,
- if None all given spike trains are used (default=None)
- :type state: list or None
- :returns: The averaged isi profile :math:`<I(t)>`
- :rtype: :class:`.PieceWiseConstFunc`
- """
- average_dist, M = _generic_profile_multi(spike_trains, isi_profile,
- indices)
- average_dist.mul_scalar(1.0/M) # normalize
- return average_dist
+ return isi_profile_bi(spike_train1, spike_train2).avrg(interval)
############################################################
# isi_distance_multi
############################################################
def isi_distance_multi(spike_trains, indices=None, interval=None):
- """ computes the multi-variate isi-distance for a set of spike-trains.
- That is the time average of the multi-variate spike profile:
-
- .. math:: D_I = \\int_0^T \\frac{2}{N(N-1)} \\sum_{<i,j>} I^{i,j},
-
- where the sum goes over all pairs <i,j>
+ """ Specific function to compute the multivariate ISI-distance.
+ This is a deprecated function and should not be called directly. Use
+ :func:`.isi_distance` to compute ISI-distances.
:param spike_trains: list of :class:`.SpikeTrain`
:param indices: list of indices defining which spike trains to use,
@@ -132,7 +205,7 @@ def isi_distance_multi(spike_trains, indices=None, interval=None):
:returns: The time-averaged multivariate ISI distance :math:`D_I`
:rtype: double
"""
- return _generic_distance_multi(spike_trains, isi_distance, indices,
+ return _generic_distance_multi(spike_trains, isi_distance_bi, indices,
interval)
@@ -153,5 +226,5 @@ def isi_distance_matrix(spike_trains, indices=None, interval=None):
:math:`D_{I}^{ij}`
:rtype: np.array
"""
- return _generic_distance_matrix(spike_trains, isi_distance,
- indices, interval)
+ return _generic_distance_matrix(spike_trains, isi_distance_bi,
+ indices=indices, interval=interval)
diff --git a/pyspike/psth.py b/pyspike/psth.py
index 4027215..7cf1140 100644
--- a/pyspike/psth.py
+++ b/pyspike/psth.py
@@ -24,7 +24,7 @@ def psth(spike_trains, bin_size):
# N = len(spike_trains)
combined_spike_train = spike_trains[0].spikes
- for i in xrange(1, len(spike_trains)):
+ for i in range(1, len(spike_trains)):
combined_spike_train = np.append(combined_spike_train,
spike_trains[i].spikes)
diff --git a/pyspike/spike_distance.py b/pyspike/spike_distance.py
index feea0c1..0fd86c1 100644
--- a/pyspike/spike_distance.py
+++ b/pyspike/spike_distance.py
@@ -2,6 +2,8 @@
# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
# Distributed under the BSD License
+from __future__ import absolute_import
+
import pyspike
from pyspike import PieceWiseLinFunc
from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \
@@ -11,10 +13,46 @@ from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \
############################################################
# spike_profile
############################################################
-def spike_profile(spike_train1, spike_train2):
- """ Computes the spike-distance profile :math:`S(t)` of the two given spike
- trains. Returns the profile as a PieceWiseLinFunc object. The SPIKE-values
- are defined positive :math:`S(t)>=0`.
+def spike_profile(*args, **kwargs):
+ """ Computes the spike-distance profile :math:`S(t)` of the given
+ spike trains. Returns the profile as a PieceWiseLinFunc object. The
+ SPIKE-values are defined positive :math:`S(t)>=0`.
+
+ Valid call structures::
+
+ spike_profile(st1, st2) # returns the bi-variate profile
+ spike_profile(st1, st2, st3) # multi-variate profile of 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_profile(spike_trains) # profile of the list of spike trains
+ spike_profile(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ The multivariate spike-distance profile is defined as the average of all
+ pairs of spike-trains:
+
+ .. math:: <S(t)> = \\frac{2}{N(N-1)} \\sum_{<i,j>} S^{i, j},
+
+ where the sum goes over all pairs <i,j>
+
+ :returns: The spike-distance profile :math:`S(t)`
+ :rtype: :class:`.PieceWiseLinFunc`
+ """
+ if len(args) == 1:
+ return spike_profile_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_profile_bi(args[0], args[1])
+ else:
+ return spike_profile_multi(args)
+
+
+############################################################
+# spike_profile_bi
+############################################################
+def spike_profile_bi(spike_train1, spike_train2):
+ """ Specific function to compute a bivariate SPIKE-profile. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.spike_profile` to compute SPIKE-profiles.
:param spike_train1: First spike train.
:type spike_train1: :class:`.SpikeTrain`
@@ -32,7 +70,7 @@ def spike_profile(spike_train1, spike_train2):
# cython implementation
try:
- from cython.cython_profiles import spike_profile_cython \
+ from .cython.cython_profiles import spike_profile_cython \
as spike_profile_impl
except ImportError:
if not(pyspike.disable_backend_warning):
@@ -40,7 +78,7 @@ def spike_profile(spike_train1, spike_train2):
PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
Falling back to slow python backend.")
# use python backend
- from cython.python_backend import spike_distance_python \
+ from .cython.python_backend import spike_distance_python \
as spike_profile_impl
times, y_starts, y_ends = spike_profile_impl(
@@ -52,14 +90,74 @@ Falling back to slow python backend.")
############################################################
+# spike_profile_multi
+############################################################
+def spike_profile_multi(spike_trains, indices=None):
+ """ Specific function to compute a multivariate SPIKE-profile. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.spike_profile` to compute SPIKE-profiles.
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :returns: The averaged spike profile :math:`<S>(t)`
+ :rtype: :class:`.PieceWiseLinFunc`
+
+ """
+ average_dist, M = _generic_profile_multi(spike_trains, spike_profile_bi,
+ indices)
+ average_dist.mul_scalar(1.0/M) # normalize
+ return average_dist
+
+
+############################################################
# spike_distance
############################################################
-def spike_distance(spike_train1, spike_train2, interval=None):
- """ Computes the spike-distance :math:`D_S` of the given spike trains. The
+def spike_distance(*args, **kwargs):
+ """ Computes the SPIKE-distance :math:`D_S` of the given spike trains. The
spike-distance is the integral over the spike distance profile
- :math:`S(t)`:
+ :math:`D(t)`:
+
+ .. math:: D_S = \\int_{T_0}^{T_1} S(t) dt.
+
+
+ Valid call structures::
+
+ spike_distance(st1, st2) # returns the bi-variate distance
+ spike_distance(st1, st2, st3) # multi-variate distance of 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_distance(spike_trains) # distance of the list of spike trains
+ spike_distance(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
- .. math:: D_S = \int_{T_0}^{T_1} S(t) dt.
+ In the multivariate case, the spike distance is given as the integral over
+ the multivariate profile, that is the average profile of all spike train
+ pairs:
+
+ .. math:: D_S = \\int_0^T \\frac{2}{N(N-1)} \\sum_{<i,j>}
+ S^{i, j} dt
+
+ :returns: The spike-distance :math:`D_S`.
+ :rtype: double
+ """
+
+ if len(args) == 1:
+ return spike_distance_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_distance_bi(args[0], args[1], **kwargs)
+ else:
+ return spike_distance_multi(args, **kwargs)
+
+
+############################################################
+# spike_distance_bi
+############################################################
+def spike_distance_bi(spike_train1, spike_train2, interval=None):
+ """ Specific function to compute a bivariate SPIKE-distance. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.spike_distance` to compute SPIKE-distances.
:param spike_train1: First spike train.
:type spike_train1: :class:`.SpikeTrain`
@@ -76,7 +174,7 @@ def spike_distance(spike_train1, spike_train2, interval=None):
# distance over the whole interval is requested: use specific function
# for optimal performance
try:
- from cython.cython_distances import spike_distance_cython \
+ from .cython.cython_distances import spike_distance_cython \
as spike_distance_impl
return spike_distance_impl(spike_train1.get_spikes_non_empty(),
spike_train2.get_spikes_non_empty(),
@@ -84,48 +182,19 @@ def spike_distance(spike_train1, spike_train2, interval=None):
spike_train1.t_end)
except ImportError:
# Cython backend not available: fall back to average profile
- return spike_profile(spike_train1, spike_train2).avrg(interval)
+ return spike_profile_bi(spike_train1, spike_train2).avrg(interval)
else:
# some specific interval is provided: compute the whole profile
- return spike_profile(spike_train1, spike_train2).avrg(interval)
-
-
-############################################################
-# spike_profile_multi
-############################################################
-def spike_profile_multi(spike_trains, indices=None):
- """ Computes the multi-variate spike distance profile for a set of spike
- trains. That is the average spike-distance of all pairs of spike-trains:
-
- .. math:: <S(t)> = \\frac{2}{N(N-1)} \\sum_{<i,j>} S^{i, j}`,
-
- where the sum goes over all pairs <i,j>
-
- :param spike_trains: list of :class:`.SpikeTrain`
- :param indices: list of indices defining which spike trains to use,
- if None all given spike trains are used (default=None)
- :type indices: list or None
- :returns: The averaged spike profile :math:`<S>(t)`
- :rtype: :class:`.PieceWiseLinFunc`
-
- """
- average_dist, M = _generic_profile_multi(spike_trains, spike_profile,
- indices)
- average_dist.mul_scalar(1.0/M) # normalize
- return average_dist
+ return spike_profile_bi(spike_train1, spike_train2).avrg(interval)
############################################################
# spike_distance_multi
############################################################
def spike_distance_multi(spike_trains, indices=None, interval=None):
- """ Computes the multi-variate spike distance for a set of spike trains.
- That is the time average of the multi-variate spike profile:
-
- .. math:: D_S = \\int_0^T \\frac{2}{N(N-1)} \\sum_{<i,j>}
- S^{i, j} dt
-
- where the sum goes over all pairs <i,j>
+ """ Specific function to compute a multivariate SPIKE-distance. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.spike_distance` to compute SPIKE-distances.
:param spike_trains: list of :class:`.SpikeTrain`
:param indices: list of indices defining which spike trains to use,
@@ -137,7 +206,7 @@ def spike_distance_multi(spike_trains, indices=None, interval=None):
:returns: The averaged multi-variate spike distance :math:`D_S`.
:rtype: double
"""
- return _generic_distance_multi(spike_trains, spike_distance, indices,
+ return _generic_distance_multi(spike_trains, spike_distance_bi, indices,
interval)
@@ -158,5 +227,5 @@ def spike_distance_matrix(spike_trains, indices=None, interval=None):
:math:`D_S^{ij}`
:rtype: np.array
"""
- return _generic_distance_matrix(spike_trains, spike_distance,
+ return _generic_distance_matrix(spike_trains, spike_distance_bi,
indices, interval)
diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py
index 7f1bce8..37590b4 100644
--- a/pyspike/spike_sync.py
+++ b/pyspike/spike_sync.py
@@ -3,6 +3,8 @@
# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
# Distributed under the BSD License
+from __future__ import absolute_import
+
import numpy as np
from functools import partial
import pyspike
@@ -13,11 +15,48 @@ from pyspike.generic import _generic_profile_multi, _generic_distance_matrix
############################################################
# spike_sync_profile
############################################################
-def spike_sync_profile(spike_train1, spike_train2, max_tau=None):
- """ Computes the spike-synchronization profile S_sync(t) of the two given
- spike trains. Returns the profile as a DiscreteFunction object. The S_sync
- values are either 1 or 0, indicating the presence or absence of a
- coincidence.
+def spike_sync_profile(*args, **kwargs):
+ """ Computes the spike-synchronization profile S_sync(t) of the given
+ spike trains. Returns the profile as a DiscreteFunction object. In the
+ bivariate case, the S_sync values are either 1 or 0, indicating the presence
+ or absence of a coincidence. For multi-variate cases, for each spike in the
+ set of spike trains, the profile is defined as the number of coincidences
+ divided by the number of spike train pairs involving the spike train
+ containing this spike, which is the number of spike trains minus one (N-1).
+
+ Valid call structures::
+
+ spike_sync_profile(st1, st2) # returns the bi-variate profile
+ spike_sync_profile(st1, st2, st3) # multi-variate profile of 3 sts
+
+ sts = [st1, st2, st3, st4] # list of spike trains
+ spike_sync_profile(sts) # profile of the list of spike trains
+ spike_sync_profile(sts, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ In the multivariate case, the profile is defined as the number of
+ coincidences for each spike in the set of spike trains divided by the
+ number of spike train pairs involving the spike train containing this
+ spike, which is the number of spike trains minus one (N-1).
+
+ :returns: The spike-sync profile :math:`S_{sync}(t)`.
+ :rtype: :class:`pyspike.function.DiscreteFunction`
+ """
+ if len(args) == 1:
+ return spike_sync_profile_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_sync_profile_bi(args[0], args[1])
+ else:
+ return spike_sync_profile_multi(args)
+
+
+############################################################
+# spike_sync_profile_bi
+############################################################
+def spike_sync_profile_bi(spike_train1, spike_train2, max_tau=None):
+ """ Specific function to compute a bivariate SPIKE-Sync-profile. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.spike_sync_profile` to compute SPIKE-Sync-profiles.
:param spike_train1: First spike train.
:type spike_train1: :class:`pyspike.SpikeTrain`
@@ -25,7 +64,7 @@ def spike_sync_profile(spike_train1, spike_train2, max_tau=None):
:type spike_train2: :class:`pyspike.SpikeTrain`
:param max_tau: Maximum coincidence window size. If 0 or `None`, the
coincidence window has no upper bound.
- :returns: The spike-distance profile :math:`S_{sync}(t)`.
+ :returns: The spike-sync profile :math:`S_{sync}(t)`.
:rtype: :class:`pyspike.function.DiscreteFunction`
"""
@@ -37,7 +76,7 @@ def spike_sync_profile(spike_train1, spike_train2, max_tau=None):
# cython implementation
try:
- from cython.cython_profiles import coincidence_profile_cython \
+ from .cython.cython_profiles import coincidence_profile_cython \
as coincidence_profile_impl
except ImportError:
if not(pyspike.disable_backend_warning):
@@ -45,7 +84,7 @@ def spike_sync_profile(spike_train1, spike_train2, max_tau=None):
PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
Falling back to slow python backend.")
# use python backend
- from cython.python_backend import coincidence_python \
+ from .cython.python_backend import coincidence_python \
as coincidence_profile_impl
if max_tau is None:
@@ -60,6 +99,31 @@ Falling back to slow python backend.")
############################################################
+# spike_sync_profile_multi
+############################################################
+def spike_sync_profile_multi(spike_trains, indices=None, max_tau=None):
+ """ Specific function to compute a multivariate SPIKE-Sync-profile.
+ This is a deprecated function and should not be called directly. Use
+ :func:`.spike_sync_profile` to compute SPIKE-Sync-profiles.
+
+ :param spike_trains: list of :class:`pyspike.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The multi-variate spike sync profile :math:`<S_{sync}>(t)`
+ :rtype: :class:`pyspike.function.DiscreteFunction`
+
+ """
+ prof_func = partial(spike_sync_profile_bi, max_tau=max_tau)
+ average_prof, M = _generic_profile_multi(spike_trains, prof_func,
+ indices)
+ # average_dist.mul_scalar(1.0/M) # no normalization here!
+ return average_prof
+
+
+############################################################
# _spike_sync_values
############################################################
def _spike_sync_values(spike_train1, spike_train2, interval, max_tau):
@@ -73,7 +137,7 @@ def _spike_sync_values(spike_train1, spike_train2, interval, max_tau):
# distance over the whole interval is requested: use specific function
# for optimal performance
try:
- from cython.cython_distances import coincidence_value_cython \
+ from .cython.cython_distances import coincidence_value_cython \
as coincidence_value_impl
if max_tau is None:
max_tau = 0.0
@@ -85,24 +149,58 @@ def _spike_sync_values(spike_train1, spike_train2, interval, max_tau):
return c, mp
except ImportError:
# Cython backend not available: fall back to profile averaging
- return spike_sync_profile(spike_train1, spike_train2,
- max_tau).integral(interval)
+ return spike_sync_profile_bi(spike_train1, spike_train2,
+ max_tau).integral(interval)
else:
# some specific interval is provided: use profile
- return spike_sync_profile(spike_train1, spike_train2,
- max_tau).integral(interval)
+ return spike_sync_profile_bi(spike_train1, spike_train2,
+ max_tau).integral(interval)
############################################################
# spike_sync
############################################################
-def spike_sync(spike_train1, spike_train2, interval=None, max_tau=None):
+def spike_sync(*args, **kwargs):
""" Computes the spike synchronization value SYNC of the given spike
trains. The spike synchronization value is the computed as the total number
of coincidences divided by the total number of spikes:
.. math:: SYNC = \sum_n C_n / N.
+
+ Valid call structures::
+
+ spike_sync(st1, st2) # returns the bi-variate spike synchronization
+ spike_sync(st1, st2, st3) # multi-variate result for 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_sync(spike_trains) # spike-sync of the list of spike trains
+ spike_sync(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ The multivariate SPIKE-Sync is again defined as the overall ratio of all
+ coincidence values divided by the total number of spikes.
+
+ :returns: The spike synchronization value.
+ :rtype: `double`
+ """
+
+ if len(args) == 1:
+ return spike_sync_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_sync_bi(args[0], args[1], **kwargs)
+ else:
+ return spike_sync_multi(args, **kwargs)
+
+
+############################################################
+# spike_sync_bi
+############################################################
+def spike_sync_bi(spike_train1, spike_train2, interval=None, max_tau=None):
+ """ Specific function to compute a bivariate SPIKE-Sync value.
+ This is a deprecated function and should not be called directly. Use
+ :func:`.spike_sync` to compute SPIKE-Sync values.
+
:param spike_train1: First spike train.
:type spike_train1: :class:`pyspike.SpikeTrain`
:param spike_train2: Second spike train.
@@ -121,38 +219,12 @@ def spike_sync(spike_train1, spike_train2, interval=None, max_tau=None):
############################################################
-# spike_sync_profile_multi
-############################################################
-def spike_sync_profile_multi(spike_trains, indices=None, max_tau=None):
- """ Computes the multi-variate spike synchronization profile for a set of
- spike trains. For each spike in the set of spike trains, the multi-variate
- profile is defined as the number of coincidences divided by the number of
- spike trains pairs involving the spike train of containing this spike,
- which is the number of spike trains minus one (N-1).
-
- :param spike_trains: list of :class:`pyspike.SpikeTrain`
- :param indices: list of indices defining which spike trains to use,
- if None all given spike trains are used (default=None)
- :type indices: list or None
- :param max_tau: Maximum coincidence window size. If 0 or `None`, the
- coincidence window has no upper bound.
- :returns: The multi-variate spike sync profile :math:`<S_{sync}>(t)`
- :rtype: :class:`pyspike.function.DiscreteFunction`
-
- """
- prof_func = partial(spike_sync_profile, max_tau=max_tau)
- average_prof, M = _generic_profile_multi(spike_trains, prof_func,
- indices)
- # average_dist.mul_scalar(1.0/M) # no normalization here!
- return average_prof
-
-
-############################################################
# spike_sync_multi
############################################################
def spike_sync_multi(spike_trains, indices=None, interval=None, max_tau=None):
- """ Computes the multi-variate spike synchronization value for a set of
- spike trains.
+ """ Specific function to compute a multivariate SPIKE-Sync value.
+ This is a deprecated function and should not be called directly. Use
+ :func:`.spike_sync` to compute SPIKE-Sync values.
:param spike_trains: list of :class:`pyspike.SpikeTrain`
:param indices: list of indices defining which spike trains to use,
@@ -209,7 +281,7 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None):
:rtype: np.array
"""
- dist_func = partial(spike_sync, max_tau=max_tau)
+ dist_func = partial(spike_sync_bi, max_tau=max_tau)
return _generic_distance_matrix(spike_trains, dist_func,
indices, interval)
@@ -228,7 +300,7 @@ def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None,
# cython implementation
try:
- from cython.cython_profiles import coincidence_single_profile_cython \
+ from .cython.cython_profiles import coincidence_single_profile_cython \
as coincidence_impl
except ImportError:
if not(pyspike.disable_backend_warning):
@@ -237,7 +309,7 @@ sure that PySpike is installed by running\n \
'python setup.py build_ext --inplace'!\n \
Falling back to slow python backend.")
# use python backend
- from cython.python_backend import coincidence_single_python \
+ from .cython.python_backend import coincidence_single_python \
as coincidence_impl
if max_tau is None:
diff --git a/setup.py b/setup.py
index ce35773..7a4ac56 100644
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ to compile cython files:
python setup.py build_ext --inplace
-Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
+Copyright 2014-2016, Mario Mulansky <mario.mulansky@gmx.net>
Distributed under the BSD License
@@ -65,7 +65,7 @@ elif use_c: # c files are there, compile to binaries
setup(
name='pyspike',
packages=find_packages(exclude=['doc']),
- version='0.3.0',
+ version='0.5.1',
cmdclass=cmdclass,
ext_modules=ext_modules,
include_dirs=[numpy.get_include()],
@@ -94,12 +94,9 @@ train similarity',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
- ],
- package_data={
- 'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c',
- 'cython/cython_distances.c',
- 'cython/cython_directionality.c',
- 'cython/cython_simulated_annealing.c'],
- 'test': ['Spike_testdata.txt']
- }
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ ]
)
diff --git a/test/numeric/regression_random_results_cSPIKY.mat b/test/numeric/regression_random_results_cSPIKY.mat
new file mode 100644
index 0000000..26f29ff
--- /dev/null
+++ b/test/numeric/regression_random_results_cSPIKY.mat
Binary files differ
diff --git a/test/numeric/regression_random_spikes.mat b/test/numeric/regression_random_spikes.mat
new file mode 100644
index 0000000..e5ebeb1
--- /dev/null
+++ b/test/numeric/regression_random_spikes.mat
Binary files differ
diff --git a/test/numeric/test_regression_random_spikes.py b/test/numeric/test_regression_random_spikes.py
new file mode 100644
index 0000000..6156bb4
--- /dev/null
+++ b/test/numeric/test_regression_random_spikes.py
@@ -0,0 +1,127 @@
+""" regression benchmark
+
+Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+"""
+from __future__ import print_function
+
+import numpy as np
+from scipy.io import loadmat
+import pyspike as spk
+
+from numpy.testing import assert_almost_equal
+
+spk.disable_backend_warning = True
+
+
+def test_regression_random():
+
+ spike_file = "test/numeric/regression_random_spikes.mat"
+ spikes_name = "spikes"
+ result_name = "Distances"
+ result_file = "test/numeric/regression_random_results_cSPIKY.mat"
+
+ spike_train_sets = loadmat(spike_file)[spikes_name][0]
+ results_cSPIKY = loadmat(result_file)[result_name]
+
+ for i, spike_train_data in enumerate(spike_train_sets):
+ spike_trains = []
+ for spikes in spike_train_data[0]:
+ spike_trains.append(spk.SpikeTrain(spikes.flatten(), 100.0))
+
+ isi = spk.isi_distance_multi(spike_trains)
+ isi_prof = spk.isi_profile_multi(spike_trains).avrg()
+
+ spike = spk.spike_distance_multi(spike_trains)
+ spike_prof = spk.spike_profile_multi(spike_trains).avrg()
+ # spike_sync = spk.spike_sync_multi(spike_trains)
+
+ assert_almost_equal(isi, results_cSPIKY[i][0], decimal=14,
+ err_msg="Index: %d, ISI" % i)
+ assert_almost_equal(isi_prof, results_cSPIKY[i][0], decimal=14,
+ err_msg="Index: %d, ISI" % i)
+
+ assert_almost_equal(spike, results_cSPIKY[i][1], decimal=14,
+ err_msg="Index: %d, SPIKE" % i)
+ assert_almost_equal(spike_prof, results_cSPIKY[i][1], decimal=14,
+ err_msg="Index: %d, SPIKE" % i)
+
+
+def check_regression_dataset(spike_file="benchmark.mat",
+ spikes_name="spikes",
+ result_file="results_cSPIKY.mat",
+ result_name="Distances"):
+ """ Debuging function """
+ np.set_printoptions(precision=15)
+
+ spike_train_sets = loadmat(spike_file)[spikes_name][0]
+
+ results_cSPIKY = loadmat(result_file)[result_name]
+
+ err_max = 0.0
+ err_max_ind = -1
+ err_count = 0
+
+ for i, spike_train_data in enumerate(spike_train_sets):
+ spike_trains = []
+ for spikes in spike_train_data[0]:
+ spike_trains.append(spk.SpikeTrain(spikes.flatten(), 100.0))
+
+ isi = spk.isi_distance_multi(spike_trains)
+ spike = spk.spike_distance_multi(spike_trains)
+ # spike_sync = spk.spike_sync_multi(spike_trains)
+
+ if abs(isi - results_cSPIKY[i][0]) > 1E-14:
+ print("Error in ISI:", i, isi, results_cSPIKY[i][0])
+ print("Spike trains:")
+ for st in spike_trains:
+ print(st.spikes)
+
+ err = abs(spike - results_cSPIKY[i][1])
+ if err > 1E-14:
+ err_count += 1
+ if err > err_max:
+ err_max = err
+ err_max_ind = i
+
+ print("Total Errors:", err_count)
+
+ if err_max_ind > -1:
+ print("Max SPIKE distance error:", err_max, "at index:", err_max_ind)
+ spike_train_data = spike_train_sets[err_max_ind]
+ for spikes in spike_train_data[0]:
+ print(spikes.flatten())
+
+
+def check_single_spike_train_set(index):
+ """ Debuging function """
+ np.set_printoptions(precision=15)
+ spike_file = "regression_random_spikes.mat"
+ spikes_name = "spikes"
+ result_name = "Distances"
+ result_file = "regression_random_results_cSPIKY.mat"
+
+ spike_train_sets = loadmat(spike_file)[spikes_name][0]
+
+ results_cSPIKY = loadmat(result_file)[result_name]
+
+ spike_train_data = spike_train_sets[index]
+
+ spike_trains = []
+ for spikes in spike_train_data[0]:
+ print("Spikes:", spikes.flatten())
+ spike_trains.append(spk.SpikeTrain(spikes.flatten(), 100.0))
+
+ print(spk.spike_distance_multi(spike_trains))
+
+ print(results_cSPIKY[index][1])
+
+ print(spike_trains[1].spikes)
+
+
+if __name__ == "__main__":
+
+ test_regression_random()
+ # check_regression_dataset()
+ # check_single_spike_train_set(7633)
diff --git a/test/test_distance.py b/test/test_distance.py
index e45ac16..083d8a3 100644
--- a/test/test_distance.py
+++ b/test/test_distance.py
@@ -17,6 +17,9 @@ from numpy.testing import assert_equal, assert_almost_equal, \
import pyspike as spk
from pyspike import SpikeTrain
+import os
+TEST_PATH = os.path.dirname(os.path.realpath(__file__))
+
def test_isi():
# generate two spike trains:
@@ -36,6 +39,7 @@ def test_isi():
f = spk.isi_profile(t1, t2)
# print("ISI: ", f.y)
+ print("ISI value:", expected_isi_val)
assert_equal(f.x, expected_times)
assert_array_almost_equal(f.y, expected_isi, decimal=15)
@@ -73,8 +77,19 @@ def test_spike():
assert_equal(f.x, expected_times)
- assert_almost_equal(f.avrg(), 1.6624149659863946e-01, decimal=15)
- assert_almost_equal(f.y2[-1], 0.1394558, decimal=6)
+ # from SPIKY:
+ y_all = np.array([0.000000000000000000, 0.555555555555555580,
+ 0.222222222222222210, 0.305555555555555580,
+ 0.255102040816326536, 0.000000000000000000,
+ 0.000000000000000000, 0.255102040816326536,
+ 0.255102040816326536, 0.285714285714285698,
+ 0.285714285714285698, 0.285714285714285698])
+
+ #assert_array_almost_equal(f.y1, y_all[::2])
+ assert_array_almost_equal(f.y2, y_all[1::2])
+
+ assert_almost_equal(f.avrg(), 0.186309523809523814, decimal=15)
+ assert_equal(spk.spike_distance(t1, t2), f.avrg())
t1 = SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0)
t2 = SpikeTrain([0.3, 0.45, 0.8, 0.9, 0.95], 1.0)
@@ -99,6 +114,8 @@ def test_spike():
(expected_y1+expected_y2)/2)
expected_spike_val /= (expected_times[-1]-expected_times[0])
+ print("SPIKE value:", expected_spike_val)
+
f = spk.spike_profile(t1, t2)
assert_equal(f.x, expected_times)
@@ -117,9 +134,14 @@ def test_spike():
# for left and right values
s1_r = np.array([0.1, (0.1*0.1+0.1*0.1)/0.2, 0.1, 0.0, 0.0, 0.0, 0.0])
s1_l = np.array([0.1, (0.1*0.1+0.1*0.1)/0.2, 0.1, 0.0, 0.0, 0.0, 0.0])
- s2_r = np.array([0.1*0.1/0.3, 0.1*0.3/0.3, 0.1*0.2/0.3,
+ # s2_r = np.array([0.1*0.1/0.3, 0.1*0.3/0.3, 0.1*0.2/0.3,
+ # 0.0, 0.1, 0.0, 0.0])
+ # s2_l = np.array([0.1*0.1/0.3, 0.1*0.1/0.3, 0.1*0.2/0.3, 0.0,
+ # 0.1, 0.0, 0.0])
+ # eero's edge correction:
+ s2_r = np.array([0.1, 0.1*0.3/0.3, 0.1*0.2/0.3,
0.0, 0.1, 0.0, 0.0])
- s2_l = np.array([0.1*0.1/0.3, 0.1*0.1/0.3, 0.1*0.2/0.3, 0.0,
+ s2_l = np.array([0.1, 0.1*0.3/0.3, 0.1*0.2/0.3, 0.0,
0.1, 0.0, 0.0])
isi1 = np.array([0.2, 0.2, 0.2, 0.2, 0.2, 0.4])
isi2 = np.array([0.3, 0.3, 0.3, 0.1, 0.1, 0.4])
@@ -275,8 +297,8 @@ def test_multi_spike_sync():
expected, decimal=15)
# multivariate regression test
- spike_trains = spk.load_spike_trains_from_txt("test/SPIKE_Sync_Test.txt",
- edges=[0, 4000])
+ spike_trains = spk.load_spike_trains_from_txt(
+ os.path.join(TEST_PATH, "SPIKE_Sync_Test.txt"), edges=[0, 4000])
# extract all spike times
spike_times = np.array([])
for st in spike_trains:
@@ -309,10 +331,10 @@ def check_dist_matrix(dist_func, dist_matrix_func):
f_matrix = dist_matrix_func(spike_trains)
# check zero diagonal
- for i in xrange(4):
+ for i in range(4):
assert_equal(0.0, f_matrix[i, i])
- for i in xrange(4):
- for j in xrange(i+1, 4):
+ for i in range(4):
+ for j in range(i+1, 4):
assert_equal(f_matrix[i, j], f_matrix[j, i])
assert_equal(f12, f_matrix[1, 0])
assert_equal(f13, f_matrix[2, 0])
@@ -345,15 +367,15 @@ def test_regression_spiky():
assert_equal(isi_profile.y, 0.1/1.1 * np.ones_like(isi_profile.y))
spike_dist = spk.spike_distance(st1, st2)
- assert_equal(spike_dist, 2.1105878248735391e-01)
+ assert_equal(spike_dist, 0.211058782487353908)
spike_sync = spk.spike_sync(st1, st2)
assert_equal(spike_sync, 8.6956521739130432e-01)
# multivariate check
- spike_trains = spk.load_spike_trains_from_txt("test/PySpike_testdata.txt",
- (0.0, 4000.0))
+ spike_trains = spk.load_spike_trains_from_txt(
+ os.path.join(TEST_PATH, "PySpike_testdata.txt"), (0.0, 4000.0))
isi_dist = spk.isi_distance_multi(spike_trains)
# get the full precision from SPIKY
assert_almost_equal(isi_dist, 0.17051816816999129656, decimal=15)
@@ -363,16 +385,35 @@ def test_regression_spiky():
spike_dist = spk.spike_distance_multi(spike_trains)
# get the full precision from SPIKY
- assert_almost_equal(spike_dist, 2.4432433330596512e-01, decimal=15)
+ assert_almost_equal(spike_dist, 0.25188056475463755, decimal=15)
spike_sync = spk.spike_sync_multi(spike_trains)
# get the full precision from SPIKY
assert_equal(spike_sync, 0.7183531505298066)
+ # Eero's edge correction example
+ st1 = SpikeTrain([0.5, 1.5, 2.5], 6.0)
+ st2 = SpikeTrain([3.5, 4.5, 5.5], 6.0)
+
+ f = spk.spike_profile(st1, st2)
+
+ expected_times = np.array([0.0, 0.5, 1.5, 2.5, 3.5, 4.5, 5.5, 6.0])
+ y_all = np.array([0.271604938271605, 0.271604938271605, 0.271604938271605,
+ 0.617283950617284, 0.617283950617284, 0.444444444444444,
+ 0.285714285714286, 0.285714285714286, 0.444444444444444,
+ 0.617283950617284, 0.617283950617284, 0.271604938271605,
+ 0.271604938271605, 0.271604938271605])
+ expected_y1 = y_all[::2]
+ expected_y2 = y_all[1::2]
+
+ assert_equal(f.x, expected_times)
+ assert_array_almost_equal(f.y1, expected_y1, decimal=14)
+ assert_array_almost_equal(f.y2, expected_y2, decimal=14)
+
def test_multi_variate_subsets():
- spike_trains = spk.load_spike_trains_from_txt("test/PySpike_testdata.txt",
- (0.0, 4000.0))
+ spike_trains = spk.load_spike_trains_from_txt(
+ os.path.join(TEST_PATH, "PySpike_testdata.txt"), (0.0, 4000.0))
sub_set = [1, 3, 5, 7]
spike_trains_sub_set = [spike_trains[i] for i in sub_set]
diff --git a/test/test_empty.py b/test/test_empty.py
index af7fb36..4d0a5cf 100644
--- a/test/test_empty.py
+++ b/test/test_empty.py
@@ -24,7 +24,9 @@ def test_get_non_empty():
st = SpikeTrain([0.5, ], edges=(0.0, 1.0))
spikes = st.get_spikes_non_empty()
- assert_array_equal(spikes, [0.0, 0.5, 1.0])
+ # assert_array_equal(spikes, [0.0, 0.5, 1.0])
+ # spike trains with one spike don't get edge spikes anymore
+ assert_array_equal(spikes, [0.5, ])
def test_isi_empty():
@@ -70,21 +72,23 @@ def test_spike_empty():
st1 = SpikeTrain([], edges=(0.0, 1.0))
st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0))
d = spk.spike_distance(st1, st2)
- assert_almost_equal(d, 0.4*0.4*1.0/(0.4+1.0)**2 + 0.6*0.4*1.0/(0.6+1.0)**2,
- decimal=15)
+ d_expect = 2*0.4*0.4*1.0/(0.4+1.0)**2 + 2*0.6*0.4*1.0/(0.6+1.0)**2
+ assert_almost_equal(d, d_expect, decimal=15)
prof = spk.spike_profile(st1, st2)
assert_equal(d, prof.avrg())
assert_array_equal(prof.x, [0.0, 0.4, 1.0])
- assert_array_almost_equal(prof.y1, [0.0, 2*0.4*1.0/(0.6+1.0)**2],
+ assert_array_almost_equal(prof.y1, [2*0.4*1.0/(0.4+1.0)**2,
+ 2*0.4*1.0/(0.6+1.0)**2],
decimal=15)
- assert_array_almost_equal(prof.y2, [2*0.4*1.0/(0.4+1.0)**2, 0.0],
+ assert_array_almost_equal(prof.y2, [2*0.4*1.0/(0.4+1.0)**2,
+ 2*0.4*1.0/(0.6+1.0)**2],
decimal=15)
st1 = SpikeTrain([0.6, ], edges=(0.0, 1.0))
st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0))
d = spk.spike_distance(st1, st2)
- s1 = np.array([0.0, 0.4*0.2/0.6, 0.2, 0.0])
- s2 = np.array([0.0, 0.2, 0.2*0.4/0.6, 0.0])
+ s1 = np.array([0.2, 0.2, 0.2, 0.2])
+ s2 = np.array([0.2, 0.2, 0.2, 0.2])
isi1 = np.array([0.6, 0.6, 0.4])
isi2 = np.array([0.4, 0.6, 0.6])
expected_y1 = (s1[:-1]*isi2+s2[:-1]*isi1) / (0.5*(isi1+isi2)**2)
diff --git a/test/test_generic_interfaces.py b/test/test_generic_interfaces.py
new file mode 100644
index 0000000..7f08067
--- /dev/null
+++ b/test/test_generic_interfaces.py
@@ -0,0 +1,105 @@
+""" test_generic_interfaces.py
+
+Tests the generic interfaces of the profile and distance functions
+
+Copyright 2016, Mario Mulansky <mario.mulansky@gmx.net>
+
+Distributed under the BSD License
+
+"""
+
+from __future__ import print_function
+from numpy.testing import assert_equal
+
+import pyspike as spk
+from pyspike import SpikeTrain
+
+
+class dist_from_prof:
+ """ Simple functor that turns profile function into distance function by
+ calling profile.avrg().
+ """
+ def __init__(self, prof_func):
+ self.prof_func = prof_func
+
+ def __call__(self, *args, **kwargs):
+ if "interval" in kwargs:
+ # forward interval arg into avrg function
+ interval = kwargs.pop("interval")
+ return self.prof_func(*args, **kwargs).avrg(interval=interval)
+ else:
+ return self.prof_func(*args, **kwargs).avrg()
+
+
+def check_func(dist_func):
+ """ generic checker that tests the given distance function.
+ """
+ # generate spike trains:
+ t1 = SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0)
+ t2 = SpikeTrain([0.3, 0.45, 0.8, 0.9, 0.95], 1.0)
+ t3 = SpikeTrain([0.2, 0.4, 0.6], 1.0)
+ t4 = SpikeTrain([0.1, 0.4, 0.5, 0.6], 1.0)
+ spike_trains = [t1, t2, t3, t4]
+
+ isi12 = dist_func(t1, t2)
+ isi12_ = dist_func([t1, t2])
+ assert_equal(isi12, isi12_)
+
+ isi12_ = dist_func(spike_trains, indices=[0, 1])
+ assert_equal(isi12, isi12_)
+
+ isi123 = dist_func(t1, t2, t3)
+ isi123_ = dist_func([t1, t2, t3])
+ assert_equal(isi123, isi123_)
+
+ isi123_ = dist_func(spike_trains, indices=[0, 1, 2])
+ assert_equal(isi123, isi123_)
+
+ # run the same test with an additional interval parameter
+
+ isi12 = dist_func(t1, t2, interval=[0.0, 0.5])
+ isi12_ = dist_func([t1, t2], interval=[0.0, 0.5])
+ assert_equal(isi12, isi12_)
+
+ isi12_ = dist_func(spike_trains, indices=[0, 1], interval=[0.0, 0.5])
+ assert_equal(isi12, isi12_)
+
+ isi123 = dist_func(t1, t2, t3, interval=[0.0, 0.5])
+ isi123_ = dist_func([t1, t2, t3], interval=[0.0, 0.5])
+ assert_equal(isi123, isi123_)
+
+ isi123_ = dist_func(spike_trains, indices=[0, 1, 2], interval=[0.0, 0.5])
+ assert_equal(isi123, isi123_)
+
+
+def test_isi_profile():
+ check_func(dist_from_prof(spk.isi_profile))
+
+
+def test_isi_distance():
+ check_func(spk.isi_distance)
+
+
+def test_spike_profile():
+ check_func(dist_from_prof(spk.spike_profile))
+
+
+def test_spike_distance():
+ check_func(spk.spike_distance)
+
+
+def test_spike_sync_profile():
+ check_func(dist_from_prof(spk.spike_sync_profile))
+
+
+def test_spike_sync():
+ check_func(spk.spike_sync)
+
+
+if __name__ == "__main__":
+ test_isi_profile()
+ test_isi_distance()
+ test_spike_profile()
+ test_spike_distance()
+ test_spike_sync_profile()
+ test_spike_sync()
diff --git a/test/test_regression/test_regression_15.py b/test/test_regression/test_regression_15.py
index 1ce1290..54adf23 100644
--- a/test/test_regression/test_regression_15.py
+++ b/test/test_regression/test_regression_15.py
@@ -8,68 +8,71 @@ Distributed under the BSD License
"""
+from __future__ import division
+
import numpy as np
from numpy.testing import assert_equal, assert_almost_equal, \
assert_array_almost_equal
import pyspike as spk
+import os
+TEST_PATH = os.path.dirname(os.path.realpath(__file__))
+TEST_DATA = os.path.join(TEST_PATH, "..", "SPIKE_Sync_Test.txt")
+
def test_regression_15_isi():
# load spike trains
- spike_trains = spk.load_spike_trains_from_txt("test/SPIKE_Sync_Test.txt",
- edges=[0, 4000])
+ spike_trains = spk.load_spike_trains_from_txt(TEST_DATA, edges=[0, 4000])
N = len(spike_trains)
dist_mat = spk.isi_distance_matrix(spike_trains)
assert_equal(dist_mat.shape, (N, N))
- ind = np.arange(N/2)
+ ind = np.arange(N//2)
dist_mat = spk.isi_distance_matrix(spike_trains, ind)
- assert_equal(dist_mat.shape, (N/2, N/2))
+ assert_equal(dist_mat.shape, (N//2, N//2))
- ind = np.arange(N/2, N)
+ ind = np.arange(N//2, N)
dist_mat = spk.isi_distance_matrix(spike_trains, ind)
- assert_equal(dist_mat.shape, (N/2, N/2))
+ assert_equal(dist_mat.shape, (N//2, N//2))
def test_regression_15_spike():
# load spike trains
- spike_trains = spk.load_spike_trains_from_txt("test/SPIKE_Sync_Test.txt",
- edges=[0, 4000])
+ spike_trains = spk.load_spike_trains_from_txt(TEST_DATA, edges=[0, 4000])
N = len(spike_trains)
dist_mat = spk.spike_distance_matrix(spike_trains)
assert_equal(dist_mat.shape, (N, N))
- ind = np.arange(N/2)
+ ind = np.arange(N//2)
dist_mat = spk.spike_distance_matrix(spike_trains, ind)
- assert_equal(dist_mat.shape, (N/2, N/2))
+ assert_equal(dist_mat.shape, (N//2, N//2))
- ind = np.arange(N/2, N)
+ ind = np.arange(N//2, N)
dist_mat = spk.spike_distance_matrix(spike_trains, ind)
- assert_equal(dist_mat.shape, (N/2, N/2))
+ assert_equal(dist_mat.shape, (N//2, N//2))
def test_regression_15_sync():
# load spike trains
- spike_trains = spk.load_spike_trains_from_txt("test/SPIKE_Sync_Test.txt",
- edges=[0, 4000])
+ spike_trains = spk.load_spike_trains_from_txt(TEST_DATA, edges=[0, 4000])
N = len(spike_trains)
dist_mat = spk.spike_sync_matrix(spike_trains)
assert_equal(dist_mat.shape, (N, N))
- ind = np.arange(N/2)
+ ind = np.arange(N//2)
dist_mat = spk.spike_sync_matrix(spike_trains, ind)
- assert_equal(dist_mat.shape, (N/2, N/2))
+ assert_equal(dist_mat.shape, (N//2, N//2))
- ind = np.arange(N/2, N)
+ ind = np.arange(N//2, N)
dist_mat = spk.spike_sync_matrix(spike_trains, ind)
- assert_equal(dist_mat.shape, (N/2, N/2))
+ assert_equal(dist_mat.shape, (N//2, N//2))
if __name__ == "__main__":
diff --git a/test/test_spikes.py b/test/test_spikes.py
index d4eb131..609a819 100644
--- a/test/test_spikes.py
+++ b/test/test_spikes.py
@@ -13,10 +13,12 @@ from numpy.testing import assert_equal
import pyspike as spk
+import os
+TEST_PATH = os.path.dirname(os.path.realpath(__file__))
+TEST_DATA = os.path.join(TEST_PATH, "PySpike_testdata.txt")
def test_load_from_txt():
- spike_trains = spk.load_spike_trains_from_txt("test/PySpike_testdata.txt",
- edges=(0, 4000))
+ spike_trains = spk.load_spike_trains_from_txt(TEST_DATA, edges=(0, 4000))
assert len(spike_trains) == 40
# check the first spike train
@@ -48,8 +50,7 @@ def check_merged_spikes(merged_spikes, spike_trains):
def test_merge_spike_trains():
# first load the data
- spike_trains = spk.load_spike_trains_from_txt("test/PySpike_testdata.txt",
- edges=(0, 4000))
+ spike_trains = spk.load_spike_trains_from_txt(TEST_DATA, edges=(0, 4000))
merged_spikes = spk.merge_spike_trains([spike_trains[0], spike_trains[1]])
# test if result is sorted