From 47c9bfc45db078d9a134a7b2b10dcf0cc6556641 Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Wed, 1 Oct 2014 17:01:07 +0200
Subject: switched from pyximport to distutils

---
 setup.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)
 create mode 100644 setup.py

(limited to 'setup.py')

diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..7c8e4e1
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,16 @@
+""" setup.py
+
+Handles the compilation of pyx source files
+
+Copyright 2014, Mario Mulansky
+
+"""
+import os
+from distutils.core import setup
+from Cython.Build import cythonize
+
+import numpy.distutils.intelccompiler
+
+setup(
+    ext_modules = cythonize("pyspike/*.pyx")
+)
-- cgit v1.2.3
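With this distutils-based setup.py, the extensions are compiled by running `python setup.py build_ext --inplace` (the invocation documented in a later commit of this series). As a quick sanity check that the compiled backend is actually picked up afterwards, something like the following sketch can be used; the module names `cython_add` and `cython_distance` are assumptions taken from later commits in this series:

    # sketch: check whether the compiled Cython backend is importable
    try:
        from pyspike import cython_add, cython_distance  # noqa: F401
        print("PySpike: fast Cython backend available")
    except ImportError:
        print("PySpike: falling back to the slower pure Python backend")
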
From cf91333e4aac74d96cca32042f4363e79e7ab051 Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Tue, 14 Oct 2014 17:14:44 +0200
Subject: more docs

---
 Readme.md         | 28 ++++++++++++++++++++++++++++
 pyspike/spikes.py |  3 ++-
 setup.py          |  5 +----
 3 files changed, 31 insertions(+), 5 deletions(-)

(limited to 'setup.py')

diff --git a/Readme.md b/Readme.md
index a1b6e9e..55dbd33 100644
--- a/Readme.md
+++ b/Readme.md
@@ -64,6 +64,12 @@ If the time interval is provided (`time_interval is not None`), auxiliary spikes
 Furthermore, the spike trains are ordered via `np.sort` (disable this feature by providing `sort=False` as a parameter to the load function).
 As result, `load_spike_trains_from_txt` returns a *list of arrays* containing the spike trains in the text file.
 
+If you load spike trains yourself, e.g. from data files with a different structure, you can use the helper function `add_auxiliary_spikes` to add the auxiliary spikes at the beginning and end of the observation interval.
+Both the ISI and the SPIKE distance computations require the presence of auxiliary spikes, so make sure you have those in your spike trains:
+
+    spike_train = spk.add_auxiliary_spikes(spike_train, (T_start, T_end))
+    # if you provide only a single value, it is interpreted as T_end, while T_start=0
+    spike_train = spk.add_auxiliary_spikes(spike_train, T_end)
 
 ## Computing bi-variate distances
 
@@ -74,9 +80,31 @@ As result, `load_spike_trains_from_txt` returns a *list of arrays* containing th
 
 >For performance reasons, the PySpike distance functions do not check if the spike trains provided are indeed ordered.
 >Make sure that all your spike trains are ordered.
 >If in doubt, use `spike_train = np.sort(spike_train)` to obtain a correctly ordered spike train.
+>
+>Furthermore, the spike trains should have auxiliary spikes at the beginning and end of the observation interval.
+>You can ensure this by providing the `time_interval` in the `load_spike_trains_from_txt` function, or calling `add_auxiliary_spikes` for your spike trains.
+>The spike trains must have *the same* observation interval!
 
 ----------------------
 
+### ISI-distance
+
+The following code loads some exemplary spike trains, computes the dissimilarity profile of the first two spike trains (the ISI-distance), and plots it with matplotlib:
+
+    import matplotlib.pyplot as plt
+    import numpy as np
+    import pyspike as spk
+
+    spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
+                                                  time_interval=(0, 4000))
+    isi_profile = spk.isi_distance(spike_trains[0], spike_trains[1])
+    x, y = isi_profile.get_plottable_data()
+    plt.plot(x, np.abs(y), '--k')
+    print("ISI distance: %.8f" % isi_profile.abs_avrg())
+    plt.show()
+
+The ISI-profile is a piece-wise constant function, thus the function `isi_distance` returns an instance of the `PieceWiseConstFunc` class.
+As above, this class allows you to obtain arrays that can be used to plot the function with `plt.plot`, but also to compute the absolute average, which amounts to the final scalar ISI-distance.
 
 ## Computing multi-variate distances
 
diff --git a/pyspike/spikes.py b/pyspike/spikes.py
index d390222..68c8bc1 100644
--- a/pyspike/spikes.py
+++ b/pyspike/spikes.py
@@ -20,7 +20,8 @@ def add_auxiliary_spikes(spike_train, time_interval):
     - time_interval: A pair (T_start, T_end) of values representing the start
     and end time of the spike train measurement or a single value representing
     the end time, the T_start is then assumed as 0. Auxiliary spikes will be
-    added to the spike train at the beginning and end of this interval.
+    added to the spike train at the beginning and end of this interval, if they
+    are not yet present.
     Returns:
     - spike train with additional spikes at T_start and T_end.
 
diff --git a/setup.py b/setup.py
index 7c8e4e1..16a87ea 100644
--- a/setup.py
+++ b/setup.py
@@ -5,12 +5,9 @@ Handles the compilation of pyx source files
 
 Copyright 2014, Mario Mulansky
 
 """
-import os
 from distutils.core import setup
 from Cython.Build import cythonize
 
-import numpy.distutils.intelccompiler
-
 setup(
-    ext_modules = cythonize("pyspike/*.pyx")
+    ext_modules=cythonize("pyspike/*.pyx")
 )
-- cgit v1.2.3
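If your data does not come in the text format expected by `load_spike_trains_from_txt`, the preparation steps described in the commit above amount to the following sketch; `my_spikes.txt` is a hypothetical single-column data file, and the observation interval matches the README example:

    import numpy as np
    import pyspike as spk

    # load a single spike train from a custom data file (placeholder name)
    spike_train = np.loadtxt("my_spikes.txt")
    # the distance functions expect ordered spike trains
    spike_train = np.sort(spike_train)
    # add auxiliary spikes at the edges of the observation interval [0, 4000]
    spike_train = spk.add_auxiliary_spikes(spike_train, (0, 4000))
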
From 4ff7112e69e75a31c476304ef35960432b8ee17b Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Tue, 14 Oct 2014 17:39:49 +0200
Subject: more isi docs

---
 Readme.md | 12 ++++++++++--
 setup.py  |  2 ++
 2 files changed, 12 insertions(+), 2 deletions(-)

(limited to 'setup.py')

diff --git a/Readme.md b/Readme.md
index 55dbd33..6348d09 100644
--- a/Readme.md
+++ b/Readme.md
@@ -98,13 +98,21 @@ The following code loads some exemplary spike trains, computes the dissimilarity
                                                   time_interval=(0, 4000))
     isi_profile = spk.isi_distance(spike_trains[0], spike_trains[1])
     x, y = isi_profile.get_plottable_data()
-    plt.plot(x, np.abs(y), '--k')
-    print("ISI distance: %.8f" % isi_profile.abs_avrg())
+    plt.plot(x, y, '--k')
+    print("ISI distance: %.8f" % isi_profile.avrg())
     plt.show()
 
 The ISI-profile is a piece-wise constant function, thus the function `isi_distance` returns an instance of the `PieceWiseConstFunc` class.
 As above, this class allows you to obtain arrays that can be used to plot the function with `plt.plot`, but also to compute the absolute average, which amounts to the final scalar ISI-distance.
+Furthermore, `PieceWiseConstFunc` provides an `add` function that can be used to add two piece-wise constant functions, and a `mul_scalar` function that rescales the function by a scalar.
+This can be used to obtain an average profile:
+
+    isi_profile1.add(isi_profile2)
+    isi_profile1.mul_scalar(0.5)
+    x, y = isi_profile1.get_plottable_data()
+    plt.plot(x, y, label="Average ISI profile")
+
 
 ## Computing multi-variate distances
 
diff --git a/setup.py b/setup.py
index 16a87ea..31db6f8 100644
--- a/setup.py
+++ b/setup.py
@@ -4,6 +4,8 @@ Handles the compilation of pyx source files
 
 Copyright 2014, Mario Mulansky
 
+Distributed under the BSD License
+
 """
 from distutils.core import setup
 from Cython.Build import cythonize
-- cgit v1.2.3
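The `add`/`mul_scalar` pattern documented above generalizes from two profiles to arbitrarily many. The following sketch averages the ISI-profile over all pairs of loaded spike trains, using only the functions introduced in this commit:

    import itertools

    import pyspike as spk

    spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
                                                  time_interval=(0, 4000))
    # average the bi-variate ISI-profile over all pairs of spike trains
    pairs = list(itertools.combinations(range(len(spike_trains)), 2))
    avrg_profile = spk.isi_distance(spike_trains[pairs[0][0]],
                                    spike_trains[pairs[0][1]])
    for i, j in pairs[1:]:
        avrg_profile.add(spk.isi_distance(spike_trains[i], spike_trains[j]))
    avrg_profile.mul_scalar(1.0 / len(pairs))
    print("Average ISI distance: %.8f" % avrg_profile.avrg())
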
From 6b1ed71863548c8059205993301ed36cdc42f594 Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Thu, 16 Oct 2014 11:42:10 +0200
Subject: travis fix

---
 .travis.yml | 3 ++-
 setup.py    | 4 +++-
 2 files changed, 5 insertions(+), 2 deletions(-)

(limited to 'setup.py')

diff --git a/.travis.yml b/.travis.yml
index b687aaa..5981801 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -4,9 +4,10 @@ python:
   - "2.7"
 
 install:
-  - pip install cython
+  - pip install -q cython
 
 script:
+  - export PYTHONPATH=$PYTHONPATH:$TRAVIS_BUILD_DIR
   - python setup.py build_ext --inplace
   - cd test
   - nosetests
diff --git a/setup.py b/setup.py
index 31db6f8..a9f40bd 100644
--- a/setup.py
+++ b/setup.py
@@ -9,7 +9,9 @@ Distributed under the BSD License
 """
 from distutils.core import setup
 from Cython.Build import cythonize
+import numpy
 
 setup(
-    ext_modules=cythonize("pyspike/*.pyx")
+    ext_modules=cythonize("pyspike/*.pyx"),
+    include_dirs=[numpy.get_include()]
 )
-- cgit v1.2.3

From 7ff5a4afe2d7a40dce34ae187a23b7d0feba33ba Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Thu, 16 Oct 2014 15:26:58 +0200
Subject: catch ImportError in setup.py

---
 .travis.yml |  1 -
 setup.py    | 17 ++++++++++++-----
 2 files changed, 12 insertions(+), 6 deletions(-)

(limited to 'setup.py')

diff --git a/.travis.yml b/.travis.yml
index 2bbb9ca..1035775 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -11,4 +11,3 @@ install:
 script:
   - python setup.py build_ext --inplace
   - nosetests
-
diff --git a/setup.py b/setup.py
index a9f40bd..fc8d0d2 100644
--- a/setup.py
+++ b/setup.py
@@ -2,16 +2,23 @@
 
 Handles the compilation of pyx source files
 
+run as:
+python setup.py build_ext --inplace
+
 Copyright 2014, Mario Mulansky
 
 Distributed under the BSD License
 
 """
 from distutils.core import setup
-from Cython.Build import cythonize
 import numpy
 
-setup(
-    ext_modules=cythonize("pyspike/*.pyx"),
-    include_dirs=[numpy.get_include()]
-)
+try:
+    from Cython.Build import cythonize
+    setup(
+        ext_modules=cythonize("pyspike/*.pyx"),
+        include_dirs=[numpy.get_include()]
+    )
+except ImportError:
+    print("Error: Cython is not installed! You will only be able to use the \
+much slower Python backend in PySpike.")
-- cgit v1.2.3

From fcfe1f9637973c4c5f7f2f57aba4ce77c0fb2002 Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Tue, 4 Nov 2014 12:02:08 +0100
Subject: packaging info to distribute on PyPI

---
 Readme.rst | 19 +++++++++++++---
 setup.cfg  |  2 ++
 setup.py   | 73 ++++++++++++++++++++++++++++++++++++++++++++++++++++----------
 3 files changed, 80 insertions(+), 14 deletions(-)
 create mode 100644 setup.cfg

(limited to 'setup.py')

diff --git a/Readme.rst b/Readme.rst
index 662cc1f..4053995 100644
--- a/Readme.rst
+++ b/Readme.rst
@@ -20,12 +20,25 @@ All source codes are published under the BSD_License_.
 Requirements and Installation
 -----------------------------
 
-To use PySpike you need Python installed with the following additional packages:
+PySpike is available on the Python Package Index, which is the easiest way to obtain the PySpike package.
+If you have `pip` installed, just run
+
+.. code:: bash
+
+    sudo pip install pyspike
+
+to install pyspike.
+PySpike requires `numpy` as a minimal requirement, as well as a C compiler to generate the binaries.
+
+Install from Github sources
+...........................
+
+You can also obtain the latest PySpike developer version from the github repository.
+For that, make sure you have the following Python libraries installed:
 
 - numpy
-- scipy
-- matplotlib
 - cython
+- matplotlib (for the examples)
+- nosetests (for running the tests)
 
 In particular, make sure that cython_ is configured properly and able to locate a C compiler, otherwise PySpike will use the much slower Python implementations.
 
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..2aee194
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,2 @@
+[metadata]
+description-file = Readme.rst
diff --git a/setup.py b/setup.py
index fc8d0d2..3bae9d5 100644
--- a/setup.py
+++ b/setup.py
@@ -1,24 +1,75 @@
 """ setup.py
 
-Handles the compilation of pyx source files
-
-run as:
+to compile cython files:
 python setup.py build_ext --inplace
 
+
 Copyright 2014, Mario Mulansky
 
 Distributed under the BSD License
 
 """
-from distutils.core import setup
+from setuptools import setup, find_packages
+from distutils.extension import Extension
 import numpy
 
 try:
-    from Cython.Build import cythonize
-    setup(
-        ext_modules=cythonize("pyspike/*.pyx"),
-        include_dirs=[numpy.get_include()]
-    )
+    from Cython.Distutils import build_ext
 except ImportError:
-    print("Error: Cython is not installed! You will only be able to use the \
-much slower Python backend in PySpike.")
+    use_cython = False
+else:
+    use_cython = True
+
+cmdclass = {}
+ext_modules = []
+
+if use_cython:
+    ext_modules += [
+        Extension("pyspike.cython_add", ["pyspike/cython_add.pyx"]),
+        Extension("pyspike.cython_distance", ["pyspike/cython_distance.pyx"]),
+    ]
+    cmdclass.update({'build_ext': build_ext})
+else:
+    ext_modules += [
+        Extension("pyspike.cython_add", ["pyspike/cython_add.c"]),
+        Extension("pyspike.cython_distance", ["pyspike/cython_distance.c"]),
+    ]
+
+setup(
+    name='pyspike',
+    packages=find_packages(exclude=['doc']),
+    version='0.1.2',
+    cmdclass=cmdclass,
+    ext_modules=ext_modules,
+    description='A Python library for the numerical analysis of spike \
+train similarity',
+    author='Mario Mulansky',
+    author_email='mario.mulansky@gmx.net',
+    license='BSD',
+    url='https://github.com/mariomulansky/PySpike',
+    # download_url='https://github.com/mariomulansky/PySpike/tarball/0.1',
+    install_requires=['numpy'],
+    keywords=['data analysis', 'spike', 'neuroscience'],  # arbitrary keywords
+    classifiers=[
+        # How mature is this project? Common values are
+        #   3 - Alpha
+        #   4 - Beta
+        #   5 - Production/Stable
+        'Development Status :: 3 - Alpha',
+
+        # Indicate who your project is intended for
+        'Intended Audience :: Science/Research',
+        'Topic :: Scientific/Engineering',
+        'Topic :: Scientific/Engineering :: Information Analysis',
+
+        'License :: OSI Approved :: BSD License',
+
+        'Programming Language :: Python :: 2',
+        'Programming Language :: Python :: 2.6',
+        'Programming Language :: Python :: 2.7',
+    ],
+    package_data={
+        'pyspike': ['cython_add.c', 'cython_distance.c'],
+        'test': ['Spike_testdata.txt']
+    }
+)
-- cgit v1.2.3
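The `package_data` entry above ships the pre-generated C files, so end users can compile the fast backend even without Cython installed. A sketch of regenerating those C sources before cutting a release; this assumes Cython is available, and the file names are the ones listed in the setup.py above:

    # regenerate the C sources that are shipped via package_data
    from Cython.Build import cythonize

    cythonize(["pyspike/cython_add.pyx", "pyspike/cython_distance.pyx"])
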
From 9a2471612551bb07726a9afc97fac5f3b704cc41 Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Tue, 4 Nov 2014 12:20:07 +0100
Subject: fixed setup for travis test runs

---
 setup.py | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

(limited to 'setup.py')

diff --git a/setup.py b/setup.py
index 3bae9d5..eaef7e1 100644
--- a/setup.py
+++ b/setup.py
@@ -11,6 +11,7 @@ Distributed under the BSD License
 """
 from setuptools import setup, find_packages
 from distutils.extension import Extension
+import os.path
 import numpy
 
 try:
@@ -20,20 +21,27 @@ except ImportError:
 else:
     use_cython = True
 
+if os.path.isfile("pyspike/cython_add.c") and \
+   os.path.isfile("pyspike/cython_distance.c"):
+    use_c = True
+else:
+    use_c = False
+
 cmdclass = {}
 ext_modules = []
 
-if use_cython:
+if use_cython:  # Cython is available, compile .pyx -> .c
     ext_modules += [
         Extension("pyspike.cython_add", ["pyspike/cython_add.pyx"]),
         Extension("pyspike.cython_distance", ["pyspike/cython_distance.pyx"]),
     ]
     cmdclass.update({'build_ext': build_ext})
-else:
+elif use_c:  # c files are there, compile to binaries
     ext_modules += [
         Extension("pyspike.cython_add", ["pyspike/cython_add.c"]),
         Extension("pyspike.cython_distance", ["pyspike/cython_distance.c"]),
     ]
+# neither cython nor c files available -> automatic fall-back to python backend
 
 setup(
     name='pyspike',
@@ -41,6 +49,7 @@ setup(
     version='0.1.2',
     cmdclass=cmdclass,
     ext_modules=ext_modules,
+    include_dirs=[numpy.get_include()],
     description='A Python library for the numerical analysis of spike \
 train similarity',
     author='Mario Mulansky',
-- cgit v1.2.3

From 888f245fede0ddc9b5d742cb3cbf7a6727125ef0 Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Tue, 4 Nov 2014 14:12:12 +0100
Subject: v0.1.3

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'setup.py')

diff --git a/setup.py b/setup.py
index eaef7e1..9cab5de 100644
--- a/setup.py
+++ b/setup.py
@@ -46,7 +46,7 @@ elif use_c:  # c files are there, compile to binaries
 setup(
     name='pyspike',
     packages=find_packages(exclude=['doc']),
-    version='0.1.2',
+    version='0.1.3',
     cmdclass=cmdclass,
     ext_modules=ext_modules,
     include_dirs=[numpy.get_include()],
-- cgit v1.2.3

From 6eb6bc486027d3d5304a94cfb417a2257f2b6fd9 Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Tue, 3 Feb 2015 12:19:53 +0100
Subject: moved cython functions to subdirectory

---
 pyspike/DiscreteFunc.py            |   4 +-
 pyspike/PieceWiseConstFunc.py      |   4 +-
 pyspike/PieceWiseLinFunc.py        |   4 +-
 pyspike/cython/cython_add.pyx      | 235 ++++++++++++++++++
 pyspike/cython/cython_distance.pyx | 312 ++++++++++++++++++++++++
 pyspike/cython/python_backend.py   | 485 +++++++++++++++++++++++++++++++++++++
 pyspike/cython_add.pyx             | 235 ------------------
 pyspike/cython_distance.pyx        | 312 ------------------------
 pyspike/isi_distance.py            |   6 +-
 pyspike/python_backend.py          | 485 -------------------------------------
 pyspike/spike_distance.py          |   5 +-
 pyspike/spike_sync.py              |   4 +-
setup.py | 14 +- 13 files changed, 1054 insertions(+), 1051 deletions(-) create mode 100644 pyspike/cython/cython_add.pyx create mode 100644 pyspike/cython/cython_distance.pyx create mode 100644 pyspike/cython/python_backend.py delete mode 100644 pyspike/cython_add.pyx delete mode 100644 pyspike/cython_distance.pyx delete mode 100644 pyspike/python_backend.py (limited to 'setup.py') diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py index 3e24284..2283e03 100644 --- a/pyspike/DiscreteFunc.py +++ b/pyspike/DiscreteFunc.py @@ -198,7 +198,7 @@ class DiscreteFunc(object): # cython version try: - from cython_add import add_discrete_function_cython as \ + from cython.cython_add import add_discrete_function_cython as \ add_discrete_function_impl except ImportError: print("Warning: add_discrete_function_cython not found. Make \ @@ -206,7 +206,7 @@ sure that PySpike is installed by running\n\ 'python setup.py build_ext --inplace'! \ \n Falling back to slow python backend.") # use python backend - from python_backend import add_discrete_function_python as \ + from cython.python_backend import add_discrete_function_python as \ add_discrete_function_impl self.x, self.y, self.mp = \ diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py index e639dfc..dc57ab1 100644 --- a/pyspike/PieceWiseConstFunc.py +++ b/pyspike/PieceWiseConstFunc.py @@ -146,14 +146,14 @@ class PieceWiseConstFunc(object): # cython version try: - from cython_add import add_piece_wise_const_cython as \ + from cython.cython_add import add_piece_wise_const_cython as \ add_piece_wise_const_impl except ImportError: print("Warning: add_piece_wise_const_cython not found. Make sure \ that PySpike is installed by running\n 'python setup.py build_ext --inplace'! \ \n Falling back to slow python backend.") # use python backend - from python_backend import add_piece_wise_const_python as \ + from cython.python_backend import add_piece_wise_const_python as \ add_piece_wise_const_impl self.x, self.y = add_piece_wise_const_impl(self.x, self.y, f.x, f.y) diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py index 58a20e5..bc0aa2a 100644 --- a/pyspike/PieceWiseLinFunc.py +++ b/pyspike/PieceWiseLinFunc.py @@ -173,14 +173,14 @@ class PieceWiseLinFunc: # cython version try: - from cython_add import add_piece_wise_lin_cython as \ + from cython.cython_add import add_piece_wise_lin_cython as \ add_piece_wise_lin_impl except ImportError: print("Warning: add_piece_wise_lin_cython not found. Make sure \ that PySpike is installed by running\n 'python setup.py build_ext --inplace'! \ \n Falling back to slow python backend.") # use python backend - from python_backend import add_piece_wise_lin_python as \ + from cython.python_backend import add_piece_wise_lin_python as \ add_piece_wise_lin_impl self.x, self.y1, self.y2 = add_piece_wise_lin_impl( diff --git a/pyspike/cython/cython_add.pyx b/pyspike/cython/cython_add.pyx new file mode 100644 index 0000000..ac64005 --- /dev/null +++ b/pyspike/cython/cython_add.pyx @@ -0,0 +1,235 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_add.pyx + +cython implementation of the add function for piece-wise const and +piece-wise linear functions + +Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects +improves the performance of spike_distance by a factor of 10! 
+ +Copyright 2014, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_add.pyx + +which gives:: + + cython_add.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport fabs + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +############################################################ +# add_piece_wise_const_cython +############################################################ +def add_piece_wise_const_cython(double[:] x1, double[:] y1, + double[:] x2, double[:] y2): + + cdef int N1 = len(x1) + cdef int N2 = len(x2) + cdef double[:] x_new = np.empty(N1+N2) + cdef double[:] y_new = np.empty(N1+N2-1) + cdef int index1 = 0 + cdef int index2 = 0 + cdef int index = 0 + cdef int i + with nogil: # release the interpreter lock to allow multi-threading + x_new[0] = x1[0] + y_new[0] = y1[0] + y2[0] + while (index1+1 < N1-1) and (index2+1 < N2-1): + index += 1 + # print(index1+1, x1[index1+1], y1[index1+1], x_new[index]) + if x1[index1+1] < x2[index2+1]: + index1 += 1 + x_new[index] = x1[index1] + elif x1[index1+1] > x2[index2+1]: + index2 += 1 + x_new[index] = x2[index2] + else: # x1[index1+1] == x2[index2+1]: + index1 += 1 + index2 += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + y2[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < N1-1: + x_new[index+1:index+1+N1-index1-1] = x1[index1+1:] + for i in xrange(N1-index1-2): + y_new[index+1+i] = y1[index1+1+i] + y2[N2-2] + index += N1-index1-2 + elif index2+1 < N2-1: + x_new[index+1:index+1+N2-index2-1] = x2[index2+1:] + for i in xrange(N2-index2-2): + y_new[index+1+i] = y2[index2+1+i] + y1[N1-2] + index += N2-index2-2 + else: # both arrays reached the end simultaneously + # only the last x-value missing + x_new[index+1] = x1[N1-1] + # the last value is again the end of the interval + # x_new[index+1] = x1[-1] + # only use the data that was actually filled + x1 = x_new[:index+2] + y1 = y_new[:index+1] + # end nogil + return np.array(x_new[:index+2]), np.array(y_new[:index+1]) + + +############################################################ +# add_piece_wise_lin_cython +############################################################ +def add_piece_wise_lin_cython(double[:] x1, double[:] y11, double[:] y12, + double[:] x2, double[:] y21, double[:] y22): + cdef int N1 = len(x1) + cdef int N2 = len(x2) + cdef double[:] x_new = np.empty(N1+N2) + cdef double[:] y1_new = np.empty(N1+N2-1) + cdef double[:] y2_new = np.empty_like(y1_new) + cdef int index1 = 0 # index for self + cdef int index2 = 0 # index for f + cdef int index = 0 # index for new + cdef int i + cdef double y + with nogil: # release the interpreter lock to allow multi-threading + x_new[0] = x1[0] + y1_new[0] = y11[0] + y21[0] + while (index1+1 < N1-1) and (index2+1 < N2-1): + # print(index1+1, x1[index1+1], self.y[index1+1], x_new[index]) + if x1[index1+1] < x2[index2+1]: + # first compute the end value of the previous interval + # linear interpolation of the interval + y = y21[index2] + (y22[index2]-y21[index2]) * \ + (x1[index1+1]-x2[index2]) / (x2[index2+1]-x2[index2]) + y2_new[index] = y12[index1] + y + index1 += 1 + index += 1 + x_new[index] = x1[index1] + # and the starting value for the next interval + y1_new[index] = y11[index1] + y + elif x1[index1+1] > x2[index2+1]: + # first compute the end value of the previous interval + # linear interpolation of the interval + y = y11[index1] 
+ (y12[index1]-y11[index1]) * \ + (x2[index2+1]-x1[index1]) / \ + (x1[index1+1]-x1[index1]) + y2_new[index] = y22[index2] + y + index2 += 1 + index += 1 + x_new[index] = x2[index2] + # and the starting value for the next interval + y1_new[index] = y21[index2] + y + else: # x1[index1+1] == x2[index2+1]: + y2_new[index] = y12[index1] + y22[index2] + index1 += 1 + index2 += 1 + index += 1 + x_new[index] = x1[index1] + y1_new[index] = y11[index1] + y21[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < N1-1: + x_new[index+1:index+1+N1-index1-1] = x1[index1+1:] + for i in xrange(N1-index1-2): + # compute the linear interpolations value + y = y21[index2] + (y22[index2]-y21[index2]) * \ + (x1[index1+1+i]-x2[index2]) / (x2[index2+1]-x2[index2]) + y1_new[index+1+i] = y11[index1+1+i] + y + y2_new[index+i] = y12[index1+i] + y + index += N1-index1-2 + elif index2+1 < N2-1: + x_new[index+1:index+1+N2-index2-1] = x2[index2+1:] + # compute the linear interpolations values + for i in xrange(N2-index2-2): + y = y11[index1] + (y12[index1]-y11[index1]) * \ + (x2[index2+1+i]-x1[index1]) / \ + (x1[index1+1]-x1[index1]) + y1_new[index+1+i] = y21[index2+1+i] + y + y2_new[index+i] = y22[index2+i] + y + index += N2-index2-2 + else: # both arrays reached the end simultaneously + # only the last x-value missing + x_new[index+1] = x1[N1-1] + # finally, the end value for the last interval + y2_new[index] = y12[N1-2]+y22[N2-2] + # only use the data that was actually filled + # end nogil + return (np.array(x_new[:index+2]), + np.array(y1_new[:index+1]), + np.array(y2_new[:index+1])) + + +############################################################ +# add_discrete_function_cython +############################################################ +def add_discrete_function_cython(double[:] x1, double[:] y1, double[:] mp1, + double[:] x2, double[:] y2, double[:] mp2): + + cdef double[:] x_new = np.empty(len(x1) + len(x2)) + cdef double[:] y_new = np.empty_like(x_new) + cdef double[:] mp_new = np.empty_like(x_new) + cdef int index1 = 0 + cdef int index2 = 0 + cdef int index = 0 + cdef int N1 = len(y1) + cdef int N2 = len(y2) + x_new[0] = x1[0] + while (index1+1 < N1) and (index2+1 < N2): + if x1[index1+1] < x2[index2+1]: + index1 += 1 + index += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + mp_new[index] = mp1[index1] + elif x1[index1+1] > x2[index2+1]: + index2 += 1 + index += 1 + x_new[index] = x2[index2] + y_new[index] = y2[index2] + mp_new[index] = mp2[index2] + else: # x1[index1+1] == x2[index2+1] + index1 += 1 + index2 += 1 + index += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + y2[index2] + mp_new[index] = mp1[index1] + mp2[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < len(y1): + x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] + y_new[index+1:index+1+len(x1)-index1-1] = y1[index1+1:] + mp_new[index+1:index+1+len(x1)-index1-1] = mp1[index1+1:] + index += len(x1)-index1-1 + elif index2+1 < len(y2): + x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] + y_new[index+1:index+1+len(x2)-index2-1] = y2[index2+1:] + mp_new[index+1:index+1+len(x2)-index2-1] = mp2[index2+1:] + index += len(x2)-index2-1 + # else: # both arrays reached the end simultaneously + # x_new[index+1] = x1[-1] + # y_new[index+1] = y1[-1] + y2[-1] + # mp_new[index+1] = mp1[-1] + mp2[-1] + + y_new[0] = y_new[1] + mp_new[0] = mp_new[1] + + # the last value is again the end of the interval + # only use the data that was 
actually filled + return (np.array(x_new[:index+1]), + np.array(y_new[:index+1]), + np.array(mp_new[:index+1])) diff --git a/pyspike/cython/cython_distance.pyx b/pyspike/cython/cython_distance.pyx new file mode 100644 index 0000000..489aab9 --- /dev/null +++ b/pyspike/cython/cython_distance.pyx @@ -0,0 +1,312 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_distances.pyx + +cython implementation of the isi- and spike-distance + +Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects +improves the performance of spike_distance by a factor of 10! + +Copyright 2014, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_distance.pyx + +which gives:: + + cython_distance.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport fabs +from libc.math cimport fmax +from libc.math cimport fmin + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +############################################################ +# isi_distance_cython +############################################################ +def isi_distance_cython(double[:] s1, + double[:] s2): + + cdef double[:] spike_events + cdef double[:] isi_values + cdef int index1, index2, index + cdef int N1, N2 + cdef double nu1, nu2 + N1 = len(s1)-1 + N2 = len(s2)-1 + + nu1 = s1[1]-s1[0] + nu2 = s2[1]-s2[0] + spike_events = np.empty(N1+N2) + spike_events[0] = s1[0] + # the values have one entry less - the number of intervals between events + isi_values = np.empty(N1+N2-1) + + with nogil: # release the interpreter to allow multithreading + isi_values[0] = fabs(nu1-nu2)/fmax(nu1, nu2) + index1 = 0 + index2 = 0 + index = 1 + while True: + # check which spike is next - from s1 or s2 + if s1[index1+1] < s2[index2+1]: + index1 += 1 + # break condition relies on existence of spikes at T_end + if index1 >= N1: + break + spike_events[index] = s1[index1] + nu1 = s1[index1+1]-s1[index1] + elif s1[index1+1] > s2[index2+1]: + index2 += 1 + if index2 >= N2: + break + spike_events[index] = s2[index2] + nu2 = s2[index2+1]-s2[index2] + else: # s1[index1+1] == s2[index2+1] + index1 += 1 + index2 += 1 + if (index1 >= N1) or (index2 >= N2): + break + spike_events[index] = s1[index1] + nu1 = s1[index1+1]-s1[index1] + nu2 = s2[index2+1]-s2[index2] + # compute the corresponding isi-distance + isi_values[index] = fabs(nu1 - nu2) / fmax(nu1, nu2) + index += 1 + # the last event is the interval end + spike_events[index] = s1[N1] + # end nogil + + return spike_events[:index+1], isi_values[:index] + + +############################################################ +# get_min_dist_cython +############################################################ +cdef inline double get_min_dist_cython(double spike_time, + double[:] spike_train, + # use memory view to ensure inlining + # np.ndarray[DTYPE_t,ndim=1] spike_train, + int N, + int start_index=0) nogil: + """ Returns the minimal distance |spike_time - spike_train[i]| + with i>=start_index. 
+ """ + cdef double d, d_temp + d = fabs(spike_time - spike_train[start_index]) + start_index += 1 + while start_index < N: + d_temp = fabs(spike_time - spike_train[start_index]) + if d_temp > d: + break + else: + d = d_temp + start_index += 1 + return d + + +############################################################ +# isi_avrg_cython +############################################################ +cdef inline double isi_avrg_cython(double isi1, double isi2) nogil: + return 0.5*(isi1+isi2)*(isi1+isi2) + # alternative definition to obtain ~ 0.5 for Poisson spikes + # return 0.5*(isi1*isi1+isi2*isi2) + + +############################################################ +# spike_distance_cython +############################################################ +def spike_distance_cython(double[:] t1, + double[:] t2): + + cdef double[:] spike_events + cdef double[:] y_starts + cdef double[:] y_ends + + cdef int N1, N2, index1, index2, index + cdef double dt_p1, dt_p2, dt_f1, dt_f2, isi1, isi2, s1, s2 + + N1 = len(t1) + N2 = len(t2) + + spike_events = np.empty(N1+N2-2) + spike_events[0] = t1[0] + y_starts = np.empty(len(spike_events)-1) + y_ends = np.empty(len(spike_events)-1) + + with nogil: # release the interpreter to allow multithreading + index1 = 0 + index2 = 0 + index = 1 + dt_p1 = 0.0 + dt_f1 = get_min_dist_cython(t1[1], t2, N2, 0) + dt_p2 = 0.0 + dt_f2 = get_min_dist_cython(t2[1], t1, N1, 0) + isi1 = max(t1[1]-t1[0], t1[2]-t1[1]) + isi2 = max(t2[1]-t2[0], t2[2]-t2[1]) + s1 = dt_f1*(t1[1]-t1[0])/isi1 + s2 = dt_f2*(t2[1]-t2[0])/isi2 + y_starts[0] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + while True: + # print(index, index1, index2) + if t1[index1+1] < t2[index2+1]: + index1 += 1 + # break condition relies on existence of spikes at T_end + if index1+1 >= N1: + break + spike_events[index] = t1[index1] + # first calculate the previous interval end value + dt_p1 = dt_f1 # the previous time now was the following time before + s1 = dt_p1 + s2 = (dt_p2*(t2[index2+1]-t1[index1]) + + dt_f2*(t1[index1]-t2[index2])) / isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # now the next interval start value + dt_f1 = get_min_dist_cython(t1[index1+1], t2, N2, index2) + isi1 = t1[index1+1]-t1[index1] + # s2 is the same as above, thus we can compute y2 immediately + y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + elif t1[index1+1] > t2[index2+1]: + index2 += 1 + if index2+1 >= N2: + break + spike_events[index] = t2[index2] + # first calculate the previous interval end value + dt_p2 = dt_f2 # the previous time now was the following time before + s1 = (dt_p1*(t1[index1+1]-t2[index2]) + + dt_f1*(t2[index2]-t1[index1])) / isi1 + s2 = dt_p2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # now the next interval start value + dt_f2 = get_min_dist_cython(t2[index2+1], t1, N1, index1) + #s2 = dt_f2 + isi2 = t2[index2+1]-t2[index2] + # s2 is the same as above, thus we can compute y2 immediately + y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + else: # t1[index1+1] == t2[index2+1] - generate only one event + index1 += 1 + index2 += 1 + if (index1+1 >= N1) or (index2+1 >= N2): + break + spike_events[index] = t1[index1] + y_ends[index-1] = 0.0 + y_starts[index] = 0.0 + dt_p1 = 0.0 + dt_p2 = 0.0 + dt_f1 = get_min_dist_cython(t1[index1+1], t2, N2, index2) + dt_f2 = get_min_dist_cython(t2[index2+1], t1, N1, index1) + isi1 = t1[index1+1]-t1[index1] + isi2 = t2[index2+1]-t2[index2] + index += 1 + # the last event is the interval 
end + spike_events[index] = t1[N1-1] + # the ending value of the last interval + isi1 = max(t1[N1-1]-t1[N1-2], t1[N1-2]-t1[N1-3]) + isi2 = max(t2[N2-1]-t2[N2-2], t2[N2-2]-t2[N2-3]) + s1 = dt_p1*(t1[N1-1]-t1[N1-2])/isi1 + s2 = dt_p2*(t2[N2-1]-t2[N2-2])/isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # end nogil + + # use only the data added above + # could be less than original length due to equal spike times + return spike_events[:index+1], y_starts[:index], y_ends[:index] + + + +############################################################ +# coincidence_python +############################################################ +cdef inline double get_tau(double[:] spikes1, double[:] spikes2, int i, int j): + cdef double m = 1E100 # some huge number + cdef int N1 = len(spikes1)-2 + cdef int N2 = len(spikes2)-2 + if i < N1: + m = fmin(m, spikes1[i+1]-spikes1[i]) + if j < N2: + m = fmin(m, spikes2[j+1]-spikes2[j]) + if i > 1: + m = fmin(m, spikes1[i]-spikes1[i-1]) + if j > 1: + m = fmin(m, spikes2[j]-spikes2[j-1]) + return 0.5*m + + +############################################################ +# coincidence_cython +############################################################ +def coincidence_cython(double[:] spikes1, double[:] spikes2): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = 0 + cdef int j = 0 + cdef int n = 0 + cdef double[:] st = np.zeros(N1 + N2 - 2) # spike times + cdef double[:] c = np.zeros(N1 + N2 - 2) # coincidences + cdef double[:] mp = np.ones(N1 + N2 - 2) # multiplicity + cdef double tau + while n < N1 + N2 - 2: + if spikes1[i+1] < spikes2[j+1]: + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + st[n] = spikes1[i] + if j > 0 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + c[n] = 1 + c[n-1] = 1 + elif spikes1[i+1] > spikes2[j+1]: + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + st[n] = spikes2[j] + if i > 0 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + c[n] = 1 + c[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + if i == N1-1 or j == N2-1: + break + n += 1 + # add only one event, but with coincidence 2 and multiplicity 2 + st[n] = spikes1[i] + c[n] = 2 + mp[n] = 2 + + st = st[:n+2] + c = c[:n+2] + mp = mp[:n+2] + + st[0] = spikes1[0] + st[len(st)-1] = spikes1[len(spikes1)-1] + c[0] = c[1] + c[len(c)-1] = c[len(c)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + + return st, c, mp diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py new file mode 100644 index 0000000..481daf9 --- /dev/null +++ b/pyspike/cython/python_backend.py @@ -0,0 +1,485 @@ +""" python_backend.py + +Collection of python functions that can be used instead of the cython +implementation. + +Copyright 2014, Mario Mulansky + +Distributed under the BSD License + +""" + +import numpy as np + + +############################################################ +# isi_distance_python +############################################################ +def isi_distance_python(s1, s2): + """ Plain Python implementation of the isi distance. 
+ """ + # compute the interspike interval + nu1 = s1[1:] - s1[:-1] + nu2 = s2[1:] - s2[:-1] + + # compute the isi-distance + spike_events = np.empty(len(nu1) + len(nu2)) + spike_events[0] = s1[0] + # the values have one entry less - the number of intervals between events + isi_values = np.empty(len(spike_events) - 1) + # add the distance of the first events + # isi_values[0] = nu1[0]/nu2[0] - 1.0 if nu1[0] <= nu2[0] \ + # else 1.0 - nu2[0]/nu1[0] + isi_values[0] = abs(nu1[0] - nu2[0]) / max(nu1[0], nu2[0]) + index1 = 0 + index2 = 0 + index = 1 + while True: + # check which spike is next - from s1 or s2 + if s1[index1+1] < s2[index2+1]: + index1 += 1 + # break condition relies on existence of spikes at T_end + if index1 >= len(nu1): + break + spike_events[index] = s1[index1] + elif s1[index1+1] > s2[index2+1]: + index2 += 1 + if index2 >= len(nu2): + break + spike_events[index] = s2[index2] + else: # s1[index1 + 1] == s2[index2 + 1] + index1 += 1 + index2 += 1 + if (index1 >= len(nu1)) or (index2 >= len(nu2)): + break + spike_events[index] = s1[index1] + # compute the corresponding isi-distance + isi_values[index] = abs(nu1[index1] - nu2[index2]) / \ + max(nu1[index1], nu2[index2]) + index += 1 + # the last event is the interval end + spike_events[index] = s1[-1] + # use only the data added above + # could be less than original length due to equal spike times + return spike_events[:index + 1], isi_values[:index] + + +############################################################ +# get_min_dist +############################################################ +def get_min_dist(spike_time, spike_train, start_index=0): + """ Returns the minimal distance |spike_time - spike_train[i]| + with i>=start_index. + """ + d = abs(spike_time - spike_train[start_index]) + start_index += 1 + while start_index < len(spike_train): + d_temp = abs(spike_time - spike_train[start_index]) + if d_temp > d: + break + else: + d = d_temp + start_index += 1 + return d + + +############################################################ +# spike_distance_python +############################################################ +def spike_distance_python(spikes1, spikes2): + """ Computes the instantaneous spike-distance S_spike (t) of the two given + spike trains. The spike trains are expected to have auxiliary spikes at the + beginning and end of the interval. Use the function add_auxiliary_spikes to + add those spikes to the spike train. + Args: + - spikes1, spikes2: ordered arrays of spike times with auxiliary spikes. + Returns: + - PieceWiseLinFunc describing the spike-distance. + """ + # check for auxiliary spikes - first and last spikes should be identical + assert spikes1[0] == spikes2[0], \ + "Given spike trains seems not to have auxiliary spikes!" + assert spikes1[-1] == spikes2[-1], \ + "Given spike trains seems not to have auxiliary spikes!" 
+ # shorter variables + t1 = spikes1 + t2 = spikes2 + + spike_events = np.empty(len(t1) + len(t2) - 2) + spike_events[0] = t1[0] + y_starts = np.empty(len(spike_events) - 1) + y_ends = np.empty(len(spike_events) - 1) + + index1 = 0 + index2 = 0 + index = 1 + dt_p1 = 0.0 + dt_f1 = get_min_dist(t1[1], t2, 0) + dt_p2 = 0.0 + dt_f2 = get_min_dist(t2[1], t1, 0) + isi1 = max(t1[1]-t1[0], t1[2]-t1[1]) + isi2 = max(t2[1]-t2[0], t2[2]-t2[1]) + s1 = dt_f1*(t1[1]-t1[0])/isi1 + s2 = dt_f2*(t2[1]-t2[0])/isi2 + y_starts[0] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + while True: + # print(index, index1, index2) + if t1[index1+1] < t2[index2+1]: + index1 += 1 + # break condition relies on existence of spikes at T_end + if index1+1 >= len(t1): + break + spike_events[index] = t1[index1] + # first calculate the previous interval end value + dt_p1 = dt_f1 # the previous time was the following time before + s1 = dt_p1 + s2 = (dt_p2*(t2[index2+1]-t1[index1]) + + dt_f2*(t1[index1]-t2[index2])) / isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + # now the next interval start value + dt_f1 = get_min_dist(t1[index1+1], t2, index2) + isi1 = t1[index1+1]-t1[index1] + # s2 is the same as above, thus we can compute y2 immediately + y_starts[index] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + elif t1[index1+1] > t2[index2+1]: + index2 += 1 + if index2+1 >= len(t2): + break + spike_events[index] = t2[index2] + # first calculate the previous interval end value + dt_p2 = dt_f2 # the previous time was the following time before + s1 = (dt_p1*(t1[index1+1]-t2[index2]) + + dt_f1*(t2[index2]-t1[index1])) / isi1 + s2 = dt_p2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + # now the next interval start value + dt_f2 = get_min_dist(t2[index2+1], t1, index1) + #s2 = dt_f2 + isi2 = t2[index2+1]-t2[index2] + # s2 is the same as above, thus we can compute y2 immediately + y_starts[index] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + else: # t1[index1+1] == t2[index2+1] - generate only one event + index1 += 1 + index2 += 1 + if (index1+1 >= len(t1)) or (index2+1 >= len(t2)): + break + assert dt_f2 == 0.0 + assert dt_f1 == 0.0 + spike_events[index] = t1[index1] + y_ends[index-1] = 0.0 + y_starts[index] = 0.0 + dt_p1 = 0.0 + dt_p2 = 0.0 + dt_f1 = get_min_dist(t1[index1+1], t2, index2) + dt_f2 = get_min_dist(t2[index2+1], t1, index1) + isi1 = t1[index1+1]-t1[index1] + isi2 = t2[index2+1]-t2[index2] + index += 1 + # the last event is the interval end + spike_events[index] = t1[-1] + # the ending value of the last interval + isi1 = max(t1[-1]-t1[-2], t1[-2]-t1[-3]) + isi2 = max(t2[-1]-t2[-2], t2[-2]-t2[-3]) + s1 = dt_p1*(t1[-1]-t1[-2])/isi1 + s2 = dt_p2*(t2[-1]-t2[-2])/isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + # use only the data added above + # could be less than original length due to equal spike times + return spike_events[:index+1], y_starts[:index], y_ends[:index] + + +############################################################ +# cumulative_sync_python +############################################################ +def cumulative_sync_python(spikes1, spikes2): + + def get_tau(spikes1, spikes2, i, j): + return 0.5*min([spikes1[i]-spikes1[i-1], spikes1[i+1]-spikes1[i], + spikes2[j]-spikes2[j-1], spikes2[j+1]-spikes2[j]]) + N1 = len(spikes1) + N2 = len(spikes2) + i = 0 + j = 0 + n = 0 + st = np.zeros(N1 + N2 - 2) + c = np.zeros(N1 + N2 - 3) + c[0] = 0 + st[0] = 0 + while n < N1 + N2: + if spikes1[i+1] < spikes2[j+1]: + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + 
st[n] = spikes1[i] + if spikes1[i]-spikes2[j] > tau: + c[n] = c[n-1] + else: + c[n] = c[n-1]+1 + elif spikes1[i+1] > spikes2[j+1]: + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + st[n] = spikes2[j] + if spikes2[j]-spikes1[i] > tau: + c[n] = c[n-1] + else: + c[n] = c[n-1]+1 + else: # spikes1[i+1] = spikes2[j+1] + j += 1 + i += 1 + if i == N1-1 or j == N2-1: + break + n += 1 + st[n] = spikes1[i] + c[n] = c[n-1] + n += 1 + st[n] = spikes1[i] + c[n] = c[n-1]+1 + c[0] = 0 + st[0] = spikes1[0] + st[-1] = spikes1[-1] + + return st, c + + +############################################################ +# coincidence_python +############################################################ +def coincidence_python(spikes1, spikes2): + + def get_tau(spikes1, spikes2, i, j): + m = 1E100 # some huge number + if i < len(spikes1)-2: + m = min(m, spikes1[i+1]-spikes1[i]) + if j < len(spikes2)-2: + m = min(m, spikes2[j+1]-spikes2[j]) + if i > 1: + m = min(m, spikes1[i]-spikes1[i-1]) + if j > 1: + m = min(m, spikes2[j]-spikes2[j-1]) + return 0.5*m + N1 = len(spikes1) + N2 = len(spikes2) + i = 0 + j = 0 + n = 0 + st = np.zeros(N1 + N2 - 2) # spike times + c = np.zeros(N1 + N2 - 2) # coincidences + mp = np.ones(N1 + N2 - 2) # multiplicity + while n < N1 + N2 - 2: + if spikes1[i+1] < spikes2[j+1]: + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + st[n] = spikes1[i] + if j > 0 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + c[n] = 1 + c[n-1] = 1 + elif spikes1[i+1] > spikes2[j+1]: + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + st[n] = spikes2[j] + if i > 0 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + c[n] = 1 + c[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + if i == N1-1 or j == N2-1: + break + n += 1 + # add only one event, but with coincidence 2 and multiplicity 2 + st[n] = spikes1[i] + c[n] = 2 + mp[n] = 2 + + st = st[:n+2] + c = c[:n+2] + mp = mp[:n+2] + + st[0] = spikes1[0] + st[-1] = spikes1[-1] + c[0] = c[1] + c[-1] = c[-2] + mp[0] = mp[1] + mp[-1] = mp[-2] + + return st, c, mp + + +############################################################ +# add_piece_wise_const_python +############################################################ +def add_piece_wise_const_python(x1, y1, x2, y2): + x_new = np.empty(len(x1) + len(x2)) + y_new = np.empty(len(x_new)-1) + x_new[0] = x1[0] + y_new[0] = y1[0] + y2[0] + index1 = 0 + index2 = 0 + index = 0 + while (index1+1 < len(y1)) and (index2+1 < len(y2)): + index += 1 + # print(index1+1, x1[index1+1], y1[index1+1], x_new[index]) + if x1[index1+1] < x2[index2+1]: + index1 += 1 + x_new[index] = x1[index1] + elif x1[index1+1] > x2[index2+1]: + index2 += 1 + x_new[index] = x2[index2] + else: # x1[index1+1] == x2[index2+1]: + index1 += 1 + index2 += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + y2[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < len(y1): + x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] + y_new[index+1:index+1+len(y1)-index1-1] = y1[index1+1:] + y2[-1] + index += len(x1)-index1-2 + elif index2+1 < len(y2): + x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] + y_new[index+1:index+1+len(y2)-index2-1] = y2[index2+1:] + y1[-1] + index += len(x2)-index2-2 + else: # both arrays reached the end simultaneously + # only the last x-value missing + 
x_new[index+1] = x1[-1] + # the last value is again the end of the interval + # x_new[index+1] = x1[-1] + # only use the data that was actually filled + + return x_new[:index+2], y_new[:index+1] + + +############################################################ +# add_piece_lin_const_python +############################################################ +def add_piece_wise_lin_python(x1, y11, y12, x2, y21, y22): + x_new = np.empty(len(x1) + len(x2)) + y1_new = np.empty(len(x_new)-1) + y2_new = np.empty_like(y1_new) + x_new[0] = x1[0] + y1_new[0] = y11[0] + y21[0] + index1 = 0 # index for self + index2 = 0 # index for f + index = 0 # index for new + while (index1+1 < len(y11)) and (index2+1 < len(y21)): + # print(index1+1, x1[index1+1], self.y[index1+1], x_new[index]) + if x1[index1+1] < x2[index2+1]: + # first compute the end value of the previous interval + # linear interpolation of the interval + y = y21[index2] + (y22[index2]-y21[index2]) * \ + (x1[index1+1]-x2[index2]) / (x2[index2+1]-x2[index2]) + y2_new[index] = y12[index1] + y + index1 += 1 + index += 1 + x_new[index] = x1[index1] + # and the starting value for the next interval + y1_new[index] = y11[index1] + y + elif x1[index1+1] > x2[index2+1]: + # first compute the end value of the previous interval + # linear interpolation of the interval + y = y11[index1] + (y12[index1]-y11[index1]) * \ + (x2[index2+1]-x1[index1]) / \ + (x1[index1+1]-x1[index1]) + y2_new[index] = y22[index2] + y + index2 += 1 + index += 1 + x_new[index] = x2[index2] + # and the starting value for the next interval + y1_new[index] = y21[index2] + y + else: # x1[index1+1] == x2[index2+1]: + y2_new[index] = y12[index1] + y22[index2] + index1 += 1 + index2 += 1 + index += 1 + x_new[index] = x1[index1] + y1_new[index] = y11[index1] + y21[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < len(y11): + # compute the linear interpolations values + y = y21[index2] + (y22[index2]-y21[index2]) * \ + (x1[index1+1:-1]-x2[index2]) / (x2[index2+1]-x2[index2]) + x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] + y1_new[index+1:index+1+len(y11)-index1-1] = y11[index1+1:]+y + y2_new[index:index+len(y12)-index1-1] = y12[index1:-1] + y + index += len(x1)-index1-2 + elif index2+1 < len(y21): + # compute the linear interpolations values + y = y11[index1] + (y12[index1]-y11[index1]) * \ + (x2[index2+1:-1]-x1[index1]) / \ + (x1[index1+1]-x1[index1]) + x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] + y1_new[index+1:index+1+len(y21)-index2-1] = y21[index2+1:] + y + y2_new[index:index+len(y22)-index2-1] = y22[index2:-1] + y + index += len(x2)-index2-2 + else: # both arrays reached the end simultaneously + # only the last x-value missing + x_new[index+1] = x1[-1] + # finally, the end value for the last interval + y2_new[index] = y12[-1]+y22[-1] + # only use the data that was actually filled + return x_new[:index+2], y1_new[:index+1], y2_new[:index+1] + + +############################################################ +# add_discrete_function_python +############################################################ +def add_discrete_function_python(x1, y1, mp1, x2, y2, mp2): + + x_new = np.empty(len(x1) + len(x2)) + y_new = np.empty_like(x_new) + mp_new = np.empty_like(x_new) + x_new[0] = x1[0] + index1 = 0 + index2 = 0 + index = 0 + while (index1+1 < len(y1)) and (index2+1 < len(y2)): + if x1[index1+1] < x2[index2+1]: + index1 += 1 + index += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + mp_new[index] = mp1[index1] + elif 
x1[index1+1] > x2[index2+1]: + index2 += 1 + index += 1 + x_new[index] = x2[index2] + y_new[index] = y2[index2] + mp_new[index] = mp2[index2] + else: # x1[index1+1] == x2[index2+1] + index1 += 1 + index2 += 1 + index += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + y2[index2] + mp_new[index] = mp1[index1] + mp2[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < len(y1): + x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] + y_new[index+1:index+1+len(x1)-index1-1] = y1[index1+1:] + mp_new[index+1:index+1+len(x1)-index1-1] = mp1[index1+1:] + index += len(x1)-index1-1 + elif index2+1 < len(y2): + x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] + y_new[index+1:index+1+len(x2)-index2-1] = y2[index2+1:] + mp_new[index+1:index+1+len(x2)-index2-1] = mp2[index2+1:] + index += len(x2)-index2-1 + # else: # both arrays reached the end simultaneously + # x_new[index+1] = x1[-1] + # y_new[index+1] = y1[-1] + y2[-1] + # mp_new[index+1] = mp1[-1] + mp2[-1] + + y_new[0] = y_new[1] + mp_new[0] = mp_new[1] + + # the last value is again the end of the interval + # only use the data that was actually filled + return x_new[:index+1], y_new[:index+1], mp_new[:index+1] + diff --git a/pyspike/cython_add.pyx b/pyspike/cython_add.pyx deleted file mode 100644 index ac64005..0000000 --- a/pyspike/cython_add.pyx +++ /dev/null @@ -1,235 +0,0 @@ -#cython: boundscheck=False -#cython: wraparound=False -#cython: cdivision=True - -""" -cython_add.pyx - -cython implementation of the add function for piece-wise const and -piece-wise linear functions - -Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects -improves the performance of spike_distance by a factor of 10! - -Copyright 2014, Mario Mulansky - -Distributed under the BSD License - -""" - -""" -To test whether things can be optimized: remove all yellow stuff -in the html output:: - - cython -a cython_add.pyx - -which gives:: - - cython_add.html - -""" - -import numpy as np -cimport numpy as np - -from libc.math cimport fabs - -DTYPE = np.float -ctypedef np.float_t DTYPE_t - - -############################################################ -# add_piece_wise_const_cython -############################################################ -def add_piece_wise_const_cython(double[:] x1, double[:] y1, - double[:] x2, double[:] y2): - - cdef int N1 = len(x1) - cdef int N2 = len(x2) - cdef double[:] x_new = np.empty(N1+N2) - cdef double[:] y_new = np.empty(N1+N2-1) - cdef int index1 = 0 - cdef int index2 = 0 - cdef int index = 0 - cdef int i - with nogil: # release the interpreter lock to allow multi-threading - x_new[0] = x1[0] - y_new[0] = y1[0] + y2[0] - while (index1+1 < N1-1) and (index2+1 < N2-1): - index += 1 - # print(index1+1, x1[index1+1], y1[index1+1], x_new[index]) - if x1[index1+1] < x2[index2+1]: - index1 += 1 - x_new[index] = x1[index1] - elif x1[index1+1] > x2[index2+1]: - index2 += 1 - x_new[index] = x2[index2] - else: # x1[index1+1] == x2[index2+1]: - index1 += 1 - index2 += 1 - x_new[index] = x1[index1] - y_new[index] = y1[index1] + y2[index2] - # one array reached the end -> copy the contents of the other to the end - if index1+1 < N1-1: - x_new[index+1:index+1+N1-index1-1] = x1[index1+1:] - for i in xrange(N1-index1-2): - y_new[index+1+i] = y1[index1+1+i] + y2[N2-2] - index += N1-index1-2 - elif index2+1 < N2-1: - x_new[index+1:index+1+N2-index2-1] = x2[index2+1:] - for i in xrange(N2-index2-2): - y_new[index+1+i] = y2[index2+1+i] + y1[N1-2] - index += N2-index2-2 - 
else: # both arrays reached the end simultaneously - # only the last x-value missing - x_new[index+1] = x1[N1-1] - # the last value is again the end of the interval - # x_new[index+1] = x1[-1] - # only use the data that was actually filled - x1 = x_new[:index+2] - y1 = y_new[:index+1] - # end nogil - return np.array(x_new[:index+2]), np.array(y_new[:index+1]) - - -############################################################ -# add_piece_wise_lin_cython -############################################################ -def add_piece_wise_lin_cython(double[:] x1, double[:] y11, double[:] y12, - double[:] x2, double[:] y21, double[:] y22): - cdef int N1 = len(x1) - cdef int N2 = len(x2) - cdef double[:] x_new = np.empty(N1+N2) - cdef double[:] y1_new = np.empty(N1+N2-1) - cdef double[:] y2_new = np.empty_like(y1_new) - cdef int index1 = 0 # index for self - cdef int index2 = 0 # index for f - cdef int index = 0 # index for new - cdef int i - cdef double y - with nogil: # release the interpreter lock to allow multi-threading - x_new[0] = x1[0] - y1_new[0] = y11[0] + y21[0] - while (index1+1 < N1-1) and (index2+1 < N2-1): - # print(index1+1, x1[index1+1], self.y[index1+1], x_new[index]) - if x1[index1+1] < x2[index2+1]: - # first compute the end value of the previous interval - # linear interpolation of the interval - y = y21[index2] + (y22[index2]-y21[index2]) * \ - (x1[index1+1]-x2[index2]) / (x2[index2+1]-x2[index2]) - y2_new[index] = y12[index1] + y - index1 += 1 - index += 1 - x_new[index] = x1[index1] - # and the starting value for the next interval - y1_new[index] = y11[index1] + y - elif x1[index1+1] > x2[index2+1]: - # first compute the end value of the previous interval - # linear interpolation of the interval - y = y11[index1] + (y12[index1]-y11[index1]) * \ - (x2[index2+1]-x1[index1]) / \ - (x1[index1+1]-x1[index1]) - y2_new[index] = y22[index2] + y - index2 += 1 - index += 1 - x_new[index] = x2[index2] - # and the starting value for the next interval - y1_new[index] = y21[index2] + y - else: # x1[index1+1] == x2[index2+1]: - y2_new[index] = y12[index1] + y22[index2] - index1 += 1 - index2 += 1 - index += 1 - x_new[index] = x1[index1] - y1_new[index] = y11[index1] + y21[index2] - # one array reached the end -> copy the contents of the other to the end - if index1+1 < N1-1: - x_new[index+1:index+1+N1-index1-1] = x1[index1+1:] - for i in xrange(N1-index1-2): - # compute the linear interpolations value - y = y21[index2] + (y22[index2]-y21[index2]) * \ - (x1[index1+1+i]-x2[index2]) / (x2[index2+1]-x2[index2]) - y1_new[index+1+i] = y11[index1+1+i] + y - y2_new[index+i] = y12[index1+i] + y - index += N1-index1-2 - elif index2+1 < N2-1: - x_new[index+1:index+1+N2-index2-1] = x2[index2+1:] - # compute the linear interpolations values - for i in xrange(N2-index2-2): - y = y11[index1] + (y12[index1]-y11[index1]) * \ - (x2[index2+1+i]-x1[index1]) / \ - (x1[index1+1]-x1[index1]) - y1_new[index+1+i] = y21[index2+1+i] + y - y2_new[index+i] = y22[index2+i] + y - index += N2-index2-2 - else: # both arrays reached the end simultaneously - # only the last x-value missing - x_new[index+1] = x1[N1-1] - # finally, the end value for the last interval - y2_new[index] = y12[N1-2]+y22[N2-2] - # only use the data that was actually filled - # end nogil - return (np.array(x_new[:index+2]), - np.array(y1_new[:index+1]), - np.array(y2_new[:index+1])) - - -############################################################ -# add_discrete_function_cython -############################################################ -def 
add_discrete_function_cython(double[:] x1, double[:] y1, double[:] mp1, - double[:] x2, double[:] y2, double[:] mp2): - - cdef double[:] x_new = np.empty(len(x1) + len(x2)) - cdef double[:] y_new = np.empty_like(x_new) - cdef double[:] mp_new = np.empty_like(x_new) - cdef int index1 = 0 - cdef int index2 = 0 - cdef int index = 0 - cdef int N1 = len(y1) - cdef int N2 = len(y2) - x_new[0] = x1[0] - while (index1+1 < N1) and (index2+1 < N2): - if x1[index1+1] < x2[index2+1]: - index1 += 1 - index += 1 - x_new[index] = x1[index1] - y_new[index] = y1[index1] - mp_new[index] = mp1[index1] - elif x1[index1+1] > x2[index2+1]: - index2 += 1 - index += 1 - x_new[index] = x2[index2] - y_new[index] = y2[index2] - mp_new[index] = mp2[index2] - else: # x1[index1+1] == x2[index2+1] - index1 += 1 - index2 += 1 - index += 1 - x_new[index] = x1[index1] - y_new[index] = y1[index1] + y2[index2] - mp_new[index] = mp1[index1] + mp2[index2] - # one array reached the end -> copy the contents of the other to the end - if index1+1 < len(y1): - x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] - y_new[index+1:index+1+len(x1)-index1-1] = y1[index1+1:] - mp_new[index+1:index+1+len(x1)-index1-1] = mp1[index1+1:] - index += len(x1)-index1-1 - elif index2+1 < len(y2): - x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] - y_new[index+1:index+1+len(x2)-index2-1] = y2[index2+1:] - mp_new[index+1:index+1+len(x2)-index2-1] = mp2[index2+1:] - index += len(x2)-index2-1 - # else: # both arrays reached the end simultaneously - # x_new[index+1] = x1[-1] - # y_new[index+1] = y1[-1] + y2[-1] - # mp_new[index+1] = mp1[-1] + mp2[-1] - - y_new[0] = y_new[1] - mp_new[0] = mp_new[1] - - # the last value is again the end of the interval - # only use the data that was actually filled - return (np.array(x_new[:index+1]), - np.array(y_new[:index+1]), - np.array(mp_new[:index+1])) diff --git a/pyspike/cython_distance.pyx b/pyspike/cython_distance.pyx deleted file mode 100644 index 489aab9..0000000 --- a/pyspike/cython_distance.pyx +++ /dev/null @@ -1,312 +0,0 @@ -#cython: boundscheck=False -#cython: wraparound=False -#cython: cdivision=True - -""" -cython_distances.pyx - -cython implementation of the isi- and spike-distance - -Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects -improves the performance of spike_distance by a factor of 10! 
- -Copyright 2014, Mario Mulansky - -Distributed under the BSD License - -""" - -""" -To test whether things can be optimized: remove all yellow stuff -in the html output:: - - cython -a cython_distance.pyx - -which gives:: - - cython_distance.html - -""" - -import numpy as np -cimport numpy as np - -from libc.math cimport fabs -from libc.math cimport fmax -from libc.math cimport fmin - -DTYPE = np.float -ctypedef np.float_t DTYPE_t - - -############################################################ -# isi_distance_cython -############################################################ -def isi_distance_cython(double[:] s1, - double[:] s2): - - cdef double[:] spike_events - cdef double[:] isi_values - cdef int index1, index2, index - cdef int N1, N2 - cdef double nu1, nu2 - N1 = len(s1)-1 - N2 = len(s2)-1 - - nu1 = s1[1]-s1[0] - nu2 = s2[1]-s2[0] - spike_events = np.empty(N1+N2) - spike_events[0] = s1[0] - # the values have one entry less - the number of intervals between events - isi_values = np.empty(N1+N2-1) - - with nogil: # release the interpreter to allow multithreading - isi_values[0] = fabs(nu1-nu2)/fmax(nu1, nu2) - index1 = 0 - index2 = 0 - index = 1 - while True: - # check which spike is next - from s1 or s2 - if s1[index1+1] < s2[index2+1]: - index1 += 1 - # break condition relies on existence of spikes at T_end - if index1 >= N1: - break - spike_events[index] = s1[index1] - nu1 = s1[index1+1]-s1[index1] - elif s1[index1+1] > s2[index2+1]: - index2 += 1 - if index2 >= N2: - break - spike_events[index] = s2[index2] - nu2 = s2[index2+1]-s2[index2] - else: # s1[index1+1] == s2[index2+1] - index1 += 1 - index2 += 1 - if (index1 >= N1) or (index2 >= N2): - break - spike_events[index] = s1[index1] - nu1 = s1[index1+1]-s1[index1] - nu2 = s2[index2+1]-s2[index2] - # compute the corresponding isi-distance - isi_values[index] = fabs(nu1 - nu2) / fmax(nu1, nu2) - index += 1 - # the last event is the interval end - spike_events[index] = s1[N1] - # end nogil - - return spike_events[:index+1], isi_values[:index] - - -############################################################ -# get_min_dist_cython -############################################################ -cdef inline double get_min_dist_cython(double spike_time, - double[:] spike_train, - # use memory view to ensure inlining - # np.ndarray[DTYPE_t,ndim=1] spike_train, - int N, - int start_index=0) nogil: - """ Returns the minimal distance |spike_time - spike_train[i]| - with i>=start_index. 
- """ - cdef double d, d_temp - d = fabs(spike_time - spike_train[start_index]) - start_index += 1 - while start_index < N: - d_temp = fabs(spike_time - spike_train[start_index]) - if d_temp > d: - break - else: - d = d_temp - start_index += 1 - return d - - -############################################################ -# isi_avrg_cython -############################################################ -cdef inline double isi_avrg_cython(double isi1, double isi2) nogil: - return 0.5*(isi1+isi2)*(isi1+isi2) - # alternative definition to obtain ~ 0.5 for Poisson spikes - # return 0.5*(isi1*isi1+isi2*isi2) - - -############################################################ -# spike_distance_cython -############################################################ -def spike_distance_cython(double[:] t1, - double[:] t2): - - cdef double[:] spike_events - cdef double[:] y_starts - cdef double[:] y_ends - - cdef int N1, N2, index1, index2, index - cdef double dt_p1, dt_p2, dt_f1, dt_f2, isi1, isi2, s1, s2 - - N1 = len(t1) - N2 = len(t2) - - spike_events = np.empty(N1+N2-2) - spike_events[0] = t1[0] - y_starts = np.empty(len(spike_events)-1) - y_ends = np.empty(len(spike_events)-1) - - with nogil: # release the interpreter to allow multithreading - index1 = 0 - index2 = 0 - index = 1 - dt_p1 = 0.0 - dt_f1 = get_min_dist_cython(t1[1], t2, N2, 0) - dt_p2 = 0.0 - dt_f2 = get_min_dist_cython(t2[1], t1, N1, 0) - isi1 = max(t1[1]-t1[0], t1[2]-t1[1]) - isi2 = max(t2[1]-t2[0], t2[2]-t2[1]) - s1 = dt_f1*(t1[1]-t1[0])/isi1 - s2 = dt_f2*(t2[1]-t2[0])/isi2 - y_starts[0] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) - while True: - # print(index, index1, index2) - if t1[index1+1] < t2[index2+1]: - index1 += 1 - # break condition relies on existence of spikes at T_end - if index1+1 >= N1: - break - spike_events[index] = t1[index1] - # first calculate the previous interval end value - dt_p1 = dt_f1 # the previous time now was the following time before - s1 = dt_p1 - s2 = (dt_p2*(t2[index2+1]-t1[index1]) + - dt_f2*(t1[index1]-t2[index2])) / isi2 - y_ends[index-1] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) - # now the next interval start value - dt_f1 = get_min_dist_cython(t1[index1+1], t2, N2, index2) - isi1 = t1[index1+1]-t1[index1] - # s2 is the same as above, thus we can compute y2 immediately - y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) - elif t1[index1+1] > t2[index2+1]: - index2 += 1 - if index2+1 >= N2: - break - spike_events[index] = t2[index2] - # first calculate the previous interval end value - dt_p2 = dt_f2 # the previous time now was the following time before - s1 = (dt_p1*(t1[index1+1]-t2[index2]) + - dt_f1*(t2[index2]-t1[index1])) / isi1 - s2 = dt_p2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) - # now the next interval start value - dt_f2 = get_min_dist_cython(t2[index2+1], t1, N1, index1) - #s2 = dt_f2 - isi2 = t2[index2+1]-t2[index2] - # s2 is the same as above, thus we can compute y2 immediately - y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) - else: # t1[index1+1] == t2[index2+1] - generate only one event - index1 += 1 - index2 += 1 - if (index1+1 >= N1) or (index2+1 >= N2): - break - spike_events[index] = t1[index1] - y_ends[index-1] = 0.0 - y_starts[index] = 0.0 - dt_p1 = 0.0 - dt_p2 = 0.0 - dt_f1 = get_min_dist_cython(t1[index1+1], t2, N2, index2) - dt_f2 = get_min_dist_cython(t2[index2+1], t1, N1, index1) - isi1 = t1[index1+1]-t1[index1] - isi2 = t2[index2+1]-t2[index2] - index += 1 - # the last event is the interval 
end - spike_events[index] = t1[N1-1] - # the ending value of the last interval - isi1 = max(t1[N1-1]-t1[N1-2], t1[N1-2]-t1[N1-3]) - isi2 = max(t2[N2-1]-t2[N2-2], t2[N2-2]-t2[N2-3]) - s1 = dt_p1*(t1[N1-1]-t1[N1-2])/isi1 - s2 = dt_p2*(t2[N2-1]-t2[N2-2])/isi2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) - # end nogil - - # use only the data added above - # could be less than original length due to equal spike times - return spike_events[:index+1], y_starts[:index], y_ends[:index] - - - -############################################################ -# coincidence_python -############################################################ -cdef inline double get_tau(double[:] spikes1, double[:] spikes2, int i, int j): - cdef double m = 1E100 # some huge number - cdef int N1 = len(spikes1)-2 - cdef int N2 = len(spikes2)-2 - if i < N1: - m = fmin(m, spikes1[i+1]-spikes1[i]) - if j < N2: - m = fmin(m, spikes2[j+1]-spikes2[j]) - if i > 1: - m = fmin(m, spikes1[i]-spikes1[i-1]) - if j > 1: - m = fmin(m, spikes2[j]-spikes2[j-1]) - return 0.5*m - - -############################################################ -# coincidence_cython -############################################################ -def coincidence_cython(double[:] spikes1, double[:] spikes2): - - cdef int N1 = len(spikes1) - cdef int N2 = len(spikes2) - cdef int i = 0 - cdef int j = 0 - cdef int n = 0 - cdef double[:] st = np.zeros(N1 + N2 - 2) # spike times - cdef double[:] c = np.zeros(N1 + N2 - 2) # coincidences - cdef double[:] mp = np.ones(N1 + N2 - 2) # multiplicity - cdef double tau - while n < N1 + N2 - 2: - if spikes1[i+1] < spikes2[j+1]: - i += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j) - st[n] = spikes1[i] - if j > 0 and spikes1[i]-spikes2[j] < tau: - # coincidence between the current spike and the previous spike - # both get marked with 1 - c[n] = 1 - c[n-1] = 1 - elif spikes1[i+1] > spikes2[j+1]: - j += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j) - st[n] = spikes2[j] - if i > 0 and spikes2[j]-spikes1[i] < tau: - # coincidence between the current spike and the previous spike - # both get marked with 1 - c[n] = 1 - c[n-1] = 1 - else: # spikes1[i+1] = spikes2[j+1] - # advance in both spike trains - j += 1 - i += 1 - if i == N1-1 or j == N2-1: - break - n += 1 - # add only one event, but with coincidence 2 and multiplicity 2 - st[n] = spikes1[i] - c[n] = 2 - mp[n] = 2 - - st = st[:n+2] - c = c[:n+2] - mp = mp[:n+2] - - st[0] = spikes1[0] - st[len(st)-1] = spikes1[len(spikes1)-1] - c[0] = c[1] - c[len(c)-1] = c[len(c)-2] - mp[0] = mp[1] - mp[len(mp)-1] = mp[len(mp)-2] - - return st, c, mp diff --git a/pyspike/isi_distance.py b/pyspike/isi_distance.py index 745d280..c2ef8e8 100644 --- a/pyspike/isi_distance.py +++ b/pyspike/isi_distance.py @@ -35,13 +35,15 @@ def isi_profile(spikes1, spikes2): # load cython implementation try: - from cython_distance import isi_distance_cython as isi_distance_impl + from cython.cython_distance import isi_distance_cython \ + as isi_distance_impl except ImportError: print("Warning: isi_distance_cython not found. 
Make sure that PySpike \ is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from python_backend import isi_distance_python as isi_distance_impl + from cython.python_backend import isi_distance_python \ + as isi_distance_impl times, values = isi_distance_impl(spikes1, spikes2) return PieceWiseConstFunc(times, values) diff --git a/pyspike/python_backend.py b/pyspike/python_backend.py deleted file mode 100644 index 481daf9..0000000 --- a/pyspike/python_backend.py +++ /dev/null @@ -1,485 +0,0 @@ -""" python_backend.py - -Collection of python functions that can be used instead of the cython -implementation. - -Copyright 2014, Mario Mulansky - -Distributed under the BSD License - -""" - -import numpy as np - - -############################################################ -# isi_distance_python -############################################################ -def isi_distance_python(s1, s2): - """ Plain Python implementation of the isi distance. - """ - # compute the interspike interval - nu1 = s1[1:] - s1[:-1] - nu2 = s2[1:] - s2[:-1] - - # compute the isi-distance - spike_events = np.empty(len(nu1) + len(nu2)) - spike_events[0] = s1[0] - # the values have one entry less - the number of intervals between events - isi_values = np.empty(len(spike_events) - 1) - # add the distance of the first events - # isi_values[0] = nu1[0]/nu2[0] - 1.0 if nu1[0] <= nu2[0] \ - # else 1.0 - nu2[0]/nu1[0] - isi_values[0] = abs(nu1[0] - nu2[0]) / max(nu1[0], nu2[0]) - index1 = 0 - index2 = 0 - index = 1 - while True: - # check which spike is next - from s1 or s2 - if s1[index1+1] < s2[index2+1]: - index1 += 1 - # break condition relies on existence of spikes at T_end - if index1 >= len(nu1): - break - spike_events[index] = s1[index1] - elif s1[index1+1] > s2[index2+1]: - index2 += 1 - if index2 >= len(nu2): - break - spike_events[index] = s2[index2] - else: # s1[index1 + 1] == s2[index2 + 1] - index1 += 1 - index2 += 1 - if (index1 >= len(nu1)) or (index2 >= len(nu2)): - break - spike_events[index] = s1[index1] - # compute the corresponding isi-distance - isi_values[index] = abs(nu1[index1] - nu2[index2]) / \ - max(nu1[index1], nu2[index2]) - index += 1 - # the last event is the interval end - spike_events[index] = s1[-1] - # use only the data added above - # could be less than original length due to equal spike times - return spike_events[:index + 1], isi_values[:index] - - -############################################################ -# get_min_dist -############################################################ -def get_min_dist(spike_time, spike_train, start_index=0): - """ Returns the minimal distance |spike_time - spike_train[i]| - with i>=start_index. - """ - d = abs(spike_time - spike_train[start_index]) - start_index += 1 - while start_index < len(spike_train): - d_temp = abs(spike_time - spike_train[start_index]) - if d_temp > d: - break - else: - d = d_temp - start_index += 1 - return d - - -############################################################ -# spike_distance_python -############################################################ -def spike_distance_python(spikes1, spikes2): - """ Computes the instantaneous spike-distance S_spike (t) of the two given - spike trains. The spike trains are expected to have auxiliary spikes at the - beginning and end of the interval. Use the function add_auxiliary_spikes to - add those spikes to the spike train. - Args: - - spikes1, spikes2: ordered arrays of spike times with auxiliary spikes. 
- Returns: - - PieceWiseLinFunc describing the spike-distance. - """ - # check for auxiliary spikes - first and last spikes should be identical - assert spikes1[0] == spikes2[0], \ - "Given spike trains seems not to have auxiliary spikes!" - assert spikes1[-1] == spikes2[-1], \ - "Given spike trains seems not to have auxiliary spikes!" - # shorter variables - t1 = spikes1 - t2 = spikes2 - - spike_events = np.empty(len(t1) + len(t2) - 2) - spike_events[0] = t1[0] - y_starts = np.empty(len(spike_events) - 1) - y_ends = np.empty(len(spike_events) - 1) - - index1 = 0 - index2 = 0 - index = 1 - dt_p1 = 0.0 - dt_f1 = get_min_dist(t1[1], t2, 0) - dt_p2 = 0.0 - dt_f2 = get_min_dist(t2[1], t1, 0) - isi1 = max(t1[1]-t1[0], t1[2]-t1[1]) - isi2 = max(t2[1]-t2[0], t2[2]-t2[1]) - s1 = dt_f1*(t1[1]-t1[0])/isi1 - s2 = dt_f2*(t2[1]-t2[0])/isi2 - y_starts[0] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) - while True: - # print(index, index1, index2) - if t1[index1+1] < t2[index2+1]: - index1 += 1 - # break condition relies on existence of spikes at T_end - if index1+1 >= len(t1): - break - spike_events[index] = t1[index1] - # first calculate the previous interval end value - dt_p1 = dt_f1 # the previous time was the following time before - s1 = dt_p1 - s2 = (dt_p2*(t2[index2+1]-t1[index1]) + - dt_f2*(t1[index1]-t2[index2])) / isi2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) - # now the next interval start value - dt_f1 = get_min_dist(t1[index1+1], t2, index2) - isi1 = t1[index1+1]-t1[index1] - # s2 is the same as above, thus we can compute y2 immediately - y_starts[index] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) - elif t1[index1+1] > t2[index2+1]: - index2 += 1 - if index2+1 >= len(t2): - break - spike_events[index] = t2[index2] - # first calculate the previous interval end value - dt_p2 = dt_f2 # the previous time was the following time before - s1 = (dt_p1*(t1[index1+1]-t2[index2]) + - dt_f1*(t2[index2]-t1[index1])) / isi1 - s2 = dt_p2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) - # now the next interval start value - dt_f2 = get_min_dist(t2[index2+1], t1, index1) - #s2 = dt_f2 - isi2 = t2[index2+1]-t2[index2] - # s2 is the same as above, thus we can compute y2 immediately - y_starts[index] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) - else: # t1[index1+1] == t2[index2+1] - generate only one event - index1 += 1 - index2 += 1 - if (index1+1 >= len(t1)) or (index2+1 >= len(t2)): - break - assert dt_f2 == 0.0 - assert dt_f1 == 0.0 - spike_events[index] = t1[index1] - y_ends[index-1] = 0.0 - y_starts[index] = 0.0 - dt_p1 = 0.0 - dt_p2 = 0.0 - dt_f1 = get_min_dist(t1[index1+1], t2, index2) - dt_f2 = get_min_dist(t2[index2+1], t1, index1) - isi1 = t1[index1+1]-t1[index1] - isi2 = t2[index2+1]-t2[index2] - index += 1 - # the last event is the interval end - spike_events[index] = t1[-1] - # the ending value of the last interval - isi1 = max(t1[-1]-t1[-2], t1[-2]-t1[-3]) - isi2 = max(t2[-1]-t2[-2], t2[-2]-t2[-3]) - s1 = dt_p1*(t1[-1]-t1[-2])/isi1 - s2 = dt_p2*(t2[-1]-t2[-2])/isi2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) - # use only the data added above - # could be less than original length due to equal spike times - return spike_events[:index+1], y_starts[:index], y_ends[:index] - - -############################################################ -# cumulative_sync_python -############################################################ -def cumulative_sync_python(spikes1, spikes2): - - def get_tau(spikes1, spikes2, i, j): - return 0.5*min([spikes1[i]-spikes1[i-1], 
spikes1[i+1]-spikes1[i], - spikes2[j]-spikes2[j-1], spikes2[j+1]-spikes2[j]]) - N1 = len(spikes1) - N2 = len(spikes2) - i = 0 - j = 0 - n = 0 - st = np.zeros(N1 + N2 - 2) - c = np.zeros(N1 + N2 - 3) - c[0] = 0 - st[0] = 0 - while n < N1 + N2: - if spikes1[i+1] < spikes2[j+1]: - i += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j) - st[n] = spikes1[i] - if spikes1[i]-spikes2[j] > tau: - c[n] = c[n-1] - else: - c[n] = c[n-1]+1 - elif spikes1[i+1] > spikes2[j+1]: - j += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j) - st[n] = spikes2[j] - if spikes2[j]-spikes1[i] > tau: - c[n] = c[n-1] - else: - c[n] = c[n-1]+1 - else: # spikes1[i+1] = spikes2[j+1] - j += 1 - i += 1 - if i == N1-1 or j == N2-1: - break - n += 1 - st[n] = spikes1[i] - c[n] = c[n-1] - n += 1 - st[n] = spikes1[i] - c[n] = c[n-1]+1 - c[0] = 0 - st[0] = spikes1[0] - st[-1] = spikes1[-1] - - return st, c - - -############################################################ -# coincidence_python -############################################################ -def coincidence_python(spikes1, spikes2): - - def get_tau(spikes1, spikes2, i, j): - m = 1E100 # some huge number - if i < len(spikes1)-2: - m = min(m, spikes1[i+1]-spikes1[i]) - if j < len(spikes2)-2: - m = min(m, spikes2[j+1]-spikes2[j]) - if i > 1: - m = min(m, spikes1[i]-spikes1[i-1]) - if j > 1: - m = min(m, spikes2[j]-spikes2[j-1]) - return 0.5*m - N1 = len(spikes1) - N2 = len(spikes2) - i = 0 - j = 0 - n = 0 - st = np.zeros(N1 + N2 - 2) # spike times - c = np.zeros(N1 + N2 - 2) # coincidences - mp = np.ones(N1 + N2 - 2) # multiplicity - while n < N1 + N2 - 2: - if spikes1[i+1] < spikes2[j+1]: - i += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j) - st[n] = spikes1[i] - if j > 0 and spikes1[i]-spikes2[j] < tau: - # coincidence between the current spike and the previous spike - # both get marked with 1 - c[n] = 1 - c[n-1] = 1 - elif spikes1[i+1] > spikes2[j+1]: - j += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j) - st[n] = spikes2[j] - if i > 0 and spikes2[j]-spikes1[i] < tau: - # coincidence between the current spike and the previous spike - # both get marked with 1 - c[n] = 1 - c[n-1] = 1 - else: # spikes1[i+1] = spikes2[j+1] - # advance in both spike trains - j += 1 - i += 1 - if i == N1-1 or j == N2-1: - break - n += 1 - # add only one event, but with coincidence 2 and multiplicity 2 - st[n] = spikes1[i] - c[n] = 2 - mp[n] = 2 - - st = st[:n+2] - c = c[:n+2] - mp = mp[:n+2] - - st[0] = spikes1[0] - st[-1] = spikes1[-1] - c[0] = c[1] - c[-1] = c[-2] - mp[0] = mp[1] - mp[-1] = mp[-2] - - return st, c, mp - - -############################################################ -# add_piece_wise_const_python -############################################################ -def add_piece_wise_const_python(x1, y1, x2, y2): - x_new = np.empty(len(x1) + len(x2)) - y_new = np.empty(len(x_new)-1) - x_new[0] = x1[0] - y_new[0] = y1[0] + y2[0] - index1 = 0 - index2 = 0 - index = 0 - while (index1+1 < len(y1)) and (index2+1 < len(y2)): - index += 1 - # print(index1+1, x1[index1+1], y1[index1+1], x_new[index]) - if x1[index1+1] < x2[index2+1]: - index1 += 1 - x_new[index] = x1[index1] - elif x1[index1+1] > x2[index2+1]: - index2 += 1 - x_new[index] = x2[index2] - else: # x1[index1+1] == x2[index2+1]: - index1 += 1 - index2 += 1 - x_new[index] = x1[index1] - y_new[index] = y1[index1] + y2[index2] - # one array reached the end -> copy the contents of the other to the end - if index1+1 < len(y1): - x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] - 
y_new[index+1:index+1+len(y1)-index1-1] = y1[index1+1:] + y2[-1] - index += len(x1)-index1-2 - elif index2+1 < len(y2): - x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] - y_new[index+1:index+1+len(y2)-index2-1] = y2[index2+1:] + y1[-1] - index += len(x2)-index2-2 - else: # both arrays reached the end simultaneously - # only the last x-value missing - x_new[index+1] = x1[-1] - # the last value is again the end of the interval - # x_new[index+1] = x1[-1] - # only use the data that was actually filled - - return x_new[:index+2], y_new[:index+1] - - -############################################################ -# add_piece_lin_const_python -############################################################ -def add_piece_wise_lin_python(x1, y11, y12, x2, y21, y22): - x_new = np.empty(len(x1) + len(x2)) - y1_new = np.empty(len(x_new)-1) - y2_new = np.empty_like(y1_new) - x_new[0] = x1[0] - y1_new[0] = y11[0] + y21[0] - index1 = 0 # index for self - index2 = 0 # index for f - index = 0 # index for new - while (index1+1 < len(y11)) and (index2+1 < len(y21)): - # print(index1+1, x1[index1+1], self.y[index1+1], x_new[index]) - if x1[index1+1] < x2[index2+1]: - # first compute the end value of the previous interval - # linear interpolation of the interval - y = y21[index2] + (y22[index2]-y21[index2]) * \ - (x1[index1+1]-x2[index2]) / (x2[index2+1]-x2[index2]) - y2_new[index] = y12[index1] + y - index1 += 1 - index += 1 - x_new[index] = x1[index1] - # and the starting value for the next interval - y1_new[index] = y11[index1] + y - elif x1[index1+1] > x2[index2+1]: - # first compute the end value of the previous interval - # linear interpolation of the interval - y = y11[index1] + (y12[index1]-y11[index1]) * \ - (x2[index2+1]-x1[index1]) / \ - (x1[index1+1]-x1[index1]) - y2_new[index] = y22[index2] + y - index2 += 1 - index += 1 - x_new[index] = x2[index2] - # and the starting value for the next interval - y1_new[index] = y21[index2] + y - else: # x1[index1+1] == x2[index2+1]: - y2_new[index] = y12[index1] + y22[index2] - index1 += 1 - index2 += 1 - index += 1 - x_new[index] = x1[index1] - y1_new[index] = y11[index1] + y21[index2] - # one array reached the end -> copy the contents of the other to the end - if index1+1 < len(y11): - # compute the linear interpolations values - y = y21[index2] + (y22[index2]-y21[index2]) * \ - (x1[index1+1:-1]-x2[index2]) / (x2[index2+1]-x2[index2]) - x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] - y1_new[index+1:index+1+len(y11)-index1-1] = y11[index1+1:]+y - y2_new[index:index+len(y12)-index1-1] = y12[index1:-1] + y - index += len(x1)-index1-2 - elif index2+1 < len(y21): - # compute the linear interpolations values - y = y11[index1] + (y12[index1]-y11[index1]) * \ - (x2[index2+1:-1]-x1[index1]) / \ - (x1[index1+1]-x1[index1]) - x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] - y1_new[index+1:index+1+len(y21)-index2-1] = y21[index2+1:] + y - y2_new[index:index+len(y22)-index2-1] = y22[index2:-1] + y - index += len(x2)-index2-2 - else: # both arrays reached the end simultaneously - # only the last x-value missing - x_new[index+1] = x1[-1] - # finally, the end value for the last interval - y2_new[index] = y12[-1]+y22[-1] - # only use the data that was actually filled - return x_new[:index+2], y1_new[:index+1], y2_new[:index+1] - - -############################################################ -# add_discrete_function_python -############################################################ -def add_discrete_function_python(x1, y1, mp1, x2, y2, mp2): - - 
x_new = np.empty(len(x1) + len(x2)) - y_new = np.empty_like(x_new) - mp_new = np.empty_like(x_new) - x_new[0] = x1[0] - index1 = 0 - index2 = 0 - index = 0 - while (index1+1 < len(y1)) and (index2+1 < len(y2)): - if x1[index1+1] < x2[index2+1]: - index1 += 1 - index += 1 - x_new[index] = x1[index1] - y_new[index] = y1[index1] - mp_new[index] = mp1[index1] - elif x1[index1+1] > x2[index2+1]: - index2 += 1 - index += 1 - x_new[index] = x2[index2] - y_new[index] = y2[index2] - mp_new[index] = mp2[index2] - else: # x1[index1+1] == x2[index2+1] - index1 += 1 - index2 += 1 - index += 1 - x_new[index] = x1[index1] - y_new[index] = y1[index1] + y2[index2] - mp_new[index] = mp1[index1] + mp2[index2] - # one array reached the end -> copy the contents of the other to the end - if index1+1 < len(y1): - x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] - y_new[index+1:index+1+len(x1)-index1-1] = y1[index1+1:] - mp_new[index+1:index+1+len(x1)-index1-1] = mp1[index1+1:] - index += len(x1)-index1-1 - elif index2+1 < len(y2): - x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] - y_new[index+1:index+1+len(x2)-index2-1] = y2[index2+1:] - mp_new[index+1:index+1+len(x2)-index2-1] = mp2[index2+1:] - index += len(x2)-index2-1 - # else: # both arrays reached the end simultaneously - # x_new[index+1] = x1[-1] - # y_new[index+1] = y1[-1] + y2[-1] - # mp_new[index+1] = mp1[-1] + mp2[-1] - - y_new[0] = y_new[1] - mp_new[0] = mp_new[1] - - # the last value is again the end of the interval - # only use the data that was actually filled - return x_new[:index+1], y_new[:index+1], mp_new[:index+1] - diff --git a/pyspike/spike_distance.py b/pyspike/spike_distance.py index 2c989a4..f721c86 100644 --- a/pyspike/spike_distance.py +++ b/pyspike/spike_distance.py @@ -35,14 +35,15 @@ def spike_profile(spikes1, spikes2): # cython implementation try: - from cython_distance import spike_distance_cython \ + from cython.cython_distance import spike_distance_cython \ as spike_distance_impl except ImportError: print("Warning: spike_distance_cython not found. Make sure that \ PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from python_backend import spike_distance_python as spike_distance_impl + from cython.python_backend import spike_distance_python \ + as spike_distance_impl times, y_starts, y_ends = spike_distance_impl(spikes1, spikes2) return PieceWiseLinFunc(times, y_starts, y_ends) diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index bded8da..342bf69 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -33,14 +33,14 @@ def spike_sync_profile(spikes1, spikes2): # cython implementation try: - from cython_distance import coincidence_cython \ + from cython.cython_distance import coincidence_cython \ as coincidence_impl except ImportError: print("Warning: spike_distance_cython not found. 
Make sure that \ PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from python_backend import coincidence_python \ + from cython.python_backend import coincidence_python \ as coincidence_impl times, coincidences, multiplicity = coincidence_impl(spikes1, spikes2) diff --git a/setup.py b/setup.py index 9cab5de..289d521 100644 --- a/setup.py +++ b/setup.py @@ -21,8 +21,8 @@ except ImportError: else: use_cython = True -if os.path.isfile("pyspike/cython_add.c") and \ - os.path.isfile("pyspike/cython_distance.c"): +if os.path.isfile("pyspike/cython/cython_add.c") and \ + os.path.isfile("pyspike/cython/cython_distance.c"): use_c = True else: use_c = False @@ -32,14 +32,14 @@ ext_modules = [] if use_cython: # Cython is available, compile .pyx -> .c ext_modules += [ - Extension("pyspike.cython_add", ["pyspike/cython_add.pyx"]), - Extension("pyspike.cython_distance", ["pyspike/cython_distance.pyx"]), + Extension("pyspike.cython.cython_add", ["pyspike/cython/cython_add.pyx"]), + Extension("pyspike.cython.cython_distance", ["pyspike/cython/cython_distance.pyx"]), ] cmdclass.update({'build_ext': build_ext}) elif use_c: # c files are there, compile to binaries ext_modules += [ - Extension("pyspike.cython_add", ["pyspike/cython_add.c"]), - Extension("pyspike.cython_distance", ["pyspike/cython_distance.c"]), + Extension("pyspike.cython.cython_add", ["pyspike/cython/cython_add.c"]), + Extension("pyspike.cython.cython_distance", ["pyspike/cython/cython_distance.c"]), ] # neither cython nor c files available -> automatic fall-back to python backend @@ -78,7 +78,7 @@ train similarity', 'Programming Language :: Python :: 2.7', ], package_data={ - 'pyspike': ['cython_add.c', 'cython_distance.c'], + 'pyspike': ['cython/cython_add.c', 'cython/cython_distance.c'], 'test': ['Spike_testdata.txt'] } ) -- cgit v1.2.3 From 619ffd7105203938a26075c79a77d63960da9922 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Fri, 8 May 2015 12:29:47 +0200 Subject: renamed cython_distance module -> cython_profiles --- pyspike/cython/cython_distance.pyx | 423 ------------------------------------- pyspike/cython/cython_profiles.pyx | 423 +++++++++++++++++++++++++++++++++++++ pyspike/isi_distance.py | 10 +- pyspike/spike_distance.py | 16 +- pyspike/spike_sync.py | 15 +- setup.py | 8 +- 6 files changed, 447 insertions(+), 448 deletions(-) delete mode 100644 pyspike/cython/cython_distance.pyx create mode 100644 pyspike/cython/cython_profiles.pyx (limited to 'setup.py') diff --git a/pyspike/cython/cython_distance.pyx b/pyspike/cython/cython_distance.pyx deleted file mode 100644 index 6ee0181..0000000 --- a/pyspike/cython/cython_distance.pyx +++ /dev/null @@ -1,423 +0,0 @@ -#cython: boundscheck=False -#cython: wraparound=False -#cython: cdivision=True - -""" -cython_distances.pyx - -cython implementation of the isi- and spike-distance - -Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects -improves the performance of spike_distance by a factor of 10! 
- -Copyright 2014, Mario Mulansky - -Distributed under the BSD License - -""" - -""" -To test whether things can be optimized: remove all yellow stuff -in the html output:: - - cython -a cython_distance.pyx - -which gives:: - - cython_distance.html - -""" - -import numpy as np -cimport numpy as np - -from libc.math cimport fabs -from libc.math cimport fmax -from libc.math cimport fmin - -DTYPE = np.float -ctypedef np.float_t DTYPE_t - - -############################################################ -# isi_distance_cython -############################################################ -def isi_distance_cython(double[:] s1, double[:] s2, - double t_start, double t_end): - - cdef double[:] spike_events - cdef double[:] isi_values - cdef int index1, index2, index - cdef int N1, N2 - cdef double nu1, nu2 - N1 = len(s1) - N2 = len(s2) - - spike_events = np.empty(N1+N2+2) - # the values have one entry less as they are defined at the intervals - isi_values = np.empty(N1+N2+1) - - # first x-value of the profile - spike_events[0] = t_start - - # first interspike interval - check if a spike exists at the start time - if s1[0] > t_start: - # edge correction - nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) - index1 = -1 - else: - nu1 = s1[1]-s1[0] - index1 = 0 - - if s2[0] > t_start: - # edge correction - nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) - index2 = -1 - else: - nu2 = s2[1]-s2[0] - index2 = 0 - - isi_values[0] = fabs(nu1-nu2)/fmax(nu1, nu2) - index = 1 - - with nogil: # release the interpreter to allow multithreading - while index1+index2 < N1+N2-2: - # check which spike is next, only if there are spikes left in 1 - # next spike in 1 is earlier, or there are no spikes left in 2 - if (index1 < N1-1) and ((index2 == N2-1) or - (s1[index1+1] < s2[index2+1])): - index1 += 1 - spike_events[index] = s1[index1] - if index1 < N1-1: - nu1 = s1[index1+1]-s1[index1] - else: - # edge correction - nu1 = fmax(t_end-s1[index1], nu1) - elif (index2 < N2-1) and ((index1 == N1-1) or - (s1[index1+1] > s2[index2+1])): - index2 += 1 - spike_events[index] = s2[index2] - if index2 < N2-1: - nu2 = s2[index2+1]-s2[index2] - else: - # edge correction - nu2 = fmax(t_end-s2[index2], nu2) - else: # s1[index1+1] == s2[index2+1] - index1 += 1 - index2 += 1 - spike_events[index] = s1[index1] - if index1 < N1-1: - nu1 = s1[index1+1]-s1[index1] - else: - # edge correction - nu1 = fmax(t_end-s1[index1], nu1) - if index2 < N2-1: - nu2 = s2[index2+1]-s2[index2] - else: - # edge correction - nu2 = fmax(t_end-s2[index2], nu2) - # compute the corresponding isi-distance - isi_values[index] = fabs(nu1 - nu2) / fmax(nu1, nu2) - index += 1 - # the last event is the interval end - if spike_events[index-1] == t_end: - index -= 1 - else: - spike_events[index] = t_end - # end nogil - - return spike_events[:index+1], isi_values[:index] - - -############################################################ -# get_min_dist_cython -############################################################ -cdef inline double get_min_dist_cython(double spike_time, - double[:] spike_train, - # use memory view to ensure inlining - # np.ndarray[DTYPE_t,ndim=1] spike_train, - int N, - int start_index, - double t_start, double t_end) nogil: - """ Returns the minimal distance |spike_time - spike_train[i]| - with i>=start_index. 
- """ - cdef double d, d_temp - # start with the distance to the start time - d = fabs(spike_time - t_start) - if start_index < 0: - start_index = 0 - while start_index < N: - d_temp = fabs(spike_time - spike_train[start_index]) - if d_temp > d: - return d - else: - d = d_temp - start_index += 1 - - # finally, check the distance to end time - d_temp = fabs(t_end - spike_time) - if d_temp > d: - return d - else: - return d_temp - - -############################################################ -# isi_avrg_cython -############################################################ -cdef inline double isi_avrg_cython(double isi1, double isi2) nogil: - return 0.5*(isi1+isi2)*(isi1+isi2) - # alternative definition to obtain ~ 0.5 for Poisson spikes - # return 0.5*(isi1*isi1+isi2*isi2) - - -############################################################ -# spike_distance_cython -############################################################ -def spike_distance_cython(double[:] t1, double[:] t2, - double t_start, double t_end): - - cdef double[:] spike_events - cdef double[:] y_starts - cdef double[:] y_ends - - cdef int N1, N2, index1, index2, index - cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2 - cdef double isi1, isi2, s1, s2 - - N1 = len(t1) - N2 = len(t2) - - spike_events = np.empty(N1+N2+2) - - y_starts = np.empty(len(spike_events)-1) - y_ends = np.empty(len(spike_events)-1) - - with nogil: # release the interpreter to allow multithreading - spike_events[0] = t_start - t_p1 = t_start - t_p2 = t_start - if t1[0] > t_start: - # dt_p1 = t2[0]-t_start - t_f1 = t1[0] - dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) - isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) - dt_p1 = dt_f1 - s1 = dt_p1*(t_f1-t_start)/isi1 - index1 = -1 - else: - t_f1 = t1[1] - dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) - dt_p1 = 0.0 - isi1 = t1[1]-t1[0] - s1 = dt_p1 - index1 = 0 - if t2[0] > t_start: - # dt_p1 = t2[0]-t_start - t_f2 = t2[0] - dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) - dt_p2 = dt_f2 - isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) - s2 = dt_p2*(t_f2-t_start)/isi2 - index2 = -1 - else: - t_f2 = t2[1] - dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) - dt_p2 = 0.0 - isi2 = t2[1]-t2[0] - s2 = dt_p2 - index2 = 0 - - y_starts[0] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) - index = 1 - - while index1+index2 < N1+N2-2: - # print(index, index1, index2) - if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1): - index1 += 1 - # first calculate the previous interval end value - s1 = dt_f1*(t_f1-t_p1) / isi1 - # the previous time now was the following time before: - dt_p1 = dt_f1 - t_p1 = t_f1 # t_p1 contains the current time point - # get the next time - if index1 < N1-1: - t_f1 = t1[index1+1] - else: - t_f1 = t_end - spike_events[index] = t_p1 - s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 - y_ends[index-1] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, - isi2) - # now the next interval start value - if index1 < N1-1: - dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, - t_start, t_end) - isi1 = t_f1-t_p1 - s1 = dt_p1 - else: - dt_f1 = dt_p1 - isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) - # s1 needs adjustment due to change of isi1 - s1 = dt_p1*(t_end-t1[N1-1])/isi1 - # s2 is the same as above, thus we can compute y2 immediately - y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, - isi2) - elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): - index2 += 1 - # first calculate the previous interval end value - s2 = dt_f2*(t_f2-t_p2) / 
isi2 - # the previous time now was the following time before: - dt_p2 = dt_f2 - t_p2 = t_f2 # t_p2 contains the current time point - # get the next time - if index2 < N2-1: - t_f2 = t2[index2+1] - else: - t_f2 = t_end - spike_events[index] = t_p2 - s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 - y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, - isi2) - # now the next interval start value - if index2 < N2-1: - dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, - t_start, t_end) - isi2 = t_f2-t_p2 - s2 = dt_p2 - else: - dt_f2 = dt_p2 - isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) - # s2 needs adjustment due to change of isi2 - s2 = dt_p2*(t_end-t2[N2-1])/isi2 - # s2 is the same as above, thus we can compute y2 immediately - y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) - else: # t_f1 == t_f2 - generate only one event - index1 += 1 - index2 += 1 - t_p1 = t_f1 - t_p2 = t_f2 - dt_p1 = 0.0 - dt_p2 = 0.0 - spike_events[index] = t_f1 - y_ends[index-1] = 0.0 - y_starts[index] = 0.0 - if index1 < N1-1: - t_f1 = t1[index1+1] - dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, - t_start, t_end) - isi1 = t_f1 - t_p1 - else: - t_f1 = t_end - dt_f1 = dt_p1 - isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) - if index2 < N2-1: - t_f2 = t2[index2+1] - dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, - t_start, t_end) - isi2 = t_f2 - t_p2 - else: - t_f2 = t_end - dt_f2 = dt_p2 - isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) - index += 1 - # the last event is the interval end - if spike_events[index-1] == t_end: - index -= 1 - else: - spike_events[index] = t_end - # the ending value of the last interval - isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) - isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) - s1 = dt_f1*(t_end-t1[N1-1])/isi1 - s2 = dt_f2*(t_end-t2[N2-1])/isi2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) - # end nogil - - # use only the data added above - # could be less than original length due to equal spike times - return spike_events[:index+1], y_starts[:index], y_ends[:index] - - - -############################################################ -# coincidence_python -############################################################ -cdef inline double get_tau(double[:] spikes1, double[:] spikes2, - int i, int j, double max_tau): - cdef double m = 1E100 # some huge number - cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1 - cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1 - if i < N1 and i > -1: - m = fmin(m, spikes1[i+1]-spikes1[i]) - if j < N2 and j > -1: - m = fmin(m, spikes2[j+1]-spikes2[j]) - if i > 0: - m = fmin(m, spikes1[i]-spikes1[i-1]) - if j > 0: - m = fmin(m, spikes2[j]-spikes2[j-1]) - m *= 0.5 - if max_tau > 0.0: - m = fmin(m, max_tau) - return m - - -############################################################ -# coincidence_cython -############################################################ -def coincidence_cython(double[:] spikes1, double[:] spikes2, - double t_start, double t_end, double max_tau): - - cdef int N1 = len(spikes1) - cdef int N2 = len(spikes2) - cdef int i = -1 - cdef int j = -1 - cdef int n = 0 - cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times - cdef double[:] c = np.zeros(N1 + N2 + 2) # coincidences - cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity - cdef double tau - while i + j < N1 + N2 - 2: - if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): - i += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) - st[n] = spikes1[i] - if j > -1 and spikes1[i]-spikes2[j] < tau: - 
# coincidence between the current spike and the previous spike - # both get marked with 1 - c[n] = 1 - c[n-1] = 1 - elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): - j += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) - st[n] = spikes2[j] - if i > -1 and spikes2[j]-spikes1[i] < tau: - # coincidence between the current spike and the previous spike - # both get marked with 1 - c[n] = 1 - c[n-1] = 1 - else: # spikes1[i+1] = spikes2[j+1] - # advance in both spike trains - j += 1 - i += 1 - n += 1 - # add only one event, but with coincidence 2 and multiplicity 2 - st[n] = spikes1[i] - c[n] = 2 - mp[n] = 2 - - st = st[:n+2] - c = c[:n+2] - mp = mp[:n+2] - - st[0] = t_start - st[len(st)-1] = t_end - c[0] = c[1] - c[len(c)-1] = c[len(c)-2] - mp[0] = mp[1] - mp[len(mp)-1] = mp[len(mp)-2] - - return st, c, mp diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx new file mode 100644 index 0000000..59a8d30 --- /dev/null +++ b/pyspike/cython/cython_profiles.pyx @@ -0,0 +1,423 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_profiles.pyx + +cython implementation of the isi-, spike- and spike-sync profiles + +Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects +improves the performance of spike_distance by a factor of 10! + +Copyright 2014, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_profiles.pyx + +which gives:: + + cython_profiles.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport fabs +from libc.math cimport fmax +from libc.math cimport fmin + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +############################################################ +# isi_profile_cython +############################################################ +def isi_profile_cython(double[:] s1, double[:] s2, + double t_start, double t_end): + + cdef double[:] spike_events + cdef double[:] isi_values + cdef int index1, index2, index + cdef int N1, N2 + cdef double nu1, nu2 + N1 = len(s1) + N2 = len(s2) + + spike_events = np.empty(N1+N2+2) + # the values have one entry less as they are defined at the intervals + isi_values = np.empty(N1+N2+1) + + # first x-value of the profile + spike_events[0] = t_start + + # first interspike interval - check if a spike exists at the start time + if s1[0] > t_start: + # edge correction + nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) + index1 = -1 + else: + nu1 = s1[1]-s1[0] + index1 = 0 + + if s2[0] > t_start: + # edge correction + nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) + index2 = -1 + else: + nu2 = s2[1]-s2[0] + index2 = 0 + + isi_values[0] = fabs(nu1-nu2)/fmax(nu1, nu2) + index = 1 + + with nogil: # release the interpreter to allow multithreading + while index1+index2 < N1+N2-2: + # check which spike is next, only if there are spikes left in 1 + # next spike in 1 is earlier, or there are no spikes left in 2 + if (index1 < N1-1) and ((index2 == N2-1) or + (s1[index1+1] < s2[index2+1])): + index1 += 1 + spike_events[index] = s1[index1] + if index1 < N1-1: + nu1 = s1[index1+1]-s1[index1] + else: + # edge correction + nu1 = fmax(t_end-s1[index1], nu1) + elif (index2 < N2-1) and ((index1 == N1-1) or + (s1[index1+1] > s2[index2+1])): + index2 += 1 + spike_events[index] = s2[index2] + if index2 < N2-1: + nu2 = s2[index2+1]-s2[index2] + else: + # edge correction + nu2 = fmax(t_end-s2[index2], nu2) + else: # 
s1[index1+1] == s2[index2+1] + index1 += 1 + index2 += 1 + spike_events[index] = s1[index1] + if index1 < N1-1: + nu1 = s1[index1+1]-s1[index1] + else: + # edge correction + nu1 = fmax(t_end-s1[index1], nu1) + if index2 < N2-1: + nu2 = s2[index2+1]-s2[index2] + else: + # edge correction + nu2 = fmax(t_end-s2[index2], nu2) + # compute the corresponding isi-distance + isi_values[index] = fabs(nu1 - nu2) / fmax(nu1, nu2) + index += 1 + # the last event is the interval end + if spike_events[index-1] == t_end: + index -= 1 + else: + spike_events[index] = t_end + # end nogil + + return spike_events[:index+1], isi_values[:index] + + +############################################################ +# get_min_dist_cython +############################################################ +cdef inline double get_min_dist_cython(double spike_time, + double[:] spike_train, + # use memory view to ensure inlining + # np.ndarray[DTYPE_t,ndim=1] spike_train, + int N, + int start_index, + double t_start, double t_end) nogil: + """ Returns the minimal distance |spike_time - spike_train[i]| + with i>=start_index. + """ + cdef double d, d_temp + # start with the distance to the start time + d = fabs(spike_time - t_start) + if start_index < 0: + start_index = 0 + while start_index < N: + d_temp = fabs(spike_time - spike_train[start_index]) + if d_temp > d: + return d + else: + d = d_temp + start_index += 1 + + # finally, check the distance to end time + d_temp = fabs(t_end - spike_time) + if d_temp > d: + return d + else: + return d_temp + + +############################################################ +# isi_avrg_cython +############################################################ +cdef inline double isi_avrg_cython(double isi1, double isi2) nogil: + return 0.5*(isi1+isi2)*(isi1+isi2) + # alternative definition to obtain ~ 0.5 for Poisson spikes + # return 0.5*(isi1*isi1+isi2*isi2) + + +############################################################ +# spike_profile_cython +############################################################ +def spike_profile_cython(double[:] t1, double[:] t2, + double t_start, double t_end): + + cdef double[:] spike_events + cdef double[:] y_starts + cdef double[:] y_ends + + cdef int N1, N2, index1, index2, index + cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2 + cdef double isi1, isi2, s1, s2 + + N1 = len(t1) + N2 = len(t2) + + spike_events = np.empty(N1+N2+2) + + y_starts = np.empty(len(spike_events)-1) + y_ends = np.empty(len(spike_events)-1) + + with nogil: # release the interpreter to allow multithreading + spike_events[0] = t_start + t_p1 = t_start + t_p2 = t_start + if t1[0] > t_start: + # dt_p1 = t2[0]-t_start + t_f1 = t1[0] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) + dt_p1 = dt_f1 + s1 = dt_p1*(t_f1-t_start)/isi1 + index1 = -1 + else: + t_f1 = t1[1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + dt_p1 = 0.0 + isi1 = t1[1]-t1[0] + s1 = dt_p1 + index1 = 0 + if t2[0] > t_start: + # dt_p1 = t2[0]-t_start + t_f2 = t2[0] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = dt_f2 + isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) + s2 = dt_p2*(t_f2-t_start)/isi2 + index2 = -1 + else: + t_f2 = t2[1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = 0.0 + isi2 = t2[1]-t2[0] + s2 = dt_p2 + index2 = 0 + + y_starts[0] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + index = 1 + + while index1+index2 < N1+N2-2: + # print(index, index1, index2) + if (index1 < N1-1) 
and (t_f1 < t_f2 or index2 == N2-1): + index1 += 1 + # first calculate the previous interval end value + s1 = dt_f1*(t_f1-t_p1) / isi1 + # the previous time now was the following time before: + dt_p1 = dt_f1 + t_p1 = t_f1 # t_p1 contains the current time point + # get the next time + if index1 < N1-1: + t_f1 = t1[index1+1] + else: + t_f1 = t_end + spike_events[index] = t_p1 + s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, + isi2) + # now the next interval start value + if index1 < N1-1: + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1-t_p1 + s1 = dt_p1 + else: + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # s1 needs adjustment due to change of isi1 + s1 = dt_p1*(t_end-t1[N1-1])/isi1 + # s2 is the same as above, thus we can compute y2 immediately + y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, + isi2) + elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): + index2 += 1 + # first calculate the previous interval end value + s2 = dt_f2*(t_f2-t_p2) / isi2 + # the previous time now was the following time before: + dt_p2 = dt_f2 + t_p2 = t_f2 # t_p2 contains the current time point + # get the next time + if index2 < N2-1: + t_f2 = t2[index2+1] + else: + t_f2 = t_end + spike_events[index] = t_p2 + s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 + y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, + isi2) + # now the next interval start value + if index2 < N2-1: + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2-t_p2 + s2 = dt_p2 + else: + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + # s2 needs adjustment due to change of isi2 + s2 = dt_p2*(t_end-t2[N2-1])/isi2 + # s2 is the same as above, thus we can compute y2 immediately + y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + else: # t_f1 == t_f2 - generate only one event + index1 += 1 + index2 += 1 + t_p1 = t_f1 + t_p2 = t_f2 + dt_p1 = 0.0 + dt_p2 = 0.0 + spike_events[index] = t_f1 + y_ends[index-1] = 0.0 + y_starts[index] = 0.0 + if index1 < N1-1: + t_f1 = t1[index1+1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1 - t_p1 + else: + t_f1 = t_end + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + if index2 < N2-1: + t_f2 = t2[index2+1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2 - t_p2 + else: + t_f2 = t_end + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + index += 1 + # the last event is the interval end + if spike_events[index-1] == t_end: + index -= 1 + else: + spike_events[index] = t_end + # the ending value of the last interval + isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + s1 = dt_f1*(t_end-t1[N1-1])/isi1 + s2 = dt_f2*(t_end-t2[N2-1])/isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # end nogil + + # use only the data added above + # could be less than original length due to equal spike times + return spike_events[:index+1], y_starts[:index], y_ends[:index] + + + +############################################################ +# get_tau +############################################################ +cdef inline double get_tau(double[:] spikes1, double[:] spikes2, + int i, int j, double max_tau): + cdef double m = 1E100 # some huge number + cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1 + cdef int N2 = spikes2.shape[0]-1 # 
len(spikes2)-1 + if i < N1 and i > -1: + m = fmin(m, spikes1[i+1]-spikes1[i]) + if j < N2 and j > -1: + m = fmin(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = fmin(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = fmin(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = fmin(m, max_tau) + return m + + +############################################################ +# coincidence_profile_cython +############################################################ +def coincidence_profile_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int n = 0 + cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times + cdef double[:] c = np.zeros(N1 + N2 + 2) # coincidences + cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + st[n] = spikes1[i] + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + c[n] = 1 + c[n-1] = 1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + st[n] = spikes2[j] + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + c[n] = 1 + c[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + n += 1 + # add only one event, but with coincidence 2 and multiplicity 2 + st[n] = spikes1[i] + c[n] = 2 + mp[n] = 2 + + st = st[:n+2] + c = c[:n+2] + mp = mp[:n+2] + + st[0] = t_start + st[len(st)-1] = t_end + c[0] = c[1] + c[len(c)-1] = c[len(c)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + + return st, c, mp diff --git a/pyspike/isi_distance.py b/pyspike/isi_distance.py index 8b1e9bf..164378d 100644 --- a/pyspike/isi_distance.py +++ b/pyspike/isi_distance.py @@ -31,18 +31,18 @@ def isi_profile(spike_train1, spike_train2): # load cython implementation try: - from cython.cython_distance import isi_distance_cython \ - as isi_distance_impl + from cython.cython_profiles import isi_profile_cython \ + as isi_profile_impl except ImportError: print("Warning: isi_distance_cython not found. Make sure that PySpike \ is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend from cython.python_backend import isi_distance_python \ - as isi_distance_impl + as isi_profile_impl - times, values = isi_distance_impl(spike_train1.spikes, spike_train2.spikes, - spike_train1.t_start, spike_train1.t_end) + times, values = isi_profile_impl(spike_train1.spikes, spike_train2.spikes, + spike_train1.t_start, spike_train1.t_end) return PieceWiseConstFunc(times, values) diff --git a/pyspike/spike_distance.py b/pyspike/spike_distance.py index 499ab77..3567585 100644 --- a/pyspike/spike_distance.py +++ b/pyspike/spike_distance.py @@ -31,20 +31,20 @@ def spike_profile(spike_train1, spike_train2): # cython implementation try: - from cython.cython_distance import spike_distance_cython \ - as spike_distance_impl + from cython.cython_profiles import spike_profile_cython \ + as spike_profile_impl except ImportError: - print("Warning: spike_distance_cython not found. Make sure that \ + print("Warning: spike_profile_cython not found. 
Make sure that \
 PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
 Falling back to slow python backend.")
         # use python backend
         from cython.python_backend import spike_distance_python \
-            as spike_distance_impl
+            as spike_profile_impl
 
-    times, y_starts, y_ends = spike_distance_impl(spike_train1.spikes,
-                                                  spike_train2.spikes,
-                                                  spike_train1.t_start,
-                                                  spike_train1.t_end)
+    times, y_starts, y_ends = spike_profile_impl(spike_train1.spikes,
+                                                 spike_train2.spikes,
+                                                 spike_train1.t_start,
+                                                 spike_train1.t_end)
 
     return PieceWiseLinFunc(times, y_starts, y_ends)
 
diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py
index 7d429e4..107734d 100644
--- a/pyspike/spike_sync.py
+++ b/pyspike/spike_sync.py
@@ -36,24 +36,23 @@ def spike_sync_profile(spike_train1, spike_train2, max_tau=None):
 
     # cython implementation
     try:
-        from cython.cython_distance import coincidence_cython \
-            as coincidence_impl
+        from cython.cython_profiles import coincidence_profile_cython \
+            as coincidence_profile_impl
     except ImportError:
         print("Warning: spike_distance_cython not found. Make sure that \
 PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
 Falling back to slow python backend.")
         # use python backend
         from cython.python_backend import coincidence_python \
-            as coincidence_impl
+            as coincidence_profile_impl
 
     if max_tau is None:
         max_tau = 0.0
 
-    times, coincidences, multiplicity = coincidence_impl(spike_train1.spikes,
-                                                         spike_train2.spikes,
-                                                         spike_train1.t_start,
-                                                         spike_train1.t_end,
-                                                         max_tau)
+    times, coincidences, multiplicity \
+        = coincidence_profile_impl(spike_train1.spikes, spike_train2.spikes,
+                                   spike_train1.t_start, spike_train1.t_end,
+                                   max_tau)
 
     return DiscreteFunc(times, coincidences, multiplicity)
 
diff --git a/setup.py b/setup.py
index 289d521..d687240 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@ else:
     use_cython = True
 
 if os.path.isfile("pyspike/cython/cython_add.c") and \
-        os.path.isfile("pyspike/cython/cython_distance.c"):
+        os.path.isfile("pyspike/cython/cython_profiles.c"):
     use_c = True
 else:
     use_c = False
@@ -33,13 +33,13 @@ ext_modules = []
 if use_cython:  # Cython is available, compile .pyx -> .c
     ext_modules += [
         Extension("pyspike.cython.cython_add", ["pyspike/cython/cython_add.pyx"]),
-        Extension("pyspike.cython.cython_distance", ["pyspike/cython/cython_distance.pyx"]),
+        Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.pyx"]),
     ]
     cmdclass.update({'build_ext': build_ext})
 elif use_c:  # c files are there, compile to binaries
     ext_modules += [
         Extension("pyspike.cython.cython_add", ["pyspike/cython/cython_add.c"]),
-        Extension("pyspike.cython.cython_distance", ["pyspike/cython/cython_distance.c"]),
+        Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.c"]),
     ]
 
 # neither cython nor c files available -> automatic fall-back to python backend
@@ -78,7 +78,7 @@ train similarity',
         'Programming Language :: Python :: 2.7',
     ],
     package_data={
-        'pyspike': ['cython/cython_add.c', 'cython/cython_distance.c'],
+        'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c'],
         'test': ['Spike_testdata.txt']
     }
 )
-- 
cgit v1.2.3


From f688dc2e8616f914040746de845646abb158125d Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Fri, 8 May 2015 18:06:59 +0200
Subject: introduce backend for distance function

ISI- and spike-distances over complete intervals are now computed
without obtaining the profile first. This gives a more than 2x
performance improvement.
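Concretely, the scalar distance functions now accumulate the time-weighted
average in a single pass over the spike trains instead of first building a
`PieceWiseConstFunc` or `PieceWiseLinFunc` profile and averaging it
afterwards. A minimal sketch of the two call paths, assuming two
`SpikeTrain` objects `st1` and `st2` (hypothetical names) and the public
PySpike API of this version:

    import pyspike as spk

    # direct computation -- uses the new cython_distances backend
    d_isi = spk.isi_distance(st1, st2)
    d_spike = spk.spike_distance(st1, st2)

    # same quantities, computed via the full profile objects (slower)
    d_isi_prof = spk.isi_profile(st1, st2).avrg()
    d_spike_prof = spk.spike_profile(st1, st2).avrg()

Both paths should yield identical values; the speed-up comes purely from
not allocating and storing the profile intervals.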
--- examples/performance.py | 8 +- pyspike/cython/cython_distances.pyx | 328 ++++++++++++++++++++++++++++++++++++ pyspike/cython/cython_profiles.pyx | 2 +- pyspike/isi_distance.py | 16 +- pyspike/spike_distance.py | 17 +- setup.py | 5 +- 6 files changed, 371 insertions(+), 5 deletions(-) create mode 100644 pyspike/cython/cython_distances.pyx (limited to 'setup.py') diff --git a/examples/performance.py b/examples/performance.py index 94dae25..1c31e8f 100644 --- a/examples/performance.py +++ b/examples/performance.py @@ -1,4 +1,10 @@ -""" Compute distances of large sets of spike trains for performance tests +""" +Compute distances of large sets of spike trains for performance tests + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + """ from __future__ import print_function diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx new file mode 100644 index 0000000..65c2872 --- /dev/null +++ b/pyspike/cython/cython_distances.pyx @@ -0,0 +1,328 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_distances.pyx + +cython implementation of the isi-, spike- and spike-sync distances + +Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects +improves the performance of spike_distance by a factor of 10! + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_distances.pyx + +which gives:: + + cython_distances.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport fabs +from libc.math cimport fmax +from libc.math cimport fmin + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +############################################################ +# isi_distance_cython +############################################################ +def isi_distance_cython(double[:] s1, double[:] s2, + double t_start, double t_end): + + cdef double isi_value + cdef int index1, index2, index + cdef int N1, N2 + cdef double nu1, nu2 + cdef double last_t, curr_t, curr_isi + isi_value = 0.0 + N1 = len(s1) + N2 = len(s2) + + # first interspike interval - check if a spike exists at the start time + if s1[0] > t_start: + # edge correction + nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) + index1 = -1 + else: + nu1 = s1[1]-s1[0] + index1 = 0 + + if s2[0] > t_start: + # edge correction + nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) + index2 = -1 + else: + nu2 = s2[1]-s2[0] + index2 = 0 + + last_t = t_start + curr_isi = fabs(nu1-nu2)/fmax(nu1, nu2) + index = 1 + + with nogil: # release the interpreter to allow multithreading + while index1+index2 < N1+N2-2: + # check which spike is next, only if there are spikes left in 1 + # next spike in 1 is earlier, or there are no spikes left in 2 + if (index1 < N1-1) and ((index2 == N2-1) or + (s1[index1+1] < s2[index2+1])): + index1 += 1 + curr_t = s1[index1] + if index1 < N1-1: + nu1 = s1[index1+1]-s1[index1] + else: + # edge correction + nu1 = fmax(t_end-s1[index1], nu1) + elif (index2 < N2-1) and ((index1 == N1-1) or + (s1[index1+1] > s2[index2+1])): + index2 += 1 + curr_t = s2[index2] + if index2 < N2-1: + nu2 = s2[index2+1]-s2[index2] + else: + # edge correction + nu2 = fmax(t_end-s2[index2], nu2) + else: # s1[index1+1] == s2[index2+1] + index1 += 1 + index2 += 1 + curr_t = s1[index1] + if index1 < N1-1: + nu1 = s1[index1+1]-s1[index1] + else: + # edge correction + nu1 = fmax(t_end-s1[index1], nu1) + if index2 < N2-1: + nu2 = 
s2[index2+1]-s2[index2] + else: + # edge correction + nu2 = fmax(t_end-s2[index2], nu2) + # compute the corresponding isi-distance + isi_value += curr_isi * (curr_t - last_t) + curr_isi = fabs(nu1 - nu2) / fmax(nu1, nu2) + last_t = curr_t + index += 1 + + isi_value += curr_isi * (t_end - last_t) + # end nogil + + return isi_value / (t_end-t_start) + + +############################################################ +# get_min_dist_cython +############################################################ +cdef inline double get_min_dist_cython(double spike_time, + double[:] spike_train, + # use memory view to ensure inlining + # np.ndarray[DTYPE_t,ndim=1] spike_train, + int N, + int start_index, + double t_start, double t_end) nogil: + """ Returns the minimal distance |spike_time - spike_train[i]| + with i>=start_index. + """ + cdef double d, d_temp + # start with the distance to the start time + d = fabs(spike_time - t_start) + if start_index < 0: + start_index = 0 + while start_index < N: + d_temp = fabs(spike_time - spike_train[start_index]) + if d_temp > d: + return d + else: + d = d_temp + start_index += 1 + + # finally, check the distance to end time + d_temp = fabs(t_end - spike_time) + if d_temp > d: + return d + else: + return d_temp + + +############################################################ +# isi_avrg_cython +############################################################ +cdef inline double isi_avrg_cython(double isi1, double isi2) nogil: + return 0.5*(isi1+isi2)*(isi1+isi2) + # alternative definition to obtain ~ 0.5 for Poisson spikes + # return 0.5*(isi1*isi1+isi2*isi2) + + +############################################################ +# spike_distance_cython +############################################################ +def spike_distance_cython(double[:] t1, double[:] t2, + double t_start, double t_end): + + cdef int N1, N2, index1, index2, index + cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2 + cdef double isi1, isi2, s1, s2 + cdef double y_start, y_end, t_last, t_current, spike_value + + spike_value = 0.0 + + N1 = len(t1) + N2 = len(t2) + + with nogil: # release the interpreter to allow multithreading + t_last = t_start + t_p1 = t_start + t_p2 = t_start + if t1[0] > t_start: + # dt_p1 = t2[0]-t_start + t_f1 = t1[0] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) + dt_p1 = dt_f1 + s1 = dt_p1*(t_f1-t_start)/isi1 + index1 = -1 + else: + t_f1 = t1[1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + dt_p1 = 0.0 + isi1 = t1[1]-t1[0] + s1 = dt_p1 + index1 = 0 + if t2[0] > t_start: + # dt_p1 = t2[0]-t_start + t_f2 = t2[0] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = dt_f2 + isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) + s2 = dt_p2*(t_f2-t_start)/isi2 + index2 = -1 + else: + t_f2 = t2[1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = 0.0 + isi2 = t2[1]-t2[0] + s2 = dt_p2 + index2 = 0 + + y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + index = 1 + + while index1+index2 < N1+N2-2: + # print(index, index1, index2) + if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1): + index1 += 1 + # first calculate the previous interval end value + s1 = dt_f1*(t_f1-t_p1) / isi1 + # the previous time now was the following time before: + dt_p1 = dt_f1 + t_p1 = t_f1 # t_p1 contains the current time point + # get the next time + if index1 < N1-1: + t_f1 = t1[index1+1] + else: + t_f1 = t_end + t_curr = t_p1 + s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / 
isi2 + y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + + # now the next interval start value + if index1 < N1-1: + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1-t_p1 + s1 = dt_p1 + else: + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # s1 needs adjustment due to change of isi1 + s1 = dt_p1*(t_end-t1[N1-1])/isi1 + # s2 is the same as above, thus we can compute y2 immediately + y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): + index2 += 1 + # first calculate the previous interval end value + s2 = dt_f2*(t_f2-t_p2) / isi2 + # the previous time now was the following time before: + dt_p2 = dt_f2 + t_p2 = t_f2 # t_p2 contains the current time point + # get the next time + if index2 < N2-1: + t_f2 = t2[index2+1] + else: + t_f2 = t_end + t_curr = t_p2 + s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 + y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + + # now the next interval start value + if index2 < N2-1: + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2-t_p2 + s2 = dt_p2 + else: + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + # s2 needs adjustment due to change of isi2 + s2 = dt_p2*(t_end-t2[N2-1])/isi2 + # s1 is the same as above, thus we can compute y2 immediately + y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + else: # t_f1 == t_f2 - generate only one event + index1 += 1 + index2 += 1 + t_p1 = t_f1 + t_p2 = t_f2 + dt_p1 = 0.0 + dt_p2 = 0.0 + t_curr = t_f1 + y_end = 0.0 + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + y_start = 0.0 + if index1 < N1-1: + t_f1 = t1[index1+1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1 - t_p1 + else: + t_f1 = t_end + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + if index2 < N2-1: + t_f2 = t2[index2+1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2 - t_p2 + else: + t_f2 = t_end + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + index += 1 + t_last = t_curr + # isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + s1 = dt_f1*(t_end-t1[N1-1])/isi1 + s2 = dt_f2*(t_end-t2[N2-1])/isi2 + y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + spike_value += 0.5*(y_start + y_end) * (t_end - t_last) + # end nogil + + # use only the data added above + # could be less than original length due to equal spike times + return spike_value / (t_end-t_start) diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index 59a8d30..3690091 100644 --- a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -10,7 +10,7 @@ cython implementation of the isi-, spike- and spike-sync profiles Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects improves the performance of spike_distance by a factor of 10! -Copyright 2014, Mario Mulansky +Copyright 2014-2015, Mario Mulansky Distributed under the BSD License diff --git a/pyspike/isi_distance.py b/pyspike/isi_distance.py index 164378d..2a1ed3a 100644 --- a/pyspike/isi_distance.py +++ b/pyspike/isi_distance.py @@ -66,7 +66,21 @@ def isi_distance(spike_train1, spike_train2, interval=None): :returns: The isi-distance :math:`D_I`. 
:rtype: double """ - return isi_profile(spike_train1, spike_train2).avrg(interval) + + if interval is None: + # distance over the whole interval is requested: use specific function + # for optimal performance + try: + from cython.cython_distances import isi_distance_cython \ + as isi_distance_impl + return isi_distance_impl(spike_train1.spikes, spike_train2.spikes, + spike_train1.t_start, spike_train1.t_end) + except ImportError: + # Cython backend not available: fall back to profile averaging + return isi_profile(spike_train1, spike_train2).avrg(interval) + else: + # some specific interval is provided: use profile + return isi_profile(spike_train1, spike_train2).avrg(interval) ############################################################ diff --git a/pyspike/spike_distance.py b/pyspike/spike_distance.py index 3567585..d727fa2 100644 --- a/pyspike/spike_distance.py +++ b/pyspike/spike_distance.py @@ -68,7 +68,22 @@ def spike_distance(spike_train1, spike_train2, interval=None): :rtype: double """ - return spike_profile(spike_train1, spike_train2).avrg(interval) + if interval is None: + # distance over the whole interval is requested: use specific function + # for optimal performance + try: + from cython.cython_distances import spike_distance_cython \ + as spike_distance_impl + return spike_distance_impl(spike_train1.spikes, + spike_train2.spikes, + spike_train1.t_start, + spike_train1.t_end) + except ImportError: + # Cython backend not available: fall back to average profile + return spike_profile(spike_train1, spike_train2).avrg(interval) + else: + # some specific interval is provided: compute the whole profile + return spike_profile(spike_train1, spike_train2).avrg(interval) ############################################################ diff --git a/setup.py b/setup.py index d687240..7902066 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,8 @@ else: use_cython = True if os.path.isfile("pyspike/cython/cython_add.c") and \ - os.path.isfile("pyspike/cython/cython_profiles.c"): + os.path.isfile("pyspike/cython/cython_profiles.c") and \ + os.path.isfile("pyspike/cython/cython_distances.c"): use_c = True else: use_c = False @@ -34,12 +35,14 @@ if use_cython: # Cython is available, compile .pyx -> .c ext_modules += [ Extension("pyspike.cython.cython_add", ["pyspike/cython/cython_add.pyx"]), Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.pyx"]), + Extension("pyspike.cython.cython_distances", ["pyspike/cython/cython_distances.pyx"]), ] cmdclass.update({'build_ext': build_ext}) elif use_c: # c files are there, compile to binaries ext_modules += [ Extension("pyspike.cython.cython_add", ["pyspike/cython/cython_add.c"]), Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.c"]), + Extension("pyspike.cython.cython_distances", ["pyspike/cython/cython_distances.c"]), ] # neither cython nor c files available -> automatic fall-back to python backend -- cgit v1.2.3 From 81fc2c20e7360714f218e9bba729ec4387f59aef Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 18 May 2015 15:15:36 +0200 Subject: set version 0.3 --- Changelog | 4 +++- doc/conf.py | 4 ++-- setup.py | 8 ++++---- 3 files changed, 9 insertions(+), 7 deletions(-) (limited to 'setup.py') diff --git a/Changelog b/Changelog index 798403e..519dd3b 100644 --- a/Changelog +++ b/Changelog @@ -1,4 +1,4 @@ -PySpike v0.2.1: +PySpike v0.3: * addition of __version__ attribute * restructured docs, Readme now only contains basic examples * performance improvements by introducing specific distance 
functions @@ -7,6 +7,8 @@ PySpike v0.2: * new example: performance.py * consistent treatment of empty spike trains * new example: profiles.py + * additional functionality: pwc and pwl function classes can now return + function values at given time(s) PySpike v0.2: diff --git a/doc/conf.py b/doc/conf.py index 9b994c2..8011ea9 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -64,9 +64,9 @@ copyright = u'2014-2015, Mario Mulansky' # built documents. # # The short X.Y version. -version = '0.2' +version = '0.3' # The full version, including alpha/beta/rc tags. -release = '0.2.0' +release = '0.3.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/setup.py b/setup.py index 7902066..d853cdf 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ to compile cython files: python setup.py build_ext --inplace -Copyright 2014, Mario Mulansky +Copyright 2014-2015, Mario Mulansky Distributed under the BSD License @@ -49,7 +49,7 @@ elif use_c: # c files are there, compile to binaries setup( name='pyspike', packages=find_packages(exclude=['doc']), - version='0.1.3', + version='0.3.0', cmdclass=cmdclass, ext_modules=ext_modules, include_dirs=[numpy.get_include()], @@ -59,7 +59,6 @@ train similarity', author_email='mario.mulanskygmx.net', license='BSD', url='https://github.com/mariomulansky/PySpike', - # download_url='https://github.com/mariomulansky/PySpike/tarball/0.1', install_requires=['numpy'], keywords=['data analysis', 'spike', 'neuroscience'], # arbitrary keywords classifiers=[ @@ -81,7 +80,8 @@ train similarity', 'Programming Language :: Python :: 2.7', ], package_data={ - 'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c'], + 'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c', + 'cython_distances.c'], 'test': ['Spike_testdata.txt'] } ) -- cgit v1.2.3 From 29e50478dcfc31ce04c4343fa585463abe96caae Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 12 Aug 2015 18:42:54 +0200 Subject: new spike delay asymmetry measures

Added a first version of the spike delay asymmetry functions. Still incomplete and untested.
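A minimal usage sketch of the new module (the commit message itself warns that these functions are still incomplete and untested, so treat this as illustrative only; the `spk.drct` alias follows the `import directionality as drct` line added to `pyspike/__init__.py` below):

    import pyspike as spk

    spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100])
                    for m in range(5)]

    # pairwise asymmetry: positive if the first train tends to lead
    a = spk.drct.spike_delay_asymmetry(spike_trains[0], spike_trains[1])

    # antisymmetric matrix with D[i, j] = -D[j, i]
    D = spk.drct.spike_delay_asymmetry_matrix(spike_trains)

    # simulated annealing to find the ordering that maximizes the
    # upper-triangular sum of D, i.e. the most synfire-like order
    p, A = spk.drct.optimal_asymmetry_order(spike_trains)
    D_sorted = spk.drct.reorder_asymmetry_matrix(D, p)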
--- pyspike/__init__.py | 3 +- pyspike/directionality/__init__.py | 12 ++ pyspike/directionality/cython/__init__.py | 0 .../cython/cython_directionality.pyx | 177 +++++++++++++++++ pyspike/directionality/spike_delay_asymmetry.py | 212 +++++++++++++++++++++ pyspike/generic.py | 6 +- setup.py | 28 ++- 7 files changed, 427 insertions(+), 11 deletions(-) create mode 100644 pyspike/directionality/__init__.py create mode 100644 pyspike/directionality/cython/__init__.py create mode 100644 pyspike/directionality/cython/cython_directionality.pyx create mode 100644 pyspike/directionality/spike_delay_asymmetry.py (limited to 'setup.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 2060f73..8d92ea4 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -6,7 +6,7 @@ Distributed under the BSD License __all__ = ["isi_distance", "spike_distance", "spike_sync", "psth", "spikes", "SpikeTrain", "PieceWiseConstFunc", "PieceWiseLinFunc", - "DiscreteFunc"] + "DiscreteFunc", "directionality"] from PieceWiseConstFunc import PieceWiseConstFunc from PieceWiseLinFunc import PieceWiseLinFunc @@ -24,6 +24,7 @@ from psth import psth from spikes import load_spike_trains_from_txt, spike_train_from_string, \ merge_spike_trains, generate_poisson_spikes +import directionality as drct # define the __version__ following # http://stackoverflow.com/questions/17583443 diff --git a/pyspike/directionality/__init__.py b/pyspike/directionality/__init__.py new file mode 100644 index 0000000..e6de1de --- /dev/null +++ b/pyspike/directionality/__init__.py @@ -0,0 +1,12 @@ +""" +Copyright 2015, Mario Mulansky + +Distributed under the BSD License +""" + +__all__ = ["spike_delay_asymmetry"] + +from spike_delay_asymmetry import spike_delay_asymmetry_profile, \ + spike_delay_asymmetry, spike_delay_asymmetry_profile_multi, \ + spike_delay_asymmetry_matrix, optimal_asymmetry_order, \ + optimal_asymmetry_order_from_D, reorder_asymmetry_matrix diff --git a/pyspike/directionality/cython/__init__.py b/pyspike/directionality/cython/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pyspike/directionality/cython/cython_directionality.pyx b/pyspike/directionality/cython/cython_directionality.pyx new file mode 100644 index 0000000..f5ea752 --- /dev/null +++ b/pyspike/directionality/cython/cython_directionality.pyx @@ -0,0 +1,177 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_directionality.pyx + +cython implementation of the spike delay asymmetry measures + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_directionality.pyx + +which gives:: + + cython_directionality.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport fabs +from libc.math cimport fmax +from libc.math cimport fmin + +# from pyspike.cython.cython_distances cimport get_tau + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +############################################################ +# get_tau +############################################################ +cdef inline double get_tau(double[:] spikes1, double[:] spikes2, + int i, int j, double interval, double max_tau): + cdef double m = interval # use interval length as initial tau + cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1 + cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1 + if i < N1 and i > -1: + m = fmin(m, spikes1[i+1]-spikes1[i]) + if j < N2 and j > -1: + m = 
fmin(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = fmin(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = fmin(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = fmin(m, max_tau) + return m + + +############################################################ +# spike_delay_asymmetry_profile_cython +############################################################ +def spike_delay_asymmetry_profile_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int n = 0 + cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times + cdef double[:] a = np.zeros(N1 + N2 + 2) # asymmetry values + cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + st[n] = spikes1[i] + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 after spike train 2 + # both get marked with -1 + a[n] = -1 + a[n-1] = -1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + st[n] = spikes2[j] + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # both get marked with 1 + a[n] = 1 + a[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + n += 1 + # add only one event with zero asymmetry value and multiplicity 2 + st[n] = spikes1[i] + a[n] = 0 + mp[n] = 2 + + st = st[:n+2] + a = a[:n+2] + mp = mp[:n+2] + + st[0] = t_start + st[len(st)-1] = t_end + if N1 + N2 > 0: + a[0] = a[1] + a[len(a)-1] = a[len(a)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + else: + a[0] = 1 + a[1] = 1 + + return st, a, mp + + + +############################################################ +# spike_delay_asymmetry_cython +############################################################ +def spike_delay_asymmetry_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int asym = 0 + cdef int mp = 0 + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 2 appeared before spike in spike train 1 + # mark with -1 + asym -= 1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 1 appeared before spike in spike train 2 + # mark with +1 + asym += 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + # add only one event with multiplicity 2, but no asymmetry counting + mp += 2 + + if asym == 0 and mp == 0: + # empty spike trains -> spike sync = 1 by definition + asym = 1 
+ mp = 1 + + return asym, mp diff --git a/pyspike/directionality/spike_delay_asymmetry.py b/pyspike/directionality/spike_delay_asymmetry.py new file mode 100644 index 0000000..7d59601 --- /dev/null +++ b/pyspike/directionality/spike_delay_asymmetry.py @@ -0,0 +1,212 @@ +# Module containing functions to compute multivariate spike delay asymmetry +# Copyright 2015, Mario Mulansky +# Distributed under the BSD License + +import numpy as np +from math import exp +from functools import partial +# import pyspike +from pyspike import DiscreteFunc +from pyspike.generic import _generic_profile_multi + + +############################################################ +# spike_delay_asymmetry_profile +############################################################ +def spike_delay_asymmetry_profile(spike_train1, spike_train2, max_tau=None): + """ Computes the spike delay asymmetry profile A(t) of the two given + spike trains. Returns the profile as a DiscreteFunction object. + + :param spike_train1: First spike train. + :type spike_train1: :class:`pyspike.SpikeTrain` + :param spike_train2: Second spike train. + :type spike_train2: :class:`pyspike.SpikeTrain` + :param max_tau: Maximum coincidence window size. If 0 or `None`, the + coincidence window has no upper bound. + :returns: The spike-distance profile :math:`S_{sync}(t)`. + :rtype: :class:`pyspike.function.DiscreteFunction` + + """ + # check whether the spike trains are defined for the same interval + assert spike_train1.t_start == spike_train2.t_start, \ + "Given spike trains are not defined on the same interval!" + assert spike_train1.t_end == spike_train2.t_end, \ + "Given spike trains are not defined on the same interval!" + + # cython implementation + try: + from cython.cython_directionality import \ + spike_delay_asymmetry_profile_cython as \ + spike_delay_asymmetry_profile_impl + except ImportError: + raise NotImplementedError() +# if not(pyspike.disable_backend_warning): +# print("Warning: spike_distance_cython not found. Make sure that \ +# PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ +# Falling back to slow python backend.") +# # use python backend +# from cython.python_backend import coincidence_python \ +# as coincidence_profile_impl + + if max_tau is None: + max_tau = 0.0 + + times, coincidences, multiplicity \ + = spike_delay_asymmetry_profile_impl(spike_train1.spikes, + spike_train2.spikes, + spike_train1.t_start, + spike_train1.t_end, + max_tau) + + return DiscreteFunc(times, coincidences, multiplicity) + + +############################################################ +# spike_delay_asymmetry +############################################################ +def spike_delay_asymmetry(spike_train1, spike_train2, + interval=None, max_tau=None): + """ Computes the overall spike delay asymmetry value for two spike trains. 
+ """ + if interval is None: + # distance over the whole interval is requested: use specific function + # for optimal performance + try: + from cython.cython_directionality import \ + spike_delay_asymmetry_cython as spike_delay_impl + if max_tau is None: + max_tau = 0.0 + c, mp = spike_delay_impl(spike_train1.spikes, + spike_train2.spikes, + spike_train1.t_start, + spike_train1.t_end, + max_tau) + return c + except ImportError: + # Cython backend not available: fall back to profile averaging + raise NotImplementedError() + # return spike_sync_profile(spike_train1, spike_train2, + # max_tau).integral(interval) + else: + # some specific interval is provided: not yet implemented + raise NotImplementedError() + + +############################################################ +# spike_delay_asymmetry_profile_multi +############################################################ +def spike_delay_asymmetry_profile_multi(spike_trains, indices=None, + max_tau=None): + """ Computes the multi-variate spike delay asymmetry profile for a set of + spike trains. For each spike in the set of spike trains, the multi-variate + profile is defined as the sum of asymmetry values divided by the number of + spike trains pairs involving the spike train of containing this spike, + which is the number of spike trains minus one (N-1). + + :param spike_trains: list of :class:`pyspike.SpikeTrain` + :param indices: list of indices defining which spike trains to use, + if None all given spike trains are used (default=None) + :type indices: list or None + :param max_tau: Maximum coincidence window size. If 0 or `None`, the + coincidence window has no upper bound. + :returns: The multi-variate spike sync profile :math:`(t)` + :rtype: :class:`pyspike.function.DiscreteFunction` + + """ + prof_func = partial(spike_delay_asymmetry_profile, max_tau=max_tau) + average_prof, M = _generic_profile_multi(spike_trains, prof_func, + indices) + # average_dist.mul_scalar(1.0/M) # no normalization here! + return average_prof + + +############################################################ +# spike_delay_asymmetry_matrix +############################################################ +def spike_delay_asymmetry_matrix(spike_trains, indices=None, + interval=None, max_tau=None): + """ Computes the spike delay asymmetry matrix for the given spike trains. + """ + if indices is None: + indices = np.arange(len(spike_trains)) + indices = np.array(indices) + # check validity of indices + assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \ + "Invalid index list." + # generate a list of possible index pairs + pairs = [(indices[i], j) for i in range(len(indices)) + for j in indices[i+1:]] + + distance_matrix = np.zeros((len(indices), len(indices))) + for i, j in pairs: + d = spike_delay_asymmetry(spike_trains[i], spike_trains[j], + interval, max_tau=max_tau) + distance_matrix[i, j] = d + distance_matrix[j, i] = -d + return distance_matrix + + +############################################################ +# optimal_asymmetry_order_from_D +############################################################ +def optimal_asymmetry_order_from_D(D, full_output=False): + """ finds the best sorting via simulated annealing. + Returns the optimal permutation p and A value. + Internal function, don't call directly! Use optimal_asymmetry_order + instead. 
+ """ + N = len(D) + A = np.sum(np.triu(D, 0)) + + p = np.arange(N) + + T = 2*np.max(D) # starting temperature + T_end = 1E-5 * T # final temperature + alpha = 0.9 # cooling factor + total_iter = 0 + while T > T_end: + iterations = 0 + succ_iter = 0 + while iterations < 100*N and succ_iter < 10*N: + # exchange two rows and cols + ind1 = np.random.randint(N-1) + delta_A = -2*D[p[ind1], p[ind1+1]] + if delta_A > 0.0 or exp(delta_A/T) > np.random.random(): + # swap indices + p[ind1], p[ind1+1] = p[ind1+1], p[ind1] + A += delta_A + succ_iter += 1 + iterations += 1 + total_iter += iterations + T *= alpha # cool down + if succ_iter == 0: + break + if full_output: + return p, A, total_iter + else: + return p, A + + +############################################################ +# _optimal_asymmetry_order +############################################################ +def optimal_asymmetry_order(spike_trains, indices=None, interval=None, + max_tau=None, full_output=False): + """ finds the best sorting of the given spike trains via simulated + annealing. + Returns the optimal permutation p and A value. + """ + D = spike_delay_asymmetry_matrix(spike_trains, indices, interval, max_tau) + return optimal_asymmetry_order_from_D(D, full_output) + + +############################################################ +# reorder_asymmetry_matrix +############################################################ +def reorder_asymmetry_matrix(D, p): + N = len(D) + D_p = np.empty_like(D) + for n in xrange(N): + for m in xrange(N): + D_p[n, m] = D[p[n], p[m]] + return D_p diff --git a/pyspike/generic.py b/pyspike/generic.py index 2df34f1..515cbf4 100644 --- a/pyspike/generic.py +++ b/pyspike/generic.py @@ -37,13 +37,15 @@ def _generic_profile_multi(spike_trains, pair_distance_func, indices=None): """ L1 = len(pairs1) if L1 > 1: - dist_prof1 = divide_and_conquer(pairs1[:L1//2], pairs1[int(L1//2):]) + dist_prof1 = divide_and_conquer(pairs1[:L1//2], + pairs1[int(L1//2):]) else: dist_prof1 = pair_distance_func(spike_trains[pairs1[0][0]], spike_trains[pairs1[0][1]]) L2 = len(pairs2) if L2 > 1: - dist_prof2 = divide_and_conquer(pairs2[:L2//2], pairs2[int(L2//2):]) + dist_prof2 = divide_and_conquer(pairs2[:L2//2], + pairs2[int(L2//2):]) else: dist_prof2 = pair_distance_func(spike_trains[pairs2[0][0]], spike_trains[pairs2[0][1]]) diff --git a/setup.py b/setup.py index d853cdf..960c684 100644 --- a/setup.py +++ b/setup.py @@ -23,7 +23,8 @@ else: if os.path.isfile("pyspike/cython/cython_add.c") and \ os.path.isfile("pyspike/cython/cython_profiles.c") and \ - os.path.isfile("pyspike/cython/cython_distances.c"): + os.path.isfile("pyspike/cython/cython_distances.c") and \ + os.path.isfile("pyspike/directionality/cython/cython_directionality.c"): use_c = True else: use_c = False @@ -33,16 +34,26 @@ ext_modules = [] if use_cython: # Cython is available, compile .pyx -> .c ext_modules += [ - Extension("pyspike.cython.cython_add", ["pyspike/cython/cython_add.pyx"]), - Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.pyx"]), - Extension("pyspike.cython.cython_distances", ["pyspike/cython/cython_distances.pyx"]), + Extension("pyspike.cython.cython_add", + ["pyspike/cython/cython_add.pyx"]), + Extension("pyspike.cython.cython_profiles", + ["pyspike/cython/cython_profiles.pyx"]), + Extension("pyspike.cython.cython_distances", + ["pyspike/cython/cython_distances.pyx"]), + Extension("pyspike.directionality.cython.cython_directionality", + ["pyspike/directionality/cython/cython_directionality.pyx"]) ] 
cmdclass.update({'build_ext': build_ext}) elif use_c: # c files are there, compile to binaries ext_modules += [ - Extension("pyspike.cython.cython_add", ["pyspike/cython/cython_add.c"]), - Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.c"]), - Extension("pyspike.cython.cython_distances", ["pyspike/cython/cython_distances.c"]), + Extension("pyspike.cython.cython_add", + ["pyspike/cython/cython_add.c"]), + Extension("pyspike.cython.cython_profiles", + ["pyspike/cython/cython_profiles.c"]), + Extension("pyspike.cython.cython_distances", + ["pyspike/cython/cython_distances.c"]), + Extension("pyspike.directionality.cython.cython_directionality", + ["pyspike/directionality/cython/cython_directionality.c"]) ] # neither cython nor c files available -> automatic fall-back to python backend @@ -81,7 +92,8 @@ train similarity', ], package_data={ 'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c', - 'cython_distances.c'], + 'cython/cython_distances.c', + 'directionality/cython/cython_directionality.c'], 'test': ['Spike_testdata.txt'] } ) -- cgit v1.2.3 From 61a9785822d402d5269bdff608f6259c1b6bffa4 Mon Sep 17 00:00:00 2001 From: Igor Gnatenko Date: Sun, 13 Dec 2015 11:04:33 +0100 Subject: add metadata and testing under py3 Signed-off-by: Igor Gnatenko --- .travis.yml | 3 +++ setup.py | 5 +++++ 2 files changed, 8 insertions(+) (limited to 'setup.py') diff --git a/.travis.yml b/.travis.yml index 1035775..1562d21 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,6 +2,9 @@ language: python python: - "2.6" - "2.7" + - "3.3" + - "3.4" + - "3.5" env: - CYTHON_INSTALL="pip install -q cython" - CYTHON_INSTALL="" diff --git a/setup.py b/setup.py index 960c684..0bd9173 100644 --- a/setup.py +++ b/setup.py @@ -89,6 +89,11 @@ train similarity', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', + + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.3', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', ], package_data={ 'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c', -- cgit v1.2.3 From 692a5773c85e08a690bc5406147e58c4f10b6628 Mon Sep 17 00:00:00 2001 From: Igor Gnatenko Date: Sun, 13 Dec 2015 11:17:37 +0100 Subject: stop using package_data Signed-off-by: Igor Gnatenko --- MANIFEST.in | 7 +++++++ setup.py | 8 +------- 2 files changed, 8 insertions(+), 7 deletions(-) create mode 100644 MANIFEST.in (limited to 'setup.py') diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..aed0ae0 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,7 @@ +include *.rst +include *.txt +include pyspike/cython/*.c +include directionality/cython/*.c +recursive-include examples *.py *.txt +recursive-include test *.py *.txt +recursive-include doc * diff --git a/setup.py b/setup.py index 0bd9173..62c1951 100644 --- a/setup.py +++ b/setup.py @@ -94,11 +94,5 @@ train similarity', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', - ], - package_data={ - 'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c', - 'cython/cython_distances.c', - 'directionality/cython/cython_directionality.c'], - 'test': ['Spike_testdata.txt'] - } + ] ) -- cgit v1.2.3 From 64b680238ac5bcafbf3ce67ed2a4d7487098b43d Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 14 Dec 2015 16:40:49 +0100 Subject: update version strings and Changelog --- Changelog | 6 ++++++ doc/conf.py | 4 ++-- 
setup.py | 2 +- 3 files changed, 9 insertions(+), 3 deletions(-) (limited to 'setup.py') diff --git a/Changelog b/Changelog index 519dd3b..2be5e52 100644 --- a/Changelog +++ b/Changelog @@ -1,3 +1,9 @@ +PySpike v0.4: + * Python 3 support (thanks to Igor Gnatenko) + * list interface to SpikeTrain class + * disable_backend_warning property + * several bugfixes + PySpike v0.3: * addition of __version__ attribute * restructured docs, Readme now only contains basic examples diff --git a/doc/conf.py b/doc/conf.py index 8011ea9..7e13eae 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -64,9 +64,9 @@ copyright = u'2014-2015, Mario Mulansky' # built documents. # # The short X.Y version. -version = '0.3' +version = '0.4' # The full version, including alpha/beta/rc tags. -release = '0.3.0' +release = '0.4.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/setup.py b/setup.py index 62c1951..a1ab122 100644 --- a/setup.py +++ b/setup.py @@ -60,7 +60,7 @@ elif use_c: # c files are there, compile to binaries setup( name='pyspike', packages=find_packages(exclude=['doc']), - version='0.3.0', + version='0.4.0', cmdclass=cmdclass, ext_modules=ext_modules, include_dirs=[numpy.get_include()], -- cgit v1.2.3 From 9061f2a0c13134e53f937d730295a421fd671ea3 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 14 Dec 2015 16:50:55 +0100 Subject: removed directionality from __init__ and setup.py --- pyspike/__init__.py | 2 -- setup.py | 11 +++-------- 2 files changed, 3 insertions(+), 10 deletions(-) (limited to 'setup.py') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 335b1d3..069090b 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -26,8 +26,6 @@ from .psth import psth from .spikes import load_spike_trains_from_txt, spike_train_from_string, \ merge_spike_trains, generate_poisson_spikes -from . 
import directionality as drct - # define the __version__ following # http://stackoverflow.com/questions/17583443 from pkg_resources import get_distribution, DistributionNotFound diff --git a/setup.py b/setup.py index a1ab122..8ef431a 100644 --- a/setup.py +++ b/setup.py @@ -23,8 +23,7 @@ else: if os.path.isfile("pyspike/cython/cython_add.c") and \ os.path.isfile("pyspike/cython/cython_profiles.c") and \ - os.path.isfile("pyspike/cython/cython_distances.c") and \ - os.path.isfile("pyspike/directionality/cython/cython_directionality.c"): + os.path.isfile("pyspike/cython/cython_distances.c"): use_c = True else: use_c = False @@ -39,9 +38,7 @@ if use_cython: # Cython is available, compile .pyx -> .c Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.pyx"]), Extension("pyspike.cython.cython_distances", - ["pyspike/cython/cython_distances.pyx"]), - Extension("pyspike.directionality.cython.cython_directionality", - ["pyspike/directionality/cython/cython_directionality.pyx"]) + ["pyspike/cython/cython_distances.pyx"]) ] cmdclass.update({'build_ext': build_ext}) elif use_c: # c files are there, compile to binaries @@ -51,9 +48,7 @@ elif use_c: # c files are there, compile to binaries Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.c"]), Extension("pyspike.cython.cython_distances", - ["pyspike/cython/cython_distances.c"]), - Extension("pyspike.directionality.cython.cython_directionality", - ["pyspike/directionality/cython/cython_directionality.c"]) + ["pyspike/cython/cython_distances.c"]) ] # neither cython nor c files available -> automatic fall-back to python backend -- cgit v1.2.3 From c17cc8602414cec883c412008a4300b2c7ac7f80 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 9 Mar 2016 14:13:11 +0100 Subject: new version: 0.5.1 --- doc/conf.py | 4 ++-- setup.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) (limited to 'setup.py') diff --git a/doc/conf.py b/doc/conf.py index 7e13eae..807dec6 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -64,9 +64,9 @@ copyright = u'2014-2015, Mario Mulansky' # built documents. # # The short X.Y version. -version = '0.4' +version = '0.5' # The full version, including alpha/beta/rc tags. -release = '0.4.0' +release = '0.5.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/setup.py b/setup.py index 8ef431a..b53304b 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ to compile cython files: python setup.py build_ext --inplace -Copyright 2014-2015, Mario Mulansky +Copyright 2014-2016, Mario Mulansky Distributed under the BSD License @@ -55,7 +55,7 @@ elif use_c: # c files are there, compile to binaries setup( name='pyspike', packages=find_packages(exclude=['doc']), - version='0.4.0', + version='0.5.1', cmdclass=cmdclass, ext_modules=ext_modules, include_dirs=[numpy.get_include()], -- cgit v1.2.3 From b5d6cd7bfab62fd7fb96570cf99b87aeed419a4d Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 4 Oct 2017 20:03:08 -0700 Subject: Bump version --- setup.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) (limited to 'setup.py') diff --git a/setup.py b/setup.py index b53304b..f2315f2 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ to compile cython files: python setup.py build_ext --inplace -Copyright 2014-2016, Mario Mulansky +Copyright 2014-2017, Mario Mulansky Distributed under the BSD License @@ -55,7 +55,7 @@ elif use_c: # c files are there, compile to binaries setup( name='pyspike', packages=find_packages(exclude=['doc']), - version='0.5.1', + version='0.5.2', cmdclass=cmdclass, ext_modules=ext_modules, include_dirs=[numpy.get_include()], @@ -72,7 +72,7 @@ train similarity', # 3 - Alpha # 4 - Beta # 5 - Production/Stable - 'Development Status :: 3 - Alpha', + 'Development Status :: 3 - Beta', # Indicate who your project is intended for 'Intended Audience :: Science/Research', @@ -82,12 +82,12 @@ train similarity', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6' ] ) -- cgit v1.2.3 From ecd4b5f0f7e93859c1262593e2c09e1eb6775819 Mon Sep 17 00:00:00 2001 From: Dan Meliza Date: Mon, 2 Oct 2017 12:15:36 -0400 Subject: defer numpy import to allow install_requires to do its job (fixes #24) --- Changelog | 6 ++++++ setup.py | 11 +++++++++-- 2 files changed, 15 insertions(+), 2 deletions(-) (limited to 'setup.py') diff --git a/Changelog b/Changelog index 2be5e52..21b7cb0 100644 --- a/Changelog +++ b/Changelog @@ -1,3 +1,9 @@ +PySpike v0.5: + * First beta release + * Python 2.6 support removed + * Python 3.6 support added + * several bugfixes + PySpike v0.4: * Python 3 support (thanks to Igor Gnatenko) * list interface to SpikeTrain class diff --git a/setup.py b/setup.py index f2315f2..afcfa16 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,6 @@ Distributed under the BSD License from setuptools import setup, find_packages from distutils.extension import Extension import os.path -import numpy try: from Cython.Distutils import build_ext @@ -21,6 +20,14 @@ except ImportError: else: use_cython = True + +class numpy_include(object): + """Defers import of numpy until install_requires is through""" + def __str__(self): + import numpy + return numpy.get_include() + + if os.path.isfile("pyspike/cython/cython_add.c") and \ os.path.isfile("pyspike/cython/cython_profiles.c") and \ os.path.isfile("pyspike/cython/cython_distances.c"): @@ -58,7 +65,7 @@ setup( version='0.5.2', cmdclass=cmdclass, ext_modules=ext_modules, - include_dirs=[numpy.get_include()], + include_dirs=[numpy_include()], description='A Python library for the 
numerical analysis of spike\ train similarity', author='Mario Mulansky', -- cgit v1.2.3 From 7d661660f143c1e690efa6dc2d526db635dc079a Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 4 Oct 2017 22:08:35 -0700 Subject: Fix typo in email --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'setup.py') diff --git a/setup.py b/setup.py index afcfa16..76fce07 100644 --- a/setup.py +++ b/setup.py @@ -69,7 +69,7 @@ setup( description='A Python library for the numerical analysis of spike\ train similarity', author='Mario Mulansky', - author_email='mario.mulanskygmx.net', + author_email='mario.mulansky@gmx.net', license='BSD', url='https://github.com/mariomulansky/PySpike', install_requires=['numpy'], -- cgit v1.2.3 From ece06870ab729f28fa1771f49725f25c6a25576d Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 4 Oct 2017 22:27:44 -0700 Subject: Fix library status string --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'setup.py') diff --git a/setup.py b/setup.py index 76fce07..be5d209 100644 --- a/setup.py +++ b/setup.py @@ -79,7 +79,7 @@ train similarity', # 3 - Alpha # 4 - Beta # 5 - Production/Stable - 'Development Status :: 3 - Beta', + 'Development Status :: 4 - Beta', # Indicate who your project is intended for 'Intended Audience :: Science/Research', -- cgit v1.2.3 From 16040c3fb9f61f7753172f6d67ac7b22c961dcaa Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 4 Oct 2017 22:35:08 -0700 Subject: Version bump --- doc/conf.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) (limited to 'setup.py') diff --git a/doc/conf.py b/doc/conf.py index f9ec320..25aa9c9 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -66,7 +66,7 @@ copyright = u'2014-2017, Mario Mulansky' # The short X.Y version. version = '0.5' # The full version, including alpha/beta/rc tags. -release = '0.5.2' +release = '0.5.3' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/setup.py b/setup.py index be5d209..5b9e677 100644 --- a/setup.py +++ b/setup.py @@ -62,7 +62,7 @@ elif use_c: # c files are there, compile to binaries setup( name='pyspike', packages=find_packages(exclude=['doc']), - version='0.5.2', + version='0.5.3', cmdclass=cmdclass, ext_modules=ext_modules, include_dirs=[numpy_include()], -- cgit v1.2.3 From 34bd30415dd93a2425ce566627e24ee9483ada3e Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Thu, 20 Sep 2018 10:49:42 -0700 Subject: Spike Order support (#39)

* reorganized directionality module
* further refactoring of directionality
* completed python directionality backend
* added SPIKE-Sync based filtering: new function filter_by_spike_sync removes spikes that have a multi-variate Spike Sync value below some threshold; not yet fully tested, python backend missing.
* spike sync filtering, cython sim ann: added function for filtering out events based on a threshold for the spike sync values. Useful for focusing on synchronous events during directionality analysis. Also added cython version of simulated annealing for performance.
* added coincidence single profile to python backend: missing function in python backend added; identified and fixed a bug in the implementation as well
* updated test case to new spike sync behavior
* python3 fixes
* another python3 fix
* Fix absolute imports in directionality measures
* remove commented code
* Add directionality to docs, bump version
* Clean up directionality module, add doxy.
* Remove debug print from tests
* Fix bug in calling Python backend
* Fix incorrect integrals in PieceWiseConstFunc (#36): add (some currently failing) tests for PieceWiseConstFunc.integral; fix implementation of PieceWiseConstFunc.integral by adding a special condition for when we are only taking an integral "between" two edges of a PieceWiseConstFunc. All tests now pass. Fixes #33. Add PieceWiseConstFunc.integral tests for ValueError, add testing bounds of integral, raise ValueError in function implementation.
* Fix incorrect integrals in PieceWiseLinFunc (#38): integrals of piece-wise linear functions were incorrect if the requested interval lies completely between two support points. This has been fixed, and a unit test exercising this behavior was added. Fixes #38
* Add Spike Order example and Tutorial section: adds an example computing spike order profile and the optimal spike train order. Also adds a section on spike train order to the tutorial.
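Everything except `filter_by_spike_sync` is exercised by the tutorial and example added below; for the filtering, a hypothetical sketch (only the function name is confirmed by the `pyspike/__init__.py` hunk, the threshold argument is an assumption based on the description above):

    import pyspike as spk

    spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100])
                    for m in range(20)]

    # drop all spikes whose multivariate SPIKE-Sync value falls below
    # the threshold, keeping only reasonably synchronous events for the
    # subsequent directionality analysis
    filtered = spk.filter_by_spike_sync(spike_trains, 0.5)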
--- Changelog | 3 + Readme.rst | 9 +- doc/pyspike.rst | 6 + doc/tutorial.rst | 66 +++ examples/spike_train_order.py | 52 +++ pyspike/PieceWiseConstFunc.py | 32 +- pyspike/PieceWiseLinFunc.py | 42 +- pyspike/__init__.py | 16 +- pyspike/cython/cython_directionality.pyx | 262 ++++++++++++ pyspike/cython/cython_distances.pyx | 200 +++++++++ pyspike/cython/cython_profiles.pyx | 33 ++ pyspike/cython/cython_simulated_annealing.pyx | 82 ++++ pyspike/cython/directionality_python_backend.py | 144 +++++++ pyspike/cython/python_backend.py | 67 ++- pyspike/spike_directionality.py | 522 ++++++++++++++++++++++++ pyspike/spike_sync.py | 55 ++- setup.py | 28 +- test/test_directionality.py | 97 +++++ test/test_function.py | 62 +++ test/test_sync_filter.py | 95 +++++ 20 files changed, 1812 insertions(+), 61 deletions(-) create mode 100644 examples/spike_train_order.py create mode 100644 pyspike/cython/cython_directionality.pyx create mode 100644 pyspike/cython/cython_simulated_annealing.pyx create mode 100644 pyspike/cython/directionality_python_backend.py create mode 100644 pyspike/spike_directionality.py create mode 100644 test/test_directionality.py create mode 100644 test/test_sync_filter.py (limited to 'setup.py') diff --git a/Changelog b/Changelog index 21b7cb0..88e16cc 100644 --- a/Changelog +++ b/Changelog @@ -1,3 +1,6 @@ +PySpike v0.6: + * Support for computing spike directionality and spike train order + PySpike v0.5: * First beta release * Python 2.6 support removed diff --git a/Readme.rst b/Readme.rst index 0422dad..74b014b 100644 --- a/Readme.rst +++ b/Readme.rst @@ -31,19 +31,14 @@ Additionally, depending on the used methods: ISI-distance [1], SPIKE-distance [2 Important Changelog ----------------------------- +With version 0.6.0, the spike directionality and spike train order function have been added. + With version 0.5.0, the interfaces have been unified and the specific functions for multivariate computations have become deprecated. With version 0.2.0, the :code:`SpikeTrain` class has been introduced to represent spike trains. This is a breaking change in the function interfaces. Hence, programs written for older versions of PySpike (0.1.x) will not run with newer versions. - -Upcoming Functionality -------------------------- - -In an upcoming release, new functionality for analyzing Synfire patterns based on the new measures SPIKE-Order and Spike-Train-Order method will become part of the PySpike library. -The new measures and algorithms are described in `this preprint `_. - Requirements and Installation ----------------------------- diff --git a/doc/pyspike.rst b/doc/pyspike.rst index 74ab439..3b10d2a 100644 --- a/doc/pyspike.rst +++ b/doc/pyspike.rst @@ -64,6 +64,12 @@ PSTH :undoc-members: :show-inheritance: +Directionality +........................................ +.. automodule:: pyspike.spike_directionality + :members: + :undoc-members: + :show-inheritance: Helper functions ........................................ diff --git a/doc/tutorial.rst b/doc/tutorial.rst index aff03a8..377c0a2 100644 --- a/doc/tutorial.rst +++ b/doc/tutorial.rst @@ -231,3 +231,69 @@ The following example computes and plots the ISI- and SPIKE-distance matrix as w plt.title("SPIKE-Sync") plt.show() + + +Quantifying Leaders and Followers: Spike Train Order +--------------------------------------- + +PySpike provides functionality to quantify how much a set of spike trains +resembles a synfire pattern (ie perfect leader-follower pattern). For details +on the algorithms please see +`our article in NJP `_. 
+ +The following example computes the Spike Order profile and Synfire Indicator +of two Poissonian spike trains. + +.. code:: python + import numpy as np + from matplotlib import pyplot as plt + import pyspike as spk + + + st1 = spk.generate_poisson_spikes(1.0, [0, 20]) + st2 = spk.generate_poisson_spikes(1.0, [0, 20]) + + d = spk.spike_directionality(st1, st2) + + print "Spike Directionality of two Poissonian spike trains:", d + + E = spk.spike_train_order_profile(st1, st2) + + plt.figure() + x, y = E.get_plottable_data() + plt.plot(x, y, '-ob') + plt.ylim(-1.1, 1.1) + plt.xlabel("t") + plt.ylabel("E") + plt.title("Spike Train Order Profile") + + plt.show() + +Additionally, PySpike can also compute the optimal ordering of the spike trains, +ie the ordering that most resembles a synfire pattern. The following example +computes the optimal order of a set of 20 Poissonian spike trains: + +.. code:: python + + M = 20 + spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100]) for m in xrange(M)] + + F_init = spk.spike_train_order(spike_trains) + print "Initial Synfire Indicator for 20 Poissonian spike trains:", F_init + + D_init = spk.spike_directionality_matrix(spike_trains) + phi, _ = spk.optimal_spike_train_sorting(spike_trains) + F_opt = spk.spike_train_order(spike_trains, indices=phi) + print "Synfire Indicator of optimized spike train sorting:", F_opt + + D_opt = spk.permutate_matrix(D_init, phi) + + plt.figure() + plt.imshow(D_init) + plt.title("Initial Directionality Matrix") + + plt.figure() + plt.imshow(D_opt) + plt.title("Optimized Directionality Matrix") + + plt.show() diff --git a/examples/spike_train_order.py b/examples/spike_train_order.py new file mode 100644 index 0000000..3a42472 --- /dev/null +++ b/examples/spike_train_order.py @@ -0,0 +1,52 @@ +import numpy as np +from matplotlib import pyplot as plt +import pyspike as spk + + +st1 = spk.generate_poisson_spikes(1.0, [0, 20]) +st2 = spk.generate_poisson_spikes(1.0, [0, 20]) + +d = spk.spike_directionality(st1, st2) + +print "Spike Directionality of two Poissonian spike trains:", d + +E = spk.spike_train_order_profile(st1, st2) + +plt.figure() +x, y = E.get_plottable_data() +plt.plot(x, y, '-ob') +plt.ylim(-1.1, 1.1) +plt.xlabel("t") +plt.ylabel("E") +plt.title("Spike Train Order Profile") + + +###### Optimize spike train order of 20 Random spike trains ####### + +M = 20 + +spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100]) for m in xrange(M)] + +F_init = spk.spike_train_order(spike_trains) + +print "Initial Synfire Indicator for 20 Poissonian spike trains:", F_init + +D_init = spk.spike_directionality_matrix(spike_trains) + +phi, _ = spk.optimal_spike_train_sorting(spike_trains) + +F_opt = spk.spike_train_order(spike_trains, indices=phi) + +print "Synfire Indicator of optimized spike train sorting:", F_opt + +D_opt = spk.permutate_matrix(D_init, phi) + +plt.figure() +plt.imshow(D_init) +plt.title("Initial Directionality Matrix") + +plt.figure() +plt.imshow(D_opt) +plt.title("Optimized Directionality Matrix") + +plt.show() diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py index 5ce5f27..17fdd3f 100644 --- a/pyspike/PieceWiseConstFunc.py +++ b/pyspike/PieceWiseConstFunc.py @@ -129,19 +129,31 @@ class PieceWiseConstFunc(object): # no interval given, integrate over the whole spike train a = np.sum((self.x[1:]-self.x[:-1]) * self.y) else: + if interval[0]>interval[1]: + raise ValueError("Invalid averaging interval: interval[0]>=interval[1]") + if interval[0]self.x[-1]: + raise 
ValueError("Invalid averaging interval: interval[0] 0 and end_ind < len(self.x), \ - "Invalid averaging interval" - # first the contribution from between the indices - a = np.sum((self.x[start_ind+1:end_ind+1] - - self.x[start_ind:end_ind]) * - self.y[start_ind:end_ind]) - # correction from start to first index - a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1] - # correction from last index to end - a += (interval[1]-self.x[end_ind]) * self.y[end_ind] + if start_ind > end_ind: + # contribution from between two closest edges + a = (self.x[start_ind]-self.x[end_ind]) * self.y[end_ind] + # minus the part that is not within the interval + a -= ((interval[0]-self.x[end_ind])+(self.x[start_ind]-interval[1])) * self.y[end_ind] + else: + assert start_ind > 0 and end_ind < len(self.x), \ + "Invalid averaging interval" + # first the contribution from between the indices + a = np.sum((self.x[start_ind+1:end_ind+1] - + self.x[start_ind:end_ind]) * + self.y[start_ind:end_ind]) + # correction from start to first index + a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1] + # correction from last index to end + a += (interval[1]-self.x[end_ind]) * self.y[end_ind] return a def avrg(self, interval=None): diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py index 8145e63..8faaec4 100644 --- a/pyspike/PieceWiseLinFunc.py +++ b/pyspike/PieceWiseLinFunc.py @@ -146,31 +146,47 @@ class PieceWiseLinFunc: if interval is None: # no interval given, integrate over the whole spike train - integral = np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2)) + return np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2)) + + # find the indices corresponding to the interval + start_ind = np.searchsorted(self.x, interval[0], side='right') + end_ind = np.searchsorted(self.x, interval[1], side='left')-1 + assert start_ind > 0 and end_ind < len(self.x), \ + "Invalid averaging interval" + if start_ind > end_ind: + print(start_ind, end_ind, self.x[start_ind]) + # contribution from between two closest edges + y_x0 = intermediate_value(self.x[start_ind-1], + self.x[start_ind], + self.y1[start_ind-1], + self.y2[start_ind-1], + interval[0]) + y_x1 = intermediate_value(self.x[start_ind-1], + self.x[start_ind], + self.y1[start_ind-1], + self.y2[start_ind-1], + interval[1]) + print(y_x0, y_x1, interval[1] - interval[0]) + integral = (y_x0 + y_x1) * 0.5 * (interval[1] - interval[0]) + print(integral) else: - # find the indices corresponding to the interval - start_ind = np.searchsorted(self.x, interval[0], side='right') - end_ind = np.searchsorted(self.x, interval[1], side='left')-1 - assert start_ind > 0 and end_ind < len(self.x), \ - "Invalid averaging interval" # first the contribution from between the indices integral = np.sum((self.x[start_ind+1:end_ind+1] - - self.x[start_ind:end_ind]) * - 0.5*(self.y1[start_ind:end_ind] + - self.y2[start_ind:end_ind])) + self.x[start_ind:end_ind]) * + 0.5*(self.y1[start_ind:end_ind] + + self.y2[start_ind:end_ind])) # correction from start to first index integral += (self.x[start_ind]-interval[0]) * 0.5 * \ (self.y2[start_ind-1] + - intermediate_value(self.x[start_ind-1], + intermediate_value(self.x[start_ind-1], self.x[start_ind], self.y1[start_ind-1], self.y2[start_ind-1], - interval[0] - )) + interval[0])) # correction from last index to end integral += (interval[1]-self.x[end_ind]) * 0.5 * \ (self.y1[end_ind] + - intermediate_value(self.x[end_ind], self.x[end_ind+1], + intermediate_value(self.x[end_ind], self.x[end_ind+1], self.y1[end_ind], 
                                             self.y2[end_ind],
                                             interval[1]))

diff --git a/pyspike/__init__.py b/pyspike/__init__.py
index 08253fb..3897d18 100644
--- a/pyspike/__init__.py
+++ b/pyspike/__init__.py
@@ -1,5 +1,5 @@
 """
-Copyright 2014-2015, Mario Mulansky
+Copyright 2014-2018, Mario Mulansky
 
 Distributed under the BSD License
 
@@ -7,8 +7,8 @@ Distributed under the BSD License
 from __future__ import absolute_import
 
 __all__ = ["isi_distance", "spike_distance", "spike_sync", "psth",
-           "spikes", "SpikeTrain", "PieceWiseConstFunc", "PieceWiseLinFunc",
-           "DiscreteFunc", "directionality"]
+           "spikes", "spike_directionality", "SpikeTrain",
+           "PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc"]
 
 from .PieceWiseConstFunc import PieceWiseConstFunc
 from .PieceWiseLinFunc import PieceWiseLinFunc
@@ -20,13 +20,21 @@ from .isi_distance import isi_profile, isi_distance, isi_profile_multi,\
 from .spike_distance import spike_profile, spike_distance, spike_profile_multi,\
     spike_distance_multi, spike_distance_matrix
 from .spike_sync import spike_sync_profile, spike_sync,\
-    spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix
+    spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix,\
+    filter_by_spike_sync
 from .psth import psth
 from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \
     spike_train_from_string, import_spike_trains_from_time_series, \
     merge_spike_trains, generate_poisson_spikes
 
+from .spike_directionality import spike_directionality, \
+    spike_directionality_values, spike_directionality_matrix, \
+    spike_train_order_profile, spike_train_order_profile_bi, \
+    spike_train_order_profile_multi, spike_train_order, \
+    spike_train_order_bi, spike_train_order_multi, \
+    optimal_spike_train_sorting, permutate_matrix
+
 # define the __version__ following
 # http://stackoverflow.com/questions/17583443
 from pkg_resources import get_distribution, DistributionNotFound

diff --git a/pyspike/cython/cython_directionality.pyx b/pyspike/cython/cython_directionality.pyx
new file mode 100644
index 0000000..ac37690
--- /dev/null
+++ b/pyspike/cython/cython_directionality.pyx
@@ -0,0 +1,262 @@
+#cython: boundscheck=False
+#cython: wraparound=False
+#cython: cdivision=True
+
+"""
+cython_directionality.pyx
+
+cython implementation of the spike delay asymmetry measures
+
+Copyright 2015, Mario Mulansky
+
+Distributed under the BSD License
+
+"""
+
+"""
+To test whether things can be optimized: remove all yellow stuff
+in the html output::
+
+    cython -a cython_directionality.pyx
+
+which gives::
+
+    cython_directionality.html
+
+"""
+
+import numpy as np
+cimport numpy as np
+
+from libc.math cimport fabs
+from libc.math cimport fmax
+from libc.math cimport fmin
+
+# from pyspike.cython.cython_distances cimport get_tau
+
+DTYPE = np.float
+ctypedef np.float_t DTYPE_t
+
+
+############################################################
+# get_tau
+############################################################
+cdef inline double get_tau(double[:] spikes1, double[:] spikes2,
+                           int i, int j, double interval, double max_tau):
+    cdef double m = interval   # use interval length as initial tau
+    cdef int N1 = spikes1.shape[0]-1   # len(spikes1)-1
+    cdef int N2 = spikes2.shape[0]-1   # len(spikes2)-1
+    if i < N1 and i > -1:
+        m = fmin(m, spikes1[i+1]-spikes1[i])
+    if j < N2 and j > -1:
+        m = fmin(m, spikes2[j+1]-spikes2[j])
+    if i > 0:
+        m = fmin(m, spikes1[i]-spikes1[i-1])
+    if j > 0:
+        m = fmin(m, spikes2[j]-spikes2[j-1])
+    m *= 0.5
+    if max_tau > 0.0:
+        m = fmin(m, max_tau)
+    return m
+
+
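+# Editorial note: get_tau computes the adaptive coincidence window known from
+# SPIKE-synchronization: half the minimum of the inter-spike intervals
+# surrounding the two spikes i and j, optionally capped by max_tau.
+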
+############################################################
+# spike_train_order_profile_cython
+############################################################
+def spike_train_order_profile_cython(double[:] spikes1, double[:] spikes2,
+                                     double t_start, double t_end,
+                                     double max_tau):
+
+    cdef int N1 = len(spikes1)
+    cdef int N2 = len(spikes2)
+    cdef int i = -1
+    cdef int j = -1
+    cdef int n = 0
+    cdef double[:] st = np.zeros(N1 + N2 + 2)   # spike times
+    cdef double[:] a = np.zeros(N1 + N2 + 2)    # asymmetry values
+    cdef double[:] mp = np.ones(N1 + N2 + 2)    # multiplicity
+    cdef double interval = t_end - t_start
+    cdef double tau
+    while i + j < N1 + N2 - 2:
+        if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+            i += 1
+            n += 1
+            tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+            st[n] = spikes1[i]
+            if j > -1 and spikes1[i]-spikes2[j] < tau:
+                # coincidence between the current spike and the previous spike
+                # spike from spike train 1 after spike train 2
+                # both get marked with -1
+                a[n] = -1
+                a[n-1] = -1
+        elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+            j += 1
+            n += 1
+            tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+            st[n] = spikes2[j]
+            if i > -1 and spikes2[j]-spikes1[i] < tau:
+                # coincidence between the current spike and the previous spike
+                # spike from spike train 1 before spike train 2
+                # both get marked with 1
+                a[n] = 1
+                a[n-1] = 1
+        else:   # spikes1[i+1] == spikes2[j+1]
+            # advance in both spike trains
+            j += 1
+            i += 1
+            n += 1
+            # add only one event with zero asymmetry value and multiplicity 2
+            st[n] = spikes1[i]
+            a[n] = 0
+            mp[n] = 2
+
+    st = st[:n+2]
+    a = a[:n+2]
+    mp = mp[:n+2]
+
+    st[0] = t_start
+    st[len(st)-1] = t_end
+    if N1 + N2 > 0:
+        a[0] = a[1]
+        a[len(a)-1] = a[len(a)-2]
+        mp[0] = mp[1]
+        mp[len(mp)-1] = mp[len(mp)-2]
+    else:
+        a[0] = 1
+        a[1] = 1
+
+    return st, a, mp
+
+
+############################################################
+# spike_train_order_cython
+############################################################
+def spike_train_order_cython(double[:] spikes1, double[:] spikes2,
+                             double t_start, double t_end, double max_tau):
+
+    cdef int N1 = len(spikes1)
+    cdef int N2 = len(spikes2)
+    cdef int i = -1
+    cdef int j = -1
+    cdef int d = 0
+    cdef int mp = 0
+    cdef double interval = t_end - t_start
+    cdef double tau
+    while i + j < N1 + N2 - 2:
+        if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+            i += 1
+            mp += 1
+            tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+            if j > -1 and spikes1[i]-spikes2[j] < tau:
+                # coincidence between the current spike and the previous spike
+                # spike in spike train 2 appeared before spike in spike train 1
+                # count the pair with -2, i.e. -1 for each of the two spikes
+                d -= 2
+        elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+            j += 1
+            mp += 1
+            tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+            if i > -1 and spikes2[j]-spikes1[i] < tau:
+                # coincidence between the current spike and the previous spike
+                # spike in spike train 1 appeared before spike in spike train 2
+                # count the pair with +2, i.e. +1 for each of the two spikes
+                d += 2
+        else:   # spikes1[i+1] == spikes2[j+1]
+            # advance in both spike trains
+            j += 1
+            i += 1
+            # add only one event with multiplicity 2, but no asymmetry counting
+            mp += 2
+
+    if d == 0 and mp == 0:
+        # empty spike trains -> spike sync = 1 by definition
+        d = 1
+        mp = 1
+
+    return d, mp
+
+
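+# Editorial note: the normalized Synfire Indicator reported by the caller
+# (spike_train_order_bi) is simply d/mp, i.e. the asymmetry count returned
+# here divided by the total multiplicity.
+
+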
+############################################################
+# spike_directionality_profiles_cython
+############################################################
+def spike_directionality_profiles_cython(double[:] spikes1,
+                                         double[:] spikes2,
+                                         double t_start, double t_end,
+                                         double max_tau):
+
+    cdef int N1 = len(spikes1)
+    cdef int N2 = len(spikes2)
+    cdef int i = -1
+    cdef int j = -1
+    cdef double[:] d1 = np.zeros(N1)   # directionality values
+    cdef double[:] d2 = np.zeros(N2)   # directionality values
+    cdef double interval = t_end - t_start
+    cdef double tau
+    while i + j < N1 + N2 - 2:
+        if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+            i += 1
+            tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+            if j > -1 and spikes1[i]-spikes2[j] < tau:
+                # coincidence between the current spike and the previous spike
+                # spike from spike train 1 after spike train 2
+                # leading spike gets +1, following spike -1
+                d1[i] = -1
+                d2[j] = +1
+        elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+            j += 1
+            tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+            if i > -1 and spikes2[j]-spikes1[i] < tau:
+                # coincidence between the current spike and the previous spike
+                # spike from spike train 1 before spike train 2
+                # leading spike gets +1, following spike -1
+                d1[i] = +1
+                d2[j] = -1
+        else:   # spikes1[i+1] == spikes2[j+1]
+            # advance in both spike trains
+            j += 1
+            i += 1
+            # equal spike times: zero asymmetry value
+            d1[i] = 0
+            d2[j] = 0
+
+    return d1, d2
+
+
+############################################################
+# spike_directionality_cython
+############################################################
+def spike_directionality_cython(double[:] spikes1,
+                                double[:] spikes2,
+                                double t_start, double t_end,
+                                double max_tau):
+
+    cdef int N1 = len(spikes1)
+    cdef int N2 = len(spikes2)
+    cdef int i = -1
+    cdef int j = -1
+    cdef int d = 0   # directionality value
+    cdef double interval = t_end - t_start
+    cdef double tau
+    while i + j < N1 + N2 - 2:
+        if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+            i += 1
+            tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+            if j > -1 and spikes1[i]-spikes2[j] < tau:
+                # coincidence between the current spike and the previous spike
+                # spike from spike train 1 after spike train 2
+                # the first spike train is the follower here: count -1
+                d -= 1
+        elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+            j += 1
+            tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+            if i > -1 and spikes2[j]-spikes1[i] < tau:
+                # coincidence between the current spike and the previous spike
+                # spike from spike train 1 before spike train 2
+                # the first spike train is the leader here: count +1
+                d += 1
+        else:   # spikes1[i+1] == spikes2[j+1]
+            # advance in both spike trains
+            j += 1
+            i += 1
+
+    return d

diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx
index ac5f226..d4070ae 100644
--- a/pyspike/cython/cython_distances.pyx
+++ b/pyspike/cython/cython_distances.pyx
@@ -178,6 +178,8 @@ cdef inline double isi_avrg_cython(double isi1, double isi2) nogil:
     return 0.5*(isi1+isi2)*(isi1+isi2)
     # alternative definition to obtain ~ 0.5 for Poisson spikes
     # return 0.5*(isi1*isi1+isi2*isi2)
+    # another alternative definition without second normalization
+    # return 0.5*(isi1+isi2)
 
 
 ############################################################
@@ -248,6 +250,8 @@ def spike_distance_cython(double[:] t1, double[:] t2,
     index2 = 0
 
     y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+    # alternative definition without second normalization
+    # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2)
     index = 1
 
     while index1+index2 < N1+N2-2:
@@ -267,6 +271,8 @@ def spike_distance_cython(double[:] t1, double[:] t2,
             t_curr = t_p1
             s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
             y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+            # alternative definition without second normalization
+            # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2)
 
             spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
 
@@ -286,6 +292,8 @@ def spike_distance_cython(double[:] t1, double[:] t2,
                 s1 = dt_p1
             # s2 is the same as above, thus we can compute y2 immediately
             y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+            # alternative definition without second normalization
+            # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2)
         elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1):
             index2 += 1
             # first calculate the previous interval end value
@@ -301,6 +309,8 @@ def spike_distance_cython(double[:] t1, double[:] t2,
             t_curr = t_p2
             s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
             y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+            # alternative definition without second normalization
+            # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2)
 
             spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
 
@@ -320,6 +330,9 @@ def spike_distance_cython(double[:] t1, double[:] t2,
                 s2 = dt_p2
             # s1 is the same as above, thus we can compute y2 immediately
             y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+            # alternative definition without second normalization
+            # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2)
+
         else:  # t_f1 == t_f2 - generate only one event
             index1 += 1
             index2 += 1
@@ -358,6 +371,193 @@ def spike_distance_cython(double[:] t1, double[:] t2,
     s1 = dt_f1  # *(t_end-t1[N1-1])/isi1
     s2 = dt_f2  # *(t_end-t2[N2-1])/isi2
     y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+    # alternative definition without second normalization
+    # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2)
 
     spike_value += 0.5*(y_start + y_end) * (t_end - t_last)
     # end nogil
 
     # use only the data added above
     # could be less than original length due to equal spike times
     return spike_value / (t_end-t_start)
+
+
+############################################################
+# isi_avrg_rf_cython
+############################################################
+cdef inline double isi_avrg_rf_cython(double isi1, double isi2) nogil:
+    # rate free version
+    return (isi1+isi2)
+
+
+############################################################
+# spike_distance_rf_cython
+############################################################
+def spike_distance_rf_cython(double[:] t1, double[:] t2,
+                             double t_start, double t_end):
+
+    cdef int N1, N2, index1, index2, index
+    cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2
+    cdef double isi1, isi2, s1, s2
+    cdef double y_start, y_end, t_last, t_current, spike_value
+
+    spike_value = 0.0
+
+    N1 = len(t1)
+    N2 = len(t2)
+
+    with nogil:  # release the interpreter to allow multithreading
+        t_last = t_start
+        t_p1 = t_start
+        t_p2 = t_start
+        if t1[0] > t_start:
+            # dt_p1 = t2[0]-t_start
+            t_f1 = t1[0]
+            dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
+            isi1 = fmax(t_f1-t_start, t1[1]-t1[0])
+            dt_p1 = dt_f1
+            s1 = dt_p1*(t_f1-t_start)/isi1
+            index1 = -1
+        else:
+            t_f1 = t1[1]
+            dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
+            dt_p1 = 0.0
+            isi1 = t1[1]-t1[0]
+            s1 = dt_p1
+            index1 = 0
+        if t2[0] > t_start:
+            # dt_p1 = t2[0]-t_start
+            t_f2 = t2[0]
+            dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
+            dt_p2 = dt_f2
+            isi2 = fmax(t_f2-t_start, t2[1]-t2[0])
+            s2 = dt_p2*(t_f2-t_start)/isi2
+            index2 = -1
+        else:
+            t_f2 = t2[1]
+            dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
+            dt_p2 = 0.0
+            isi2 = t2[1]-t2[0]
+            s2 = dt_p2
+            index2 = 0
+
+        # y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+        # alternative definition without second normalization
+        y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+        index = 1
+
+        while index1+index2 < N1+N2-2:
+            # print(index, index1, index2)
+            if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1):
+                index1 += 1
+                # first calculate the previous interval end value
+                s1 = dt_f1*(t_f1-t_p1) / isi1
+                # the previous time now was the following time before:
+                dt_p1 = dt_f1
+                t_p1 = t_f1    # t_p1 contains the current time point
+                # get the next time
+                if index1 < N1-1:
+                    t_f1 = t1[index1+1]
+                else:
+                    t_f1 = t_end
+                t_curr = t_p1
+                s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
+                # y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+                # alternative definition without second normalization
+                y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+
+                spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+
+                # now the next interval start value
+                if index1 < N1-1:
+                    dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
+                                                t_start, t_end)
+                    isi1 = t_f1-t_p1
+                    s1 = dt_p1
+                else:
+                    dt_f1 = dt_p1
+                    isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+                    # s1 needs adjustment due to change of isi1
+                    s1 = dt_p1*(t_end-t1[N1-1])/isi1
+                # s2 is the same as above, thus we can compute y2 immediately
+                # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+                # alternative definition without second normalization
+                y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+            elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1):
+                index2 += 1
+                # first calculate the previous interval end value
+                s2 = dt_f2*(t_f2-t_p2) / isi2
+                # the previous time now was the following time before:
+                dt_p2 = dt_f2
+                t_p2 = t_f2    # t_p2 contains the current time point
+                # get the next time
+                if index2 < N2-1:
+                    t_f2 = t2[index2+1]
+                else:
+                    t_f2 = t_end
+                t_curr = t_p2
+                s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
+                # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+                # alternative definition without second normalization
+                y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+
+                spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+
+                # now the next interval start value
+                if index2 < N2-1:
+                    dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
+                                                t_start, t_end)
+                    isi2 = t_f2-t_p2
+                    s2 = dt_p2
+                else:
+                    dt_f2 = dt_p2
+                    isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+                    # s2 needs adjustment due to change of isi2
+                    s2 = dt_p2*(t_end-t2[N2-1])/isi2
+                # s1 is the same as above, thus we can compute y2 immediately
+                # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+                # alternative definition without second normalization
+                y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+
+            else:  # t_f1 == t_f2 - generate only one event
+                index1 += 1
+                index2 += 1
+                t_p1 = t_f1
+                t_p2 = t_f2
+                dt_p1 = 0.0
+                dt_p2 = 0.0
+                t_curr = t_f1
+                y_end = 0.0
+                spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
+                y_start = 0.0
+                if index1 < N1-1:
+                    t_f1 = t1[index1+1]
+                    dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
+                                                t_start, t_end)
+                    isi1 = t_f1 - t_p1
+                else:
+                    t_f1 = t_end
+                    dt_f1 = dt_p1
+                    isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+                if index2 < N2-1:
+                    t_f2 = t2[index2+1]
+                    dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
+                                                t_start, t_end)
+                    isi2 = t_f2 - t_p2
+                else:
+                    t_f2 = t_end
+                    dt_f2 = dt_p2
+                    isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+            index += 1
+            t_last = t_curr
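+        # end of the while loop: the final segment up to t_end is added below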
+        # isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+        # isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+        s1 = dt_f1*(t_end-t1[N1-1])/isi1
+        s2 = dt_f2*(t_end-t2[N2-1])/isi2
+        # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+        # alternative definition without second normalization
+        y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+
+        spike_value += 0.5*(y_start + y_end) * (t_end - t_last)
+    # end nogil
+
+    return spike_value / (t_end-t_start)

diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx
index 4a42cdb..aa24db4 100644
--- a/pyspike/cython/cython_profiles.pyx
+++ b/pyspike/cython/cython_profiles.pyx
@@ -450,3 +450,36 @@ def coincidence_profile_cython(double[:] spikes1, double[:] spikes2,
         c[1] = 1
 
     return st, c, mp
+
+
+############################################################
+# coincidence_single_profile_cython
+############################################################
+def coincidence_single_profile_cython(double[:] spikes1, double[:] spikes2,
+                                      double t_start, double t_end,
+                                      double max_tau):
+
+    cdef int N1 = len(spikes1)
+    cdef int N2 = len(spikes2)
+    cdef int j = -1
+    cdef double[:] c = np.zeros(N1)   # coincidences
+    cdef double interval = t_end - t_start
+    cdef double tau
+    for i in range(N1):
+        while j < N2-1 and spikes2[j+1] < spikes1[i]:
+            # move forward until spikes2[j] is the last spike before spikes1[i]
+            # note that if spikes2[j] is after spikes1[i] we don't do anything
+            j += 1
+        tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+        if j > -1 and fabs(spikes1[i]-spikes2[j]) < tau:
+            # current spike in st1 is coincident
+            c[i] = 1
+        if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]):
+            # in case spikes2[j] is before spikes1[i] it has to be the one
+            # right before (see above), hence we move one forward and also
+            # check the next spike
+            j += 1
+            tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+            if fabs(spikes2[j]-spikes1[i]) < tau:
+                # current spike in st1 is coincident
+                c[i] = 1
+    return c
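+
+# Editorial note: the per-spike coincidence indicators computed here are
+# consumed by pyspike.spike_sync.filter_by_spike_sync to decide which spikes
+# exceed the synchronization threshold.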
diff --git a/pyspike/cython/cython_simulated_annealing.pyx b/pyspike/cython/cython_simulated_annealing.pyx
new file mode 100644
index 0000000..be9423c
--- /dev/null
+++ b/pyspike/cython/cython_simulated_annealing.pyx
@@ -0,0 +1,82 @@
+#cython: boundscheck=False
+#cython: wraparound=False
+#cython: cdivision=True
+
+"""
+cython_simulated_annealing.pyx
+
+cython implementation of a simulated annealing algorithm to find the optimal
+spike train order
+
+Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects
+improves the performance by a factor of 10!
+
+Copyright 2015, Mario Mulansky
+
+Distributed under the BSD License
+
+"""
+
+"""
+To test whether things can be optimized: remove all yellow stuff
+in the html output::
+
+    cython -a cython_simulated_annealing.pyx
+
+which gives::
+
+    cython_simulated_annealing.html
+
+"""
+
+import numpy as np
+cimport numpy as np
+
+from libc.math cimport exp
+from libc.math cimport fmod
+from libc.stdlib cimport rand
+from libc.stdlib cimport RAND_MAX
+
+DTYPE = np.float
+ctypedef np.float_t DTYPE_t
+
+
+def sim_ann_cython(double[:, :] D, double T_start, double T_end, double alpha):
+
+    cdef long N = len(D)
+    cdef double A = np.sum(np.triu(D, 0))
+    cdef long[:] p = np.arange(N)
+    cdef double T = T_start
+    cdef long iterations
+    cdef long succ_iter
+    cdef long total_iter = 0
+    cdef double delta_A
+    cdef long ind1
+    cdef long ind2
+
+    while T > T_end:
+        iterations = 0
+        succ_iter = 0
+        # equilibrate for 100*N steps or 10*N successful steps
+        while iterations < 100*N and succ_iter < 10*N:
+            # exchange two neighboring rows and cols: since
+            # ind1 = rand() % (N-1) always lies in [0, N-2],
+            # ind2 = ind1+1 is always a valid index
+            # ind1 = np.random.randint(N-1)
+            ind1 = rand() % (N-1)
+            ind2 = ind1 + 1
+            delta_A = -2*D[p[ind1], p[ind2]]
+            if delta_A > 0.0 or exp(delta_A/T) > ((1.0*rand()) / RAND_MAX):
+                # swap indices
+                p[ind1], p[ind2] = p[ind2], p[ind1]
+                A += delta_A
+                succ_iter += 1
+            iterations += 1
+        total_iter += iterations
+        T *= alpha   # cool down
+        if succ_iter == 0:
+            # no successful step -> we believe we have converged
+            break
+
+    return p, A, total_iter
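+
+# Editorial note: the acceptance rule above is the standard Metropolis
+# criterion. Swapping two neighboring entries changes the cost by
+# delta_A = -2*D[p[ind1], p[ind2]]; improving swaps are always accepted,
+# worsening ones with probability exp(delta_A/T), and the temperature T is
+# lowered geometrically by the factor alpha after each equilibration stage.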
diff --git a/pyspike/cython/directionality_python_backend.py b/pyspike/cython/directionality_python_backend.py
new file mode 100644
index 0000000..c1d820b
--- /dev/null
+++ b/pyspike/cython/directionality_python_backend.py
@@ -0,0 +1,144 @@
+""" directionality_python_backend.py
+
+Collection of python functions that can be used instead of the cython
+implementation.
+
+Copyright 2015, Mario Mulansky
+
+Distributed under the BSD License
+
+"""
+
+import numpy as np
+
+
+############################################################
+# spike_directionality_profile_python
+############################################################
+def spike_directionality_profile_python(spikes1, spikes2, t_start, t_end,
+                                        max_tau):
+
+    def get_tau(spikes1, spikes2, i, j, max_tau):
+        m = t_end - t_start   # use interval as initial tau
+        if i < len(spikes1)-1 and i > -1:
+            m = min(m, spikes1[i+1]-spikes1[i])
+        if j < len(spikes2)-1 and j > -1:
+            m = min(m, spikes2[j+1]-spikes2[j])
+        if i > 0:
+            m = min(m, spikes1[i]-spikes1[i-1])
+        if j > 0:
+            m = min(m, spikes2[j]-spikes2[j-1])
+        m *= 0.5
+        if max_tau > 0.0:
+            m = min(m, max_tau)
+        return m
+
+    N1 = len(spikes1)
+    N2 = len(spikes2)
+    i = -1
+    j = -1
+    d1 = np.zeros(N1)   # directionality values
+    d2 = np.zeros(N2)   # directionality values
+    while i + j < N1 + N2 - 2:
+        if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+            i += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            if j > -1 and spikes1[i]-spikes2[j] < tau:
+                # coincidence between the current spike and the previous spike
+                # spike in first spike train occurs after second
+                d1[i] = -1
+                d2[j] = +1
+        elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+            j += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            if i > -1 and spikes2[j]-spikes1[i] < tau:
+                # coincidence between the current spike and the previous spike
+                # spike in second spike train occurs after first
+                d1[i] = +1
+                d2[j] = -1
+        else:   # spikes1[i+1] == spikes2[j+1]
+            # advance in both spike trains
+            j += 1
+            i += 1
+            d1[i] = 0
+            d2[j] = 0
+
+    return d1, d2
+
+
+############################################################
+# spike_train_order_profile_python
+############################################################
+def spike_train_order_profile_python(spikes1, spikes2, t_start, t_end,
+                                     max_tau):
+
+    def get_tau(spikes1, spikes2, i, j, max_tau):
+        m = t_end - t_start   # use interval as initial tau
+        if i < len(spikes1)-1 and i > -1:
+            m = min(m, spikes1[i+1]-spikes1[i])
+        if j < len(spikes2)-1 and j > -1:
+            m = min(m, spikes2[j+1]-spikes2[j])
+        if i > 0:
+            m = min(m, spikes1[i]-spikes1[i-1])
+        if j > 0:
+            m = min(m, spikes2[j]-spikes2[j-1])
+        m *= 0.5
+        if max_tau > 0.0:
+            m = min(m, max_tau)
+        return m
+
+    N1 = len(spikes1)
+    N2 = len(spikes2)
+    i = -1
+    j = -1
+    n = 0
+    st = np.zeros(N1 + N2 + 2)   # spike times
+    a = np.zeros(N1 + N2 + 2)    # coincidences
+    mp = np.ones(N1 + N2 + 2)    # multiplicity
+    while i + j < N1 + N2 - 2:
+        if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+            i += 1
+            n += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            st[n] = spikes1[i]
+            if j > -1 and spikes1[i]-spikes2[j] < tau:
+                # coincidence between the current spike and the previous spike
+                # both get marked with -1
+                a[n] = -1
+                a[n-1] = -1
+        elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+            j += 1
+            n += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            st[n] = spikes2[j]
+            if i > -1 and spikes2[j]-spikes1[i] < tau:
+                # coincidence between the current spike and the previous spike
+                # both get marked with 1
+                a[n] = 1
+                a[n-1] = 1
+        else:   # spikes1[i+1] == spikes2[j+1]
+            # advance in both spike trains
+            j += 1
+            i += 1
+            n += 1
+            # add only one event with zero asymmetry value and multiplicity 2
+            st[n] = spikes1[i]
+            a[n] = 0
+            mp[n] = 2
+
+    st = st[:n+2]
+    a = a[:n+2]
+    mp = mp[:n+2]
+
+    st[0] = t_start
+    st[len(st)-1] = t_end
+    if N1 + N2 > 0:
+        a[0] = a[1]
+        a[len(a)-1] = a[len(a)-2]
+        mp[0] = mp[1]
+        mp[len(mp)-1] = mp[len(mp)-2]
+    else:
+        a[0] = 1
+        a[1] = 1
+
+    return st, a, mp

diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py
index 6b7209a..e75f181 100644
--- a/pyspike/cython/python_backend.py
+++ b/pyspike/cython/python_backend.py
@@ -3,7 +3,7 @@
 Collection of python functions that can be used instead of the cython
 implementation.
 
-Copyright 2014, Mario Mulansky
+Copyright 2014-2015, Mario Mulansky
 
 Distributed under the BSD License
 
@@ -356,26 +356,27 @@ def cumulative_sync_python(spikes1, spikes2):
     return st, c
 
 
+def get_tau(spikes1, spikes2, i, j, max_tau, init_tau):
+    m = init_tau
+    if i < len(spikes1)-1 and i > -1:
+        m = min(m, spikes1[i+1]-spikes1[i])
+    if j < len(spikes2)-1 and j > -1:
+        m = min(m, spikes2[j+1]-spikes2[j])
+    if i > 0:
+        m = min(m, spikes1[i]-spikes1[i-1])
+    if j > 0:
+        m = min(m, spikes2[j]-spikes2[j-1])
+    m *= 0.5
+    if max_tau > 0.0:
+        m = min(m, max_tau)
+    return m
+
+
 ############################################################
 # coincidence_python
 ############################################################
 def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
 
-    def get_tau(spikes1, spikes2, i, j, max_tau):
-        m = t_end - t_start  # use interval as initial tau
-        if i < len(spikes1)-1 and i > -1:
-            m = min(m, spikes1[i+1]-spikes1[i])
-        if j < len(spikes2)-1 and j > -1:
-            m = min(m, spikes2[j+1]-spikes2[j])
-        if i > 0:
-            m = min(m, spikes1[i]-spikes1[i-1])
-        if j > 0:
-            m = min(m, spikes2[j]-spikes2[j-1])
-        m *= 0.5
-        if max_tau > 0.0:
-            m = min(m, max_tau)
-        return m
-
     N1 = len(spikes1)
     N2 = len(spikes2)
     i = -1
@@ -388,7 +389,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
         if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
             i += 1
             n += 1
-            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
             st[n] = spikes1[i]
             if j > -1 and spikes1[i]-spikes2[j] < tau:
                 # coincidence between the current spike and the previous spike
@@ -398,7 +399,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
         elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
             j += 1
             n += 1
-            tau = get_tau(spikes1, spikes2, i, j, max_tau)
+            tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
             st[n] = spikes2[j]
             if i > -1 and spikes2[j]-spikes1[i] < tau:
                 # coincidence between the current spike and the previous spike
@@ -433,6 +434,36 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau):
     return st, c, mp
 
 
+############################################################
+# coincidence_single_python
+############################################################
+def coincidence_single_python(spikes1, spikes2, t_start, t_end, max_tau):
+
+    N1 = len(spikes1)
+    N2 = len(spikes2)
+    j = -1
+    c = np.zeros(N1)   # coincidences
+    for i in range(N1):
+        while j < N2-1 and spikes2[j+1] < spikes1[i]:
+            # move forward until spikes2[j] is the last spike before spikes1[i]
+            # note that if spikes2[j] is after spikes1[i] we don't do anything
+            j += 1
+        tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
+        if j > -1 and abs(spikes1[i]-spikes2[j]) < tau:
+            # current spike in st1 is coincident
+            c[i] = 1
+        if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]):
+            # in case spikes2[j] is before spikes1[i] it has to be the first or
+            # the one right before (see above), hence we move one forward and
+            # also check the next spike
+            j += 1
+            tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start)
+            if abs(spikes2[j]-spikes1[i]) < tau:
+                # current spike in st1 is coincident
+                c[i] = 1
+    return c
+
+
 ############################################################
 # add_piece_wise_const_python
 ############################################################

diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py
new file mode 100644
index 0000000..248862c
--- /dev/null
+++ b/pyspike/spike_directionality.py
@@ -0,0 +1,522 @@
+# Module containing functions to compute the SPIKE directionality and the
+# spike train order profile
+# Copyright 2015, Mario Mulansky
+# Distributed under the BSD License
+
+from __future__ import absolute_import
+
+import numpy as np
+import pyspike
+from pyspike import DiscreteFunc
+from functools import partial
+from pyspike.generic import _generic_profile_multi
+
+
+############################################################
+# spike_directionality_values
+############################################################
+def spike_directionality_values(*args, **kwargs):
+    """ Computes the spike directionality value for each spike in
+    each spike train. Returns a list containing an array of spike
+    directionality values for every given spike train.
+
+    Valid call structures::
+
+      spike_directionality_values(st1, st2)  # returns the bi-variate profile
+      spike_directionality_values(st1, st2, st3)  # multi-variate profile of
+                                                  # 3 spike trains
+
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_directionality_values(spike_trains)  # profile of the list of
+                                                 # spike trains
+      spike_directionality_values(spike_trains, indices=[0, 1])  # use only
+                                                 # the spike trains given by
+                                                 # the indices
+
+    Additional arguments:
+    :param max_tau: Upper bound for coincidence window (default=None).
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+
+    :returns: The spike directionality values :math:`D^n_i` as a list of
+              arrays.
+    """
+    if len(args) == 1:
+        return _spike_directionality_values_impl(args[0], **kwargs)
+    else:
+        return _spike_directionality_values_impl(args, **kwargs)
+
+
+def _spike_directionality_values_impl(spike_trains, indices=None,
+                                      interval=None, max_tau=None):
+    """ Computes the multi-variate spike directionality profile
+    of the given spike trains.
+
+    :param spike_trains: List of spike trains.
+    :type spike_trains: List of :class:`pyspike.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike-directionality values.
+    """
+    if interval is not None:
+        raise NotImplementedError("Parameter `interval` not supported.")
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # list of arrays for resulting asymmetry values
+    asymmetry_list = [np.zeros_like(spike_trains[n].spikes) for n in indices]
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    # cython implementation
+    try:
+        from .cython.cython_directionality import \
+            spike_directionality_profiles_cython as profile_impl
+    except ImportError:
+        if not(pyspike.disable_backend_warning):
+            print("Warning: cython_directionality not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+        # use python backend
+        from .cython.directionality_python_backend import \
+            spike_directionality_profile_python as profile_impl
+
+    if max_tau is None:
+        max_tau = 0.0
+
+    for i, j in pairs:
+        d1, d2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes,
+                              spike_trains[i].t_start, spike_trains[i].t_end,
+                              max_tau)
+        asymmetry_list[i] += d1
+        asymmetry_list[j] += d2
+    for a in asymmetry_list:
+        a /= len(spike_trains)-1
+    return asymmetry_list
+
+
+############################################################
+# spike_directionality
+############################################################
+def spike_directionality(spike_train1, spike_train2, normalize=True,
+                         interval=None, max_tau=None):
+    """ Computes the overall spike directionality of the first spike train
+    with respect to the second spike train.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike directionality value.
+    """
+    if interval is None:
+        # distance over the whole interval is requested: use specific function
+        # for optimal performance
+        try:
+            from .cython.cython_directionality import \
+                spike_directionality_cython as spike_directionality_impl
+            if max_tau is None:
+                max_tau = 0.0
+            d = spike_directionality_impl(spike_train1.spikes,
+                                          spike_train2.spikes,
+                                          spike_train1.t_start,
+                                          spike_train1.t_end,
+                                          max_tau)
+            c = len(spike_train1.spikes)
+        except ImportError:
+            if not(pyspike.disable_backend_warning):
+                print("Warning: cython_directionality not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+            # fall back to computing the profile with the python backend
+            d1, x = spike_directionality_values([spike_train1, spike_train2],
+                                                interval=interval,
+                                                max_tau=max_tau)
+            d = np.sum(d1)
+            c = len(spike_train1.spikes)
+        if normalize:
+            return 1.0*d/c
+        else:
+            return d
+    else:
+        # some specific interval is provided: not yet implemented
+        raise NotImplementedError("Parameter `interval` not supported.")
+
+
+############################################################
+# spike_directionality_matrix
+############################################################
+def spike_directionality_matrix(spike_trains, normalize=True, indices=None,
+                                interval=None, max_tau=None):
+    """ Computes the spike directionality matrix for the given spike trains.
+
+    :param spike_trains: List of spike trains.
+    :type spike_trains: List of :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The matrix of spike-directionality values.
+    """
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    distance_matrix = np.zeros((len(indices), len(indices)))
+    for i, j in pairs:
+        d = spike_directionality(spike_trains[i], spike_trains[j], normalize,
+                                 interval, max_tau=max_tau)
+        distance_matrix[i, j] = d
+        distance_matrix[j, i] = -d
+    return distance_matrix
+
+
+############################################################
+# spike_train_order_profile
+############################################################
+def spike_train_order_profile(*args, **kwargs):
+    """ Computes the spike train order profile :math:`E(t)` of the given
+    spike trains. Returns the profile as a DiscreteFunction object.
+
+    Valid call structures::
+
+      spike_train_order_profile(st1, st2)  # returns the bi-variate profile
+      spike_train_order_profile(st1, st2, st3)  # multi-variate profile of
+                                                # 3 spike trains
+
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_train_order_profile(spike_trains)  # profile of the list of
+                                               # spike trains
+      spike_train_order_profile(spike_trains, indices=[0, 1])  # use only the
+                                               # spike trains given by the
+                                               # indices
+
+    Additional arguments:
+    :param max_tau: Upper bound for coincidence window, `default=None`.
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+
+    :returns: The spike train order profile :math:`E(t)`
+    :rtype: :class:`.DiscreteFunction`
+    """
+    if len(args) == 1:
+        return spike_train_order_profile_multi(args[0], **kwargs)
+    elif len(args) == 2:
+        return spike_train_order_profile_bi(args[0], args[1], **kwargs)
+    else:
+        return spike_train_order_profile_multi(args, **kwargs)
+
+
+############################################################
+# spike_train_order_profile_bi
+############################################################
+def spike_train_order_profile_bi(spike_train1, spike_train2, max_tau=None):
+    """ Computes the spike train order profile :math:`E(t)` of the two given
+    spike trains. Returns the profile as a DiscreteFunction object.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order profile :math:`E(t)`.
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+    """
+    # check whether the spike trains are defined for the same interval
+    assert spike_train1.t_start == spike_train2.t_start, \
+        "Given spike trains are not defined on the same interval!"
+    assert spike_train1.t_end == spike_train2.t_end, \
+        "Given spike trains are not defined on the same interval!"
+
+    # cython implementation
+    try:
+        from .cython.cython_directionality import \
+            spike_train_order_profile_cython as \
+            spike_train_order_profile_impl
+    except ImportError:
+        # raise NotImplementedError()
+        if not(pyspike.disable_backend_warning):
+            print("Warning: cython_directionality not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+        # use python backend
+        from .cython.directionality_python_backend import \
+            spike_train_order_profile_python as spike_train_order_profile_impl
+
+    if max_tau is None:
+        max_tau = 0.0
+
+    times, coincidences, multiplicity \
+        = spike_train_order_profile_impl(spike_train1.spikes,
+                                         spike_train2.spikes,
+                                         spike_train1.t_start,
+                                         spike_train1.t_end,
+                                         max_tau)
+
+    return DiscreteFunc(times, coincidences, multiplicity)
+
+
+############################################################
+# spike_train_order_profile_multi
+############################################################
+def spike_train_order_profile_multi(spike_trains, indices=None,
+                                    max_tau=None):
+    """ Computes the multi-variate spike train order profile for a set of
+    spike trains. For each spike in the set of spike trains, the multi-variate
+    profile is defined as the sum of asymmetry values divided by the number of
+    spike train pairs involving the spike train containing this spike,
+    which is the number of spike trains minus one (N-1).
+
+    :param spike_trains: list of :class:`pyspike.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The multi-variate spike train order profile :math:`E(t)`
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+    """
+    prof_func = partial(spike_train_order_profile_bi, max_tau=max_tau)
+    average_prof, M = _generic_profile_multi(spike_trains, prof_func,
+                                             indices)
+    return average_prof
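+
+
+# Editorial note: averaging this multivariate profile yields the multivariate
+# Synfire Indicator, e.g. (sketch)
+#     F = spike_train_order_profile_multi(spike_trains).avrg()
+# which agrees with spike_train_order(spike_trains); see
+# test/test_directionality.py.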
+
+
+############################################################
+# _spike_train_order_impl
+############################################################
+def _spike_train_order_impl(spike_train1, spike_train2,
+                            interval=None, max_tau=None):
+    """ Implementation of the bi-variate spike train order value
+    (Synfire Indicator).
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    if interval is None:
+        # distance over the whole interval is requested: use specific function
+        # for optimal performance
+        try:
+            from .cython.cython_directionality import \
+                spike_train_order_cython as spike_train_order_func
+            if max_tau is None:
+                max_tau = 0.0
+            c, mp = spike_train_order_func(spike_train1.spikes,
+                                           spike_train2.spikes,
+                                           spike_train1.t_start,
+                                           spike_train1.t_end,
+                                           max_tau)
+        except ImportError:
+            # Cython backend not available: fall back to the profile
+            c, mp = spike_train_order_profile(spike_train1, spike_train2,
+                                              max_tau=max_tau).integral(interval)
+        return c, mp
+    else:
+        # some specific interval is provided: not yet implemented
+        raise NotImplementedError("Parameter `interval` not supported.")
+
+
+############################################################
+# spike_train_order
+############################################################
+def spike_train_order(*args, **kwargs):
+    """ Computes the spike train order (Synfire Indicator) of the given
+    spike trains.
+
+    Valid call structures::
+
+      spike_train_order(st1, st2, normalize=True)  # normalized bi-variate
+                                                   # spike train order
+      spike_train_order(st1, st2, st3)  # multi-variate result of 3 spike trains
+
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_train_order(spike_trains)  # result for the list of spike trains
+      spike_train_order(spike_trains, indices=[0, 1])  # use only the spike
+                                                       # trains given by the
+                                                       # indices
+
+    Additional arguments:
+     - `max_tau` Upper bound for coincidence window, `default=None`.
+     - `normalize` Flag indicating if the result should be normalized by the
+       number of spikes, `default=True`.
+
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    if len(args) == 1:
+        return spike_train_order_multi(args[0], **kwargs)
+    elif len(args) == 2:
+        return spike_train_order_bi(args[0], args[1], **kwargs)
+    else:
+        return spike_train_order_multi(args, **kwargs)
+
+
+############################################################
+# spike_train_order_bi
+############################################################
+def spike_train_order_bi(spike_train1, spike_train2, normalize=True,
+                         interval=None, max_tau=None):
+    """ Computes the overall spike train order value (Synfire Indicator)
+    for two spike trains.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    c, mp = _spike_train_order_impl(spike_train1, spike_train2,
+                                    interval, max_tau)
+    if normalize:
+        return 1.0*c/mp
+    else:
+        return c
+
+
+############################################################
+# spike_train_order_multi
+############################################################
+def spike_train_order_multi(spike_trains, indices=None, normalize=True,
+                            interval=None, max_tau=None):
+    """ Computes the overall spike train order value (Synfire Indicator)
+    for many spike trains.
+
+    :param spike_trains: list of :class:`.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param interval: averaging interval given as a pair of floats, if None
+                     the average over the whole function is computed.
+    :type interval: Pair of floats or None.
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: Spike train order value (Synfire Indicator) F for the given
+              spike trains.
+    :rtype: double
+    """
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    e_total = 0.0
+    m_total = 0.0
+    for (i, j) in pairs:
+        e, m = _spike_train_order_impl(spike_trains[i], spike_trains[j],
+                                       interval, max_tau)
+        e_total += e
+        m_total += m
+
+    if m_total == 0.0:
+        # no coincidences at all: define the result as 1
+        return 1.0
+    else:
+        return e_total/m_total
+
+
+############################################################
+# optimal_spike_train_sorting_from_matrix
+############################################################
+def _optimal_spike_train_sorting_from_matrix(D, full_output=False):
+    """ Finds the best sorting via simulated annealing.
+    Returns the optimal permutation p and the corresponding synfire value.
+    Not for direct use, call :func:`.optimal_spike_train_sorting` instead.
+
+    :param D: The directionality (SPIKE-Order) matrix.
+    :param full_output: If true, then function will additionally return the
+                        number of performed iterations (default=False)
+    :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+             if `full_output=True`, (p, F, iter) is returned.
+    """
+    N = len(D)
+    A = np.sum(np.triu(D, 0))
+
+    p = np.arange(N)
+
+    T_start = 2*np.max(D)     # starting temperature
+    T_end = 1E-5 * T_start    # final temperature
+    alpha = 0.9               # cooling factor
+
+    try:
+        from .cython.cython_simulated_annealing import sim_ann_cython as sim_ann
+    except ImportError:
+        raise NotImplementedError("PySpike with Cython required for computing "
+                                  "spike train sorting!")
+
+    p, A, total_iter = sim_ann(D, T_start, T_end, alpha)
+
+    if full_output:
+        return p, A, total_iter
+    else:
+        return p, A
+
+
+############################################################
+# optimal_spike_train_sorting
+############################################################
+def optimal_spike_train_sorting(spike_trains, indices=None, interval=None,
+                                max_tau=None, full_output=False):
+    """ Finds the best sorting of the given spike trains by computing the
+    spike directionality matrix and optimizing the order using simulated
+    annealing. For a detailed description of the algorithm see:
+    `http://iopscience.iop.org/article/10.1088/1367-2630/aa68c3/meta`
+
+    :param spike_trains: list of :class:`.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param interval: time interval filter given as a pair of floats, if None
+                     the full spike trains are used (default=None).
+    :type interval: Pair of floats or None.
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound (default=None).
+    :param full_output: If true, then function will additionally return the
+                        number of performed iterations (default=False)
+    :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+             if `full_output=True`, (p, F, iter) is returned.
+    """
+    D = spike_directionality_matrix(spike_trains, normalize=False,
+                                    indices=indices, interval=interval,
+                                    max_tau=max_tau)
+    return _optimal_spike_train_sorting_from_matrix(D, full_output)
+
+
+############################################################
+# permutate_matrix
+############################################################
+def permutate_matrix(D, p):
+    """ Helper function that applies the permutation p to the columns and rows
+    of matrix D. Returns the permutated matrix :math:`D'[n,m] = D[p[n], p[m]]`.
+
+    :param D: The matrix.
+    :param p: The permutation.
+    :return: The permutated matrix D', i.e. :math:`D'[n,m] = D[p[n], p[m]]`
+    """
+    N = len(D)
+    D_p = np.empty_like(D)
+    for n in range(N):
+        for m in range(N):
+            D_p[n, m] = D[p[n], p[m]]
+    return D_p

diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py
index 80f7805..95ef454 100644
--- a/pyspike/spike_sync.py
+++ b/pyspike/spike_sync.py
@@ -8,7 +8,7 @@ from __future__ import absolute_import
 import numpy as np
 from functools import partial
 import pyspike
-from pyspike import DiscreteFunc
+from pyspike import DiscreteFunc, SpikeTrain
 from pyspike.generic import _generic_profile_multi, _generic_distance_matrix
 
 
@@ -45,9 +45,9 @@ def spike_sync_profile(*args, **kwargs):
     if len(args) == 1:
         return spike_sync_profile_multi(args[0], **kwargs)
     elif len(args) == 2:
-        return spike_sync_profile_bi(args[0], args[1])
+        return spike_sync_profile_bi(args[0], args[1], **kwargs)
     else:
-        return spike_sync_profile_multi(args)
+        return spike_sync_profile_multi(args, **kwargs)
 
 
 ############################################################
@@ -290,3 +290,52 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None):
     dist_func = partial(spike_sync_bi, max_tau=max_tau)
     return _generic_distance_matrix(spike_trains, dist_func,
                                     indices, interval)
+
+
+############################################################
+# filter_by_spike_sync
+############################################################
+def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None,
+                         return_removed_spikes=False):
+    """ Removes the spikes with a multi-variate spike_sync value below
+    threshold.
+    """
+    N = len(spike_trains)
+    filtered_spike_trains = []
+    removed_spike_trains = []
+
+    # cython implementation
+    try:
+        from .cython.cython_profiles import coincidence_single_profile_cython \
+            as coincidence_impl
+    except ImportError:
+        if not(pyspike.disable_backend_warning):
+            print("Warning: coincidence_single_profile_cython not found. Make \
+sure that PySpike is installed by running\n \
+'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+        # use python backend
+        from .cython.python_backend import coincidence_single_python \
+            as coincidence_impl
+
+    if max_tau is None:
+        max_tau = 0.0
+
+    for i, st in enumerate(spike_trains):
+        coincidences = np.zeros_like(st.spikes)
+        for j in range(N):
+            if i == j:
+                continue
+            coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes,
+                                             st.t_start, st.t_end, max_tau)
+        filtered_spikes = st.spikes[coincidences > threshold*(N-1)]
+        filtered_spike_trains.append(SpikeTrain(filtered_spikes,
+                                                [st.t_start, st.t_end]))
+        if return_removed_spikes:
+            removed_spikes = st.spikes[coincidences <= threshold*(N-1)]
+            removed_spike_trains.append(SpikeTrain(removed_spikes,
+                                                   [st.t_start, st.t_end]))
+    if return_removed_spikes:
+        return [filtered_spike_trains, removed_spike_trains]
+    else:
+        return filtered_spike_trains
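+
+
+# Editorial sketch of typical usage (the threshold value is hypothetical):
+#     sync_parts = filter_by_spike_sync(spike_trains, threshold=0.5)
+# keeps, for every train, only those spikes whose average coincidence with
+# the other N-1 trains exceeds 0.5.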
-0,0 +1,97 @@ +""" test_directionality.py + +Tests the directionality functions + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +import numpy as np +from numpy.testing import assert_equal, assert_almost_equal, \ + assert_array_equal + +import pyspike as spk +from pyspike import SpikeTrain, DiscreteFunc + + +def test_spike_directionality(): + st1 = SpikeTrain([100, 200, 300], [0, 1000]) + st2 = SpikeTrain([105, 205, 300], [0, 1000]) + assert_almost_equal(spk.spike_directionality(st1, st2), 2.0/3.0) + assert_almost_equal(spk.spike_directionality(st1, st2, normalize=False), + 2.0) + + # exchange order of spike trains should give exact negative profile + assert_almost_equal(spk.spike_directionality(st2, st1), -2.0/3.0) + assert_almost_equal(spk.spike_directionality(st2, st1, normalize=False), + -2.0) + + st3 = SpikeTrain([105, 195, 500], [0, 1000]) + assert_almost_equal(spk.spike_directionality(st1, st3), 0.0) + assert_almost_equal(spk.spike_directionality(st1, st3, normalize=False), + 0.0) + assert_almost_equal(spk.spike_directionality(st3, st1), 0.0) + + D = spk.spike_directionality_matrix([st1, st2, st3], normalize=False) + D_expected = np.array([[0, 2.0, 0.0], [-2.0, 0.0, -1.0], [0.0, 1.0, 0.0]]) + assert_array_equal(D, D_expected) + + dir_profs = spk.spike_directionality_values([st1, st2, st3]) + assert_array_equal(dir_profs[0], [1.0, 0.0, 0.0]) + assert_array_equal(dir_profs[1], [-0.5, -1.0, 0.0]) + + +def test_spike_train_order(): + st1 = SpikeTrain([100, 200, 300], [0, 1000]) + st2 = SpikeTrain([105, 205, 300], [0, 1000]) + st3 = SpikeTrain([105, 195, 500], [0, 1000]) + + expected_x12 = np.array([0, 100, 105, 200, 205, 300, 1000]) + expected_y12 = np.array([1, 1, 1, 1, 1, 0, 0]) + expected_mp12 = np.array([1, 1, 1, 1, 1, 2, 2]) + + f = spk.spike_train_order_profile(st1, st2) + + assert f.almost_equal(DiscreteFunc(expected_x12, expected_y12, + expected_mp12)) + assert_almost_equal(f.avrg(), 2.0/3.0) + assert_almost_equal(f.avrg(normalize=False), 4.0) + assert_almost_equal(spk.spike_train_order(st1, st2), 2.0/3.0) + assert_almost_equal(spk.spike_train_order(st1, st2, normalize=False), 4.0) + + expected_x23 = np.array([0, 105, 195, 205, 300, 500, 1000]) + expected_y23 = np.array([0, 0, -1, -1, 0, 0, 0]) + expected_mp23 = np.array([2, 2, 1, 1, 1, 1, 1]) + + f = spk.spike_train_order_profile(st2, st3) + + assert_array_equal(f.x, expected_x23) + assert_array_equal(f.y, expected_y23) + assert_array_equal(f.mp, expected_mp23) + assert f.almost_equal(DiscreteFunc(expected_x23, expected_y23, + expected_mp23)) + assert_almost_equal(f.avrg(), -1.0/3.0) + assert_almost_equal(f.avrg(normalize=False), -2.0) + assert_almost_equal(spk.spike_train_order(st2, st3), -1.0/3.0) + assert_almost_equal(spk.spike_train_order(st2, st3, normalize=False), -2.0) + + f = spk.spike_train_order_profile_multi([st1, st2, st3]) + + expected_x = np.array([0, 100, 105, 195, 200, 205, 300, 500, 1000]) + expected_y = np.array([2, 2, 2, -2, 0, 0, 0, 0, 0]) + expected_mp = np.array([2, 2, 4, 2, 2, 2, 4, 2, 2]) + + assert_array_equal(f.x, expected_x) + assert_array_equal(f.y, expected_y) + assert_array_equal(f.mp, expected_mp) + + # Averaging the profile should be the same as computing the synfire indicator directly. 
+    assert_almost_equal(f.avrg(), spk.spike_train_order([st1, st2, st3]))
+
+    # we can also compute the synfire indicator from the directionality matrix
+    D_matrix = spk.spike_directionality_matrix([st1, st2, st3], normalize=False)
+    num_spikes = sum(len(st) for st in [st1, st2, st3])
+    syn_fire = np.sum(np.triu(D_matrix)) / num_spikes
+    assert_almost_equal(f.avrg(), syn_fire)
diff --git a/test/test_function.py b/test/test_function.py
index 92d378d..6c04839 100644
--- a/test/test_function.py
+++ b/test/test_function.py
@@ -10,6 +10,7 @@ Distributed under the BSD License
 
 from __future__ import print_function
 import numpy as np
 from copy import copy
+from nose.tools import raises
 from numpy.testing import assert_equal, assert_almost_equal, \
     assert_array_equal, assert_array_almost_equal
@@ -49,6 +50,8 @@ def test_pwc():
     assert_almost_equal(a, (0.5-0.5+0.5*1.5+1.0*0.75)/3.0, decimal=16)
     a = f.avrg([1.5, 3.5])
     assert_almost_equal(a, (-0.5*0.5+0.5*1.5+1.0*0.75)/2.0, decimal=16)
+    a = f.avrg([1.0, 2.0])
+    assert_almost_equal(a, (1.0*-0.5)/1.0, decimal=16)
     a = f.avrg([1.0, 3.5])
     assert_almost_equal(a, (-0.5*1.0+0.5*1.5+1.0*0.75)/2.5, decimal=16)
     a = f.avrg([1.0, 4.0])
@@ -120,6 +123,53 @@ def test_pwc_avrg():
     assert_array_almost_equal(f1.x, x_expected, decimal=16)
     assert_array_almost_equal(f1.y, y_expected, decimal=16)
 
+def test_pwc_integral():
+    # some random data
+    x = [0.0, 1.0, 2.0, 2.5, 4.0]
+    y = [1.0, -0.5, 1.5, 0.75]
+    f1 = spk.PieceWiseConstFunc(x, y)
+
+    # test full interval
+    full = 1.0*1.0 + 1.0*-0.5 + 0.5*1.5 + 1.5*0.75
+    assert_equal(f1.integral(), full)
+    assert_equal(f1.integral((np.min(x), np.max(x))), full)
+    # test part interval, spanning an edge
+    assert_equal(f1.integral((0.5, 1.5)), 0.5*1.0 + 0.5*-0.5)
+    # test part interval, just over two edges
+    assert_almost_equal(f1.integral((1.0-1e-16, 2+1e-16)), 1.0*-0.5, decimal=14)
+    # test part interval, between two edges
+    assert_equal(f1.integral((1.0, 2.0)), 1.0*-0.5)
+    assert_equal(f1.integral((1.2, 1.7)), (1.7-1.2)*-0.5)
+    # test part interval, start to before and after edge
+    assert_equal(f1.integral((0.0, 0.7)), 0.7*1.0)
+    assert_equal(f1.integral((0.0, 1.1)), 1.0*1.0+0.1*-0.5)
+    # test part interval, before and after edge till end
+    assert_equal(f1.integral((2.6, 4.0)), (4.0-2.6)*0.75)
+    assert_equal(f1.integral((2.4, 4.0)), (2.5-2.4)*1.5+(4-2.5)*0.75)
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_inv():
+    # some random data
+    x = [0.0, 1.0, 2.0, 2.5, 4.0]
+    y = [1.0, -0.5, 1.5, 0.75]
+    f1 = spk.PieceWiseConstFunc(x, y)
+    f1.integral((3, 2))
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_oob_1():
+    # some random data
+    x = [0.0, 1.0, 2.0, 2.5, 4.0]
+    y = [1.0, -0.5, 1.5, 0.75]
+    f1 = spk.PieceWiseConstFunc(x, y)
+    f1.integral((1, 6))
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_oob_2():
+    # some random data
+    x = [0.0, 1.0, 2.0, 2.5, 4.0]
+    y = [1.0, -0.5, 1.5, 0.75]
+    f1 = spk.PieceWiseConstFunc(x, y)
+    f1.integral((-1, 3))
 
 def test_pwl():
     x = [0.0, 1.0, 2.0, 2.5, 4.0]
@@ -162,6 +212,18 @@ def test_pwl():
     a = f.avrg([1.0, 4.0])
     assert_almost_equal(a, (-0.45 + 0.75 + 1.5*0.5) / 3.0, decimal=16)
 
+    # interval between support points
+    a = f.avrg([1.1, 1.5])
+    assert_almost_equal(a, (-0.5+0.1*0.1 - 0.45) * 0.5, decimal=14)
+
+    # starting at a support point
+    a = f.avrg([1.0, 1.5])
+    assert_almost_equal(a, (-0.5 - 0.45) * 0.5, decimal=14)
+
+    # start and end at support point
+    a = f.avrg([1.0, 2.0])
+    assert_almost_equal(a, (-0.5 - 0.4) * 0.5, decimal=14)
+
     # averaging over multiple intervals
     a = f.avrg([(0.5, 1.5), (1.5, 2.5)])
     assert_almost_equal(a, (1.375*0.5 - 0.45 + 0.75)/2.0, decimal=16)
diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py
new file mode 100644
index 0000000..e259903
--- /dev/null
+++ b/test/test_sync_filter.py
@@ -0,0 +1,95 @@
+""" test_sync_filter.py
+
+Tests the spike sync based filtering
+
+Copyright 2015, Mario Mulansky
+
+Distributed under the BSD License
+
+"""
+
+from __future__ import print_function
+import numpy as np
+from numpy.testing import assert_equal, assert_almost_equal, \
+    assert_array_almost_equal
+
+import pyspike as spk
+from pyspike import SpikeTrain
+
+
+def test_single_prof():
+    st1 = np.array([1.0, 2.0, 3.0, 4.0])
+    st2 = np.array([1.1, 2.1, 3.8])
+    st3 = np.array([0.9, 3.1, 4.1])
+
+    # cython implementation
+    try:
+        from pyspike.cython.cython_profiles import \
+            coincidence_single_profile_cython as coincidence_impl
+    except ImportError:
+        from pyspike.cython.python_backend import \
+            coincidence_single_python as coincidence_impl
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st2, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+    print(coincidences)
+    for i, t in enumerate(st1):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    coincidences = np.array(coincidence_impl(st2, st1, 0, 5.0, 0.0))
+    for i, t in enumerate(st2):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st3, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st3, 0, 5.0, 0.0))
+    for i, t in enumerate(st1):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    st1 = np.array([1.0, 2.0, 3.0, 4.0])
+    st2 = np.array([1.0, 2.0, 4.0])
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st2, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+    for i, t in enumerate(st1):
+        expected = sync_prof.y[sync_prof.x == t]/sync_prof.mp[sync_prof.x == t]
+        assert_equal(coincidences[i], expected,
+                     "At index %d" % i)
+
+
+def test_filter():
+    st1 = SpikeTrain(np.array([1.0, 2.0, 3.0, 4.0]), 5.0)
+    st2 = SpikeTrain(np.array([1.1, 2.1, 3.8]), 5.0)
+    st3 = SpikeTrain(np.array([0.9, 3.1, 4.1]), 5.0)
+
+    # filtered_spike_trains = spk.filter_by_spike_sync([st1, st2], 0.5)
+
+    # assert_equal(filtered_spike_trains[0].spikes, [1.0, 2.0, 4.0])
+    # assert_equal(filtered_spike_trains[1].spikes, [1.1, 2.1, 3.8])
+
+    # filtered_spike_trains = spk.filter_by_spike_sync([st2, st1], 0.5)
+
+    # assert_equal(filtered_spike_trains[0].spikes, [1.1, 2.1, 3.8])
+    # assert_equal(filtered_spike_trains[1].spikes, [1.0, 2.0, 4.0])
+
+    filtered_spike_trains = spk.filter_by_spike_sync([st1, st2, st3], 0.75)
+
+    for st in filtered_spike_trains:
+        print(st.spikes)
+
+    assert_equal(filtered_spike_trains[0].spikes, [1.0, 4.0])
+    assert_equal(filtered_spike_trains[1].spikes, [1.1, 3.8])
+    assert_equal(filtered_spike_trains[2].spikes, [0.9, 4.1])
+
+
+if __name__ == "__main__":
+    test_single_prof()
+    test_filter()
--
cgit v1.2.3
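
For orientation, the directionality API introduced by this patch can be exercised as in the following minimal sketch. It is based only on the calls and values asserted in test_directionality.py above; the spike times are the test fixtures, not meaningful data. It illustrates the relation checked at the end of test_spike_train_order: the synfire indicator obtained by averaging the multivariate order profile equals the upper-triangle sum of the unnormalized directionality matrix divided by the total spike count.

    import numpy as np
    import pyspike as spk
    from pyspike import SpikeTrain

    # three short spike trains on the observation interval [0, 1000]
    st1 = SpikeTrain([100, 200, 300], [0, 1000])
    st2 = SpikeTrain([105, 205, 300], [0, 1000])
    st3 = SpikeTrain([105, 195, 500], [0, 1000])

    # st1 leads st2 in two of its three spikes -> order value 2/3
    print(spk.spike_train_order(st1, st2))  # 0.666...

    # pairwise, unnormalized directionality matrix
    D = spk.spike_directionality_matrix([st1, st2, st3], normalize=False)

    # synfire indicator: average of the multivariate order profile,
    # equal to the upper-triangle sum of D over the total spike count
    f = spk.spike_train_order_profile_multi([st1, st2, st3])
    num_spikes = sum(len(st) for st in [st1, st2, st3])
    assert np.isclose(f.avrg(), np.sum(np.triu(D)) / num_spikes)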
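Likewise, the spike sync based filtering added in this patch keeps only those spikes whose summed coincidence count exceeds threshold*(N-1), as the fallback code above shows. A minimal sketch using the fixtures and the 0.75 threshold from test_filter in test_sync_filter.py:

    import numpy as np
    import pyspike as spk
    from pyspike import SpikeTrain

    st1 = SpikeTrain(np.array([1.0, 2.0, 3.0, 4.0]), 5.0)
    st2 = SpikeTrain(np.array([1.1, 2.1, 3.8]), 5.0)
    st3 = SpikeTrain(np.array([0.9, 3.1, 4.1]), 5.0)

    # keep only spikes whose coincidence count exceeds 0.75*(N-1)
    filtered = spk.filter_by_spike_sync([st1, st2, st3], 0.75)
    for st in filtered:
        print(st.spikes)  # [1.0, 4.0], then [1.1, 3.8], then [0.9, 4.1]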