From 0c710a2450a055091fe6de2313d6240671d1c74d Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 26 Aug 2015 12:10:01 +0200 Subject: reorganized directionality module --- test/test_directionality.py | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 test/test_directionality.py (limited to 'test') diff --git a/test/test_directionality.py b/test/test_directionality.py new file mode 100644 index 0000000..5c3da00 --- /dev/null +++ b/test/test_directionality.py @@ -0,0 +1,41 @@ +""" test_spike_delay_asymmetry.py + +Tests the asymmetry functions + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +import numpy as np +from numpy.testing import assert_equal, assert_almost_equal, \ + assert_array_equal + +import pyspike as spk +from pyspike import SpikeTrain, DiscreteFunc +from pyspike.spike_directionality import _spike_directionality_profile + + +def test_profile(): + st1 = SpikeTrain([100, 200, 300], [0, 1000]) + st2 = SpikeTrain([105, 205, 300], [0, 1000]) + expected_x = np.array([0, 100, 105, 200, 205, 300, 1000]) + expected_y = np.array([1, 1, 1, 1, 1, 0, 0]) + expected_mp = np.array([1, 1, 1, 1, 1, 2, 2]) + + f = _spike_directionality_profile(st1, st2) + + assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp)) + assert_almost_equal(f.avrg(), 2.0/3.0) + assert_almost_equal(spk.spike_directionality(st1, st2), 2.0/3.0) + assert_almost_equal(spk.spike_directionality(st1, st2, normalize=False), + 4.0) + + st3 = SpikeTrain([105, 195, 500], [0, 1000]) + expected_x = np.array([0, 100, 105, 195, 200, 300, 500, 1000]) + expected_y = np.array([1, 1, 1, -1, -1, 0, 0, 0]) + expected_mp = np.array([1, 1, 1, 1, 1, 1, 1, 1]) + + f = _spike_directionality_profile(st1, st3) + assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp)) -- cgit v1.2.3 From 827be39a1646f1e518f7210b8943006f5741144d Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 26 Aug 2015 17:40:09 +0200 Subject: further refactoring of directionality --- pyspike/__init__.py | 7 +- pyspike/cython/cython_directionality.pyx | 109 ++++++++++----- pyspike/spike_directionality.py | 232 ++++++++++++++++++++++--------- test/test_directionality.py | 80 ++++++++--- 4 files changed, 311 insertions(+), 117 deletions(-) (limited to 'test') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 7fa5265..10d2936 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -28,9 +28,10 @@ from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \ merge_spike_trains, generate_poisson_spikes from spike_directionality import spike_directionality, \ - spike_directionality_matrix, spike_train_order_profile, \ - optimal_spike_train_order_from_matrix, optimal_spike_train_order, \ - permutate_matrix + spike_directionality_profiles, spike_directionality_matrix, \ + spike_train_order_profile, spike_train_order, \ + spike_train_order_profile_multi, optimal_spike_train_order_from_matrix, \ + optimal_spike_train_order, permutate_matrix # define the __version__ following # http://stackoverflow.com/questions/17583443 diff --git a/pyspike/cython/cython_directionality.pyx b/pyspike/cython/cython_directionality.pyx index e1f63c4..ac37690 100644 --- a/pyspike/cython/cython_directionality.pyx +++ b/pyspike/cython/cython_directionality.pyx @@ -128,21 +128,68 @@ def spike_train_order_profile_cython(double[:] spikes1, double[:] spikes2, return st, a, mp +############################################################ +# spike_train_order_cython 
+############################################################ +def spike_train_order_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int d = 0 + cdef int mp = 0 + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 2 appeared before spike in spike train 1 + # mark with -1 + d -= 2 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 1 appeared before spike in spike train 2 + # mark with +1 + d += 2 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + # add only one event with multiplicity 2, but no asymmetry counting + mp += 2 + + if d == 0 and mp == 0: + # empty spike trains -> spike sync = 1 by definition + d = 1 + mp = 1 + + return d, mp + ############################################################ -# spike_order_values_cython +# spike_directionality_profiles_cython ############################################################ -def spike_order_values_cython(double[:] spikes1, - double[:] spikes2, - double t_start, double t_end, - double max_tau): +def spike_directionality_profiles_cython(double[:] spikes1, + double[:] spikes2, + double t_start, double t_end, + double max_tau): cdef int N1 = len(spikes1) cdef int N2 = len(spikes2) cdef int i = -1 cdef int j = -1 - cdef double[:] a1 = np.zeros(N1) # asymmetry values - cdef double[:] a2 = np.zeros(N2) # asymmetry values + cdef double[:] d1 = np.zeros(N1) # directionality values + cdef double[:] d2 = np.zeros(N2) # directionality values cdef double interval = t_end - t_start cdef double tau while i + j < N1 + N2 - 2: @@ -153,8 +200,8 @@ def spike_order_values_cython(double[:] spikes1, # coincidence between the current spike and the previous spike # spike from spike train 1 after spike train 2 # leading spike gets +1, following spike -1 - a1[i] = -1 - a2[j] = +1 + d1[i] = -1 + d2[j] = +1 elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): j += 1 tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) @@ -162,62 +209,54 @@ def spike_order_values_cython(double[:] spikes1, # coincidence between the current spike and the previous spike # spike from spike train 1 before spike train 2 # leading spike gets +1, following spike -1 - a1[i] = +1 - a2[j] = -1 + d1[i] = +1 + d2[j] = -1 else: # spikes1[i+1] = spikes2[j+1] # advance in both spike trains j += 1 i += 1 # equal spike times: zero asymmetry value - a1[i] = 0 - a2[j] = 0 + d1[i] = 0 + d2[j] = 0 - return a1, a2 + return d1, d2 ############################################################ -# spike_train_order_cython +# spike_directionality_cython ############################################################ -def spike_train_order_cython(double[:] spikes1, double[:] spikes2, - double t_start, double t_end, double max_tau): +def spike_directionality_cython(double[:] spikes1, + double[:] spikes2, + double t_start, double t_end, + double max_tau): cdef int N1 = len(spikes1) cdef 
int N2 = len(spikes2) cdef int i = -1 cdef int j = -1 - cdef int asym = 0 - cdef int mp = 0 + cdef int d = 0 # directionality value cdef double interval = t_end - t_start cdef double tau while i + j < N1 + N2 - 2: if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): i += 1 - mp += 1 tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) if j > -1 and spikes1[i]-spikes2[j] < tau: # coincidence between the current spike and the previous spike - # spike in spike train 2 appeared before spike in spike train 1 - # mark with -1 - asym -= 2 + # spike from spike train 1 after spike train 2 + # leading spike gets +1, following spike -1 + d -= 1 elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): j += 1 - mp += 1 tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) if i > -1 and spikes2[j]-spikes1[i] < tau: # coincidence between the current spike and the previous spike - # spike in spike train 1 appeared before spike in spike train 2 - # mark with +1 - asym += 2 + # spike from spike train 1 before spike train 2 + # leading spike gets +1, following spike -1 + d += 1 else: # spikes1[i+1] = spikes2[j+1] # advance in both spike trains j += 1 i += 1 - # add only one event with multiplicity 2, but no asymmetry counting - mp += 2 - - if asym == 0 and mp == 0: - # empty spike trains -> spike sync = 1 by definition - asym = 1 - mp = 1 - return asym, mp + return d diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py index 0e69cb5..f608ecc 100644 --- a/pyspike/spike_directionality.py +++ b/pyspike/spike_directionality.py @@ -7,6 +7,8 @@ import numpy as np from math import exp import pyspike from pyspike import DiscreteFunc +from functools import partial +from pyspike.generic import _generic_profile_multi ############################################################ @@ -21,23 +23,21 @@ def spike_directionality(spike_train1, spike_train2, normalize=True, # for optimal performance try: from cython.cython_directionality import \ - spike_train_order_cython as spike_train_order_impl + spike_directionality_cython as spike_directionality_impl if max_tau is None: max_tau = 0.0 - c, mp = spike_train_order_impl(spike_train1.spikes, - spike_train2.spikes, - spike_train1.t_start, - spike_train1.t_end, - max_tau) + d = spike_directionality_impl(spike_train1.spikes, + spike_train2.spikes, + spike_train1.t_start, + spike_train1.t_end, + max_tau) + c = len(spike_train1.spikes) except ImportError: - # Cython backend not available: fall back to profile averaging - c, mp = _spike_directionality_profile(spike_train1, - spike_train2, - max_tau).integral(interval) + raise NotImplementedError() if normalize: - return 1.0*c/mp + return 1.0*d/c else: - return c + return d else: # some specific interval is provided: not yet implemented raise NotImplementedError() @@ -70,11 +70,11 @@ def spike_directionality_matrix(spike_trains, normalize=True, indices=None, ############################################################ -# spike_train_order_profile +# spike_directionality_profiles ############################################################ -def spike_train_order_profile(spike_trains, indices=None, - interval=None, max_tau=None): - """ Computes the spike train symmetry value for each spike in each spike +def spike_directionality_profiles(spike_trains, indices=None, + interval=None, max_tau=None): + """ Computes the spike directionality value for each spike in each spike train. 
""" if indices is None: @@ -92,7 +92,7 @@ def spike_train_order_profile(spike_trains, indices=None, # cython implementation try: from cython.cython_directionality import \ - spike_order_values_cython as spike_order_values_impl + spike_directionality_profiles_cython as profile_impl except ImportError: raise NotImplementedError() # if not(pyspike.disable_backend_warning): @@ -107,11 +107,9 @@ def spike_train_order_profile(spike_trains, indices=None, max_tau = 0.0 for i, j in pairs: - a1, a2 = spike_order_values_impl(spike_trains[i].spikes, - spike_trains[j].spikes, - spike_trains[i].t_start, - spike_trains[i].t_end, - max_tau) + a1, a2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes, + spike_trains[i].t_start, spike_trains[i].t_end, + max_tau) asymmetry_list[i] += a1 asymmetry_list[j] += a2 for a in asymmetry_list: @@ -119,6 +117,114 @@ def spike_train_order_profile(spike_trains, indices=None, return asymmetry_list +############################################################ +# spike_train_order_profile +############################################################ +def spike_train_order_profile(spike_train1, spike_train2, max_tau=None): + """ Computes the spike train order profile P(t) of the two given + spike trains. Returns the profile as a DiscreteFunction object. + :param spike_train1: First spike train. + :type spike_train1: :class:`pyspike.SpikeTrain` + :param spike_train2: Second spike train. + :type spike_train2: :class:`pyspike.SpikeTrain` + :param max_tau: Maximum coincidence window size. If 0 or `None`, the + coincidence window has no upper bound. + :returns: The spike-distance profile :math:`S_{sync}(t)`. + :rtype: :class:`pyspike.function.DiscreteFunction` + """ + # check whether the spike trains are defined for the same interval + assert spike_train1.t_start == spike_train2.t_start, \ + "Given spike trains are not defined on the same interval!" + assert spike_train1.t_end == spike_train2.t_end, \ + "Given spike trains are not defined on the same interval!" + + # cython implementation + try: + from cython.cython_directionality import \ + spike_train_order_profile_cython as \ + spike_train_order_profile_impl + except ImportError: + # raise NotImplementedError() + if not(pyspike.disable_backend_warning): + print("Warning: spike_distance_cython not found. Make sure that \ +PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ +Falling back to slow python backend.") + # use python backend + from cython.directionality_python_backend import \ + spike_train_order_python as spike_train_order_profile_impl + + if max_tau is None: + max_tau = 0.0 + + times, coincidences, multiplicity \ + = spike_train_order_profile_impl(spike_train1.spikes, + spike_train2.spikes, + spike_train1.t_start, + spike_train1.t_end, + max_tau) + + return DiscreteFunc(times, coincidences, multiplicity) + + +############################################################ +# spike_train_order +############################################################ +def spike_train_order(spike_train1, spike_train2, normalize=True, + interval=None, max_tau=None): + """ Computes the overall spike delay asymmetry value for two spike trains. 
+    """
+    if interval is None:
+        # distance over the whole interval is requested: use specific function
+        # for optimal performance
+        try:
+            from cython.cython_directionality import \
+                spike_train_order_cython as spike_train_order_impl
+            if max_tau is None:
+                max_tau = 0.0
+            c, mp = spike_train_order_impl(spike_train1.spikes,
+                                           spike_train2.spikes,
+                                           spike_train1.t_start,
+                                           spike_train1.t_end,
+                                           max_tau)
+        except ImportError:
+            # Cython backend not available: fall back to profile averaging
+            c, mp = spike_train_order_profile(spike_train1, spike_train2,
+                                              max_tau).integral(interval)
+        if normalize:
+            return 1.0*c/mp
+        else:
+            return c
+    else:
+        # some specific interval is provided: not yet implemented
+        raise NotImplementedError()
+
+
+############################################################
+# spike_train_order_profile_multi
+############################################################
+def spike_train_order_profile_multi(spike_trains, indices=None,
+                                    max_tau=None):
+    """ Computes the multi-variate spike delay asymmetry profile for a set of
+    spike trains. For each spike in the set of spike trains, the multi-variate
+    profile is defined as the sum of asymmetry values divided by the number of
+    spike train pairs involving the spike train containing this spike,
+    which is the number of spike trains minus one (N-1).
+    :param spike_trains: list of :class:`pyspike.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The multi-variate spike sync profile :math:`<S_{sync}>(t)`
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+    """
+    prof_func = partial(spike_train_order_profile, max_tau=max_tau)
+    average_prof, M = _generic_profile_multi(spike_trains, prof_func,
+                                             indices)
+    # average_dist.mul_scalar(1.0/M)  # no normalization here!
+    return average_prof
+
+
 ############################################################
 # optimal_spike_train_order_from_matrix
 ############################################################
@@ -195,50 +301,50 @@ def permutate_matrix(D, p):
 ############################################################
 # _spike_directionality_profile
 ############################################################
-def _spike_directionality_profile(spike_train1, spike_train2,
-                                  max_tau=None):
-    """ Computes the spike delay asymmetry profile A(t) of the two given
-    spike trains. Returns the profile as a DiscreteFunction object.
+# def _spike_directionality_profile(spike_train1, spike_train2,
+#                                   max_tau=None):
+#     """ Computes the spike delay asymmetry profile A(t) of the two given
+#     spike trains. Returns the profile as a DiscreteFunction object.
 
-    :param spike_train1: First spike train.
-    :type spike_train1: :class:`pyspike.SpikeTrain`
-    :param spike_train2: Second spike train.
-    :type spike_train2: :class:`pyspike.SpikeTrain`
-    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
-                    coincidence window has no upper bound.
-    :returns: The spike-distance profile :math:`S_{sync}(t)`.
-    :rtype: :class:`pyspike.function.DiscreteFunction`
+#     :param spike_train1: First spike train.
+#     :type spike_train1: :class:`pyspike.SpikeTrain`
+#     :param spike_train2: Second spike train.
+#     :type spike_train2: :class:`pyspike.SpikeTrain`
+#     :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+#                     coincidence window has no upper bound.
+# :returns: The spike-distance profile :math:`S_{sync}(t)`. +# :rtype: :class:`pyspike.function.DiscreteFunction` - """ - # check whether the spike trains are defined for the same interval - assert spike_train1.t_start == spike_train2.t_start, \ - "Given spike trains are not defined on the same interval!" - assert spike_train1.t_end == spike_train2.t_end, \ - "Given spike trains are not defined on the same interval!" +# """ +# # check whether the spike trains are defined for the same interval +# assert spike_train1.t_start == spike_train2.t_start, \ +# "Given spike trains are not defined on the same interval!" +# assert spike_train1.t_end == spike_train2.t_end, \ +# "Given spike trains are not defined on the same interval!" - # cython implementation - try: - from cython.cython_directionality import \ - spike_train_order_profile_cython as \ - spike_train_order_profile_impl - except ImportError: - # raise NotImplementedError() - if not(pyspike.disable_backend_warning): - print("Warning: spike_distance_cython not found. Make sure that \ -PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ -Falling back to slow python backend.") - # use python backend - from cython.directionality_python_backend import \ - spike_train_order_python as spike_train_order_profile_impl +# # cython implementation +# try: +# from cython.cython_directionality import \ +# spike_train_order_profile_cython as \ +# spike_train_order_profile_impl +# except ImportError: +# # raise NotImplementedError() +# if not(pyspike.disable_backend_warning): +# print("Warning: spike_distance_cython not found. Make sure that \ +# PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ +# Falling back to slow python backend.") +# # use python backend +# from cython.directionality_python_backend import \ +# spike_train_order_python as spike_train_order_profile_impl - if max_tau is None: - max_tau = 0.0 +# if max_tau is None: +# max_tau = 0.0 - times, coincidences, multiplicity \ - = spike_train_order_profile_impl(spike_train1.spikes, - spike_train2.spikes, - spike_train1.t_start, - spike_train1.t_end, - max_tau) +# times, coincidences, multiplicity \ +# = spike_train_order_profile_impl(spike_train1.spikes, +# spike_train2.spikes, +# spike_train1.t_start, +# spike_train1.t_end, +# max_tau) - return DiscreteFunc(times, coincidences, multiplicity) +# return DiscreteFunc(times, coincidences, multiplicity) diff --git a/test/test_directionality.py b/test/test_directionality.py index 5c3da00..7ca5d58 100644 --- a/test/test_directionality.py +++ b/test/test_directionality.py @@ -14,28 +14,76 @@ from numpy.testing import assert_equal, assert_almost_equal, \ import pyspike as spk from pyspike import SpikeTrain, DiscreteFunc -from pyspike.spike_directionality import _spike_directionality_profile +# from pyspike.spike_directionality import _spike_directionality_profile -def test_profile(): +def test_spike_directionality(): st1 = SpikeTrain([100, 200, 300], [0, 1000]) st2 = SpikeTrain([105, 205, 300], [0, 1000]) - expected_x = np.array([0, 100, 105, 200, 205, 300, 1000]) - expected_y = np.array([1, 1, 1, 1, 1, 0, 0]) - expected_mp = np.array([1, 1, 1, 1, 1, 2, 2]) - - f = _spike_directionality_profile(st1, st2) - - assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp)) - assert_almost_equal(f.avrg(), 2.0/3.0) assert_almost_equal(spk.spike_directionality(st1, st2), 2.0/3.0) assert_almost_equal(spk.spike_directionality(st1, st2, normalize=False), - 4.0) + 2.0) + + # exchange order of spike 
trains should give exact negative profile + assert_almost_equal(spk.spike_directionality(st2, st1), -2.0/3.0) + assert_almost_equal(spk.spike_directionality(st2, st1, normalize=False), + -2.0) + + st3 = SpikeTrain([105, 195, 500], [0, 1000]) + assert_almost_equal(spk.spike_directionality(st1, st3), 0.0) + assert_almost_equal(spk.spike_directionality(st1, st3, normalize=False), + 0.0) + assert_almost_equal(spk.spike_directionality(st3, st1), 0.0) + + D = spk.spike_directionality_matrix([st1, st2, st3], normalize=False) + D_expected = np.array([[0, 2.0, 0.0], [-2.0, 0.0, -1.0], [0.0, 1.0, 0.0]]) + assert_array_equal(D, D_expected) + + dir_profs = spk.spike_directionality_profiles([st1, st2, st3]) + assert_array_equal(dir_profs[0], [1.0, 0.0, 0.0]) + assert_array_equal(dir_profs[1], [-0.5, -1.0, 0.0]) + +def test_spike_train_order(): + st1 = SpikeTrain([100, 200, 300], [0, 1000]) + st2 = SpikeTrain([105, 205, 300], [0, 1000]) st3 = SpikeTrain([105, 195, 500], [0, 1000]) - expected_x = np.array([0, 100, 105, 195, 200, 300, 500, 1000]) - expected_y = np.array([1, 1, 1, -1, -1, 0, 0, 0]) - expected_mp = np.array([1, 1, 1, 1, 1, 1, 1, 1]) - f = _spike_directionality_profile(st1, st3) - assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp)) + expected_x12 = np.array([0, 100, 105, 200, 205, 300, 1000]) + expected_y12 = np.array([1, 1, 1, 1, 1, 0, 0]) + expected_mp12 = np.array([1, 1, 1, 1, 1, 2, 2]) + + f = spk.spike_train_order_profile(st1, st2) + + assert f.almost_equal(DiscreteFunc(expected_x12, expected_y12, + expected_mp12)) + assert_almost_equal(f.avrg(), 2.0/3.0) + assert_almost_equal(f.avrg(normalize=False), 4.0) + assert_almost_equal(spk.spike_train_order(st1, st2), 2.0/3.0) + assert_almost_equal(spk.spike_train_order(st1, st2, normalize=False), 4.0) + + expected_x23 = np.array([0, 105, 195, 205, 300, 500, 1000]) + expected_y23 = np.array([0, 0, -1, -1, 0, 0, 0]) + expected_mp23 = np.array([2, 2, 1, 1, 1, 1, 1]) + + f = spk.spike_train_order_profile(st2, st3) + + assert_array_equal(f.x, expected_x23) + assert_array_equal(f.y, expected_y23) + assert_array_equal(f.mp, expected_mp23) + assert f.almost_equal(DiscreteFunc(expected_x23, expected_y23, + expected_mp23)) + assert_almost_equal(f.avrg(), -1.0/3.0) + assert_almost_equal(f.avrg(normalize=False), -2.0) + assert_almost_equal(spk.spike_train_order(st2, st3), -1.0/3.0) + assert_almost_equal(spk.spike_train_order(st2, st3, normalize=False), -2.0) + + f = spk.spike_train_order_profile_multi([st1, st2, st3]) + + expected_x = np.array([0, 100, 105, 195, 200, 205, 300, 500, 1000]) + expected_y = np.array([2, 2, 2, -2, 0, 0, 0, 0, 0]) + expected_mp = np.array([2, 2, 4, 2, 2, 2, 4, 2, 4]) + + assert_array_equal(f.x, expected_x) + assert_array_equal(f.y, expected_y) + assert_array_equal(f.mp, expected_mp) -- cgit v1.2.3 From a5e6a12a619cb9528a4cf7f3ef8f082e5eb877c2 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 9 Sep 2015 17:51:03 +0200 Subject: added SPIKE-Sync based filtering new function filter_by_spike_sync removes spikes that have a multi-variate Spike Sync value below some threshold not yet fully tested, python backend missing. 
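For illustration, the new filter_by_spike_sync function added below can be used as in the following minimal sketch. The spike times mirror the test case added later in this series, and the commit's own caveat applies: at this point the feature is not yet fully tested and the pure-python backend is still missing.

    import numpy as np
    import pyspike as spk
    from pyspike import SpikeTrain

    # three spike trains with largely synchronous events on [0, 5]
    st1 = SpikeTrain(np.array([1.0, 2.0, 3.0, 4.0]), 5.0)
    st2 = SpikeTrain(np.array([1.1, 2.1, 3.8]), 5.0)
    st3 = SpikeTrain(np.array([0.9, 3.1, 4.1]), 5.0)

    # keep only spikes whose summed coincidence values over the other
    # trains exceed threshold*(N-1); with threshold=0.75 and N=3 this
    # keeps only spikes that are coincident with both partner trains
    filtered = spk.filter_by_spike_sync([st1, st2, st3], 0.75)
    for st in filtered:
        print(st.spikes)

A spike thus survives the filter if it is (nearly) coincident with spikes in more than the given fraction of the other spike trains, which is useful for focusing directionality analysis on synchronous events.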
--- pyspike/cython/cython_profiles.pyx | 31 +++++++++++++++++++++++++++ pyspike/spike_sync.py | 40 ++++++++++++++++++++++++++++++++++- test/test_sync_filter.py | 43 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 113 insertions(+), 1 deletion(-) create mode 100644 test/test_sync_filter.py (limited to 'test') diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index 4a42cdb..fe08cb7 100644 --- a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -450,3 +450,34 @@ def coincidence_profile_cython(double[:] spikes1, double[:] spikes2, c[1] = 1 return st, c, mp + + +############################################################ +# coincidence_single_profile_cython +############################################################ +def coincidence_single_profile_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int j = -1 + cdef double[:] c = np.zeros(N1) # coincidences + cdef double interval = t_end - t_start + cdef double tau + for i in xrange(N1): + while j < N2-1 and spikes2[j+1] < spikes1[i]: + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + print i, j, spikes1[i], spikes2[j], tau + if j > -1 and spikes1[i]-spikes2[j] < tau: + # current spike in st1 is coincident + c[i] = 1 + if j < N2-1: + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + print i, j, spikes1[i], spikes2[j], tau + if spikes2[j]-spikes1[i] < tau: + # current spike in st1 is coincident + c[i] = 1 + + return c diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index 80f7805..d37731f 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -8,7 +8,7 @@ from __future__ import absolute_import import numpy as np from functools import partial import pyspike -from pyspike import DiscreteFunc +from pyspike import DiscreteFunc, SpikeTrain from pyspike.generic import _generic_profile_multi, _generic_distance_matrix @@ -290,3 +290,41 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None): dist_func = partial(spike_sync_bi, max_tau=max_tau) return _generic_distance_matrix(spike_trains, dist_func, indices, interval) + + +############################################################ +# filter_by_spike_sync +############################################################ +def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None): + """ Removes the spikes with a multi-variate spike_sync value below + threshold. + """ + N = len(spike_trains) + filtered_spike_trains = [] + + # cython implementation + try: + from cython.cython_profiles import coincidence_single_profile_cython \ + as coincidence_impl + except ImportError: + if not(pyspike.disable_backend_warning): + print("Warning: coincidence_single_profile_cytho not found. 
Make \
sure that PySpike is installed by running\n \
'python setup.py build_ext --inplace'!\n \
Falling back to slow python backend.")
+        # use python backend
+        from cython.python_backend import coincidence_single_profile_python \
+            as coincidence_impl
+
+    if max_tau is None:
+        max_tau = 0.0
+
+    for i, st in enumerate(spike_trains):
+        coincidences = np.zeros_like(st)
+        for j in range(N).remove(i):
+            coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes,
+                                             st.t_start, st.t_end, max_tau)
+        filtered_spikes = st[coincidences > threshold*(N-1)]
+        filtered_spike_trains.append(SpikeTrain(filtered_spikes,
+                                                [st.t_start, st.t_end]))
+    return filtered_spike_trains
diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py
new file mode 100644
index 0000000..ce03b23
--- /dev/null
+++ b/test/test_sync_filter.py
@@ -0,0 +1,43 @@
+""" test_sync_filter.py
+
+Tests the spike sync based filtering
+
+Copyright 2015, Mario Mulansky
+
+Distributed under the BSD License
+
+"""
+
+from __future__ import print_function
+import numpy as np
+from numpy.testing import assert_equal, assert_almost_equal, \
+    assert_array_almost_equal
+
+import pyspike as spk
+from pyspike import SpikeTrain
+
+
+def test_cython():
+    st1 = np.array([1.0, 2.0, 3.0, 4.0])
+    st2 = np.array([1.1, 2.1, 3.8])
+
+    # cython implementation
+    try:
+        from pyspike.cython.cython_profiles import coincidence_single_profile_cython \
+            as coincidence_impl
+    except ImportError:
+        from pyspike.cython.python_backend import coincidence_single_profile_python \
+            as coincidence_impl
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st2, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+    for i, t in enumerate(st1):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    coincidences = np.array(coincidence_impl(st2, st1, 0, 5.0, 0.0))
+    for i, t in enumerate(st2):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
-- cgit v1.2.3


From 18ea80e2d01e9eb4ceee17219f91098efbcdf67c Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Sat, 10 Oct 2015 20:45:09 +0200
Subject: spike sync filtering, cython sim ann

Added function for filtering out events based on a threshold for the
spike sync values. Useful for focusing on synchronous events during
directionality analysis.

Also added cython version of simulated annealing for performance.
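The annealing scheme used here to find the optimal spike train order reduces, in spirit, to the following pure-Python sketch of the Cython kernel introduced below (the function name is illustrative; D is the pairwise directionality matrix with D[i, j] = -D[j, i]):

    import numpy as np

    def optimal_order_sim_ann(D, T_start, T_end, alpha):
        """Anneal a permutation p maximizing the upper-triangle sum of
        the permuted matrix D, using adjacent swaps (requires N >= 2)."""
        N = len(D)
        A = np.sum(np.triu(D, 0))   # cost of the identity permutation
        p = np.arange(N)            # permutation being optimized
        T = T_start
        total_iter = 0
        while T > T_end:
            iterations = 0
            succ_iter = 0
            # equilibrate: at most 100*N steps or 10*N successful steps
            while iterations < 100*N and succ_iter < 10*N:
                ind1 = np.random.randint(N-1)       # swap neighbors ind1, ind1+1
                delta_A = -2*D[p[ind1], p[ind1+1]]  # change of the cost A
                if delta_A > 0.0 or np.exp(delta_A/T) > np.random.random():
                    # Metropolis rule: always accept improvements, accept
                    # worse orders with probability exp(delta_A/T)
                    p[ind1], p[ind1+1] = p[ind1+1], p[ind1]
                    A += delta_A
                    succ_iter += 1
                iterations += 1
            total_iter += iterations
            T *= alpha              # geometric cooling
            if succ_iter == 0:
                break               # no successful swap -> converged
        return p, A, total_iter

Swapping the neighbors p[ind1] and p[ind1+1] moves the single entry D[p[ind1], p[ind1+1]] from the upper to the lower triangle, where it enters the cost with opposite sign, hence delta_A = -2*D[p[ind1], p[ind1+1]].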
--- pyspike/__init__.py | 7 +- pyspike/cython/cython_distances.pyx | 200 ++++++++++++++++++++++++++ pyspike/cython/cython_profiles.pyx | 14 +- pyspike/cython/cython_simulated_annealing.pyx | 82 +++++++++++ pyspike/spike_directionality.py | 54 ++++--- pyspike/spike_sync.py | 19 ++- setup.py | 14 +- test/test_sync_filter.py | 61 +++++++- 8 files changed, 408 insertions(+), 43 deletions(-) create mode 100644 pyspike/cython/cython_simulated_annealing.pyx (limited to 'test') diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 10d2936..61c5c4f 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -19,9 +19,10 @@ from .isi_distance import isi_profile, isi_distance, isi_profile_multi,\ isi_distance_multi, isi_distance_matrix from .spike_distance import spike_profile, spike_distance, spike_profile_multi,\ spike_distance_multi, spike_distance_matrix -from .spike_sync import spike_sync_profile, spike_sync,\ - spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix -from .psth import psth +from spike_sync import spike_sync_profile, spike_sync,\ + spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix,\ + filter_by_spike_sync +from psth import psth from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \ spike_train_from_string, import_spike_trains_from_time_series, \ diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index ac5f226..d4070ae 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -178,6 +178,8 @@ cdef inline double isi_avrg_cython(double isi1, double isi2) nogil: return 0.5*(isi1+isi2)*(isi1+isi2) # alternative definition to obtain ~ 0.5 for Poisson spikes # return 0.5*(isi1*isi1+isi2*isi2) + # another alternative definition without second normalization + # return 0.5*(isi1+isi2) ############################################################ @@ -248,6 +250,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, index2 = 0 y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) index = 1 while index1+index2 < N1+N2-2: @@ -267,6 +271,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, t_curr = t_p1 s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) @@ -286,6 +292,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, s1 = dt_p1 # s2 is the same as above, thus we can compute y2 immediately y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): index2 += 1 # first calculate the previous interval end value @@ -301,6 +309,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, t_curr = t_p2 s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) @@ -320,6 +330,9 @@ def spike_distance_cython(double[:] t1, double[:] t2, s2 = dt_p2 # s1 is the same as above, thus we can compute y2 immediately y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, 
isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) + else: # t_f1 == t_f2 - generate only one event index1 += 1 index2 += 1 @@ -358,6 +371,193 @@ def spike_distance_cython(double[:] t1, double[:] t2, s1 = dt_f1 # *(t_end-t1[N1-1])/isi1 s2 = dt_f2 # *(t_end-t2[N2-1])/isi2 y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_end - t_last) + # end nogil + + # use only the data added above + # could be less than original length due to equal spike times + return spike_value / (t_end-t_start) + + +############################################################ +# isi_avrg_rf_cython +############################################################ +cdef inline double isi_avrg_rf_cython(double isi1, double isi2) nogil: + # rate free version + return (isi1+isi2) + + +############################################################ +# spike_distance_rf_cython +############################################################ +def spike_distance_rf_cython(double[:] t1, double[:] t2, + double t_start, double t_end): + + cdef int N1, N2, index1, index2, index + cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2 + cdef double isi1, isi2, s1, s2 + cdef double y_start, y_end, t_last, t_current, spike_value + + spike_value = 0.0 + + N1 = len(t1) + N2 = len(t2) + + with nogil: # release the interpreter to allow multithreading + t_last = t_start + t_p1 = t_start + t_p2 = t_start + if t1[0] > t_start: + # dt_p1 = t2[0]-t_start + t_f1 = t1[0] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) + dt_p1 = dt_f1 + s1 = dt_p1*(t_f1-t_start)/isi1 + index1 = -1 + else: + t_f1 = t1[1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + dt_p1 = 0.0 + isi1 = t1[1]-t1[0] + s1 = dt_p1 + index1 = 0 + if t2[0] > t_start: + # dt_p1 = t2[0]-t_start + t_f2 = t2[0] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = dt_f2 + isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) + s2 = dt_p2*(t_f2-t_start)/isi2 + index2 = -1 + else: + t_f2 = t2[1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = 0.0 + isi2 = t2[1]-t2[0] + s2 = dt_p2 + index2 = 0 + + # y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + index = 1 + + while index1+index2 < N1+N2-2: + # print(index, index1, index2) + if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1): + index1 += 1 + # first calculate the previous interval end value + s1 = dt_f1*(t_f1-t_p1) / isi1 + # the previous time now was the following time before: + dt_p1 = dt_f1 + t_p1 = t_f1 # t_p1 contains the current time point + # get the next time + if index1 < N1-1: + t_f1 = t1[index1+1] + else: + t_f1 = t_end + t_curr = t_p1 + s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 + # y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + + # now the next interval start value + if index1 < N1-1: + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1-t_p1 + s1 = dt_p1 + else: + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # s1 needs adjustment due to 
change of isi1 + s1 = dt_p1*(t_end-t1[N1-1])/isi1 + # s2 is the same as above, thus we can compute y2 immediately + # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): + index2 += 1 + # first calculate the previous interval end value + s2 = dt_f2*(t_f2-t_p2) / isi2 + # the previous time now was the following time before: + dt_p2 = dt_f2 + t_p2 = t_f2 # t_p2 contains the current time point + # get the next time + if index2 < N2-1: + t_f2 = t2[index2+1] + else: + t_f2 = t_end + t_curr = t_p2 + s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 + # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + + # now the next interval start value + if index2 < N2-1: + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2-t_p2 + s2 = dt_p2 + else: + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + # s2 needs adjustment due to change of isi2 + s2 = dt_p2*(t_end-t2[N2-1])/isi2 + # s1 is the same as above, thus we can compute y2 immediately + # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + else: # t_f1 == t_f2 - generate only one event + index1 += 1 + index2 += 1 + t_p1 = t_f1 + t_p2 = t_f2 + dt_p1 = 0.0 + dt_p2 = 0.0 + t_curr = t_f1 + y_end = 0.0 + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + y_start = 0.0 + if index1 < N1-1: + t_f1 = t1[index1+1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1 - t_p1 + else: + t_f1 = t_end + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + if index2 < N2-1: + t_f2 = t2[index2+1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2 - t_p2 + else: + t_f2 = t_end + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + index += 1 + t_last = t_curr + # isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + s1 = dt_f1*(t_end-t1[N1-1])/isi1 + s2 = dt_f2*(t_end-t2[N2-1])/isi2 + # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + spike_value += 0.5*(y_start + y_end) * (t_end - t_last) # end nogil diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index fe08cb7..eb4d157 100644 --- a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -466,18 +466,20 @@ def coincidence_single_profile_cython(double[:] spikes1, double[:] spikes2, cdef double tau for i in xrange(N1): while j < N2-1 and spikes2[j+1] < spikes1[i]: + # move forward until spikes2[j] is the last spike before spikes1[i] + # note that if spikes2[j] is after spikes1[i] we dont do anything j += 1 tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) - print i, j, spikes1[i], spikes2[j], tau - if j > -1 and spikes1[i]-spikes2[j] < tau: + if j > -1 and fabs(spikes1[i]-spikes2[j]) < tau: # current spike in st1 is coincident c[i] = 1 - if j < N2-1: + if j < N2-1 and spikes2[j] < spikes1[i]: + # in case spikes2[j] is before spikes1[i] it has to be the one + # right before (see above), 
hence we move one forward and also + # check the next spike j += 1 tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) - print i, j, spikes1[i], spikes2[j], tau - if spikes2[j]-spikes1[i] < tau: + if fabs(spikes2[j]-spikes1[i]) < tau: # current spike in st1 is coincident c[i] = 1 - return c diff --git a/pyspike/cython/cython_simulated_annealing.pyx b/pyspike/cython/cython_simulated_annealing.pyx new file mode 100644 index 0000000..be9423c --- /dev/null +++ b/pyspike/cython/cython_simulated_annealing.pyx @@ -0,0 +1,82 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_simulated_annealing.pyx + +cython implementation of a simulated annealing algorithm to find the optimal +spike train order + +Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects +improves the performance of spike_distance by a factor of 10! + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_simulated_annealing.pyx + +which gives: + + cython_simulated_annealing.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport exp +from libc.math cimport fmod +from libc.stdlib cimport rand +from libc.stdlib cimport RAND_MAX + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +def sim_ann_cython(double[:, :] D, double T_start, double T_end, double alpha): + + cdef long N = len(D) + cdef double A = np.sum(np.triu(D, 0)) + cdef long[:] p = np.arange(N) + cdef double T = T_start + cdef long iterations + cdef long succ_iter + cdef long total_iter = 0 + cdef double delta_A + cdef long ind1 + cdef long ind2 + + while T > T_end: + iterations = 0 + succ_iter = 0 + # equilibrate for 100*N steps or 10*N successful steps + while iterations < 100*N and succ_iter < 10*N: + # exchange two rows and cols + # ind1 = np.random.randint(N-1) + ind1 = rand() % (N-1) + if ind1 < N-1: + ind2 = ind1+1 + else: # this can never happen! 
+ ind2 = 0 + delta_A = -2*D[p[ind1], p[ind2]] + if delta_A > 0.0 or exp(delta_A/T) > ((1.0*rand()) / RAND_MAX): + # swap indices + p[ind1], p[ind2] = p[ind2], p[ind1] + A += delta_A + succ_iter += 1 + iterations += 1 + total_iter += iterations + T *= alpha # cool down + if succ_iter == 0: + # no successful step -> we believe we have converged + break + + return p, A, total_iter diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py index cda7fe3..e1f5f16 100644 --- a/pyspike/spike_directionality.py +++ b/pyspike/spike_directionality.py @@ -242,27 +242,39 @@ def optimal_spike_train_order_from_matrix(D, full_output=False): p = np.arange(N) - T = 2*np.max(D) # starting temperature - T_end = 1E-5 * T # final temperature - alpha = 0.9 # cooling factor - total_iter = 0 - while T > T_end: - iterations = 0 - succ_iter = 0 - while iterations < 100*N and succ_iter < 10*N: - # exchange two rows and cols - ind1 = np.random.randint(N-1) - delta_A = -2*D[p[ind1], p[ind1+1]] - if delta_A > 0.0 or exp(delta_A/T) > np.random.random(): - # swap indices - p[ind1], p[ind1+1] = p[ind1+1], p[ind1] - A += delta_A - succ_iter += 1 - iterations += 1 - total_iter += iterations - T *= alpha # cool down - if succ_iter == 0: - break + T_start = 2*np.max(D) # starting temperature + T_end = 1E-5 * T_start # final temperature + alpha = 0.9 # cooling factor + + from cython.cython_simulated_annealing import sim_ann_cython as sim_ann + + p, A, total_iter = sim_ann(D, T_start, T_end, alpha) + + # T = T_start + # total_iter = 0 + # while T > T_end: + # iterations = 0 + # succ_iter = 0 + # # equilibrate for 100*N steps or 10*N successful steps + # while iterations < 100*N and succ_iter < 10*N: + # # exchange two rows and cols + # ind1 = np.random.randint(N-1) + # if ind1 < N-1: + # ind2 = ind1+1 + # else: # this can never happend + # ind2 = 0 + # delta_A = -2*D[p[ind1], p[ind2]] + # if delta_A > 0.0 or exp(delta_A/T) > np.random.random(): + # # swap indices + # p[ind1], p[ind2] = p[ind2], p[ind1] + # A += delta_A + # succ_iter += 1 + # iterations += 1 + # total_iter += iterations + # T *= alpha # cool down + # if succ_iter == 0: + # break + if full_output: return p, A, total_iter else: diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index d37731f..1d2ecdb 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -295,12 +295,14 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None): ############################################################ # filter_by_spike_sync ############################################################ -def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None): +def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None, + return_removed_spikes=False): """ Removes the spikes with a multi-variate spike_sync value below threshold. """ N = len(spike_trains) filtered_spike_trains = [] + removed_spike_trains = [] # cython implementation try: @@ -308,7 +310,7 @@ def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None): as coincidence_impl except ImportError: if not(pyspike.disable_backend_warning): - print("Warning: coincidence_single_profile_cytho not found. Make \ + print("Warning: coincidence_single_profile_cython not found. 
Make \ sure that PySpike is installed by running\n \ 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") @@ -321,10 +323,19 @@ Falling back to slow python backend.") for i, st in enumerate(spike_trains): coincidences = np.zeros_like(st) - for j in range(N).remove(i): + for j in xrange(N): + if i == j: + continue coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes, st.t_start, st.t_end, max_tau) filtered_spikes = st[coincidences > threshold*(N-1)] filtered_spike_trains.append(SpikeTrain(filtered_spikes, [st.t_start, st.t_end])) - return filtered_spike_trains + if return_removed_spikes: + removed_spikes = st[coincidences <= threshold*(N-1)] + removed_spike_trains.append(SpikeTrain(removed_spikes, + [st.t_start, st.t_end])) + if return_removed_spikes: + return [filtered_spike_trains, removed_spike_trains] + else: + return filtered_spike_trains diff --git a/setup.py b/setup.py index 9ba1da6..808a122 100644 --- a/setup.py +++ b/setup.py @@ -31,7 +31,8 @@ class numpy_include(object): if os.path.isfile("pyspike/cython/cython_add.c") and \ os.path.isfile("pyspike/cython/cython_profiles.c") and \ os.path.isfile("pyspike/cython/cython_distances.c") and \ - os.path.isfile("pyspike/cython/cython_directionality.c"): + os.path.isfile("pyspike/cython/cython_directionality.c") and \ + os.path.isfile("pyspike/cython/cython_simulated_annealing.c"): use_c = True else: use_c = False @@ -48,7 +49,9 @@ if use_cython: # Cython is available, compile .pyx -> .c Extension("pyspike.cython.cython_distances", ["pyspike/cython/cython_distances.pyx"]), Extension("pyspike.cython.cython_directionality", - ["pyspike/cython/cython_directionality.pyx"]) + ["pyspike/cython/cython_directionality.pyx"]), + Extension("pyspike.cython.cython_simulated_annealing", + ["pyspike/cython/cython_simulated_annealing.pyx"]) ] cmdclass.update({'build_ext': build_ext}) elif use_c: # c files are there, compile to binaries @@ -60,7 +63,9 @@ elif use_c: # c files are there, compile to binaries Extension("pyspike.cython.cython_distances", ["pyspike/cython/cython_distances.c"]), Extension("pyspike.cython.cython_directionality", - ["pyspike/cython/cython_directionality.c"]) + ["pyspike/cython/cython_directionality.c"]), + Extension("pyspike.cython.cython_simulated_annealing", + ["pyspike/cython/cython_simulated_annealing.c"]) ] # neither cython nor c files available -> automatic fall-back to python backend @@ -105,7 +110,8 @@ train similarity', package_data={ 'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c', 'cython/cython_distances.c', - 'cython/cython_directionality.c'], + 'cython/cython_directionality.c', + 'cython/cython_simulated_annealing.c'], 'test': ['Spike_testdata.txt'] } ) diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py index ce03b23..66ffcb6 100644 --- a/test/test_sync_filter.py +++ b/test/test_sync_filter.py @@ -17,17 +17,18 @@ import pyspike as spk from pyspike import SpikeTrain -def test_cython(): +def test_single_prof(): st1 = np.array([1.0, 2.0, 3.0, 4.0]) st2 = np.array([1.1, 2.1, 3.8]) + st3 = np.array([0.9, 3.1, 4.1]) # cython implementation try: - from pyspike.cython.cython_profiles import coincidence_single_profile_cython \ - as coincidence_impl + from pyspike.cython.cython_profiles import \ + coincidence_single_profile_cython as coincidence_impl except ImportError: - from pyspike.cython.python_backend import coincidence_single_profile_python \ - as coincidence_impl + from pyspike.cython.python_backend import \ + coincidence_single_profile_python as 
coincidence_impl

     sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
                                        SpikeTrain(st2, 5.0))
@@ -41,3 +42,53 @@ def test_cython():
     for i, t in enumerate(st2):
         assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
                      "At index %d" % i)
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st3, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st3, 0, 5.0, 0.0))
+    for i, t in enumerate(st1):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    st1 = np.array([1.0, 2.0, 3.0, 4.0])
+    st2 = np.array([1.0, 2.0, 4.0])
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st2, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+    for i, t in enumerate(st1):
+        expected = sync_prof.y[sync_prof.x == t]/sync_prof.mp[sync_prof.x == t]
+        assert_equal(coincidences[i], expected,
+                     "At index %d" % i)
+
+
+def test_filter():
+    st1 = SpikeTrain(np.array([1.0, 2.0, 3.0, 4.0]), 5.0)
+    st2 = SpikeTrain(np.array([1.1, 2.1, 3.8]), 5.0)
+    st3 = SpikeTrain(np.array([0.9, 3.1, 4.1]), 5.0)
+
+    # filtered_spike_trains = spk.filter_by_spike_sync([st1, st2], 0.5)
+
+    # assert_equal(filtered_spike_trains[0].spikes, [1.0, 2.0, 4.0])
+    # assert_equal(filtered_spike_trains[1].spikes, [1.1, 2.1, 3.8])
+
+    # filtered_spike_trains = spk.filter_by_spike_sync([st2, st1], 0.5)
+
+    # assert_equal(filtered_spike_trains[0].spikes, [1.1, 2.1, 3.8])
+    # assert_equal(filtered_spike_trains[1].spikes, [1.0, 2.0, 4.0])
+
+    filtered_spike_trains = spk.filter_by_spike_sync([st1, st2, st3], 0.75)
+
+    for st in filtered_spike_trains:
+        print(st.spikes)
+
+    assert_equal(filtered_spike_trains[0].spikes, [1.0, 4.0])
+    assert_equal(filtered_spike_trains[1].spikes, [1.1, 3.8])
+    assert_equal(filtered_spike_trains[2].spikes, [0.9, 4.1])
+
+
+if __name__ == "__main__":
+    test_single_prof()
+    test_filter()
-- cgit v1.2.3


From 6c68690b992a6dfabf8259e265cf427fc6f193eb Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Mon, 12 Oct 2015 11:20:03 +0200
Subject: added coincidence single profile to python backend

Missing function in python backend added; identified and fixed a bug
in the implementation as well.
---
 pyspike/cython/cython_profiles.pyx |  2 +-
 pyspike/cython/python_backend.py   | 67 ++++++++++++++++++++++++++++----------
 pyspike/spike_sync.py              |  2 +-
 test/test_sync_filter.py           |  3 +-
 4 files changed, 53 insertions(+), 21 deletions(-)

(limited to 'test')

diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx
index eb4d157..aa24db4 100644
--- a/pyspike/cython/cython_profiles.pyx
+++ b/pyspike/cython/cython_profiles.pyx
@@ -473,7 +473,7 @@ def coincidence_single_profile_cython(double[:] spikes1, double[:] spikes2,
         if j > -1 and fabs(spikes1[i]-spikes2[j]) < tau:
             # current spike in st1 is coincident
             c[i] = 1
-        if j < N2-1 and spikes2[j] < spikes1[i]:
+        if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]):
             # in case spikes2[j] is before spikes1[i] it has to be the one
             # right before (see above), hence we move one forward and also
             # check the next spike
diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py
index 6b7209a..a4a0c9e 100644
--- a/pyspike/cython/python_backend.py
+++ b/pyspike/cython/python_backend.py
@@ -3,7 +3,7 @@
 Collection of python functions that can be used instead of the cython
 implementation.
-Copyright 2014, Mario Mulansky +Copyright 2014-2015, Mario Mulansky Distributed under the BSD License @@ -356,26 +356,27 @@ def cumulative_sync_python(spikes1, spikes2): return st, c +def get_tau(spikes1, spikes2, i, j, max_tau, init_tau): + m = init_tau + if i < len(spikes1)-1 and i > -1: + m = min(m, spikes1[i+1]-spikes1[i]) + if j < len(spikes2)-1 and j > -1: + m = min(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = min(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = min(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = min(m, max_tau) + return m + + ############################################################ # coincidence_python ############################################################ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): - def get_tau(spikes1, spikes2, i, j, max_tau): - m = t_end - t_start # use interval as initial tau - if i < len(spikes1)-1 and i > -1: - m = min(m, spikes1[i+1]-spikes1[i]) - if j < len(spikes2)-1 and j > -1: - m = min(m, spikes2[j+1]-spikes2[j]) - if i > 0: - m = min(m, spikes1[i]-spikes1[i-1]) - if j > 0: - m = min(m, spikes2[j]-spikes2[j-1]) - m *= 0.5 - if max_tau > 0.0: - m = min(m, max_tau) - return m - N1 = len(spikes1) N2 = len(spikes2) i = -1 @@ -388,7 +389,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): i += 1 n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) st[n] = spikes1[i] if j > -1 and spikes1[i]-spikes2[j] < tau: # coincidence between the current spike and the previous spike @@ -398,7 +399,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): j += 1 n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) st[n] = spikes2[j] if i > -1 and spikes2[j]-spikes1[i] < tau: # coincidence between the current spike and the previous spike @@ -433,6 +434,36 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): return st, c, mp +############################################################ +# coincidence_single_profile_cython +############################################################ +def coincidence_single_python(spikes1, spikes2, t_start, t_end, max_tau): + + N1 = len(spikes1) + N2 = len(spikes2) + j = -1 + c = np.zeros(N1) # coincidences + for i in xrange(N1): + while j < N2-1 and spikes2[j+1] < spikes1[i]: + # move forward until spikes2[j] is the last spike before spikes1[i] + # note that if spikes2[j] is after spikes1[i] we dont do anything + j += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) + if j > -1 and abs(spikes1[i]-spikes2[j]) < tau: + # current spike in st1 is coincident + c[i] = 1 + if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]): + # in case spikes2[j] is before spikes1[i] it has to be the first or + # the one right before (see above), hence we move one forward and + # also check the next spike + j += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) + if abs(spikes2[j]-spikes1[i]) < tau: + # current spike in st1 is coincident + c[i] = 1 + return c + + ############################################################ # add_piece_wise_const_python ############################################################ diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index 1d2ecdb..66aa906 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ 
-315,7 +315,7 @@ sure that PySpike is installed by running\n \ 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from cython.python_backend import coincidence_single_profile_python \ + from cython.python_backend import coincidence_single_python \ as coincidence_impl if max_tau is None: diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py index 66ffcb6..e259903 100644 --- a/test/test_sync_filter.py +++ b/test/test_sync_filter.py @@ -28,12 +28,13 @@ def test_single_prof(): coincidence_single_profile_cython as coincidence_impl except ImportError: from pyspike.cython.python_backend import \ - coincidence_single_profile_python as coincidence_impl + coincidence_single_python as coincidence_impl sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0), SpikeTrain(st2, 5.0)) coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0)) + print(coincidences) for i, t in enumerate(st1): assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t], "At index %d" % i) -- cgit v1.2.3 From 67f20e486b9d7da6a9c9e1cf51a1f4138b642e61 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 29 Mar 2016 12:43:21 +0200 Subject: updated test case to new spike sync behavior --- test/test_directionality.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'test') diff --git a/test/test_directionality.py b/test/test_directionality.py index 7ca5d58..3e12177 100644 --- a/test/test_directionality.py +++ b/test/test_directionality.py @@ -82,7 +82,7 @@ def test_spike_train_order(): expected_x = np.array([0, 100, 105, 195, 200, 205, 300, 500, 1000]) expected_y = np.array([2, 2, 2, -2, 0, 0, 0, 0, 0]) - expected_mp = np.array([2, 2, 4, 2, 2, 2, 4, 2, 4]) + expected_mp = np.array([2, 2, 4, 2, 2, 2, 4, 2, 2]) assert_array_equal(f.x, expected_x) assert_array_equal(f.y, expected_y) -- cgit v1.2.3 From 110d90117f048a28ed6f9efd00176477dd54c3a6 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Sat, 9 Jun 2018 13:13:41 -0700 Subject: remove commented code --- pyspike/spike_directionality.py | 54 ----------------------------------------- test/test_directionality.py | 4 +-- 2 files changed, 2 insertions(+), 56 deletions(-) (limited to 'test') diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py index cd2bf04..d1a525e 100644 --- a/pyspike/spike_directionality.py +++ b/pyspike/spike_directionality.py @@ -310,57 +310,3 @@ def permutate_matrix(D, p): for m in range(N): D_p[n, m] = D[p[n], p[m]] return D_p - - -# internal helper functions - -############################################################ -# _spike_directionality_profile -############################################################ -# def _spike_directionality_profile(spike_train1, spike_train2, -# max_tau=None): -# """ Computes the spike delay asymmetry profile A(t) of the two given -# spike trains. Returns the profile as a DiscreteFunction object. - -# :param spike_train1: First spike train. -# :type spike_train1: :class:`pyspike.SpikeTrain` -# :param spike_train2: Second spike train. -# :type spike_train2: :class:`pyspike.SpikeTrain` -# :param max_tau: Maximum coincidence window size. If 0 or `None`, the -# coincidence window has no upper bound. -# :returns: The spike-distance profile :math:`S_{sync}(t)`. -# :rtype: :class:`pyspike.function.DiscreteFunction` - -# """ -# # check whether the spike trains are defined for the same interval -# assert spike_train1.t_start == spike_train2.t_start, \ -# "Given spike trains are not defined on the same interval!" 
-# assert spike_train1.t_end == spike_train2.t_end, \ -# "Given spike trains are not defined on the same interval!" - -# # cython implementation -# try: -# from cython.cython_directionality import \ -# spike_train_order_profile_cython as \ -# spike_train_order_profile_impl -# except ImportError: -# # raise NotImplementedError() -# if not(pyspike.disable_backend_warning): -# print("Warning: spike_distance_cython not found. Make sure that \ -# PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ -# Falling back to slow python backend.") -# # use python backend -# from cython.directionality_python_backend import \ -# spike_train_order_python as spike_train_order_profile_impl - -# if max_tau is None: -# max_tau = 0.0 - -# times, coincidences, multiplicity \ -# = spike_train_order_profile_impl(spike_train1.spikes, -# spike_train2.spikes, -# spike_train1.t_start, -# spike_train1.t_end, -# max_tau) - -# return DiscreteFunc(times, coincidences, multiplicity) diff --git a/test/test_directionality.py b/test/test_directionality.py index 3e12177..63865cc 100644 --- a/test/test_directionality.py +++ b/test/test_directionality.py @@ -1,6 +1,6 @@ -""" test_spike_delay_asymmetry.py +""" test_directionality.py -Tests the asymmetry functions +Tests the directionality functions Copyright 2015, Mario Mulansky -- cgit v1.2.3 From aed1f8185cf2a3b3f9330e93681fa12b365d287b Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Sun, 15 Jul 2018 15:33:22 -0700 Subject: Clean up directionality module, add doxy. --- doc/pyspike.rst | 1 + pyspike/__init__.py | 17 +- pyspike/spike_directionality.py | 446 +++++++++++++++++++++++++++++----------- pyspike/spike_sync.py | 4 +- test/test_directionality.py | 14 +- 5 files changed, 349 insertions(+), 133 deletions(-) (limited to 'test') diff --git a/doc/pyspike.rst b/doc/pyspike.rst index 9552fa6..3b10d2a 100644 --- a/doc/pyspike.rst +++ b/doc/pyspike.rst @@ -65,6 +65,7 @@ PSTH :show-inheritance: Directionality +........................................ .. 
automodule:: pyspike.spike_directionality
     :members:
     :undoc-members:
     :show-inheritance:
diff --git a/pyspike/__init__.py b/pyspike/__init__.py
index 3cf416f..3897d18 100644
--- a/pyspike/__init__.py
+++ b/pyspike/__init__.py
@@ -1,5 +1,5 @@
 """
-Copyright 2014-2015, Mario Mulansky
+Copyright 2014-2018, Mario Mulansky

 Distributed under the BSD License
 """
@@ -29,16 +29,11 @@ from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \
     merge_spike_trains, generate_poisson_spikes

 from .spike_directionality import spike_directionality, \
-    spike_directionality_profiles, spike_directionality_matrix, \
-    spike_train_order_profile, spike_train_order, \
-    spike_train_order_profile_multi, optimal_spike_train_order_from_matrix, \
-    optimal_spike_train_order, permutate_matrix
-
-from .spike_directionality import spike_directionality, \
-    spike_directionality_profiles, spike_directionality_matrix, \
-    spike_train_order_profile, spike_train_order, \
-    spike_train_order_profile_multi, optimal_spike_train_order_from_matrix, \
-    optimal_spike_train_order, permutate_matrix
+    spike_directionality_values, spike_directionality_matrix, \
+    spike_train_order_profile, spike_train_order_profile_bi, \
+    spike_train_order_profile_multi, spike_train_order, \
+    spike_train_order_bi, spike_train_order_multi, \
+    optimal_spike_train_sorting, permutate_matrix

 # define the __version__ following
 # http://stackoverflow.com/questions/17583443
diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py
index d1a525e..3a71f0b 100644
--- a/pyspike/spike_directionality.py
+++ b/pyspike/spike_directionality.py
@@ -12,12 +12,109 @@ from functools import partial
 from pyspike.generic import _generic_profile_multi


+############################################################
+# spike_directionality_values
+############################################################
+def spike_directionality_values(*args, **kwargs):
+    """ Computes the spike directionality value for each spike in
+    each spike train. Returns a list containing an array of spike directionality
+    values for every given spike train.
+
+    Valid call structures::
+
+      spike_directionality_values(st1, st2)  # returns the bi-variate profile
+      spike_directionality_values(st1, st2, st3)  # multi-variate profile of 3
+                                                  # spike trains
+
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_directionality_values(spike_trains)  # profile of the list of spike trains
+      spike_directionality_values(spike_trains, indices=[0, 1])  # use only the spike trains
+                                                                 # given by the indices
+
+    Additional arguments:
+    :param max_tau: Upper bound for coincidence window (default=None).
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+
+    :returns: The spike directionality values :math:`D^n_i` as a list of arrays.
+    """
+    if len(args) == 1:
+        return _spike_directionality_values_impl(args[0], **kwargs)
+    else:
+        return _spike_directionality_values_impl(args, **kwargs)
+
+
+def _spike_directionality_values_impl(spike_trains, indices=None,
+                                      interval=None, max_tau=None):
+    """ Computes the multi-variate spike directionality profile
+    of the given spike trains.
+
+    :param spike_trains: List of spike trains.
+    :type spike_trains: List of :class:`pyspike.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike-directionality values.
+    """
+    if interval is not None:
+        raise NotImplementedError("Parameter `interval` not supported.")
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # list of arrays for resulting asymmetry values
+    asymmetry_list = [np.zeros_like(spike_trains[n].spikes) for n in indices]
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    # cython implementation
+    try:
+        from .cython.cython_directionality import \
+            spike_directionality_profiles_cython as profile_impl
+    except ImportError:
+        if not(pyspike.disable_backend_warning):
+            print("Warning: spike_distance_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+        # use python backend
+        from .cython.directionality_python_backend import \
+            spike_directionality_profile_python as profile_impl
+
+    if max_tau is None:
+        max_tau = 0.0
+
+    for i, j in pairs:
+        d1, d2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes,
+                              spike_trains[i].t_start, spike_trains[i].t_end,
+                              max_tau)
+        asymmetry_list[i] += d1
+        asymmetry_list[j] += d2
+    for a in asymmetry_list:
+        a /= len(spike_trains)-1
+    return asymmetry_list
+
+
 ############################################################
 # spike_directionality
 ############################################################
 def spike_directionality(spike_train1, spike_train2, normalize=True,
                          interval=None, max_tau=None):
-    """ Computes the overall spike directionality for two spike trains.
+    """ Computes the overall spike directionality of the first spike train with
+    respect to the second spike train.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike directionality value.
     """
     if interval is None:
         # distance over the whole interval is requested: use specific function
@@ -34,9 +131,14 @@ def spike_directionality(spike_train1, spike_train2, normalize=True,
                                           max_tau)
             c = len(spike_train1.spikes)
         except ImportError:
-            d1, x = spike_directionality_profiles([spike_train1, spike_train2],
-                                                  interval=interval,
-                                                  max_tau=max_tau)
+            if not(pyspike.disable_backend_warning):
+                print("Warning: spike_distance_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+            # use profile.
+            d1, x = spike_directionality_values([spike_train1, spike_train2],
+                                                interval=interval,
+                                                max_tau=max_tau)
             d = np.sum(d1)
             c = len(spike_train1.spikes)
         if normalize:
@@ -45,7 +147,7 @@ def spike_directionality(spike_train1, spike_train2, normalize=True,
             return d
     else:
         # some specific interval is provided: not yet implemented
-        raise NotImplementedError()
+        raise NotImplementedError("Parameter `interval` not supported.")


 ############################################################
@@ -53,7 +155,17 @@ def spike_directionality(spike_train1, spike_train2, normalize=True,
 ############################################################
 def spike_directionality_matrix(spike_trains, normalize=True, indices=None,
                                 interval=None, max_tau=None):
-    """ Computes the spike directionaity matrix for the given spike trains.
+    """ Computes the spike directionality matrix for the given spike trains.
+
+    :param spike_trains: List of spike trains.
+    :type spike_trains: List of :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike directionality matrix.
     """
     if indices is None:
         indices = np.arange(len(spike_trains))
@@ -75,65 +187,53 @@ def spike_directionality_matrix(spike_trains, normalize=True, indices=None,


 ############################################################
-# spike_directionality_profiles
+# spike_train_order_profile
 ############################################################
-def spike_directionality_profiles(spike_trains, indices=None,
-                                  interval=None, max_tau=None):
-    """ Computes the spike directionality value for each spike in each spike
-    train.
-    """
-    if indices is None:
-        indices = np.arange(len(spike_trains))
-    indices = np.array(indices)
-    # check validity of indices
-    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
-        "Invalid index list."
-    # list of arrays for reulting asymmetry values
-    asymmetry_list = [np.zeros_like(st.spikes) for st in spike_trains]
-    # generate a list of possible index pairs
-    pairs = [(indices[i], j) for i in range(len(indices))
-             for j in indices[i+1:]]
+def spike_train_order_profile(*args, **kwargs):
+    """ Computes the spike train order profile :math:`E(t)` of the given
+    spike trains. Returns the profile as a DiscreteFunction object.

-    # cython implementation
-    try:
-        from .cython.cython_directionality import \
-            spike_directionality_profiles_cython as profile_impl
-    except ImportError:
-        if not(pyspike.disable_backend_warning):
-            print("Warning: spike_distance_cython not found.
Make sure that \
-PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
-Falling back to slow python backend.")
-        # use python backend
-        from .cython.directionality_python_backend import \
-            spike_directionality_profile_python as profile_impl
+    Valid call structures::

-    if max_tau is None:
-        max_tau = 0.0
+      spike_train_order_profile(st1, st2)  # returns the bi-variate profile
+      spike_train_order_profile(st1, st2, st3)  # multi-variate profile of 3
+                                                # spike trains

-    for i, j in pairs:
-        d1, d2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes,
-                              spike_trains[i].t_start, spike_trains[i].t_end,
-                              max_tau)
-        asymmetry_list[i] += d1
-        asymmetry_list[j] += d2
-    for a in asymmetry_list:
-        a /= len(spike_trains)-1
-    return asymmetry_list
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_train_order_profile(spike_trains)  # profile of the list of spike trains
+      spike_train_order_profile(spike_trains, indices=[0, 1])  # use only the spike trains
+                                                               # given by the indices
+
+    Additional arguments:
+    :param max_tau: Upper bound for coincidence window, `default=None`.
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+
+    :returns: The spike train order profile :math:`E(t)`
+    :rtype: :class:`.DiscreteFunction`
+    """
+    if len(args) == 1:
+        return spike_train_order_profile_multi(args[0], **kwargs)
+    elif len(args) == 2:
+        return spike_train_order_profile_bi(args[0], args[1], **kwargs)
+    else:
+        return spike_train_order_profile_multi(args, **kwargs)


 ############################################################
-# spike_train_order_profile
+# spike_train_order_profile_bi
 ############################################################
-def spike_train_order_profile(spike_train1, spike_train2, max_tau=None):
+def spike_train_order_profile_bi(spike_train1, spike_train2, max_tau=None):
     """ Computes the spike train order profile P(t) of the two given
     spike trains. Returns the profile as a DiscreteFunction object.

+    :param spike_train1: First spike train.
     :type spike_train1: :class:`pyspike.SpikeTrain`
     :param spike_train2: Second spike train.
     :type spike_train2: :class:`pyspike.SpikeTrain`
     :param max_tau: Maximum coincidence window size. If 0 or `None`, the
                     coincidence window has no upper bound.
-    :returns: The spike-distance profile :math:`S_{sync}(t)`.
+    :returns: The spike train order profile :math:`E(t)`.
     :rtype: :class:`pyspike.function.DiscreteFunction`
     """
     # check whether the spike trains are defined for the same interval
@@ -171,21 +271,56 @@ Falling back to slow python backend.")


 ############################################################
-# spike_train_order
+# spike_train_order_profile_multi
+############################################################
+def spike_train_order_profile_multi(spike_trains, indices=None,
+                                    max_tau=None):
+    """ Computes the multi-variate spike train order profile for a set of
+    spike trains. For each spike in the set of spike trains, the multi-variate
+    profile is defined as the sum of asymmetry values divided by the number of
+    spike train pairs involving the spike train containing this spike,
+    which is the number of spike trains minus one (N-1).
+
+    :param spike_trains: list of :class:`pyspike.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The multi-variate spike sync profile :math:`<S_{sync}>(t)`
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+    """
+    prof_func = partial(spike_train_order_profile_bi, max_tau=max_tau)
+    average_prof, M = _generic_profile_multi(spike_trains, prof_func,
+                                             indices)
+    return average_prof
+
+
+
+############################################################
+# _spike_train_order_impl
 ############################################################
-def spike_train_order(spike_train1, spike_train2, normalize=True,
-                      interval=None, max_tau=None):
-    """ Computes the overall spike delay asymmetry value for two spike trains.
+def _spike_train_order_impl(spike_train1, spike_train2,
+                            interval=None, max_tau=None):
+    """ Implementation of bi-variate spike train order value (Synfire Indicator).
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order value (Synfire Indicator)
     """
     if interval is None:
         # distance over the whole interval is requested: use specific function
         # for optimal performance
         try:
             from .cython.cython_directionality import \
-                spike_train_order_cython as spike_train_order_impl
+                spike_train_order_cython as spike_train_order_func
             if max_tau is None:
                 max_tau = 0.0
-            c, mp = spike_train_order_impl(spike_train1.spikes,
+            c, mp = spike_train_order_func(spike_train1.spikes,
                                            spike_train2.spikes,
                                            spike_train1.t_start,
                                            spike_train1.t_end,
@@ -194,49 +329,127 @@ def spike_train_order(spike_train1, spike_train2, normalize=True,
             # Cython backend not available: fall back to profile averaging
             c, mp = spike_train_order_profile(spike_train1, spike_train2,
                                               max_tau).integral(interval)
-        if normalize:
-            return 1.0*c/mp
-        else:
-            return c
+        return c, mp
     else:
         # some specific interval is provided: not yet implemented
-        raise NotImplementedError()
+        raise NotImplementedError("Parameter `interval` not supported.")


 ############################################################
-# spike_train_order_profile_multi
+# spike_train_order
 ############################################################
-def spike_train_order_profile_multi(spike_trains, indices=None,
-                                    max_tau=None):
-    """ Computes the multi-variate spike delay asymmetry profile for a set of
-    spike trains. For each spike in the set of spike trains, the multi-variate
-    profile is defined as the sum of asymmetry values divided by the number of
-    spike trains pairs involving the spike train of containing this spike,
-    which is the number of spike trains minus one (N-1).
-    :param spike_trains: list of :class:`pyspike.SpikeTrain`
+def spike_train_order(*args, **kwargs):
+    """ Computes the spike train order (Synfire Indicator) of the given
+    spike trains.
+
+    Valid call structures::
+
+      spike_train_order(st1, st2, normalize=True)  # normalized bi-variate
+                                                   # spike train order
+      spike_train_order(st1, st2, st3)  # multi-variate result of 3 spike trains
+
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_train_order(spike_trains)  # result for the list of spike trains
+      spike_train_order(spike_trains, indices=[0, 1])  # use only the spike trains
+                                                       # given by the indices
+
+    Additional arguments:
+     - `max_tau` Upper bound for coincidence window, `default=None`.
+     - `normalize` Flag indicating if the result should be normalized by the
+       number of spikes (multiplicity), default=`True`
+
+
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    if len(args) == 1:
+        return spike_train_order_multi(args[0], **kwargs)
+    elif len(args) == 2:
+        return spike_train_order_bi(args[0], args[1], **kwargs)
+    else:
+        return spike_train_order_multi(args, **kwargs)
+
+
+############################################################
+# spike_train_order_bi
+############################################################
+def spike_train_order_bi(spike_train1, spike_train2, normalize=True,
+                         interval=None, max_tau=None):
+    """ Computes the overall spike train order value (Synfire Indicator)
+    for two spike trains.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    c, mp = _spike_train_order_impl(spike_train1, spike_train2, interval, max_tau)
+    if normalize:
+        return 1.0*c/mp
+    else:
+        return c
+
+############################################################
+# spike_train_order_multi
+############################################################
+def spike_train_order_multi(spike_trains, indices=None, normalize=True,
+                            interval=None, max_tau=None):
+    """ Computes the overall spike train order value (Synfire Indicator)
+    for many spike trains.
+
+    :param spike_trains: list of :class:`.SpikeTrain`
     :param indices: list of indices defining which spike trains to use,
                     if None all given spike trains are used (default=None)
-    :type indices: list or None
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param interval: averaging interval given as a pair of floats, if None
+                     the average over the whole function is computed.
+    :type interval: Pair of floats or None.
     :param max_tau: Maximum coincidence window size. If 0 or `None`, the
                     coincidence window has no upper bound.
-    :returns: The multi-variate spike sync profile :math:`<S_{sync}>(t)`
-    :rtype: :class:`pyspike.function.DiscreteFunction`
+    :returns: Spike train order values (Synfire Indicator) F for the given spike trains.
+    :rtype: double
     """
-    prof_func = partial(spike_train_order_profile, max_tau=max_tau)
-    average_prof, M = _generic_profile_multi(spike_trains, prof_func,
-                                             indices)
-    # average_dist.mul_scalar(1.0/M)  # no normalization here!
-    return average_prof
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    e_total = 0.0
+    m_total = 0.0
+    for (i, j) in pairs:
+        e, m = _spike_train_order_impl(spike_trains[i], spike_trains[j],
+                                       interval, max_tau)
+        e_total += e
+        m_total += m
+
+    if m_total == 0.0:
+        return 1.0
+    else:
+        return e_total/m_total
+

 ############################################################
-# optimal_spike_train_order_from_matrix
+# optimal_spike_train_sorting_from_matrix
 ############################################################
-def optimal_spike_train_order_from_matrix(D, full_output=False):
-    """ finds the best sorting via simulated annealing.
+def _optimal_spike_train_sorting_from_matrix(D, full_output=False):
+    """ Finds the best sorting via simulated annealing.
     Returns the optimal permutation p and A value.
-    Internal function, don't call directly! Use optimal_asymmetry_order
-    instead.
+    Not for direct use, call :func:`.optimal_spike_train_sorting` instead.
+
+    :param D: The directionality (Spike-ORDER) matrix.
+    :param full_output: If true, then function will additionally return the
+                        number of performed iterations (default=False)
+    :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+             if `full_output=True`, (p, F, iter) is returned.
     """
     N = len(D)
     A = np.sum(np.triu(D, 0))
@@ -247,35 +460,14 @@ def optimal_spike_train_order_from_matrix(D, full_output=False):
     T_end = 1E-5 * T_start  # final temperature
     alpha = 0.9  # cooling factor

-    from .cython.cython_simulated_annealing import sim_ann_cython as sim_ann
+    try:
+        from .cython.cython_simulated_annealing import sim_ann_cython as sim_ann
+    except ImportError:
+        raise NotImplementedError("PySpike with Cython required for computing spike train"
+                                  " sorting!")

     p, A, total_iter = sim_ann(D, T_start, T_end, alpha)

-    # T = T_start
-    # total_iter = 0
-    # while T > T_end:
-    #     iterations = 0
-    #     succ_iter = 0
-    #     # equilibrate for 100*N steps or 10*N successful steps
-    #     while iterations < 100*N and succ_iter < 10*N:
-    #         # exchange two rows and cols
-    #         ind1 = np.random.randint(N-1)
-    #         if ind1 < N-1:
-    #             ind2 = ind1+1
-    #         else:  # this can never happend
-    #             ind2 = 0
-    #         delta_A = -2*D[p[ind1], p[ind2]]
-    #         if delta_A > 0.0 or exp(delta_A/T) > np.random.random():
-    #             # swap indices
-    #             p[ind1], p[ind2] = p[ind2], p[ind1]
-    #             A += delta_A
-    #             succ_iter += 1
-    #         iterations += 1
-    #     total_iter += iterations
-    #     T *= alpha  # cool down
-    #     if succ_iter == 0:
-    #         break
-
     if full_output:
         return p, A, total_iter
     else:
@@ -283,26 +475,44 @@ def optimal_spike_train_order_from_matrix(D, full_output=False):


 ############################################################
-# optimal_spike_train_order
+# optimal_spike_train_sorting
 ############################################################
-def optimal_spike_train_order(spike_trains, indices=None, interval=None,
-                              max_tau=None, full_output=False):
-    """ finds the best sorting of the given spike trains via simulated
-    annealing.
-    Returns the optimal permutation p and A value.
+def optimal_spike_train_sorting(spike_trains, indices=None, interval=None,
+                                max_tau=None, full_output=False):
+    """ Finds the best sorting of the given spike trains by computing the spike
+    directionality matrix and optimizing the order using simulated annealing.
+    For a detailed description of the algorithm see:
+    `http://iopscience.iop.org/article/10.1088/1367-2630/aa68c3/meta`
+
+    :param spike_trains: list of :class:`.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param interval: time interval filter given as a pair of floats, if None
+                     the full spike trains are used (default=None).
+    :type interval: Pair of floats or None.
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound (default=None).
+    :param full_output: If true, then function will additionally return the
+                        number of performed iterations (default=False)
+    :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+             if `full_output=True`, (p, F, iter) is returned.
     """
     D = spike_directionality_matrix(spike_trains, normalize=False,
                                     indices=indices, interval=interval,
                                     max_tau=max_tau)
-    return optimal_spike_train_order_from_matrix(D, full_output)
-
+    return _optimal_spike_train_sorting_from_matrix(D, full_output)

 ############################################################
 # permutate_matrix
 ############################################################
 def permutate_matrix(D, p):
-    """ Applies the permutation p to the columns and rows of matrix D.
-    Return the new permutated matrix.
+    """ Helper function that applies the permutation p to the columns and rows
+    of matrix D. Return the permutated matrix :math:`D'[n,m] = D[p[n], p[m]]`.
+
+    :param D: The matrix.
+    :param p: The permutation.
+    :return: The permutated matrix D', i.e. :math:`D'[n,m] = D[p[n], p[m]]`
     """
     N = len(D)
     D_p = np.empty_like(D)
diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py
index 32e6bf4..95ef454 100644
--- a/pyspike/spike_sync.py
+++ b/pyspike/spike_sync.py
@@ -45,9 +45,9 @@ def spike_sync_profile(*args, **kwargs):
     if len(args) == 1:
         return spike_sync_profile_multi(args[0], **kwargs)
     elif len(args) == 2:
-        return spike_sync_profile_bi(args[0], args[1])
+        return spike_sync_profile_bi(args[0], args[1], **kwargs)
     else:
-        return spike_sync_profile_multi(args)
+        return spike_sync_profile_multi(args, **kwargs)


 ############################################################
diff --git a/test/test_directionality.py b/test/test_directionality.py
index 63865cc..5c7e917 100644
--- a/test/test_directionality.py
+++ b/test/test_directionality.py
@@ -14,7 +14,6 @@ from numpy.testing import assert_equal, assert_almost_equal, \

 import pyspike as spk
 from pyspike import SpikeTrain, DiscreteFunc
-# from pyspike.spike_directionality import _spike_directionality_profile


 def test_spike_directionality():
@@ -39,7 +38,7 @@ def test_spike_directionality():
     D_expected = np.array([[0, 2.0, 0.0], [-2.0, 0.0, -1.0], [0.0, 1.0, 0.0]])
     assert_array_equal(D, D_expected)

-    dir_profs = spk.spike_directionality_profiles([st1, st2, st3])
+    dir_profs = spk.spike_directionality_values([st1, st2, st3])

     assert_array_equal(dir_profs[0], [1.0, 0.0, 0.0])
     assert_array_equal(dir_profs[1], [-0.5, -1.0, 0.0])
@@ -87,3 +86,14 @@ def test_spike_train_order():
     assert_array_equal(f.x, expected_x)
     assert_array_equal(f.y, expected_y)
     assert_array_equal(f.mp, expected_mp)
+
+    # Averaging the profile should be the same as computing the synfire indicator directly.
+    assert_almost_equal(f.avrg(), spk.spike_train_order([st1, st2, st3]))
+
+    # We can also compute the synfire indicator from the Directionality Matrix:
+    D_matrix = spk.spike_directionality_matrix([st1, st2, st3], normalize=False)
+    print D_matrix
+    num_spikes = np.sum(len(st) for st in [st1, st2, st3])
+    print f.avrg(), num_spikes
+    syn_fire = np.sum(np.triu(D_matrix)) / num_spikes
+    assert_almost_equal(f.avrg(), syn_fire)
-- 
cgit v1.2.3


From 5ff5cfa0641490fb63509e0cac30f86562f7ee16 Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Sun, 15 Jul 2018 15:47:03 -0700
Subject: Remove debug print from tests

---
 test/test_directionality.py | 2 --
 1 file changed, 2 deletions(-)

(limited to 'test')

diff --git a/test/test_directionality.py b/test/test_directionality.py
index 5c7e917..c2e9bfe 100644
--- a/test/test_directionality.py
+++ b/test/test_directionality.py
@@ -92,8 +92,6 @@ def test_spike_train_order():

     # We can also compute the synfire indicator from the Directionality Matrix:
     D_matrix = spk.spike_directionality_matrix([st1, st2, st3], normalize=False)
-    print D_matrix
     num_spikes = np.sum(len(st) for st in [st1, st2, st3])
-    print f.avrg(), num_spikes
     syn_fire = np.sum(np.triu(D_matrix)) / num_spikes
     assert_almost_equal(f.avrg(), syn_fire)
-- 
cgit v1.2.3


From 5178df8d74bcdde310f8007ac891dd8a1bf4a9d2 Mon Sep 17 00:00:00 2001
From: Jonathan Jouty
Date: Sun, 22 Jul 2018 02:50:17 +0200
Subject: Fix incorrect integrals in PieceWiseConstFunc (#36)

* Add (some currently failing) tests for PieceWiseConstFunc.integral

* Fix implementation of PieceWiseConstFunc.integral by adding a special
condition for when we are only taking an integral "between" two edges
of a PieceWiseConstFunc. All tests now pass. Fixes #33.

* Add PieceWiseConstFunc.integral tests for ValueError

* Add testing bounds of integral

* Raise ValueError in function implementation

---
 pyspike/PieceWiseConstFunc.py | 32 ++++++++++++++++++---------
 test/test_function.py         | 50 +++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 72 insertions(+), 10 deletions(-)

(limited to 'test')

diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py
index 5ce5f27..17fdd3f 100644
--- a/pyspike/PieceWiseConstFunc.py
+++ b/pyspike/PieceWiseConstFunc.py
@@ -129,19 +129,31 @@ class PieceWiseConstFunc(object):
             # no interval given, integrate over the whole spike train
             a = np.sum((self.x[1:]-self.x[:-1]) * self.y)
         else:
+            if interval[0]>interval[1]:
+                raise ValueError("Invalid averaging interval: interval[0]>=interval[1]")
+            if interval[0]<self.x[0]:
+                raise ValueError("Invalid averaging interval: interval[0]<self.x[0]")
+            if interval[1]>self.x[-1]:
+                raise ValueError("Invalid averaging interval: interval[1]>self.x[-1]")
             # find the indices corresponding to the interval
             start_ind = np.searchsorted(self.x, interval[0], side='right')
             end_ind = np.searchsorted(self.x, interval[1], side='left')-1
-            assert start_ind > 0 and end_ind < len(self.x), \
-                "Invalid averaging interval"
-            # first the contribution from between the indices
-            a = np.sum((self.x[start_ind+1:end_ind+1] -
-                        self.x[start_ind:end_ind]) *
-                       self.y[start_ind:end_ind])
-            # correction from start to first index
-            a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1]
-            # correction from last index to end
-            a += (interval[1]-self.x[end_ind]) * self.y[end_ind]
+            if start_ind > end_ind:
+                # contribution from between two closest edges
+                a = (self.x[start_ind]-self.x[end_ind]) * self.y[end_ind]
+                # minus the part that is not within the interval
+                a -= ((interval[0]-self.x[end_ind])+(self.x[start_ind]-interval[1])) * self.y[end_ind]
+            else:
+                assert start_ind > 0 and end_ind < len(self.x), \
+                    "Invalid averaging interval"
+                # first the contribution from between the indices
+                a = np.sum((self.x[start_ind+1:end_ind+1] -
+                            self.x[start_ind:end_ind]) *
+                           self.y[start_ind:end_ind])
+                # correction from start to first index
+                a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1]
+                # correction from last index to end
+                a += (interval[1]-self.x[end_ind]) * self.y[end_ind]
         return a

     def avrg(self, interval=None):
diff --git a/test/test_function.py b/test/test_function.py
index 92d378d..ddeb4b1 100644
--- a/test/test_function.py
+++ b/test/test_function.py
@@ -10,6 +10,7 @@ Distributed under the BSD License
 from __future__ import print_function
 import numpy as np
 from copy import copy
+from nose.tools import raises
 from numpy.testing import assert_equal, assert_almost_equal, \
     assert_array_equal, assert_array_almost_equal

@@ -49,6 +50,8 @@ def test_pwc():
     assert_almost_equal(a, (0.5-0.5+0.5*1.5+1.0*0.75)/3.0, decimal=16)
     a = f.avrg([1.5, 3.5])
     assert_almost_equal(a, (-0.5*0.5+0.5*1.5+1.0*0.75)/2.0, decimal=16)
+    a = f.avrg([1.0, 2.0])
+    assert_almost_equal(a, (1.0*-0.5)/1.0, decimal=16)
     a = f.avrg([1.0, 3.5])
     assert_almost_equal(a, (-0.5*1.0+0.5*1.5+1.0*0.75)/2.5, decimal=16)
     a = f.avrg([1.0, 4.0])
@@ -120,6 +123,53 @@ def test_pwc_avrg():
     assert_array_almost_equal(f1.x, x_expected, decimal=16)
     assert_array_almost_equal(f1.y, y_expected, decimal=16)

+def test_pwc_integral():
+    # some random data
+    x = [0.0, 1.0, 2.0, 2.5, 4.0]
+    y = [1.0, -0.5, 1.5, 0.75]
+    f1 = spk.PieceWiseConstFunc(x, y)
+
+    # test full interval
+    full = 1.0*1.0 + 1.0*-0.5 + 0.5*1.5 + 1.5*0.75
+    assert_equal(f1.integral(), full)
+    assert_equal(f1.integral((np.min(x),np.max(x))), full)
+    # test part interval, spanning an edge
+    assert_equal(f1.integral((0.5,1.5)), 0.5*1.0 + 0.5*-0.5)
+    # test part interval, just over two edges
+    assert_almost_equal(f1.integral((1.0-1e-16,2+1e-16)), 1.0*-0.5, decimal=16)
+    # test part interval, between two edges
+    assert_equal(f1.integral((1.0,2.0)), 1.0*-0.5)
+    assert_equal(f1.integral((1.2,1.7)), (1.7-1.2)*-0.5)
+    # test part interval, start to before and after edge
+    assert_equal(f1.integral((0.0,0.7)), 0.7*1.0)
+    assert_equal(f1.integral((0.0,1.1)), 1.0*1.0+0.1*-0.5)
+    # test part interval, before and after edge till end
+    assert_equal(f1.integral((2.6,4.0)), (4.0-2.6)*0.75)
+    assert_equal(f1.integral((2.4,4.0)), (2.5-2.4)*1.5+(4-2.5)*0.75)
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_inv():
+    # some random data
+    x = [0.0, 1.0, 2.0, 2.5, 4.0]
+    y = [1.0, -0.5, 1.5, 0.75]
+    f1 = spk.PieceWiseConstFunc(x, y)
+    f1.integral((3,2))
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_oob_1():
+    # some random data
+    x = [0.0, 1.0, 2.0, 2.5, 4.0]
+    y = [1.0, -0.5, 1.5, 0.75]
+    f1 = spk.PieceWiseConstFunc(x, y)
+    f1.integral((1,6))
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_oob_2():
+    # some random data
+    x = [0.0, 1.0, 2.0, 2.5, 4.0]
+    y = [1.0, -0.5, 1.5, 0.75]
+    f1 = spk.PieceWiseConstFunc(x, y)
+    f1.integral((-1,3))

 def test_pwl():
-- 
cgit v1.2.3


From 50b85d976f2f5ec7e40faec1ede047cf45b10bc1 Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Wed, 19 Sep 2018 16:53:50 -0700
Subject: Fix incorrect integrals in PieceWiseLinFunc (#38)

Integrals of piece-wise linear functions were incorrect if the requested
interval lies completely between two support points. This has been fixed,
and a unit test exercising this behavior was added.
Fixes #38

---
 pyspike/PieceWiseLinFunc.py | 39 +++++++++++++++++++++++++++------------
 test/test_function.py       | 14 +++++++++++++-
 2 files changed, 39 insertions(+), 14 deletions(-)

(limited to 'test')

diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py
index 8145e63..8faaec4 100644
--- a/pyspike/PieceWiseLinFunc.py
+++ b/pyspike/PieceWiseLinFunc.py
@@ -146,31 +146,44 @@ class PieceWiseLinFunc:

         if interval is None:
             # no interval given, integrate over the whole spike train
-            integral = np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2))
+            return np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2))
+
+        # find the indices corresponding to the interval
+        start_ind = np.searchsorted(self.x, interval[0], side='right')
+        end_ind = np.searchsorted(self.x, interval[1], side='left')-1
+        assert start_ind > 0 and end_ind < len(self.x), \
+            "Invalid averaging interval"
+        if start_ind > end_ind:
+            # contribution from between two closest edges
+            y_x0 = intermediate_value(self.x[start_ind-1],
+                                      self.x[start_ind],
+                                      self.y1[start_ind-1],
+                                      self.y2[start_ind-1],
+                                      interval[0])
+            y_x1 = intermediate_value(self.x[start_ind-1],
+                                      self.x[start_ind],
+                                      self.y1[start_ind-1],
+                                      self.y2[start_ind-1],
+                                      interval[1])
+            integral = (y_x0 + y_x1) * 0.5 * (interval[1] - interval[0])
         else:
-            # find the indices corresponding to the interval
-            start_ind = np.searchsorted(self.x, interval[0], side='right')
-            end_ind = np.searchsorted(self.x, interval[1], side='left')-1
-            assert start_ind > 0 and end_ind < len(self.x), \
-                "Invalid averaging interval"
             # first the contribution from between the indices
             integral = np.sum((self.x[start_ind+1:end_ind+1] -
-                              self.x[start_ind:end_ind]) *
-                              0.5*(self.y1[start_ind:end_ind] +
-                              self.y2[start_ind:end_ind]))
+                               self.x[start_ind:end_ind]) *
+                              0.5*(self.y1[start_ind:end_ind] +
+                                   self.y2[start_ind:end_ind]))
             # correction from start to first index
             integral += (self.x[start_ind]-interval[0]) * 0.5 * \
                         (self.y2[start_ind-1] +
-                        intermediate_value(self.x[start_ind-1],
+                         intermediate_value(self.x[start_ind-1],
                                            self.x[start_ind],
                                            self.y1[start_ind-1],
                                            self.y2[start_ind-1],
-                                           interval[0]
-                                           ))
+                                           interval[0]))
             # correction from last index to end
             integral += (interval[1]-self.x[end_ind]) * 0.5 * \
                         (self.y1[end_ind] +
-                        intermediate_value(self.x[end_ind], self.x[end_ind+1],
+                         intermediate_value(self.x[end_ind], self.x[end_ind+1],
                                            self.y1[end_ind],
                                            self.y2[end_ind],
                                            interval[1]
                                            ))
diff --git a/test/test_function.py b/test/test_function.py
index ddeb4b1..6c04839 100644
--- a/test/test_function.py
+++ b/test/test_function.py
@@ -136,7 +136,7 @@ def test_pwc_integral():
     # test part interval, spanning an edge
     assert_equal(f1.integral((0.5,1.5)), 0.5*1.0 + 0.5*-0.5)
     # test part interval, just over two edges
-    assert_almost_equal(f1.integral((1.0-1e-16,2+1e-16)), 1.0*-0.5, decimal=16)
+    assert_almost_equal(f1.integral((1.0-1e-16,2+1e-16)), 1.0*-0.5, decimal=14)
     # test part interval, between two edges
     assert_equal(f1.integral((1.0,2.0)), 1.0*-0.5)
     assert_equal(f1.integral((1.2,1.7)), (1.7-1.2)*-0.5)
@@ -212,6 +212,18 @@ def test_pwl():
     a = f.avrg([1.0, 4.0])
     assert_almost_equal(a, (-0.45 + 0.75 + 1.5*0.5) / 3.0, decimal=16)

+    # interval between support points
+    a = f.avrg([1.1, 1.5])
+    assert_almost_equal(a, (-0.5+0.1*0.1 - 0.45) * 0.5, decimal=14)
+
+    # starting at a support point
+    a = f.avrg([1.0, 1.5])
+    assert_almost_equal(a, (-0.5 - 0.45) * 0.5, decimal=14)
+
+    #
start and end at support point + a = f.avrg([1.0, 2.0]) + assert_almost_equal(a, (-0.5 - 0.4) * 0.5, decimal=14) + # averaging over multiple intervals a = f.avrg([(0.5, 1.5), (1.5, 2.5)]) assert_almost_equal(a, (1.375*0.5 - 0.45 + 0.75)/2.0, decimal=16) -- cgit v1.2.3 From fc6f43a23f8bc62bcc10a0e6531ac78d309a3620 Mon Sep 17 00:00:00 2001 From: gspr Date: Wed, 29 Dec 2021 22:53:42 +0100 Subject: Use assert_allclose instead of assert_equal in tests to allow for different floating point behavior on different architectures or optimization levels. (#49) --- test/test_distance.py | 70 +++++++++++++++--------------- test/test_empty.py | 36 +++++++-------- test/test_function.py | 52 +++++++++++----------- test/test_generic_interfaces.py | 18 ++++---- test/test_regression/test_regression_15.py | 20 ++++----- test/test_spikes.py | 10 ++--- test/test_sync_filter.py | 32 +++++++------- 7 files changed, 119 insertions(+), 119 deletions(-) (limited to 'test') diff --git a/test/test_distance.py b/test/test_distance.py index fe09f34..7ec3a72 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -11,7 +11,7 @@ Distributed under the BSD License from __future__ import print_function import numpy as np from copy import copy -from numpy.testing import assert_equal, assert_almost_equal, \ +from numpy.testing import assert_allclose, assert_almost_equal, \ assert_array_almost_equal import pyspike as spk @@ -41,10 +41,10 @@ def test_isi(): # print("ISI: ", f.y) print("ISI value:", expected_isi_val) - assert_equal(f.x, expected_times) + assert_allclose(f.x, expected_times) assert_array_almost_equal(f.y, expected_isi, decimal=15) - assert_equal(f.avrg(), expected_isi_val) - assert_equal(spk.isi_distance(t1, t2), expected_isi_val) + assert_allclose(f.avrg(), expected_isi_val) + assert_allclose(spk.isi_distance(t1, t2), expected_isi_val) # check with some equal spike times t1 = SpikeTrain([0.2, 0.4, 0.6], [0.0, 1.0]) @@ -60,10 +60,10 @@ def test_isi(): f = spk.isi_profile(t1, t2) - assert_equal(f.x, expected_times) + assert_allclose(f.x, expected_times) assert_array_almost_equal(f.y, expected_isi, decimal=15) - assert_equal(f.avrg(), expected_isi_val) - assert_equal(spk.isi_distance(t1, t2), expected_isi_val) + assert_allclose(f.avrg(), expected_isi_val) + assert_allclose(spk.isi_distance(t1, t2), expected_isi_val) def test_spike(): @@ -75,7 +75,7 @@ def test_spike(): f = spk.spike_profile(t1, t2) - assert_equal(f.x, expected_times) + assert_allclose(f.x, expected_times) # from SPIKY: y_all = np.array([0.000000000000000000, 0.555555555555555580, @@ -89,7 +89,7 @@ def test_spike(): assert_array_almost_equal(f.y2, y_all[1::2]) assert_almost_equal(f.avrg(), 0.186309523809523814, decimal=15) - assert_equal(spk.spike_distance(t1, t2), f.avrg()) + assert_allclose(spk.spike_distance(t1, t2), f.avrg()) t1 = SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0) t2 = SpikeTrain([0.3, 0.45, 0.8, 0.9, 0.95], 1.0) @@ -118,7 +118,7 @@ def test_spike(): f = spk.spike_profile(t1, t2) - assert_equal(f.x, expected_times) + assert_allclose(f.x, expected_times) assert_array_almost_equal(f.y1, expected_y1, decimal=15) assert_array_almost_equal(f.y2, expected_y2, decimal=15) assert_almost_equal(f.avrg(), expected_spike_val, decimal=15) @@ -157,7 +157,7 @@ def test_spike(): f = spk.spike_profile(t1, t2) - assert_equal(f.x, expected_times) + assert_allclose(f.x, expected_times) assert_array_almost_equal(f.y1, expected_y1, decimal=14) assert_array_almost_equal(f.y2, expected_y2, decimal=14) assert_almost_equal(f.avrg(), expected_spike_val, 
decimal=16) @@ -236,8 +236,8 @@ def test_spike_sync(): f.add(f2) i12 = f.integral() - assert_equal(i1[0]+i2[0], i12[0]) - assert_equal(i1[1]+i2[1], i12[1]) + assert_allclose(i1[0]+i2[0], i12[0]) + assert_allclose(i1[1]+i2[1], i12[1]) def check_multi_profile(profile_func, profile_func_multi, dist_func_multi): @@ -258,7 +258,7 @@ def check_multi_profile(profile_func, profile_func_multi, dist_func_multi): f_multi = profile_func_multi(spike_trains, [0, 1]) assert f_multi.almost_equal(f12, decimal=14) d = dist_func_multi(spike_trains, [0, 1]) - assert_equal(f_multi.avrg(), d) + assert_allclose(f_multi.avrg(), d) f_multi1 = profile_func_multi(spike_trains, [1, 2, 3]) f_multi2 = profile_func_multi(spike_trains[1:]) @@ -329,11 +329,11 @@ def test_multi_spike_sync(): f = spk.spike_sync_profile_multi(spike_trains) - assert_equal(spike_times, f.x[1:-1]) - assert_equal(len(f.x), len(f.y)) + assert_allclose(spike_times, f.x[1:-1]) + assert_allclose(len(f.x), len(f.y)) - assert_equal(np.sum(f.y[1:-1]), 39932) - assert_equal(np.sum(f.mp[1:-1]), 85554) + assert_allclose(np.sum(f.y[1:-1]), 39932) + assert_allclose(np.sum(f.mp[1:-1]), 85554) # example with 2 empty spike trains sts = [] @@ -365,16 +365,16 @@ def check_dist_matrix(dist_func, dist_matrix_func): f_matrix = dist_matrix_func(spike_trains) # check zero diagonal for i in range(4): - assert_equal(0.0, f_matrix[i, i]) + assert_allclose(0.0, f_matrix[i, i]) for i in range(4): for j in range(i+1, 4): - assert_equal(f_matrix[i, j], f_matrix[j, i]) - assert_equal(f12, f_matrix[1, 0]) - assert_equal(f13, f_matrix[2, 0]) - assert_equal(f14, f_matrix[3, 0]) - assert_equal(f23, f_matrix[2, 1]) - assert_equal(f24, f_matrix[3, 1]) - assert_equal(f34, f_matrix[3, 2]) + assert_allclose(f_matrix[i, j], f_matrix[j, i]) + assert_allclose(f12, f_matrix[1, 0]) + assert_allclose(f13, f_matrix[2, 0]) + assert_allclose(f14, f_matrix[3, 0]) + assert_allclose(f23, f_matrix[2, 1]) + assert_allclose(f24, f_matrix[3, 1]) + assert_allclose(f34, f_matrix[3, 2]) def test_isi_matrix(): @@ -397,13 +397,13 @@ def test_regression_spiky(): isi_dist = spk.isi_distance(st1, st2) assert_almost_equal(isi_dist, 9.0909090909090939e-02, decimal=15) isi_profile = spk.isi_profile(st1, st2) - assert_equal(isi_profile.y, 0.1/1.1 * np.ones_like(isi_profile.y)) + assert_allclose(isi_profile.y, 0.1/1.1 * np.ones_like(isi_profile.y)) spike_dist = spk.spike_distance(st1, st2) - assert_equal(spike_dist, 0.211058782487353908) + assert_allclose(spike_dist, 0.211058782487353908) spike_sync = spk.spike_sync(st1, st2) - assert_equal(spike_sync, 8.6956521739130432e-01) + assert_allclose(spike_sync, 8.6956521739130432e-01) # multivariate check @@ -414,7 +414,7 @@ def test_regression_spiky(): assert_almost_equal(isi_dist, 0.17051816816999129656, decimal=15) spike_profile = spk.spike_profile_multi(spike_trains) - assert_equal(len(spike_profile.y1)+len(spike_profile.y2), 1252) + assert_allclose(len(spike_profile.y1)+len(spike_profile.y2), 1252) spike_dist = spk.spike_distance_multi(spike_trains) # get the full precision from SPIKY @@ -422,7 +422,7 @@ def test_regression_spiky(): spike_sync = spk.spike_sync_multi(spike_trains) # get the full precision from SPIKY - assert_equal(spike_sync, 0.7183531505298066) + assert_allclose(spike_sync, 0.7183531505298066) # Eero's edge correction example st1 = SpikeTrain([0.5, 1.5, 2.5], 6.0) @@ -439,7 +439,7 @@ def test_regression_spiky(): expected_y1 = y_all[::2] expected_y2 = y_all[1::2] - assert_equal(f.x, expected_times) + assert_allclose(f.x, expected_times) 
assert_array_almost_equal(f.y1, expected_y1, decimal=14) assert_array_almost_equal(f.y2, expected_y2, decimal=14) @@ -452,15 +452,15 @@ def test_multi_variate_subsets(): v1 = spk.isi_distance_multi(spike_trains_sub_set) v2 = spk.isi_distance_multi(spike_trains, sub_set) - assert_equal(v1, v2) + assert_allclose(v1, v2) v1 = spk.spike_distance_multi(spike_trains_sub_set) v2 = spk.spike_distance_multi(spike_trains, sub_set) - assert_equal(v1, v2) + assert_allclose(v1, v2) v1 = spk.spike_sync_multi(spike_trains_sub_set) v2 = spk.spike_sync_multi(spike_trains, sub_set) - assert_equal(v1, v2) + assert_allclose(v1, v2) if __name__ == "__main__": diff --git a/test/test_empty.py b/test/test_empty.py index 4d0a5cf..93fd2c1 100644 --- a/test/test_empty.py +++ b/test/test_empty.py @@ -10,7 +10,7 @@ Distributed under the BSD License from __future__ import print_function import numpy as np -from numpy.testing import assert_equal, assert_almost_equal, \ +from numpy.testing import assert_allclose, assert_almost_equal, \ assert_array_equal, assert_array_almost_equal import pyspike as spk @@ -33,18 +33,18 @@ def test_isi_empty(): st1 = SpikeTrain([], edges=(0.0, 1.0)) st2 = SpikeTrain([], edges=(0.0, 1.0)) d = spk.isi_distance(st1, st2) - assert_equal(d, 0.0) + assert_allclose(d, 0.0) prof = spk.isi_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_equal(prof.x, [0.0, 1.0]) assert_array_equal(prof.y, [0.0, ]) st1 = SpikeTrain([], edges=(0.0, 1.0)) st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0)) d = spk.isi_distance(st1, st2) - assert_equal(d, 0.6*0.4+0.4*0.6) + assert_allclose(d, 0.6*0.4+0.4*0.6) prof = spk.isi_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_equal(prof.x, [0.0, 0.4, 1.0]) assert_array_equal(prof.y, [0.6, 0.4]) @@ -53,7 +53,7 @@ def test_isi_empty(): d = spk.isi_distance(st1, st2) assert_almost_equal(d, 0.2/0.6*0.4 + 0.0 + 0.2/0.6*0.4, decimal=15) prof = spk.isi_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_almost_equal(prof.x, [0.0, 0.4, 0.6, 1.0], decimal=15) assert_array_almost_equal(prof.y, [0.2/0.6, 0.0, 0.2/0.6], decimal=15) @@ -62,9 +62,9 @@ def test_spike_empty(): st1 = SpikeTrain([], edges=(0.0, 1.0)) st2 = SpikeTrain([], edges=(0.0, 1.0)) d = spk.spike_distance(st1, st2) - assert_equal(d, 0.0) + assert_allclose(d, 0.0) prof = spk.spike_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_equal(prof.x, [0.0, 1.0]) assert_array_equal(prof.y1, [0.0, ]) assert_array_equal(prof.y2, [0.0, ]) @@ -75,7 +75,7 @@ def test_spike_empty(): d_expect = 2*0.4*0.4*1.0/(0.4+1.0)**2 + 2*0.6*0.4*1.0/(0.6+1.0)**2 assert_almost_equal(d, d_expect, decimal=15) prof = spk.spike_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_equal(prof.x, [0.0, 0.4, 1.0]) assert_array_almost_equal(prof.y1, [2*0.4*1.0/(0.4+1.0)**2, 2*0.4*1.0/(0.6+1.0)**2], @@ -100,7 +100,7 @@ def test_spike_empty(): assert_almost_equal(d, expected_spike_val, decimal=15) prof = spk.spike_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_almost_equal(prof.x, [0.0, 0.4, 0.6, 1.0], decimal=15) assert_array_almost_equal(prof.y1, expected_y1, decimal=15) assert_array_almost_equal(prof.y2, expected_y2, decimal=15) @@ -110,18 +110,18 @@ def test_spike_sync_empty(): st1 = SpikeTrain([], edges=(0.0, 1.0)) st2 = SpikeTrain([], edges=(0.0, 1.0)) d = spk.spike_sync(st1, st2) - 
assert_equal(d, 1.0) + assert_allclose(d, 1.0) prof = spk.spike_sync_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_equal(prof.x, [0.0, 1.0]) assert_array_equal(prof.y, [1.0, 1.0]) st1 = SpikeTrain([], edges=(0.0, 1.0)) st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0)) d = spk.spike_sync(st1, st2) - assert_equal(d, 0.0) + assert_allclose(d, 0.0) prof = spk.spike_sync_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_equal(prof.x, [0.0, 0.4, 1.0]) assert_array_equal(prof.y, [0.0, 0.0, 0.0]) @@ -130,7 +130,7 @@ def test_spike_sync_empty(): d = spk.spike_sync(st1, st2) assert_almost_equal(d, 1.0, decimal=15) prof = spk.spike_sync_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_almost_equal(prof.x, [0.0, 0.4, 0.6, 1.0], decimal=15) assert_array_almost_equal(prof.y, [1.0, 1.0, 1.0, 1.0], decimal=15) @@ -139,7 +139,7 @@ def test_spike_sync_empty(): d = spk.spike_sync(st1, st2) assert_almost_equal(d, 0.0, decimal=15) prof = spk.spike_sync_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_almost_equal(prof.x, [0.0, 0.2, 0.8, 1.0], decimal=15) assert_array_almost_equal(prof.y, [0.0, 0.0, 0.0, 0.0], decimal=15) @@ -148,9 +148,9 @@ def test_spike_sync_empty(): st2 = SpikeTrain([2.1, 7.0], [0, 10.0]) st3 = SpikeTrain([5.1, 6.0], [0, 10.0]) res = spk.spike_sync_profile(st1, st2).avrg(interval=[3.0, 4.0]) - assert_equal(res, 1.0) + assert_allclose(res, 1.0) res = spk.spike_sync(st1, st2, interval=[3.0, 4.0]) - assert_equal(res, 1.0) + assert_allclose(res, 1.0) sync_matrix = spk.spike_sync_matrix([st1, st2, st3], interval=[3.0, 4.0]) assert_array_equal(sync_matrix, np.ones((3, 3)) - np.diag(np.ones(3))) diff --git a/test/test_function.py b/test/test_function.py index 6c04839..ba10ae7 100644 --- a/test/test_function.py +++ b/test/test_function.py @@ -11,7 +11,7 @@ from __future__ import print_function import numpy as np from copy import copy from nose.tools import raises -from numpy.testing import assert_equal, assert_almost_equal, \ +from numpy.testing import assert_allclose, assert_almost_equal, \ assert_array_equal, assert_array_almost_equal import pyspike as spk @@ -24,14 +24,14 @@ def test_pwc(): f = spk.PieceWiseConstFunc(x, y) # function values - assert_equal(f(0.0), 1.0) - assert_equal(f(0.5), 1.0) - assert_equal(f(1.0), 0.25) - assert_equal(f(2.0), 0.5) - assert_equal(f(2.25), 1.5) - assert_equal(f(2.5), 2.25/2) - assert_equal(f(3.5), 0.75) - assert_equal(f(4.0), 0.75) + assert_allclose(f(0.0), 1.0) + assert_allclose(f(0.5), 1.0) + assert_allclose(f(1.0), 0.25) + assert_allclose(f(2.0), 0.5) + assert_allclose(f(2.25), 1.5) + assert_allclose(f(2.5), 2.25/2) + assert_allclose(f(3.5), 0.75) + assert_allclose(f(4.0), 0.75) assert_array_equal(f([0.0, 0.5, 1.0, 2.0, 2.25, 2.5, 3.5, 4.0]), [1.0, 1.0, 0.25, 0.5, 1.5, 2.25/2, 0.75, 0.75]) @@ -131,21 +131,21 @@ def test_pwc_integral(): # test full interval full = 1.0*1.0 + 1.0*-0.5 + 0.5*1.5 + 1.5*0.75; - assert_equal(f1.integral(), full) - assert_equal(f1.integral((np.min(x),np.max(x))), full) + assert_allclose(f1.integral(), full) + assert_allclose(f1.integral((np.min(x),np.max(x))), full) # test part interval, spanning an edge - assert_equal(f1.integral((0.5,1.5)), 0.5*1.0 + 0.5*-0.5) + assert_allclose(f1.integral((0.5,1.5)), 0.5*1.0 + 0.5*-0.5) # test part interval, just over two edges assert_almost_equal(f1.integral((1.0-1e-16,2+1e-16)), 1.0*-0.5, decimal=14) # test part 
interval, between two edges - assert_equal(f1.integral((1.0,2.0)), 1.0*-0.5) - assert_equal(f1.integral((1.2,1.7)), (1.7-1.2)*-0.5) + assert_allclose(f1.integral((1.0,2.0)), 1.0*-0.5) + assert_allclose(f1.integral((1.2,1.7)), (1.7-1.2)*-0.5) # test part interval, start to before and after edge - assert_equal(f1.integral((0.0,0.7)), 0.7*1.0) - assert_equal(f1.integral((0.0,1.1)), 1.0*1.0+0.1*-0.5) + assert_allclose(f1.integral((0.0,0.7)), 0.7*1.0) + assert_allclose(f1.integral((0.0,1.1)), 1.0*1.0+0.1*-0.5) # test part interval, before and after edge till end - assert_equal(f1.integral((2.6,4.0)), (4.0-2.6)*0.75) - assert_equal(f1.integral((2.4,4.0)), (2.5-2.4)*1.5+(4-2.5)*0.75) + assert_allclose(f1.integral((2.6,4.0)), (4.0-2.6)*0.75) + assert_allclose(f1.integral((2.4,4.0)), (2.5-2.4)*1.5+(4-2.5)*0.75) @raises(ValueError) def test_pwc_integral_bad_bounds_inv(): @@ -178,14 +178,14 @@ def test_pwl(): f = spk.PieceWiseLinFunc(x, y1, y2) # function values - assert_equal(f(0.0), 1.0) - assert_equal(f(0.5), 1.25) - assert_equal(f(1.0), 0.5) - assert_equal(f(2.0), 1.1/2) - assert_equal(f(2.25), 1.5) - assert_equal(f(2.5), 2.25/2) - assert_equal(f(3.5), 0.75-0.5*1.0/1.5) - assert_equal(f(4.0), 0.25) + assert_allclose(f(0.0), 1.0) + assert_allclose(f(0.5), 1.25) + assert_allclose(f(1.0), 0.5) + assert_allclose(f(2.0), 1.1/2) + assert_allclose(f(2.25), 1.5) + assert_allclose(f(2.5), 2.25/2) + assert_allclose(f(3.5), 0.75-0.5*1.0/1.5) + assert_allclose(f(4.0), 0.25) assert_array_equal(f([0.0, 0.5, 1.0, 2.0, 2.25, 2.5, 3.5, 4.0]), [1.0, 1.25, 0.5, 0.55, 1.5, 2.25/2, 0.75-0.5/1.5, 0.25]) diff --git a/test/test_generic_interfaces.py b/test/test_generic_interfaces.py index 7f08067..553f3f4 100644 --- a/test/test_generic_interfaces.py +++ b/test/test_generic_interfaces.py @@ -9,7 +9,7 @@ Distributed under the BSD License """ from __future__ import print_function -from numpy.testing import assert_equal +from numpy.testing import assert_allclose import pyspike as spk from pyspike import SpikeTrain @@ -43,33 +43,33 @@ def check_func(dist_func): isi12 = dist_func(t1, t2) isi12_ = dist_func([t1, t2]) - assert_equal(isi12, isi12_) + assert_allclose(isi12, isi12_) isi12_ = dist_func(spike_trains, indices=[0, 1]) - assert_equal(isi12, isi12_) + assert_allclose(isi12, isi12_) isi123 = dist_func(t1, t2, t3) isi123_ = dist_func([t1, t2, t3]) - assert_equal(isi123, isi123_) + assert_allclose(isi123, isi123_) isi123_ = dist_func(spike_trains, indices=[0, 1, 2]) - assert_equal(isi123, isi123_) + assert_allclose(isi123, isi123_) # run the same test with an additional interval parameter isi12 = dist_func(t1, t2, interval=[0.0, 0.5]) isi12_ = dist_func([t1, t2], interval=[0.0, 0.5]) - assert_equal(isi12, isi12_) + assert_allclose(isi12, isi12_) isi12_ = dist_func(spike_trains, indices=[0, 1], interval=[0.0, 0.5]) - assert_equal(isi12, isi12_) + assert_allclose(isi12, isi12_) isi123 = dist_func(t1, t2, t3, interval=[0.0, 0.5]) isi123_ = dist_func([t1, t2, t3], interval=[0.0, 0.5]) - assert_equal(isi123, isi123_) + assert_allclose(isi123, isi123_) isi123_ = dist_func(spike_trains, indices=[0, 1, 2], interval=[0.0, 0.5]) - assert_equal(isi123, isi123_) + assert_allclose(isi123, isi123_) def test_isi_profile(): diff --git a/test/test_regression/test_regression_15.py b/test/test_regression/test_regression_15.py index 54adf23..81b5bb0 100644 --- a/test/test_regression/test_regression_15.py +++ b/test/test_regression/test_regression_15.py @@ -11,7 +11,7 @@ Distributed under the BSD License from __future__ import division import 
numpy as np -from numpy.testing import assert_equal, assert_almost_equal, \ +from numpy.testing import assert_allclose, assert_almost_equal, \ assert_array_almost_equal import pyspike as spk @@ -28,15 +28,15 @@ def test_regression_15_isi(): N = len(spike_trains) dist_mat = spk.isi_distance_matrix(spike_trains) - assert_equal(dist_mat.shape, (N, N)) + assert_allclose(dist_mat.shape, (N, N)) ind = np.arange(N//2) dist_mat = spk.isi_distance_matrix(spike_trains, ind) - assert_equal(dist_mat.shape, (N//2, N//2)) + assert_allclose(dist_mat.shape, (N//2, N//2)) ind = np.arange(N//2, N) dist_mat = spk.isi_distance_matrix(spike_trains, ind) - assert_equal(dist_mat.shape, (N//2, N//2)) + assert_allclose(dist_mat.shape, (N//2, N//2)) def test_regression_15_spike(): @@ -46,15 +46,15 @@ def test_regression_15_spike(): N = len(spike_trains) dist_mat = spk.spike_distance_matrix(spike_trains) - assert_equal(dist_mat.shape, (N, N)) + assert_allclose(dist_mat.shape, (N, N)) ind = np.arange(N//2) dist_mat = spk.spike_distance_matrix(spike_trains, ind) - assert_equal(dist_mat.shape, (N//2, N//2)) + assert_allclose(dist_mat.shape, (N//2, N//2)) ind = np.arange(N//2, N) dist_mat = spk.spike_distance_matrix(spike_trains, ind) - assert_equal(dist_mat.shape, (N//2, N//2)) + assert_allclose(dist_mat.shape, (N//2, N//2)) def test_regression_15_sync(): @@ -64,15 +64,15 @@ def test_regression_15_sync(): N = len(spike_trains) dist_mat = spk.spike_sync_matrix(spike_trains) - assert_equal(dist_mat.shape, (N, N)) + assert_allclose(dist_mat.shape, (N, N)) ind = np.arange(N//2) dist_mat = spk.spike_sync_matrix(spike_trains, ind) - assert_equal(dist_mat.shape, (N//2, N//2)) + assert_allclose(dist_mat.shape, (N//2, N//2)) ind = np.arange(N//2, N) dist_mat = spk.spike_sync_matrix(spike_trains, ind) - assert_equal(dist_mat.shape, (N//2, N//2)) + assert_allclose(dist_mat.shape, (N//2, N//2)) if __name__ == "__main__": diff --git a/test/test_spikes.py b/test/test_spikes.py index ee505b5..579f8e1 100644 --- a/test/test_spikes.py +++ b/test/test_spikes.py @@ -9,7 +9,7 @@ Distributed under the BSD License from __future__ import print_function import numpy as np -from numpy.testing import assert_equal +from numpy.testing import assert_allclose import pyspike as spk @@ -29,7 +29,7 @@ def test_load_from_txt(): spike_times = [64.886, 305.81, 696, 937.77, 1059.7, 1322.2, 1576.1, 1808.1, 2121.5, 2381.1, 2728.6, 2966.9, 3223.7, 3473.7, 3644.3, 3936.3] - assert_equal(spike_times, spike_trains[0].spikes) + assert_allclose(spike_times, spike_trains[0].spikes) # check auxiliary spikes for spike_train in spike_trains: @@ -47,9 +47,9 @@ def test_load_time_series(): # check spike trains for n in range(len(spike_trains)): - assert_equal(spike_trains[n].spikes, spike_trains_check[n].spikes) - assert_equal(spike_trains[n].t_start, 0) - assert_equal(spike_trains[n].t_end, 4000) + assert_allclose(spike_trains[n].spikes, spike_trains_check[n].spikes) + assert_allclose(spike_trains[n].t_start, 0) + assert_allclose(spike_trains[n].t_end, 4000) def check_merged_spikes(merged_spikes, spike_trains): diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py index e259903..0b915db 100644 --- a/test/test_sync_filter.py +++ b/test/test_sync_filter.py @@ -10,7 +10,7 @@ Distributed under the BSD License from __future__ import print_function import numpy as np -from numpy.testing import assert_equal, assert_almost_equal, \ +from numpy.testing import assert_allclose, assert_almost_equal, \ assert_array_almost_equal import pyspike as spk @@ -36,21 +36,21 @@ 
def test_single_prof(): coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0)) print(coincidences) for i, t in enumerate(st1): - assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t], - "At index %d" % i) + assert_allclose(coincidences[i], sync_prof.y[sync_prof.x == t], + err_msg="At index %d" % i) coincidences = np.array(coincidence_impl(st2, st1, 0, 5.0, 0.0)) for i, t in enumerate(st2): - assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t], - "At index %d" % i) + assert_allclose(coincidences[i], sync_prof.y[sync_prof.x == t], + err_msg="At index %d" % i) sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0), SpikeTrain(st3, 5.0)) coincidences = np.array(coincidence_impl(st1, st3, 0, 5.0, 0.0)) for i, t in enumerate(st1): - assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t], - "At index %d" % i) + assert_allclose(coincidences[i], sync_prof.y[sync_prof.x == t], + err_msg="At index %d" % i) st1 = np.array([1.0, 2.0, 3.0, 4.0]) st2 = np.array([1.0, 2.0, 4.0]) @@ -61,8 +61,8 @@ def test_single_prof(): coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0)) for i, t in enumerate(st1): expected = sync_prof.y[sync_prof.x == t]/sync_prof.mp[sync_prof.x == t] - assert_equal(coincidences[i], expected, - "At index %d" % i) + assert_allclose(coincidences[i], expected, + err_msg="At index %d" % i) def test_filter(): @@ -72,22 +72,22 @@ def test_filter(): # filtered_spike_trains = spk.filter_by_spike_sync([st1, st2], 0.5) - # assert_equal(filtered_spike_trains[0].spikes, [1.0, 2.0, 4.0]) - # assert_equal(filtered_spike_trains[1].spikes, [1.1, 2.1, 3.8]) + # assert_allclose(filtered_spike_trains[0].spikes, [1.0, 2.0, 4.0]) + # assert_allclose(filtered_spike_trains[1].spikes, [1.1, 2.1, 3.8]) # filtered_spike_trains = spk.filter_by_spike_sync([st2, st1], 0.5) - # assert_equal(filtered_spike_trains[0].spikes, [1.1, 2.1, 3.8]) - # assert_equal(filtered_spike_trains[1].spikes, [1.0, 2.0, 4.0]) + # assert_allclose(filtered_spike_trains[0].spikes, [1.1, 2.1, 3.8]) + # assert_allclose(filtered_spike_trains[1].spikes, [1.0, 2.0, 4.0]) filtered_spike_trains = spk.filter_by_spike_sync([st1, st2, st3], 0.75) for st in filtered_spike_trains: print(st.spikes) - assert_equal(filtered_spike_trains[0].spikes, [1.0, 4.0]) - assert_equal(filtered_spike_trains[1].spikes, [1.1, 3.8]) - assert_equal(filtered_spike_trains[2].spikes, [0.9, 4.1]) + assert_allclose(filtered_spike_trains[0].spikes, [1.0, 4.0]) + assert_allclose(filtered_spike_trains[1].spikes, [1.1, 3.8]) + assert_allclose(filtered_spike_trains[2].spikes, [0.9, 4.1]) if __name__ == "main": -- cgit v1.2.3
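
For reference, the directionality API refactored in the commits above can be
exercised as follows. This is a minimal usage sketch, not code from the test
suite: the spike trains and variable names are made up for illustration, and
it assumes PySpike is installed with its Cython extensions built (without
them, `optimal_spike_train_sorting` raises NotImplementedError, per the
fallback introduced above)::

    import numpy as np
    import pyspike as spk
    from pyspike import SpikeTrain

    # three illustrative spike trains on [0, 1000]; st1 tends to lead
    st1 = SpikeTrain([100, 200, 300], [0, 1000])
    st2 = SpikeTrain([105, 205, 300], [0, 1000])
    st3 = SpikeTrain([110, 210, 305], [0, 1000])

    # bi- and multi-variate synfire indicator via the new dispatch functions
    print(spk.spike_train_order(st1, st2))         # normalized by default
    print(spk.spike_train_order([st1, st2, st3]))

    # the same quantity from the unnormalized spike directionality matrix,
    # mirroring the check in test_spike_train_order above
    D = spk.spike_directionality_matrix([st1, st2, st3], normalize=False)
    num_spikes = sum(len(st) for st in [st1, st2, st3])
    print(np.sum(np.triu(D)) / num_spikes)

    # sort spike trains from leaders to followers by simulated annealing;
    # returns the permutation p and the synfire indicator F of the sorting
    p, F = spk.optimal_spike_train_sorting([st1, st2, st3])
    print(p, F)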