diff options
author | Mario Mulansky <mario.mulansky@gmx.net> | 2018-09-20 10:49:42 -0700 |
---|---|---|
committer | GitHub <noreply@github.com> | 2018-09-20 10:49:42 -0700 |
commit | 34bd30415dd93a2425ce566627e24ee9483ada3e (patch) | |
tree | dcfa9164d46e3cf501a1e8dcf4970f350063561a /pyspike/cython/python_backend.py | |
parent | 44d23620d2faa78ca74437fbd3f1b95da722a853 (diff) |
Spike Order support (#39)0.6.0
* reorganized directionality module
* further refactoring of directionality
* completed python directionality backend
* added SPIKE-Sync based filtering
new function filter_by_spike_sync removes spikes that have a multi-variate
Spike Sync value below some threshold
not yet fully tested, python backend missing.
* spike sync filtering, cython sim ann
Added function for filtering out events based on a threshold for the spike
sync values. Useful for focusing on synchronous events during directionality
analysis.
Also added cython version of simulated annealing for performance.
* added coincidence single profile to python backend
missing function in python backend added, identified and fixed a bug in the
implementation as well
* updated test case to new spike sync behavior
* python3 fixes
* another python3 fix
* reorganized directionality module
* further refactoring of directionality
* completed python directionality backend
* added SPIKE-Sync based filtering
new function filter_by_spike_sync removes spikes that have a multi-variate
Spike Sync value below some threshold
not yet fully tested, python backend missing.
* spike sync filtering, cython sim ann
Added function for filtering out events based on a threshold for the spike
sync values. Useful for focusing on synchronous events during directionality
analysis.
Also added cython version of simulated annealing for performance.
* added coincidence single profile to python backend
missing function in python backend added, identified and fixed a bug in the
implementation as well
* updated test case to new spike sync behavior
* python3 fixes
* another python3 fix
* Fix absolute imports in directionality measures
* remove commented code
* Add directionality to docs, bump version
* Clean up directionality module, add doxy.
* Remove debug print from tests
* Fix bug in calling Python backend
* Fix incorrect integrals in PieceWiseConstFunc (#36)
* Add (some currently failing) tests for PieceWiseConstFunc.integral
* Fix implementation of PieceWiseConstFunc.integral
Just by adding a special condition for when we are only taking an
integral "between" two edges of a PieceWiseConstFunc
All tests now pass.
Fixes #33.
* Add PieceWiseConstFunc.integral tests for ValueError
* Add testing bounds of integral
* Raise ValueError in function implementation
* Fix incorrect integrals in PieceWiseLinFunc (#38)
Integrals of piece-wise linear functions were incorrect if the
requested interval lies completely between two support points.
This has been fixed, and a unit test exercising this behavior
was added.
Fixes #38
* Add Spike Order example and Tutorial section
Adds an example computing spike order profile and the optimal
spike train order. Also adds a section on spike train order to the
tutorial.
Diffstat (limited to 'pyspike/cython/python_backend.py')
-rw-r--r-- | pyspike/cython/python_backend.py | 67 |
1 files changed, 49 insertions, 18 deletions
diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index 6b7209a..e75f181 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -3,7 +3,7 @@ Collection of python functions that can be used instead of the cython implementation. -Copyright 2014, Mario Mulansky <mario.mulansky@gmx.net> +Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net> Distributed under the BSD License @@ -356,26 +356,27 @@ def cumulative_sync_python(spikes1, spikes2): return st, c +def get_tau(spikes1, spikes2, i, j, max_tau, init_tau): + m = init_tau + if i < len(spikes1)-1 and i > -1: + m = min(m, spikes1[i+1]-spikes1[i]) + if j < len(spikes2)-1 and j > -1: + m = min(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = min(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = min(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = min(m, max_tau) + return m + + ############################################################ # coincidence_python ############################################################ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): - def get_tau(spikes1, spikes2, i, j, max_tau): - m = t_end - t_start # use interval as initial tau - if i < len(spikes1)-1 and i > -1: - m = min(m, spikes1[i+1]-spikes1[i]) - if j < len(spikes2)-1 and j > -1: - m = min(m, spikes2[j+1]-spikes2[j]) - if i > 0: - m = min(m, spikes1[i]-spikes1[i-1]) - if j > 0: - m = min(m, spikes2[j]-spikes2[j-1]) - m *= 0.5 - if max_tau > 0.0: - m = min(m, max_tau) - return m - N1 = len(spikes1) N2 = len(spikes2) i = -1 @@ -388,7 +389,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): i += 1 n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) st[n] = spikes1[i] if j > -1 and spikes1[i]-spikes2[j] < tau: # coincidence between the current spike and the previous spike @@ -398,7 +399,7 
@@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): j += 1 n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) st[n] = spikes2[j] if i > -1 and spikes2[j]-spikes1[i] < tau: # coincidence between the current spike and the previous spike @@ -434,6 +435,36 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): ############################################################ +# coincidence_single_profile_cython +############################################################ +def coincidence_single_python(spikes1, spikes2, t_start, t_end, max_tau): + + N1 = len(spikes1) + N2 = len(spikes2) + j = -1 + c = np.zeros(N1) # coincidences + for i in range(N1): + while j < N2-1 and spikes2[j+1] < spikes1[i]: + # move forward until spikes2[j] is the last spike before spikes1[i] + # note that if spikes2[j] is after spikes1[i] we dont do anything + j += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) + if j > -1 and abs(spikes1[i]-spikes2[j]) < tau: + # current spike in st1 is coincident + c[i] = 1 + if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]): + # in case spikes2[j] is before spikes1[i] it has to be the first or + # the one right before (see above), hence we move one forward and + # also check the next spike + j += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) + if abs(spikes2[j]-spikes1[i]) < tau: + # current spike in st1 is coincident + c[i] = 1 + return c + + +############################################################ # add_piece_wise_const_python ############################################################ def add_piece_wise_const_python(x1, y1, x2, y2): |