diff options
author | Mario Mulansky <mario.mulansky@gmx.net> | 2015-10-10 20:45:09 +0200 |
---|---|---|
committer | Mario Mulansky <mario.mulansky@gmx.net> | 2018-06-02 12:59:43 -0700 |
commit | 18ea80e2d01e9eb4ceee17219f91098efbcdf67c (patch) | |
tree | d7819736b059e9885d53c14e28160d6487d93e6c /pyspike/spike_sync.py | |
parent | a5e6a12a619cb9528a4cf7f3ef8f082e5eb877c2 (diff) |
spike sync filtering, cython sim ann
Added a function for filtering out events based on a threshold for the spike
sync values. Useful for focusing on synchronous events during directionality
analysis.
Also added cython version of simulated annealing for performance.
Diffstat (limited to 'pyspike/spike_sync.py')
-rw-r--r-- | pyspike/spike_sync.py | 19 |
1 file changed, 15 insertions, 4 deletions
diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index d37731f..1d2ecdb 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -295,12 +295,14 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None): ############################################################ # filter_by_spike_sync ############################################################ -def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None): +def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None, + return_removed_spikes=False): """ Removes the spikes with a multi-variate spike_sync value below threshold. """ N = len(spike_trains) filtered_spike_trains = [] + removed_spike_trains = [] # cython implementation try: @@ -308,7 +310,7 @@ def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None): as coincidence_impl except ImportError: if not(pyspike.disable_backend_warning): - print("Warning: coincidence_single_profile_cytho not found. Make \ + print("Warning: coincidence_single_profile_cython not found. Make \ sure that PySpike is installed by running\n \ 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") @@ -321,10 +323,19 @@ Falling back to slow python backend.") for i, st in enumerate(spike_trains): coincidences = np.zeros_like(st) - for j in range(N).remove(i): + for j in xrange(N): + if i == j: + continue coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes, st.t_start, st.t_end, max_tau) filtered_spikes = st[coincidences > threshold*(N-1)] filtered_spike_trains.append(SpikeTrain(filtered_spikes, [st.t_start, st.t_end])) - return filtered_spike_trains + if return_removed_spikes: + removed_spikes = st[coincidences <= threshold*(N-1)] + removed_spike_trains.append(SpikeTrain(removed_spikes, + [st.t_start, st.t_end])) + if return_removed_spikes: + return [filtered_spike_trains, removed_spike_trains] + else: + return filtered_spike_trains |