Diffstat (limited to 'pyspike/spike_sync.py')
-rw-r--r--  pyspike/spike_sync.py  40
1 file changed, 39 insertions(+), 1 deletion(-)
diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py
index 80f7805..d37731f 100644
--- a/pyspike/spike_sync.py
+++ b/pyspike/spike_sync.py
@@ -8,7 +8,7 @@ from __future__ import absolute_import
import numpy as np
from functools import partial
import pyspike
-from pyspike import DiscreteFunc
+from pyspike import DiscreteFunc, SpikeTrain
from pyspike.generic import _generic_profile_multi, _generic_distance_matrix
@@ -290,3 +290,41 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None):
dist_func = partial(spike_sync_bi, max_tau=max_tau)
return _generic_distance_matrix(spike_trains, dist_func,
indices, interval)
+
+
+############################################################
+# filter_by_spike_sync
+############################################################
+def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None):
+ """ Removes the spikes with a multi-variate spike_sync value below
+ threshold.
+ """
+ N = len(spike_trains)
+ filtered_spike_trains = []
+
+ # cython implementation
+ try:
+        from .cython.cython_profiles import coincidence_single_profile_cython \
+            as coincidence_impl
+ except ImportError:
+ if not(pyspike.disable_backend_warning):
+ print("Warning: coincidence_single_profile_cytho not found. Make \
+sure that PySpike is installed by running\n \
+'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+ # use python backend
+        from .cython.python_backend import coincidence_single_profile_python \
+            as coincidence_impl
+
+ if max_tau is None:
+ max_tau = 0.0
+
+    for i, st in enumerate(spike_trains):
+        coincidences = np.zeros_like(st.spikes)
+        for j in [x for x in range(N) if x != i]:
+            coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes,
+                                             st.t_start, st.t_end, max_tau)
+        filtered_spikes = st.spikes[coincidences > threshold*(N-1)]
+ filtered_spike_trains.append(SpikeTrain(filtered_spikes,
+ [st.t_start, st.t_end]))
+ return filtered_spike_trains
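
For context, a minimal usage sketch of the new function as introduced by this
patch. The spike times, the threshold and the max_tau value are made up for
illustration, and the function is imported from the submodule in case it is
not re-exported in pyspike/__init__.py:

    import pyspike as spk
    from pyspike.spike_sync import filter_by_spike_sync

    # three toy spike trains on the interval [0, 4]
    spike_trains = [spk.SpikeTrain([0.5, 1.5, 2.5, 3.5], [0.0, 4.0]),
                    spk.SpikeTrain([0.5, 1.6, 2.5], [0.0, 4.0]),
                    spk.SpikeTrain([1.5, 2.5, 3.6], [0.0, 4.0])]

    # keep only spikes that coincide (within max_tau) with more than half
    # of the other spike trains
    filtered = filter_by_spike_sync(spike_trains, threshold=0.5, max_tau=0.1)
    for st in filtered:
        print(st.spikes)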