diff options
-rw-r--r-- | examples/perf_isi.py | 46 | ||||
-rw-r--r-- | examples/perf_spike.py | 39 | ||||
-rw-r--r-- | pyspike/cython_distance.pyx | 75 | ||||
-rw-r--r-- | pyspike/distances.py | 17 |
4 files changed, 174 insertions, 3 deletions
diff --git a/examples/perf_isi.py b/examples/perf_isi.py new file mode 100644 index 0000000..8b44946 --- /dev/null +++ b/examples/perf_isi.py @@ -0,0 +1,46 @@ +# performance measure of the isi calculation + +from __future__ import print_function + +import numpy as np +import matplotlib.pyplot as plt +import time +from functools import partial + +import pyspike as spk +#import pyspike.distances # for the python functions + +def measure_perf(func, loops=10): + times = np.empty(loops) + for i in xrange(loops): + start = time.clock() + func() + times[i] = time.clock() - start + return np.min(times) + +print("# approximate number of spikes\tcython time [ms]\tpython time [ms]") + +# fix seed to get reproducible results +np.random.seed(1) + +# max times +Ns = np.arange(10000, 50001, 10000) +for N in Ns: + + # first generate some data + times = 2.0*np.random.random(1.1*N) + t1 = np.cumsum(times) + # only up to T + t1 = spk.add_auxiliary_spikes(t1[t1<N], N) + + times = 2.0*np.random.random(N) + t2 = np.cumsum(times) + # only up to T + t2 = spk.add_auxiliary_spikes(t2[t2<N], N) + + t_cython = measure_perf(partial(spk.isi_distance, t1, t2)) + + t_python = measure_perf(partial(spk.distances.isi_distance_python, + t1, t2)) + + print("%d\t%.3f\t%.1f" % (N, t_cython*1000, t_python*1000)) diff --git a/examples/perf_spike.py b/examples/perf_spike.py new file mode 100644 index 0000000..37154ae --- /dev/null +++ b/examples/perf_spike.py @@ -0,0 +1,39 @@ +# performance measure of the spike distance calculation + +from __future__ import print_function + +import numpy as np +import matplotlib.pyplot as plt +import time +from functools import partial + +import pyspike as spk + +def measure_perf(func, loops=10): + times = np.empty(loops) + for i in xrange(loops): + start = time.clock() + func() + times[i] = time.clock() - start + return np.min(times) + +print("# approximate number of spikes\texecution time [ms]") + +# max times +Ns = np.arange(10000, 50001, 10000) +for N in Ns: + + # first generate 
some data + times = 2.0*np.random.random(1.1*N) + t1 = np.cumsum(times) + # only up to T + t1 = spk.add_auxiliary_spikes(t1[t1<N], N) + + times = 2.0*np.random.random(N) + t2 = np.cumsum(times) + # only up to T + t2 = spk.add_auxiliary_spikes(t2[t2<N], N) + + t = measure_perf(partial(spk.spike_distance, t1, t2)) + + print("%d\t%.1f" % (N, t*1000)) diff --git a/pyspike/cython_distance.pyx b/pyspike/cython_distance.pyx new file mode 100644 index 0000000..330eea4 --- /dev/null +++ b/pyspike/cython_distance.pyx @@ -0,0 +1,75 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +Doc + +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_distance.pyx + +which gives:: + + cython_distance.html + + +""" + +import numpy as np +cimport numpy as np + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + +def isi_distance_cython(np.ndarray[DTYPE_t, ndim=1] s1, np.ndarray[DTYPE_t, ndim=1] s2): + + cdef np.ndarray[DTYPE_t, ndim=1] spike_events + # the values have one entry less - the number of intervals between events + cdef np.ndarray[DTYPE_t, ndim=1] isi_values + cdef int index1, index2, index + cdef int N1, N2 + cdef double nu1, nu2 + N1 = len(s1)-1 + N2 = len(s2)-1 + + nu1 = s1[1]-s1[0] + nu2 = s2[1]-s2[0] + spike_events = np.empty(N1+N2) + spike_events[0] = s1[0] + isi_values = np.empty(N1+N2-1) + isi_values[0] = (nu1-nu2)/max(nu1,nu2) + index1 = 0 + index2 = 0 + index = 1 + while True: + # check which spike is next - from s1 or s2 + if s1[index1+1] < s2[index2+1]: + index1 += 1 + # break condition relies on existence of spikes at T_end + if index1 >= N1: + break + spike_events[index] = s1[index1] + nu1 = s1[index1+1]-s1[index1] + elif s1[index1+1] > s2[index2+1]: + index2 += 1 + if index2 >= N2: + break + spike_events[index] = s2[index2] + nu2 = s2[index2+1]-s2[index2] + else: # s1[index1+1] == s2[index2+1] + index1 += 1 + index2 += 1 + if (index1 >= N1) or (index2 >= N2): + break + 
spike_events[index] = s1[index1] + nu1 = s1[index1+1]-s1[index1] + nu2 = s2[index2+1]-s2[index2] + # compute the corresponding isi-distance + isi_values[index] = (nu1 - nu2) / max(nu1, nu2) + index += 1 + # the last event is the interval end + spike_events[index] = s1[N1] + + return spike_events[:index+1], isi_values[:index] diff --git a/pyspike/distances.py b/pyspike/distances.py index 52c6640..76dcd83 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -53,10 +53,21 @@ def isi_distance(spikes1, spikes2): assert spikes1[-1]==spikes2[-1], \ "Given spike trains seems not to have auxiliary spikes!" - # shorter names - s1 = spikes1 - s2 = spikes2 + # compile and load cython implementation + import pyximport + pyximport.install(setup_args={'include_dirs': [np.get_include()]}) + from cython_distance import isi_distance_cython + times, values = isi_distance_cython(spikes1, spikes2) + return PieceWiseConstFunc(times, values) + + +############################################################ +# isi_distance_python +############################################################ +def isi_distance_python(s1, s2): + """ Plain Python implementation of the isi distance. + """ # compute the interspike interval nu1 = s1[1:]-s1[:-1] nu2 = s2[1:]-s2[:-1] |