author     Mario Mulansky <mario.mulansky@gmx.net>   2015-12-14 16:38:48 +0100
committer  Mario Mulansky <mario.mulansky@gmx.net>   2015-12-14 16:38:48 +0100
commit     2958fac1f12dfc743214615a3e8252dae54ea784 (patch)
tree       f854cd37fde1b6b7bf9f83d136470802afdd4166
parent     a37dc02a0f6c11a8773cba8a6137a2604222fa26 (diff)
remove directionality from develop
Directionality development happens in a separate branch.
-rw-r--r--   pyspike/directionality/__init__.py                              |  15
-rw-r--r--   pyspike/directionality/cython/__init__.py                       |   0
-rw-r--r--   pyspike/directionality/cython/cython_directionality.pyx         | 223
-rw-r--r--   pyspike/directionality/cython/directionality_python_backend.py  |  89
-rw-r--r--   pyspike/directionality/spike_train_order.py                     | 266
-rw-r--r--   test/test_spike_delay_asymmetry.py                              |  40
6 files changed, 0 insertions(+), 633 deletions(-)
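
The deleted subpackage provided PySpike's spike train order (spike delay asymmetry) API. As orientation for what this commit removes, a minimal usage sketch pieced together from the deleted __init__.py exports and the deleted test file below (it assumes the package state of the parent commit, where pyspike.directionality still exists):

    from pyspike import SpikeTrain
    from pyspike.directionality import spike_train_order, spike_train_order_profile

    # two nearly identical spike trains; the first leads by 5 time units
    st1 = SpikeTrain([100, 200, 300], [0, 1000])
    st2 = SpikeTrain([105, 205, 300], [0, 1000])

    profile = spike_train_order_profile(st1, st2)  # DiscreteFunc of +1/-1/0 marks
    print(profile.avrg())                          # 2/3 for this example
    print(spike_train_order(st1, st2))             # normalized order value, also 2/3
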
diff --git a/pyspike/directionality/__init__.py b/pyspike/directionality/__init__.py
deleted file mode 100644
index 6ea38b2..0000000
--- a/pyspike/directionality/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-"""
-Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
-
-Distributed under the BSD License
-"""
-
-from __future__ import absolute_import
-
-__all__ = ["spike_train_order"]
-
-from .spike_train_order import spike_train_order_profile, \
- spike_train_order, spike_train_order_profile_multi, \
- spike_train_order_matrix, spike_order_values, \
- optimal_spike_train_order, optimal_spike_train_order_from_matrix, \
- permutate_matrix
diff --git a/pyspike/directionality/cython/__init__.py b/pyspike/directionality/cython/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/pyspike/directionality/cython/__init__.py
+++ /dev/null
diff --git a/pyspike/directionality/cython/cython_directionality.pyx b/pyspike/directionality/cython/cython_directionality.pyx
deleted file mode 100644
index e1f63c4..0000000
--- a/pyspike/directionality/cython/cython_directionality.pyx
+++ /dev/null
@@ -1,223 +0,0 @@
-#cython: boundscheck=False
-#cython: wraparound=False
-#cython: cdivision=True
-
-"""
-cython_directionality.pyx
-
-cython implementation of the spike delay asymmetry measures
-
-Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
-
-Distributed under the BSD License
-
-"""
-
-"""
-To test whether things can be optimized: remove all yellow stuff
-in the html output::
-
- cython -a cython_directionality.pyx
-
-which gives::
-
- cython_directionality.html
-
-"""
-
-import numpy as np
-cimport numpy as np
-
-from libc.math cimport fabs
-from libc.math cimport fmax
-from libc.math cimport fmin
-
-# from pyspike.cython.cython_distances cimport get_tau
-
-DTYPE = np.float
-ctypedef np.float_t DTYPE_t
-
-
-############################################################
-# get_tau
-############################################################
-cdef inline double get_tau(double[:] spikes1, double[:] spikes2,
- int i, int j, double interval, double max_tau):
- cdef double m = interval # use interval length as initial tau
- cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1
- cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1
- if i < N1 and i > -1:
- m = fmin(m, spikes1[i+1]-spikes1[i])
- if j < N2 and j > -1:
- m = fmin(m, spikes2[j+1]-spikes2[j])
- if i > 0:
- m = fmin(m, spikes1[i]-spikes1[i-1])
- if j > 0:
- m = fmin(m, spikes2[j]-spikes2[j-1])
- m *= 0.5
- if max_tau > 0.0:
- m = fmin(m, max_tau)
- return m
-
-
-############################################################
-# spike_train_order_profile_cython
-############################################################
-def spike_train_order_profile_cython(double[:] spikes1, double[:] spikes2,
- double t_start, double t_end,
- double max_tau):
-
- cdef int N1 = len(spikes1)
- cdef int N2 = len(spikes2)
- cdef int i = -1
- cdef int j = -1
- cdef int n = 0
- cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times
- cdef double[:] a = np.zeros(N1 + N2 + 2) # asymmetry values
- cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity
- cdef double interval = t_end - t_start
- cdef double tau
- while i + j < N1 + N2 - 2:
- if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
- i += 1
- n += 1
- tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
- st[n] = spikes1[i]
- if j > -1 and spikes1[i]-spikes2[j] < tau:
- # coincidence between the current spike and the previous spike
- # spike from spike train 1 after spike train 2
- # both get marked with -1
- a[n] = -1
- a[n-1] = -1
- elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
- j += 1
- n += 1
- tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
- st[n] = spikes2[j]
- if i > -1 and spikes2[j]-spikes1[i] < tau:
- # coincidence between the current spike and the previous spike
- # spike from spike train 1 before spike train 2
- # both get marked with 1
- a[n] = 1
- a[n-1] = 1
- else: # spikes1[i+1] = spikes2[j+1]
- # advance in both spike trains
- j += 1
- i += 1
- n += 1
- # add only one event with zero asymmetry value and multiplicity 2
- st[n] = spikes1[i]
- a[n] = 0
- mp[n] = 2
-
- st = st[:n+2]
- a = a[:n+2]
- mp = mp[:n+2]
-
- st[0] = t_start
- st[len(st)-1] = t_end
- if N1 + N2 > 0:
- a[0] = a[1]
- a[len(a)-1] = a[len(a)-2]
- mp[0] = mp[1]
- mp[len(mp)-1] = mp[len(mp)-2]
- else:
- a[0] = 1
- a[1] = 1
-
- return st, a, mp
-
-
-
-############################################################
-# spike_order_values_cython
-############################################################
-def spike_order_values_cython(double[:] spikes1,
- double[:] spikes2,
- double t_start, double t_end,
- double max_tau):
-
- cdef int N1 = len(spikes1)
- cdef int N2 = len(spikes2)
- cdef int i = -1
- cdef int j = -1
- cdef double[:] a1 = np.zeros(N1) # asymmetry values
- cdef double[:] a2 = np.zeros(N2) # asymmetry values
- cdef double interval = t_end - t_start
- cdef double tau
- while i + j < N1 + N2 - 2:
- if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
- i += 1
- tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
- if j > -1 and spikes1[i]-spikes2[j] < tau:
- # coincidence between the current spike and the previous spike
- # spike from spike train 1 after spike train 2
- # leading spike gets +1, following spike -1
- a1[i] = -1
- a2[j] = +1
- elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
- j += 1
- tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
- if i > -1 and spikes2[j]-spikes1[i] < tau:
- # coincidence between the current spike and the previous spike
- # spike from spike train 1 before spike train 2
- # leading spike gets +1, following spike -1
- a1[i] = +1
- a2[j] = -1
- else: # spikes1[i+1] = spikes2[j+1]
- # advance in both spike trains
- j += 1
- i += 1
- # equal spike times: zero asymmetry value
- a1[i] = 0
- a2[j] = 0
-
- return a1, a2
-
-
-############################################################
-# spike_train_order_cython
-############################################################
-def spike_train_order_cython(double[:] spikes1, double[:] spikes2,
- double t_start, double t_end, double max_tau):
-
- cdef int N1 = len(spikes1)
- cdef int N2 = len(spikes2)
- cdef int i = -1
- cdef int j = -1
- cdef int asym = 0
- cdef int mp = 0
- cdef double interval = t_end - t_start
- cdef double tau
- while i + j < N1 + N2 - 2:
- if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
- i += 1
- mp += 1
- tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
- if j > -1 and spikes1[i]-spikes2[j] < tau:
- # coincidence between the current spike and the previous spike
- # spike in spike train 2 appeared before spike in spike train 1
- # mark with -1
- asym -= 2
- elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
- j += 1
- mp += 1
- tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
- if i > -1 and spikes2[j]-spikes1[i] < tau:
- # coincidence between the current spike and the previous spike
- # spike in spike train 1 appeared before spike in spike train 2
- # mark with +1
- asym += 2
- else: # spikes1[i+1] = spikes2[j+1]
- # advance in both spike trains
- j += 1
- i += 1
- # add only one event with multiplicity 2, but no asymmetry counting
- mp += 2
-
- if asym == 0 and mp == 0:
- # empty spike trains -> spike sync = 1 by definition
- asym = 1
- mp = 1
-
- return asym, mp
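
As a quick cross-check of the raw counters returned by spike_train_order_cython above, a small sketch of how (asym, mp) map to the normalized order value; the numbers correspond to the example used in the deleted test file, and the division mirrors spike_train_order() in spike_train_order.py below:

    # st1 = [100, 200, 300], st2 = [105, 205, 300] on [0, 1000]:
    # st1 leads at 100/105 and at 200/205        -> asym = 2 + 2 = 4
    # four single spikes plus the shared spike
    # at 300 (multiplicity 2)                    -> mp = 6
    asym, mp = 4, 6
    order = 1.0 * asym / mp  # 0.666..., i.e. spike train 1 mostly leads
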
diff --git a/pyspike/directionality/cython/directionality_python_backend.py b/pyspike/directionality/cython/directionality_python_backend.py
deleted file mode 100644
index e14238f..0000000
--- a/pyspike/directionality/cython/directionality_python_backend.py
+++ /dev/null
@@ -1,89 +0,0 @@
-""" directionality_python_backend.py
-
-Collection of python functions that can be used instead of the cython
-implementation.
-
-Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
-
-Distributed under the BSD License
-
-"""
-
-import numpy as np
-
-
-############################################################
-# spike_train_order_python
-############################################################
-def spike_train_order_python(spikes1, spikes2, t_start, t_end, max_tau):
-
- def get_tau(spikes1, spikes2, i, j, max_tau):
- m = t_end - t_start # use interval as initial tau
- if i < len(spikes1)-1 and i > -1:
- m = min(m, spikes1[i+1]-spikes1[i])
- if j < len(spikes2)-1 and j > -1:
- m = min(m, spikes2[j+1]-spikes2[j])
- if i > 0:
- m = min(m, spikes1[i]-spikes1[i-1])
- if j > 0:
- m = min(m, spikes2[j]-spikes2[j-1])
- m *= 0.5
- if max_tau > 0.0:
- m = min(m, max_tau)
- return m
-
- N1 = len(spikes1)
- N2 = len(spikes2)
- i = -1
- j = -1
- n = 0
- st = np.zeros(N1 + N2 + 2) # spike times
- a = np.zeros(N1 + N2 + 2) # coincidences
- mp = np.ones(N1 + N2 + 2) # multiplicity
- while i + j < N1 + N2 - 2:
- if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
- i += 1
- n += 1
- tau = get_tau(spikes1, spikes2, i, j, max_tau)
- st[n] = spikes1[i]
- if j > -1 and spikes1[i]-spikes2[j] < tau:
- # coincidence between the current spike and the previous spike
-                # both get marked with -1
- a[n] = -1
- a[n-1] = -1
- elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
- j += 1
- n += 1
- tau = get_tau(spikes1, spikes2, i, j, max_tau)
- st[n] = spikes2[j]
- if i > -1 and spikes2[j]-spikes1[i] < tau:
- # coincidence between the current spike and the previous spike
- # both get marked with 1
- a[n] = 1
- a[n-1] = 1
- else: # spikes1[i+1] = spikes2[j+1]
- # advance in both spike trains
- j += 1
- i += 1
- n += 1
- # add only one event with zero asymmetry value and multiplicity 2
- st[n] = spikes1[i]
- a[n] = 0
- mp[n] = 2
-
- st = st[:n+2]
- a = a[:n+2]
- mp = mp[:n+2]
-
- st[0] = t_start
- st[len(st)-1] = t_end
- if N1 + N2 > 0:
- a[0] = a[1]
- a[len(a)-1] = a[len(a)-2]
- mp[0] = mp[1]
- mp[len(mp)-1] = mp[len(mp)-2]
- else:
- a[0] = 1
- a[1] = 1
-
- return st, a, mp
diff --git a/pyspike/directionality/spike_train_order.py b/pyspike/directionality/spike_train_order.py
deleted file mode 100644
index e6c9830..0000000
--- a/pyspike/directionality/spike_train_order.py
+++ /dev/null
@@ -1,266 +0,0 @@
-# Module containing functions to compute multivariate spike train order
-# Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
-# Distributed under the BSD License
-
-from __future__ import absolute_import
-
-import numpy as np
-from math import exp
-from functools import partial
-import pyspike
-from pyspike import DiscreteFunc
-from pyspike.generic import _generic_profile_multi
-
-
-############################################################
-# spike_train_order_profile
-############################################################
-def spike_train_order_profile(spike_train1, spike_train2, max_tau=None):
- """ Computes the spike delay asymmetry profile A(t) of the two given
- spike trains. Returns the profile as a DiscreteFunction object.
-
- :param spike_train1: First spike train.
- :type spike_train1: :class:`pyspike.SpikeTrain`
- :param spike_train2: Second spike train.
- :type spike_train2: :class:`pyspike.SpikeTrain`
- :param max_tau: Maximum coincidence window size. If 0 or `None`, the
- coincidence window has no upper bound.
-    :returns: The spike delay asymmetry profile :math:`A(t)`.
- :rtype: :class:`pyspike.function.DiscreteFunction`
-
- """
- # check whether the spike trains are defined for the same interval
- assert spike_train1.t_start == spike_train2.t_start, \
- "Given spike trains are not defined on the same interval!"
- assert spike_train1.t_end == spike_train2.t_end, \
- "Given spike trains are not defined on the same interval!"
-
- # cython implementation
- try:
- from .cython.cython_directionality import \
- spike_train_order_profile_cython as \
- spike_train_order_profile_impl
- except ImportError:
- # raise NotImplementedError()
- if not(pyspike.disable_backend_warning):
- print("Warning: spike_distance_cython not found. Make sure that \
-PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
-Falling back to slow python backend.")
- # use python backend
- from .cython.directionality_python_backend import \
- spike_train_order_python as spike_train_order_profile_impl
-
- if max_tau is None:
- max_tau = 0.0
-
- times, coincidences, multiplicity \
- = spike_train_order_profile_impl(spike_train1.spikes,
- spike_train2.spikes,
- spike_train1.t_start,
- spike_train1.t_end,
- max_tau)
-
- return DiscreteFunc(times, coincidences, multiplicity)
-
-
-############################################################
-# spike_train_order
-############################################################
-def spike_train_order(spike_train1, spike_train2, normalize=True,
- interval=None, max_tau=None):
- """ Computes the overall spike delay asymmetry value for two spike trains.
- """
- if interval is None:
- # distance over the whole interval is requested: use specific function
- # for optimal performance
- try:
- from .cython.cython_directionality import \
- spike_train_order_cython as spike_train_order_impl
- if max_tau is None:
- max_tau = 0.0
- c, mp = spike_train_order_impl(spike_train1.spikes,
- spike_train2.spikes,
- spike_train1.t_start,
- spike_train1.t_end,
- max_tau)
- except ImportError:
- # Cython backend not available: fall back to profile averaging
- c, mp = spike_train_order_profile(spike_train1, spike_train2,
- max_tau).integral(interval)
- if normalize:
- return 1.0*c/mp
- else:
- return c
- else:
- # some specific interval is provided: not yet implemented
- raise NotImplementedError()
-
-
-############################################################
-# spike_train_order_profile_multi
-############################################################
-def spike_train_order_profile_multi(spike_trains, indices=None,
- max_tau=None):
- """ Computes the multi-variate spike delay asymmetry profile for a set of
- spike trains. For each spike in the set of spike trains, the multi-variate
- profile is defined as the sum of asymmetry values divided by the number of
-    spike train pairs involving the spike train containing this spike,
- which is the number of spike trains minus one (N-1).
-
- :param spike_trains: list of :class:`pyspike.SpikeTrain`
- :param indices: list of indices defining which spike trains to use,
- if None all given spike trains are used (default=None)
- :type indices: list or None
- :param max_tau: Maximum coincidence window size. If 0 or `None`, the
- coincidence window has no upper bound.
-    :returns: The multi-variate spike delay asymmetry profile :math:`<A>(t)`
- :rtype: :class:`pyspike.function.DiscreteFunction`
-
- """
- prof_func = partial(spike_train_order_profile, max_tau=max_tau)
- average_prof, M = _generic_profile_multi(spike_trains, prof_func,
- indices)
- # average_dist.mul_scalar(1.0/M) # no normalization here!
- return average_prof
-
-
-############################################################
-# spike_train_order_matrix
-############################################################
-def spike_train_order_matrix(spike_trains, normalize=True, indices=None,
- interval=None, max_tau=None):
- """ Computes the spike delay asymmetry matrix for the given spike trains.
- """
- if indices is None:
- indices = np.arange(len(spike_trains))
- indices = np.array(indices)
- # check validity of indices
- assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
- "Invalid index list."
- # generate a list of possible index pairs
- pairs = [(indices[i], j) for i in range(len(indices))
- for j in indices[i+1:]]
-
- distance_matrix = np.zeros((len(indices), len(indices)))
- for i, j in pairs:
- d = spike_train_order(spike_trains[i], spike_trains[j], normalize,
- interval, max_tau=max_tau)
- distance_matrix[i, j] = d
- distance_matrix[j, i] = -d
- return distance_matrix
-
-
-############################################################
-# spike_order_values
-############################################################
-def spike_order_values(spike_trains, indices=None,
- interval=None, max_tau=None):
- """ Computes the spike train symmetry value for each spike in each spike
- train.
- """
- if indices is None:
- indices = np.arange(len(spike_trains))
- indices = np.array(indices)
- # check validity of indices
- assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
- "Invalid index list."
-    # list of arrays for resulting asymmetry values
- asymmetry_list = [np.zeros_like(st.spikes) for st in spike_trains]
- # generate a list of possible index pairs
- pairs = [(indices[i], j) for i in range(len(indices))
- for j in indices[i+1:]]
-
- # cython implementation
- try:
- from .cython.cython_directionality import \
- spike_order_values_cython as spike_order_values_impl
- except ImportError:
- raise NotImplementedError()
-# if not(pyspike.disable_backend_warning):
-# print("Warning: spike_distance_cython not found. Make sure that \
-# PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
-# Falling back to slow python backend.")
-# # use python backend
-# from .cython.python_backend import coincidence_python \
-# as coincidence_profile_impl
-
- if max_tau is None:
- max_tau = 0.0
-
- for i, j in pairs:
- a1, a2 = spike_order_values_impl(spike_trains[i].spikes,
- spike_trains[j].spikes,
- spike_trains[i].t_start,
- spike_trains[i].t_end,
- max_tau)
- asymmetry_list[i] += a1
- asymmetry_list[j] += a2
- for a in asymmetry_list:
- a /= len(spike_trains)-1
- return asymmetry_list
-
-
-############################################################
-# optimal_asymmetry_order_from_matrix
-############################################################
-def optimal_spike_train_order_from_matrix(D, full_output=False):
- """ finds the best sorting via simulated annealing.
- Returns the optimal permutation p and A value.
- Internal function, don't call directly! Use optimal_asymmetry_order
- instead.
- """
- N = len(D)
- A = np.sum(np.triu(D, 0))
-
- p = np.arange(N)
-
- T = 2*np.max(D) # starting temperature
- T_end = 1E-5 * T # final temperature
- alpha = 0.9 # cooling factor
- total_iter = 0
- while T > T_end:
- iterations = 0
- succ_iter = 0
- while iterations < 100*N and succ_iter < 10*N:
- # exchange two rows and cols
- ind1 = np.random.randint(N-1)
- delta_A = -2*D[p[ind1], p[ind1+1]]
- if delta_A > 0.0 or exp(delta_A/T) > np.random.random():
- # swap indices
- p[ind1], p[ind1+1] = p[ind1+1], p[ind1]
- A += delta_A
- succ_iter += 1
- iterations += 1
- total_iter += iterations
- T *= alpha # cool down
- if succ_iter == 0:
- break
- if full_output:
- return p, A, total_iter
- else:
- return p, A
-
-
-############################################################
-# optimal_spike_train_order
-############################################################
-def optimal_spike_train_order(spike_trains, indices=None, interval=None,
- max_tau=None, full_output=False):
- """ finds the best sorting of the given spike trains via simulated
- annealing.
- Returns the optimal permutation p and A value.
- """
-    D = spike_train_order_matrix(spike_trains, indices=indices, interval=interval, max_tau=max_tau)
- return optimal_spike_train_order_from_matrix(D, full_output)
-
-
-############################################################
-# permutate_matrix
-############################################################
-def permutate_matrix(D, p):
- N = len(D)
- D_p = np.empty_like(D)
- for n in range(N):
- for m in range(N):
- D_p[n, m] = D[p[n], p[m]]
- return D_p
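
To show how the multivariate pieces of the deleted module fit together, a hedged sketch (the toy spike trains are made up for illustration; the function names are the ones removed above):

    from pyspike import SpikeTrain
    from pyspike.directionality import (spike_train_order_matrix,
                                        optimal_spike_train_order_from_matrix,
                                        permutate_matrix)

    # three toy spike trains on [0, 1000]; the first tends to lead the others
    spike_trains = [SpikeTrain([100, 200, 300], [0, 1000]),
                    SpikeTrain([105, 205, 300], [0, 1000]),
                    SpikeTrain([110, 210, 305], [0, 1000])]

    D = spike_train_order_matrix(spike_trains)       # antisymmetric pairwise matrix
    p, A = optimal_spike_train_order_from_matrix(D)  # simulated-annealing ordering
    D_sorted = permutate_matrix(D, p)                # matrix rows/cols in that order
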
diff --git a/test/test_spike_delay_asymmetry.py b/test/test_spike_delay_asymmetry.py
deleted file mode 100644
index 9de16e5..0000000
--- a/test/test_spike_delay_asymmetry.py
+++ /dev/null
@@ -1,40 +0,0 @@
-""" test_spike_delay_asymmetry.py
-
-Tests the asymmetry functions
-
-Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
-
-Distributed under the BSD License
-
-"""
-
-import numpy as np
-from numpy.testing import assert_equal, assert_almost_equal, \
- assert_array_equal
-
-import pyspike as spk
-from pyspike import SpikeTrain, DiscreteFunc
-
-
-def test_profile():
- st1 = SpikeTrain([100, 200, 300], [0, 1000])
- st2 = SpikeTrain([105, 205, 300], [0, 1000])
- expected_x = np.array([0, 100, 105, 200, 205, 300, 1000])
- expected_y = np.array([1, 1, 1, 1, 1, 0, 0])
- expected_mp = np.array([1, 1, 1, 1, 1, 2, 2])
-
- f = spk.drct.spike_train_order_profile(st1, st2)
-
- assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp))
- assert_almost_equal(f.avrg(), 2.0/3.0)
- assert_almost_equal(spk.drct.spike_train_order(st1, st2), 2.0/3.0)
- assert_almost_equal(spk.drct.spike_train_order(st1, st2, normalize=False),
- 4.0)
-
- st3 = SpikeTrain([105, 195, 500], [0, 1000])
- expected_x = np.array([0, 100, 105, 195, 200, 300, 500, 1000])
- expected_y = np.array([1, 1, 1, -1, -1, 0, 0, 0])
- expected_mp = np.array([1, 1, 1, 1, 1, 1, 1, 1])
-
- f = spk.drct.spike_train_order_profile(st1, st3)
- assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp))
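
A possible additional check in the style of the test above, exercising the removed per-spike values (a sketch, not part of the original test suite; it assumes the compiled Cython backend is available, since spike_order_values has no Python fallback):

    def test_order_values_sketch():
        st1 = SpikeTrain([100, 200, 300], [0, 1000])
        st2 = SpikeTrain([105, 205, 300], [0, 1000])
        # st1 leads at 100 and 200 (+1 each), st2 follows (-1 each),
        # the simultaneous spikes at 300 get 0
        a1, a2 = spk.drct.spike_order_values([st1, st2])
        assert_array_equal(a1, [1, 1, 0])
        assert_array_equal(a2, [-1, -1, 0])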