author     Mario Mulansky <mario.mulansky@gmx.net>  2015-01-19 22:32:42 +0100
committer  Mario Mulansky <mario.mulansky@gmx.net>  2015-01-19 22:32:42 +0100
commit     6c0f966649c8dedd4115d6809e569732ee5709c9 (patch)
tree       e85e90e0dc29bbdae414f4eaae356d22b259695b
parent     66968eedd276eb5d661b25d92775203546a3d646 (diff)
interval averages for discrete functions
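This adds interval-restricted averaging to DiscreteFunction: integral() now divides the
summed values by the summed multiplicity and accepts a single interval, and avrg()
additionally accepts a sequence of intervals, summing the per-interval results. A minimal
usage sketch, based on the diff and the sample values from test_df() below:

    import pyspike as spk

    # discrete function from test_df(): samples x, values y, multiplicities mp
    x = [0.0, 1.0, 2.0, 2.5, 4.0]
    y = [0.0, 1.0, 1.0, 0.0, 1.0]
    mp = [1.0, 2.0, 1.0, 2.0, 1.0]
    f = spk.DiscreteFunction(x, y, mp)

    # whole function: sum(y[1:-1]) / sum(mp[1:-1]) = 2/5
    print(f.avrg())                          # 0.4

    # single interval: samples at x=1.0 and x=2.0 fall in (0.5, 2.4)
    print(f.avrg([0.5, 2.4]))                # 2/3

    # sequence of intervals: per-interval averages are summed
    print(f.avrg([[0.5, 2.4], [1.5, 3.5]]))  # 2/3 + 1/3 = 1.0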
-rw-r--r--  pyspike/function.py    | 34
-rw-r--r--  test/test_function.py  | 27
2 files changed, 53 insertions, 8 deletions
diff --git a/pyspike/function.py b/pyspike/function.py
index 6fb7537..ebf4189 100644
--- a/pyspike/function.py
+++ b/pyspike/function.py
@@ -416,9 +416,9 @@ class DiscreteFunction(object):
return 1.0*self.x, 1.0*self.y/self.mp
def integral(self, interval=None):
- """ Returns the integral over the given interval. For the interval
- sequence this amounts to the sum over all values divided by the count
- of intervals.
+ """ Returns the integral over the given interval. For the discrete
+ function, this amounts to the sum over all values divided by the total
+ multiplicity.
:param interval: integration interval given as a pair of floats, if
None the integral over the whole function is computed.
@@ -429,9 +429,16 @@ class DiscreteFunction(object):
if interval is None:
# no interval given, integrate over the whole spike train
# don't count the first value, which is zero by definition
- a = 1.0*np.sum(self.y[1:-1])
+ a = 1.0 * np.sum(self.y[1:-1]) / np.sum(self.mp[1:-1])
else:
- raise NotImplementedError()
+ # find the indices corresponding to the interval
+ start_ind = np.searchsorted(self.x, interval[0], side='right')
+ end_ind = np.searchsorted(self.x, interval[1], side='left')
+ assert start_ind > 0 and end_ind < len(self.x), \
+ "Invalid averaging interval"
+ # first the contribution from between the indices
+ a = np.sum(self.y[start_ind:end_ind]) / \
+ np.sum(self.mp[start_ind:end_ind])
return a
def avrg(self, interval=None):
@@ -448,10 +455,21 @@ class DiscreteFunction(object):
"""
if interval is None:
# no interval given, average over the whole spike train
- # don't count the first interval for normalization
- return self.integral() / np.sum(self.mp[1:-1])
+ return self.integral()
+
+        # check if interval is a sequence
+ assert isinstance(interval, collections.Sequence), \
+ "Invalid value for `interval`. None, Sequence or Tuple expected."
+ # check if interval is a sequence of intervals
+ if not isinstance(interval[0], collections.Sequence):
+ # just one interval
+ a = self.integral(interval)
else:
- raise NotImplementedError()
+ # several intervals
+ a = 0.0
+ for ival in interval:
+ a += self.integral(ival)
+ return a
def add(self, f):
""" Adds another `DiscreteFunction` function to this function.
diff --git a/test/test_function.py b/test/test_function.py
index ba87db8..da3d851 100644
--- a/test/test_function.py
+++ b/test/test_function.py
@@ -203,6 +203,33 @@ def test_pwl_avrg():
assert_array_almost_equal(f_avrg.y2, y2_expected, decimal=16)
+def test_df():
+ # testing discrete function
+ x = [0.0, 1.0, 2.0, 2.5, 4.0]
+ y = [0.0, 1.0, 1.0, 0.0, 1.0]
+ mp = [1.0, 2.0, 1.0, 2.0, 1.0]
+ f = spk.DiscreteFunction(x, y, mp)
+ xp, yp = f.get_plottable_data()
+
+ xp_expected = [0.0, 1.0, 2.0, 2.5, 4.0]
+ yp_expected = [0.0, 0.5, 1.0, 0.0, 1.0]
+ assert_array_almost_equal(xp, xp_expected, decimal=16)
+ assert_array_almost_equal(yp, yp_expected, decimal=16)
+
+ avrg_expected = 2.0 / 5.0
+ assert_almost_equal(f.avrg(), avrg_expected, decimal=16)
+
+ # interval averaging
+ a = f.avrg([0.5, 2.4])
+ assert_almost_equal(a, 2.0/3.0, decimal=16)
+ a = f.avrg([1.5, 3.5])
+ assert_almost_equal(a, 1.0/3.0, decimal=16)
+ a = f.avrg((0.9, 3.5))
+ assert_almost_equal(a, 2.0/5.0, decimal=16)
+ a = f.avrg([1.1, 4.0])
+ assert_almost_equal(a, 1.0/3.0, decimal=16)
+
+
if __name__ == "__main__":
test_pwc()
test_pwc_add()
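
A short worked sketch of the index selection performed in DiscreteFunction.integral()
above for a single interval: np.searchsorted with side='right' for the start and
side='left' for the end, so samples lying exactly on an interval border are not counted.
Values as in test_df():

    import numpy as np

    x = np.array([0.0, 1.0, 2.0, 2.5, 4.0])
    y = np.array([0.0, 1.0, 1.0, 0.0, 1.0])
    mp = np.array([1.0, 2.0, 1.0, 2.0, 1.0])

    interval = (0.5, 2.4)
    start_ind = np.searchsorted(x, interval[0], side='right')  # -> 1
    end_ind = np.searchsorted(x, interval[1], side='left')     # -> 3

    # samples x[1]=1.0 and x[2]=2.0 lie inside the interval
    a = np.sum(y[start_ind:end_ind]) / np.sum(mp[start_ind:end_ind])  # 2/3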