author    Mario Mulansky <mario.mulansky@gmx.net>    2014-10-13 10:47:18 +0200
committer Mario Mulansky <mario.mulansky@gmx.net>    2014-10-13 10:47:18 +0200
commit    4274c328a4927b392036d1c3b759b0787b05f300 (patch)
tree      37a4f331006c63e7155bfb4c083c7e149f567eb8 /test
parent    ef15a482604d8ce9bef094d470d8a905c6da49a0 (diff)
code formatting following PEP8
Diffstat (limited to 'test')
-rw-r--r--    test/test_distance.py    37
-rw-r--r--    test/test_function.py    28
-rw-r--r--    test/test_spikes.py      27
3 files changed, 49 insertions, 43 deletions
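Most of the changes below are whitespace-only PEP8 fixes: a space after every comma, trailing whitespace stripped, and two blank lines between top-level functions (in pycodestyle terms roughly E231, W291/W293 and E302; the codes are given for orientation only and are not part of the commit). The diff also renames the duplicated test_pwc_mul to test_pwl_mul and adds the missing test calls to the __main__ blocks. A before/after sketch of the comma-spacing rule, taken from test_distance.py:

    # before: no whitespace after the commas
    t1 = np.array([0.2,0.4,0.6])
    # after: one space after every comma
    t1 = np.array([0.2, 0.4, 0.6])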
diff --git a/test/test_distance.py b/test/test_distance.py
index dafe693..3371cbd 100644
--- a/test/test_distance.py
+++ b/test/test_distance.py
@@ -22,8 +22,8 @@ def test_isi():
t2 = np.array([0.3, 0.45, 0.8, 0.9, 0.95])
# pen&paper calculation of the isi distance
- expected_times = [0.0,0.2,0.3,0.4,0.45,0.6,0.7,0.8,0.9,0.95,1.0]
- expected_isi = [-0.1/0.3, -0.1/0.3, 0.05/0.2, 0.05/0.2, -0.15/0.35,
+ expected_times = [0.0, 0.2, 0.3, 0.4, 0.45, 0.6, 0.7, 0.8, 0.9, 0.95, 1.0]
+ expected_isi = [-0.1/0.3, -0.1/0.3, 0.05/0.2, 0.05/0.2, -0.15/0.35,
-0.25/0.35, -0.05/0.35, 0.2/0.3, 0.25/0.3, 0.25/0.3]
t1 = spk.add_auxiliary_spikes(t1, 1.0)
@@ -36,10 +36,10 @@ def test_isi():
assert_array_almost_equal(f.y, expected_isi, decimal=14)
# check with some equal spike times
- t1 = np.array([0.2,0.4,0.6])
- t2 = np.array([0.1,0.4,0.5,0.6])
+ t1 = np.array([0.2, 0.4, 0.6])
+ t2 = np.array([0.1, 0.4, 0.5, 0.6])
- expected_times = [0.0,0.1,0.2,0.4,0.5,0.6,1.0]
+ expected_times = [0.0, 0.1, 0.2, 0.4, 0.5, 0.6, 1.0]
expected_isi = [0.1/0.2, -0.1/0.3, -0.1/0.3, 0.1/0.2, 0.1/0.2, -0.0/0.5]
t1 = spk.add_auxiliary_spikes(t1, 1.0)
@@ -56,11 +56,11 @@ def test_spike():
t2 = np.array([0.3, 0.45, 0.8, 0.9, 0.95])
# pen&paper calculation of the spike distance
- expected_times = [0.0,0.2,0.3,0.4,0.45,0.6,0.7,0.8,0.9,0.95,1.0]
+ expected_times = [0.0, 0.2, 0.3, 0.4, 0.45, 0.6, 0.7, 0.8, 0.9, 0.95, 1.0]
s1 = np.array([0.1, 0.1, (0.1*0.1+0.05*0.1)/0.2, 0.05, (0.05*0.15 * 2)/0.2,
0.15, 0.1, 0.1*0.2/0.3, 0.1**2/0.3, 0.1*0.05/0.3, 0.1])
- s2 = np.array([0.1, 0.1*0.2/0.3, 0.1, (0.1*0.05 * 2)/.15, 0.05,
- (0.05*0.2+0.1*0.15)/0.35, (0.05*0.1+0.1*0.25)/0.35,
+ s2 = np.array([0.1, 0.1*0.2/0.3, 0.1, (0.1*0.05 * 2)/.15, 0.05,
+ (0.05*0.2+0.1*0.15)/0.35, (0.05*0.1+0.1*0.25)/0.35,
0.1, 0.1, 0.05, 0.05])
isi1 = np.array([0.2, 0.2, 0.2, 0.2, 0.2, 0.1, 0.3, 0.3, 0.3, 0.3])
isi2 = np.array([0.3, 0.3, 0.15, 0.15, 0.35, 0.35, 0.35, 0.1, 0.05, 0.05])
@@ -76,17 +76,17 @@ def test_spike():
assert_array_almost_equal(f.y2, expected_y2, decimal=14)
# check with some equal spike times
- t1 = np.array([0.2,0.4,0.6])
- t2 = np.array([0.1,0.4,0.5,0.6])
+ t1 = np.array([0.2, 0.4, 0.6])
+ t2 = np.array([0.1, 0.4, 0.5, 0.6])
- expected_times = [0.0,0.1,0.2,0.4,0.5,0.6,1.0]
+ expected_times = [0.0, 0.1, 0.2, 0.4, 0.5, 0.6, 1.0]
s1 = np.array([0.1, 0.1*0.1/0.2, 0.1, 0.0, 0.0, 0.0, 0.0])
s2 = np.array([0.1*0.1/0.3, 0.1, 0.1*0.2/0.3, 0.0, 0.1, 0.0, 0.0])
isi1 = np.array([0.2, 0.2, 0.2, 0.2, 0.2, 0.4])
isi2 = np.array([0.3, 0.3, 0.3, 0.1, 0.1, 0.4])
expected_y1 = (s1[:-1]*isi2+s2[:-1]*isi1) / (0.5*(isi1+isi2)**2)
expected_y2 = (s1[1:]*isi2+s2[1:]*isi1) / (0.5*(isi1+isi2)**2)
-
+
t1 = spk.add_auxiliary_spikes(t1, 1.0)
t2 = spk.add_auxiliary_spikes(t2, 1.0)
f = spk.spike_distance(t1, t2)
@@ -100,8 +100,8 @@ def check_multi_distance(dist_func, dist_func_multi):
# generate spike trains:
t1 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6, 0.7]), 1.0)
t2 = spk.add_auxiliary_spikes(np.array([0.3, 0.45, 0.8, 0.9, 0.95]), 1.0)
- t3 = spk.add_auxiliary_spikes(np.array([0.2,0.4,0.6]), 1.0)
- t4 = spk.add_auxiliary_spikes(np.array([0.1,0.4,0.5,0.6]), 1.0)
+ t3 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6]), 1.0)
+ t4 = spk.add_auxiliary_spikes(np.array([0.1, 0.4, 0.5, 0.6]), 1.0)
spike_trains = [t1, t2, t3, t4]
f12 = dist_func(t1, t2)
@@ -111,17 +111,17 @@ def check_multi_distance(dist_func, dist_func_multi):
f24 = dist_func(t2, t4)
f34 = dist_func(t3, t4)
- f_multi = dist_func_multi(spike_trains, [0,1])
+ f_multi = dist_func_multi(spike_trains, [0, 1])
assert f_multi.almost_equal(f12, decimal=14)
f = copy(f12)
f.add(f13)
f.add(f23)
f.mul_scalar(1.0/3)
- f_multi = dist_func_multi(spike_trains, [0,1,2])
+ f_multi = dist_func_multi(spike_trains, [0, 1, 2])
assert f_multi.almost_equal(f, decimal=14)
- f.mul_scalar(3) # revert above normalization
+ f.mul_scalar(3) # revert above normalization
f.add(f14)
f.add(f24)
f.add(f34)
@@ -139,6 +139,7 @@ def test_multi_spike():
if __name__ == "__main__":
- test_auxiliary_spikes()
test_isi()
test_spike()
+ test_multi_isi()
+ test_multi_spike()
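For orientation on the pen&paper values in test_isi above: the ISI profile compares, at every time t, the current interspike interval of each train. The expected entries use a signed, normalized difference; a minimal sketch of that value, assuming the definition (isi1 - isi2) / max(isi1, isi2) (the function name below is made up for illustration, it is not PySpike's API):

    def isi_profile_value(isi1, isi2):
        # signed, normalized difference of the two current interspike intervals
        return (isi1 - isi2) / max(isi1, isi2)

    # On [0.0, 0.2) train 1 sits in the interval (0.0, 0.2) and train 2 in (0.0, 0.3)
    # (auxiliary spikes at 0.0 and 1.0 included), so the first expected_isi entry is:
    print(isi_profile_value(0.2, 0.3))   # -0.1/0.3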
diff --git a/test/test_function.py b/test/test_function.py
index c0fb3fd..ed7d6bc 100644
--- a/test/test_function.py
+++ b/test/test_function.py
@@ -10,18 +10,18 @@ Distributed under the MIT License (MIT)
from __future__ import print_function
import numpy as np
from copy import copy
-from numpy.testing import assert_equal, assert_almost_equal, \
- assert_array_almost_equal
+from numpy.testing import assert_almost_equal, assert_array_almost_equal
import pyspike as spk
+
def test_pwc():
# some random data
x = [0.0, 1.0, 2.0, 2.5, 4.0]
y = [1.0, -0.5, 1.5, 0.75]
f = spk.PieceWiseConstFunc(x, y)
xp, yp = f.get_plottable_data()
-
+
xp_expected = [0.0, 1.0, 1.0, 2.0, 2.0, 2.5, 2.5, 4.0]
yp_expected = [1.0, 1.0, -0.5, -0.5, 1.5, 1.5, 0.75, 0.75]
assert_array_almost_equal(xp, xp_expected, decimal=16)
@@ -51,17 +51,18 @@ def test_pwc_add():
f2.add(f)
assert_array_almost_equal(f2.x, x_expected, decimal=16)
assert_array_almost_equal(f2.y, y_expected, decimal=16)
-
+
f1.add(f2)
# same x, but y doubled
assert_array_almost_equal(f1.x, f2.x, decimal=16)
assert_array_almost_equal(f1.y, 2*f2.y, decimal=16)
+
def test_pwc_mul():
x = [0.0, 1.0, 2.0, 2.5, 4.0]
y = [1.0, -0.5, 1.5, 0.75]
f = spk.PieceWiseConstFunc(x, y)
-
+
f.mul_scalar(1.5)
assert_array_almost_equal(f.x, x, decimal=16)
assert_array_almost_equal(f.y, 1.5*np.array(y), decimal=16)
@@ -75,15 +76,15 @@ def test_pwl():
y2 = [1.5, -0.4, 1.5, 0.25]
f = spk.PieceWiseLinFunc(x, y1, y2)
xp, yp = f.get_plottable_data()
-
+
xp_expected = [0.0, 1.0, 1.0, 2.0, 2.0, 2.5, 2.5, 4.0]
yp_expected = [1.0, 1.5, -0.5, -0.4, 1.5, 1.5, 0.75, 0.25]
assert_array_almost_equal(xp, xp_expected, decimal=16)
assert_array_almost_equal(yp, yp_expected, decimal=16)
-
+
avrg_expected = (1.25 - 0.45 + 0.75 + 1.5*0.5) / 4.0
assert_almost_equal(f.avrg(), avrg_expected, decimal=16)
-
+
abs_avrg_expected = (1.25 + 0.45 + 0.75 + 1.5*0.5) / 4.0
assert_almost_equal(f.abs_avrg(), abs_avrg_expected, decimal=16)
@@ -113,7 +114,7 @@ def test_pwl_add():
assert_array_almost_equal(f2.x, x_expected, decimal=16)
assert_array_almost_equal(f2.y1, y1_expected, decimal=16)
assert_array_almost_equal(f2.y2, y2_expected, decimal=16)
-
+
f1.add(f2)
# same x, but y doubled
assert_array_almost_equal(f1.x, f2.x, decimal=16)
@@ -121,12 +122,12 @@ def test_pwl_add():
assert_array_almost_equal(f1.y2, 2*f2.y2, decimal=16)
-def test_pwc_mul():
+def test_pwl_mul():
x = [0.0, 1.0, 2.0, 2.5, 4.0]
y1 = [1.0, -0.5, 1.5, 0.75]
y2 = [1.5, -0.4, 1.5, 0.25]
f = spk.PieceWiseLinFunc(x, y1, y2)
-
+
f.mul_scalar(1.5)
assert_array_almost_equal(f.x, x, decimal=16)
assert_array_almost_equal(f.y1, 1.5*np.array(y1), decimal=16)
@@ -137,3 +138,8 @@ def test_pwc_mul():
if __name__ == "__main__":
test_pwc()
+ test_pwc_add()
+ test_pwc_mul()
+ test_pwl()
+ test_pwl_add()
+ test_pwl_mul()
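The xp_expected/yp_expected arrays in test_pwc above encode how a piecewise-constant function is expanded into step-plot data: every interior breakpoint is duplicated and every value is repeated once. A short sketch of that expansion with NumPy (the helper name is chosen here for illustration, it is not a PySpike function):

    import numpy as np

    def step_plot_data(x, y):
        # x: n+1 breakpoints, y: n constant values on the intervals between them
        xp = np.repeat(x, 2)[1:-1]   # duplicate interior breakpoints
        yp = np.repeat(y, 2)         # repeat each constant value
        return xp, yp

    xp, yp = step_plot_data([0.0, 1.0, 2.0, 2.5, 4.0], [1.0, -0.5, 1.5, 0.75])
    # xp == [0.0, 1.0, 1.0, 2.0, 2.0, 2.5, 2.5, 4.0]
    # yp == [1.0, 1.0, -0.5, -0.5, 1.5, 1.5, 0.75, 0.75]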
diff --git a/test/test_spikes.py b/test/test_spikes.py
index e008207..349e0bf 100644
--- a/test/test_spikes.py
+++ b/test/test_spikes.py
@@ -23,13 +23,13 @@ def test_auxiliary_spikes():
def test_load_from_txt():
- spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
- time_interval=(0,4000))
+ spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
+ time_interval=(0, 4000))
assert len(spike_trains) == 40
# check the first spike train
- spike_times = [0, 64.886, 305.81, 696, 937.77, 1059.7, 1322.2, 1576.1,
- 1808.1, 2121.5, 2381.1, 2728.6, 2966.9, 3223.7, 3473.7,
+ spike_times = [0, 64.886, 305.81, 696, 937.77, 1059.7, 1322.2, 1576.1,
+ 1808.1, 2121.5, 2381.1, 2728.6, 2966.9, 3223.7, 3473.7,
3644.3, 3936.3, 4000]
assert_equal(spike_times, spike_trains[0])
@@ -39,15 +39,15 @@ def test_load_from_txt():
assert spike_train[-1] == 4000
# load without adding auxiliary spikes
- spike_trains2 = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
- time_interval=None)
+ spike_trains2 = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
+ time_interval=None)
assert len(spike_trains2) == 40
# check auxiliary spikes
for i in xrange(len(spike_trains)):
- assert len(spike_trains[i]) == len(spike_trains2[i])+2 # two spikes less
+ assert len(spike_trains[i]) == len(spike_trains2[i])+2 # 2 spikes less
-def check_merged_spikes( merged_spikes, spike_trains ):
+def check_merged_spikes(merged_spikes, spike_trains):
# create a flat array with all spike events
all_spikes = np.array([])
for spike_train in spike_trains:
@@ -55,7 +55,7 @@ def check_merged_spikes( merged_spikes, spike_trains ):
indices = np.zeros_like(all_spikes, dtype='bool')
# check if we find all the spike events in the original spike trains
for x in merged_spikes:
- i = np.where(all_spikes == x)[0][0] # the first axis and the first entry
+ i = np.where(all_spikes == x)[0][0] # first axis and first entry
# change to something impossible so we dont find this event again
all_spikes[i] = -1.0
indices[i] = True
@@ -64,23 +64,22 @@ def check_merged_spikes( merged_spikes, spike_trains ):
def test_merge_spike_trains():
# first load the data
- spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
- time_interval=(0,4000))
+ spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
+ time_interval=(0, 4000))
spikes = spk.merge_spike_trains([spike_trains[0], spike_trains[1]])
# test if result is sorted
assert((spikes == np.sort(spikes)).all())
# check merging
- check_merged_spikes( spikes, [spike_trains[0], spike_trains[1]] )
+ check_merged_spikes(spikes, [spike_trains[0], spike_trains[1]])
spikes = spk.merge_spike_trains(spike_trains)
# test if result is sorted
assert((spikes == np.sort(spikes)).all())
# check merging
- check_merged_spikes( spikes, spike_trains )
+ check_merged_spikes(spikes, spike_trains)
if __name__ == "main":
test_auxiliary_spikes()
test_load_from_txt()
test_merge_spike_trains()
-
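check_merged_spikes above only verifies that the merged train is sorted and that each of its events can be matched to exactly one event of the input trains (the -1.0 marking prevents matching the same event twice). Under those checks, merging amounts to a sorted concatenation; a minimal sketch of that behaviour, stated as an assumption about the semantics rather than PySpike's actual implementation:

    import numpy as np

    def merge_spike_trains_sketch(spike_trains):
        # concatenate all event times and sort them; coincident spikes are kept,
        # which is what the one-to-one matching in check_merged_spikes requires
        return np.sort(np.concatenate(spike_trains))

    merged = merge_spike_trains_sketch([np.array([0.2, 0.4, 0.6]),
                                        np.array([0.1, 0.4, 0.5, 0.6])])
    # merged == [0.1, 0.2, 0.4, 0.4, 0.5, 0.6, 0.6]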