summaryrefslogtreecommitdiff
path: root/test
diff options
context:
space:
mode:
authorMario Mulansky <mario.mulansky@gmx.net>2014-10-10 17:04:04 +0200
committerMario Mulansky <mario.mulansky@gmx.net>2014-10-10 17:04:04 +0200
commita769a03d089ac0c61e2155239a28665c9316e14a (patch)
tree766347541743aab1baeb07e9d75d008981c553d6 /test
parent62f792fa52801234d4f9c33800a44b0308e9b8ab (diff)
added load_txt function, some restructuring
Diffstat (limited to 'test')
-rwxr-xr-xtest/SPIKY_testdata.txt3
-rw-r--r--test/test_distance.py24
-rw-r--r--test/test_merge_spikes.py49
-rw-r--r--test/test_spikes.py84
4 files changed, 95 insertions, 65 deletions
diff --git a/test/SPIKY_testdata.txt b/test/SPIKY_testdata.txt
index 8fa3fcf..c8bea67 100755
--- a/test/SPIKY_testdata.txt
+++ b/test/SPIKY_testdata.txt
@@ -1,7 +1,10 @@
64.886 305.81 696 937.77 1059.7 1322.2 1576.1 1808.1 2121.5 2381.1 2728.6 2966.9 3223.7 3473.7 3644.3 3936.3
65.553 307.49 696.63 948.66 1070.4 1312.2 1712.7 1934.3 2117.6 2356.9 2727.3 2980.6 3226.9 3475.7 3726.4 3944
+# test comment
69.064 319.1 688.32 947.85 1071.8 1300.8 1697.2 1930.6 2139.4 2354.2 2723.7 2963.6 3221.3 3470.1
59.955 313.83 692.23 955.95 1070.4 1319.6 1681.9 1963.5 2151.4 2373.8 2729.4 2971.2 3220.2 3475.5 3632.3 3788.9
+# empty line
+
59.977 306.84 686.09 935.08 1059.9 1325.9 1543.4 1821.9 2150.2 2390.4 2724.5 2969.6 3222.5 3471.5 3576 3913.9
66.415 313.41 688.83 931.43 1051.8 1304.6 1555.6 1820.2 2150.5 2383.1 2723.4 2947.7 3196.6 3443.5 3575 3804.9
66.449 311.02 689.26 947.12 1058.9 1286.6 1708.2 1957.3 2124.8 2375.7 2709.4 2977.6 3191.1 3449.6 3590.4 3831.2
diff --git a/test/test_distance.py b/test/test_distance.py
index c43f0b3..92b99ae 100644
--- a/test/test_distance.py
+++ b/test/test_distance.py
@@ -13,14 +13,6 @@ from numpy.testing import assert_equal, assert_array_almost_equal
import pyspike as spk
-def test_auxiliary_spikes():
- t = np.array([0.2, 0.4, 0.6, 0.7])
- t_aux = spk.add_auxiliary_spikes(t, T_end=1.0, T_start=0.1)
- assert_equal(t_aux, [0.1, 0.2, 0.4, 0.6, 0.7, 1.0])
- t_aux = spk.add_auxiliary_spikes(t_aux, 1.0)
- assert_equal(t_aux, [0.0, 0.1, 0.2, 0.4, 0.6, 0.7, 1.0])
-
-
def test_isi():
# generate two spike trains:
t1 = np.array([0.2, 0.4, 0.6, 0.7])
@@ -31,8 +23,8 @@ def test_isi():
expected_isi = [-0.1/0.3, -0.1/0.3, 0.05/0.2, 0.05/0.2, -0.15/0.35,
-0.25/0.35, -0.05/0.35, 0.2/0.3, 0.25/0.3, 0.25/0.3]
- t1 = spk.add_auxiliary_spikes(t1, 1.0)
- t2 = spk.add_auxiliary_spikes(t2, 1.0)
+ t1 = spk.add_auxiliary_spikes(t1, (0.0,1.0))
+ t2 = spk.add_auxiliary_spikes(t2, (0.0,1.0))
f = spk.isi_distance(t1, t2)
# print("ISI: ", f.y)
@@ -47,8 +39,8 @@ def test_isi():
expected_times = [0.0,0.1,0.2,0.4,0.5,0.6,1.0]
expected_isi = [0.1/0.2, -0.1/0.3, -0.1/0.3, 0.1/0.2, 0.1/0.2, -0.0/0.5]
- t1 = spk.add_auxiliary_spikes(t1, 1.0)
- t2 = spk.add_auxiliary_spikes(t2, 1.0)
+ t1 = spk.add_auxiliary_spikes(t1, (0.0,1.0))
+ t2 = spk.add_auxiliary_spikes(t2, (0.0,1.0))
f = spk.isi_distance(t1, t2)
assert_equal(f.x, expected_times)
@@ -72,8 +64,8 @@ def test_spike():
expected_y1 = (s1[:-1]*isi2+s2[:-1]*isi1) / (0.5*(isi1+isi2)**2)
expected_y2 = (s1[1:]*isi2+s2[1:]*isi1) / (0.5*(isi1+isi2)**2)
- t1 = spk.add_auxiliary_spikes(t1, 1.0)
- t2 = spk.add_auxiliary_spikes(t2, 1.0)
+ t1 = spk.add_auxiliary_spikes(t1, (0.0,1.0))
+ t2 = spk.add_auxiliary_spikes(t2, (0.0,1.0))
f = spk.spike_distance(t1, t2)
assert_equal(f.x, expected_times)
@@ -92,8 +84,8 @@ def test_spike():
expected_y1 = (s1[:-1]*isi2+s2[:-1]*isi1) / (0.5*(isi1+isi2)**2)
expected_y2 = (s1[1:]*isi2+s2[1:]*isi1) / (0.5*(isi1+isi2)**2)
- t1 = spk.add_auxiliary_spikes(t1, 1.0)
- t2 = spk.add_auxiliary_spikes(t2, 1.0)
+ t1 = spk.add_auxiliary_spikes(t1, (0.0,1.0))
+ t2 = spk.add_auxiliary_spikes(t2, (0.0,1.0))
f = spk.spike_distance(t1, t2)
assert_equal(f.x, expected_times)
diff --git a/test/test_merge_spikes.py b/test/test_merge_spikes.py
deleted file mode 100644
index 3162700..0000000
--- a/test/test_merge_spikes.py
+++ /dev/null
@@ -1,49 +0,0 @@
-""" test_merge_spikes.py
-
-Tests merging spikes
-
-Copyright 2014, Mario Mulansky <mario.mulansky@gmx.net>
-"""
-from __future__ import print_function
-import numpy as np
-
-import pyspike as spk
-
-def check_merged_spikes( merged_spikes, spike_trains ):
- # create a flat array with all spike events
- all_spikes = np.array([])
- for spike_train in spike_trains:
- all_spikes = np.append(all_spikes, spike_train)
- indices = np.zeros_like(all_spikes, dtype='bool')
- # check if we find all the spike events in the original spike trains
- for x in merged_spikes:
- i = np.where(all_spikes == x)[0][0] # the first axis and the first entry
- # change to something impossible so we dont find this event again
- all_spikes[i] = -1.0
- indices[i] = True
- assert( indices.all() )
-
-def test_merge_spike_trains():
-
- # first load the data
- spike_trains = []
- spike_file = open("SPIKY_testdata.txt", 'r')
- for line in spike_file:
- spike_trains.append(spk.spike_train_from_string(line))
-
- spikes = spk.merge_spike_trains([spike_trains[0], spike_trains[1]])
- # test if result is sorted
- assert((spikes == np.sort(spikes)).all())
- # check merging
- check_merged_spikes( spikes, [spike_trains[0], spike_trains[1]] )
-
- spikes = spk.merge_spike_trains(spike_trains)
- # test if result is sorted
- assert((spikes == np.sort(spikes)).all())
- # check merging
- check_merged_spikes( spikes, spike_trains )
-
-
-if __name__ == "main":
- test_merge_spike_trains()
-
diff --git a/test/test_spikes.py b/test/test_spikes.py
new file mode 100644
index 0000000..dca580f
--- /dev/null
+++ b/test/test_spikes.py
@@ -0,0 +1,84 @@
+""" test_load.py
+
+Test loading of spike trains from text files
+
+Copyright 2014, Mario Mulansky <mario.mulansky@gmx.net>
+"""
+
+from __future__ import print_function
+import numpy as np
+from numpy.testing import assert_equal
+
+import pyspike as spk
+
+
+def test_auxiliary_spikes():
+ t = np.array([0.2, 0.4, 0.6, 0.7])
+ t_aux = spk.add_auxiliary_spikes(t, time_interval=(0.1, 1.0))
+ assert_equal(t_aux, [0.1, 0.2, 0.4, 0.6, 0.7, 1.0])
+ t_aux = spk.add_auxiliary_spikes(t_aux, time_interval=(0.0, 1.0))
+ assert_equal(t_aux, [0.0, 0.1, 0.2, 0.4, 0.6, 0.7, 1.0])
+
+
+def test_load_from_txt():
+ spike_trains = spk.load_spike_trains_from_txt("SPIKY_testdata.txt",
+ time_interval=(0,4000))
+ assert len(spike_trains) == 40
+
+ # check the first spike train
+ spike_times = [0, 64.886, 305.81, 696, 937.77, 1059.7, 1322.2, 1576.1,
+ 1808.1, 2121.5, 2381.1, 2728.6, 2966.9, 3223.7, 3473.7,
+ 3644.3, 3936.3, 4000]
+ assert_equal(spike_times, spike_trains[0])
+
+ # check auxiliary spikes
+ for spike_train in spike_trains:
+ assert spike_train[0] == 0.0
+ assert spike_train[-1] == 4000
+
+ # load without adding auxiliary spikes
+ spike_trains2 = spk.load_spike_trains_from_txt("SPIKY_testdata.txt",
+ time_interval=None)
+ assert len(spike_trains2) == 40
+ # check auxiliary spikes
+ for i in xrange(len(spike_trains)):
+        assert len(spike_trains[i]) == len(spike_trains2[i])+2 # two fewer spikes without the auxiliary ones
+
+
+def check_merged_spikes( merged_spikes, spike_trains ):
+ # create a flat array with all spike events
+ all_spikes = np.array([])
+ for spike_train in spike_trains:
+ all_spikes = np.append(all_spikes, spike_train)
+ indices = np.zeros_like(all_spikes, dtype='bool')
+ # check if we find all the spike events in the original spike trains
+ for x in merged_spikes:
+ i = np.where(all_spikes == x)[0][0] # the first axis and the first entry
+ # change to something impossible so we dont find this event again
+ all_spikes[i] = -1.0
+ indices[i] = True
+ assert( indices.all() )
+
+
+def test_merge_spike_trains():
+ # first load the data
+ spike_trains = spk.load_spike_trains_from_txt("SPIKY_testdata.txt",
+ time_interval=(0,4000))
+
+ spikes = spk.merge_spike_trains([spike_trains[0], spike_trains[1]])
+ # test if result is sorted
+ assert((spikes == np.sort(spikes)).all())
+ # check merging
+ check_merged_spikes( spikes, [spike_trains[0], spike_trains[1]] )
+
+ spikes = spk.merge_spike_trains(spike_trains)
+ # test if result is sorted
+ assert((spikes == np.sort(spikes)).all())
+ # check merging
+ check_merged_spikes( spikes, spike_trains )
+
+if __name__ == "__main__":
+ test_auxiliary_spikes()
+ test_load_from_txt()
+ test_merge_spike_trains()
+