author  Mario Mulansky <mario.mulansky@gmx.net>  2015-04-24 16:10:38 +0200
committer  Mario Mulansky <mario.mulansky@gmx.net>  2015-04-24 16:10:38 +0200
commit  c3e58aeb00ef2a386a4c6a620e4e13652c55aed5 (patch)
tree  93e99ed3aa01285a461819db4bb219a199dde794
parent  36d80c9ec1d28488f9b5c97cd202c196efff694e (diff)
removed auxiliary_spike test, all tests pass
-rw-r--r--  test/test_spikes.py  |  46
1 file changed, 16 insertions(+), 30 deletions(-)
diff --git a/test/test_spikes.py b/test/test_spikes.py
index b12099e..6e11c07 100644
--- a/test/test_spikes.py
+++ b/test/test_spikes.py
@@ -14,38 +14,22 @@ from numpy.testing import assert_equal
import pyspike as spk
-def test_auxiliary_spikes():
- t = np.array([0.2, 0.4, 0.6, 0.7])
- t_aux = spk.add_auxiliary_spikes(t, time_interval=(0.1, 1.0))
- assert_equal(t_aux, [0.1, 0.2, 0.4, 0.6, 0.7, 1.0])
- t_aux = spk.add_auxiliary_spikes(t_aux, time_interval=(0.0, 1.0))
- assert_equal(t_aux, [0.0, 0.1, 0.2, 0.4, 0.6, 0.7, 1.0])
-
-
def test_load_from_txt():
spike_trains = spk.load_spike_trains_from_txt("test/PySpike_testdata.txt",
- time_interval=(0, 4000))
+ interval=(0, 4000))
assert len(spike_trains) == 40
# check the first spike train
- spike_times = [0, 64.886, 305.81, 696, 937.77, 1059.7, 1322.2, 1576.1,
+ spike_times = [64.886, 305.81, 696, 937.77, 1059.7, 1322.2, 1576.1,
1808.1, 2121.5, 2381.1, 2728.6, 2966.9, 3223.7, 3473.7,
- 3644.3, 3936.3, 4000]
- assert_equal(spike_times, spike_trains[0])
+ 3644.3, 3936.3]
+ assert_equal(spike_times, spike_trains[0].spikes)
# check auxiliary spikes
for spike_train in spike_trains:
- assert spike_train[0] == 0.0
- assert spike_train[-1] == 4000
+ assert spike_train.t_start == 0.0
+ assert spike_train.t_end == 4000
- # load without adding auxiliary spikes
- spike_trains2 = spk.load_spike_trains_from_txt("test/PySpike_testdata.txt",
- time_interval=None)
- assert len(spike_trains2) == 40
- # check auxiliary spikes
- for i in xrange(len(spike_trains)):
- assert len(spike_trains[i]) == len(spike_trains2[i])+2 # 2 spikes less
-
def check_merged_spikes(merged_spikes, spike_trains):
# create a flat array with all spike events
@@ -65,21 +49,23 @@ def check_merged_spikes(merged_spikes, spike_trains):
def test_merge_spike_trains():
# first load the data
spike_trains = spk.load_spike_trains_from_txt("test/PySpike_testdata.txt",
- time_interval=(0, 4000))
+ interval=(0, 4000))
- spikes = spk.merge_spike_trains([spike_trains[0], spike_trains[1]])
+ merged_spikes = spk.merge_spike_trains([spike_trains[0], spike_trains[1]])
# test if result is sorted
- assert((spikes == np.sort(spikes)).all())
+ assert((merged_spikes.spikes == np.sort(merged_spikes.spikes)).all())
# check merging
- check_merged_spikes(spikes, [spike_trains[0], spike_trains[1]])
+ check_merged_spikes(merged_spikes.spikes, [spike_trains[0].spikes,
+ spike_trains[1].spikes])
- spikes = spk.merge_spike_trains(spike_trains)
+ merged_spikes = spk.merge_spike_trains(spike_trains)
# test if result is sorted
- assert((spikes == np.sort(spikes)).all())
+ assert((merged_spikes.spikes == np.sort(merged_spikes.spikes)).all())
# check merging
- check_merged_spikes(spikes, spike_trains)
+ check_merged_spikes(merged_spikes.spikes,
+ [st.spikes for st in spike_trains])
+
if __name__ == "main":
- test_auxiliary_spikes()
test_load_from_txt()
test_merge_spike_trains()
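
The updated tests exercise the SpikeTrain-based interface: load_spike_trains_from_txt takes an interval argument and yields objects whose spike times are in .spikes and whose edges are .t_start / .t_end, and merge_spike_trains likewise returns such an object. A minimal usage sketch assembled only from the calls shown in the tests above (data path and interval taken from the tests; the sketch itself is not part of this commit):

    import numpy as np
    import pyspike as spk

    # load all spike trains from the test data file, restricted to the observation interval
    spike_trains = spk.load_spike_trains_from_txt("test/PySpike_testdata.txt",
                                                  interval=(0, 4000))

    # each entry carries its spike times plus the interval edges
    first = spike_trains[0]
    print(first.spikes[:3], first.t_start, first.t_end)

    # merging returns the same kind of object; its .spikes array is sorted
    merged = spk.merge_spike_trains([spike_trains[0], spike_trains[1]])
    assert (merged.spikes == np.sort(merged.spikes)).all()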