summaryrefslogtreecommitdiff
path: root/test/test_spikes.py
diff options
context:
space:
mode:
authorMario Mulansky <mario.mulansky@gmx.net>2014-10-13 10:47:18 +0200
committerMario Mulansky <mario.mulansky@gmx.net>2014-10-13 10:47:18 +0200
commit4274c328a4927b392036d1c3b759b0787b05f300 (patch)
tree37a4f331006c63e7155bfb4c083c7e149f567eb8 /test/test_spikes.py
parentef15a482604d8ce9bef094d470d8a905c6da49a0 (diff)
code formatting following PEP8
Diffstat (limited to 'test/test_spikes.py')
-rw-r--r--test/test_spikes.py27
1 file changed, 13 insertions, 14 deletions
diff --git a/test/test_spikes.py b/test/test_spikes.py
index e008207..349e0bf 100644
--- a/test/test_spikes.py
+++ b/test/test_spikes.py
@@ -23,13 +23,13 @@ def test_auxiliary_spikes():
def test_load_from_txt():
- spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
- time_interval=(0,4000))
+ spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
+ time_interval=(0, 4000))
assert len(spike_trains) == 40
# check the first spike train
- spike_times = [0, 64.886, 305.81, 696, 937.77, 1059.7, 1322.2, 1576.1,
- 1808.1, 2121.5, 2381.1, 2728.6, 2966.9, 3223.7, 3473.7,
+ spike_times = [0, 64.886, 305.81, 696, 937.77, 1059.7, 1322.2, 1576.1,
+ 1808.1, 2121.5, 2381.1, 2728.6, 2966.9, 3223.7, 3473.7,
3644.3, 3936.3, 4000]
assert_equal(spike_times, spike_trains[0])
@@ -39,15 +39,15 @@ def test_load_from_txt():
assert spike_train[-1] == 4000
# load without adding auxiliary spikes
- spike_trains2 = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
- time_interval=None)
+ spike_trains2 = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
+ time_interval=None)
assert len(spike_trains2) == 40
# check auxiliary spikes
for i in xrange(len(spike_trains)):
- assert len(spike_trains[i]) == len(spike_trains2[i])+2 # two spikes less
+ assert len(spike_trains[i]) == len(spike_trains2[i])+2 # 2 spikes less
-def check_merged_spikes( merged_spikes, spike_trains ):
+def check_merged_spikes(merged_spikes, spike_trains):
# create a flat array with all spike events
all_spikes = np.array([])
for spike_train in spike_trains:
@@ -55,7 +55,7 @@ def check_merged_spikes( merged_spikes, spike_trains ):
indices = np.zeros_like(all_spikes, dtype='bool')
# check if we find all the spike events in the original spike trains
for x in merged_spikes:
- i = np.where(all_spikes == x)[0][0] # the first axis and the first entry
+ i = np.where(all_spikes == x)[0][0] # first axis and first entry
# change to something impossible so we dont find this event again
all_spikes[i] = -1.0
indices[i] = True
@@ -64,23 +64,22 @@ def check_merged_spikes( merged_spikes, spike_trains ):
def test_merge_spike_trains():
# first load the data
- spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
- time_interval=(0,4000))
+ spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
+ time_interval=(0, 4000))
spikes = spk.merge_spike_trains([spike_trains[0], spike_trains[1]])
# test if result is sorted
assert((spikes == np.sort(spikes)).all())
# check merging
- check_merged_spikes( spikes, [spike_trains[0], spike_trains[1]] )
+ check_merged_spikes(spikes, [spike_trains[0], spike_trains[1]])
spikes = spk.merge_spike_trains(spike_trains)
# test if result is sorted
assert((spikes == np.sort(spikes)).all())
# check merging
- check_merged_spikes( spikes, spike_trains )
+ check_merged_spikes(spikes, spike_trains)
if __name__ == "main":
test_auxiliary_spikes()
test_load_from_txt()
test_merge_spike_trains()
-