From c1c5403b8274bd19aa1e71933cfaefe1ba622e59 Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Fri, 10 Oct 2014 17:23:28 +0200
Subject: added License note in headers

---
 examples/plot.py | 42 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 42 insertions(+)
 create mode 100644 examples/plot.py

(limited to 'examples/plot.py')

diff --git a/examples/plot.py b/examples/plot.py
new file mode 100644
index 0000000..d7e2173
--- /dev/null
+++ b/examples/plot.py
@@ -0,0 +1,42 @@
+""" plot.py
+
+Simple example showing how to load and plot spike trains and their distances.
+
+Copyright 2014, Mario Mulansky
+
+Distributed under the MIT License (MIT)
+"""
+
+
+from __future__ import print_function
+
+import numpy as np
+import matplotlib.pyplot as plt
+
+import pyspike as spk
+
+spike_trains = spk.load_spike_trains_from_txt("SPIKY_testdata.txt",
+                                              time_interval=(0,4000))
+
+# plot the spike time
+for (i,spikes) in enumerate(spike_trains):
+    plt.plot(spikes, i*np.ones_like(spikes), 'o')
+
+f = spk.isi_distance(spike_trains[0], spike_trains[1])
+x, y = f.get_plottable_data()
+
+plt.figure()
+plt.plot(x, np.abs(y), '--k')
+
+print("Average: %.8f" % f.avrg())
+print("Absolute average: %.8f" % f.abs_avrg())
+
+
+f = spk.spike_distance(spike_trains[0], spike_trains[1])
+x, y = f.get_plottable_data()
+print(x)
+print(y)
+#plt.figure()
+plt.plot(x, y, '-b')
+
+plt.show()
--
cgit v1.2.3


From f3fefa32f9c02c217448529454011d56669b42ae Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Sun, 12 Oct 2014 18:27:38 +0200
Subject: changed name of example spike data file

---
 examples/PySpike_testdata.txt | 43 +++++++++++++++++++++++++++++++++++++++++++
 examples/SPIKY_testdata.txt   | 43 -------------------------------------------
 examples/isi_matrix.py        |  2 +-
 examples/merge.py             |  2 +-
 examples/plot.py              |  2 +-
 5 files changed, 46 insertions(+), 46 deletions(-)
 create mode 100755 examples/PySpike_testdata.txt
 delete mode 100755 examples/SPIKY_testdata.txt

(limited to 'examples/plot.py')

diff --git a/examples/PySpike_testdata.txt b/examples/PySpike_testdata.txt
new file mode 100755
index 0000000..c8bea67
--- /dev/null
+++ b/examples/PySpike_testdata.txt
@@ -0,0 +1,43 @@
+64.886 305.81 696 937.77 1059.7 1322.2 1576.1 1808.1 2121.5 2381.1 2728.6 2966.9 3223.7 3473.7 3644.3 3936.3
+65.553 307.49 696.63 948.66 1070.4 1312.2 1712.7 1934.3 2117.6 2356.9 2727.3 2980.6 3226.9 3475.7 3726.4 3944
+# test comment
+69.064 319.1 688.32 947.85 1071.8 1300.8 1697.2 1930.6 2139.4 2354.2 2723.7 2963.6 3221.3 3470.1
+59.955 313.83 692.23 955.95 1070.4 1319.6 1681.9 1963.5 2151.4 2373.8 2729.4 2971.2 3220.2 3475.5 3632.3 3788.9
+# empty line
+
+59.977 306.84 686.09 935.08 1059.9 1325.9 1543.4 1821.9 2150.2 2390.4 2724.5 2969.6 3222.5 3471.5 3576 3913.9
+66.415 313.41 688.83 931.43 1051.8 1304.6 1555.6 1820.2 2150.5 2383.1 2723.4 2947.7 3196.6 3443.5 3575 3804.9
+66.449 311.02 689.26 947.12 1058.9 1286.6 1708.2 1957.3 2124.8 2375.7 2709.4 2977.6 3191.1 3449.6 3590.4 3831.2
+63.764 318.45 697.48 936.97 1059.3 1325 1687.9 1944.7 2132.5 2377.1 2713.1 2976.6 3196.8 3442.6 3741.6 3998.3
+63.906 314.79 693.26 937.12 1065.9 1315.8 1584.3 1821.5 2126.3 2396.8 2709.1 2967 3197.4 3444 3732.8 3849.5
+69.493 316.62 689.81 943.62 1071.9 1296.3 1654.8 1931.9 2127.5 2390.6 2708.9 2950.4 3194.8 3445.2 3670.1 3903.3
+61.789 317.53 555.82 813.15 1198.7 1448.7 1686.7 1943.5 2060.7 2311.4 2658.2 2900.2 3167.4 3418.2 3617.3 3771
+64.098 309.86 567.27 813.91 1182 1464.3 1576.8 1822.5 2063.1 2311.7 2655.8 2911.7 3168.3 3418.2 3586.4 3999.7
+68.59 315.5
559.52 806.23 1182.5 1441.1 1567.2 1804.8 2074.9 2315.8 2655.1 2913.2 3165.9 3419.5 3648.1 3884.4 +66.507 314.42 556.42 814.83 1182.5 1440.3 1701.3 1911.1 2069.7 2319.3 2662.3 2903.2 3167.4 3418.5 3545 3893.9 +72.744 318.45 554.4 819.64 1186.9 1449.7 1676 1957.4 2051.4 2302.8 2657.8 2916.2 3169.4 3416.7 3570.4 3884.8 +64.779 324.42 560.56 828.99 1174.8 1439.9 1563.7 1790.6 2067.7 2287.6 2657.4 2905.2 3139.2 3389.1 3507.8 3807.5 +64.852 316.63 568.89 815.61 1198.3 1454.1 1710.6 1933.9 2091.5 2309.6 2660.9 2907.5 3137.2 3389.3 3617.2 +63.089 314.52 553.8 827.06 1183.9 1457.6 1558.9 1808.3 2064.5 2337.1 2653.6 2897 3143.7 3385.7 3668.7 3803.8 +62.23 315.16 564.35 812.15 1199.6 1448.9 1562.7 1839.1 2069.7 2308.9 2649.6 2919.7 3141 3389.9 3723.6 3882.2 +69.662 311.93 564.91 805.25 1209.7 1451.4 1691.9 1932.1 2044.2 2329.4 2657.1 2908.5 3142.8 3390.5 3597.3 3991.1 +183.42 431.34 562.41 809.57 1086.3 1308.9 1555.9 1831.3 2057 2326.9 2591.3 2831.4 3113.9 3367.9 3555.3 3956 +188.49 442.39 572.4 810.76 1065 1326.7 1564.3 1803.4 2060.4 2322.4 2607.2 2824.1 3110.2 3363.9 3644.1 3819.6 +177 437.76 569.82 819.66 1064.1 1309.2 1685.7 1957.5 2066.9 2313.8 2593.2 2847 3116.8 3364.5 3727.3 3881.6 +193.9 441.93 586.9 804.98 1062.5 1312.4 1542.4 1793.1 2073.9 2314.7 2587.8 2845.9 3112.4 3359.8 +193.01 440.26 555.64 814.08 1056.3 1315 1689.9 1961.4 2049.1 2305 2593.9 2847.5 3110.6 3361.1 3711.6 3914.7 +194.71 437.57 566.18 806.73 1069.2 1314.6 1682.7 1942.2 2061.8 2304.6 2607.6 2841.7 3082.9 3330.3 3679.7 3848.2 +184.88 441.22 570.92 794.35 1063.7 1309.9 1678.7 1930 2058 2321.3 2606.7 2845 3084.8 3337.3 3640 3952.1 +189.66 443.59 560.67 816.89 1070.4 1303.4 1550.1 1815.5 2057.6 2323.7 2587.1 2843.5 3086.6 3333.6 3618.2 3815.4 +190.41 440.77 568.96 808.56 1073.8 1322.1 1686.5 1952.8 2068.7 2335.7 2595.7 2845.4 3086 3333.5 3635.6 3939.3 +181.16 440.67 577.54 823.52 1052.5 1322.3 1578.4 1822.2 2079.4 2309.1 2596.9 2851.9 3083.5 3335.1 3531.2 3770.6 +181.09 434.97 687.15 943.33 1192.9 1444 1699.4 1942 2194.6 2445.9 2549.4 2785.1 3056.5 3308.2 3620.5 3932.7 +186.7 446.53 688.18 942.86 1186.1 1441.9 1688.1 1922.2 2196.6 2455.3 2534.8 2776.5 3060.3 3309.4 3514.1 3808.6 +196.76 446 681.26 948.27 1195.8 1433.1 1699 1933 2201.2 2461.4 2547.4 2777.8 3055.7 3307.1 3590.6 3952.8 +200.68 427.11 695.67 946.42 1178.6 1440.1 1538.4 1809 2199.8 2432.5 2531.6 2793.2 3056.6 3308.6 3510.6 3928.1 +190.83 429.57 698.73 931.16 1190.6 1428.9 1698.3 1935 2176.8 2424.7 2530.5 2766.9 3062 3309.7 3689.8 +181.47 441.93 682.32 943.01 1190.1 1459.1 1570.6 1819.6 2189.8 2437.9 2543.3 2782.8 3025.9 3280.2 3581 3855.9 +191.38 435.69 702.76 935.62 1188.3 1438.3 1564.2 1823.9 2191.3 2444.9 2531.9 2782.4 3030.7 3275.7 3677.7 3829.2 +191.97 433.85 686.29 932.65 1183.1 1432.7 1563.9 1826.5 2214.1 2436.8 2529.8 2778.3 3028.3 3281.8 3582 3863.4 +189.51 453.21 691.3 940.86 1180.1 1430.1 1567.1 1835 2199 2448.2 2526.7 2773.8 3030.5 3280.1 3576.2 3893.6 +190.88 435.48 692.66 940.51 1189.5 1448.9 1575.1 1824.2 2190.8 2425.9 2530.6 2783.3 3033.3 3279.5 3733 3838.9 diff --git a/examples/SPIKY_testdata.txt b/examples/SPIKY_testdata.txt deleted file mode 100755 index c8bea67..0000000 --- a/examples/SPIKY_testdata.txt +++ /dev/null @@ -1,43 +0,0 @@ -64.886 305.81 696 937.77 1059.7 1322.2 1576.1 1808.1 2121.5 2381.1 2728.6 2966.9 3223.7 3473.7 3644.3 3936.3 -65.553 307.49 696.63 948.66 1070.4 1312.2 1712.7 1934.3 2117.6 2356.9 2727.3 2980.6 3226.9 3475.7 3726.4 3944 -# test comment -69.064 319.1 688.32 947.85 1071.8 1300.8 1697.2 1930.6 2139.4 2354.2 2723.7 
2963.6 3221.3 3470.1 -59.955 313.83 692.23 955.95 1070.4 1319.6 1681.9 1963.5 2151.4 2373.8 2729.4 2971.2 3220.2 3475.5 3632.3 3788.9 -# empty line - -59.977 306.84 686.09 935.08 1059.9 1325.9 1543.4 1821.9 2150.2 2390.4 2724.5 2969.6 3222.5 3471.5 3576 3913.9 -66.415 313.41 688.83 931.43 1051.8 1304.6 1555.6 1820.2 2150.5 2383.1 2723.4 2947.7 3196.6 3443.5 3575 3804.9 -66.449 311.02 689.26 947.12 1058.9 1286.6 1708.2 1957.3 2124.8 2375.7 2709.4 2977.6 3191.1 3449.6 3590.4 3831.2 -63.764 318.45 697.48 936.97 1059.3 1325 1687.9 1944.7 2132.5 2377.1 2713.1 2976.6 3196.8 3442.6 3741.6 3998.3 -63.906 314.79 693.26 937.12 1065.9 1315.8 1584.3 1821.5 2126.3 2396.8 2709.1 2967 3197.4 3444 3732.8 3849.5 -69.493 316.62 689.81 943.62 1071.9 1296.3 1654.8 1931.9 2127.5 2390.6 2708.9 2950.4 3194.8 3445.2 3670.1 3903.3 -61.789 317.53 555.82 813.15 1198.7 1448.7 1686.7 1943.5 2060.7 2311.4 2658.2 2900.2 3167.4 3418.2 3617.3 3771 -64.098 309.86 567.27 813.91 1182 1464.3 1576.8 1822.5 2063.1 2311.7 2655.8 2911.7 3168.3 3418.2 3586.4 3999.7 -68.59 315.5 559.52 806.23 1182.5 1441.1 1567.2 1804.8 2074.9 2315.8 2655.1 2913.2 3165.9 3419.5 3648.1 3884.4 -66.507 314.42 556.42 814.83 1182.5 1440.3 1701.3 1911.1 2069.7 2319.3 2662.3 2903.2 3167.4 3418.5 3545 3893.9 -72.744 318.45 554.4 819.64 1186.9 1449.7 1676 1957.4 2051.4 2302.8 2657.8 2916.2 3169.4 3416.7 3570.4 3884.8 -64.779 324.42 560.56 828.99 1174.8 1439.9 1563.7 1790.6 2067.7 2287.6 2657.4 2905.2 3139.2 3389.1 3507.8 3807.5 -64.852 316.63 568.89 815.61 1198.3 1454.1 1710.6 1933.9 2091.5 2309.6 2660.9 2907.5 3137.2 3389.3 3617.2 -63.089 314.52 553.8 827.06 1183.9 1457.6 1558.9 1808.3 2064.5 2337.1 2653.6 2897 3143.7 3385.7 3668.7 3803.8 -62.23 315.16 564.35 812.15 1199.6 1448.9 1562.7 1839.1 2069.7 2308.9 2649.6 2919.7 3141 3389.9 3723.6 3882.2 -69.662 311.93 564.91 805.25 1209.7 1451.4 1691.9 1932.1 2044.2 2329.4 2657.1 2908.5 3142.8 3390.5 3597.3 3991.1 -183.42 431.34 562.41 809.57 1086.3 1308.9 1555.9 1831.3 2057 2326.9 2591.3 2831.4 3113.9 3367.9 3555.3 3956 -188.49 442.39 572.4 810.76 1065 1326.7 1564.3 1803.4 2060.4 2322.4 2607.2 2824.1 3110.2 3363.9 3644.1 3819.6 -177 437.76 569.82 819.66 1064.1 1309.2 1685.7 1957.5 2066.9 2313.8 2593.2 2847 3116.8 3364.5 3727.3 3881.6 -193.9 441.93 586.9 804.98 1062.5 1312.4 1542.4 1793.1 2073.9 2314.7 2587.8 2845.9 3112.4 3359.8 -193.01 440.26 555.64 814.08 1056.3 1315 1689.9 1961.4 2049.1 2305 2593.9 2847.5 3110.6 3361.1 3711.6 3914.7 -194.71 437.57 566.18 806.73 1069.2 1314.6 1682.7 1942.2 2061.8 2304.6 2607.6 2841.7 3082.9 3330.3 3679.7 3848.2 -184.88 441.22 570.92 794.35 1063.7 1309.9 1678.7 1930 2058 2321.3 2606.7 2845 3084.8 3337.3 3640 3952.1 -189.66 443.59 560.67 816.89 1070.4 1303.4 1550.1 1815.5 2057.6 2323.7 2587.1 2843.5 3086.6 3333.6 3618.2 3815.4 -190.41 440.77 568.96 808.56 1073.8 1322.1 1686.5 1952.8 2068.7 2335.7 2595.7 2845.4 3086 3333.5 3635.6 3939.3 -181.16 440.67 577.54 823.52 1052.5 1322.3 1578.4 1822.2 2079.4 2309.1 2596.9 2851.9 3083.5 3335.1 3531.2 3770.6 -181.09 434.97 687.15 943.33 1192.9 1444 1699.4 1942 2194.6 2445.9 2549.4 2785.1 3056.5 3308.2 3620.5 3932.7 -186.7 446.53 688.18 942.86 1186.1 1441.9 1688.1 1922.2 2196.6 2455.3 2534.8 2776.5 3060.3 3309.4 3514.1 3808.6 -196.76 446 681.26 948.27 1195.8 1433.1 1699 1933 2201.2 2461.4 2547.4 2777.8 3055.7 3307.1 3590.6 3952.8 -200.68 427.11 695.67 946.42 1178.6 1440.1 1538.4 1809 2199.8 2432.5 2531.6 2793.2 3056.6 3308.6 3510.6 3928.1 -190.83 429.57 698.73 931.16 1190.6 1428.9 1698.3 1935 2176.8 2424.7 2530.5 2766.9 3062 3309.7 3689.8 
-181.47 441.93 682.32 943.01 1190.1 1459.1 1570.6 1819.6 2189.8 2437.9 2543.3 2782.8 3025.9 3280.2 3581 3855.9 -191.38 435.69 702.76 935.62 1188.3 1438.3 1564.2 1823.9 2191.3 2444.9 2531.9 2782.4 3030.7 3275.7 3677.7 3829.2 -191.97 433.85 686.29 932.65 1183.1 1432.7 1563.9 1826.5 2214.1 2436.8 2529.8 2778.3 3028.3 3281.8 3582 3863.4 -189.51 453.21 691.3 940.86 1180.1 1430.1 1567.1 1835 2199 2448.2 2526.7 2773.8 3030.5 3280.1 3576.2 3893.6 -190.88 435.48 692.66 940.51 1189.5 1448.9 1575.1 1824.2 2190.8 2425.9 2530.6 2783.3 3033.3 3279.5 3733 3838.9 diff --git a/examples/isi_matrix.py b/examples/isi_matrix.py index 3297d3d..2a4d075 100644 --- a/examples/isi_matrix.py +++ b/examples/isi_matrix.py @@ -17,7 +17,7 @@ import matplotlib.pyplot as plt import pyspike as spk # first load the data, interval ending time = 4000, start=0 (default) -spike_trains = spk.load_spike_trains_from_txt("SPIKY_testdata.txt", 4000) +spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", 4000) print(len(spike_trains)) diff --git a/examples/merge.py b/examples/merge.py index 55c7f0a..726d32b 100644 --- a/examples/merge.py +++ b/examples/merge.py @@ -15,7 +15,7 @@ import matplotlib.pyplot as plt import pyspike as spk # first load the data, ending time = 4000 -spike_trains = spk.load_spike_trains_from_txt("SPIKY_testdata.txt", 4000) +spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", 4000) spikes = spk.merge_spike_trains([spike_trains[0], spike_trains[1]]) diff --git a/examples/plot.py b/examples/plot.py index d7e2173..4ff75c4 100644 --- a/examples/plot.py +++ b/examples/plot.py @@ -15,7 +15,7 @@ import matplotlib.pyplot as plt import pyspike as spk -spike_trains = spk.load_spike_trains_from_txt("SPIKY_testdata.txt", +spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", time_interval=(0,4000)) # plot the spike time -- cgit v1.2.3 From 4274c328a4927b392036d1c3b759b0787b05f300 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 13 Oct 2014 10:47:18 +0200 Subject: code formatting following PEP8 --- examples/isi_matrix.py | 2 -- examples/plot.py | 6 ++-- pyspike/distances.py | 77 ++++++++++++++++++++++++----------------------- pyspike/function.py | 35 +++++++++++---------- pyspike/python_backend.py | 72 ++++++++++++++++++++++---------------------- pyspike/spikes.py | 48 ++++++++++++++--------------- test/test_distance.py | 37 ++++++++++++----------- test/test_function.py | 28 ++++++++++------- test/test_spikes.py | 27 ++++++++--------- 9 files changed, 168 insertions(+), 164 deletions(-) (limited to 'examples/plot.py') diff --git a/examples/isi_matrix.py b/examples/isi_matrix.py index 2a4d075..db740dd 100644 --- a/examples/isi_matrix.py +++ b/examples/isi_matrix.py @@ -11,7 +11,6 @@ Distributed under the MIT License (MIT) from __future__ import print_function -import numpy as np import matplotlib.pyplot as plt import pyspike as spk @@ -25,4 +24,3 @@ m = spk.isi_distance_matrix(spike_trains) plt.imshow(m, interpolation='none') plt.show() - diff --git a/examples/plot.py b/examples/plot.py index 4ff75c4..5c3ad4a 100644 --- a/examples/plot.py +++ b/examples/plot.py @@ -15,11 +15,11 @@ import matplotlib.pyplot as plt import pyspike as spk -spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", - time_interval=(0,4000)) +spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", + time_interval=(0, 4000)) # plot the spike time -for (i,spikes) in enumerate(spike_trains): +for (i, spikes) in enumerate(spike_trains): plt.plot(spikes, 
i*np.ones_like(spikes), 'o') f = spk.isi_distance(spike_trains[0], spike_trains[1]) diff --git a/pyspike/distances.py b/pyspike/distances.py index db04c4e..b2eec92 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -17,7 +17,7 @@ from pyspike import PieceWiseConstFunc, PieceWiseLinFunc # isi_distance ############################################################ def isi_distance(spikes1, spikes2): - """ Computes the instantaneous isi-distance S_isi (t) of the two given + """ Computes the instantaneous isi-distance S_isi (t) of the two given spike trains. The spike trains are expected to have auxiliary spikes at the beginning and end of the interval. Use the function add_auxiliary_spikes to add those spikes to the spike train. @@ -27,9 +27,9 @@ def isi_distance(spikes1, spikes2): - PieceWiseConstFunc describing the isi-distance. """ # check for auxiliary spikes - first and last spikes should be identical - assert spikes1[0]==spikes2[0], \ + assert spikes1[0] == spikes2[0], \ "Given spike trains seems not to have auxiliary spikes!" - assert spikes1[-1]==spikes2[-1], \ + assert spikes1[-1] == spikes2[-1], \ "Given spike trains seems not to have auxiliary spikes!" # cython implementation @@ -53,9 +53,9 @@ def spike_distance(spikes1, spikes2): - PieceWiseLinFunc describing the spike-distance. """ # check for auxiliary spikes - first and last spikes should be identical - assert spikes1[0]==spikes2[0], \ + assert spikes1[0] == spikes2[0], \ "Given spike trains seems not to have auxiliary spikes!" - assert spikes1[-1]==spikes2[-1], \ + assert spikes1[-1] == spikes2[-1], \ "Given spike trains seems not to have auxiliary spikes!" # cython implementation @@ -74,33 +74,33 @@ def multi_distance(spike_trains, pair_distance_func, indices=None): use isi_distance_multi or spike_distance_multi instead. Computes the multi-variate distance for a set of spike-trains using the - pair_dist_func to compute pair-wise distances. That is it computes the + pair_dist_func to compute pair-wise distances. That is it computes the average distance of all pairs of spike-trains: - S(t) = 2/((N(N-1)) sum_{} S_{i,j}, + S(t) = 2/((N(N-1)) sum_{} S_{i,j}, where the sum goes over all pairs . Args: - spike_trains: list of spike trains - pair_distance_func: function computing the distance of two spike trains - - indices: list of indices defining which spike trains to use, + - indices: list of indices defining which spike trains to use, if None all given spike trains are used (default=None) Returns: - The averaged multi-variate distance of all pairs """ - if indices==None: + if indices is None: indices = np.arange(len(spike_trains)) indices = np.array(indices) # check validity of indices assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \ - "Invalid index list." + "Invalid index list." 
# generate a list of possible index pairs - pairs = [(i,j) for i in indices for j in indices[i+1:]] + pairs = [(i, j) for i in indices for j in indices[i+1:]] # start with first pair - (i,j) = pairs[0] + (i, j) = pairs[0] average_dist = pair_distance_func(spike_trains[i], spike_trains[j]) - for (i,j) in pairs[1:]: + for (i, j) in pairs[1:]: current_dist = pair_distance_func(spike_trains[i], spike_trains[j]) - average_dist.add(current_dist) # add to the average - average_dist.mul_scalar(1.0/len(pairs)) # normalize + average_dist.add(current_dist) # add to the average + average_dist.mul_scalar(1.0/len(pairs)) # normalize return average_dist @@ -113,45 +113,46 @@ def multi_distance_par(spike_trains, pair_distance_func, indices=None): """ num_threads = 2 - lock = threading.Lock() + def run(spike_trains, index_pairs, average_dist): - (i,j) = index_pairs[0] + (i, j) = index_pairs[0] # print(i,j) this_avrg = pair_distance_func(spike_trains[i], spike_trains[j]) - for (i,j) in index_pairs[1:]: + for (i, j) in index_pairs[1:]: # print(i,j) current_dist = pair_distance_func(spike_trains[i], spike_trains[j]) this_avrg.add(current_dist) with lock: - average_dist.add(this_avrg) + average_dist.add(this_avrg) - if indices==None: + if indices is None: indices = np.arange(len(spike_trains)) indices = np.array(indices) # check validity of indices assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \ - "Invalid index list." + "Invalid index list." # generate a list of possible index pairs - pairs = [(i,j) for i in indices for j in indices[i+1:]] + pairs = [(i, j) for i in indices for j in indices[i+1:]] num_pairs = len(pairs) # start with first pair - (i,j) = pairs[0] + (i, j) = pairs[0] average_dist = pair_distance_func(spike_trains[i], spike_trains[j]) # remove the one we already computed pairs = pairs[1:] # distribute the rest into num_threads pieces - clustered_pairs = [ pairs[i::num_threads] for i in xrange(num_threads) ] + clustered_pairs = [pairs[n::num_threads] for n in xrange(num_threads)] threads = [] for pairs in clustered_pairs: - t = threading.Thread(target=run, args=(spike_trains, pairs, average_dist)) + t = threading.Thread(target=run, args=(spike_trains, pairs, + average_dist)) threads.append(t) t.start() for t in threads: t.join() - average_dist.mul_scalar(1.0/num_pairs) # normalize + average_dist.mul_scalar(1.0/num_pairs) # normalize return average_dist @@ -161,11 +162,11 @@ def multi_distance_par(spike_trains, pair_distance_func, indices=None): def isi_distance_multi(spike_trains, indices=None): """ computes the multi-variate isi-distance for a set of spike-trains. That is the average isi-distance of all pairs of spike-trains: - S(t) = 2/((N(N-1)) sum_{} S_{i,j}, + S(t) = 2/((N(N-1)) sum_{} S_{i,j}, where the sum goes over all pairs Args: - spike_trains: list of spike trains - - indices: list of indices defining which spike trains to use, + - indices: list of indices defining which spike trains to use, if None all given spike trains are used (default=None) Returns: - A PieceWiseConstFunc representing the averaged isi distance S @@ -177,13 +178,13 @@ def isi_distance_multi(spike_trains, indices=None): # spike_distance_multi ############################################################ def spike_distance_multi(spike_trains, indices=None): - """ computes the multi-variate spike-distance for a set of spike-trains. + """ computes the multi-variate spike-distance for a set of spike-trains. 
That is the average spike-distance of all pairs of spike-trains: - S(t) = 2/((N(N-1)) sum_{} S_{i,j}, + S(t) = 2/((N(N-1)) sum_{} S_{i, j}, where the sum goes over all pairs Args: - spike_trains: list of spike trains - - indices: list of indices defining which spike-trains to use, + - indices: list of indices defining which spike-trains to use, if None all given spike trains are used (default=None) Returns: - A PieceWiseLinFunc representing the averaged spike distance S @@ -198,21 +199,21 @@ def isi_distance_matrix(spike_trains, indices=None): - indices: list of indices defining which spike-trains to use if None all given spike-trains are used (default=None) Return: - - a 2D array of size len(indices)*len(indices) containing the average + - a 2D array of size len(indices)*len(indices) containing the average pair-wise isi-distance """ - if indices==None: + if indices is None: indices = np.arange(len(spike_trains)) indices = np.array(indices) # check validity of indices assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \ - "Invalid index list." + "Invalid index list." # generate a list of possible index pairs - pairs = [(i,j) for i in indices for j in indices[i+1:]] + pairs = [(i, j) for i in indices for j in indices[i+1:]] distance_matrix = np.zeros((len(indices), len(indices))) - for i,j in pairs: + for i, j in pairs: d = isi_distance(spike_trains[i], spike_trains[j]).abs_avrg() - distance_matrix[i,j] = d - distance_matrix[j,i] = d + distance_matrix[i, j] = d + distance_matrix[j, i] = d return distance_matrix diff --git a/pyspike/function.py b/pyspike/function.py index 243ef67..8107538 100644 --- a/pyspike/function.py +++ b/pyspike/function.py @@ -1,7 +1,7 @@ """ function.py -Module containing classes representing piece-wise constant and piece-wise linear -functions. +Module containing classes representing piece-wise constant and piece-wise +linear functions. Copyright 2014, Mario Mulansky @@ -35,7 +35,7 @@ class PieceWiseConstFunc: Args: - other: another PieceWiseConstFunc object Returns: - True if the two functions are equal up to `decimal` decimals, + True if the two functions are equal up to `decimal` decimals, False otherwise """ eps = 10.0**(-decimal) @@ -61,23 +61,23 @@ class PieceWiseConstFunc: """ Computes the average of the piece-wise const function: a = 1/T int f(x) dx where T is the length of the interval. Returns: - - the average a. + - the average a. """ return np.sum((self.x[1:]-self.x[:-1]) * self.y) / \ (self.x[-1]-self.x[0]) def abs_avrg(self): - """ Computes the average of the abs value of the piece-wise const + """ Computes the average of the abs value of the piece-wise const function: a = 1/T int |f(x)| dx where T is the length of the interval. Returns: - - the average a. + - the average a. """ return np.sum((self.x[1:]-self.x[:-1]) * np.abs(self.y)) / \ (self.x[-1]-self.x[0]) def add(self, f): - """ Adds another PieceWiseConst function to this function. + """ Adds another PieceWiseConst function to this function. Note: only functions defined on the same interval can be summed. Args: - f: PieceWiseConst function to be added. 
@@ -87,13 +87,13 @@ class PieceWiseConstFunc: # python implementation # from python_backend import add_piece_wise_const_python - # self.x, self.y = add_piece_wise_const_python(self.x, self.y, f.x, f.y) + # self.x, self.y = add_piece_wise_const_python(self.x, self.y, + # f.x, f.y) # cython version from cython_add import add_piece_wise_const_cython self.x, self.y = add_piece_wise_const_cython(self.x, self.y, f.x, f.y) - def mul_scalar(self, fac): """ Multiplies the function with a scalar value Args: @@ -113,10 +113,10 @@ class PieceWiseLinFunc: Args: - x: array of length N+1 defining the edges of the intervals of the pwc function. - - y1: array of length N defining the function values at the left of the - intervals. - - y2: array of length N defining the function values at the right of the + - y1: array of length N defining the function values at the left of the intervals. + - y2: array of length N defining the function values at the right of + the intervals. """ self.x = np.array(x) self.y1 = np.array(y1) @@ -128,7 +128,7 @@ class PieceWiseLinFunc: Args: - other: another PieceWiseLinFunc object Returns: - True if the two functions are equal up to `decimal` decimals, + True if the two functions are equal up to `decimal` decimals, False otherwise """ eps = 10.0**(-decimal) @@ -153,7 +153,7 @@ class PieceWiseLinFunc: """ Computes the average of the piece-wise linear function: a = 1/T int f(x) dx where T is the length of the interval. Returns: - - the average a. + - the average a. """ return np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2)) / \ (self.x[-1]-self.x[0]) @@ -162,13 +162,13 @@ class PieceWiseLinFunc: """ Computes the absolute average of the piece-wise linear function: a = 1/T int |f(x)| dx where T is the length of the interval. Returns: - - the average a. + - the average a. """ return np.sum((self.x[1:]-self.x[:-1]) * 0.5 * (np.abs(self.y1)+np.abs(self.y2)))/(self.x[-1]-self.x[0]) def add(self, f): - """ Adds another PieceWiseLin function to this function. + """ Adds another PieceWiseLin function to this function. Note: only functions defined on the same interval can be summed. Args: - f: PieceWiseLin function to be added. @@ -178,7 +178,7 @@ class PieceWiseLinFunc: # python implementation # from python_backend import add_piece_wise_lin_python - # self.x, self.y1, self.y2 = add_piece_wise_lin_python( + # self.x, self.y1, self.y2 = add_piece_wise_lin_python( # self.x, self.y1, self.y2, f.x, f.y1, f.y2) # cython version @@ -186,7 +186,6 @@ class PieceWiseLinFunc: self.x, self.y1, self.y2 = add_piece_wise_lin_cython( self.x, self.y1, self.y2, f.x, f.y1, f.y2) - def mul_scalar(self, fac): """ Multiplies the function with a scalar value Args: diff --git a/pyspike/python_backend.py b/pyspike/python_backend.py index e5b74e9..cf1a92f 100644 --- a/pyspike/python_backend.py +++ b/pyspike/python_backend.py @@ -1,6 +1,6 @@ """ python_backend.py -Collection of python functions that can be used instead of the cython +Collection of python functions that can be used instead of the cython implementation. Copyright 2014, Mario Mulansky @@ -21,18 +21,18 @@ def isi_distance_python(s1, s2): """ Plain Python implementation of the isi distance. 
""" # compute the interspike interval - nu1 = s1[1:]-s1[:-1] - nu2 = s2[1:]-s2[:-1] - + nu1 = s1[1:] - s1[:-1] + nu2 = s2[1:] - s2[:-1] + # compute the isi-distance - spike_events = np.empty(len(nu1)+len(nu2)) + spike_events = np.empty(len(nu1) + len(nu2)) spike_events[0] = s1[0] # the values have one entry less - the number of intervals between events - isi_values = np.empty(len(spike_events)-1) + isi_values = np.empty(len(spike_events) - 1) # add the distance of the first events # isi_values[0] = nu1[0]/nu2[0] - 1.0 if nu1[0] <= nu2[0] \ # else 1.0 - nu2[0]/nu1[0] - isi_values[0] = (nu1[0]-nu2[0])/max(nu1[0],nu2[0]) + isi_values[0] = (nu1[0] - nu2[0]) / max(nu1[0], nu2[0]) index1 = 0 index2 = 0 index = 1 @@ -49,28 +49,28 @@ def isi_distance_python(s1, s2): if index2 >= len(nu2): break spike_events[index] = s2[index2] - else: # s1[index1+1] == s2[index2+1] + else: # s1[index1 + 1] == s2[index2 + 1] index1 += 1 index2 += 1 if (index1 >= len(nu1)) or (index2 >= len(nu2)): break spike_events[index] = s1[index1] # compute the corresponding isi-distance - isi_values[index] = (nu1[index1]-nu2[index2]) / \ - max(nu1[index1], nu2[index2]) + isi_values[index] = (nu1[index1] - nu2[index2]) / \ + max(nu1[index1], nu2[index2]) index += 1 # the last event is the interval end spike_events[index] = s1[-1] - # use only the data added above + # use only the data added above # could be less than original length due to equal spike times - return PieceWiseConstFunc(spike_events[:index+1], isi_values[:index]) + return PieceWiseConstFunc(spike_events[:index + 1], isi_values[:index]) ############################################################ # get_min_dist ############################################################ def get_min_dist(spike_time, spike_train, start_index=0): - """ Returns the minimal distance |spike_time - spike_train[i]| + """ Returns the minimal distance |spike_time - spike_train[i]| with i>=start_index. """ d = abs(spike_time - spike_train[start_index]) @@ -99,18 +99,18 @@ def spike_distance_python(spikes1, spikes2): - PieceWiseLinFunc describing the spike-distance. """ # check for auxiliary spikes - first and last spikes should be identical - assert spikes1[0]==spikes2[0], \ + assert spikes1[0] == spikes2[0], \ "Given spike trains seems not to have auxiliary spikes!" - assert spikes1[-1]==spikes2[-1], \ + assert spikes1[-1] == spikes2[-1], \ "Given spike trains seems not to have auxiliary spikes!" 
# shorter variables t1 = spikes1 t2 = spikes2 - spike_events = np.empty(len(t1)+len(t2)-2) + spike_events = np.empty(len(t1) + len(t2) - 2) spike_events[0] = t1[0] - y_starts = np.empty(len(spike_events)-1) - y_ends = np.empty(len(spike_events)-1) + y_starts = np.empty(len(spike_events) - 1) + y_ends = np.empty(len(spike_events) - 1) index1 = 0 index2 = 0 @@ -133,9 +133,10 @@ def spike_distance_python(spikes1, spikes2): break spike_events[index] = t1[index1] # first calculate the previous interval end value - dt_p1 = dt_f1 # the previous time now was the following time before + dt_p1 = dt_f1 # the previous time was the following time before s1 = dt_p1 - s2 = (dt_p2*(t2[index2+1]-t1[index1]) + dt_f2*(t1[index1]-t2[index2])) / isi2 + s2 = (dt_p2*(t2[index2+1]-t1[index1]) + + dt_f2*(t1[index1]-t2[index2])) / isi2 y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) # now the next interval start value dt_f1 = get_min_dist(t1[index1+1], t2, index2) @@ -148,8 +149,9 @@ def spike_distance_python(spikes1, spikes2): break spike_events[index] = t2[index2] # first calculate the previous interval end value - dt_p2 = dt_f2 # the previous time now was the following time before - s1 = (dt_p1*(t1[index1+1]-t2[index2]) + dt_f1*(t2[index2]-t1[index1])) / isi1 + dt_p2 = dt_f2 # the previous time was the following time before + s1 = (dt_p1*(t1[index1+1]-t2[index2]) + + dt_f1*(t2[index2]-t1[index1])) / isi1 s2 = dt_p2 y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) # now the next interval start value @@ -158,7 +160,7 @@ def spike_distance_python(spikes1, spikes2): isi2 = t2[index2+1]-t2[index2] # s2 is the same as above, thus we can compute y2 immediately y_starts[index] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) - else: # t1[index1+1] == t2[index2+1] - generate only one event + else: # t1[index1+1] == t2[index2+1] - generate only one event index1 += 1 index2 += 1 if (index1+1 >= len(t1)) or (index2+1 >= len(t2)): @@ -183,9 +185,9 @@ def spike_distance_python(spikes1, spikes2): s1 = dt_p1*(t1[-1]-t1[-2])/isi1 s2 = dt_p2*(t2[-1]-t2[-2])/isi2 y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) - # use only the data added above + # use only the data added above # could be less than original length due to equal spike times - return PieceWiseLinFunc(spike_events[:index+1], + return PieceWiseLinFunc(spike_events[:index+1], y_starts[:index], y_ends[:index]) @@ -209,7 +211,7 @@ def add_piece_wise_const_python(x1, y1, x2, y2): elif x1[index1+1] > x2[index2+1]: index2 += 1 x_new[index] = x2[index2] - else: # x1[index1+1] == x2[index2+1]: + else: # x1[index1+1] == x2[index2+1]: index1 += 1 index2 += 1 x_new[index] = x1[index1] @@ -217,15 +219,13 @@ def add_piece_wise_const_python(x1, y1, x2, y2): # one array reached the end -> copy the contents of the other to the end if index1+1 < len(y1): x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] - y_new[index+1:index+1+len(y1)-index1-1] = y1[index1+1:] + \ - y2[-1] + y_new[index+1:index+1+len(y1)-index1-1] = y1[index1+1:] + y2[-1] index += len(x1)-index1-2 elif index2+1 < len(y2): x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] - y_new[index+1:index+1+len(y2)-index2-1] = y2[index2+1:] + \ - y1[-1] + y_new[index+1:index+1+len(y2)-index2-1] = y2[index2+1:] + y1[-1] index += len(x2)-index2-2 - else: # both arrays reached the end simultaneously + else: # both arrays reached the end simultaneously # only the last x-value missing x_new[index+1] = x1[-1] # the last value is again the end of the interval @@ -244,9 +244,9 @@ def 
add_piece_wise_lin_python(x1, y11, y12, x2, y21, y22): y2_new = np.empty_like(y1_new) x_new[0] = x1[0] y1_new[0] = y11[0] + y21[0] - index1 = 0 # index for self - index2 = 0 # index for f - index = 0 # index for new + index1 = 0 # index for self + index2 = 0 # index for f + index = 0 # index for new while (index1+1 < len(y11)) and (index2+1 < len(y21)): # print(index1+1, x1[index1+1], self.y[index1+1], x_new[index]) if x1[index1+1] < x2[index2+1]: @@ -272,7 +272,7 @@ def add_piece_wise_lin_python(x1, y11, y12, x2, y21, y22): x_new[index] = x2[index2] # and the starting value for the next interval y1_new[index] = y21[index2] + y - else: # x1[index1+1] == x2[index2+1]: + else: # x1[index1+1] == x2[index2+1]: y2_new[index] = y12[index1] + y22[index2] index1 += 1 index2 += 1 @@ -297,7 +297,7 @@ def add_piece_wise_lin_python(x1, y11, y12, x2, y21, y22): y1_new[index+1:index+1+len(y21)-index2-1] = y21[index2+1:] + y y2_new[index:index+len(y22)-index2-1] = y22[index2:-1] + y index += len(x2)-index2-2 - else: # both arrays reached the end simultaneously + else: # both arrays reached the end simultaneously # only the last x-value missing x_new[index+1] = x1[-1] # finally, the end value for the last interval diff --git a/pyspike/spikes.py b/pyspike/spikes.py index 6ea94de..c496ab8 100644 --- a/pyspike/spikes.py +++ b/pyspike/spikes.py @@ -31,11 +31,11 @@ def add_auxiliary_spikes(spike_train, time_interval): except: T_start = 0 T_end = time_interval - + assert spike_train[0] >= T_start, \ - "Spike train has events before the given start time" + "Spike train has events before the given start time" assert spike_train[-1] <= T_end, \ - "Spike train has events after the given end time" + "Spike train has events after the given end time" if spike_train[0] != T_start: spike_train = np.insert(spike_train, 0, T_start) if spike_train[-1] != T_end: @@ -64,16 +64,16 @@ def spike_train_from_string(s, sep=' ', sort=True): ############################################################ # load_spike_trains_txt ############################################################ -def load_spike_trains_from_txt(file_name, time_interval=None, +def load_spike_trains_from_txt(file_name, time_interval=None, separator=' ', comment='#', sort=True): - """ Loads a number of spike trains from a text file. Each line of the text - file should contain one spike train as a sequence of spike times separated - by `separator`. Empty lines as well as lines starting with `comment` are - neglected. The `time_interval` represents the start and the end of the spike - trains and it is used to add auxiliary spikes at the beginning and end of - each spike train. However, if `time_interval == None`, no auxiliary spikes - are added, but note that the Spike and ISI distance both require auxiliary - spikes. + """ Loads a number of spike trains from a text file. Each line of the text + file should contain one spike train as a sequence of spike times separated + by `separator`. Empty lines as well as lines starting with `comment` are + neglected. The `time_interval` represents the start and the end of the + spike trains and it is used to add auxiliary spikes at the beginning and + end of each spike train. However, if `time_interval == None`, no auxiliary + spikes are added, but note that the Spike and ISI distance both require + auxiliary spikes. Args: - file_name: The name of the text file. 
- time_interval: A pair (T_start, T_end) of values representing the start @@ -87,10 +87,10 @@ def load_spike_trains_from_txt(file_name, time_interval=None, spike_trains = [] spike_file = open(file_name, 'r') for line in spike_file: - if len(line) > 1 and not line.startswith(comment): + if len(line) > 1 and not line.startswith(comment): # use only the lines with actual data and not commented spike_train = spike_train_from_string(line, separator, sort) - if not time_interval == None: # add auxiliary spikes if times given + if time_interval is not None: # add auxil. spikes if times given spike_train = add_auxiliary_spikes(spike_train, time_interval) spike_trains.append(spike_train) return spike_trains @@ -109,19 +109,19 @@ def merge_spike_trains(spike_trains): # get the lengths of the spike trains lens = np.array([len(st) for st in spike_trains]) merged_spikes = np.empty(np.sum(lens)) - index = 0 # the index for merged_spikes - indices = np.zeros_like(lens) # indices of the spike trains - index_list = np.arange(len(indices)) # indices of indices of spike trains - # that have not yet reached the end + index = 0 # the index for merged_spikes + indices = np.zeros_like(lens) # indices of the spike trains + index_list = np.arange(len(indices)) # indices of indices of spike trains + # that have not yet reached the end # list of the possible events in the spike trains vals = [spike_trains[i][indices[i]] for i in index_list] while len(index_list) > 0: - i = np.argmin(vals) # the next spike is the minimum - merged_spikes[index] = vals[i] # put it to the merged spike train + i = np.argmin(vals) # the next spike is the minimum + merged_spikes[index] = vals[i] # put it to the merged spike train i = index_list[i] - index += 1 # next index of merged spike train - indices[i] += 1 # next index for the chosen spike train - if indices[i] >= lens[i]: # remove spike train index if ended + index += 1 # next index of merged spike train + indices[i] += 1 # next index for the chosen spike train + if indices[i] >= lens[i]: # remove spike train index if ended index_list = index_list[index_list != i] - vals = [spike_trains[i][indices[i]] for i in index_list] + vals = [spike_trains[n][indices[n]] for n in index_list] return merged_spikes diff --git a/test/test_distance.py b/test/test_distance.py index dafe693..3371cbd 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -22,8 +22,8 @@ def test_isi(): t2 = np.array([0.3, 0.45, 0.8, 0.9, 0.95]) # pen&paper calculation of the isi distance - expected_times = [0.0,0.2,0.3,0.4,0.45,0.6,0.7,0.8,0.9,0.95,1.0] - expected_isi = [-0.1/0.3, -0.1/0.3, 0.05/0.2, 0.05/0.2, -0.15/0.35, + expected_times = [0.0, 0.2, 0.3, 0.4, 0.45, 0.6, 0.7, 0.8, 0.9, 0.95, 1.0] + expected_isi = [-0.1/0.3, -0.1/0.3, 0.05/0.2, 0.05/0.2, -0.15/0.35, -0.25/0.35, -0.05/0.35, 0.2/0.3, 0.25/0.3, 0.25/0.3] t1 = spk.add_auxiliary_spikes(t1, 1.0) @@ -36,10 +36,10 @@ def test_isi(): assert_array_almost_equal(f.y, expected_isi, decimal=14) # check with some equal spike times - t1 = np.array([0.2,0.4,0.6]) - t2 = np.array([0.1,0.4,0.5,0.6]) + t1 = np.array([0.2, 0.4, 0.6]) + t2 = np.array([0.1, 0.4, 0.5, 0.6]) - expected_times = [0.0,0.1,0.2,0.4,0.5,0.6,1.0] + expected_times = [0.0, 0.1, 0.2, 0.4, 0.5, 0.6, 1.0] expected_isi = [0.1/0.2, -0.1/0.3, -0.1/0.3, 0.1/0.2, 0.1/0.2, -0.0/0.5] t1 = spk.add_auxiliary_spikes(t1, 1.0) @@ -56,11 +56,11 @@ def test_spike(): t2 = np.array([0.3, 0.45, 0.8, 0.9, 0.95]) # pen&paper calculation of the spike distance - expected_times = 
[0.0,0.2,0.3,0.4,0.45,0.6,0.7,0.8,0.9,0.95,1.0] + expected_times = [0.0, 0.2, 0.3, 0.4, 0.45, 0.6, 0.7, 0.8, 0.9, 0.95, 1.0] s1 = np.array([0.1, 0.1, (0.1*0.1+0.05*0.1)/0.2, 0.05, (0.05*0.15 * 2)/0.2, 0.15, 0.1, 0.1*0.2/0.3, 0.1**2/0.3, 0.1*0.05/0.3, 0.1]) - s2 = np.array([0.1, 0.1*0.2/0.3, 0.1, (0.1*0.05 * 2)/.15, 0.05, - (0.05*0.2+0.1*0.15)/0.35, (0.05*0.1+0.1*0.25)/0.35, + s2 = np.array([0.1, 0.1*0.2/0.3, 0.1, (0.1*0.05 * 2)/.15, 0.05, + (0.05*0.2+0.1*0.15)/0.35, (0.05*0.1+0.1*0.25)/0.35, 0.1, 0.1, 0.05, 0.05]) isi1 = np.array([0.2, 0.2, 0.2, 0.2, 0.2, 0.1, 0.3, 0.3, 0.3, 0.3]) isi2 = np.array([0.3, 0.3, 0.15, 0.15, 0.35, 0.35, 0.35, 0.1, 0.05, 0.05]) @@ -76,17 +76,17 @@ def test_spike(): assert_array_almost_equal(f.y2, expected_y2, decimal=14) # check with some equal spike times - t1 = np.array([0.2,0.4,0.6]) - t2 = np.array([0.1,0.4,0.5,0.6]) + t1 = np.array([0.2, 0.4, 0.6]) + t2 = np.array([0.1, 0.4, 0.5, 0.6]) - expected_times = [0.0,0.1,0.2,0.4,0.5,0.6,1.0] + expected_times = [0.0, 0.1, 0.2, 0.4, 0.5, 0.6, 1.0] s1 = np.array([0.1, 0.1*0.1/0.2, 0.1, 0.0, 0.0, 0.0, 0.0]) s2 = np.array([0.1*0.1/0.3, 0.1, 0.1*0.2/0.3, 0.0, 0.1, 0.0, 0.0]) isi1 = np.array([0.2, 0.2, 0.2, 0.2, 0.2, 0.4]) isi2 = np.array([0.3, 0.3, 0.3, 0.1, 0.1, 0.4]) expected_y1 = (s1[:-1]*isi2+s2[:-1]*isi1) / (0.5*(isi1+isi2)**2) expected_y2 = (s1[1:]*isi2+s2[1:]*isi1) / (0.5*(isi1+isi2)**2) - + t1 = spk.add_auxiliary_spikes(t1, 1.0) t2 = spk.add_auxiliary_spikes(t2, 1.0) f = spk.spike_distance(t1, t2) @@ -100,8 +100,8 @@ def check_multi_distance(dist_func, dist_func_multi): # generate spike trains: t1 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6, 0.7]), 1.0) t2 = spk.add_auxiliary_spikes(np.array([0.3, 0.45, 0.8, 0.9, 0.95]), 1.0) - t3 = spk.add_auxiliary_spikes(np.array([0.2,0.4,0.6]), 1.0) - t4 = spk.add_auxiliary_spikes(np.array([0.1,0.4,0.5,0.6]), 1.0) + t3 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6]), 1.0) + t4 = spk.add_auxiliary_spikes(np.array([0.1, 0.4, 0.5, 0.6]), 1.0) spike_trains = [t1, t2, t3, t4] f12 = dist_func(t1, t2) @@ -111,17 +111,17 @@ def check_multi_distance(dist_func, dist_func_multi): f24 = dist_func(t2, t4) f34 = dist_func(t3, t4) - f_multi = dist_func_multi(spike_trains, [0,1]) + f_multi = dist_func_multi(spike_trains, [0, 1]) assert f_multi.almost_equal(f12, decimal=14) f = copy(f12) f.add(f13) f.add(f23) f.mul_scalar(1.0/3) - f_multi = dist_func_multi(spike_trains, [0,1,2]) + f_multi = dist_func_multi(spike_trains, [0, 1, 2]) assert f_multi.almost_equal(f, decimal=14) - f.mul_scalar(3) # revert above normalization + f.mul_scalar(3) # revert above normalization f.add(f14) f.add(f24) f.add(f34) @@ -139,6 +139,7 @@ def test_multi_spike(): if __name__ == "__main__": - test_auxiliary_spikes() test_isi() test_spike() + test_multi_isi() + test_multi_spike() diff --git a/test/test_function.py b/test/test_function.py index c0fb3fd..ed7d6bc 100644 --- a/test/test_function.py +++ b/test/test_function.py @@ -10,18 +10,18 @@ Distributed under the MIT License (MIT) from __future__ import print_function import numpy as np from copy import copy -from numpy.testing import assert_equal, assert_almost_equal, \ - assert_array_almost_equal +from numpy.testing import assert_almost_equal, assert_array_almost_equal import pyspike as spk + def test_pwc(): # some random data x = [0.0, 1.0, 2.0, 2.5, 4.0] y = [1.0, -0.5, 1.5, 0.75] f = spk.PieceWiseConstFunc(x, y) xp, yp = f.get_plottable_data() - + xp_expected = [0.0, 1.0, 1.0, 2.0, 2.0, 2.5, 2.5, 4.0] yp_expected = [1.0, 1.0, -0.5, -0.5, 1.5, 1.5, 
0.75, 0.75] assert_array_almost_equal(xp, xp_expected, decimal=16) @@ -51,17 +51,18 @@ def test_pwc_add(): f2.add(f) assert_array_almost_equal(f2.x, x_expected, decimal=16) assert_array_almost_equal(f2.y, y_expected, decimal=16) - + f1.add(f2) # same x, but y doubled assert_array_almost_equal(f1.x, f2.x, decimal=16) assert_array_almost_equal(f1.y, 2*f2.y, decimal=16) + def test_pwc_mul(): x = [0.0, 1.0, 2.0, 2.5, 4.0] y = [1.0, -0.5, 1.5, 0.75] f = spk.PieceWiseConstFunc(x, y) - + f.mul_scalar(1.5) assert_array_almost_equal(f.x, x, decimal=16) assert_array_almost_equal(f.y, 1.5*np.array(y), decimal=16) @@ -75,15 +76,15 @@ def test_pwl(): y2 = [1.5, -0.4, 1.5, 0.25] f = spk.PieceWiseLinFunc(x, y1, y2) xp, yp = f.get_plottable_data() - + xp_expected = [0.0, 1.0, 1.0, 2.0, 2.0, 2.5, 2.5, 4.0] yp_expected = [1.0, 1.5, -0.5, -0.4, 1.5, 1.5, 0.75, 0.25] assert_array_almost_equal(xp, xp_expected, decimal=16) assert_array_almost_equal(yp, yp_expected, decimal=16) - + avrg_expected = (1.25 - 0.45 + 0.75 + 1.5*0.5) / 4.0 assert_almost_equal(f.avrg(), avrg_expected, decimal=16) - + abs_avrg_expected = (1.25 + 0.45 + 0.75 + 1.5*0.5) / 4.0 assert_almost_equal(f.abs_avrg(), abs_avrg_expected, decimal=16) @@ -113,7 +114,7 @@ def test_pwl_add(): assert_array_almost_equal(f2.x, x_expected, decimal=16) assert_array_almost_equal(f2.y1, y1_expected, decimal=16) assert_array_almost_equal(f2.y2, y2_expected, decimal=16) - + f1.add(f2) # same x, but y doubled assert_array_almost_equal(f1.x, f2.x, decimal=16) @@ -121,12 +122,12 @@ def test_pwl_add(): assert_array_almost_equal(f1.y2, 2*f2.y2, decimal=16) -def test_pwc_mul(): +def test_pwl_mul(): x = [0.0, 1.0, 2.0, 2.5, 4.0] y1 = [1.0, -0.5, 1.5, 0.75] y2 = [1.5, -0.4, 1.5, 0.25] f = spk.PieceWiseLinFunc(x, y1, y2) - + f.mul_scalar(1.5) assert_array_almost_equal(f.x, x, decimal=16) assert_array_almost_equal(f.y1, 1.5*np.array(y1), decimal=16) @@ -137,3 +138,8 @@ def test_pwc_mul(): if __name__ == "__main__": test_pwc() + test_pwc_add() + test_pwc_mul() + test_pwl() + test_pwl_add() + test_pwl_mul() diff --git a/test/test_spikes.py b/test/test_spikes.py index e008207..349e0bf 100644 --- a/test/test_spikes.py +++ b/test/test_spikes.py @@ -23,13 +23,13 @@ def test_auxiliary_spikes(): def test_load_from_txt(): - spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", - time_interval=(0,4000)) + spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", + time_interval=(0, 4000)) assert len(spike_trains) == 40 # check the first spike train - spike_times = [0, 64.886, 305.81, 696, 937.77, 1059.7, 1322.2, 1576.1, - 1808.1, 2121.5, 2381.1, 2728.6, 2966.9, 3223.7, 3473.7, + spike_times = [0, 64.886, 305.81, 696, 937.77, 1059.7, 1322.2, 1576.1, + 1808.1, 2121.5, 2381.1, 2728.6, 2966.9, 3223.7, 3473.7, 3644.3, 3936.3, 4000] assert_equal(spike_times, spike_trains[0]) @@ -39,15 +39,15 @@ def test_load_from_txt(): assert spike_train[-1] == 4000 # load without adding auxiliary spikes - spike_trains2 = spk.load_spike_trains_from_txt("PySpike_testdata.txt", - time_interval=None) + spike_trains2 = spk.load_spike_trains_from_txt("PySpike_testdata.txt", + time_interval=None) assert len(spike_trains2) == 40 # check auxiliary spikes for i in xrange(len(spike_trains)): - assert len(spike_trains[i]) == len(spike_trains2[i])+2 # two spikes less + assert len(spike_trains[i]) == len(spike_trains2[i])+2 # 2 spikes less -def check_merged_spikes( merged_spikes, spike_trains ): +def check_merged_spikes(merged_spikes, spike_trains): # create a flat array with all spike 
events all_spikes = np.array([]) for spike_train in spike_trains: @@ -55,7 +55,7 @@ def check_merged_spikes( merged_spikes, spike_trains ): indices = np.zeros_like(all_spikes, dtype='bool') # check if we find all the spike events in the original spike trains for x in merged_spikes: - i = np.where(all_spikes == x)[0][0] # the first axis and the first entry + i = np.where(all_spikes == x)[0][0] # first axis and first entry # change to something impossible so we dont find this event again all_spikes[i] = -1.0 indices[i] = True @@ -64,23 +64,22 @@ def check_merged_spikes( merged_spikes, spike_trains ): def test_merge_spike_trains(): # first load the data - spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", - time_interval=(0,4000)) + spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", + time_interval=(0, 4000)) spikes = spk.merge_spike_trains([spike_trains[0], spike_trains[1]]) # test if result is sorted assert((spikes == np.sort(spikes)).all()) # check merging - check_merged_spikes( spikes, [spike_trains[0], spike_trains[1]] ) + check_merged_spikes(spikes, [spike_trains[0], spike_trains[1]]) spikes = spk.merge_spike_trains(spike_trains) # test if result is sorted assert((spikes == np.sort(spikes)).all()) # check merging - check_merged_spikes( spikes, spike_trains ) + check_merged_spikes(spikes, spike_trains) if __name__ == "main": test_auxiliary_spikes() test_load_from_txt() test_merge_spike_trains() - -- cgit v1.2.3 From 5ce807943fab2ba233cff661e34e4d6a83397b99 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 13 Oct 2014 11:03:42 +0200 Subject: changed to BSD license --- License | 25 ++++++++----------------- examples/isi_matrix.py | 2 +- examples/merge.py | 2 +- examples/plot.py | 2 +- pyspike/__init__.py | 2 +- pyspike/cython_distance.pyx | 2 +- pyspike/distances.py | 2 +- pyspike/function.py | 2 +- pyspike/python_backend.py | 2 +- pyspike/spikes.py | 2 +- test/test_distance.py | 2 +- test/test_function.py | 2 +- test/test_spikes.py | 2 +- 13 files changed, 20 insertions(+), 29 deletions(-) (limited to 'examples/plot.py') diff --git a/License b/License index 95d0405..472deac 100644 --- a/License +++ b/License @@ -1,21 +1,12 @@ -The MIT License (MIT) +BSD License -Copyright (c) 2014 Mario Mulansky, +Copyright (c) 2014, Mario Mulansky +All rights reserved. -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. \ No newline at end of file +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/examples/isi_matrix.py b/examples/isi_matrix.py index db740dd..7bf1cf9 100644 --- a/examples/isi_matrix.py +++ b/examples/isi_matrix.py @@ -5,7 +5,7 @@ trains. Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ diff --git a/examples/merge.py b/examples/merge.py index 726d32b..2550cdb 100644 --- a/examples/merge.py +++ b/examples/merge.py @@ -4,7 +4,7 @@ Simple example showing the merging of two spike trains. Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ from __future__ import print_function diff --git a/examples/plot.py b/examples/plot.py index 5c3ad4a..da53670 100644 --- a/examples/plot.py +++ b/examples/plot.py @@ -4,7 +4,7 @@ Simple example showing how to load and plot spike trains and their distances. Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 3867e6e..c58a6b1 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -1,7 +1,7 @@ """ Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ __all__ = ["function", "distances", "spikes"] diff --git a/pyspike/cython_distance.pyx b/pyspike/cython_distance.pyx index 4ab4381..ccf8060 100644 --- a/pyspike/cython_distance.pyx +++ b/pyspike/cython_distance.pyx @@ -12,7 +12,7 @@ improves the performance of spike_distance by a factor of 10! Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ diff --git a/pyspike/distances.py b/pyspike/distances.py index b2eec92..3b9fe1f 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -4,7 +4,7 @@ Module containing several functions to compute spike distances Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ import numpy as np diff --git a/pyspike/function.py b/pyspike/function.py index 8107538..7722cc3 100644 --- a/pyspike/function.py +++ b/pyspike/function.py @@ -5,7 +5,7 @@ linear functions. 
Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ from __future__ import print_function diff --git a/pyspike/python_backend.py b/pyspike/python_backend.py index cf1a92f..a1f5ea2 100644 --- a/pyspike/python_backend.py +++ b/pyspike/python_backend.py @@ -5,7 +5,7 @@ implementation. Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ diff --git a/pyspike/spikes.py b/pyspike/spikes.py index c496ab8..d390222 100644 --- a/pyspike/spikes.py +++ b/pyspike/spikes.py @@ -4,7 +4,7 @@ Module containing several function to load and transform spike trains Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ import numpy as np diff --git a/test/test_distance.py b/test/test_distance.py index 3371cbd..b500b2c 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -4,7 +4,7 @@ Tests the isi- and spike-distance computation Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ diff --git a/test/test_function.py b/test/test_function.py index ed7d6bc..a579796 100644 --- a/test/test_function.py +++ b/test/test_function.py @@ -4,7 +4,7 @@ Tests the PieceWiseConst and PieceWiseLinear functions Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ from __future__ import print_function diff --git a/test/test_spikes.py b/test/test_spikes.py index 349e0bf..bf914c0 100644 --- a/test/test_spikes.py +++ b/test/test_spikes.py @@ -4,7 +4,7 @@ Test loading of spike trains from text files Copyright 2014, Mario Mulansky -Distributed under the MIT License (MIT) +Distributed under the BSD License """ from __future__ import print_function -- cgit v1.2.3 From 65801901e6d3325c8d1c82ab92334ca19ebd92d7 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 14 Oct 2014 17:49:23 +0200 Subject: changed isi distance profile to abs values --- examples/plot.py | 2 -- pyspike/cython_distance.pyx | 4 ++-- pyspike/distances.py | 2 +- pyspike/function.py | 16 +++------------- test/test_distance.py | 6 +++--- test/test_function.py | 2 -- 6 files changed, 9 insertions(+), 23 deletions(-) (limited to 'examples/plot.py') diff --git a/examples/plot.py b/examples/plot.py index da53670..4ac8f5d 100644 --- a/examples/plot.py +++ b/examples/plot.py @@ -29,8 +29,6 @@ plt.figure() plt.plot(x, np.abs(y), '--k') print("Average: %.8f" % f.avrg()) -print("Absolute average: %.8f" % f.abs_avrg()) - f = spk.spike_distance(spike_trains[0], spike_trains[1]) x, y = f.get_plottable_data() diff --git a/pyspike/cython_distance.pyx b/pyspike/cython_distance.pyx index ccf8060..178fcba 100644 --- a/pyspike/cython_distance.pyx +++ b/pyspike/cython_distance.pyx @@ -60,7 +60,7 @@ def isi_distance_cython(double[:] s1, isi_values = np.empty(N1+N2-1) with nogil: # release the interpreter to allow multithreading - isi_values[0] = (nu1-nu2)/fmax(nu1,nu2) + isi_values[0] = fabs(nu1-nu2)/fmax(nu1, nu2) index1 = 0 index2 = 0 index = 1 @@ -88,7 +88,7 @@ def isi_distance_cython(double[:] s1, nu1 = s1[index1+1]-s1[index1] nu2 = s2[index2+1]-s2[index2] # compute the corresponding isi-distance - isi_values[index] = (nu1 - nu2) / fmax(nu1, nu2) + isi_values[index] = fabs(nu1 - nu2) / fmax(nu1, nu2) index += 1 # the last event is the interval end spike_events[index] = s1[N1] diff --git a/pyspike/distances.py b/pyspike/distances.py index 3b9fe1f..08d0ed8 100644 --- a/pyspike/distances.py 
+++ b/pyspike/distances.py @@ -213,7 +213,7 @@ def isi_distance_matrix(spike_trains, indices=None): distance_matrix = np.zeros((len(indices), len(indices))) for i, j in pairs: - d = isi_distance(spike_trains[i], spike_trains[j]).abs_avrg() + d = isi_distance(spike_trains[i], spike_trains[j]).avrg() distance_matrix[i, j] = d distance_matrix[j, i] = d return distance_matrix diff --git a/pyspike/function.py b/pyspike/function.py index 7722cc3..bd3e2d5 100644 --- a/pyspike/function.py +++ b/pyspike/function.py @@ -18,7 +18,7 @@ import numpy as np ############################################################## class PieceWiseConstFunc: """ A class representing a piece-wise constant function. """ - + def __init__(self, x, y): """ Constructs the piece-wise const function. Args: @@ -66,16 +66,6 @@ class PieceWiseConstFunc: return np.sum((self.x[1:]-self.x[:-1]) * self.y) / \ (self.x[-1]-self.x[0]) - def abs_avrg(self): - """ Computes the average of the abs value of the piece-wise const - function: - a = 1/T int |f(x)| dx where T is the length of the interval. - Returns: - - the average a. - """ - return np.sum((self.x[1:]-self.x[:-1]) * np.abs(self.y)) / \ - (self.x[-1]-self.x[0]) - def add(self, f): """ Adds another PieceWiseConst function to this function. Note: only functions defined on the same interval can be summed. @@ -84,7 +74,7 @@ class PieceWiseConstFunc: """ assert self.x[0] == f.x[0], "The functions have different intervals" assert self.x[-1] == f.x[-1], "The functions have different intervals" - + # python implementation # from python_backend import add_piece_wise_const_python # self.x, self.y = add_piece_wise_const_python(self.x, self.y, @@ -107,7 +97,7 @@ class PieceWiseConstFunc: ############################################################## class PieceWiseLinFunc: """ A class representing a piece-wise linear function. """ - + def __init__(self, x, y1, y2): """ Constructs the piece-wise linear function. 
Args: diff --git a/test/test_distance.py b/test/test_distance.py index b500b2c..6e50467 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -23,8 +23,8 @@ def test_isi(): # pen&paper calculation of the isi distance expected_times = [0.0, 0.2, 0.3, 0.4, 0.45, 0.6, 0.7, 0.8, 0.9, 0.95, 1.0] - expected_isi = [-0.1/0.3, -0.1/0.3, 0.05/0.2, 0.05/0.2, -0.15/0.35, - -0.25/0.35, -0.05/0.35, 0.2/0.3, 0.25/0.3, 0.25/0.3] + expected_isi = [0.1/0.3, 0.1/0.3, 0.05/0.2, 0.05/0.2, 0.15/0.35, + 0.25/0.35, 0.05/0.35, 0.2/0.3, 0.25/0.3, 0.25/0.3] t1 = spk.add_auxiliary_spikes(t1, 1.0) t2 = spk.add_auxiliary_spikes(t2, 1.0) @@ -40,7 +40,7 @@ def test_isi(): t2 = np.array([0.1, 0.4, 0.5, 0.6]) expected_times = [0.0, 0.1, 0.2, 0.4, 0.5, 0.6, 1.0] - expected_isi = [0.1/0.2, -0.1/0.3, -0.1/0.3, 0.1/0.2, 0.1/0.2, -0.0/0.5] + expected_isi = [0.1/0.2, 0.1/0.3, 0.1/0.3, 0.1/0.2, 0.1/0.2, 0.0/0.5] t1 = spk.add_auxiliary_spikes(t1, 1.0) t2 = spk.add_auxiliary_spikes(t2, 1.0) diff --git a/test/test_function.py b/test/test_function.py index a579796..c5caa5a 100644 --- a/test/test_function.py +++ b/test/test_function.py @@ -28,8 +28,6 @@ def test_pwc(): assert_array_almost_equal(yp, yp_expected, decimal=16) assert_almost_equal(f.avrg(), (1.0-0.5+0.5*1.5+1.5*0.75)/4.0, decimal=16) - assert_almost_equal(f.abs_avrg(), (1.0+0.5+0.5*1.5+1.5*0.75)/4.0, - decimal=16) def test_pwc_add(): -- cgit v1.2.3 From d869d4d822c651ea3d094eaf17ba7732bf91136f Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Thu, 16 Oct 2014 14:16:32 +0200 Subject: new function names in examples and readme --- .travis.yml | 2 -- Readme.md | 12 +++++++----- examples/plot.py | 4 ++-- pyspike/distances.py | 2 +- 4 files changed, 10 insertions(+), 10 deletions(-) (limited to 'examples/plot.py') diff --git a/.travis.yml b/.travis.yml index 31df3bc..43f3ef7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,8 +7,6 @@ install: - pip install -q cython script: -# - export PYTHONPATH=$PYTHONPATH:$TRAVIS_BUILD_DIR - python setup.py build_ext --inplace -# - cd test - nosetests diff --git a/Readme.md b/Readme.md index c12b0b6..be42c4a 100644 --- a/Readme.md +++ b/Readme.md @@ -33,7 +33,6 @@ To install PySpike, simply download the source, e.g. from Github, and run the `s Then you can run the tests using the `nosetests` test framework: - cd test nosetests Finally, you should make PySpike's installation folder known to Python to be able to import pyspike in your own projects. @@ -70,7 +69,7 @@ If you load spike trains yourself, i.e. 
from data files with different structure Both the ISI and the SPIKE distance computation require the presence of auxiliary spikes, so make sure you have those in your spike trains: spike_train = spk.add_auxiliary_spikes(spike_train, (T_start, T_end)) - # you provide only a single value, it is interpreted as T_end, while T_start=0 + # if you provide only a single value, it is interpreted as T_end, while T_start=0 spike_train = spk.add_auxiliary_spikes(spike_train, T_end) ## Computing bi-variate distances @@ -98,14 +97,17 @@ The following code loads some exemplary spike trains, computes the dissimilarity spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", time_interval=(0, 4000)) - isi_profile = spk.isi_distance(spike_trains[0], spike_trains[1]) + isi_profile = spk.isi_profile(spike_trains[0], spike_trains[1]) x, y = isi_profile.get_plottable_data() plt.plot(x, y, '--k') print("ISI distance: %.8f" % isi_profil.avrg()) plt.show() -The ISI-profile is a piece-wise constant function, there the function `isi_distance` returns an instance of the `PieceWiseConstFunc` class. -As above, this class allows you to obtain arrays that can be used to plot the function with `plt.plt`, but also to compute the absolute average, which amounts to the final scalar ISI-distance. +The ISI-profile is a piece-wise constant function, there the function `isi_profile` returns an instance of the `PieceWiseConstFunc` class. +As shown above, this class allows you to obtain arrays that can be used to plot the function with `plt.plt`, but also to compute the absolute average, which amounts to the final scalar ISI-distance. +If you are only interested in the scalar ISI-distance and not the profile, you can simly use: + + isi_dist = spk.isi_distance(spike_trains[0], spike_trains[1]) Furthermore, PySpike provides the `average_profile` function that can be used to compute the average profile of a list of given `PieceWiseConstFunc` instances. 
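A minimal sketch of the renamed interface described in the Readme hunk above, assuming the `time_interval` keyword and the test data file used by the examples at this point in the history; the time average of the ISI-profile and the direct distance call should agree:

    from __future__ import print_function
    import pyspike as spk

    spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
                                                   time_interval=(0, 4000))

    # full time-resolved profile, then its time average ...
    isi_prof = spk.isi_profile(spike_trains[0], spike_trains[1])
    print("ISI-distance via profile: %.8f" % isi_prof.avrg())

    # ... versus the scalar convenience function introduced by this commit
    print("ISI-distance directly:    %.8f" % spk.isi_distance(spike_trains[0],
                                                              spike_trains[1]))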
diff --git a/examples/plot.py b/examples/plot.py index 4ac8f5d..6da7f49 100644 --- a/examples/plot.py +++ b/examples/plot.py @@ -22,7 +22,7 @@ spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", for (i, spikes) in enumerate(spike_trains): plt.plot(spikes, i*np.ones_like(spikes), 'o') -f = spk.isi_distance(spike_trains[0], spike_trains[1]) +f = spk.isi_profile(spike_trains[0], spike_trains[1]) x, y = f.get_plottable_data() plt.figure() @@ -30,7 +30,7 @@ plt.plot(x, np.abs(y), '--k') print("Average: %.8f" % f.avrg()) -f = spk.spike_distance(spike_trains[0], spike_trains[1]) +f = spk.spike_profile(spike_trains[0], spike_trains[1]) x, y = f.get_plottable_data() print(x) print(y) diff --git a/pyspike/distances.py b/pyspike/distances.py index e50772f..9056863 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -284,7 +284,7 @@ def isi_distance_matrix(spike_trains, indices=None): distance_matrix = np.zeros((len(indices), len(indices))) for i, j in pairs: - d = isi_distance(spike_trains[i], spike_trains[j]).avrg() + d = isi_distance(spike_trains[i], spike_trains[j]) distance_matrix[i, j] = d distance_matrix[j, i] = d return distance_matrix -- cgit v1.2.3 From e85e6a72662d30b677dd4c9ded6d2b1520ba63ec Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 20 Oct 2014 18:13:23 +0200 Subject: +multivariate example, docs for spike profile --- Readme.rst | 60 ++++++++++++++++++++++++++++++++++++++++-------- examples/multivariate.py | 50 ++++++++++++++++++++++++++++++++++++++++ examples/plot.py | 14 ++++++----- 3 files changed, 109 insertions(+), 15 deletions(-) create mode 100644 examples/multivariate.py (limited to 'examples/plot.py') diff --git a/Readme.rst b/Readme.rst index 81ef338..b0128c0 100644 --- a/Readme.rst +++ b/Readme.rst @@ -26,7 +26,7 @@ To use PySpike you need Python installed with the following additional packages: - cython - nosetests (for running the tests) -In particular, make sure that cython_ is configured properly and able to locate a C compiler. +In particular, make sure that cython_ is configured properly and able to locate a C compiler, otherwise you will only be able to use the much slower plain Python implementations. To install PySpike, simply download the source, e.g. from Github, and run the :code:`setup.py` script: @@ -124,7 +124,7 @@ The following code loads some exemplary spike trains, computes the dissimilarity plt.show() The ISI-profile is a piece-wise constant function, there the function :code:`isi_profile` returns an instance of the :code:`PieceWiseConstFunc` class. -As shown above, this class allows you to obtain arrays that can be used to plot the function with :code":`plt.plt`, but also to compute the absolute average, which amounts to the final scalar ISI-distance. +As shown above, this class allows you to obtain arrays that can be used to plot the function with :code":`plt.plt`, but also to compute the average, which amounts to the final scalar ISI-distance. If you are only interested in the scalar ISI-distance and not the profile, you can simly use: .. code:: python @@ -135,11 +135,15 @@ Furthermore, PySpike provides the :code:`average_profile` function that can be u .. 
code:: python - avrg_profile = spk.average_profile([spike_train1, spike_train2]) + isi_profile1 = spk.isi_profile(spike_trains[0], spike_trains[1]) + isi_profile2 = spk.isi_profile(spike_trains[0], spike_trains[2]) + isi_profile3 = spk.isi_profile(spike_trains[1], spike_trains[2]) + + avrg_profile = spk.average_profile([isi_profile1, isi_profile2, isi_profile3]) x, y = avrg_profile.get_plottable_data() - plt.plot(x, y, label="Average profile") + plt.plot(x, y, label="Average ISI profile") -Note the difference between the :code:`average_profile` function, which returns a :code:`PieceWiseConstFunc` (or :code:`PieceWiseLinFunc`, see below), and the :code:`avrg` member function above, that computes the integral over the time profile. +Note the difference between the :code:`average_profile` function, which returns a :code:`PieceWiseConstFunc` (or :code:`PieceWiseLinFunc`, see below), and the :code:`avrg` member function above, that computes the integral over the time profile resulting in a single value. So to obtain overall average ISI-distance of a list of ISI profiles you can first compute the average profile using :code:`average_profile` and the use .. code:: python @@ -148,12 +152,50 @@ So to obtain overall average ISI-distance of a list of ISI profiles you can firs to obtain the final, scalar average ISI distance of the whole set (see also "Computing multi-variate distance" below). -Computing multi-variate distances ---------------------------------- +SPIKE-distance +.............. + +To computation for the spike distance you use the function :code:`spike_profile` instead of :code:`isi_profile` above. +But the general approach is very similar: + +.. code:: python + + import matplotlib.pyplot as plt + import pyspike as spk + + spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", + time_interval=(0, 4000)) + spike_profile = spk.spike_profile(spike_trains[0], spike_trains[1]) + x, y = spike_profile.get_plottable_data() + plt.plot(x, y, '--k') + print("SPIKE distance: %.8f" % spike_profil.avrg()) + plt.show() -Plotting --------- +This short example computes and plots the SPIKE-profile of the first two spike trains in the file :code:`PySpike_testdata.txt`. +In contrast to the ISI-profile, a SPIKE-profile is a piece-wise *linear* function and thusly represented by a :code:`PieceWiseLinFunc` object. +Just like the :code:`PieceWiseconstFunc` for the ISI-profile, the :code:`PieceWiseLinFunc` provides a :code:`get_plottable_data` member function that returns array that can be used directly to plot the function. +Furthermore, the :code:`avrg` member function returns the average of the profile defined as the overall SPIKE distance. + +Again, you can use + +.. code:: python + + spike_dist = spk.spike_distance(spike_trains[0], spike_trains[1]) + +to compute the SPIKE distance directly, if you are not interested in the profile at all. +Furthmore, you can use the :code:`average_profile` function to compute an average profile of a list of SPIKE-profiles: + +.. 
code:: python + + avrg_profile = spk.average_profile([spike_profile1, spike_profile2, + spike_profile3]) + x, y = avrg_profile.get_plottable_data() + plt.plot(x, y, label="Average SPIKE profile") + + +Computing multi-variate distances +--------------------------------- Averaging diff --git a/examples/multivariate.py b/examples/multivariate.py new file mode 100644 index 0000000..260b217 --- /dev/null +++ b/examples/multivariate.py @@ -0,0 +1,50 @@ +""" Example for the multivariate spike distance + +Copyright 2014, Mario Mulansky + +""" +from __future__ import print_function +import time +import pyspike as spk + + +def time_diff_in_ms(start, end): + """ Returns the time difference end-start in ms. + """ + return (end-start)*1000 + + +t_start = time.clock() + +# load the data +time_loading = time.clock() +spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", + time_interval=(0, 4000)) +t_loading = time.clock() + +print("Number of spike trains: %d" % len(spike_trains)) +num_of_spikes = sum([len(spike_trains[i]) for i in xrange(len(spike_trains))]) +print("Number of spikes: %d" % num_of_spikes) + +# calculate the multivariate spike distance +f = spk.spike_profile_multi(spike_trains) + +t_spike = time.clock() + +# print the average +avrg = f.avrg() +print("Spike distance from average: %.8f" % avrg) + +t_avrg = time.clock() + +# compute average distance directly, should give the same result as above +spike_dist = spk.spike_distance_multi(spike_trains) +print("Spike distance directly: %.8f" % spike_dist) + +t_dist = time.clock() + +print("Loading: %9.1f ms" % time_diff_in_ms(t_start, t_loading)) +print("Computing profile: %9.1f ms" % time_diff_in_ms(t_loading, t_spike)) +print("Averaging: %9.1f ms" % time_diff_in_ms(t_spike, t_avrg)) +print("Computing distance: %9.1f ms" % time_diff_in_ms(t_avrg, t_dist)) +print("Total: %9.1f ms" % time_diff_in_ms(t_start, t_dist)) diff --git a/examples/plot.py b/examples/plot.py index 6da7f49..59334c9 100644 --- a/examples/plot.py +++ b/examples/plot.py @@ -26,15 +26,17 @@ f = spk.isi_profile(spike_trains[0], spike_trains[1]) x, y = f.get_plottable_data() plt.figure() -plt.plot(x, np.abs(y), '--k') +plt.plot(x, np.abs(y), '--k', label="ISI-profile") -print("Average: %.8f" % f.avrg()) +print("ISI-distance: %.8f" % f.avrg()) f = spk.spike_profile(spike_trains[0], spike_trains[1]) x, y = f.get_plottable_data() -print(x) -print(y) -#plt.figure() -plt.plot(x, y, '-b') + +plt.plot(x, y, '-b', label="SPIKE-profile") + +print("SPIKE-distance: %.8f" % f.avrg()) + +plt.legend(loc="upper left") plt.show() -- cgit v1.2.3 From 4a295e6045abc7564a2e72d1a2173bf2b04c5950 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Thu, 23 Oct 2014 12:46:55 +0200 Subject: docs: added interval averaging explanations --- Readme.rst | 38 ++++++++++++++++++++++++++++---------- examples/distance_matrix.py | 4 ++-- examples/plot.py | 3 ++- pyspike/distances.py | 14 +++++++------- 4 files changed, 39 insertions(+), 20 deletions(-) (limited to 'examples/plot.py') diff --git a/Readme.rst b/Readme.rst index 4482d01..c6ded74 100644 --- a/Readme.rst +++ b/Readme.rst @@ -87,8 +87,8 @@ Both the ISI and the SPIKE distance computation require the presence of auxiliar # if you provide only a single value, it is interpreted as T_end, while T_start=0 spike_train = spk.add_auxiliary_spikes(spike_train, T_end) -Computing bi-variate distances ------------------------------- +Computing bi-variate distances profiles +--------------------------------------- **Important note:** @@ -124,12 +124,25 @@ 
The following code loads some exemplary spike trains, computes the dissimilarity plt.show() The ISI-profile is a piece-wise constant function, there the function :code:`isi_profile` returns an instance of the :code:`PieceWiseConstFunc` class. -As shown above, this class allows you to obtain arrays that can be used to plot the function with :code":`plt.plt`, but also to compute the average, which amounts to the final scalar ISI-distance. +As shown above, this class allows you to obtain arrays that can be used to plot the function with :code:`plt.plt`, but also to compute the time average, which amounts to the final scalar ISI-distance. +By default, the time average is computed for the whole :code:`PieceWiseConstFunc` function. +However, it is also possible to obtain the average of some interval by providing a pair of floats defining the start and end of the interval. +In the above example, the following code computes the ISI-distances obtained from averaging the ISI-profile over four different intervals: + +.. code:: python + + isi1 = isi_profil.avrg(interval=(0,1000)) + isi2 = isi_profil.avrg(interval=(1000,2000)) + isi3 = isi_profil.avrg(interval=(2000,3000)) + isi4 = isi_profil.avrg(interval=(3000,4000)) + If you are only interested in the scalar ISI-distance and not the profile, you can simly use: .. code:: python - isi_dist = spk.isi_distance(spike_trains[0], spike_trains[1]) + isi_dist = spk.isi_distance(spike_trains[0], spike_trains[1], interval) + +where :code:`interval` is optional, as above, and if omitted the ISI-distance is computed for the complete spike trains. Furthermore, PySpike provides the :code:`average_profile` function that can be used to compute the average profile of a list of given :code:`PieceWiseConstFunc` instances. @@ -176,14 +189,16 @@ This short example computes and plots the SPIKE-profile of the first two spike t In contrast to the ISI-profile, a SPIKE-profile is a piece-wise *linear* function and thusly represented by a :code:`PieceWiseLinFunc` object. Just like the :code:`PieceWiseconstFunc` for the ISI-profile, the :code:`PieceWiseLinFunc` provides a :code:`get_plottable_data` member function that returns array that can be used directly to plot the function. Furthermore, the :code:`avrg` member function returns the average of the profile defined as the overall SPIKE distance. +As above, you can provide an interval as a pair of floats to :code:`avrg` to specify the averaging interval if required. Again, you can use .. code:: python - spike_dist = spk.spike_distance(spike_trains[0], spike_trains[1]) + spike_dist = spk.spike_distance(spike_trains[0], spike_trains[1], interval) to compute the SPIKE distance directly, if you are not interested in the profile at all. +:code:`interval` is optional and defines the averaging interval, if neglected the whole spike train is used. Furthmore, you can use the :code:`average_profile` function to compute an average profile of a list of SPIKE-profiles: .. code:: python @@ -195,7 +210,7 @@ Furthmore, you can use the :code:`average_profile` function to compute an averag Computing multi-variate profiles and distances ---------------------------------- +---------------------------------------------- To compute the multi-variate ISI- or SPIKE-profile of a set of spike trains, you can compute all bi-variate profiles separately and then use the :code:`average_profile` function above. However, PySpike provides convenience functions for that purpose. 
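A minimal sketch combining the interval-averaging snippets from the hunk above into one runnable script, assuming the `interval` keyword of `avrg` and of `isi_distance` behaves as documented there:

    from __future__ import print_function
    import pyspike as spk

    spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
                                                   time_interval=(0, 4000))
    isi_prof = spk.isi_profile(spike_trains[0], spike_trains[1])

    # time average over the whole recording ...
    print("ISI-distance, full interval: %.8f" % isi_prof.avrg())

    # ... and over four consecutive windows, as in the Readme snippet above
    for t0 in (0, 1000, 2000, 3000):
        print("ISI-distance, [%d, %d): %.8f"
              % (t0, t0 + 1000, isi_prof.avrg(interval=(t0, t0 + 1000))))

    # the same windowed value without constructing the profile explicitly
    print("%.8f" % spk.isi_distance(spike_trains[0], spike_trains[1],
                                    interval=(0, 1000)))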
@@ -210,11 +225,12 @@ The following example computes the multivariate ISI- and SPIKE-profile for a lis Both functions take an optional parameter :code:`indices`, a list of indices that allows to define the spike trains that should be used for the multi-variate profile. As before, if you are only interested in the distance values, and not in the profile, PySpike offers the functions: :code:`isi_distance_multi` and :code:`spike_distance_multi`, that return the scalar multi-variate ISI- and SPIKE-distance. +Both distance functions also accept an :code:`interval` parameter that can be used to specify the averaging interval as a pair of floats, if neglected the complete interval is used. Another option to address large sets of spike trains are distance matrices. Each entry in the distance matrix represents a bi-variate distance of the spike trains. Hence, the distance matrix is symmetric and has zero values at the diagonal. -The following example computes and plots the ISI- and SPIKE-distance matrix. +The following example computes and plots the ISI- and SPIKE-distance matrix, where for the latter one only the time interval T=0..1000 is used for the averaging. .. code:: python @@ -226,15 +242,17 @@ The following example computes and plots the ISI- and SPIKE-distance matrix. plt.title("ISI-distance") plt.figure() - spike_distance = spk.spike_distance_matrix(spike_trains) + spike_distance = spk.spike_distance_matrix(spike_trains, interval=(0,1000)) plt.imshow(spike_distance, interpolation='none') plt.title("SPIKE-distance") plt.show() -Averaging ---------- +Time Averages +------------- + + .. _ISI: http://www.scholarpedia.org/article/Measures_of_spike_train_synchrony#ISI-distance diff --git a/examples/distance_matrix.py b/examples/distance_matrix.py index 38bd9c8..142db2c 100644 --- a/examples/distance_matrix.py +++ b/examples/distance_matrix.py @@ -26,8 +26,8 @@ plt.imshow(isi_distance, interpolation='none') plt.title("ISI-distance") plt.figure() -spike_distance = spk.spike_distance_matrix(spike_trains) +spike_distance = spk.spike_distance_matrix(spike_trains, interval=(0, 1000)) plt.imshow(spike_distance, interpolation='none') -plt.title("SPIKE-distance") +plt.title("SPIKE-distance, T=0-1000") plt.show() diff --git a/examples/plot.py b/examples/plot.py index 59334c9..d32c464 100644 --- a/examples/plot.py +++ b/examples/plot.py @@ -1,6 +1,7 @@ """ plot.py -Simple example showing how to load and plot spike trains and their distances. +Simple example showing how to load and plot spike trains and their distance +profiles. Copyright 2014, Mario Mulansky diff --git a/pyspike/distances.py b/pyspike/distances.py index 5135b9b..34f7d78 100644 --- a/pyspike/distances.py +++ b/pyspike/distances.py @@ -57,14 +57,13 @@ def isi_distance(spikes1, spikes2, interval=None): isi-distance is the integral over the isi distance profile :math:`S_{isi}(t)`: - .. math:: I = \int_0^T S_{isi}(t) dt. + .. math:: I = \int_{T_0}^{T_1} S_{isi}(t) dt. :param spikes1: ordered array of spike times with auxiliary spikes. :param spikes2: ordered array of spike times with auxiliary spikes. - :param interval: averaging interval given as a pair of floats, if None - the average over the whole function is computed. + :param interval: averaging interval given as a pair of floats (T0, T1), + if None the average over the whole function is computed. :type interval: Pair of floats or None. - :returns: The isi-distance I. 
:rtype: double """ @@ -114,12 +113,13 @@ Falling back to slow python backend.") def spike_distance(spikes1, spikes2, interval=None): """ Computes the spike-distance S of the given spike trains. The spike-distance is the integral over the isi distance profile S_spike(t): - :math:`S = \int_^T S_spike(t) dt`. + + .. math:: S = \int_{T_0}^{T_1} S_{spike}(t) dt. :param spikes1: ordered array of spike times with auxiliary spikes. :param spikes2: ordered array of spike times with auxiliary spikes. - :param interval: averaging interval given as a pair of floats, if None - the average over the whole function is computed. + :param interval: averaging interval given as a pair of floats (T0, T1), + if None the average over the whole function is computed. :type interval: Pair of floats or None. :returns: The spike-distance. :rtype: double -- cgit v1.2.3 From ecc7898a0b6cd5bc353fd246f3ad549934c82229 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 27 Apr 2015 17:35:36 +0200 Subject: adjustments of examples --- examples/PySpike_testdata.txt | 0 examples/merge.py | 11 ++++++----- examples/multivariate.py | 5 +++-- examples/plot.py | 6 +++--- examples/spike_sync.py | 3 +-- 5 files changed, 13 insertions(+), 12 deletions(-) mode change 100755 => 100644 examples/PySpike_testdata.txt (limited to 'examples/plot.py') diff --git a/examples/PySpike_testdata.txt b/examples/PySpike_testdata.txt old mode 100755 new mode 100644 diff --git a/examples/merge.py b/examples/merge.py index 2550cdb..2ea96ea 100644 --- a/examples/merge.py +++ b/examples/merge.py @@ -17,12 +17,13 @@ import pyspike as spk # first load the data, ending time = 4000 spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", 4000) -spikes = spk.merge_spike_trains([spike_trains[0], spike_trains[1]]) +merged_spike_train = spk.merge_spike_trains([spike_trains[0], spike_trains[1]]) -print(spikes) +print(merged_spike_train.spikes) -plt.plot(spike_trains[0], np.ones_like(spike_trains[0]), 'o') -plt.plot(spike_trains[1], np.ones_like(spike_trains[1]), 'x') -plt.plot(spikes, 2*np.ones_like(spikes), 'o') +plt.plot(spike_trains[0].spikes, np.ones_like(spike_trains[0].spikes), 'o') +plt.plot(spike_trains[1].spikes, np.ones_like(spike_trains[1].spikes), 'x') +plt.plot(merged_spike_train.spikes, + 2*np.ones_like(merged_spike_train.spikes), 'o') plt.show() diff --git a/examples/multivariate.py b/examples/multivariate.py index 260b217..53dbf0f 100644 --- a/examples/multivariate.py +++ b/examples/multivariate.py @@ -19,11 +19,12 @@ t_start = time.clock() # load the data time_loading = time.clock() spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", - time_interval=(0, 4000)) + edges=(0, 4000)) t_loading = time.clock() print("Number of spike trains: %d" % len(spike_trains)) -num_of_spikes = sum([len(spike_trains[i]) for i in xrange(len(spike_trains))]) +num_of_spikes = sum([len(spike_trains[i].spikes) + for i in xrange(len(spike_trains))]) print("Number of spikes: %d" % num_of_spikes) # calculate the multivariate spike distance diff --git a/examples/plot.py b/examples/plot.py index d32c464..9670286 100644 --- a/examples/plot.py +++ b/examples/plot.py @@ -17,11 +17,11 @@ import matplotlib.pyplot as plt import pyspike as spk spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", - time_interval=(0, 4000)) + edges=(0, 4000)) # plot the spike time -for (i, spikes) in enumerate(spike_trains): - plt.plot(spikes, i*np.ones_like(spikes), 'o') +for (i, spike_train) in enumerate(spike_trains): + 
plt.plot(spike_train.spikes, i*np.ones_like(spike_train.spikes), 'o') f = spk.isi_profile(spike_trains[0], spike_trains[1]) x, y = f.get_plottable_data() diff --git a/examples/spike_sync.py b/examples/spike_sync.py index 9c5f75c..9e81536 100644 --- a/examples/spike_sync.py +++ b/examples/spike_sync.py @@ -1,12 +1,11 @@ from __future__ import print_function -import numpy as np import matplotlib.pyplot as plt import pyspike as spk spike_trains = spk.load_spike_trains_from_txt("../test/SPIKE_Sync_Test.txt", - time_interval=(0, 4000)) + edges=(0, 4000)) plt.figure() -- cgit v1.2.3 From 7984d32e767e5833f1aaee06b6aeda8cc3f4500d Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Fri, 12 Jun 2015 14:57:50 +0200 Subject: update example to use new SpikeTrain capability Make use of __getitem__ and __len__ of SpikeTrains in some examples. --- examples/multivariate.py | 2 +- examples/plot.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) (limited to 'examples/plot.py') diff --git a/examples/multivariate.py b/examples/multivariate.py index 53dbf0f..9a44758 100644 --- a/examples/multivariate.py +++ b/examples/multivariate.py @@ -23,7 +23,7 @@ spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", t_loading = time.clock() print("Number of spike trains: %d" % len(spike_trains)) -num_of_spikes = sum([len(spike_trains[i].spikes) +num_of_spikes = sum([len(spike_trains[i]) for i in xrange(len(spike_trains))]) print("Number of spikes: %d" % num_of_spikes) diff --git a/examples/plot.py b/examples/plot.py index 9670286..5841baf 100644 --- a/examples/plot.py +++ b/examples/plot.py @@ -19,9 +19,9 @@ import pyspike as spk spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", edges=(0, 4000)) -# plot the spike time +# plot the spike times for (i, spike_train) in enumerate(spike_trains): - plt.plot(spike_train.spikes, i*np.ones_like(spike_train.spikes), 'o') + plt.plot(spike_train, i*np.ones_like(spike_train), 'o') f = spk.isi_profile(spike_trains[0], spike_trains[1]) x, y = f.get_plottable_data() -- cgit v1.2.3 From 0ece782e1579660cdb71e077cbaaf9f76e97bef4 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 7 Jul 2015 18:06:12 +0200 Subject: better spike train plot (scatter) in plot.py --- examples/plot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'examples/plot.py') diff --git a/examples/plot.py b/examples/plot.py index 5841baf..c44afd1 100644 --- a/examples/plot.py +++ b/examples/plot.py @@ -21,7 +21,7 @@ spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", # plot the spike times for (i, spike_train) in enumerate(spike_trains): - plt.plot(spike_train, i*np.ones_like(spike_train), 'o') + plt.scatter(spike_train, i*np.ones_like(spike_train), marker='|') f = spk.isi_profile(spike_trains[0], spike_trains[1]) x, y = f.get_plottable_data() -- cgit v1.2.3 From 5119d47d0f00c3f7203cf94460730b59a7e473ec Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 7 Jul 2015 18:55:32 +0200 Subject: add disable_backend_warning property Users can now disable the warning messages produced when the cython backend is not available by writing spk.disable_backend_warning = True in the beginning --- examples/performance.py | 3 +++ examples/plot.py | 1 + pyspike/DiscreteFunc.py | 4 +++- pyspike/PieceWiseConstFunc.py | 7 +++++-- pyspike/PieceWiseLinFunc.py | 9 ++++++--- pyspike/__init__.py | 2 ++ pyspike/isi_distance.py | 6 ++++-- pyspike/spike_distance.py | 4 +++- pyspike/spike_sync.py | 4 +++- 9 files changed, 30 insertions(+), 10 deletions(-) 
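A minimal sketch of the switch described in this commit message, assuming only that `disable_backend_warning` is a module-level flag checked before the functions fall back to the Python backend:

    from __future__ import print_function
    import pyspike as spk

    # suppress the "cython backend not found ... falling back" messages
    spk.disable_backend_warning = True

    spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
                                                   edges=(0, 4000))
    # runs silently even on a pure-Python installation
    print("ISI-distance: %.8f" % spk.isi_distance(spike_trains[0],
                                                  spike_trains[1]))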
(limited to 'examples/plot.py') diff --git a/examples/performance.py b/examples/performance.py index 1c31e8f..d0c3b91 100644 --- a/examples/performance.py +++ b/examples/performance.py @@ -14,6 +14,9 @@ from datetime import datetime import cProfile import pstats +# in case you dont have the cython backends, disable the warnings as follows: +# spk.disable_backend_warning = True + M = 100 # number of spike trains r = 1.0 # rate of Poisson spike times T = 1E3 # length of spike trains diff --git a/examples/plot.py b/examples/plot.py index c44afd1..1922939 100644 --- a/examples/plot.py +++ b/examples/plot.py @@ -16,6 +16,7 @@ import matplotlib.pyplot as plt import pyspike as spk + spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", edges=(0, 4000)) diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py index 17153ee..a8c054e 100644 --- a/pyspike/DiscreteFunc.py +++ b/pyspike/DiscreteFunc.py @@ -6,6 +6,7 @@ from __future__ import print_function import numpy as np import collections +import pyspike ############################################################## @@ -202,7 +203,8 @@ class DiscreteFunc(object): from cython.cython_add import add_discrete_function_cython as \ add_discrete_function_impl except ImportError: - print("Warning: add_discrete_function_cython not found. Make \ + if not(pyspike.disable_backend_warning): + print("Warning: add_discrete_function_cython not found. Make \ sure that PySpike is installed by running\n\ 'python setup.py build_ext --inplace'! \ \n Falling back to slow python backend.") diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py index 2705443..23ff536 100644 --- a/pyspike/PieceWiseConstFunc.py +++ b/pyspike/PieceWiseConstFunc.py @@ -6,6 +6,7 @@ from __future__ import print_function import numpy as np import collections +import pyspike ############################################################## @@ -191,8 +192,10 @@ class PieceWiseConstFunc(object): from cython.cython_add import add_piece_wise_const_cython as \ add_piece_wise_const_impl except ImportError: - print("Warning: add_piece_wise_const_cython not found. Make sure \ -that PySpike is installed by running\n 'python setup.py build_ext --inplace'! \ + if not(pyspike.disable_backend_warning): + print("Warning: add_piece_wise_const_cython not found. Make \ +sure that PySpike is installed by running\n \ +'python setup.py build_ext --inplace'! \ \n Falling back to slow python backend.") # use python backend from cython.python_backend import add_piece_wise_const_python as \ diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py index c0dd475..0d51c76 100644 --- a/pyspike/PieceWiseLinFunc.py +++ b/pyspike/PieceWiseLinFunc.py @@ -6,6 +6,7 @@ from __future__ import print_function import numpy as np import collections +import pyspike ############################################################## @@ -230,9 +231,11 @@ class PieceWiseLinFunc: from cython.cython_add import add_piece_wise_lin_cython as \ add_piece_wise_lin_impl except ImportError: - print("Warning: add_piece_wise_lin_cython not found. Make sure \ -that PySpike is installed by running\n 'python setup.py build_ext --inplace'! \ -\n Falling back to slow python backend.") + if not(pyspike.disable_backend_warning): + print("Warning: add_piece_wise_lin_cython not found. Make \ +sure that PySpike is installed by running\n \ +'python setup.py build_ext --inplace'! 
\n \ +Falling back to slow python backend.") # use python backend from cython.python_backend import add_piece_wise_lin_python as \ add_piece_wise_lin_impl diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 3e836bd..2060f73 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -42,3 +42,5 @@ except DistributionNotFound: __version__ = 'Please install this project with setup.py' else: __version__ = _dist.version + +disable_backend_warning = False diff --git a/pyspike/isi_distance.py b/pyspike/isi_distance.py index 5ea555d..e50f203 100644 --- a/pyspike/isi_distance.py +++ b/pyspike/isi_distance.py @@ -2,6 +2,7 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License +import pyspike from pyspike import PieceWiseConstFunc from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \ _generic_distance_matrix @@ -34,8 +35,9 @@ def isi_profile(spike_train1, spike_train2): from cython.cython_profiles import isi_profile_cython \ as isi_profile_impl except ImportError: - print("Warning: isi_distance_cython not found. Make sure that PySpike \ -is installed by running\n 'python setup.py build_ext --inplace'!\n \ + if not(pyspike.disable_backend_warning): + print("Warning: isi_profile_cython not found. Make sure that \ +PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend from cython.python_backend import isi_distance_python \ diff --git a/pyspike/spike_distance.py b/pyspike/spike_distance.py index dd6d4f8..feea0c1 100644 --- a/pyspike/spike_distance.py +++ b/pyspike/spike_distance.py @@ -2,6 +2,7 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License +import pyspike from pyspike import PieceWiseLinFunc from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \ _generic_distance_matrix @@ -34,7 +35,8 @@ def spike_profile(spike_train1, spike_train2): from cython.cython_profiles import spike_profile_cython \ as spike_profile_impl except ImportError: - print("Warning: spike_profile_cython not found. Make sure that \ + if not(pyspike.disable_backend_warning): + print("Warning: spike_profile_cython not found. Make sure that \ PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index 40d98d2..10ebdc7 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -5,6 +5,7 @@ import numpy as np from functools import partial +import pyspike from pyspike import DiscreteFunc from pyspike.generic import _generic_profile_multi, _generic_distance_matrix @@ -39,7 +40,8 @@ def spike_sync_profile(spike_train1, spike_train2, max_tau=None): from cython.cython_profiles import coincidence_profile_cython \ as coincidence_profile_impl except ImportError: - print("Warning: spike_distance_cython not found. Make sure that \ + if not(pyspike.disable_backend_warning): + print("Warning: spike_distance_cython not found. Make sure that \ PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend -- cgit v1.2.3 From ee0e980b72c299eed12b7a3afc542fc470dd6d98 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 9 Mar 2016 12:30:35 +0100 Subject: updated examples to use new unified interface removed all occasions of *multi functions from examples as they are considered deprecated now. Uses unified interface everywhere. 
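A minimal sketch of the unified interface referred to in the commit message, using only call forms that appear in the hunks below: two spike trains for the bi-variate case, a whole list for the multivariate case, and an optional `indices` argument to select a subset:

    from __future__ import print_function
    import pyspike as spk

    spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
                                                   edges=(0, 4000))

    # bi-variate call, as before
    d_pair = spk.spike_distance(spike_trains[0], spike_trains[1])

    # multivariate call on the whole list (formerly spike_distance_multi)
    d_multi = spk.spike_distance(spike_trains)

    # profiles follow the same pattern; indices selects a subset of trains
    prof = spk.spike_profile(spike_trains, indices=[0, 1])

    print("pairwise: %.8f  multivariate: %.8f  profile average: %.8f"
          % (d_pair, d_multi, prof.avrg()))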
--- examples/averages.py | 2 +- examples/merge.py | 6 +++--- examples/multivariate.py | 4 ++-- examples/performance.py | 27 +++++++++++++++------------ examples/plot.py | 5 +++-- examples/profiles.py | 4 ++-- examples/spike_sync.py | 2 +- 7 files changed, 27 insertions(+), 23 deletions(-) (limited to 'examples/plot.py') diff --git a/examples/averages.py b/examples/averages.py index c3e81e2..8b405d0 100644 --- a/examples/averages.py +++ b/examples/averages.py @@ -12,7 +12,7 @@ from __future__ import print_function import pyspike as spk spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", - time_interval=(0, 4000)) + edges=(0, 4000)) f = spk.isi_profile(spike_trains[0], spike_trains[1]) diff --git a/examples/merge.py b/examples/merge.py index 2ea96ea..b4437a3 100644 --- a/examples/merge.py +++ b/examples/merge.py @@ -21,9 +21,9 @@ merged_spike_train = spk.merge_spike_trains([spike_trains[0], spike_trains[1]]) print(merged_spike_train.spikes) -plt.plot(spike_trains[0].spikes, np.ones_like(spike_trains[0].spikes), 'o') -plt.plot(spike_trains[1].spikes, np.ones_like(spike_trains[1].spikes), 'x') +plt.plot(spike_trains[0], np.ones_like(spike_trains[0]), 'o') +plt.plot(spike_trains[1], np.ones_like(spike_trains[1]), 'x') plt.plot(merged_spike_train.spikes, - 2*np.ones_like(merged_spike_train.spikes), 'o') + 2*np.ones_like(merged_spike_train), 'o') plt.show() diff --git a/examples/multivariate.py b/examples/multivariate.py index 93f8516..e9579a5 100644 --- a/examples/multivariate.py +++ b/examples/multivariate.py @@ -28,7 +28,7 @@ num_of_spikes = sum([len(spike_trains[i]) print("Number of spikes: %d" % num_of_spikes) # calculate the multivariate spike distance -f = spk.spike_profile_multi(spike_trains) +f = spk.spike_profile(spike_trains) t_spike = time.clock() @@ -39,7 +39,7 @@ print("Spike distance from average: %.8f" % avrg) t_avrg = time.clock() # compute average distance directly, should give the same result as above -spike_dist = spk.spike_distance_multi(spike_trains) +spike_dist = spk.spike_distance(spike_trains) print("Spike distance directly: %.8f" % spike_dist) t_dist = time.clock() diff --git a/examples/performance.py b/examples/performance.py index ec6c830..30691f8 100644 --- a/examples/performance.py +++ b/examples/performance.py @@ -31,38 +31,41 @@ for i in range(M): t_end = datetime.now() runtime = (t_end-t_start).total_seconds() +sort_by = 'tottime' +# sort_by = 'cumtime' + print("Spike generation runtime: %.3fs" % runtime) print() print("================ ISI COMPUTATIONS ================") print(" MULTIVARIATE DISTANCE") -cProfile.run('spk.isi_distance_multi(spike_trains)', 'performance.stat') +cProfile.run('spk.isi_distance(spike_trains)', 'performance.stat') p = pstats.Stats('performance.stat') -p.strip_dirs().sort_stats('tottime').print_stats(5) +p.strip_dirs().sort_stats(sort_by).print_stats(5) print(" MULTIVARIATE PROFILE") -cProfile.run('spk.isi_profile_multi(spike_trains)', 'performance.stat') +cProfile.run('spk.isi_profile(spike_trains)', 'performance.stat') p = pstats.Stats('performance.stat') -p.strip_dirs().sort_stats('tottime').print_stats(5) +p.strip_dirs().sort_stats(sort_by).print_stats(5) print("================ SPIKE COMPUTATIONS ================") print(" MULTIVARIATE DISTANCE") -cProfile.run('spk.spike_distance_multi(spike_trains)', 'performance.stat') +cProfile.run('spk.spike_distance(spike_trains)', 'performance.stat') p = pstats.Stats('performance.stat') -p.strip_dirs().sort_stats('tottime').print_stats(5) 
+p.strip_dirs().sort_stats(sort_by).print_stats(5) print(" MULTIVARIATE PROFILE") -cProfile.run('spk.spike_profile_multi(spike_trains)', 'performance.stat') +cProfile.run('spk.spike_profile(spike_trains)', 'performance.stat') p = pstats.Stats('performance.stat') -p.strip_dirs().sort_stats('tottime').print_stats(5) +p.strip_dirs().sort_stats(sort_by).print_stats(5) print("================ SPIKE-SYNC COMPUTATIONS ================") print(" MULTIVARIATE DISTANCE") -cProfile.run('spk.spike_sync_multi(spike_trains)', 'performance.stat') +cProfile.run('spk.spike_sync(spike_trains)', 'performance.stat') p = pstats.Stats('performance.stat') -p.strip_dirs().sort_stats('tottime').print_stats(5) +p.strip_dirs().sort_stats(sort_by).print_stats(5) print(" MULTIVARIATE PROFILE") -cProfile.run('spk.spike_sync_profile_multi(spike_trains)', 'performance.stat') +cProfile.run('spk.spike_sync_profile(spike_trains)', 'performance.stat') p = pstats.Stats('performance.stat') -p.strip_dirs().sort_stats('tottime').print_stats(5) +p.strip_dirs().sort_stats(sort_by).print_stats(5) diff --git a/examples/plot.py b/examples/plot.py index 1922939..a0e04da 100644 --- a/examples/plot.py +++ b/examples/plot.py @@ -24,7 +24,8 @@ spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", for (i, spike_train) in enumerate(spike_trains): plt.scatter(spike_train, i*np.ones_like(spike_train), marker='|') -f = spk.isi_profile(spike_trains[0], spike_trains[1]) +# profile of the first two spike trains +f = spk.isi_profile(spike_trains, indices=[0, 1]) x, y = f.get_plottable_data() plt.figure() @@ -32,7 +33,7 @@ plt.plot(x, np.abs(y), '--k', label="ISI-profile") print("ISI-distance: %.8f" % f.avrg()) -f = spk.spike_profile(spike_trains[0], spike_trains[1]) +f = spk.spike_profile(spike_trains, indices=[0, 1]) x, y = f.get_plottable_data() plt.plot(x, y, '-b', label="SPIKE-profile") diff --git a/examples/profiles.py b/examples/profiles.py index 05494bd..8412ffb 100644 --- a/examples/profiles.py +++ b/examples/profiles.py @@ -29,7 +29,7 @@ print("Average ISI distance:", f.avrg()) print() # compute the multivariate ISI profile -f = spk.isi_profile_multi(spike_trains) +f = spk.isi_profile(spike_trains) t = 1200 print("Multivariate ISI value at t =", t, ":", f(t)) @@ -56,7 +56,7 @@ print("Average SPIKE distance:", f.avrg()) print() # compute the multivariate SPIKE profile -f = spk.spike_profile_multi(spike_trains) +f = spk.spike_profile(spike_trains) # SPIKE values at certain points t = 1200 diff --git a/examples/spike_sync.py b/examples/spike_sync.py index 37dbff4..13ca0ce 100644 --- a/examples/spike_sync.py +++ b/examples/spike_sync.py @@ -31,7 +31,7 @@ plt.figure() plt.subplot(211) -f = spk.spike_sync_profile_multi(spike_trains) +f = spk.spike_sync_profile(spike_trains) x, y = f.get_plottable_data() plt.plot(x, y, '-b', alpha=0.7, label="SPIKE-Sync profile") -- cgit v1.2.3
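For reference, applying the hunks above to examples/plot.py yields roughly the following final state of the script (a sketch assembled from this log, assuming no commits outside it touched the file):

    """ plot.py

    Simple example showing how to load and plot spike trains and their distance
    profiles.

    Copyright 2014, Mario Mulansky

    Distributed under the BSD License
    """

    from __future__ import print_function

    import numpy as np
    import matplotlib.pyplot as plt

    import pyspike as spk


    spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
                                                   edges=(0, 4000))

    # plot the spike times
    for (i, spike_train) in enumerate(spike_trains):
        plt.scatter(spike_train, i*np.ones_like(spike_train), marker='|')

    # profile of the first two spike trains
    f = spk.isi_profile(spike_trains, indices=[0, 1])
    x, y = f.get_plottable_data()

    plt.figure()
    plt.plot(x, np.abs(y), '--k', label="ISI-profile")

    print("ISI-distance: %.8f" % f.avrg())

    f = spk.spike_profile(spike_trains, indices=[0, 1])
    x, y = f.get_plottable_data()

    plt.plot(x, y, '-b', label="SPIKE-profile")

    print("SPIKE-distance: %.8f" % f.avrg())

    plt.legend(loc="upper left")

    plt.show()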