multi_tetrode_example.py

  1. """
  2. Example for usecases.rst
  3. """
  4. from itertools import cycle
  5. import numpy as np
  6. from quantities import ms, mV, kHz
  7. import matplotlib.pyplot as plt
  8. from neo import Block, Segment, ChannelView, Group, SpikeTrain, AnalogSignal
  9. store_signals = False
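# If store_signals is True, the dummy AnalogSignals created below are also
# attached to each Segment and to the Groups; it is left False here so the
# example focuses on the spike-train bookkeeping.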

block = Block(name="probe data", tetrode_ids=["Tetrode #1", "Tetrode #2"])
block.segments = [Segment(name="trial #1", index=0),
                  Segment(name="trial #2", index=1),
                  Segment(name="trial #3", index=2)]

n_units = {
    "Tetrode #1": 2,
    "Tetrode #2": 5
}

# Create a group for each neuron, annotate each group with the tetrode from which it was recorded
groups = []
counter = 0
for tetrode_id, n in n_units.items():
    groups.extend(
        [Group(name=f"neuron #{counter + i + 1}", tetrode_id=tetrode_id)
         for i in range(n)]
    )
    counter += n
block.groups.extend(groups)
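# This produces seven groups: "neuron #1" and "neuron #2" annotated with
# "Tetrode #1", and "neuron #3" through "neuron #7" annotated with "Tetrode #2".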
iter_group = cycle(groups)
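# cycle() yields the groups in creation order and then repeats, so the nested
# loop below hands each new spike train to the group for the matching unit,
# trial after trial.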

# Create dummy data, one segment at a time
for segment in block.segments:
    segment.block = block

    # create two 4-channel AnalogSignals with dummy data
    signals = {
        "Tetrode #1": AnalogSignal(np.random.rand(1000, 4) * mV,
                                   sampling_rate=10 * kHz, tetrode_id="Tetrode #1"),
        "Tetrode #2": AnalogSignal(np.random.rand(1000, 4) * mV,
                                   sampling_rate=10 * kHz, tetrode_id="Tetrode #2")
    }
    if store_signals:
        segment.analogsignals.extend(signals.values())
        for signal in signals.values():  # iterate over the signals, not the dict keys
            signal.segment = segment

    # create spike trains with dummy data
    # we will pretend the spikes have been extracted from the dummy signal
    for tetrode_id in ("Tetrode #1", "Tetrode #2"):
        for i in range(n_units[tetrode_id]):
            spiketrain = SpikeTrain(np.random.uniform(0, 100, size=30) * ms, t_stop=100 * ms)
            # assign each spiketrain to the appropriate segment
            segment.spiketrains.append(spiketrain)
            spiketrain.segment = segment
            # assign each spiketrain to a given neuron
            current_group = next(iter_group)
            current_group.add(spiketrain)
            if store_signals:
                # add to the group a reference to the signal from which the spikes were obtained
                # this does not give a 1:1 correspondence between spike trains and signals;
                # for that we could use additional groups (and have groups of groups)
                current_group.add(signals[tetrode_id])
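
# Quick sanity check (not part of the original example): with three trials and
# 2 + 5 = 7 units we expect seven spike trains per segment and three per group
# (one per trial).
assert all(len(seg.spiketrains) == sum(n_units.values()) for seg in block.segments)
assert all(len(group.spiketrains) == len(block.segments) for group in block.groups)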

# Now plot the data

# .. by trial
plt.figure()
for seg in block.segments:
    print(f"Analyzing segment {seg.index}")
    stlist = [st - st.t_start for st in seg.spiketrains]
    plt.subplot(len(block.segments), 1, seg.index + 1)
    count, bins = np.histogram(stlist)
    plt.bar(bins[:-1], count, width=bins[1] - bins[0])
    plt.title(f"PSTH in segment {seg.index}")
plt.show()
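# The per-neuron view below pools each unit's spike trains across all three
# trials, since each Group holds the spike trains assigned to it in every segment.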

# .. by neuron
plt.figure()
for i, group in enumerate(block.groups):
    stlist = [st - st.t_start for st in group.spiketrains]
    plt.subplot(len(block.groups), 1, i + 1)
    count, bins = np.histogram(stlist)
    plt.bar(bins[:-1], count, width=bins[1] - bins[0])
    plt.title(f"PSTH of unit {group.name}")
plt.show()
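# The per-tetrode view uses Block.filter() to collect every Group whose
# tetrode_id annotation matches, i.e. all units recorded on that tetrode.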

# .. by tetrode
plt.figure()
for i, tetrode_id in enumerate(block.annotations["tetrode_ids"]):
    stlist = []
    for unit in block.filter(objects=Group, tetrode_id=tetrode_id):
        stlist.extend([st - st.t_start for st in unit.spiketrains])
    plt.subplot(2, 1, i + 1)
    count, bins = np.histogram(stlist)
    plt.bar(bins[:-1], count, width=bins[1] - bins[0])
    plt.title(f"PSTH blend of tetrode {tetrode_id}")
plt.show()
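
# A minimal sketch (not part of the original example) of persisting the Block
# to disk with one of Neo's IO classes; PickleIO is used here because it needs
# no extra dependency, and the filename is arbitrary.
from neo.io import PickleIO

io = PickleIO(filename="multi_tetrode_example.pkl")
io.write_block(block)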