  1. """
  2. BaseFromRaw
  3. ======
  4. BaseFromRaw implement a bridge between the new neo.rawio API
  5. and the neo.io legacy that give neo.core object.
  6. The neo.rawio API is more restricted and limited and do not cover tricky
  7. cases with asymetrical tree of neo object.
  8. But if a format is done in neo.rawio the neo.io is done for free
  9. by inheritance of this class.
  10. Furthermore, IOs that inherits this BaseFromRaw also have the ability
  11. of the lazy load with proxy objects.
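
As a rough sketch (the class and module names below follow the ExampleRawIO /
ExampleIO pattern and are only illustrative), an IO built on a RawIO usually
boils down to::

    from neo.io.basefromrawio import BaseFromRaw
    from neo.rawio.examplerawio import ExampleRawIO

    class ExampleIO(ExampleRawIO, BaseFromRaw):
        # prefer 2D signals grouped by units rather than one signal per channel
        _prefered_signal_group_mode = 'group-by-same-units'

        def __init__(self, filename=''):
            ExampleRawIO.__init__(self, filename=filename)
            BaseFromRaw.__init__(self, filename)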
  12. """

import collections

import numpy as np

from neo import logging_handler
from neo.core import (AnalogSignal, Block,
                      Epoch, Event,
                      IrregularlySampledSignal,
                      Group,
                      Segment, SpikeTrain, Unit)
from neo.io.baseio import BaseIO
from neo.io.proxyobjects import (AnalogSignalProxy,
                                 SpikeTrainProxy, EventProxy, EpochProxy,
                                 ensure_signal_units, check_annotations,
                                 ensure_second, proxyobjectlist)

import quantities as pq


class BaseFromRaw(BaseIO):
  29. """
  30. This implement generic reader on top of RawIO reader.
  31. Arguments depend on `mode` (dir or file)
  32. File case::
  33. reader = BlackRockIO(filename='FileSpec2.3001.nev')
  34. Dir case::
  35. reader = NeuralynxIO(dirname='Cheetah_v5.7.4/original_data')
  36. Other arguments are IO specific.
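
    A minimal read pattern, assuming such a reader, looks like::

        reader = BlackRockIO(filename='FileSpec2.3001.nev')
        bl = reader.read_block(lazy=False)       # fully loaded neo.Block
        bl_lazy = reader.read_block(lazy=True)   # Block populated with proxy objects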
  37. """

    is_readable = True
    is_writable = False

    supported_objects = [Block, Segment, AnalogSignal,
                         SpikeTrain, Unit, Group, Event, Epoch]
    readable_objects = [Block, Segment]
    writeable_objects = []

    support_lazy = True

    name = 'BaseIO'
    description = ''
    extensions = []

    mode = 'file'

    _prefered_signal_group_mode = 'split-all'  # 'group-by-same-units'

    def __init__(self, *args, **kargs):
        BaseIO.__init__(self, *args, **kargs)
        self.parse_header()

    def read_block(self, block_index=0, lazy=False,
                   create_group_across_segment=None,
                   signal_group_mode=None, load_waveforms=False):
  56. """
  57. :param block_index: int default 0. In case of several block block_index can be specified.
  58. :param lazy: False by default.
  59. :param create_group_across_segment: bool or dict
  60. If True :
  61. * Create a neo.Group to group AnalogSignal segments
  62. * Create a neo.Group to group SpikeTrain across segments
  63. * Create a neo.Group to group Event across segments
  64. * Create a neo.Group to group Epoch across segments
  65. With a dict the behavior can be controlled more finely
  66. create_group_across_segment = { 'AnalogSignal': True, 'SpikeTrain': False, ...}
  67. :param signal_group_mode: 'split-all' or 'group-by-same-units' (default):
  68. This control behavior for grouping channels in AnalogSignal.
  69. * 'split-all': each channel will give an AnalogSignal
  70. * 'group-by-same-units' all channel sharing the same quantity units ar grouped in
  71. a 2D AnalogSignal
  72. :param load_waveforms: False by default. Control SpikeTrains.waveforms is None or not.
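
        A possible call, as a sketch (how the reader is constructed depends on the
        concrete IO)::

            bl = reader.read_block(
                lazy=False,
                signal_group_mode='group-by-same-units',
                create_group_across_segment={'AnalogSignal': True, 'SpikeTrain': True},
                load_waveforms=True)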
  73. """
        if signal_group_mode is None:
            signal_group_mode = self._prefered_signal_group_mode
            if self._prefered_signal_group_mode == 'split-all':
                self.logger.warning("the default signal_group_mode will change from "
                                    "'split-all' to 'group-by-same-units' in the next release")

        l = ['AnalogSignal', 'SpikeTrain', 'Event', 'Epoch']
        if create_group_across_segment is None:
            # @andrew @ julia @michael ?
            # I think here the default None could give this
            create_group_across_segment = {
                'AnalogSignal': True,  # because it mimics the old ChannelIndex for AnalogSignals
                'SpikeTrain': False,  # False by default because it can create too many objects for simulations
                'Event': False,  # not implemented yet
                'Epoch': False,  # not implemented yet
            }
        elif isinstance(create_group_across_segment, bool):
            # bool to dict
            v = create_group_across_segment
            create_group_across_segment = {k: v for k in l}
        elif isinstance(create_group_across_segment, dict):
            # set missing keys to False
            create_group_across_segment = {k: create_group_across_segment.get(k, False) for k in l}
        else:
            raise ValueError('create_group_across_segment must be bool or dict')

        # annotations
        bl_annotations = dict(self.raw_annotations['blocks'][block_index])
        bl_annotations.pop('segments')
        bl_annotations = check_annotations(bl_annotations)

        bl = Block(**bl_annotations)

        # Group for AnalogSignals
        if create_group_across_segment['AnalogSignal']:
            all_channels = self.header['signal_channels']
            channel_indexes_list = self.get_group_signal_channel_indexes()
            sig_groups = []
            for channel_index in channel_indexes_list:
                for i, (ind_within, ind_abs) in self._make_signal_channel_subgroups(
                        channel_index, signal_group_mode=signal_group_mode).items():
                    group = Group(name='AnalogSignal group {}'.format(i))
                    # @andrew @ julia @michael : do we annotate groups across segments with these arrays?
                    group.annotate(ch_names=all_channels[ind_abs]['name'].astype('U'))  # ??
                    group.annotate(channel_ids=all_channels[ind_abs]['id'])  # ??
                    bl.groups.append(group)
                    sig_groups.append(group)

        if create_group_across_segment['SpikeTrain']:
            unit_channels = self.header['unit_channels']
            st_groups = []
            for c in range(unit_channels.size):
                group = Group(name='SpikeTrain group {}'.format(c))
                group.annotate(unit_name=unit_channels[c]['name'])
                group.annotate(unit_id=unit_channels[c]['id'])
                unit_annotations = self.raw_annotations['unit_channels'][c]
                unit_annotations = check_annotations(unit_annotations)
                group.annotate(**unit_annotations)
                bl.groups.append(group)
                st_groups.append(group)

        if create_group_across_segment['Event']:
            # @andrew @ julia @michael :
            # Do we need this ? I guess yes
            raise NotImplementedError()

        if create_group_across_segment['Epoch']:
            # @andrew @ julia @michael :
            # Do we need this ? I guess yes
            raise NotImplementedError()

        # Read all segments
        for seg_index in range(self.segment_count(block_index)):
            seg = self.read_segment(block_index=block_index, seg_index=seg_index,
                                    lazy=lazy, signal_group_mode=signal_group_mode,
                                    load_waveforms=load_waveforms)
            bl.segments.append(seg)

        # create links between groups (across segments) and data objects
        for seg in bl.segments:
            if create_group_across_segment['AnalogSignal']:
                for c, anasig in enumerate(seg.analogsignals):
                    sig_groups[c].add(anasig)
            if create_group_across_segment['SpikeTrain']:
                for c, sptr in enumerate(seg.spiketrains):
                    st_groups[c].add(sptr)

        bl.create_many_to_one_relationship()

        return bl

    def read_segment(self, block_index=0, seg_index=0, lazy=False,
                     signal_group_mode=None, load_waveforms=False, time_slice=None,
                     strict_slicing=True):
  156. """
  157. :param block_index: int default 0. In case of several blocks block_index can be specified.
  158. :param seg_index: int default 0. Index of segment.
  159. :param lazy: False by default.
  160. :param signal_group_mode: 'split-all' or 'group-by-same-units' (default depend IO):
  161. This control behavior for grouping channels in AnalogSignal.
  162. * 'split-all': each channel will give an AnalogSignal
  163. * 'group-by-same-units' all channel sharing the same quantity units ar grouped in
  164. a 2D AnalogSignal
  165. :param load_waveforms: False by default. Control SpikeTrains.waveforms is None or not.
  166. :param time_slice: None by default means no limit.
  167. A time slice is (t_start, t_stop) both are quantities.
  168. All object AnalogSignal, SpikeTrain, Event, Epoch will load only in the slice.
  169. :param strict_slicing: True by default.
  170. Control if an error is raised or not when t_start or t_stop
  171. is outside the real time range of the segment.
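
        As an illustration (how the reader is constructed depends on the concrete IO),
        a lazy read followed by a time-sliced load could look like::

            import quantities as pq

            seg = reader.read_segment(block_index=0, seg_index=0, lazy=True)
            proxy = seg.analogsignals[0]  # AnalogSignalProxy, no data read yet
            anasig = proxy.load(time_slice=(1 * pq.s, 2 * pq.s))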
  172. """
        if lazy:
            assert time_slice is None, \
                'For lazy=True, give time_slice when calling load() on the proxy objects, not here'

        if signal_group_mode is None:
            signal_group_mode = self._prefered_signal_group_mode

        # annotations
        seg_annotations = dict(self.raw_annotations['blocks'][block_index]['segments'][seg_index])
        for k in ('signals', 'units', 'events'):
            seg_annotations.pop(k)
        seg_annotations = check_annotations(seg_annotations)

        seg = Segment(index=seg_index, **seg_annotations)

        # AnalogSignal
        signal_channels = self.header['signal_channels']

        if signal_channels.size > 0:
            channel_indexes_list = self.get_group_signal_channel_indexes()
            for channel_indexes in channel_indexes_list:
                for i, (ind_within, ind_abs) in self._make_signal_channel_subgroups(
                        channel_indexes,
                        signal_group_mode=signal_group_mode).items():
                    # make a proxy...
                    anasig = AnalogSignalProxy(rawio=self, global_channel_indexes=ind_abs,
                                               block_index=block_index, seg_index=seg_index)

                    if not lazy:
                        # ... and get the real AnalogSignal if not lazy
                        anasig = anasig.load(time_slice=time_slice, strict_slicing=strict_slicing)
                        # TODO magnitude_mode='rescaled'/'raw'

                    anasig.segment = seg
                    seg.analogsignals.append(anasig)

        # SpikeTrain and waveforms (optional)
        unit_channels = self.header['unit_channels']
        for unit_index in range(len(unit_channels)):
            # make a proxy...
            sptr = SpikeTrainProxy(rawio=self, unit_index=unit_index,
                                   block_index=block_index, seg_index=seg_index)

            if not lazy:
                # ... and get the real SpikeTrain if not lazy
                sptr = sptr.load(time_slice=time_slice, strict_slicing=strict_slicing,
                                 load_waveforms=load_waveforms)
                # TODO magnitude_mode='rescaled'/'raw'

            sptr.segment = seg
            seg.spiketrains.append(sptr)

        # Events/Epochs
        event_channels = self.header['event_channels']
        for chan_ind in range(len(event_channels)):
            if event_channels['type'][chan_ind] == b'event':
                e = EventProxy(rawio=self, event_channel_index=chan_ind,
                               block_index=block_index, seg_index=seg_index)
                if not lazy:
                    e = e.load(time_slice=time_slice, strict_slicing=strict_slicing)

                e.segment = seg
                seg.events.append(e)

            elif event_channels['type'][chan_ind] == b'epoch':
                e = EpochProxy(rawio=self, event_channel_index=chan_ind,
                               block_index=block_index, seg_index=seg_index)
                if not lazy:
                    e = e.load(time_slice=time_slice, strict_slicing=strict_slicing)

                e.segment = seg
                seg.epochs.append(e)

        seg.create_many_to_one_relationship()
        return seg

    def _make_signal_channel_subgroups(self, channel_indexes,
                                       signal_group_mode='group-by-same-units'):
  234. """
  235. For some RawIO channel are already splitted in groups.
  236. But in any cases, channel need to be splitted again in sub groups
  237. because they do not have the same units.
  238. They can also be splitted one by one to match previous behavior for
  239. some IOs in older version of neo (<=0.5).
  240. This method aggregate signal channels with same units or split them all.
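
        For instance (an illustrative sketch, not taken from a real file), three channels
        with units ['mV', 'mV', 'pA'] and channel_indexes=[0, 1, 2] would give:

          * 'group-by-same-units': {0: ([0, 1], [0, 1]), 1: ([2], [2])}
          * 'split-all':           {0: ([0], [0]), 1: ([1], [1]), 2: ([2], [2])}

        where each value is a (ind_within, ind_abs) pair of channel indexes.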
  241. """
        all_channels = self.header['signal_channels']
        if channel_indexes is None:
            channel_indexes = np.arange(all_channels.size, dtype=int)
        channels = all_channels[channel_indexes]

        groups = collections.OrderedDict()
        if signal_group_mode == 'group-by-same-units':
            all_units = np.unique(channels['units'])

            for i, unit in enumerate(all_units):
                ind_within, = np.nonzero(channels['units'] == unit)
                ind_abs = channel_indexes[ind_within]
                groups[i] = (ind_within, ind_abs)

        elif signal_group_mode == 'split-all':
            for i, chan_index in enumerate(channel_indexes):
                ind_within = [i]
                ind_abs = channel_indexes[ind_within]
                groups[i] = (ind_within, ind_abs)
        else:
            raise NotImplementedError()
        return groups