# proxyobjects.py
"""
Here is a list of proxy objects that can be used when lazy=True at the neo.io level.
The idea is to be able to postpone the real in-memory loading
for objects that contain big data (AnalogSignal, SpikeTrain, Event, Epoch).
The implementation relies on neo.rawio, so it will be available only for neo.io
classes that inherit from neo.rawio.
"""
import logging

import numpy as np
import quantities as pq

from neo.core import AnalogSignal, Epoch, Event, SpikeTrain
from neo.core.baseneo import BaseNeo
from neo.core.dataobject import ArrayDict

logger = logging.getLogger("Neo")
  16. class BaseProxy(BaseNeo):
  17. def __init__(self, array_annotations=None, **annotations):
  18. # this for py27 str vs py3 str in neo attributes ompatibility
  19. annotations = check_annotations(annotations)
  20. if 'file_origin' not in annotations:
  21. # the str is to make compatible with neo_py27 where attribute
  22. # used to be str so raw bytes
  23. annotations['file_origin'] = str(self._rawio.source_name())
  24. # this mock the array annotaions to avoid inherits DataObject
  25. self.array_annotations = ArrayDict(self.shape[-1])
  26. if array_annotations is not None:
  27. self.array_annotations.update(array_annotations)
  28. BaseNeo.__init__(self, **annotations)
  29. def load(self, time_slice=None, **kwargs):
  30. # should be implemented by subclass
  31. raise NotImplementedError
  32. def time_slice(self, t_start, t_stop):
  33. '''
  34. Load the proxy object within the specified time range. Has the same
  35. call signature as AnalogSignal.time_slice, Epoch.time_slice, etc.
  36. '''
  37. return self.load(time_slice=(t_start, t_stop))
  38. class AnalogSignalProxy(BaseProxy):
  39. '''
  40. This object mimic AnalogSignal except that it does not
  41. have the signals array itself. All attributes and annotations are here.
  42. The goal is to postpone the loading of data into memory
  43. when reading a file with the new lazy load system based
  44. on neo.rawio.
  45. This object must not be constructed directly but is given
  46. neo.io when lazy=True instead of a true AnalogSignal.
  47. The AnalogSignalProxy is able to load:
  48. * only a slice of time
  49. * only a subset of channels
  50. * have an internal raw magnitude identic to the file (int16) with
  51. a pq.CompoundUnit().
  52. Usage:
  53. >>> proxy_anasig = AnalogSignalProxy(rawio=self.reader,
  54. global_channel_indexes=None,
  55. block_index=0,
  56. seg_index=0)
  57. >>> anasig = proxy_anasig.load()
  58. >>> slice_of_anasig = proxy_anasig.load(time_slice=(1.*pq.s, 2.*pq.s))
  59. >>> some_channel_of_anasig = proxy_anasig.load(channel_indexes=[0,5,10])
  60. '''
  61. _single_parent_objects = ('Segment', 'ChannelIndex')
  62. _necessary_attrs = (('sampling_rate', pq.Quantity, 0),
  63. ('t_start', pq.Quantity, 0))
  64. _recommended_attrs = BaseNeo._recommended_attrs
  65. proxy_for = AnalogSignal
  66. def __init__(self, rawio=None, global_channel_indexes=None, block_index=0, seg_index=0):
  67. self._rawio = rawio
  68. self._block_index = block_index
  69. self._seg_index = seg_index
  70. if global_channel_indexes is None:
  71. global_channel_indexes = slice(None)
  72. total_nb_chan = self._rawio.header['signal_channels'].size
  73. self._global_channel_indexes = np.arange(total_nb_chan)[global_channel_indexes]
  74. self._nb_chan = self._global_channel_indexes.size
  75. sig_chans = self._rawio.header['signal_channels'][self._global_channel_indexes]
  76. assert np.unique(sig_chans['units']).size == 1, 'Channel do not have same units'
  77. assert np.unique(sig_chans['dtype']).size == 1, 'Channel do not have same dtype'
  78. assert np.unique(sig_chans['sampling_rate']).size == 1, \
  79. 'Channel do not have same sampling_rate'
  80. self.units = ensure_signal_units(sig_chans['units'][0])
  81. self.dtype = sig_chans['dtype'][0]
  82. self.sampling_rate = sig_chans['sampling_rate'][0] * pq.Hz
  83. self.sampling_period = 1. / self.sampling_rate
  84. sigs_size = self._rawio.get_signal_size(block_index=block_index, seg_index=seg_index,
  85. channel_indexes=self._global_channel_indexes)
  86. self.shape = (sigs_size, self._nb_chan)
  87. self.t_start = self._rawio.get_signal_t_start(block_index, seg_index,
  88. self._global_channel_indexes) * pq.s
  89. # magnitude_mode='raw' is supported only if all offset=0
  90. # and all gain are the same
  91. support_raw_magnitude = np.all(sig_chans['gain'] == sig_chans['gain'][0]) and \
  92. np.all(sig_chans['offset'] == 0.)
  93. if support_raw_magnitude:
  94. str_units = ensure_signal_units(sig_chans['units'][0]).units.dimensionality.string
  95. self._raw_units = pq.CompoundUnit('{}*{}'.format(sig_chans['gain'][0], str_units))
  96. else:
  97. self._raw_units = None
  98. # both necessary attr and annotations
  99. annotations = {}
  100. annotations['name'] = self._make_name(None)
  101. if len(sig_chans) == 1:
  102. # when only one channel raw_annotations are set to standart annotations
  103. d = self._rawio.raw_annotations['blocks'][block_index]['segments'][seg_index][
  104. 'signals'][self._global_channel_indexes[0]]
  105. annotations.update(d)
  106. array_annotations = {
  107. 'channel_names': np.array(sig_chans['name'], copy=True),
  108. 'channel_ids': np.array(sig_chans['id'], copy=True),
  109. }
  110. # array annotations for signal can be at 2 places
  111. # global at signal channel level
  112. d = self._rawio.raw_annotations['signal_channels']
  113. array_annotations.update(create_analogsignal_array_annotations(
  114. d, self._global_channel_indexes))
  115. # or specific to block/segment/signals
  116. d = self._rawio.raw_annotations['blocks'][block_index]['segments'][seg_index]['signals']
  117. array_annotations.update(create_analogsignal_array_annotations(
  118. d, self._global_channel_indexes))
  119. BaseProxy.__init__(self, array_annotations=array_annotations, **annotations)
  120. def _make_name(self, channel_indexes):
  121. sig_chans = self._rawio.header['signal_channels'][self._global_channel_indexes]
  122. if channel_indexes is not None:
  123. sig_chans = sig_chans[channel_indexes]
  124. if len(sig_chans) == 1:
  125. name = sig_chans['name'][0]
  126. else:
  127. name = 'Channel bundle ({}) '.format(','.join(sig_chans['name']))
  128. return name
  129. @property
  130. def duration(self):
  131. '''Signal duration'''
  132. return self.shape[0] / self.sampling_rate
  133. @property
  134. def t_stop(self):
  135. '''Time when signal ends'''
  136. return self.t_start + self.duration
  137. def load(self, time_slice=None, strict_slicing=True,
  138. channel_indexes=None, magnitude_mode='rescaled'):
  139. '''
  140. *Args*:
  141. :time_slice: None or tuple of the time slice expressed with quantities.
  142. None is the entire signal.
  143. :channel_indexes: None or list. Channels to load. None is all channels
  144. Be carefull that channel_indexes represent the local channel index inside
  145. the AnalogSignal and not the global_channel_indexes like in rawio.
  146. :magnitude_mode: 'rescaled' or 'raw'.
  147. For instance if the internal dtype is int16:
  148. * **rescaled** give [1.,2.,3.]*pq.uV and the dtype is float32
  149. * **raw** give [10, 20, 30]*pq.CompoundUnit('0.1*uV')
  150. The CompoundUnit with magnitude_mode='raw' is usefull to
  151. postpone the scaling when needed and having an internal dtype=int16
  152. but it less intuitive when you don't know so well quantities.
  153. :strict_slicing: True by default.
  154. Control if an error is raise or not when one of time_slice member
  155. (t_start or t_stop) is outside the real time range of the segment.
  156. '''
  157. if channel_indexes is None:
  158. channel_indexes = slice(None)
  159. sr = self.sampling_rate
  160. if time_slice is None:
  161. i_start, i_stop = None, None
  162. sig_t_start = self.t_start
  163. else:
  164. t_start, t_stop = time_slice
  165. if t_start is None:
  166. i_start = None
  167. sig_t_start = self.t_start
  168. else:
  169. t_start = ensure_second(t_start)
  170. if strict_slicing:
  171. assert self.t_start <= t_start <= self.t_stop, 't_start is outside'
  172. else:
  173. t_start = max(t_start, self.t_start)
  174. # the i_start is ncessary ceil
  175. i_start = int(np.ceil((t_start - self.t_start).magnitude * sr.magnitude))
  176. # this needed to get the real t_start of the first sample
  177. # because do not necessary match what is demanded
  178. sig_t_start = self.t_start + i_start / sr
  179. if t_stop is None:
  180. i_stop = None
  181. else:
  182. t_stop = ensure_second(t_stop)
  183. if strict_slicing:
  184. assert self.t_start <= t_stop <= self.t_stop, 't_stop is outside'
  185. else:
  186. t_stop = min(t_stop, self.t_stop)
  187. i_stop = int((t_stop - self.t_start).magnitude * sr.magnitude)
  188. raw_signal = self._rawio.get_analogsignal_chunk(block_index=self._block_index,
  189. seg_index=self._seg_index, i_start=i_start, i_stop=i_stop,
  190. channel_indexes=self._global_channel_indexes[channel_indexes])
  191. # if slice in channel : change name and array_annotations
  192. if raw_signal.shape[1] != self._nb_chan:
  193. name = self._make_name(channel_indexes)
  194. array_annotations = {k: v[channel_indexes] for k, v in self.array_annotations.items()}
  195. else:
  196. name = self.name
  197. array_annotations = self.array_annotations
  198. if magnitude_mode == 'raw':
  199. assert self._raw_units is not None,\
  200. 'raw magnitude is not support gain are not the same for all channel or offset is not 0'
  201. sig = raw_signal
  202. units = self._raw_units
  203. elif magnitude_mode == 'rescaled':
  204. # dtype is float32 when internally it is float32 or int16
  205. if self.dtype == 'float64':
  206. dtype = 'float64'
  207. else:
  208. dtype = 'float32'
  209. sig = self._rawio.rescale_signal_raw_to_float(raw_signal, dtype=dtype,
  210. channel_indexes=self._global_channel_indexes[channel_indexes])
  211. units = self.units
  212. anasig = AnalogSignal(sig, units=units, copy=False, t_start=sig_t_start,
  213. sampling_rate=self.sampling_rate, name=name,
  214. file_origin=self.file_origin, description=self.description,
  215. array_annotations=array_annotations, **self.annotations)
  216. return anasig
  217. class SpikeTrainProxy(BaseProxy):
  218. '''
  219. This object mimic SpikeTrain except that it does not
  220. have the spike time nor waveforms.
  221. All attributes and annotations are here.
  222. The goal is to postpone the loading of data into memory
  223. when reading a file with the new lazy load system based
  224. on neo.rawio.
  225. This object must not be constructed directly but is given
  226. neo.io when lazy=True instead of a true SpikeTrain.
  227. The SpikeTrainProxy is able to load:
  228. * only a slice of time
  229. * load wveforms or not.
  230. * have an internal raw magnitude identic to the file (generally the ticks
  231. of clock in int64) or the rescale to seconds.
  232. Usage:
  233. >>> proxy_sptr = SpikeTrainProxy(rawio=self.reader, unit_channel=0,
  234. block_index=0, seg_index=0,)
  235. >>> sptr = proxy_sptr.load()
  236. >>> slice_of_sptr = proxy_sptr.load(time_slice=(1.*pq.s, 2.*pq.s))
  237. '''
  238. _single_parent_objects = ('Segment', 'Unit')
  239. _quantity_attr = 'times'
  240. _necessary_attrs = (('t_start', pq.Quantity, 0),
  241. ('t_stop', pq.Quantity, 0))
  242. _recommended_attrs = ()
  243. proxy_for = SpikeTrain
  244. def __init__(self, rawio=None, unit_index=None, block_index=0, seg_index=0):
  245. self._rawio = rawio
  246. self._block_index = block_index
  247. self._seg_index = seg_index
  248. self._unit_index = unit_index
  249. nb_spike = self._rawio.spike_count(block_index=block_index, seg_index=seg_index,
  250. unit_index=unit_index)
  251. self.shape = (nb_spike, )
  252. self.t_start = self._rawio.segment_t_start(block_index, seg_index) * pq.s
  253. self.t_stop = self._rawio.segment_t_stop(block_index, seg_index) * pq.s
  254. # both necessary attr and annotations
  255. annotations = {}
  256. for k in ('name', 'id'):
  257. annotations[k] = self._rawio.header['unit_channels'][unit_index][k]
  258. ann = self._rawio.raw_annotations['blocks'][block_index]['segments'][seg_index]['units'][unit_index]
  259. annotations.update(ann)
  260. h = self._rawio.header['unit_channels'][unit_index]
  261. wf_sampling_rate = h['wf_sampling_rate']
  262. if not np.isnan(wf_sampling_rate) and wf_sampling_rate > 0:
  263. self.sampling_rate = wf_sampling_rate * pq.Hz
  264. self.left_sweep = (h['wf_left_sweep'] / self.sampling_rate).rescale('s')
  265. self._wf_units = ensure_signal_units(h['wf_units'])
  266. else:
  267. self.sampling_rate = None
  268. self.left_sweep = None
  269. BaseProxy.__init__(self, **annotations)
  270. def load(self, time_slice=None, strict_slicing=True,
  271. magnitude_mode='rescaled', load_waveforms=False):
  272. '''
  273. *Args*:
  274. :time_slice: None or tuple of the time slice expressed with quantities.
  275. None is the entire signal.
  276. :strict_slicing: True by default.
  277. Control if an error is raise or not when one of time_slice
  278. member (t_start or t_stop) is outside the real time range of the segment.
  279. :magnitude_mode: 'rescaled' or 'raw'.
  280. :load_waveforms: bool load waveforms or not.
  281. '''
  282. t_start, t_stop = consolidate_time_slice(time_slice, self.t_start,
  283. self.t_stop, strict_slicing)
  284. _t_start, _t_stop = prepare_time_slice(time_slice)
  285. spike_timestamps = self._rawio.get_spike_timestamps(block_index=self._block_index,
  286. seg_index=self._seg_index, unit_index=self._unit_index, t_start=_t_start,
  287. t_stop=_t_stop)
  288. if magnitude_mode == 'raw':
  289. # we must modify a bit the neo.rawio interface to also read the spike_timestamps
  290. # underlying clock wich is not always same as sigs
  291. raise(NotImplementedError)
  292. elif magnitude_mode == 'rescaled':
  293. dtype = 'float64'
  294. spike_times = self._rawio.rescale_spike_timestamp(spike_timestamps, dtype=dtype)
  295. units = 's'
  296. if load_waveforms:
  297. assert self.sampling_rate is not None, 'Do not have waveforms'
  298. raw_wfs = self._rawio.get_spike_raw_waveforms(block_index=self._block_index,
  299. seg_index=self._seg_index, unit_index=self._unit_index,
  300. t_start=_t_start, t_stop=_t_stop)
  301. if magnitude_mode == 'rescaled':
  302. float_wfs = self._rawio.rescale_waveforms_to_float(raw_wfs,
  303. dtype='float32', unit_index=self._unit_index)
  304. waveforms = pq.Quantity(float_wfs, units=self._wf_units,
  305. dtype='float32', copy=False)
  306. elif magnitude_mode == 'raw':
  307. # could code also CompundUnit here but it is over killed
  308. # so we used dimentionless
  309. waveforms = pq.Quantity(raw_wfs, units='',
  310. dtype=raw_wfs.dtype, copy=False)
  311. else:
  312. waveforms = None
  313. sptr = SpikeTrain(spike_times, t_stop, units=units, dtype=dtype,
  314. t_start=t_start, copy=False, sampling_rate=self.sampling_rate,
  315. waveforms=waveforms, left_sweep=self.left_sweep, name=self.name,
  316. file_origin=self.file_origin, description=self.description, **self.annotations)
  317. return sptr
  318. class _EventOrEpoch(BaseProxy):
  319. _single_parent_objects = ('Segment',)
  320. _quantity_attr = 'times'
  321. def __init__(self, rawio=None, event_channel_index=None, block_index=0, seg_index=0):
  322. self._rawio = rawio
  323. self._block_index = block_index
  324. self._seg_index = seg_index
  325. self._event_channel_index = event_channel_index
  326. nb_event = self._rawio.event_count(block_index=block_index, seg_index=seg_index,
  327. event_channel_index=event_channel_index)
  328. self.shape = (nb_event, )
  329. self.t_start = self._rawio.segment_t_start(block_index, seg_index) * pq.s
  330. self.t_stop = self._rawio.segment_t_stop(block_index, seg_index) * pq.s
  331. # both necessary attr and annotations
  332. annotations = {}
  333. for k in ('name', 'id'):
  334. annotations[k] = self._rawio.header['event_channels'][event_channel_index][k]
  335. ann = self._rawio.raw_annotations['blocks'][block_index]['segments'][seg_index]['events'][event_channel_index]
  336. annotations.update(ann)
  337. BaseProxy.__init__(self, **annotations)
  338. def load(self, time_slice=None, strict_slicing=True):
  339. '''
  340. *Args*:
  341. :time_slice: None or tuple of the time slice expressed with quantities.
  342. None is the entire signal.
  343. :strict_slicing: True by default.
  344. Control if an error is raise or not when one of time_slice member (t_start or t_stop)
  345. is outside the real time range of the segment.
  346. '''
  347. t_start, t_stop = consolidate_time_slice(time_slice, self.t_start,
  348. self.t_stop, strict_slicing)
  349. _t_start, _t_stop = prepare_time_slice(time_slice)
  350. timestamp, durations, labels = self._rawio.get_event_timestamps(block_index=self._block_index,
  351. seg_index=self._seg_index, event_channel_index=self._event_channel_index,
  352. t_start=_t_start, t_stop=_t_stop)
  353. dtype = 'float64'
  354. times = self._rawio.rescale_event_timestamp(timestamp, dtype=dtype)
  355. units = 's'
  356. if durations is not None:
  357. durations = self._rawio.rescale_epoch_duration(durations, dtype=dtype) * pq.s
  358. h = self._rawio.header['event_channels'][self._event_channel_index]
  359. if h['type'] == b'event':
  360. ret = Event(times=times, labels=labels, units='s',
  361. name=self.name, file_origin=self.file_origin,
  362. description=self.description, **self.annotations)
  363. elif h['type'] == b'epoch':
  364. ret = Epoch(times=times, durations=durations, labels=labels,
  365. units='s',
  366. name=self.name, file_origin=self.file_origin,
  367. description=self.description, **self.annotations)
  368. return ret
  369. class EventProxy(_EventOrEpoch):
  370. '''
  371. This object mimic Event except that it does not
  372. have the times nor labels.
  373. All other attributes and annotations are here.
  374. The goal is to postpone the loading of data into memory
  375. when reading a file with the new lazy load system based
  376. on neo.rawio.
  377. This object must not be constructed directly but is given
  378. neo.io when lazy=True instead of a true Event.
  379. The EventProxy is able to load:
  380. * only a slice of time
  381. Usage:
  382. >>> proxy_event = EventProxy(rawio=self.reader, event_channel_index=0,
  383. block_index=0, seg_index=0,)
  384. >>> event = proxy_event.load()
  385. >>> slice_of_event = proxy_event.load(time_slice=(1.*pq.s, 2.*pq.s))
  386. '''
  387. _necessary_attrs = (('times', pq.Quantity, 1),
  388. ('labels', np.ndarray, 1, np.dtype('U')))
  389. proxy_for = Event
  390. class EpochProxy(_EventOrEpoch):
  391. '''
  392. This object mimic Epoch except that it does not
  393. have the times nor labels nor durations.
  394. All other attributes and annotations are here.
  395. The goal is to postpone the loading of data into memory
  396. when reading a file with the new lazy load system based
  397. on neo.rawio.
  398. This object must not be constructed directly but is given
  399. neo.io when lazy=True instead of a true Epoch.
  400. The EpochProxy is able to load:
  401. * only a slice of time
  402. Usage:
  403. >>> proxy_epoch = EpochProxy(rawio=self.reader, event_channel_index=0,
  404. block_index=0, seg_index=0,)
  405. >>> epoch = proxy_epoch.load()
  406. >>> slice_of_epoch = proxy_epoch.load(time_slice=(1.*pq.s, 2.*pq.s))
  407. '''
  408. _necessary_attrs = (('times', pq.Quantity, 1),
  409. ('durations', pq.Quantity, 1),
  410. ('labels', np.ndarray, 1, np.dtype('U')))
  411. proxy_for = Epoch
  412. proxyobjectlist = [AnalogSignalProxy, SpikeTrainProxy, EventProxy,
  413. EpochProxy]
  414. unit_convert = {'Volts': 'V', 'volts': 'V', 'Volt': 'V',
  415. 'volt': 'V', ' Volt': 'V', 'microV': 'uV',
  416. # note that "micro" and "mu" are two different characters in Unicode
  417. # although they mostly look the same. Here we accept both.
  418. 'µV': 'uV', 'μV': 'uV'}
  419. def ensure_signal_units(units):
  420. # test units
  421. units = units.replace(' ', '')
  422. if units in unit_convert:
  423. units = unit_convert[units]
  424. try:
  425. units = pq.Quantity(1, units)
  426. except:
  427. logger.warning('Units "{}" can not be converted to a quantity. Using dimensionless '
  428. 'instead'.format(units))
  429. units = ''
  430. units = pq.Quantity(1, units)
  431. return units
  432. def check_annotations(annotations):
  433. # force type to str for some keys
  434. # imposed for tests
  435. for k in ('name', 'description', 'file_origin'):
  436. if k in annotations:
  437. annotations[k] = str(annotations[k])
  438. if 'coordinates' in annotations:
  439. # some rawio expose some coordinates in annotations but is not standardized
  440. # (x, y, z) or polar, at the moment it is more resonable to remove them
  441. annotations.pop('coordinates')
  442. return annotations
  443. def ensure_second(v):
  444. if isinstance(v, float):
  445. return v * pq.s
  446. elif isinstance(v, pq.Quantity):
  447. return v.rescale('s')
  448. elif isinstance(v, int):
  449. return float(v) * pq.s
  450. def prepare_time_slice(time_slice):
  451. """
  452. This give clean time slice but keep None
  453. for calling rawio slice
  454. """
  455. if time_slice is None:
  456. t_start, t_stop = None, None
  457. else:
  458. t_start, t_stop = time_slice
  459. if t_start is not None:
  460. t_start = ensure_second(t_start).rescale('s').magnitude
  461. if t_stop is not None:
  462. t_stop = ensure_second(t_stop).rescale('s').magnitude
  463. return (t_start, t_stop)
  464. def consolidate_time_slice(time_slice, seg_t_start, seg_t_stop, strict_slicing):
  465. """
  466. This give clean time slice in quantity for t_start/t_stop of object
  467. None is replace by seg limit.
  468. """
  469. if time_slice is None:
  470. t_start, t_stop = None, None
  471. else:
  472. t_start, t_stop = time_slice
  473. if t_start is None:
  474. t_start = seg_t_start
  475. else:
  476. if strict_slicing:
  477. assert seg_t_start <= t_start <= seg_t_stop, 't_start is outside'
  478. else:
  479. t_start = max(t_start, seg_t_start)
  480. t_start = ensure_second(t_start)
  481. if t_stop is None:
  482. t_stop = seg_t_stop
  483. else:
  484. if strict_slicing:
  485. assert seg_t_start <= t_stop <= seg_t_stop, 't_stop is outside'
  486. else:
  487. t_stop = min(t_stop, seg_t_stop)
  488. t_stop = ensure_second(t_stop)
  489. return (t_start, t_stop)
  490. def create_analogsignal_array_annotations(sig_annotations, global_channel_indexes):
  491. """
  492. Create array_annotations from raw_annoations.
  493. Since raw_annotation are not np.array but nested dict, this func
  494. try to find keys in raw_annotation that are shared by all channel
  495. and make array_annotation with it.
  496. """
  497. # intersection of keys across channels
  498. common_keys = None
  499. for ind in global_channel_indexes:
  500. keys = [k for k, v in sig_annotations[ind].items() if not \
  501. isinstance(v, (list, tuple, np.ndarray))]
  502. if common_keys is None:
  503. common_keys = keys
  504. else:
  505. common_keys = [k for k in common_keys if k in keys]
  506. # this is redundant and done with other name
  507. for k in ['name', 'channel_id']:
  508. if k in common_keys:
  509. common_keys.remove(k)
  510. array_annotations = {}
  511. for k in common_keys:
  512. values = [sig_annotations[ind][k] for ind in global_channel_indexes]
  513. array_annotations[k] = np.array(values)
  514. return array_annotations