baserawio.py 26 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693
"""
baserawio
======
Classes
-------
BaseRawIO
abstract class which should be overridden to write a RawIO.
RawIO is a new API in neo that is supposed to give access as fast as possible
to raw data. All IOs with these characteristics should/could be rewritten:
* internally use of memmap (or hdf5)
* reading header is quite cheap (does not read the whole file)
* neo tree object is symmetric and logical: same channel/units/event
along all blocks and segments.
So this handles **only** one simplified but very frequent case of dataset:
* Only one channel set for AnalogSignal (aka ChannelIndex) stable along Segment
* Only one channel set for SpikeTrain (aka Unit) stable along Segment
* AnalogSignal have all the same sampling_rate across all Segment
* t_start/t_stop are the same for many objects (SpikeTrain, Event) inside a Segment
* AnalogSignal should all have the same sampling_rate, otherwise they won't be read
at the same time. So signal_group_mode=='split-all' in BaseFromRaw
A helper class `neo.io.basefromrawio.BaseFromRaw` transforms a RawIO into a
neo legacy IO for free.
With this API the IO has an attribute `header` with the necessary keys.
See ExampleRawIO as example.
BaseRawIO implements a possible persistent cache system that can be used
by some IOs to avoid a very long parse_header(). The idea is that some variables
or vectors can be stored somewhere (near the file, /tmp, any path).
"""
  29. # from __future__ import unicode_literals, print_function, division, absolute_import
  30. import logging
  31. import numpy as np
  32. import os
  33. import sys
  34. from neo import logging_handler
  35. try:
  36. import joblib
  37. HAVE_JOBLIB = True
  38. except ImportError:
  39. HAVE_JOBLIB = False
# All raw modes an IO can declare; checked against the class attribute BaseRawIO.rawmode.
possible_raw_modes = ['one-file', 'multi-file', 'one-dir', ]  # 'multi-dir', 'url', 'other'

# Standard error message for IOs to use when the header has not been parsed yet.
error_header = 'Header is not read yet, do parse_header() first'

# Structured dtype describing one analog signal channel in header['signal_channels'].
_signal_channel_dtype = [
    ('name', 'U64'),
    ('id', 'int64'),
    ('sampling_rate', 'float64'),
    ('dtype', 'U16'),
    ('units', 'U64'),
    ('gain', 'float64'),      # raw-to-physical scale factor
    ('offset', 'float64'),    # raw-to-physical additive offset
    ('group_id', 'int64'),    # explicit channel group
]

# Fields that must be identical for a set of channels to be read together in one chunk.
_common_sig_characteristics = ['sampling_rate', 'dtype', 'group_id']

# Structured dtype describing one unit (spike) channel in header['unit_channels'].
_unit_channel_dtype = [
    ('name', 'U64'),
    ('id', 'U64'),
    # for waveform
    ('wf_units', 'U64'),
    ('wf_gain', 'float64'),
    ('wf_offset', 'float64'),
    ('wf_left_sweep', 'int64'),
    ('wf_sampling_rate', 'float64'),
]

# Structured dtype describing one event/epoch channel in header['event_channels'].
_event_channel_dtype = [
    ('name', 'U64'),
    ('id', 'U64'),
    ('type', 'S5'),  # 'epoch' or 'event'
]
class BaseRawIO:
    """
    Generic class to handle fast, raw access to a data source.

    Subclasses implement the `_`-prefixed abstract methods and fill
    `self.header` in `_parse_header()`; the public methods here provide
    the common user-facing API on top of that header.
    """
    name = 'BaseIO'
    description = ''
    extensions = []           # file extensions handled by the subclass
    rawmode = None  # one key in possible_raw_modes
    def __init__(self, use_cache=False, cache_path='same_as_resource', **kargs):
        """
        :param use_cache: if True, use a joblib-based persistent cache to
            avoid re-doing expensive work in parse_header() (requires joblib).
        :param cache_path: 'same_as_resource', 'home', or an existing directory.

        When rawmode=='one-file' kargs MUST contains 'filename' the filename
        When rawmode=='multi-file' kargs MUST contains 'filename' one of the filenames.
        When rawmode=='one-dir' kargs MUST contains 'dirname' the dirname.
        """
        # create a logger for the IO class
        fullname = self.__class__.__module__ + '.' + self.__class__.__name__
        self.logger = logging.getLogger(fullname)
        # create a logger for 'neo' and add a handler to it if it doesn't
        # have one already.
        # (it will also not add one if the root logger has a handler)
        corename = self.__class__.__module__.split('.')[0]
        corelogger = logging.getLogger(corename)
        rootlogger = logging.getLogger()
        if not corelogger.handlers and not rootlogger.handlers:
            corelogger.addHandler(logging_handler)
        self.use_cache = use_cache
        if use_cache:
            # joblib is an optional dependency, only required when caching
            assert HAVE_JOBLIB, 'You need to install joblib for cache'
            self.setup_cache(cache_path)
        else:
            self._cache = None
        # filled by parse_header(); stays None until then
        self.header = None
    def parse_header(self):
        """
        This must parse the file header to get all stuff for fast use later on.
        This must create
        self.header['nb_block']
        self.header['nb_segment']
        self.header['signal_channels']
        self.header['unit_channels']
        self.header['event_channels']
        """
        self._parse_header()
        # Determine whether all signal channels share sampling_rate/dtype/group_id
        self._group_signal_channel_characteristics()

    def source_name(self):
        """Return fancy name of file source"""
        return self._source_name()
  115. def __repr__(self):
  116. txt = '{}: {}\n'.format(self.__class__.__name__, self.source_name())
  117. if self.header is not None:
  118. nb_block = self.block_count()
  119. txt += 'nb_block: {}\n'.format(nb_block)
  120. nb_seg = [self.segment_count(i) for i in range(nb_block)]
  121. txt += 'nb_segment: {}\n'.format(nb_seg)
  122. for k in ('signal_channels', 'unit_channels', 'event_channels'):
  123. ch = self.header[k]
  124. if len(ch) > 8:
  125. chantxt = "[{} ... {}]".format(', '.join(e for e in ch['name'][:4]),
  126. ' '.join(e for e in ch['name'][-4:]))
  127. else:
  128. chantxt = "[{}]".format(', '.join(e for e in ch['name']))
  129. txt += '{}: {}\n'.format(k, chantxt)
  130. return txt
  131. def _generate_minimal_annotations(self):
  132. """
  133. Helper function that generate a nested dict
  134. of all annotations.
  135. must be called when these are Ok:
  136. * block_count()
  137. * segment_count()
  138. * signal_channels_count()
  139. * unit_channels_count()
  140. * event_channels_count()
  141. Usage:
  142. raw_annotations['blocks'][block_index] = { 'nickname' : 'super block', 'segments' : ...}
  143. raw_annotations['blocks'][block_index] = { 'nickname' : 'super block', 'segments' : ...}
  144. raw_annotations['blocks'][block_index]['segments'][seg_index]['signals'][channel_index] = {'nickname': 'super channel'}
  145. raw_annotations['blocks'][block_index]['segments'][seg_index]['units'][unit_index] = {'nickname': 'super neuron'}
  146. raw_annotations['blocks'][block_index]['segments'][seg_index]['events'][ev_chan] = {'nickname': 'super trigger'}
  147. Theses annotations will be used at the neo.io API directly in objects.
  148. Standard annotation like name/id/file_origin are already generated here.
  149. """
  150. signal_channels = self.header['signal_channels']
  151. unit_channels = self.header['unit_channels']
  152. event_channels = self.header['event_channels']
  153. a = {'blocks': [], 'signal_channels': [], 'unit_channels': [], 'event_channels': []}
  154. for block_index in range(self.block_count()):
  155. d = {'segments': []}
  156. d['file_origin'] = self.source_name()
  157. a['blocks'].append(d)
  158. for seg_index in range(self.segment_count(block_index)):
  159. d = {'signals': [], 'units': [], 'events': []}
  160. d['file_origin'] = self.source_name()
  161. a['blocks'][block_index]['segments'].append(d)
  162. for c in range(signal_channels.size):
  163. # use for AnalogSignal.annotations
  164. d = {}
  165. d['name'] = signal_channels['name'][c]
  166. d['channel_id'] = signal_channels['id'][c]
  167. a['blocks'][block_index]['segments'][seg_index]['signals'].append(d)
  168. for c in range(unit_channels.size):
  169. # use for SpikeTrain.annotations
  170. d = {}
  171. d['name'] = unit_channels['name'][c]
  172. d['id'] = unit_channels['id'][c]
  173. a['blocks'][block_index]['segments'][seg_index]['units'].append(d)
  174. for c in range(event_channels.size):
  175. # use for Event.annotations
  176. d = {}
  177. d['name'] = event_channels['name'][c]
  178. d['id'] = event_channels['id'][c]
  179. d['file_origin'] = self._source_name()
  180. a['blocks'][block_index]['segments'][seg_index]['events'].append(d)
  181. for c in range(signal_channels.size):
  182. # use for ChannelIndex.annotations
  183. d = {}
  184. d['name'] = signal_channels['name'][c]
  185. d['channel_id'] = signal_channels['id'][c]
  186. d['file_origin'] = self._source_name()
  187. a['signal_channels'].append(d)
  188. for c in range(unit_channels.size):
  189. # use for Unit.annotations
  190. d = {}
  191. d['name'] = unit_channels['name'][c]
  192. d['id'] = unit_channels['id'][c]
  193. d['file_origin'] = self._source_name()
  194. a['unit_channels'].append(d)
  195. for c in range(event_channels.size):
  196. # not used in neo.io at the moment could usefull one day
  197. d = {}
  198. d['name'] = event_channels['name'][c]
  199. d['id'] = event_channels['id'][c]
  200. d['file_origin'] = self._source_name()
  201. a['event_channels'].append(d)
  202. self.raw_annotations = a
  203. def _raw_annotate(self, obj_name, chan_index=0, block_index=0, seg_index=0, **kargs):
  204. """
  205. Annotate an object in the list/dict tree annotations.
  206. """
  207. bl_annotations = self.raw_annotations['blocks'][block_index]
  208. seg_annotations = bl_annotations['segments'][seg_index]
  209. if obj_name == 'blocks':
  210. bl_annotations.update(kargs)
  211. elif obj_name == 'segments':
  212. seg_annotations.update(kargs)
  213. elif obj_name in ['signals', 'events', 'units']:
  214. obj_annotations = seg_annotations[obj_name][chan_index]
  215. obj_annotations.update(kargs)
  216. elif obj_name in ['signal_channels', 'unit_channels', 'event_channel']:
  217. obj_annotations = self.raw_annotations[obj_name][chan_index]
  218. obj_annotations.update(kargs)
  219. def _repr_annotations(self):
  220. txt = 'Raw annotations\n'
  221. for block_index in range(self.block_count()):
  222. bl_a = self.raw_annotations['blocks'][block_index]
  223. txt += '*Block {}\n'.format(block_index)
  224. for k, v in bl_a.items():
  225. if k in ('segments',):
  226. continue
  227. txt += ' -{}: {}\n'.format(k, v)
  228. for seg_index in range(self.segment_count(block_index)):
  229. seg_a = bl_a['segments'][seg_index]
  230. txt += ' *Segment {}\n'.format(seg_index)
  231. for k, v in seg_a.items():
  232. if k in ('signals', 'units', 'events',):
  233. continue
  234. txt += ' -{}: {}\n'.format(k, v)
  235. for child in ('signals', 'units', 'events'):
  236. n = self.header[child[:-1] + '_channels'].shape[0]
  237. for c in range(n):
  238. neo_name = {'signals': 'AnalogSignal',
  239. 'units': 'SpikeTrain', 'events': 'Event/Epoch'}[child]
  240. txt += ' *{} {}\n'.format(neo_name, c)
  241. child_a = seg_a[child][c]
  242. for k, v in child_a.items():
  243. txt += ' -{}: {}\n'.format(k, v)
  244. return txt
  245. def print_annotations(self):
  246. """Print formated raw_annotations"""
  247. print(self._repr_annotations())
  248. def block_count(self):
  249. """return number of blocks"""
  250. return self.header['nb_block']
  251. def segment_count(self, block_index):
  252. """return number of segment for a given block"""
  253. return self.header['nb_segment'][block_index]
  254. def signal_channels_count(self):
  255. """Return the number of signal channels.
  256. Same along all Blocks and Segments.
  257. """
  258. return len(self.header['signal_channels'])
  259. def unit_channels_count(self):
  260. """Return the number of unit (aka spike) channels.
  261. Same along all Blocks and Segment.
  262. """
  263. return len(self.header['unit_channels'])
  264. def event_channels_count(self):
  265. """Return the number of event/epoch channels.
  266. Same allong all Blocks and Segments.
  267. """
  268. return len(self.header['event_channels'])
  269. def segment_t_start(self, block_index, seg_index):
  270. """Global t_start of a Segment in s. Shared by all objects except
  271. for AnalogSignal.
  272. """
  273. return self._segment_t_start(block_index, seg_index)
  274. def segment_t_stop(self, block_index, seg_index):
  275. """Global t_start of a Segment in s. Shared by all objects except
  276. for AnalogSignal.
  277. """
  278. return self._segment_t_stop(block_index, seg_index)
  279. ###
  280. # signal and channel zone
  281. def _group_signal_channel_characteristics(self):
  282. """
  283. Useful for few IOs (TdtrawIO, NeuroExplorerRawIO, ...).
  284. Group signals channels by same characteristics:
  285. * sampling_rate (global along block and segment)
  286. * group_id (explicite channel group)
  287. If all channels have the same characteristics then
  288. `get_analogsignal_chunk` can be call wihtout restriction.
  289. If not, then **channel_indexes** must be specified
  290. in `get_analogsignal_chunk` and only channels with same
  291. characteristics can be read at the same time.
  292. This is useful for some IO than
  293. have internally several signals channels family.
  294. For many RawIO all channels have the same
  295. sampling_rate/size/t_start. In that cases, internal flag
  296. **self._several_channel_groups will be set to False, so
  297. `get_analogsignal_chunk(..)` won't suffer in performance.
  298. Note that at neo.io level this have an impact on
  299. `signal_group_mode`. 'split-all' will work in any situation
  300. But grouping channel in the same AnalogSignal
  301. with 'group-by-XXX' will depend on common characteristics
  302. of course.
  303. """
  304. characteristics = self.header['signal_channels'][_common_sig_characteristics]
  305. unique_characteristics = np.unique(characteristics)
  306. if len(unique_characteristics) == 1:
  307. self._several_channel_groups = False
  308. else:
  309. self._several_channel_groups = True
  310. def _check_common_characteristics(self, channel_indexes):
  311. """
  312. Useful for few IOs (TdtrawIO, NeuroExplorerRawIO, ...).
  313. Check that a set a signal channel_indexes share common
  314. characteristics (**sampling_rate/t_start/size**).
  315. Useful only when RawIO propose differents channels groups
  316. with different sampling_rate for instance.
  317. """
  318. # ~ print('_check_common_characteristics', channel_indexes)
  319. assert channel_indexes is not None, \
  320. 'You must specify channel_indexes'
  321. characteristics = self.header['signal_channels'][_common_sig_characteristics]
  322. # ~ print(characteristics[channel_indexes])
  323. assert np.unique(characteristics[channel_indexes]).size == 1, \
  324. 'This channel set has varied characteristics'
  325. def get_group_signal_channel_indexes(self):
  326. """
  327. Useful for few IOs (TdtrawIO, NeuroExplorerRawIO, ...).
  328. Return a list of channel_indexes than have same characteristics
  329. """
  330. if self._several_channel_groups:
  331. characteristics = self.header['signal_channels'][_common_sig_characteristics]
  332. unique_characteristics = np.unique(characteristics)
  333. channel_indexes_list = []
  334. for e in unique_characteristics:
  335. channel_indexes, = np.nonzero(characteristics == e)
  336. channel_indexes_list.append(channel_indexes)
  337. return channel_indexes_list
  338. else:
  339. return [None]
  340. def channel_name_to_index(self, channel_names):
  341. """
  342. Transform channel_names to channel_indexes.
  343. Based on self.header['signal_channels']
  344. """
  345. ch = self.header['signal_channels']
  346. channel_indexes, = np.nonzero(np.in1d(ch['name'], channel_names))
  347. assert len(channel_indexes) == len(channel_names), 'not match'
  348. return channel_indexes
  349. def channel_id_to_index(self, channel_ids):
  350. """
  351. Transform channel_ids to channel_indexes.
  352. Based on self.header['signal_channels']
  353. """
  354. ch = self.header['signal_channels']
  355. channel_indexes, = np.nonzero(np.in1d(ch['id'], channel_ids))
  356. assert len(channel_indexes) == len(channel_ids), 'not match'
  357. return channel_indexes
  358. def _get_channel_indexes(self, channel_indexes, channel_names, channel_ids):
  359. """
  360. Select channel_indexes from channel_indexes/channel_names/channel_ids
  361. depending which is not None.
  362. """
  363. if channel_indexes is None and channel_names is not None:
  364. channel_indexes = self.channel_name_to_index(channel_names)
  365. if channel_indexes is None and channel_ids is not None:
  366. channel_indexes = self.channel_id_to_index(channel_ids)
  367. return channel_indexes
  368. def get_signal_size(self, block_index, seg_index, channel_indexes=None):
  369. if self._several_channel_groups:
  370. self._check_common_characteristics(channel_indexes)
  371. return self._get_signal_size(block_index, seg_index, channel_indexes)
  372. def get_signal_t_start(self, block_index, seg_index, channel_indexes=None):
  373. if self._several_channel_groups:
  374. self._check_common_characteristics(channel_indexes)
  375. return self._get_signal_t_start(block_index, seg_index, channel_indexes)
  376. def get_signal_sampling_rate(self, channel_indexes=None):
  377. if self._several_channel_groups:
  378. self._check_common_characteristics(channel_indexes)
  379. chan_index0 = channel_indexes[0]
  380. else:
  381. chan_index0 = 0
  382. sr = self.header['signal_channels'][chan_index0]['sampling_rate']
  383. return float(sr)
  384. def get_analogsignal_chunk(self, block_index=0, seg_index=0, i_start=None, i_stop=None,
  385. channel_indexes=None, channel_names=None, channel_ids=None):
  386. """
  387. Return a chunk of raw signal.
  388. """
  389. channel_indexes = self._get_channel_indexes(channel_indexes, channel_names, channel_ids)
  390. if self._several_channel_groups:
  391. self._check_common_characteristics(channel_indexes)
  392. raw_chunk = self._get_analogsignal_chunk(
  393. block_index, seg_index, i_start, i_stop, channel_indexes)
  394. return raw_chunk
  395. def rescale_signal_raw_to_float(self, raw_signal, dtype='float32',
  396. channel_indexes=None, channel_names=None, channel_ids=None):
  397. channel_indexes = self._get_channel_indexes(channel_indexes, channel_names, channel_ids)
  398. if channel_indexes is None:
  399. channel_indexes = slice(None)
  400. channels = self.header['signal_channels'][channel_indexes]
  401. float_signal = raw_signal.astype(dtype)
  402. if np.any(channels['gain'] != 1.):
  403. float_signal *= channels['gain']
  404. if np.any(channels['offset'] != 0.):
  405. float_signal += channels['offset']
  406. return float_signal
  407. # spiketrain and unit zone
  408. def spike_count(self, block_index=0, seg_index=0, unit_index=0):
  409. return self._spike_count(block_index, seg_index, unit_index)
  410. def get_spike_timestamps(self, block_index=0, seg_index=0, unit_index=0,
  411. t_start=None, t_stop=None):
  412. """
  413. The timestamp is as close to the format itself. Sometimes float/int32/int64.
  414. Sometimes it is the index on the signal but not always.
  415. The conversion to second or index_on_signal is done outside here.
  416. t_start/t_sop are limits in seconds.
  417. """
  418. timestamp = self._get_spike_timestamps(block_index, seg_index, unit_index, t_start, t_stop)
  419. return timestamp
  420. def rescale_spike_timestamp(self, spike_timestamps, dtype='float64'):
  421. """
  422. Rescale spike timestamps to seconds.
  423. """
  424. return self._rescale_spike_timestamp(spike_timestamps, dtype)
  425. # spiketrain waveform zone
  426. def get_spike_raw_waveforms(self, block_index=0, seg_index=0, unit_index=0,
  427. t_start=None, t_stop=None):
  428. wf = self._get_spike_raw_waveforms(block_index, seg_index, unit_index, t_start, t_stop)
  429. return wf
  430. def rescale_waveforms_to_float(self, raw_waveforms, dtype='float32', unit_index=0):
  431. wf_gain = self.header['unit_channels']['wf_gain'][unit_index]
  432. wf_offset = self.header['unit_channels']['wf_offset'][unit_index]
  433. float_waveforms = raw_waveforms.astype(dtype)
  434. if wf_gain != 1.:
  435. float_waveforms *= wf_gain
  436. if wf_offset != 0.:
  437. float_waveforms += wf_offset
  438. return float_waveforms
  439. # event and epoch zone
  440. def event_count(self, block_index=0, seg_index=0, event_channel_index=0):
  441. return self._event_count(block_index, seg_index, event_channel_index)
  442. def get_event_timestamps(self, block_index=0, seg_index=0, event_channel_index=0,
  443. t_start=None, t_stop=None):
  444. """
  445. The timestamp is as close to the format itself. Sometimes float/int32/int64.
  446. Sometimes it is the index on the signal but not always.
  447. The conversion to second or index_on_signal is done outside here.
  448. t_start/t_sop are limits in seconds.
  449. returns
  450. timestamp
  451. labels
  452. durations
  453. """
  454. timestamp, durations, labels = self._get_event_timestamps(
  455. block_index, seg_index, event_channel_index, t_start, t_stop)
  456. return timestamp, durations, labels
  457. def rescale_event_timestamp(self, event_timestamps, dtype='float64'):
  458. """
  459. Rescale event timestamps to s
  460. """
  461. return self._rescale_event_timestamp(event_timestamps, dtype)
  462. def rescale_epoch_duration(self, raw_duration, dtype='float64'):
  463. """
  464. Rescale epoch raw duration to s
  465. """
  466. return self._rescale_epoch_duration(raw_duration, dtype)
  467. def setup_cache(self, cache_path, **init_kargs):
  468. if self.rawmode in ('one-file', 'multi-file'):
  469. resource_name = self.filename
  470. elif self.rawmode == 'one-dir':
  471. resource_name = self.dirname
  472. else:
  473. raise (NotImplementedError)
  474. if cache_path == 'home':
  475. if sys.platform.startswith('win'):
  476. dirname = os.path.join(os.environ['APPDATA'], 'neo_rawio_cache')
  477. elif sys.platform.startswith('darwin'):
  478. dirname = '~/Library/Application Support/neo_rawio_cache'
  479. else:
  480. dirname = os.path.expanduser('~/.config/neo_rawio_cache')
  481. dirname = os.path.join(dirname, self.__class__.__name__)
  482. if not os.path.exists(dirname):
  483. os.makedirs(dirname)
  484. elif cache_path == 'same_as_resource':
  485. dirname = os.path.dirname(resource_name)
  486. else:
  487. assert os.path.exists(cache_path), \
  488. 'cache_path do not exists use "home" or "same_as_resource" to make this auto'
  489. # the hash of the resource (dir of file) is done with filename+datetime
  490. # TODO make something more sophisticated when rawmode='one-dir' that use all
  491. # filename and datetime
  492. d = dict(ressource_name=resource_name, mtime=os.path.getmtime(resource_name))
  493. hash = joblib.hash(d, hash_name='md5')
  494. # name is constructed from the real_n,ame and the hash
  495. name = '{}_{}'.format(os.path.basename(resource_name), hash)
  496. self.cache_filename = os.path.join(dirname, name)
  497. if os.path.exists(self.cache_filename):
  498. self.logger.warning('Use existing cache file {}'.format(self.cache_filename))
  499. self._cache = joblib.load(self.cache_filename)
  500. else:
  501. self.logger.warning('Create cache file {}'.format(self.cache_filename))
  502. self._cache = {}
  503. self.dump_cache()
  504. def add_in_cache(self, **kargs):
  505. assert self.use_cache
  506. self._cache.update(kargs)
  507. self.dump_cache()
  508. def dump_cache(self):
  509. assert self.use_cache
  510. joblib.dump(self._cache, self.cache_filename)
    ##################
    # Functions to be implemented in IO below here

    def _parse_header(self):
        # Parse the resource and fill self.header.
        # Implementations must also call
        # self._generate_minimal_annotations()
        # (builds self.raw_annotations)
        raise (NotImplementedError)

    def _source_name(self):
        # Return the name of the data source (filename or dirname).
        raise (NotImplementedError)

    def _segment_t_start(self, block_index, seg_index):
        # Global t_start of a segment, in seconds.
        raise (NotImplementedError)

    def _segment_t_stop(self, block_index, seg_index):
        # Global t_stop of a segment, in seconds.
        raise (NotImplementedError)

    ###
    # signal and channel zone
    def _get_signal_size(self, block_index, seg_index, channel_indexes):
        # Number of samples for the given channels in one segment.
        raise (NotImplementedError)

    def _get_signal_t_start(self, block_index, seg_index, channel_indexes):
        # t_start (in s) of the signals for the given channels.
        raise (NotImplementedError)

    def _get_analogsignal_chunk(self, block_index, seg_index, i_start, i_stop, channel_indexes):
        # Raw (unscaled) signal chunk for the given sample range/channels.
        raise (NotImplementedError)

    ###
    # spiketrain and unit zone
    def _spike_count(self, block_index, seg_index, unit_index):
        raise (NotImplementedError)

    def _get_spike_timestamps(self, block_index, seg_index, unit_index, t_start, t_stop):
        # Native-unit spike timestamps; t_start/t_stop limits are in seconds.
        raise (NotImplementedError)

    def _rescale_spike_timestamp(self, spike_timestamps, dtype):
        # Convert native spike timestamps to seconds.
        raise (NotImplementedError)

    ###
    # spike waveforms zone
    def _get_spike_raw_waveforms(self, block_index, seg_index, unit_index, t_start, t_stop):
        raise (NotImplementedError)

    ###
    # event and epoch zone
    def _event_count(self, block_index, seg_index, event_channel_index):
        raise (NotImplementedError)

    def _get_event_timestamps(self, block_index, seg_index, event_channel_index, t_start, t_stop):
        # Must return (timestamp, durations, labels); durations is None for events.
        raise (NotImplementedError)

    def _rescale_event_timestamp(self, event_timestamps, dtype):
        raise (NotImplementedError)

    def _rescale_epoch_duration(self, raw_duration, dtype):
        raise (NotImplementedError)