# -*- coding: utf-8 -*-
"""
Module for reading and writing NSDF files

Author: Mieszko Grodzicki

This module supports both reading and writing of NSDF files.
Note: files read by this IO must have been written with this IO.
"""

from __future__ import absolute_import

import numpy as np
import quantities as pq

from uuid import uuid1
import pickle
from datetime import datetime
import os

try:
    import nsdf
except ImportError as err:
    HAVE_NSDF = False
    NSDF_ERR = err
else:
    HAVE_NSDF = True
    NSDF_ERR = None

from neo.io.baseio import BaseIO
from neo.core import Block, Segment, AnalogSignal, ChannelIndex


class NSDFIO(BaseIO):
    """
    Class for reading and writing files in NSDF Format.

    It supports reading and writing: Block, Segment, AnalogSignal, ChannelIndex,
    with all relationships and metadata.
    """
    is_readable = True
    is_writable = True

    supported_objects = [Block, Segment, AnalogSignal, ChannelIndex]

    readable_objects = [Block, Segment]
    writeable_objects = [Block, Segment]

    has_header = False
    is_streameable = False

    name = 'NSDF'
    extensions = ['h5']
    mode = 'file'

    def __init__(self, filename=None):
        """
        Initialise NSDFIO instance

        :param filename: Path to the file
        """
        if not HAVE_NSDF:
            raise Exception("Failed to import NSDF.")

        if filename is None:
            raise ValueError("Must provide an input file.")

        BaseIO.__init__(self)

        self.filename = filename
        self.dt_format = '%d/%m/%Y %H:%M:%S'
        self.modeltree_path = '/model/modeltree/neo/'

    def write_all_blocks(self, blocks):
        """
        Write list of blocks to the file

        :param blocks: List of blocks to be written
        """
        writer = self._init_writing()
        neo_model, blocks_model, segments_model = self._prepare_model_tree(writer)

        name_pattern = self._name_pattern(len(blocks))
        for i, block in enumerate(blocks):
            self.write_block(block, name_pattern.format(i), writer, blocks_model)

    def write_block(self, block=None, name='0', writer=None, parent=None):
        """
        Write a Block to the file

        :param block: Block to be written
        :param name: Name for block representation in NSDF model tree (optional)
        :param writer: NSDFWriter instance (optional)
        :param parent: NSDF ModelComponent which will be the parent of block NSDF representation (optional)
        """
        if not isinstance(block, Block):
            raise ValueError("Must provide a Block to write.")

        if writer is None:
            writer = self._init_writing()

        if parent is None:
            neo_model, parent, segments_model = self._prepare_model_tree(writer)

        block_model = nsdf.ModelComponent(name, uid=uuid1().hex, parent=parent)

        self._write_container_metadata(block, block_model)
        self._write_model_component(block_model, writer)

        self._write_block_children(block, block_model, writer)

        self._clean_nsdfio_annotations(block)

    def _write_block_children(self, block, block_model, writer):
        segments_model = nsdf.ModelComponent(name='segments', uid=uuid1().hex, parent=block_model)
        self._write_model_component(segments_model, writer)
        name_pattern = self._name_pattern(len(block.segments))
        for i, segment in enumerate(block.segments):
            self.write_segment(segment=segment, name=name_pattern.format(i),
                               writer=writer, parent=segments_model)

        channel_indexes_model = nsdf.ModelComponent(name='channel_indexes', uid=uuid1().hex,
                                                    parent=block_model)
        self._write_model_component(channel_indexes_model, writer)
        name_pattern = self._name_pattern(len(block.channel_indexes))
        for i, channelindex in enumerate(block.channel_indexes):
            self.write_channelindex(channelindex=channelindex, name=name_pattern.format(i),
                                    writer=writer, parent=channel_indexes_model)
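
    # Illustrative sketch (not exhaustive) of the model tree that the write
    # methods build under self.modeltree_path ('/model/modeltree/neo/'):
    #
    #   neo/
    #     blocks/
    #       0/                      <- one group per written Block
    #         segments/
    #           0/
    #             analogsignals/
    #               0/
    #                 channels/     <- one child per signal channel
    #         channel_indexes/
    #           0/
    #             analogsignals/
    #     segments/                 <- used when a Segment is written on its own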

    def write_segment(self, segment=None, name='0', writer=None, parent=None):
        """
        Write a Segment to the file

        :param segment: Segment to be written
        :param name: Name for segment representation in NSDF model tree (optional)
        :param writer: NSDFWriter instance (optional)
        :param parent: NSDF ModelComponent which will be the parent of segment NSDF representation (optional)
        """
        if not isinstance(segment, Segment):
            raise ValueError("Must provide a Segment to write.")

        if writer is None:
            writer = self._init_writing()

        single_segment = False
        if parent is None:
            neo_model, blocks_model, parent = self._prepare_model_tree(writer)
            single_segment = True

        model = nsdf.ModelComponent(name, uid=uuid1().hex, parent=parent)

        self._write_container_metadata(segment, model)
        self._write_model_component(model, writer)

        self._write_segment_children(model, segment, writer)

        if single_segment:
            self._clean_nsdfio_annotations(segment)

    def _write_segment_children(self, model, segment, writer):
        analogsignals_model = nsdf.ModelComponent(name='analogsignals', uid=uuid1().hex, parent=model)
        self._write_model_component(analogsignals_model, writer)
        name_pattern = self._name_pattern(len(segment.analogsignals))
        for i, signal in enumerate(segment.analogsignals):
            self.write_analogsignal(signal=signal, name=name_pattern.format(i),
                                    parent=analogsignals_model, writer=writer)

    def write_analogsignal(self, signal, name, writer, parent):
        """
        Write an AnalogSignal to the file

        :param signal: AnalogSignal to be written
        :param name: Name for signal representation in NSDF model tree
        :param writer: NSDFWriter instance
        :param parent: NSDF ModelComponent which will be the parent of signal NSDF representation
        """
        uid = uuid1().hex
        model = nsdf.ModelComponent(name, uid=uid, parent=parent)

        if signal.annotations.get('nsdfio_uid') is not None:
            model.attrs['reference_to'] = signal.annotations['nsdfio_uid']
            self._write_model_component(model, writer)
            return

        self._write_basic_metadata(model, signal)
        signal.annotations['nsdfio_uid'] = uid

        r_signal = np.swapaxes(signal, 0, 1)
        channels_model, channels, source_ds = self._create_signal_data_sources(model, r_signal, uid, writer)
        self._write_signal_data(model, channels, r_signal, signal, source_ds, writer)

        self._write_model_component(model, writer)
        self._write_model_component(channels_model, writer)
        for channel_model in channels:
            self._write_model_component(channel_model, writer)
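
    # Note: write_analogsignal() de-duplicates signals that appear both under a
    # Segment and under a ChannelIndex. The first write stores the data and tags
    # the signal with an 'nsdfio_uid' annotation; a later write of the same object
    # only emits a model component whose 'reference_to' attribute points at that
    # uid. The temporary annotation is removed again by _clean_nsdfio_annotations().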

    def write_channelindex(self, channelindex, name, writer, parent):
        """
        Write a ChannelIndex to the file

        :param channelindex: ChannelIndex to be written
        :param name: Name for channelindex representation in NSDF model tree
        :param writer: NSDFWriter instance
        :param parent: NSDF ModelComponent which will be the parent of channelindex NSDF representation
        """
        uid = uuid1().hex
        model = nsdf.ModelComponent(name, uid=uid, parent=parent)

        self._write_basic_metadata(model, channelindex)
        self._write_model_component(model, writer)

        self._write_channelindex_arrays(model, channelindex, writer)

        self._write_channelindex_children(channelindex, model, writer)

    def _write_channelindex_children(self, channelindex, model, writer):
        analogsignals_model = nsdf.ModelComponent(name='analogsignals', uid=uuid1().hex, parent=model)
        self._write_model_component(analogsignals_model, writer)
        name_pattern = self._name_pattern(len(channelindex.analogsignals))
        for i, signal in enumerate(channelindex.analogsignals):
            self.write_analogsignal(signal=signal, name=name_pattern.format(i),
                                    parent=analogsignals_model, writer=writer)

    def _init_writing(self):
        return nsdf.NSDFWriter(self.filename, mode='w')

    def _prepare_model_tree(self, writer):
        neo_model = nsdf.ModelComponent('neo', uid=uuid1().hex)
        self._write_model_component(neo_model, writer)

        blocks_model = nsdf.ModelComponent('blocks', uid=uuid1().hex, parent=neo_model)
        self._write_model_component(blocks_model, writer)

        segments_model = nsdf.ModelComponent('segments', uid=uuid1().hex, parent=neo_model)
        self._write_model_component(segments_model, writer)

        return neo_model, blocks_model, segments_model

    def _number_of_digits(self, n):
        return len(str(n))

    def _name_pattern(self, how_many_items):
        return '{{:0{}d}}'.format(self._number_of_digits(max(how_many_items - 1, 0)))
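
    # Example: with 12 items the largest index is 11 (two digits), so
    # _name_pattern(12) returns '{:02d}' and children are named '00', '01', ..., '11'.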

    def _clean_nsdfio_annotations(self, object):
        nsdfio_annotations = ('nsdfio_uid',)

        for key in nsdfio_annotations:
            object.annotations.pop(key, None)

        if hasattr(object, 'children'):
            for child in object.children:
                self._clean_nsdfio_annotations(child)

    def _write_model_component(self, model, writer):
        if model.parent is None:
            nsdf.add_model_component(model, writer.model['modeltree/'])
        else:
            nsdf.add_model_component(model, model.parent.hdfgroup)

    def _write_container_metadata(self, container, container_model):
        self._write_basic_metadata(container_model, container)

        self._write_datetime_attributes(container_model, container)
        self._write_index_attribute(container_model, container)

    def _write_basic_metadata(self, model, object):
        self._write_basic_attributes(model, object)
        self._write_annotations(model, object)

    def _write_basic_attributes(self, model, object):
        if object.name is not None:
            model.attrs['name'] = object.name
        if object.description is not None:
            model.attrs['description'] = object.description

    def _write_datetime_attributes(self, model, object):
        if object.rec_datetime is not None:
            model.attrs['rec_datetime'] = object.rec_datetime.strftime(self.dt_format)

    def _write_index_attribute(self, model, object):
        if object.index is not None:
            model.attrs['index'] = object.index

    def _write_annotations(self, model, object):
        if object.annotations is not None:
            model.attrs['annotations'] = pickle.dumps(object.annotations)

    def _write_signal_data(self, model, channels, r_signal, signal, source_ds, writer):
        dataobj = nsdf.UniformData('signal', unit=str(signal.units.dimensionality))
        dataobj.dtype = signal.dtype
        for i in range(len(channels)):
            dataobj.put_data(channels[i].uid, r_signal[i])

        dataobj.set_dt(float(signal.sampling_period.magnitude),
                       str(signal.sampling_period.dimensionality))

        rescaled_tstart = signal.t_start.rescale(signal.sampling_period.dimensionality)
        writer.add_uniform_data(source_ds, dataobj,
                                tstart=float(rescaled_tstart.magnitude))

        model.attrs['t_start_unit'] = str(signal.t_start.dimensionality)

    def _create_signal_data_sources(self, model, r_signal, uid, writer):
        channels = []
        channels_model = nsdf.ModelComponent(name='channels', uid=uuid1().hex, parent=model)
        name_pattern = '{{:0{}d}}'.format(self._number_of_digits(max(len(r_signal) - 1, 0)))
        for i in range(len(r_signal)):
            channels.append(nsdf.ModelComponent(name_pattern.format(i),
                                                uid=uuid1().hex,
                                                parent=channels_model))

        source_ds = writer.add_uniform_ds(uid, [channel.uid for channel in channels])
        return channels_model, channels, source_ds

    def _write_channelindex_arrays(self, model, channelindex, writer):
        group = model.hdfgroup

        self._write_array(group, 'index', channelindex.index)
        if channelindex.channel_names is not None:
            self._write_array(group, 'channel_names', channelindex.channel_names)
        if channelindex.channel_ids is not None:
            self._write_array(group, 'channel_ids', channelindex.channel_ids)
        if channelindex.coordinates is not None:
            self._write_array(group, 'coordinates', channelindex.coordinates)

    def _write_array(self, group, name, array):
        if isinstance(array, pq.Quantity):
            group.create_dataset(name, data=array.magnitude)
            group[name].attrs['dimensionality'] = str(array.dimensionality)
        else:
            group.create_dataset(name, data=array)

    def read_all_blocks(self, lazy=False, cascade=True):
        """
        Read all blocks from the file

        :param lazy: Enables lazy reading
        :param cascade: Read nested objects or not?
        :return: List of read blocks
        """
        reader = self._init_reading()
        blocks = []

        blocks_path = self.modeltree_path + 'blocks/'
        for block in reader.model[blocks_path].values():
            blocks.append(self.read_block(lazy, cascade, group=block, reader=reader))

        return blocks

    def read_block(self, lazy=False, cascade=True, group=None, reader=None):
        """
        Read a Block from the file

        :param lazy: Enables lazy reading
        :param cascade: Read nested objects or not?
        :param group: HDF5 Group representing the block in NSDF model tree (optional)
        :param reader: NSDFReader instance (optional)
        :return: Read block
        """
        block = Block()
        group, reader = self._select_first_container(group, reader, 'block')

        if group is None:
            return None

        attrs = group.attrs

        if cascade:
            self._read_block_children(lazy, block, group, reader)
        block.create_many_to_one_relationship()

        self._read_container_metadata(attrs, block)

        return block

    def _read_block_children(self, lazy, block, group, reader):
        for child in group['segments/'].values():
            block.segments.append(self.read_segment(lazy=lazy, group=child, reader=reader))
        for child in group['channel_indexes/'].values():
            block.channel_indexes.append(self.read_channelindex(lazy=lazy, group=child, reader=reader))

    def read_segment(self, lazy=False, cascade=True, group=None, reader=None):
        """
        Read a Segment from the file

        :param lazy: Enables lazy reading
        :param cascade: Read nested objects or not?
        :param group: HDF5 Group representing the segment in NSDF model tree (optional)
        :param reader: NSDFReader instance (optional)
        :return: Read segment
        """
        segment = Segment()
        group, reader = self._select_first_container(group, reader, 'segment')

        if group is None:
            return None

        attrs = group.attrs

        if cascade:
            self._read_segment_children(lazy, group, reader, segment)

        self._read_container_metadata(attrs, segment)

        return segment

    def _read_segment_children(self, lazy, group, reader, segment):
        for child in group['analogsignals/'].values():
            segment.analogsignals.append(self.read_analogsignal(lazy=lazy, group=child, reader=reader))

    def read_analogsignal(self, lazy=False, cascade=True, group=None, reader=None):
        """
        Read an AnalogSignal from the file (must be child of a Segment)

        :param lazy: Enables lazy reading
        :param cascade: Read nested objects or not?
        :param group: HDF5 Group representing the analogsignal in NSDF model tree
        :param reader: NSDFReader instance
        :return: Read AnalogSignal
        """
        attrs = group.attrs

        if attrs.get('reference_to') is not None:
            return self.objects_dict[attrs['reference_to']]

        uid = attrs['uid']
        data_group = reader.data['uniform/{}/signal'.format(uid)]

        t_start = self._read_analogsignal_t_start(attrs, data_group)
        signal = self._create_analogsignal(data_group, lazy, group, t_start, uid, reader)

        self._read_basic_metadata(attrs, signal)

        self.objects_dict[uid] = signal
        return signal
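
    # Signals stored as references (see write_analogsignal()) are resolved through
    # self.objects_dict, which _init_reading() creates and read_analogsignal() fills,
    # so a signal shared between containers is returned as the same instance on read.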

    def read_channelindex(self, lazy=False, cascade=True, group=None, reader=None):
        """
        Read a ChannelIndex from the file (must be child of a Block)

        :param lazy: Enables lazy reading
        :param cascade: Read nested objects or not?
        :param group: HDF5 Group representing the channelindex in NSDF model tree
        :param reader: NSDFReader instance
        :return: Read ChannelIndex
        """
        attrs = group.attrs

        channelindex = self._create_channelindex(group)
        if cascade:
            self._read_channelindex_children(lazy, group, reader, channelindex)

        self._read_basic_metadata(attrs, channelindex)

        return channelindex

    def _read_channelindex_children(self, lazy, group, reader, channelindex):
        for child in group['analogsignals/'].values():
            channelindex.analogsignals.append(self.read_analogsignal(lazy=lazy, group=child, reader=reader))

    def _init_reading(self):
        reader = nsdf.NSDFReader(self.filename)
        self.file_datetime = datetime.fromtimestamp(os.stat(self.filename).st_mtime)
        self.objects_dict = {}
        return reader

    def _select_first_container(self, group, reader, name):
        if reader is None:
            reader = self._init_reading()

        if group is None:
            path = self.modeltree_path + name + 's/'
            if len(reader.model[path].values()) > 0:
                # list() keeps this working on Python 3, where h5py's .values()
                # returns a non-indexable view
                group = list(reader.model[path].values())[0]

        return group, reader

    def _read_container_metadata(self, attrs, container):
        self._read_basic_metadata(attrs, container)

        self._read_datetime_attributes(attrs, container)
        self._read_index_attribute(attrs, container)

    def _read_basic_metadata(self, attrs, signal):
        self._read_basic_attributes(attrs, signal)
        self._read_annotations(attrs, signal)

    def _read_basic_attributes(self, attrs, object):
        if attrs.get('name') is not None:
            object.name = attrs['name']
        if attrs.get('description') is not None:
            object.description = attrs['description']
        object.file_origin = self.filename

    def _read_datetime_attributes(self, attrs, object):
        object.file_datetime = self.file_datetime
        if attrs.get('rec_datetime') is not None:
            object.rec_datetime = datetime.strptime(attrs['rec_datetime'], self.dt_format)

    def _read_annotations(self, attrs, object):
        if attrs.get('annotations') is not None:
            object.annotations = pickle.loads(attrs['annotations'])

    def _read_index_attribute(self, attrs, object):
        if attrs.get('index') is not None:
            object.index = attrs['index']

    def _create_analogsignal(self, data_group, lazy, group, t_start, uid, reader):
        if lazy:
            data_shape = data_group.shape
            data_shape = (data_shape[1], data_shape[0])
            signal = self._create_lazy_analogsignal(data_shape, data_group, uid, t_start)
        else:
            dataobj = reader.get_uniform_data(uid, 'signal')
            data = self._read_signal_data(dataobj, group)
            signal = self._create_normal_analogsignal(data, dataobj, uid, t_start)
        return signal

    def _read_analogsignal_t_start(self, attrs, data_group):
        t_start = float(data_group.attrs['tstart']) * pq.Quantity(1, data_group.attrs['tunit'])
        t_start = t_start.rescale(attrs['t_start_unit'])
        return t_start

    def _read_signal_data(self, dataobj, group):
        data = []
        for channel in group['channels/'].values():
            channel_uid = channel.attrs['uid']
            data += [dataobj.get_data(channel_uid)]
        return data

    def _create_normal_analogsignal(self, data, dataobj, uid, t_start):
        return AnalogSignal(np.swapaxes(data, 0, 1), dtype=dataobj.dtype, units=dataobj.unit,
                            t_start=t_start, sampling_period=pq.Quantity(dataobj.dt, dataobj.tunit))

    def _create_lazy_analogsignal(self, shape, data, uid, t_start):
        attrs = data.attrs
        signal = AnalogSignal([], dtype=data.dtype, units=attrs['unit'],
                              t_start=t_start, sampling_period=pq.Quantity(attrs['dt'], attrs['tunit']))
        signal.lazy_shape = shape
        return signal

    def _create_channelindex(self, group):
        return ChannelIndex(index=self._read_array(group, 'index'),
                            channel_names=self._read_array(group, 'channel_names'),
                            channel_ids=self._read_array(group, 'channel_ids'),
                            coordinates=self._read_array(group, 'coordinates'))

    def _read_array(self, group, name):
        if name not in group:
            return None
        array = group[name][:]

        if group[name].attrs.get('dimensionality') is not None:
            return pq.Quantity(array, group[name].attrs['dimensionality'])

        return array
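

# Minimal usage sketch (not part of the IO itself): writes one Block to an HDF5
# file and reads it back. It assumes the optional `nsdf` package is installed;
# the file name below is only an example.
if __name__ == '__main__':
    # Build a trivial Block with one Segment holding a 4-channel AnalogSignal.
    signal = AnalogSignal(np.random.rand(1000, 4), units='mV',
                          sampling_period=1 * pq.ms)
    segment = Segment(name='segment0')
    segment.analogsignals.append(signal)
    block = Block(name='block0')
    block.segments.append(segment)

    NSDFIO(filename='nsdfio_example.h5').write_all_blocks([block])

    # Re-open the file with a fresh NSDFIO instance for reading.
    blocks = NSDFIO(filename='nsdfio_example.h5').read_all_blocks()
    print(blocks[0].segments[0].analogsignals[0].shape)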