nixio.py

# -*- coding: utf-8 -*-
# Copyright (c) 2016, German Neuroinformatics Node (G-Node)
# Achilleas Koutsou <achilleas.k@gmail.com>
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted under the terms of the BSD License. See
# LICENSE file in the root of the Project.
"""
Module for reading data from files in the NIX format.

Author: Achilleas Koutsou

This IO supports both writing and reading of NIX files. Reading is supported
only if the NIX file was created using this IO.
"""

from __future__ import absolute_import

import time
from datetime import datetime
try:
    from collections.abc import Iterable  # Python >= 3.3
except ImportError:
    from collections import Iterable  # fallback for Python 2
import itertools
from hashlib import md5
from uuid import uuid4

import quantities as pq
import numpy as np

from neo.io.baseio import BaseIO
from neo.core import (Block, Segment, ChannelIndex, AnalogSignal,
                      IrregularlySampledSignal, Epoch, Event, SpikeTrain, Unit)
from neo.io.tools import LazyList

try:
    import nixio as nix
    HAVE_NIX = True
except ImportError:
    HAVE_NIX = False

try:
    string_types = basestring
except NameError:
    string_types = str


def stringify(value):
    if value is None:
        return value
    if isinstance(value, bytes):
        value = value.decode()
    return str(value)


def calculate_timestamp(dt):
    if isinstance(dt, datetime):
        return int(time.mktime(dt.timetuple()))
    return int(dt)
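
# For reference: calculate_timestamp() truncates datetimes to whole-second
# POSIX timestamps using the local timezone (time.mktime); any other value is
# simply cast to int.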


class NixIO(BaseIO):
    """
    Class for reading and writing NIX files.
    """

    is_readable = True
    is_writable = True

    supported_objects = [Block, Segment, ChannelIndex,
                         AnalogSignal, IrregularlySampledSignal,
                         Epoch, Event, SpikeTrain, Unit]
    readable_objects = [Block]
    writeable_objects = [Block]

    name = "NIX"
    extensions = ["h5"]
    mode = "file"

    _container_map = {
        "segments": "groups",
        "analogsignals": "data_arrays",
        "irregularlysampledsignals": "data_arrays",
        "events": "multi_tags",
        "epochs": "multi_tags",
        "spiketrains": "multi_tags",
        "channel_indexes": "sources",
        "units": "sources"
    }

    def __init__(self, filename, mode="rw"):
        """
        Initialise IO instance and NIX file.

        :param filename: Full path to the file
        """
        if not HAVE_NIX:
            raise Exception("Failed to import NIX. "
                            "The NixIO requires the Python bindings for NIX "
                            "(nixio on PyPi). Try `pip install nixio`.")
        BaseIO.__init__(self, filename)
        self.filename = filename
        if mode == "ro":
            filemode = nix.FileMode.ReadOnly
        elif mode == "rw":
            filemode = nix.FileMode.ReadWrite
        elif mode == "ow":
            filemode = nix.FileMode.Overwrite
        else:
            raise ValueError("Invalid mode specified '{}'. "
                             "Valid modes: 'ro' (ReadOnly), 'rw' (ReadWrite),"
                             " 'ow' (Overwrite).".format(mode))
        self.nix_file = nix.File.open(self.filename, filemode, backend="h5py")
        self._object_map = dict()
        self._lazy_loaded = list()
        self._object_hashes = dict()
        self._block_read_counter = 0
        self._path_map = dict()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def read_all_blocks(self, cascade=True, lazy=False):
        blocks = list()
        for blk in self.nix_file.blocks:
            blocks.append(self.read_block("/" + blk.name, cascade, lazy))
        return blocks

    def read_block(self, path="/", cascade=True, lazy=False):
        if path == "/":
            try:
                nix_block = self.nix_file.blocks[self._block_read_counter]
                path += nix_block.name
                self._block_read_counter += 1
            except KeyError:
                return None
        else:
            nix_block = self._get_object_at(path)
        neo_block = self._block_to_neo(nix_block)
        neo_block.path = path
        if cascade:
            self._read_cascade(nix_block, path, cascade, lazy)
        self._update_maps(neo_block, lazy)
        return neo_block
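
    # Reading sketch (hypothetical path): read_block() with the default path
    # "/" returns the file's blocks one after another on successive calls,
    # while an explicit path such as "/neo.block.<uuid>" reads a specific
    # block. With cascade=True children (segments, signals, ...) are read as
    # well; with lazy=True data is left empty and only the shape is recorded
    # in `lazy_shape`.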

    def read_segment(self, path, cascade=True, lazy=False):
        nix_group = self._get_object_at(path)
        neo_segment = self._group_to_neo(nix_group)
        neo_segment.path = path
        if cascade:
            self._read_cascade(nix_group, path, cascade, lazy)
        self._update_maps(neo_segment, lazy)
        nix_parent = self._get_parent(path)
        neo_parent = self._get_mapped_object(nix_parent)
        if neo_parent:
            neo_segment.block = neo_parent
        return neo_segment

    def read_channelindex(self, path, cascade=True, lazy=False):
        nix_source = self._get_object_at(path)
        neo_rcg = self._source_chx_to_neo(nix_source)
        neo_rcg.path = path
        if cascade:
            self._read_cascade(nix_source, path, cascade, lazy)
        self._update_maps(neo_rcg, lazy)
        nix_parent = self._get_parent(path)
        neo_parent = self._get_mapped_object(nix_parent)
        neo_rcg.block = neo_parent
        return neo_rcg

    def read_signal(self, path, lazy=False):
        nix_data_arrays = list()
        parent_group = self._get_parent(path)
        parent_container = parent_group.data_arrays
        signal_group_name = path.split("/")[-1]
        for idx in itertools.count():
            signal_name = "{}.{}".format(signal_group_name, idx)
            if signal_name in parent_container:
                nix_data_arrays.append(parent_container[signal_name])
            else:
                break
        # check metadata segment
        group_section = nix_data_arrays[0].metadata
        for da in nix_data_arrays:
            assert da.metadata == group_section,\
                "DataArray {} is not a member of signal group {}".format(
                    da.name, group_section.name
                )
        neo_signal = self._signal_da_to_neo(nix_data_arrays, lazy)
        neo_signal.path = path
        if self._find_lazy_loaded(neo_signal) is None:
            self._update_maps(neo_signal, lazy)
            nix_parent = self._get_parent(path)
            neo_parent = self._get_mapped_object(nix_parent)
            neo_signal.segment = neo_parent
        return neo_signal

    def read_analogsignal(self, path, cascade=True, lazy=False):
        return self.read_signal(path, lazy)

    def read_irregularlysampledsignal(self, path, cascade=True, lazy=False):
        return self.read_signal(path, lazy)

    def read_eest(self, path, lazy=False):
        nix_mtag = self._get_object_at(path)
        neo_eest = self._mtag_eest_to_neo(nix_mtag, lazy)
        neo_eest.path = path
        self._update_maps(neo_eest, lazy)
        nix_parent = self._get_parent(path)
        neo_parent = self._get_mapped_object(nix_parent)
        neo_eest.segment = neo_parent
        return neo_eest
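
    # "eest" is shorthand for Epoch/Event/SpikeTrain: all three Neo types are
    # stored as NIX MultiTags, so the readers below simply delegate to
    # read_eest().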

    def read_epoch(self, path, cascade=True, lazy=False):
        return self.read_eest(path, lazy)

    def read_event(self, path, cascade=True, lazy=False):
        return self.read_eest(path, lazy)

    def read_spiketrain(self, path, cascade=True, lazy=False):
        return self.read_eest(path, lazy)

    def read_unit(self, path, cascade=True, lazy=False):
        nix_source = self._get_object_at(path)
        neo_unit = self._source_unit_to_neo(nix_source)
        neo_unit.path = path
        if cascade:
            self._read_cascade(nix_source, path, cascade, lazy)
        self._update_maps(neo_unit, lazy)
        nix_parent = self._get_parent(path)
        neo_parent = self._get_mapped_object(nix_parent)
        neo_unit.channel_index = neo_parent
        return neo_unit

    def _block_to_neo(self, nix_block):
        neo_attrs = self._nix_attr_to_neo(nix_block)
        neo_block = Block(**neo_attrs)
        neo_block.rec_datetime = datetime.fromtimestamp(
            nix_block.created_at
        )
        self._object_map[nix_block.id] = neo_block
        return neo_block

    def _group_to_neo(self, nix_group):
        neo_attrs = self._nix_attr_to_neo(nix_group)
        neo_segment = Segment(**neo_attrs)
        neo_segment.rec_datetime = datetime.fromtimestamp(
            nix_group.created_at
        )
        self._object_map[nix_group.id] = neo_segment
        return neo_segment

    def _source_chx_to_neo(self, nix_source):
        neo_attrs = self._nix_attr_to_neo(nix_source)
        chx = list(self._nix_attr_to_neo(c)
                   for c in nix_source.sources
                   if c.type == "neo.channelindex")
        chan_names = list(c["neo_name"] for c in chx if "neo_name" in c)
        if chan_names:
            neo_attrs["channel_names"] = chan_names
        neo_attrs["index"] = np.array([c["index"] for c in chx])
        if "coordinates" in chx[0]:
            coord_units = chx[0]["coordinates.units"]
            coord_values = list(c["coordinates"] for c in chx)
            neo_attrs["coordinates"] = pq.Quantity(coord_values, coord_units)
        rcg = ChannelIndex(**neo_attrs)
        self._object_map[nix_source.id] = rcg
        return rcg

    def _source_unit_to_neo(self, nix_unit):
        neo_attrs = self._nix_attr_to_neo(nix_unit)
        neo_unit = Unit(**neo_attrs)
        self._object_map[nix_unit.id] = neo_unit
        return neo_unit

    def _signal_da_to_neo(self, nix_da_group, lazy):
        """
        Convert a group of NIX DataArrays to a Neo signal. This method expects
        a list of data arrays that all represent the same, multidimensional
        Neo Signal object.
        This returns either an AnalogSignal or IrregularlySampledSignal.

        :param nix_da_group: a list of NIX DataArray objects
        :return: a Neo Signal object
        """
        nix_da_group = sorted(nix_da_group, key=lambda d: d.name)
        neo_attrs = self._nix_attr_to_neo(nix_da_group[0])
        metadata = nix_da_group[0].metadata
        neo_type = nix_da_group[0].type
        neo_attrs["nix_name"] = metadata.name  # use the common base name
        unit = nix_da_group[0].unit
        if lazy:
            signaldata = pq.Quantity(np.empty(0), unit)
            lazy_shape = (len(nix_da_group[0]), len(nix_da_group))
        else:
            signaldata = np.array([d[:] for d in nix_da_group]).transpose()
            signaldata = pq.Quantity(signaldata, unit)
            lazy_shape = None
        timedim = self._get_time_dimension(nix_da_group[0])
        if (neo_type == "neo.analogsignal" or
                timedim.dimension_type == nix.DimensionType.Sample):
            if lazy:
                sampling_period = pq.Quantity(1, timedim.unit)
                t_start = pq.Quantity(0, timedim.unit)
            else:
                if "sampling_interval.units" in metadata.props:
                    sample_units = metadata["sampling_interval.units"]
                else:
                    sample_units = timedim.unit
                sampling_period = pq.Quantity(timedim.sampling_interval,
                                              sample_units)
                if "t_start.units" in metadata.props:
                    tsunits = metadata["t_start.units"]
                else:
                    tsunits = timedim.unit
                t_start = pq.Quantity(timedim.offset, tsunits)
            neo_signal = AnalogSignal(
                signal=signaldata, sampling_period=sampling_period,
                t_start=t_start, **neo_attrs
            )
        elif (neo_type == "neo.irregularlysampledsignal"
                or timedim.dimension_type == nix.DimensionType.Range):
            if lazy:
                times = pq.Quantity(np.empty(0), timedim.unit)
            else:
                times = pq.Quantity(timedim.ticks, timedim.unit)
            neo_signal = IrregularlySampledSignal(
                signal=signaldata, times=times, **neo_attrs
            )
        else:
            return None
        for da in nix_da_group:
            self._object_map[da.id] = neo_signal
        if lazy_shape:
            neo_signal.lazy_shape = lazy_shape
        return neo_signal
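
    # Storage note: a multi-channel Neo signal is stored as one NIX DataArray
    # per channel, named "<base name>.0", "<base name>.1", ..., all sharing a
    # single metadata section. _signal_da_to_neo() above reassembles such a
    # group into one AnalogSignal or IrregularlySampledSignal (see also
    # _group_signals() and _create_nix_obj()).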

    def _mtag_eest_to_neo(self, nix_mtag, lazy):
        neo_attrs = self._nix_attr_to_neo(nix_mtag)
        neo_type = nix_mtag.type
        time_unit = nix_mtag.positions.unit
        if lazy:
            times = pq.Quantity(np.empty(0), time_unit)
            lazy_shape = np.shape(nix_mtag.positions)
        else:
            times = pq.Quantity(nix_mtag.positions, time_unit)
            lazy_shape = None
        if neo_type == "neo.epoch":
            if lazy:
                durations = pq.Quantity(np.empty(0), nix_mtag.extents.unit)
                labels = np.empty(0, dtype='S')
            else:
                durations = pq.Quantity(nix_mtag.extents,
                                        nix_mtag.extents.unit)
                labels = np.array(nix_mtag.positions.dimensions[0].labels,
                                  dtype="S")
            eest = Epoch(times=times, durations=durations, labels=labels,
                         **neo_attrs)
        elif neo_type == "neo.event":
            if lazy:
                labels = np.empty(0, dtype='S')
            else:
                labels = np.array(nix_mtag.positions.dimensions[0].labels,
                                  dtype="S")
            eest = Event(times=times, labels=labels, **neo_attrs)
        elif neo_type == "neo.spiketrain":
            if "t_start" in neo_attrs:
                if "t_start.units" in neo_attrs:
                    t_start_units = neo_attrs["t_start.units"]
                    del neo_attrs["t_start.units"]
                else:
                    t_start_units = time_unit
                t_start = pq.Quantity(neo_attrs["t_start"], t_start_units)
                del neo_attrs["t_start"]
            else:
                t_start = None
            if "t_stop" in neo_attrs:
                if "t_stop.units" in neo_attrs:
                    t_stop_units = neo_attrs["t_stop.units"]
                    del neo_attrs["t_stop.units"]
                else:
                    t_stop_units = time_unit
                t_stop = pq.Quantity(neo_attrs["t_stop"], t_stop_units)
                del neo_attrs["t_stop"]
            else:
                t_stop = None
            if "sampling_interval.units" in neo_attrs:
                interval_units = neo_attrs["sampling_interval.units"]
                del neo_attrs["sampling_interval.units"]
            else:
                interval_units = None
            if "left_sweep.units" in neo_attrs:
                left_sweep_units = neo_attrs["left_sweep.units"]
                del neo_attrs["left_sweep.units"]
            else:
                left_sweep_units = None
            eest = SpikeTrain(times=times, t_start=t_start,
                              t_stop=t_stop, **neo_attrs)
            if len(nix_mtag.features):
                wfda = nix_mtag.features[0].data
                wftime = self._get_time_dimension(wfda)
                if lazy:
                    eest.waveforms = pq.Quantity(np.empty((0, 0, 0)),
                                                 wfda.unit)
                    eest.sampling_period = pq.Quantity(1, wftime.unit)
                    eest.left_sweep = pq.Quantity(0, wftime.unit)
                else:
                    eest.waveforms = pq.Quantity(wfda, wfda.unit)
                    if interval_units is None:
                        interval_units = wftime.unit
                    eest.sampling_period = pq.Quantity(
                        wftime.sampling_interval, interval_units
                    )
                    if left_sweep_units is None:
                        left_sweep_units = wftime.unit
                    if "left_sweep" in wfda.metadata:
                        eest.left_sweep = pq.Quantity(
                            wfda.metadata["left_sweep"], left_sweep_units
                        )
        else:
            return None
        self._object_map[nix_mtag.id] = eest
        if lazy_shape:
            eest.lazy_shape = lazy_shape
        return eest

    def _read_cascade(self, nix_obj, path, cascade, lazy):
        neo_obj = self._object_map[nix_obj.id]
        for neocontainer in getattr(neo_obj, "_child_containers", []):
            nixcontainer = self._container_map[neocontainer]
            if not hasattr(nix_obj, nixcontainer):
                continue
            if neocontainer == "channel_indexes":
                neotype = "channelindex"
            else:
                neotype = neocontainer[:-1]
            chpaths = list(path + "/" + neocontainer + "/" + c.name
                           for c in getattr(nix_obj, nixcontainer)
                           if c.type == "neo." + neotype)
            if neocontainer in ("analogsignals",
                                "irregularlysampledsignals"):
                chpaths = self._group_signals(chpaths)
            if cascade != "lazy":
                read_func = getattr(self, "read_" + neotype)
                children = list(read_func(cp, cascade, lazy)
                                for cp in chpaths)
            else:
                children = LazyList(self, lazy, chpaths)
            setattr(neo_obj, neocontainer, children)

        if isinstance(neo_obj, ChannelIndex):
            # set references to signals
            parent_block_path = "/" + path.split("/")[1]
            parent_block = self._get_object_at(parent_block_path)
            ref_das = self._get_referers(nix_obj, parent_block.data_arrays)
            ref_signals = self._get_mapped_objects(ref_das)
            # deduplicate by name
            ref_signals = list(dict((s.annotations["nix_name"], s)
                                    for s in ref_signals).values())
            for sig in ref_signals:
                if isinstance(sig, AnalogSignal):
                    neo_obj.analogsignals.append(sig)
                elif isinstance(sig, IrregularlySampledSignal):
                    neo_obj.irregularlysampledsignals.append(sig)
                sig.channel_index = neo_obj
        elif isinstance(neo_obj, Unit):
            # set references to spiketrains
            parent_block_path = "/" + path.split("/")[1]
            parent_block = self._get_object_at(parent_block_path)
            ref_mtags = self._get_referers(nix_obj, parent_block.multi_tags)
            ref_sts = self._get_mapped_objects(ref_mtags)
            for st in ref_sts:
                neo_obj.spiketrains.append(st)
                st.unit = neo_obj

    def get(self, path, cascade, lazy):
        parts = path.split("/")
        if len(parts) > 2:
            neotype = parts[-2][:-1]
        else:
            neotype = "block"
        if neotype == "channel_indexe":
            neotype = "channelindex"
        read_func = getattr(self, "read_" + neotype)
        return read_func(path, cascade, lazy)

    def load_lazy_object(self, obj):
        return self.get(obj.path, cascade=False, lazy=False)

    def load_lazy_cascade(self, path, lazy):
        """
        Loads the object at the location specified by the path and all
        children. Data is loaded if lazy is False.

        :param path: Location of object in file
        :param lazy: Do not load data if True
        :return: The loaded object
        """
        neoobj = self.get(path, cascade=True, lazy=lazy)
        return neoobj
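
    # Lazy-loading sketch (hypothetical `lazy_signal`): objects read with
    # lazy=True keep a `path` attribute and a `lazy_shape`; the full data can
    # be fetched later with `io.load_lazy_object(lazy_signal)`.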

    def write_all_blocks(self, neo_blocks):
        """
        Convert all ``neo_blocks`` to the NIX equivalent and write them to the
        file.

        :param neo_blocks: List (or iterable) containing Neo blocks
        :return: A list containing the new NIX Blocks
        """
        for bl in neo_blocks:
            self.write_block(bl)
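
    # Writing sketch (hypothetical `blocks` list): `io.write_all_blocks(blocks)`
    # converts each Neo object tree to its NIX equivalent via _write_object();
    # objects whose hash (_hash_object) is unchanged since the last write are
    # not rewritten.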

    def _write_object(self, obj, loc=""):
        objtype = type(obj).__name__.lower()
        if isinstance(obj, Block):
            containerstr = "/"
        else:
            if objtype == "channelindex":
                containerstr = "/channel_indexes/"
            else:
                containerstr = "/" + type(obj).__name__.lower() + "s/"
        if "nix_name" in obj.annotations:
            nix_name = obj.annotations["nix_name"]
        else:
            nix_name = "neo.{}.{}".format(objtype, self._generate_nix_name())
            obj.annotate(nix_name=nix_name)
        objpath = loc + containerstr + nix_name
        oldhash = self._object_hashes.get(objpath)
        if oldhash is None:
            try:
                oldobj = self.get(objpath, cascade=False, lazy=False)
                oldhash = self._hash_object(oldobj)
            except (KeyError, IndexError):
                oldhash = None
        newhash = self._hash_object(obj)
        if oldhash != newhash:
            attr = self._neo_attr_to_nix(obj)
            attr["name"] = nix_name
            if isinstance(obj, pq.Quantity):
                attr.update(self._neo_data_to_nix(obj))
            if oldhash is None:
                nixobj = self._create_nix_obj(loc, attr)
            else:
                nixobj = self._get_object_at(objpath)
            self._write_attr_annotations(nixobj, attr, objpath)
            if isinstance(obj, pq.Quantity):
                self._write_data(nixobj, attr, objpath)
        else:
            nixobj = self._get_object_at(objpath)
        self._object_map[id(obj)] = nixobj
        self._object_hashes[objpath] = newhash
        self._write_cascade(obj, objpath)

    def _create_nix_obj(self, loc, attr):
        parentobj = self._get_object_at(loc)
        if attr["type"] == "block":
            nixobj = parentobj.create_block(attr["name"], "neo.block")
            nixobj.metadata = self.nix_file.create_section(
                attr["name"], "neo.block.metadata"
            )
        elif attr["type"] == "segment":
            nixobj = parentobj.create_group(attr["name"], "neo.segment")
            nixobj.metadata = parentobj.metadata.create_section(
                attr["name"], "neo.segment.metadata"
            )
        elif attr["type"] == "channelindex":
            nixobj = parentobj.create_source(attr["name"],
                                             "neo.channelindex")
            nixobj.metadata = parentobj.metadata.create_section(
                attr["name"], "neo.channelindex.metadata"
            )
        elif attr["type"] in ("analogsignal", "irregularlysampledsignal"):
            blockpath = "/" + loc.split("/")[1]
            parentblock = self._get_object_at(blockpath)
            nixobj = list()
            typestr = "neo.{}".format(attr["type"])
            sigmd = parentobj.metadata.create_section(
                attr["name"], "{}.metadata".format(typestr)
            )
            for idx, datarow in enumerate(attr["data"]):
                name = "{}.{}".format(attr["name"], idx)
                da = parentblock.create_data_array(name, typestr, data=datarow)
                da.metadata = sigmd
                nixobj.append(da)
            parentobj.data_arrays.extend(nixobj)
        elif attr["type"] in ("epoch", "event", "spiketrain"):
            blockpath = "/" + loc.split("/")[1]
            parentblock = self._get_object_at(blockpath)
            typestr = "neo.{}".format(attr["type"])
            timesda = parentblock.create_data_array(
                "{}.times".format(attr["name"]), "{}.times".format(typestr),
                data=attr["data"]
            )
            nixobj = parentblock.create_multi_tag(
                attr["name"], typestr, timesda
            )
            nixobj.metadata = parentobj.metadata.create_section(
                attr["name"], "{}.metadata".format(typestr)
            )
            parentobj.multi_tags.append(nixobj)
        elif attr["type"] == "unit":
            nixobj = parentobj.create_source(attr["name"], "neo.unit")
            nixobj.metadata = parentobj.metadata.create_section(
                attr["name"], "neo.unit.metadata"
            )
        else:
            raise ValueError("Unable to create NIX object. Invalid type.")
        return nixobj

    def write_block(self, bl, loc=""):
        """
        Convert ``bl`` to the NIX equivalent and write it to the file.

        :param bl: Neo block to be written
        :param loc: Unused for blocks
        """
        self._write_object(bl, loc)
        self._create_references(bl)

    def write_channelindex(self, chx, loc=""):
        """
        Convert the provided ``chx`` (ChannelIndex) to a NIX Source and write
        it to the NIX file at the location defined by ``loc``.

        :param chx: The Neo ChannelIndex to be written
        :param loc: Path to the parent of the new CHX
        """
        self._write_object(chx, loc)

    def write_segment(self, seg, loc=""):
        """
        Convert the provided ``seg`` to a NIX Group and write it to the NIX
        file at the location defined by ``loc``.

        :param seg: The Neo Segment to be written
        :param loc: Path to the parent of the new Segment
        """
        self._write_object(seg, loc)

    def write_indices(self, chx, loc=""):
        """
        Create NIX Source objects to represent the individual indices of the
        provided ``chx`` (ChannelIndex) and write them to the NIX file under
        the parent ChannelIndex object.

        :param chx: The Neo ChannelIndex
        :param loc: Path to the CHX
        """
        nixsource = self._get_mapped_object(chx)
        for idx, channel in enumerate(chx.index):
            channame = "{}.ChannelIndex{}".format(chx.annotations["nix_name"],
                                                  idx)
            if channame in nixsource.sources:
                nixchan = nixsource.sources[channame]
            else:
                nixchan = nixsource.create_source(channame,
                                                  "neo.channelindex")
                nixchan.metadata = nixsource.metadata.create_section(
                    nixchan.name, "neo.channelindex.metadata"
                )
            nixchan.definition = nixsource.definition
            chanmd = nixchan.metadata
            if len(chx.channel_names):
                neochanname = stringify(chx.channel_names[idx])
                chanmd["neo_name"] = nix.Value(neochanname)
            chanmd["index"] = nix.Value(int(channel))
            if chx.coordinates is not None:
                coords = chx.coordinates[idx]
                coordunits = stringify(coords[0].dimensionality)
                nixcoords = tuple(
                    nix.Value(c.rescale(coordunits).magnitude.item())
                    for c in coords
                )
                if "coordinates" in chanmd:
                    del chanmd["coordinates"]
                chanprop = chanmd.create_property("coordinates", nixcoords)
                chanprop.unit = coordunits

    def write_analogsignal(self, anasig, loc=""):
        """
        Convert the provided ``anasig`` (AnalogSignal) to a list of NIX
        DataArray objects and write them to the NIX file at the location
        defined by ``loc``. All DataArray objects created from the same
        AnalogSignal have their metadata section point to the same object.

        :param anasig: The Neo AnalogSignal to be written
        :param loc: Path to the parent of the new AnalogSignal
        """
        self._write_object(anasig, loc)

    def write_irregularlysampledsignal(self, irsig, loc=""):
        """
        Convert the provided ``irsig`` (IrregularlySampledSignal) to a list of
        NIX DataArray objects and write them to the NIX file at the location
        defined by ``loc``. All DataArray objects created from the same
        IrregularlySampledSignal have their metadata section point to the same
        object.

        :param irsig: The Neo IrregularlySampledSignal to be written
        :param loc: Path to the parent of the new IrregularlySampledSignal
        """
        self._write_object(irsig, loc)

    def write_epoch(self, ep, loc=""):
        """
        Convert the provided ``ep`` (Epoch) to a NIX MultiTag and write it to
        the NIX file at the location defined by ``loc``.

        :param ep: The Neo Epoch to be written
        :param loc: Path to the parent of the new MultiTag
        """
        self._write_object(ep, loc)

    def write_event(self, ev, loc=""):
        """
        Convert the provided ``ev`` (Event) to a NIX MultiTag and write it to
        the NIX file at the location defined by ``loc``.

        :param ev: The Neo Event to be written
        :param loc: Path to the parent of the new MultiTag
        """
        self._write_object(ev, loc)

    def write_spiketrain(self, sptr, loc=""):
        """
        Convert the provided ``sptr`` (SpikeTrain) to a NIX MultiTag and write
        it to the NIX file at the location defined by ``loc``.

        :param sptr: The Neo SpikeTrain to be written
        :param loc: Path to the parent of the new MultiTag
        """
        self._write_object(sptr, loc)

    def write_unit(self, ut, loc=""):
        """
        Convert the provided ``ut`` (Unit) to a NIX Source and write it to the
        NIX file at the parent RCG.

        :param ut: The Neo Unit to be written
        :param loc: Path to the parent of the new Source
        """
        self._write_object(ut, loc)

    def _write_cascade(self, neoobj, path=""):
        if isinstance(neoobj, ChannelIndex):
            containers = ["units"]
            self.write_indices(neoobj, path)
        elif isinstance(neoobj, Unit):
            containers = []
        else:
            containers = getattr(neoobj, "_child_containers", [])
        for neocontainer in containers:
            if neocontainer == "channel_indexes":
                neotype = "channelindex"
            else:
                neotype = neocontainer[:-1]
            children = getattr(neoobj, neocontainer)
            write_func = getattr(self, "write_" + neotype)
            for ch in children:
                write_func(ch, path)

    def _create_references(self, block):
        """
        Create references between NIX objects according to the supplied Neo
        Block.
        MultiTags reference DataArrays of the same Group.
        DataArrays reference ChannelIndexes as sources, based on Neo
        RCG -> Signal relationships.
        MultiTags (SpikeTrains) reference ChannelIndexes and Units as
        sources, based on Neo RCG -> Unit -> SpikeTrain relationships.

        :param block: A Neo Block that has already been converted and mapped
        to NIX objects.
        """
        for seg in block.segments:
            group = self._get_mapped_object(seg)
            group_signals = self._get_contained_signals(group)
            for mtag in group.multi_tags:
                if mtag.type in ("neo.epoch", "neo.event"):
                    mtag.references.extend([sig for sig in group_signals
                                            if sig not in mtag.references])
        for rcg in block.channel_indexes:
            rcgsource = self._get_mapped_object(rcg)
            das = self._get_mapped_objects(rcg.analogsignals +
                                           rcg.irregularlysampledsignals)
            # flatten nested lists
            das = [da for dalist in das for da in dalist]
            for da in das:
                if rcgsource not in da.sources:
                    da.sources.append(rcgsource)
            for unit in rcg.units:
                unitsource = self._get_mapped_object(unit)
                for st in unit.spiketrains:
                    stmtag = self._get_mapped_object(st)
                    if rcgsource not in stmtag.sources:
                        stmtag.sources.append(rcgsource)
                    if unitsource not in stmtag.sources:
                        stmtag.sources.append(unitsource)

    def _get_object_at(self, path):
        """
        Returns the object at the location defined by the path.
        ``path`` is a '/' delimited string. Each part of the string alternates
        between an object name and a container.
        If the requested object is an AnalogSignal or IrregularlySampledSignal,
        identified by the second-to-last part of the path string, a list of
        (DataArray) objects is returned.

        Example path: /block_1/segments/segment_a/events/event_a1

        :param path: Path string
        :return: The object at the location defined by the path
        """
        if path in self._path_map:
            return self._path_map[path]
        if path in ("", "/"):
            return self.nix_file
        parts = path.split("/")
        if parts[0]:
            raise ValueError("Invalid object path: {}".format(path))
        if len(parts) == 2:  # root block
            return self.nix_file.blocks[parts[1]]
        parent_obj = self._get_parent(path)
        container_name = self._container_map[parts[-2]]
        parent_container = getattr(parent_obj, container_name)
        objname = parts[-1]
        if parts[-2] in ["analogsignals", "irregularlysampledsignals"]:
            obj = list()
            for idx in itertools.count():
                name = "{}.{}".format(objname, idx)
                if name in parent_container:
                    obj.append(parent_container[name])
                else:
                    break
        else:
            obj = parent_container[objname]
        self._path_map[path] = obj
        return obj

    def _get_parent(self, path):
        parts = path.split("/")
        parent_path = "/".join(parts[:-2])
        parent_obj = self._get_object_at(parent_path)
        return parent_obj

    def _get_mapped_objects(self, object_list):
        return list(map(self._get_mapped_object, object_list))

    def _get_mapped_object(self, obj):
        # We could use paths here instead
        try:
            if hasattr(obj, "id"):
                return self._object_map[obj.id]
            else:
                return self._object_map[id(obj)]
        except KeyError:
            # raise KeyError("Failed to find mapped object for {}. "
            #                "Object not yet converted.".format(obj))
            return None

    def _write_attr_annotations(self, nixobj, attr, path):
        if isinstance(nixobj, list):
            metadata = nixobj[0].metadata
            for obj in nixobj:
                obj.definition = attr["definition"]
            self._write_attr_annotations(nixobj[0], attr, path)
            return
        else:
            metadata = nixobj.metadata
            nixobj.definition = attr["definition"]
        if "neo_name" in attr:
            metadata["neo_name"] = attr["neo_name"]
        if "created_at" in attr:
            nixobj.force_created_at(calculate_timestamp(attr["created_at"]))
        if "file_datetime" in attr:
            self._write_property(metadata,
                                 "file_datetime", attr["file_datetime"])
        if attr.get("rec_datetime"):
            self._write_property(metadata,
                                 "rec_datetime", attr["rec_datetime"])
        if "annotations" in attr:
            for k, v in attr["annotations"].items():
                self._write_property(metadata, k, v)

    def _write_data(self, nixobj, attr, path):
        if isinstance(nixobj, list):
            metadata = nixobj[0].metadata
            metadata["t_start.units"] = nix.Value(attr["t_start.units"])
            for obj in nixobj:
                obj.unit = attr["data.units"]
                if attr["type"] == "analogsignal":
                    timedim = obj.append_sampled_dimension(
                        attr["sampling_interval"]
                    )
                    timedim.unit = attr["sampling_interval.units"]
                elif attr["type"] == "irregularlysampledsignal":
                    timedim = obj.append_range_dimension(attr["times"])
                    timedim.unit = attr["times.units"]
                timedim.label = "time"
                timedim.offset = attr["t_start"]
        else:
            metadata = nixobj.metadata
            nixobj.positions.unit = attr["data.units"]
            blockpath = "/" + path.split("/")[1]
            parentblock = self._get_object_at(blockpath)
            if "extents" in attr:
                extname = nixobj.name + ".durations"
                exttype = nixobj.type + ".durations"
                if extname in parentblock.data_arrays:
                    del parentblock.data_arrays[extname]
                extents = parentblock.create_data_array(
                    extname,
                    exttype,
                    data=attr["extents"]
                )
                extents.unit = attr["extents.units"]
                nixobj.extents = extents
            if "labels" in attr:
                labeldim = nixobj.positions.append_set_dimension()
                labeldim.labels = attr["labels"]
            if "t_start" in attr:
                metadata["t_start"] = nix.Value(attr["t_start"])
                metadata["t_start.units"] = nix.Value(attr["t_start.units"])
            if "t_stop" in attr:
                metadata["t_stop"] = nix.Value(attr["t_stop"])
                metadata["t_stop.units"] = nix.Value(attr["t_stop.units"])
            if "waveforms" in attr:
                wfname = nixobj.name + ".waveforms"
                if wfname in parentblock.data_arrays:
                    del metadata.sections[wfname]
                    del parentblock.data_arrays[wfname]
                    del nixobj.features[0]
                wfda = parentblock.create_data_array(
                    wfname, "neo.waveforms",
                    data=attr["waveforms"]
                )
                wfda.metadata = nixobj.metadata.create_section(
                    wfda.name, "neo.waveforms.metadata"
                )
                wfda.unit = attr["waveforms.units"]
                nixobj.create_feature(wfda, nix.LinkType.Indexed)
                wfda.append_set_dimension()
                wfda.append_set_dimension()
                wftime = wfda.append_sampled_dimension(
                    attr["sampling_interval"]
                )
                metadata["sampling_interval.units"] =\
                    attr["sampling_interval.units"]
                wftime.unit = attr["times.units"]
                wftime.label = "time"
            if "left_sweep" in attr:
                self._write_property(wfda.metadata, "left_sweep",
                                     attr["left_sweep"])

    def _update_maps(self, obj, lazy):
        objidx = self._find_lazy_loaded(obj)
        if lazy and objidx is None:
            self._lazy_loaded.append(obj)
        elif not lazy and objidx is not None:
            self._lazy_loaded.pop(objidx)
        if not lazy:
            self._object_hashes[obj.path] = self._hash_object(obj)

    def _find_lazy_loaded(self, obj):
        """
        Finds the index of an object in the _lazy_loaded list by comparing the
        path attribute. Returns None if the object is not in the list.

        :param obj: The object to find
        :return: The index of the object in the _lazy_loaded list or None if
        it was not added
        """
        for idx, llobj in enumerate(self._lazy_loaded):
            if llobj.path == obj.path:
                return idx
        else:
            return None

    @staticmethod
    def _generate_nix_name():
        return uuid4().hex

    @staticmethod
    def _neo_attr_to_nix(neoobj):
        neotype = type(neoobj).__name__
        attrs = dict()
        # NIX metadata does not support None values
        # The property will be excluded to signify 'name is None'
        if neoobj.name is not None:
            attrs["neo_name"] = neoobj.name
        attrs["type"] = neotype.lower()
        attrs["definition"] = neoobj.description
        if isinstance(neoobj, (Block, Segment)):
            attrs["rec_datetime"] = neoobj.rec_datetime
            if neoobj.rec_datetime:
                attrs["created_at"] = neoobj.rec_datetime
            if neoobj.file_datetime:
                attrs["file_datetime"] = neoobj.file_datetime
        if neoobj.annotations:
            attrs["annotations"] = neoobj.annotations
        return attrs

    @classmethod
    def _neo_data_to_nix(cls, neoobj):
        attr = dict()
        attr["data"] = np.transpose(neoobj.magnitude)
        attr["data.units"] = cls._get_units(neoobj)
        if isinstance(neoobj, IrregularlySampledSignal):
            attr["times"] = neoobj.times.magnitude
            attr["times.units"] = cls._get_units(neoobj.times)
        else:
            attr["times.units"] = cls._get_units(neoobj.times, True)
        if hasattr(neoobj, "t_start"):
            attr["t_start"] = neoobj.t_start.magnitude.item()
            attr["t_start.units"] = cls._get_units(neoobj.t_start)
        if hasattr(neoobj, "t_stop"):
            attr["t_stop"] = neoobj.t_stop.magnitude.item()
            attr["t_stop.units"] = cls._get_units(neoobj.t_stop)
        if hasattr(neoobj, "sampling_period"):
            attr["sampling_interval"] = neoobj.sampling_period.magnitude.item()
            attr["sampling_interval.units"] = cls._get_units(
                neoobj.sampling_period
            )
        if hasattr(neoobj, "durations"):
            attr["extents"] = neoobj.durations
            attr["extents.units"] = cls._get_units(neoobj.durations)
        if hasattr(neoobj, "labels"):
            attr["labels"] = neoobj.labels.tolist()
        if hasattr(neoobj, "waveforms") and neoobj.waveforms is not None:
            attr["waveforms"] = list(wf.magnitude for wf in
                                     list(wfgroup for wfgroup in
                                          neoobj.waveforms))
            attr["waveforms.units"] = cls._get_units(neoobj.waveforms)
        if hasattr(neoobj, "left_sweep") and neoobj.left_sweep is not None:
            attr["left_sweep"] = neoobj.left_sweep.magnitude
            attr["left_sweep.units"] = cls._get_units(neoobj.left_sweep)
        return attr

    def _write_property(self, section, name, v):
        """
        Create a metadata property with a given name and value on the provided
        metadata section.

        :param section: The metadata section to hold the new property
        :param name: The name of the property
        :param v: The value to write
        :return: The newly created property
        """
        if isinstance(v, pq.Quantity):
            if len(v.shape):
                section[name] = list(nix.Value(vv) for vv in v.magnitude)
            else:
                section[name] = nix.Value(v.magnitude.item())
            section.props[name].unit = str(v.dimensionality)
        elif isinstance(v, datetime):
            section[name] = nix.Value(calculate_timestamp(v))
        elif isinstance(v, string_types):
            section[name] = nix.Value(v)
        elif isinstance(v, bytes):
            section[name] = nix.Value(v.decode())
        elif isinstance(v, Iterable):
            values = []
            unit = None
            for item in v:
                if isinstance(item, pq.Quantity):
                    unit = str(item.dimensionality)
                    item = nix.Value(item.magnitude.item())
                elif isinstance(item, Iterable):
                    self.logger.warn("Multidimensional arrays and nested "
                                     "containers are not currently supported "
                                     "when writing to NIX.")
                    return None
                elif type(item).__module__ == "numpy":
                    item = nix.Value(item.item())
                else:
                    item = nix.Value(item)
                values.append(item)
            section[name] = values
            section.props[name].unit = unit
        elif type(v).__module__ == "numpy":
            section[name] = nix.Value(v.item())
        else:
            section[name] = nix.Value(v)
        return section.props[name]
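
    # Example of the resulting metadata (hypothetical values): a Quantity such
    # as 5 * pq.mV becomes a property with value 5.0 and unit "mV", a datetime
    # becomes an integer timestamp, and flat iterables become multi-value
    # properties; nested containers are rejected with a warning.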

    @staticmethod
    def _get_contained_signals(obj):
        return list(
            da for da in obj.data_arrays
            if da.type in ["neo.analogsignal", "neo.irregularlysampledsignal"]
        )

    @staticmethod
    def _get_units(quantity, simplify=False):
        """
        Returns the units of a quantity value or array as a string, or None if
        it is dimensionless.

        :param quantity: Quantity scalar or array
        :param simplify: True/False Simplify units
        :return: Units of the quantity or None if dimensionless
        """
        units = quantity.units.dimensionality
        if simplify:
            units = units.simplified
        units = stringify(units)
        if units == "dimensionless":
            units = None
        return units

    @staticmethod
    def _nix_attr_to_neo(nix_obj):
        neo_attrs = dict()
        neo_attrs["nix_name"] = nix_obj.name
        neo_attrs["description"] = stringify(nix_obj.definition)
        if nix_obj.metadata:
            for prop in nix_obj.metadata.props:
                values = prop.values
                values = list(v.value for v in values)
                if prop.unit:
                    values = pq.Quantity(values, prop.unit)
                if len(values) == 1:
                    neo_attrs[prop.name] = values[0]
                else:
                    neo_attrs[prop.name] = values
        neo_attrs["name"] = neo_attrs.get("neo_name")
        if "file_datetime" in neo_attrs:
            neo_attrs["file_datetime"] = datetime.fromtimestamp(
                neo_attrs["file_datetime"]
            )
        return neo_attrs

    @staticmethod
    def _group_signals(paths):
        """
        Groups data arrays that were generated by the same Neo Signal object.

        :param paths: A list of paths (strings) of all the signals to be
        grouped
        :return: A list of paths (strings) of signal groups. The last part of
        each path is the common name of the signals in the group.
        """
        grouppaths = list(".".join(p.split(".")[:-1])
                          for p in paths)
        # deduplicating paths
        uniquepaths = []
        for path in grouppaths:
            if path not in uniquepaths:
                uniquepaths.append(path)
        return uniquepaths
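
    # Example (hypothetical names): _group_signals(["sig.0", "sig.1", "other.0"])
    # returns ["sig", "other"], i.e. one entry per signal group with the
    # trailing ".<index>" stripped and duplicates removed.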

    @staticmethod
    def _get_referers(nix_obj, obj_list):
        ref_list = list()
        for ref in obj_list:
            if nix_obj.name in list(src.name for src in ref.sources):
                ref_list.append(ref)
        return ref_list

    @staticmethod
    def _get_time_dimension(obj):
        for dim in obj.dimensions:
            if hasattr(dim, "label") and dim.label == "time":
                return dim
        return None

    @staticmethod
    def _hash_object(obj):
        """
        Computes an MD5 hash of a Neo object based on its attribute values and
        data objects. Child objects are not counted.

        :param obj: A Neo object
        :return: MD5 sum
        """
        objhash = md5()

        def strupdate(a):
            objhash.update(str(a).encode())

        def dupdate(d):
            if isinstance(d, np.ndarray) and not d.flags["C_CONTIGUOUS"]:
                d = d.copy(order="C")
            objhash.update(d)

        # attributes
        strupdate(obj.name)
        strupdate(obj.description)

        # annotations
        for k, v in sorted(obj.annotations.items()):
            strupdate(k)
            strupdate(v)

        # data objects and type-specific attributes
        if isinstance(obj, (Block, Segment)):
            strupdate(obj.rec_datetime)
            strupdate(obj.file_datetime)
        elif isinstance(obj, ChannelIndex):
            for idx in obj.index:
                strupdate(idx)
            for n in obj.channel_names:
                strupdate(n)
            if obj.coordinates is not None:
                for coord in obj.coordinates:
                    for c in coord:
                        strupdate(c)
        elif isinstance(obj, AnalogSignal):
            dupdate(obj)
            dupdate(obj.units)
            dupdate(obj.t_start)
            dupdate(obj.sampling_rate)
            dupdate(obj.t_stop)
        elif isinstance(obj, IrregularlySampledSignal):
            dupdate(obj)
            dupdate(obj.times)
            dupdate(obj.units)
        elif isinstance(obj, Event):
            dupdate(obj.times)
            for l in obj.labels:
                strupdate(l)
        elif isinstance(obj, Epoch):
            dupdate(obj.times)
            dupdate(obj.durations)
            for l in obj.labels:
                strupdate(l)
        elif isinstance(obj, SpikeTrain):
            dupdate(obj.times)
            dupdate(obj.units)
            dupdate(obj.t_stop)
            dupdate(obj.t_start)
            if obj.waveforms is not None:
                dupdate(obj.waveforms)
            dupdate(obj.sampling_rate)
            if obj.left_sweep is not None:
                strupdate(obj.left_sweep)

        # type
        strupdate(type(obj).__name__)

        return objhash.hexdigest()

    def close(self):
        """
        Closes the open nix file and resets maps.
        """
        if (hasattr(self, "nix_file") and
                self.nix_file and self.nix_file.is_open()):
            self.nix_file.close()
            self.nix_file = None
            self._object_map = None
            self._lazy_loaded = None
            self._object_hashes = None
            self._block_read_counter = None

    def __del__(self):
        self.close()