nixio.py

# -*- coding: utf-8 -*-
# Copyright (c) 2016, German Neuroinformatics Node (G-Node)
# Achilleas Koutsou <achilleas.k@gmail.com>
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted under the terms of the BSD License. See
# LICENSE file in the root of the Project.
"""
Module for reading data from files in the NIX format.

Author: Achilleas Koutsou

This IO supports both writing and reading of NIX files. Reading is supported
only if the NIX file was created using this IO.
"""
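
# Typical usage (illustrative sketch only; the file name "example.h5" and the
# variable ``my_block`` are assumptions, not part of this module):
#
#     from neo.io.nixio import NixIO
#
#     with NixIO("example.h5", mode="ow") as io:   # overwrite/create the file
#         io.write_block(my_block)
#
#     with NixIO("example.h5", mode="ro") as io:   # read everything back
#         blocks = io.read_all_blocks()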

from __future__ import absolute_import

import time
from datetime import datetime
from collections import Iterable
import itertools
from hashlib import md5
from uuid import uuid4

import quantities as pq
import numpy as np

from neo.io.baseio import BaseIO
from neo.core import (Block, Segment, ChannelIndex, AnalogSignal,
                      IrregularlySampledSignal, Epoch, Event, SpikeTrain, Unit)
from neo.io.tools import LazyList

try:
    import nixio as nix
    HAVE_NIX = True
except ImportError:
    HAVE_NIX = False

try:
    string_types = basestring
except NameError:
    string_types = str


def stringify(value):
    if value is None:
        return value
    if isinstance(value, bytes):
        value = value.decode()
    return str(value)


def calculate_timestamp(dt):
    if isinstance(dt, datetime):
        return int(time.mktime(dt.timetuple()))
    return int(dt)
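
# Examples for the helpers above (illustrative values; the datetime conversion
# depends on the local timezone):
#     stringify(b"mV")                           -> "mV"
#     calculate_timestamp(1451606400.7)          -> 1451606400
#     calculate_timestamp(datetime(2016, 1, 1))  -> the corresponding POSIX
#                                                   timestamp as an int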


class NixIO(BaseIO):
    """
    Class for reading and writing NIX files.
    """

    is_readable = True
    is_writable = True

    supported_objects = [Block, Segment, ChannelIndex,
                         AnalogSignal, IrregularlySampledSignal,
                         Epoch, Event, SpikeTrain, Unit]
    readable_objects = [Block]
    writeable_objects = [Block]

    name = "NIX"
    extensions = ["h5"]
    mode = "file"

    _container_map = {
        "segments": "groups",
        "analogsignals": "data_arrays",
        "irregularlysampledsignals": "data_arrays",
        "events": "multi_tags",
        "epochs": "multi_tags",
        "spiketrains": "multi_tags",
        "channel_indexes": "sources",
        "units": "sources"
    }

    def __init__(self, filename, mode="rw"):
        """
        Initialise IO instance and NIX file.

        :param filename: Full path to the file
        """
        if not HAVE_NIX:
            raise Exception("Failed to import NIX. "
                            "The NixIO requires the Python bindings for NIX "
                            "(nixio on PyPi). Try `pip install nixio`.")
        BaseIO.__init__(self, filename)
        self.filename = filename
        if mode == "ro":
            filemode = nix.FileMode.ReadOnly
        elif mode == "rw":
            filemode = nix.FileMode.ReadWrite
        elif mode == "ow":
            filemode = nix.FileMode.Overwrite
        else:
            raise ValueError("Invalid mode specified '{}'. "
                             "Valid modes: 'ro' (ReadOnly), 'rw' (ReadWrite),"
                             " 'ow' (Overwrite).".format(mode))
        self.nix_file = nix.File.open(self.filename, filemode, backend="h5py")
        self._neo_map = dict()
        self._nix_map = dict()
        self._lazy_loaded = list()
        self._object_hashes = dict()
        self._block_read_counter = 0
        self._path_map = dict()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def read_all_blocks(self, cascade=True, lazy=False):
        blocks = list()
        for blk in self.nix_file.blocks:
            blocks.append(self.read_block("/" + blk.name, cascade, lazy))
        return blocks

    def read_block(self, path="/", cascade=True, lazy=False):
        if path == "/":
            try:
                nix_block = self.nix_file.blocks[self._block_read_counter]
                path += nix_block.name
                self._block_read_counter += 1
            except KeyError:
                return None
        else:
            nix_block = self._get_object_at(path)
        neo_block = self._block_to_neo(nix_block)
        neo_block.path = path
        if cascade:
            self._read_cascade(nix_block, path, cascade, lazy)
        self._update_maps(neo_block, lazy)
        return neo_block
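
    # Reading sketch (illustrative; the file name and the block path below are
    # assumptions, not values guaranteed to exist):
    #     io = NixIO("example.h5", mode="ro")
    #     first = io.read_block()                     # blocks in file order
    #     by_path = io.read_block("/neo.block.<uuid>")
    #     everything = io.read_all_blocks()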

    def read_segment(self, path, cascade=True, lazy=False):
        nix_group = self._get_object_at(path)
        neo_segment = self._group_to_neo(nix_group)
        neo_segment.path = path
        if cascade:
            self._read_cascade(nix_group, path, cascade, lazy)
        self._update_maps(neo_segment, lazy)
        nix_parent = self._get_parent(path)
        neo_parent = self._neo_map.get(nix_parent.name)
        if neo_parent:
            neo_segment.block = neo_parent
        return neo_segment

    def read_channelindex(self, path, cascade=True, lazy=False):
        nix_source = self._get_object_at(path)
        neo_rcg = self._source_chx_to_neo(nix_source)
        neo_rcg.path = path
        if cascade:
            self._read_cascade(nix_source, path, cascade, lazy)
        self._update_maps(neo_rcg, lazy)
        nix_parent = self._get_parent(path)
        neo_parent = self._neo_map.get(nix_parent.name)
        neo_rcg.block = neo_parent
        return neo_rcg

    def read_signal(self, path, lazy=False):
        nix_data_arrays = list()
        parent_group = self._get_parent(path)
        parent_container = parent_group.data_arrays
        signal_group_name = path.split("/")[-1]
        for idx in itertools.count():
            signal_name = "{}.{}".format(signal_group_name, idx)
            if signal_name in parent_container:
                nix_data_arrays.append(parent_container[signal_name])
            else:
                break
        # check metadata segment
        group_section = nix_data_arrays[0].metadata
        for da in nix_data_arrays:
            assert da.metadata == group_section,\
                "DataArray {} is not a member of signal group {}".format(
                    da.name, group_section.name
                )
        neo_signal = self._signal_da_to_neo(nix_data_arrays, lazy)
        neo_signal.path = path
        if self._find_lazy_loaded(neo_signal) is None:
            self._update_maps(neo_signal, lazy)
            nix_parent = self._get_parent(path)
            neo_parent = self._neo_map.get(nix_parent.name)
            neo_signal.segment = neo_parent
        return neo_signal

    def read_analogsignal(self, path, cascade=True, lazy=False):
        return self.read_signal(path, lazy)

    def read_irregularlysampledsignal(self, path, cascade=True, lazy=False):
        return self.read_signal(path, lazy)

    def read_eest(self, path, lazy=False):
        nix_mtag = self._get_object_at(path)
        neo_eest = self._mtag_eest_to_neo(nix_mtag, lazy)
        neo_eest.path = path
        self._update_maps(neo_eest, lazy)
        nix_parent = self._get_parent(path)
        neo_parent = self._neo_map.get(nix_parent.name)
        neo_eest.segment = neo_parent
        return neo_eest

    def read_epoch(self, path, cascade=True, lazy=False):
        return self.read_eest(path, lazy)

    def read_event(self, path, cascade=True, lazy=False):
        return self.read_eest(path, lazy)

    def read_spiketrain(self, path, cascade=True, lazy=False):
        return self.read_eest(path, lazy)

    def read_unit(self, path, cascade=True, lazy=False):
        nix_source = self._get_object_at(path)
        neo_unit = self._source_unit_to_neo(nix_source)
        neo_unit.path = path
        if cascade:
            self._read_cascade(nix_source, path, cascade, lazy)
        self._update_maps(neo_unit, lazy)
        nix_parent = self._get_parent(path)
        neo_parent = self._neo_map.get(nix_parent.name)
        neo_unit.channel_index = neo_parent
        return neo_unit

    def _block_to_neo(self, nix_block):
        neo_attrs = self._nix_attr_to_neo(nix_block)
        neo_block = Block(**neo_attrs)
        neo_block.rec_datetime = datetime.fromtimestamp(
            nix_block.created_at
        )
        self._neo_map[nix_block.name] = neo_block
        return neo_block

    def _group_to_neo(self, nix_group):
        neo_attrs = self._nix_attr_to_neo(nix_group)
        neo_segment = Segment(**neo_attrs)
        neo_segment.rec_datetime = datetime.fromtimestamp(
            nix_group.created_at
        )
        self._neo_map[nix_group.name] = neo_segment
        return neo_segment

    def _source_chx_to_neo(self, nix_source):
        neo_attrs = self._nix_attr_to_neo(nix_source)
        chx = list(self._nix_attr_to_neo(c)
                   for c in nix_source.sources
                   if c.type == "neo.channelindex")
        chan_names = list(c["neo_name"] for c in chx if "neo_name" in c)
        chan_ids = list(c["channel_id"] for c in chx if "channel_id" in c)
        if chan_names:
            neo_attrs["channel_names"] = chan_names
        if chan_ids:
            neo_attrs["channel_ids"] = chan_ids
        neo_attrs["index"] = np.array([c["index"] for c in chx])
        if "coordinates" in chx[0]:
            coord_units = chx[0]["coordinates.units"]
            coord_values = list(c["coordinates"] for c in chx)
            neo_attrs["coordinates"] = pq.Quantity(coord_values, coord_units)
        rcg = ChannelIndex(**neo_attrs)
        self._neo_map[nix_source.name] = rcg
        return rcg

    def _source_unit_to_neo(self, nix_unit):
        neo_attrs = self._nix_attr_to_neo(nix_unit)
        neo_unit = Unit(**neo_attrs)
        self._neo_map[nix_unit.name] = neo_unit
        return neo_unit

    def _signal_da_to_neo(self, nix_da_group, lazy):
        """
        Convert a group of NIX DataArrays to a Neo signal. This method expects
        a list of data arrays that all represent the same, multidimensional
        Neo Signal object.
        This returns either an AnalogSignal or IrregularlySampledSignal.

        :param nix_da_group: a list of NIX DataArray objects
        :return: a Neo Signal object
        """
        nix_da_group = sorted(nix_da_group, key=lambda d: d.name)
        neo_attrs = self._nix_attr_to_neo(nix_da_group[0])
        metadata = nix_da_group[0].metadata
        neo_type = nix_da_group[0].type
        neo_attrs["nix_name"] = metadata.name  # use the common base name
        unit = nix_da_group[0].unit
        if lazy:
            signaldata = pq.Quantity(np.empty(0), unit)
            lazy_shape = (len(nix_da_group[0]), len(nix_da_group))
        else:
            signaldata = np.array([d[:] for d in nix_da_group]).transpose()
            signaldata = pq.Quantity(signaldata, unit)
            lazy_shape = None
        timedim = self._get_time_dimension(nix_da_group[0])
        if (neo_type == "neo.analogsignal" or
                timedim.dimension_type == nix.DimensionType.Sample):
            if lazy:
                sampling_period = pq.Quantity(1, timedim.unit)
                t_start = pq.Quantity(0, timedim.unit)
            else:
                if "sampling_interval.units" in metadata.props:
                    sample_units = metadata["sampling_interval.units"]
                else:
                    sample_units = timedim.unit
                sampling_period = pq.Quantity(timedim.sampling_interval,
                                              sample_units)
                if "t_start.units" in metadata.props:
                    tsunits = metadata["t_start.units"]
                else:
                    tsunits = timedim.unit
                t_start = pq.Quantity(timedim.offset, tsunits)
            neo_signal = AnalogSignal(
                signal=signaldata, sampling_period=sampling_period,
                t_start=t_start, **neo_attrs
            )
        elif (neo_type == "neo.irregularlysampledsignal"
                or timedim.dimension_type == nix.DimensionType.Range):
            if lazy:
                times = pq.Quantity(np.empty(0), timedim.unit)
            else:
                times = pq.Quantity(timedim.ticks, timedim.unit)
            neo_signal = IrregularlySampledSignal(
                signal=signaldata, times=times, **neo_attrs
            )
        else:
            return None
        for da in nix_da_group:
            self._neo_map[da.name] = neo_signal
        if lazy_shape:
            neo_signal.lazy_shape = lazy_shape
        return neo_signal
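
    # Storage convention handled above (sketch): an n-channel Neo signal whose
    # NIX name is e.g. ``neo.analogsignal.<uuid>`` is stored as one DataArray
    # per channel, named ``neo.analogsignal.<uuid>.0`` ... ``.(n-1)``, all
    # sharing one metadata section; ``_signal_da_to_neo`` reassembles such a
    # group into a single AnalogSignal or IrregularlySampledSignal.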

    def _mtag_eest_to_neo(self, nix_mtag, lazy):
        neo_attrs = self._nix_attr_to_neo(nix_mtag)
        neo_type = nix_mtag.type
        time_unit = nix_mtag.positions.unit
        if lazy:
            times = pq.Quantity(np.empty(0), time_unit)
            lazy_shape = np.shape(nix_mtag.positions)
        else:
            times = pq.Quantity(nix_mtag.positions, time_unit)
            lazy_shape = None
        if neo_type == "neo.epoch":
            if lazy:
                durations = pq.Quantity(np.empty(0), nix_mtag.extents.unit)
                labels = np.empty(0, dtype='S')
            else:
                durations = pq.Quantity(nix_mtag.extents,
                                        nix_mtag.extents.unit)
                labels = np.array(nix_mtag.positions.dimensions[0].labels,
                                  dtype="S")
            eest = Epoch(times=times, durations=durations, labels=labels,
                         **neo_attrs)
        elif neo_type == "neo.event":
            if lazy:
                labels = np.empty(0, dtype='S')
            else:
                labels = np.array(nix_mtag.positions.dimensions[0].labels,
                                  dtype="S")
            eest = Event(times=times, labels=labels, **neo_attrs)
        elif neo_type == "neo.spiketrain":
            if "t_start" in neo_attrs:
                if "t_start.units" in neo_attrs:
                    t_start_units = neo_attrs["t_start.units"]
                    del neo_attrs["t_start.units"]
                else:
                    t_start_units = time_unit
                t_start = pq.Quantity(neo_attrs["t_start"], t_start_units)
                del neo_attrs["t_start"]
            else:
                t_start = None
            if "t_stop" in neo_attrs:
                if "t_stop.units" in neo_attrs:
                    t_stop_units = neo_attrs["t_stop.units"]
                    del neo_attrs["t_stop.units"]
                else:
                    t_stop_units = time_unit
                t_stop = pq.Quantity(neo_attrs["t_stop"], t_stop_units)
                del neo_attrs["t_stop"]
            else:
                t_stop = None
            if "sampling_interval.units" in neo_attrs:
                interval_units = neo_attrs["sampling_interval.units"]
                del neo_attrs["sampling_interval.units"]
            else:
                interval_units = None
            if "left_sweep.units" in neo_attrs:
                left_sweep_units = neo_attrs["left_sweep.units"]
                del neo_attrs["left_sweep.units"]
            else:
                left_sweep_units = None
            eest = SpikeTrain(times=times, t_start=t_start,
                              t_stop=t_stop, **neo_attrs)
            if len(nix_mtag.features):
                wfda = nix_mtag.features[0].data
                wftime = self._get_time_dimension(wfda)
                if lazy:
                    eest.waveforms = pq.Quantity(np.empty((0, 0, 0)),
                                                 wfda.unit)
                    eest.sampling_period = pq.Quantity(1, wftime.unit)
                    eest.left_sweep = pq.Quantity(0, wftime.unit)
                else:
                    eest.waveforms = pq.Quantity(wfda, wfda.unit)
                    if interval_units is None:
                        interval_units = wftime.unit
                    eest.sampling_period = pq.Quantity(
                        wftime.sampling_interval, interval_units
                    )
                    if left_sweep_units is None:
                        left_sweep_units = wftime.unit
                    if "left_sweep" in wfda.metadata:
                        eest.left_sweep = pq.Quantity(
                            wfda.metadata["left_sweep"], left_sweep_units
                        )
        else:
            return None
        self._neo_map[nix_mtag.name] = eest
        if lazy_shape:
            eest.lazy_shape = lazy_shape
        return eest

    def _read_cascade(self, nix_obj, path, cascade, lazy):
        neo_obj = self._neo_map[nix_obj.name]
        for neocontainer in getattr(neo_obj, "_child_containers", []):
            nixcontainer = self._container_map[neocontainer]
            if not hasattr(nix_obj, nixcontainer):
                continue
            if neocontainer == "channel_indexes":
                neotype = "channelindex"
            else:
                neotype = neocontainer[:-1]
            chpaths = list(path + "/" + neocontainer + "/" + c.name
                           for c in getattr(nix_obj, nixcontainer)
                           if c.type == "neo." + neotype)
            if neocontainer in ("analogsignals",
                                "irregularlysampledsignals"):
                chpaths = self._group_signals(chpaths)
            if cascade != "lazy":
                read_func = getattr(self, "read_" + neotype)
                children = list(read_func(cp, cascade, lazy)
                                for cp in chpaths)
            else:
                children = LazyList(self, lazy, chpaths)
            setattr(neo_obj, neocontainer, children)
        if isinstance(neo_obj, ChannelIndex):
            # set references to signals
            parent_block_path = "/" + path.split("/")[1]
            parent_block = self._get_object_at(parent_block_path)
            ref_das = self._get_referers(nix_obj, parent_block.data_arrays)
            ref_signals = list(self._neo_map[da.name] for da in ref_das)
            # deduplicate by name
            ref_signals = list(dict((s.annotations["nix_name"], s)
                                    for s in ref_signals).values())
            for sig in ref_signals:
                if isinstance(sig, AnalogSignal):
                    neo_obj.analogsignals.append(sig)
                elif isinstance(sig, IrregularlySampledSignal):
                    neo_obj.irregularlysampledsignals.append(sig)
                sig.channel_index = neo_obj
        elif isinstance(neo_obj, Unit):
            # set references to spiketrains
            parent_block_path = "/" + path.split("/")[1]
            parent_block = self._get_object_at(parent_block_path)
            ref_mtags = self._get_referers(nix_obj, parent_block.multi_tags)
            ref_sts = list(self._neo_map[mt.name] for mt in ref_mtags)
            for st in ref_sts:
                neo_obj.spiketrains.append(st)
                st.unit = neo_obj

    def get(self, path, cascade, lazy):
        parts = path.split("/")
        if len(parts) > 2:
            neotype = parts[-2][:-1]
        else:
            neotype = "block"
        if neotype == "channel_indexe":
            neotype = "channelindex"
        read_func = getattr(self, "read_" + neotype)
        return read_func(path, cascade, lazy)

    def load_lazy_object(self, obj):
        return self.get(obj.path, cascade=False, lazy=False)

    def load_lazy_cascade(self, path, lazy):
        """
        Loads the object at the location specified by the path and all
        children. Data is loaded if lazy is False.

        :param path: Location of object in file
        :param lazy: Do not load data if True
        :return: The loaded object
        """
        neoobj = self.get(path, cascade=True, lazy=lazy)
        return neoobj

    def write_all_blocks(self, neo_blocks):
        """
        Convert all ``neo_blocks`` to the NIX equivalent and write them to the
        file.

        :param neo_blocks: List (or iterable) containing Neo blocks
        :return: A list containing the new NIX Blocks
        """
        for bl in neo_blocks:
            self.write_block(bl)
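
    # Writing sketch (illustrative; ``blocks`` is an assumed list of Neo
    # Blocks and the file name is an assumption):
    #     with NixIO("example.h5", mode="ow") as io:
    #         io.write_all_blocks(blocks)   # or io.write_block(blocks[0])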

    def _write_object(self, obj, loc=""):
        objtype = type(obj).__name__.lower()
        if isinstance(obj, Block):
            containerstr = "/"
        else:
            if objtype == "channelindex":
                containerstr = "/channel_indexes/"
            else:
                containerstr = "/" + type(obj).__name__.lower() + "s/"
        if "nix_name" in obj.annotations:
            nix_name = obj.annotations["nix_name"]
        else:
            nix_name = "neo.{}.{}".format(objtype, self._generate_nix_name())
            obj.annotate(nix_name=nix_name)
        objpath = loc + containerstr + nix_name
        oldhash = self._object_hashes.get(nix_name)
        if oldhash is None:
            try:
                oldobj = self.get(objpath, cascade=False, lazy=False)
                oldhash = self._hash_object(oldobj)
            except (KeyError, IndexError):
                oldhash = None
        newhash = self._hash_object(obj)
        if oldhash != newhash:
            attr = self._neo_attr_to_nix(obj)
            attr["name"] = nix_name
            if isinstance(obj, pq.Quantity):
                attr.update(self._neo_data_to_nix(obj))
            if oldhash is None:
                nixobj = self._create_nix_obj(loc, attr)
            else:
                nixobj = self._get_object_at(objpath)
            self._write_attr_annotations(nixobj, attr, objpath)
            if isinstance(obj, pq.Quantity):
                self._write_data(nixobj, attr, objpath)
        else:
            nixobj = self._nix_map.get(nix_name)
            if nixobj is None:
                nixobj = self._get_object_at(objpath)
            else:
                # object is already in file but may not be linked at objpath
                objat = self._get_object_at(objpath)
                if not objat:
                    self._link_nix_obj(nixobj, loc, containerstr)
        self._nix_map[nix_name] = nixobj
        self._object_hashes[nix_name] = newhash
        self._write_cascade(obj, objpath)

    def _create_nix_obj(self, loc, attr):
        parentobj = self._get_object_at(loc)
        if attr["type"] == "block":
            nixobj = parentobj.create_block(attr["name"], "neo.block")
            nixobj.metadata = self.nix_file.create_section(
                attr["name"], "neo.block.metadata"
            )
        elif attr["type"] == "segment":
            nixobj = parentobj.create_group(attr["name"], "neo.segment")
            nixobj.metadata = parentobj.metadata.create_section(
                attr["name"], "neo.segment.metadata"
            )
        elif attr["type"] == "channelindex":
            nixobj = parentobj.create_source(attr["name"],
                                             "neo.channelindex")
            nixobj.metadata = parentobj.metadata.create_section(
                attr["name"], "neo.channelindex.metadata"
            )
        elif attr["type"] in ("analogsignal", "irregularlysampledsignal"):
            blockpath = "/" + loc.split("/")[1]
            parentblock = self._get_object_at(blockpath)
            nixobj = list()
            typestr = "neo.{}".format(attr["type"])
            sigmd = parentobj.metadata.create_section(
                attr["name"], "{}.metadata".format(typestr)
            )
            for idx, datarow in enumerate(attr["data"]):
                name = "{}.{}".format(attr["name"], idx)
                da = parentblock.create_data_array(name, typestr, data=datarow)
                da.metadata = sigmd
                nixobj.append(da)
            parentobj.data_arrays.extend(nixobj)
        elif attr["type"] in ("epoch", "event", "spiketrain"):
            blockpath = "/" + loc.split("/")[1]
            parentblock = self._get_object_at(blockpath)
            typestr = "neo.{}".format(attr["type"])
            timesda = parentblock.create_data_array(
                "{}.times".format(attr["name"]), "{}.times".format(typestr),
                data=attr["data"]
            )
            nixobj = parentblock.create_multi_tag(
                attr["name"], typestr, timesda
            )
            nixobj.metadata = parentobj.metadata.create_section(
                attr["name"], "{}.metadata".format(typestr)
            )
            parentobj.multi_tags.append(nixobj)
        elif attr["type"] == "unit":
            nixobj = parentobj.create_source(attr["name"], "neo.unit")
            nixobj.metadata = parentobj.metadata.create_section(
                attr["name"], "neo.unit.metadata"
            )
        else:
            raise ValueError("Unable to create NIX object. Invalid type.")
        return nixobj

    def _link_nix_obj(self, obj, loc, neocontainer):
        parentobj = self._get_object_at(loc)
        container = getattr(parentobj,
                            self._container_map[neocontainer.strip("/")])
        if isinstance(obj, list):
            container.extend(obj)
        else:
            container.append(obj)

    def write_block(self, bl, loc=""):
        """
        Convert ``bl`` to the NIX equivalent and write it to the file.

        :param bl: Neo block to be written
        :param loc: Unused for blocks
        """
        self._write_object(bl, loc)
        self._create_references(bl)

    def write_channelindex(self, chx, loc=""):
        """
        Convert the provided ``chx`` (ChannelIndex) to a NIX Source and write
        it to the NIX file at the location defined by ``loc``.

        :param chx: The Neo ChannelIndex to be written
        :param loc: Path to the parent of the new CHX
        """
        self._write_object(chx, loc)

    def write_segment(self, seg, loc=""):
        """
        Convert the provided ``seg`` to a NIX Group and write it to the NIX
        file at the location defined by ``loc``.

        :param seg: Neo seg to be written
        :param loc: Path to the parent of the new Segment
        """
        self._write_object(seg, loc)

    def write_indices(self, chx, loc=""):
        """
        Create NIX Source objects to represent the individual indices of the
        provided ``chx`` (ChannelIndex) and write them to the NIX file under
        the parent ChannelIndex object.

        :param chx: The Neo ChannelIndex
        :param loc: Path to the CHX
        """
        nixsource = self._nix_map[chx.annotations["nix_name"]]
        for idx, channel in enumerate(chx.index):
            channame = "{}.ChannelIndex{}".format(chx.annotations["nix_name"],
                                                  idx)
            if channame in nixsource.sources:
                nixchan = nixsource.sources[channame]
            else:
                nixchan = nixsource.create_source(channame,
                                                  "neo.channelindex")
                nixchan.metadata = nixsource.metadata.create_section(
                    nixchan.name, "neo.channelindex.metadata"
                )
            nixchan.definition = nixsource.definition
            chanmd = nixchan.metadata
            chanmd["index"] = nix.Value(int(channel))
            if len(chx.channel_names):
                neochanname = stringify(chx.channel_names[idx])
                chanmd["neo_name"] = nix.Value(neochanname)
            if len(chx.channel_ids):
                chanid = chx.channel_ids[idx]
                chanmd["channel_id"] = nix.Value(chanid)
            if chx.coordinates is not None:
                coords = chx.coordinates[idx]
                coordunits = stringify(coords[0].dimensionality)
                nixcoords = tuple(
                    nix.Value(c.rescale(coordunits).magnitude.item())
                    for c in coords
                )
                if "coordinates" in chanmd:
                    del chanmd["coordinates"]
                chanprop = chanmd.create_property("coordinates", nixcoords)
                chanprop.unit = coordunits

    def write_analogsignal(self, anasig, loc=""):
        """
        Convert the provided ``anasig`` (AnalogSignal) to a list of NIX
        DataArray objects and write them to the NIX file at the location
        defined by ``loc``. All DataArray objects created from the same
        AnalogSignal have their metadata section point to the same object.

        :param anasig: The Neo AnalogSignal to be written
        :param loc: Path to the parent of the new AnalogSignal
        """
        self._write_object(anasig, loc)

    def write_irregularlysampledsignal(self, irsig, loc=""):
        """
        Convert the provided ``irsig`` (IrregularlySampledSignal) to a list of
        NIX DataArray objects and write them to the NIX file at the location
        defined by ``loc``. All DataArray objects created from the same
        IrregularlySampledSignal have their metadata section point to the same
        object.

        :param irsig: The Neo IrregularlySampledSignal to be written
        :param loc: Path to the parent of the new IrregularlySampledSignal
        """
        self._write_object(irsig, loc)

    def write_epoch(self, ep, loc=""):
        """
        Convert the provided ``ep`` (Epoch) to a NIX MultiTag and write it to
        the NIX file at the location defined by ``loc``.

        :param ep: The Neo Epoch to be written
        :param loc: Path to the parent of the new MultiTag
        """
        self._write_object(ep, loc)

    def write_event(self, ev, loc=""):
        """
        Convert the provided ``ev`` (Event) to a NIX MultiTag and write it to
        the NIX file at the location defined by ``loc``.

        :param ev: The Neo Event to be written
        :param loc: Path to the parent of the new MultiTag
        """
        self._write_object(ev, loc)

    def write_spiketrain(self, sptr, loc=""):
        """
        Convert the provided ``sptr`` (SpikeTrain) to a NIX MultiTag and write
        it to the NIX file at the location defined by ``loc``.

        :param sptr: The Neo SpikeTrain to be written
        :param loc: Path to the parent of the new MultiTag
        """
        self._write_object(sptr, loc)

    def write_unit(self, ut, loc=""):
        """
        Convert the provided ``ut`` (Unit) to a NIX Source and write it to the
        NIX file at the parent RCG.

        :param ut: The Neo Unit to be written
        :param loc: Path to the parent of the new Source
        """
        self._write_object(ut, loc)

    def _write_cascade(self, neoobj, path=""):
        if isinstance(neoobj, ChannelIndex):
            containers = ["units"]
            self.write_indices(neoobj, path)
        elif isinstance(neoobj, Unit):
            containers = []
        else:
            containers = getattr(neoobj, "_child_containers", [])
        for neocontainer in containers:
            if neocontainer == "channel_indexes":
                neotype = "channelindex"
            else:
                neotype = neocontainer[:-1]
            children = getattr(neoobj, neocontainer)
            write_func = getattr(self, "write_" + neotype)
            for ch in children:
                write_func(ch, path)

    def _create_references(self, block):
        """
        Create references between NIX objects according to the supplied Neo
        Block.
        MultiTags reference DataArrays of the same Group.
        DataArrays reference ChannelIndexs as sources, based on Neo
        RCG -> Signal relationships.
        MultiTags (SpikeTrains) reference ChannelIndexs and Units as
        sources, based on Neo RCG -> Unit -> SpikeTrain relationships.

        :param block: A Neo Block that has already been converted and mapped
        to NIX objects.
        """
        for seg in block.segments:
            group = self._nix_map[seg.annotations["nix_name"]]
            group_signals = self._get_contained_signals(group)
            for mtag in group.multi_tags:
                if mtag.type in ("neo.epoch", "neo.event"):
                    mtag.references.extend([sig for sig in group_signals
                                            if sig not in mtag.references])
        for rcg in block.channel_indexes:
            chidxsrc = self._nix_map[rcg.annotations["nix_name"]]
            das = list(self._nix_map[sig.annotations["nix_name"]]
                       for sig in rcg.analogsignals +
                       rcg.irregularlysampledsignals)
            # flatten nested lists
            das = [da for dalist in das for da in dalist]
            for da in das:
                if chidxsrc not in da.sources:
                    da.sources.append(chidxsrc)
            for unit in rcg.units:
                unitsource = self._nix_map[unit.annotations["nix_name"]]
                for st in unit.spiketrains:
                    stmtag = self._nix_map[st.annotations["nix_name"]]
                    if chidxsrc not in stmtag.sources:
                        stmtag.sources.append(chidxsrc)
                    if unitsource not in stmtag.sources:
                        stmtag.sources.append(unitsource)

    def _get_object_at(self, path):
        """
        Returns the object at the location defined by the path.
        ``path`` is a '/' delimited string. Each part of the string alternates
        between an object name and a container.
        If the requested object is an AnalogSignal or IrregularlySampledSignal,
        identified by the second-to-last part of the path string, a list of
        (DataArray) objects is returned.

        Example path: /block_1/segments/segment_a/events/event_a1

        :param path: Path string
        :return: The object at the location defined by the path
        """
        if path in self._path_map:
            return self._path_map[path]
        if path in ("", "/"):
            return self.nix_file
        parts = path.split("/")
        if parts[0]:
            raise ValueError("Invalid object path: {}".format(path))
        if len(parts) == 2:  # root block
            return self.nix_file.blocks[parts[1]]
        parent_obj = self._get_parent(path)
        container_name = self._container_map[parts[-2]]
        parent_container = getattr(parent_obj, container_name)
        objname = parts[-1]
        if parts[-2] in ["analogsignals", "irregularlysampledsignals"]:
            obj = list()
            for idx in itertools.count():
                name = "{}.{}".format(objname, idx)
                if name in parent_container:
                    obj.append(parent_container[name])
                else:
                    break
        else:
            obj = parent_container[objname]
        self._path_map[path] = obj
        return obj

    def _get_parent(self, path):
        parts = path.split("/")
        parent_path = "/".join(parts[:-2])
        parent_obj = self._get_object_at(parent_path)
        return parent_obj

    def _write_attr_annotations(self, nixobj, attr, path):
        if isinstance(nixobj, list):
            metadata = nixobj[0].metadata
            for obj in nixobj:
                obj.definition = attr["definition"]
            self._write_attr_annotations(nixobj[0], attr, path)
            return
        else:
            metadata = nixobj.metadata
            nixobj.definition = attr["definition"]
        if "neo_name" in attr:
            metadata["neo_name"] = attr["neo_name"]
        if "created_at" in attr:
            nixobj.force_created_at(calculate_timestamp(attr["created_at"]))
        if "file_datetime" in attr:
            self._write_property(metadata,
                                 "file_datetime", attr["file_datetime"])
        if attr.get("rec_datetime"):
            self._write_property(metadata,
                                 "rec_datetime", attr["rec_datetime"])
        if "annotations" in attr:
            for k, v in attr["annotations"].items():
                self._write_property(metadata, k, v)

    def _write_data(self, nixobj, attr, path):
        if isinstance(nixobj, list):
            metadata = nixobj[0].metadata
            metadata["t_start.units"] = nix.Value(attr["t_start.units"])
            for obj in nixobj:
                obj.unit = attr["data.units"]
                if attr["type"] == "analogsignal":
                    timedim = obj.append_sampled_dimension(
                        attr["sampling_interval"]
                    )
                    timedim.unit = attr["sampling_interval.units"]
                elif attr["type"] == "irregularlysampledsignal":
                    timedim = obj.append_range_dimension(attr["times"])
                    timedim.unit = attr["times.units"]
                timedim.label = "time"
                timedim.offset = attr["t_start"]
        else:
            metadata = nixobj.metadata
            nixobj.positions.unit = attr["data.units"]
            blockpath = "/" + path.split("/")[1]
            parentblock = self._get_object_at(blockpath)
            if "extents" in attr:
                extname = nixobj.name + ".durations"
                exttype = nixobj.type + ".durations"
                if extname in parentblock.data_arrays:
                    del parentblock.data_arrays[extname]
                extents = parentblock.create_data_array(
                    extname,
                    exttype,
                    data=attr["extents"]
                )
                extents.unit = attr["extents.units"]
                nixobj.extents = extents
            if "labels" in attr:
                labeldim = nixobj.positions.append_set_dimension()
                labeldim.labels = attr["labels"]
            if "t_start" in attr:
                metadata["t_start"] = nix.Value(attr["t_start"])
                metadata["t_start.units"] = nix.Value(attr["t_start.units"])
            if "t_stop" in attr:
                metadata["t_stop"] = nix.Value(attr["t_stop"])
                metadata["t_stop.units"] = nix.Value(attr["t_stop.units"])
            if "waveforms" in attr:
                wfname = nixobj.name + ".waveforms"
                if wfname in parentblock.data_arrays:
                    del metadata.sections[wfname]
                    del parentblock.data_arrays[wfname]
                    del nixobj.features[0]
                wfda = parentblock.create_data_array(
                    wfname, "neo.waveforms",
                    data=attr["waveforms"]
                )
                wfda.metadata = nixobj.metadata.create_section(
                    wfda.name, "neo.waveforms.metadata"
                )
                wfda.unit = attr["waveforms.units"]
                nixobj.create_feature(wfda, nix.LinkType.Indexed)
                wfda.append_set_dimension()
                wfda.append_set_dimension()
                wftime = wfda.append_sampled_dimension(
                    attr["sampling_interval"]
                )
                metadata["sampling_interval.units"] =\
                    attr["sampling_interval.units"]
                wftime.unit = attr["times.units"]
                wftime.label = "time"
            if "left_sweep" in attr:
                self._write_property(wfda.metadata, "left_sweep",
                                     attr["left_sweep"])

    def _update_maps(self, obj, lazy):
        objidx = self._find_lazy_loaded(obj)
        if lazy and objidx is None:
            self._lazy_loaded.append(obj)
        elif not lazy and objidx is not None:
            self._lazy_loaded.pop(objidx)
        if not lazy:
            nix_name = obj.annotations["nix_name"]
            self._object_hashes[nix_name] = self._hash_object(obj)

    def _find_lazy_loaded(self, obj):
        """
        Finds the index of an object in the _lazy_loaded list by comparing the
        path attribute. Returns None if the object is not in the list.

        :param obj: The object to find
        :return: The index of the object in the _lazy_loaded list or None if
        it was not added
        """
        for idx, llobj in enumerate(self._lazy_loaded):
            if llobj.path == obj.path:
                return idx
        else:
            return None

    @staticmethod
    def _generate_nix_name():
        return uuid4().hex

    @staticmethod
    def _neo_attr_to_nix(neoobj):
        neotype = type(neoobj).__name__
        attrs = dict()
        # NIX metadata does not support None values
        # The property will be excluded to signify 'name is None'
        if neoobj.name is not None:
            attrs["neo_name"] = neoobj.name
        attrs["type"] = neotype.lower()
        attrs["definition"] = neoobj.description
        if isinstance(neoobj, (Block, Segment)):
            attrs["rec_datetime"] = neoobj.rec_datetime
            if neoobj.rec_datetime:
                attrs["created_at"] = neoobj.rec_datetime
            if neoobj.file_datetime:
                attrs["file_datetime"] = neoobj.file_datetime
        if neoobj.annotations:
            attrs["annotations"] = neoobj.annotations
        return attrs

    @classmethod
    def _neo_data_to_nix(cls, neoobj):
        attr = dict()
        attr["data"] = np.transpose(neoobj.magnitude)
        attr["data.units"] = cls._get_units(neoobj)
        if isinstance(neoobj, IrregularlySampledSignal):
            attr["times"] = neoobj.times.magnitude
            attr["times.units"] = cls._get_units(neoobj.times)
        else:
            attr["times.units"] = cls._get_units(neoobj.times, True)
        if hasattr(neoobj, "t_start"):
            attr["t_start"] = neoobj.t_start.magnitude.item()
            attr["t_start.units"] = cls._get_units(neoobj.t_start)
        if hasattr(neoobj, "t_stop"):
            attr["t_stop"] = neoobj.t_stop.magnitude.item()
            attr["t_stop.units"] = cls._get_units(neoobj.t_stop)
        if hasattr(neoobj, "sampling_period"):
            attr["sampling_interval"] = neoobj.sampling_period.magnitude.item()
            attr["sampling_interval.units"] = cls._get_units(
                neoobj.sampling_period
            )
        if hasattr(neoobj, "durations"):
            attr["extents"] = neoobj.durations
            attr["extents.units"] = cls._get_units(neoobj.durations)
        if hasattr(neoobj, "labels"):
            attr["labels"] = neoobj.labels.tolist()
        if hasattr(neoobj, "waveforms") and neoobj.waveforms is not None:
            attr["waveforms"] = list(wf.magnitude for wf in
                                     list(wfgroup for wfgroup in
                                          neoobj.waveforms))
            attr["waveforms.units"] = cls._get_units(neoobj.waveforms)
        if hasattr(neoobj, "left_sweep") and neoobj.left_sweep is not None:
            attr["left_sweep"] = neoobj.left_sweep.magnitude
            attr["left_sweep.units"] = cls._get_units(neoobj.left_sweep)
        return attr

    def _write_property(self, section, name, v):
        """
        Create a metadata property with a given name and value on the provided
        metadata section.

        :param section: The metadata section to hold the new property
        :param name: The name of the property
        :param v: The value to write
        :return: The newly created property
        """
        if isinstance(v, pq.Quantity):
            if len(v.shape):
                section[name] = list(nix.Value(vv) for vv in v.magnitude)
            else:
                section[name] = nix.Value(v.magnitude.item())
            section.props[name].unit = str(v.dimensionality)
        elif isinstance(v, datetime):
            section[name] = nix.Value(calculate_timestamp(v))
        elif isinstance(v, string_types):
            section[name] = nix.Value(v)
        elif isinstance(v, bytes):
            section[name] = nix.Value(v.decode())
        elif isinstance(v, Iterable):
            values = []
            unit = None
            for item in v:
                if isinstance(item, pq.Quantity):
                    unit = str(item.dimensionality)
                    item = nix.Value(item.magnitude.item())
                elif isinstance(item, Iterable):
                    self.logger.warn("Multidimensional arrays and nested "
                                     "containers are not currently supported "
                                     "when writing to NIX.")
                    return None
                elif type(item).__module__ == "numpy":
                    item = nix.Value(item.item())
                else:
                    item = nix.Value(item)
                values.append(item)
            section[name] = values
            section.props[name].unit = unit
        elif type(v).__module__ == "numpy":
            section[name] = nix.Value(v.item())
        else:
            section[name] = nix.Value(v)
        return section.props[name]

    @staticmethod
    def _get_contained_signals(obj):
        return list(
            da for da in obj.data_arrays
            if da.type in ["neo.analogsignal", "neo.irregularlysampledsignal"]
        )

    @staticmethod
    def _get_units(quantity, simplify=False):
        """
        Returns the units of a quantity value or array as a string, or None if
        it is dimensionless.

        :param quantity: Quantity scalar or array
        :param simplify: True/False Simplify units
        :return: Units of the quantity or None if dimensionless
        """
        units = quantity.units.dimensionality
        if simplify:
            units = units.simplified
        units = stringify(units)
        if units == "dimensionless":
            units = None
        return units
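
    # Examples for ``_get_units`` (sketch, illustrative values):
    #     _get_units(pq.Quantity(1.0, "mV"))                   -> "mV"
    #     _get_units(pq.Quantity([1, 2], "s"), simplify=True)  -> "s"
    #     _get_units(pq.Quantity(3.0))                         -> None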

    @staticmethod
    def _nix_attr_to_neo(nix_obj):
        neo_attrs = dict()
        neo_attrs["nix_name"] = nix_obj.name
        neo_attrs["description"] = stringify(nix_obj.definition)
        if nix_obj.metadata:
            for prop in nix_obj.metadata.props:
                values = prop.values
                values = list(v.value for v in values)
                if prop.unit:
                    values = pq.Quantity(values, prop.unit)
                if len(values) == 1:
                    neo_attrs[prop.name] = values[0]
                else:
                    neo_attrs[prop.name] = values
        neo_attrs["name"] = neo_attrs.get("neo_name")
        if "file_datetime" in neo_attrs:
            neo_attrs["file_datetime"] = datetime.fromtimestamp(
                neo_attrs["file_datetime"]
            )
        return neo_attrs

    @staticmethod
    def _group_signals(paths):
        """
        Groups data arrays that were generated by the same Neo Signal object.

        :param paths: A list of paths (strings) of all the signals to be
        grouped
        :return: A list of paths (strings) of signal groups. The last part of
        each path is the common name of the signals in the group.
        """
        grouppaths = list(".".join(p.split(".")[:-1])
                          for p in paths)
        # deduplicating paths
        uniquepaths = []
        for path in grouppaths:
            if path not in uniquepaths:
                uniquepaths.append(path)
        return uniquepaths
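
    # Example for ``_group_signals`` (sketch; the path names are assumptions):
    #     _group_signals(["/blk/segments/seg/analogsignals/sig.0",
    #                     "/blk/segments/seg/analogsignals/sig.1"])
    #     -> ["/blk/segments/seg/analogsignals/sig"]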

    @staticmethod
    def _get_referers(nix_obj, obj_list):
        ref_list = list()
        for ref in obj_list:
            if nix_obj.name in list(src.name for src in ref.sources):
                ref_list.append(ref)
        return ref_list

    @staticmethod
    def _get_time_dimension(obj):
        for dim in obj.dimensions:
            if hasattr(dim, "label") and dim.label == "time":
                return dim
        return None

    @staticmethod
    def _hash_object(obj):
        """
        Computes an MD5 hash of a Neo object based on its attribute values and
        data objects. Child objects are not counted.

        :param obj: A Neo object
        :return: MD5 sum
        """
        objhash = md5()

        def strupdate(a):
            objhash.update(str(a).encode())

        def dupdate(d):
            if isinstance(d, np.ndarray) and not d.flags["C_CONTIGUOUS"]:
                d = d.copy(order="C")
            objhash.update(d)

        # attributes
        strupdate(obj.name)
        strupdate(obj.description)

        # annotations
        for k, v in sorted(obj.annotations.items()):
            strupdate(k)
            strupdate(v)

        # data objects and type-specific attributes
        if isinstance(obj, (Block, Segment)):
            strupdate(obj.rec_datetime)
            strupdate(obj.file_datetime)
        elif isinstance(obj, ChannelIndex):
            for idx in obj.index:
                strupdate(idx)
            for n in obj.channel_names:
                strupdate(n)
            if obj.coordinates is not None:
                for coord in obj.coordinates:
                    for c in coord:
                        strupdate(c)
        elif isinstance(obj, AnalogSignal):
            dupdate(obj)
            dupdate(obj.units)
            dupdate(obj.t_start)
            dupdate(obj.sampling_rate)
            dupdate(obj.t_stop)
        elif isinstance(obj, IrregularlySampledSignal):
            dupdate(obj)
            dupdate(obj.times)
            dupdate(obj.units)
        elif isinstance(obj, Event):
            dupdate(obj.times)
            for l in obj.labels:
                strupdate(l)
        elif isinstance(obj, Epoch):
            dupdate(obj.times)
            dupdate(obj.durations)
            for l in obj.labels:
                strupdate(l)
        elif isinstance(obj, SpikeTrain):
            dupdate(obj.times)
            dupdate(obj.units)
            dupdate(obj.t_stop)
            dupdate(obj.t_start)
            if obj.waveforms is not None:
                dupdate(obj.waveforms)
            dupdate(obj.sampling_rate)
            if obj.left_sweep is not None:
                strupdate(obj.left_sweep)

        # type
        strupdate(type(obj).__name__)

        return objhash.hexdigest()

    def close(self):
        """
        Closes the open nix file and resets maps.
        """
        if (hasattr(self, "nix_file") and
                self.nix_file and self.nix_file.is_open()):
            self.nix_file.close()
            self.nix_file = None
            self._lazy_loaded = None
            self._object_hashes = None
            self._block_read_counter = None

    def __del__(self):
        self.close()