# Copyright (c) 2016, German Neuroinformatics Node (G-Node)
# Achilleas Koutsou <achilleas.k@gmail.com>
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted under the terms of the BSD License. See
# LICENSE file in the root of the Project.
"""
Tests for NixIO
"""

import os
import shutil
try:
    from collections.abc import Iterable
except ImportError:
    from collections import Iterable
from datetime import date, time, datetime
from tempfile import mkdtemp
from itertools import chain
import unittest
import string

import numpy as np
import quantities as pq

from neo.core import (Block, Segment, ChannelIndex, AnalogSignal,
                      IrregularlySampledSignal, Unit, SpikeTrain,
                      Event, Epoch, ImageSequence, Group, ChannelView)
from neo.test.iotest.common_io_test import BaseTestIO
from neo.io.nixio import (NixIO, create_quantity, units_to_string, neover,
                          dt_from_nix, dt_to_nix, DATETIMEANNOTATION)
from neo.io.nixio_fr import NixIO as NixIO_lazy
from neo.io.proxyobjects import (AnalogSignalProxy, SpikeTrainProxy,
                                 EventProxy, EpochProxy)

try:
    import nixio as nix
    HAVE_NIX = True
except ImportError:
    HAVE_NIX = False

try:
    from unittest import mock
    SKIPMOCK = False
except ImportError:
    SKIPMOCK = True
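
# NixIOTest below is the shared base class for the NixIO test cases in this
# module: it provides comparison helpers (compare_blocks, compare_attr, ...)
# that walk a Neo object tree and the corresponding NIX file side by side, and
# random-data helpers (rword, rsentence, rquant, ...) used to build fixtures.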
@unittest.skipUnless(HAVE_NIX, "Requires NIX")
class NixIOTest(unittest.TestCase):

    io = None
    tempdir = None
    filename = None

    def compare_blocks(self, neoblocks, nixblocks):
        for neoblock, nixblock in zip(neoblocks, nixblocks):
            self.compare_attr(neoblock, nixblock)
            self.assertEqual(len(neoblock.segments),
                             len([grp for grp in nixblock.groups if grp.type == "neo.segment"]))
            self.assertEqual(len(neoblock.groups),
                             len([grp for grp in nixblock.groups if grp.type == "neo.group"]))
            for idx, neoseg in enumerate(neoblock.segments):
                nixgrp = nixblock.groups[neoseg.annotations["nix_name"]]
                self.compare_segment_group(neoseg, nixgrp)
            self.assertEqual(len(neoblock.channel_indexes),
                             len(nixblock.sources))
            for idx, neochx in enumerate(neoblock.channel_indexes):
                nixsrc = nixblock.sources[neochx.annotations["nix_name"]]
                self.compare_chx_source(neochx, nixsrc)
            self.check_refs(neoblock, nixblock)

    def compare_chx_source(self, neochx, nixsrc):
        self.compare_attr(neochx, nixsrc)
        nix_channels = list(src for src in nixsrc.sources
                            if src.type == "neo.channelindex")
        self.assertEqual(len(neochx.index), len(nix_channels))
        if len(neochx.channel_ids):
            nix_chanids = list(src.metadata["channel_id"] for src
                               in nixsrc.sources
                               if src.type == "neo.channelindex")
            self.assertEqual(len(neochx.channel_ids), len(nix_chanids))
        # coordinates can be 1D if there's only one channel
        if neochx.coordinates is not None:
            neocoordinates = neochx.coordinates
            if np.ndim(neocoordinates) == 1:
                neocoordinates = [neocoordinates]
        else:
            neocoordinates = []
        for nixchan in nix_channels:
            nixchanidx = nixchan.metadata["index"]
            try:
                neochanpos = list(neochx.index).index(nixchanidx)
            except ValueError:
                self.fail("Channel indexes do not match.")
            if len(neochx.channel_names):
                neochanname = neochx.channel_names[neochanpos]
                if ((not isinstance(neochanname, str)) and
                        isinstance(neochanname, bytes)):
                    neochanname = neochanname.decode()
                nixchanname = nixchan.metadata["neo_name"]
                self.assertEqual(neochanname, nixchanname)
            else:
                # Check if channel name exists but not loaded
                self.assertNotIn("neo_name", nixchan.metadata)
            if len(neochx.channel_ids):
                neochanid = neochx.channel_ids[neochanpos]
                nixchanid = nixchan.metadata["channel_id"]
                self.assertEqual(neochanid, nixchanid)
            elif "channel_id" in nixchan.metadata:
                self.fail("Channel ID not loaded")
            if len(neocoordinates):
                neocoord = neocoordinates[neochanpos]
                nixcoord = nixchan.metadata.props["coordinates"]
                nixcoord = create_quantity(nixcoord.values, nixcoord.unit)
                self.assertTrue(all(neocoord == nixcoord),
                                msg="{} != {}".format(neocoord, nixcoord))
        nix_units = list(src for src in nixsrc.sources
                         if src.type == "neo.unit")
        self.assertEqual(len(neochx.units), len(nix_units))
        for neounit in neochx.units:
            nixunit = nixsrc.sources[neounit.annotations["nix_name"]]
            self.compare_attr(neounit, nixunit)

    def check_refs(self, neoblock, nixblock):
        """
        Checks whether the references between objects that are not nested are
        mapped correctly (e.g., SpikeTrains referenced by a Unit).
        :param neoblock: A Neo block
        :param nixblock: The corresponding NIX block
        """
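        # In the NIX mapping these links are expressed through the "sources" of
        # DataArrays/MultiTags (ChannelIndex and Unit membership) and through
        # MultiTag "references" (the signals an Event/Epoch applies to), which
        # is what the loops below inspect.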
        for idx, neochx in enumerate(neoblock.channel_indexes):
            nixchx = nixblock.sources[neochx.annotations["nix_name"]]
            # AnalogSignals referencing CHX
            neoasigs = list(sig.annotations["nix_name"]
                            for sig in neochx.analogsignals)
            nixasigs = list({da.metadata.name for da in nixblock.data_arrays
                             if da.type == "neo.analogsignal"
                             and nixchx in da.sources})
            self.assertEqual(len(neoasigs), len(nixasigs))
            # IrregularlySampledSignals referencing CHX
            neoisigs = list(sig.annotations["nix_name"] for sig in
                            neochx.irregularlysampledsignals)
            nixisigs = list(
                {da.metadata.name for da in nixblock.data_arrays
                 if da.type == "neo.irregularlysampledsignal"
                 and nixchx in da.sources}
            )
            self.assertEqual(len(neoisigs), len(nixisigs))
            # SpikeTrains referencing CHX and Units
            for sidx, neounit in enumerate(neochx.units):
                nixunit = nixchx.sources[neounit.annotations["nix_name"]]
                neosts = list(st.annotations["nix_name"]
                              for st in neounit.spiketrains)
                nixsts = list(mt for mt in nixblock.multi_tags
                              if mt.type == "neo.spiketrain" and
                              nixunit.name in mt.sources)
                # SpikeTrains must also reference CHX
                for nixst in nixsts:
                    self.assertIn(nixchx.name, nixst.sources)
                nixsts = list(st.name for st in nixsts)
                self.assertEqual(len(neosts), len(nixsts))
                for neoname in neosts:
                    if neoname:
                        self.assertIn(neoname, nixsts)
        # Events and Epochs must reference all Signals in the Group (NIX only)
        for nixgroup in nixblock.groups:
            nixevep = list(mt for mt in nixgroup.multi_tags
                           if mt.type in ["neo.event", "neo.epoch"])
            nixsigs = list(da.name for da in nixgroup.data_arrays
                           if da.type in ["neo.analogsignal",
                                          "neo.irregularlysampledsignal"])
            for nee in nixevep:
                for ns in nixsigs:
                    self.assertIn(ns, nee.references)

    def compare_segment_group(self, neoseg, nixgroup):
        self.compare_attr(neoseg, nixgroup)
        neo_signals = neoseg.analogsignals + neoseg.irregularlysampledsignals \
            + neoseg.imagesequences
        self.compare_signals_das(neo_signals, nixgroup.data_arrays)
        neo_eests = neoseg.epochs + neoseg.events + neoseg.spiketrains
        self.compare_eests_mtags(neo_eests, nixgroup.multi_tags)

    def compare_signals_das(self, neosignals, data_arrays):
        totalsignals = 0
        for sig in neosignals:
            dalist = list()
            nixname = sig.annotations["nix_name"]
            for da in data_arrays:
                if da.metadata.name == nixname:
                    dalist.append(da)
            nsig = np.shape(sig)[-1]
            totalsignals += nsig
            self.assertEqual(nsig, len(dalist))
            self.compare_signal_dalist(sig, dalist)
        self.assertEqual(totalsignals, len(data_arrays))

    def compare_signal_dalist(self, neosig, nixdalist):
        """
        Check if a Neo Analog or IrregularlySampledSignal matches a list of
        NIX DataArrays.
        :param neosig: Neo Analog or IrregularlySampledSignal
        :param nixdalist: List of DataArrays
        """
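        # A multi-channel Neo signal is stored as one NIX DataArray per
        # channel; all of them share the same metadata section, so the first
        # DataArray's metadata serves as the reference for the whole list and
        # the signal columns are compared against the DataArrays one by one.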
        nixmd = nixdalist[0].metadata
        self.assertTrue(all(nixmd == da.metadata for da in nixdalist))
        neounit = neosig.units
        if isinstance(neosig, AnalogSignalProxy):
            neosig = neosig.load()
        for sig, da in zip(np.transpose(neosig), nixdalist):
            self.compare_attr(neosig, da)
            daquant = create_quantity(da[:], da.unit)
            np.testing.assert_almost_equal(sig.view(pq.Quantity), daquant)
            nixunit = create_quantity(1, da.unit)
            self.assertEqual(neounit, nixunit)
            if isinstance(neosig, AnalogSignal):
                timedim = da.dimensions[0]
                self.assertEqual(timedim.dimension_type,
                                 nix.DimensionType.Sample)
                neosp = neosig.sampling_period
                nixsp = create_quantity(timedim.sampling_interval,
                                        timedim.unit)
                self.assertEqual(neosp, nixsp)
                tsunit = timedim.unit
                if "t_start.units" in da.metadata.props:
                    tsunit = da.metadata["t_start.units"]
                neots = neosig.t_start
                nixts = create_quantity(timedim.offset, tsunit)
                self.assertEqual(neots, nixts)
            elif isinstance(neosig, IrregularlySampledSignal):
                timedim = da.dimensions[0]
                self.assertEqual(timedim.dimension_type,
                                 nix.DimensionType.Range)
                np.testing.assert_almost_equal(neosig.times.magnitude,
                                               timedim.ticks)
                self.assertEqual(timedim.unit,
                                 units_to_string(neosig.times.units))
            elif isinstance(neosig, ImageSequence):
                rate = da.metadata["sampling_rate"]
                unit = da.metadata.props["sampling_rate"].unit
                sampling_rate = create_quantity(rate, unit)
                neosr = neosig.sampling_rate
                self.assertEqual(sampling_rate, neosr)
                scale = da.metadata["spatial_scale"]
                unit = da.metadata.props["spatial_scale"].unit
                spatial_scale = create_quantity(scale, unit)
                neosps = neosig.spatial_scale
                self.assertEqual(spatial_scale, neosps)

    def compare_eests_mtags(self, eestlist, mtaglist):
        self.assertEqual(len(eestlist), len(mtaglist))
        for eest in eestlist:
            if isinstance(eest, (EventProxy, EpochProxy, SpikeTrainProxy)):
                eest = eest.load()
            mtag = mtaglist[eest.annotations["nix_name"]]
            if isinstance(eest, Epoch):
                self.compare_epoch_mtag(eest, mtag)
            elif isinstance(eest, Event):
                self.compare_event_mtag(eest, mtag)
            elif isinstance(eest, SpikeTrain):
                self.compare_spiketrain_mtag(eest, mtag)
            else:
                self.fail("Stray object")

    def compare_epoch_mtag(self, epoch, mtag):
        self.assertEqual(mtag.type, "neo.epoch")
        self.compare_attr(epoch, mtag)
        pos = mtag.positions
        posquant = create_quantity(pos[:], pos.unit)
        ext = mtag.extents
        extquant = create_quantity(ext[:], ext.unit)
        np.testing.assert_almost_equal(epoch.as_quantity(), posquant)
        np.testing.assert_almost_equal(epoch.durations, extquant)
        for neol, nixl in zip(epoch.labels,
                              mtag.positions.dimensions[0].labels):
            self.assertEqual(neol, nixl)

    def compare_event_mtag(self, event, mtag):
        self.assertEqual(mtag.type, "neo.event")
        self.compare_attr(event, mtag)
        pos = mtag.positions
        posquant = create_quantity(pos[:], pos.unit)
        np.testing.assert_almost_equal(event.as_quantity(), posquant)
        for neol, nixl in zip(event.labels,
                              mtag.positions.dimensions[0].labels):
            self.assertEqual(neol, nixl)

    def compare_spiketrain_mtag(self, spiketrain, mtag):
        self.assertEqual(mtag.type, "neo.spiketrain")
        self.compare_attr(spiketrain, mtag)
        pos = mtag.positions
        posquant = create_quantity(pos[:], pos.unit)
        np.testing.assert_almost_equal(spiketrain.as_quantity(), posquant)
        if len(mtag.features):
            neowfs = spiketrain.waveforms
            nixwfs = mtag.features[0].data
            self.assertEqual(np.shape(neowfs), np.shape(nixwfs))
            for nixwf, neowf in zip(nixwfs, neowfs):
                for nixrow, neorow in zip(nixwf, neowf):
                    for nixv, neov in zip(nixrow, neorow):
                        self.assertEqual(create_quantity(nixv, nixwfs.unit),
                                         neov)
            self.assertEqual(nixwfs.dimensions[0].dimension_type,
                             nix.DimensionType.Set)
            self.assertEqual(nixwfs.dimensions[1].dimension_type,
                             nix.DimensionType.Set)
            self.assertEqual(nixwfs.dimensions[2].dimension_type,
                             nix.DimensionType.Sample)

    def compare_attr(self, neoobj, nixobj):
        if isinstance(neoobj, (AnalogSignal, IrregularlySampledSignal,
                               ImageSequence)):
            nix_name = ".".join(nixobj.name.split(".")[:-1])
        else:
            nix_name = nixobj.name
        self.assertEqual(neoobj.annotations["nix_name"], nix_name)
        self.assertEqual(neoobj.description, nixobj.definition)
        if hasattr(neoobj, "rec_datetime") and neoobj.rec_datetime:
            self.assertEqual(neoobj.rec_datetime,
                             datetime.fromtimestamp(nixobj.created_at))
        if hasattr(neoobj, "file_datetime") and neoobj.file_datetime:
            nixdt = dt_from_nix(nixobj.metadata["file_datetime"],
                                DATETIMEANNOTATION)
            assert neoobj.file_datetime == nixdt
            self.assertEqual(neoobj.file_datetime, nixdt)
        if neoobj.annotations:
            nixmd = nixobj.metadata
            for k, v, in neoobj.annotations.items():
                if k == "nix_name":
                    continue
                if isinstance(v, pq.Quantity):
                    nixunit = nixmd.props[str(k)].unit
                    self.assertEqual(nixunit, units_to_string(v.units))
                    nixvalue = nixmd[str(k)]
                    if isinstance(nixvalue, Iterable):
                        nixvalue = np.array(nixvalue)
                    np.testing.assert_almost_equal(nixvalue, v.magnitude)
                else:
                    self.assertEqual(nixmd[str(k)], v,
                                     "Property value mismatch: {}".format(k))
        if hasattr(neoobj, 'array_annotations'):
            if neoobj.array_annotations:
                nixmd = nixobj.metadata
                for k, v, in neoobj.array_annotations.items():
                    if k in ['labels', 'durations']:
                        continue
                    if isinstance(v, pq.Quantity):
                        nixunit = nixmd.props[str(k)].unit
                        self.assertEqual(nixunit, units_to_string(v.units))
                        nixvalue = nixmd[str(k)]
                        if isinstance(nixvalue, Iterable):
                            nixvalue = np.array(nixvalue)
                        np.testing.assert_almost_equal(nixvalue, v.magnitude)
                    elif isinstance(v, np.ndarray):
                        self.assertTrue(np.all(v == nixmd[str(k)]))
                    else:
                        msg = "Property value mismatch: {}".format(k)
                        self.assertEqual(nixmd[str(k)], v, msg)

    @classmethod
    def create_full_nix_file(cls, filename):
        nixfile = nix.File.open(filename, nix.FileMode.Overwrite)

        nix_block_a = nixfile.create_block(cls.rword(10), "neo.block")
        nix_block_a.definition = cls.rsentence(5, 10)
        nix_block_b = nixfile.create_block(cls.rword(10), "neo.block")
        nix_block_b.definition = cls.rsentence(3, 3)

        nix_block_a.metadata = nixfile.create_section(
            nix_block_a.name, nix_block_a.name + ".metadata"
        )
        nix_block_a.metadata["neo_name"] = cls.rword(5)
        nix_block_b.metadata = nixfile.create_section(
            nix_block_b.name, nix_block_b.name + ".metadata"
        )
        nix_block_b.metadata["neo_name"] = cls.rword(5)

        nix_blocks = [nix_block_a, nix_block_b]
        for blk in nix_blocks:
            for ind in range(3):
                group = blk.create_group(cls.rword(), "neo.segment")
                group.definition = cls.rsentence(10, 15)
                group_md = blk.metadata.create_section(
                    group.name, group.name + ".metadata"
                )
                group.metadata = group_md

        blk = nix_blocks[0]
        group = blk.groups[0]
        allspiketrains = list()
        allsignalgroups = list()

        # analogsignals
        for n in range(5):
            siggroup = list()
            asig_name = "{}_asig{}".format(cls.rword(10), n)
            asig_definition = cls.rsentence(5, 5)
            asig_md = group.metadata.create_section(asig_name,
                                                    asig_name + ".metadata")
            arr_ann_name, arr_ann_val = 'anasig_arr_ann', cls.rquant(10, pq.uV)
            asig_md.create_property(arr_ann_name,
                                    arr_ann_val.magnitude.flatten())
            asig_md.props[arr_ann_name].unit = str(arr_ann_val.dimensionality)
            asig_md.props[arr_ann_name].type = 'ARRAYANNOTATION'
            for idx in range(10):
                da_asig = blk.create_data_array(
                    "{}.{}".format(asig_name, idx),
                    "neo.analogsignal",
                    data=cls.rquant(100, 1)
                )
                da_asig.definition = asig_definition
                da_asig.unit = "mV"
                da_asig.metadata = asig_md
                timedim = da_asig.append_sampled_dimension(0.01)
                timedim.unit = "ms"
                timedim.label = "time"
                timedim.offset = 10
                da_asig.append_set_dimension()
                group.data_arrays.append(da_asig)
                siggroup.append(da_asig)
            asig_md["t_start.dim"] = "ms"
            allsignalgroups.append(siggroup)

        # imagesequence
        for n in range(5):
            imgseqgroup = list()
            imgseq_name = "{}_imgs{}".format(cls.rword(10), n)
            imgseq_definition = cls.rsentence(5, 5)
            imgseq_md = group.metadata.create_section(imgseq_name,
                                                      imgseq_name + ".metadata")
            arr_ann_name, arr_ann_val = 'imgseq_arr_ann', cls.rquant(10, pq.V)
            imgseq_md.create_property(arr_ann_name,
                                      arr_ann_val.magnitude.flatten())
            imgseq_md.props[arr_ann_name].unit = str(arr_ann_val.dimensionality)
            imgseq_md.props[arr_ann_name].type = 'ARRAYANNOTATION'
            for idx in range(10):
                da_imgseq = blk.create_data_array(
                    "{}.{}".format(imgseq_name, idx),
                    "neo.imagesequence",
                    data=cls.rquant((20, 10), 1)
                )
                da_imgseq.definition = imgseq_definition
                da_imgseq.unit = "mV"
                da_imgseq.metadata = imgseq_md
                imgseq_md["sampling_rate"] = 10
                imgseq_md.props["sampling_rate"].unit = units_to_string(pq.V)
                imgseq_md["spatial_scale"] = 10
                imgseq_md.props["spatial_scale"].unit = units_to_string(pq.micrometer)
                group.data_arrays.append(da_imgseq)
                imgseqgroup.append(da_imgseq)
            allsignalgroups.append(imgseqgroup)

        # irregularlysampledsignals
        for n in range(2):
            siggroup = list()
            isig_name = "{}_isig{}".format(cls.rword(10), n)
            isig_definition = cls.rsentence(12, 12)
            isig_md = group.metadata.create_section(isig_name,
                                                    isig_name + ".metadata")
            isig_times = cls.rquant(200, 1, True)
            arr_ann_name, arr_ann_val = 'irrsig_arr_ann', cls.rquant(7, pq.uV)
            isig_md.create_property(arr_ann_name,
                                    arr_ann_val.magnitude.flatten())
            isig_md.props[arr_ann_name].unit = str(arr_ann_val.dimensionality)
            isig_md.props[arr_ann_name].type = 'ARRAYANNOTATION'
            for idx in range(7):
                da_isig = blk.create_data_array(
                    "{}.{}".format(isig_name, idx),
                    "neo.irregularlysampledsignal",
                    data=cls.rquant(200, 1)
                )
                da_isig.definition = isig_definition
                da_isig.unit = "mV"
                da_isig.metadata = isig_md
                timedim = da_isig.append_range_dimension(isig_times)
                timedim.unit = "s"
                timedim.label = "time"
                da_isig.append_set_dimension()
                group.data_arrays.append(da_isig)
                siggroup.append(da_isig)
            allsignalgroups.append(siggroup)

        # SpikeTrains with Waveforms
        for n in range(4):
            stname = "{}-st{}".format(cls.rword(20), n)
            times = cls.rquant(40, 1, True)
            times_da = blk.create_data_array(
                "{}.times".format(stname),
                "neo.spiketrain.times",
                data=times
            )
            times_da.unit = "ms"
            mtag_st = blk.create_multi_tag(stname, "neo.spiketrain", times_da)
            group.multi_tags.append(mtag_st)
            mtag_st.definition = cls.rsentence(20, 30)
            mtag_st_md = group.metadata.create_section(
                mtag_st.name, mtag_st.name + ".metadata"
            )
            mtag_st.metadata = mtag_st_md
            mtag_st_md.create_property("t_stop", times[-1] + 1.0)
            arr_ann_name, arr_ann_val = 'st_arr_ann', cls.rquant(40, pq.uV)
            mtag_st_md.create_property(arr_ann_name,
                                       arr_ann_val.magnitude.flatten())
            mtag_st_md.props[arr_ann_name].unit = str(arr_ann_val.dimensionality)
            mtag_st_md.props[arr_ann_name].type = 'ARRAYANNOTATION'

            waveforms = cls.rquant((10, 8, 5), 1)
            wfname = "{}.waveforms".format(mtag_st.name)
            wfda = blk.create_data_array(wfname, "neo.waveforms",
                                         data=waveforms)
            wfda.unit = "mV"
            mtag_st.create_feature(wfda, nix.LinkType.Indexed)
            wfda.append_set_dimension()  # spike dimension
            wfda.append_set_dimension()  # channel dimension
            wftimedim = wfda.append_sampled_dimension(0.1)
            wftimedim.unit = "ms"
            wftimedim.label = "time"
            wfda.metadata = mtag_st_md.create_section(
                wfname, "neo.waveforms.metadata"
            )
            wfda.metadata.create_property("left_sweep",
                                          [20] * 5)
            allspiketrains.append(mtag_st)

        # Epochs
        for n in range(3):
            epname = "{}-ep{}".format(cls.rword(5), n)
            times = cls.rquant(5, 1, True)
            times_da = blk.create_data_array(
                "{}.times".format(epname),
                "neo.epoch.times",
                data=times
            )
            times_da.unit = "s"
            extents = cls.rquant(5, 1)
            extents_da = blk.create_data_array(
                "{}.durations".format(epname),
                "neo.epoch.durations",
                data=extents
            )
            extents_da.unit = "s"
            mtag_ep = blk.create_multi_tag(
                epname, "neo.epoch", times_da
            )
            mtag_ep.metadata = group.metadata.create_section(
                epname, epname + ".metadata"
            )
            group.multi_tags.append(mtag_ep)
            mtag_ep.definition = cls.rsentence(2)
            mtag_ep.extents = extents_da
            arr_ann_name, arr_ann_val = 'ep_arr_ann', cls.rquant(5, pq.uV)
            mtag_ep.metadata.create_property(arr_ann_name,
                                             arr_ann_val.magnitude.flatten())
            mtag_ep.metadata.props[arr_ann_name].unit = str(arr_ann_val.dimensionality)
            mtag_ep.metadata.props[arr_ann_name].type = 'ARRAYANNOTATION'
            label_dim = mtag_ep.positions.append_set_dimension()
            label_dim.labels = cls.rsentence(5).split(" ")
            # reference all signals in the group
            for siggroup in allsignalgroups:
                mtag_ep.references.extend(siggroup)

        # Events
        for n in range(2):
            evname = "{}-ev{}".format(cls.rword(5), n)
            times = cls.rquant(5, 1, True)
            times_da = blk.create_data_array(
                "{}.times".format(evname),
                "neo.event.times",
                data=times
            )
            times_da.unit = "s"
            mtag_ev = blk.create_multi_tag(
                evname, "neo.event", times_da
            )
            mtag_ev.metadata = group.metadata.create_section(
                evname, evname + ".metadata"
            )
            group.multi_tags.append(mtag_ev)
            mtag_ev.definition = cls.rsentence(2)
            arr_ann_name, arr_ann_val = 'ev_arr_ann', cls.rquant(5, pq.uV)
            mtag_ev.metadata.create_property(arr_ann_name,
                                             arr_ann_val.magnitude.flatten())
            mtag_ev.metadata.props[arr_ann_name].unit = str(arr_ann_val.dimensionality)
            mtag_ev.metadata.props[arr_ann_name].type = 'ARRAYANNOTATION'
            label_dim = mtag_ev.positions.append_set_dimension()
            label_dim.labels = cls.rsentence(5).split(" ")
            # reference all signals in the group
            for siggroup in allsignalgroups:
                mtag_ev.references.extend(siggroup)

        # CHX
        nixchx = blk.create_source(cls.rword(10),
                                   "neo.channelindex")
        nixchx.metadata = nix_blocks[0].metadata.create_section(
            nixchx.name, "neo.channelindex.metadata"
        )
        chantype = "neo.channelindex"
        # 3 channels
        for idx, chan in enumerate([2, 5, 9]):
            channame = "{}.ChannelIndex{}".format(nixchx.name, idx)
            nixrc = nixchx.create_source(channame, chantype)
            nixrc.definition = cls.rsentence(13)
            nixrc.metadata = nixchx.metadata.create_section(
                nixrc.name, "neo.channelindex.metadata"
            )
            nixrc.metadata.create_property("index", chan)
            nixrc.metadata.create_property("channel_id", chan + 1)
            dims = cls.rquant(3, 1)
            coordprop = nixrc.metadata.create_property("coordinates", dims)
            coordprop.unit = "pm"

        nunits = 1
        stsperunit = np.array_split(allspiketrains, nunits)
        for idx in range(nunits):
            unitname = "{}-unit{}".format(cls.rword(5), idx)
            nixunit = nixchx.create_source(unitname, "neo.unit")
            nixunit.metadata = nixchx.metadata.create_section(
                unitname, unitname + ".metadata"
            )
            nixunit.definition = cls.rsentence(4, 10)
            for st in stsperunit[idx]:
                st.sources.append(nixchx)
                st.sources.append(nixunit)

        # pick a few signal groups to reference this CHX
        randsiggroups = np.random.choice(allsignalgroups, 5, False)
        for siggroup in randsiggroups:
            for sig in siggroup:
                sig.sources.append(nixchx)

        return nixfile

    @staticmethod
    def rdate():
        return datetime(year=np.random.randint(1980, 2020),
                        month=np.random.randint(1, 13),
                        day=np.random.randint(1, 29))

    @classmethod
    def populate_dates(cls, obj):
        obj.file_datetime = cls.rdate()
        obj.rec_datetime = cls.rdate()

    @staticmethod
    def rword(n=10):
        return "".join(np.random.choice(list(string.ascii_letters), n))

    @classmethod
    def rsentence(cls, n=3, maxwl=10):
        return " ".join(cls.rword(np.random.randint(1, maxwl))
                        for _ in range(n))

    @classmethod
    def rdict(cls, nitems):
        rd = dict()
        for _ in range(nitems):
            key = cls.rword()
            value = cls.rword() if np.random.choice((0, 1)) \
                else np.random.uniform()
            rd[key] = value
        return rd

    @staticmethod
    def rquant(shape, unit, incr=False):
        try:
            dim = len(shape)
        except TypeError:
            dim = 1
        if incr and dim > 1:
            raise TypeError("Shape of quantity array may only be "
                            "one-dimensional when incremental values are "
                            "requested.")
        arr = np.random.random(shape)
        if incr:
            arr = np.array(np.cumsum(arr))
        return arr * unit
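
    # Illustrative rquant() usage (editor's note, not used by the tests):
    #   rquant(10, pq.mV)            -> 1-D Quantity of 10 random values in mV
    #   rquant((20, 10), 1)          -> 20x10 dimensionless array
    #   rquant(40, pq.s, incr=True)  -> cumulatively summed, strictly
    #                                   increasing values, suitable for
    #                                   spike/event times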

    @classmethod
    def create_all_annotated(cls):
        times = cls.rquant(10, pq.s)
        times_ann = {cls.rword(6): cls.rquant(10, pq.ms)}
        signal = cls.rquant((10, 10), pq.V)
        signal_ann = {cls.rword(6): cls.rquant(10, pq.uV)}
        blk = Block()
        blk.annotate(**cls.rdict(3))
        cls.populate_dates(blk)

        seg = Segment()
        seg.annotate(**cls.rdict(4))
        cls.populate_dates(seg)
        blk.segments.append(seg)

        asig = AnalogSignal(signal=signal, sampling_rate=pq.Hz,
                            array_annotations=signal_ann)
        asig.annotate(**cls.rdict(2))
        seg.analogsignals.append(asig)

        isig = IrregularlySampledSignal(times=times, signal=signal,
                                        time_units=pq.s,
                                        array_annotations=signal_ann)
        isig.annotate(**cls.rdict(2))
        seg.irregularlysampledsignals.append(isig)

        epoch = Epoch(times=times, durations=times,
                      array_annotations=times_ann)
        epoch.annotate(**cls.rdict(4))
        seg.epochs.append(epoch)

        event = Event(times=times, array_annotations=times_ann)
        event.annotate(**cls.rdict(4))
        seg.events.append(event)

        spiketrain = SpikeTrain(times=times, t_stop=pq.s, units=pq.s,
                                array_annotations=times_ann)
        d = cls.rdict(6)
        d["quantity"] = pq.Quantity(10, "mV")
        d["qarray"] = pq.Quantity(range(10), "mA")
        spiketrain.annotate(**d)
        seg.spiketrains.append(spiketrain)

        chx = ChannelIndex(name="achx", index=[1, 2], channel_ids=[0, 10])
        chx.annotate(**cls.rdict(5))
        blk.channel_indexes.append(chx)

        unit = Unit()
        unit.annotate(**cls.rdict(2))
        chx.units.append(unit)

        return blk
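
# The write tests below build Neo object trees in memory, write them with
# NixIO, and then verify the result both through the NixIO reader and through
# an independent read-only nixio file handle (see NixIOWriteTest.setUp and
# write_and_compare).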
@unittest.skipUnless(HAVE_NIX, "Requires NIX")
class NixIOWriteTest(NixIOTest):

    def setUp(self):
        self.tempdir = mkdtemp(prefix="nixiotest")
        self.filename = os.path.join(self.tempdir, "testnixio.nix")
        self.writer = NixIO(self.filename, "ow")
        self.io = self.writer
        self.reader = nix.File.open(self.filename, nix.FileMode.ReadOnly)

    def tearDown(self):
        self.writer.close()
        self.reader.close()
        shutil.rmtree(self.tempdir)

    def write_and_compare(self, blocks, use_obj_names=False):
        self.writer.write_all_blocks(blocks, use_obj_names)
        self.compare_blocks(blocks, self.reader.blocks)
        self.compare_blocks(self.writer.read_all_blocks(), self.reader.blocks)
        self.compare_blocks(blocks, self.reader.blocks)
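
    # write_and_compare() checks the written file twice against the original
    # in-memory blocks and once against blocks re-read through NixIO, all via
    # the separate read-only nixio handle opened in setUp().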

    def test_block_write(self):
        block = Block(name=self.rword(),
                      description=self.rsentence())
        self.write_and_compare([block])

        block.annotate(**self.rdict(5))
        self.write_and_compare([block])

    def test_segment_write(self):
        block = Block(name=self.rword())
        segment = Segment(name=self.rword(), description=self.rword())
        block.segments.append(segment)
        self.write_and_compare([block])

        segment.annotate(**self.rdict(2))
        self.write_and_compare([block])

    def test_channel_index_write(self):
        block = Block(name=self.rword())
        chx = ChannelIndex(name=self.rword(),
                           description=self.rsentence(),
                           channel_ids=[10, 20, 30, 50, 80, 130],
                           index=[1, 2, 3, 5, 8, 13])
        block.channel_indexes.append(chx)
        self.write_and_compare([block])

        chx.annotate(**self.rdict(3))
        self.write_and_compare([block])

        chx.channel_names = ["one", "two", "three", "five",
                             "eight", "xiii"]
        chx.coordinates = self.rquant((6, 3), pq.um)
        self.write_and_compare([block])

        # add an empty channel index and check again
        newchx = ChannelIndex(np.array([]))
        block.channel_indexes.append(newchx)
        self.write_and_compare([block])

    def test_channel_index_coords(self):
        block = Block(name=self.rword())
        chxn = ChannelIndex(name=self.rword(),
                            description=self.rsentence(),
                            channel_ids=[10, 20, 30],
                            index=[1, 2, 3])
        chxn.coordinates = self.rquant((3, 3), pq.mm)
        chx1 = ChannelIndex(name=self.rword(),
                            description=self.rsentence(),
                            channel_ids=[1],
                            index=[0])
        chx1.coordinates = self.rquant(2, pq.mm)
        block.channel_indexes.append(chxn)
        block.channel_indexes.append(chx1)
        self.write_and_compare([block])

        # add an empty channel index and check again
        newchx = ChannelIndex(np.array([]))
        block.channel_indexes.append(newchx)
        self.write_and_compare([block])

    def test_signals_write(self):
        block = Block()
        seg = Segment()
        block.segments.append(seg)

        asig = AnalogSignal(signal=self.rquant((19, 15), pq.mV),
                            sampling_rate=pq.Quantity(10, "Hz"))
        seg.analogsignals.append(asig)
        self.write_and_compare([block])

        anotherblock = Block("ir signal block")
        seg = Segment("ir signal seg")
        anotherblock.segments.append(seg)
        irsig = IrregularlySampledSignal(
            signal=np.random.random((20, 30)),
            times=self.rquant(20, pq.ms, True),
            units=pq.A
        )
        seg.irregularlysampledsignals.append(irsig)
        self.write_and_compare([block, anotherblock])

        block.segments[0].analogsignals.append(
            AnalogSignal(signal=[10.0, 1.0, 3.0], units=pq.S,
                         sampling_period=pq.Quantity(3, "s"),
                         dtype=np.double, name="signal42",
                         description="this is an analogsignal",
                         t_start=45 * pq.ms),
        )
        self.write_and_compare([block, anotherblock])

        block.segments[0].irregularlysampledsignals.append(
            IrregularlySampledSignal(times=np.random.random(10),
                                     signal=np.random.random((10, 13)),
                                     units="mV", time_units="s",
                                     dtype=float,
                                     name="some sort of signal",
                                     description="the signal is described")
        )
        self.write_and_compare([block, anotherblock])

    def test_imagesequence_write(self):
        block = Block()
        seg = Segment()
        block.segments.append(seg)

        imgseq = ImageSequence(image_data=self.rquant((19, 10, 15), 1),
                               sampling_rate=pq.Quantity(10, "Hz"),
                               spatial_scale=pq.Quantity(10, "micrometer"),
                               units=pq.V)
        seg.imagesequences.append(imgseq)
        self.write_and_compare([block])

    def test_signals_compound_units(self):
        block = Block()
        seg = Segment()
        block.segments.append(seg)

        units = pq.CompoundUnit("1/30000*V")
        srate = pq.Quantity(10, pq.CompoundUnit("1.0/10 * Hz"))
        asig = AnalogSignal(signal=self.rquant((10, 23), units),
                            sampling_rate=srate)
        seg.analogsignals.append(asig)
        self.write_and_compare([block])

        anotherblock = Block("ir signal block")
        seg = Segment("ir signal seg")
        anotherblock.segments.append(seg)
        irsig = IrregularlySampledSignal(
            signal=np.random.random((20, 3)),
            times=self.rquant(20, pq.CompoundUnit("0.1 * ms"), True),
            units=pq.CompoundUnit("10 * V / s")
        )
        seg.irregularlysampledsignals.append(irsig)
        self.write_and_compare([block, anotherblock])

        block.segments[0].analogsignals.append(
            AnalogSignal(signal=[10.0, 1.0, 3.0], units=pq.S,
                         sampling_period=pq.Quantity(3, "s"),
                         dtype=np.double, name="signal42",
                         description="this is an analogsignal",
                         t_start=45 * pq.CompoundUnit("3.14 * s")),
        )
        self.write_and_compare([block, anotherblock])

        times = self.rquant(10, pq.CompoundUnit("3 * year"), True)
        block.segments[0].irregularlysampledsignals.append(
            IrregularlySampledSignal(times=times,
                                     signal=np.random.random((10, 3)),
                                     units="mV", dtype=float,
                                     name="some sort of signal",
                                     description="the signal is described")
        )
        self.write_and_compare([block, anotherblock])

    def test_imagesequence_compound_units(self):
        block = Block()
        seg = Segment()
        block.segments.append(seg)

        units = pq.CompoundUnit("1/30000*V")
        srate = pq.Quantity(10, pq.CompoundUnit("1.0/10 * Hz"))
        size = pq.Quantity(10, pq.CompoundUnit("1.0/10 * micrometer"))
        imgseq = ImageSequence(image_data=self.rquant((10, 20, 10), units),
                               sampling_rate=srate, spatial_scale=size)
        seg.imagesequences.append(imgseq)
        self.write_and_compare([block])

    def test_epoch_write(self):
        block = Block()
        seg = Segment()
        block.segments.append(seg)

        epoch = Epoch(times=[1, 1, 10, 3] * pq.ms,
                      durations=[3, 3, 3, 1] * pq.ms,
                      labels=np.array(["one", "two", "three", "four"], dtype='U'),
                      name="test epoch", description="an epoch for testing")
        seg.epochs.append(epoch)
        self.write_and_compare([block])

    def test_event_write(self):
        block = Block()
        seg = Segment()
        block.segments.append(seg)

        event = Event(times=np.arange(0, 30, 10) * pq.s,
                      labels=np.array(["0", "1", "2"], dtype='U'),
                      name="event name",
                      description="event description")
        seg.events.append(event)
        self.write_and_compare([block])

    def test_spiketrain_write(self):
        block = Block()
        seg = Segment()
        block.segments.append(seg)

        spiketrain = SpikeTrain(times=[3, 4, 5] * pq.s, t_stop=10.0,
                                name="spikes!", description="sssssspikes")
        seg.spiketrains.append(spiketrain)
        self.write_and_compare([block])

        waveforms = self.rquant((3, 5, 10), pq.mV)
        spiketrain = SpikeTrain(times=[1, 1.1, 1.2] * pq.ms, t_stop=1.5 * pq.s,
                                name="spikes with wf",
                                description="spikes for waveform test",
                                waveforms=waveforms)
        seg.spiketrains.append(spiketrain)
        self.write_and_compare([block])

        spiketrain.left_sweep = np.random.random(10) * pq.ms
        self.write_and_compare([block])

        spiketrain.left_sweep = pq.Quantity(-10, "ms")
        self.write_and_compare([block])

    def test_group_write(self):
        signals = [
            AnalogSignal(np.random.random(size=(1000, 5)) * pq.mV,
                         sampling_period=1 * pq.ms, name="sig1"),
            AnalogSignal(np.random.random(size=(1000, 3)) * pq.mV,
                         sampling_period=1 * pq.ms, name="sig2"),
        ]
        spiketrains = [
            SpikeTrain([0.1, 54.3, 76.6, 464.2], units=pq.ms,
                       t_stop=1000.0 * pq.ms, t_start=0.0 * pq.ms),
            SpikeTrain([30.1, 154.3, 276.6, 864.2], units=pq.ms,
                       t_stop=1000.0 * pq.ms, t_start=0.0 * pq.ms),
            SpikeTrain([120.1, 454.3, 576.6, 764.2], units=pq.ms,
                       t_stop=1000.0 * pq.ms, t_start=0.0 * pq.ms),
        ]
        epochs = [
            Epoch(times=[0, 500], durations=[100, 100], units=pq.ms, labels=["A", "B"])
        ]
        seg = Segment(name="seg1")
        seg.analogsignals.extend(signals)
        seg.spiketrains.extend(spiketrains)
        seg.epochs.extend(epochs)
        for obj in chain(signals, spiketrains, epochs):
            obj.segment = seg

        views = [ChannelView(index=np.array([0, 3, 4]), obj=signals[0], name="view_of_sig1")]
        groups = [
            Group(objects=(signals[0:1] + spiketrains[0:2] + epochs + views), name="group1"),
            Group(objects=(signals[1:2] + spiketrains[1:] + epochs), name="group2")
        ]
        block = Block(name="block1")
        block.segments.append(seg)
        block.groups.extend(groups)
        for obj in chain([seg], groups):
            obj.block = block

        self.write_and_compare([block])

    def test_group_write_nested(self):
        signals = [
            AnalogSignal(np.random.random(size=(1000, 5)) * pq.mV,
                         sampling_period=1 * pq.ms, name="sig1"),
            AnalogSignal(np.random.random(size=(1000, 3)) * pq.mV,
                         sampling_period=1 * pq.ms, name="sig2"),
        ]
        spiketrains = [
            SpikeTrain([0.1, 54.3, 76.6, 464.2], units=pq.ms,
                       t_stop=1000.0 * pq.ms, t_start=0.0 * pq.ms),
            SpikeTrain([30.1, 154.3, 276.6, 864.2], units=pq.ms,
                       t_stop=1000.0 * pq.ms, t_start=0.0 * pq.ms),
            SpikeTrain([120.1, 454.3, 576.6, 764.2], units=pq.ms,
                       t_stop=1000.0 * pq.ms, t_start=0.0 * pq.ms),
        ]
        epochs = [
            Epoch(times=[0, 500], durations=[100, 100], units=pq.ms, labels=["A", "B"])
        ]
        seg = Segment(name="seg1")
        seg.analogsignals.extend(signals)
        seg.spiketrains.extend(spiketrains)
        seg.epochs.extend(epochs)
        for obj in chain(signals, spiketrains, epochs):
            obj.segment = seg

        views = [ChannelView(index=np.array([0, 3, 4]), obj=signals[0], name="view_of_sig1")]
        subgroup = Group(objects=(signals[0:1] + views), name="subgroup")
        groups = [
            Group(objects=([subgroup] + spiketrains[0:2] + epochs), name="group1"),
            Group(objects=(signals[1:2] + spiketrains[1:] + epochs), name="group2")
        ]
        block = Block(name="block1")
        block.segments.append(seg)
        block.groups.extend(groups)
        for obj in chain([seg], groups):
            obj.block = block

        self.write_and_compare([block])
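
    # The next test inspects the NIX metadata tree directly: each group
    # (segment) and source (channel index / unit) should have a matching
    # section under the block's metadata, and each data array (signal) and
    # multi tag (event/epoch/spiketrain) a matching section under its group's
    # metadata.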
    def test_metadata_structure_write(self):
        neoblk = self.create_all_annotated()
        self.io.write_block(neoblk)
        blk = self.io.nix_file.blocks[0]

        blkmd = blk.metadata
        self.assertEqual(blk.name, blkmd.name)

        grp = blk.groups[0]  # segment
        self.assertIn(grp.name, blkmd.sections)

        grpmd = blkmd.sections[grp.name]
        for da in grp.data_arrays:  # signals
            name = ".".join(da.name.split(".")[:-1])
            self.assertIn(name, grpmd.sections)
        for mtag in grp.multi_tags:  # spiketrains, events, and epochs
            self.assertIn(mtag.name, grpmd.sections)

        srcchx = blk.sources[0]  # chx
        self.assertIn(srcchx.name, blkmd.sections)

        for srcunit in blk.sources:  # units
            self.assertIn(srcunit.name, blkmd.sections)

        self.write_and_compare([neoblk])

    def test_anonymous_objects_write(self):
        nblocks = 2
        nsegs = 2
        nanasig = 4
        nimgseq = 4
        nirrseg = 2
        nepochs = 3
        nevents = 4
        nspiketrains = 3
        nchx = 5
        nunits = 10

        times = self.rquant(1, pq.s)
        signal = self.rquant(1, pq.V)
        blocks = []
        for blkidx in range(nblocks):
            blk = Block()
            blocks.append(blk)
            for segidx in range(nsegs):
                seg = Segment()
                blk.segments.append(seg)
                for anaidx in range(nanasig):
                    seg.analogsignals.append(AnalogSignal(signal=signal,
                                                          sampling_rate=pq.Hz))
                for imgseqdx in range(nimgseq):
                    seg.imagesequences.append(
                        ImageSequence(image_data=self.rquant((10, 20, 10), pq.V),
                                      sampling_rate=pq.Hz,
                                      spatial_scale=pq.micrometer))
                for irridx in range(nirrseg):
                    seg.irregularlysampledsignals.append(
                        IrregularlySampledSignal(times=times,
                                                 signal=signal,
                                                 time_units=pq.s)
                    )
                for epidx in range(nepochs):
                    seg.epochs.append(Epoch(times=times, durations=times))
                for evidx in range(nevents):
                    seg.events.append(Event(times=times))
                for stidx in range(nspiketrains):
                    seg.spiketrains.append(SpikeTrain(times=times,
                                                      t_stop=times[-1] + pq.s,
                                                      units=pq.s))
            for chidx in range(nchx):
                chx = ChannelIndex(index=[1, 2],
                                   channel_ids=[11, 22])
                blk.channel_indexes.append(chx)
                for unidx in range(nunits):
                    unit = Unit()
                    chx.units.append(unit)
        self.writer.write_all_blocks(blocks)
        self.compare_blocks(blocks, self.reader.blocks)
        with self.assertRaises(ValueError):
            self.writer.write_all_blocks(blocks, use_obj_names=True)

    def test_name_objects_write(self):
        nblocks = 2
        nsegs = 2
        nanasig = 4
        nimgseq = 2
        nirrseg = 2
        nepochs = 3
        nevents = 4
        nspiketrains = 3
        nchx = 5
        nunits = 10

        times = self.rquant(1, pq.s)
        signal = self.rquant(1, pq.V)
        blocks = []
        for blkidx in range(nblocks):
            blk = Block(name="block{}".format(blkidx))
            blocks.append(blk)
            for segidx in range(nsegs):
                seg = Segment(name="seg{}".format(segidx))
                blk.segments.append(seg)
                for anaidx in range(nanasig):
                    asig = AnalogSignal(
                        name="{}:as{}".format(seg.name, anaidx),
                        signal=signal, sampling_rate=pq.Hz
                    )
                    seg.analogsignals.append(asig)
                # imagesequence
                for imgseqdx in range(nimgseq):
                    imseq = ImageSequence(
                        name="{}:imgs{}".format(seg.name, imgseqdx),
                        image_data=np.random.rand(20, 10, 10), units=pq.mV,
                        sampling_rate=pq.Hz, spatial_scale=pq.micrometer
                    )
                    seg.imagesequences.append(imseq)
                for irridx in range(nirrseg):
                    isig = IrregularlySampledSignal(
                        name="{}:is{}".format(seg.name, irridx),
                        times=times,
                        signal=signal,
                        time_units=pq.s
                    )
                    seg.irregularlysampledsignals.append(isig)
                for epidx in range(nepochs):
                    seg.epochs.append(
                        Epoch(name="{}:ep{}".format(seg.name, epidx),
                              times=times, durations=times)
                    )
                for evidx in range(nevents):
                    seg.events.append(
                        Event(name="{}:ev{}".format(seg.name, evidx),
                              times=times)
                    )
                for stidx in range(nspiketrains):
                    seg.spiketrains.append(
                        SpikeTrain(name="{}:st{}".format(seg.name, stidx),
                                   times=times,
                                   t_stop=times[-1] + pq.s,
                                   units=pq.s)
                    )
            for chidx in range(nchx):
                chx = ChannelIndex(name="chx{}".format(chidx),
                                   index=[1, 2],
                                   channel_ids=[11, 22])
                blk.channel_indexes.append(chx)
                for unidx in range(nunits):
                    unit = Unit(name="unit{}".format(unidx))
                    chx.units.append(unit)

        # put guard on _generate_nix_name
        if not SKIPMOCK:
            nixgenmock = mock.Mock(name="_generate_nix_name",
                                   wraps=self.io._generate_nix_name)
            self.io._generate_nix_name = nixgenmock

        self.writer.write_block(blocks[0], use_obj_names=True)
        self.compare_blocks([blocks[0]], self.reader.blocks)
        self.compare_blocks(self.writer.read_all_blocks(), self.reader.blocks)
        self.compare_blocks(blocks, self.reader.blocks)
        if not SKIPMOCK:
            nixgenmock.assert_not_called()

        self.write_and_compare(blocks, use_obj_names=True)
        if not SKIPMOCK:
            nixgenmock.assert_not_called()
        self.assertEqual(self.reader.blocks[0].name, "block0")

        blocks[0].name = blocks[1].name  # name conflict
        with self.assertRaises(ValueError):
            self.writer.write_all_blocks(blocks, use_obj_names=True)

        blocks[0].name = "new name"
        self.assertEqual(blocks[0].segments[1].spiketrains[1].name, "seg1:st1")
        st0 = blocks[0].segments[0].spiketrains[0].name
        blocks[0].segments[0].spiketrains[1].name = st0  # name conflict
        with self.assertRaises(ValueError):
            self.writer.write_all_blocks(blocks, use_obj_names=True)
        with self.assertRaises(ValueError):
            self.writer.write_block(blocks[0], use_obj_names=True)
        if not SKIPMOCK:
            nixgenmock.assert_not_called()

    def test_name_conflicts(self):
        # anon block
        blk = Block()
        with self.assertRaises(ValueError):
            self.io.write_block(blk, use_obj_names=True)

        # two anon blocks
        blocks = [Block(), Block()]
        with self.assertRaises(ValueError):
            self.io.write_all_blocks(blocks, use_obj_names=True)

        # same name blocks
        blocks = [Block(name="one"), Block(name="one")]
        with self.assertRaises(ValueError):
            self.io.write_all_blocks(blocks, use_obj_names=True)

        # one block, two same name segments
        blk = Block("new")
        seg = Segment("I am the segment", a="a annotation")
        blk.segments.append(seg)
        seg = Segment("I am the segment", a="b annotation")
        blk.segments.append(seg)
        with self.assertRaises(ValueError):
            self.io.write_block(blk, use_obj_names=True)

        times = self.rquant(1, pq.s)
        signal = self.rquant(1, pq.V)

        # name conflict: analog + irregular signals
        seg.analogsignals.append(
            AnalogSignal(name="signal", signal=signal, sampling_rate=pq.Hz)
        )
        seg.imagesequences.append(
            ImageSequence(name='signal',
                          image_data=self.rquant((10, 20, 10), pq.V),
                          sampling_rate=pq.Hz,
                          spatial_scale=pq.micrometer))
        seg.irregularlysampledsignals.append(
            IrregularlySampledSignal(name="signal", signal=signal, times=times)
        )
        blk = Block(name="Signal conflict Block")
        blk.segments.append(seg)
        with self.assertRaises(ValueError):
            self.io.write_block(blk, use_obj_names=True)

        # name conflict: event + spiketrain
        blk = Block(name="Event+SpikeTrain conflict Block")
        seg = Segment(name="Event+SpikeTrain conflict Segment")
        blk.segments.append(seg)
        seg.events.append(Event(name="TimeyStuff", times=times))
        seg.spiketrains.append(SpikeTrain(name="TimeyStuff", times=times,
                                          t_stop=pq.s))
        with self.assertRaises(ValueError):
            self.io.write_block(blk, use_obj_names=True)

        # make spiketrain anon
        blk.segments[0].spiketrains[0].name = None
        with self.assertRaises(ValueError):
            self.io.write_block(blk, use_obj_names=True)

        # name conflict in channel indexes
        blk = Block(name="ChannelIndex conflict Block")
        blk.channel_indexes.append(ChannelIndex(name="chax", index=[1]))
        blk.channel_indexes.append(ChannelIndex(name="chax", index=[2]))
        with self.assertRaises(ValueError):
            self.io.write_block(blk, use_obj_names=True)

        # name conflict in units
        blk = Block(name="unitconf")
        chx = ChannelIndex(name="ok", index=[100])
        blk.channel_indexes.append(chx)
        chx.units.append(Unit(name="IHAVEATWIN"))
        chx.units.append(Unit(name="IHAVEATWIN"))
        with self.assertRaises(ValueError):
            self.io.write_block(blk, use_obj_names=True)
  1186. def test_multiref_write(self):
        blk = Block("blk1")
        signal = AnalogSignal(name="sig1", signal=[0, 1, 2], units="mV",
                              sampling_period=pq.Quantity(1, "ms"))
        othersignal = IrregularlySampledSignal(name="i1", signal=[0, 0, 0],
                                               units="mV", times=[1, 2, 3],
                                               time_units="ms")
        imgseq = ImageSequence(name="img1",
                               image_data=self.rquant((10, 20, 10), pq.mV),
                               frame_duration=pq.Quantity(1, "ms"),
                               spatial_scale=pq.meter)
        event = Event(name="Evee", times=[0.3, 0.42], units="year")
        epoch = Epoch(name="epoche", times=[0.1, 0.2] * pq.min,
                      durations=[0.5, 0.5] * pq.min)
        st = SpikeTrain(name="the train of spikes", times=[0.1, 0.2, 10.3],
                        t_stop=11, units="us")
        for idx in range(3):
            segname = "seg" + str(idx)
            seg = Segment(segname)
            blk.segments.append(seg)
            seg.analogsignals.append(signal)
            seg.imagesequences.append(imgseq)
            seg.irregularlysampledsignals.append(othersignal)
            seg.events.append(event)
            seg.epochs.append(epoch)
            seg.spiketrains.append(st)

        chidx = ChannelIndex([10, 20, 29])
        seg = blk.segments[0]
        st = SpikeTrain(name="choochoo", times=[10, 11, 80], t_stop=1000,
                        units="s")
        seg.spiketrains.append(st)
        blk.channel_indexes.append(chidx)
        for idx in range(6):
            unit = Unit("unit" + str(idx))
            chidx.units.append(unit)
            unit.spiketrains.append(st)

        self.writer.write_block(blk)
        self.compare_blocks([blk], self.reader.blocks)

    def test_no_segment_write(self):
        # Tests storing AnalogSignal, IrregularlySampledSignal, and SpikeTrain
        # objects in the secondary (ChannelIndex) substructure without them
        # being attached to a Segment.
        blk = Block("segmentless block")
        signal = AnalogSignal(name="sig1", signal=[0, 1, 2], units="mV",
                              sampling_period=pq.Quantity(1, "ms"))
        othersignal = IrregularlySampledSignal(name="i1", signal=[0, 0, 0],
                                               units="mV", times=[1, 2, 3],
                                               time_units="ms")
        sta = SpikeTrain(name="the train of spikes", times=[0.1, 0.2, 10.3],
                         t_stop=11, units="us")
        stb = SpikeTrain(name="the train of spikes b", times=[1.1, 2.2, 10.1],
                         t_stop=100, units="ms")
        chidx = ChannelIndex([8, 13, 21])
        blk.channel_indexes.append(chidx)
        chidx.analogsignals.append(signal)
        chidx.irregularlysampledsignals.append(othersignal)
        unit = Unit()
        chidx.units.append(unit)
        unit.spiketrains.extend([sta, stb])

        self.writer.write_block(blk)
        self.writer.close()
        self.compare_blocks([blk], self.reader.blocks)

        reader = NixIO(self.filename, "ro")
        blk = reader.read_block(neoname="segmentless block")
        chx = blk.channel_indexes[0]
        self.assertEqual(len(chx.analogsignals), 1)
        self.assertEqual(len(chx.irregularlysampledsignals), 1)
        self.assertEqual(len(chx.units[0].spiketrains), 2)

    def test_rewrite_refs(self):
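        # The block is written once with the shared writer and then again to
        # a second file; in both cases the ChannelIndex, Unit, and Segment
        # must keep the expected number of signal and spike train references,
        # verified on the in-memory objects and through a separate reader.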
        def checksignalcounts(fname):
            with NixIO(fname, "ro") as r:
                blk = r.read_block()
            chidx = blk.channel_indexes[0]
            seg = blk.segments[0]
            self.assertEqual(len(chidx.analogsignals), 2)
            self.assertEqual(len(chidx.units[0].spiketrains), 3)
            self.assertEqual(len(seg.analogsignals), 1)
            self.assertEqual(len(seg.spiketrains), 1)

        blk = Block()
        # ChannelIndex
        chidx = ChannelIndex(index=[1])
        blk.channel_indexes.append(chidx)
        # Two signals on ChannelIndex
        for idx in range(2):
            asigchx = AnalogSignal(signal=[idx], units="mV",
                                   sampling_rate=pq.Hz)
            chidx.analogsignals.append(asigchx)
        # Unit
        unit = Unit()
        chidx.units.append(unit)
        # Three SpikeTrains on Unit
        for idx in range(3):
            st = SpikeTrain([idx], units="ms", t_stop=40)
            unit.spiketrains.append(st)
        # Segment
        seg = Segment()
        blk.segments.append(seg)
        # One signal on Segment
        asigseg = AnalogSignal(signal=[2], units="uA",
                               sampling_rate=pq.Hz)
        seg.analogsignals.append(asigseg)
        # One spiketrain on Segment
        stseg = SpikeTrain([10], units="ms", t_stop=40)
        seg.spiketrains.append(stseg)

        # Write, compare, and check counts
        self.writer.write_block(blk)
        self.compare_blocks([blk], self.reader.blocks)
        self.assertEqual(len(chidx.analogsignals), 2)
        self.assertEqual(len(seg.analogsignals), 1)
        self.assertEqual(len(chidx.analogsignals), 2)
        self.assertEqual(len(chidx.units[0].spiketrains), 3)
        self.assertEqual(len(seg.analogsignals), 1)
        self.assertEqual(len(seg.spiketrains), 1)
        # Check counts with separate reader
        checksignalcounts(self.filename)

        # Write again and check counts
        secondwrite = os.path.join(self.tempdir, "testnixio-2.nix")
        with NixIO(secondwrite, "ow") as w:
            w.write_block(blk)

        self.compare_blocks([blk], self.reader.blocks)

        # Read back and check counts
        scndreader = nix.File.open(secondwrite, mode=nix.FileMode.ReadOnly)
        self.compare_blocks([blk], scndreader.blocks)
        checksignalcounts(secondwrite)

    def test_to_value(self):
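        # Exercises the private _write_property helper for each supported
        # value type (Quantity, datetime, str, bytes, list, ndarray, numpy
        # scalar, int, empty string) and checks the value and unit stored in
        # the NIX metadata section.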
        section = self.io.nix_file.create_section("Metadata value test",
                                                  "Test")
        writeprop = self.io._write_property
        # quantity
        qvalue = pq.Quantity(10, "mV")
        writeprop(section, "qvalue", qvalue)
        self.assertEqual(section["qvalue"], 10)
        self.assertEqual(section.props["qvalue"].unit, "mV")
        # datetime
        dt = self.rdate()
        writeprop(section, "dt", dt)
        self.assertEqual(section["dt"], dt_to_nix(dt)[0])
        # string
        randstr = self.rsentence()
        writeprop(section, "randstr", randstr)
        self.assertEqual(section["randstr"], randstr)
        # bytes
        bytestring = b"bytestring"
        writeprop(section, "randbytes", bytestring)
        self.assertEqual(section["randbytes"], bytestring.decode())
        # iterables
        randlist = np.random.random(10).tolist()
        writeprop(section, "randlist", randlist)
        self.assertEqual(randlist, section["randlist"])
        randarray = np.random.random(10)
        writeprop(section, "randarray", randarray)
        np.testing.assert_almost_equal(randarray, section["randarray"])
        # numpy item
        npval = np.float64(2398)
        writeprop(section, "npval", npval)
        self.assertEqual(npval, section["npval"])
        # number
        val = 42
        writeprop(section, "val", val)
        self.assertEqual(val, section["val"])
        # empty string (gets stored as empty list)
        writeprop(section, "emptystring", "")
        self.assertEqual(list(), section["emptystring"])

    def test_annotations_special_cases(self):
        # Special cases for annotations: empty list, list of strings,
        # multidimensional lists/arrays
        # These are handled differently on read, so we test them on a block
        # instead of just checking the property writer method

        # empty list
        wblock = Block("block with empty list", an_empty_list=list())
        self.writer.write_block(wblock)
        rblock = self.writer.read_block(neoname="block with empty list")
        self.assertEqual(rblock.annotations["an_empty_list"], list())
        # empty tuple (gets read out as list)
        wblock = Block("block with empty tuple", an_empty_tuple=tuple())
        self.writer.write_block(wblock)
        rblock = self.writer.read_block(neoname="block with empty tuple")
        self.assertEqual(rblock.annotations["an_empty_tuple"], list())
        # list of strings
        losval = ["one", "two", "one million"]
        wblock = Block("block with list of strings",
                       los=losval)
        self.writer.write_block(wblock)
        rblock = self.writer.read_block(neoname="block with list of strings")
        self.assertEqual(rblock.annotations["los"], losval)
        # TODO: multi dimensional value (GH Issue #501)

    def test_empty_array_annotations(self):
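        # An empty array annotation on a SpikeTrain must survive the
        # write/read round trip without raising and come back as an empty
        # entry.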
        wblock = Block("block with spiketrain")
        wseg = Segment()
        wseg.spiketrains = [SpikeTrain(times=[] * pq.s, t_stop=1 * pq.s,
                                       array_annotations={'empty': []})]
        wblock.segments = [wseg]
        self.writer.write_block(wblock)
        try:
            rblock = self.writer.read_block(neoname="block with spiketrain")
        except Exception as exc:
            self.fail('The following exception was raised when'
                      + ' reading the block with an empty array annotation:\n'
                      + str(exc))
        rst = rblock.segments[0].spiketrains[0]
        self.assertEqual(len(rst.array_annotations), 1)
        self.assertIn('empty', rst.array_annotations.keys())
        self.assertEqual(len(rst.array_annotations['empty']), 0)

    def test_write_proxyobjects(self):
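        # A fully populated block is written to a file, read back lazily
        # through NixIO_lazy so that its data objects are proxies, and the
        # proxy-laden block must then be writable and comparable again.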
        def generate_complete_block():
            block = Block()
            seg = Segment()
            block.segments.append(seg)
            # add spiketrain
            waveforms = self.rquant((3, 5, 10), pq.mV)
            spiketrain = SpikeTrain(times=[1, 1.1, 1.2] * pq.ms,
                                    t_stop=1.5 * pq.s,
                                    name="spikes with wf",
                                    description="spikes for waveform test",
                                    waveforms=waveforms)
            seg.spiketrains.append(spiketrain)
            # add imagesequence
            imgseq = ImageSequence(name="img1",
                                   image_data=self.rquant((10, 20, 10), pq.mV),
                                   frame_duration=pq.Quantity(1, "ms"),
                                   spatial_scale=pq.meter)
            seg.imagesequences.append(imgseq)
            # add signals
            asig = AnalogSignal(signal=self.rquant((19, 15), pq.mV),
                                sampling_rate=pq.Quantity(10, "Hz"))
            seg.analogsignals.append(asig)
            irsig = IrregularlySampledSignal(signal=np.random.random((20, 30)),
                                             times=self.rquant(20, pq.ms, True),
                                             units=pq.A)
            seg.irregularlysampledsignals.append(irsig)
            # add events and epochs
            epoch = Epoch(times=[1, 1, 10, 3] * pq.ms,
                          durations=[3, 3, 3, 1] * pq.ms,
                          labels=np.array(["one", "two", "three", "four"]),
                          name="test epoch", description="an epoch for testing")
            seg.epochs.append(epoch)
            event = Event(times=np.arange(0, 30, 10) * pq.s,
                          labels=np.array(["0", "1", "2"]),
                          name="event name",
                          description="event description")
            seg.events.append(event)
            # add channel index and unit
            channel = ChannelIndex([0], channel_names=['mychannelname'],
                                   channel_ids=[4],
                                   name=['testname'])
            block.channel_indexes.append(channel)
            unit = Unit(name='myunit', description='blablabla',
                        file_origin='fileA.nix',
                        myannotation='myannotation')
            channel.units.append(unit)
            unit.spiketrains.append(spiketrain)
            # make sure everything is linked properly
            block.create_relationship()
            return block
        block = generate_complete_block()
        basename, ext = os.path.splitext(self.filename)
        filename2 = basename + '-2' + ext
        # write the block to a separate file
        with NixIO(filename2, 'ow') as io:
            io.write_block(block)

        # read the data back as lazy (proxy) objects and write them again
        with NixIO_lazy(filename2) as io:
            block_lazy = io.read_block(lazy=True)
            self.write_and_compare([block_lazy])

    def test_annotation_types(self):
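        # Annotations of every supported type (dates, times, quantities,
        # strings, numpy scalars and arrays, empty strings) must round-trip
        # through write_block/read_block unchanged.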
        annotations = {
            "somedate": self.rdate(),
            "now": datetime.now(),
            "today": date.today(),
            "sometime": time(13, 37, 42),
            "somequantity": self.rquant(10, pq.ms),
            "somestring": self.rsentence(3),
            "npfloat": np.float64(10),
            "nparray": np.array([1, 2, 400]),
            "emptystr": "",
        }
        wblock = Block("annotation_block", **annotations)
        self.writer.write_block(wblock)
        rblock = self.writer.read_block(neoname="annotation_block")
        for k in annotations:
            orig = annotations[k]
            readval = rblock.annotations[k]
            if isinstance(orig, np.ndarray):
                np.testing.assert_almost_equal(orig, readval)
            else:
                self.assertEqual(orig, readval)

@unittest.skipUnless(HAVE_NIX, "Requires NIX")
class NixIOReadTest(NixIOTest):
    nixfile = None
    nix_blocks = None

    @classmethod
    def setUpClass(cls):
        cls.tempdir = mkdtemp(prefix="nixiotest")
        cls.filename = os.path.join(cls.tempdir, "testnixio.nix")
        if HAVE_NIX:
            cls.nixfile = cls.create_full_nix_file(cls.filename)

    def setUp(self):
        self.io = NixIO(self.filename, "ro")

    @classmethod
    def tearDownClass(cls):
        if HAVE_NIX:
            cls.nixfile.close()
        shutil.rmtree(cls.tempdir)

    def tearDown(self):
        self.io.close()

    def test_all_read(self):
        neo_blocks = self.io.read_all_blocks()
        nix_blocks = self.io.nix_file.blocks
        self.compare_blocks(neo_blocks, nix_blocks)

    def test_iter_read(self):
        blocknames = [blk.name for blk in self.nixfile.blocks]
        for blk, nixname in zip(self.io.iter_blocks(), blocknames):
            self.assertEqual(blk.annotations["nix_name"], nixname)

    def test_nix_name_read(self):
        for nixblock in self.nixfile.blocks:
            nixname = nixblock.name
            neoblock = self.io.read_block(nixname=nixname)
            self.assertEqual(neoblock.annotations["nix_name"], nixname)

    def test_index_read(self):
        for idx, nixblock in enumerate(self.nixfile.blocks):
            neoblock = self.io.read_block(index=idx)
            self.assertEqual(neoblock.annotations["nix_name"], nixblock.name)
            self.assertEqual(neoblock.annotations["nix_name"],
                             self.nixfile.blocks[idx].name)

    def test_auto_index_read(self):
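        # Without an explicit index, read_block returns the blocks in order
        # and then None once they are exhausted; reading by explicit index on
        # a fresh reader does not advance the automatic position.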
        for nixblock in self.nixfile.blocks:
            neoblock = self.io.read_block()  # don't specify index
            self.assertEqual(neoblock.annotations["nix_name"], nixblock.name)

        # No more blocks - should return None
        self.assertIs(self.io.read_block(), None)
        self.assertIs(self.io.read_block(), None)
        self.assertIs(self.io.read_block(), None)

        with NixIO(self.filename, "ro") as nf:
            neoblock = nf.read_block(index=1)
            self.assertEqual(self.nixfile.blocks[1].name,
                             neoblock.annotations["nix_name"])
            neoblock = nf.read_block()  # should start again from 0
            self.assertEqual(self.nixfile.blocks[0].name,
                             neoblock.annotations["nix_name"])

    def test_neo_name_read(self):
        for nixblock in self.nixfile.blocks:
            neoname = nixblock.metadata["neo_name"]
            neoblock = self.io.read_block(neoname=neoname)
            self.assertEqual(neoblock.annotations["nix_name"], nixblock.name)

    def test_array_annotations_read(self):
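        # For every data object created by create_full_nix_file, the array
        # annotation stored in the NIX metadata must match the corresponding
        # Neo array_annotations entry in both values and units.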
        for bl in self.io.read_all_blocks():
            nix_block = self.nixfile.blocks[bl.annotations['nix_name']]
            for seg in bl.segments:
                for anasig in seg.analogsignals:
                    da = nix_block.data_arrays[anasig.annotations['nix_name'] + '.0']
                    self.assertIn('anasig_arr_ann', da.metadata)
                    self.assertIn('anasig_arr_ann', anasig.array_annotations)
                    nix_ann = da.metadata['anasig_arr_ann']
                    neo_ann = anasig.array_annotations['anasig_arr_ann']
                    self.assertTrue(np.all(nix_ann == neo_ann.magnitude))
                    self.assertEqual(da.metadata.props['anasig_arr_ann'].unit,
                                     units_to_string(neo_ann.units))
                for irrsig in seg.irregularlysampledsignals:
                    da = nix_block.data_arrays[irrsig.annotations['nix_name'] + '.0']
                    self.assertIn('irrsig_arr_ann', da.metadata)
                    self.assertIn('irrsig_arr_ann', irrsig.array_annotations)
                    nix_ann = da.metadata['irrsig_arr_ann']
                    neo_ann = irrsig.array_annotations['irrsig_arr_ann']
                    self.assertTrue(np.all(nix_ann == neo_ann.magnitude))
                    self.assertEqual(da.metadata.props['irrsig_arr_ann'].unit,
                                     units_to_string(neo_ann.units))
                for imgseq in seg.imagesequences:
                    da = nix_block.data_arrays[imgseq.annotations['nix_name'] + '.0']
                    self.assertIn('imgseq_arr_ann', da.metadata)
                    self.assertIn('imgseq_arr_ann', imgseq.array_annotations)
                    nix_ann = da.metadata['imgseq_arr_ann']
                    neo_ann = imgseq.array_annotations['imgseq_arr_ann']
                    self.assertTrue(np.all(nix_ann == neo_ann.magnitude))
                    self.assertEqual(da.metadata.props['imgseq_arr_ann'].unit,
                                     units_to_string(neo_ann.units))
                for ev in seg.events:
                    da = nix_block.multi_tags[ev.annotations['nix_name']]
                    self.assertIn('ev_arr_ann', da.metadata)
                    self.assertIn('ev_arr_ann', ev.array_annotations)
                    nix_ann = da.metadata['ev_arr_ann']
                    neo_ann = ev.array_annotations['ev_arr_ann']
                    self.assertTrue(np.all(nix_ann == neo_ann.magnitude))
                    self.assertEqual(da.metadata.props['ev_arr_ann'].unit,
                                     units_to_string(neo_ann.units))
                for ep in seg.epochs:
                    da = nix_block.multi_tags[ep.annotations['nix_name']]
                    self.assertIn('ep_arr_ann', da.metadata)
                    self.assertIn('ep_arr_ann', ep.array_annotations)
                    nix_ann = da.metadata['ep_arr_ann']
                    neo_ann = ep.array_annotations['ep_arr_ann']
                    self.assertTrue(np.all(nix_ann == neo_ann.magnitude))
                    self.assertEqual(da.metadata.props['ep_arr_ann'].unit,
                                     units_to_string(neo_ann.units))
                for st in seg.spiketrains:
                    da = nix_block.multi_tags[st.annotations['nix_name']]
                    self.assertIn('st_arr_ann', da.metadata)
                    self.assertIn('st_arr_ann', st.array_annotations)
                    nix_ann = da.metadata['st_arr_ann']
                    neo_ann = st.array_annotations['st_arr_ann']
                    self.assertTrue(np.all(nix_ann == neo_ann.magnitude))
                    self.assertEqual(da.metadata.props['st_arr_ann'].unit,
                                     units_to_string(neo_ann.units))

    def test_read_blocks_are_writable(self):
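        # Blocks loaded through NixIO must be writable to a fresh file
        # without raising.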
        filename = os.path.join(self.tempdir, "testnixio_out.nix")
        writer = NixIO(filename, "ow")
        blocks = self.io.read_all_blocks()
        try:
            writer.write_all_blocks(blocks)
        except Exception as exc:
            self.fail('The following exception was raised when'
                      + ' writing the blocks loaded with NixIO:\n'
                      + str(exc))

@unittest.skipUnless(HAVE_NIX, "Requires NIX")
class NixIOContextTests(NixIOTest):
    def setUp(self):
        self.tempdir = mkdtemp(prefix="nixiotest")
        self.filename = os.path.join(self.tempdir, "testnixio.nix")

    def tearDown(self):
        shutil.rmtree(self.tempdir)

    def test_context_write(self):
        neoblock = Block(name=self.rword(), description=self.rsentence())
        with NixIO(self.filename, "ow") as iofile:
            iofile.write_block(neoblock)

        nixfile = nix.File.open(self.filename, nix.FileMode.ReadOnly)
        self.compare_blocks([neoblock], nixfile.blocks)
        nixfile.close()

        neoblock.annotate(**self.rdict(5))
        with NixIO(self.filename, "rw") as iofile:
            iofile.write_block(neoblock)

        nixfile = nix.File.open(self.filename, nix.FileMode.ReadOnly)
        self.compare_blocks([neoblock], nixfile.blocks)
        nixfile.close()

    def test_context_read(self):
        nixfile = nix.File.open(self.filename, nix.FileMode.Overwrite)
        name_one = self.rword()
        name_two = self.rword()
        nixfile.create_block(name_one, "neo.block")
        nixfile.create_block(name_two, "neo.block")
        nixfile.close()

        with NixIO(self.filename, "ro") as iofile:
            blocks = iofile.read_all_blocks()

        self.assertEqual(blocks[0].annotations["nix_name"], name_one)
        self.assertEqual(blocks[1].annotations["nix_name"], name_two)

@unittest.skipUnless(HAVE_NIX, "Requires NIX")
class NixIOVerTests(NixIOTest):
    def setUp(self):
        self.tempdir = mkdtemp(prefix="nixiotest")
        self.filename = os.path.join(self.tempdir, "testnixio.nix")

    def tearDown(self):
        shutil.rmtree(self.tempdir)

    def test_new_file(self):
        with NixIO(self.filename, "ow") as iofile:
            self.assertEqual(iofile._file_version, neover)

        nixfile = nix.File.open(self.filename, nix.FileMode.ReadOnly)
        filever = nixfile.sections["neo"]["version"]
        self.assertEqual(filever, neover)
        nixfile.close()

    def test_oldfile_nover(self):
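        # A NIX file without a "neo" metadata section is treated as the
        # 0.5.2 compatibility version; opening it read-write creates the
        # section and records that version.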
        nixfile = nix.File.open(self.filename, nix.FileMode.Overwrite)
        nixfile.close()

        with NixIO(self.filename, "ro") as iofile:
            self.assertEqual(iofile._file_version, '0.5.2')  # compat version

        nixfile = nix.File.open(self.filename, nix.FileMode.ReadOnly)
        self.assertNotIn("neo", nixfile.sections)
        nixfile.close()

        with NixIO(self.filename, "rw") as iofile:
            self.assertEqual(iofile._file_version, '0.5.2')  # compat version

        # section should have been created now
        nixfile = nix.File.open(self.filename, nix.FileMode.ReadOnly)
        self.assertIn("neo", nixfile.sections)
        self.assertEqual(nixfile.sections["neo"]["version"], '0.5.2')
        nixfile.close()

    def test_file_with_ver(self):
        someversion = '0.100.10'
        nixfile = nix.File.open(self.filename, nix.FileMode.Overwrite)
        filemd = nixfile.create_section("neo", "neo.metadata")
        filemd["version"] = someversion
        nixfile.close()

        with NixIO(self.filename, "ro") as iofile:
            self.assertEqual(iofile._file_version, someversion)

        with NixIO(self.filename, "rw") as iofile:
            self.assertEqual(iofile._file_version, someversion)

        with NixIO(self.filename, "ow") as iofile:
            self.assertEqual(iofile._file_version, neover)

@unittest.skipUnless(HAVE_NIX, "Requires NIX")
class CommonTests(BaseTestIO, unittest.TestCase):
    ioclass = NixIO
    read_and_write_is_bijective = False


if __name__ == "__main__":
    unittest.main()