elphyio.py

# -*- coding: utf-8 -*-
"""
README
=====================================================================================
This is the implementation of the NEO IO for Elphy files.

IO dependencies:
- NEO
- types
- numpy
- quantities

Quick reference:
=====================================================================================
The class ElphyIO(), with its methods read_block() and write_block(), is implemented.
This class represents the way to access and produce Elphy files
from NEO objects.

To read an existing Elphy file, start by initializing an IO class with it:

>>> import neo
>>> r = neo.io.ElphyIO( filename="Elphy.DAT" )
>>> r
<neo.io.elphyio.ElphyIO object at 0xa1e960c>

Read the file content into a NEO Block object:

>>> bl = r.read_block()
>>> bl
<neo.core.block.Block object at 0x9e3d44c>

Now you can read all Elphy data as NEO objects:

>>> bl.segments
[<neo.core.segment.Segment object at 0x9ed85cc>,
 <neo.core.segment.Segment object at 0x9ed85ec>,
 <neo.core.segment.Segment object at 0x9ed880c>,
 <neo.core.segment.Segment object at 0x9ed89cc>]
>>> bl.segments[0].analogsignals[0]
<AnalogSignal(array([ 0.        , -0.0061037 , -0.0061037 , ...,  0.        ,
       -0.0061037 , -0.01831111]) * mV, [0.0 s, 7226.2 s], sampling rate: 10.0 Hz)>

These functions return NEO objects, completely "detached" from the original Elphy file.
Changes to the runtime objects will not cause any changes in the file.

Having already existing NEO structures, it is possible to write them as an Elphy file.
For example, given a segment:

>>> s = neo.Segment()

filled with other NEO structures:

>>> import numpy as np
>>> import quantities as pq
>>> a = neo.AnalogSignal( signal=np.random.rand(300)*pq.mV, t_start=42*pq.ms, sampling_rate=10*pq.kHz )
>>> s.analogsignals.append( a )

and added to a newly created NEO Block:

>>> bl = neo.Block()
>>> bl.segments.append( s )

Then, it's easy to create an Elphy file:

>>> r = neo.io.ElphyIO( filename="ElphyNeoTest.DAT" )
>>> r.write_block( bl )

Authors: Thierry Brizzi
         Domenico Guarino
"""
# needed for python 3 compatibility
from __future__ import absolute_import
# python commons:
from datetime import datetime
from fractions import gcd
from os import path
import re
import struct
from time import time
# note neo.core needs only numpy and quantities
import numpy as np
import quantities as pq
# I need to subclass BaseIO
from neo.io.baseio import BaseIO
# to import from core
from neo.core import (Block, Segment, ChannelIndex,
                      AnalogSignal, Event, SpikeTrain)


# --------------------------------------------------------
# OBJECTS


class ElphyScaleFactor(object):
    """
    Useful to retrieve real values from the integer
    ones that are stored in an Elphy file :
    ``scale`` : compute the actual value of a sample
    with the following formula :
        ``delta`` * value + ``offset``
    """

    def __init__(self, delta, offset):
        self.delta = delta
        self.offset = offset

    def scale(self, value):
        return value * self.delta + self.offset
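
# Illustrative note (added comment, not from the original file): with a hypothetical
# factor ElphyScaleFactor(delta=0.001, offset=-1.0), a raw integer sample of 2048
# would be rescaled by scale() to 2048 * 0.001 + (-1.0) = 1.048.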


class BaseSignal(object):
    """
    A descriptor storing the main signal properties :
    ``layout`` : the :class:`ElphyLayout` object
    that extracts data from a file.
    ``episode`` : the episode in which the signal
    has been acquired.
    ``sampling_frequency`` : the sampling frequency
    of the analog to digital converter.
    ``sampling_period`` : the sampling period of the
    analog to digital converter computed from sampling_frequency.
    ``t_start`` : the start time of the signal acquisition.
    ``t_stop`` : the end time of the signal acquisition.
    ``duration`` : the duration of the signal acquisition
    computed from t_start and t_stop.
    ``n_samples`` : the number of samples acquired during the
    recording, computed from the duration and the sampling period.
    ``name`` : a label to identify the signal.
    ``data`` : a property triggering data extraction.
    """

    def __init__(self, layout, episode, sampling_frequency, start, stop, name=None):
        self.layout = layout
        self.episode = episode
        self.sampling_frequency = sampling_frequency
        self.sampling_period = 1 / sampling_frequency
        self.t_start = start
        self.t_stop = stop
        self.duration = self.t_stop - self.t_start
        self.n_samples = int(self.duration / self.sampling_period)
        self.name = name

    @property
    def data(self):
        raise NotImplementedError('must be overloaded in subclass')
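
# Illustrative note (added comment, a sketch rather than original documentation): for a
# hypothetical signal sampled at 10000 Hz between start=0.0 s and stop=2.0 s, BaseSignal
# would derive sampling_period = 1 / 10000 = 1e-4, duration = 2.0 and
# n_samples = int(2.0 / 1e-4) = 20000; the `data` property itself is only provided by
# subclasses such as ElphySignal and ElphyTag.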


class ElphySignal(BaseSignal):
    """
    Subclass of :class:`BaseSignal` corresponding to Elphy's analog channels :
    ``channel`` : the identifier of the analog channel providing the signal.
    ``units`` : an array containing x and y coordinate units.
    ``x_unit`` : a property to access the x-coordinate unit.
    ``y_unit`` : a property to access the y-coordinate unit.
    ``data`` : a property that delegates data extraction to the
    ``get_signal_data`` function of the ``layout`` object.
    """

    def __init__(self, layout, episode, channel, x_unit, y_unit, sampling_frequency, start, stop,
                 name=None):
        super(ElphySignal, self).__init__(layout, episode, sampling_frequency, start, stop, name)
        self.channel = channel
        self.units = [x_unit, y_unit]

    def __str__(self):
        return "%s ep_%s ch_%s [%s, %s]" % (
            self.layout.file.name, self.episode, self.channel, self.x_unit, self.y_unit)

    def __repr__(self):
        return self.__str__()

    @property
    def x_unit(self):
        """
        Return the x-coordinate unit of the signal.
        """
        return self.units[0]

    @property
    def y_unit(self):
        """
        Return the y-coordinate unit of the signal.
        """
        return self.units[1]

    @property
    def data(self):
        return self.layout.get_signal_data(self.episode, self.channel)


class ElphyTag(BaseSignal):
    """
    Subclass of :class:`BaseSignal` corresponding to Elphy's tag channels :
    ``number`` : the identifier of the tag channel.
    ``x_unit`` : the unit of the x-coordinate.
    """

    def __init__(self, layout, episode, number, x_unit, sampling_frequency, start, stop,
                 name=None):
        super(ElphyTag, self).__init__(layout, episode, sampling_frequency, start, stop, name)
        self.number = number
        self.units = [x_unit, None]

    def __str__(self):
        return "%s : ep_%s tag_ch_%s [%s]" % (
            self.layout.file.name, self.episode, self.number, self.x_unit)

    def __repr__(self):
        return self.__str__()

    @property
    def x_unit(self):
        """
        Return the x-coordinate unit of the signal.
        """
        return self.units[0]

    @property
    def data(self):
        return self.layout.get_tag_data(self.episode, self.number)

    @property
    def channel(self):
        return self.number


class ElphyEvent(object):
    """
    A descriptor that stores a set of event properties :
    ``layout`` : the :class:`ElphyLayout` object
    that extracts data from a file.
    ``episode`` : the episode in which the signal
    has been acquired.
    ``number`` : the identifier of the channel.
    ``x_unit`` : the unit of the x-coordinate.
    ``n_events`` : the number of events.
    ``name`` : a label to identify the event.
    ``times`` : a property triggering event times extraction.
    """

    def __init__(self, layout, episode, number, x_unit, n_events, ch_number=None, name=None):
        self.layout = layout
        self.episode = episode
        self.number = number
        self.x_unit = x_unit
        self.n_events = n_events
        self.name = name
        self.ch_number = ch_number

    def __str__(self):
        return "%s : ep_%s evt_ch_%s [%s]" % (
            self.layout.file.name, self.episode, self.number, self.x_unit)

    def __repr__(self):
        return self.__str__()

    @property
    def channel(self):
        return self.number

    @property
    def times(self):
        return self.layout.get_event_data(self.episode, self.number)

    @property
    def data(self):
        return self.times


class ElphySpikeTrain(ElphyEvent):
    """
    A descriptor that stores spiketrain properties :
    ``wf_samples`` : the number of samples composing waveforms.
    ``wf_sampling_frequency`` : the sampling frequency of waveforms.
    ``wf_sampling_period`` : the sampling period of waveforms.
    ``wf_units`` : the units of the x and y coordinates of waveforms.
    ``t_start`` : the time before the arrival of the spike which
    corresponds to the starting time of a waveform.
    ``name`` : a label to identify the event.
    ``times`` : a property triggering event times extraction.
    ``waveforms`` : a property triggering waveforms extraction.
    """

    def __init__(self, layout, episode, number, x_unit, n_events, wf_sampling_frequency,
                 wf_samples, unit_x_wf, unit_y_wf, t_start, name=None):
        super(ElphySpikeTrain, self).__init__(layout, episode, number, x_unit, n_events, name)
        self.wf_samples = wf_samples
        self.wf_sampling_frequency = wf_sampling_frequency
        assert wf_sampling_frequency, "bad sampling frequency"
        self.wf_sampling_period = 1.0 / wf_sampling_frequency
        self.wf_units = [unit_x_wf, unit_y_wf]
        self.t_start = t_start

    @property
    def x_unit_wf(self):
        """
        Return the x-coordinate unit of waveforms.
        """
        return self.wf_units[0]

    @property
    def y_unit_wf(self):
        """
        Return the y-coordinate unit of waveforms.
        """
        return self.wf_units[1]

    @property
    def times(self):
        return self.layout.get_spiketrain_data(self.episode, self.number)

    @property
    def waveforms(self):
        return self.layout.get_waveform_data(self.episode, self.number) if self.wf_samples \
            else None


# --------------------------------------------------------
# BLOCKS


class BaseBlock(object):
    """
    Represent a chunk of file storing metadata or
    raw data. A convenient class to break down the
    structure of an Elphy file into several building
    blocks :
    ``layout`` : the layout containing the block.
    ``identifier`` : the label that identifies the block.
    ``size`` : the size of the block.
    ``start`` : the file index corresponding to the starting byte of the block.
    ``end`` : the file index corresponding to the ending byte of the block.
    NB : Subclassing this class is a convenient
    way to set the properties using polymorphism
    rather than a conditional structure. In this
    way each :class:`BaseBlock` subclass knows how to
    iterate through the Elphy file and store
    interesting data.
    """

    def __init__(self, layout, identifier, start, size):
        self.layout = layout
        self.identifier = identifier
        self.size = size
        self.start = start
        self.end = self.start + self.size - 1


class ElphyBlock(BaseBlock):
    """
    A subclass of :class:`BaseBlock`. Useful to
    store the location and size of interesting
    data within a block :
    ``parent_block`` : the parent block containing the block.
    ``header_size`` : the size of the header permitting the
    identification of the type of the block.
    ``data_offset`` : the file index located after the block header.
    ``data_size`` : the size of data located after the header.
    ``sub_blocks`` : the sub-blocks contained by the block.
    """

    def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="i",
                 parent_block=None):
        super(ElphyBlock, self).__init__(layout, identifier, start, size)
        # a block may be a sub-block of another block
        self.parent_block = parent_block
        # the Pascal language stores strings in 2 different ways
        # ... first, if the size of the string is specified (fixed)
        # in the program, then the file stores the length
        # of the string and allocates a number of bytes equal
        # to the specified size
        # ... if this size is not specified, the length of the
        # string is also stored but the file allocates dynamically
        # a number of bytes equal to the actual size of the string
        l_ident = len(self.identifier)
        if fixed_length:
            l_ident += (fixed_length - l_ident)
        self.header_size = l_ident + 1 + type_dict[size_format]
        # starting point of data located in the block
        self.data_offset = self.start + self.header_size
        self.data_size = self.size - self.header_size
        # a block may have sub-blocks
        # it is up to subclasses to initialize
        # this property
        self.sub_blocks = list()

    def __repr__(self):
        return "%s : size = %s, start = %s, end = %s" % (
            self.identifier, self.size, self.start, self.end)

    def add_sub_block(self, block):
        """
        Append a block to the sub-block list.
        """
        self.sub_blocks.append(block)
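
# Illustrative note (added comment, not from the original file): for the fixed-length
# case described above, and assuming type_dict maps the struct format "h" to 2 bytes,
# an identifier such as "DAC2 objects" (12 characters) padded to fixed_length=15 gives
# header_size = 15 + 1 + 2 = 18 bytes, which matches the block size used by DAC2Header
# further down in this module.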


class FileInfoBlock(ElphyBlock):
    """
    Base class of all subclasses whose purpose is to
    extract user file info stored in an Elphy file :
    ``header`` : the header block relative to the block.
    ``file`` : the file containing the block.
    NB : User defined metadata are not really practical.
    An Elphy script must know the order of metadata storage
    to know exactly how to retrieve these data. That's why
    it is necessary to subclass and reproduce Elphy script
    commands to extract metadata relative to a protocol.
    Consequently, managing a new protocol implies refactoring
    the file info extraction.
    """

    def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="i",
                 parent_block=None):
        super(FileInfoBlock, self).__init__(layout, identifier, start,
                                            size, fixed_length, size_format,
                                            parent_block=parent_block)
        self.header = None
        self.file = self.layout.file

    def get_protocol_and_version(self):
        """
        Return a tuple useful to identify the
        kind of protocol that has generated a
        file during data acquisition.
        """
        raise Exception("must be overloaded in a subclass")

    def get_user_file_info(self):
        """
        Return a dictionary containing all
        user file info stored in the file.
        """
        raise Exception("must be overloaded in a subclass")

    def get_sparsenoise_revcor(self):
        """
        Return 'REVCOR' user file info. This method is common
        to :class:`ClassicFileInfo` and :class:`MultistimFileInfo`
        because the latter is able to store this kind of metadata.
        """
        header = dict()
        header['n_div_x'] = read_from_char(self.file, 'h')
        header['n_div_y'] = read_from_char(self.file, 'h')
        header['gray_levels'] = read_from_char(self.file, 'h')
        header['position_x'] = read_from_char(self.file, 'ext')
        header['position_y'] = read_from_char(self.file, 'ext')
        header['length'] = read_from_char(self.file, 'ext')
        header['width'] = read_from_char(self.file, 'ext')
        header['orientation'] = read_from_char(self.file, 'ext')
        header['expansion'] = read_from_char(self.file, 'h')
        header['scotoma'] = read_from_char(self.file, 'h')
        header['seed'] = read_from_char(self.file, 'h')
        # dt_on and dt_off may not exist in old revcor formats
        rollback = self.file.tell()
        header['dt_on'] = read_from_char(self.file, 'ext')
        if header['dt_on'] is None:
            self.file.seek(rollback)
        rollback = self.file.tell()
        header['dt_off'] = read_from_char(self.file, 'ext')
        if header['dt_off'] is None:
            self.file.seek(rollback)
        return header
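
# Illustrative note (added comment): the rollback pattern in get_sparsenoise_revcor
# assumes read_from_char returns None when an 'ext' value cannot be read; in that case
# the file position is restored with seek(rollback), so older revcor files that lack
# dt_on/dt_off keep the stream aligned for whatever follows.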


class ClassicFileInfo(FileInfoBlock):
    """
    Extract user file info stored in an Elphy file corresponding to
    sparse noise (revcor), moving bar and flashbar protocols.
    """

    def detect_protocol_from_name(self, path):
        pattern = r"\d{4}(\d+|\D)\D"
        codes = {
            'r': 'sparsenoise',
            'o': 'movingbar',
            'f': 'flashbar',
            'm': 'multistim'  # here just for assertion
        }
        filename = path.split(path)[1]
        match = re.search(pattern, path)
        if hasattr(match, 'end'):
            code = codes.get(path[match.end() - 1].lower(), None)
            assert code != 'm', "multistim file detected"
            return code
        elif 'spt' in filename.lower():
            return 'spontaneousactivity'
        else:
            return None

    def get_protocol_and_version(self):
        if self.layout and self.layout.info_block:
            self.file.seek(self.layout.info_block.data_offset)
            version = self.get_title()
            if version in ['REVCOR1', 'REVCOR2', 'REVCOR + PAIRING']:
                name = "sparsenoise"
            elif version in ['BARFLASH']:
                name = "flashbar"
            elif version in ['ORISTIM', 'ORISTM', 'ORISTM1', 'ORITUN']:
                name = "movingbar"
            else:
                name = self.detect_protocol_from_name(self.file.name)
            self.file.seek(0)
            return name, version
        return None, None

    def get_title(self):
        title_length, title = struct.unpack('<B20s', self.file.read(21))
        return unicode(title[0:title_length])
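
    # Illustrative note (added comment, not from the original file): '<B20s' reads a
    # Pascal-style short string, i.e. one length byte followed by a fixed 20-byte
    # buffer of which only the first title_length bytes are meaningful. For example,
    # a buffer starting with b'BARFLASH' and a length byte of 8 yields 'BARFLASH'.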

    def get_user_file_info(self):
        header = dict()
        if self.layout and self.layout.info_block:
            self.file.seek(self.layout.info_block.data_offset)
            header['title'] = self.get_title()
            # test the protocol name to trigger
            # the right header extraction
            if self.layout.elphy_file.protocol == 'sparsenoise':
                header.update(self.get_sparsenoise_revcor())
            elif self.layout.elphy_file.protocol == 'flashbar':
                header.update(self.get_flashbar_header())
            elif self.layout.elphy_file.protocol == 'movingbar':
                header.update(self.get_movingbar_header())
            self.file.seek(0)
        return header

    def get_flashbar_header(self):
        header = dict()
        orientations = list()
        tmp = self.file.tell()
        for _ in range(0, 50):
            l, ori = struct.unpack('<B5s', self.file.read(6))
            try:
                orientations.append(float(ori[0:l]))
            except:
                return header
        header['orientations'] = orientations if orientations else None
        self.file.seek(tmp + 50 * 6)
        _tmp = read_from_char(self.file, 'h')
        header['number_of_orientations'] = _tmp if tmp < 0 else None
        _tmp = read_from_char(self.file, 'h')
        header['number_of_repetitions'] = _tmp if tmp < 0 else None
        header['position_x'] = read_from_char(self.file, 'ext')
        header['position_y'] = read_from_char(self.file, 'ext')
        header['length'] = read_from_char(self.file, 'ext')
        header['width'] = read_from_char(self.file, 'ext')
        header['orientation'] = read_from_char(self.file, 'ext')
        header['excursion'] = read_from_char(self.file, 'i')
        header['dt_on'] = None
        return header

    def get_movingbar_header(self):
        header = dict()
        orientations = list()
        tmp = self.file.tell()
        for _ in range(0, 50):
            l, ori = struct.unpack('<B5s', self.file.read(6))
            orientations.append(float(ori[0:l]))
        header['orientations'] = orientations if orientations else None
        self.file.seek(tmp + 50 * 6)
        _tmp = read_from_char(self.file, 'h')
        header['number_of_orientations'] = _tmp if tmp < 0 else None
        _tmp = read_from_char(self.file, 'h')
        header['number_of_repetitions'] = _tmp if tmp < 0 else None
        header['position_x'] = read_from_char(self.file, 'ext')
        header['position_y'] = read_from_char(self.file, 'ext')
        header['length'] = read_from_char(self.file, 'ext')
        header['width'] = read_from_char(self.file, 'ext')
        header['orientation'] = read_from_char(self.file, 'ext')
        header['excursion'] = read_from_char(self.file, 'h')
        header['speed'] = read_from_char(self.file, 'h')
        header['dim_x'] = read_from_char(self.file, 'h')
        header['dim_y'] = read_from_char(self.file, 'h')
        return header


class MultistimFileInfo(FileInfoBlock):

    def get_protocol_and_version(self):
        # test if there is an available info_block
        if self.layout and self.layout.info_block:
            # go to the info_block
            sub_block = self.layout.info_block
            self.file.seek(sub_block.data_offset)
            # get the first four parameters
            # acqLGN = read_from_char(self.file, 'i')
            center = read_from_char(self.file, 'i')
            surround = read_from_char(self.file, 'i')
            version = self.get_title()
            # test the type of protocol from
            # center and surround parameters
            if (surround >= 2):
                name = None
                version = None
            else:
                if center == 2:
                    name = "sparsenoise"
                elif center == 3:
                    name = "densenoise"
                elif center == 4:
                    name = "densenoise"
                elif center == 5:
                    name = "grating"
                else:
                    name = None
                    version = None
            self.file.seek(0)
            return name, version
        return None, None

    def get_title(self):
        title_length = read_from_char(self.file, 'B')
        title, = struct.unpack('<%ss' % title_length, self.file.read(title_length))
        self.file.seek(self.file.tell() + 255 - title_length)
        return unicode(title)

    def get_user_file_info(self):
        header = dict()
        if self.layout and self.layout.info_block:
            # go to the info_block
            sub_block = self.layout.info_block
            self.file.seek(sub_block.data_offset)
            # get the first four parameters
            acqLGN = read_from_char(self.file, 'i')
            center = read_from_char(self.file, 'i')
            surround = read_from_char(self.file, 'i')
            # store info in the header
            header['acqLGN'] = acqLGN
            header['center'] = center
            header['surround'] = surround
            if not (header['surround'] >= 2):
                header.update(self.get_center_header(center))
            self.file.seek(0)
        return header

    def get_center_header(self, code):
        # get file info corresponding
        # to the executed protocol
        # for the center first ...
        if code == 0:
            return self.get_sparsenoise_revcor()
        elif code == 2:
            return self.get_sparsenoise_center()
        elif code == 3:
            return self.get_densenoise_center(True)
        elif code == 4:
            return self.get_densenoise_center(False)
        elif code == 5:
            return dict()
            # return self.get_grating_center()
        else:
            return dict()

    def get_surround_header(self, code):
        # then the surround
        if code == 2:
            return self.get_sparsenoise_surround()
        elif code == 3:
            return self.get_densenoise_surround(True)
        elif code == 4:
            return self.get_densenoise_surround(False)
        elif code == 5:
            raise NotImplementedError()
            return self.get_grating_center()
        else:
            return dict()
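
    # Illustrative note (added comment): in the two dispatchers above the integer code
    # read from the info block selects the parser; for the center, 0 -> sparse noise
    # (revcor), 2 -> sparse noise, 3 -> dense noise with is_binary=True ("B"),
    # 4 -> dense noise with is_binary=False ("T"), 5 -> grating (not implemented here),
    # and any other code falls back to an empty dict().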

    def get_center_surround(self, center, surround):
        header = dict()
        header['stim_center'] = self.get_center_header(center)
        header['stim_surround'] = self.get_surround_header(surround)
        return header

    def get_sparsenoise_center(self):
        header = dict()
        header['title'] = self.get_title()
        header['number_of_sequences'] = read_from_char(self.file, 'i')
        header['pretrigger_duration'] = read_from_char(self.file, 'ext')
        header['n_div_x'] = read_from_char(self.file, 'h')
        header['n_div_y'] = read_from_char(self.file, 'h')
        header['gray_levels'] = read_from_char(self.file, 'h')
        header['position_x'] = read_from_char(self.file, 'ext')
        header['position_y'] = read_from_char(self.file, 'ext')
        header['length'] = read_from_char(self.file, 'ext')
        header['width'] = read_from_char(self.file, 'ext')
        header['orientation'] = read_from_char(self.file, 'ext')
        header['expansion'] = read_from_char(self.file, 'h')
        header['scotoma'] = read_from_char(self.file, 'h')
        header['seed'] = read_from_char(self.file, 'h')
        header['luminance_1'] = read_from_char(self.file, 'ext')
        header['luminance_2'] = read_from_char(self.file, 'ext')
        header['dt_count'] = read_from_char(self.file, 'i')
        dt_array = list()
        for _ in range(0, header['dt_count']):
            dt_array.append(read_from_char(self.file, 'ext'))
        header['dt_on'] = dt_array if dt_array else None
        header['dt_off'] = read_from_char(self.file, 'ext')
        return header

    def get_sparsenoise_surround(self):
        header = dict()
        header['title_surround'] = self.get_title()
        header['gap'] = read_from_char(self.file, 'ext')
        header['n_div_x'] = read_from_char(self.file, 'h')
        header['n_div_y'] = read_from_char(self.file, 'h')
        header['gray_levels'] = read_from_char(self.file, 'h')
        header['expansion'] = read_from_char(self.file, 'h')
        header['scotoma'] = read_from_char(self.file, 'h')
        header['seed'] = read_from_char(self.file, 'h')
        header['luminance_1'] = read_from_char(self.file, 'ext')
        header['luminance_2'] = read_from_char(self.file, 'ext')
        header['dt_on'] = read_from_char(self.file, 'ext')
        header['dt_off'] = read_from_char(self.file, 'ext')
        return header

    def get_densenoise_center(self, is_binary):
        header = dict()
        header['stimulus_type'] = "B" if is_binary else "T"
        header['title'] = self.get_title()
        _tmp = read_from_char(self.file, 'i')
        header['number_of_sequences'] = _tmp if _tmp < 0 else None
        rollback = self.file.tell()
        header['stimulus_duration'] = read_from_char(self.file, 'ext')
        if header['stimulus_duration'] is None:
            self.file.seek(rollback)
        header['pretrigger_duration'] = read_from_char(self.file, 'ext')
        header['n_div_x'] = read_from_char(self.file, 'h')
        header['n_div_y'] = read_from_char(self.file, 'h')
        header['position_x'] = read_from_char(self.file, 'ext')
        header['position_y'] = read_from_char(self.file, 'ext')
        header['length'] = read_from_char(self.file, 'ext')
        header['width'] = read_from_char(self.file, 'ext')
        header['orientation'] = read_from_char(self.file, 'ext')
        header['expansion'] = read_from_char(self.file, 'h')
        header['seed'] = read_from_char(self.file, 'h')
        header['luminance_1'] = read_from_char(self.file, 'ext')
        header['luminance_2'] = read_from_char(self.file, 'ext')
        header['dt_on'] = read_from_char(self.file, 'ext')
        header['dt_off'] = read_from_char(self.file, 'ext')
        return header

    def get_densenoise_surround(self, is_binary):
        header = dict()
        header['title_surround'] = self.get_title()
        header['gap'] = read_from_char(self.file, 'ext')
        header['n_div_x'] = read_from_char(self.file, 'h')
        header['n_div_y'] = read_from_char(self.file, 'h')
        header['expansion'] = read_from_char(self.file, 'h')
        header['seed'] = read_from_char(self.file, 'h')
        header['luminance_1'] = read_from_char(self.file, 'ext')
        header['luminance_2'] = read_from_char(self.file, 'ext')
        header['dt_on'] = read_from_char(self.file, 'ext')
        header['dt_off'] = read_from_char(self.file, 'ext')
        return header

    def get_grating_center(self):
        pass

    def get_grating_surround(self):
        pass


class Header(ElphyBlock):
    """
    A convenient subclass of :class:`Block` to store
    Elphy file header properties.
    NB : Subclassing this class is a convenient
    way to set the properties of the header using
    polymorphism rather than a conditional structure.
    """

    def __init__(self, layout, identifier, size, fixed_length=None, size_format="i"):
        super(Header, self).__init__(layout, identifier, 0, size, fixed_length, size_format)


class Acquis1Header(Header):
    """
    A subclass of :class:`Header` used to
    identify the 'ACQUIS1/GS/1991' format.
    Unlike more recent formats, the header
    contains all data relative to episodes,
    channels and traces :
    ``n_channels`` : the number of acquisition channels.
    ``nbpt`` and ``nbptEx`` : parameters useful to compute the number of samples per episode.
    ``tpData`` : the data format identifier used to compute the sample size.
    ``x_unit`` : the x-coordinate unit for all channels in an episode.
    ``y_units`` : an array containing y-coordinate units for each channel in the episode.
    ``dX`` and ``X0`` : the scale factors necessary to retrieve the actual
    times relative to each sample in a channel.
    ``dY_ar`` and ``Y0_ar`` : arrays of scale factors necessary to retrieve
    the actual values relative to samples.
    ``continuous`` : a boolean telling if the file has been acquired in
    continuous mode.
    ``preSeqI`` : the size in bytes of the data preceding raw data.
    ``postSeqI`` : the size in bytes of the data following raw data.
    ``dat_length`` : the length in bytes of the data in the file.
    ``sample_size`` : the size in bytes of a sample.
    ``n_samples`` : the number of samples.
    ``ep_size`` : the size in bytes of an episode.
    ``n_episodes`` : the number of recording sequences stored in the file.
    NB :
    The size is read from the file,
    the identifier is a string containing
    15 characters and the size is encoded
    as a small integer.
    See file 'FicDefAc1.pas' to identify
    the parsed parameters.
    """

    def __init__(self, layout):
        fileobj = layout.file
        super(Acquis1Header, self).__init__(layout, "ACQUIS1/GS/1991", 1024, 15, "h")
        # parse the header to store interesting data about episodes and channels
        fileobj.seek(18)
        # extract episode properties
        n_channels = read_from_char(fileobj, 'B')
        assert not ((n_channels < 1) or (n_channels > 16)), "bad number of channels"
        nbpt = read_from_char(fileobj, 'h')
        l_xu, x_unit = struct.unpack('<B3s', fileobj.read(4))
        # extract units for each channel
        y_units = list()
        for i in range(1, 7):
            l_yu, y_unit = struct.unpack('<B3s', fileobj.read(4))
            y_units.append(y_unit[0:l_yu])
        # extract i1, i2, x1, x2 and compute dX and X0
        i1, i2 = struct.unpack('<hh', fileobj.read(4))
        x1 = read_from_char(fileobj, 'ext')
        x2 = read_from_char(fileobj, 'ext')
        if (i1 != i2) and (x1 != x2):
            dX = (x2 - x1) / (i2 - i1)
            X0 = x1 - i1 * dX
        else:
            dX = None
            X0 = None
            # raise Exception("bad X-scale parameters")
        # extract j1 and j2, y1 and y2 and compute dY
        j1 = struct.unpack('<hhhhhh', fileobj.read(12))
        j2 = struct.unpack('<hhhhhh', fileobj.read(12))
        y1 = list()
        for i in range(1, 7):
            y1.append(read_from_char(fileobj, 'ext'))
        y2 = list()
        for i in range(1, 7):
            y2.append(read_from_char(fileobj, 'ext'))
        dY_ar = list()
        Y0_ar = list()
        for i in range(0, n_channels):
            # detect division by zero
            if (j1[i] != j2[i]) and (y1[i] != y2[i]):
                dY_ar.append((y2[i] - y1[i]) / (j2[i] - j1[i]))
                Y0_ar.append(y1[i] - j1[i] * dY_ar[i])
            else:
                dY_ar.append(None)
                Y0_ar.append(None)
        NbMacq = read_from_char(fileobj, 'h')
        # fileobj.read(300) #Macq:typeTabMarqueAcq; { 300 octets }
        max_mark = 100
        Macq = list()
        for i in range(0, max_mark):
            Macq.append(list(struct.unpack('<ch', fileobj.read(3))))
        # Xmini,Xmaxi,Ymini,Ymaxi:array[1..6] of float; #fileobj.read(240)
        x_mini = list()
        for i in range(0, 6):
            x_mini.append(read_from_char(fileobj, 'ext'))
        x_maxi = list()
        for i in range(0, 6):
            x_maxi.append(read_from_char(fileobj, 'ext'))
        y_mini = list()
        for i in range(0, 6):
            y_mini.append(read_from_char(fileobj, 'ext'))
        y_maxi = list()
        for i in range(0, 6):
            y_maxi.append(read_from_char(fileobj, 'ext'))
        # modeA:array[1..6] of byte; #fileobj.read(6)
        modeA = list(struct.unpack('<BBBBBB', fileobj.read(6)))
        continuous = read_from_char(fileobj, '?')
        preSeqI, postSeqI = struct.unpack('<hh', fileobj.read(4))
        # EchelleSeqI:boolean; #fileobj.read(1)
        ep_scaled = read_from_char(fileobj, '?')
        nbptEx = read_from_char(fileobj, 'H')
        x1s, x2s = struct.unpack('<ff', fileobj.read(8))
        y1s = list()
        for i in range(0, 6):
            y1s.append(read_from_char(fileobj, 'f'))
        y2s = list()
        for i in range(0, 6):
            y2s.append(read_from_char(fileobj, 'f'))
        # fileobj.read(96) # Xminis,Xmaxis,Yminis,Ymaxis:array[1..6] of single;
        x_minis = list()
        for i in range(0, 6):
            x_minis.append(read_from_char(fileobj, 'f'))
        x_maxis = list()
        for i in range(0, 6):
            x_maxis.append(read_from_char(fileobj, 'f'))
        y_minis = list()
        for i in range(0, 6):
            y_minis.append(read_from_char(fileobj, 'f'))
        y_maxis = list()
        for i in range(0, 6):
            y_maxis.append(read_from_char(fileobj, 'f'))
        n_ep = read_from_char(fileobj, 'h')
        tpData = read_from_char(fileobj, 'h')
        assert tpData in [3, 2, 1, 0], "bad sample size"
        no_analog_data = read_from_char(fileobj, '?')
        self.n_ep = n_ep
        self.n_channels = n_channels
        self.nbpt = nbpt
        self.i1 = i1
        self.i2 = i2
        self.x1 = x1
        self.x2 = x2
        self.dX = dX
        self.X0 = X0
        self.x_unit = x_unit[0:l_xu]
        self.dY_ar = dY_ar
        self.Y0_ar = Y0_ar
        self.y_units = y_units[0:n_channels]
        self.NbMacq = NbMacq
        self.Macq = Macq
        self.x_mini = x_mini[0:n_channels]
        self.x_maxi = x_maxi[0:n_channels]
        self.y_mini = y_mini[0:n_channels]
        self.y_maxi = y_maxi[0:n_channels]
        self.modeA = modeA
        self.continuous = continuous
        self.preSeqI = preSeqI
        self.postSeqI = postSeqI
        self.ep_scaled = ep_scaled
        self.nbptEx = nbptEx
        self.x1s = x1s
        self.x2s = x2s
        self.y1s = y1s
        self.y2s = y2s
        self.x_minis = x_minis[0:n_channels]
        self.x_maxis = x_maxis[0:n_channels]
        self.y_minis = y_minis[0:n_channels]
        self.y_maxis = y_maxis[0:n_channels]
        self.tpData = 2 if not tpData else tpData
        self.no_analog_data = no_analog_data
        self.dat_length = self.layout.file_size - self.layout.data_offset
        self.sample_size = type_dict[types[tpData]]
        if self.continuous:
            self.n_samples = self.dat_length / (self.n_channels * self.sample_size)
        else:
            self.n_samples = self.nbpt + self.nbptEx * 32768
        ep_size = self.preSeqI + self.postSeqI
        if not self.no_analog_data:
            ep_size += self.n_samples * self.sample_size * self.n_channels
        self.ep_size = ep_size
        self.n_episodes = (self.dat_length / self.ep_size) if (self.n_samples != 0) else 0
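
# Illustrative note (added comment, not from the original file): the (dX, X0) pair
# computed above defines the affine mapping from sample index to time,
# t = index * dX + X0. For hypothetical calibration points i1=0, i2=1000, x1=0.0,
# x2=1.0, this gives dX = (1.0 - 0.0) / (1000 - 0) = 0.001 and
# X0 = 0.0 - 0 * 0.001 = 0.0, so sample 250 falls at t = 0.25 (expressed in x_unit).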
  848. class DAC2GSHeader(Header):
  849. """
  850. A subclass of :class:`Header` used to
  851. identify the 'DAC2/GS/2000' format.
  852. NB : the size is fixed to 20 bytes,
  853. the identifier is a string containing
  854. 15 characters and the size is encoded
  855. as integer.
  856. """
  857. def __init__(self, layout):
  858. super(DAC2GSHeader, self).__init__(layout, "DAC2/GS/2000", 20, 15, "i")
  859. class DAC2Header(Header):
  860. """
  861. A subclass of :class:`Header` used to
  862. identify the 'DAC2 objects' format.
  863. NB : the size is fixed to 18 bytes,
  864. the identifier is a string containing
  865. 15 characters and the size is encoded
  866. as small integer.
  867. """
  868. def __init__(self, layout):
  869. super(DAC2Header, self).__init__(layout, "DAC2 objects", 18, 15, "h")
  870. class DAC2GSMainBlock(ElphyBlock):
  871. """
  872. Subclass of :class:`Block` useful to store data corresponding to
  873. the 'Main' block stored in the DAC2/GS/2000 format :
  874. ``n_channels`` : the number of acquisition channels.
  875. ``nbpt`` : the number of samples by episodes.
  876. ``tpData`` : the data format identifier used to compute sample size.
  877. ``x_unit`` : the x-coordinate unit for all channels in an episode.
  878. ``y_units`` : an array containing y-coordinate units for each channel in the episode.
  879. ``dX`` and ``X0`` : the scale factors necessary to retrieve the actual
  880. times relative to each sample in a channel.
  881. ``dY_ar`` and ``Y0_ar``: arrays of scale factors necessary to retrieve
  882. the actual values relative to samples.
  883. ``continuous`` : a boolean telling if the file has been acquired in
  884. continuous mode.
  885. ``preSeqI`` : the size in bytes of the data preceding raw data.
  886. ``postSeqI`` : the size in bytes of the data preceding raw data.
  887. ``withTags`` : a boolean telling if tags are recorded.
  888. ``tagShift`` : the number of tag channels and the shift to apply
  889. to encoded values to retrieve acquired values.
  890. ``dat_length`` : the length in bytes of the data in the file.
  891. ``sample_size`` : the size in bytes of a sample.
  892. ``n_samples`` : the number of samples.
  893. ``ep_size`` : the size in bytes of an episode.
  894. ``n_episodes`` : the number of recording sequences store in the file.
  895. NB : see file 'FdefDac2.pas' to identify the other parsed parameters.
  896. """
  897. def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="i"):
  898. super(DAC2GSMainBlock, self).__init__(
  899. layout, identifier, start, size, fixed_length, size_format)
  900. # parse the file to retrieve episodes and channels properties
  901. n_channels, nbpt, tpData = struct.unpack('<BiB', layout.file.read(6))
  902. l_xu, xu, dX, X0 = struct.unpack('<B10sdd', layout.file.read(27))
  903. y_units = list()
  904. dY_ar = list()
  905. Y0_ar = list()
  906. for _ in range(0, 16):
  907. l_yu, yu, dY, Y0 = struct.unpack('<B10sdd', layout.file.read(27))
  908. y_units.append(yu[0:l_yu])
  909. dY_ar.append(dY)
  910. Y0_ar.append(Y0)
  911. preSeqI, postSeqI, continuous, varEp, withTags = struct.unpack(
  912. '<ii???', layout.file.read(11))
  913. # some file doesn't precise the tagShift
  914. position = layout.file.tell()
  915. if position >= self.end:
  916. tagShift = 0
  917. else:
  918. tagShift = read_from_char(layout.file, 'B')
  919. # setup object properties
  920. self.n_channels = n_channels
  921. self.nbpt = nbpt
  922. self.tpData = tpData
  923. self.x_unit = xu[0:l_xu]
  924. self.dX = dX
  925. self.X0 = X0
  926. self.y_units = y_units[0:n_channels]
  927. self.dY_ar = dY_ar[0:n_channels]
  928. self.Y0_ar = Y0_ar[0:n_channels]
  929. self.continuous = continuous
  930. if self.continuous:
  931. self.preSeqI = 0
  932. self.postSeqI = 0
  933. else:
  934. self.preSeqI = preSeqI
  935. self.postSeqI = postSeqI
  936. self.varEp = varEp
  937. self.withTags = withTags
  938. if not self.withTags:
  939. self.tagShift = 0
  940. else:
  941. if tagShift == 0:
  942. self.tagShift = 4
  943. else:
  944. self.tagShift = tagShift
  945. self.sample_size = type_dict[types[self.tpData]]
  946. self.dat_length = self.layout.file_size - self.layout.data_offset
  947. if self.continuous:
  948. if self.n_channels > 0:
  949. self.n_samples = self.dat_length / (self.n_channels * self.sample_size)
  950. else:
  951. self.n_samples = 0
  952. else:
  953. self.n_samples = self.nbpt
  954. self.ep_size = (self.preSeqI + self.postSeqI + self.n_samples * self.sample_size *
  955. self.n_channels)
  956. self.n_episodes = self.dat_length / self.ep_size if (self.n_samples != 0) else 0
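# Illustrative sketch (hypothetical numbers, not read from a real file) of how
# the properties above combine in episode mode:
#   n_channels = 2, nbpt = 1000, sample_size = 2, preSeqI = postSeqI = 0
#   ep_size    = preSeqI + postSeqI + n_samples * sample_size * n_channels = 4000 bytes
#   n_episodes = dat_length / ep_size, e.g. 40000 / 4000 = 10 episodes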
  957. class DAC2GSEpisodeBlock(ElphyBlock):
  958. """
  959. Subclass of :class:`Block` useful to store data corresponding to
  960. 'DAC2SEQ' blocks stored in the DAC2/GS/2000 format.
  961. ``n_channels`` : the number of acquisition channels.
962. ``nbpt`` : the number of samples per episode.
  963. ``tpData`` : the data format identifier used to compute the sample size.
  964. ``x_unit`` : the x-coordinate unit for all channels in an episode.
  965. ``y_units`` : an array containing y-coordinate units for each channel in the episode.
  966. ``dX`` and ``X0`` : the scale factors necessary to retrieve the actual
  967. times relative to each sample in a channel.
  968. ``dY_ar`` and ``Y0_ar``: arrays of scale factors necessary to retrieve
  969. the actual values relative to samples.
970. ``postSeqI`` : the size in bytes of the data following raw data.
  971. NB : see file 'FdefDac2.pas' to identify the parsed parameters.
  972. """
  973. def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="i"):
  974. main = layout.main_block
  975. n_channels, nbpt, tpData, postSeqI = struct.unpack('<BiBi', layout.file.read(10))
  976. l_xu, xu, dX, X0 = struct.unpack('<B10sdd', layout.file.read(27))
  977. y_units = list()
  978. dY_ar = list()
  979. Y0_ar = list()
  980. for _ in range(0, 16):
  981. l_yu, yu, dY, Y0 = struct.unpack('<B10sdd', layout.file.read(27))
  982. y_units.append(yu[0:l_yu])
  983. dY_ar.append(dY)
  984. Y0_ar.append(Y0)
  985. super(DAC2GSEpisodeBlock, self).__init__(layout, identifier,
  986. start, layout.main_block.ep_size, fixed_length,
  987. size_format)
  988. self.n_channels = main.n_channels
  989. self.nbpt = main.nbpt
  990. self.tpData = main.tpData
  991. if not main.continuous:
  992. self.postSeqI = postSeqI
  993. self.x_unit = xu[0:l_xu]
  994. self.dX = dX
  995. self.X0 = X0
  996. self.y_units = y_units[0:n_channels]
  997. self.dY_ar = dY_ar[0:n_channels]
  998. self.Y0_ar = Y0_ar[0:n_channels]
  999. else:
  1000. self.postSeqI = 0
  1001. self.x_unit = main.x_unit
  1002. self.dX = main.dX
  1003. self.X0 = main.X0
  1004. self.y_units = main.y_units
  1005. self.dY_ar = main.dY_ar
  1006. self.Y0_ar = main.Y0_ar
  1007. class DAC2EpisodeBlock(ElphyBlock):
  1008. """
  1009. Subclass of :class:`Block` useful to store data corresponding to
  1010. 'B_Ep' blocks stored in the last version of Elphy format :
1011. ``ep_block`` : a shortcut to the 'Ep' sub-block.
1012. ``ch_block`` : a shortcut to the 'Adc' sub-block.
1013. ``ks_block`` : a shortcut to the 'KSamp' sub-block.
1014. ``kt_block`` : a shortcut to the 'Ktype' sub-block.
  1015. """
  1016. def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l"):
  1017. super(DAC2EpisodeBlock, self).__init__(
  1018. layout, identifier, start, size, fixed_length, size_format)
  1019. self.ep_block = None
  1020. self.ch_block = None
  1021. self.ks_block = None
  1022. self.kt_block = None
  1023. def set_episode_block(self):
  1024. blocks = self.layout.get_blocks_of_type('Ep', target_blocks=self.sub_blocks)
  1025. self.ep_block = blocks[0] if blocks else None
  1026. def set_channel_block(self):
  1027. blocks = self.layout.get_blocks_of_type('Adc', target_blocks=self.sub_blocks)
  1028. self.ch_block = blocks[0] if blocks else None
  1029. def set_sub_sampling_block(self):
  1030. blocks = self.layout.get_blocks_of_type('Ksamp', target_blocks=self.sub_blocks)
  1031. self.ks_block = blocks[0] if blocks else None
  1032. def set_sample_size_block(self):
  1033. blocks = self.layout.get_blocks_of_type('Ktype', target_blocks=self.sub_blocks)
  1034. self.kt_block = blocks[0] if blocks else None
  1035. class DummyDataBlock(BaseBlock):
  1036. """
  1037. Subclass of :class:`BaseBlock` useful to
  1038. identify chunk of blocks that are actually
  1039. corresponding to acquired data.
  1040. """
  1041. pass
  1042. class DAC2RDataBlock(ElphyBlock):
  1043. """
  1044. Subclass of :class:`Block` useful to store data corresponding to
  1045. 'RDATA' blocks stored in the last version of Elphy format :
  1046. ``data_start`` : the starting point of raw data.
1047. NB : This kind of block is preceded by a structure whose size is encoded
1048. as a 2-byte unsigned short. Consequently, data starts at data_offset plus
1049. that size.
  1050. """
  1051. def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l"):
  1052. super(DAC2RDataBlock, self).__init__(
  1053. layout, identifier, start, size, fixed_length, size_format)
  1054. self.data_start = self.data_offset + read_from_char(layout.file, 'H')
  1055. class DAC2CyberTagBlock(ElphyBlock):
  1056. """
  1057. Subclass of :class:`Block` useful to store data corresponding to
  1058. 'RCyberTag' blocks stored in the last version of Elphy format :
  1059. ``data_start`` : the starting point of raw data.
1060. NB : This kind of block is preceded by a structure whose size is encoded
1061. as a 2-byte unsigned short. Consequently, data starts at data_offset plus
1062. that size.
  1063. """
  1064. def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l"):
  1065. super(DAC2CyberTagBlock, self).__init__(
  1066. layout, identifier, start, size, fixed_length, size_format)
  1067. self.data_start = self.data_offset + read_from_char(layout.file, 'H')
  1068. class DAC2EventBlock(ElphyBlock):
  1069. """
  1070. Subclass of :class:`Block` useful to store
  1071. data corresponding to 'REVT' blocks stored
  1072. in the last version of Elphy format :
  1073. ``data_start`` : the starting point of raw data.
  1074. ``n_evt_channels`` : the number of channels used to acquire events.
  1075. ``n_events`` : an array containing the number of events for each event channel.
  1076. """
  1077. def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l"):
  1078. super(DAC2EventBlock, self).__init__(
  1079. layout, identifier, start, size, fixed_length, size_format)
  1080. fileobj = self.layout.file
  1081. jump = self.data_offset + read_from_char(fileobj, 'H')
  1082. fileobj.seek(jump)
1083. # extract the number of event channels
  1084. self.n_evt_channels = read_from_char(fileobj, 'i')
  1085. # extract for each event channel
  1086. # the corresponding number of events
  1087. n_events = list()
  1088. for _ in range(0, self.n_evt_channels):
  1089. n_events.append(read_from_char(fileobj, 'i'))
  1090. self.n_events = n_events
  1091. self.data_start = fileobj.tell()
  1092. class DAC2SpikeBlock(DAC2EventBlock):
  1093. """
  1094. Subclass of :class:`DAC2EventBlock` useful
  1095. to identify 'RSPK' and make the distinction
  1096. with 'REVT' blocks stored in the last version
  1097. of Elphy format.
  1098. """
  1099. def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l"):
  1100. super(DAC2SpikeBlock, self).__init__(
  1101. layout, identifier, start, size, fixed_length, size_format)
  1102. fileobj = self.layout.file
  1103. jump = self.data_offset
  1104. fileobj.seek(jump) # go to SpikeBlock
  1105. jump = self.data_offset + read_from_char(fileobj, 'h')
  1106. fileobj.seek(jump)
1107. # extract the number of event channels
  1108. self.n_evt_channels = read_from_char(fileobj, 'i')
  1109. # extract for each event channel
  1110. # the corresponding number of events
  1111. n_events = list()
  1112. for _ in range(0, self.n_evt_channels):
  1113. n_events.append(read_from_char(fileobj, 'i'))
  1114. self.n_events = n_events
  1115. self.data_start = fileobj.tell()
  1116. class DAC2WaveFormBlock(ElphyBlock):
  1117. """
  1118. Subclass of :class:`Block` useful to store data corresponding to
  1119. 'RspkWave' blocks stored in the last version of Elphy format :
  1120. ``data_start`` : the starting point of raw data.
  1121. ``n_spk_channels`` : the number of channels used to acquire spiketrains.
  1122. ``n_spikes`` : an array containing the number of spikes for each spiketrain.
  1123. ``pre_trigger`` : the number of samples of a waveform arriving before a spike.
  1124. ``wavelength`` : the number of samples in a waveform.
  1125. """
  1126. def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l"):
  1127. super(DAC2WaveFormBlock, self).__init__(
  1128. layout, identifier, start, size, fixed_length, size_format)
  1129. fileobj = self.layout.file
  1130. jump = self.data_offset + read_from_char(fileobj, 'H')
  1131. fileobj.seek(jump)
  1132. self.wavelength = read_from_char(fileobj, 'i')
  1133. self.pre_trigger = read_from_char(fileobj, 'i')
  1134. self.n_spk_channels = read_from_char(fileobj, 'i')
  1135. n_spikes = list()
  1136. for _ in range(0, self.n_spk_channels):
  1137. n_spikes.append(read_from_char(fileobj, 'i'))
  1138. self.n_spikes = n_spikes
  1139. self.data_start = fileobj.tell()
  1140. class DAC2EpSubBlock(ElphyBlock):
  1141. """
  1142. Subclass of :class:`Block` useful to retrieve data corresponding
  1143. to a 'Ep' sub-block stored in the last version of Elphy format :
  1144. ``n_channels`` : the number of acquisition channels.
1145. ``nbpt`` : the number of samples per episode.
  1146. ``tpData`` : the data format identifier used to store signal samples.
  1147. ``x_unit`` : the x-coordinate unit for all channels in an episode.
  1148. ``dX`` and ``X0`` : the scale factors necessary to retrieve the actual
  1149. times relative to each sample in a channel.
  1150. ``continuous`` : a boolean telling if the file has been acquired in
  1151. continuous mode.
1152. ``tag_mode`` : identifies the way tags are stored in a file.
1153. ``tag_shift`` : the number of bits that tags occupy in a 16-bit sample,
1154. i.e. the shift to apply to retrieve the actual value of the sample.
1155. ``dX_wf`` and ``X0_wf``: the scale factors necessary to retrieve the actual
1156. times relative to each waveform.
  1157. ``dY_wf`` and ``Y0_wf``: the scale factors necessary to retrieve the actual
  1158. values relative to waveform samples.
  1159. ``x_unit_wf`` and ``y_unit_wf``: the unit of x and y coordinates for all waveforms in an
  1160. episode.
  1161. """
  1162. def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l",
  1163. parent_block=None):
  1164. super(DAC2EpSubBlock, self).__init__(layout, identifier, start,
  1165. size, fixed_length, size_format,
  1166. parent_block=parent_block)
  1167. fileobj = self.layout.file
  1168. n_channels, nbpt, tpData, l_xu, x_unit, dX, X0 = struct.unpack(
  1169. '<BiBB10sdd', fileobj.read(33))
  1170. continuous, tag_mode, tag_shift = struct.unpack('<?BB', fileobj.read(3))
  1171. DxuSpk, X0uSpk, nbSpk, DyuSpk, Y0uSpk, l_xuspk, unitXSpk, l_yuspk, unitYSpk = \
  1172. struct.unpack('<ddiddB10sB10s', fileobj.read(58))
  1173. cyber_time, pc_time = struct.unpack('<dI', fileobj.read(12))
  1174. # necessary properties to reconstruct
  1175. # signals stored into the file
  1176. self.n_channels = n_channels
  1177. self.nbpt = nbpt
  1178. self.tpData = tpData
  1179. self.x_unit = x_unit[0:l_xu]
  1180. self.dX = dX
  1181. self.X0 = X0
  1182. self.continuous = continuous
  1183. self.tag_mode = tag_mode
  1184. self.tag_shift = tag_shift if self.tag_mode == 1 else 0
  1185. # following properties are valid
  1186. # when using multielectrode system
  1187. # named BlackRock / Cyberkinetics
  1188. # if fileobj.tell() < self.end :
  1189. self.dX_wf = DxuSpk
  1190. self.X0_wf = X0uSpk
  1191. self.n_spikes = nbSpk
  1192. self.dY_wf = DyuSpk
  1193. self.Y0_wf = Y0uSpk
  1194. self.x_unit_wf = unitXSpk[0:l_xuspk]
  1195. self.y_unit_wf = unitYSpk[0:l_yuspk]
  1196. self.cyber_time = cyber_time
  1197. self.pc_time = pc_time
  1198. class DAC2AdcSubBlock(ElphyBlock):
  1199. """
  1200. Subclass of :class:`SubBlock` useful to retrieve data corresponding
  1201. to a 'Adc' sub-block stored in the last version of Elphy format :
1202. ``y_units`` : an array containing the y-coordinate unit of each channel.
  1203. ``dY_ar`` and ``Y0_ar`` : arrays containing scaling factors for each
  1204. channel useful to compute the actual value of a signal sample.
  1205. """
  1206. def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l",
  1207. parent_block=None):
  1208. super(DAC2AdcSubBlock, self).__init__(layout, identifier, start,
  1209. size, fixed_length, size_format,
  1210. parent_block=parent_block)
  1211. fileobj = self.layout.file
  1212. # fileobj.seek(start + len(identifier) + 1)
  1213. ep_block, = [k for k in self.parent_block.sub_blocks if k.identifier.startswith('Ep')]
  1214. n_channels = ep_block.n_channels
  1215. self.y_units = list()
  1216. self.dY_ar = list()
  1217. self.Y0_ar = list()
  1218. for _ in range(0, n_channels):
  1219. l_yu, y_unit, dY, Y0 = struct.unpack('<B10sdd', fileobj.read(27))
  1220. self.y_units.append(y_unit[0:l_yu])
  1221. self.dY_ar.append(dY)
  1222. self.Y0_ar.append(Y0)
  1223. class DAC2KSampSubBlock(ElphyBlock):
  1224. """
  1225. Subclass of :class:`SubBlock` useful to retrieve data corresponding
  1226. to a 'Ksamp' sub-block stored in the last version of Elphy format :
  1227. ``k_sampling`` : an array containing all sub-sampling factors
  1228. corresponding to each acquired channel. If a factor is equal to
  1229. zero, then the channel has been converted into an event channel.
  1230. """
  1231. def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l",
  1232. parent_block=None):
  1233. super(DAC2KSampSubBlock, self).__init__(layout, identifier, start,
  1234. size, fixed_length, size_format,
  1235. parent_block=parent_block)
  1236. fileobj = self.layout.file
  1237. ep_block, = [k for k in self.parent_block.sub_blocks if k.identifier.startswith('Ep')]
  1238. n_channels = ep_block.n_channels
  1239. k_sampling = list()
  1240. for _ in range(0, n_channels):
  1241. k_sampling.append(read_from_char(fileobj, "H"))
  1242. self.k_sampling = k_sampling
  1243. class DAC2KTypeSubBlock(ElphyBlock):
  1244. """
  1245. Subclass of :class:`SubBlock` useful to retrieve data corresponding
  1246. to a 'Ktype' sub-block stored in the last version of Elphy format :
  1247. ``k_types`` : an array containing all data formats identifier used
  1248. to compute sample size.
  1249. """
  1250. def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l",
  1251. parent_block=None):
  1252. super(DAC2KTypeSubBlock, self).__init__(layout, identifier, start,
  1253. size, fixed_length, size_format,
  1254. parent_block=parent_block)
  1255. fileobj = self.layout.file
  1256. ep_block, = [k for k in self.parent_block.sub_blocks if k.identifier.startswith('Ep')]
  1257. n_channels = ep_block.n_channels
  1258. k_types = list()
  1259. for _ in range(0, n_channels):
  1260. k_types.append(read_from_char(fileobj, "B"))
  1261. self.k_types = k_types
  1262. # --------------------------------------------------------
  1263. # UTILS
  1264. # symbols of types that could
  1265. # encode a value in an elphy file
  1266. types = (
  1267. 'B',
  1268. 'b',
  1269. 'h',
  1270. 'H',
  1271. 'l',
  1272. 'f',
  1273. 'real48',
  1274. 'd',
  1275. 'ext',
  1276. 's_complex',
  1277. 'd_complex',
  1278. 'complex',
  1279. 'none'
  1280. )
  1281. # a dictionary linking python.struct
  1282. # formats to their actual size in bytes
  1283. type_dict = {
  1284. 'c': 1,
  1285. 'b': 1,
  1286. 'B': 1,
  1287. '?': 1,
  1288. 'h': 2,
  1289. 'H': 2,
  1290. 'i': 4,
  1291. 'I': 4,
  1292. 'l': 4,
  1293. 'L': 4,
  1294. 'q': 8,
  1295. 'Q': 8,
  1296. 'f': 4,
  1297. 'd': 8,
  1298. 'H+l': 6,
  1299. 'ext': 10,
  1300. 'real48': 6,
  1301. 's_complex': 8,
  1302. 'd_complex': 16,
  1303. 'complex': 20,
  1304. 'none': 0
  1305. }
1306. # a dictionary linking python.struct
  1307. # formats to numpy formats
  1308. numpy_map = {
  1309. 'b': np.int8,
  1310. 'B': np.uint8,
  1311. 'h': np.int16,
  1312. 'H': np.uint16,
  1313. 'i': np.int32,
  1314. 'I': np.uint32,
  1315. 'l': np.int32,
  1316. 'L': np.uint32,
  1317. 'q': np.int64,
  1318. 'Q': np.uint64,
  1319. 'f': np.float32,
  1320. 'd': np.float64,
  1321. 'H+l': 6,
  1322. 'ext': 10,
  1323. 'real48': 6,
  1324. 'SComp': 8,
  1325. 'DComp': 16,
  1326. 'Comp': 20,
  1327. 'none': 0
  1328. }
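# Illustrative helper (an assumption, not part of the original module) showing
# how a ``tpData`` format identifier read from a file is translated by the
# three tables above, e.g. tpData == 2 for 16-bit signed samples:
def _example_sample_format(tp_data=2):
    # 'h' -> 2 bytes per sample, decoded as numpy.int16
    symbol = types[tp_data]
    return symbol, type_dict[symbol], numpy_map[symbol]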
  1329. def read_from_char(data, type_char):
  1330. """
  1331. Return the value corresponding
  1332. to the specified character type.
  1333. """
  1334. n_bytes = type_dict[type_char]
  1335. ascii = data.read(n_bytes) if hasattr(data, 'read') else data
  1336. if type_char != 'ext':
  1337. try:
  1338. value = struct.unpack('<%s' % type_char, ascii)[0]
  1339. except:
1340. # the value could not be read
  1341. # because the value is not compatible
  1342. # with the specified type
  1343. value = None
  1344. else:
  1345. try:
  1346. value = float(ascii)
  1347. except:
  1348. value = None
  1349. return value
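# A small usage sketch (assuming only the module-level ``struct`` import that
# the rest of this file already relies on); not part of the original API:
def _example_read_int16():
    # read back a little-endian 16-bit integer from an in-memory buffer
    import io
    buf = io.BytesIO(struct.pack('<h', -42))
    return read_from_char(buf, 'h')  # -> -42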
  1350. def least_common_multiple(a, b):
  1351. """
  1352. Return the value of the least common multiple.
  1353. """
1354. return (a * b) // gcd(a, b)
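# For example (a quick sketch with hypothetical factors):
# least_common_multiple(4, 6) == 12; it is used further down to combine the
# per-channel sub-sampling factors of an episode into one aggregate length.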
  1355. # --------------------------------------------------------
  1356. # LAYOUT
  1357. b_float = 'f8'
  1358. b_int = 'i2'
  1359. class ElphyLayout(object):
  1360. """
  1361. A convenient class to know how data
  1362. are organised into an Elphy file :
  1363. ``elphy_file`` : a :class:`ElphyFile`
  1364. asking file introspection.
1365. ``blocks`` : a set of :class:`BaseBlock`
  1366. objects partitioning a file and extracting
  1367. some useful metadata.
1368. ``info_block`` : a shortcut to a :class:`FileInfoBlock`
  1369. object containing metadata describing a recording
  1370. protocol (sparsenoise, densenoise, movingbar or flashbar)
  1371. ``data_blocks`` : a shortcut to access directly
  1372. blocks containing raw data.
  1373. NB : Subclassing this class is a convenient
  1374. way to retrieve blocks constituting a file,
  1375. their relative information and location of
  1376. raw data using polymorphism rather than a
  1377. conditional structure.
  1378. """
  1379. def __init__(self, elphy_file):
  1380. self.elphy_file = elphy_file
  1381. self.blocks = list()
  1382. self.info_block = None
  1383. self.data_blocks = None
  1384. @property
  1385. def file(self):
  1386. return self.elphy_file.file
  1387. @property
  1388. def file_size(self):
  1389. return self.elphy_file.file_size
  1390. def is_continuous(self):
1391. raise NotImplementedError('must be overloaded in a subclass')
  1392. def add_block(self, block):
  1393. self.blocks.append(block)
  1394. @property
  1395. def header(self):
  1396. return self.blocks[0]
  1397. def get_blocks_of_type(self, identifier, target_blocks=None):
  1398. blocks = self.blocks if target_blocks is None else target_blocks
  1399. return [k for k in blocks if (k.identifier == identifier)]
  1400. def set_info_block(self):
  1401. raise NotImplementedError('must be overloaded in a subclass')
  1402. def set_data_blocks(self):
  1403. raise NotImplementedError('must be overloaded in a subclass')
  1404. def get_tag(self, episode, tag_channel):
  1405. raise NotImplementedError('must be overloaded in a subclass')
  1406. @property
  1407. def n_episodes(self):
  1408. raise NotImplementedError('must be overloaded in a subclass')
  1409. def n_channels(self, episode):
  1410. raise NotImplementedError('must be overloaded in a subclass')
  1411. def n_tags(self, episode):
  1412. raise NotImplementedError('must be overloaded in a subclass')
  1413. def n_samples(self, episode, channel):
  1414. raise NotImplementedError('must be overloaded in a subclass')
  1415. def sample_type(self, ep, ch):
  1416. raise NotImplementedError('must be overloaded in a subclass')
  1417. def sample_size(self, ep, ch):
  1418. symbol = self.sample_symbol(ep, ch)
  1419. return type_dict[symbol]
  1420. def sample_symbol(self, ep, ch):
  1421. tp = self.sample_type(ep, ch)
  1422. try:
  1423. return types[tp]
  1424. except:
  1425. return 'h'
  1426. def sampling_period(self, ep, ch):
  1427. raise NotImplementedError('must be overloaded in a subclass')
  1428. def x_scale_factors(self, ep, ch):
  1429. raise NotImplementedError('must be overloaded in a subclass')
  1430. def y_scale_factors(self, ep, ch):
  1431. raise NotImplementedError('must be overloaded in a subclass')
  1432. def x_tag_scale_factors(self, ep):
  1433. raise NotImplementedError('must be overloaded in a subclass')
  1434. def x_unit(self, ep, ch):
  1435. raise NotImplementedError('must be overloaded in a subclass')
  1436. def y_unit(self, ep, ch):
  1437. raise NotImplementedError('must be overloaded in a subclass')
  1438. def tag_shift(self, ep):
  1439. raise NotImplementedError('must be overloaded in a subclass')
  1440. def get_channel_for_tags(self, ep):
  1441. raise NotImplementedError('must be overloaded in a subclass')
  1442. def get_signal(self, episode, channel):
  1443. """
  1444. Return the signal description relative
  1445. to the specified episode and channel.
  1446. """
  1447. assert episode in range(1, self.n_episodes + 1)
  1448. assert channel in range(1, self.n_channels(episode) + 1)
  1449. t_start = 0
  1450. sampling_period = self.sampling_period(episode, channel)
  1451. t_stop = sampling_period * self.n_samples(episode, channel)
  1452. return ElphySignal(
  1453. self,
  1454. episode,
  1455. channel,
  1456. self.x_unit(episode, channel),
  1457. self.y_unit(episode, channel),
  1458. 1 / sampling_period,
  1459. t_start,
  1460. t_stop
  1461. )
  1462. def create_channel_mask(self, ep):
  1463. """
  1464. Return the minimal pattern of channel numbers
  1465. representing the succession of channels in the
  1466. multiplexed data. It is necessary to do the mapping
  1467. between a sample stored in the file and its relative
  1468. channel.
  1469. """
  1470. raise NotImplementedError('must be overloaded in a subclass')
  1471. def get_data_blocks(self, ep):
  1472. """
  1473. Return a set of :class:`DummyDataBlock` instances
1474. that define the actual location of samples in blocks
  1475. encapsulating raw data.
  1476. """
  1477. raise NotImplementedError('must be overloaded in a subclass')
  1478. def create_bit_mask(self, ep, ch):
  1479. """
  1480. Build a mask to apply on the entire episode
  1481. in order to only keep values corresponding
  1482. to the specified channel.
  1483. """
  1484. ch_mask = self.create_channel_mask(ep)
  1485. _mask = list()
  1486. for _ch in ch_mask:
  1487. size = self.sample_size(ep, _ch)
  1488. val = 1 if _ch == ch else 0
1489. for _ in range(0, size):
  1490. _mask.append(val)
  1491. return np.array(_mask)
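# Sketch of the expansion above (hypothetical values): with a channel mask
# [1, 2] and 2-byte samples, the byte-level mask for channel 1 would be
# [1, 1, 0, 0], i.e. keep the two bytes of channel 1 and drop those of
# channel 2 in every multiplexed aggregate.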
  1492. def load_bytes(self, data_blocks, dtype='<i1', start=None, end=None, expected_size=None):
  1493. """
  1494. Return list of bytes contained
  1495. in the specified set of blocks.
1496. NB : loads all data at once, as files cannot exceed 4 GB;
1497. other solutions to spare memory may be found later.
  1498. """
  1499. chunks = list()
  1500. raw = ''
  1501. # keep only data blocks having
  1502. # a size greater than zero
  1503. blocks = [k for k in data_blocks if k.size > 0]
  1504. for data_block in blocks:
  1505. self.file.seek(data_block.start)
  1506. raw = self.file.read(data_block.size)[0:expected_size]
  1507. databytes = np.frombuffer(raw, dtype=dtype)
  1508. chunks.append(databytes)
  1509. # concatenate all chunks and return
  1510. # the specified slice
  1511. if len(chunks) > 0:
  1512. databytes = np.concatenate(chunks)
  1513. return databytes[start:end]
  1514. else:
  1515. return np.array([])
  1516. def reshape_bytes(self, databytes, reshape, datatypes, order='<'):
  1517. """
  1518. Reshape a numpy array containing a set of databytes.
  1519. """
  1520. assert datatypes and len(datatypes) == len(reshape), "datatypes are not well defined"
  1521. l_bytes = len(databytes)
  1522. # create the mask for each shape
  1523. shape_mask = list()
  1524. for shape in reshape:
1525. for _ in range(1, shape + 1):
  1526. shape_mask.append(shape)
  1527. # create a set of masks to extract data
  1528. bit_masks = list()
  1529. for shape in reshape:
  1530. bit_mask = list()
  1531. for value in shape_mask:
  1532. bit = 1 if (value == shape) else 0
  1533. bit_mask.append(bit)
  1534. bit_masks.append(np.array(bit_mask))
  1535. # extract data
1536. n_samples = l_bytes // np.sum(reshape)
  1537. data = np.empty([len(reshape), n_samples], dtype=(int, int))
  1538. for index, bit_mask in enumerate(bit_masks):
  1539. tmp = self.filter_bytes(databytes, bit_mask)
  1540. tp = '%s%s%s' % (order, datatypes[index], reshape[index])
  1541. data[index] = np.frombuffer(tmp, dtype=tp)
  1542. return data.T
  1543. def filter_bytes(self, databytes, bit_mask):
  1544. """
  1545. Detect from a bit mask which bits
  1546. to keep to recompose the signal.
  1547. """
  1548. n_bytes = len(databytes)
  1549. mask = np.ones(n_bytes, dtype=int)
  1550. np.putmask(mask, mask, bit_mask)
  1551. to_keep = np.where(mask > 0)[0]
  1552. return databytes.take(to_keep)
  1553. def load_channel_data(self, ep, ch):
  1554. """
  1555. Return a numpy array containing the
  1556. list of bytes corresponding to the
  1557. specified episode and channel.
  1558. """
  1559. # memorise the sample size and symbol
  1560. sample_size = self.sample_size(ep, ch)
  1561. sample_symbol = self.sample_symbol(ep, ch)
  1562. # create a bit mask to define which
  1563. # sample to keep from the file
  1564. bit_mask = self.create_bit_mask(ep, ch)
  1565. # load all bytes contained in an episode
  1566. data_blocks = self.get_data_blocks(ep)
  1567. databytes = self.load_bytes(data_blocks)
  1568. raw = self.filter_bytes(databytes, bit_mask)
  1569. # reshape bytes from the sample size
  1570. dt = np.dtype(numpy_map[sample_symbol])
1571. dt = dt.newbyteorder('<')
1572. return np.frombuffer(raw.reshape([len(raw) // sample_size, sample_size]), dt)
  1573. def apply_op(self, np_array, value, op_type):
  1574. """
  1575. A convenient function to apply an operator
  1576. over all elements of a numpy array.
  1577. """
  1578. if op_type == "shift_right":
  1579. return np_array >> value
  1580. elif op_type == "shift_left":
  1581. return np_array << value
  1582. elif op_type == "mask":
  1583. return np_array & value
  1584. else:
  1585. return np_array
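# Minimal sketch of the shift used to strip tag bits (hypothetical values):
# apply_op(np.array([0x1234]), 4, "shift_right") returns np.array([0x123]),
# i.e. the 4 low-order bits holding the tags are discarded.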
  1586. def get_tag_mask(self, tag_ch, tag_mode):
  1587. """
  1588. Return a mask useful to retrieve
  1589. bits that encode a tag channel.
  1590. """
  1591. if tag_mode == 1:
  1592. tag_mask = 0b01 if (tag_ch == 1) else 0b10
  1593. elif tag_mode in [2, 3]:
  1594. ar_mask = np.zeros(16, dtype=int)
  1595. ar_mask[tag_ch - 1] = 1
  1596. st = "0b" + ''.join(np.array(np.flipud(ar_mask), dtype=str))
  1597. tag_mask = eval(st)
  1598. return tag_mask
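# Sketch of the resulting masks (hypothetical channels): in tag_mode 1,
# tag channel 2 gives 0b10; in tag_mode 2 or 3, tag channel 3 gives
# 0b0000000000000100, i.e. bit (tag_ch - 1) of a 16-bit word.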
  1599. def load_encoded_tags(self, ep, tag_ch):
  1600. """
  1601. Return a numpy array containing
  1602. bytes corresponding to the specified
  1603. episode and channel.
  1604. """
  1605. tag_mode = self.tag_mode(ep)
  1606. tag_mask = self.get_tag_mask(tag_ch, tag_mode)
  1607. if tag_mode in [1, 2]:
  1608. # digidata or itc mode
  1609. # available for all formats
  1610. ch = self.get_channel_for_tags(ep)
  1611. raw = self.load_channel_data(ep, ch)
  1612. return self.apply_op(raw, tag_mask, "mask")
  1613. elif tag_mode == 3:
  1614. # cyber k mode
  1615. # only available for DAC2 objects format
  1616. # store bytes corresponding to the blocks
  1617. # containing tags in a numpy array and reshape
  1618. # it to have a set of tuples (time, value)
  1619. ck_blocks = self.get_blocks_of_type(ep, 'RCyberTag')
  1620. databytes = self.load_bytes(ck_blocks)
  1621. raw = self.reshape_bytes(databytes, reshape=(4, 2), datatypes=('u', 'u'), order='<')
  1622. # keep only items that are compatible
  1623. # with the specified tag channel
  1624. raw[:, 1] = self.apply_op(raw[:, 1], tag_mask, "mask")
  1625. # computing numpy.diff is useful to know
  1626. # how many times a value is maintained
  1627. # and necessary to reconstruct the
  1628. # compressed signal ...
  1629. repeats = np.array(np.diff(raw[:, 0]), dtype=int)
  1630. data = np.repeat(raw[:-1, 1], repeats, axis=0)
  1631. # ... note that there is always
  1632. # a transition at t=0 for synchronisation
1633. # purposes, consequently it is not necessary
1634. # to pad with zeros before the first
1635. # transition arrives ...
  1636. return data
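# Sketch of the reconstruction above (hypothetical (time, value) pairs): with
# transition times [0, 3, 5] and masked values [1, 0, 1], np.diff gives the
# repeat counts [3, 2] and np.repeat rebuilds the signal [1, 1, 1, 0, 0],
# one value per sample between two transitions.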
  1637. def load_encoded_data(self, ep, ch):
  1638. """
  1639. Get encoded value of raw data from the elphy file.
  1640. """
  1641. tag_shift = self.tag_shift(ep)
  1642. data = self.load_channel_data(ep, ch)
  1643. if tag_shift:
  1644. return self.apply_op(data, tag_shift, "shift_right")
  1645. else:
  1646. return data
  1647. def get_signal_data(self, ep, ch):
  1648. """
  1649. Return a numpy array containing all samples of a
  1650. signal, acquired on an Elphy analog channel, formatted
  1651. as a list of (time, value) tuples.
  1652. """
  1653. # get data from the file
  1654. y_data = self.load_encoded_data(ep, ch)
  1655. x_data = np.arange(0, len(y_data))
  1656. # create a recarray
  1657. data = np.recarray(len(y_data), dtype=[('x', b_float), ('y', b_float)])
  1658. # put in the recarray the scaled data
  1659. x_factors = self.x_scale_factors(ep, ch)
  1660. y_factors = self.y_scale_factors(ep, ch)
  1661. data['x'] = x_factors.scale(x_data)
  1662. data['y'] = y_factors.scale(y_data)
  1663. return data
  1664. def get_tag_data(self, ep, tag_ch):
  1665. """
  1666. Return a numpy array containing all samples of a
  1667. signal, acquired on an Elphy tag channel, formatted
  1668. as a list of (time, value) tuples.
  1669. """
  1670. # get data from the file
  1671. y_data = self.load_encoded_tags(ep, tag_ch)
  1672. x_data = np.arange(0, len(y_data))
  1673. # create a recarray
  1674. data = np.recarray(len(y_data), dtype=[('x', b_float), ('y', b_int)])
  1675. # put in the recarray the scaled data
  1676. factors = self.x_tag_scale_factors(ep)
  1677. data['x'] = factors.scale(x_data)
  1678. data['y'] = y_data
  1679. return data
  1680. class Acquis1Layout(ElphyLayout):
  1681. """
  1682. A subclass of :class:`ElphyLayout` to know
  1683. how the 'ACQUIS1/GS/1991' format is organised.
  1684. Extends :class:`ElphyLayout` to store the
  1685. offset used to retrieve directly raw data :
  1686. ``data_offset`` : an offset to jump directly
  1687. to the raw data.
  1688. """
  1689. def __init__(self, fileobj, data_offset):
  1690. super(Acquis1Layout, self).__init__(fileobj)
  1691. self.data_offset = data_offset
  1692. self.data_blocks = None
  1693. def get_blocks_end(self):
  1694. return self.data_offset
  1695. def is_continuous(self):
  1696. return self.header.continuous
  1697. def get_episode_blocks(self):
  1698. raise NotImplementedError()
  1699. def set_info_block(self):
  1700. i_blks = self.get_blocks_of_type('USER INFO')
  1701. assert len(i_blks) < 2, 'too many info blocks'
  1702. if len(i_blks):
  1703. self.info_block = i_blks[0]
  1704. def set_data_blocks(self):
  1705. data_blocks = list()
  1706. size = self.header.n_samples * self.header.sample_size * self.header.n_channels
  1707. for ep in range(0, self.header.n_episodes):
  1708. start = self.data_offset + ep * self.header.ep_size + self.header.preSeqI
  1709. data_blocks.append(DummyDataBlock(self, 'Acquis1Data', start, size))
  1710. self.data_blocks = data_blocks
  1711. def get_data_blocks(self, ep):
  1712. return [self.data_blocks[ep - 1]]
  1713. @property
  1714. def n_episodes(self):
  1715. return self.header.n_episodes
  1716. def n_channels(self, episode):
  1717. return self.header.n_channels
  1718. def n_tags(self, episode):
  1719. return 0
  1720. def tag_mode(self, ep):
  1721. return 0
  1722. def tag_shift(self, ep):
  1723. return 0
  1724. def get_channel_for_tags(self, ep):
  1725. return None
  1726. @property
  1727. def no_analog_data(self):
  1728. return True if (self.n_episodes == 0) else self.header.no_analog_data
  1729. def sample_type(self, ep, ch):
  1730. return self.header.tpData
  1731. def sampling_period(self, ep, ch):
  1732. return self.header.dX
  1733. def n_samples(self, ep, ch):
  1734. return self.header.n_samples
  1735. def x_tag_scale_factors(self, ep):
  1736. return ElphyScaleFactor(
  1737. self.header.dX,
  1738. self.header.X0
  1739. )
  1740. def x_scale_factors(self, ep, ch):
  1741. return ElphyScaleFactor(
  1742. self.header.dX,
  1743. self.header.X0
  1744. )
  1745. def y_scale_factors(self, ep, ch):
  1746. dY = self.header.dY_ar[ch - 1]
  1747. Y0 = self.header.Y0_ar[ch - 1]
  1748. # TODO: see why this kind of exception exists
  1749. if dY is None or Y0 is None:
  1750. raise Exception('bad Y-scale factors for episode %s channel %s' % (ep, ch))
  1751. return ElphyScaleFactor(dY, Y0)
  1752. def x_unit(self, ep, ch):
  1753. return self.header.x_unit
  1754. def y_unit(self, ep, ch):
  1755. return self.header.y_units[ch - 1]
  1756. @property
  1757. def ep_size(self):
  1758. return self.header.ep_size
  1759. @property
  1760. def file_duration(self):
  1761. return self.header.dX * self.n_samples
  1762. def get_tag(self, episode, tag_channel):
  1763. return None
  1764. def create_channel_mask(self, ep):
  1765. return np.arange(1, self.header.n_channels + 1)
  1766. class DAC2GSLayout(ElphyLayout):
  1767. """
  1768. A subclass of :class:`ElphyLayout` to know
  1769. how the 'DAC2 / GS / 2000' format is organised.
  1770. Extends :class:`ElphyLayout` to store the
  1771. offset used to retrieve directly raw data :
  1772. ``data_offset`` : an offset to jump directly
  1773. after the 'MAIN' block where 'DAC2SEQ' blocks
  1774. start.
1775. ``main_block`` : a shortcut to access the 'MAIN' block.
  1776. ``episode_blocks`` : a shortcut to access blocks
  1777. corresponding to episodes.
  1778. """
  1779. def __init__(self, fileobj, data_offset):
  1780. super(DAC2GSLayout, self).__init__(fileobj)
  1781. self.data_offset = data_offset
  1782. self.main_block = None
  1783. self.episode_blocks = None
  1784. def get_blocks_end(self):
  1785. return self.file_size # data_offset
  1786. def is_continuous(self):
  1787. main_block = self.main_block
  1788. return main_block.continuous if main_block else False
  1789. def get_episode_blocks(self):
  1790. raise NotImplementedError()
  1791. def set_main_block(self):
  1792. main_block = self.get_blocks_of_type('MAIN')
  1793. self.main_block = main_block[0] if main_block else None
  1794. def set_episode_blocks(self):
  1795. ep_blocks = self.get_blocks_of_type('DAC2SEQ')
  1796. self.episode_blocks = ep_blocks if ep_blocks else None
  1797. def set_info_block(self):
  1798. i_blks = self.get_blocks_of_type('USER INFO')
  1799. assert len(i_blks) < 2, "too many info blocks"
  1800. if len(i_blks):
  1801. self.info_block = i_blks[0]
  1802. def set_data_blocks(self):
  1803. data_blocks = list()
  1804. identifier = 'DAC2GSData'
  1805. size = self.main_block.n_samples * self.main_block.sample_size * self.main_block.n_channels
  1806. if not self.is_continuous():
  1807. blocks = self.get_blocks_of_type('DAC2SEQ')
  1808. for block in blocks:
  1809. start = block.start + self.main_block.preSeqI
  1810. data_blocks.append(DummyDataBlock(self, identifier, start, size))
  1811. else:
  1812. start = self.blocks[-1].end + 1 + self.main_block.preSeqI
  1813. data_blocks.append(DummyDataBlock(self, identifier, start, size))
  1814. self.data_blocks = data_blocks
  1815. def get_data_blocks(self, ep):
  1816. return [self.data_blocks[ep - 1]]
  1817. def episode_block(self, ep):
  1818. return self.main_block if self.is_continuous() else self.episode_blocks[ep - 1]
  1819. def tag_mode(self, ep):
  1820. return 1 if self.main_block.withTags else 0
  1821. def tag_shift(self, ep):
  1822. return self.main_block.tagShift
  1823. def get_channel_for_tags(self, ep):
  1824. return 1
  1825. def sample_type(self, ep, ch):
  1826. return self.main_block.tpData
  1827. def sample_size(self, ep, ch):
  1828. size = super(DAC2GSLayout, self).sample_size(ep, ch)
  1829. assert size == 2, "sample size is always 2 bytes for DAC2/GS/2000 format"
  1830. return size
  1831. def sampling_period(self, ep, ch):
  1832. block = self.episode_block(ep)
  1833. return block.dX
  1834. def x_tag_scale_factors(self, ep):
  1835. block = self.episode_block(ep)
  1836. return ElphyScaleFactor(
  1837. block.dX,
  1838. block.X0,
  1839. )
  1840. def x_scale_factors(self, ep, ch):
  1841. block = self.episode_block(ep)
  1842. return ElphyScaleFactor(
  1843. block.dX,
  1844. block.X0,
  1845. )
  1846. def y_scale_factors(self, ep, ch):
  1847. block = self.episode_block(ep)
  1848. return ElphyScaleFactor(
  1849. block.dY_ar[ch - 1],
  1850. block.Y0_ar[ch - 1]
  1851. )
  1852. def x_unit(self, ep, ch):
  1853. block = self.episode_block(ep)
  1854. return block.x_unit
  1855. def y_unit(self, ep, ch):
  1856. block = self.episode_block(ep)
  1857. return block.y_units[ch - 1]
  1858. def n_samples(self, ep, ch):
  1859. return self.main_block.n_samples
  1860. def ep_size(self, ep):
  1861. return self.main_block.ep_size
  1862. @property
  1863. def n_episodes(self):
  1864. return self.main_block.n_episodes
  1865. def n_channels(self, episode):
  1866. return self.main_block.n_channels
  1867. def n_tags(self, episode):
  1868. return 2 if self.main_block.withTags else 0
  1869. @property
  1870. def file_duration(self):
  1871. return self.main_block.dX * self.n_samples
  1872. def get_tag(self, episode, tag_channel):
  1873. assert episode in range(1, self.n_episodes + 1)
  1874. # there are none or 2 tag channels
  1875. if self.tag_mode(episode) == 1:
1876. assert tag_channel in range(1, 3), "DAC2/GS/2000 format supports only 2 tag channels"
  1877. block = self.episode_block(episode)
  1878. t_stop = self.main_block.n_samples * block.dX
  1879. return ElphyTag(self, episode, tag_channel, block.x_unit, 1.0 / block.dX, 0, t_stop)
  1880. else:
  1881. return None
  1882. def n_tag_samples(self, ep, tag_channel):
  1883. return self.main_block.n_samples
  1884. def get_tag_data(self, episode, tag_channel):
  1885. # memorise some useful properties
  1886. block = self.episode_block(episode)
  1887. sample_size = self.sample_size(episode, tag_channel)
  1888. sample_symbol = self.sample_symbol(episode, tag_channel)
  1889. # create a bit mask to define which
  1890. # sample to keep from the file
  1891. channel_mask = self.create_channel_mask(episode)
  1892. bit_mask = self.create_bit_mask(channel_mask, 1)
  1893. # get bytes from the file
  1894. data_block = self.data_blocks[episode - 1]
  1895. n_bytes = data_block.size
  1896. self.file.seek(data_block.start)
  1897. databytes = np.frombuffer(self.file.read(n_bytes), '<i1')
  1898. # detect which bits keep to recompose the tag
  1899. ep_mask = np.ones(n_bytes, dtype=int)
  1900. np.putmask(ep_mask, ep_mask, bit_mask)
  1901. to_keep = np.where(ep_mask > 0)[0]
  1902. raw = databytes.take(to_keep)
1903. raw = raw.reshape([len(raw) // sample_size, sample_size])
  1904. # create a recarray containing data
  1905. dt = np.dtype(numpy_map[sample_symbol])
1906. dt = dt.newbyteorder('<')
  1907. tag_mask = 0b01 if (tag_channel == 1) else 0b10
  1908. y_data = np.frombuffer(raw, dt) & tag_mask
  1909. x_data = np.arange(0, len(y_data)) * block.dX + block.X0
  1910. data = np.recarray(len(y_data), dtype=[('x', b_float), ('y', b_int)])
  1911. data['x'] = x_data
  1912. data['y'] = y_data
  1913. return data
  1914. def create_channel_mask(self, ep):
  1915. return np.arange(1, self.main_block.n_channels + 1)
  1916. class DAC2Layout(ElphyLayout):
  1917. """
  1918. A subclass of :class:`ElphyLayout` to know
  1919. how the Elphy format is organised.
1920. Whereas other formats store raw data at the
1921. end of the file, the 'DAC2 objects' format spreads
1922. it over multiple blocks :
  1923. ``episode_blocks`` : a shortcut to access blocks
  1924. corresponding to episodes.
  1925. """
  1926. def __init__(self, fileobj):
  1927. super(DAC2Layout, self).__init__(fileobj)
  1928. self.episode_blocks = None
  1929. def get_blocks_end(self):
  1930. return self.file_size
  1931. def is_continuous(self):
  1932. ep_blocks = [k for k in self.blocks if k.identifier.startswith('B_Ep')]
  1933. if ep_blocks:
  1934. ep_block = ep_blocks[0]
  1935. ep_sub_block = ep_block.sub_blocks[0]
  1936. return ep_sub_block.continuous
  1937. else:
  1938. return False
  1939. def set_episode_blocks(self):
  1940. self.episode_blocks = [k for k in self.blocks if str(k.identifier).startswith('B_Ep')]
  1941. def set_info_block(self):
  1942. # in fact the file info are contained into a single sub-block with an USR identifier
  1943. i_blks = self.get_blocks_of_type('B_Finfo')
  1944. assert len(i_blks) < 2, "too many info blocks"
  1945. if len(i_blks):
  1946. i_blk = i_blks[0]
  1947. sub_blocks = i_blk.sub_blocks
  1948. if len(sub_blocks):
  1949. self.info_block = sub_blocks[0]
  1950. def set_data_blocks(self):
  1951. data_blocks = list()
  1952. blocks = self.get_blocks_of_type('RDATA')
  1953. for block in blocks:
  1954. start = block.data_start
  1955. size = block.end + 1 - start
  1956. data_blocks.append(DummyDataBlock(self, 'RDATA', start, size))
  1957. self.data_blocks = data_blocks
  1958. def get_data_blocks(self, ep):
  1959. return self.group_blocks_of_type(ep, 'RDATA')
  1960. def group_blocks_of_type(self, ep, identifier):
  1961. ep_blocks = list()
  1962. blocks = [k for k in self.get_blocks_stored_in_episode(ep) if k.identifier == identifier]
  1963. for block in blocks:
  1964. start = block.data_start
  1965. size = block.end + 1 - start
  1966. ep_blocks.append(DummyDataBlock(self, identifier, start, size))
  1967. return ep_blocks
  1968. def get_blocks_stored_in_episode(self, ep):
  1969. data_blocks = [k for k in self.blocks if k.identifier == 'RDATA']
  1970. n_ep = self.n_episodes
  1971. blk_1 = self.episode_block(ep)
  1972. blk_2 = self.episode_block((ep + 1) % n_ep)
  1973. i_1 = self.blocks.index(blk_1)
  1974. i_2 = self.blocks.index(blk_2)
  1975. if (blk_1 == blk_2) or (i_2 < i_1):
  1976. return [k for k in data_blocks if self.blocks.index(k) > i_1]
  1977. else:
1978. return [k for k in data_blocks if self.blocks.index(k) in range(i_1, i_2)]
  1979. def set_cyberk_blocks(self):
  1980. ck_blocks = list()
  1981. blocks = self.get_blocks_of_type('RCyberTag')
  1982. for block in blocks:
  1983. start = block.data_start
  1984. size = block.end + 1 - start
  1985. ck_blocks.append(DummyDataBlock(self, 'RCyberTag', start, size))
  1986. self.ck_blocks = ck_blocks
  1987. def episode_block(self, ep):
  1988. return self.episode_blocks[ep - 1]
  1989. @property
  1990. def n_episodes(self):
  1991. return len(self.episode_blocks)
  1992. def analog_index(self, episode):
  1993. """
  1994. Return indices relative to channels
  1995. used for analog signals.
  1996. """
  1997. block = self.episode_block(episode)
  1998. tag_mode = block.ep_block.tag_mode
  1999. an_index = np.where(np.array(block.ks_block.k_sampling) > 0)
  2000. if tag_mode == 2:
  2001. an_index = an_index[:-1]
  2002. return an_index
  2003. def n_channels(self, episode):
  2004. """
  2005. Return the number of channels used
  2006. for analog signals but also events.
2007. NB : in Elphy these two kinds of channels
2008. are not differentiated.
  2009. """
  2010. block = self.episode_block(episode)
  2011. tag_mode = block.ep_block.tag_mode
  2012. n_channels = len(block.ks_block.k_sampling)
  2013. return n_channels if tag_mode != 2 else n_channels - 1
  2014. def n_tags(self, episode):
  2015. block = self.episode_block(episode)
  2016. tag_mode = block.ep_block.tag_mode
  2017. tag_map = {0: 0, 1: 2, 2: 16, 3: 16}
  2018. return tag_map.get(tag_mode, 0)
  2019. def n_events(self, episode):
  2020. """
  2021. Return the number of channels
  2022. dedicated to events.
  2023. """
  2024. block = self.episode_block(episode)
  2025. return block.ks_block.k_sampling.count(0)
  2026. def n_spiketrains(self, episode):
  2027. spk_blocks = [k for k in self.blocks if k.identifier == 'RSPK']
  2028. return spk_blocks[0].n_evt_channels if spk_blocks else 0
  2029. def sub_sampling(self, ep, ch):
  2030. """
  2031. Return the sub-sampling factor for
  2032. the specified episode and channel.
  2033. """
  2034. block = self.episode_block(ep)
  2035. return block.ks_block.k_sampling[ch - 1] if block.ks_block else 1
  2036. def aggregate_size(self, block, ep):
  2037. ag_count = self.aggregate_sample_count(block)
  2038. ag_size = 0
  2039. for ch in range(1, ag_count + 1):
  2040. if (block.ks_block.k_sampling[ch - 1] != 0):
  2041. ag_size += self.sample_size(ep, ch)
  2042. return ag_size
  2043. def n_samples(self, ep, ch):
  2044. block = self.episode_block(ep)
  2045. if not block.ep_block.continuous:
  2046. return block.ep_block.nbpt / self.sub_sampling(ep, ch)
  2047. else:
2048. # in the continuous case there isn't any place
2049. # in the file that stores the number of
2050. # samples, unlike the episode case ...
  2051. data_blocks = self.get_data_blocks(ep)
  2052. total_size = np.sum([k.size for k in data_blocks])
  2053. # count the number of samples in an
  2054. # aggregate and compute its size in order
  2055. # to determine the size of an aggregate
  2056. ag_count = self.aggregate_sample_count(block)
  2057. ag_size = self.aggregate_size(block, ep)
  2058. n_ag = total_size / ag_size
  2059. # the number of samples is equal
  2060. # to the number of aggregates ...
  2061. n_samples = n_ag
  2062. n_chunks = total_size % ag_size
2063. # ... but not when there exists
2064. # an incomplete aggregate at the
2065. # end of the file, consequently
2066. # the previously computed number
2067. # of samples must be incremented
2068. # by one, but only if the channel maps
2069. # to a sample in the last aggregate
2070. # ... maybe this last part should be
2071. # deleted because n_chunks is always
2072. # zero in continuous mode
  2073. if n_chunks:
  2074. last_ag_size = total_size - n_ag * ag_count
  2075. size = 0
  2076. for i in range(0, ch):
  2077. size += self.sample_size(ep, i + 1)
  2078. if size <= last_ag_size:
  2079. n_samples += 1
  2080. return n_samples
  2081. def sample_type(self, ep, ch):
  2082. block = self.episode_block(ep)
  2083. return block.kt_block.k_types[ch - 1] if block.kt_block else block.ep_block.tpData
  2084. def sampling_period(self, ep, ch):
  2085. block = self.episode_block(ep)
  2086. return block.ep_block.dX * self.sub_sampling(ep, ch)
  2087. def x_tag_scale_factors(self, ep):
  2088. block = self.episode_block(ep)
  2089. return ElphyScaleFactor(
  2090. block.ep_block.dX,
  2091. block.ep_block.X0
  2092. )
  2093. def x_scale_factors(self, ep, ch):
  2094. block = self.episode_block(ep)
  2095. return ElphyScaleFactor(
  2096. block.ep_block.dX * block.ks_block.k_sampling[ch - 1],
  2097. block.ep_block.X0,
  2098. )
  2099. def y_scale_factors(self, ep, ch):
  2100. block = self.episode_block(ep)
  2101. return ElphyScaleFactor(
  2102. block.ch_block.dY_ar[ch - 1],
  2103. block.ch_block.Y0_ar[ch - 1]
  2104. )
  2105. def x_unit(self, ep, ch):
  2106. block = self.episode_block(ep)
  2107. return block.ep_block.x_unit
  2108. def y_unit(self, ep, ch):
  2109. block = self.episode_block(ep)
  2110. return block.ch_block.y_units[ch - 1]
  2111. def tag_mode(self, ep):
  2112. block = self.episode_block(ep)
  2113. return block.ep_block.tag_mode
  2114. def tag_shift(self, ep):
  2115. block = self.episode_block(ep)
  2116. return block.ep_block.tag_shift
  2117. def get_channel_for_tags(self, ep):
  2118. block = self.episode_block(ep)
  2119. tag_mode = self.tag_mode(ep)
  2120. if tag_mode == 1:
  2121. ks = np.array(block.ks_block.k_sampling)
  2122. mins = np.where(ks == ks.min())[0] + 1
  2123. return mins[0]
  2124. elif tag_mode == 2:
  2125. return block.ep_block.n_channels
  2126. else:
  2127. return None
  2128. def aggregate_sample_count(self, block):
  2129. """
2130. Return the number of samples in an aggregate.
  2131. """
  2132. # compute the least common multiple
  2133. # for channels having block.ks_block.k_sampling[ch] > 0
  2134. lcm0 = 1
  2135. for i in range(0, block.ep_block.n_channels):
  2136. if block.ks_block.k_sampling[i] > 0:
  2137. lcm0 = least_common_multiple(lcm0, block.ks_block.k_sampling[i])
  2138. # sum quotients lcm / KSampling
  2139. count = 0
  2140. for i in range(0, block.ep_block.n_channels):
  2141. if block.ks_block.k_sampling[i] > 0:
  2142. count += lcm0 / block.ks_block.k_sampling[i]
  2143. return count
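# Worked example (hypothetical sub-sampling factors): with k_sampling equal
# to [1, 2, 4], the least common multiple is 4 and an aggregate holds
# 4/1 + 4/2 + 4/4 = 7 samples.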
  2144. def create_channel_mask(self, ep):
  2145. """
  2146. Return the minimal pattern of channel numbers
  2147. representing the succession of channels in the
  2148. multiplexed data. It is useful to do the mapping
  2149. between a sample stored in the file and its relative
  2150. channel.
  2151. NB : This function has been converted from the
  2152. 'TseqBlock.BuildMask' method of the file 'ElphyFormat.pas'
  2153. stored in Elphy source code.
  2154. """
  2155. block = self.episode_block(ep)
  2156. ag_count = self.aggregate_sample_count(block)
  2157. mask_ar = np.zeros(ag_count, dtype='i')
  2158. ag_size = 0
  2159. i = 0
  2160. k = 0
  2161. while k < ag_count:
  2162. for j in range(0, block.ep_block.n_channels):
  2163. if (block.ks_block.k_sampling[j] != 0) and (i % block.ks_block.k_sampling[j] == 0):
  2164. mask_ar[k] = j + 1
  2165. ag_size += self.sample_size(ep, j + 1)
  2166. k += 1
  2167. if k >= ag_count:
  2168. break
  2169. i += 1
  2170. return mask_ar
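# Worked example (hypothetical factors): with two channels and k_sampling
# [1, 2], the aggregate holds 3 samples and the resulting mask is [1, 2, 1],
# i.e. channel 1 appears at every step of the multiplexed data and channel 2
# only at every other step.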
  2171. def get_signal(self, episode, channel):
  2172. block = self.episode_block(episode)
  2173. k_sampling = np.array(block.ks_block.k_sampling)
  2174. evt_channels = np.where(k_sampling == 0)[0]
  2175. if channel not in evt_channels:
  2176. return super(DAC2Layout, self).get_signal(episode, channel)
  2177. else:
  2178. k_sampling[channel - 1] = -1
  2179. return self.get_event(episode, channel, k_sampling)
  2180. def get_tag(self, episode, tag_channel):
  2181. """
  2182. Return a :class:`ElphyTag` which is a
  2183. descriptor of the specified event channel.
  2184. """
  2185. assert episode in range(1, self.n_episodes + 1)
  2186. # there are none, 2 or 16 tag
  2187. # channels depending on tag_mode
  2188. tag_mode = self.tag_mode(episode)
  2189. if tag_mode:
  2190. block = self.episode_block(episode)
  2191. x_unit = block.ep_block.x_unit
  2192. # verify the validity of the tag channel
  2193. if tag_mode == 1:
  2194. assert tag_channel in range(
2195. 1, 3), "Elphy format supports only 2 tag channels for tag_mode == 1"
  2196. elif tag_mode == 2:
  2197. assert tag_channel in range(
2198. 1, 17), "Elphy format supports only 16 tag channels for tag_mode == 2"
  2199. elif tag_mode == 3:
  2200. assert tag_channel in range(
2201. 1, 17), "Elphy format supports only 16 tag channels for tag_mode == 3"
  2202. smp_period = block.ep_block.dX
  2203. smp_freq = 1.0 / smp_period
  2204. if tag_mode != 3:
  2205. ch = self.get_channel_for_tags(episode)
  2206. n_samples = self.n_samples(episode, ch)
2207. t_stop = (n_samples - 1) * smp_period
  2208. else:
  2209. # get the max of n_samples multiplied by the sampling
  2210. # period done on every analog channels in order to avoid
  2211. # the selection of a channel without concrete signals
  2212. t_max = list()
  2213. for ch in self.analog_index(episode):
  2214. n_samples = self.n_samples(episode, ch)
  2215. factors = self.x_scale_factors(episode, ch)
  2216. chtime = n_samples * factors.delta
  2217. t_max.append(chtime)
  2218. time_max = max(t_max)
2219. # since t_stop = (n_samples_tag - 1) * dX_tag
2220. # and time_max = n_samples_tag * dX_tag,
2221. # the duration follows as
  2222. t_stop = time_max - smp_period
  2223. return ElphyTag(self, episode, tag_channel, x_unit, smp_freq, 0, t_stop)
  2224. else:
  2225. return None
  2226. def get_event(self, ep, ch, marked_ks):
  2227. """
  2228. Return a :class:`ElphyEvent` which is a
  2229. descriptor of the specified event channel.
  2230. """
  2231. assert ep in range(1, self.n_episodes + 1)
2232. assert ch in range(1, self.n_channels(ep) + 1)
  2233. # find the event channel number
  2234. evt_channel = np.where(marked_ks == -1)[0][0]
  2235. assert evt_channel in range(1, self.n_events(ep) + 1)
  2236. block = self.episode_block(ep)
  2237. ep_blocks = self.get_blocks_stored_in_episode(ep)
  2238. evt_blocks = [k for k in ep_blocks if k.identifier == 'REVT']
  2239. n_events = np.sum([k.n_events[evt_channel - 1] for k in evt_blocks], dtype=int)
  2240. x_unit = block.ep_block.x_unit
  2241. return ElphyEvent(self, ep, evt_channel, x_unit, n_events, ch_number=ch)
  2242. def load_encoded_events(self, episode, evt_channel, identifier):
  2243. """
2244. Return times stored as 4-byte integers
  2245. in the specified event channel.
  2246. """
  2247. data_blocks = self.group_blocks_of_type(episode, identifier)
  2248. ep_blocks = self.get_blocks_stored_in_episode(episode)
  2249. evt_blocks = [k for k in ep_blocks if k.identifier == identifier]
  2250. # compute events on each channel
  2251. n_events = np.sum([k.n_events for k in evt_blocks], dtype=int, axis=0)
  2252. pre_events = np.sum(n_events[0:evt_channel - 1], dtype=int)
  2253. start = pre_events
  2254. end = start + n_events[evt_channel - 1]
  2255. expected_size = 4 * np.sum(n_events, dtype=int)
  2256. return self.load_bytes(data_blocks, dtype='<i4', start=start, end=end,
  2257. expected_size=expected_size)
  2258. def load_encoded_spikes(self, episode, evt_channel, identifier):
  2259. """
2260. Return times stored as 4-byte integers
  2261. in the specified spike channel.
2262. NB: it is meant for Blackrock-type data, which carries an additional byte for each event
2263. time as a spike sorting label.
2264. These additional bytes are appended after the times.
  2265. """
  2266. # to load the requested spikes for the specified episode and event channel:
  2267. # get all the elphy blocks having as identifier 'RSPK' (or whatever)
  2268. all_rspk_blocks = [k for k in self.blocks if k.identifier == identifier]
  2269. rspk_block = all_rspk_blocks[episode - 1]
  2270. # RDATA(h?dI) REVT(NbVeV:I, NbEv:256I ... spike data are 4byte integers
  2271. rspk_header = 4 * (rspk_block.size - rspk_block.data_size - 2 + len(rspk_block.n_events))
  2272. pre_events = np.sum(rspk_block.n_events[0:evt_channel - 1], dtype=int, axis=0)
2273. # the real start is after the header, the preceding events (4 bytes each) and the preceding
2274. # labels (1 byte each)
  2275. start = rspk_header + (4 * pre_events) + pre_events
  2276. end = start + 4 * rspk_block.n_events[evt_channel - 1]
  2277. raw = self.load_bytes([rspk_block], dtype='<i1', start=start,
  2278. end=end, expected_size=rspk_block.size)
  2279. # re-encoding after reading byte by byte
  2280. res = np.frombuffer(raw[0:(4 * rspk_block.n_events[evt_channel - 1])], dtype='<i4')
  2281. res.sort() # sometimes timings are not sorted
  2282. # print "load_encoded_data() - spikes:",res
  2283. return res
  2284. def get_episode_name(self, episode):
  2285. episode_name = "episode %s" % episode
  2286. names = [k for k in self.blocks if k.identifier == 'COM']
  2287. if len(names) > 0:
  2288. name = names[episode - 1]
  2289. start = name.size + 1 - name.data_size + 1
  2290. end = name.end - name.start + 1
  2291. chars = self.load_bytes([name], dtype='uint8', start=start,
  2292. end=end, expected_size=name.size).tolist()
  2293. # print "chars[%s:%s]: %s" % (start,end,chars)
  2294. episode_name = ''.join([chr(k) for k in chars])
  2295. return episode_name
  2296. def get_event_data(self, episode, evt_channel):
  2297. """
  2298. Return times contained in the specified event channel.
  2299. This function is triggered when the 'times' property of
  2300. an :class:`ElphyEvent` descriptor instance is accessed.
  2301. """
  2302. times = self.load_encoded_events(episode, evt_channel, "REVT")
  2303. block = self.episode_block(episode)
  2304. return times * block.ep_block.dX / len(block.ks_block.k_sampling)
  2305. def get_spiketrain(self, episode, electrode_id):
  2306. """
  2307. Return a :class:`Spike` which is a
  2308. descriptor of the specified spike channel.
  2309. """
  2310. assert episode in range(1, self.n_episodes + 1)
  2311. assert electrode_id in range(1, self.n_spiketrains(episode) + 1)
  2312. # get some properties stored in the episode sub-block
  2313. block = self.episode_block(episode)
  2314. x_unit = block.ep_block.x_unit
  2315. x_unit_wf = getattr(block.ep_block, 'x_unit_wf', None)
  2316. y_unit_wf = getattr(block.ep_block, 'y_unit_wf', None)
  2317. # number of spikes in the entire episode
  2318. spk_blocks = [k for k in self.blocks if k.identifier == 'RSPK']
  2319. n_events = np.sum([k.n_events[electrode_id - 1] for k in spk_blocks], dtype=int)
  2320. # number of samples in a waveform
  2321. wf_sampling_frequency = 1.0 / block.ep_block.dX
  2322. wf_blocks = [k for k in self.blocks if k.identifier == 'RspkWave']
  2323. if wf_blocks:
  2324. wf_samples = wf_blocks[0].wavelength
  2325. t_start = wf_blocks[0].pre_trigger * block.ep_block.dX
  2326. else:
  2327. wf_samples = 0
  2328. t_start = 0
  2329. return ElphySpikeTrain(self, episode, electrode_id, x_unit, n_events,
  2330. wf_sampling_frequency, wf_samples, x_unit_wf, y_unit_wf, t_start)
    def get_spiketrain_data(self, episode, electrode_id):
        """
        Return times contained in the specified spike channel.
        This function is triggered when the 'times' property of
        a :class:`Spike` descriptor instance is accessed.
        NB : the 'RSPK' block is not actually identical to the 'EVT' one,
        because the unit labels relative to the times are stored directly after
        the event times, 1 byte for each. This function doesn't return these
        labels, but they can be retrieved from the 'RspkWave' block with
        the get_waveform_data() function.
        """
        block = self.episode_block(episode)
        times = self.load_encoded_spikes(episode, electrode_id, "RSPK")
        return times * block.ep_block.dX
  2345. def load_encoded_waveforms(self, episode, electrode_id):
  2346. """
  2347. Return times on which waveforms are defined
  2348. and a numpy recarray containing all the data
  2349. stored in the RspkWave block.
  2350. """
  2351. # load data corresponding to the RspkWave block
  2352. identifier = "RspkWave"
  2353. data_blocks = self.group_blocks_of_type(episode, identifier)
  2354. databytes = self.load_bytes(data_blocks)
  2355. # select only data corresponding
  2356. # to the specified spk_channel
  2357. ep_blocks = self.get_blocks_stored_in_episode(episode)
  2358. wf_blocks = [k for k in ep_blocks if k.identifier == identifier]
  2359. wf_samples = wf_blocks[0].wavelength
  2360. events = np.sum([k.n_spikes for k in wf_blocks], dtype=int, axis=0)
  2361. n_events = events[electrode_id - 1]
  2362. pre_events = np.sum(events[0:electrode_id - 1], dtype=int)
  2363. start = pre_events
  2364. end = start + n_events
        # data must be reshaped before
        dtype = [
            # the time of the spike arrival
            ('elphy_time', 'u4', (1,)),
            ('device_time', 'u4', (1,)),
            # the identifier of the electrode;
            # it would also carry the 'trodalness',
            # but tetrode devices are not
            # implemented in Elphy
            ('channel_id', 'u2', (1,)),
            # the 'category' of the waveform
            ('unit_id', 'u1', (1,)),
            # not used
            ('dummy', 'u1', (13,)),
            # samples of the waveform
            ('waveform', 'i2', (wf_samples,))
        ]
  2382. x_start = wf_blocks[0].pre_trigger
  2383. x_stop = wf_samples - x_start
  2384. return np.arange(-x_start, x_stop), np.frombuffer(databytes, dtype=dtype)[start:end]
  2385. def get_waveform_data(self, episode, electrode_id):
  2386. """
  2387. Return waveforms corresponding to the specified
  2388. spike channel. This function is triggered when the
  2389. ``waveforms`` property of an :class:`Spike` descriptor
  2390. instance is accessed.
  2391. """
  2392. block = self.episode_block(episode)
  2393. times, databytes = self.load_encoded_waveforms(episode, electrode_id)
  2394. n_events, = databytes.shape
  2395. wf_samples = databytes['waveform'].shape[1]
  2396. dtype = [
  2397. ('time', float),
  2398. ('electrode_id', int),
  2399. ('unit_id', int),
  2400. ('waveform', float, (wf_samples, 2))
  2401. ]
  2402. data = np.empty(n_events, dtype=dtype)
  2403. data['electrode_id'] = databytes['channel_id'][:, 0]
  2404. data['unit_id'] = databytes['unit_id'][:, 0]
  2405. data['time'] = databytes['elphy_time'][:, 0] * block.ep_block.dX
  2406. data['waveform'][:, :, 0] = times * block.ep_block.dX
  2407. data['waveform'][:, :, 1] = databytes['waveform'] * \
  2408. block.ep_block.dY_wf + block.ep_block.Y0_wf
  2409. return data
    def get_rspk_data(self, spk_channel):
        """
        Return times stored as 4-byte integers
        in the specified spike channel.
        """
        evt_blocks = self.get_blocks_of_type('RSPK')
        # compute events on each channel
        n_events = np.sum([k.n_events for k in evt_blocks], dtype=int, axis=0)
        # sum of events on the channels preceding spk_channel (0-based here)
        pre_events = np.sum(n_events[0:spk_channel], dtype=int)
        start = pre_events + (7 + len(n_events))  # rspk header
        end = start + n_events[spk_channel]
        expected_size = 4 * np.sum(n_events, dtype=int)  # constant
        return self.load_bytes(evt_blocks, dtype='<i4', start=start, end=end,
                               expected_size=expected_size)
  2425. # ---------------------------------------------------------
  2426. # factories.py
  2427. class LayoutFactory(object):
  2428. """
  2429. Generate base elements composing the layout of a file.
  2430. """
  2431. def __init__(self, elphy_file):
  2432. self.elphy_file = elphy_file
  2433. self.pattern = r"\d{4}(\d+|\D)\D"
  2434. self.block_subclasses = dict()
  2435. @property
  2436. def file(self):
  2437. return self.elphy_file.file
  2438. def create_layout(self):
  2439. """
  2440. Return the actual :class:`ElphyLayout` subclass
  2441. instance used in an :class:`ElphyFile` object.
  2442. """
  2443. raise Exception('must be overloaded in a subclass')
  2444. def create_header(self, layout):
  2445. """
  2446. Return the actual :class:`Header` instance used
  2447. in an :class:`ElphyLayout` subclass object.
  2448. """
  2449. raise Exception('must be overloaded in a subclass')
  2450. def create_block(self, layout):
  2451. """
  2452. Return a :class:`Block` instance composing
  2453. the :class:`ElphyLayout` subclass instance.
  2454. """
  2455. raise Exception('must be overloaded in a subclass')
  2456. def create_sub_block(self, block, sub_offset):
  2457. """
  2458. Return a set of sub-blocks stored
  2459. in DAC2 objects format files.
  2460. """
  2461. self.file.seek(sub_offset)
  2462. sub_ident_size = read_from_char(self.file, 'B')
  2463. sub_identifier, = struct.unpack('<%ss' % sub_ident_size, self.file.read(sub_ident_size))
  2464. sub_data_size = read_from_char(self.file, 'H')
  2465. sub_data_offset = sub_offset + sub_ident_size + 3
  2466. size_format = "H"
  2467. if sub_data_size == 0xFFFF:
  2468. _ch = 'l'
  2469. sub_data_size = read_from_char(self.file, _ch)
  2470. size_format += "+%s" % (_ch)
  2471. sub_data_offset += 4
  2472. sub_size = len(sub_identifier) + 1 + type_dict[size_format] + sub_data_size
  2473. if sub_identifier == 'Ep':
  2474. block_type = DAC2EpSubBlock
  2475. elif sub_identifier == 'Adc':
  2476. block_type = DAC2AdcSubBlock
  2477. elif sub_identifier == 'Ksamp':
  2478. block_type = DAC2KSampSubBlock
  2479. elif sub_identifier == 'Ktype':
  2480. block_type = DAC2KTypeSubBlock
  2481. elif sub_identifier == 'USR':
  2482. block_type = self.select_file_info_subclass()
  2483. else:
  2484. block_type = ElphyBlock
  2485. block = block_type(block.layout, sub_identifier, sub_offset, sub_size,
  2486. size_format=size_format, parent_block=block)
  2487. self.file.seek(self.file.tell() + sub_data_size)
  2488. return block
  2489. def create_episode(self, block):
  2490. raise Exception('must be overloaded in a subclass')
  2491. def create_channel(self, block):
  2492. raise Exception('must be overloaded in a subclass')
    def is_multistim(self, path):
        """
        Return a boolean telling whether the
        specified file is a multistim one.
        """
        match = re.search(self.pattern, path)
        return hasattr(match, 'end') and path[match.end() - 1] in ['m', 'M']
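    # A hedged illustration of the check above (the file names are hypothetical):
    # with pattern r"\d{4}(\d+|\D)\D", a name such as '00012m.DAT' matches with 'm'
    # as the last matched character, so it is treated as a multistim file, whereas
    # '00012c.DAT' matches but ends on 'c' and is treated as a classic file.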
    def select_file_info_subclass(self):
        """
        Detect the type of a file from its nomenclature
        and return the corresponding :class:`ClassicFileInfo` or
        :class:`MultistimFileInfo` class. Useful to transparently
        access user file info stored in an Elphy file.
        """
        if not self.is_multistim(self.file.name):
            return ClassicFileInfo
        else:
            return MultistimFileInfo
  2511. def select_block_subclass(self, identifier):
  2512. return self.block_subclasses.get(identifier, ElphyBlock)
class Acquis1Factory(LayoutFactory):
    """
    Subclass of :class:`LayoutFactory` used to
    generate the base elements composing the layout
    of the Acquis1 file format.
    """
  2519. def __init__(self, elphy_file):
  2520. super(Acquis1Factory, self).__init__(elphy_file)
  2521. self.file.seek(16)
  2522. self.data_offset = read_from_char(self.file, 'h')
  2523. self.file.seek(0)
  2524. # the set of interesting blocks useful
  2525. # to retrieve data stored in a file
  2526. self.block_subclasses = {
  2527. "USER INFO": self.select_file_info_subclass()
  2528. }
  2529. def create_layout(self):
  2530. return Acquis1Layout(self.elphy_file, self.data_offset)
  2531. def create_header(self, layout):
  2532. return Acquis1Header(layout)
  2533. def create_block(self, layout, offset):
  2534. self.file.seek(offset)
  2535. ident_size, identifier = struct.unpack('<B15s', self.file.read(16))
  2536. identifier = identifier[0:ident_size]
  2537. size = read_from_char(self.file, 'h')
  2538. block_type = self.select_block_subclass(identifier)
  2539. block = block_type(layout, identifier, offset, size, fixed_length=15, size_format='h')
  2540. self.file.seek(0)
  2541. return block
class DAC2GSFactory(LayoutFactory):
    """
    Subclass of :class:`LayoutFactory` used to
    generate the base elements composing the layout
    of the DAC2/GS/2000 file format.
    """
  2548. def __init__(self, elphy_file):
  2549. super(DAC2GSFactory, self).__init__(elphy_file)
  2550. self.file.seek(16)
  2551. self.data_offset = read_from_char(self.file, 'i')
  2552. self.file.seek(0)
  2553. # the set of interesting blocks useful
  2554. # to retrieve data stored in a file
  2555. self.block_subclasses = {
  2556. "USER INFO": self.select_file_info_subclass(),
  2557. "DAC2SEQ": DAC2GSEpisodeBlock,
  2558. 'MAIN': DAC2GSMainBlock,
  2559. }
  2560. def create_layout(self):
  2561. return DAC2GSLayout(self.elphy_file, self.data_offset)
  2562. def create_header(self, layout):
  2563. return DAC2GSHeader(layout)
  2564. def create_block(self, layout, offset):
  2565. self.file.seek(offset)
  2566. ident_size, identifier = struct.unpack('<B15s', self.file.read(16))
        # block title size is 7 or 15 bytes
        # 7 is for sequence blocks
  2569. if identifier.startswith('DAC2SEQ'):
  2570. self.file.seek(self.file.tell() - 8)
  2571. length = 7
  2572. else:
  2573. length = 15
  2574. identifier = identifier[0:ident_size]
  2575. size = read_from_char(self.file, 'i')
  2576. block_type = self.select_block_subclass(identifier)
  2577. block = block_type(layout, identifier, offset, size, fixed_length=length, size_format='i')
  2578. self.file.seek(0)
  2579. return block
class DAC2Factory(LayoutFactory):
    """
    Subclass of :class:`LayoutFactory` used to
    generate the base elements composing the layout
    of the DAC2 objects file format.
    """
  2586. def __init__(self, elphy_file):
  2587. super(DAC2Factory, self).__init__(elphy_file)
  2588. # the set of interesting blocks useful
  2589. # to retrieve data stored in a file
  2590. self.block_subclasses = {
  2591. "B_Ep": DAC2EpisodeBlock,
  2592. "RDATA": DAC2RDataBlock,
  2593. "RCyberTag": DAC2CyberTagBlock,
  2594. "REVT": DAC2EventBlock,
  2595. "RSPK": DAC2SpikeBlock,
  2596. "RspkWave": DAC2WaveFormBlock
  2597. }
  2598. def create_layout(self):
  2599. return DAC2Layout(self.elphy_file)
  2600. def create_header(self, layout):
  2601. return DAC2Header(layout)
  2602. def create_block(self, layout, offset):
  2603. self.file.seek(offset)
  2604. size = read_from_char(self.file, 'l')
  2605. ident_size = read_from_char(self.file, 'B')
  2606. identifier, = struct.unpack('<%ss' % ident_size, self.file.read(ident_size))
  2607. block_type = self.select_block_subclass(identifier)
  2608. block = block_type(layout, identifier, offset, size, size_format='l')
  2609. self.file.seek(0)
  2610. return block
  2611. # caching all available layout factories
  2612. factories = {
  2613. "ACQUIS1/GS/1991": Acquis1Factory,
  2614. "DAC2/GS/2000": DAC2GSFactory,
  2615. "DAC2 objects": DAC2Factory
  2616. }
# --------------------------------------------------------
# ELPHY FILE
"""
Classes useful to retrieve data from the
three major Elphy formats, i.e. Acquis1, DAC2/GS/2000 and DAC2 objects.
The :class:`ElphyFile` class gives access to raw data and to the user info
that stores protocol metadata. Internally, it uses an :class:`ElphyLayout`
subclass to handle each kind of file format: :class:`Acquis1Layout`,
:class:`DAC2GSLayout` and :class:`DAC2Layout`.
These layouts decompose the file structure into several blocks of data, inheriting
from :class:`BaseBlock`, corresponding for example to the file header,
the user info, the raw data, or the episode and channel properties. Each subclass of
:class:`BaseBlock` maps to a file chunk and is responsible for storing the metadata
contained in that chunk. These metadata can also be used to reconstruct raw data.
Consequently, when data is requested from an :class:`ElphyLayout` by its
:class:`ElphyFile`, the layout iterates through its :class:`BaseBlock` objects to
retrieve the requested data.
NB : The reader is not able to read Acquis1 and DAC2/GS/2000 event channels.
"""
class ElphyFile(object):
    """
    A convenient class to read Elphy files.
    It acts like a file reader that wraps up a python
    file opened in 'rb' mode in order to retrieve
    raw data and protocol metadata directly from an Elphy file.
    ``path`` : the path of the elphy file.
    ``file`` : the python file object that iterates
    through the elphy file.
    ``file_size`` : the size of the elphy file on the
    hard disk drive.
    ``nomenclature`` : the label that identifies the
    kind of elphy format, i.e. 'Acquis1', 'DAC2/GS/2000',
    'DAC2 objects'.
    ``factory`` : the :class:`LayoutFactory` object which
    generates the base components of the elphy file layout.
    ``layout`` : the :class:`ElphyLayout` object which
    decomposes the file structure into several blocks of
    data (:class:`BaseBlock` objects). The :class:`ElphyFile`
    object sends requests to this layout, which iterates through
    these blocks before returning the requested data.
    ``protocol`` : the acquisition protocol which generated
    the file.
    ``version`` : the variant of the acquisition protocol.
    NB : An elphy file can store several kinds of data :
    (1) 'User defined' metadata which are stored in a block
    called 'USER INFO' ('Acquis1' and 'DAC2/GS/2000') or 'USR'
    ('DAC2 objects') of the ``layout``. They can be used for
    example to describe stimulation parameters.
    (2) Raw data acquired on separate analog channels. Data
    coming from each channel are multiplexed in blocks dedicated
    to raw data storage :
    - for the Acquis1 format, raw data are stored directly
    after the file header.
    - for DAC2/GS/2000, in continuous mode they are stored
    after all blocks composing the file, otherwise they are stored
    in 'DAC2SEQ' blocks.
    - for 'DAC2 objects' they are stored in 'RDATA' blocks.
    In continuous mode raw data can be spread over multiple
    'RDATA' blocks, whereas in episode mode there is a single
    'RDATA' block for each episode.
    These raw data are placed under the 'channels' node of a
    TDataFile object listed in Elphy's "Inspect" tool.
    (3) ElphyEvents dedicated to threshold detection in analog
    channels. ElphyEvents are only available for the 'DAC2 objects'
    format. For 'Acquis1' and 'DAC2/GS/2000' these events are
    in fact stored in another kind of file format called the
    'event' format, with the '.evt' extension, which is opened
    by Elphy at the same time as the '.dat' file. This 'event'
    format is not yet implemented because it seems that it
    was not really used.
    These events are also placed under the 'channels' node
    of a TDataFile object in Elphy's "Inspect" tool.
    (4) ElphyTags that appeared with the 'DAC2/GS/2000' release. They
    are also present in the 'DAC2 objects' format. Each tag occupies
    a channel called the 'tag' channel. Their encoding depends on the
    kind of acquisition card :
    - for 'digidata' cards (``tag_mode``=1), if tags are acquired,
    they are directly encoded in 2 (digidata 1322) or 4 (digidata 1200)
    significant bits of the 16-bit samples coming from an analog channel.
    In all cases only 2 bits encode the tag channels. The
    sample value itself can be encoded on 16, 14 or 12 bits and is
    retrieved by applying a right shift equal to ``tag_shift``.
    - for ITC cards (``tag_mode``=2), tags are transmitted by a channel
    fully dedicated to 'tag channels', providing 16-bit samples. In this
    case, each bit corresponds to a 'tag channel'.
    - for Blackrock/Cyberkinetics devices (``tag_mode``=3), tags are also
    transmitted by a channel fully dedicated to tags, but the difference is
    that only transitions are stored, in 'RCyberTag' blocks. This case is only
    available in the 'DAC2 objects' format.
    These tags are placed under the 'Vtags' node of a TDataFile
    object in Elphy's "Inspect" tool.
    (5) Spiketrains coming from an electrode of a Blackrock/Cyberkinetics
    multi-electrode device. These data are only available in the 'DAC2 objects'
    format.
    These spiketrains are placed under the 'Vspk' node of a TDataFile
    object in Elphy's "Inspect" tool.
    (6) Waveforms relative to each time of a spiketrain. These data are only
    available in the 'DAC2 objects' format. These waveforms are placed under the
    'Wspk' node of a TDataFile object in Elphy's "Inspect" tool.
    """
  2718. def __init__(self, file_path):
  2719. self.path = file_path
  2720. self.folder, self.filename = path.split(self.path)
  2721. self.file = None
  2722. self.file_size = None
  2723. self.nomenclature = None
  2724. self.factory = None
  2725. self.layout = None
  2726. # writing support
  2727. self.header_size = None
  2728. def __del__(self):
  2729. """
  2730. Trigger closing of the file.
  2731. """
  2732. self.close()
  2733. # super(ElphyFile, self).__del__()
    def open(self):
        """
        Set up the internal structure.
        NB : call this function before
        extracting data from a file.
        """
  2740. if self.file:
  2741. self.file.close()
  2742. try:
  2743. self.file = open(self.path, 'rb')
  2744. except Exception as e:
  2745. raise Exception("python couldn't open file %s : %s" % (self.path, e))
  2746. self.file_size = path.getsize(self.file.name)
  2747. self.creation_date = datetime.fromtimestamp(path.getctime(self.file.name))
  2748. self.modification_date = datetime.fromtimestamp(path.getmtime(self.file.name))
  2749. self.nomenclature = self.get_nomenclature()
  2750. self.factory = self.get_factory()
  2751. self.layout = self.create_layout()
  2752. def close(self):
  2753. """
  2754. Close the file.
  2755. """
  2756. if self.file:
  2757. self.file.close()
  2758. def get_nomenclature(self):
  2759. """
  2760. Return the title of the file header
  2761. giving the actual file format. This
  2762. title is encoded as a pascal string
  2763. containing 15 characters and stored
  2764. as 16 bytes of binary data.
  2765. """
  2766. self.file.seek(0)
  2767. length, title = struct.unpack('<B15s', self.file.read(16))
  2768. self.file.seek(0)
  2769. title = title[0:length]
  2770. if hasattr(title, 'decode'):
  2771. title = title.decode()
  2772. if title not in factories:
  2773. title = "format is not implemented ('%s' not in %s)" % (title, str(factories.keys()))
  2774. return title
  2775. def set_nomenclature(self):
  2776. """
  2777. As in get_nomenclature, but set the title of the file header
  2778. in the file, encoded as a pascal string containing
  2779. 15 characters and stored as 16 bytes of binary data.
  2780. """
  2781. self.file.seek(0)
  2782. title = 'DAC2 objects'
  2783. st = struct.Struct('<B15sH')
  2784. header_rec = [len(title), title, 18] # constant header
  2785. header_chr = st.pack(*header_rec)
  2786. self.header_size = len(header_chr)
  2787. self.file.write(header_chr)
  2788. def get_factory(self):
  2789. """
  2790. Return a subclass of :class:`LayoutFactory`
  2791. useful to build the file layout depending
  2792. on header title.
  2793. """
  2794. if hasattr(self.nomenclature, 'decode'):
  2795. self.nomenclature = self.nomenclature.decode()
  2796. return factories[self.nomenclature](self)
    def write(self, data):
        """
        Assume the blocks are already filled.
        This method can write several types of block (B_Ep, RDATA, ...)
        and sub-block (Adc, Ksamp, Ktype, dataRecord, ...)
        arranged in the following shape:
        B_Ep
        |_ Ep
        |_ Adc
        |_ Adc
        |_ ...
        |_ Ktype
        RDATA
        |_ dataRecord+data
        """
  2812. # close if open and reopen for writing
  2813. if self.file:
  2814. self.file.close()
  2815. try:
  2816. self.file = open(self.path, 'wb')
  2817. except Exception as e:
  2818. raise Exception("python couldn't open file %s : %s" % (self.path, e))
  2819. self.file_size = 0
  2820. self.creation_date = datetime.now()
  2821. self.modification_date = datetime.now()
  2822. self.set_nomenclature()
  2823. # then call ElphyFile writing routines to write the serialized string
  2824. self.file.write(data) # actual writing
  2825. # close file
  2826. self.close()
    def create_layout(self):
        """
        Build the :class:`Layout` object corresponding
        to the file format and configure its properties
        and those of its blocks and sub-blocks.
        NB : this function must be called before any kind
        of request on the file, because it also sets up
        the internal properties of the :class:`ElphyLayout`
        object and of some :class:`BaseBlock` objects.
        Consequently, requesting data from the file before
        calling it is very likely to give bad results.
        """
  2839. # create the layout
  2840. layout = self.factory.create_layout()
  2841. # create the header block and
  2842. # add it to the list of blocks
  2843. header = self.factory.create_header(layout)
  2844. layout.add_block(header)
  2845. # set the position of the cursor
  2846. # in order to be after the header
  2847. # block and then compute its last
  2848. # valid position to know when stop
  2849. # the iteration through the file
  2850. offset = header.size
  2851. offset_stop = layout.get_blocks_end()
  2852. # in continuous mode DAC2/GS/2000 raw data are not stored
  2853. # into several DAC2SEQ blocks, they are stored after all
  2854. # available blocks, that's why it is necessary to limit the
  2855. # loop to data_offset when it is a DAC2/GS/2000 format
  2856. is_continuous = False
  2857. detect_continuous = False
  2858. detect_main = False
  2859. while (offset < offset_stop) and not (is_continuous and (offset >= layout.data_offset)):
  2860. block = self.factory.create_block(layout, offset)
  2861. # create the sub blocks if it is DAC2 objects format
  2862. # this is only done for B_Ep and B_Finfo blocks for
  2863. # DAC2 objects format, maybe it could be useful to
  2864. # spread this to other block types.
  2865. # if isinstance(header, DAC2Header) and (block.identifier in ['B_Ep']) :
  2866. if isinstance(header, DAC2Header) and (block.identifier in ['B_Ep', 'B_Finfo']):
  2867. sub_offset = block.data_offset
  2868. while sub_offset < block.start + block.size:
  2869. sub_block = self.factory.create_sub_block(block, sub_offset)
  2870. block.add_sub_block(sub_block)
  2871. sub_offset += sub_block.size
  2872. # set up some properties of some DAC2Layout sub-blocks
  2873. if isinstance(sub_block, (
  2874. DAC2EpSubBlock, DAC2AdcSubBlock, DAC2KSampSubBlock, DAC2KTypeSubBlock)):
  2875. block.set_episode_block()
  2876. block.set_channel_block()
  2877. block.set_sub_sampling_block()
  2878. block.set_sample_size_block()
  2879. # SpikeTrain
  2880. # if isinstance(header, DAC2Header) and (block.identifier in ['RSPK']) :
  2881. # print "\nElphyFile.create_layout() - RSPK"
  2882. # print "ElphyFile.create_layout() - n_events",block.n_events
  2883. # print "ElphyFile.create_layout() - n_evt_channels",block.n_evt_channels
  2884. layout.add_block(block)
  2885. offset += block.size
  2886. # set up as soon as possible the shortcut
  2887. # to the main block of a DAC2GSLayout
  2888. if (not detect_main and isinstance(layout, DAC2GSLayout)
  2889. and isinstance(block, DAC2GSMainBlock)):
  2890. layout.set_main_block()
  2891. detect_main = True
  2892. # detect if the file is continuous when
  2893. # the 'MAIN' block has been parsed
  2894. if not detect_continuous:
  2895. is_continuous = isinstance(header, DAC2GSHeader) and layout.is_continuous()
  2896. # set up the shortcut to blocks corresponding
  2897. # to episodes, only available for DAC2Layout
  2898. # and also DAC2GSLayout if not continuous
  2899. if isinstance(layout, DAC2Layout) or (
  2900. isinstance(layout, DAC2GSLayout) and not layout.is_continuous()):
  2901. layout.set_episode_blocks()
  2902. layout.set_data_blocks()
  2903. # finally set up the user info block of the layout
  2904. layout.set_info_block()
  2905. self.file.seek(0)
  2906. return layout
  2907. def is_continuous(self):
  2908. return self.layout.is_continuous()
  2909. @property
  2910. def n_episodes(self):
  2911. """
  2912. Return the number of recording sequences.
  2913. """
  2914. return self.layout.n_episodes
  2915. def n_channels(self, episode):
  2916. """
  2917. Return the number of recording
  2918. channels involved in data acquisition
  2919. and relative to the specified episode :
  2920. ``episode`` : the recording sequence identifier.
  2921. """
  2922. return self.layout.n_channels(episode)
  2923. def n_tags(self, episode):
  2924. """
  2925. Return the number of tag channels
  2926. relative to the specified episode :
  2927. ``episode`` : the recording sequence identifier.
  2928. """
  2929. return self.layout.n_tags(episode)
  2930. def n_events(self, episode):
  2931. """
  2932. Return the number of event channels
  2933. relative to the specified episode :
  2934. ``episode`` : the recording sequence identifier.
  2935. """
  2936. return self.layout.n_events(episode)
    def n_spiketrains(self, episode):
        """
        Return the number of spike channels
        relative to the specified episode :
        ``episode`` : the recording sequence identifier.
        """
        return self.layout.n_spiketrains(episode)
    def n_waveforms(self, episode):
        """
        Return the number of waveform channels
        relative to the specified episode :
        ``episode`` : the recording sequence identifier.
        """
        return self.layout.n_waveforms(episode)
    def get_signal(self, episode, channel):
        """
        Return the signal or event descriptor relative
        to the specified episode and channel :
        ``episode`` : the recording sequence identifier.
        ``channel`` : the analog channel identifier.
        NB : for the 'DAC2 objects' format, it can
        also be used to retrieve events.
        """
        return self.layout.get_signal(episode, channel)
    def get_tag(self, episode, tag_channel):
        """
        Return the tag descriptor relative to
        the specified episode and tag channel :
        ``episode`` : the recording sequence identifier.
        ``tag_channel`` : the tag channel identifier.
        NB : there aren't any tag channels in the
        'Acquis1' format. ElphyTag channels appeared
        with the 'DAC2/GS/2000' release and are
        also present in the 'DAC2 objects' format.
        """
        return self.layout.get_tag(episode, tag_channel)
    def get_event(self, episode, evt_channel):
        """
        Return the event descriptor relative to the
        specified episode and event channel :
        ``episode`` : the recording sequence identifier.
        ``evt_channel`` : the event channel identifier.
        """
        return self.layout.get_event(episode, evt_channel)
  2979. def get_spiketrain(self, episode, electrode_id):
  2980. """
  2981. Return the spiketrain relative to the
  2982. specified episode and electrode_id.
  2983. ``episode`` : the recording sequence identifier.
  2984. ``electrode_id`` : the identifier of the electrode providing the spiketrain.
  2985. NB : Available only for 'DAC2 objects' format.
  2986. This descriptor can return the times of a spiketrain
  2987. and waveforms relative to each of these times.
  2988. """
  2989. return self.layout.get_spiketrain(episode, electrode_id)
  2990. @property
  2991. def comments(self):
  2992. raise NotImplementedError()
  2993. def get_user_file_info(self):
  2994. """
  2995. Return user defined file metadata.
  2996. """
  2997. if not self.layout.info_block:
  2998. return dict()
  2999. else:
  3000. return self.layout.info_block.get_user_file_info()
    def episode_info(self, ep_number):
        # NB: not a property, since it takes an episode number
        raise NotImplementedError()
  3004. def get_signals(self):
  3005. """
  3006. Get all available analog or event channels stored into an Elphy file.
  3007. """
  3008. signals = list()
  3009. for ep in range(1, self.n_episodes + 1):
  3010. for ch in range(1, self.n_channels(ep) + 1):
  3011. signal = self.get_signal(ep, ch)
  3012. signals.append(signal)
  3013. return signals
  3014. def get_tags(self):
  3015. """
  3016. Get all available tag channels stored into an Elphy file.
  3017. """
  3018. tags = list()
  3019. for ep in range(1, self.n_episodes + 1):
  3020. for tg in range(1, self.n_tags(ep) + 1):
  3021. tag = self.get_tag(ep, tg)
  3022. tags.append(tag)
  3023. return tags
  3024. def get_spiketrains(self):
  3025. """
  3026. Get all available spiketrains stored into an Elphy file.
  3027. """
  3028. spiketrains = list()
  3029. for ep in range(1, self.n_episodes + 1):
  3030. for ch in range(1, self.n_spiketrains(ep) + 1):
  3031. spiketrain = self.get_spiketrain(ep, ch)
  3032. spiketrains.append(spiketrain)
  3033. return spiketrains
    def get_rspk_spiketrains(self):
        """
        Get all available spiketrains stored in the 'RSPK' blocks of an Elphy file.
        """
  3038. spiketrains = list()
  3039. spk_blocks = self.layout.get_blocks_of_type('RSPK')
  3040. for bl in spk_blocks:
  3041. # print "ElphyFile.get_spiketrains() - identifier:",bl.identifier
  3042. for ch in range(0, bl.n_evt_channels):
  3043. spiketrain = self.layout.get_rspk_data(ch)
  3044. spiketrains.append(spiketrain)
  3045. return spiketrains
  3046. def get_names(self):
  3047. com_blocks = list()
  3048. com_blocks = self.layout.get_blocks_of_type('COM')
  3049. return com_blocks
  3050. # --------------------------------------------------------
class ElphyIO(BaseIO):
    """
    Class for reading from and writing to an Elphy file.
    It enables reading:
    - :class:`Block`
    - :class:`Segment`
    - :class:`ChannelIndex`
    - :class:`Event`
    - :class:`SpikeTrain`
    Usage:
        >>> from neo import io
        >>> r = io.ElphyIO(filename='ElphyExample.DAT')
        >>> bl = r.read_block()
        >>> seg = bl.segments[0]
        >>> print(seg.analogsignals)  # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
        >>> print(seg.spiketrains)    # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
        >>> print(seg.events)         # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
        >>> anasig = r.read_analogsignal()
        >>> print(anasig._data_description)
        >>> bl = Block()
        >>> # create segments and their contents, then append them to bl
        >>> r.write_block(bl)
    """
    is_readable = True    # This class can read data
    is_writable = False   # Writing is not currently enabled (see writeable_objects)
    # This class is able to directly or indirectly handle the following objects
    supported_objects = [Block, Segment, AnalogSignal, SpikeTrain]
    # This class can return a Block
    readable_objects = [Block]
    # This class is not able to write objects
    writeable_objects = []
    has_header = False
    is_streameable = False
  3083. # This is for GUI stuff : a definition for parameters when reading.
  3084. # This dict should be keyed by object (`Block`). Each entry is a list
  3085. # of tuple. The first entry in each tuple is the parameter name. The
  3086. # second entry is a dict with keys 'value' (for default value),
  3087. # and 'label' (for a descriptive name).
  3088. # Note that if the highest-level object requires parameters,
  3089. # common_io_test will be skipped.
  3090. read_params = {
  3091. }
    # writing is not supported, so no GUI parameters for it either
  3093. write_params = {
  3094. }
  3095. name = 'Elphy IO'
  3096. extensions = ['DAT']
  3097. # mode can be 'file' or 'dir' or 'fake' or 'database'
  3098. mode = 'file'
  3099. # internal serialized representation of neo data
  3100. serialized = None
  3101. def __init__(self, filename=None):
  3102. """
  3103. Arguments:
  3104. filename : the filename to read
  3105. """
  3106. BaseIO.__init__(self)
  3107. self.filename = filename
  3108. self.elphy_file = ElphyFile(self.filename)
    def read_block(self, lazy=False):
  3110. """
  3111. Return :class:`Block`.
  3112. Parameters:
  3113. lazy : postpone actual reading of the file.
  3114. """
  3115. assert not lazy, 'Do not support lazy'
  3116. # basic
  3117. block = Block(name=None)
  3118. # get analog and tag channels
  3119. try:
  3120. self.elphy_file.open()
  3121. except Exception as e:
  3122. self.elphy_file.close()
  3123. raise Exception("cannot open file %s : %s" % (self.filename, e))
  3124. # create a segment containing all analog,
  3125. # tag and event channels for the episode
  3126. if self.elphy_file.n_episodes is None:
  3127. print("File '%s' appears to have no episodes" % (self.filename))
  3128. return block
  3129. for episode in range(1, self.elphy_file.n_episodes + 1):
  3130. segment = self.read_segment(episode)
  3131. segment.block = block
  3132. block.segments.append(segment)
  3133. # close file
  3134. self.elphy_file.close()
  3135. # result
  3136. return block
    def write_block(self, block):
        """
        Write a given Neo Block to an Elphy file, the structures being mapped,
        for example, as follows:
        Neo                       -> Elphy
        --------------------------------------------------------------
        Block                        File
          Segment                      Episode Block (B_Ep)
            AnalogSignalArray            Episode Descriptor (Ep + Adc + Ksamp + Ktype)
              multichannel                 RDATA (with a ChannelMask multiplexing channels)
              2D NumPy Array
            ...
            AnalogSignalArray
              AnalogSignal
              AnalogSignal
              ...
            ...
            SpikeTrain                   Event Block (RSPK)
            SpikeTrain
            ...
        Arguments:
            block : the block to be saved
        """
  3159. # Serialize Neo structure into Elphy file
  3160. # each analog signal will be serialized as elphy Episode Block (with its subblocks)
  3161. # then all spiketrains will be serialized into an Rspk Block (an Event Block with addons).
        # Serialize (and size) all Neo structures before writing them to file.
        # Since writing each Elphy block requires knowing its size in advance,
        # including that of its sub-blocks, the lowest-level structures must be
        # serialized first.
        # Iterate over block structures
  3167. elphy_limit = 256
  3168. All = ''
  3169. # print "\n\n--------------------------------------------\n"
  3170. # print "write_block() - n_segments:",len(block.segments)
  3171. for seg in block.segments:
  3172. analogsignals = 0 # init
  3173. nbchan = 0
  3174. nbpt = 0
  3175. chls = 0
  3176. Dxu = 1e-8 # 0.0000001
  3177. Rxu = 1e+8 # 10000000.0
  3178. X0uSpk = 0.0
  3179. CyberTime = 0.0
  3180. aa_units = []
  3181. NbEv = []
  3182. serialized_analog_data = ''
  3183. serialized_spike_data = ''
            # AnalogSignals
            # Neo signalarrays are 2D numpy arrays where each row is an array of samples
            # for a channel:
            #   signalarray A = [[  1,  2,  3,  4 ],
            #                    [  5,  6,  7,  8 ]]
            #   signalarray B = [[  9, 10, 11, 12 ],
            #                    [ 13, 14, 15, 16 ]]
            # Neo Segments can have more than one signalarray.
            # To be converted into Elphy analog channels they all need to be in a single
            # 2D array, not in several 2D arrays.
            # Concatenate all analogsignalarrays into one and then flatten it.
            # Elphy RDATA blocks contain Fortran-style samples:
            #   1, 5, 9, 13, 2, 6, 10, 14, 3, 7, 11, 15, 4, 8, 12, 16
            # (all channels for the first sample, then all channels for the second, ...)
  3197. # AnalogSignalArrays -> analogsignals
  3198. # get the first to have analogsignals with the right shape
  3199. # Annotations for analogsignals array come as a list of int being source ids
  3200. # here, put each source id on a separate dict entry in order to have a matching
  3201. # afterwards
  3202. idx = 0
  3203. annotations = dict()
  3204. # get all the others
  3205. # print "write_block() - n_analogsignals:",len(seg.analogsignals)
  3206. # print "write_block() - n_analogsignalarrays:",len(seg.analogsignalarrays)
  3207. for asigar in seg.analogsignalarrays:
  3208. idx, annotations = self.get_annotations_dict(
  3209. annotations, "analogsignal", asigar.annotations.items(), asigar.name, idx)
  3210. # array structure
  3211. _, chls = asigar.shape
  3212. # units
  3213. for _ in range(chls):
  3214. aa_units.append(asigar.units)
  3215. Dxu = asigar.sampling_period
  3216. Rxu = asigar.sampling_rate
  3217. if isinstance(analogsignals, np.ndarray):
  3218. analogsignals = np.hstack((analogsignals, asigar))
  3219. else:
  3220. analogsignals = asigar # first time
  3221. # collect and reshape all analogsignals
  3222. if isinstance(analogsignals, np.ndarray):
  3223. # transpose matrix since in Neo channels are column-wise while in Elphy are
  3224. # row-wise
  3225. analogsignals = analogsignals.T
  3226. # get dimensions
  3227. nbchan, nbpt = analogsignals.shape
  3228. # serialize AnalogSignal
  3229. analog_data_fmt = '<' + str(analogsignals.size) + 'f'
  3230. # serialized flattened numpy channels in 'F'ortran style
  3231. analog_data_64 = analogsignals.flatten('F')
  3232. # elphy normally uses float32 values (for performance reasons)
  3233. analog_data = np.array(analog_data_64, dtype=np.float32)
  3234. serialized_analog_data += struct.pack(analog_data_fmt, *analog_data)
  3235. # SpikeTrains
  3236. # Neo spiketrains are stored as a one-dimensional array of times
  3237. # [ 0.11, 1.23, 2.34, 3.45, 4.56, 5.67, 6.78, 7.89 ... ]
  3238. # These are converted into Elphy Rspk Block which will contain all of them
  3239. # RDATA + NbVeV:integer for the number of channels (spiketrains)
  3240. # + NbEv:integer[] for the number of event per channel
  3241. # followed by the actual arrays of integer containing spike times
  3242. # spiketrains = seg.spiketrains
  3243. # ... but consider elphy loading limitation:
  3244. NbVeV = len(seg.spiketrains)
  3245. # print "write_block() - n_spiketrains:",NbVeV
  3246. if len(seg.spiketrains) > elphy_limit:
  3247. NbVeV = elphy_limit
  3248. # serialize format
  3249. spiketrain_data_fmt = '<'
  3250. spiketrains = []
  3251. for idx, train in enumerate(seg.spiketrains[:NbVeV]):
  3252. # print "write_block() - train.size:", train.size,idx
  3253. # print "write_block() - train:", train
  3254. fake, annotations = self.get_annotations_dict(
  3255. annotations, "spiketrain", train.annotations.items(), '', idx)
  3256. # annotations.update( dict( [("spiketrain-"+str(idx),
  3257. # train.annotations['source_id'])] ) )
  3258. # print "write_block() - train[%s].annotation['source_id']:%s"
  3259. # "" % (idx,train.annotations['source_id'])
  3260. # total number of events format + blackrock sorting mark (0 for neo)
  3261. spiketrain_data_fmt += str(train.size) + "i" + str(train.size) + "B"
  3262. # get starting time
  3263. X0uSpk = train.t_start.item()
  3264. CyberTime = train.t_stop.item()
  3265. # count number of events per train
  3266. NbEv.append(train.size)
  3267. # multiply by sampling period
  3268. train = train * Rxu
  3269. # all flattened spike train
  3270. # blackrock acquisition card also adds a byte for each event to sort it
  3271. spiketrains.extend([spike.item() for spike in train] +
  3272. [0 for _ in range(train.size)])
  3273. # Annotations
  3274. # print annotations
  3275. # using DBrecord elphy block, they will be available as values in elphy environment
  3276. # separate keys and values in two separate serialized strings
  3277. ST_sub = ''
  3278. st_fmt = ''
  3279. st_data = []
  3280. BUF_sub = ''
  3281. serialized_ST_data = ''
  3282. serialized_BUF_data = ''
            for key in sorted(annotations.keys()):  # .iterkeys() is Python 2 only
  3284. # take all values, get their type and concatenate
  3285. fmt = ''
  3286. data = []
  3287. value = annotations[key]
  3288. if isinstance(value, (int, np.int32, np.int64)):
  3289. # elphy type 2
  3290. fmt = '<Bq'
  3291. data = [2, value]
  3292. elif type(value) == str:
  3293. # elphy type 4
  3294. str_len = len(value)
  3295. fmt = '<BI' + str(str_len) + 's'
  3296. data = [4, str_len, value]
  3297. else:
  3298. print("ElphyIO.write_block() - unknown annotation type: %s" % type(value))
  3299. continue
  3300. # last, serialization
  3301. # BUF values
  3302. serialized_BUF_data += struct.pack(fmt, *data)
  3303. # ST values
  3304. # take each key and concatenate using 'crlf'
  3305. st_fmt += str(len(key)) + 's2s'
  3306. st_data.extend([key, "\r\n"])
  3307. # ST keys
  3308. serialized_ST_data = struct.pack(st_fmt, *st_data)
  3309. # SpikeTrains
  3310. # serialized spike trains
  3311. serialized_spike_data += struct.pack(spiketrain_data_fmt, *spiketrains)
  3312. # ------------- Elphy Structures to be filled --------------
  3313. # 'Ep'
  3314. data_format = '<BiBB10sdd?BBddiddB10sB10sdI'
  3315. # setting values
  3316. uX = 'ms '
  3317. pc_time = datetime.now()
  3318. pc_time = pc_time.microsecond * 1000
  3319. data_values = [
  3320. nbchan, # nbchan : byte
  3321. nbpt, # nbpt : integer - nominal number of samples per channel
  3322. 0, # tpData : byte - not used
  3323. 10, # uX length
  3324. uX, # uX : string - time units
  3325. Dxu, # Dxu : double - sampling rate, scaling parameters on time axis
  3326. 0.0, # X0u : double - starting, scaling parameters on time axis
  3327. False, # continuous : boolean
  3328. 0, # TagMode : byte - 0: not a tag channel
  3329. 0, # TagShift : byte
  3330. Dxu, # DxuSpk : double
  3331. X0uSpk, # X0uSpk : double
  3332. NbVeV, # nbSpk : integer
  3333. 0.0, # DyuSpk : double
  3334. 0.0, # Y0uSpk : double
  3335. 10, # uX length
  3336. uX, # unitXSpk : string
  3337. 10, # uX length
  3338. ' ', # unitYSpk : string
  3339. CyberTime, # CyberTime : double
  3340. pc_time # PCtime : longword - time in milliseconds
  3341. ]
  3342. Ep_chr = self.get_serialized(data_format, data_values)
  3343. Ep_sub = self.get_serialized_subblock('Ep', Ep_chr)
  3344. # 'Adc'
  3345. # Then, one or more (nbchan) Analog/Digital Channel will be, having their fixed data
  3346. # format
  3347. data_format = "<B10sdd"
            # when Ep.tpdata is an integer type, Dyu and Y0u are scaling parameters
            # such that for an adc value j, the real value is y = Dyu*j + Y0u
  3350. Adc_chrl = ""
  3351. for dc in aa_units:
  3352. # create
  3353. Adc_chr = [] # init
  3354. Dyu, UnitY = '{}'.format(dc).split()
  3355. data_values = [
  3356. 10, # size
  3357. UnitY + ' ', # uY string : vertical units
  3358. float(Dyu), # Dyu double : scaling parameter
  3359. 0.0 # Y0u double : scaling parameter
  3360. ]
  3361. Adc_chr = self.get_serialized(data_format, data_values)
  3362. Adc_chrl += Adc_chr
  3363. Adc_sub = self.get_serialized_subblock('Adc', Adc_chrl)
  3364. # print "Adc size:",len(Adc_sub)
  3365. # 'Ksamp'
  3366. # subblock containing an array of nbchan bytes
  3367. # data_format = '<h...' # nbchan times Bytes
  3368. # data_values = [ 1, 1, ... ] # nbchan times 1
  3369. data_format = "<" + ("h" * nbchan)
  3370. data_values = [1 for _ in range(nbchan)]
  3371. Ksamp_chr = self.get_serialized(data_format, data_values)
  3372. Ksamp_sub = self.get_serialized_subblock('Ksamp', Ksamp_chr)
  3373. # print "Ksamp size: %s" % (len(Ksamp_sub))
  3374. # 'Ktype'
  3375. # subblock containing an array of nbchan bytes
  3376. # data_format = '<B...' # nbchan times Bytes
  3377. # data_values = [ 2, ... ] # nbchan times ctype
  3378. # Possible values are:
  3379. # 0: byte
  3380. # 1: short
  3381. # 2: smallint
  3382. # 3: word
  3383. # 4: longint
  3384. # 5: single
  3385. # 6: real48
  3386. # 7: double
  3387. # 8: extended DATA
  3388. # array of nbchan bytes specifying type of data forthcoming
  3389. ctype = 5 # single float
  3390. data_format = "<" + ("B" * nbchan)
  3391. data_values = [ctype for n in range(nbchan)]
  3392. Ktype_chr = self.get_serialized(data_format, data_values)
  3393. Ktype_sub = self.get_serialized_subblock('Ktype', Ktype_chr)
  3394. # print "Ktype size: %s" % (len(Ktype_sub))
  3395. # Episode data serialization:
  3396. # concatenate all its data strings under a block
  3397. Ep_data = Ep_sub + Adc_sub + Ksamp_sub + Ktype_sub
  3398. # print "\n---- Finishing:\nEp subs size: %s" % (len(Ep_data))
  3399. Ep_blk = self.get_serialized_block('B_Ep', Ep_data)
  3400. # print "B_Ep size: %s" % (len(Ep_blk))
  3401. # 'RDATA'
  3402. # It produces a two part (header+data) content coming from analog/digital inputs.
  3403. pctime = time()
  3404. data_format = "<h?dI"
  3405. data_values = [15, True, pctime, 0]
  3406. RDATA_chr = self.get_serialized(data_format, data_values, serialized_analog_data)
  3407. RDATA_blk = self.get_serialized_block('RDATA', RDATA_chr)
  3408. # print "RDATA size: %s" % (len(RDATA_blk))
  3409. # 'Rspk'
  3410. # like an REVT block + addons
  3411. # It starts with a RDATA header, after an integer with the number of events,
  3412. # then the events per channel and finally all the events one after the other
  3413. data_format = "<h?dII" + str(NbVeV) + "I"
  3414. data_values = [15, True, pctime, 0, NbVeV]
  3415. data_values.extend(NbEv)
  3416. Rspk_chr = self.get_serialized(data_format, data_values, serialized_spike_data)
  3417. Rspk_blk = self.get_serialized_block('RSPK', Rspk_chr)
  3418. # print "RSPK size: %s" % (len(Rspk_blk))
  3419. # 'DBrecord'
  3420. # like a block + subblocks
            # serialization
  3422. ST_sub = self.get_serialized_subblock('ST', serialized_ST_data)
  3423. # print "ST size: %s" % (len(ST_sub))
  3424. BUF_sub = self.get_serialized_subblock('BUF', serialized_BUF_data)
  3425. # print "BUF size: %s" % (len(BUF_sub))
  3426. annotations_data = ST_sub + BUF_sub
  3427. # data_format = "<h?dI"
  3428. # data_values = [ 15, True, pctime, 0 ]
  3429. # DBrec_chr = self.get_serialized( data_format, data_values, annotations_data )
  3430. DBrec_blk = self.get_serialized_block('DBrecord', annotations_data)
  3431. # print "DBrecord size: %s" % (len(DBrec_blk))
  3432. # 'COM'
  3433. # print "write_block() - segment name:", seg.name
  3434. # name of the file - NEO Segment name
  3435. data_format = '<h' + str(len(seg.name)) + 's'
  3436. data_values = [len(seg.name), seg.name]
  3437. SEG_COM_chr = self.get_serialized(data_format, data_values)
  3438. SEG_COM_blk = self.get_serialized_block('COM', SEG_COM_chr)
  3439. # Complete data serialization: concatenate all data strings
  3440. All += Ep_blk + RDATA_blk + Rspk_blk + DBrec_blk + SEG_COM_blk
  3441. # ElphyFile (open, write and close)
  3442. self.elphy_file.write(All)
  3443. def get_serialized(self, data_format, data_values, ext_data=''):
  3444. data_chr = struct.pack(data_format, *data_values)
  3445. return data_chr + ext_data
  3446. def get_serialized_block(self, ident, data):
  3447. """
  3448. Generic Block Header
  3449. This function (without needing a layout and the rest) creates a binary serialized version
  3450. of the block containing the format string and the actual data for the following
  3451. Elphy Block Header structure:
  3452. size: longint // 4-byte integer
  3453. ident: string[XXX]; // a Pascal variable-length string
  3454. data: array[1..YYY] of byte;
  3455. For example:
  3456. '<IB22s' followed by an array of bytes as specified
  3457. """
  3458. # endian 4byte ident
  3459. data_format = "<IB" + str(len(ident)) + "s"
  3460. data_size = 4 + 1 + len(ident) + len(data) # all: <IBs...data...
  3461. data_values = [data_size, len(ident), ident]
  3462. data_chr = struct.pack(data_format, *data_values)
  3463. return data_chr + data
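    # Illustrative use of the block-header layout described above (the identifier
    # and payload are made up for the example):
    #
    #   >>> import struct
    #   >>> ident, payload = b'RDATA', b'\x00' * 10
    #   >>> size = 4 + 1 + len(ident) + len(payload)        # <I + B + ident + data
    #   >>> header = struct.pack('<IB5s', size, len(ident), ident)
    #   >>> block = header + payload                        # 20 bytes in total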
  3464. def get_serialized_subblock(self, ident, data):
  3465. """
  3466. Generic Sub-Block Header
  3467. This function (without needing a layout and the rest) creates a binary serialized version
  3468. of the block containing the format string and the actual data for the following
  3469. Elphy Sub-Block Header structure:
  3470. id: string[XXX]; // a Pascal variable-length string
  3471. size1: word // 2-byte unsigned integer
  3472. data: array[1..YYY] of byte;
  3473. For example:
  3474. '<B22sH4522L' followed by an array of bytes as specified
  3475. """
  3476. data_size = len(data)
  3477. # endian size+string 2byte array of data_size bytes
  3478. data_format = "<B" + str(len(ident)) + "s" + "h"
  3479. data_values = [len(ident), ident, data_size]
  3480. data_chr = struct.pack(data_format, *data_values)
  3481. return data_chr + data
  3482. def get_annotations_dict(self, annotations, prefix, items, name='', idx=0):
  3483. """
  3484. Helper function to retrieve annotations in a dictionary to be serialized as Elphy DBrecord
  3485. """
  3486. for (key, value) in items:
  3487. # print "get_annotation_dict() - items[%s]" % (key)
  3488. if isinstance(value, (list, tuple, np.ndarray)):
  3489. for element in value:
  3490. annotations.update(
  3491. dict([(prefix + "-" + name + "-" + key + "-" + str(idx), element)]))
  3492. idx = idx + 1
  3493. else:
  3494. annotations.update(dict([(prefix + "-" + key + "-" + str(idx), value)]))
  3495. return (idx, annotations)
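    # Example of the key naming produced above (hypothetical annotation values):
    # get_annotations_dict({}, "spiketrain", {'source_id': 7}.items(), '', 0) returns
    # (0, {'spiketrain-source_id-0': 7}); list- or array-valued annotations are
    # expanded element by element into 'prefix-name-key-idx' entries, incrementing idx.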
  3496. def read_segment(self, episode):
  3497. """
  3498. Internal method used to return :class:`Segment` data to the main read method.
  3499. Parameters:
  3500. elphy_file : is the elphy object.
  3501. episode : number of elphy episode, roughly corresponding to a segment
  3502. """
  3503. # print "name:",self.elphy_file.layout.get_episode_name(episode)
  3504. episode_name = self.elphy_file.layout.get_episode_name(episode)
  3505. name = episode_name if len(episode_name) > 0 else "episode %s" % str(episode + 1)
  3506. segment = Segment(name=name)
  3507. # create an analog signal for
  3508. # each channel in the episode
  3509. for channel in range(1, self.elphy_file.n_channels(episode) + 1):
  3510. signal = self.elphy_file.get_signal(episode, channel)
  3511. analog_signal = AnalogSignal(
  3512. signal.data['y'],
  3513. units=signal.y_unit,
  3514. t_start=signal.t_start * getattr(pq, signal.x_unit.strip()),
  3515. t_stop=signal.t_stop * getattr(pq, signal.x_unit.strip()),
  3516. # sampling_rate = signal.sampling_frequency * pq.kHz,
  3517. sampling_period=signal.sampling_period * getattr(pq, signal.x_unit.strip()),
  3518. channel_name="episode %s, channel %s" % (int(episode + 1), int(channel + 1))
  3519. )
  3520. analog_signal.segment = segment
  3521. segment.analogsignals.append(analog_signal)
  3522. # create a spiketrain for each
  3523. # spike channel in the episode
  3524. # in case of multi-electrode
  3525. # acquisition context
  3526. n_spikes = self.elphy_file.n_spiketrains(episode)
  3527. # print "read_segment() - n_spikes:",n_spikes
  3528. if n_spikes > 0:
  3529. for spk in range(1, n_spikes + 1):
  3530. spiketrain = self.read_spiketrain(episode, spk)
  3531. spiketrain.segment = segment
  3532. segment.spiketrains.append(spiketrain)
  3533. # segment
  3534. return segment
    def read_channelindex(self, episode):
        """
        Internal method used to return :class:`ChannelIndex` info.
        Parameters:
            elphy_file : is the elphy object.
            episode : number of elphy episode, roughly corresponding to a segment
        """
        n_spikes = self.elphy_file.n_spiketrains(episode)
        group = ChannelIndex(
            name="episode %s, group of %s electrodes" % (episode, n_spikes)
        )
        for spk in range(0, n_spikes):
            channel = self.read_recordingchannel(episode, spk)
            group.channel_indexes.append(channel)
        return group
  3550. def read_recordingchannel(self, episode, chl):
  3551. """
  3552. Internal method used to return a :class:`ChannelIndex` label.
  3553. Parameters:
  3554. elphy_file : is the elphy object.
  3555. episode : number of elphy episode, roughly corresponding to a segment.
  3556. chl : electrode number.
  3557. """
  3558. channel = ChannelIndex(name="episode %s, electrodes %s" % (episode, chl), index=[0])
  3559. return channel
  3560. def read_event(self, episode, evt):
  3561. """
  3562. Internal method used to return a list of elphy :class:`EventArray` acquired from event
  3563. channels.
  3564. Parameters:
  3565. elphy_file : is the elphy object.
  3566. episode : number of elphy episode, roughly corresponding to a segment.
  3567. evt : index of the event.
  3568. """
  3569. event = self.elphy_file.get_event(episode, evt)
  3570. neo_event = Event(
  3571. times=event.times * pq.s,
  3572. channel_name="episode %s, event channel %s" % (episode + 1, evt + 1)
  3573. )
  3574. return neo_event
  3575. def read_spiketrain(self, episode, spk):
  3576. """
  3577. Internal method used to return an elphy object :class:`SpikeTrain`.
  3578. Parameters:
  3579. elphy_file : is the elphy object.
  3580. episode : number of elphy episode, roughly corresponding to a segment.
  3581. spk : index of the spike array.
  3582. """
  3583. block = self.elphy_file.layout.episode_block(episode)
  3584. spike = self.elphy_file.get_spiketrain(episode, spk)
  3585. spikes = spike.times * pq.s
  3586. # print "read_spiketrain() - spikes: %s" % (len(spikes))
  3587. # print "read_spiketrain() - spikes:",spikes
  3588. dct = {
  3589. 'times': spikes,
  3590. # check
  3591. 't_start': block.ep_block.X0_wf if block.ep_block.X0_wf < spikes[0] else spikes[0],
  3592. 't_stop': block.ep_block.cyber_time if block.ep_block.cyber_time > spikes[-1] else
  3593. spikes[-1],
  3594. 'units': 's',
            # special keywords to identify the
            # electrode providing the spiketrain,
            # even though it is redundant with
            # waveforms
  3599. 'label': "episode %s, electrode %s" % (episode, spk),
  3600. 'electrode_id': spk
  3601. }
  3602. # new spiketrain
  3603. return SpikeTrain(**dct)