McsData.py

  1. """
  2. McsData
  3. ~~~~~~~
  4. Data classes to wrap and hide raw data handling of the HDF5 data files.
  5. It is based on the MCS-HDF5 definitions of the given compatible versions.
  6. :copyright: (c) 2015 by Multi Channel Systems MCS GmbH
  7. :license: see LICENSE for more details
  8. """
  9. import h5py
  10. import datetime
  11. import math
  12. import uuid
  13. import exceptions
  14. import collections
  15. import numpy as np
  16. from McsPy import *
  17. from pint import UndefinedUnitError
  18. MCS_TICK = 1 * ureg.us
  19. CLR_TICK = 100 * ureg.ns
  20. # day -> number of clr ticks (100 ns)
  21. DAY_TO_CLR_TIME_TICK = 24 * 60 * 60 * (10**7)
  22. VERBOSE = True
  23. def dprint_name_value(n, v):
  24. if VERBOSE:
  25. print(n, v)

class RawData(object):
    """
    This class holds the information of a complete MCS raw data file
    """
    def __init__(self, raw_data_path):
        """
        Creates and initializes a RawData object that provides access to the content of the given MCS-HDF5 file

        :param raw_data_path: path to a HDF5 file that contains raw data encoded in a supported MCS-HDF5 format version
        """
        self.raw_data_path = raw_data_path
        self.h5_file = h5py.File(raw_data_path, 'r')
        self.__validate_mcs_hdf5_version()
        self.__get_session_info()
        self.__recordings = None

    def __del__(self):
        self.h5_file.close()

    # Stub for with-Statement:
    #def __enter__(self):
    #    return self
    #
    #def __exit__(self, type, value, traceback):
    #    self.h5_file.close()

    def __str__(self):
        return super(RawData, self).__str__()

    def __validate_mcs_hdf5_version(self):
        "Check if the MCS-HDF5 protocol type and version of the file is supported by this class"
        root_grp = self.h5_file['/']
        if 'McsHdf5ProtocolType' in root_grp.attrs:
            self.mcs_hdf5_protocol_type = root_grp.attrs['McsHdf5ProtocolType']
            if self.mcs_hdf5_protocol_type == "RawData":
                self.mcs_hdf5_protocol_type_version = root_grp.attrs['McsHdf5ProtocolVersion']
                supported_versions = McsHdf5Protocols.SUPPORTED_PROTOCOLS[self.mcs_hdf5_protocol_type]
                if ((self.mcs_hdf5_protocol_type_version < supported_versions[0]) or
                        (supported_versions[1] < self.mcs_hdf5_protocol_type_version)):
                    raise IOError('Given HDF5 file has MCS-HDF5 RawData protocol version %s, supported are all versions from %s to %s' %
                                  (self.mcs_hdf5_protocol_type_version, supported_versions[0], supported_versions[1]))
            else:
                raise IOError("The root group of this HDF5 file has no 'McsHdf5ProtocolVersion' attribute -> so it couldn't be checked if the version is supported!")
        else:
            raise IOError("The root group of this HDF5 file has no 'McsHdf5ProtocolType' attribute -> this file is not supported by McsPy!")

    def __get_session_info(self):
        "Read all session metadata"
        data_attrs = self.h5_file['Data'].attrs.iteritems()
        session_attributes = data_attrs
        session_info = {}
        for (name, value) in session_attributes:
            #print(name, value)
            session_info[name] = value  #.rstrip()
        self.comment = session_info['Comment'].rstrip()
        self.clr_date = session_info['Date'].rstrip()
        self.date_in_clr_ticks = session_info['DateInTicks']
        # self.date = datetime.datetime.fromordinal(int(math.ceil(self.date_in_clr_ticks / day_to_clr_time_tick)) + 1)
        self.date = datetime.datetime(1, 1, 1) + datetime.timedelta(microseconds=int(self.date_in_clr_ticks)/10)
        # self.file_guid = session_info['FileGUID'].rstrip()
        self.file_guid = uuid.UUID(session_info['FileGUID'].rstrip())
        self.mea_layout = session_info['MeaLayout'].rstrip()
        self.mea_sn = session_info['MeaSN'].rstrip()
        self.mea_name = session_info['MeaName'].rstrip()
        self.program_name = session_info['ProgramName'].rstrip()
        self.program_version = session_info['ProgramVersion'].rstrip()
        #return session_info

    def __read_recordings(self):
        "Read all recordings"
        data_folder = self.h5_file['Data']
        if len(data_folder) > 0:
            self.__recordings = {}
            for (name, value) in data_folder.iteritems():
                dprint_name_value(name, value)
                recording_name = name.split('_')
                if (len(recording_name) == 2) and (recording_name[0] == 'Recording'):
                    self.__recordings[int(recording_name[1])] = Recording(value)

    @property
    def recordings(self):
        "Access recordings"
        if self.__recordings is None:
            self.__read_recordings()
        return self.__recordings
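
# Usage sketch (illustrative, not part of the library): open a raw data file and list
# its recordings. The file name "sample.h5" is a placeholder for any MCS-HDF5 raw data file.
#
#   raw_data = RawData("sample.h5")
#   print(raw_data.comment, raw_data.date)
#   for recording_id, recording in raw_data.recordings.items():
#       print(recording_id, recording.label, recording.duration_time)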

class Recording(object):
    """
    Container class for one recording
    """
    def __init__(self, recording_grp):
        self.__recording_grp = recording_grp
        self.__get_recording_info()
        self.__analog_streams = None
        self.__frame_streams = None
        self.__event_streams = None
        self.__segment_streams = None
        self.__timestamp_streams = None

    def __get_recording_info(self):
        "Read metadata for this recording"
        recording_info = {}
        for (name, value) in self.__recording_grp.attrs.iteritems():
            recording_info[name] = value
        self.comment = recording_info['Comment'].rstrip()
        self.duration = recording_info['Duration']
        self.label = recording_info['Label'].rstrip()
        self.recording_id = recording_info['RecordingID']
        self.recording_type = recording_info['RecordingType'].rstrip()
        self.timestamp = recording_info['TimeStamp']

    def __read_analog_streams(self):
        "Read all contained analog streams"
        if 'AnalogStream' in self.__recording_grp:
            analog_stream_folder = self.__recording_grp['AnalogStream']
            if len(analog_stream_folder) > 0:
                self.__analog_streams = {}
                for (name, value) in analog_stream_folder.iteritems():
                    dprint_name_value(name, value)
                    stream_name = name.split('_')
                    if (len(stream_name) == 2) and (stream_name[0] == 'Stream'):
                        self.__analog_streams[int(stream_name[1])] = AnalogStream(value)

    def __read_frame_streams(self):
        "Read all contained frame streams"
        if 'FrameStream' in self.__recording_grp:
            frame_stream_folder = self.__recording_grp['FrameStream']
            if len(frame_stream_folder) > 0:
                self.__frame_streams = {}
                for (name, value) in frame_stream_folder.iteritems():
                    dprint_name_value(name, value)
                    stream_name = name.split('_')
                    if (len(stream_name) == 2) and (stream_name[0] == 'Stream'):
                        self.__frame_streams[int(stream_name[1])] = FrameStream(value)

    def __read_event_streams(self):
        "Read all contained event streams"
        if 'EventStream' in self.__recording_grp:
            event_stream_folder = self.__recording_grp['EventStream']
            if len(event_stream_folder) > 0:
                self.__event_streams = {}
                for (name, value) in event_stream_folder.iteritems():
                    dprint_name_value(name, value)
                    stream_name = name.split('_')
                    if (len(stream_name) == 2) and (stream_name[0] == 'Stream'):
                        index = int(stream_name[1])
                        self.__event_streams[index] = EventStream(value)

    def __read_segment_streams(self):
        "Read all contained segment streams"
        if 'SegmentStream' in self.__recording_grp:
            segment_stream_folder = self.__recording_grp['SegmentStream']
            if len(segment_stream_folder) > 0:
                self.__segment_streams = {}
                for (name, value) in segment_stream_folder.iteritems():
                    dprint_name_value(name, value)
                    stream_name = name.split('_')
                    if (len(stream_name) == 2) and (stream_name[0] == 'Stream'):
                        self.__segment_streams[int(stream_name[1])] = SegmentStream(value)

    def __read_timestamp_streams(self):
        "Read all contained timestamp streams"
        if 'TimeStampStream' in self.__recording_grp:
            timestamp_stream_folder = self.__recording_grp['TimeStampStream']
            if len(timestamp_stream_folder) > 0:
                self.__timestamp_streams = {}
                for (name, value) in timestamp_stream_folder.iteritems():
                    dprint_name_value(name, value)
                    stream_name = name.split('_')
                    if (len(stream_name) == 2) and (stream_name[0] == 'Stream'):
                        self.__timestamp_streams[int(stream_name[1])] = TimeStampStream(value)

    @property
    def analog_streams(self):
        "Access all analog streams - collection of :class:`~McsPy.McsData.AnalogStream` objects"
        if self.__analog_streams is None:
            self.__read_analog_streams()
        return self.__analog_streams

    @property
    def frame_streams(self):
        "Access all frame streams - collection of :class:`~McsPy.McsData.FrameStream` objects"
        if self.__frame_streams is None:
            self.__read_frame_streams()
        return self.__frame_streams

    @property
    def event_streams(self):
        "Access event streams - collection of :class:`~McsPy.McsData.EventStream` objects"
        if self.__event_streams is None:
            self.__read_event_streams()
        return self.__event_streams

    @property
    def segment_streams(self):
        "Access segment streams - collection of :class:`~McsPy.McsData.SegmentStream` objects"
        if self.__segment_streams is None:
            self.__read_segment_streams()
        return self.__segment_streams

    @property
    def timestamp_streams(self):
        "Access timestamp streams - collection of :class:`~McsPy.McsData.TimeStampStream` objects"
        if self.__timestamp_streams is None:
            self.__read_timestamp_streams()
        return self.__timestamp_streams

    @property
    def duration_time(self):
        "Duration of the recording"
        dur_time = (self.duration - self.timestamp) * ureg.us
        return dur_time
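
# Usage sketch (illustrative): streams are read lazily on first property access and the
# properties return None if the recording contains no stream of that kind.
#
#   recording = raw_data.recordings[0]
#   analog_streams = recording.analog_streams   # dict: stream number -> AnalogStream
#   if analog_streams is not None:
#       stream = analog_streams[0]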

class Stream(object):
    """
    Base class for all stream types
    """
    def __init__(self, stream_grp, info_type_name=None):
        """
        Initializes a stream object with its associated HDF5 folder

        :param stream_grp: folder of the HDF5 file that contains the data of this stream
        :param info_type_name: name of the Info-Type as given in class McsHdf5Protocols (default None -> no version check is executed)
        """
        self.stream_grp = stream_grp
        info_version = self.stream_grp.attrs["StreamInfoVersion"]
        if info_type_name is not None:
            McsHdf5Protocols.check_protocol_type_version(info_type_name, info_version)
        self.__get_stream_info()

    def __get_stream_info(self):
        "Read all describing meta data common to each stream -> HDF5 folder attributes"
        stream_info = {}
        for (name, value) in self.stream_grp.attrs.iteritems():
            stream_info[name] = value
        self.info_version = stream_info['StreamInfoVersion']
        self.data_subtype = stream_info['DataSubType'].rstrip()
        self.label = stream_info['Label'].rstrip()
        self.source_stream_guid = uuid.UUID(stream_info['SourceStreamGUID'].rstrip())
        self.stream_guid = uuid.UUID(stream_info['StreamGUID'].rstrip())
        self.stream_type = stream_info['StreamType'].rstrip()

class AnalogStream(Stream):
    """
    Container class for one analog stream of several channels.
    Description for each channel is provided by a channel-associated object of :class:`~McsPy.McsData.ChannelInfo`
    """
    def __init__(self, stream_grp):
        """
        Initializes an analog stream object containing several analog channels

        :param stream_grp: folder of the HDF5 file that contains the data of this analog stream
        """
        #McsHdf5Protocols.check_protocol_type_version("AnalogStreamInfoVersion", info_version)
        Stream.__init__(self, stream_grp, "AnalogStreamInfoVersion")
        self.__read_channels()

    def __read_channels(self):
        "Read all channels -> create Info structure and connect datasets"
        assert len(self.stream_grp) == 3
        for (name, value) in self.stream_grp.iteritems():
            dprint_name_value(name, value)
        # Read timestamp index of channels:
        self.timestamp_index = self.stream_grp['ChannelDataTimeStamps'][...]
        # Read infos per channel
        ch_infos = self.stream_grp['InfoChannel'][...]
        ch_info_version = self.stream_grp['InfoChannel'].attrs['InfoVersion']
        self.channel_infos = {}
        self.__map_row_to_channel_id = {}
        for channel_info in ch_infos:
            self.channel_infos[channel_info['ChannelID']] = ChannelInfo(ch_info_version, channel_info)
            self.__map_row_to_channel_id[channel_info['RowIndex']] = channel_info['ChannelID']
        # Connect the data set
        self.channel_data = self.stream_grp['ChannelData']

    def get_channel_in_range(self, channel_id, idx_start, idx_end):
        """
        Get the signal of the given channel over the course of time and in its measured range.

        :param channel_id: ID of the channel
        :param idx_start: index of the first sampled signal value that should be returned (0 <= idx_start < idx_end <= count samples)
        :param idx_end: index of the last sampled signal value that should be returned (0 <= idx_start < idx_end <= count samples)
        :return: Tuple (vector of the signal, unit of the values)
        """
        if channel_id in self.channel_infos.keys():
            if idx_start < 0:
                idx_start = 0
            if idx_end > self.channel_data.shape[1]:
                idx_end = self.channel_data.shape[1]
            else:
                idx_end += 1
            signal = self.channel_data[self.channel_infos[channel_id].row_index, idx_start : idx_end]
            scale = self.channel_infos[channel_id].adc_step.magnitude
            #scale = self.channel_infos[channel_id].get_field('ConversionFactor') * (10**self.channel_infos[channel_id].get_field('Exponent'))
            signal_corrected = (signal - self.channel_infos[channel_id].get_field('ADZero')) * scale
            return (signal_corrected, self.channel_infos[channel_id].adc_step.units)

    def get_channel_sample_timestamps(self, channel_id, idx_start, idx_end):
        """
        Get the timestamps of the sampled values.

        :param channel_id: ID of the channel
        :param idx_start: index of the first signal timestamp that should be returned (0 <= idx_start < idx_end <= count samples)
        :param idx_end: index of the last signal timestamp that should be returned (0 <= idx_start < idx_end <= count samples)
        :return: Tuple (vector of the timestamps, unit of the timestamps)
        """
        if channel_id in self.channel_infos.keys():
            start_ts = 0L
            channel = self.channel_infos[channel_id]
            tick = channel.get_field('Tick')
            for ts_range in self.timestamp_index:
                if idx_end < ts_range[1]:  # nothing to do anymore ->
                    break
                if ts_range[2] < idx_start:  # start is behind the end of this range ->
                    continue
                else:
                    idx_segment = idx_start - ts_range[1]
                    start_ts = ts_range[0] + idx_segment * tick  # timestamp of first index
                if idx_end <= ts_range[2]:
                    time_range = start_ts + np.arange(0, (idx_end - ts_range[1] + 1) - idx_segment, 1) * tick
                else:
                    time_range = start_ts + np.arange(0, (ts_range[2] - ts_range[1] + 1) - idx_segment, 1) * tick
                    idx_start = ts_range[2] + 1
                if 'time' in locals():
                    time = np.append(time, time_range)
                else:
                    time = time_range
            return (time, MCS_TICK.units)
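
# Usage sketch (illustrative): read a scaled signal trace and its time axis for one
# channel. Channel ID 0 and the index range are placeholders.
#
#   stream = recording.analog_streams[0]
#   signal, signal_unit = stream.get_channel_in_range(0, 0, 999)
#   time, time_unit = stream.get_channel_sample_timestamps(0, 0, 999)
#   # signal is in physical units, time is in MCS ticks (microseconds)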

class Info(object):
    """
    Base class of all info classes

    Derived classes contain meta information for data structures and fields.
    """
    def __init__(self, info_data):
        self.info = {}
        for name in info_data.dtype.names:
            self.info[name] = info_data[name]

    def get_field(self, name):
        "Get the field with that name -> access to the raw info array"
        return self.info[name]

    @property
    def group_id(self):
        "Get the id of the group that the object belongs to"
        return self.info["GroupID"]

    @property
    def label(self):
        "Label of this object"
        return self.info['Label']

    @property
    def data_type(self):
        "Raw data type of this object"
        return self.info['RawDataType']


class InfoSampledData(Info):
    """
    Base class of all info classes for evenly sampled data
    """
    def __init__(self, info):
        """
        Initialize an info object for sampled data

        :param info: array of info descriptors for this info object
        """
        Info.__init__(self, info)

    @property
    def sampling_frequency(self):
        "Get the used sampling frequency in Hz"
        frequency = 1 / self.sampling_tick.to_base_units()
        return frequency.to(ureg.Hz)

    @property
    def sampling_tick(self):
        "Get the used sampling tick"
        tick_time = self.info['Tick'] * MCS_TICK
        return tick_time

class ChannelInfo(InfoSampledData):
    """
    Contains all describing meta data for one sampled channel
    """
    def __init__(self, info_version, info):
        """
        Initialize an info object for sampled channel data

        :param info_version: number of the protocol version used by the following info structure
        :param info: array of info descriptors for this channel info object
        """
        InfoSampledData.__init__(self, info)
        McsHdf5Protocols.check_protocol_type_version("InfoChannel", info_version)
        self.__version = info_version

    @property
    def channel_id(self):
        "Get the ID of the channel"
        return self.info['ChannelID']

    @property
    def row_index(self):
        "Get the index of the row that contains the associated channel data inside the data matrix"
        return self.info['RowIndex']

    @property
    def adc_step(self):
        "Size and unit of one ADC step for this channel"
        unit_name = self.info['Unit']
        # It should be checked that unit_name is available in ureg (unit registry)
        step = self.info['ConversionFactor'] * (10 ** self.info['Exponent'].astype(np.float64)) * ureg[unit_name]
        return step

    @property
    def version(self):
        "Version number of the Type-Definition"
        return self.__version
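
# Usage sketch (illustrative): inspect how raw ADC values map to physical values. The
# ADC step combines conversion factor, exponent and unit; a raw sample x is converted
# by the streams as (x - ADZero) * adc_step.
#
#   info = stream.channel_infos[0]
#   print(info.sampling_frequency)   # pint quantity in hertz
#   print(info.adc_step)             # pint quantity, e.g. in volts per ADC step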

class FrameStream(Stream):
    """
    Container class for one frame stream with different entities
    """
    def __init__(self, stream_grp):
        """
        Initializes a frame stream object that contains all frame entities that belong to it.

        :param stream_grp: folder of the HDF5 file that contains the data of this frame stream
        """
        Stream.__init__(self, stream_grp, "FrameStreamInfoVersion")
        self.__read_frame_entities()

    def __read_frame_entities(self):
        "Read all frame entities for this frame stream inside the associated frame entity folder"
        #assert len(self.stream_grp) == 3
        for (name, value) in self.stream_grp.iteritems():
            dprint_name_value(name, value)
        # Read infos per frame
        fr_infos = self.stream_grp['InfoFrame'][...]
        fr_info_version = self.stream_grp['InfoFrame'].attrs['InfoVersion']
        self.frame_entity = {}
        for frame_entity_info in fr_infos:
            frame_entity_group = "FrameDataEntity_" + str(frame_entity_info['FrameDataID'])
            conv_fact = self.__read_conversion_factor_matrix(frame_entity_group)
            frame_info = FrameEntityInfo(fr_info_version, frame_entity_info, conv_fact)
            self.frame_entity[frame_entity_info['FrameID']] = FrameEntity(self.stream_grp[frame_entity_group], frame_info)

    def __read_conversion_factor_matrix(self, frame_entity_group):
        "Read matrix of conversion factors inside the frame data entity folder"
        frame_entity_conv_matrix = frame_entity_group + "/ConversionFactors"
        conv_fact = self.stream_grp[frame_entity_conv_matrix][...]
        return conv_fact

class FrameEntity(object):
    """
    Contains the stream of a specific frame entity.
    Meta-Information for this entity is available via an associated object of :class:`~McsPy.McsData.FrameEntityInfo`
    """
    def __init__(self, frame_entity_group, frame_info):
        """
        Initializes a frame entity object

        :param frame_entity_group: folder/group of the HDF5 file that contains the data for this frame entity
        :param frame_info: object of type FrameEntityInfo that contains the description of this frame entity
        """
        self.info = frame_info
        self.group = frame_entity_group
        self.timestamp_index = self.group['FrameDataTimeStamps'][...]
        # Connect the data set
        self.data = self.group['FrameData']

    def get_sensor_signal(self, sensor_x, sensor_y, idx_start, idx_end):
        """
        Get the signal of a single sensor over the course of time and in its measured range.

        :param sensor_x: x coordinate of the sensor
        :param sensor_y: y coordinate of the sensor
        :param idx_start: index of the first sampled frame that should be returned (0 <= idx_start < idx_end <= count frames)
        :param idx_end: index of the last sampled frame that should be returned (0 <= idx_start < idx_end <= count frames)
        :return: Tuple (vector of the signal, unit of the values)
        """
        if sensor_x < 0 or self.data.shape[0] < sensor_x or sensor_y < 0 or self.data.shape[1] < sensor_y:
            raise exceptions.IndexError
        if idx_start < 0:
            idx_start = 0
        if idx_end > self.data.shape[2]:
            idx_end = self.data.shape[2]
        else:
            idx_end += 1
        sensor_signal = self.data[sensor_x, sensor_y, idx_start : idx_end]
        scale_factor = self.info.adc_step_for_sensor(sensor_x, sensor_y)
        scale = scale_factor.magnitude
        sensor_signal_corrected = (sensor_signal - self.info.get_field('ADZero')) * scale
        return (sensor_signal_corrected, scale_factor.units)

    def get_frame_timestamps(self, idx_start, idx_end):
        """
        Get the timestamps of the sampled frames.

        :param idx_start: index of the first sampled frame that should be returned (0 <= idx_start < idx_end <= count frames)
        :param idx_end: index of the last sampled frame that should be returned (0 <= idx_start < idx_end <= count frames)
        :return: Tuple (vector of the timestamps, unit of the timestamps)
        """
        if idx_start < 0 or self.data.shape[2] < idx_start or idx_end < idx_start or self.data.shape[2] < idx_end:
            raise exceptions.IndexError
        start_ts = 0L
        tick = self.info.get_field('Tick')
        for ts_range in self.timestamp_index:
            if idx_end < ts_range[1]:  # nothing to do anymore ->
                break
            if ts_range[2] < idx_start:  # start is behind the end of this range ->
                continue
            else:
                idx_segment = idx_start - ts_range[1]
                start_ts = ts_range[0] + idx_segment * tick  # timestamp of first index
            if idx_end <= ts_range[2]:
                time_range = start_ts + np.arange(0, (idx_end - ts_range[1] + 1) - idx_segment, 1) * tick
            else:
                time_range = start_ts + np.arange(0, (ts_range[2] - ts_range[1] + 1) - idx_segment, 1) * tick
                idx_start = ts_range[2] + 1
            if 'time' in locals():
                time = np.append(time, time_range)
            else:
                time = time_range
        return (time, MCS_TICK.units)
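
# Usage sketch (illustrative): read the scaled signal of one sensor of a frame entity.
# The stream number, frame entity ID, sensor coordinates and index range are placeholders.
#
#   frame_entity = recording.frame_streams[0].frame_entity[1]
#   signal, unit = frame_entity.get_sensor_signal(30, 30, 0, 999)
#   time, time_unit = frame_entity.get_frame_timestamps(0, 999)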

class Frame(object):
    """
    Frame definition
    """
    def __init__(self, left, top, right, bottom):
        self.__left = left
        self.__top = top
        self.__right = right
        self.__bottom = bottom

    @property
    def left(self):
        return self.__left

    @property
    def top(self):
        return self.__top

    @property
    def right(self):
        return self.__right

    @property
    def bottom(self):
        return self.__bottom

    @property
    def width(self):
        return self.__right - self.__left + 1

    @property
    def height(self):
        return self.__bottom - self.__top + 1

class FrameEntityInfo(InfoSampledData):
    """
    Contains all describing meta data for one frame entity
    """
    def __init__(self, info_version, info, conv_factor_matrix):
        """
        Initializes a describing info object that contains all descriptions of this frame entity.

        :param info_version: number of the protocol version used by the following info structure
        :param info: array of frame entity descriptors as represented by one row of the InfoFrame structure inside the HDF5 file
        :param conv_factor_matrix: matrix of conversion factors as represented by the ConversionFactors structure inside one FrameDataEntity folder of the HDF5 file
        """
        InfoSampledData.__init__(self, info)
        McsHdf5Protocols.check_protocol_type_version("FrameEntityInfo", info_version)
        self.__version = info_version
        self.frame = Frame(info['FrameLeft'], info['FrameTop'], info['FrameRight'], info['FrameBottom'])
        self.reference_frame = Frame(info['ReferenceFrameLeft'], info['ReferenceFrameTop'], info['ReferenceFrameRight'], info['ReferenceFrameBottom'])
        self.conversion_factors = conv_factor_matrix

    @property
    def frame_id(self):
        "ID of the frame"
        return self.info['FrameID']

    @property
    def sensor_spacing(self):
        "Returns the spacing of the sensors in micro-meter"
        return self.info['SensorSpacing']

    @property
    def adc_basic_step(self):
        "Returns the value of one basic ADC-Step"
        unit_name = self.info['Unit']
        # It should be checked that unit_name is available in ureg (unit registry)
        basic_step = (10 ** self.info['Exponent'].astype(np.float64)) * ureg[unit_name]
        return basic_step

    def adc_step_for_sensor(self, x, y):
        "Returns the combined (virtual) ADC-Step for the sensor (x,y)"
        adc_sensor_step = self.conversion_factors[x, y] * self.adc_basic_step
        return adc_sensor_step

    @property
    def version(self):
        "Version number of the Type-Definition"
        return self.__version

class EventStream(Stream):
    """
    Container class for one event stream with different entities
    """
    def __init__(self, stream_grp):
        """
        Initializes an event stream object that contains all entities that belong to it.

        :param stream_grp: folder of the HDF5 file that contains the data of this event stream
        """
        Stream.__init__(self, stream_grp, "EventStreamInfoVersion")
        self.__read_event_entities()

    def __read_event_entities(self):
        "Create all event entities of this event stream"
        for (name, value) in self.stream_grp.iteritems():
            dprint_name_value(name, value)
        # Read infos per event entity
        event_infos = self.stream_grp['InfoEvent'][...]
        event_entity_info_version = self.stream_grp['InfoEvent'].attrs['InfoVersion']
        self.event_entity = {}
        for event_entity_info in event_infos:
            event_entity_name = "EventEntity_" + str(event_entity_info['EventID'])
            event_info = EventEntityInfo(event_entity_info_version, event_entity_info)
            if event_entity_name in self.stream_grp:
                self.event_entity[event_entity_info['EventID']] = EventEntity(self.stream_grp[event_entity_name], event_info)

class EventEntity(object):
    """
    Contains the event data of a specific entity.
    Meta-Information for this entity is available via an associated object of :class:`~McsPy.McsData.EventEntityInfo`
    """
    def __init__(self, event_data, event_info):
        """
        Initializes an event entity object

        :param event_data: dataset of the HDF5 file that contains the data for this event entity
        :param event_info: object of type EventEntityInfo that contains the description of this entity
        """
        self.info = event_info
        # Connect the data set
        self.data = event_data

    @property
    def count(self):
        """Number of contained events"""
        dim = self.data.shape
        return dim[1]

    def __handle_indices(self, idx_start, idx_end):
        """Check indices for consistency and set default values if nothing was provided"""
        if idx_start is None:
            idx_start = 0
        if idx_end is None:
            idx_end = self.count
        if idx_start < 0 or self.data.shape[1] < idx_start or idx_end < idx_start or self.data.shape[1] < idx_end:
            raise exceptions.IndexError
        return (idx_start, idx_end)

    def get_events(self, idx_start=None, idx_end=None):
        """Get all n events of this entity of the given index range (idx_start <= idx < idx_end)

        :param idx_start: start index of the range (including); if nothing is given -> 0
        :param idx_end: end index of the range (excluding); if nothing is given -> last index
        :return: Tuple of (2 x n matrix of timestamps (1. row) and durations (2. row), used unit of time)
        """
        idx_start, idx_end = self.__handle_indices(idx_start, idx_end)
        events = self.data[..., idx_start:idx_end]
        return (events * MCS_TICK.magnitude, MCS_TICK.units)

    def get_event_timestamps(self, idx_start=None, idx_end=None):
        """Get all n event timestamps of this entity of the given index range

        :param idx_start: start index of the range, if nothing is given -> 0
        :param idx_end: end index of the range, if nothing is given -> last index
        :return: Tuple of (n-length array of timestamps, used unit of time)
        """
        idx_start, idx_end = self.__handle_indices(idx_start, idx_end)
        events = self.data[0, idx_start:idx_end]
        return (events * MCS_TICK.magnitude, MCS_TICK.units)

    def get_event_durations(self, idx_start=None, idx_end=None):
        """Get all n event durations of this entity of the given index range

        :param idx_start: start index of the range, if nothing is given -> 0
        :param idx_end: end index of the range, if nothing is given -> last index
        :return: Tuple of (n-length array of durations, used unit of time)
        """
        idx_start, idx_end = self.__handle_indices(idx_start, idx_end)
        events = self.data[1, idx_start:idx_end]
        return (events * MCS_TICK.magnitude, MCS_TICK.units)
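
# Usage sketch (illustrative): event timestamps and durations come back in MCS ticks
# (microseconds). Event stream 0 and event ID 0 are placeholders.
#
#   event_entity = recording.event_streams[0].event_entity[0]
#   timestamps, unit = event_entity.get_event_timestamps()
#   durations, unit = event_entity.get_event_durations()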

class EventEntityInfo(Info):
    """
    Contains all meta data for one event entity
    """
    def __init__(self, info_version, info):
        """
        Initializes a describing info object with an array that contains all descriptions of this event entity.

        :param info_version: number of the protocol version used by the following info structure
        :param info: array of event entity descriptors as represented by one row of the InfoEvent structure inside the HDF5 file
        """
        Info.__init__(self, info)
        McsHdf5Protocols.check_protocol_type_version("EventEntityInfo", info_version)
        self.__version = info_version
        if info["SourceChannelIDs"] == "":
            source_channel_ids = [-1]
            source_channel_labels = ["N/A"]
        else:
            source_channel_ids = [int(x) for x in info['SourceChannelIDs'].split(',')]
            source_channel_labels = [x.strip() for x in info['SourceChannelLabels'].split(',')]
        self.__source_channels = {}
        for idx, channel_id in enumerate(source_channel_ids):
            self.__source_channels[channel_id] = source_channel_labels[idx]

    @property
    def id(self):
        "Event ID"
        return self.info['EventID']

    @property
    def raw_data_bytes(self):
        "Length of raw data in bytes"
        return self.info['RawDataBytes']

    @property
    def source_channel_ids(self):
        "IDs of all channels that were involved in the event generation."
        return self.__source_channels.keys()

    @property
    def source_channel_labels(self):
        "Labels of the channels that were involved in the event generation."
        return self.__source_channels

    @property
    def version(self):
        "Version number of the Type-Definition"
        return self.__version

class SegmentStream(Stream):
    """
    Container class for one segment stream of different segment entities
    """
    def __init__(self, stream_grp):
        Stream.__init__(self, stream_grp, "SegmentStreamInfoVersion")
        self.__read_segment_entities()

    def __read_segment_entities(self):
        "Read and initialize all segment entities"
        for (name, value) in self.stream_grp.iteritems():
            dprint_name_value(name, value)
        # Read infos per segment entity
        segment_infos = self.stream_grp['InfoSegment'][...]
        segment_info_version = self.stream_grp['InfoSegment'].attrs['InfoVersion']
        self.segment_entity = {}
        for segment_entity_info in segment_infos:
            ch_info_version = self.stream_grp['SourceInfoChannel'].attrs['InfoVersion']
            source_channel_infos = self.__get_source_channel_infos(ch_info_version, self.stream_grp['SourceInfoChannel'][...])
            segment_info = SegmentEntityInfo(segment_info_version, segment_entity_info, source_channel_infos)
            if self.data_subtype == "Average":
                segment_entity_data_name = "AverageData_" + str(segment_entity_info['SegmentID'])
                segment_entity_average_annotation_name = "AverageData_Range_" + str(segment_entity_info['SegmentID'])
                if segment_entity_data_name in self.stream_grp:
                    self.segment_entity[segment_entity_info['SegmentID']] = AverageSegmentEntity(self.stream_grp[segment_entity_data_name],
                                                                                                 self.stream_grp[segment_entity_average_annotation_name],
                                                                                                 segment_info)
            else:
                segment_entity_data_name = "SegmentData_" + str(segment_entity_info['SegmentID'])
                segment_entity_ts_name = "SegmentData_ts_" + str(segment_entity_info['SegmentID'])
                if segment_entity_data_name in self.stream_grp:
                    self.segment_entity[segment_entity_info['SegmentID']] = SegmentEntity(self.stream_grp[segment_entity_data_name],
                                                                                          self.stream_grp[segment_entity_ts_name],
                                                                                          segment_info)

    def __get_source_channel_infos(self, ch_info_version, source_channel_infos):
        "Create a dictionary of all present source channels"
        source_channels = {}
        for source_channel_info in source_channel_infos:
            source_channels[source_channel_info['ChannelID']] = ChannelInfo(ch_info_version, source_channel_info)
        return source_channels

class SegmentEntity(object):
    """
    Segment entity class.
    Meta-Information for this entity is available via an associated object of :class:`~McsPy.McsData.SegmentEntityInfo`
    """
    def __init__(self, segment_data, segment_ts, segment_info):
        """
        Initializes a segment entity.

        :param segment_data: 2d-matrix (one segment) or 3d-cube (n segments) of segment data
        :param segment_ts: timestamp vector for every segment (2d) or multi-segments (3d)
        :param segment_info: segment info object that contains all meta data for this segment entity
        :return: Segment entity
        """
        self.info = segment_info
        # connect the data set
        self.data = segment_data
        # connect the timestamp vector
        self.data_ts = segment_ts
        assert self.segment_sample_count == self.data_ts.shape[1], 'Timestamp index is not compatible with dataset!!!'

    @property
    def segment_sample_count(self):
        "Number of contained samples of segments (2d) or multi-segments (3d)"
        dim = self.data.shape
        if len(dim) == 3:
            return dim[2]
        else:
            return dim[1]

    @property
    def segment_count(self):
        "Number of segments that are sampled for one time point (2d) -> 1 and (3d) -> n"
        dim = self.data.shape
        if len(dim) == 3:
            return dim[1]
        else:
            return 1

    def __handle_indices(self, idx_start, idx_end):
        """Check indices for consistency and set default values if nothing was provided"""
        sample_count = self.segment_sample_count
        if idx_start is None:
            idx_start = 0
        if idx_end is None:
            idx_end = sample_count
        if idx_start < 0 or sample_count < idx_start or idx_end < idx_start or sample_count < idx_end:
            raise exceptions.IndexError
        return (idx_start, idx_end)

    def get_segment_in_range(self, segment_id, flat=False, idx_start=None, idx_end=None):
        """
        Get the segment signals in their measured range.

        :param segment_id: id resp. number of the segment (0 if only one segment is present or the index inside the multi-segment collection)
        :param flat: true -> one-dimensional vector of the sequentially ordered segments, false -> k x n matrix of the n segments of k sample points
        :param idx_start: index of the first segment that should be returned (0 <= idx_start < idx_end <= count segments)
        :param idx_end: index of the last segment that should be returned (0 <= idx_start < idx_end <= count segments)
        :return: Tuple (of a flat vector of the sequentially ordered segments or a k x n matrix of the n segments of k sample points depending on the value of *flat*, and the unit of the values)
        """
        if segment_id in self.info.source_channel_of_segment.keys():
            idx_start, idx_end = self.__handle_indices(idx_start, idx_end)
            if self.segment_count == 1:
                signal = self.data[..., idx_start : idx_end]
            else:
                signal = self.data[..., segment_id, idx_start : idx_end]
            source_channel = self.info.source_channel_of_segment[segment_id]
            scale = source_channel.adc_step.magnitude
            signal_corrected = (signal - source_channel.get_field('ADZero')) * scale
            if flat:
                signal_corrected = np.reshape(signal_corrected, -1, 'F')
            return (signal_corrected, source_channel.adc_step.units)

    def get_segment_sample_timestamps(self, segment_id, flat=False, idx_start=None, idx_end=None):
        """
        Get the timestamps of the sample points of the measured segment.

        :param segment_id: id resp. number of the segment (0 if only one segment is present or the index inside the multi-segment collection)
        :param flat: true -> one-dimensional vector of the sequentially ordered segment timestamps, false -> k x n matrix of the k timestamps of n segments
        :param idx_start: index of the first segment for which timestamps should be returned (0 <= idx_start < idx_end <= count segments)
        :param idx_end: index of the last segment for which timestamps should be returned (0 <= idx_start < idx_end <= count segments)
        :return: Tuple (of a flat vector of the sequentially ordered segment timestamps or a k x n matrix of the k timestamps of n segments depending on the value of *flat*, and the unit of the values)
        """
        if segment_id in self.info.source_channel_of_segment.keys():
            idx_start, idx_end = self.__handle_indices(idx_start, idx_end)
            data_ts = self.data_ts[idx_start:idx_end]
            source_channel = self.info.source_channel_of_segment[segment_id]
            signal_ts = np.zeros((self.data.shape[0], data_ts.shape[1]), dtype=np.long)
            segment_ts = np.zeros(self.data.shape[0], dtype=np.long) + source_channel.sampling_tick.magnitude
            segment_ts[0] = 0
            segment_ts = np.cumsum(segment_ts)
            for i in range(data_ts.shape[1]):
                col = (data_ts[0, i] - self.info.pre_interval.magnitude) + segment_ts
                signal_ts[:, i] = col
            if flat:
                signal_ts = np.reshape(signal_ts, -1, 'F')
            return (signal_ts, source_channel.sampling_tick.units)
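
# Usage sketch (illustrative): cutout segments are returned scaled to physical units,
# together with per-sample timestamps. Segment stream 0 and segment ID 0 are placeholders.
#
#   segment_entity = recording.segment_streams[0].segment_entity[0]
#   cutouts, unit = segment_entity.get_segment_in_range(0, flat=False)
#   cutout_ts, ts_unit = segment_entity.get_segment_sample_timestamps(0, flat=False)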

AverageSegmentTuple = collections.namedtuple('AverageSegmentTuple', ['mean', 'std_dev', 'time_tick_unit', 'signal_unit'])
"""
Named tuple that describes one or more average segments (mean, std_dev, time_tick_unit, signal_unit).

.. note::

    * :class:`~AverageSegmentTuple.mean` - mean signal values
    * :class:`~AverageSegmentTuple.std_dev` - standard deviation of the signal value (it is 0 if there was only one sample segment)
    * :class:`~AverageSegmentTuple.time_tick_unit` - sampling interval with time unit
    * :class:`~AverageSegmentTuple.signal_unit` - measured unit of the signal
"""

class AverageSegmentEntity(object):
    """
    Contains a number of signal segments that are calculated as averages of a number of segments that occurred in a given time range.
    Meta-Information for this entity is available via an associated object of :class:`~McsPy.McsData.SegmentEntityInfo`
    """
    def __init__(self, segment_average_data, segment_average_annotation, segment_info):
        """
        Initializes an average segment entity

        :param segment_average_data: 2d-matrix (one average) or 3d-cube (n averages) of average segments
        :param segment_average_annotation: annotation vector for every average segment
        :param segment_info: segment info object that contains all meta data for this segment entity
        :return: Average segment entity
        """
        self.info = segment_info
        # connect the data set
        self.data = segment_average_data
        # connect the annotation vector
        self.data_annotation = segment_average_annotation
        assert self.number_of_averages == self.data_annotation.shape[1], 'Annotation index is not compatible with dataset!!!'

    @property
    def number_of_averages(self):
        "Number of average segments inside this average entity"
        dim = self.data.shape
        return dim[2]

    @property
    def sample_length(self):
        "Number of sample points of an average segment"
        dim = self.data.shape
        return dim[1]

    def time_ranges(self):
        """
        List of time range tuples for all contained average segments

        :return: List of tuples with start and end time point
        """
        time_range_list = []
        for idx in range(self.data_annotation.shape[-1]):
            time_range_list.append((self.data_annotation[0, idx] * MCS_TICK, self.data_annotation[1, idx] * MCS_TICK))
        return time_range_list

    def time_range(self, average_segment_idx):
        """
        Get the time range for which the average segment was calculated

        :param average_segment_idx: index resp. number of the average segment
        :return: Tuple with start and end time point
        """
        return (self.data_annotation[0, average_segment_idx] * MCS_TICK, self.data_annotation[1, average_segment_idx] * MCS_TICK)

    def average_counts(self):
        """
        List of counts of samples for all contained average segments

        :return: List of sample counts
        """
        sample_count_list = []
        for idx in range(self.data_annotation.shape[-1]):
            sample_count_list.append(self.data_annotation[2, idx])
        return sample_count_list

    def average_count(self, average_segment_idx):
        """
        Count of samples that were used to calculate the average

        :param average_segment_idx: id resp. number of the average segment
        :return: sample count
        """
        return self.data_annotation[2, average_segment_idx]

    def __calculate_scaled_average(self, mean_data, std_dev_data):
        """
        Shift and scale average segments appropriately
        """
        assert len(self.info.source_channel_of_segment) == 1, "There should be only one source channel for one average segment entity!"
        source_channel = self.info.source_channel_of_segment[0]  # take the first and only source channel
        scale = source_channel.adc_step.magnitude
        mean_shifted_and_scaled = (mean_data - source_channel.get_field('ADZero')) * scale
        std_dev_scaled = std_dev_data * scale
        data_tuple = AverageSegmentTuple(mean=mean_shifted_and_scaled,
                                         std_dev=std_dev_scaled,
                                         time_tick_unit=source_channel.sampling_tick,
                                         signal_unit=source_channel.adc_step.units)
        return data_tuple

    def get_scaled_average_segments(self):
        """
        Get all contained average segments in their measured physical range.

        :return: :class:`~McsPy.McsData.AverageSegmentTuple` containing the k x n matrices for mean and standard deviation of all n contained average segments with the associated sampling and measuring information
        """
        mean = self.data[0, ...]
        std_dev = self.data[1, ...]
        return self.__calculate_scaled_average(mean, std_dev)

    def get_scaled_average_segment(self, average_segment_idx):
        """
        Get the selected average segment in its measured physical range.

        :param average_segment_idx: index resp. number of the average segment
        :return: :class:`~McsPy.McsData.AverageSegmentTuple` containing the mean and standard deviation vector of the average segment with the associated sampling and measuring information
        """
        mean = self.data[0, ..., average_segment_idx]
        std_dev = self.data[1, ..., average_segment_idx]
        return self.__calculate_scaled_average(mean, std_dev)

    def __calculate_shifted_average(self, mean_data, std_dev_data):
        """
        Shift average segments appropriately
        """
        assert len(self.info.source_channel_of_segment) == 1, "There should be only one source channel for one average segment entity!"
        source_channel = self.info.source_channel_of_segment[0]  # take the first and only source channel
        mean = mean_data
        mean_shifted = mean - source_channel.get_field('ADZero')
        data_tuple = AverageSegmentTuple(mean=mean_shifted,
                                         std_dev=std_dev_data,
                                         time_tick_unit=source_channel.sampling_tick,
                                         signal_unit=source_channel.adc_step)
        return data_tuple

    def get_average_segments(self):
        """
        Get all contained average segments, AD-offset corrected, in ADC values with their measuring conditions

        :return: :class:`~McsPy.McsData.AverageSegmentTuple` containing the k x n matrices for mean and standard deviation of all contained average segments in ADC steps with sampling tick and ADC-Step definition
        """
        mean = self.data[0, ...]
        std_dev = self.data[1, ...]
        return self.__calculate_shifted_average(mean, std_dev)

    def get_average_segment(self, average_segment_idx):
        """
        Get the AD-offset corrected average segment in ADC values with its measuring conditions

        :param average_segment_idx: id resp. number of the average segment
        :return: :class:`~McsPy.McsData.AverageSegmentTuple` containing the mean and standard deviation vector of the average segment in ADC steps with sampling tick and ADC-Step definition
        """
        mean = self.data[0, ..., average_segment_idx]
        std_dev = self.data[1, ..., average_segment_idx]
        return self.__calculate_shifted_average(mean, std_dev)
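
# Usage sketch (illustrative): average segments come back as an AverageSegmentTuple.
# The stream number, segment ID and average index are placeholders; this assumes a
# segment stream with DataSubType "Average".
#
#   avg_entity = recording.segment_streams[0].segment_entity[0]
#   avg = avg_entity.get_scaled_average_segment(0)
#   print(avg.mean.shape, avg.std_dev.shape, avg.time_tick_unit, avg.signal_unit)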

class SegmentEntityInfo(Info):
    """
    Contains all meta data for one segment entity
    """
    def __init__(self, info_version, info, source_channel_infos):
        """
        Initializes a describing info object with an array that contains all descriptions of this segment entity.

        :param info_version: number of the protocol version used by the following info structure
        :param info: array of segment entity descriptors as represented by one row of the SegmentEvent structure inside the HDF5 file
        :param source_channel_infos: dictionary of source channels from where the segments were taken
        """
        Info.__init__(self, info)
        McsHdf5Protocols.check_protocol_type_version("SegmentEntityInfo", info_version)
        self.__version = info_version
        source_channel_ids = [int(x) for x in info['SourceChannelIDs'].split(',')]
        self.source_channel_of_segment = {}
        for idx, channel_id in enumerate(source_channel_ids):
            self.source_channel_of_segment[idx] = source_channel_infos[channel_id]

    @property
    def id(self):
        "Segment ID"
        return self.info['SegmentID']

    @property
    def pre_interval(self):
        "Interval [start of the segment <- defining event timestamp]"
        return self.info['PreInterval'] * MCS_TICK

    @property
    def post_interval(self):
        "Interval [defining event timestamp -> end of the segment]"
        return self.info['PostInterval'] * MCS_TICK

    @property
    def type(self):
        "Type of the segment like 'Average' or 'Cutout'"
        return self.info['SegmentType']

    @property
    def count(self):
        "Count of segments inside the segment entity"
        return len(self.source_channel_of_segment)

    @property
    def version(self):
        "Version number of the Type-Definition"
        return self.__version

class TimeStampStream(Stream):
    """
    Container class for one timestamp stream with different entities
    """
    def __init__(self, stream_grp):
        """
        Initializes a timestamp stream object that contains all entities that belong to it.

        :param stream_grp: folder of the HDF5 file that contains the data of this timestamp stream
        """
        Stream.__init__(self, stream_grp, "TimeStampStreamInfoVersion")
        self.__read_timestamp_entities()

    def __read_timestamp_entities(self):
        "Create all timestamp entities of this timestamp stream"
        for (name, value) in self.stream_grp.iteritems():
            dprint_name_value(name, value)
        # Read infos per timestamp entity
        timestamp_infos = self.stream_grp['InfoTimeStamp'][...]
        timestamp_info_version = self.stream_grp['InfoTimeStamp'].attrs['InfoVersion']
        self.timestamp_entity = {}
        for timestamp_entity_info in timestamp_infos:
            timestamp_entity_name = "TimeStampEntity_" + str(timestamp_entity_info['TimeStampEntityID'])
            timestamp_info = TimeStampEntityInfo(timestamp_info_version, timestamp_entity_info)
            if timestamp_entity_name in self.stream_grp:
                self.timestamp_entity[timestamp_entity_info['TimeStampEntityID']] = TimeStampEntity(self.stream_grp[timestamp_entity_name], timestamp_info)

class TimeStampEntity(object):
    """
    Time-Stamp entity class.
    Meta-Information for this entity is available via an associated object of :class:`~McsPy.McsData.TimeStampEntityInfo`
    """
    def __init__(self, timestamp_data, timestamp_info):
        """
        Initializes a timestamp entity object

        :param timestamp_data: dataset of the HDF5 file that contains the data for this timestamp entity
        :param timestamp_info: object of type TimeStampEntityInfo that contains the description of this entity
        """
        self.info = timestamp_info
        # Connect the data set
        self.data = timestamp_data

    @property
    def count(self):
        """Number of contained timestamps"""
        dim = self.data.shape
        return dim[1]

    def __handle_indices(self, idx_start, idx_end):
        """Check indices for consistency and set default values if nothing was provided"""
        if idx_start is None:
            idx_start = 0
        if idx_end is None:
            idx_end = self.count
        if idx_start < 0 or self.data.shape[1] < idx_start or idx_end < idx_start or self.data.shape[1] < idx_end:
            raise exceptions.IndexError
        return (idx_start, idx_end)

    def get_timestamps(self, idx_start=None, idx_end=None):
        """Get all n time stamps of this entity of the given index range (idx_start <= idx < idx_end)

        :param idx_start: start index of the range (including); if nothing is given -> 0
        :param idx_end: end index of the range (excluding); if nothing is given -> last index
        :return: Tuple of (n-length array of timestamps, used unit of time)
        """
        idx_start, idx_end = self.__handle_indices(idx_start, idx_end)
        timestamps = self.data[idx_start:idx_end]
        scale = self.info.measuring_unit
        return (timestamps, scale)
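
# Usage sketch (illustrative): timestamps are returned together with the unit derived
# from the entity's 'Unit' and 'Exponent' metadata. Stream and entity IDs are placeholders.
#
#   ts_entity = recording.timestamp_streams[0].timestamp_entity[0]
#   timestamps, unit = ts_entity.get_timestamps()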

class TimeStampEntityInfo(Info):
    """
    Contains all meta data for one timestamp entity
    """
    def __init__(self, info_version, info):
        """
        Initializes a describing info object with an array that contains all descriptions of this timestamp entity.

        :param info_version: number of the protocol version used by the following info structure
        :param info: array of timestamp entity descriptors as represented by one row of the InfoTimeStamp structure inside the HDF5 file
        """
        Info.__init__(self, info)
        McsHdf5Protocols.check_protocol_type_version("TimeStampEntityInfo", info_version)
        self.__version = info_version
        source_channel_ids = [int(x) for x in info['SourceChannelIDs'].split(',')]
        source_channel_labels = [x.strip() for x in info['SourceChannelLabels'].split(',')]
        self.__source_channels = {}
        for idx, channel_id in enumerate(source_channel_ids):
            self.__source_channels[channel_id] = source_channel_labels[idx]

    @property
    def id(self):
        "Timestamp entity ID"
        return self.info['TimeStampEntityID']

    @property
    def unit(self):
        "Unit in which the timestamps are measured"
        return self.info['Unit']

    @property
    def exponent(self):
        "Exponent for the unit in which the timestamps are measured"
        return int(self.info['Exponent'])

    @property
    def measuring_unit(self):
        "Unit in which the timestamp entity was measured"
        try:
            provided_base_unit = ureg.parse_expression(self.unit)
        except UndefinedUnitError as unit_undefined:
            print("Could not find unit '%s' in the Unit-Registry" % self.unit)  #unit_undefined.unit_names
            return None
        else:
            return (10**self.exponent) * provided_base_unit

    @property
    def data_type(self):
        "DataType for the timestamps"
        return 'Long'

    @property
    def source_channel_ids(self):
        "IDs of all channels that were involved in the timestamp generation."
        return self.__source_channels.keys()

    @property
    def source_channel_labels(self):
        "Labels of the channels that were involved in the timestamp generation."
        return self.__source_channels

    @property
    def version(self):
        "Version number of the Type-Definition"
        return self.__version