spiketrain.py 32 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791
  1. # -*- coding: utf-8 -*-
  2. '''
  3. This module implements :class:`SpikeTrain`, an array of spike times.
  4. :class:`SpikeTrain` derives from :class:`BaseNeo`, from
5. :module:`neo.core.baseneo`, and from :class:`quantities.Quantity`, which
  6. inherits from :class:`numpy.array`.
  7. Inheritance from :class:`numpy.array` is explained here:
  8. http://docs.scipy.org/doc/numpy/user/basics.subclassing.html
  9. In brief:
  10. * Initialization of a new object from constructor happens in :meth:`__new__`.
  11. This is where user-specified attributes are set.
  12. * :meth:`__array_finalize__` is called for all new objects, including those
  13. created by slicing. This is where attributes are copied over from
  14. the old object.
  15. '''
  16. # needed for python 3 compatibility
  17. from __future__ import absolute_import, division, print_function
  18. import sys
  19. import copy
  20. import warnings
  21. import numpy as np
  22. import quantities as pq
  23. from neo.core.baseneo import BaseNeo, MergeError, merge_annotations
  24. from neo.core.dataobject import DataObject, ArrayDict
  25. def check_has_dimensions_time(*values):
  26. '''
  27. Verify that all arguments have a dimensionality that is compatible
  28. with time.
  29. '''
  30. errmsgs = []
  31. for value in values:
  32. dim = value.dimensionality
  33. if (len(dim) != 1 or list(dim.values())[0] != 1 or not isinstance(list(dim.keys())[0],
  34. pq.UnitTime)):
  35. errmsgs.append("value %s has dimensions %s, not [time]" % (value, dim.simplified))
  36. if errmsgs:
  37. raise ValueError("\n".join(errmsgs))
  38. def _check_time_in_range(value, t_start, t_stop, view=False):
  39. '''
  40. Verify that all times in :attr:`value` are between :attr:`t_start`
  41. and :attr:`t_stop` (inclusive.
  42. If :attr:`view` is True, vies are used for the test.
  43. Using drastically increases the speed, but is only safe if you are
  44. certain that the dtype and units are the same
  45. '''
  46. if t_start > t_stop:
  47. raise ValueError("t_stop (%s) is before t_start (%s)" % (t_stop, t_start))
  48. if not value.size:
  49. return
  50. if view:
  51. value = value.view(np.ndarray)
  52. t_start = t_start.view(np.ndarray)
  53. t_stop = t_stop.view(np.ndarray)
  54. if value.min() < t_start:
  55. raise ValueError("The first spike (%s) is before t_start (%s)" % (value, t_start))
  56. if value.max() > t_stop:
  57. raise ValueError("The last spike (%s) is after t_stop (%s)" % (value, t_stop))
  58. def _check_waveform_dimensions(spiketrain):
  59. '''
  60. Verify that waveform is compliant with the waveform definition as
  61. quantity array 3D (spike, channel_index, time)
  62. '''
  63. if not spiketrain.size:
  64. return
  65. waveforms = spiketrain.waveforms
  66. if (waveforms is None) or (not waveforms.size):
  67. return
  68. if waveforms.shape[0] != len(spiketrain):
  69. raise ValueError("Spiketrain length (%s) does not match to number of "
  70. "waveforms present (%s)" % (len(spiketrain), waveforms.shape[0]))
  71. def _new_spiketrain(cls, signal, t_stop, units=None, dtype=None, copy=True,
  72. sampling_rate=1.0 * pq.Hz, t_start=0.0 * pq.s, waveforms=None, left_sweep=None,
  73. name=None, file_origin=None, description=None, array_annotations=None,
  74. annotations=None, segment=None, unit=None):
  75. '''
  76. A function to map :meth:`BaseAnalogSignal.__new__` to function that
  77. does not do the unit checking. This is needed for :module:`pickle` to work.
  78. '''
  79. if annotations is None:
  80. annotations = {}
  81. obj = SpikeTrain(signal, t_stop, units, dtype, copy, sampling_rate, t_start, waveforms,
  82. left_sweep, name, file_origin, description, array_annotations, **annotations)
  83. obj.segment = segment
  84. obj.unit = unit
  85. return obj
  86. class SpikeTrain(DataObject):
  87. '''
  88. :class:`SpikeTrain` is a :class:`Quantity` array of spike times.
  89. It is an ensemble of action potentials (spikes) emitted by the same unit
  90. in a period of time.
  91. *Usage*::
  92. >>> from neo.core import SpikeTrain
  93. >>> from quantities import s
  94. >>>
  95. >>> train = SpikeTrain([3, 4, 5]*s, t_stop=10.0)
  96. >>> train2 = train[1:3]
  97. >>>
  98. >>> train.t_start
  99. array(0.0) * s
  100. >>> train.t_stop
  101. array(10.0) * s
  102. >>> train
  103. <SpikeTrain(array([ 3., 4., 5.]) * s, [0.0 s, 10.0 s])>
  104. >>> train2
  105. <SpikeTrain(array([ 4., 5.]) * s, [0.0 s, 10.0 s])>
  106. *Required attributes/properties*:
  107. :times: (quantity array 1D, numpy array 1D, or list) The times of
  108. each spike.
  109. :units: (quantity units) Required if :attr:`times` is a list or
  110. :class:`~numpy.ndarray`, not if it is a
  111. :class:`~quantites.Quantity`.
  112. :t_stop: (quantity scalar, numpy scalar, or float) Time at which
  113. :class:`SpikeTrain` ended. This will be converted to the
  114. same units as :attr:`times`. This argument is required because it
  115. specifies the period of time over which spikes could have occurred.
  116. Note that :attr:`t_start` is highly recommended for the same
  117. reason.
  118. Note: If :attr:`times` contains values outside of the
  119. range [t_start, t_stop], an Exception is raised.
  120. *Recommended attributes/properties*:
  121. :name: (str) A label for the dataset.
  122. :description: (str) Text description.
  123. :file_origin: (str) Filesystem path or URL of the original data file.
  124. :t_start: (quantity scalar, numpy scalar, or float) Time at which
  125. :class:`SpikeTrain` began. This will be converted to the
  126. same units as :attr:`times`.
  127. Default: 0.0 seconds.
  128. :waveforms: (quantity array 3D (spike, channel_index, time))
  129. The waveforms of each spike.
  130. :sampling_rate: (quantity scalar) Number of samples per unit time
  131. for the waveforms.
  132. :left_sweep: (quantity array 1D) Time from the beginning
  133. of the waveform to the trigger time of the spike.
  134. :sort: (bool) If True, the spike train will be sorted by time.
  135. *Optional attributes/properties*:
  136. :dtype: (numpy dtype or str) Override the dtype of the signal array.
  137. :copy: (bool) Whether to copy the times array. True by default.
  138. Must be True when you request a change of units or dtype.
  139. :array_annotations: (dict) Dict mapping strings to numpy arrays containing annotations \
  140. for all data points
  141. Note: Any other additional arguments are assumed to be user-specific
  142. metadata and stored in :attr:`annotations`.
  143. *Properties available on this object*:
  144. :sampling_period: (quantity scalar) Interval between two samples.
  145. (1/:attr:`sampling_rate`)
  146. :duration: (quantity scalar) Duration over which spikes can occur,
  147. read-only.
  148. (:attr:`t_stop` - :attr:`t_start`)
  149. :spike_duration: (quantity scalar) Duration of a waveform, read-only.
  150. (:attr:`waveform`.shape[2] * :attr:`sampling_period`)
  151. :right_sweep: (quantity scalar) Time from the trigger times of the
  152. spikes to the end of the waveforms, read-only.
  153. (:attr:`left_sweep` + :attr:`spike_duration`)
  154. :times: (quantity array 1D) Returns the :class:`SpikeTrain` as a quantity array.
  155. *Slicing*:
  156. :class:`SpikeTrain` objects can be sliced. When this occurs, a new
  157. :class:`SpikeTrain` (actually a view) is returned, with the same
  158. metadata, except that :attr:`waveforms` is also sliced in the same way
  159. (along dimension 0). Note that t_start and t_stop are not changed
  160. automatically, although you can still manually change them.
  161. '''
  162. _single_parent_objects = ('Segment', 'Unit')
  163. _quantity_attr = 'times'
  164. _necessary_attrs = (('times', pq.Quantity, 1), ('t_start', pq.Quantity, 0),
  165. ('t_stop', pq.Quantity, 0))
  166. _recommended_attrs = ((('waveforms', pq.Quantity, 3), ('left_sweep', pq.Quantity, 0),
  167. ('sampling_rate', pq.Quantity, 0)) + BaseNeo._recommended_attrs)
  168. def __new__(cls, times, t_stop, units=None, dtype=None, copy=True, sampling_rate=1.0 * pq.Hz,
  169. t_start=0.0 * pq.s, waveforms=None, left_sweep=None, name=None, file_origin=None,
  170. description=None, array_annotations=None, **annotations):
  171. '''
  172. Constructs a new :clas:`Spiketrain` instance from data.
  173. This is called whenever a new :class:`SpikeTrain` is created from the
  174. constructor, but not when slicing.
  175. '''
  176. if len(times) != 0 and waveforms is not None and len(times) != waveforms.shape[0]:
  177. # len(times)!=0 has been used to workaround a bug occuring during neo import
  178. raise ValueError("the number of waveforms should be equal to the number of spikes")
  179. # Make sure units are consistent
  180. # also get the dimensionality now since it is much faster to feed
  181. # that to Quantity rather than a unit
  182. if units is None:
  183. # No keyword units, so get from `times`
  184. try:
  185. dim = times.units.dimensionality
  186. except AttributeError:
  187. raise ValueError('you must specify units')
  188. else:
  189. if hasattr(units, 'dimensionality'):
  190. dim = units.dimensionality
  191. else:
  192. dim = pq.quantity.validate_dimensionality(units)
  193. if hasattr(times, 'dimensionality'):
  194. if times.dimensionality.items() == dim.items():
  195. units = None # units will be taken from times, avoids copying
  196. else:
  197. if not copy:
  198. raise ValueError("cannot rescale and return view")
  199. else:
  200. # this is needed because of a bug in python-quantities
  201. # see issue # 65 in python-quantities github
  202. # remove this if it is fixed
  203. times = times.rescale(dim)
  204. if dtype is None:
  205. if not hasattr(times, 'dtype'):
  206. dtype = np.float
  207. elif hasattr(times, 'dtype') and times.dtype != dtype:
  208. if not copy:
  209. raise ValueError("cannot change dtype and return view")
  210. # if t_start.dtype or t_stop.dtype != times.dtype != dtype,
  211. # _check_time_in_range can have problems, so we set the t_start
  212. # and t_stop dtypes to be the same as times before converting them
  213. # to dtype below
  214. # see ticket #38
  215. if hasattr(t_start, 'dtype') and t_start.dtype != times.dtype:
  216. t_start = t_start.astype(times.dtype)
  217. if hasattr(t_stop, 'dtype') and t_stop.dtype != times.dtype:
  218. t_stop = t_stop.astype(times.dtype)
  219. # check to make sure the units are time
  220. # this approach is orders of magnitude faster than comparing the
  221. # reference dimensionality
  222. if (len(dim) != 1 or list(dim.values())[0] != 1 or not isinstance(list(dim.keys())[0],
  223. pq.UnitTime)):
  224. ValueError("Unit has dimensions %s, not [time]" % dim.simplified)
  225. # Construct Quantity from data
  226. obj = pq.Quantity(times, units=units, dtype=dtype, copy=copy).view(cls)
  227. # if the dtype and units match, just copy the values here instead
  228. # of doing the much more expensive creation of a new Quantity
  229. # using items() is orders of magnitude faster
  230. if (hasattr(t_start, 'dtype')
  231. and t_start.dtype == obj.dtype
  232. and hasattr(t_start, 'dimensionality')
  233. and t_start.dimensionality.items() == dim.items()):
  234. obj.t_start = t_start.copy()
  235. else:
  236. obj.t_start = pq.Quantity(t_start, units=dim, dtype=obj.dtype)
  237. if (hasattr(t_stop, 'dtype') and t_stop.dtype == obj.dtype
  238. and hasattr(t_stop, 'dimensionality')
  239. and t_stop.dimensionality.items() == dim.items()):
  240. obj.t_stop = t_stop.copy()
  241. else:
  242. obj.t_stop = pq.Quantity(t_stop, units=dim, dtype=obj.dtype)
  243. # Store attributes
  244. obj.waveforms = waveforms
  245. obj.left_sweep = left_sweep
  246. obj.sampling_rate = sampling_rate
  247. # parents
  248. obj.segment = None
  249. obj.unit = None
  250. # Error checking (do earlier?)
  251. _check_time_in_range(obj, obj.t_start, obj.t_stop, view=True)
  252. return obj
  253. def __init__(self, times, t_stop, units=None, dtype=np.float, copy=True,
  254. sampling_rate=1.0 * pq.Hz, t_start=0.0 * pq.s, waveforms=None, left_sweep=None,
  255. name=None, file_origin=None, description=None, array_annotations=None,
  256. **annotations):
  257. '''
  258. Initializes a newly constructed :class:`SpikeTrain` instance.
  259. '''
  260. # This method is only called when constructing a new SpikeTrain,
  261. # not when slicing or viewing. We use the same call signature
  262. # as __new__ for documentation purposes. Anything not in the call
  263. # signature is stored in annotations.
  264. # Calls parent __init__, which grabs universally recommended
  265. # attributes and sets up self.annotations
  266. DataObject.__init__(self, name=name, file_origin=file_origin, description=description,
  267. array_annotations=array_annotations, **annotations)
  268. def _repr_pretty_(self, pp, cycle):
  269. super(SpikeTrain, self)._repr_pretty_(pp, cycle)
  270. def rescale(self, units):
  271. '''
  272. Return a copy of the :class:`SpikeTrain` converted to the specified
  273. units
  274. '''
  275. obj = super(SpikeTrain, self).rescale(units)
  276. obj.t_start = self.t_start.rescale(units)
  277. obj.t_stop = self.t_stop.rescale(units)
  278. obj.unit = self.unit
  279. return obj
  280. def __reduce__(self):
  281. '''
  282. Map the __new__ function onto _new_BaseAnalogSignal, so that pickle
  283. works
  284. '''
  285. import numpy
  286. return _new_spiketrain, (self.__class__, numpy.array(self), self.t_stop, self.units,
  287. self.dtype, True, self.sampling_rate, self.t_start,
  288. self.waveforms, self.left_sweep, self.name, self.file_origin,
  289. self.description, self.array_annotations, self.annotations,
  290. self.segment, self.unit)
  291. def __array_finalize__(self, obj):
  292. '''
  293. This is called every time a new :class:`SpikeTrain` is created.
  294. It is the appropriate place to set default values for attributes
  295. for :class:`SpikeTrain` constructed by slicing or viewing.
  296. User-specified values are only relevant for construction from
  297. constructor, and these are set in __new__. Then they are just
  298. copied over here.
  299. Note that the :attr:`waveforms` attibute is not sliced here. Nor is
  300. :attr:`t_start` or :attr:`t_stop` modified.
  301. '''
  302. # This calls Quantity.__array_finalize__ which deals with
  303. # dimensionality
  304. super(SpikeTrain, self).__array_finalize__(obj)
  305. # Supposedly, during initialization from constructor, obj is supposed
  306. # to be None, but this never happens. It must be something to do
  307. # with inheritance from Quantity.
  308. if obj is None:
  309. return
  310. # Set all attributes of the new object `self` from the attributes
  311. # of `obj`. For instance, when slicing, we want to copy over the
  312. # attributes of the original object.
  313. self.t_start = getattr(obj, 't_start', None)
  314. self.t_stop = getattr(obj, 't_stop', None)
  315. self.waveforms = getattr(obj, 'waveforms', None)
  316. self.left_sweep = getattr(obj, 'left_sweep', None)
  317. self.sampling_rate = getattr(obj, 'sampling_rate', None)
  318. self.segment = getattr(obj, 'segment', None)
  319. self.unit = getattr(obj, 'unit', None)
  320. # The additional arguments
  321. self.annotations = getattr(obj, 'annotations', {})
  322. # Add empty array annotations, because they cannot always be copied,
  323. # but do not overwrite existing ones from slicing etc.
  324. # This ensures the attribute exists
  325. if not hasattr(self, 'array_annotations'):
  326. self.array_annotations = ArrayDict(self._get_arr_ann_length())
  327. # Note: Array annotations have to be changed when slicing or initializing an object,
  328. # copying them over in spite of changed data would result in unexpected behaviour
  329. # Globally recommended attributes
  330. self.name = getattr(obj, 'name', None)
  331. self.file_origin = getattr(obj, 'file_origin', None)
  332. self.description = getattr(obj, 'description', None)
  333. if hasattr(obj, 'lazy_shape'):
  334. self.lazy_shape = obj.lazy_shape
  335. def __deepcopy__(self, memo):
  336. cls = self.__class__
  337. new_st = cls(np.array(self), self.t_stop, units=self.units, dtype=self.dtype, copy=True,
  338. sampling_rate=self.sampling_rate, t_start=self.t_start,
  339. waveforms=self.waveforms, left_sweep=self.left_sweep, name=self.name,
  340. file_origin=self.file_origin, description=self.description)
  341. new_st.__dict__.update(self.__dict__)
  342. memo[id(self)] = new_st
  343. for k, v in self.__dict__.items():
  344. try:
  345. setattr(new_st, k, copy.deepcopy(v, memo))
  346. except TypeError:
  347. setattr(new_st, k, v)
  348. return new_st
  349. def __repr__(self):
  350. '''
  351. Returns a string representing the :class:`SpikeTrain`.
  352. '''
  353. return '<SpikeTrain(%s, [%s, %s])>' % (
  354. super(SpikeTrain, self).__repr__(), self.t_start, self.t_stop)
  355. def sort(self):
  356. '''
  357. Sorts the :class:`SpikeTrain` and its :attr:`waveforms`, if any,
  358. by time.
  359. '''
  360. # sort the waveforms by the times
  361. sort_indices = np.argsort(self)
  362. if self.waveforms is not None and self.waveforms.any():
  363. self.waveforms = self.waveforms[sort_indices]
  364. self.array_annotate(**copy.deepcopy(self.array_annotations_at_index(sort_indices)))
  365. # now sort the times
  366. # We have sorted twice, but `self = self[sort_indices]` introduces
  367. # a dependency on the slicing functionality of SpikeTrain.
  368. super(SpikeTrain, self).sort()
  369. def __getslice__(self, i, j):
  370. '''
  371. Get a slice from :attr:`i` to :attr:`j`.
  372. Doesn't get called in Python 3, :meth:`__getitem__` is called instead
  373. '''
  374. return self.__getitem__(slice(i, j))
  375. def __add__(self, time):
  376. '''
  377. Shifts the time point of all spikes by adding the amount in
  378. :attr:`time` (:class:`Quantity`)
  379. If `time` is a scalar, this also shifts :attr:`t_start` and :attr:`t_stop`.
  380. If `time` is an array, :attr:`t_start` and :attr:`t_stop` are not changed unless
  381. some of the new spikes would be outside this range.
  382. In this case :attr:`t_start` and :attr:`t_stop` are modified if necessary to
  383. ensure they encompass all spikes.
  384. It is not possible to add two SpikeTrains (raises ValueError).
  385. '''
  386. spikes = self.view(pq.Quantity)
  387. check_has_dimensions_time(time)
  388. if isinstance(time, SpikeTrain):
  389. raise TypeError("Can't add two spike trains")
  390. new_times = spikes + time
  391. if time.size > 1:
  392. t_start = min(self.t_start, np.min(new_times))
  393. t_stop = max(self.t_stop, np.max(new_times))
  394. else:
  395. t_start = self.t_start + time
  396. t_stop = self.t_stop + time
  397. return SpikeTrain(times=new_times, t_stop=t_stop, units=self.units,
  398. sampling_rate=self.sampling_rate, t_start=t_start,
  399. waveforms=self.waveforms, left_sweep=self.left_sweep, name=self.name,
  400. file_origin=self.file_origin, description=self.description,
  401. array_annotations=copy.deepcopy(self.array_annotations),
  402. **self.annotations)
  403. def __sub__(self, time):
  404. '''
  405. Shifts the time point of all spikes by subtracting the amount in
  406. :attr:`time` (:class:`Quantity`)
  407. If `time` is a scalar, this also shifts :attr:`t_start` and :attr:`t_stop`.
  408. If `time` is an array, :attr:`t_start` and :attr:`t_stop` are not changed unless
  409. some of the new spikes would be outside this range.
  410. In this case :attr:`t_start` and :attr:`t_stop` are modified if necessary to
  411. ensure they encompass all spikes.
  412. In general, it is not possible to subtract two SpikeTrain objects (raises ValueError).
  413. However, if `time` is itself a SpikeTrain of the same size as the SpikeTrain,
  414. returns a Quantities array (since this is often used in checking
  415. whether two spike trains are the same or in calculating the inter-spike interval.
  416. '''
  417. spikes = self.view(pq.Quantity)
  418. check_has_dimensions_time(time)
  419. if isinstance(time, SpikeTrain):
  420. if self.size == time.size:
  421. return spikes - time
  422. else:
  423. raise TypeError("Can't subtract spike trains with different sizes")
  424. else:
  425. new_times = spikes - time
  426. if time.size > 1:
  427. t_start = min(self.t_start, np.min(new_times))
  428. t_stop = max(self.t_stop, np.max(new_times))
  429. else:
  430. t_start = self.t_start - time
  431. t_stop = self.t_stop - time
  432. return SpikeTrain(times=spikes - time, t_stop=t_stop, units=self.units,
  433. sampling_rate=self.sampling_rate, t_start=t_start,
  434. waveforms=self.waveforms, left_sweep=self.left_sweep, name=self.name,
  435. file_origin=self.file_origin, description=self.description,
  436. array_annotations=copy.deepcopy(self.array_annotations),
  437. **self.annotations)
  438. def __getitem__(self, i):
  439. '''
  440. Get the item or slice :attr:`i`.
  441. '''
  442. obj = super(SpikeTrain, self).__getitem__(i)
  443. if hasattr(obj, 'waveforms') and obj.waveforms is not None:
  444. obj.waveforms = obj.waveforms.__getitem__(i)
  445. try:
  446. obj.array_annotate(**copy.deepcopy(self.array_annotations_at_index(i)))
  447. except AttributeError: # If Quantity was returned, not SpikeTrain
  448. pass
  449. return obj
  450. def __setitem__(self, i, value):
  451. '''
  452. Set the value the item or slice :attr:`i`.
  453. '''
  454. if not hasattr(value, "units"):
  455. value = pq.Quantity(value,
  456. units=self.units) # or should we be strict: raise ValueError(
  457. # "Setting a value # requires a quantity")?
  458. # check for values outside t_start, t_stop
  459. _check_time_in_range(value, self.t_start, self.t_stop)
  460. super(SpikeTrain, self).__setitem__(i, value)
  461. def __setslice__(self, i, j, value):
  462. if not hasattr(value, "units"):
  463. value = pq.Quantity(value, units=self.units)
  464. _check_time_in_range(value, self.t_start, self.t_stop)
  465. super(SpikeTrain, self).__setslice__(i, j, value)
  466. def _copy_data_complement(self, other, deep_copy=False):
  467. '''
  468. Copy the metadata from another :class:`SpikeTrain`.
  469. Note: Array annotations can not be copied here because length of data can change
  470. '''
  471. # Note: Array annotations cannot be copied because length of data can be changed
  472. # here which would cause inconsistencies
  473. for attr in ("left_sweep", "sampling_rate", "name", "file_origin", "description",
  474. "annotations"):
  475. attr_value = getattr(other, attr, None)
  476. if deep_copy:
  477. attr_value = copy.deepcopy(attr_value)
  478. setattr(self, attr, attr_value)
  479. def duplicate_with_new_data(self, signal, t_start=None, t_stop=None, waveforms=None,
  480. deep_copy=True, units=None):
  481. '''
  482. Create a new :class:`SpikeTrain` with the same metadata
  483. but different data (times, t_start, t_stop)
  484. Note: Array annotations can not be copied here because length of data can change
  485. '''
  486. # using previous t_start and t_stop if no values are provided
  487. if t_start is None:
  488. t_start = self.t_start
  489. if t_stop is None:
  490. t_stop = self.t_stop
  491. if waveforms is None:
  492. waveforms = self.waveforms
  493. if units is None:
  494. units = self.units
  495. else:
  496. units = pq.quantity.validate_dimensionality(units)
  497. new_st = self.__class__(signal, t_start=t_start, t_stop=t_stop, waveforms=waveforms,
  498. units=units)
  499. new_st._copy_data_complement(self, deep_copy=deep_copy)
  500. # Note: Array annotations are not copied here, because length of data could change
  501. # overwriting t_start and t_stop with new values
  502. new_st.t_start = t_start
  503. new_st.t_stop = t_stop
  504. # consistency check
  505. _check_time_in_range(new_st, new_st.t_start, new_st.t_stop, view=False)
  506. _check_waveform_dimensions(new_st)
  507. return new_st
  508. def time_slice(self, t_start, t_stop):
  509. '''
  510. Creates a new :class:`SpikeTrain` corresponding to the time slice of
  511. the original :class:`SpikeTrain` between (and including) times
  512. :attr:`t_start` and :attr:`t_stop`. Either parameter can also be None
  513. to use infinite endpoints for the time interval.
  514. '''
  515. _t_start = t_start
  516. _t_stop = t_stop
  517. if t_start is None:
  518. _t_start = -np.inf
  519. if t_stop is None:
  520. _t_stop = np.inf
  521. indices = (self >= _t_start) & (self <= _t_stop)
  522. new_st = self[indices]
  523. new_st.t_start = max(_t_start, self.t_start)
  524. new_st.t_stop = min(_t_stop, self.t_stop)
  525. if self.waveforms is not None:
  526. new_st.waveforms = self.waveforms[indices]
  527. return new_st
  528. def merge(self, other):
  529. '''
  530. Merge another :class:`SpikeTrain` into this one.
  531. The times of the :class:`SpikeTrain` objects combined in one array
  532. and sorted.
  533. If the attributes of the two :class:`SpikeTrain` are not
  534. compatible, an Exception is raised.
  535. '''
  536. if self.sampling_rate != other.sampling_rate:
  537. raise MergeError("Cannot merge, different sampling rates")
  538. if self.t_start != other.t_start:
  539. raise MergeError("Cannot merge, different t_start")
  540. if self.t_stop != other.t_stop:
  541. raise MemoryError("Cannot merge, different t_stop")
  542. if self.left_sweep != other.left_sweep:
  543. raise MemoryError("Cannot merge, different left_sweep")
  544. if self.segment != other.segment:
  545. raise MergeError("Cannot merge these two signals as they belong to"
  546. " different segments.")
  547. if hasattr(self, "lazy_shape"):
  548. if hasattr(other, "lazy_shape"):
  549. merged_lazy_shape = (self.lazy_shape[0] + other.lazy_shape[0])
  550. else:
  551. raise MergeError("Cannot merge a lazy object with a real"
  552. " object.")
  553. if other.units != self.units:
  554. other = other.rescale(self.units)
  555. wfs = [self.waveforms is not None, other.waveforms is not None]
  556. if any(wfs) and not all(wfs):
  557. raise MergeError("Cannot merge signal with waveform and signal "
  558. "without waveform.")
  559. stack = np.concatenate((np.asarray(self), np.asarray(other)))
  560. sorting = np.argsort(stack)
  561. stack = stack[sorting]
  562. kwargs = {}
  563. kwargs['array_annotations'] = self._merge_array_annotations(other, sorting=sorting)
  564. for name in ("name", "description", "file_origin"):
  565. attr_self = getattr(self, name)
  566. attr_other = getattr(other, name)
  567. if attr_self == attr_other:
  568. kwargs[name] = attr_self
  569. else:
  570. kwargs[name] = "merge(%s, %s)" % (attr_self, attr_other)
  571. merged_annotations = merge_annotations(self.annotations, other.annotations)
  572. kwargs.update(merged_annotations)
  573. train = SpikeTrain(stack, units=self.units, dtype=self.dtype, copy=False,
  574. t_start=self.t_start, t_stop=self.t_stop,
  575. sampling_rate=self.sampling_rate, left_sweep=self.left_sweep, **kwargs)
  576. if all(wfs):
  577. wfs_stack = np.vstack((self.waveforms, other.waveforms))
  578. wfs_stack = wfs_stack[sorting]
  579. train.waveforms = wfs_stack
  580. train.segment = self.segment
  581. if train.segment is not None:
  582. self.segment.spiketrains.append(train)
  583. if hasattr(self, "lazy_shape"):
  584. train.lazy_shape = merged_lazy_shape
  585. return train
  586. def _merge_array_annotations(self, other, sorting=None):
  587. '''
  588. Merges array annotations of 2 different objects.
  589. The merge happens in such a way that the result fits the merged data
  590. In general this means concatenating the arrays from the 2 objects.
  591. If an annotation is only present in one of the objects, it will be omitted.
  592. Apart from that the array_annotations need to be sorted according to the sorting of
  593. the spikes.
  594. :return Merged array_annotations
  595. '''
  596. assert sorting is not None, "The order of the merged spikes must be known"
  597. merged_array_annotations = {}
  598. omitted_keys_self = []
  599. keys = self.array_annotations.keys()
  600. for key in keys:
  601. try:
  602. self_ann = copy.deepcopy(self.array_annotations[key])
  603. other_ann = copy.deepcopy(other.array_annotations[key])
  604. if isinstance(self_ann, pq.Quantity):
  605. other_ann.rescale(self_ann.units)
  606. arr_ann = np.concatenate([self_ann, other_ann]) * self_ann.units
  607. else:
  608. arr_ann = np.concatenate([self_ann, other_ann])
  609. merged_array_annotations[key] = arr_ann[sorting]
  610. # Annotation only available in 'self', must be skipped
  611. # Ignore annotations present only in one of the SpikeTrains
  612. except KeyError:
  613. omitted_keys_self.append(key)
  614. continue
  615. omitted_keys_other = [key for key in other.array_annotations if
  616. key not in self.array_annotations]
  617. if omitted_keys_self or omitted_keys_other:
  618. warnings.warn("The following array annotations were omitted, because they were only "
  619. "present in one of the merged objects: {} from the one that was merged "
  620. "into and {} from the one that was merged into the other"
  621. "".format(omitted_keys_self, omitted_keys_other), UserWarning)
  622. return merged_array_annotations
  623. @property
  624. def times(self):
  625. '''
  626. Returns the :class:`SpikeTrain` as a quantity array.
  627. '''
  628. return pq.Quantity(self)
  629. @property
  630. def duration(self):
  631. '''
  632. Duration over which spikes can occur,
  633. (:attr:`t_stop` - :attr:`t_start`)
  634. '''
  635. if self.t_stop is None or self.t_start is None:
  636. return None
  637. return self.t_stop - self.t_start
  638. @property
  639. def spike_duration(self):
  640. '''
  641. Duration of a waveform.
  642. (:attr:`waveform`.shape[2] * :attr:`sampling_period`)
  643. '''
  644. if self.waveforms is None or self.sampling_rate is None:
  645. return None
  646. return self.waveforms.shape[2] / self.sampling_rate
  647. @property
  648. def sampling_period(self):
  649. '''
  650. Interval between two samples.
  651. (1/:attr:`sampling_rate`)
  652. '''
  653. if self.sampling_rate is None:
  654. return None
  655. return 1.0 / self.sampling_rate
  656. @sampling_period.setter
  657. def sampling_period(self, period):
  658. '''
  659. Setter for :attr:`sampling_period`
  660. '''
  661. if period is None:
  662. self.sampling_rate = None
  663. else:
  664. self.sampling_rate = 1.0 / period
  665. @property
  666. def right_sweep(self):
  667. '''
  668. Time from the trigger times of the spikes to the end of the waveforms.
  669. (:attr:`left_sweep` + :attr:`spike_duration`)
  670. '''
  671. dur = self.spike_duration
  672. if self.left_sweep is None or dur is None:
  673. return None
  674. return self.left_sweep + dur