# statistics.py
  1. # -*- coding: utf-8 -*-
  2. """
  3. Statistical measures of spike trains (e.g., Fano factor) and functions to estimate firing rates.
  4. :copyright: Copyright 2014-2016 by the Elephant team, see AUTHORS.txt.
  5. :license: Modified BSD, see LICENSE.txt for details.
  6. """
  7. from __future__ import division, print_function
  8. import numpy as np
  9. import quantities as pq
  10. import scipy.stats
  11. import scipy.signal
  12. import neo
  13. from neo.core import SpikeTrain
  14. import elephant.conversion as conv
  15. import elephant.kernels as kernels
  16. import warnings
  17. # warnings.simplefilter('always', DeprecationWarning)
  18. def isi(spiketrain, axis=-1):
  19. """
  20. Return an array containing the inter-spike intervals of the SpikeTrain.
  21. Accepts a Neo SpikeTrain, a Quantity array, or a plain NumPy array.
  22. If either a SpikeTrain or Quantity array is provided, the return value will
  23. be a quantities array, otherwise a plain NumPy array. The units of
  24. the quantities array will be the same as spiketrain.
  25. Parameters
  26. ----------
  27. spiketrain : Neo SpikeTrain or Quantity array or NumPy ndarray
  28. The spike times.
  29. axis : int, optional
  30. The axis along which the difference is taken.
  31. Default is the last axis.
  32. Returns
  33. -------
  34. NumPy array or quantities array.
  35. """
  36. if axis is None:
  37. axis = -1
  38. if isinstance(spiketrain, neo.SpikeTrain):
  39. intervals = np.diff(
  40. np.sort(spiketrain.times.view(pq.Quantity)), axis=axis)
  41. else:
  42. intervals = np.diff(np.sort(spiketrain), axis=axis)
  43. return intervals
  44. def mean_firing_rate(spiketrain, t_start=None, t_stop=None, axis=None):
  45. """
  46. Return the firing rate of the SpikeTrain.
  47. Accepts a Neo SpikeTrain, a Quantity array, or a plain NumPy array.
  48. If either a SpikeTrain or Quantity array is provided, the return value will
  49. be a quantities array, otherwise a plain NumPy array. The units of
  50. the quantities array will be the inverse of the spiketrain.
  51. The interval over which the firing rate is calculated can be optionally
  52. controlled with `t_start` and `t_stop`
  53. Parameters
  54. ----------
  55. spiketrain : Neo SpikeTrain or Quantity array or NumPy ndarray
  56. The spike times.
  57. t_start : float or Quantity scalar, optional
  58. The start time to use for the interval.
  59. If not specified, retrieved from the``t_start`
  60. attribute of `spiketrain`. If that is not present, default to
  61. `0`. Any value from `spiketrain` below this value is ignored.
  62. t_stop : float or Quantity scalar, optional
  63. The stop time to use for the time points.
  64. If not specified, retrieved from the `t_stop`
  65. attribute of `spiketrain`. If that is not present, default to
  66. the maximum value of `spiketrain`. Any value from
  67. `spiketrain` above this value is ignored.
  68. axis : int, optional
  69. The axis over which to do the calculation.
  70. Default is `None`, do the calculation over the flattened array.
  71. Returns
  72. -------
  73. float, quantities scalar, NumPy array or quantities array.
  74. Notes
  75. -----
  76. If `spiketrain` is a Quantity or Neo SpikeTrain and `t_start` or `t_stop`
  77. are not, `t_start` and `t_stop` are assumed to have the same units as
  78. `spiketrain`.
  79. Raises
  80. ------
  81. TypeError
  82. If `spiketrain` is a NumPy array and `t_start` or `t_stop`
  83. is a quantity scalar.
  84. """
  85. if t_start is None:
  86. t_start = getattr(spiketrain, 't_start', 0)
  87. found_t_start = False
  88. if t_stop is None:
  89. if hasattr(spiketrain, 't_stop'):
  90. t_stop = spiketrain.t_stop
  91. else:
  92. t_stop = np.max(spiketrain, axis=axis)
  93. found_t_start = True
  94. # figure out what units, if any, we are dealing with
  95. if hasattr(spiketrain, 'units'):
  96. units = spiketrain.units
  97. else:
  98. units = None
  99. # convert everything to the same units
  100. if hasattr(t_start, 'units'):
  101. if units is None:
  102. raise TypeError('t_start cannot be a Quantity if '
  103. 'spiketrain is not a quantity')
  104. t_start = t_start.rescale(units)
  105. elif units is not None:
  106. t_start = pq.Quantity(t_start, units=units)
  107. if hasattr(t_stop, 'units'):
  108. if units is None:
  109. raise TypeError('t_stop cannot be a Quantity if '
  110. 'spiketrain is not a quantity')
  111. t_stop = t_stop.rescale(units)
  112. elif units is not None:
  113. t_stop = pq.Quantity(t_stop, units=units)
  114. if not axis or not found_t_start:
  115. return np.sum((spiketrain >= t_start) & (spiketrain <= t_stop),
  116. axis=axis) / (t_stop - t_start)
  117. else:
  118. # this is needed to handle broadcasting between spiketrain and t_stop
  119. t_stop_test = np.expand_dims(t_stop, axis)
  120. return np.sum((spiketrain >= t_start) & (spiketrain <= t_stop_test),
  121. axis=axis) / (t_stop - t_start)
# we make `cv` an alias for scipy.stats.variation for the convenience
# of former NeuroTools users
# (coefficient of variation: standard deviation divided by the mean)
cv = scipy.stats.variation
  125. def fanofactor(spiketrains):
  126. """
  127. Evaluates the empirical Fano factor F of the spike counts of
  128. a list of `neo.core.SpikeTrain` objects.
  129. Given the vector v containing the observed spike counts (one per
  130. spike train) in the time window [t0, t1], F is defined as:
  131. F := var(v)/mean(v).
  132. The Fano factor is typically computed for spike trains representing the
  133. activity of the same neuron over different trials. The higher F, the larger
  134. the cross-trial non-stationarity. In theory for a time-stationary Poisson
  135. process, F=1.
  136. Parameters
  137. ----------
  138. spiketrains : list of neo.SpikeTrain objects, quantity arrays, numpy arrays or lists
  139. Spike trains for which to compute the Fano factor of spike counts.
  140. Returns
  141. -------
  142. fano : float or nan
  143. The Fano factor of the spike counts of the input spike trains. If an
  144. empty list is specified, or if all spike trains are empty, F:=nan.
  145. """
  146. # Build array of spike counts (one per spike train)
  147. spike_counts = np.array([len(t) for t in spiketrains])
  148. # Compute FF
  149. if all([count == 0 for count in spike_counts]):
  150. fano = np.nan
  151. else:
  152. fano = spike_counts.var() / spike_counts.mean()
  153. return fano
  154. def lv(v):
  155. """
  156. Calculate the measure of local variation LV for
  157. a sequence of time intervals between events.
  158. Given a vector v containing a sequence of intervals, the LV is
  159. defined as:
  160. .math $$ LV := \\frac{1}{N}\\sum_{i=1}^{N-1}
  161. \\frac{3(isi_i-isi_{i+1})^2}
  162. {(isi_i+isi_{i+1})^2} $$
  163. The LV is typically computed as a substitute for the classical
  164. coefficient of variation for sequences of events which include
  165. some (relatively slow) rate fluctuation. As with the CV, LV=1 for
  166. a sequence of intervals generated by a Poisson process.
  167. Parameters
  168. ----------
  169. v : quantity array, numpy array or list
  170. Vector of consecutive time intervals
  171. Returns
  172. -------
  173. lvar : float
  174. The LV of the inter-spike interval of the input sequence.
  175. Raises
  176. ------
  177. AttributeError :
  178. If an empty list is specified, or if the sequence has less
  179. than two entries, an AttributeError will be raised.
  180. ValueError :
  181. Only vector inputs are supported. If a matrix is passed to the
  182. function a ValueError will be raised.
  183. References
  184. ----------
  185. ..[1] Shinomoto, S., Shima, K., & Tanji, J. (2003). Differences in spiking
  186. patterns among cortical neurons. Neural Computation, 15, 2823–2842.
  187. """
  188. # convert to array, cast to float
  189. v = np.asarray(v)
  190. # ensure we have enough entries
  191. if v.size < 2:
  192. raise AttributeError("Input size is too small. Please provide "
  193. "an input with more than 1 entry.")
  194. # calculate LV and return result
  195. # raise error if input is multi-dimensional
  196. return 3. * np.mean(np.power(np.diff(v) / (v[:-1] + v[1:]), 2))
  197. def cv2(v):
  198. """
  199. Calculate the measure of CV2 for a sequence of time intervals between
  200. events.
  201. Given a vector v containing a sequence of intervals, the CV2 is
  202. defined as:
  203. .math $$ CV2 := \\frac{1}{N}\\sum_{i=1}^{N-1}
  204. \\frac{2|isi_{i+1}-isi_i|}
  205. {|isi_{i+1}+isi_i|} $$
  206. The CV2 is typically computed as a substitute for the classical
  207. coefficient of variation (CV) for sequences of events which include
  208. some (relatively slow) rate fluctuation. As with the CV, CV2=1 for
  209. a sequence of intervals generated by a Poisson process.
  210. Parameters
  211. ----------
  212. v : quantity array, numpy array or list
  213. Vector of consecutive time intervals
  214. Returns
  215. -------
  216. cv2 : float
  217. The CV2 of the inter-spike interval of the input sequence.
  218. Raises
  219. ------
  220. AttributeError :
  221. If an empty list is specified, or if the sequence has less
  222. than two entries, an AttributeError will be raised.
  223. AttributeError :
  224. Only vector inputs are supported. If a matrix is passed to the
  225. function an AttributeError will be raised.
  226. References
  227. ----------
  228. ..[1] Holt, G. R., Softky, W. R., Koch, C., & Douglas, R. J. (1996).
  229. Comparison of discharge variability in vitro and in vivo in cat visual
  230. cortex neurons. Journal of neurophysiology, 75(5), 1806-1814.
  231. """
  232. # convert to array, cast to float
  233. v = np.asarray(v)
  234. # ensure the input ia a vector
  235. if len(v.shape) > 1:
  236. raise AttributeError("Input shape is larger than 1. Please provide "
  237. "a vector in input.")
  238. # ensure we have enough entries
  239. if v.size < 2:
  240. raise AttributeError("Input size is too small. Please provide "
  241. "an input with more than 1 entry.")
  242. # calculate CV2 and return result
  243. return 2. * np.mean(np.absolute(np.diff(v)) / (v[:-1] + v[1:]))
  244. # sigma2kw and kw2sigma only needed for oldfct_instantaneous_rate!
  245. # to finally be taken out of Elephant
  246. def sigma2kw(form): # pragma: no cover
  247. warnings.simplefilter('always', DeprecationWarning)
  248. warnings.warn("deprecated", DeprecationWarning, stacklevel=2)
  249. if form.upper() == 'BOX':
  250. coeff = 2.0 * np.sqrt(3)
  251. elif form.upper() == 'TRI':
  252. coeff = 2.0 * np.sqrt(6)
  253. elif form.upper() == 'EPA':
  254. coeff = 2.0 * np.sqrt(5)
  255. elif form.upper() == 'GAU':
  256. coeff = 2.0 * 2.7 # > 99% of distribution weight
  257. elif form.upper() == 'ALP':
  258. coeff = 5.0
  259. elif form.upper() == 'EXP':
  260. coeff = 5.0
  261. return coeff
  262. def kw2sigma(form): # pragma: no cover
  263. warnings.simplefilter('always', DeprecationWarning)
  264. warnings.warn("deprecated", DeprecationWarning, stacklevel=2)
  265. return 1/sigma2kw(form)
  266. # to finally be taken out of Elephant
  267. def make_kernel(form, sigma, sampling_period, direction=1): # pragma: no cover
  268. """
  269. Creates kernel functions for convolution.
  270. Constructs a numeric linear convolution kernel of basic shape to be used
  271. for data smoothing (linear low pass filtering) and firing rate estimation
  272. from single trial or trial-averaged spike trains.
  273. Exponential and alpha kernels may also be used to represent postynaptic
  274. currents / potentials in a linear (current-based) model.
  275. Parameters
  276. ----------
  277. form : {'BOX', 'TRI', 'GAU', 'EPA', 'EXP', 'ALP'}
  278. Kernel form. Currently implemented forms are BOX (boxcar),
  279. TRI (triangle), GAU (gaussian), EPA (epanechnikov), EXP (exponential),
  280. ALP (alpha function). EXP and ALP are asymmetric kernel forms and
  281. assume optional parameter `direction`.
  282. sigma : Quantity
  283. Standard deviation of the distribution associated with kernel shape.
  284. This parameter defines the time resolution of the kernel estimate
  285. and makes different kernels comparable (cf. [1] for symmetric kernels).
  286. This is used here as an alternative definition to the cut-off
  287. frequency of the associated linear filter.
  288. sampling_period : float
  289. Temporal resolution of input and output.
  290. direction : {-1, 1}
  291. Asymmetric kernels have two possible directions.
  292. The values are -1 or 1, default is 1. The
  293. definition here is that for direction = 1 the
  294. kernel represents the impulse response function
  295. of the linear filter. Default value is 1.
  296. Returns
  297. -------
  298. kernel : numpy.ndarray
  299. Array of kernel. The length of this array is always an odd
  300. number to represent symmetric kernels such that the center bin
  301. coincides with the median of the numeric array, i.e for a
  302. triangle, the maximum will be at the center bin with equal
  303. number of bins to the right and to the left.
  304. norm : float
  305. For rate estimates. The kernel vector is normalized such that
  306. the sum of all entries equals unity sum(kernel)=1. When
  307. estimating rate functions from discrete spike data (0/1) the
  308. additional parameter `norm` allows for the normalization to
  309. rate in spikes per second.
  310. For example:
  311. ``rate = norm * scipy.signal.lfilter(kernel, 1, spike_data)``
  312. m_idx : int
  313. Index of the numerically determined median (center of gravity)
  314. of the kernel function.
  315. Examples
  316. --------
  317. To obtain single trial rate function of trial one should use::
  318. r = norm * scipy.signal.fftconvolve(sua, kernel)
  319. To obtain trial-averaged spike train one should use::
  320. r_avg = norm * scipy.signal.fftconvolve(sua, np.mean(X,1))
  321. where `X` is an array of shape `(l,n)`, `n` is the number of trials and
  322. `l` is the length of each trial.
  323. See also
  324. --------
  325. elephant.statistics.instantaneous_rate
  326. References
  327. ----------
  328. .. [1] Meier R, Egert U, Aertsen A, Nawrot MP, "FIND - a unified framework
  329. for neural data analysis"; Neural Netw. 2008 Oct; 21(8):1085-93.
  330. .. [2] Nawrot M, Aertsen A, Rotter S, "Single-trial estimation of neuronal
  331. firing rates - from single neuron spike trains to population activity";
  332. J. Neurosci Meth 94: 81-92; 1999.
  333. """
  334. warnings.simplefilter('always', DeprecationWarning)
  335. warnings.warn("deprecated", DeprecationWarning, stacklevel=2)
  336. forms_abbreviated = np.array(['BOX', 'TRI', 'GAU', 'EPA', 'EXP', 'ALP'])
  337. forms_verbose = np.array(['boxcar', 'triangle', 'gaussian', 'epanechnikov',
  338. 'exponential', 'alpha'])
  339. if form in forms_verbose:
  340. form = forms_abbreviated[forms_verbose == form][0]
  341. assert form.upper() in ('BOX', 'TRI', 'GAU', 'EPA', 'EXP', 'ALP'), \
  342. "form must be one of either 'BOX','TRI','GAU','EPA','EXP' or 'ALP'!"
  343. assert direction in (1, -1), "direction must be either 1 or -1"
  344. # conversion to SI units (s)
  345. if sigma < 0:
  346. raise ValueError('sigma must be positive!')
  347. SI_sigma = sigma.rescale('s').magnitude
  348. SI_time_stamp_resolution = sampling_period.rescale('s').magnitude
  349. norm = 1. / SI_time_stamp_resolution
  350. if form.upper() == 'BOX':
  351. w = 2.0 * SI_sigma * np.sqrt(3)
  352. # always odd number of bins
  353. width = 2 * np.floor(w / 2.0 / SI_time_stamp_resolution) + 1
  354. height = 1. / width
  355. kernel = np.ones((1, width)) * height # area = 1
  356. elif form.upper() == 'TRI':
  357. w = 2 * SI_sigma * np.sqrt(6)
  358. halfwidth = np.floor(w / 2.0 / SI_time_stamp_resolution)
  359. trileft = np.arange(1, halfwidth + 2)
  360. triright = np.arange(halfwidth, 0, -1) # odd number of bins
  361. triangle = np.append(trileft, triright)
  362. kernel = triangle / triangle.sum() # area = 1
  363. elif form.upper() == 'EPA':
  364. w = 2.0 * SI_sigma * np.sqrt(5)
  365. halfwidth = np.floor(w / 2.0 / SI_time_stamp_resolution)
  366. base = np.arange(-halfwidth, halfwidth + 1)
  367. parabula = base**2
  368. epanech = parabula.max() - parabula # inverse parabula
  369. kernel = epanech / epanech.sum() # area = 1
  370. elif form.upper() == 'GAU':
  371. w = 2.0 * SI_sigma * 2.7 # > 99% of distribution weight
  372. halfwidth = np.floor(w / 2.0 / SI_time_stamp_resolution) # always odd
  373. base = np.arange(-halfwidth, halfwidth + 1) * SI_time_stamp_resolution
  374. g = np.exp(
  375. -(base**2) / 2.0 / SI_sigma**2) / SI_sigma / np.sqrt(2.0 * np.pi)
  376. kernel = g / g.sum() # area = 1
  377. elif form.upper() == 'ALP':
  378. w = 5.0 * SI_sigma
  379. alpha = np.arange(
  380. 1, (
  381. 2.0 * np.floor(w / SI_time_stamp_resolution / 2.0) + 1) +
  382. 1) * SI_time_stamp_resolution
  383. alpha = (2.0 / SI_sigma**2) * alpha * np.exp(
  384. -alpha * np.sqrt(2) / SI_sigma)
  385. kernel = alpha / alpha.sum() # normalization
  386. if direction == -1:
  387. kernel = np.flipud(kernel)
  388. elif form.upper() == 'EXP':
  389. w = 5.0 * SI_sigma
  390. expo = np.arange(
  391. 1, (
  392. 2.0 * np.floor(w / SI_time_stamp_resolution / 2.0) + 1) +
  393. 1) * SI_time_stamp_resolution
  394. expo = np.exp(-expo / SI_sigma)
  395. kernel = expo / expo.sum()
  396. if direction == -1:
  397. kernel = np.flipud(kernel)
  398. kernel = kernel.ravel()
  399. m_idx = np.nonzero(kernel.cumsum() >= 0.5)[0].min()
  400. return kernel, norm, m_idx
# to finally be taken out of Elephant
def oldfct_instantaneous_rate(spiketrain, sampling_period, form,
                              sigma='auto', t_start=None, t_stop=None,
                              acausal=True, trim=False):  # pragma: no cover
    """
    Deprecated. Estimate instantaneous firing rate by kernel convolution.

    Parameters
    -----------
    spiketrain: 'neo.SpikeTrain'
        Neo object that contains spike times, the unit of the time stamps
        and t_start and t_stop of the spike train.
    sampling_period : Quantity
        time stamp resolution of the spike times. the same resolution will
        be assumed for the kernel
    form : {'BOX', 'TRI', 'GAU', 'EPA', 'EXP', 'ALP'}
        Kernel form. Currently implemented forms are BOX (boxcar),
        TRI (triangle), GAU (gaussian), EPA (epanechnikov), EXP (exponential),
        ALP (alpha function). EXP and ALP are asymmetric kernel forms and
        assume optional parameter `direction`.
    sigma : string or Quantity
        Standard deviation of the distribution associated with kernel shape.
        This parameter defines the time resolution of the kernel estimate
        and makes different kernels comparable (cf. [1] for symmetric
        kernels). This is used here as an alternative definition to the
        cut-off frequency of the associated linear filter.
        Default value is 'auto'. In this case, the optimized kernel width
        for the rate estimation is calculated according to [1]. Note that
        the automatized calculation of the kernel width ONLY works for
        gaussian kernel shapes!
    t_start : Quantity (Optional)
        start time of the interval used to compute the firing rate, if None
        assumed equal to spiketrain.t_start
        Default:None
    t_stop : Qunatity
        End time of the interval used to compute the firing rate (included).
        If none assumed equal to spiketrain.t_stop
        Default:None
    acausal : bool
        if True, acausal filtering is used, i.e., the gravity center of the
        filter function is aligned with the spike to convolve
        Default:None
    trim : bool
        if True, only the 'valid' region of the convolved
        signal are returned, i.e., the points where there
        isn't complete overlap between kernel and spike train
        are discarded
        NOTE: if True and an asymmetrical kernel is provided
        the output will not be aligned with [t_start, t_stop]

    Returns
    -------
    rate : neo.AnalogSignal
        Contains the rate estimation in unit hertz (Hz).
        Has a property 'rate.times' which contains the time axis of the rate
        estimate. The unit of this property is the same as the resolution
        that is given as an argument to the function.

    Raises
    ------
    TypeError:
        If argument value for the parameter `sigma` is not a quantity object
        or string 'auto'.

    See also
    --------
    elephant.statistics.make_kernel

    References
    ----------
    ..[1] H. Shimazaki, S. Shinomoto, J Comput Neurosci (2010) 29:171-182.
    """
    warnings.simplefilter('always', DeprecationWarning)
    warnings.warn("deprecated", DeprecationWarning, stacklevel=2)
    if sigma == 'auto':
        # Optimal kernel width per Shimazaki & Shinomoto [1]; auto mode
        # forces a gaussian kernel.
        # NOTE(review): `sskernel` is defined elsewhere in this module.
        form = 'GAU'
        unit = spiketrain.units
        kernel_width = sskernel(spiketrain.magnitude, tin=None,
                                bootstrap=True)['optw']
        sigma = kw2sigma(form) * kernel_width * unit
    elif not isinstance(sigma, pq.Quantity):
        raise TypeError('sigma must be either a quantities object or "auto".'
                        ' Found: %s, value %s' % (type(sigma), str(sigma)))
    kernel, norm, m_idx = make_kernel(form=form, sigma=sigma,
                                      sampling_period=sampling_period)
    # Rescale the spike train so one time unit equals one sampling bin;
    # integer truncation of spike times below then yields bin indices.
    units = pq.CompoundUnit(
        "%s*s" % str(sampling_period.rescale('s').magnitude))
    spiketrain = spiketrain.rescale(units)
    if t_start is None:
        t_start = spiketrain.t_start
    else:
        t_start = t_start.rescale(spiketrain.units)
    if t_stop is None:
        t_stop = spiketrain.t_stop
    else:
        t_stop = t_stop.rescale(spiketrain.units)
    # Binned (0/1-per-bin count) representation of the spike train.
    time_vector = np.zeros(int((t_stop - t_start)) + 1)
    spikes_slice = spiketrain.time_slice(t_start, t_stop) \
        if len(spiketrain) else np.array([])
    for spike in spikes_slice:
        index = int((spike - t_start))
        time_vector[index] += 1
    # Convolve with the kernel; `norm` converts counts/bin to spikes/s.
    r = norm * scipy.signal.fftconvolve(time_vector, kernel, 'full')
    if np.any(r < 0):
        warnings.warn('Instantaneous firing rate approximation contains '
                      'negative values, possibly caused due to machine '
                      'precision errors')
    # Trim the 'full' convolution back to the requested window. The two
    # branches below are currently identical; the acausal flag presumably
    # was meant to shift the alignment — NOTE(review): confirm intent.
    if acausal:
        if not trim:
            r = r[m_idx:-(kernel.size - m_idx)]
        elif trim:
            r = r[2 * m_idx:-2 * (kernel.size - m_idx)]
            t_start = t_start + m_idx * spiketrain.units
            t_stop = t_stop - ((kernel.size) - m_idx) * spiketrain.units
    else:
        if not trim:
            r = r[m_idx:-(kernel.size - m_idx)]
        elif trim:
            r = r[2 * m_idx:-2 * (kernel.size - m_idx)]
            t_start = t_start + m_idx * spiketrain.units
            t_stop = t_stop - ((kernel.size) - m_idx) * spiketrain.units
    rate = neo.AnalogSignal(signal=r.reshape(r.size, 1),
                            sampling_period=sampling_period,
                            units=pq.Hz, t_start=t_start)
    return rate, sigma
  528. def instantaneous_rate(spiketrain, sampling_period, kernel='auto',
  529. cutoff=5.0, t_start=None, t_stop=None, trim=False):
  530. """
  531. Estimates instantaneous firing rate by kernel convolution.
  532. Parameters
  533. -----------
  534. spiketrain : neo.SpikeTrain or list of neo.SpikeTrain objects
  535. Neo object that contains spike times, the unit of the time stamps
  536. and t_start and t_stop of the spike train.
  537. sampling_period : Time Quantity
  538. Time stamp resolution of the spike times. The same resolution will
  539. be assumed for the kernel
  540. kernel : string 'auto' or callable object of :class:`Kernel` from module
  541. 'kernels.py'. Currently implemented kernel forms are rectangular,
  542. triangular, epanechnikovlike, gaussian, laplacian, exponential,
  543. and alpha function.
  544. Example: kernel = kernels.RectangularKernel(sigma=10*ms, invert=False)
  545. The kernel is used for convolution with the spike train and its
  546. standard deviation determines the time resolution of the instantaneous
  547. rate estimation.
  548. Default: 'auto'. In this case, the optimized kernel width for the
  549. rate estimation is calculated according to [1] and with this width
  550. a gaussian kernel is constructed. Automatized calculation of the
  551. kernel width is not available for other than gaussian kernel shapes.
  552. cutoff : float
  553. This factor determines the cutoff of the probability distribution of
  554. the kernel, i.e., the considered width of the kernel in terms of
  555. multiples of the standard deviation sigma.
  556. Default: 5.0
  557. t_start : Time Quantity (optional)
  558. Start time of the interval used to compute the firing rate. If None
  559. assumed equal to spiketrain.t_start
  560. Default: None
  561. t_stop : Time Quantity (optional)
  562. End time of the interval used to compute the firing rate (included).
  563. If None assumed equal to spiketrain.t_stop
  564. Default: None
  565. trim : bool
  566. if False, the output of the Fast Fourier Transformation being a longer
  567. vector than the input vector by the size of the kernel is reduced back
  568. to the original size of the considered time interval of the spiketrain
  569. using the median of the kernel.
  570. if True, only the region of the convolved signal is returned, where
  571. there is complete overlap between kernel and spike train. This is
  572. achieved by reducing the length of the output of the Fast Fourier
  573. Transformation by a total of two times the size of the kernel, and
  574. t_start and t_stop are adjusted.
  575. Default: False
  576. Returns
  577. -------
  578. rate : neo.AnalogSignal
  579. Contains the rate estimation in unit hertz (Hz).
  580. Has a property 'rate.times' which contains the time axis of the rate
  581. estimate. The unit of this property is the same as the resolution that
  582. is given via the argument 'sampling_period' to the function.
  583. Raises
  584. ------
  585. TypeError:
  586. If `spiketrain` is not an instance of :class:`SpikeTrain` of Neo.
  587. If `sampling_period` is not a time quantity.
  588. If `kernel` is neither instance of :class:`Kernel` or string 'auto'.
  589. If `cutoff` is neither float nor int.
  590. If `t_start` and `t_stop` are neither None nor a time quantity.
  591. If `trim` is not bool.
  592. ValueError:
  593. If `sampling_period` is smaller than zero.
  594. Example
  595. --------
  596. kernel = kernels.AlphaKernel(sigma = 0.05*s, invert = True)
  597. rate = instantaneous_rate(spiketrain, sampling_period = 2*ms, kernel)
  598. References
  599. ----------
  600. ..[1] H. Shimazaki, S. Shinomoto, J Comput Neurosci (2010) 29:171–182.
  601. """
  602. # Merge spike trains if list of spike trains given:
  603. if isinstance(spiketrain, list):
  604. _check_consistency_of_spiketrainlist(spiketrain, t_start=t_start, t_stop=t_stop)
  605. if t_start is None:
  606. t_start = spiketrain[0].t_start
  607. if t_stop is None:
  608. t_stop = spiketrain[0].t_stop
  609. spikes = np.concatenate([st.magnitude for st in spiketrain])
  610. merged_spiketrain = SpikeTrain(np.sort(spikes), units=spiketrain[0].units,
  611. t_start=t_start, t_stop=t_stop)
  612. return instantaneous_rate(merged_spiketrain, sampling_period=sampling_period,
  613. kernel=kernel, cutoff=cutoff, t_start=t_start,
  614. t_stop=t_stop, trim=trim)
  615. # Checks of input variables:
  616. if not isinstance(spiketrain, SpikeTrain):
  617. raise TypeError(
  618. "spiketrain must be instance of :class:`SpikeTrain` of Neo!\n"
  619. " Found: %s, value %s" % (type(spiketrain), str(spiketrain)))
  620. if not (isinstance(sampling_period, pq.Quantity) and
  621. sampling_period.dimensionality.simplified ==
  622. pq.Quantity(1, "s").dimensionality):
  623. raise TypeError(
  624. "The sampling period must be a time quantity!\n"
  625. " Found: %s, value %s" % (type(sampling_period), str(sampling_period)))
  626. if sampling_period.magnitude < 0:
  627. raise ValueError("The sampling period must be larger than zero.")
  628. if kernel == 'auto':
  629. kernel_width = sskernel(spiketrain.magnitude, tin=None,
  630. bootstrap=True)['optw']
  631. if kernel_width is None:
  632. raise ValueError(
  633. "Unable to calculate optimal kernel width for "
  634. "instantaneous rate from input data.")
  635. unit = spiketrain.units
  636. sigma = 1 / (2.0 * 2.7) * kernel_width * unit
  637. # factor 2.0 connects kernel width with its half width,
  638. # factor 2.7 connects half width of Gaussian distribution with
  639. # 99% probability mass with its standard deviation.
  640. kernel = kernels.GaussianKernel(sigma)
  641. elif not isinstance(kernel, kernels.Kernel):
  642. raise TypeError(
  643. "kernel must be either instance of :class:`Kernel` "
  644. "or the string 'auto'!\n"
  645. " Found: %s, value %s" % (type(kernel), str(kernel)))
  646. if not (isinstance(cutoff, float) or isinstance(cutoff, int)):
  647. raise TypeError("cutoff must be float or integer!")
  648. if not (t_start is None or (isinstance(t_start, pq.Quantity) and
  649. t_start.dimensionality.simplified ==
  650. pq.Quantity(1, "s").dimensionality)):
  651. raise TypeError("t_start must be a time quantity!")
  652. if not (t_stop is None or (isinstance(t_stop, pq.Quantity) and
  653. t_stop.dimensionality.simplified ==
  654. pq.Quantity(1, "s").dimensionality)):
  655. raise TypeError("t_stop must be a time quantity!")
  656. if not (isinstance(trim, bool)):
  657. raise TypeError("trim must be bool!")
  658. # main function:
  659. units = pq.CompoundUnit("%s*s" % str(sampling_period.rescale('s').magnitude))
  660. spiketrain = spiketrain.rescale(units)
  661. if t_start is None:
  662. t_start = spiketrain.t_start
  663. else:
  664. t_start = t_start.rescale(spiketrain.units)
  665. if t_stop is None:
  666. t_stop = spiketrain.t_stop
  667. else:
  668. t_stop = t_stop.rescale(spiketrain.units)
  669. time_vector = np.zeros(int((t_stop - t_start)) + 1)
  670. spikes_slice = spiketrain.time_slice(t_start, t_stop) \
  671. if len(spiketrain) else np.array([])
  672. for spike in spikes_slice:
  673. index = int((spike - t_start))
  674. time_vector[index] += 1
  675. if cutoff < kernel.min_cutoff:
  676. cutoff = kernel.min_cutoff
  677. warnings.warn("The width of the kernel was adjusted to a minimally "
  678. "allowed width.")
  679. t_arr = np.arange(-cutoff * kernel.sigma.rescale(units).magnitude,
  680. cutoff * kernel.sigma.rescale(units).magnitude +
  681. sampling_period.rescale(units).magnitude,
  682. sampling_period.rescale(units).magnitude) * units
  683. r = scipy.signal.fftconvolve(time_vector,
  684. kernel(t_arr).rescale(pq.Hz).magnitude, 'full')
  685. if np.any(r < 0):
  686. warnings.warn("Instantaneous firing rate approximation contains "
  687. "negative values, possibly caused due to machine "
  688. "precision errors.")
  689. if not trim:
  690. r = r[kernel.median_index(t_arr):-(kernel(t_arr).size -
  691. kernel.median_index(t_arr))]
  692. elif trim:
  693. r = r[2 * kernel.median_index(t_arr):-2 * (kernel(t_arr).size -
  694. kernel.median_index(t_arr))]
  695. t_start += kernel.median_index(t_arr) * spiketrain.units
  696. t_stop -= (kernel(t_arr).size -
  697. kernel.median_index(t_arr)) * spiketrain.units
  698. rate = neo.AnalogSignal(signal=r.reshape(r.size, 1),
  699. sampling_period=sampling_period,
  700. units=pq.Hz, t_start=t_start, t_stop=t_stop)
  701. return rate
  702. def time_histogram(spiketrains, binsize, t_start=None, t_stop=None,
  703. output='counts', binary=False):
  704. """
  705. Time Histogram of a list of :attr:`neo.SpikeTrain` objects.
  706. Parameters
  707. ----------
  708. spiketrains : List of neo.SpikeTrain objects
  709. Spiketrains with a common time axis (same `t_start` and `t_stop`)
  710. binsize : quantities.Quantity
  711. Width of the histogram's time bins.
  712. t_start, t_stop : Quantity (optional)
  713. Start and stop time of the histogram. Only events in the input
  714. `spiketrains` falling between `t_start` and `t_stop` (both included)
  715. are considered in the histogram. If `t_start` and/or `t_stop` are not
  716. specified, the maximum `t_start` of all :attr:spiketrains is used as
  717. `t_start`, and the minimum `t_stop` is used as `t_stop`.
  718. Default: t_start = t_stop = None
  719. output : str (optional)
  720. Normalization of the histogram. Can be one of:
  721. * `counts`'`: spike counts at each bin (as integer numbers)
  722. * `mean`: mean spike counts per spike train
  723. * `rate`: mean spike rate per spike train. Like 'mean', but the
  724. counts are additionally normalized by the bin width.
  725. binary : bool (optional)
  726. If **True**, indicates whether all spiketrain objects should first
  727. binned to a binary representation (using the `BinnedSpikeTrain` class
  728. in the `conversion` module) and the calculation of the histogram is
  729. based on this representation.
  730. Note that the output is not binary, but a histogram of the converted,
  731. binary representation.
  732. Default: False
  733. Returns
  734. -------
  735. time_hist : neo.AnalogSignal
  736. A neo.AnalogSignal object containing the histogram values.
  737. `AnalogSignal[j]` is the histogram computed between
  738. `t_start + j * binsize` and `t_start + (j + 1) * binsize`.
  739. See also
  740. --------
  741. elephant.conversion.BinnedSpikeTrain
  742. """
  743. min_tstop = 0
  744. if t_start is None:
  745. # Find the internal range for t_start, where all spike trains are
  746. # defined; cut all spike trains taking that time range only
  747. max_tstart, min_tstop = conv._get_start_stop_from_input(spiketrains)
  748. t_start = max_tstart
  749. if not all([max_tstart == t.t_start for t in spiketrains]):
  750. warnings.warn(
  751. "Spiketrains have different t_start values -- "
  752. "using maximum t_start as t_start.")
  753. if t_stop is None:
  754. # Find the internal range for t_stop
  755. if min_tstop:
  756. t_stop = min_tstop
  757. if not all([min_tstop == t.t_stop for t in spiketrains]):
  758. warnings.warn(
  759. "Spiketrains have different t_stop values -- "
  760. "using minimum t_stop as t_stop.")
  761. else:
  762. min_tstop = conv._get_start_stop_from_input(spiketrains)[1]
  763. t_stop = min_tstop
  764. if not all([min_tstop == t.t_stop for t in spiketrains]):
  765. warnings.warn(
  766. "Spiketrains have different t_stop values -- "
  767. "using minimum t_stop as t_stop.")
  768. sts_cut = [st.time_slice(t_start=t_start, t_stop=t_stop) for st in
  769. spiketrains]
  770. # Bin the spike trains and sum across columns
  771. bs = conv.BinnedSpikeTrain(sts_cut, t_start=t_start, t_stop=t_stop,
  772. binsize=binsize)
  773. if binary:
  774. bin_hist = bs.to_sparse_bool_array().sum(axis=0)
  775. else:
  776. bin_hist = bs.to_sparse_array().sum(axis=0)
  777. # Flatten array
  778. bin_hist = np.ravel(bin_hist)
  779. # Renormalise the histogram
  780. if output == 'counts':
  781. # Raw
  782. bin_hist = bin_hist * pq.dimensionless
  783. elif output == 'mean':
  784. # Divide by number of input spike trains
  785. bin_hist = bin_hist * 1. / len(spiketrains) * pq.dimensionless
  786. elif output == 'rate':
  787. # Divide by number of input spike trains and bin width
  788. bin_hist = bin_hist * 1. / len(spiketrains) / binsize
  789. else:
  790. raise ValueError('Parameter output is not valid.')
  791. return neo.AnalogSignal(signal=bin_hist.reshape(bin_hist.size, 1),
  792. sampling_period=binsize, units=bin_hist.units,
  793. t_start=t_start)
  794. def complexity_pdf(spiketrains, binsize):
  795. """
  796. Complexity Distribution [1] of a list of :attr:`neo.SpikeTrain` objects.
  797. Probability density computed from the complexity histogram which is the
  798. histogram of the entries of the population histogram of clipped (binary)
  799. spike trains computed with a bin width of binsize.
  800. It provides for each complexity (== number of active neurons per bin) the
  801. number of occurrences. The normalization of that histogram to 1 is the
  802. probability density.
  803. Parameters
  804. ----------
  805. spiketrains : List of neo.SpikeTrain objects
  806. Spiketrains with a common time axis (same `t_start` and `t_stop`)
  807. binsize : quantities.Quantity
  808. Width of the histogram's time bins.
  809. Returns
  810. -------
  811. time_hist : neo.AnalogSignal
  812. A neo.AnalogSignal object containing the histogram values.
  813. `AnalogSignal[j]` is the histogram computed between .
  814. See also
  815. --------
  816. elephant.conversion.BinnedSpikeTrain
  817. References
  818. ----------
  819. [1]Gruen, S., Abeles, M., & Diesmann, M. (2008). Impact of higher-order
  820. correlations on coincidence distributions of massively parallel data.
  821. In Dynamic Brain-from Neural Spikes to Behaviors (pp. 96-114).
  822. Springer Berlin Heidelberg.
  823. """
  824. # Computing the population histogram with parameter binary=True to clip the
  825. # spike trains before summing
  826. pophist = time_histogram(spiketrains, binsize, binary=True)
  827. # Computing the histogram of the entries of pophist (=Complexity histogram)
  828. complexity_hist = np.histogram(
  829. pophist.magnitude, bins=range(0, len(spiketrains) + 2))[0]
  830. # Normalization of the Complexity Histogram to 1 (probabilty distribution)
  831. complexity_hist = complexity_hist / complexity_hist.sum()
  832. # Convert the Complexity pdf to an neo.AnalogSignal
  833. complexity_distribution = neo.AnalogSignal(
  834. np.array(complexity_hist).reshape(len(complexity_hist), 1) *
  835. pq.dimensionless, t_start=0 * pq.dimensionless,
  836. sampling_period=1 * pq.dimensionless)
  837. return complexity_distribution
  838. """Kernel Bandwidth Optimization.
  839. Python implementation by Subhasis Ray.
  840. Original matlab code (sskernel.m) here:
  841. http://2000.jukuin.keio.ac.jp/shimazaki/res/kernel.html
  842. This was translated into Python by Subhasis Ray, NCBS. Tue Jun 10
  843. 23:01:43 IST 2014
  844. """
  845. def nextpow2(x):
  846. """ Return the smallest integral power of 2 that >= x """
  847. n = 2
  848. while n < x:
  849. n = 2 * n
  850. return n
  851. def fftkernel(x, w):
  852. """
  853. y = fftkernel(x,w)
  854. Function `fftkernel' applies the Gauss kernel smoother to an input
  855. signal using FFT algorithm.
  856. Input argument
  857. x: Sample signal vector.
  858. w: Kernel bandwidth (the standard deviation) in unit of
  859. the sampling resolution of x.
  860. Output argument
  861. y: Smoothed signal.
  862. MAY 5/23, 2012 Author Hideaki Shimazaki
  863. RIKEN Brain Science Insitute
  864. http://2000.jukuin.keio.ac.jp/shimazaki
  865. Ported to Python: Subhasis Ray, NCBS. Tue Jun 10 10:42:38 IST 2014
  866. """
  867. L = len(x)
  868. Lmax = L + 3 * w
  869. n = nextpow2(Lmax)
  870. X = np.fft.fft(x, n)
  871. f = np.arange(0, n, 1.0) / n
  872. f = np.concatenate((-f[:int(n / 2)], f[int(n / 2):0:-1]))
  873. K = np.exp(-0.5 * (w * 2 * np.pi * f)**2)
  874. y = np.fft.ifft(X * K, n)
  875. y = y[:L].copy()
  876. return y
  877. def logexp(x):
  878. if x < 1e2:
  879. y = np.log(1 + np.exp(x))
  880. else:
  881. y = x
  882. return y
  883. def ilogexp(x):
  884. if x < 1e2:
  885. y = np.log(np.exp(x) - 1)
  886. else:
  887. y = x
  888. return y
  889. def cost_function(x, N, w, dt):
  890. """
  891. The cost function
  892. Cn(w) = sum_{i,j} int k(x - x_i) k(x - x_j) dx - 2 sum_{i~=j} k(x_i - x_j)
  893. """
  894. yh = np.abs(fftkernel(x, w / dt)) # density
  895. # formula for density
  896. C = np.sum(yh ** 2) * dt - 2 * np.sum(yh * x) * \
  897. dt + 2 / np.sqrt(2 * np.pi) / w / N
  898. C = C * N * N
  899. # formula for rate
  900. # C = dt*sum( yh.^2 - 2*yh.*y_hist + 2/sqrt(2*pi)/w*y_hist )
  901. return C, yh
  902. def sskernel(spiketimes, tin=None, w=None, bootstrap=False):
  903. """
  904. Calculates optimal fixed kernel bandwidth.
  905. spiketimes: sequence of spike times (sorted to be ascending).
  906. tin: (optional) time points at which the kernel bandwidth is to be estimated.
  907. w: (optional) vector of kernel bandwidths. If specified, optimal
  908. bandwidth is selected from this.
  909. bootstrap (optional): whether to calculate the 95% confidence
  910. interval. (default False)
  911. Returns
  912. A dictionary containing the following key value pairs:
  913. 'y': estimated density,
  914. 't': points at which estimation was computed,
  915. 'optw': optimal kernel bandwidth,
  916. 'w': kernel bandwidths examined,
  917. 'C': cost functions of w,
  918. 'confb95': (lower bootstrap confidence level, upper bootstrap confidence level),
  919. 'yb': bootstrap samples.
  920. If no optimal kernel could be found, all entries of the dictionary are set
  921. to None.
  922. Ref: Shimazaki, Hideaki, and Shigeru Shinomoto. 2010. Kernel
  923. Bandwidth Optimization in Spike Rate Estimation. Journal of
  924. Computational Neuroscience 29 (1-2):
  925. 171-82. doi:10.1007/s10827-009-0180-4.
  926. """
  927. if tin is None:
  928. time = np.max(spiketimes) - np.min(spiketimes)
  929. isi = np.diff(spiketimes)
  930. isi = isi[isi > 0].copy()
  931. dt = np.min(isi)
  932. tin = np.linspace(np.min(spiketimes),
  933. np.max(spiketimes),
  934. min(int(time / dt + 0.5), 1000)) # The 1000 seems somewhat arbitrary
  935. t = tin
  936. else:
  937. time = np.max(tin) - np.min(tin)
  938. spiketimes = spiketimes[(spiketimes >= np.min(tin)) &
  939. (spiketimes <= np.max(tin))].copy()
  940. isi = np.diff(spiketimes)
  941. isi = isi[isi > 0].copy()
  942. dt = np.min(isi)
  943. if dt > np.min(np.diff(tin)):
  944. t = np.linspace(np.min(tin), np.max(tin),
  945. min(int(time / dt + 0.5), 1000))
  946. else:
  947. t = tin
  948. dt = np.min(np.diff(tin))
  949. yhist, bins = np.histogram(spiketimes, np.r_[t - dt / 2, t[-1] + dt / 2])
  950. N = np.sum(yhist)
  951. yhist = yhist / (N * dt) # density
  952. optw = None
  953. y = None
  954. if w is not None:
  955. C = np.zeros(len(w))
  956. Cmin = np.inf
  957. for k, w_ in enumerate(w):
  958. C[k], yh = cost_function(yhist, N, w_, dt)
  959. if C[k] < Cmin:
  960. Cmin = C[k]
  961. optw = w_
  962. y = yh
  963. else:
  964. # Golden section search on a log-exp scale
  965. wmin = 2 * dt
  966. wmax = max(spiketimes) - min(spiketimes)
  967. imax = 20 # max iterations
  968. w = np.zeros(imax)
  969. C = np.zeros(imax)
  970. tolerance = 1e-5
  971. phi = 0.5 * (np.sqrt(5) + 1) # The Golden ratio
  972. a = ilogexp(wmin)
  973. b = ilogexp(wmax)
  974. c1 = (phi - 1) * a + (2 - phi) * b
  975. c2 = (2 - phi) * a + (phi - 1) * b
  976. f1, y1 = cost_function(yhist, N, logexp(c1), dt)
  977. f2, y2 = cost_function(yhist, N, logexp(c2), dt)
  978. k = 0
  979. while (np.abs(b - a) > (tolerance * (np.abs(c1) + np.abs(c2))))\
  980. and (k < imax):
  981. if f1 < f2:
  982. b = c2
  983. c2 = c1
  984. c1 = (phi - 1) * a + (2 - phi) * b
  985. f2 = f1
  986. f1, y1 = cost_function(yhist, N, logexp(c1), dt)
  987. w[k] = logexp(c1)
  988. C[k] = f1
  989. optw = logexp(c1)
  990. y = y1 / (np.sum(y1 * dt))
  991. else:
  992. a = c1
  993. c1 = c2
  994. c2 = (2 - phi) * a + (phi - 1) * b
  995. f1 = f2
  996. f2, y2 = cost_function(yhist, N, logexp(c2), dt)
  997. w[k] = logexp(c2)
  998. C[k] = f2
  999. optw = logexp(c2)
  1000. y = y2 / np.sum(y2 * dt)
  1001. k = k + 1
  1002. # Bootstrap confidence intervals
  1003. confb95 = None
  1004. yb = None
  1005. # If bootstrap is requested, and an optimal kernel was found
  1006. if bootstrap and optw:
  1007. nbs = 1000
  1008. yb = np.zeros((nbs, len(tin)))
  1009. for ii in range(nbs):
  1010. idx = np.floor(np.random.rand(N) * N).astype(int)
  1011. xb = spiketimes[idx]
  1012. y_histb, bins = np.histogram(
  1013. xb, np.r_[t - dt / 2, t[-1] + dt / 2]) / dt / N
  1014. yb_buf = fftkernel(y_histb, optw / dt).real
  1015. yb_buf = yb_buf / np.sum(yb_buf * dt)
  1016. yb[ii, :] = np.interp(tin, t, yb_buf)
  1017. ybsort = np.sort(yb, axis=0)
  1018. y95b = ybsort[np.floor(0.05 * nbs).astype(int), :]
  1019. y95u = ybsort[np.floor(0.95 * nbs).astype(int), :]
  1020. confb95 = (y95b, y95u)
  1021. # Only perform interpolation if y could be calculated
  1022. if y is not None:
  1023. y = np.interp(tin, t, y)
  1024. return {'y': y,
  1025. 't': tin,
  1026. 'optw': optw,
  1027. 'w': w,
  1028. 'C': C,
  1029. 'confb95': confb95,
  1030. 'yb': yb}
  1031. def _check_consistency_of_spiketrainlist(spiketrainlist, t_start=None, t_stop=None):
  1032. for spiketrain in spiketrainlist:
  1033. if not isinstance(spiketrain, SpikeTrain):
  1034. raise TypeError(
  1035. "spike train must be instance of :class:`SpikeTrain` of Neo!\n"
  1036. " Found: %s, value %s" % (type(spiketrain), str(spiketrain)))
  1037. if t_start is None and not spiketrain.t_start == spiketrainlist[0].t_start:
  1038. raise ValueError(
  1039. "the spike trains must have the same t_start!")
  1040. if t_stop is None and not spiketrain.t_stop == spiketrainlist[0].t_stop:
  1041. raise ValueError(
  1042. "the spike trains must have the same t_stop!")
  1043. if not spiketrain.units == spiketrainlist[0].units:
  1044. raise ValueError(
  1045. "the spike trains must have the same units!")
  1046. return None