
asciispiketrainio.py

# -*- coding: utf-8 -*-
"""
Class for reading/writing SpikeTrains in a text file.
It is the simple case where different spiketrains are written line by line.

Supported : Read/Write

Author: sgarcia
"""

import os

import numpy as np
import quantities as pq

from neo.io.baseio import BaseIO
from neo.core import Segment, SpikeTrain
class AsciiSpikeTrainIO(BaseIO):
    """
    Class for reading/writing SpikeTrains in a text file.
    Each SpikeTrain is a line.

    Usage:
        >>> from neo import io
        >>> r = io.AsciiSpikeTrainIO(filename='File_ascii_spiketrain_1.txt')
        >>> seg = r.read_segment(lazy=False, cascade=True)
        >>> print(seg.spiketrains)  # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
        [<SpikeTrain(array([ 3.89981604,  4.73258781,  0.608428  ,  4.60246277,  1.23805797,
        ...
    """
    is_readable = True
    is_writable = True

    supported_objects = [Segment, SpikeTrain]
    readable_objects = [Segment]
    writeable_objects = [Segment]

    has_header = False
    is_streameable = False

    read_params = {
        Segment: [
            ('delimiter', {'value': '\t', 'possible': ['\t', ' ', ',', ';']}),
            ('t_start', {'value': 0.}),
        ]
    }
    write_params = {
        Segment: [
            ('delimiter', {'value': '\t', 'possible': ['\t', ' ', ',', ';']}),
        ]
    }

    name = None
    extensions = ['txt']

    mode = 'file'
    def __init__(self, filename=None):
        """
        This class reads/writes SpikeTrains in a text file.
        Each row is a spiketrain.

        **Arguments**
            filename : the filename to read/write
        """
        BaseIO.__init__(self)
        self.filename = filename
    def read_segment(self,
                     lazy=False,
                     cascade=True,
                     delimiter='\t',
                     t_start=0. * pq.s,
                     unit=pq.s,
                     ):
        """
        Arguments:
            delimiter : column delimiter in the file: '\t', one space, two spaces, ',' or ';'
            t_start : start time of all spiketrains (0 by default)
            unit : unit of the spike times; can be a str or directly a Quantity
        """
        unit = pq.Quantity(1, unit)

        seg = Segment(file_origin=os.path.basename(self.filename))
        if not cascade:
            return seg

        f = open(self.filename, 'r')
        for i, line in enumerate(f):
            # One line = one spike train; drop empty fields from leading or
            # trailing delimiters.
            alldata = line[:-1].split(delimiter)
            if alldata[-1] == '':
                alldata = alldata[:-1]
            if alldata[0] == '':
                alldata = alldata[1:]

            if lazy:
                spike_times = []
                t_stop = t_start
            else:
                spike_times = np.array(alldata).astype('f')
                t_stop = spike_times.max() * unit

            sptr = SpikeTrain(spike_times * unit, t_start=t_start, t_stop=t_stop)
            if lazy:
                sptr.lazy_shape = len(alldata)

            sptr.annotate(channel_index=i)
            seg.spiketrains.append(sptr)
        f.close()

        seg.create_many_to_one_relationship()
        return seg
    def write_segment(self, segment,
                      delimiter='\t',
                      ):
        """
        Write the SpikeTrains of a Segment to a txt file.
        Each row is a spiketrain.

        Arguments:
            segment : the Segment to write. Only spiketrains will be written.
            delimiter : column delimiter in the file: '\t', one space, two spaces, ',' or ';'

        Note: the information about t_start is lost.
        """
        f = open(self.filename, 'w')
        for s, sptr in enumerate(segment.spiketrains):
            for ts in sptr:
                f.write('%f%s' % (ts, delimiter))
            f.write('\n')
        f.close()
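

For reference, below is a minimal round-trip sketch using the class as defined above: it writes two spike trains to a text file and reads them back. The read_segment arguments follow the signature in this file (newer neo releases may differ), and 'spiketrains.txt' is only an illustrative filename.

# Sketch: round-trip write/read with AsciiSpikeTrainIO (assumes the
# read_segment signature defined above; 'spiketrains.txt' is hypothetical).
import quantities as pq
from neo import io
from neo.core import Segment, SpikeTrain

# Build a Segment holding two spike trains (times in seconds).
seg = Segment()
seg.spiketrains.append(SpikeTrain([0.5, 1.2, 3.3] * pq.s, t_stop=5. * pq.s))
seg.spiketrains.append(SpikeTrain([0.9, 2.1, 4.4] * pq.s, t_stop=5. * pq.s))

# Write: one line per spike train, spike times separated by the delimiter.
w = io.AsciiSpikeTrainIO(filename='spiketrains.txt')
w.write_segment(seg, delimiter='\t')
# The file now contains one tab-delimited row per train, e.g.:
# 0.500000	1.200000	3.300000
# 0.900000	2.100000	4.400000

# Read back: t_start and unit must be supplied, because the text format
# stores only the spike times themselves.
r = io.AsciiSpikeTrainIO(filename='spiketrains.txt')
seg2 = r.read_segment(delimiter='\t', t_start=0. * pq.s, unit=pq.s)
print(seg2.spiketrains)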