Scheduled service maintenance on November 22


On Friday, November 22, 2024, between 06:00 CET and 18:00 CET, GIN services will undergo planned maintenance. Extended service interruptions should be expected. We will try to keep downtimes to a minimum, but recommend that users avoid critical tasks, large data uploads, or DOI requests during this time.

We apologize for any inconvenience.

convert_to_nix.py 6.6 KB

(The full script convert_to_nix.py — 184 lines — is reproduced below.)
  1. """
  2. This script loads a complete session in the blackrock format and converts it to a single nix file
  3. """
  4. import os
  5. import numpy as np
  6. import quantities as pq
  7. import neo
  8. from neo.test.tools import (assert_same_sub_schema,
  9. assert_same_annotations,
  10. assert_same_array_annotations)
  11. from elephant.signal_processing import butter
  12. from reachgraspio import reachgraspio
  13. # Choose which session you want to convert into a nix file
  14. session = "i140703-001"
  15. # session = "l101210-001"
  16. # Input data. i.e., original Blackrock files and odML
  17. dataset_dir = '../datasets_blackrock'
  18. session_path = f'{dataset_dir}/{session}'
  19. odml_dir = os.path.join('..', 'datasets_blackrock')
  20. # Output for the nix files
  21. nix_dataset_dir = '../datasets_nix'
  22. nix_session_path = f'{nix_dataset_dir}/{session}'
  23. ##### LOAD BLACKROCK FILES ############
  24. session = reachgraspio.ReachGraspIO(
  25. filename=session_path,
  26. odml_directory=odml_dir,
  27. verbose=False)
  28. block = session.read_block(lazy=False, load_waveforms=True)
  29. # =============================================================================
  30. # Create offline filtered LFP
  31. #
  32. # Here, we construct one offline filtered LFP from each ns5 (monkey L) or ns6
  33. # (monkey N) raw recording trace. For monkey N, this filtered LFP can be
  34. # compared to the LFPs in the ns2 file (note that monkey L contains only
  35. # behavioral signals in the ns2 file). Also, we assign telling names to each
  36. # Neo AnalogSignal, which is used for plotting later on in this script.
  37. # =============================================================================
  38. nsx_to_anasig_name = {2: 'LFP signal (online filtered)',
  39. 5: 'raw signal',
  40. 6: 'raw signal'}
  41. # this factor was experimentally determined as an approximate shift introduced
  42. # by the online filtering. Here, we integrate this shift such that the offline
  43. # filtered signal is aligned to the online filtered signal (if that were
  44. # available)
  45. time_shift_factor = -0.42*pq.ms
  46. filtered_anasig = None
  47. raw_anasig = None
  48. # identify neuronal signals and provide labels for plotting
  49. for anasig in block.segments[0].analogsignals:
  50. # skip non-neuronal signals
  51. if not anasig.annotations['neural_signal']:
  52. continue
  53. # identify nsx source of signals in this AnalogSignal object
  54. if 'nsx' in anasig.annotations:
  55. nsx = anasig.annotations['nsx']
  56. else:
  57. nsx = np.unique(anasig.array_annotations['nsx'])
  58. assert len(nsx) == 1, 'Different nsx sources in AnalogSignal'
  59. nsx = nsx[0]
  60. if nsx == 2:
  61. # AnalogSignal is LFP from ns2
  62. filtered_anasig = anasig
  63. elif nsx in [5, 6]:
  64. # AnalogSignal is raw signal from ns5 or ns6
  65. raw_anasig = anasig
  66. if filtered_anasig is None:
  67. print("Filtering raw time series to obtain LFP")
  68. for f in range(raw_anasig.shape[1]):
  69. # filtering must be done channel by channel for memory reasons (requires approx. 32 GB RAM)
  70. print(f"Processing channel {f}")
  71. filtered_signal = butter(
  72. raw_anasig[:, f],
  73. highpass_freq=None,
  74. lowpass_freq=250 * pq.Hz,
  75. filter_function='sosfiltfilt',
  76. order=4)
  77. downsampled_signal=filtered_signal.downsample(30).time_shift(time_shift_factor)
  78. # first run? Create a new Analogsignal
  79. if f == 0:
  80. offline_filtered_anasig = neo.AnalogSignal(
  81. np.zeros((downsampled_signal.shape[0], raw_anasig.shape[1])) *\
  82. downsampled_signal.units,
  83. t_start=downsampled_signal.t_start,
  84. sampling_rate=downsampled_signal.sampling_rate)
  85. offline_filtered_anasig[:, f] = downsampled_signal
  86. if 'nsx' in anasig.annotations:
  87. nsx = anasig.annotations['nsx']
  88. else:
  89. nsx = anasig.array_annotations["nsx"][0]
  90. offline_filtered_anasig.name = f"NeuralTimeSeriesDownsampled"
  91. offline_filtered_anasig.description = "Downsampled continuous neuronal recordings, where the downsampling was " \
  92. "performed off-line during post-processing"
  93. # Attach all offline filtered LFPs to the segment of data
  94. block.segments[0].analogsignals.append(offline_filtered_anasig)
  95. ##### SAVE NIX FILE ###################
  96. nix_filename = nix_session_path + '.nix'
  97. if os.path.exists(nix_filename):
  98. print('Nix file already exists and will not be overwritten.')
  99. else:
  100. with neo.NixIO(nix_filename) as io:
  101. print(f'Saving nix file at {nix_filename}')
  102. io.write_block(block)
  103. ##### VALIDATION OF FILE CONTENT ######
  104. with neo.NixIO(nix_filename, mode='ro') as io:
  105. blocks = io.read_all_blocks()
  106. assert len(blocks) == 1
  107. block_new = blocks[0]
  108. for seg_old, seg_new in zip(block.segments, block_new.segments):
  109. for anasig_old, anasig_new in zip(seg_old.analogsignals, seg_new.analogsignals):
  110. # ignoring differences in the file_origin attribute
  111. anasig_old.file_origin = anasig_new.file_origin
  112. assert_same_sub_schema(anasig_old, anasig_new)
  113. assert_same_annotations(anasig_old, anasig_new)
  114. assert_same_array_annotations(anasig_old, anasig_new)
  115. del anasig_old
  116. print(f'AnalogSignals are equivalent.')
  117. for st_old, st_new in zip(seg_old.spiketrains, seg_new.spiketrains):
  118. # ignoring differences in the file_origin attribute
  119. st_old.file_origin = st_new.file_origin
  120. assert_same_sub_schema(st_old, st_new)
  121. assert_same_annotations(st_old, st_new)
  122. assert_same_array_annotations(st_old, st_new)
  123. del st_old
  124. print(f'Spiketrains are equivalent.')
  125. for ev_old, ev_new in zip(seg_old.events, seg_new.events):
  126. # ignoring differences in the file_origin attribute
  127. ev_old.file_origin = ev_new.file_origin
  128. # ignore list-array type changes
  129. if 'color_codes' in ev_old.annotations:
  130. ev_old.annotations['color_codes'] = list(ev_old.annotations['color_codes'])
  131. assert_same_sub_schema(ev_old, ev_new)
  132. assert_same_annotations(ev_old, ev_new)
  133. assert_same_array_annotations(ev_old, ev_new)
  134. del ev_old
  135. print(f'Events are equivalent.')
  136. for ep_old, ep_new in zip(seg_old.epochs, seg_new.epochs):
  137. # ignoring differences in the file_origin attribute
  138. ep_old.file_origin = ep_new.file_origin
  139. assert_same_sub_schema(ep_old, ep_new)
  140. assert_same_annotations(ep_old, ep_new)
  141. assert_same_array_annotations(ep_old, ep_new)
  142. del ep_old
  143. print(f'Epochs are equivalent.')