
bids_minimal_processing.py

#!/usr/bin/env python3
"""
This is the first script to run after copying the source files from
DATA_raw to NHP-BIDS. It performs some minimal processing:
- Image files are corrected for sphinx orientation
- FSL orientation is corrected
- Eventlog csv files are converted to tsv where possible
- Files are moved to the correct folders

After this, you should run:
- bids_resample_isotropic_workflow.py
  >> resamples *ALL* image files to 1 mm isotropic
- bids_resample_hires_isotropic_workflow.py
  >> resamples the high-resolution anatomicals to 0.5 mm isotropic
  >> only use this when these files are actually present
- bids_preprocessing_workflow.py
  >> performs preprocessing steps like normalisation and motion correction
- bids_modelfit_workflow.py
  >> fits a GLM and outputs statistics

Questions & comments: c.klink@nin.knaw.nl
"""

# import glob  # paths & filenames
import os  # system functions
import pandas as pd  # data juggling

# nipype
import nipype.interfaces.io as nio  # data i/o
import nipype.interfaces.fsl as fsl  # fsl
import nipype.interfaces.freesurfer as fs  # freesurfer
from nipype.interfaces.utility import IdentityInterface
from nipype.pipeline.engine import Workflow, Node, MapNode

# custom library
from subcode.bids_convert_csv_eventlog import ConvertCSVEventLog


def create_images_workflow():
    # Correct for the sphinx position and use reorient to standard.
    workflow = Workflow(name='minimal_proc')

    inputs = Node(IdentityInterface(fields=['images']), name="in")
    outputs = Node(IdentityInterface(fields=['images']), name="out")

    sphinx = MapNode(fs.MRIConvert(sphinx=True),
                     iterfield=['in_file'], name='sphinx')
    workflow.connect(inputs, 'images',
                     sphinx, 'in_file')

    ro = MapNode(fsl.Reorient2Std(),
                 iterfield=['in_file'], name='ro')
    workflow.connect(sphinx, 'out_file',
                     ro, 'in_file')
    workflow.connect(ro, 'out_file',
                     outputs, 'images')

    return workflow
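
# Illustrative standalone use of this sub-workflow; the input file name and
# base_dir are hypothetical, and the node name 'in' refers to the
# IdentityInterface node defined above:
#
#   wf = create_images_workflow()
#   wf.get_node('in').inputs.images = ['sub-x_ses-y_T1w.nii.gz']
#   wf.base_dir = '/tmp/minproc_test'
#   wf.run()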


def run_workflow(csv_file, stop_on_first_crash, ignore_events):
    from nipype import config
    config.enable_debug_mode()

    # ------------------ Specify variables
    ds_root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    data_dir = ds_root
    output_dir = ''
    working_dir = 'workingdirs/minimal_processing'

    # ------------------ Input Files
    # Read the csv and use pandas to set up image and ev-processing
    df = pd.read_csv(csv_file)

    # init lists
    sub_img = []; ses_img = []; dt_img = []
    sub_ev = []; ses_ev = []; run_ev = []

    # fill lists to iterate mapnodes
    for index, row in df.iterrows():
        for dt in row.datatype.strip("[]").split(" "):
            sub_img.append(row.subject)
            ses_img.append(row.session)
            dt_img.append(dt)
        for r in row.run.strip("[]").split(" "):
            sub_ev.append(row.subject)
            ses_ev.append(row.session)
            run_ev.append(r)
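
    # For example, a row with datatype '[anat func]' and run '[01 02]'
    # (illustrative values) expands to dt_img = ['anat', 'func'] and
    # run_ev = ['01', '02'], with subject and session repeated alongside.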

    # check if the file definitions are ok
    if len(dt_img) > 0:
        process_images = True
    else:
        process_images = False
        print('NB! No data-types specified. Not processing any images.')
        print('Check the csv-file if this is unexpected.')

    if len(run_ev) > 0:
        process_ev = True
    else:
        process_ev = False
        print('NB! No runs specified. Not processing eventlog files.'
              ' Images will still be processed.')
        print('Check the csv-file if this is unexpected.')

    if process_images:
        imgsource = Node(IdentityInterface(fields=[
            'subject_id',
            'session_id',
            'datatype',
        ]), name="imgsource")
        imgsource.iterables = [
            ('session_id', ses_img),
            ('subject_id', sub_img),
            ('datatype', dt_img),
        ]
        imgsource.synchronize = True
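        # With synchronize=True, nipype iterates these iterables in lockstep
        # (one tuple per csv entry) rather than over their Cartesian product.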

        # SelectFiles
        imgfiles = Node(
            nio.SelectFiles({
                'images':
                    'sourcedata/sub-{subject_id}/ses-{session_id}/{datatype}/'
                    'sub-{subject_id}_ses-{session_id}_*.nii.gz'
            }, base_directory=data_dir), name="img_files")
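        # With, e.g., subject 'eddy', session '20180222', and datatype 'func'
        # (illustrative ids), this template resolves to
        # sourcedata/sub-eddy/ses-20180222/func/sub-eddy_ses-20180222_*.nii.gz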

    if not ignore_events and process_ev:
        # only create an event node when handling events
        evsource = Node(IdentityInterface(fields=[
            'subject_id',
            'session_id',
            'run_id',
        ]), name="evsource")
        evsource.iterables = [
            ('subject_id', sub_ev),
            ('session_id', ses_ev),
            ('run_id', run_ev),
        ]
        evsource.synchronize = True

        evfiles = Node(
            nio.SelectFiles({
                'csv_eventlogs':
                    'sourcedata/sub-{subject_id}/ses-{session_id}/func/'
                    'sub-{subject_id}_ses-{session_id}_*_run-{run_id}_events/'
                    'Log_*_eventlog.csv',
                'stim_dir':
                    'sourcedata/sub-{subject_id}/ses-{session_id}/func/'
                    'sub-{subject_id}_ses-{session_id}_*_run-{run_id}_events/',
            }, base_directory=data_dir), name="evfiles")
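        # Each run id (e.g. run '01'; illustrative) thus selects both the
        # Log_*_eventlog.csv files and their accompanying stimulus directory
        # under the matching *_events/ folder.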

    # ------------------ Output Files
    # Datasink
    outputfiles = Node(nio.DataSink(
        base_directory=ds_root,
        container=output_dir,
        parameterization=True),
        name="output_files")

    # Use the following DataSink output substitutions
    outputfiles.inputs.substitutions = [
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
        ('/minimal_processing/', '/'),
        ('_out_reoriented.nii.gz', '.nii.gz'),
    ]
    # Put result into a BIDS-like format
    outputfiles.inputs.regexp_substitutions = [
        (r'_run_id_([a-zA-Z0-9]*)_ses-([a-zA-Z0-9]*)_sub-([a-zA-Z0-9]*)',
         r'/sub-\3/ses-\2/'),
        (r'_datatype_([a-z]*)_ses-([a-zA-Z0-9]*)_sub-([a-zA-Z0-9]*)',
         r'sub-\3/ses-\2/\1'),
        (r'/_ses-([a-zA-Z0-9]*)_sub-([a-zA-Z0-9]*)',
         r'/sub-\2/ses-\1/'),
        (r'/_ro[0-9]+/', r'/'),
        (r'/_csv2tsv[0-9]+/', r'/func/'),
    ]
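    # For example, a parameterized sink path fragment such as
    # _datatype_func_ses-20180222_sub-eddy/_ro0/ (illustrative ids) is
    # rewritten to sub-eddy/ses-20180222/func/ by the rules above.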

    # -------------------------------------------- Create Pipeline
    workflow = Workflow(
        name='wrapper',
        base_dir=os.path.join(ds_root, working_dir))

    if process_images:
        workflow.connect([(imgsource, imgfiles,
                           [('subject_id', 'subject_id'),
                            ('session_id', 'session_id'),
                            ('datatype', 'datatype'),
                            ])])
    if not ignore_events and process_ev:
        workflow.connect([(evsource, evfiles,
                           [('subject_id', 'subject_id'),
                            ('session_id', 'session_id'),
                            ('run_id', 'run_id'),
                            ])])

    if process_images:
        minproc = create_images_workflow()
        workflow.connect(imgfiles, 'images',
                         minproc, 'in.images')
        workflow.connect(minproc, 'out.images',
                         outputfiles, 'minimal_processing.@images')

    if not ignore_events and process_ev:
        csv2tsv = MapNode(
            ConvertCSVEventLog(),
            iterfield=['in_file', 'stim_dir'],
            name='csv2tsv')
        workflow.connect(evfiles, 'csv_eventlogs',
                         csv2tsv, 'in_file')
        workflow.connect(evfiles, 'stim_dir',
                         csv2tsv, 'stim_dir')
        workflow.connect(csv2tsv, 'out_file',
                         outputfiles, 'minimal_processing.@eventlogs')

    workflow.stop_on_first_crash = stop_on_first_crash
    workflow.keep_inputs = True
    workflow.remove_unnecessary_outputs = True

    workflow.write_graph()
    workflow.run()


if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(
        description='Perform minimal_processing for NHP fMRI.')
    parser.add_argument('--csv',
                        dest='csv_file',
                        required=True,
                        help='CSV file with subjects, sessions, and runs.')
    parser.add_argument('--stop_on_first_crash',
                        dest='stop_on_first_crash',
                        action='store_true',
                        help='Whether to stop on first crash.')
    parser.add_argument('--ignore_events',
                        dest='ignore_events',
                        action='store_true',
                        help='Whether to ignore all csv event files. '
                             'By default csv event files are processed for '
                             'specified runs (while imaging files are '
                             'processed for all runs).')

    args = parser.parse_args()
    run_workflow(**vars(args))
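
# Example invocations (the csv path is hypothetical):
#
#   python3 bids_minimal_processing.py --csv ./csv/sessions.csv
#   python3 bids_minimal_processing.py --csv ./csv/sessions.csv \
#       --ignore_events --stop_on_first_crash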