
bids_preprocessing_workflow.py

#!/usr/bin/env python3
"""
This script performs preprocessing on fMRI data. It assumes that the data
is in BIDS format and has undergone minimal processing and resampling.

After this, you can run:
- bids_modelfit_workflow.py
  >> Fits a GLM and outputs statistics

Questions & comments: c.klink@nin.knaw.nl
"""
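
# Example invocation (illustrative; the CSV path below is hypothetical):
#   python3 bids_preprocessing_workflow.py --csv ./csv/subses.csv \
#       --fwhm 2.0 --HighPass 50
# The available flags are defined in the argparse block at the bottom of
# this file.
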
import os                                    # system functions
import argparse

import pandas as pd

import nipype.interfaces.io as nio           # Data i/o
import nipype.pipeline.engine as pe          # pypeline engine
import nipype.interfaces.fsl as fsl          # fsl
import nipype.interfaces.utility as util     # utility
import nipype.interfaces.utility as niu      # utility (imported under two aliases; both are used below)
from nipype.workflows.fmri.fsl.preprocess import create_susan_smooth
from nipype import LooseVersion

import subcode.bids_transform_manualmask as transform_manualmask
import subcode.bids_motioncorrection_workflow as motioncorrection_workflow
import subcode.bids_undistort_workflow as undistort_workflow

ds_root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
data_dir = ds_root


def getmeanscale(medianvals):
    return ['-mul %.10f' % (10000. / val) for val in medianvals]
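
# For illustration (hypothetical numbers): a run with a median intensity of
# 5000 yields the operand '-mul 2.0000000000', so fslmaths scales that run by
# 10000/5000 and its median becomes 10000 (see the meanscale node below).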


def create_workflow():
    featpreproc = pe.Workflow(name="featpreproc")
    featpreproc.base_dir = os.path.join(ds_root, 'workingdirs')

    # ===================================================================
    # Input
    # ===================================================================

    # ------------------ Specify variables
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'funcs',
        'subject_id',
        'session_id',
        'refsubject_id',
        'fwhm',      # smoothing
        'highpass'
    ]), name="inputspec")

    # SelectFiles
    templates = {
        # FIELDMAP ========
        # fieldmaps are currently not used by this workflow;
        # instead, everything is nonlinearly aligned to a reference
        # you should probably 'just' undistort this reference func
        # 'ref_manual_fmapmask':
        #     'manual-masks/sub-{subject_id}/fmap/'
        #     'sub-{subject_id}_ref_fmap_mask_res-1x1x1.nii.gz',
        # 'ref_fmap_magnitude':
        #     'manual-masks/sub-{subject_id}/fmap/'
        #     'sub-{subject_id}_ref_fmap_magnitude1_res-1x1x1.nii.gz',
        # 'ref_fmap_phasediff':
        #     'manual-masks/sub-{subject_id}/fmap/'
        #     'sub-{subject_id}_ref_fmap_phasediff_res-1x1x1.nii.gz',
        # 'fmap_phasediff':
        #     'derivatives/resampled-isotropic-1mm/'
        #     'sub-{subject_id}/ses-{session_id}/fmap/'
        #     'sub-{subject_id}_ses-{session_id}_phasediff_res-1x1x1_preproc.nii.gz',
        # 'fmap_magnitude':
        #     'derivatives/resampled-isotropic-1mm/'
        #     'sub-{subject_id}/ses-{session_id}/fmap/'
        #     'sub-{subject_id}_ses-{session_id}_magnitude1_'
        #     'res-1x1x1_preproc.nii.gz',
        # 'fmap_mask':
        #     'transformed-manual-fmap-mask/sub-{subject_id}/ses-{session_id}/fmap/'
        #     'sub-{subject_id}_ses-{session_id}_'
        #     'magnitude1_res-1x1x1_preproc.nii.gz',

        # FUNCTIONALS ========
        # >> These func references are undistorted with FUGUE <<
        # see the undistort.sh script in:
        # manual-masks/sub-eddy/ses-20170607b/func/
        'ref_func':
            'manual-masks/sub-{refsubject_id}/func/'
            'sub-{subject_id}_ref_func_res-1x1x1.nii.gz',
        'ref_funcmask':
            'manual-masks/sub-{refsubject_id}/func/'
            'sub-{subject_id}_ref_func_mask_res-1x1x1.nii.gz',

        # T1 ========
        # 1 mm iso ---
        # 'ref_t1':
        #     'manual-masks/sub-{subject_id}/anat/'
        #     'sub-{subject_id}_ref_anat_res-1x1x1.nii.gz',
        # 'ref_t1mask':
        #     'manual-masks/sub-{subject_id}/anat/'
        #     'sub-{subject_id}_ref_anat_mask_res-1x1x1.nii.gz',
        # 0.5 mm iso ---
        'ref_t1':
            'manual-masks/sub-{refsubject_id}/anat/'
            'sub-{subject_id}_ref_anat_res-0.5x0.5x0.5.nii.gz',
        'ref_t1mask':
            'manual-masks/sub-{refsubject_id}/anat/'
            'sub-{subject_id}_ref_anat_mask_res-0.5x0.5x0.5.nii.gz',

        # WEIGHTS ========
        # 'manualweights':
        #     'manual-masks/sub-eddy/ses-20170511/func/'
        #     'sub-eddy_ses-20170511_task-curvetracing_run-01_frame-50_bold'
        #     '_res-1x1x1_manualweights.nii.gz',
    }
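
    # For illustration (hypothetical IDs): with subject_id='eddy' and
    # refsubject_id='eddy', the 'ref_func' template above resolves to
    # 'manual-masks/sub-eddy/func/sub-eddy_ref_func_res-1x1x1.nii.gz',
    # relative to base_directory (data_dir).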

    inputfiles = pe.Node(
        nio.SelectFiles(templates,
                        base_directory=data_dir), name="input_files")

    featpreproc.connect(
        [(inputnode, inputfiles,
          [('subject_id', 'subject_id'),
           ('session_id', 'session_id'),
           ('refsubject_id', 'refsubject_id'),
           ])])

    # ===================================================================
    # Output
    # ===================================================================

    # Datasink
    outputfiles = pe.Node(nio.DataSink(
        base_directory=ds_root,
        container='derivatives/featpreproc',
        parameterization=True),
        name="output_files")

    # Use the following DataSink output substitutions
    # each tuple is only matched once per file
    outputfiles.inputs.substitutions = [
        ('/_mc_method_afni3dAllinSlices/', '/'),
        ('/_mc_method_afni3dAllinSlices/', '/'),  # needs to appear twice
        ('/oned_file/', '/'),
        ('/out_file/', '/'),
        ('/oned_matrix_save/', '/'),
        ('refsubject_id_', 'ref-'),
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
    ]
    # Put results into a BIDS-like format
    outputfiles.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]+)_sub-([a-zA-Z0-9]+)', r'sub-\2/ses-\1'),
        (r'/_addmean[0-9]+/', r'/func/'),
        (r'/_funcbrains[0-9]+/', r'/func/'),
        (r'/_maskfunc[0-9]+/', r'/func/'),
        (r'/_mc[0-9]+/', r'/func/'),
        (r'/_meanfunc[0-9]+/', r'/func/'),
        (r'/_outliers[0-9]+/', r'/func/'),
        (r'_ref-([a-zA-Z0-9]+)_run_id_[0-9][0-9]', r''),
    ]
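
    # For illustration (hypothetical path): the substitutions above collapse
    # per-node subfolders such as '/_mc0/' into '/func/' and rewrite iterable
    # prefixes like 'subject_id_eddy'/'session_id_20180125' so that outputs
    # end up under 'sub-eddy/ses-20180125/func/'.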

    outputnode = pe.Node(interface=util.IdentityInterface(
        fields=['motion_parameters',
                'motion_corrected',
                'motion_plots',
                'motion_outlier_files',
                'mask',
                'smoothed_files',
                'highpassed_files',
                'mean',
                'func_unwarp',
                'ref_func',
                'ref_funcmask',
                'ref_t1',
                'ref_t1mask',
                ]),
        name='outputspec')

    # ===================================================================
    # Pipeline
    # ===================================================================

    # Transform manual skull-stripped masks to multiple images
    # --------------------------------------------------------
    # should just be used as input to motion correction,
    # after mc, all functionals should be aligned to reference
    transmanmask_mc = transform_manualmask.create_workflow()

    # - - - - - - Connections - - - - - - -
    featpreproc.connect(
        [(inputfiles, transmanmask_mc,
          [('subject_id', 'in.subject_id'),
           ('session_id', 'in.session_id'),
           ('refsubject_id', 'in.refsubject_id'),
           ])])

    # featpreproc.connect(inputfiles, 'ref_funcmask',
    #                     transmanmask_mc, 'in.manualmask')
    featpreproc.connect(inputfiles, 'ref_funcmask',
                        transmanmask_mc, 'in.ref_funcmask')
    featpreproc.connect(inputnode, 'funcs',
                        transmanmask_mc, 'in.funcs')
    featpreproc.connect(inputfiles, 'ref_func',
                        transmanmask_mc, 'in.ref_func')

    # fieldmaps not being used
    if False:
        trans_fmapmask = transmanmask_mc.clone('trans_fmapmask')
        featpreproc.connect(inputfiles, 'ref_manual_fmapmask',
                            trans_fmapmask, 'in.manualmask')
        featpreproc.connect(inputfiles, 'fmap_magnitude',
                            trans_fmapmask, 'in.funcs')
        featpreproc.connect(inputfiles, 'ref_func',
                            trans_fmapmask, 'in.manualmask_func_ref')

    # Motion correction
    # --------------------------------------------------------
    # Perform motion correction, using some pipeline
    # mc = motioncorrection_workflow.create_workflow_afni()

    # Register an image from the functionals to the reference image
    median_func = pe.MapNode(
        interface=fsl.maths.MedianImage(dimension="T"),
        name='median_func',
        iterfield=('in_file'),
    )
    pre_mc = motioncorrection_workflow.create_workflow_allin_slices(
        name='premotioncorrection')

    featpreproc.connect(
        [
            (inputnode, median_func,
             [
                 ('funcs', 'in_file'),
             ]),
            (median_func, pre_mc,
             [
                 ('out_file', 'in.funcs'),
             ]),
            (inputfiles, pre_mc,
             [
                 # the median func image will be used as a reference / base
                 ('ref_func', 'in.ref_func'),
                 ('ref_funcmask', 'in.ref_func_weights'),
             ]),
            (transmanmask_mc, pre_mc,
             [
                 # use mask as weights >>>> are we sure this is correct?
                 ('funcreg.out_file', 'in.funcs_masks'),
             ]),
            (pre_mc, outputnode,
             [
                 ('mc.out_file', 'pre_motion_corrected'),
                 ('mc.oned_file', 'pre_motion_parameters.oned_file'),
                 ('mc.oned_matrix_save',
                  'pre_motion_parameters.oned_matrix_save'),
             ]),
            (outputnode, outputfiles,
             [
                 ('pre_motion_corrected', 'pre_motion_corrected.out_file'),
                 ('pre_motion_parameters.oned_file',
                  'pre_motion_corrected.oned_file'),
                 # warp parameters in ASCII (.1D)
                 ('pre_motion_parameters.oned_matrix_save',
                  'pre_motion_corrected.oned_matrix_save'),
                 # transformation matrices for each sub-brick
             ]),
        ])

    mc = motioncorrection_workflow.create_workflow_allin_slices(
        name='motioncorrection',
        iterfield=('in_file', 'ref_file', 'in_weight_file'))

    # - - - - - - Connections - - - - - - -
    featpreproc.connect(
        [(inputnode, mc,
          [
              ('funcs', 'in.funcs'),
          ]),
         (pre_mc, mc, [
             # the median image realigned to the reference functional
             # will serve as reference. This way motion correction is
             # done to an image more similar to the functionals
             ('mc.out_file', 'in.ref_func'),
         ]),
         (inputfiles, mc, [
             # Check and make sure the ref func mask is close enough
             # to the registered median image.
             ('ref_funcmask', 'in.ref_func_weights'),
         ]),
         (transmanmask_mc, mc, [
             ('funcreg.out_file', 'in.funcs_masks'),  # use mask as weights
         ]),
         (mc, outputnode, [
             ('mc.out_file', 'motion_corrected'),
             ('mc.oned_file', 'motion_parameters.oned_file'),
             ('mc.oned_matrix_save', 'motion_parameters.oned_matrix_save'),
         ]),
         (outputnode, outputfiles, [
             ('motion_corrected', 'motion_corrected.out_file'),
             ('motion_parameters.oned_file', 'motion_corrected.oned_file'),
             # warp parameters in ASCII (.1D)
             ('motion_parameters.oned_matrix_save',
              'motion_corrected.oned_matrix_save'),
             # transformation matrices for each sub-brick
         ]),
         ])

    # Fieldmap undistortion
    # --------------------------------------------------------
    # Unwarp EPI distortions.
    # Motion correction is performed to a reference that is undistorted,
    # so b0_unwarp is currently not needed or used.
    if False:
        b0_unwarp = undistort_workflow.create_workflow()

        featpreproc.connect(
            [(inputfiles, b0_unwarp,
              [  # ('subject_id', 'in.subject_id'),
                 # ('session_id', 'in.session_id'),
                 ('fmap_phasediff', 'in.fmap_phasediff'),
                 ('fmap_magnitude', 'in.fmap_magnitude'),
               ]),
             (mc, b0_unwarp,
              [('mc.out_file', 'in.funcs'),
               ]),
             (transmanmask_mc, b0_unwarp,
              [('funcreg.out_file', 'in.funcmasks'),
               ]),
             (trans_fmapmask, b0_unwarp,
              [('funcreg.out_file', 'in.fmap_mask')]),
             (b0_unwarp, outputfiles,
              [('out.funcs', 'func_unwarp.funcs'),
               ('out.funcmasks', 'func_unwarp.funcmasks'),
               ]),
             (b0_unwarp, outputnode,
              [('out.funcs', 'func_unwarp.funcs'),
               ('out.funcmasks', 'mask'),
               ]),
             ])

    # undistort the reference images
    if False:
        b0_unwarp_ref = b0_unwarp.clone('b0_unwarp_ref')
        featpreproc.connect(
            [(inputfiles, b0_unwarp_ref,
              [  # ('subject_id', 'in.subject_id'),
                 # ('session_id', 'in.session_id'),
                 ('ref_fmap_phasediff', 'in.fmap_phasediff'),
                 ('ref_fmap_magnitude', 'in.fmap_magnitude'),
                 ('ref_manual_fmapmask', 'in.fmap_mask'),
                 ('ref_func', 'in.funcs'),
                 ('ref_funcmask', 'in.funcmasks'),
               ]),
             (b0_unwarp_ref, outputfiles,
              [('out.funcs', 'func_unwarp_ref.func'),
               ('out.funcmasks', 'func_unwarp_ref.funcmask'),
               ]),
             (b0_unwarp_ref, outputnode,
              [('out.funcs', 'ref_func'),
               ('out.funcmasks', 'ref_mask'),
               ]),
             ])
    else:
        featpreproc.connect(
            [(inputfiles, outputfiles,
              [('ref_func', 'reference/func'),
               ('ref_funcmask', 'reference/func_mask'),
               ]),
             (inputfiles, outputnode,
              [('ref_func', 'ref_func'),
               ('ref_funcmask', 'ref_funcmask'),
               ]),
             ])

    # Apply brain masks to functionals
    # --------------------------------------------------------

    # Dilate mask
    dilatemask = pe.Node(
        interface=fsl.ImageMaths(suffix='_dil', op_string='-dilF'),
        name='dilatemask')
    featpreproc.connect(inputfiles, 'ref_funcmask', dilatemask, 'in_file')
    featpreproc.connect(dilatemask, 'out_file', outputfiles, 'dilate_mask')

    funcbrains = pe.MapNode(
        fsl.BinaryMaths(operation='mul'),
        iterfield=('in_file', 'operand_file'),
        name='funcbrains'
    )

    featpreproc.connect(
        [(mc, funcbrains,
          [('mc.out_file', 'in_file'),
           ]),
         (dilatemask, funcbrains,
          [('out_file', 'operand_file'),
           ]),
         (funcbrains, outputfiles,
          [('out_file', 'funcbrains'),
           ]),
         ])

    # Detect motion outliers
    # --------------------------------------------------------
    import nipype.algorithms.rapidart as ra

    outliers = pe.MapNode(
        ra.ArtifactDetect(
            mask_type='file',
            # trying to "disable" `norm_threshold`:
            use_norm=True,
            norm_threshold=10.0,  # combines translations in mm and rotations
            # use_norm=Undefined,
            # translation_threshold=1.0,  # translation in mm
            # rotation_threshold=0.02,  # rotation in radians
            zintensity_threshold=3.0,  # z-score
            parameter_source='AFNI',
            save_plot=True),
        iterfield=('realigned_files', 'realignment_parameters', 'mask_file'),
        name='outliers')

    featpreproc.connect([
        (mc, outliers,
         [  # ('mc.par_file', 'realignment_parameters'),
             ('mc.oned_file', 'realignment_parameters'),
         ]),
        (funcbrains, outliers,
         [('out_file', 'realigned_files'),
          ]),
        (dilatemask, outliers,
         [('out_file', 'mask_file'),
          ]),
        (outliers, outputfiles,
         [('outlier_files', 'motion_outliers.@outlier_files'),
          ('plot_files', 'motion_outliers.@plot_files'),
          ('displacement_files', 'motion_outliers.@displacement_files'),
          ('intensity_files', 'motion_outliers.@intensity_files'),
          ('mask_files', 'motion_outliers.@mask_files'),
          ('statistic_files', 'motion_outliers.@statistic_files'),
          # ('norm_files', 'outliers.@norm_files'),
          ]),
        (mc, outputnode,
         [('mc.oned_file', 'motion_parameters'),
          ]),
        (outliers, outputnode,
         [('outlier_files', 'motion_outlier_files'),
          ('plot_files', 'motion_plots.@plot_files'),
          ('displacement_files', 'motion_outliers.@displacement_files'),
          ('intensity_files', 'motion_outliers.@intensity_files'),
          ('mask_files', 'motion_outliers.@mask_files'),
          ('statistic_files', 'motion_outliers.@statistic_files'),
          # ('norm_files', 'outliers.@norm_files'),
          ])
    ])

    """
    Determine the 2nd and 98th percentile intensities of each functional run
    """
    getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 2 -p 98'),
                           iterfield=['in_file'],
                           name='getthreshold')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', getthresh, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', getthresh, 'in_file')

    """
    Threshold the functional data at 10% of the 98th percentile
    """
    threshold = pe.MapNode(interface=fsl.ImageMaths(out_data_type='char',
                                                    suffix='_thresh'),
                           iterfield=['in_file', 'op_string'],
                           name='threshold')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', threshold, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', threshold, 'in_file')

    """
    Define a function to get 10% of the intensity
    """
    def getthreshop(thresh):
        return ['-thr %.10f -Tmin -bin' % (0.1 * val[1]) for val in thresh]
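
    # For illustration (hypothetical values): fsl.ImageStats with '-p 2 -p 98'
    # returns [p2, p98] per run, e.g. [30.0, 800.0]; getthreshop then yields
    # '-thr 80.0000000000 -Tmin -bin', i.e. a mask cut at 10% of the 98th
    # percentile, minimum-projected over time and binarised.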

    featpreproc.connect(
        getthresh, ('out_stat', getthreshop),
        threshold, 'op_string')

    """
    Determine the median value of the functional runs using the mask
    """
    medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield=['in_file', 'mask_file'],
                           name='medianval')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', medianval, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', medianval, 'in_file')
    featpreproc.connect(threshold, 'out_file', medianval, 'mask_file')

    # Spatial smoothing (SUSAN)
    # --------------------------------------------------------
    # create_susan_smooth takes care of calculating the mean and median
    # functional, applying the mask to the functional, and running the
    # smoothing
    smooth = create_susan_smooth(separate_masks=False)
    featpreproc.connect(inputnode, 'fwhm', smooth, 'inputnode.fwhm')
    featpreproc.connect(mc, 'mc.out_file',
                        smooth, 'inputnode.in_files')
    featpreproc.connect(dilatemask, 'out_file',
                        smooth, 'inputnode.mask_file')

    # -------------------------------------------------------
    # The below is from workflows/fmri/fsl/preprocess.py

    """
    Mask the smoothed data with the dilated mask
    """
    maskfunc3 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                           iterfield=['in_file', 'in_file2'],
                           name='maskfunc3')
    featpreproc.connect(
        smooth, 'outputnode.smoothed_files', maskfunc3, 'in_file')
    featpreproc.connect(dilatemask, 'out_file', maskfunc3, 'in_file2')

    concatnode = pe.Node(interface=util.Merge(2),
                         name='concat')

    tolist = lambda x: [x]

    def chooseindex(fwhm):
        if fwhm < 1:
            return [0]
        else:
            return [1]
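
    # For illustration: with the default fwhm of 2.0 mm, chooseindex returns
    # [1], so the SUSAN-smoothed data (concatnode input 2) are selected below;
    # an fwhm below 1 returns [0], passing on the unsmoothed, motion-corrected
    # data instead.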

    # maskfunc2 is the functional data before SUSAN
    if False:
        featpreproc.connect(b0_unwarp, ('out.funcs', tolist),
                            concatnode, 'in1')
    else:
        featpreproc.connect(mc, ('mc.out_file', tolist), concatnode, 'in1')
    # maskfunc3 is the functional data after SUSAN
    featpreproc.connect(maskfunc3, ('out_file', tolist), concatnode, 'in2')

    """
    The following nodes select smoothed or unsmoothed data depending on the
    fwhm. This is because SUSAN defaults to smoothing the data with about the
    voxel size of the input data if the fwhm parameter is less than 1/3 of the
    voxel size.
    """
    selectnode = pe.Node(interface=util.Select(), name='select')
    featpreproc.connect(concatnode, 'out', selectnode, 'inlist')
    featpreproc.connect(inputnode, ('fwhm', chooseindex), selectnode, 'index')
    featpreproc.connect(selectnode, 'out', outputfiles, 'smoothed_files')

    """
    Scale each functional run so that its median value is set to 10000.
    """
    meanscale = pe.MapNode(interface=fsl.ImageMaths(suffix='_gms'),
                           iterfield=['in_file', 'op_string'],
                           name='meanscale')
    featpreproc.connect(selectnode, 'out', meanscale, 'in_file')

    """
    Use getmeanscale to get the scaling factor for intensity normalization
    """
    featpreproc.connect(
        medianval, ('out_stat', getmeanscale),
        meanscale, 'op_string')

    # Temporal filtering (highpass)
    # --------------------------------------------------------
    highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_tempfilt'),
                          iterfield=['in_file'],
                          name='highpass')
    highpass_operand = lambda x: '-bptf %.10f -1' % x
    featpreproc.connect(
        inputnode, ('highpass', highpass_operand),
        highpass, 'op_string')
    featpreproc.connect(meanscale, 'out_file', highpass, 'in_file')
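
    # Note on units: fslmaths' '-bptf <hp_sigma> <lp_sigma>' interprets both
    # sigmas in volumes (TRs), and '-1' disables the lowpass part. The value
    # arriving on the 'highpass' input (see --HighPass at the bottom of this
    # file) is inserted into the operand string unchanged.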

    version = 0
    if fsl.Info.version() and \
            LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'):
        version = 507

    if version < 507:
        featpreproc.connect(
            highpass, 'out_file', outputnode, 'highpassed_files')
    else:
        """
        Add back the mean removed by the highpass filter operation as
        of FSL 5.0.7
        """
        meanfunc4 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                        suffix='_mean'),
                               iterfield=['in_file'],
                               name='meanfunc4')
        featpreproc.connect(meanscale, 'out_file', meanfunc4, 'in_file')
        addmean = pe.MapNode(interface=fsl.BinaryMaths(operation='add'),
                             iterfield=['in_file', 'operand_file'],
                             name='addmean')
        featpreproc.connect(highpass, 'out_file', addmean, 'in_file')
        featpreproc.connect(meanfunc4, 'out_file', addmean, 'operand_file')
        featpreproc.connect(
            addmean, 'out_file', outputnode, 'highpassed_files')

    """
    Generate a mean functional image from each run
    """
    meanfunc3 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                    suffix='_mean'),
                           iterfield=['in_file'],
                           name='meanfunc3')
    featpreproc.connect(meanscale, 'out_file', meanfunc3, 'in_file')
    featpreproc.connect(meanfunc3, 'out_file', outputfiles, 'mean')
    featpreproc.connect(meanfunc3, 'out_file', outputnode, 'mean_highpassed')
    featpreproc.connect(outputnode, 'highpassed_files',
                        outputfiles, 'highpassed_files')

    return featpreproc


# ===================================================================
# Fin
# ===================================================================

def run_workflow(csv_file, fwhm, HighPass):
    # Using the name "level1flow" should allow the workingdirs file to be used
    # by the fmri_workflow pipeline.
    workflow = pe.Workflow(name='level1flow')
    workflow.base_dir = os.path.abspath('./workingdirs')

    featpreproc = create_workflow()

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'subject_id',
        'session_id',
        'run_id',
        'refsubject_id',
    ]), name="input")

    if csv_file is not None:
        print('=== reading csv ===')
        # Read the csv and use pandas to set up the image and ev-processing
        df = pd.read_csv(csv_file)

        # init lists
        sub_img = []
        ses_img = []
        run_img = []
        ref_img = []

        # fill lists to iterate mapnodes
        for index, row in df.iterrows():
            for r in row.run.strip("[]").split(" "):
                sub_img.append(row.subject)
                ses_img.append(row.session)
                run_img.append(r)
                if 'refsubject' in df.columns:
                    if row.refsubject == 'nan':
                        # empty field
                        ref_img.append(row.subject)
                    else:
                        # non-empty field
                        ref_img.append(row.refsubject)
                else:
                    ref_img.append(row.subject)

        inputnode.iterables = [
            ('subject_id', sub_img),
            ('session_id', ses_img),
            ('run_id', run_img),
            ('refsubject_id', ref_img),
        ]
        inputnode.synchronize = True
        print(sub_img)
        print(ref_img)
    else:
        print("No csv-file specified. Don't know what data to process.")
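
    # Expected CSV layout (illustrative values; column names follow the code
    # above):
    #   subject,session,run,refsubject
    #   eddy,20180125,[01 02],nan
    # 'run' holds a space-separated list in brackets, and 'refsubject' may be
    # 'nan' to fall back to the subject itself.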

    templates = {
        'funcs':
            'derivatives/resampled-isotropic-1mm/'
            'sub-{subject_id}/ses-{session_id}/func/'
            'sub-{subject_id}_ses-{session_id}*run-{run_id}_bold_res-1x1x1_preproc.nii.gz',
    }
    inputfiles = pe.Node(
        nio.SelectFiles(templates,
                        base_directory=data_dir), name="input_files")

    workflow.connect([
        (inputnode, inputfiles,
         [('subject_id', 'subject_id'),
          ('refsubject_id', 'refsubject_id'),
          ('session_id', 'session_id'),
          ('run_id', 'run_id'),
          ]),
        (inputnode, featpreproc,
         [('subject_id', 'inputspec.subject_id'),
          ('refsubject_id', 'inputspec.refsubject_id'),
          ('session_id', 'inputspec.session_id'),
          ]),
        (inputfiles, featpreproc,
         [('funcs', 'inputspec.funcs'),
          ])
    ])

    featpreproc.inputs.inputspec.fwhm = fwhm          # spatial smoothing (default=2)
    featpreproc.inputs.inputspec.highpass = HighPass  # high-pass filter value in seconds (default=50)

    workflow.workflow_level = 'INFO'    # INFO/DEBUG
    # workflow.stop_on_first_crash = True
    workflow.keep_inputs = True
    workflow.remove_unnecessary_outputs = True
    workflow.write_graph()
    workflow.run()


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Perform pre-processing step for NHP fMRI.')
    parser.add_argument('--csv',
                        dest='csv_file', default=None,
                        help='CSV file with subjects, sessions, and runs.')
    # parse numeric arguments as floats so downstream comparisons and
    # string formatting work when values are passed on the command line
    parser.add_argument('--fwhm', type=float,
                        dest='fwhm', default=2.0,
                        help='Set FWHM for smoothing in mm. (default is 2.0 mm)')
    parser.add_argument('--HighPass', type=float,
                        dest='HighPass', default=50,
                        help='Set high pass filter in seconds. (default = 50 s)')

    args = parser.parse_args()
    run_workflow(**vars(args))