s13. proc.FT.surf (18 KB)

(The long digit run that followed this caption was the listing's line-number
column, flattened into a single line during text extraction.)
  1. #!/bin/tcsh -xef
  2. echo "auto-generated by afni_proc.py, Tue Jun 5 09:13:53 2018"
  3. echo "(version 6.14, May 25, 2018)"
  4. echo "execution started: `date`"
  5. # execute via :
  6. # tcsh -xef proc.FT.surf |& tee output.proc.FT.surf
  7. # =========================== auto block: setup ============================
  8. # script setup
  9. # take note of the AFNI version
  10. afni -ver
  11. # check that the current AFNI version is recent enough
  12. afni_history -check_date 3 May 2018
  13. if ( $status ) then
  14. echo "** this script requires newer AFNI binaries (than 3 May 2018)"
  15. echo " (consider: @update.afni.binaries -defaults)"
  16. exit
  17. endif
  18. # the user may specify a single subject to run with
  19. if ( $#argv > 0 ) then
  20. set subj = $argv[1]
  21. else
  22. set subj = FT.surf
  23. endif
  24. # assign output directory name
  25. set output_dir = $subj.results
  26. # verify that the results directory does not yet exist
  27. if ( -d $output_dir ) then
  28. echo output dir "$subj.results" already exists
  29. exit
  30. endif
  31. # set list of runs
  32. set runs = (`count -digits 2 1 3`)
  33. # create results and stimuli directories
  34. mkdir $output_dir
  35. mkdir $output_dir/stimuli
  36. # copy stim files into stimulus directory
  37. cp FT/AV1_vis.txt FT/AV2_aud.txt $output_dir/stimuli
  38. # copy anatomy to results dir
  39. 3dcopy FT/FT_anat+orig $output_dir/FT_anat
  40. # ============================ auto block: tcat ============================
  41. # apply 3dTcat to copy input dsets to results dir,
  42. # while removing the first 2 TRs
  43. 3dTcat -prefix $output_dir/pb00.$subj.r01.tcat FT/FT_epi_r1+orig'[2..$]'
  44. 3dTcat -prefix $output_dir/pb00.$subj.r02.tcat FT/FT_epi_r2+orig'[2..$]'
  45. 3dTcat -prefix $output_dir/pb00.$subj.r03.tcat FT/FT_epi_r3+orig'[2..$]'
  46. # and make note of repetitions (TRs) per run
  47. set tr_counts = ( 150 150 150 )
  48. # -------------------------------------------------------
  49. # enter the results directory (can begin processing data)
  50. cd $output_dir
  51. # ========================== auto block: outcount ==========================
  52. # data check: compute outlier fraction for each volume
  53. touch out.pre_ss_warn.txt
  54. foreach run ( $runs )
  55. 3dToutcount -automask -fraction -polort 3 -legendre \
  56. pb00.$subj.r$run.tcat+orig > outcount.r$run.1D
  57. # outliers at TR 0 might suggest pre-steady state TRs
  58. if ( `1deval -a outcount.r$run.1D"{0}" -expr "step(a-0.4)"` ) then
  59. echo "** TR #0 outliers: possible pre-steady state TRs in run $run" \
  60. >> out.pre_ss_warn.txt
  61. endif
  62. end
  63. # catenate outlier counts into a single time series
  64. cat outcount.r*.1D > outcount_rall.1D
  65. # get run number and TR index for minimum outlier volume
  66. set minindex = `3dTstat -argmin -prefix - outcount_rall.1D\'`
  67. set ovals = ( `1d_tool.py -set_run_lengths $tr_counts \
  68. -index_to_run_tr $minindex` )
  69. # save run and TR indices for extraction of vr_base_min_outlier
  70. set minoutrun = $ovals[1]
  71. set minouttr = $ovals[2]
  72. echo "min outlier: run $minoutrun, TR $minouttr" | tee out.min_outlier.txt
  73. # ================================= tshift =================================
  74. # time shift data so all slice timing is the same
  75. foreach run ( $runs )
  76. 3dTshift -tzero 0 -quintic -prefix pb01.$subj.r$run.tshift \
  77. pb00.$subj.r$run.tcat+orig
  78. end
  79. # --------------------------------
  80. # extract volreg registration base
  81. 3dbucket -prefix vr_base_min_outlier \
  82. pb01.$subj.r$minoutrun.tshift+orig"[$minouttr]"
  83. # ================================= align ==================================
  84. # for e2a: compute anat alignment transformation to EPI registration base
  85. # (new anat will be intermediate, stripped, FT_anat_ns+orig)
  86. align_epi_anat.py -anat2epi -anat FT_anat+orig \
  87. -save_skullstrip -suffix _al_junk \
  88. -epi vr_base_min_outlier+orig -epi_base 0 \
  89. -epi_strip 3dAutomask \
  90. -volreg off -tshift off
  91. # ================================= volreg =================================
  92. # align each dset to base volume, align to anat
  93. # register and warp
  94. foreach run ( $runs )
  95. # register each volume to the base image
  96. 3dvolreg -verbose -zpad 1 -base vr_base_min_outlier+orig \
  97. -1Dfile dfile.r$run.1D -prefix rm.epi.volreg.r$run \
  98. -cubic \
  99. -1Dmatrix_save mat.r$run.vr.aff12.1D \
  100. pb01.$subj.r$run.tshift+orig
  101. # create an all-1 dataset to mask the extents of the warp
  102. 3dcalc -overwrite -a pb01.$subj.r$run.tshift+orig -expr 1 \
  103. -prefix rm.epi.all1
  104. # catenate volreg/epi2anat xforms
  105. cat_matvec -ONELINE \
  106. FT_anat_al_junk_mat.aff12.1D -I \
  107. mat.r$run.vr.aff12.1D > mat.r$run.warp.aff12.1D
  108. # apply catenated xform: volreg/epi2anat
  109. 3dAllineate -base FT_anat_ns+orig \
  110. -input pb01.$subj.r$run.tshift+orig \
  111. -1Dmatrix_apply mat.r$run.warp.aff12.1D \
  112. -mast_dxyz 2.5 \
  113. -prefix rm.epi.nomask.r$run
  114. # warp the all-1 dataset for extents masking
  115. 3dAllineate -base FT_anat_ns+orig \
  116. -input rm.epi.all1+orig \
  117. -1Dmatrix_apply mat.r$run.warp.aff12.1D \
  118. -mast_dxyz 2.5 -final NN -quiet \
  119. -prefix rm.epi.1.r$run
  120. # make an extents intersection mask of this run
  121. 3dTstat -min -prefix rm.epi.min.r$run rm.epi.1.r$run+orig
  122. end
  123. # make a single file of registration params
  124. cat dfile.r*.1D > dfile_rall.1D
  125. # ----------------------------------------
  126. # create the extents mask: mask_epi_extents+orig
  127. # (this is a mask of voxels that have valid data at every TR)
  128. 3dMean -datum short -prefix rm.epi.mean rm.epi.min.r*.HEAD
  129. 3dcalc -a rm.epi.mean+orig -expr 'step(a-0.999)' -prefix mask_epi_extents
  130. # and apply the extents mask to the EPI data
  131. # (delete any time series with missing data)
  132. foreach run ( $runs )
  133. 3dcalc -a rm.epi.nomask.r$run+orig -b mask_epi_extents+orig \
  134. -expr 'a*b' -prefix pb02.$subj.r$run.volreg
  135. end
  136. # warp the volreg base EPI dataset to make a final version
  137. cat_matvec -ONELINE FT_anat_al_junk_mat.aff12.1D -I > mat.basewarp.aff12.1D
  138. 3dAllineate -base FT_anat_ns+orig \
  139. -input vr_base_min_outlier+orig \
  140. -1Dmatrix_apply mat.basewarp.aff12.1D \
  141. -mast_dxyz 2.5 \
  142. -prefix final_epi_vr_base_min_outlier
  143. # create an anat_final dataset, aligned with stats
  144. 3dcopy FT_anat_ns+orig anat_final.$subj
  145. # record final registration costs
  146. 3dAllineate -base final_epi_vr_base_min_outlier+orig -allcostX \
  147. -input anat_final.$subj+orig |& tee out.allcostX.txt
  148. # -----------------------------------------
  149. # warp anat follower datasets (identity: resample)
  150. # ======================= surf (map data to surface) =======================
  151. # map EPI data to the surface domain
  152. # set directory variables
  153. set surface_dir = /data/rickr/data/sample/AFNI_data6/FT_analysis/FT/SUMA
  154. # align the surface anatomy with the current experiment anatomy
  155. @SUMA_AlignToExperiment -exp_anat anat_final.$subj+orig \
  156. -surf_anat $surface_dir/FT_SurfVol.nii \
  157. -wd -strip_skull surf_anat \
  158. -atlas_followers -overwrite_resp S \
  159. -prefix ${subj}_SurfVol_Alnd_Exp
  160. # map volume data to the surface of each hemisphere
  161. foreach hemi ( lh rh )
  162. foreach run ( $runs )
  163. 3dVol2Surf -spec $surface_dir/std.60.FT_${hemi}.spec \
  164. -sv ${subj}_SurfVol_Alnd_Exp+orig \
  165. -surf_A smoothwm \
  166. -surf_B pial \
  167. -f_index nodes \
  168. -f_steps 10 \
  169. -map_func ave \
  170. -oob_value 0 \
  171. -grid_parent pb02.$subj.r$run.volreg+orig \
  172. -out_niml pb03.$subj.$hemi.r$run.surf.niml.dset
  173. end
  174. end
  175. # make local script for running suma, and make it executable
  176. echo suma -spec $surface_dir/std.60.FT_lh.spec \
  177. -sv ${subj}_SurfVol_Alnd_Exp+orig > run_suma
  178. chmod 755 run_suma
  179. # =========================== blur (on surface) ============================
  180. foreach hemi ( lh rh )
  181. foreach run ( $runs )
  182. # to save time, estimate blur parameters only once
  183. if ( ! -f surf.smooth.params.1D ) then
  184. SurfSmooth -spec $surface_dir/std.60.FT_${hemi}.spec \
  185. -surf_A smoothwm \
  186. -input pb03.$subj.$hemi.r$run.surf.niml.dset \
  187. -met HEAT_07 \
  188. -target_fwhm 6.0 \
  189. -blurmaster pb03.$subj.$hemi.r$run.surf.niml.dset \
  190. -detrend_master \
  191. -output pb04.$subj.$hemi.r$run.blur.niml.dset \
  192. | tee surf.smooth.params.1D
  193. else
  194. set params = `1dcat surf.smooth.params.1D`
  195. SurfSmooth -spec $surface_dir/std.60.FT_${hemi}.spec \
  196. -surf_A smoothwm \
  197. -input pb03.$subj.$hemi.r$run.surf.niml.dset \
  198. -met HEAT_07 \
  199. -Niter $params[1] \
  200. -sigma $params[2] \
  201. -output pb04.$subj.$hemi.r$run.blur.niml.dset
  202. endif
  203. end
  204. end
  205. # ================================= scale ==================================
  206. # scale each voxel time series to have a mean of 100
  207. # (be sure no negatives creep in)
  208. # (subject to a range of [0,200])
  209. foreach hemi ( lh rh )
  210. foreach run ( $runs )
  211. 3dTstat -prefix rm.$hemi.mean_r$run.niml.dset \
  212. pb04.$subj.$hemi.r$run.blur.niml.dset
  213. 3dcalc -a pb04.$subj.$hemi.r$run.blur.niml.dset \
  214. -b rm.$hemi.mean_r$run.niml.dset \
  215. -expr 'min(200, a/b*100)*step(a)*step(b)' \
  216. -prefix pb05.$subj.$hemi.r$run.scale.niml.dset
  217. end
  218. end
  219. # ================================ regress =================================
  220. # compute de-meaned motion parameters (for use in regression)
  221. 1d_tool.py -infile dfile_rall.1D -set_nruns 3 \
  222. -demean -write motion_demean.1D
  223. # compute motion parameter derivatives (just to have)
  224. 1d_tool.py -infile dfile_rall.1D -set_nruns 3 \
  225. -derivative -demean -write motion_deriv.1D
  226. # convert motion parameters for per-run regression
  227. 1d_tool.py -infile motion_demean.1D -set_nruns 3 \
  228. -split_into_pad_runs mot_demean
  229. # create censor file motion_${subj}_censor.1D, for censoring motion
  230. 1d_tool.py -infile dfile_rall.1D -set_nruns 3 \
  231. -show_censor_count -censor_prev_TR \
  232. -censor_motion 0.3 motion_${subj}
  233. # note TRs that were not censored
  234. set ktrs = `1d_tool.py -infile motion_${subj}_censor.1D \
  235. -show_trs_uncensored encoded`
  236. # ------------------------------
  237. # run the regression analysis
  238. foreach hemi ( lh rh )
  239. 3dDeconvolve -input pb05.$subj.$hemi.r*.scale.niml.dset \
  240. -censor motion_${subj}_censor.1D \
  241. -polort 3 \
  242. -num_stimts 20 \
  243. -stim_times 1 stimuli/AV1_vis.txt 'BLOCK(20,1)' \
  244. -stim_label 1 vis \
  245. -stim_times 2 stimuli/AV2_aud.txt 'BLOCK(20,1)' \
  246. -stim_label 2 aud \
  247. -stim_file 3 mot_demean.r01.1D'[0]' -stim_base 3 \
  248. -stim_label 3 roll_01 \
  249. -stim_file 4 mot_demean.r01.1D'[1]' -stim_base 4 \
  250. -stim_label 4 pitch_01 \
  251. -stim_file 5 mot_demean.r01.1D'[2]' -stim_base 5 \
  252. -stim_label 5 yaw_01 \
  253. -stim_file 6 mot_demean.r01.1D'[3]' -stim_base 6 \
  254. -stim_label 6 dS_01 \
  255. -stim_file 7 mot_demean.r01.1D'[4]' -stim_base 7 \
  256. -stim_label 7 dL_01 \
  257. -stim_file 8 mot_demean.r01.1D'[5]' -stim_base 8 \
  258. -stim_label 8 dP_01 \
  259. -stim_file 9 mot_demean.r02.1D'[0]' -stim_base 9 \
  260. -stim_label 9 roll_02 \
  261. -stim_file 10 mot_demean.r02.1D'[1]' -stim_base 10 \
  262. -stim_label 10 pitch_02 \
  263. -stim_file 11 mot_demean.r02.1D'[2]' -stim_base 11 \
  264. -stim_label 11 yaw_02 \
  265. -stim_file 12 mot_demean.r02.1D'[3]' -stim_base 12 \
  266. -stim_label 12 dS_02 \
  267. -stim_file 13 mot_demean.r02.1D'[4]' -stim_base 13 \
  268. -stim_label 13 dL_02 \
  269. -stim_file 14 mot_demean.r02.1D'[5]' -stim_base 14 \
  270. -stim_label 14 dP_02 \
  271. -stim_file 15 mot_demean.r03.1D'[0]' -stim_base 15 \
  272. -stim_label 15 roll_03 \
  273. -stim_file 16 mot_demean.r03.1D'[1]' -stim_base 16 \
  274. -stim_label 16 pitch_03 \
  275. -stim_file 17 mot_demean.r03.1D'[2]' -stim_base 17 \
  276. -stim_label 17 yaw_03 \
  277. -stim_file 18 mot_demean.r03.1D'[3]' -stim_base 18 \
  278. -stim_label 18 dS_03 \
  279. -stim_file 19 mot_demean.r03.1D'[4]' -stim_base 19 \
  280. -stim_label 19 dL_03 \
  281. -stim_file 20 mot_demean.r03.1D'[5]' -stim_base 20 \
  282. -stim_label 20 dP_03 \
  283. -jobs 2 \
  284. -gltsym 'SYM: vis -aud' \
  285. -glt_label 1 V-A \
  286. -fout -tout -x1D X.xmat.1D -xjpeg X.jpg \
  287. -x1D_uncensored X.nocensor.xmat.1D \
  288. -fitts fitts.$subj.$hemi.niml.dset \
  289. -errts errts.${subj}.$hemi.niml.dset \
  290. -bucket stats.$subj.$hemi.niml.dset
  291. end
  292. # display any large pairwise correlations from the X-matrix
  293. 1d_tool.py -show_cormat_warnings -infile X.xmat.1D |& tee out.cormat_warn.txt
  294. # create an all_runs dataset to match the fitts, errts, etc.
  295. foreach hemi ( lh rh )
  296. 3dTcat -prefix all_runs.$subj.$hemi.niml.dset \
  297. pb05.$subj.$hemi.r*.scale.niml.dset
  298. end
  299. # --------------------------------------------------
  300. # create a temporal signal to noise ratio dataset
  301. # signal: if 'scale' block, mean should be 100
  302. # noise : compute standard deviation of errts
  303. foreach hemi ( lh rh )
  304. 3dTstat -mean -prefix rm.signal.all.$hemi.niml.dset \
  305. all_runs.$subj.$hemi.niml.dset"[$ktrs]"
  306. 3dTstat -stdev -prefix rm.noise.all.$hemi.niml.dset \
  307. errts.${subj}.$hemi.niml.dset"[$ktrs]"
  308. 3dcalc -a rm.signal.all.$hemi.niml.dset \
  309. -b rm.noise.all.$hemi.niml.dset \
  310. -expr 'a/b' -prefix TSNR.$subj.$hemi.niml.dset
  311. end
  312. # create ideal files for fixed response stim types
  313. 1dcat X.nocensor.xmat.1D'[12]' > ideal_vis.1D
  314. 1dcat X.nocensor.xmat.1D'[13]' > ideal_aud.1D
  315. # --------------------------------------------------------
  316. # compute sum of non-baseline regressors from the X-matrix
  317. # (use 1d_tool.py to get list of regressor colums)
  318. set reg_cols = `1d_tool.py -infile X.nocensor.xmat.1D -show_indices_interest`
  319. 3dTstat -sum -prefix sum_ideal.1D X.nocensor.xmat.1D"[$reg_cols]"
  320. # also, create a stimulus-only X-matrix, for easy review
  321. 1dcat X.nocensor.xmat.1D"[$reg_cols]" > X.stim.xmat.1D
  322. # ================== auto block: generate review scripts ===================
  323. # generate a review script for the unprocessed EPI data
  324. gen_epi_review.py -script @epi_review.$subj \
  325. -dsets pb00.$subj.r*.tcat+orig.HEAD
  326. # generate scripts to review single subject results
  327. # (try with defaults, but do not allow bad exit status)
  328. gen_ss_review_scripts.py -mot_limit 0.3 -exit0
  329. # ========================== auto block: finalize ==========================
  330. # remove temporary files
  331. \rm -f rm.*
  332. # if the basic subject review script is here, run it
  333. # (want this to be the last text output)
  334. if ( -e @ss_review_basic ) ./@ss_review_basic |& tee out.ss_review.$subj.txt
  335. # return to parent directory
  336. cd ..
  337. echo "execution finished: `date`"
  338. # ==========================================================================
  339. # script generated by the command:
  340. #
  341. # afni_proc.py -subj_id FT.surf -blocks tshift align volreg surf blur scale \
  342. # regress -copy_anat FT/FT_anat+orig -dsets FT/FT_epi_r1+orig.HEAD \
  343. # FT/FT_epi_r2+orig.HEAD FT/FT_epi_r3+orig.HEAD -surf_anat \
  344. # FT/SUMA/FT_SurfVol.nii -surf_spec FT/SUMA/std.60.FT_lh.spec \
  345. # FT/SUMA/std.60.FT_rh.spec -tcat_remove_first_trs 2 -volreg_align_to \
  346. # MIN_OUTLIER -volreg_align_e2a -blur_size 6 -regress_stim_times \
  347. # FT/AV1_vis.txt FT/AV2_aud.txt -regress_stim_labels vis aud \
  348. # -regress_basis 'BLOCK(20,1)' -regress_motion_per_run \
  349. # -regress_censor_motion 0.3 -regress_opts_3dD -jobs 2 -gltsym 'SYM: vis \
  350. # -aud' -glt_label 1 V-A