#!/bin/tcsh -xef

echo "auto-generated by afni_proc.py, Tue Jan 9 09:34:33 2018"
echo "(version 6.02, December 12, 2017)"
echo "execution started: `date`"

# execute via :
#   tcsh -xef proc.FT |& tee output.proc.FT

# =========================== auto block: setup ============================
# script setup

# take note of the AFNI version
afni -ver

# check that the current AFNI version is recent enough
afni_history -check_date 23 Sep 2016
if ( $status ) then
    echo "** this script requires newer AFNI binaries (than 23 Sep 2016)"
    echo " (consider: @update.afni.binaries -defaults)"
    exit
endif

# the user may specify a single subject to run with
if ( $#argv > 0 ) then
    set subj = $argv[1]
else
    set subj = FT
endif

# assign output directory name
set output_dir = $subj.results

# verify that the results directory does not yet exist
if ( -d $output_dir ) then
    echo output dir "$subj.results" already exists
    exit
endif

# set list of runs
set runs = (`count -digits 2 1 3`)

# create results and stimuli directories
mkdir $output_dir
mkdir $output_dir/stimuli

# copy stim files into stimulus directory
cp FT/AV1_vis.txt FT/AV2_aud.txt $output_dir/stimuli

# copy anatomy to results dir
3dcopy FT/FT_anat+orig $output_dir/FT_anat

# ============================ auto block: tcat ============================
# apply 3dTcat to copy input dsets to results dir, while
# removing the first 2 TRs
3dTcat -prefix $output_dir/pb00.$subj.r01.tcat FT/FT_epi_r1+orig'[2..$]'
3dTcat -prefix $output_dir/pb00.$subj.r02.tcat FT/FT_epi_r2+orig'[2..$]'
3dTcat -prefix $output_dir/pb00.$subj.r03.tcat FT/FT_epi_r3+orig'[2..$]'

# and make note of repetitions (TRs) per run
set tr_counts = ( 150 150 150 )

# -------------------------------------------------------
# enter the results directory (can begin processing data)
cd $output_dir

# ========================== auto block: outcount ==========================
# data check: compute outlier fraction for each volume
touch out.pre_ss_warn.txt
foreach run ( $runs )
    3dToutcount -automask -fraction -polort 3 -legendre \
                pb00.$subj.r$run.tcat+orig > outcount.r$run.1D

    # outliers at TR 0 might suggest pre-steady state TRs
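    # (i.e., warn if the outlier fraction at TR 0 exceeds 0.4)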
    if ( `1deval -a outcount.r$run.1D"{0}" -expr "step(a-0.4)"` ) then
        echo "** TR #0 outliers: possible pre-steady state TRs in run $run" \
            >> out.pre_ss_warn.txt
    endif
end

# catenate outlier counts into a single time series
cat outcount.r*.1D > outcount_rall.1D

# get run number and TR index for minimum outlier volume
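# (the trailing \' transposes the 1D file, so 3dTstat treats it as a time series)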
set minindex = `3dTstat -argmin -prefix - outcount_rall.1D\'`
set ovals = ( `1d_tool.py -set_run_lengths $tr_counts \
                          -index_to_run_tr $minindex` )

# save run and TR indices for extraction of vr_base_min_outlier
set minoutrun = $ovals[1]
set minouttr = $ovals[2]
echo "min outlier: run $minoutrun, TR $minouttr" | tee out.min_outlier.txt

# ================================= tshift =================================
# time shift data so all slice timing is the same
foreach run ( $runs )
    3dTshift -tzero 0 -quintic -prefix pb01.$subj.r$run.tshift \
             pb00.$subj.r$run.tcat+orig
end

# --------------------------------
# extract volreg registration base
3dbucket -prefix vr_base_min_outlier \
    pb01.$subj.r$minoutrun.tshift+orig"[$minouttr]"

# ================================= align ==================================
# for e2a: compute anat alignment transformation to EPI registration base
# (new anat will be intermediate, stripped, FT_anat_ns+orig)
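# (the resulting anat-to-EPI matrix, FT_anat_al_junk_mat.aff12.1D, is inverted
#  and catenated with the volreg and tlrc transforms in the volreg block below)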
align_epi_anat.py -anat2epi -anat FT_anat+orig \
    -save_skullstrip -suffix _al_junk \
    -epi vr_base_min_outlier+orig -epi_base 0 \
    -epi_strip 3dAutomask \
    -volreg off -tshift off

# ================================== tlrc ==================================
# warp anatomy to standard space
@auto_tlrc -base TT_N27+tlrc -input FT_anat_ns+orig -no_ss

# store forward transformation matrix in a text file
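# (WARP_DATA holds the tlrc-to-orig transform, so -I inverts it to orig-to-tlrc)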
cat_matvec FT_anat_ns+tlrc::WARP_DATA -I > warp.anat.Xat.1D

# ================================= volreg =================================
# align each dset to base volume, align to anat, warp to tlrc space

# verify that we have a +tlrc warp dataset
if ( ! -f FT_anat_ns+tlrc.HEAD ) then
    echo "** missing +tlrc warp dataset: FT_anat_ns+tlrc.HEAD"
    exit
endif

# register and warp
foreach run ( $runs )
    # register each volume to the base
    3dvolreg -verbose -zpad 1 -base vr_base_min_outlier+orig \
             -1Dfile dfile.r$run.1D -prefix rm.epi.volreg.r$run \
             -cubic \
             -1Dmatrix_save mat.r$run.vr.aff12.1D \
             pb01.$subj.r$run.tshift+orig

    # create an all-1 dataset to mask the extents of the warp
    3dcalc -overwrite -a pb01.$subj.r$run.tshift+orig -expr 1 \
           -prefix rm.epi.all1

    # catenate volreg/epi2anat/tlrc xforms
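    # (one catenated transform means each EPI volume is resampled only once)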
    cat_matvec -ONELINE \
        FT_anat_ns+tlrc::WARP_DATA -I \
        FT_anat_al_junk_mat.aff12.1D -I \
        mat.r$run.vr.aff12.1D > mat.r$run.warp.aff12.1D

    # apply catenated xform: volreg/epi2anat/tlrc
    3dAllineate -base FT_anat_ns+tlrc \
                -input pb01.$subj.r$run.tshift+orig \
                -1Dmatrix_apply mat.r$run.warp.aff12.1D \
                -mast_dxyz 2.5 \
                -prefix rm.epi.nomask.r$run

    # warp the all-1 dataset for extents masking
    3dAllineate -base FT_anat_ns+tlrc \
                -input rm.epi.all1+orig \
                -1Dmatrix_apply mat.r$run.warp.aff12.1D \
                -mast_dxyz 2.5 -final NN -quiet \
                -prefix rm.epi.1.r$run

    # make an extents intersection mask of this run
    3dTstat -min -prefix rm.epi.min.r$run rm.epi.1.r$run+tlrc
end

# make a single file of registration params
cat dfile.r*.1D > dfile_rall.1D

# ----------------------------------------
# create the extents mask: mask_epi_extents+tlrc
# (this is a mask of voxels that have valid data at every TR)
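# (each run's min is 1 only where data exists at every TR; thresholding the
#  mean of those masks at 0.999 keeps just the voxels covered by every run)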
3dMean -datum short -prefix rm.epi.mean rm.epi.min.r*.HEAD
3dcalc -a rm.epi.mean+tlrc -expr 'step(a-0.999)' -prefix mask_epi_extents

# and apply the extents mask to the EPI data
# (delete any time series with missing data)
foreach run ( $runs )
    3dcalc -a rm.epi.nomask.r$run+tlrc -b mask_epi_extents+tlrc \
           -expr 'a*b' -prefix pb02.$subj.r$run.volreg
end

# warp the volreg base EPI dataset to make a final version
cat_matvec -ONELINE \
    FT_anat_ns+tlrc::WARP_DATA -I \
    FT_anat_al_junk_mat.aff12.1D -I > mat.basewarp.aff12.1D

3dAllineate -base FT_anat_ns+tlrc \
            -input vr_base_min_outlier+orig \
            -1Dmatrix_apply mat.basewarp.aff12.1D \
            -mast_dxyz 2.5 \
            -prefix final_epi_vr_base_min_outlier

# create an anat_final dataset, aligned with stats
3dcopy FT_anat_ns+tlrc anat_final.$subj

# record final registration costs
3dAllineate -base final_epi_vr_base_min_outlier+tlrc -allcostX \
            -input anat_final.$subj+tlrc |& tee out.allcostX.txt

# -----------------------------------------
# warp anat follower datasets (affine)
3dAllineate -source FT_anat+orig \
            -master anat_final.$subj+tlrc \
            -final wsinc5 -1Dmatrix_apply warp.anat.Xat.1D \
            -prefix anat_w_skull_warped

# ================================== blur ==================================
# blur each volume of each run
foreach run ( $runs )
    3dmerge -1blur_fwhm 4.0 -doall -prefix pb03.$subj.r$run.blur \
            pb02.$subj.r$run.volreg+tlrc
end

# ================================== mask ==================================
# create 'full_mask' dataset (union mask)
foreach run ( $runs )
    3dAutomask -dilate 1 -prefix rm.mask_r$run pb03.$subj.r$run.blur+tlrc
end

# create union of inputs, output type is byte
3dmask_tool -inputs rm.mask_r*+tlrc.HEAD -union -prefix full_mask.$subj

# ---- create subject anatomy mask, mask_anat.$subj+tlrc ----
# (resampled from tlrc anat)
3dresample -master full_mask.$subj+tlrc -input FT_anat_ns+tlrc \
           -prefix rm.resam.anat

# convert to binary anat mask; fill gaps and holes
3dmask_tool -dilate_input 5 -5 -fill_holes -input rm.resam.anat+tlrc \
            -prefix mask_anat.$subj

# compute overlaps between anat and EPI masks
3dABoverlap -no_automask full_mask.$subj+tlrc mask_anat.$subj+tlrc \
            |& tee out.mask_ae_overlap.txt

# note Dice coefficient of masks, as well
3ddot -dodice full_mask.$subj+tlrc mask_anat.$subj+tlrc \
      |& tee out.mask_ae_dice.txt

# ---- create group anatomy mask, mask_group+tlrc ----
# (resampled from tlrc base anat, TT_N27+tlrc)
3dresample -master full_mask.$subj+tlrc -prefix ./rm.resam.group \
           -input /home/rickr/abin/TT_N27+tlrc

# convert to binary group mask; fill gaps and holes
3dmask_tool -dilate_input 5 -5 -fill_holes -input rm.resam.group+tlrc \
            -prefix mask_group

# ================================= scale ==================================
# scale each voxel time series to have a mean of 100
# (be sure no negatives creep in)
# (subject to a range of [0,200])
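# (a/b*100 scales to a mean of 100, min(200,...) caps the result,
#  step(a)*step(b) zeros non-positive values, and 'c' applies the extents mask)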
foreach run ( $runs )
    3dTstat -prefix rm.mean_r$run pb03.$subj.r$run.blur+tlrc
    3dcalc -a pb03.$subj.r$run.blur+tlrc -b rm.mean_r$run+tlrc \
           -c mask_epi_extents+tlrc \
           -expr 'c * min(200, a/b*100)*step(a)*step(b)' \
           -prefix pb04.$subj.r$run.scale
end

# ================================ regress =================================
# compute de-meaned motion parameters (for use in regression)
1d_tool.py -infile dfile_rall.1D -set_nruns 3 \
           -demean -write motion_demean.1D

# compute motion parameter derivatives (just to have)
1d_tool.py -infile dfile_rall.1D -set_nruns 3 \
           -derivative -demean -write motion_deriv.1D

# create censor file motion_${subj}_censor.1D, for censoring motion
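# (censor any TR whose motion 'enorm' exceeds 0.3, along with the prior TR)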
1d_tool.py -infile dfile_rall.1D -set_nruns 3 \
           -show_censor_count -censor_prev_TR \
           -censor_motion 0.3 motion_${subj}

# note TRs that were not censored
set ktrs = `1d_tool.py -infile motion_${subj}_censor.1D \
                       -show_trs_uncensored encoded`

# ------------------------------
# run the regression analysis
3dDeconvolve -input pb04.$subj.r*.scale+tlrc.HEAD \
    -censor motion_${subj}_censor.1D \
    -polort 3 \
    -num_stimts 8 \
    -stim_times 1 stimuli/AV1_vis.txt 'BLOCK(20,1)' \
    -stim_label 1 vis \
    -stim_times 2 stimuli/AV2_aud.txt 'BLOCK(20,1)' \
    -stim_label 2 aud \
    -stim_file 3 motion_demean.1D'[0]' -stim_base 3 -stim_label 3 roll \
    -stim_file 4 motion_demean.1D'[1]' -stim_base 4 -stim_label 4 pitch \
    -stim_file 5 motion_demean.1D'[2]' -stim_base 5 -stim_label 5 yaw \
    -stim_file 6 motion_demean.1D'[3]' -stim_base 6 -stim_label 6 dS \
    -stim_file 7 motion_demean.1D'[4]' -stim_base 7 -stim_label 7 dL \
    -stim_file 8 motion_demean.1D'[5]' -stim_base 8 -stim_label 8 dP \
    -jobs 2 \
    -gltsym 'SYM: vis -aud' \
    -glt_label 1 V-A \
    -gltsym 'SYM: 0.5*vis +0.5*aud' \
    -glt_label 2 mean.VA \
    -fout -tout -x1D X.xmat.1D -xjpeg X.jpg \
    -x1D_uncensored X.nocensor.xmat.1D \
    -errts errts.${subj} \
    -bucket stats.$subj

# if 3dDeconvolve fails, terminate the script
if ( $status != 0 ) then
    echo '---------------------------------------'
    echo '** 3dDeconvolve error, failing...'
    echo ' (consider the file 3dDeconvolve.err)'
    exit
endif

# display any large pairwise correlations from the X-matrix
1d_tool.py -show_cormat_warnings -infile X.xmat.1D |& tee out.cormat_warn.txt

# create an all_runs dataset to match the fitts, errts, etc.
3dTcat -prefix all_runs.$subj pb04.$subj.r*.scale+tlrc.HEAD

# --------------------------------------------------
# create a temporal signal to noise ratio dataset
# signal: if 'scale' block, mean should be 100
# noise : compute standard deviation of errts
3dTstat -mean -prefix rm.signal.all all_runs.$subj+tlrc"[$ktrs]"
3dTstat -stdev -prefix rm.noise.all errts.${subj}+tlrc"[$ktrs]"
3dcalc -a rm.signal.all+tlrc \
       -b rm.noise.all+tlrc \
       -c full_mask.$subj+tlrc \
       -expr 'c*a/b' -prefix TSNR.$subj

# ---------------------------------------------------
# compute and store GCOR (global correlation average)
# (sum of squares of global mean of unit errts)
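# (each voxel's errts is scaled to unit length, averaged over the brain mask,
#  and GCOR is the sum of squares of that global mean time series)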
3dTnorm -norm2 -prefix rm.errts.unit errts.${subj}+tlrc
3dmaskave -quiet -mask full_mask.$subj+tlrc rm.errts.unit+tlrc \
          > gmean.errts.unit.1D
3dTstat -sos -prefix - gmean.errts.unit.1D\' > out.gcor.1D
echo "-- GCOR = `cat out.gcor.1D`"

# ---------------------------------------------------
# compute correlation volume
# (per voxel: average correlation across masked brain)
# (now just dot product with average unit time series)
3dcalc -a rm.errts.unit+tlrc -b gmean.errts.unit.1D -expr 'a*b' -prefix rm.DP
3dTstat -sum -prefix corr_brain rm.DP+tlrc

# create fitts dataset from all_runs and errts
3dcalc -a all_runs.$subj+tlrc -b errts.${subj}+tlrc -expr a-b \
       -prefix fitts.$subj

# create ideal files for fixed response stim types
1dcat X.nocensor.xmat.1D'[12]' > ideal_vis.1D
1dcat X.nocensor.xmat.1D'[13]' > ideal_aud.1D

# --------------------------------------------------------
# compute sum of non-baseline regressors from the X-matrix
# (use 1d_tool.py to get list of regressor columns)
set reg_cols = `1d_tool.py -infile X.nocensor.xmat.1D -show_indices_interest`
3dTstat -sum -prefix sum_ideal.1D X.nocensor.xmat.1D"[$reg_cols]"

# also, create a stimulus-only X-matrix, for easy review
1dcat X.nocensor.xmat.1D"[$reg_cols]" > X.stim.xmat.1D

# ============================ blur estimation =============================
# compute blur estimates
touch blur_est.$subj.1D # start with empty file

# create directory for ACF curve files
mkdir files_ACF

# -- estimate blur for each run in epits --
touch blur.epits.1D

# restrict to uncensored TRs, per run
foreach run ( $runs )
    set trs = `1d_tool.py -infile X.xmat.1D -show_trs_uncensored encoded \
                          -show_trs_run $run`
    if ( $trs == "" ) continue
    3dFWHMx -detrend -mask full_mask.$subj+tlrc \
            -ACF files_ACF/out.3dFWHMx.ACF.epits.r$run.1D \
            all_runs.$subj+tlrc"[$trs]" >> blur.epits.1D
end

# compute average FWHM blur (from every other row) and append
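# (with -ACF, 3dFWHMx writes 2 rows per run: classic FWHM estimates first,
#  then ACF parameters, hence the every-other-row selectors)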
set blurs = ( `3dTstat -mean -prefix - blur.epits.1D'{0..$(2)}'\'` )
echo average epits FWHM blurs: $blurs
echo "$blurs # epits FWHM blur estimates" >> blur_est.$subj.1D

# compute average ACF blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.epits.1D'{1..$(2)}'\'` )
echo average epits ACF blurs: $blurs
echo "$blurs # epits ACF blur estimates" >> blur_est.$subj.1D

# -- estimate blur for each run in errts --
touch blur.errts.1D

# restrict to uncensored TRs, per run
foreach run ( $runs )
    set trs = `1d_tool.py -infile X.xmat.1D -show_trs_uncensored encoded \
                          -show_trs_run $run`
    if ( $trs == "" ) continue
    3dFWHMx -detrend -mask full_mask.$subj+tlrc \
            -ACF files_ACF/out.3dFWHMx.ACF.errts.r$run.1D \
            errts.${subj}+tlrc"[$trs]" >> blur.errts.1D
end

# compute average FWHM blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.errts.1D'{0..$(2)}'\'` )
echo average errts FWHM blurs: $blurs
echo "$blurs # errts FWHM blur estimates" >> blur_est.$subj.1D

# compute average ACF blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.errts.1D'{1..$(2)}'\'` )
echo average errts ACF blurs: $blurs
echo "$blurs # errts ACF blur estimates" >> blur_est.$subj.1D

# add 3dClustSim results as attributes to any stats dset
mkdir files_ClustSim

# run Monte Carlo simulations using method 'ACF'
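# (grep/tail picks the last ACF line of blur_est, i.e. the errts estimates;
#  its first 3 values are the a,b,c ACF parameters passed to -acf)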
set params = ( `grep ACF blur_est.$subj.1D | tail -n 1` )
3dClustSim -both -mask full_mask.$subj+tlrc -acf $params[1-3] \
           -cmd 3dClustSim.ACF.cmd -prefix files_ClustSim/ClustSim.ACF

# run 3drefit to attach 3dClustSim results to stats
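# (3dClustSim.ACF.cmd holds the 3drefit command that writes the cluster-size
#  tables into the stats dataset header)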
set cmd = ( `cat 3dClustSim.ACF.cmd` )
$cmd stats.$subj+tlrc

# ================== auto block: generate review scripts ===================

# generate a review script for the unprocessed EPI data
gen_epi_review.py -script @epi_review.$subj \
    -dsets pb00.$subj.r*.tcat+orig.HEAD

# generate scripts to review single subject results
# (try with defaults, but do not allow bad exit status)
gen_ss_review_scripts.py -mot_limit 0.3 -exit0

# ========================== auto block: finalize ==========================

# remove temporary files
\rm -f rm.*

# if the basic subject review script is here, run it
# (want this to be the last text output)
if ( -e @ss_review_basic ) ./@ss_review_basic |& tee out.ss_review.$subj.txt

# return to parent directory
cd ..

echo "execution finished: `date`"

# ==========================================================================
# script generated by the command:
#
# afni_proc.py -subj_id FT -script proc.FT -scr_overwrite -blocks tshift \
#     align tlrc volreg blur mask scale regress -copy_anat FT/FT_anat+orig \
#     -dsets FT/FT_epi_r1+orig.HEAD FT/FT_epi_r2+orig.HEAD \
#     FT/FT_epi_r3+orig.HEAD -tcat_remove_first_trs 2 -volreg_align_to \
#     MIN_OUTLIER -volreg_align_e2a -volreg_tlrc_warp -blur_size 4.0 \
#     -regress_stim_times FT/AV1_vis.txt FT/AV2_aud.txt -regress_stim_labels \
#     vis aud -regress_basis 'BLOCK(20,1)' -regress_censor_motion 0.3 \
#     -regress_opts_3dD -jobs 2 -gltsym 'SYM: vis -aud' -glt_label 1 V-A \
#     -gltsym 'SYM: 0.5*vis +0.5*aud' -glt_label 2 mean.VA \
#     -regress_compute_fitts -regress_make_ideal_sum sum_ideal.1D \
#     -regress_est_blur_epits -regress_est_blur_errts -regress_run_clustsim \
#     yes -execute