# s11.proc.FT -- afni_proc.py-generated single-subject processing script
# (file-listing header and fused page line-number index removed)
  1. #!/bin/tcsh -xef
  2. echo "auto-generated by afni_proc.py, Mon Aug 22 15:47:09 2016"
  3. echo "(version 5.01, August 22, 2016)"
  4. echo "execution started: `date`"
  5. # execute via :
  6. # tcsh -xef proc.FT |& tee output.proc.FT
  7. # =========================== auto block: setup ============================
  8. # script setup
  9. # take note of the AFNI version
  10. afni -ver
  11. # check that the current AFNI version is recent enough
  12. afni_history -check_date 1 Dec 2015
  13. if ( $status ) then
  14. echo "** this script requires newer AFNI binaries (than 1 Dec 2015)"
  15. echo " (consider: @update.afni.binaries -defaults)"
  16. exit
  17. endif
  18. # the user may specify a single subject to run with
  19. if ( $#argv > 0 ) then
  20. set subj = $argv[1]
  21. else
  22. set subj = FT
  23. endif
  24. # assign output directory name
  25. set output_dir = $subj.results
  26. # verify that the results directory does not yet exist
  27. if ( -d $output_dir ) then
  28. echo output dir "$subj.results" already exists
  29. exit
  30. endif
  31. # set list of runs
  32. set runs = (`count -digits 2 1 3`)
  33. # create results and stimuli directories
  34. mkdir $output_dir
  35. mkdir $output_dir/stimuli
  36. # copy stim files into stimulus directory
  37. cp FT/AV1_vis.txt FT/AV2_aud.txt $output_dir/stimuli
  38. # copy anatomy to results dir
  39. 3dcopy FT/FT_anat+orig $output_dir/FT_anat
  40. # ============================ auto block: tcat ============================
  41. # apply 3dTcat to copy input dsets to results dir, while
  42. # removing the first 2 TRs
  43. 3dTcat -prefix $output_dir/pb00.$subj.r01.tcat FT/FT_epi_r1+orig'[2..$]'
  44. 3dTcat -prefix $output_dir/pb00.$subj.r02.tcat FT/FT_epi_r2+orig'[2..$]'
  45. 3dTcat -prefix $output_dir/pb00.$subj.r03.tcat FT/FT_epi_r3+orig'[2..$]'
  46. # and make note of repetitions (TRs) per run
  47. set tr_counts = ( 150 150 150 )
  48. # -------------------------------------------------------
  49. # enter the results directory (can begin processing data)
  50. cd $output_dir
  51. # ========================== auto block: outcount ==========================
  52. # data check: compute outlier fraction for each volume
  53. touch out.pre_ss_warn.txt
  54. foreach run ( $runs )
  55. 3dToutcount -automask -fraction -polort 3 -legendre \
  56. pb00.$subj.r$run.tcat+orig > outcount.r$run.1D
  57. # outliers at TR 0 might suggest pre-steady state TRs
  58. if ( `1deval -a outcount.r$run.1D"{0}" -expr "step(a-0.4)"` ) then
  59. echo "** TR #0 outliers: possible pre-steady state TRs in run $run" \
  60. >> out.pre_ss_warn.txt
  61. endif
  62. end
  63. # catenate outlier counts into a single time series
  64. cat outcount.r*.1D > outcount_rall.1D
  65. # ================================= tshift =================================
  66. # time shift data so all slice timing is the same
  67. foreach run ( $runs )
  68. 3dTshift -tzero 0 -quintic -prefix pb01.$subj.r$run.tshift \
  69. pb00.$subj.r$run.tcat+orig
  70. end
  71. # --------------------------------
  72. # extract volreg registration base
  73. 3dbucket -prefix vr_base pb01.$subj.r01.tshift+orig"[2]"
  74. # ================================= volreg =================================
  75. # align each dset to base volume
  76. foreach run ( $runs )
  77. # register each volume to the base
  78. 3dvolreg -verbose -zpad 1 -base vr_base+orig \
  79. -1Dfile dfile.r$run.1D -prefix pb02.$subj.r$run.volreg \
  80. -cubic \
  81. pb01.$subj.r$run.tshift+orig
  82. end
  83. # make a single file of registration params
  84. cat dfile.r*.1D > dfile_rall.1D
  85. # compute motion magnitude time series: the Euclidean norm
  86. # (sqrt(sum squares)) of the motion parameter derivatives
  87. 1d_tool.py -infile dfile_rall.1D -set_nruns 3 \
  88. -derivative -collapse_cols euclidean_norm \
  89. -write motion_${subj}_enorm.1D
  90. # create an anat_final dataset, aligned with stats
  91. 3dcopy FT_anat+orig anat_final.$subj
  92. # ================================== blur ==================================
  93. # blur each volume of each run
  94. foreach run ( $runs )
  95. 3dmerge -1blur_fwhm 4.0 -doall -prefix pb03.$subj.r$run.blur \
  96. pb02.$subj.r$run.volreg+orig
  97. end
  98. # ================================== mask ==================================
  99. # create 'full_mask' dataset (union mask)
  100. foreach run ( $runs )
  101. 3dAutomask -dilate 1 -prefix rm.mask_r$run pb03.$subj.r$run.blur+orig
  102. end
  103. # create union of inputs, output type is byte
  104. 3dmask_tool -inputs rm.mask_r*+orig.HEAD -union -prefix full_mask.$subj
  105. # ================================= scale ==================================
  106. # scale each voxel time series to have a mean of 100
  107. # (be sure no negatives creep in)
  108. # (subject to a range of [0,200])
  109. foreach run ( $runs )
  110. 3dTstat -prefix rm.mean_r$run pb03.$subj.r$run.blur+orig
  111. 3dcalc -a pb03.$subj.r$run.blur+orig -b rm.mean_r$run+orig \
  112. -expr 'min(200, a/b*100)*step(a)*step(b)' \
  113. -prefix pb04.$subj.r$run.scale
  114. end
  115. # ================================ regress =================================
  116. # compute de-meaned motion parameters (for use in regression)
  117. 1d_tool.py -infile dfile_rall.1D -set_nruns 3 \
  118. -demean -write motion_demean.1D
  119. # compute motion parameter derivatives (just to have)
  120. 1d_tool.py -infile dfile_rall.1D -set_nruns 3 \
  121. -derivative -demean -write motion_deriv.1D
  122. # ------------------------------
  123. # run the regression analysis
  124. 3dDeconvolve -input pb04.$subj.r*.scale+orig.HEAD \
  125. -polort 3 \
  126. -num_stimts 8 \
  127. -stim_times 1 stimuli/AV1_vis.txt 'BLOCK(20,1)' \
  128. -stim_label 1 Vrel \
  129. -stim_times 2 stimuli/AV2_aud.txt 'BLOCK(20,1)' \
  130. -stim_label 2 Arel \
  131. -stim_file 3 motion_demean.1D'[0]' -stim_base 3 -stim_label 3 roll \
  132. -stim_file 4 motion_demean.1D'[1]' -stim_base 4 -stim_label 4 pitch \
  133. -stim_file 5 motion_demean.1D'[2]' -stim_base 5 -stim_label 5 yaw \
  134. -stim_file 6 motion_demean.1D'[3]' -stim_base 6 -stim_label 6 dS \
  135. -stim_file 7 motion_demean.1D'[4]' -stim_base 7 -stim_label 7 dL \
  136. -stim_file 8 motion_demean.1D'[5]' -stim_base 8 -stim_label 8 dP \
  137. -gltsym 'SYM: Vrel -Arel' \
  138. -glt_label 1 V-A \
  139. -fout -tout -x1D X.xmat.1D -xjpeg X.jpg \
  140. -fitts fitts.$subj \
  141. -errts errts.${subj} \
  142. -bucket stats.$subj
  143. # if 3dDeconvolve fails, terminate the script
  144. if ( $status != 0 ) then
  145. echo '---------------------------------------'
  146. echo '** 3dDeconvolve error, failing...'
  147. echo ' (consider the file 3dDeconvolve.err)'
  148. exit
  149. endif
  150. # display any large pairwise correlations from the X-matrix
  151. 1d_tool.py -show_cormat_warnings -infile X.xmat.1D |& tee out.cormat_warn.txt
  152. # create an all_runs dataset to match the fitts, errts, etc.
  153. 3dTcat -prefix all_runs.$subj pb04.$subj.r*.scale+orig.HEAD
  154. # --------------------------------------------------
  155. # create a temporal signal to noise ratio dataset
  156. # signal: if 'scale' block, mean should be 100
  157. # noise : compute standard deviation of errts
  158. 3dTstat -mean -prefix rm.signal.all all_runs.$subj+orig
  159. 3dTstat -stdev -prefix rm.noise.all errts.${subj}+orig
  160. 3dcalc -a rm.signal.all+orig \
  161. -b rm.noise.all+orig \
  162. -c full_mask.$subj+orig \
  163. -expr 'c*a/b' -prefix TSNR.$subj
  164. # ---------------------------------------------------
  165. # compute and store GCOR (global correlation average)
  166. # (sum of squares of global mean of unit errts)
  167. 3dTnorm -norm2 -prefix rm.errts.unit errts.${subj}+orig
  168. 3dmaskave -quiet -mask full_mask.$subj+orig rm.errts.unit+orig \
  169. > gmean.errts.unit.1D
  170. 3dTstat -sos -prefix - gmean.errts.unit.1D\' > out.gcor.1D
  171. echo "-- GCOR = `cat out.gcor.1D`"
  172. # ---------------------------------------------------
  173. # compute correlation volume
  174. # (per voxel: average correlation across masked brain)
  175. # (now just dot product with average unit time series)
  176. 3dcalc -a rm.errts.unit+orig -b gmean.errts.unit.1D -expr 'a*b' -prefix rm.DP
  177. 3dTstat -sum -prefix corr_brain rm.DP+orig
  178. # create ideal files for fixed response stim types
  179. 1dcat X.xmat.1D'[12]' > ideal_Vrel.1D
  180. 1dcat X.xmat.1D'[13]' > ideal_Arel.1D
  181. # --------------------------------------------------------
  182. # compute sum of non-baseline regressors from the X-matrix
  183. # (use 1d_tool.py to get list of regressor colums)
  184. set reg_cols = `1d_tool.py -infile X.xmat.1D -show_indices_interest`
  185. 3dTstat -sum -prefix sum_ideal.1D X.xmat.1D"[$reg_cols]"
  186. # also, create a stimulus-only X-matrix, for easy review
  187. 1dcat X.xmat.1D"[$reg_cols]" > X.stim.xmat.1D
  188. # ============================ blur estimation =============================
  189. # compute blur estimates
  190. touch blur_est.$subj.1D # start with empty file
  191. # create directory for ACF curve files
  192. mkdir files_ACF
  193. # -- estimate blur for each run in errts --
  194. touch blur.errts.1D
  195. # restrict to uncensored TRs, per run
  196. foreach run ( $runs )
  197. set trs = `1d_tool.py -infile X.xmat.1D -show_trs_uncensored encoded \
  198. -show_trs_run $run`
  199. if ( $trs == "" ) continue
  200. 3dFWHMx -detrend -mask full_mask.$subj+orig \
  201. -ACF files_ACF/out.3dFWHMx.ACF.errts.r$run.1D \
  202. errts.${subj}+orig"[$trs]" >> blur.errts.1D
  203. end
  204. # compute average FWHM blur (from every other row) and append
  205. set blurs = ( `3dTstat -mean -prefix - blur.errts.1D'{0..$(2)}'\'` )
  206. echo average errts FWHM blurs: $blurs
  207. echo "$blurs # errts FWHM blur estimates" >> blur_est.$subj.1D
  208. # compute average ACF blur (from every other row) and append
  209. set blurs = ( `3dTstat -mean -prefix - blur.errts.1D'{1..$(2)}'\'` )
  210. echo average errts ACF blurs: $blurs
  211. echo "$blurs # errts ACF blur estimates" >> blur_est.$subj.1D
  212. # add 3dClustSim results as attributes to any stats dset
  213. mkdir files_ClustSim
  214. # run Monte Carlo simulations using method 'ACF'
  215. set params = ( `grep ACF blur_est.$subj.1D | tail -n 1` )
  216. 3dClustSim -both -mask full_mask.$subj+orig -acf $params[1-3] \
  217. -cmd 3dClustSim.ACF.cmd -prefix files_ClustSim/ClustSim.ACF
  218. # run 3drefit to attach 3dClustSim results to stats
  219. set cmd = ( `cat 3dClustSim.ACF.cmd` )
  220. $cmd stats.$subj+orig
  221. # ================== auto block: generate review scripts ===================
  222. # generate a review script for the unprocessed EPI data
  223. gen_epi_review.py -script @epi_review.$subj \
  224. -dsets pb00.$subj.r*.tcat+orig.HEAD
  225. # generate scripts to review single subject results
  226. # (try with defaults, but do not allow bad exit status)
  227. gen_ss_review_scripts.py -exit0
  228. # ========================== auto block: finalize ==========================
  229. # remove temporary files
  230. \rm -f rm.*
  231. # if the basic subject review script is here, run it
  232. # (want this to be the last text output)
  233. if ( -e @ss_review_basic ) ./@ss_review_basic |& tee out.ss_review.$subj.txt
  234. # return to parent directory
  235. cd ..
  236. echo "execution finished: `date`"
# ==========================================================================
# script generated by the command:
#
# afni_proc.py -subj_id FT -dsets FT/FT_epi_r1+orig.HEAD \
#     FT/FT_epi_r2+orig.HEAD FT/FT_epi_r3+orig.HEAD -copy_anat \
#     FT/FT_anat+orig -tcat_remove_first_trs 2 -regress_stim_times \
#     FT/AV1_vis.txt FT/AV2_aud.txt -regress_stim_labels Vrel Arel \
#     -regress_basis 'BLOCK(20,1)' -regress_est_blur_errts -regress_opts_3dD \
#     -gltsym 'SYM: Vrel -Arel' -glt_label 1 V-A