s15.proc.FT.uber

#!/bin/tcsh -xef

echo "auto-generated by afni_proc.py, Thu May 23 13:05:05 2019"
echo "(version 6.37, May 23, 2019)"
echo "execution started: `date`"

# to execute via tcsh:
#   tcsh -xef proc.FT |& tee output.proc.FT
# to execute via bash:
#   tcsh -xef proc.FT 2>&1 | tee output.proc.FT

# =========================== auto block: setup ============================
# script setup

# take note of the AFNI version
afni -ver

# check that the current AFNI version is recent enough
afni_history -check_date 10 May 2019
if ( $status ) then
    echo "** this script requires newer AFNI binaries (than 10 May 2019)"
    echo "   (consider: @update.afni.binaries -defaults)"
    exit
endif

# the user may specify a single subject to run with
if ( $#argv > 0 ) then
    set subj = $argv[1]
else
    set subj = FT
endif

# assign output directory name
set output_dir = $subj.results

# verify that the results directory does not yet exist
if ( -d $output_dir ) then
    echo output dir "$subj.results" already exists
    exit
endif

# set list of runs
set runs = (`count -digits 2 1 3`)

# create results and stimuli directories
mkdir $output_dir
mkdir $output_dir/stimuli

# copy stim files into stimulus directory
cp FT/AV1_vis.txt FT/AV2_aud.txt $output_dir/stimuli

# copy anatomy to results dir
3dcopy FT/FT_anat+orig $output_dir/FT_anat

# ============================ auto block: tcat ============================
# apply 3dTcat to copy input dsets to results dir,
# while removing the first 2 TRs
3dTcat -prefix $output_dir/pb00.$subj.r01.tcat FT/FT_epi_r1+orig'[2..$]'
3dTcat -prefix $output_dir/pb00.$subj.r02.tcat FT/FT_epi_r2+orig'[2..$]'
3dTcat -prefix $output_dir/pb00.$subj.r03.tcat FT/FT_epi_r3+orig'[2..$]'

# and make note of repetitions (TRs) per run
set tr_counts = ( 150 150 150 )

# -------------------------------------------------------
# enter the results directory (can begin processing data)
cd $output_dir

# ---------------------------------------------------------
# data check: compute correlations with spherical ~averages
@radial_correlate -nfirst 0 -do_clean yes -rdir radcor.pb00.tcat \
    pb00.$subj.r*.tcat+orig.HEAD

# ========================== auto block: outcount ==========================
# data check: compute outlier fraction for each volume
touch out.pre_ss_warn.txt
foreach run ( $runs )
    3dToutcount -automask -fraction -polort 3 -legendre \
        pb00.$subj.r$run.tcat+orig > outcount.r$run.1D

    # outliers at TR 0 might suggest pre-steady state TRs
    if ( `1deval -a outcount.r$run.1D"{0}" -expr "step(a-0.4)"` ) then
        echo "** TR #0 outliers: possible pre-steady state TRs in run $run" \
            >> out.pre_ss_warn.txt
    endif
end

# catenate outlier counts into a single time series
cat outcount.r*.1D > outcount_rall.1D

# get run number and TR index for minimum outlier volume
set minindex = `3dTstat -argmin -prefix - outcount_rall.1D\'`
set ovals = ( `1d_tool.py -set_run_lengths $tr_counts \
    -index_to_run_tr $minindex` )

# save run and TR indices for extraction of vr_base_min_outlier
set minoutrun = $ovals[1]
set minouttr  = $ovals[2]
echo "min outlier: run $minoutrun, TR $minouttr" | tee out.min_outlier.txt
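# (the volume with the smallest outlier fraction will later serve as the EPI
#  registration base, vr_base_min_outlier, reducing the chance of aligning
#  to a motion-corrupted volume)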
  79. # ================================= tshift =================================
  80. # time shift data so all slice timing is the same
  81. foreach run ( $runs )
  82. 3dTshift -tzero 0 -quintic -prefix pb01.$subj.r$run.tshift \
  83. pb00.$subj.r$run.tcat+orig
  84. end
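# (-tzero 0 aligns every slice's time series to the beginning of each TR,
#  using quintic temporal interpolation)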

# --------------------------------
# extract volreg registration base
3dbucket -prefix vr_base_min_outlier \
    pb01.$subj.r$minoutrun.tshift+orig"[$minouttr]"

# ================================= align ==================================
# for e2a: compute anat alignment transformation to EPI registration base
# (new anat will be intermediate, stripped, FT_anat_ns+orig)
align_epi_anat.py -anat2epi -anat FT_anat+orig     \
    -save_skullstrip -suffix _al_junk              \
    -epi vr_base_min_outlier+orig -epi_base 0      \
    -epi_strip 3dAutomask                          \
    -check_flip                                    \
    -volreg off -tshift off
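# (this produces the affine anat-to-EPI transform FT_anat_al_junk_mat.aff12.1D
#  and a skull-stripped copy of the anat; -check_flip additionally tests a
#  left-right flipped alignment to help catch a left/right flip in the data)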

# ================================== tlrc ==================================
# warp anatomy to standard space
@auto_tlrc -base TT_N27+tlrc -input FT_anat_ns+orig -no_ss

# store forward transformation matrix in a text file
cat_matvec FT_anat_ns+tlrc::WARP_DATA -I > warp.anat.Xat.1D

# ================================= volreg =================================
# align each dset to base volume, to anat, warp to tlrc space

# verify that we have a +tlrc warp dataset
if ( ! -f FT_anat_ns+tlrc.HEAD ) then
    echo "** missing +tlrc warp dataset: FT_anat_ns+tlrc.HEAD"
    exit
endif

# register and warp
foreach run ( $runs )
    # register each volume to the base image
    3dvolreg -verbose -zpad 1 -base vr_base_min_outlier+orig \
        -1Dfile dfile.r$run.1D -prefix rm.epi.volreg.r$run   \
        -cubic                                               \
        -1Dmatrix_save mat.r$run.vr.aff12.1D                 \
        pb01.$subj.r$run.tshift+orig

    # create an all-1 dataset to mask the extents of the warp
    3dcalc -overwrite -a pb01.$subj.r$run.tshift+orig -expr 1 \
        -prefix rm.epi.all1

    # catenate volreg/epi2anat/tlrc xforms
    cat_matvec -ONELINE                     \
        FT_anat_ns+tlrc::WARP_DATA -I       \
        FT_anat_al_junk_mat.aff12.1D -I     \
        mat.r$run.vr.aff12.1D > mat.r$run.warp.aff12.1D

    # apply catenated xform: volreg/epi2anat/tlrc
    3dAllineate -base FT_anat_ns+tlrc               \
        -input pb01.$subj.r$run.tshift+orig         \
        -1Dmatrix_apply mat.r$run.warp.aff12.1D     \
        -mast_dxyz 2.5                              \
        -prefix rm.epi.nomask.r$run

    # warp the all-1 dataset for extents masking
    3dAllineate -base FT_anat_ns+tlrc               \
        -input rm.epi.all1+orig                     \
        -1Dmatrix_apply mat.r$run.warp.aff12.1D     \
        -mast_dxyz 2.5 -final NN -quiet             \
        -prefix rm.epi.1.r$run

    # make an extents intersection mask of this run
    3dTstat -min -prefix rm.epi.min.r$run rm.epi.1.r$run+tlrc
end
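# (catenating the volreg, epi2anat and tlrc transforms and applying them in a
#  single 3dAllineate call means each EPI volume is resampled only once,
#  rather than once per alignment step; the warped all-1 dataset marks which
#  voxels carried valid data through that warp)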

# make a single file of registration params
cat dfile.r*.1D > dfile_rall.1D

# ----------------------------------------
# create the extents mask: mask_epi_extents+tlrc
# (this is a mask of voxels that have valid data at every TR)
3dMean -datum short -prefix rm.epi.mean rm.epi.min.r*.HEAD
3dcalc -a rm.epi.mean+tlrc -expr 'step(a-0.999)' -prefix mask_epi_extents

# and apply the extents mask to the EPI data
# (delete any time series with missing data)
foreach run ( $runs )
    3dcalc -a rm.epi.nomask.r$run+tlrc -b mask_epi_extents+tlrc \
        -expr 'a*b' -prefix pb02.$subj.r$run.volreg
end

# warp the volreg base EPI dataset to make a final version
cat_matvec -ONELINE                 \
    FT_anat_ns+tlrc::WARP_DATA -I   \
    FT_anat_al_junk_mat.aff12.1D -I > mat.basewarp.aff12.1D

3dAllineate -base FT_anat_ns+tlrc               \
    -input vr_base_min_outlier+orig             \
    -1Dmatrix_apply mat.basewarp.aff12.1D       \
    -mast_dxyz 2.5                              \
    -prefix final_epi_vr_base_min_outlier

# create an anat_final dataset, aligned with stats
3dcopy FT_anat_ns+tlrc anat_final.$subj

# record final registration costs
3dAllineate -base final_epi_vr_base_min_outlier+tlrc -allcostX \
    -input anat_final.$subj+tlrc |& tee out.allcostX.txt

# -----------------------------------------
# warp anat follower datasets (affine)
3dAllineate -source FT_anat+orig                    \
    -master anat_final.$subj+tlrc                   \
    -final wsinc5 -1Dmatrix_apply warp.anat.Xat.1D  \
    -prefix anat_w_skull_warped

# ---------------------------------------------------------
# data check: compute correlations with spherical ~averages
@radial_correlate -nfirst 0 -do_clean yes -rdir radcor.pb02.volreg \
    pb02.$subj.r*.volreg+tlrc.HEAD

# ================================== blur ==================================
# blur each volume of each run
foreach run ( $runs )
    3dmerge -1blur_fwhm 4.0 -doall -prefix pb03.$subj.r$run.blur \
        pb02.$subj.r$run.volreg+tlrc
end
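# (each volume is smoothed with a 4.0 mm FWHM Gaussian kernel; the resulting
#  smoothness of the data is estimated later, in the blur estimation block)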

# ================================== mask ==================================
# create 'full_mask' dataset (union mask)
foreach run ( $runs )
    3dAutomask -prefix rm.mask_r$run pb03.$subj.r$run.blur+tlrc
end

# create union of inputs, output type is byte
3dmask_tool -inputs rm.mask_r*+tlrc.HEAD -union -prefix full_mask.$subj

# ---- create subject anatomy mask, mask_anat.$subj+tlrc ----
#      (resampled from tlrc anat)
3dresample -master full_mask.$subj+tlrc -input FT_anat_ns+tlrc \
    -prefix rm.resam.anat

# convert to binary anat mask; fill gaps and holes
3dmask_tool -dilate_input 5 -5 -fill_holes -input rm.resam.anat+tlrc \
    -prefix mask_anat.$subj

# compute tighter EPI mask by intersecting with anat mask
3dmask_tool -input full_mask.$subj+tlrc mask_anat.$subj+tlrc \
    -inter -prefix mask_epi_anat.$subj
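# (mask_epi_anat is the intersection of the EPI union mask and the anatomical
#  mask; it is the mask used below for TSNR and blur estimation)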

# compute overlaps between anat and EPI masks
3dABoverlap -no_automask full_mask.$subj+tlrc mask_anat.$subj+tlrc \
    |& tee out.mask_ae_overlap.txt

# note Dice coefficient of masks, as well
3ddot -dodice full_mask.$subj+tlrc mask_anat.$subj+tlrc \
    |& tee out.mask_ae_dice.txt

# ---- create group anatomy mask, mask_group+tlrc ----
#      (resampled from tlrc base anat, TT_N27+tlrc)
3dresample -master full_mask.$subj+tlrc -prefix ./rm.resam.group \
    -input /home/rickr/abin/TT_N27+tlrc

# convert to binary group mask; fill gaps and holes
3dmask_tool -dilate_input 5 -5 -fill_holes -input rm.resam.group+tlrc \
    -prefix mask_group

# ================================= scale ==================================
# scale each voxel time series to have a mean of 100
# (be sure no negatives creep in)
# (subject to a range of [0,200])
foreach run ( $runs )
    3dTstat -prefix rm.mean_r$run pb03.$subj.r$run.blur+tlrc
    3dcalc -a pb03.$subj.r$run.blur+tlrc -b rm.mean_r$run+tlrc \
        -c mask_epi_extents+tlrc                               \
        -expr 'c * min(200, a/b*100)*step(a)*step(b)'          \
        -prefix pb04.$subj.r$run.scale
end
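# (after scaling, values are interpretable as percent of the voxel's run mean:
#  e.g. a voxel with run mean 80 and value 84 becomes min(200, 84/80*100) = 105,
#  i.e. a 5% signal increase; the step() terms zero out non-positive values,
#  and the extents mask restricts output to voxels with data at every TR)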

# ================================ regress =================================

# compute de-meaned motion parameters (for use in regression)
1d_tool.py -infile dfile_rall.1D -set_nruns 3 \
    -demean -write motion_demean.1D

# compute motion parameter derivatives (just to have)
1d_tool.py -infile dfile_rall.1D -set_nruns 3 \
    -derivative -demean -write motion_deriv.1D

# convert motion parameters for per-run regression
1d_tool.py -infile motion_demean.1D -set_nruns 3 \
    -split_into_pad_runs mot_demean

# create censor file motion_${subj}_censor.1D, for censoring motion
1d_tool.py -infile dfile_rall.1D -set_nruns 3 \
    -show_censor_count -censor_prev_TR        \
    -censor_motion 0.3 motion_${subj}

# note TRs that were not censored
set ktrs = `1d_tool.py -infile motion_${subj}_censor.1D \
    -show_trs_uncensored encoded`
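# (a TR is censored when the Euclidean norm of the motion parameter
#  derivatives exceeds the 0.3 limit; -censor_prev_TR also censors the TR
#  preceding each such motion spike)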

# ------------------------------
# run the regression analysis
3dDeconvolve -input pb04.$subj.r*.scale+tlrc.HEAD       \
    -censor motion_${subj}_censor.1D                    \
    -ortvec mot_demean.r01.1D mot_demean_r01            \
    -ortvec mot_demean.r02.1D mot_demean_r02            \
    -ortvec mot_demean.r03.1D mot_demean_r03            \
    -polort 3                                           \
    -num_stimts 2                                       \
    -stim_times 1 stimuli/AV1_vis.txt 'BLOCK(20,1)'     \
    -stim_label 1 vis                                   \
    -stim_times 2 stimuli/AV2_aud.txt 'BLOCK(20,1)'     \
    -stim_label 2 aud                                   \
    -jobs 2                                             \
    -gltsym 'SYM: vis -aud'                             \
    -glt_label 1 V-A                                    \
    -gltsym 'SYM: 0.5*vis +0.5*aud'                     \
    -glt_label 2 mean.VA                                \
    -fout -tout -x1D X.xmat.1D -xjpeg X.jpg             \
    -x1D_uncensored X.nocensor.xmat.1D                  \
    -errts errts.${subj}                                \
    -bucket stats.$subj
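# (the model: a cubic polynomial baseline per run, 6 de-meaned motion
#  regressors per run as nuisance terms, and two task regressors built from
#  the stimulus timing files convolved with a 20 s BLOCK response; the two
#  GLTs test the vis-aud contrast and the mean of the two conditions)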

# if 3dDeconvolve fails, terminate the script
if ( $status != 0 ) then
    echo '---------------------------------------'
    echo '** 3dDeconvolve error, failing...'
    echo '   (consider the file 3dDeconvolve.err)'
    exit
endif

# display any large pairwise correlations from the X-matrix
1d_tool.py -show_cormat_warnings -infile X.xmat.1D |& tee out.cormat_warn.txt

# display degrees of freedom info from X-matrix
1d_tool.py -show_df_info -infile X.xmat.1D |& tee out.df_info.txt

# create an all_runs dataset to match the fitts, errts, etc.
3dTcat -prefix all_runs.$subj pb04.$subj.r*.scale+tlrc.HEAD

# --------------------------------------------------
# create a temporal signal to noise ratio dataset
#    signal: if 'scale' block, mean should be 100
#    noise : compute standard deviation of errts
3dTstat -mean -prefix rm.signal.all all_runs.$subj+tlrc"[$ktrs]"
3dTstat -stdev -prefix rm.noise.all errts.${subj}+tlrc"[$ktrs]"
3dcalc -a rm.signal.all+tlrc        \
    -b rm.noise.all+tlrc            \
    -c mask_epi_anat.$subj+tlrc     \
    -expr 'c*a/b' -prefix TSNR.$subj
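# (per voxel, TSNR = mean(scaled signal) / stdev(residual), computed over only
#  the uncensored TRs in $ktrs and restricted to mask_epi_anat)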

# ---------------------------------------------------
# compute and store GCOR (global correlation average)
# (sum of squares of global mean of unit errts)
3dTnorm -norm2 -prefix rm.errts.unit errts.${subj}+tlrc
3dmaskave -quiet -mask full_mask.$subj+tlrc rm.errts.unit+tlrc \
    > gmean.errts.unit.1D
3dTstat -sos -prefix - gmean.errts.unit.1D\' > out.gcor.1D
echo "-- GCOR = `cat out.gcor.1D`"
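# (GCOR is the average of all pairwise correlations among residual time series
#  in the mask; with unit-variance residuals this equals the sum of squares of
#  their global mean time series, which is what the commands above compute)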

# ---------------------------------------------------
# compute correlation volume
# (per voxel: average correlation across masked brain)
# (now just dot product with average unit time series)
3dcalc -a rm.errts.unit+tlrc -b gmean.errts.unit.1D -expr 'a*b' -prefix rm.DP
3dTstat -sum -prefix corr_brain rm.DP+tlrc

# create fitts dataset from all_runs and errts
3dcalc -a all_runs.$subj+tlrc -b errts.${subj}+tlrc -expr a-b \
    -prefix fitts.$subj

# create ideal files for fixed response stim types
1dcat X.nocensor.xmat.1D'[12]' > ideal_vis.1D
1dcat X.nocensor.xmat.1D'[13]' > ideal_aud.1D

# --------------------------------------------------------
# compute sum of non-baseline regressors from the X-matrix
# (use 1d_tool.py to get list of regressor columns)
set reg_cols = `1d_tool.py -infile X.nocensor.xmat.1D -show_indices_interest`
3dTstat -sum -prefix sum_ideal.1D X.nocensor.xmat.1D"[$reg_cols]"

# also, create a stimulus-only X-matrix, for easy review
1dcat X.nocensor.xmat.1D"[$reg_cols]" > X.stim.xmat.1D

# ============================ blur estimation =============================
# compute blur estimates
touch blur_est.$subj.1D   # start with empty file

# create directory for ACF curve files
mkdir files_ACF

# -- estimate blur for each run in epits --
touch blur.epits.1D

# restrict to uncensored TRs, per run
foreach run ( $runs )
    set trs = `1d_tool.py -infile X.xmat.1D -show_trs_uncensored encoded \
        -show_trs_run $run`
    if ( $trs == "" ) continue
    3dFWHMx -detrend -mask mask_epi_anat.$subj+tlrc      \
        -ACF files_ACF/out.3dFWHMx.ACF.epits.r$run.1D    \
        all_runs.$subj+tlrc"[$trs]" >> blur.epits.1D
end

# compute average FWHM blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.epits.1D'{0..$(2)}'\'` )
echo average epits FWHM blurs: $blurs
echo "$blurs # epits FWHM blur estimates" >> blur_est.$subj.1D

# compute average ACF blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.epits.1D'{1..$(2)}'\'` )
echo average epits ACF blurs: $blurs
echo "$blurs # epits ACF blur estimates" >> blur_est.$subj.1D

# -- estimate blur for each run in errts --
touch blur.errts.1D

# restrict to uncensored TRs, per run
foreach run ( $runs )
    set trs = `1d_tool.py -infile X.xmat.1D -show_trs_uncensored encoded \
        -show_trs_run $run`
    if ( $trs == "" ) continue
    3dFWHMx -detrend -mask mask_epi_anat.$subj+tlrc      \
        -ACF files_ACF/out.3dFWHMx.ACF.errts.r$run.1D    \
        errts.${subj}+tlrc"[$trs]" >> blur.errts.1D
end

# compute average FWHM blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.errts.1D'{0..$(2)}'\'` )
echo average errts FWHM blurs: $blurs
echo "$blurs # errts FWHM blur estimates" >> blur_est.$subj.1D

# compute average ACF blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.errts.1D'{1..$(2)}'\'` )
echo average errts ACF blurs: $blurs
echo "$blurs # errts ACF blur estimates" >> blur_est.$subj.1D
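# (each 3dFWHMx call appends a line of classic FWHM estimates and a line of
#  ACF parameters (a, b, c, plus a combined FWHM); the averaged ACF values in
#  blur_est.$subj.1D are what 3dClustSim -acf would take as input, though
#  cluster simulation is not run here since -regress_run_clustsim no was used)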

# ================== auto block: generate review scripts ===================

# generate a review script for the unprocessed EPI data
gen_epi_review.py -script @epi_review.$subj \
    -dsets pb00.$subj.r*.tcat+orig.HEAD

# generate scripts to review single subject results
# (try with defaults, but do not allow bad exit status)
gen_ss_review_scripts.py -mot_limit 0.3 -exit0 \
    -ss_review_dset out.ss_review.$subj.txt    \
    -write_uvars_json out.ss_review_uvars.json

# ========================== auto block: finalize ==========================

# remove temporary files
\rm -f rm.*

# if the basic subject review script is here, run it
# (want this to be the last text output)
if ( -e @ss_review_basic ) then
    ./@ss_review_basic |& tee out.ss_review.$subj.txt

    # generate html ss review pages
    # (akin to static images from running @ss_review_driver)
    apqc_make_tcsh.py -review_style basic -subj_dir . \
        -uvar_json out.ss_review_uvars.json
    tcsh @ss_review_html |& tee out.review_html
    apqc_make_html.py -qc_dir QC_$subj

    echo "\nconsider running: \n\n   afni_open -b $subj.results/QC_$subj/index.html\n"
endif

# return to parent directory (just in case...)
cd ..

echo "execution finished: `date`"

# ==========================================================================
# script generated by the command:
#
# afni_proc.py -subj_id FT -blocks tshift align tlrc volreg blur mask scale   \
#     regress -radial_correlate_blocks tcat volreg -copy_anat FT/FT_anat+orig \
#     -dsets FT/FT_epi_r1+orig.HEAD FT/FT_epi_r2+orig.HEAD                    \
#     FT/FT_epi_r3+orig.HEAD -tcat_remove_first_trs 2 -align_opts_aea         \
#     -check_flip -volreg_align_to MIN_OUTLIER -volreg_align_e2a              \
#     -volreg_tlrc_warp -blur_size 4.0 -mask_epi_anat yes -regress_stim_times \
#     FT/AV1_vis.txt FT/AV2_aud.txt -regress_stim_labels vis aud              \
#     -regress_basis 'BLOCK(20,1)' -regress_censor_motion 0.3                 \
#     -regress_motion_per_run -regress_opts_3dD -jobs 2 -gltsym 'SYM: vis     \
#     -aud' -glt_label 1 V-A -gltsym 'SYM: 0.5*vis +0.5*aud' -glt_label 2     \
#     mean.VA -regress_compute_fitts -regress_make_ideal_sum sum_ideal.1D     \
#     -regress_est_blur_epits -regress_est_blur_errts -regress_run_clustsim   \
#     no -execute