highspeed-defacing-cluster.sh 3.1 KB

  1. #!/bin/bash
  2. # ==============================================================================
  3. # SCRIPT INFORMATION:
  4. # ==============================================================================
  5. # SCRIPT: DEFACING ANATOMICAL MRI DATA IN A BIDS DATASET
  6. # PROJECT: HIGHSPEED
  7. # WRITTEN BY LENNART WITTKUHN, 2018 - 2020
  8. # CONTACT: WITTKUHN AT MPIB HYPHEN BERLIN DOT MPG DOT DE
  9. # MAX PLANCK RESEARCH GROUP NEUROCODE
  10. # MAX PLANCK INSTITUTE FOR HUMAN DEVELOPMENT
  11. # MAX PLANCK UCL CENTRE FOR COMPUTATIONAL PSYCHIATRY AND AGEING RESEARCH
  12. # LENTZEALLEE 94, 14195 BERLIN, GERMANY
  13. # ==============================================================================
  14. # DEFINE ALL PATHS:
  15. # ==============================================================================
  16. # define home directory:
  17. PATH_BASE="${HOME}"
  18. # define the name of the current task:
  19. TASK_NAME="pydeface"
  20. # define the name of the project:
  21. PATH_ROOT="${PATH_BASE}/highspeed"
  22. # define the name of the project:
  23. PROJECT_NAME="highspeed-bids"
  24. # define the path to the project folder:
  25. PATH_PROJECT="${PATH_ROOT}/${PROJECT_NAME}"
  26. # define the path to the singularity container:
  27. PATH_CONTAINER="${PATH_PROJECT}/tools/${TASK_NAME}/${TASK_NAME}_37-2e0c2d.sif"
  28. # path to the log directory:
  29. PATH_LOG="${PATH_PROJECT}/logs/${TASK_NAME}"
  30. # path to the data directory (in BIDS format):
  31. PATH_BIDS="${PATH_PROJECT}"
  32. # ==============================================================================
  33. # CREATE RELEVANT DIRECTORIES:
  34. # ==============================================================================
  35. # create directory for log files:
  36. if [ ! -d ${PATH_LOG} ]; then
  37. mkdir -p ${PATH_LOG}
  38. else
  39. # remove old log files inside the log container:
  40. rm -r ${PATH_LOG}/*
  41. fi
  42. # ==============================================================================
  43. # DEFINE PARAMETERS:
  44. # ==============================================================================
  45. # maximum number of cpus per process:
  46. N_CPUS=1
  47. # memory demand in *MB*
  48. MEM_MB=500
  49. # memory demand in *KB*
  50. MEM_KB="$((${MEM_MB} * 1000))"
  51. # ==============================================================================
  52. # RUN PYDEFACE:
  53. # ==============================================================================
  54. for FILE in ${PATH_BIDS}/*/*/anat/*T1w.nii.gz; do
  55. # to just get filename from a given path:
  56. FILE_BASENAME="$(basename -- $FILE)"
  57. # get the parent directory:
  58. FILE_PARENT="$(dirname "$FILE")"
  59. # create cluster job:
  60. echo "#!/bin/bash" > job
  61. # name of the job
  62. echo "#SBATCH --job-name pydeface_${FILE_BASENAME}" >> job
  63. # set the expected maximum running time for the job:
  64. echo "#SBATCH --time 1:00:00" >> job
  65. # determine how much RAM your operation needs:
  66. echo "#SBATCH --mem ${MEM_MB}MB" >> job
  67. # email notification on abort/end, use 'n' for no notification:
  68. echo "#SBATCH --mail-type NONE" >> job
  69. # writelog to log folder
  70. echo "#SBATCH --output ${PATH_LOG}/slurm-%j.out" >> job
  71. # request multiple cpus
  72. echo "#SBATCH --cpus-per-task ${N_CPUS}" >> job
  73. # define the main command:
  74. echo "singularity run --contain -B ${FILE_PARENT}:/input:rw ${PATH_CONTAINER} \
  75. pydeface /input/${FILE_BASENAME} --force" >> job
  76. # submit job to cluster queue and remove it to avoid confusion:
  77. sbatch job
  78. rm -f job
  79. done