SLURM/ArrayJobs
<pre>
#!/bin/bash

#####################
# job-array example #
#####################

#SBATCH --job-name=example

# 16 tasks will be created in this array
#SBATCH --array=1-16

# run for five minutes
#               d-hh:mm:ss
#SBATCH --time=0-00:05:00

# short partition should do it
#SBATCH --partition short

# 500MB memory per core
# this is a hard limit
#SBATCH --mem-per-cpu=500MB

# you may not place bash commands before the last SBATCH directive

# define and create a unique scratch directory
SCRATCH_DIRECTORY=/scratch0/${USER}/job-array-example/${SLURM_JOBID}
mkdir -p ${SCRATCH_DIRECTORY}
cd ${SCRATCH_DIRECTORY}

cp ${SLURM_SUBMIT_DIR}/test.py ${SCRATCH_DIRECTORY}

# each task will see a different ${SLURM_ARRAY_TASK_ID}
echo "now processing task id: " ${SLURM_ARRAY_TASK_ID}
python test.py > output_${SLURM_ARRAY_TASK_ID}.txt

# after the job is done we copy our output back to $SLURM_SUBMIT_DIR
cp output_${SLURM_ARRAY_TASK_ID}.txt ${SLURM_SUBMIT_DIR}

# we step out of the scratch directory and remove it
cd ${SLURM_SUBMIT_DIR}
rm -rf ${SCRATCH_DIRECTORY}

# happy end
exit 0
</pre>
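The whole array is submitted with a single sbatch call, and SLURM launches one task per index in the range given by --array. Because ${SLURM_JOBID} is unique for each array task, every task gets its own scratch directory, and output_${SLURM_ARRAY_TASK_ID}.txt keeps the results apart. Below is a minimal sketch of submitting and monitoring the array; the file name job-array.sh and the concrete job id are placeholders, not part of the script above.

<pre>
# submit the array once; SLURM expands it into tasks 1-16
sbatch job-array.sh              # "job-array.sh" is a placeholder file name

# the index range can also be overridden at submit time;
# "%4" limits the array to at most 4 tasks running at the same time
sbatch --array=1-8%4 job-array.sh

# monitor the array; tasks appear as <jobid>_<taskid>
squeue -u ${USER}

# cancel a single task or the whole array
scancel <jobid>_3
scancel <jobid>
</pre>

If each task should process a different input, ${SLURM_ARRAY_TASK_ID} can be used inside the script to select it. A sketch under the assumption that inputs are named input_1.dat ... input_16.dat and that test.py accepts a file argument (neither is guaranteed by the example above):

<pre>
# pick one input file per array task (assumed naming scheme)
INPUT_FILE=${SLURM_SUBMIT_DIR}/input_${SLURM_ARRAY_TASK_ID}.dat
python test.py ${INPUT_FILE} > output_${SLURM_ARRAY_TASK_ID}.txt
</pre>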