I want to create an array of jobs, but have them execute on different nodes. This is what I'm trying:

```

balter@exahead1:~/slurm_tutorial$ cat nodes.sub
#!/bin/bash


#SBATCH --job-name=nodes
#SBATCH --array=0-3
#SBATCH --nodes=4
#SBATCH --tasks-per-node=1
##SBATCH --ntasks=4
#SBATCH --output="hello_%N_%A_%a_%j.out"
#SBATCH --error="hello_%N_%A_%a_%j.err"

srun echo "SLURM_JOB_NAME: $SLURM_JOB_NAME   SLURM_JOB_ID: $SLURM_JOB_ID  
SLURM_ARRAY_TASK_ID: $SLURM_ARRAY_TASK_ID SLURM_ARRAY_JOB_ID: $SLURM_ARRAY_JOB_ID   
SLURM_PROCID: $SLURM_PROCID  hostname: $(hostname)"

balter@exahead1:~/slurm_tutorial$ sbatch nodes.sub
balter@exahead1:~/slurm_tutorial$ for i in *.out; do echo "*** $i ***"; cat $i; 
done

*** hello_exanode-6-3_408_0_409.out ***

SLURM_JOB_NAME: nodes   SLURM_JOB_ID: 409  SLURM_ARRAY_TASK_ID: 0 
SLURM_ARRAY_JOB_ID: 408   SLURM_PROCID: 0  hostname: exanode-6-3

SLURM_JOB_NAME: nodes   SLURM_JOB_ID: 409  SLURM_ARRAY_TASK_ID: 0 
SLURM_ARRAY_JOB_ID: 408   SLURM_PROCID: 0  hostname: exanode-6-3

SLURM_JOB_NAME: nodes   SLURM_JOB_ID: 409  SLURM_ARRAY_TASK_ID: 0 
SLURM_ARRAY_JOB_ID: 408   SLURM_PROCID: 0  hostname: exanode-6-3
SLURM_JOB_NAME: nodes   SLURM_JOB_ID: 409  SLURM_ARRAY_TASK_ID: 0 
SLURM_ARRAY_JOB_ID: 408   SLURM_PROCID: 0  hostname: exanode-6-3
*** hello_exanode-6-3_408_1_410.out ***
SLURM_JOB_NAME: nodes   SLURM_JOB_ID: 410  SLURM_ARRAY_TASK_ID: 1 
SLURM_ARRAY_JOB_ID: 408   SLURM_PROCID: 0  hostname: exanode-6-3
SLURM_JOB_NAME: nodes   SLURM_JOB_ID: 410  SLURM_ARRAY_TASK_ID: 1 
SLURM_ARRAY_JOB_ID: 408   SLURM_PROCID: 0  hostname: exanode-6-3
SLURM_JOB_NAME: nodes   SLURM_JOB_ID: 410  SLURM_ARRAY_TASK_ID: 1 
SLURM_ARRAY_JOB_ID: 408   SLURM_PROCID: 0  hostname: exanode-6-3
SLURM_JOB_NAME: nodes   SLURM_JOB_ID: 410  SLURM_ARRAY_TASK_ID: 1 
SLURM_ARRAY_JOB_ID: 408   SLURM_PROCID: 0  hostname: exanode-6-3
*** hello_exanode-6-3_408_2_411.out ***
SLURM_JOB_NAME: nodes   SLURM_JOB_ID: 411  SLURM_ARRAY_TASK_ID: 2 
SLURM_ARRAY_JOB_ID: 408   SLURM_PROCID: 0  hostname: exanode-6-3
SLURM_JOB_NAME: nodes   SLURM_JOB_ID: 411  SLURM_ARRAY_TASK_ID: 2 
SLURM_ARRAY_JOB_ID: 408   SLURM_PROCID: 0  hostname: exanode-6-3
SLURM_JOB_NAME: nodes   SLURM_JOB_ID: 411  SLURM_ARRAY_TASK_ID: 2 
SLURM_ARRAY_JOB_ID: 408   SLURM_PROCID: 0  hostname: exanode-6-3
SLURM_JOB_NAME: nodes   SLURM_JOB_ID: 411  SLURM_ARRAY_TASK_ID: 2 
SLURM_ARRAY_JOB_ID: 408   SLURM_PROCID: 0  hostname: exanode-6-3
*** hello_exanode-6-3_408_3_408.out ***
SLURM_JOB_NAME: nodes   SLURM_JOB_ID: 408  SLURM_ARRAY_TASK_ID: 3 
SLURM_ARRAY_JOB_ID: 408   SLURM_PROCID: 0  hostname: exanode-6-3
SLURM_JOB_NAME: nodes   SLURM_JOB_ID: 408  SLURM_ARRAY_TASK_ID: 3 
SLURM_ARRAY_JOB_ID: 408   SLURM_PROCID: 0  hostname: exanode-6-3
SLURM_JOB_NAME: nodes   SLURM_JOB_ID: 408  SLURM_ARRAY_TASK_ID: 3 
SLURM_ARRAY_JOB_ID: 408   SLURM_PROCID: 0  hostname: exanode-6-3
SLURM_JOB_NAME: nodes   SLURM_JOB_ID: 408  SLURM_ARRAY_TASK_ID: 3 
SLURM_ARRAY_JOB_ID: 408   SLURM_PROCID: 0  hostname: exanode-6-3

```

Reply via email to