Commit 43c7a30f authored by acloirec's avatar acloirec
Browse files

adding openmpi example

parent 71c896d0
......@@ -11,15 +11,27 @@
# NOTE(review): this span is a commit-diff hunk captured without +/- markers,
# so removed (Intel MPI) and added (Open MPI) lines appear together below.
# Verify against the repository before running as a script.
set -e
# Intel MPI pinning/debug settings from the pre-diff version of the script.
export I_MPI_DOMAIN=auto
export I_MPI_PIN_RESPECT_CPUSET=0
export I_MPI_DEBUG=4
module load intel intelmpi
##### Intel MPI, automatic process placement (kept as a commented-out variant)
# module load intel/18.1 intelmpi/2018.1.163
# export I_MPI_DOMAIN=auto
# export I_MPI_PIN_RESPECT_CPUSET=0
# ulimit -s unlimited
# srun ../../../bin/hello_mpi
##### Intel MPI, explicit core pinning for use with mpirun (commented-out variant)
# module load intel/18.1 intelmpi/2018.1.163
# export SLURM_CPU_BIND=NONE
# export I_MPI_PIN=1
# export I_MPI_PIN_PROCESSOR_LIST=0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27
# ulimit -s unlimited
# mpirun ../../../bin/hello_mpi
#### Open MPI, automatic placement (the variant this commit enables)
module load intel/18.1 openmpi/intel/2.0.2
# Unlimited stack size — presumably needed by the MPI test binary; TODO confirm.
ulimit -s unlimited
srun ../../../bin/hello_mpi
......@@ -4,7 +4,7 @@
# Slurm resource request: 168 MPI ranks, 24 per node (i.e. 7 nodes),
# one CPU per rank, on nodes matching the HSW24 constraint
# (presumably 24-core Haswell — confirm with the site docs).
#SBATCH --ntasks=168
#SBATCH --ntasks-per-node=24
#SBATCH --cpus-per-task=1
# NOTE(review): the two --time lines are diff residue (old value 0:40:00,
# new value 0:30:00). Keep only one in the real script; with duplicates,
# the later occurrence typically wins — confirm against sbatch behavior.
#SBATCH --time=0:40:00
#SBATCH --time=0:30:00
#SBATCH -C HSW24
#SBATCH --exclusive
#SBATCH --mem=50GB
......@@ -12,15 +12,14 @@
# NOTE(review): diff hunk without +/- markers — removed (Intel MPI) and
# added (Open MPI) lines appear together; verify before running.
set -e
# Intel MPI pinning/debug settings from the pre-diff version of the script.
export I_MPI_DOMAIN=auto
export I_MPI_PIN_RESPECT_CPUSET=0
export I_MPI_DEBUG=4
module load intel intelmpi
##### Intel MPI, automatic process placement (kept as a commented-out variant)
# module load intel/18.1 intelmpi/2018.1.163
# export I_MPI_DOMAIN=auto
# export I_MPI_PIN_RESPECT_CPUSET=0
# ulimit -s unlimited
# srun ../../../bin/hello_mpi
#### Open MPI, automatic placement (the variant this commit enables)
module load intel/18.1 openmpi/intel/2.0.2
# Unlimited stack size — presumably needed by the MPI test binary; TODO confirm.
ulimit -s unlimited
srun ../../../bin/hello_mpi
......@@ -9,19 +9,18 @@
#SBATCH --exclusive
#SBATCH --mem=50GB
# NOTE(review): the two --output lines are diff residue (old name
# hsw_mpi.output.slurm, new name h_and_b_mpi.output.slurm) — keep only one.
#SBATCH --output hsw_mpi.output.slurm
#SBATCH --output h_and_b_mpi.output.slurm
set -e
# Intel MPI pinning/debug settings from the pre-diff version of the script.
export I_MPI_DOMAIN=auto
export I_MPI_PIN_RESPECT_CPUSET=0
export I_MPI_DEBUG=4
module load intel intelmpi
##### Intel MPI, automatic process placement (kept as a commented-out variant)
# module load intel/18.1 intelmpi/2018.1.163
# export I_MPI_DOMAIN=auto
# export I_MPI_PIN_RESPECT_CPUSET=0
# ulimit -s unlimited
# srun ../../../bin/hello_mpi
#### Open MPI, automatic placement (the variant this commit enables)
module load intel/18.1 openmpi/intel/2.0.2
# Unlimited stack size — presumably needed by the MPI test binary; TODO confirm.
ulimit -s unlimited
srun ../../../bin/hello_mpi
......@@ -9,21 +9,20 @@
#SBATCH --exclusive
#SBATCH --mem=50GB
# NOTE(review): the two --output lines are diff residue (old name vs new
# h_or_b_mpi.output.slurm.%J; %J expands to the job id) — keep only one.
#SBATCH --output hsw_mpi.output.slurm.%J
#SBATCH --output h_or_b_mpi.output.slurm.%J
set -e
# Compute total CPU count from SLURM_JOB_CPUS_PER_NODE, whose compact form
# (e.g. "24(x2),12") is rewritten by awk into an arithmetic expression
# ("24*2+12") evaluated later by $((NCPUS)).
# NOTE(review): this export appears twice (here and just before srun) —
# diff residue from moving the line; one occurrence suffices.
export NCPUS=` echo $SLURM_JOB_CPUS_PER_NODE | awk '{gsub(/\(/,"",$0);gsub(/\)/,"",$0); gsub(/x/,"*",$0); gsub(/\,/,"+",$0); print $0}' `
# Intel MPI pinning/debug settings from the pre-diff version of the script.
export I_MPI_DOMAIN=auto
export I_MPI_PIN_RESPECT_CPUSET=0
export I_MPI_DEBUG=4
##### Intel MPI, automatic process placement (kept as a commented-out variant)
# module load intel/18.1 intelmpi/2018.1.163
# export I_MPI_DOMAIN=auto
# export I_MPI_PIN_RESPECT_CPUSET=0
# ulimit -s unlimited
# srun -n $((NCPUS)) ../../../bin/hello_mpi
module load intel intelmpi
#### Open MPI, automatic placement (the variant this commit enables)
module load intel/18.1 openmpi/intel/2.0.2
# Unlimited stack size — presumably needed by the MPI test binary; TODO confirm.
ulimit -s unlimited
export NCPUS=` echo $SLURM_JOB_CPUS_PER_NODE | awk '{gsub(/\(/,"",$0);gsub(/\)/,"",$0); gsub(/x/,"*",$0); gsub(/\,/,"+",$0); print $0}' `
# $((NCPUS)) evaluates the awk-built arithmetic expression to a rank count.
srun -n $((NCPUS)) ../../../bin/hello_mpi
......@@ -9,19 +9,18 @@
#SBATCH --exclusive
#SBATCH --mem=50GB
# NOTE(review): the two --output lines are diff residue (old name vs new
# h_or_b_t_mpi.output.slurm.%J; %J expands to the job id) — keep only one.
#SBATCH --output hsw_mpi.output.slurm.%J
#SBATCH --output h_or_b_t_mpi.output.slurm.%J
set -e
# Intel MPI pinning/debug settings from the pre-diff version of the script.
export I_MPI_DOMAIN=auto
export I_MPI_PIN_RESPECT_CPUSET=0
export I_MPI_DEBUG=4
module load intel intelmpi
##### Intel MPI, automatic process placement (kept as a commented-out variant)
# module load intel/18.1 intelmpi/2018.1.163
# export I_MPI_DOMAIN=auto
# export I_MPI_PIN_RESPECT_CPUSET=0
# ulimit -s unlimited
# srun ../../../bin/hello_mpi
#### Open MPI, automatic placement (the variant this commit enables)
module load intel/18.1 openmpi/intel/2.0.2
# Unlimited stack size — presumably needed by the MPI test binary; TODO confirm.
ulimit -s unlimited
srun ../../../bin/hello_mpi
......@@ -9,21 +9,20 @@
#SBATCH --exclusive
#SBATCH --mem=50GB
# NOTE(review): the two --output lines are diff residue (old name vs new
# hsw_xor_n_mpi.output.slurm.%J; %J expands to the job id) — keep only one.
#SBATCH --output hsw_mpi.output.slurm.%J
#SBATCH --output hsw_xor_n_mpi.output.slurm.%J
set -e
# Intel MPI pinning/debug settings from the pre-diff version of the script.
export I_MPI_DOMAIN=auto
export I_MPI_PIN_RESPECT_CPUSET=0
export I_MPI_DEBUG=4
# Total rank count = CPUs per node * node count.
# NOTE(review): this export appears twice (here and before srun) — diff
# residue from moving the line; one occurrence suffices.
export NCPUS=$(($SLURM_CPUS_ON_NODE*$SLURM_NNODES))
module load intel intelmpi
##### Intel MPI, automatic process placement (kept as a commented-out variant)
# module load intel/18.1 intelmpi/2018.1.163
# export I_MPI_DOMAIN=auto
# export I_MPI_PIN_RESPECT_CPUSET=0
# ulimit -s unlimited
# srun -n $NCPUS ../../../bin/hello_mpi
#### Open MPI, automatic placement (the variant this commit enables)
module load intel/18.1 openmpi/intel/2.0.2
# Unlimited stack size — presumably needed by the MPI test binary; TODO confirm.
ulimit -s unlimited
export NCPUS=$(($SLURM_CPUS_ON_NODE*$SLURM_NNODES))
srun -n $NCPUS ../../../bin/hello_mpi
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment