support / hello_world · Commits

Commit a14f1564, authored Mar 09, 2018 by acloirec

Merge branch 'mpi_occigen_or_xor_and' into dev

Parents: 3ccc607e 17de51b1
Changes: 5 files
test_scripts/occigen/mpi/hsw24_and_bdw28.sh

 #!/bin/bash
-#SBATCH -J hsw24_mpi
-#SBATCH --nodes=7
-#SBATCH --ntasks=184
-#SBATCH --cpus-per-task=1
-#SBATCH --time=0:40:00
+#SBATCH -J mpi_hello
+#SBATCH --ntasks=128
+#SBATCH --threads-per-core=1
+#SBATCH --time=0:30:00
 #EXPLICIT AND
-#SBATCH --constraint="[HSW24*3&BDW28*4]"
+#SBATCH --constraint="[HSW24*3&BDW28*2]"
 #SBATCH --exclusive
 #SBATCH --mem=50GB
...
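For context: in Slurm, a bracketed constraint combining features with "&" and node counts, such as --constraint="[HSW24*3&BDW28*2]", is an explicit AND request for 3 nodes carrying the HSW24 feature plus 2 nodes carrying the BDW28 feature. Assuming, as the feature names suggest, 24-core Haswell and 28-core Broadwell nodes with one task per core, the --ntasks values in the diff line up with that arithmetic; a minimal sketch (values taken from the directives above, the echo lines are illustrative only):

echo $(( 3*24 + 4*28 ))   # 184 ranks for the old [HSW24*3&BDW28*4] request
echo $(( 3*24 + 2*28 ))   # 128 ranks for the new [HSW24*3&BDW28*2] request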
test_scripts/occigen/mpi/hsw24_or_bdw28_nodes.sh (new file, mode 100644)

#!/bin/bash
#SBATCH -J mpi_hello
#SBATCH -N 20
#SBATCH --threads-per-core=1
#SBATCH --time=0:40:00
#OR
#SBATCH -C HSW24|BDW28
#SBATCH --exclusive
#SBATCH --mem=50GB
#SBATCH --output hsw_mpi.output.slurm.%J
set -e
export I_MPI_DOMAIN=auto
export I_MPI_PIN_RESPECT_CPUSET=0
export I_MPI_DEBUG=4
module load intel intelmpi
ulimit -s unlimited
export NCPUS=`echo $SLURM_JOB_CPUS_PER_NODE | awk '{gsub(/\(/,"",$0);gsub(/\)/,"",$0); gsub(/x/,"*",$0); gsub(/\,/,"+",$0); print $0}'`
srun -n $((NCPUS)) ../../../bin/hello_mpi
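The NCPUS line deserves a note: with the unbracketed OR constraint, the 20 nodes may be a mix of HSW24 and BDW28, so the per-node core count is not uniform. SLURM_JOB_CPUS_PER_NODE reports the layout in a compressed form such as 24(x12),28(x8), and the awk substitutions rewrite it into an arithmetic expression that $((NCPUS)) then evaluates. A quick sanity check of the same transformation with hypothetical values (not part of the commit):

echo '24(x12),28(x8)' | awk '{gsub(/\(/,"",$0);gsub(/\)/,"",$0); gsub(/x/,"*",$0); gsub(/\,/,"+",$0); print $0}'
# prints 24*12+28*8, which the shell arithmetic expands to 512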
test_scripts/occigen/mpi/hsw24_or_bdw28.sh → test_scripts/occigen/mpi/hsw24_or_bdw28_ntasks.sh

 #!/bin/bash
-#SBATCH -J hsw24_mpi
-#SBATCH -N 7
-#SBATCH --cpus-per-task=1
+#SBATCH -J mpi_hello
+#SBATCH --ntasks=168
+#SBATCH --threads-per-core=1
 #SBATCH --time=0:40:00
-#EXCLUSIVE OR
+#OR
 #SBATCH -C HSW24|BDW28
 #SBATCH --exclusive
 #SBATCH --mem=50GB
-#SBATCH --output hsw_mpi.output.slurm
+#SBATCH --output hsw_mpi.output.slurm.%J
 set -e
...
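The rename marks this script as the task-count-driven variant of the OR request: it fixes --ntasks=168 up front, whereas the new hsw24_or_bdw28_nodes.sh above sizes the job by node count and derives the rank count at run time. Because the plain HSW24|BDW28 constraint lets Slurm mix both node types in one allocation, the per-node layout can be checked from the batch environment; a hypothetical check, not part of the commit:

# prints the compressed per-node layout, e.g. 24(x3),28(x4) for a mixed allocation
echo "$SLURM_JOB_CPUS_PER_NODE"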
test_scripts/occigen/mpi/hsw24_xor_bdw28_nodes.sh (new file, mode 100644)

#!/bin/bash
#SBATCH -J mpi_hello
#SBATCH -N 2
#SBATCH --threads-per-core=1
#SBATCH --time=0:40:00
#EXCLUSIVE OR
#SBATCH -C [HSW24|BDW28]
#SBATCH --exclusive
#SBATCH --mem=50GB
#SBATCH --output hsw_mpi.output.slurm.%J
set -e
export I_MPI_DOMAIN=auto
export I_MPI_PIN_RESPECT_CPUSET=0
export I_MPI_DEBUG=4
module load intel intelmpi
ulimit -s unlimited
export NCPUS=$(( $SLURM_CPUS_ON_NODE * $SLURM_NNODES ))
srun -n $NCPUS ../../../bin/hello_mpi
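The bracketed form -C [HSW24|BDW28] is Slurm's "matching OR": every node in the allocation must carry the same one of the listed features, hence the exclusive-or naming of the script. Since the nodes are then guaranteed identical, the simple product SLURM_CPUS_ON_NODE * SLURM_NNODES is enough to size the MPI run, with no need for the awk parsing used in the plain-OR script. A minimal sketch of that arithmetic, assuming the request is satisfied by the BDW28 partition:

# 2 identical 28-core nodes -> 56 ranks passed to srun
echo $(( 28 * 2 ))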
test_scripts/occigen/mpi/hsw24_xor_bdw28.sh → test_scripts/occigen/mpi/hsw24_xor_bdw28_ntasks.sh

 #!/bin/bash
-#SBATCH -J hsw24_mpi
+#SBATCH -J mpi_hello
 #SBATCH --ntasks=168
-#SBATCH --cpus-per-task=1
+#SBATCH --threads-per-core=1
 #SBATCH --time=0:40:00
 #EXCLUSIVE OR
...
@@ -9,7 +9,7 @@
 #SBATCH --exclusive
 #SBATCH --mem=50GB
-#SBATCH --output hsw_mpi.output.slurm
+#SBATCH --output hsw_mpi.output.slurm.%J
 set -e
...