-
Notifications
You must be signed in to change notification settings - Fork 0
/
init_benchmarks.sh
102 lines (77 loc) · 3.05 KB
/
init_benchmarks.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
#!/bin/bash
# Provision SLURM batch scripts and container images for MPI benchmarks
# (mpiBench + OSU micro-benchmarks) on bare metal, Singularity and
# Charliecloud runtimes.
#
# Fail fast: abort on command errors, unset variables and pipeline failures.
set -euo pipefail

# batch no container
# Destination for all generated sbatch scripts (tilde left unquoted so it
# expands; uses are quoted below).
scripts_dir=~/workspace/bench
mkdir -p "$scripts_dir"
echo "writing no container batch scripts"
# Generate the bare-metal mpiBench batch script. The quoted 'EOF' delimiter
# keeps $1 and $UCX_IB_PKEY literal here — they expand when the script runs
# under sbatch. Use '>' (not '>>') so re-running this installer overwrites
# the script instead of appending a duplicate copy.
cat << 'EOF' > "$scripts_dir/mpibench_nocont.sh"
#!/bin/bash
#SBATCH --time=00:10:00
#SBATCH --partition=hpc
module purge
module load mpi/openmpi
mpirun --map-by node -mca pml ucx --mca btl ^vader,tcp,openib -x UCX_NET_DEVICES=mlx5_0:1 -x UCX_IB_PKEY=$UCX_IB_PKEY /home/azureuser/workspace/mpiBench-master/mpiBench -e $1
EOF
# Generate the bare-metal OSU micro-benchmarks batch script. $1 selects the
# benchmark binary and ${@:2} forwards its arguments, both expanded at sbatch
# run time (quoted 'EOF'). '>' (not '>>') keeps re-runs idempotent instead of
# appending duplicate content.
cat << 'EOF' > "$scripts_dir/osu_nocont.sh"
#!/bin/bash
#SBATCH --time=00:10:00
#SBATCH --partition=hpc
module purge
module load mpi/openmpi
mpirun --map-by node -mca pml ucx --mca btl ^vader,tcp,openib -x UCX_NET_DEVICES=mlx5_0:1 -x UCX_IB_PKEY=$UCX_IB_PKEY /home/azureuser/workspace/osu-micro-benchmarks-5.6.3/$1 ${@:2}
EOF
# Pre-fetch the benchmark image from Docker Hub; the Singularity and
# Charliecloud sections below convert it from the local docker daemon.
echo "pulling docker containers"
hpc_image='nichr/hpc-bench:v2'
sudo docker pull "$hpc_image"
#sudo docker pull nichr/hpc-bench:v3
# singularity
# Build area for the Singularity image converted from the docker image.
mkdir -p ~/workspace/singularity
cd ~/workspace/singularity || exit 1
echo "writing sif definition file"
# Definition file bootstrapping from the image already in the local docker
# daemon. '>' (not '>>') so a re-run rewrites the file — appending would
# duplicate the BootStrap/From stanza and corrupt the definition.
cat << 'EOF' > v2.def
BootStrap: docker-daemon
From: nichr/hpc-bench:v2
EOF
# build singularity
# Convert the definition file into a SIF image in the current directory
# (requires root; absolute path in case singularity is not on sudo's PATH).
echo "building sif"
singularity_bin=/usr/local/bin/singularity
sudo "$singularity_bin" build v2.sif v2.def
echo "writing singularity batch scripts"
# Generate the Singularity mpiBench batch script: mpirun launches one
# 'singularity run' per rank. $1 and $UCX_IB_PKEY expand at sbatch run time
# (quoted 'EOF'). '>' (not '>>') keeps re-runs from appending duplicates.
cat << 'EOF' > "$scripts_dir/mpibench_singularity.sh"
#!/bin/bash
#SBATCH --time=00:10:00
#SBATCH --partition=hpc
module purge
module load mpi/openmpi
mpirun --map-by node -mca pml ucx --mca btl ^vader,tcp,openib -x UCX_NET_DEVICES=mlx5_0:1 -x UCX_IB_PKEY=$UCX_IB_PKEY singularity run /home/azureuser/workspace/singularity/v2.sif /opt/benchmarks/mpiBench/mpiBench -e $1
EOF
# Generate the Singularity OSU batch script. $1 selects the benchmark inside
# the container and ${@:2} forwards its arguments at sbatch run time (quoted
# 'EOF'). '>' (not '>>') keeps re-runs idempotent instead of appending
# duplicate content.
cat << 'EOF' > "$scripts_dir/osu_singularity.sh"
#!/bin/bash
#SBATCH --time=00:10:00
#SBATCH --partition=hpc
module purge
module load mpi/openmpi
mpirun --map-by node -mca pml ucx --mca btl ^vader,tcp,openib -x UCX_NET_DEVICES=mlx5_0:1 -x UCX_IB_PKEY=$UCX_IB_PKEY singularity run /home/azureuser/workspace/singularity/v2.sif /opt/benchmarks/osu-micro-benchmarks/$1 ${@:2}
EOF
# charliecloud
echo "building charliecloud container"
# Working area for the flattened tarball and unpacked image tree. Guard the
# cd: both ch-* commands below operate on the current directory.
mkdir -p ~/workspace/charliecloud
cd ~/workspace/charliecloud || exit 1
# Flatten the docker image into a tarball in the current directory.
ch-builder2tar nichr/hpc-bench:v2 .
# Unpack the tarball into a runnable directory tree for ch-run.
# NOTE(review): assumes ch-builder2tar names the tarball
# 'nichr.hpc-bench:v2.tar.gz' ('/' replaced with '.'); verify against the
# installed Charliecloud version's naming convention.
ch-tar2dir nichr.hpc-bench:v2.tar.gz .
echo "writing charliecloud batch scripts"
# Generate the Charliecloud mpiBench batch script: mpirun launches one
# ch-run per rank against the unpacked image tree. $1 and $UCX_IB_PKEY expand
# at sbatch run time (quoted 'EOF'). '>' (not '>>') keeps re-runs from
# appending duplicate copies.
cat << 'EOF' > "$scripts_dir/mpibench_charliecloud.sh"
#!/bin/bash
#SBATCH --time=00:10:00
#SBATCH --partition=hpc
module purge
module load mpi/openmpi
# run with --join https://hpc.github.io/charliecloud/faq.html#communication-between-ranks-on-the-same-node-fails
# --set-env=file_to_env
mpirun --map-by node -mca pml ucx --mca btl ^vader,tcp,openib -x UCX_NET_DEVICES=mlx5_0:1 -x UCX_IB_PKEY=$UCX_IB_PKEY ch-run --join /home/azureuser/workspace/charliecloud/nichr.hpc-bench\:v2 -- /opt/benchmarks/mpiBench/mpiBench -e $1
EOF
# Generate the Charliecloud OSU batch script. $1 selects the benchmark inside
# the image tree and ${@:2} forwards its arguments at sbatch run time (quoted
# 'EOF'). '>' (not '>>') keeps re-runs idempotent instead of appending
# duplicate content.
cat << 'EOF' > "$scripts_dir/osu_charliecloud.sh"
#!/bin/bash
#SBATCH --time=00:10:00
#SBATCH --partition=hpc
module purge
module load mpi/openmpi
mpirun --map-by node -mca pml ucx --mca btl ^vader,tcp,openib -x UCX_NET_DEVICES=mlx5_0:1 -x UCX_IB_PKEY=$UCX_IB_PKEY ch-run --join /home/azureuser/workspace/charliecloud/nichr.hpc-bench\:v2 -- /opt/benchmarks/osu-micro-benchmarks/$1 ${@:2}
EOF