Skip to content
GitLab
Projects
Groups
Snippets
/
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
Menu
Open sidebar
UEABS
ueabs
Commits
83ad2e36
Commit
83ad2e36
authored
Oct 14, 2021
by
Cedric Jourdain
Browse files
Add Vega submission scripts
parent
92170478
Changes
4
Show whitespace changes
Inline
Side-by-side
specfem3d/job_script/job_vega-cpu_small_benchmark_run_to_test_more_complex_Earth.slurm
0 → 100644
View file @
83ad2e36
#!/bin/bash
#SBATCH -J specfem_cpu_small_benchmark_run_to_test_more_complex_Earth
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=24
#SBATCH --cpus-per-task=4
#SBATCH --time=11:59:59
#SBATCH --output specfem_cpu_small_benchmark_run_to_test_more_complex_Earth-%j.out
#SBATCH --exclusive
#SBATCH -p cpu

# Run the SPECFEM3D_GLOBE "small_benchmark_run_to_test_more_complex_Earth"
# example on the Vega CPU partition, then compare the produced seismograms
# against the reference outputs shipped with the example.
#set -e

# Load the machine environment, and echo its active (non-comment) lines plus
# this script into the job log for provenance.
source ../env/env_vega-cpu
grep "^[^#;]" ../env/env_vega-cpu
cat job_vega-cpu_small_benchmark_run_to_test_more_complex_Earth.slurm

# install_dir is expected to be exported by env_vega-cpu; abort if it is
# empty so the 'rm' below cannot expand to paths rooted at '/'.
: "${install_dir:?install_dir must be set by ../env/env_vega-cpu}"

# Force a rebuild of the example by removing the generated Makefile and
# the previously built binaries.
rm "${install_dir}/specfem3d_globe/Makefile" \
   "${install_dir}/specfem3d_globe/EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/bin/"*

cd "${install_dir}/specfem3d_globe/EXAMPLES/small_benchmark_run_to_test_more_complex_Earth" || exit 1

#export OMPI_MCA_pml=ucx
#export OMPI_MCA_btl="^uct,tcp,openib,vader"
export OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
#sed -i s/"mpirun -np"/"srun -n"/g $install_dir/specfem3d_globe/EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/run_mesher_solver.bash

# Show the CPU affinity of the parent shell, then run the benchmark.
taskset -a -p "$PPID"
time ./run_this_example.sh

echo "=========="
echo "config.log"
echo "=========="
cat "${install_dir}/TestCaseA/specfem3d_globe/config.log"
echo "========"
echo "make.log"
echo "========"
cat "${install_dir}/TestCaseA/specfem3d_globe/make.log"

echo
echo "running seismogram comparisons:"
echo
cd "${install_dir}/specfem3d_globe/" || exit 1

# uncompress seismograms
if [ -e EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/OUTPUT_FILES_reference_OK/II.AAK.MXE.sem.ascii.bz2 ]; then
  echo
  echo "unzipping references..."
  echo
  # NOTE(review): this creates OUTPUT_FILES_reference_OK/ in the current
  # directory while bunzip2 works inside EXAMPLES/... — confirm the intended
  # target. '-p' keeps a re-run from failing when the directory exists.
  mkdir -p OUTPUT_FILES_reference_OK/
  bunzip2 EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/OUTPUT_FILES_reference_OK/*.bz2
  echo
  echo
fi

#wget https://repo.anaconda.com/miniconda/Miniconda3-py37_4.10.3-Linux-x86_64.sh
#sh /ceph/hpc/home/eucedricj/Miniconda3-py37_4.10.3-Linux-x86_64.sh
#source miniconda3/bin/activate
#conda create --name python2 python=2.7
module purge
conda activate python2

# compares seismograms by plotting correlations
./utils/compare_seismogram_correlations.py EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/OUTPUT_FILES/ EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/OUTPUT_FILES_reference_OK/

echo
echo "done"

# Keep a per-job copy of the solver log for later comparison between runs.
cp "${install_dir}/specfem3d_globe/EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/OUTPUT_FILES/output_solver.txt" \
   "output_solver_${SLURM_JOBID}.txt"
specfem3d/job_script/job_vega-cpu_test_case_A.slurm
0 → 100644
View file @
83ad2e36
#!/bin/bash
#SBATCH -J Test_case_A-cpu
#SBATCH --nodes=24
#SBATCH --ntasks-per-node=4
#SBATCH --cpus-per-task=8
#SBATCH --time=00:30:00
#SBATCH --output specfem-cpu_TestCaseA-gcc-9-8-openMP-nomultithread-Ofast-znver2-OMP_PLACESCores-mpirun-withMCA-%j.output
#SBATCH -p cpu
#SBATCH --hint=nomultithread
#SBATCH --distribution=block:block

# SPECFEM3D_GLOBE Test Case A on the Vega CPU partition: run the mesher and
# the solver via mpirun (24 nodes x 4 ranks x 8 OpenMP threads) and dump the
# build and run logs into the job output.
#set -e

# Load the machine environment, and echo its active (non-comment) lines plus
# this script into the job log for provenance.
source ../env/env_vega-cpu
grep "^[^#;]" ../env/env_vega-cpu
cat job_vega-cpu_test_case_A.slurm

# install_dir is expected to be exported by env_vega-cpu.
: "${install_dir:?install_dir must be set by ../env/env_vega-cpu}"

echo "=========="
echo "config.log"
echo "=========="
cat "${install_dir}/TestCaseA/specfem3d_globe/config.log"
echo "========"
echo "make.log"
echo "========"
cat "${install_dir}/TestCaseA/specfem3d_globe/make.log"

cd "${install_dir}/TestCaseA/specfem3d_globe" || exit 1

#export SLURM_CPU_BIND=NONE
export OMPI_MCA_pml=ucx
export OMPI_MCA_btl="^uct,tcp,openib,vader"
#self,vader,openib" # with ^ucx and ^tcp -> error occurred in MPI_Bcast
#Make sure that OMP_NUM_THREADS / KMP_HW_SUBSET = cpus-per-task
#export KMP_HW_SUBSET=2T
export OMP_PLACES=cores
#sockets
#export OMP_SCHEDULE=DYNAMIC
export OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK

# Unlimited stack: the solver allocates large arrays on the stack.
ulimit -s unlimited

MESHER_EXE=./bin/xmeshfem3D
SOLVER_EXE=./bin/xspecfem3D

echo "$LD_LIBRARY_PATH"
# Check the dynamic libraries of the solver binary we are about to run
# (was a hardcoded user scratch path; resolve relative to this run instead).
ldd "$SOLVER_EXE"

# backup files used for this simulation
cp DATA/Par_file OUTPUT_FILES/
cp DATA/STATIONS OUTPUT_FILES/
cp DATA/CMTSOLUTION OUTPUT_FILES/

##
## mesh generation
##
sleep 2
echo
echo "$(date)"
echo "starting MPI mesher"
echo

# Total MPI ranks = nodes * tasks per node (pure integer arithmetic; no need
# to spawn bc for this).
MPI_PROCESS=$(( SLURM_NNODES * SLURM_NTASKS_PER_NODE ))
echo "SLURM_NTASKS_PER_NODE = " "$SLURM_NTASKS_PER_NODE"
echo "SLURM_CPUS_PER_TASKS = " "$SLURM_CPUS_PER_TASK"
echo "SLURM_NNODES=" "$SLURM_NNODES"
echo "MPI_PROCESS = ${MPI_PROCESS}"

#time mpirun --display-devel-map -n ${MPI_PROCESS} ${MESHER_EXE}
time mpirun -n "${MPI_PROCESS}" "${MESHER_EXE}"
#time srun --mpi=pmix_v3 --cpu-bind=core -n ${MPI_PROCESS} ${MESHER_EXE}
echo " mesher done: $(date)"
echo

##
## forward simulation
##
sleep 2
echo
echo "$(date)"
echo "starting run in current directory $PWD"
echo
#unset FORT_BUFFERED
#time mpirun --display-devel-map -n ${MPI_PROCESS} ${SOLVER_EXE}
time mpirun -n "${MPI_PROCESS}" "${SOLVER_EXE}"
#time srun --mpi=pmix_v3 --cpu-bind=core -n ${MPI_PROCESS} ${SOLVER_EXE}

echo "finished successfully"
echo "$(date)"

ls -lrth "${install_dir}/TestCaseA/specfem3d_globe/OUTPUT_FILES/output_mesher.txt"
cat "${install_dir}/TestCaseA/specfem3d_globe/OUTPUT_FILES/output_mesher.txt"
ls -lrth "${install_dir}/TestCaseA/specfem3d_globe/OUTPUT_FILES/output_solver.txt"
cat "${install_dir}/TestCaseA/specfem3d_globe/OUTPUT_FILES/output_solver.txt"
specfem3d/job_script/job_vega-gpu_small_benchmark_run_to_test_more_complex_Earth.slurm
0 → 100644
View file @
83ad2e36
#!/bin/bash
#SBATCH -J specfem_gpu_small_benchmark_run_to_test_more_complex_Earth
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=24
#SBATCH --cpus-per-task=2
#SBATCH --time=01:59:59
#SBATCH --output specfem_gpu_small_benchmark_run_to_test_more_complex_Earth-%j.out
#SBATCH --exclusive
#SBATCH -p gpu
#SBATCH --gres=gpu:4

# GPU variant of the SPECFEM3D_GLOBE "small_benchmark_run_to_test_more_
# complex_Earth" example on Vega: flip the example to GPU_MODE, run it on
# 4 GPUs, then compare seismograms against the reference outputs.
#set -e

# Load the machine environment, and echo its active (non-comment) lines plus
# this script into the job log for provenance.
source ../env/env_vega-gpu
grep "^[^#;]" ../env/env_vega-gpu
cat job_vega-gpu_small_benchmark_run_to_test_more_complex_Earth.slurm

# install_dir is expected to be exported by env_vega-gpu.
: "${install_dir:?install_dir must be set by ../env/env_vega-gpu}"

cd "${install_dir}/specfem3d_globe/EXAMPLES/small_benchmark_run_to_test_more_complex_Earth" || exit 1

export OMPI_MCA_pml=ucx
export OMPI_MCA_btl="^uct,tcp,openib,vader"
export CUDA_VISIBLE_DEVICES=0,1,2,3
#export OMP_NUM_THREADS=1

# Enable GPU mode in the example's Par_file and accept any GPU device name.
sed -i 's/GPU_MODE = .false./GPU_MODE = .true./g' \
  "${install_dir}/specfem3d_globe/EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/DATA/Par_file"
sed -i 's/GPU_DEVICE = Tesla/GPU_DEVICE = */g' \
  "${install_dir}/specfem3d_globe/EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/DATA/Par_file"
# Rebuild the example with CUDA support instead of OpenMP.
sed -i 's/configure --enable-openmp/configure --build=ppc64 --with-cuda=cuda8 /g' \
  "${install_dir}/specfem3d_globe/EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/run_this_example.sh"
#sed -i s/"mpirun -np"/"srun -n"/g $install_dir/specfem3d_globe/EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/run_mesher_solver.bash
#taskset -a -p $PPID

time ./run_this_example.sh

echo "=========="
echo "config.log"
echo "=========="
cat "${install_dir}/specfem3d_globe/config.log"
echo "========"
echo "make.log"
echo "========"
cat "${install_dir}/specfem3d_globe/make.log"

echo
echo "running seismogram comparisons:"
echo
cd "${install_dir}/specfem3d_globe/" || exit 1

# uncompress seismograms
if [ -e EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/OUTPUT_FILES_reference_OK/II.AAK.MXE.sem.ascii.bz2 ]; then
  echo
  echo "unzipping references..."
  echo
  # NOTE(review): this creates OUTPUT_FILES_reference_OK/ in the current
  # directory while bunzip2 works inside EXAMPLES/... — confirm the intended
  # target. '-p' keeps a re-run from failing when the directory exists.
  mkdir -p OUTPUT_FILES_reference_OK/
  bunzip2 EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/OUTPUT_FILES_reference_OK/*.bz2
  echo
  echo
fi

#wget https://repo.anaconda.com/miniconda/Miniconda3-py37_4.10.3-Linux-x86_64.sh
#sh /ceph/hpc/home/eucedricj/Miniconda3-py37_4.10.3-Linux-x86_64.sh
#source miniconda3/bin/activate
#conda create --name python2 python=2.7
module purge
echo "which conda"
which conda
echo "conda init bash"
conda init bash
# The activate script must be sourced to affect this shell; executing it as a
# child process was a no-op.
source /ceph/hpc/home/eucedricj/miniconda3/bin/activate
echo "conda activate python2"
conda activate python2

# compares seismograms by plotting correlations
./utils/compare_seismogram_correlations.py EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/OUTPUT_FILES/ EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/OUTPUT_FILES_reference_OK/

echo
echo "done"

ls -lrth "${install_dir}/specfem3d_globe/EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/OUTPUT_FILES/output_"*.txt
cat "${install_dir}/specfem3d_globe/EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/OUTPUT_FILES/output_mesher.txt"
cat "${install_dir}/specfem3d_globe/EXAMPLES/small_benchmark_run_to_test_more_complex_Earth/OUTPUT_FILES/output_solver.txt"
specfem3d/job_script/job_vega-gpu_test_case_A.slurm
0 → 100644
View file @
83ad2e36
#!/bin/bash
#SBATCH -J Test_case_A-gpu
#SBATCH --nodes=24
#SBATCH --ntasks-per-node=4
#SBATCH --cpus-per-task=8
#SBATCH --time=00:30:00
#SBATCH --output specfem-gpu_TestCaseA-gcc-9-cuda-11-GPU-NoopenMP-NoMultithread-8cpus-distribBlock-znver2-%j.output
#SBATCH -p gpu
#SBATCH --gres=gpu:4
#SBATCH --hint=nomultithread
#SBATCH --distribution=block:block

# SPECFEM3D_GLOBE Test Case A on the Vega GPU partition: run the mesher and
# the solver via mpirun (24 nodes x 4 ranks, one GPU per rank) and dump the
# build and run logs into the job output.
#set -e

# Load the machine environment, and echo its active (non-comment) lines plus
# this script into the job log for provenance.
source ../env/env_vega-gpu
grep "^[^#;]" ../env/env_vega-gpu
cat job_vega-gpu_test_case_A.slurm

# install_dir is expected to be exported by env_vega-gpu.
: "${install_dir:?install_dir must be set by ../env/env_vega-gpu}"

echo "=========="
echo "config.log"
echo "=========="
cat "${install_dir}/TestCaseA/specfem3d_globe/config.log"
echo "========"
echo "make.log"
echo "========"
cat "${install_dir}/TestCaseA/specfem3d_globe/make.log"

cd "${install_dir}/TestCaseA/specfem3d_globe" || exit 1

# Show the GPU-related Par_file settings actually used for this run.
grep GPU DATA/Par_file

export OMPI_MCA_pml=ucx
export OMPI_MCA_btl="^uct,tcp,openib,vader"
#self,vader,openib" # with ^ucx and ^tcp -> error occurred in MPI_Bcast
export CUDA_VISIBLE_DEVICES=0,1,2,3
#export OMP_NUM_THREADS=2

# Unlimited stack: the solver allocates large arrays on the stack.
ulimit -s unlimited

MESHER_EXE=./bin/xmeshfem3D
SOLVER_EXE=./bin/xspecfem3D

# backup files used for this simulation
cp DATA/Par_file OUTPUT_FILES/
cp DATA/STATIONS OUTPUT_FILES/
cp DATA/CMTSOLUTION OUTPUT_FILES/

##
## mesh generation
##
sleep 2
echo
echo "$(date)"
echo "starting MPI mesher"
echo

# Total MPI ranks = nodes * tasks per node (pure integer arithmetic; no need
# to spawn bc for this).
MPI_PROCESS=$(( SLURM_NNODES * SLURM_NTASKS_PER_NODE ))
echo "SLURM_NTASKS_PER_NODE = " "$SLURM_NTASKS_PER_NODE"
echo "SLURM_CPUS_PER_TASKS = " "$SLURM_CPUS_PER_TASK"
echo "SLURM_NNODES=" "$SLURM_NNODES"
echo "MPI_PROCESS = ${MPI_PROCESS}"

time mpirun -n "${MPI_PROCESS}" "${MESHER_EXE}"
echo " mesher done: $(date)"
echo

##
## forward simulation
##
sleep 2
echo
echo "$(date)"
echo "starting run in current directory $PWD"
echo
#unset FORT_BUFFERED
time mpirun -n "${MPI_PROCESS}" "${SOLVER_EXE}"

echo "finished successfully"
echo "====================="
echo "$(date)"

ls -lrth "${install_dir}/TestCaseA/specfem3d_globe/OUTPUT_FILES/output_"*.txt
cat "${install_dir}/TestCaseA/specfem3d_globe/OUTPUT_FILES/output_mesher.txt"
cat "${install_dir}/TestCaseA/specfem3d_globe/OUTPUT_FILES/output_solver.txt"
Write
Preview
Supports
Markdown
0%
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment