Add test case B for marconi100

#!/bin/bash
#SBATCH -J Test_case_B
#SBATCH -A Ppp4x_5850
#SBATCH -p m100_usr_prod
#SBATCH --time 01:59:00
#SBATCH --nodes=384
#SBATCH --ntasks-per-node=4
#SBATCH --cpus-per-task=8
#SBATCH --output=specfem3D_%x_marconi100-xl-spectrumpi-GPU-%j.output
#SBATCH --gres=gpu:4
#SBATCH --gpus-per-node=4
#SBATCH --hint=nomultithread
#SBATCH --exclusive
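# Resource request: 384 nodes x 4 MPI tasks per node = 1536 MPI ranks, one per
# GPU (4 GPUs requested per node), with 8 cores per rank and hardware
# multithreading disabled (--hint=nomultithread).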
source ../env/env_marconi100
echo "Environment used:"
echo "================="
grep -E -v '^(#|$)' ../env/env_marconi100
cat job_marconi100_test_case_B.slurm
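# Move into the Test Case B build tree; install_dir is assumed to be defined by
# the env_marconi100 file sourced above.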
cd $install_dir/TestCaseB/specfem3d_globe
grep GPU DATA/Par_file
export CUDA_VISIBLE_DEVICES=0,1,2,3
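# Make all 4 GPUs of the node visible; with 4 MPI ranks per node, each rank is
# expected to attach to one device.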
ulimit -s unlimited
MESHER_EXE=./bin/xmeshfem3D
SOLVER_EXE=./bin/xspecfem3D
echo "=========="
echo "config.log"
echo "=========="
cat $install_dir/TestCaseB/specfem3d_globe/config.log
echo "========"
echo "make.log"
echo "========"
cat $install_dir/TestCaseB/specfem3d_globe/make.log
# backup files used for this simulation
cp DATA/Par_file OUTPUT_FILES/
cp DATA/STATIONS OUTPUT_FILES/
cp DATA/CMTSOLUTION OUTPUT_FILES/
##
## mesh generation
##
sleep 2
echo
echo `date`
echo "starting MPI mesher"
echo
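# Total MPI ranks = nodes x tasks per node (384 x 4 = 1536 with the #SBATCH
# settings above). An equivalent pure-bash form, avoiding the bc dependency,
# would be: MPI_PROCESS=$(( SLURM_NNODES * SLURM_NTASKS_PER_NODE ))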
MPI_PROCESS=$(echo "$SLURM_NNODES*$SLURM_NTASKS_PER_NODE" | bc -l)
echo "SLURM_NTASKS= " $SLURM_NTASKS
echo "SLURM_NTASKS_PER_NODE = " $SLURM_NTASKS_PER_NODE
echo "SLURM_CPUS_PER_TASKS = " $SLURM_CPUS_PER_TASK
echo "SLURM_NNODES=" $SLURM_NNODES
echo "MPI_PROCESS $MPI_PROCESS"
time mpirun -gpu -np ${MPI_PROCESS} ${MESHER_EXE}
# Set --ntasks-per-node to the number of MPI processes you want per node, and set
# --cpus-per-task either to OMP_NUM_THREADS (to exploit SMT with OpenMP threads)
# or to 128 / ntasks-per-node (to exploit SMT with additional MPI processes).
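# Example (a sketch; the numbers assume a Marconi100 node with 32 POWER9 cores,
# i.e. 128 hardware threads with SMT4, and 4 V100 GPUs):
#   SMT via OpenMP threads:  --ntasks-per-node=4   --cpus-per-task=$OMP_NUM_THREADS
#   SMT via MPI processes:   --ntasks-per-node=32  --cpus-per-task=$((128 / 32))   # = 4
# This job keeps --ntasks-per-node=4 (one rank per GPU) with --cpus-per-task=8,
# which together with --hint=nomultithread maps the 4 ranks onto physical cores only.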
echo " mesher done: `date`"
echo
##
## forward simulation
##
sleep 2
echo
echo `date`
echo starting run in current directory $PWD
echo
time mpirun -gpu -np ${MPI_PROCESS} ${SOLVER_EXE}
ls -lrth $install_dir/TestCaseB/specfem3d_globe/OUTPUT_FILES/output_*.txt
cat $install_dir/TestCaseB/specfem3d_globe/OUTPUT_FILES/output_solver.txt
echo "========"
cat $install_dir/TestCaseB/specfem3d_globe/OUTPUT_FILES/output_mesher.txt
echo "finished successfully"
echo `date`