Commit 6a374dad authored by Cedric Jourdain's avatar Cedric Jourdain
Browse files

Add Dibona environment file, compilation script support, and Slurm job template

parent 71deae59
......@@ -10,6 +10,7 @@ echo " - davide "
echo " - juwels"
echo " - irene-skl "
echo " - irene-knl "
echo " - dibona "
# -r keeps backslashes literal in the typed machine name (plain `read`
# would silently interpret them).
read -r machine
# Load the machine-specific build environment (compilers, flags, install_dir).
# Quoted so an accidental space in the answer fails loudly instead of
# word-splitting the path.
source "./env/env_${machine}"
......@@ -97,7 +98,7 @@ Clean(){
Deploy(){
echo "install_dir ="$install_dir
if [ $machine = "occigen" ] || [ $machine = "marenostrum" ] || [ $machine = "marconi-knl" ] || [ $machine = "daint-cpu-only" ] || [ $machine = "daint-gpu" ] || [ $machine = "davide" ] || [ $machine = "juwels" ] || [ $machine = "irene-skl" ] || [ $machine = "irene-knl" ];then
if [ $machine = "occigen" ] || [ $machine = "marenostrum" ] || [ $machine = "marconi-knl" ] || [ $machine = "daint-cpu-only" ] || [ $machine = "daint-gpu" ] || [ $machine = "davide" ] || [ $machine = "juwels" ] || [ $machine = "irene-skl" ] || [ $machine = "irene-knl" ] || [ $machine = "dibona" ];then
echo "==> Install on $machine :"
#Clean
mkdir -p $install_dir
......
#!/bin/bash
# Build environment for the Dibona (Arm) machine: toolchain modules, MPI
# compiler wrappers, and the install prefix used by the compile/deploy script.
#module purge
# Arm HPC compiler 19.0 and an OpenMPI 3.1.2 built against it (site-specific
# module names — valid only on Dibona).
module load arm/arm-hpc-compiler/19.0 openmpi3.1.2/arm19.0
export machine=dibona
export software=specfem3d_globe
export version=31octobre
# Install prefix: everything lands under the user's benchmark tree.
export install_dir=$HOME/benchmarks/dibona/$software/$version/
# MPI compiler wrappers; MPIFC mirrors FC for the specfem configure step.
export CC="mpicc"
export FC="mpifort"
export MPIFC=$FC
# NOTE(review): -DUSE_FP32 presumably selects single precision and
# -mcmodel=large accommodates large static arrays — confirm against the
# specfem3d_globe build docs. Leading/trailing spaces in the flag strings
# are intentional padding and kept as-is.
export FCFLAGS=" -g -O3 -fopenmp -DUSE_FP32 -DOPT_STREAMS -mcmodel=large"
export CFLAGS=" -g -O3 -fopenmp "
#!/bin/bash -l
# Slurm job: SPECFEM3D_GLOBE test case A on Dibona — runs the mesher then
# the solver with 4 MPI ranks/node x 16 OpenMP threads.
#SBATCH --job-name=specfem3D_test_case_A
#SBATCH --time=02:30:00
#SBATCH --nodes=24
#SBATCH --ntasks-per-core=2
#SBATCH --ntasks-per-node=4
#SBATCH --cpus-per-task=16
#SBATCH --output=specfem3D_test_case_A_16OMP-dibona-%j.output
# The original listed --partition twice (normal, then production); sbatch
# silently takes the last occurrence, so only the effective one is kept.
#SBATCH --partition=production

set -e

# Machine-specific environment: compilers, MPI and $install_dir.
source ../env/env_dibona
cd "$install_dir/TestCaseA/specfem3d_globe"

# One OpenMP thread per allocated CPU.
export OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
ulimit -s unlimited

MESHER_EXE=./bin/xmeshfem3D
SOLVER_EXE=./bin/xspecfem3D

# Back up the input files used for this simulation; create the output
# directory first so the copies do not abort the job under `set -e`.
mkdir -p OUTPUT_FILES
cp DATA/Par_file OUTPUT_FILES/
cp DATA/STATIONS OUTPUT_FILES/
cp DATA/CMTSOLUTION OUTPUT_FILES/

##
## mesh generation
##
sleep 2
echo
date
echo "starting MPI mesher"
echo

# Total MPI ranks: plain integer arithmetic, no need to shell out to bc.
MPI_PROCESS=$(( SLURM_NNODES * SLURM_NTASKS_PER_NODE ))
echo "SLURM_NTASKS_PER_NODE = $SLURM_NTASKS_PER_NODE"
echo "SLURM_CPUS_PER_TASK = $SLURM_CPUS_PER_TASK"
echo "SLURM_NNODES = $SLURM_NNODES"
echo "MPI_PROCESS $MPI_PROCESS"

time srun -n "$MPI_PROCESS" "$MESHER_EXE"
echo " mesher done: $(date)"
echo

##
## forward simulation
##
sleep 2
echo
date
echo "starting run in current directory $PWD"
echo

time srun -n "$MPI_PROCESS" "$SOLVER_EXE"
echo "finished successfully"
date
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment