Calculations/Dipolar-Gas-Simulator/bwhpc_matlab_gpe_sim_gpu.slurm

#!/bin/bash
########### Begin SLURM header ###########
# Partition
#SBATCH --partition=gpu-single
# Request number of nodes and GPUs for the job
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=1
#SBATCH --gres=gpu:4
#SBATCH --mem=8G
# Estimated wallclock time for job
#SBATCH --time=04:00:00
#SBATCH --job-name=simulation
#SBATCH --error=simulation.err
#SBATCH --output=simulation.out
########### End SLURM header ##########
echo "Working Directory: $PWD"
echo "Running on host $HOSTNAME"
echo "Job id: $SLURM_JOB_ID"
echo "Job name: $SLURM_JOB_NAME"
echo "Number of nodes allocated to job: $SLURM_JOB_NUM_NODES"
echo "Number of GPUs allocated to job: $SLURM_GPUS"
# Load the MATLAB module
module load math/matlab/R2023a
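# Sketch: fail fast if the module did not put matlab on the PATH
command -v matlab >/dev/null || { echo "matlab not found after module load" >&2; exit 1; }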
echo "Directory is $(pwd)"
echo "Initiating Job..."
# Start the MATLAB program; -batch runs the script non-interactively and
# returns a nonzero exit code if it errors
matlab -batch "Scripts.run_on_cluster"
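# Sketch (assumption): propagate MATLAB's exit status so SLURM marks a failed
# run as failed instead of printing the success notice below
MATLAB_EXIT=$?
if [ "$MATLAB_EXIT" -ne 0 ]; then
    echo "MATLAB exited with code $MATLAB_EXIT" >&2
    exit "$MATLAB_EXIT"
fi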
# Completion notice for tests
echo "Job terminated successfully"
exit