Added lines to allow for running on GPUs in cluster.
commit cafe7eeb86
parent a9e24bf295
@@ -25,6 +25,7 @@ OptionsStruct.NumberOfTimeSteps = 2E6; % in s
OptionsStruct.EnergyTolerance = 5E-10;

OptionsStruct.JobNumber = 1;
+OptionsStruct.RunOnGPU = true;
OptionsStruct.SaveData = true;
OptionsStruct.SaveDirectory = './Data';
options = Helper.convertstruct2cell(OptionsStruct);
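For context: Helper.convertstruct2cell flattens the options struct into name/value pairs that the DipolarGas constructor can forward to its inputParser. A minimal sketch of that kind of helper follows; the function name and the final call are illustrative assumptions, not the repository's actual Helper code.

% Sketch of a convertstruct2cell-style helper (assumed implementation).
function optionsCell = convertstruct2cell_sketch(optionsStruct)
    names  = fieldnames(optionsStruct);                  % {'EnergyTolerance'; 'RunOnGPU'; ...}
    values = struct2cell(optionsStruct);                 % values in matching order
    optionsCell = reshape([names.'; values.'], 1, []);   % {'EnergyTolerance', 5e-10, 'RunOnGPU', true, ...}
end

% Assumed usage: expand the cell into name/value arguments.
% sim = DipolarGas(optionsCell{:});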
@@ -20,9 +20,8 @@ classdef DipolarGas < handle & matlab.mixin.Copyable

SimulationParameters;

%Flags

JobNumber;
+RunOnGPU;
DebugMode;
DoSave;
SaveDirectory;
@@ -64,6 +63,8 @@ classdef DipolarGas < handle & matlab.mixin.Copyable
    @(x) assert(isnumeric(x) && isscalar(x) && (x > 0)));
addParameter(p, 'JobNumber', 1,...
    @(x) assert(isnumeric(x) && isscalar(x) && (x > 0)));
+addParameter(p, 'RunOnGPU', false,...
+    @islogical);
addParameter(p, 'DebugMode', false,...
    @islogical);
addParameter(p, 'SaveData', false,...
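The RunOnGPU flag enters the class through the same inputParser pattern as the existing options: an optional name/value argument with a logical validator and a default of false, so call sites that never mention the flag keep running on the CPU. A self-contained sketch of that pattern (illustration only, not the class code):

% Standalone sketch of the parser pattern above.
p = inputParser;
addParameter(p, 'JobNumber', 1, ...
    @(x) assert(isnumeric(x) && isscalar(x) && (x > 0)));
addParameter(p, 'RunOnGPU', false, @islogical);   % default: stay on the CPU

parse(p, 'RunOnGPU', true);       % e.g. pairs forwarded from OptionsStruct
useGPU = p.Results.RunOnGPU;      % logical true
jobNum = p.Results.JobNumber;     % default of 1 since it was not supplied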
@@ -88,6 +89,7 @@ classdef DipolarGas < handle & matlab.mixin.Copyable
this.MinimumTimeStepSize = p.Results.MinimumTimeStepSize;

this.JobNumber = p.Results.JobNumber;
+this.RunOnGPU = p.Results.RunOnGPU;
this.DebugMode = p.Results.DebugMode;
this.DoSave = p.Results.SaveData;
this.SaveDirectory = p.Results.SaveDirectory;
@@ -16,4 +16,8 @@ function [psi,V,VDk] = initialize(this,Params,Transf,TransfRad)

% == Setting up the initial wavefunction == %
psi = this.setupWavefunction(Params,Transf);
+
+if this.RunOnGPU
+    psi = gpuArray(psi);
+end
end
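gpuArray (Parallel Computing Toolbox) copies the wavefunction into GPU memory, so the FFTs and element-wise updates in the propagation dispatch to the device. A hedged sketch of a slightly more defensive variant, not the repository's code: it falls back to the CPU when no GPU is visible and uses gather to bring the state back to host memory before saving.

% Sketch only: guarded GPU transfer and host retrieval.
runOnGPU = true;                  % stands in for this.RunOnGPU
psi = rand(64, 64, 64) + 0i;      % placeholder complex wavefunction

if runOnGPU && gpuDeviceCount > 0
    psi = gpuArray(psi);          % move the state to the GPU
end

% ... time evolution: fftn/ifftn and array arithmetic run on the device ...

psi = gather(psi);                % back to host memory; unchanged for a CPU array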
@@ -6,9 +6,9 @@
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=1
#SBATCH --cpus-per-task=10
-#SBATCH --mem=8G
+#SBATCH --mem=24G
# Estimated wallclock time for job
-#SBATCH --time=02:00:00
+#SBATCH --time=15:00:00
#SBATCH --job-name=simulation
#SBATCH --error=simulation.err
#SBATCH --output=simulation.out
Dipolar-Gas-Simulator/bwhpc_matlab_gpe_sim_gpu.slurm (new file, 38 lines)
@@ -0,0 +1,38 @@
#!/bin/bash
########### Begin SLURM header ###########
#Partition
#SBATCH --partition=single
# Request number of nodes and GPU for job
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=10
#SBATCH --gres=gpu:4
#SBATCH --mem=24G
# Estimated wallclock time for job
#SBATCH --time=15:00:00
#SBATCH --job-name=simulation
#SBATCH --error=simulation.err
#SBATCH --output=simulation.out

########### End SLURM header ##########

echo "Working Directory: $PWD"
echo "Running on host $HOSTNAME"
echo "Job id: $SLURM_JOB_ID"
echo "Job name: $SLURM_JOB_NAME"
echo "Number of nodes allocated to job: $SLURM_JOB_NUM_NODES"
echo "Number of GPUs allocated to job: $SLURM_GPUS"


# Load module
module load math/matlab/R2023a

echo Directory is `pwd`
echo "Initiating Job..."

# Start a Matlab program
matlab -nodisplay -nosplash -r "Scripts.run_on_cluster"

# notice for tests
echo "Job terminated successfully"

exit
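The script hands control to Scripts.run_on_cluster, which is not part of this diff. Purely as a guess at that kind of entry point, and to show how the four GPUs requested via --gres=gpu:4 could be split across jobs, a hypothetical sketch (every identifier below is an assumption):

% Hypothetical entry point; not the repository's Scripts.run_on_cluster.
function run_on_cluster_sketch()
    % Derive a job index, e.g. from a SLURM array task, defaulting to 1.
    jobNumber = str2double(getenv('SLURM_ARRAY_TASK_ID'));
    if isnan(jobNumber)
        jobNumber = 1;
    end

    % Bind this MATLAB process to one of the allocated GPUs.
    if gpuDeviceCount > 0
        gpuDevice(mod(jobNumber - 1, gpuDeviceCount) + 1);
    end

    % ... build OptionsStruct (with RunOnGPU = true) and start the solver ...
end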