'''Environment variables required for running VASP on Magus.''' Your old bashrc is available from the old backup; it contained environment-variable settings relevant to the old cluster.
source /snufs/intel/composer_xe_2015.2.164/bin/compilervars.sh intel64
source /snufs/intel/impi/5.0.3.048/intel64/bin/mpivars.sh
'''Location of VASP-5.3.5 Binaries on Magus '''
/snufs/apps/vasp/5.3.5/intelmpi/
'''VASP binaries for Haswell'''
/snufs/apps/vasp/5.3.5/intelmpi/vasp.hw
/snufs/apps/vasp/5.3.5/intelmpi/vasp.hw.gamma
/snufs/apps/vasp/5.3.5/intelmpi/vasp.hw.nc
'''Vasp binaries for Sandybridge '''
/snufs/apps/vasp/5.3.5/intelmpi/vasp.sdb
/snufs/apps/vasp/5.3.5/intelmpi/vasp.sdb.gamma
/snufs/apps/vasp/5.3.5/intelmpi/vasp.sdb.nc
'''Job Submission script'''
Example can be copied from /snufs/apps/vasp/5.3.5/intelmpi/submit.lsf.example
'''Explanations are given below as inline notes.'''
#!/bin/bash
# LSF submission script for VASP 5.3.5 (Intel MPI) on Magus.
# Copy from /snufs/apps/vasp/5.3.5/intelmpi/submit.lsf.example and edit
# MPI/PPN/EXE to match your job size and node architecture.

#BSUB -o out.%J              # Output file will be named out.JOBID
#BSUB -n 64                  # Total cores requested (keep equal to MPI below)
#BSUB -R "span[ptile=16]"    # Place 16 tasks per node

MPI=64                       # Number of MPI ranks to launch
PPN=16                       # Processor tasks per node
MYDIR=$(pwd)

# Executable path -- choose according to the processor architecture
# (vasp.hw* for Haswell nodes, vasp.sdb* for Sandy Bridge nodes).
EXE=/snufs/apps/vasp/5.3.5/intelmpi/vasp.hw

# Intel MPI fabric selection: shared memory within a node, DAPL (InfiniBand)
# across nodes; disable fallback so misconfiguration fails loudly.
export I_MPI_DAPL_PROVIDER=ofa-v2-mlx4_0-1
export I_MPI_FABRICS=shm:dapl
export I_MPI_FALLBACK=0
export OMP_NUM_THREADS=1     # Pure MPI run: no OpenMP threading
export FORT_BUFFERED=yes     # Buffered Fortran I/O for performance
export I_MPI_PIN_PROCESSOR_LIST=0-15   # Pin ranks to cores 0-15 on each node

# Build the host list from the hosts LSF allocated to this job.
rm -f host.list
cat "$LSB_DJOB_HOSTFILE" > ./host.list

env > log.env                # Record the job environment for debugging
mpiexec.hydra -np "$MPI" -f ./host.list -genvall -ppn "$PPN" "$EXE" > "$MYDIR/vasp.out" 2>&1