Kennedy manual
Revision as of 08:07, 6 May 2020 by PeterThorpe (talk | contribs)
Temporary quick start info can be found here. This is a work in progress until something official is made up.
https://github.com/peterthorpe5/How_to_use_Kennedy_HPC
The following are command-line commands, or directives you can put in your shell script to achieve certain functionality.
request_48_thread_1.3TBRAM
#!/bin/bash -l                    # note: the -l (login shell) is essential now
#SBATCH -J fly_pilon              # job name
#SBATCH -N 1                      # number of nodes
#SBATCH --ntasks-per-node=48      # 48 tasks on the one node
#SBATCH --threads-per-core=2      # use both hardware threads per core
#SBATCH -p bigmem                 # submit to the big-memory partition
#SBATCH --nodelist=kennedy150     # this specific node has 1.5TB RAM
#SBATCH --mem=1350GB              # request 1350GB of that RAM
test_conda_activate
#!/bin/bash -l                    # the -l (login shell) is essential
#SBATCH -J conda_test             # job name
#SBATCH -N 1                      # number of nodes
#SBATCH --tasks-per-node=1        # single task
#SBATCH -p bigmem                 # bigmem is for the BIOINF community
#SBATCH --mail-type=END           # email at the end of the job
#SBATCH --mail-user=$USER@st-andrews.ac.uk # your email address

# Work from the home directory; abort if it is unreachable.
cd /gpfs1/home/$USER/ || exit 1

# Report the default Python version before activating anything.
pyv="$(python -V 2>&1)"
echo "$pyv"

# Use conda to activate the software.
echo "$PATH"
conda activate spades
pyv="$(python -V 2>&1)"
echo "$pyv"
conda deactivate

# Switch to a Python 2.7 environment and confirm the interpreter version.
conda activate python27
pyv="$(python2 -V 2>&1)"
echo "$pyv"
12threads_bigMem_30G_RAM
#!/bin/bash -l                    # the -l (login shell) is essential
#SBATCH -J trimmo                 # job name
#SBATCH -N 1                      # number of nodes
#SBATCH --ntasks-per-node=12      # 12 tasks on the one node
#SBATCH --threads-per-core=2      # use both hardware threads per core
#SBATCH -p bigmem                 # submit to the big-memory partition
#SBATCH --mem=30GB                # request 30GB RAM