mlp-project/_ShB-base-train.sh
#!/bin/bash
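# SLURM resource request: 1 node, 1 task, 4 GPUs, 24 GB of RAM, 3-day time limit
# on the Teach-Standard partition. Submit from a login node with:
#   sbatch _ShB-base-train.sh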
#SBATCH -N 1
#SBATCH -n 1
#SBATCH --partition=Teach-Standard
#SBATCH --gres=gpu:4
#SBATCH --mem=24000
#SBATCH --time=3-00:00:00
set -e
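# Point the toolchain at the cluster's CUDA 9.0 / cuDNN 7.0 installs and add them
# to the library, include, and binary search paths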
export CUDA_HOME=/opt/cuda-9.0.176.1/
export CUDNN_HOME=/opt/cuDNN-7.0/
export STUDENT_ID=$(whoami)
export LD_LIBRARY_PATH=${CUDNN_HOME}/lib64:${CUDA_HOME}/lib64:${LD_LIBRARY_PATH}
export LIBRARY_PATH=${CUDNN_HOME}/lib64:${LIBRARY_PATH}
export CPATH=${CUDNN_HOME}/include:$CPATH
export PATH=${CUDA_HOME}/bin:${PATH}
export PYTHON_PATH=$PATH  # note: carried over from the course template; Python reads PYTHONPATH, so this line has no effect
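# Use per-user scratch space on the node's local disk for temporary files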
mkdir -p /disk/scratch/${STUDENT_ID}
export TMPDIR=/disk/scratch/${STUDENT_ID}/
export TMP=/disk/scratch/${STUDENT_ID}/
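# Activate the mlp-cuda conda environment (CUDA-enabled Python stack)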
source /home/${STUDENT_ID}/miniconda3/bin/activate mlp-cuda
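# Train on the ShanghaiB dataset (ShanghaiTech Part B) from the DeiT-Base/16 384x384
# pretrained checkpoint: batch size 4 on GPUs 0-3, logging every 100 iterations,
# for 1000 epochs, saving to a timestamped directory under ./save/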
python train.py \
--pth_tar './pretrained/deit_base_patch16_384-8de9b5d1.pth' \
--train_dataset 'ShanghaiB' \
--save_path ./save/ShanghaiB-base-$(date -Iminutes) \
--batch_size 4 \
--gpus 0,1,2,3 \
--print_freq 100 \
--epochs 1000