#!/bin/bash
#SBATCH --qos=gpushort
#SBATCH --partition=gpu
#SBATCH --gres=gpu:v100:1
#SBATCH --job-name=jupyter
#SBATCH --account=dominoes
#SBATCH --nodes=1
#SBATCH --ntasks=1
##SBATCH --ntasks=16
#SBATCH --cpus-per-task=1
##SBATCH --mem=60000
#SBATCH --time=1-00:00:00
#SBATCH --output=jupyter-slurm-%j.log
###
# This script is to be submitted via "sbatch" on the cluster.
#
# Set --cpus-per-task above to match the size of your multiprocessing run, if any.
###
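# Example usage (illustrative; adjust the script name/path to where you saved it):
#   sbatch .launch-jupyter-slurm        # submit this job
#   squeue -u $USER                     # see when it starts and which node it got
#   tail -f jupyter-slurm-<jobid>.log   # read the connection instructions printed below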
echo "Job started with SLURM ID $SLURM_JOBID"
echo "Running on node(s): $SLURM_NODELIST"
echo "------------------------------------------------------------"
cd $HOME
# Some initial setup
module purge
module load anaconda &> /dev/null # suppress module output; this is where the Jupyter client comes from
# optional: if you have a Conda env, activate it here:
#MY_CONDA_ENV=py2
MY_CONDA_ENV=py3
source activate $MY_CONDA_ENV
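# Note: newer conda installations deprecate "source activate"; if activation
# fails, "conda activate $MY_CONDA_ENV" (possibly after sourcing conda.sh) is
# the usual alternative.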
# set the local port that your browser will connect to on your own machine
BROWSERPORT=7071
# pick a random port for the notebook, in case multiple notebooks are
# running on the same compute node.
NOTEBOOKPORT=$(shuf -i 8000-8500 -n 1)
# port for the reverse tunnel on the login node. A fixed port is used here;
# uncomment the shuf line instead to pick a random one, in case multiple
# tunnels end up on the same login node.
#TUNNELPORT=$(shuf -i 8501-9000 -n 1)
TUNNELPORT=8081
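# Port layout: the notebook listens on $NOTEBOOKPORT on this compute node, the
# reverse tunnel exposes it as $TUNNELPORT on the login node, and the ssh
# command printed below maps that to $BROWSERPORT on your local machine.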
echo "On your local machine, run"
echo " ssh -L localhost:$BROWSERPORT:localhost:$TUNNELPORT $(whoami)@$SLURM_SUBMIT_HOST.pik-potsdam.de -N"
echo "and point your browser to"
echo " http://localhost:$BROWSERPORT"
echo "To stop this notebook, run"
echo " scancel $SLURM_JOB_ID"
echo "------------------------------------------------------------"
# Set up a reverse SSH tunnel from the compute node back to the submitting host (login01 or login02)
# This is the machine we will connect to with SSH forward tunneling from our client.
echo "Setting up reverse SSH tunnel from compute node back to submitting host"
ssh -R$TUNNELPORT:localhost:$NOTEBOOKPORT $SLURM_SUBMIT_HOST -N -f
# Start the notebook
echo "Starting the notebook"
srun -n1 jupyter-notebook --no-browser --port=$NOTEBOOKPORT --NotebookApp.iopub_data_rate_limit=10000000000
wait