-
Notifications
You must be signed in to change notification settings - Fork 27
Expand file tree
/
Copy pathdask_singlenode.sh
More file actions
16 lines (13 loc) · 974 Bytes
/
dask_singlenode.sh
File metadata and controls
16 lines (13 loc) · 974 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
#!/bin/bash
# Slurm batch script: run the single-node Dask example (dask_singlenode.py)
# on a small set of Sentinel-2 L2A images using CSC's geoconda module.
# Note: all #SBATCH directives must precede the first executable command.
#SBATCH --account=project_2015299 # Choose the project to be billed
#SBATCH --reservation=geocomputing_day2 # Only available during the course
#SBATCH --time=00:05:00 # Maximum duration of the job. Upper limit depends on partition.
#SBATCH --ntasks=1 # Number of tasks. Upper limit depends on partition.
#SBATCH --cpus-per-task=3 # How many processors work on one task. Upper limit depends on number of CPUs per node.
#SBATCH --mem-per-cpu=6G # Memory required per usable allocated CPU. Default units are megabytes.
#SBATCH --partition=small # Which queue to use. Defines maximum time, memory, tasks, nodes and local storage for job

# Fail fast: abort the job if module loading (or any later step) fails,
# if an unset variable is referenced, or if any pipeline stage errors.
set -euo pipefail

### Load the geoconda module which has Python and Dask installed
module load geoconda

# Input directory with 3 example Sentinel-2 images (read-only shared data).
readonly datadir=/appl/data/geo/sentinel/s2_example_data/L2A

### Run the Dask example. The directory given to the script hosts 3 Sentinel images
# Quote the expansion so the path survives spaces/glob characters (SC2086).
srun python dask_singlenode.py "$datadir"