configs/cscs/daint/gpu/craype_config
+24 -16 (24 additions and 16 deletions)
@@ -3,35 +3,45 @@
 # Author: Samuel Omlin, CSCS (omlins)
 #
 # Description: Definition of site specific variables and call of JUHPC.
-# Site: ALPS:todi, Swiss National Supercomputing Centre (CSCS)
+# Site: Piz Daint:gpu, Swiss National Supercomputing Centre (CSCS)
 # Base: craype


-# Load required modules (including correct CPU and GPU target modules)
+# Load required modules, including correct CPU and GPU target modules (NOTE: the same modules should be loaded when running julia - JUHPC can be used in a module build recipe...)
 module load daint-gpu
 module switch PrgEnv-cray PrgEnv-gnu
-module load cudatoolkit craype-accel-nvidia60
+module load cudatoolkit/11.2.0_3.39-2.1__gf93aa1c craype-accel-nvidia60  # Load latest available cudatoolkit
 module load cray-hdf5-parallel
 module list

 # Environment variables for HPC key packages that require system libraries (MPI.jl, CUDA.jl, HDF5.jl and ADIOS2.jl)
-export JUHPC_CUDA_HOME=$CUDA_HOME                            # Used for CUDA.jl runtime discovery (set as CUDA_HOME in the uenv view / activate script).
-export JUHPC_CUDA_RUNTIME_VERSION=$CRAY_CUDATOOLKIT_VERSION  # Used for CUDA.jl runtime version definition (set in preferences).
-export JUHPC_ROCM_HOME=                                      # Used for AMDGPU.jl runtime discovery (set as ROCM_PATH in the uenv view / activate script).
-export JUHPC_MPI_HOME=$MPICH_DIR                             # Used for MPI.jl system binary discovery (set in preferences).
-export JUHPC_MPI_VENDOR=                                     # Used for MPI.jl system binary discovery (used to set preferences).
-export JUHPC_MPI_EXEC="srun -C gpu"                          # Used for MPI.jl exec command discovery (set in preferences). Arguments are space separated, e.g. "srun -C gpu".
-export JUHPC_HDF5_HOME=$HDF5_DIR                             # Used for HDF5.jl library discovery (set in preferences).
-export JUHPC_ADIOS2_HOME=                                    # Used for ADIOS2.jl library discovery (set as JULIA_ADIOS2_PATH in the uenv view / activate script).
+export JUHPC_CUDA_HOME=$CUDA_HOME                            # Used for CUDA.jl runtime discovery (set as CUDA_HOME in the activate script).
+export JUHPC_CUDA_RUNTIME_VERSION=$CRAY_CUDATOOLKIT_VERSION  # Used for CUDA.jl runtime version definition (set in preferences).
+export JUHPC_ROCM_HOME=                                      # Used for AMDGPU.jl runtime discovery (set as ROCM_PATH in the activate script).
+export JUHPC_MPI_HOME=$MPICH_DIR                             # Used for MPI.jl system binary discovery (set in preferences).
+export JUHPC_MPI_VENDOR=                                     # Used for MPI.jl system binary discovery (used to set preferences).
+export JUHPC_MPI_EXEC="srun -C gpu"                          # Used for MPI.jl exec command discovery (set in preferences). Arguments are space separated, e.g. "srun -C gpu".
+export JUHPC_HDF5_HOME=$HDF5_DIR                             # Used for HDF5.jl library discovery (set in preferences).
+export JUHPC_ADIOS2_HOME=                                    # Used for ADIOS2.jl library discovery (set as JULIA_ADIOS2_PATH in the activate script).
+
+
+# Create site-specific post-install script (currently MPIPreferences does not provide an option to set required preloads if not automatically detected; JUHPC_MPI_VENDOR fails on Piz Daint...)

[...]

-JUHPC_SETUP_INSTALLDIR=$SCRATCH/${HOSTNAME%%-*}/juhpc_setup                      # HPC setup installation environment variables must be expanded during installation.
-JULIAUP_INSTALLDIR="\$SCRATCH/\${HOSTNAME%%-*}/juliaup"                          # User environment variables SCRATCH and HOSTNAME must not be expanded during uenv build, but during usage. Separate installation by HOSTNAME is required, because different hosts with different architectures can share the same file system (e.g., daint and eiger on ALPS).
+JUHPC_SETUP_INSTALLDIR=$SCRATCH/../julia/${HOSTNAME%%[0-9]*}-gpu/juhpc_setup     # HPC setup installation environment variables must be expanded during installation.
+JULIAUP_INSTALLDIR="\$SCRATCH/../julia/\$USER/\${HOSTNAME%%[0-9]*}-gpu/juliaup"  # User environment variables SCRATCH and HOSTNAME must not be expanded during HPC setup installation, but during usage. Separate installation by HOSTNAME is required, because different hosts with different architectures can share the same file system (e.g., daint and eiger on ALPS).