configs/cscs/alps/gh200/craype_config (3 additions, 1 deletion)
@@ -27,9 +27,11 @@ export JUHPC_ADIOS2_HOME=  # Used for ADIOS2.
 
 
 # Call JUHPC
+git clone https://github.com/omlins/JUHPC
+JUHPC=./JUHPC/src/juhpc
 JUHPC_SETUP_INSTALLDIR=$SCRATCH/${HOSTNAME%%-*}/juhpc_setup  # HPC setup installation environment variables must be expanded during installation.
 JULIAUP_INSTALLDIR="\$SCRATCH/\${HOSTNAME%%-*}/juliaup"      # User environment variables SCRATCH and HOSTNAME must not be expanded during uenv build, but during usage. Separate installation by HOSTNAME is required, because different hosts with different architectures can share the same file system (e.g., daint and eiger on ALPS).
-bash -l ./juhpc $JUHPC_SETUP_INSTALLDIR $JULIAUP_INSTALLDIR  # NOTE: The juhpc script must be in the current directory.
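The remaining added line of this hunk is not visible in the excerpt; presumably the setup call is updated to invoke the cloned script through $JUHPC instead of requiring juhpc to sit in the current directory. A minimal sketch of how such an invocation could look (the exact replacement line is an assumption, not shown in the diff):

    # Hedged sketch only: the actual replacement call is not visible in this excerpt.
    git clone https://github.com/omlins/JUHPC                    # fetch the JUHPC sources next to the config
    JUHPC=./JUHPC/src/juhpc                                      # path to the juhpc setup script inside the clone
    bash -l $JUHPC $JUHPC_SETUP_INSTALLDIR $JULIAUP_INSTALLDIR   # the script no longer needs to be in the current directory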
Expanded file content of configs/cscs/alps/gh200/craype_config (new version, as far as shown):

# Description: Definition of site specific variables and call of JUHPC.
# Site: ALPS:todi, Swiss National Supercomputing Centre (CSCS)
# Base: craype

# Load required modules (including correct CPU and GPU target modules)
module load daint-gpu
module switch PrgEnv-cray PrgEnv-gnu
module load cudatoolkit craype-accel-nvidia60
module load cray-hdf5-parallel
module list

# Environment variables for HPC key packages that require system libraries (MPI.jl, CUDA.jl, HDF5.jl and ADIOS2.jl)
export JUHPC_CUDA_HOME=$CUDA_HOME                            # Used for CUDA.jl runtime discovery (set as CUDA_HOME in the uenv view / activate script).
export JUHPC_CUDA_RUNTIME_VERSION=$CRAY_CUDATOOLKIT_VERSION  # Used for CUDA.jl runtime version definition (set in preferences).
export JUHPC_ROCM_HOME=                                      # Used for AMDGPU.jl runtime discovery (set as ROCM_PATH in the uenv view / activate script).
export JUHPC_MPI_HOME=$MPICH_DIR                             # Used for MPI.jl system binary discovery (set in preferences).
export JUHPC_MPI_VENDOR=                                     # Used for MPI.jl system binary discovery (used to set preferences).
export JUHPC_MPI_EXEC="srun -C gpu"                          # Used for MPI.jl exec command discovery (set in preferences). Arguments are space separated, e.g. "srun -C gpu".
export JUHPC_HDF5_HOME=$HDF5_DIR                             # Used for HDF5.jl library discovery (set in preferences).
export JUHPC_ADIOS2_HOME=                                    # Used for ADIOS2.jl library discovery (set as JULIA_ADIOS2_PATH in the uenv view / activate script).

# Call JUHPC
git clone https://github.com/omlins/JUHPC
JUHPC=./JUHPC/src/juhpc
JUHPC_SETUP_INSTALLDIR=$SCRATCH/${HOSTNAME%%-*}/juhpc_setup  # HPC setup installation environment variables must be expanded during installation.
JULIAUP_INSTALLDIR="\$SCRATCH/\${HOSTNAME%%-*}/juliaup"      # User environment variables SCRATCH and HOSTNAME must not be expanded during uenv build, but during usage. Separate installation by HOSTNAME is required, because different hosts with different architectures can share the same file system (e.g., daint and eiger on ALPS).
src/juhpc (9 additions, 4 deletions)
@@ -101,7 +101,7 @@ if [ -n "${JUHPC_CUDA_HOME}" ]; then # Set preference for using the local CUDA
 
     julia --project=$JULIA_PREFDIR -e 'using Preferences; set_preferences!("CUDA_Runtime_jll", "local"=>true)'
     if [ -n "${JUHPC_CUDA_RUNTIME_VERSION}" ]; then
-        julia --project=$JULIA_PREFDIR -e 'using Preferences; v=VersionNumber(ENV["JUHPC_CUDA_RUNTIME_VERSION"]); set_preferences!("CUDA_Runtime_jll", "version"=>"$(v.major).$(v.minor)")'
+        julia --project=$JULIA_PREFDIR -e 'using Preferences; set_preferences!("CUDA_Runtime_jll", "version"=>join(split(ENV["JUHPC_CUDA_RUNTIME_VERSION"],".")[1:2],"."))'
     fi
 fi
 
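The replacement keeps only the first two dot-separated components of JUHPC_CUDA_RUNTIME_VERSION rather than going through VersionNumber. A quick stand-alone check of what the new expression produces (illustrative value only):

    # Illustration: the major.minor string the new one-liner stores as the CUDA_Runtime_jll "version" preference.
    export JUHPC_CUDA_RUNTIME_VERSION="12.4.1"   # example value, not taken from the PR
    julia -e 'println(join(split(ENV["JUHPC_CUDA_RUNTIME_VERSION"], ".")[1:2], "."))'   # prints: 12.4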
@@ -117,7 +117,7 @@ if [ -n "${JUHPC_ROCM_HOME}" ]; then export ROCM_PATH=$JUHPC_ROCM_HOME; fi
 julia --project=$JULIA_PREFDIR -e 'using Pkg; Pkg.add([p for (p,l) in [("MPIPreferences", "JUHPC_MPI_VENDOR"), ("MPIPreferences", "JUHPC_MPI_HOME"), ("CUDA", "JUHPC_CUDA_HOME"), ("AMDGPU", "JUHPC_ROCM_HOME"), ("HDF5", "JUHPC_HDF5_HOME")] if haskey(ENV,l) && ENV[l]!=""])'
 
 if [ -n "${JUHPC_CUDA_HOME}" ]; then  # Set preference for using the local CUDA runtime in a more stable way (in case the previous would not be valid anymore)
-    julia --project=$JULIA_PREFDIR -e 'using CUDA; CUDA.set_runtime_version!((VersionNumber(ENV[key]) for key in ["JUHPC_CUDA_RUNTIME_VERSION"] if haskey(ENV,key) && ENV[key]!=="")...; local_toolkit=true)'
+    julia --project=$JULIA_PREFDIR -e 'using CUDA; CUDA.set_runtime_version!((VersionNumber(join(split(ENV[key],".")[1:2],".")) for key in ["JUHPC_CUDA_RUNTIME_VERSION"] if haskey(ENV,key) && ENV[key]!=="")...; local_toolkit=true)'
 fi
 
 if [ -n "${JUHPC_ROCM_HOME}" ]; then  # Set preference for using the local ROCm runtime in a more stable way (in case the previous would not be valid anymore)
@@ -151,8 +151,8 @@ julia -e '
 println("""#!/bin/bash
 
 info() {
-    local message="$1"
-    echo -e "$JUHPC$message" >&2
+    local message="\$1"
+    echo -e "$(ENV["JUHPC"]) \$message" >&2
 }
 
 if [ ! -f $(ENV["JULIAUP_BINDIR"])/juliaup ]; then