You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
# Description: Definition of site specific variables and call of JUHPC.
# Site: ALPS:todi, Swiss National Supercomputing Centre (CSCS)
# Base: craype


# Load required modules (including correct CPU and GPU target modules)
module load cray
module switch PrgEnv-cray PrgEnv-gnu            # GNU programming environment (required toolchain for the Julia HPC stack)
module load cudatoolkit craype-accel-nvidia90   # CUDA toolkit + GPU target module for NVIDIA Hopper (sm_90, GH200)
module load cray-hdf5-parallel                  # Parallel HDF5 (used by HDF5.jl below)
module list                                     # Log the loaded modules for reproducibility
+
# Environment variables for HPC key packages that require system libraries (MPI.jl, CUDA.jl, HDF5.jl and ADIOS2.jl).
# NOTE: a space is required before each trailing '#' — without it, bash appends '#...' to the value
# and treats the following words as additional names to export.
export JUHPC_CUDA_HOME=$CUDA_HOME                            # Used for CUDA.jl runtime discovery (set as CUDA_HOME in the uenv view / activate script).
export JUHPC_CUDA_RUNTIME_VERSION=$CRAY_CUDATOOLKIT_VERSION  # Used for CUDA.jl runtime version definition (set in preferences).
export JUHPC_ROCM_HOME=                                      # Used for AMDGPU.jl runtime discovery (set as ROCM_PATH in the uenv view / activate script). Empty: no AMD GPU on this system.
export JUHPC_MPI_HOME=                                       # Used for MPI.jl system binary discovery (set in preferences). Empty: vendor discovery is used instead.
export JUHPC_MPI_VENDOR="cray"                               # Used for MPI.jl system binary discovery (used to set preferences).
export JUHPC_MPI_EXEC="srun -C gpu"                          # Used for MPI.jl exec command discovery (set in preferences). Arguments are space separated, e.g. "srun -C gpu".
export JUHPC_HDF5_HOME=$HDF5_DIR                             # Used for HDF5.jl library discovery (set in preferences).
export JUHPC_ADIOS2_HOME=                                    # Used for ADIOS2.jl library discovery (set as JULIA_ADIOS2_PATH in the uenv view / activate script). Empty: ADIOS2 not configured.
+
# Call JUHPC
JUHPC_SETUP_INSTALLDIR=$SCRATCH/${HOSTNAME%%-*}/juhpc_setup  # HPC setup installation environment variables must be expanded during installation.
JULIAUP_INSTALLDIR="\$SCRATCH/\${HOSTNAME%%-*}/juliaup"      # User environment variables SCRATCH and HOSTNAME must not be expanded during uenv build, but during usage (hence the escaped '$'). Separate installation by HOSTNAME is required, because different hosts with different architectures can share the same file system (e.g., daint and eiger on ALPS).
# NOTE: the two install dirs are separate positional arguments — the space between them is required.
bash -l ./juhpc $JUHPC_SETUP_INSTALLDIR $JULIAUP_INSTALLDIR  # NOTE: The juhpc script must be in the current directory.
+
# Activate the HPC setup environment variables (source the generated activate script)
. $JUHPC_SETUP_INSTALLDIR/activate

# Call juliaup to install juliaup and latest julia on scratch
# (the first call of the juliaup wrapper triggers the installation)
juliaup

# Call juliaup to see its options
juliaup

# Call julia Pkg
julia -e 'using Pkg; Pkg.status()'

# Add CUDA.jl
julia -e 'using Pkg; Pkg.add("CUDA"); using CUDA; CUDA.versioninfo()'

# Add MPI.jl
julia -e 'using Pkg; Pkg.add("MPI"); using MPI; MPI.versioninfo()'

# Add HDF5.jl
julia -e 'using Pkg; Pkg.add("HDF5"); using HDF5; @show HDF5.has_parallel()'

# Test CUDA-aware MPI (assumes a test script at ~/cudaaware/cudaaware.jl — TODO confirm)
cd ~/cudaaware
MPICH_GPU_SUPPORT_ENABLED=1 srun -Acsstaff -C'gpu' -N2 -n2 julia cudaaware.jl
# julia -e 'import Pkg; Pkg.test("MPI"; test_args=["--backend=CUDA"])'
# Set up Julia preferences for the system CUDA/ROCm/MPI/HDF5 installations in the
# preferences project ($JULIA_PREFDIR). Preferences.jl is only needed when a GPU
# runtime preference has to be written before the GPU package itself is installed.
if [[ -n "${JUHPC_CUDA_HOME}" || -n "${JUHPC_ROCM_HOME}" ]]; then
    julia --project=$JULIA_PREFDIR -e 'using Pkg; Pkg.add("Preferences")'
    echo "[extras]" >> $JULIA_PREF_PROJECT
fi

if [ -n "${JUHPC_CUDA_HOME}" ]; then  # Set preference for using the local CUDA runtime before any installation of CUDA.jl to avoid downloading of artifacts
    julia --project=$JULIA_PREFDIR -e 'using Preferences; set_preferences!("CUDA_Runtime_jll", "local"=>true)'
    if [ -n "${JUHPC_CUDA_RUNTIME_VERSION}" ]; then
        # Only major.minor are recorded (CUDA_Runtime_jll versions the runtime at that granularity).
        julia --project=$JULIA_PREFDIR -e 'using Preferences; v=VersionNumber(ENV["JUHPC_CUDA_RUNTIME_VERSION"]); set_preferences!("CUDA_Runtime_jll", "version"=>"$(v.major).$(v.minor)")'
    fi
fi

if [ -n "${JUHPC_ROCM_HOME}" ]; then  # Set preference for using the local ROCm runtime before any installation of AMDGPU.jl to avoid downloading of artifacts
    julia --project=$JULIA_PREFDIR -e 'using Preferences; set_preferences!("AMDGPU", "use_artifacts"=>false, "eager_gc"=>false)'
fi

# Export the discovery variables the GPU packages read during installation.
if [ -n "${JUHPC_CUDA_HOME}" ]; then export CUDA_HOME=$JUHPC_CUDA_HOME; fi
if [ -n "${JUHPC_ROCM_HOME}" ]; then export ROCM_PATH=$JUHPC_ROCM_HOME; fi

# Install only the packages whose corresponding JUHPC_* variable is set and non-empty.
julia --project=$JULIA_PREFDIR -e 'using Pkg; Pkg.add([p for (p,l) in [("MPIPreferences", "JUHPC_MPI_VENDOR"), ("MPIPreferences", "JUHPC_MPI_HOME"), ("CUDA", "JUHPC_CUDA_HOME"), ("AMDGPU", "JUHPC_ROCM_HOME"), ("HDF5", "JUHPC_HDF5_HOME")] if haskey(ENV,l) && ENV[l]!=""])'

if [ -n "${JUHPC_CUDA_HOME}" ]; then  # Set preference for using the local CUDA runtime in a more stable way (in case the previous would not be valid anymore)
    julia --project=$JULIA_PREFDIR -e 'using CUDA; CUDA.set_runtime_version!((VersionNumber(ENV[key]) for key in ["JUHPC_CUDA_RUNTIME_VERSION"] if haskey(ENV,key) && ENV[key]!=="")...; local_toolkit=true)'
fi

if [ -n "${JUHPC_ROCM_HOME}" ]; then  # Set preference for using the local ROCm runtime in a more stable way (in case the previous would not be valid anymore)
    # NOTE: the package is AMDGPU.jl ("using ROCM" does not exist); use its ROCmDiscovery API.
    julia --project=$JULIA_PREFDIR -e 'using AMDGPU; AMDGPU.ROCmDiscovery.use_artifacts!(false)'
fi

# Configure MPI.jl for the system MPI: prefer vendor-based discovery, else fall back to MPI home.
if [ -n "${JUHPC_MPI_VENDOR}" ]; then
    check_var "JUHPC_MPI_EXEC"
    julia --project=$JULIA_PREFDIR -e 'using MPIPreferences; MPIPreferences.use_system_binary(mpiexec=split(ENV["JUHPC_MPI_EXEC"]), vendor=ENV["JUHPC_MPI_VENDOR"])'
elif [ -n "${JUHPC_MPI_HOME}" ]; then
    check_var "JUHPC_MPI_EXEC"
    julia --project=$JULIA_PREFDIR -e 'using MPIPreferences; MPIPreferences.use_system_binary(mpiexec=split(ENV["JUHPC_MPI_EXEC"]), extra_paths=["$(ENV["JUHPC_MPI_HOME"])/lib"])'
fi

# Point HDF5.jl at the system (parallel) HDF5 libraries.
if [ -n "${JUHPC_HDF5_HOME}" ]; then
    julia --project=$JULIA_PREFDIR -e 'using HDF5; HDF5.API.set_libraries!("$(ENV["JUHPC_HDF5_HOME"])/lib/libhdf5.so", "$(ENV["JUHPC_HDF5_HOME"])/lib/libhdf5_hl.so")'
fi

info "... done."
140
+
141
+
142
+
# Create a wrapper for juliaup that installs juliaup (and latest julia) on scratch if it is not already installed
0 commit comments