
Commit a608df9

Merge pull request #3 from omlins/initial
Update configurations and version number extractions
2 parents b31b3f3 + 85ce182

File tree

3 files changed: +74, -5 lines


configs/cscs/craype_config renamed to configs/cscs/alps/gh200/craype_config

Lines changed: 3 additions & 1 deletion
@@ -27,9 +27,11 @@ export JUHPC_ADIOS2_HOME= # Used for ADIOS2.
 
 
 # Call JUHPC
+git clone https://github.com/omlins/JUHPC
+JUHPC=./JUHPC/src/juhpc
 JUHPC_SETUP_INSTALLDIR=$SCRATCH/${HOSTNAME%%-*}/juhpc_setup # HPC setup installation environment variables must be expanded during installation.
 JULIAUP_INSTALLDIR="\$SCRATCH/\${HOSTNAME%%-*}/juliaup" # User environment variables SCRATCH and HOSTNAME must not be expanded during uenv build, but during usage. Separate installation by HOSTNAME is required, because different hosts with different architectures can share the same file system (e.g., daint and eiger on ALPS).
-bash -l ./juhpc $JUHPC_SETUP_INSTALLDIR $JULIAUP_INSTALLDIR # NOTE: The juhpc script must be in the current directory.
+bash -l $JUHPC $JUHPC_SETUP_INSTALLDIR $JULIAUP_INSTALLDIR
 
 
 # Activate the HPC setup environment variables
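The escaping in the JULIAUP_INSTALLDIR line is the crux of the comment: unescaped variables are expanded when this config is evaluated, while escaped ones survive as literals into the generated scripts. A minimal sketch of the difference (path and values are illustrative only):

    SCRATCH=/scratch/user                       # illustrative value
    NOW="$SCRATCH/juliaup"                      # expanded immediately (build time)
    LATER="\$SCRATCH/juliaup"                   # stays literal until evaluated later
    echo "$NOW"         # -> /scratch/user/juliaup
    echo "$LATER"       # -> $SCRATCH/juliaup
    eval echo "$LATER"  # -> /scratch/user/juliaup (expanded at usage time)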

configs/cscs/daint/gpu/craype_config

Lines changed: 62 additions & 0 deletions
@@ -0,0 +1,62 @@
+#!/bin/bash
+
+# Author: Samuel Omlin, CSCS (omlins)
+#
+# Description: Definition of site-specific variables and call of JUHPC.
+# Site: Piz Daint (daint:gpu), Swiss National Supercomputing Centre (CSCS)
+# Base: craype
+
+
+# Load required modules (including correct CPU and GPU target modules)
+module load daint-gpu
+module switch PrgEnv-cray PrgEnv-gnu
+module load cudatoolkit craype-accel-nvidia60
+module load cray-hdf5-parallel
+module list
+
+
+# Environment variables for HPC key packages that require system libraries (MPI.jl, CUDA.jl, HDF5.jl and ADIOS2.jl)
+export JUHPC_CUDA_HOME=$CUDA_HOME # Used for CUDA.jl runtime discovery (set as CUDA_HOME in the uenv view / activate script).
+export JUHPC_CUDA_RUNTIME_VERSION=$CRAY_CUDATOOLKIT_VERSION # Used for CUDA.jl runtime version definition (set in preferences).
+export JUHPC_ROCM_HOME= # Used for AMDGPU.jl runtime discovery (set as ROCM_PATH in the uenv view / activate script).
+export JUHPC_MPI_HOME=$MPICH_DIR # Used for MPI.jl system binary discovery (set in preferences).
+export JUHPC_MPI_VENDOR= # Used for MPI.jl system binary discovery (used to set preferences).
+export JUHPC_MPI_EXEC="srun -C gpu" # Used for MPI.jl exec command discovery (set in preferences). Arguments are space separated, e.g. "srun -C gpu".
+export JUHPC_HDF5_HOME=$HDF5_DIR # Used for HDF5.jl library discovery (set in preferences).
+export JUHPC_ADIOS2_HOME= # Used for ADIOS2.jl library discovery (set as JULIA_ADIOS2_PATH in the uenv view / activate script).
+
+
+# Call JUHPC
+git clone https://github.com/omlins/JUHPC
+JUHPC=./JUHPC/src/juhpc
+JUHPC_SETUP_INSTALLDIR=$SCRATCH/${HOSTNAME%%-*}/juhpc_setup # HPC setup installation environment variables must be expanded during installation.
+JULIAUP_INSTALLDIR="\$SCRATCH/\${HOSTNAME%%-*}/juliaup" # User environment variables SCRATCH and HOSTNAME must not be expanded during uenv build, but during usage. Separate installation by HOSTNAME is required, because different hosts with different architectures can share the same file system (e.g., daint and eiger on ALPS).
+bash -l $JUHPC $JUHPC_SETUP_INSTALLDIR $JULIAUP_INSTALLDIR
+
+
+# Activate the HPC setup environment variables
+. $JUHPC_SETUP_INSTALLDIR/activate
+
+# Call juliaup to install juliaup and latest julia on scratch
+juliaup
+
+# Call juliaup to see its options
+juliaup
+
+# Call julia Pkg
+julia -e 'using Pkg; Pkg.status()'
+
+# Add CUDA.jl
+julia -e 'using Pkg; Pkg.add("CUDA"); using CUDA; CUDA.versioninfo()'
+
+# Add MPI.jl
+julia -e 'using Pkg; Pkg.add("MPI"); using MPI; MPI.versioninfo()'
+
+# Add HDF5.jl
+julia -e 'using Pkg; Pkg.add("HDF5"); using HDF5; @show HDF5.has_parallel()'
+
+# Test CUDA-aware MPI
+cd ~/cudaaware
+MPICH_GPU_SUPPORT_ENABLED=1 srun -Acsstaff -C'gpu' -N2 -n2 julia cudaaware.jl
+
+# julia -e 'import Pkg; Pkg.test("MPI"; test_args=["--backend=CUDA"])'
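The cudaaware.jl script invoked by the test step is not part of this commit. A minimal sketch of what such a CUDA-aware MPI check could look like for the two ranks requested above (buffer size, assertion, and output message are assumptions):

    # Hypothetical cudaaware.jl: exchange GPU buffers directly through MPI (assumes 2 ranks).
    using MPI, CUDA
    MPI.Init()
    comm = MPI.COMM_WORLD
    rank = MPI.Comm_rank(comm)
    peer = 1 - rank                        # partner rank (srun -N2 -n2)
    src  = CUDA.fill(Float64(rank), 1024)  # send buffer resides on the GPU
    dst  = CUDA.zeros(Float64, 1024)       # receive buffer resides on the GPU
    MPI.Sendrecv!(src, dst, comm; dest=peer, source=peer)
    @assert all(Array(dst) .== peer)       # each rank must have received its peer's value
    rank == 0 && println("CUDA-aware MPI exchange succeeded.")
    MPI.Finalize()

If the MPI library is not CUDA-aware, passing device buffers directly like this typically crashes, which is exactly what makes it a useful smoke test.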

src/juhpc

Lines changed: 9 additions & 4 deletions
@@ -101,7 +101,7 @@ if [ -n "${JUHPC_CUDA_HOME}" ]; then # Set preference for using the local CUDA
 
     julia --project=$JULIA_PREFDIR -e 'using Preferences; set_preferences!("CUDA_Runtime_jll", "local"=>true)'
     if [ -n "${JUHPC_CUDA_RUNTIME_VERSION}" ]; then
-        julia --project=$JULIA_PREFDIR -e 'using Preferences; v=VersionNumber(ENV["JUHPC_CUDA_RUNTIME_VERSION"]); set_preferences!("CUDA_Runtime_jll", "version"=> "$(v.major).$(v.minor)")'
+        julia --project=$JULIA_PREFDIR -e 'using Preferences; set_preferences!("CUDA_Runtime_jll", "version"=>join(split(ENV["JUHPC_CUDA_RUNTIME_VERSION"],".")[1:2],"."))'
     fi
 fi
 
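A plausible motivation for this change (an assumption; the commit message only says "version number extractions"): version strings such as $CRAY_CUDATOOLKIT_VERSION are not guaranteed to parse as Julia VersionNumbers, whereas taking the first two dot-separated fields always yields the major.minor string the preference needs. Both expressions agree on a well-formed version:

    julia -e 'v = VersionNumber("11.8.89")                 # old: semver parse, then major.minor
              old = "$(v.major).$(v.minor)"
              new = join(split("11.8.89", ".")[1:2], ".")  # new: plain string processing
              @assert old == new == "11.8"'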
@@ -117,7 +117,7 @@ if [ -n "${JUHPC_ROCM_HOME}" ]; then export ROCM_PATH=$JUHPC_ROCM_HOME; fi
 julia --project=$JULIA_PREFDIR -e 'using Pkg; Pkg.add([p for (p,l) in [("MPIPreferences", "JUHPC_MPI_VENDOR"), ("MPIPreferences", "JUHPC_MPI_HOME"), ("CUDA", "JUHPC_CUDA_HOME"), ("AMDGPU", "JUHPC_ROCM_HOME"), ("HDF5", "JUHPC_HDF5_HOME")] if haskey(ENV,l) && ENV[l]!=""])'
 
 if [ -n "${JUHPC_CUDA_HOME}" ]; then # Set preference for using the local CUDA runtime in a more stable way (in case the previous would not be valid anymore)
-    julia --project=$JULIA_PREFDIR -e 'using CUDA; CUDA.set_runtime_version!((VersionNumber(ENV[key]) for key in ["JUHPC_CUDA_RUNTIME_VERSION"] if haskey(ENV,key) && ENV[key]!=="")...; local_toolkit=true)'
+    julia --project=$JULIA_PREFDIR -e 'using CUDA; CUDA.set_runtime_version!((VersionNumber(join(split(ENV[key],".")[1:2],".")) for key in ["JUHPC_CUDA_RUNTIME_VERSION"] if haskey(ENV,key) && ENV[key]!=="")...; local_toolkit=true)'
 fi
 
 if [ -n "${JUHPC_ROCM_HOME}" ]; then # Set preference for using the local ROCm runtime in a more stable way (in case the previous would not be valid anymore)
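The splatted generator left unchanged here is worth a note: it passes the version argument only when JUHPC_CUDA_RUNTIME_VERSION is set, so CUDA.set_runtime_version! is otherwise called with local_toolkit=true alone. A minimal illustration of the idiom, with a stand-in function f rather than the real API:

    julia -e 'f(args...; kw=true) = (args, kw)        # stand-in for CUDA.set_runtime_version!
              env = Dict("VER" => "11.8")
              @show f((VersionNumber(env[k]) for k in ["VER"] if haskey(env, k) && env[k] != "")...; kw=true)
              empty!(env)                             # variable unset: zero positional arguments
              @show f((VersionNumber(env[k]) for k in ["VER"] if haskey(env, k) && env[k] != "")...; kw=true)'

The first call receives ((v"11.8.0",), true), the second ((), true): the splat silently drops the argument when the generator is empty.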
@@ -151,8 +151,8 @@ julia -e '
 println("""#!/bin/bash
 
 info() {
-    local message="$1"
-    echo -e "$JUHPC $message" >&2
+    local message="\$1"
+    echo -e "$(ENV["JUHPC"]) \$message" >&2
 }
 
 if [ ! -f $(ENV["JULIAUP_BINDIR"])/juliaup ]; then
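The escaping fix separates two interpolation layers: the wrapper script is produced by a Julia println, so $(ENV["JUHPC"]) is substituted at generation time, while the escaped \$1 and \$message are written out literally for bash to expand whenever info() runs. A small demonstration with a hypothetical tag value:

    julia -e 'ENV["JUHPC"] = "[JUHPC]"    # hypothetical tag value
              print("""
              info() {
                  local message="\$1"
                  echo -e "$(ENV["JUHPC"]) \$message" >&2
              }
              """)'

This prints the function with literal $1 and $message and the tag already substituted, which is the intended content of the generated script.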
@@ -181,6 +181,11 @@ export JULIA_DEPOT=$JULIAUP_INSTALLDIR/depot
 export ACTIVATE_SCRIPT=$JUHPC_SETUP_INSTALLDIR/activate
 
 julia -e 'println("""
+info() {
+    local message="\$1"
+    echo -e "$(ENV["JUHPC"]) \$message" >&2
+}
+info "activating HPC setup for juliaup, julia and HPC key packages requiring system libraries..."
 export PATH=$(ENV["JULIAUP_WRAPPER_BINDIR"]):$(ENV["JULIAUP_BINDIR"]):\$PATH # The wrapper must be before the juliaup bindir
 export JULIAUP_DEPOT_PATH=$(ENV["JULIAUP_DEPOT"])
 export JULIA_DEPOT_PATH=$(ENV["JULIA_DEPOT"])
