@@ -83,6 +83,7 @@ RUN pip install --upgrade pip
 # Setup optimized Mamba environment with required PyTorch dependencies
 RUN wget -O /tmp/Miniforge.sh https://github.com/conda-forge/miniforge/releases/download/24.3.0-0/Mambaforge-24.3.0-0-Linux-x86_64.sh \
     && bash /tmp/Miniforge.sh -b -p /Miniforge \
+    && echo "export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/cuda-12.1/compat/" >> /Miniforge/etc/profile.d/mamba.sh \
     && source /Miniforge/etc/profile.d/conda.sh \
     && source /Miniforge/etc/profile.d/mamba.sh \
     && mamba update -y -q -n base -c defaults mamba \
@@ -119,21 +120,11 @@ RUN source /Miniforge/etc/profile.d/conda.sh \
     && source /Miniforge/etc/profile.d/mamba.sh \
     && mamba activate BigCodeBench \
     && cd /bigcodebench && pip install .[generate] \
-    && python -c "from bigcodebench.data import get_bigcodebench; get_bigcodebench()"
-
-# Install Flash Attention
-RUN source /Miniforge/etc/profile.d/conda.sh \
-    && source /Miniforge/etc/profile.d/mamba.sh \
-    && mamba activate BigCodeBench \
+    && python -c "from bigcodebench.data import get_bigcodebench; get_bigcodebench()" \
     && export MAX_JOBS=$(($(nproc) - 2)) \
     && pip install --no-cache-dir ninja packaging psutil \
     && pip install flash-attn==2.5.8 --no-build-isolation
 
 WORKDIR /app
 
-# Declare an argument for the huggingface token
-ARG HF_TOKEN
-RUN if [[ -n "$HF_TOKEN" ]] ; then /Miniforge/envs/BigCodeBench/bin/huggingface-cli login --token $HF_TOKEN ; \
-    else echo "No HuggingFace token specified. Access to gated or private models will be unavailable." ; fi
-
 ENTRYPOINT ["/Miniforge/envs/BigCodeBench/bin/python" , "-m" , "bigcodebench.generate" ]
0 commit comments