Commit 10fd7b7

[update]: removed flash attention
1 parent 1e2b001 commit 10fd7b7

File tree

1 file changed (+0, -3 lines changed)
  • assets/training/finetune_acft_hf_nlp/environments/acpt-rft/context


assets/training/finetune_acft_hf_nlp/environments/acpt-rft/context/Dockerfile

Lines changed: 0 additions & 3 deletions
@@ -69,9 +69,6 @@ COPY azure_grader /opt/conda/envs/ptca/lib/python3.10/site-packages/verl/utils/r
 COPY azure_python_grader /opt/conda/envs/ptca/lib/python3.10/site-packages/verl/utils/reward_score/azure_python_grader.py
 COPY utils /opt/conda/envs/ptca/lib/python3.10/site-packages/verl/utils/vllm/utils.py
 RUN python3 -m pip install --upgrade pip setuptools wheel
-ENV FLASH_ATTENTION_FORCE_BUILD=TRUE
-ENV NVCC_THREADS=1
-RUN MAX_JOBS=1 pip install --no-cache-dir --upgrade flash-attn==2.8.3 --no-build-isolation
 RUN pip install vllm==0.13.0
 RUN pip install openai==2.14.0
 RUN pip install --force-reinstall --no-cache-dir --no-build-isolation git+https://github.com/deepseek-ai/DeepGEMM.git@c9f8b34dcdacc20aa746b786f983492c51072870
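With the explicit flash-attn build dropped but vllm==0.13.0 still pinned, a quick post-build check can confirm the image behaves as expected. The snippet below is a minimal sanity-check sketch, not part of this commit; whether flash_attn ends up absent is an assumption based on this diff alone, since another dependency could still pull in a prebuilt wheel.

# Minimal post-build sanity check (a sketch; assumes it runs inside the
# image's ptca Python 3.10 environment).
import importlib.util

# With the explicit flash-attn build removed, this is expected to print False,
# unless another dependency installs a prebuilt flash-attn wheel.
print("flash_attn present:", importlib.util.find_spec("flash_attn") is not None)

# vLLM should still import and report the version pinned in the Dockerfile.
import vllm
print("vllm version:", vllm.__version__)  # expected: 0.13.0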

0 commit comments
