Skip to content

Commit 43f1af0

Browse files
authored
fix ci
1 parent b42b2cb commit 43f1af0

File tree

1 file changed

+7
-4
lines changed

1 file changed

+7
-4
lines changed

.github/workflows/run_chatgpt_examples.yml

Lines changed: 7 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -19,7 +19,7 @@ jobs:
1919
github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
2020
runs-on: [self-hosted, ubuntu-latest]
2121
container:
22-
image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
22+
image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.5.1-12.4.1
2323
options: --gpus all --rm -v /data/scratch/examples-data:/data/scratch/examples-data --shm-size=10.24gb
2424
timeout-minutes: 180
2525
defaults:
@@ -29,16 +29,19 @@ jobs:
2929
- name: Checkout ColossalAI
3030
uses: actions/checkout@v2
3131

32-
- name: Install flash-attention
32+
- name: Install torch
3333
run: |
3434
pip uninstall flash-attn
35-
pip install torch==2.5.1 torchvision==0.20.1 torchaudio==2.5.1 --index-url https://download.pytorch.org/whl/cu121
36-
pip install flash-attn==2.7.4.post1 --no-build-isolation
35+
pip install torch==2.5.1 torchvision==0.20.1 torchaudio==2.5.1 --index-url https://download.pytorch.org/whl/cu124
3736
3837
- name: Install Colossal-AI
3938
run: |
4039
BUILD_EXT=1 pip install --no-cache-dir -v -e .
4140
41+
- name: Install flash-attn
42+
run: |
43+
pip install https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.5cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
44+
4245
- name: Install ChatGPT
4346
env:
4447
CFLAGS: "-O1"

0 commit comments

Comments (0)