
Commit 89ba345

fix pr
1 parent c41a608 commit 89ba345

2 files changed (+8, -7 lines)

.github/workflows/run_chatgpt_examples.yml

Lines changed: 4 additions & 4 deletions

@@ -34,14 +34,14 @@ jobs:
           pip uninstall flash-attn
           pip install torch==2.5.1 torchvision==0.20.1 torchaudio==2.5.1 --index-url https://download.pytorch.org/whl/cu124
 
-      - name: Install Colossal-AI
-        run: |
-          BUILD_EXT=1 pip install --no-cache-dir -v -e .
-
       - name: Install flash-attn
         run: |
           pip install https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.5cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
 
+      - name: Install Colossal-AI
+        run: |
+          BUILD_EXT=1 pip install --no-cache-dir -v -e .
+
       - name: Install ChatGPT
         env:
           CFLAGS: "-O1"
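
The net change in this file is an order swap: the prebuilt flash-attn wheel is now installed before Colossal-AI is built from source with BUILD_EXT=1. A minimal sketch of the equivalent install sequence as plain shell commands, with versions and URLs copied verbatim from the workflow and assuming a ColossalAI checkout as the working directory (an illustration, not the workflow step itself):

    # Sketch only: the installs the CI performs after this commit, in the new order.
    pip uninstall flash-attn
    pip install torch==2.5.1 torchvision==0.20.1 torchaudio==2.5.1 --index-url https://download.pytorch.org/whl/cu124
    pip install https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.5cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
    # Build Colossal-AI's extensions from source only after the wheels are in place.
    BUILD_EXT=1 pip install --no-cache-dir -v -e .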

applications/ColossalChat/tests/test_train.sh

Lines changed: 4 additions & 3 deletions

@@ -31,7 +31,7 @@ MODELS_DIR=$TEMP_DIR/models_config
 # Skip those tests due to CI tests timeout
 MODELS=('llama')
 ADVANCED_PLUGINS=('zero2' 'sp_split_gather' 'sp_ring' 'sp_all_to_all' 'tp_zero2' '3d' 'gemini' 'gemini_auto' 'zero2_cpu' 'pp' 'tp_pp')
-ADVANCED_PLUGINS=('zero2' 'sp_all_to_all' 'tp_zero2' '3d' 'gemini' 'gemini_auto' 'zero2_cpu' 'pp' 'tp_pp')
+ADVANCED_PLUGINS=('zero2' 'sp_all_to_all' '3d' 'gemini' 'gemini_auto' 'zero2_cpu' 'pp' 'tp_pp')
 PLUGINS=('zero2' '3d' 'gemini' 'gemini_auto' 'zero2_cpu')
 LORA_RANK=('0') # skip to reduce CI execution time, can pass all locally
 LORA_CONFIG_ENABLE="--lora_config $BASE_DIR/examples/training_scripts/lora_config.json"
@@ -332,6 +332,7 @@ GRAD_CKPTS=('--grad_checkpoint')
 for lora_rank in ${LORA_RANK[@]}; do
     for model in ${MODELS[@]}; do
         for plugin in ${ADVANCED_PLUGINS[@]}; do
+            echo "[Test]: Starting $model-$plugin-$lora_rank"
             if [[ " ${SKIPPED_TESTS[*]} " =~ " $model-$plugin-$lora_rank " ]]; then
                 echo "[Test]: Skipped $model-$plugin-$lora_rank"
                 continue
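
The new echo makes it explicit in the CI log which combination is about to run, right before the existing skip check. That check uses the common bash idiom of padding both the space-joined array and the needle with spaces so the =~ test only matches whole entries. A standalone sketch of the idiom (the array contents here are hypothetical, not the script's real SKIPPED_TESTS values):

    #!/bin/bash
    # Illustration only: whole-entry membership test via a padded substring match.
    SKIPPED_TESTS=('llama-3d-0' 'llama-pp-0')   # hypothetical entries
    model='llama'; plugin='pp'; lora_rank='0'
    if [[ " ${SKIPPED_TESTS[*]} " =~ " $model-$plugin-$lora_rank " ]]; then
        echo "[Test]: Skipped $model-$plugin-$lora_rank"
    else
        echo "[Test]: Starting $model-$plugin-$lora_rank"
    fi
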
@@ -389,8 +390,8 @@ for lora_rank in ${LORA_RANK[@]}; do
             if [[ $plugin == "sp_ring" ]]; then
                 enable_sequence_parallelism='--enable_sequence_parallelism'
                 sp_mode='ring'
-                tp='1'
-                sp='2'
+                tp='2'
+                sp='1'
                 bs='8'
                 plugin='3d'
             fi
