
Commit a86c868

Updated ci/cd
Signed-off-by: romit <[email protected]>
Parent: 850430f

File tree

4 files changed (+19, -27 lines)


tests/artifacts/language_models/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -20,4 +20,4 @@
 ### Constants used for model path
 PREDEFINED_MODEL_PATH = os.path.join(os.path.dirname(__file__))
 MAYKEYE_TINY_LLAMA_CACHED = os.path.join(PREDEFINED_MODEL_PATH, "maykeye-tinyllama-v0")
-TINYMIXTRAL_MOE = "Isotonic/TinyMixtral-4x248M-MoE"
+TINYMIXTRAL_MOE = "Isotonic/TinyMixtral-4x248M-MoE"
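
The TINYMIXTRAL_MOE constant is a Hugging Face Hub model id rather than a local cached path like MAYKEYE_TINY_LLAMA_CACHED. A minimal sketch of how a test might consume it, assuming the standard transformers auto-classes; the loading code itself is not part of this diff:

# Hypothetical usage of the TINYMIXTRAL_MOE constant; not part of this commit.
from transformers import AutoModelForCausalLM, AutoTokenizer

TINYMIXTRAL_MOE = "Isotonic/TinyMixtral-4x248M-MoE"

# from_pretrained accepts a Hub id directly, so no local cache path is needed.
tokenizer = AutoTokenizer.from_pretrained(TINYMIXTRAL_MOE)
model = AutoModelForCausalLM.from_pretrained(TINYMIXTRAL_MOE)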

tests/artifacts/predefined_data_configs/tokenize_and_apply_input_masking_streaming.yaml

Lines changed: 1 addition & 1 deletion
@@ -8,7 +8,7 @@ datasets:
     data_handlers:
       - name: tokenize_and_apply_input_masking
         arguments:
-          remove_columns: all
+          # remove_columns: all
           batched: false
           fn_kwargs:
             input_column_name: input
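
Commenting out remove_columns: all in the streaming variant of this config is consistent with how streaming works in the Hugging Face datasets library: an IterableDataset often does not know its column names up front, so a handler has no reliable way to expand "all" into a concrete column list. A small sketch of that constraint, assuming the handler ultimately calls .map() on the dataset (the exact call site is not shown in this diff):

# Sketch of why "remove_columns: all" is problematic for streaming data;
# the wiring to the YAML config is an assumption, not shown in this commit.
from datasets import load_dataset

streamed = load_dataset("json", data_files="train.jsonl", streaming=True)["train"]

# For an IterableDataset, column_names can be None until features are resolved,
# so "all" cannot be expanded into an explicit list of columns to drop.
print(streamed.column_names)  # may print None

# Passing explicit names still works when they are known:
# streamed = streamed.map(tokenize_fn, batched=False, remove_columns=["input"])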

tests/test_sft_trainer.py

Lines changed: 14 additions & 25 deletions
@@ -1766,9 +1766,7 @@ def test_run_moe_ft_and_inference_ep1_kernels(dataset_path, ep_degree):
         sft_trainer.train(
             model_args, data_args, train_args, fast_moe_config=fast_moe_config
         )
-        _test_run_inference(
-            checkpoint_path=_get_hf_converted_path(tempdir)
-        )
+        _test_run_inference(checkpoint_path=_get_hf_converted_path(tempdir))


 @pytest.mark.skipif(
@@ -1801,27 +1799,17 @@ def test_run_moe_lora_and_inference(dataset_path, target_modules, ep_degree):
         lora_args.target_modules = target_modules
         fast_moe_config = FastMoeConfig(fast_moe=FastMoe(ep_degree=ep_degree))

-        if target_modules == "all-linear":
-            with pytest.raises(ValueError):
-                sft_trainer.train(
-                    model_args,
-                    data_args,
-                    train_args,
-                    lora_args,
-                    fast_moe_config=fast_moe_config,
-                )
-        else:
-            sft_trainer.train(
-                model_args,
-                data_args,
-                train_args,
-                lora_args,
-                fast_moe_config=fast_moe_config,
-            )
-            _test_run_inference(
-                checkpoint_path=_get_checkpoint_path(tempdir),
-                base_model_name_or_path=TINYMIXTRAL_MOE,
-            )
+        sft_trainer.train(
+            model_args,
+            data_args,
+            train_args,
+            lora_args,
+            fast_moe_config=fast_moe_config,
+        )
+        _test_run_inference(
+            checkpoint_path=_get_checkpoint_path(tempdir),
+            base_model_name_or_path=TINYMIXTRAL_MOE,
+        )


 @pytest.mark.skipif(
@@ -1936,7 +1924,8 @@ def _get_hf_converted_path(dir_path):
     hf_converted_dir = [
         d
         for d in os.listdir(final_checkpoint_path)
-        if os.path.isdir(os.path.join(final_checkpoint_path, d)) and re.match(r"^safetensors-\d+$", d)
+        if os.path.isdir(os.path.join(final_checkpoint_path, d))
+        and re.match(r"^safetensors-\d+$", d)
     ]
     hf_converted_dir.sort(key=lambda name: int(name.split("-")[-1]))

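The reformatted filter in _get_hf_converted_path feeds a numeric sort: int(name.split("-")[-1]) orders the checkpoint directories by their numeric suffix, which a plain lexicographic sort would get wrong once the counter passes single digits. A self-contained illustration; the directory names here are made up for the example:

# Numeric vs. lexicographic ordering of safetensors-N directories;
# the sample names are hypothetical, not taken from a real checkpoint tree.
import re

dirs = ["safetensors-10", "safetensors-2", "safetensors-1"]
dirs = [d for d in dirs if re.match(r"^safetensors-\d+$", d)]

print(sorted(dirs))  # ['safetensors-1', 'safetensors-10', 'safetensors-2'] -- wrong order
dirs.sort(key=lambda name: int(name.split("-")[-1]))
print(dirs)          # ['safetensors-1', 'safetensors-2', 'safetensors-10'] -- numeric order
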
tuning/config/acceleration_configs/acceleration_framework_config.py

Lines changed: 3 additions & 0 deletions
@@ -240,6 +240,9 @@ def get_framework(self):
         try:
             with NamedTemporaryFile("w") as f:
                 self.to_yaml(f.name)
+
+                AccelerationFramework.active_plugins = []
+                AccelerationFramework.plugins_require_custom_loading = []
                 return AccelerationFramework(f.name)
         except ValueError as e:
             (msg,) = e.args
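
Resetting active_plugins and plugins_require_custom_loading on the class before constructing a new AccelerationFramework looks like a guard against stale class-level state leaking between instantiations, for example between tests running in the same CI process. A generic sketch of that failure mode, using a hypothetical Framework class rather than the real fms-acceleration API:

# Generic sketch of class-attribute state leaking across instances;
# Framework and its attributes are hypothetical stand-ins, not the real API.
class Framework:
    active_plugins = []  # class-level: shared by every instance

    def __init__(self, plugin):
        # Appending mutates the shared class attribute, so plugins
        # registered by one instance survive into the next one.
        Framework.active_plugins.append(plugin)

f1 = Framework("plugin-a")
print(Framework.active_plugins)  # ['plugin-a']

# Without a reset, a second construction would also see plugin-a:
Framework.active_plugins = []    # the kind of reset this commit adds
f2 = Framework("plugin-b")
print(Framework.active_plugins)  # ['plugin-b'], not ['plugin-a', 'plugin-b']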
