
Commit cb3658f

Skip test_batching when tokenizer is missing
1 parent c129020

File tree

1 file changed (+2 lines, -0 lines)


src/tests/test_batching.py

Lines changed: 2 additions & 0 deletions
@@ -25,6 +25,7 @@ class Config:
         }
     }

+@pytest.mark.skip_missing_tokenizer
 @patch('llama_recipes.finetuning.train')
 @patch('llama_recipes.finetuning.AutoTokenizer')
 @patch("llama_recipes.finetuning.AutoConfig.from_pretrained")
@@ -90,6 +91,7 @@ def test_packing(
     assert batch["attention_mask"][0].size(0) == 4096


+@pytest.mark.skip_missing_tokenizer
 @patch("llama_recipes.finetuning.torch.cuda.is_available")
 @patch('llama_recipes.finetuning.train')
 @patch('llama_recipes.finetuning.AutoTokenizer')
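skip_missing_tokenizer is a custom pytest marker, so it only takes effect if the suite registers it and converts it into a skip at collection time. Below is a minimal sketch of how that wiring could look in conftest.py; the tokenizer_available probe and the model id it loads are assumptions for illustration, not taken from this commit.

# conftest.py -- hedged sketch, not the actual llama-recipes implementation
import pytest


def tokenizer_available() -> bool:
    # Hypothetical probe: try to load the tokenizer and report success.
    try:
        from transformers import AutoTokenizer
        AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf")
        return True
    except Exception:
        return False


def pytest_configure(config):
    # Register the marker so pytest --strict-markers accepts it.
    config.addinivalue_line(
        "markers",
        "skip_missing_tokenizer: skip the test when the tokenizer cannot be loaded",
    )


def pytest_collection_modifyitems(config, items):
    # Turn the marker into an actual skip when the tokenizer is unavailable.
    if tokenizer_available():
        return
    skip = pytest.mark.skip(reason="tokenizer is not available")
    for item in items:
        if item.get_closest_marker("skip_missing_tokenizer"):
            item.add_marker(skip)

With wiring like this in place, the two decorated tests in test_batching.py are still collected but reported as skipped on machines without tokenizer access, rather than failing.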
