1 parent c129020 commit cb3658f
src/tests/test_batching.py
@@ -25,6 +25,7 @@ class Config:
     }
 
 
+@pytest.mark.skip_missing_tokenizer
 @patch('llama_recipes.finetuning.train')
 @patch('llama_recipes.finetuning.AutoTokenizer')
 @patch("llama_recipes.finetuning.AutoConfig.from_pretrained")
@@ -90,6 +91,7 @@ def test_packing(
     assert batch["attention_mask"][0].size(0) == 4096
 
 
+@pytest.mark.skip_missing_tokenizer
 @patch("llama_recipes.finetuning.torch.cuda.is_available")
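
The commit puts a custom pytest marker, skip_missing_tokenizer, in front of the mocked fine-tuning tests. A marker like this is inert on its own; something in conftest.py has to act on it. Below is a minimal sketch of one plausible wiring, assuming the dependency being probed is a gated Llama tokenizer -- the model id, helper name, and skip message are illustrative, not taken from this commit:

    # conftest.py -- hypothetical sketch; names below are assumptions, not from this commit.
    import pytest
    from transformers import AutoTokenizer

    LLAMA_MODEL = "meta-llama/Llama-2-7b-hf"  # assumed gated checkpoint the tests need

    def tokenizer_is_available():
        # Probe whether the gated tokenizer can actually be loaded.
        try:
            AutoTokenizer.from_pretrained(LLAMA_MODEL)
            return True
        except OSError:
            return False

    def pytest_configure(config):
        # Register the marker so pytest does not warn about an unknown mark.
        config.addinivalue_line(
            "markers",
            "skip_missing_tokenizer: skip test when the Llama tokenizer is unavailable",
        )

    @pytest.fixture(autouse=True)
    def skip_if_tokenizer_missing(request):
        # Any test carrying the marker is skipped when the tokenizer cannot be loaded.
        if request.node.get_closest_marker("skip_missing_tokenizer") and not tokenizer_is_available():
            pytest.skip("Llama tokenizer unavailable; request access to the gated model first.")

Gating through an autouse fixture rather than a module-level skipif defers the availability probe to run time, so test collection stays fast and tests without the marker are unaffected.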