Commit 83b35e3

unit test fixes
Signed-off-by: adil-a <adil.asif2000@hotmail.com>
1 parent: 24f859a

3 files changed: +4 −3 lines

nemo_automodel/components/models/biencoder/llama_bidirectional_model.py

Lines changed: 1 addition & 1 deletion
@@ -177,7 +177,7 @@ def _update_causal_mask(
             return attention_mask
         return None
 
-    @check_model_inputs
+    @check_model_inputs()
     @auto_docstring
     def forward(
         self,
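
Note on the change: the decorator is now invoked with parentheses, which suggests check_model_inputs acts as a decorator factory in the transformers version these tests target. A minimal sketch of that pattern follows; the factory body and its options are hypothetical stand-ins, not the transformers internals:

import functools

def check_model_inputs(**options):
    # Hypothetical factory: returns the actual decorator, so it must be
    # applied as @check_model_inputs() rather than @check_model_inputs.
    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(self, *args, **kwargs):
            # ... input validation would run here ...
            return fn(self, *args, **kwargs)
        return wrapper
    return decorator

class Model:
    @check_model_inputs()  # the factory call produces the decorator
    def forward(self, x):
        return x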

tests/unit_tests/distributed/test_utils.py

Lines changed: 1 addition & 1 deletion
@@ -117,7 +117,7 @@ def test_get_sync_ctx(monkeypatch, patch_dist):
     class Plain(torch.nn.Linear):
         pass
 
-    ctx = du.get_sync_ctx(Plain(2, 2), is_optim_step=False)
+    ctx = du.get_sync_ctx(Plain(2, 2), is_optim_step=False, defer_fsdp_grad_sync=False)
     # entering/exiting the context must be a no-op
     with ctx:
         pass
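
Note on the change: get_sync_ctx gained a defer_fsdp_grad_sync keyword, and the test pins it to False for a plain module while still expecting a no-op context. A hedged sketch of the behavior the test relies on; the body below is an assumption for illustration, not the nemo_automodel implementation:

from contextlib import nullcontext

import torch

def get_sync_ctx(model, is_optim_step, defer_fsdp_grad_sync=True):
    # DDP can defer gradient all-reduce between optimizer steps via no_sync();
    # the new flag presumably gates analogous deferral for FSDP wrappers.
    if not is_optim_step and isinstance(model, torch.nn.parallel.DistributedDataParallel):
        return model.no_sync()
    # Plain (unwrapped) modules need no synchronization: entering and exiting
    # the returned context is a no-op, which is what the test asserts.
    return nullcontext()

ctx = get_sync_ctx(torch.nn.Linear(2, 2), is_optim_step=False, defer_fsdp_grad_sync=False)
with ctx:
    pass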

tests/unit_tests/recipes/test_finetune_vlm_helpers.py

Lines changed: 2 additions & 1 deletion
@@ -228,6 +228,7 @@ def test_run_train_step_supports_tensor_outputs(monkeypatch):
     recipe.cfg = _Cfg(fp8=None)
     recipe.lr_scheduler = None
     recipe.timestamp = 0.0
+    recipe.model_wrapper = None
 
     recipe._dp_allreduce = lambda tensor, include_cp=False: tensor
     recipe._get_dp_group_size = lambda include_cp=True: 1
@@ -251,7 +252,7 @@ def fake_calculate_loss(*args, **kwargs):
     )
     monkeypatch.setattr(
         "nemo_automodel.recipes.vlm.finetune.get_sync_ctx",
-        lambda model, is_last: nullcontext(),
+        lambda model, is_last, defer_fsdp_grad_sync=True: nullcontext(),
     )
 
     calculate_mock = MagicMock(side_effect=fake_calculate_loss)
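
Note on the change: the monkeypatched stand-in for get_sync_ctx must mirror the widened signature, since the recipe presumably now passes defer_fsdp_grad_sync at its call site (and recipe.model_wrapper = None likewise looks like it satisfies a new attribute the train step reads). A small illustration of why the old stub breaks; the call below is hypothetical, standing in for the recipe's internal call:

from contextlib import nullcontext

old_stub = lambda model, is_last: nullcontext()
new_stub = lambda model, is_last, defer_fsdp_grad_sync=True: nullcontext()

try:
    # A call site that passes the new keyword is rejected by the old stub.
    old_stub(model=None, is_last=False, defer_fsdp_grad_sync=False)
except TypeError as err:
    print(err)  # ... got an unexpected keyword argument 'defer_fsdp_grad_sync'

with new_stub(model=None, is_last=False, defer_fsdp_grad_sync=False):
    pass  # the widened stub accepts the keyword, so the patched test passes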
