Skip to content

Commit 51b8bc7

Browse files
Apply suggestions from code review
Co-authored-by: Benjamin Bossan <[email protected]>
1 parent e067e2d commit 51b8bc7

File tree

1 file changed

+1
-2
lines changed

1 file changed

+1
-2
lines changed

tests/lora/utils.py

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1941,7 +1941,7 @@ def test_set_adapters_match_attention_kwargs(self):
19411941
)
19421942
self.assertTrue(
19431943
np.allclose(output_lora_scale, output_lora_scale_wo_kwargs, atol=1e-3, rtol=1e-3),
1944-
"Lora + scale should change match the output of `set_adapters()`.",
1944+
"Lora + scale should match the output of `set_adapters()`.",
19451945
)
19461946

19471947
with tempfile.TemporaryDirectory() as tmpdirname:
@@ -1960,7 +1960,6 @@ def test_set_adapters_match_attention_kwargs(self):
19601960
for module_name, module in modules_to_save.items():
19611961
self.assertTrue(check_if_lora_correctly_set(module), f"Lora not correctly set in {module_name}")
19621962

1963-
print(f"{attention_kwargs=}")
19641963
output_lora_from_pretrained = pipe(**inputs, generator=torch.manual_seed(0), **attention_kwargs)[0]
19651964
self.assertTrue(
19661965
not np.allclose(output_no_lora, output_lora_from_pretrained, atol=1e-3, rtol=1e-3),

0 commit comments

Comments (0)