Skip to content

Commit 2e04b5f

Browse files
committed
Fixed tests as per PR review
Signed-off-by: mikail <mkhona@nvidia.com>
1 parent 06fc893 commit 2e04b5f

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

tests/test_normalized_optimizer.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -141,7 +141,8 @@ def test_oblique_adam_zero_gradient(self) -> None:
141141
torch.nn.functional.normalize(param, p=2.0, dim=1, eps=1e-8, out=param)
142142
initial_param = param.clone()
143143

144-
param = torch.nn.Parameter(param)
144+
# Keep as tensor, not parameter, but enable gradients
145+
param.requires_grad_(True)
145146
optimizer = ObliqueAdam([param], lr=0.01, dim=1)
146147

147148
# Set zero gradient
@@ -220,7 +221,6 @@ def test_multiple_optimization_steps_preserve_norms(self) -> None:
220221

221222
# Perform multiple optimization steps
222223
for step in range(10):
223-
torch.manual_seed(step) # Different gradient each step
224224
param.grad = torch.randn_like(param.data, device=self.device)
225225
optimizer.step()
226226

0 commit comments

Comments
 (0)