From 268af8fcecc003cb826925395a88b07c3a5b313c Mon Sep 17 00:00:00 2001 From: "Bintang Alam Semesta W.A.M" <23573683+bintang-aswam@users.noreply.github.com> Date: Tue, 17 Jun 2025 07:21:04 +0700 Subject: [PATCH] (manually) update zero gradients after updating the weights Previous version reset the gradients with: a.grad, b.grad, c.grad, d.grad = None, None, None, None. This patch instead sets them to 0. rather than None: a.grad, b.grad, c.grad, d.grad = 0., 0., 0., 0. NOTE(review): PyTorch documents Tensor.grad as accepting a Tensor or None; assigning the Python float 0. may be rejected (TypeError) and, unlike None, a zero *tensor* would still be accumulated into on the next backward pass — confirm this change actually runs against the targeted PyTorch version before merging. --- .../examples_autograd/polynomial_custom_function.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/beginner_source/examples_autograd/polynomial_custom_function.py b/beginner_source/examples_autograd/polynomial_custom_function.py index 39057c8fd7a..3c986659a97 100755 --- a/beginner_source/examples_autograd/polynomial_custom_function.py +++ b/beginner_source/examples_autograd/polynomial_custom_function.py @@ -98,9 +98,10 @@ def backward(ctx, grad_output): d -= learning_rate * d.grad # Manually zero the gradients after updating weights - a.grad = None - b.grad = None - c.grad = None - d.grad = None + #a.grad = None + #b.grad = None + #c.grad = None + #d.grad = None + a.grad, b.grad, c.grad, d.grad = 0. , 0. , 0. , 0. print(f'Result: y = {a.item()} + {b.item()} * P3({c.item()} + {d.item()} x)')