From 615085c7ae4cf7f16d1f538a65de6c0aee744a7a Mon Sep 17 00:00:00 2001
From: Leo Milano
Date: Fri, 26 Sep 2025 13:12:04 -0700
Subject: [PATCH 1/3] Update polynomial_autograd.py to use exp(x) instead of
 sin(x) as the function to be learned

---
 .../examples_autograd/polynomial_autograd.py | 17 +++++++++++------
 1 file changed, 11 insertions(+), 6 deletions(-)

diff --git a/beginner_source/examples_autograd/polynomial_autograd.py b/beginner_source/examples_autograd/polynomial_autograd.py
index 525d0c33ce9..2297c129bbc 100755
--- a/beginner_source/examples_autograd/polynomial_autograd.py
+++ b/beginner_source/examples_autograd/polynomial_autograd.py
@@ -1,4 +1,4 @@
-"""
+r"""
 PyTorch: Tensors and autograd
 -------------------------------
 
@@ -27,8 +27,8 @@
 # Create Tensors to hold input and outputs.
 # By default, requires_grad=False, which indicates that we do not need to
 # compute gradients with respect to these Tensors during the backward pass.
-x = torch.linspace(-math.pi, math.pi, 2000, dtype=dtype)
-y = torch.sin(x)
+x = torch.linspace(-1, 1, 2000, dtype=dtype)
+y = torch.exp(x)  # A Taylor expansion would be 1 + x + (1/2) x**2 + (1/3!) x**3 + ...
 
 # Create random Tensors for weights. For a third order polynomial, we need
 # 4 weights: y = a + b x + c x^2 + d x^3
@@ -39,8 +39,8 @@
 c = torch.randn((), dtype=dtype, requires_grad=True)
 d = torch.randn((), dtype=dtype, requires_grad=True)
 
-learning_rate = 1e-6
-for t in range(2000):
+learning_rate = 1e-5
+for t in range(5000):
     # Forward pass: compute predicted y using operations on Tensors.
     y_pred = a + b * x + c * x ** 2 + d * x ** 3
 
@@ -48,8 +48,13 @@
     # Now loss is a Tensor of shape (1,)
     # loss.item() gets the scalar value held in the loss.
     loss = (y_pred - y).pow(2).sum()
+    
+    # Calculate the initial loss, so we can report loss relative to it
+    if t == 0:
+        initial_loss = loss.item()
+
     if t % 100 == 99:
-        print(t, loss.item())
+        print(f'Iteration t = {t:4d} loss(t)/loss(0) = {round(loss.item()/initial_loss, 6):10.6f} a = {a.item():10.6f} b = {b.item():10.6f} c = {c.item():10.6f} d = {d.item():10.6f}')
 
     # Use autograd to compute the backward pass. This call will compute the
     # gradient of loss with respect to all Tensors with requires_grad=True.

From fc6089b4192adecf3689954d739a128c9ac7567f Mon Sep 17 00:00:00 2001
From: Leo Milano
Date: Fri, 26 Sep 2025 16:34:25 -0700
Subject: [PATCH 2/3] Properly initialize initial_loss variable in
 polynomial_autograd.py

---
 beginner_source/examples_autograd/polynomial_autograd.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/beginner_source/examples_autograd/polynomial_autograd.py b/beginner_source/examples_autograd/polynomial_autograd.py
index 2297c129bbc..ada4ac8c314 100755
--- a/beginner_source/examples_autograd/polynomial_autograd.py
+++ b/beginner_source/examples_autograd/polynomial_autograd.py
@@ -39,6 +39,7 @@
 c = torch.randn((), dtype=dtype, requires_grad=True)
 d = torch.randn((), dtype=dtype, requires_grad=True)
 
+initial_loss = 1.
 learning_rate = 1e-5
 for t in range(5000):
     # Forward pass: compute predicted y using operations on Tensors.
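Patch 2 above exists because patch 1 assigned initial_loss only inside the
loop, under "if t == 0:". That guard always fires on the first iteration, but
without a binding before the loop a linter (and a careful reader) sees a
potentially-unbound name at the print() call. A minimal standalone sketch of
the relative-loss reporting pattern follows; compute_loss() is a hypothetical
stand-in for the tutorial's forward pass and squared-error loss, not part of
the patch.

    # Sketch of the relative-loss reporting pattern used by the patches.
    # compute_loss() is a hypothetical placeholder, not the tutorial's code.
    def compute_loss(t: int) -> float:
        return 100.0 / (t + 1)  # any decreasing positive series will do

    initial_loss = 1.  # bind before the loop so the name always exists
    for t in range(500):
        loss = compute_loss(t)
        if t == 0:
            initial_loss = loss  # capture loss(0) on the first iteration
        if t % 100 == 99:
            print(f'Iteration t = {t:4d} loss(t)/loss(0) = {loss / initial_loss:10.6f}')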
From 60d203050b6be4a8e7a310e9e9e0f437388a8905 Mon Sep 17 00:00:00 2001
From: Leo Milano
Date: Mon, 29 Sep 2025 11:48:53 -0700
Subject: [PATCH 3/3] Remove trailing whitespace as suggested by lintrunner

---
 beginner_source/examples_autograd/polynomial_autograd.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/beginner_source/examples_autograd/polynomial_autograd.py b/beginner_source/examples_autograd/polynomial_autograd.py
index ada4ac8c314..d33ca8bcb90 100755
--- a/beginner_source/examples_autograd/polynomial_autograd.py
+++ b/beginner_source/examples_autograd/polynomial_autograd.py
@@ -49,7 +49,7 @@
     # Now loss is a Tensor of shape (1,)
     # loss.item() gets the scalar value held in the loss.
     loss = (y_pred - y).pow(2).sum()
-    
+
     # Calculate the initial loss, so we can report loss relative to it
     if t == 0:
        initial_loss = loss.item()
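Taken together, the three patches leave the training loop roughly as sketched
below. Everything outside the hunks above (the imports, the backward pass, and
the manual gradient-descent update) is assumed from the upstream tutorial file
rather than shown in the diffs, so those parts may differ in detail.

    # Minimal sketch of polynomial_autograd.py after all three patches.
    # The update step at the bottom is assumed from the upstream tutorial.
    import torch

    dtype = torch.float

    # Inputs and targets: learn exp(x) on [-1, 1] with a cubic polynomial.
    x = torch.linspace(-1, 1, 2000, dtype=dtype)
    y = torch.exp(x)  # Taylor expansion: 1 + x + (1/2) x**2 + (1/3!) x**3 + ...

    # Random initial coefficients for y = a + b x + c x^2 + d x^3.
    a = torch.randn((), dtype=dtype, requires_grad=True)
    b = torch.randn((), dtype=dtype, requires_grad=True)
    c = torch.randn((), dtype=dtype, requires_grad=True)
    d = torch.randn((), dtype=dtype, requires_grad=True)

    initial_loss = 1.
    learning_rate = 1e-5
    for t in range(5000):
        # Forward pass and sum-of-squares loss.
        y_pred = a + b * x + c * x ** 2 + d * x ** 3
        loss = (y_pred - y).pow(2).sum()

        # Capture loss(0) so later values can be reported relative to it.
        if t == 0:
            initial_loss = loss.item()

        if t % 100 == 99:
            print(f'Iteration t = {t:4d} '
                  f'loss(t)/loss(0) = {loss.item() / initial_loss:10.6f} '
                  f'a = {a.item():10.6f} b = {b.item():10.6f} '
                  f'c = {c.item():10.6f} d = {d.item():10.6f}')

        # Backward pass: autograd populates a.grad, b.grad, c.grad, d.grad.
        loss.backward()

        # Manual gradient-descent step, untracked by autograd; then clear the
        # gradients for the next iteration.
        with torch.no_grad():
            for w in (a, b, c, d):
                w -= learning_rate * w.grad
                w.grad = None

Since exp(x) is approximately 1 + x + (1/2) x**2 + (1/6) x**3 near zero, the
fitted coefficients should land in the neighborhood of a = 1, b = 1, c = 0.5,
d = 0.167, though a least-squares fit over all of [-1, 1] will not match the
Taylor coefficients exactly.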