What does oscillating loss mean? #505
manaswimancha asked this question in Q&A · Unanswered · 1 comment

manaswimancha: I am trying to do linear regression. Why are my predictions incorrect?
Reply:

Hi @manaswimancha, PyTorch (and neural networks in general) tends to like values between 0 and 1. I edited your code slightly and it turns out your model performs quite well (when trained for 1000 epochs instead of 100):

```python
# Import dependencies
import torch
import torch.nn as nn
import torch.optim as optim
import matplotlib.pyplot as plt

# Setting a seed
torch.manual_seed(0)

# Generate data
x = torch.arange(0, 1, 0.01)
y = 0.3 * x + 0.9

# # Visualize generated data
# plt.scatter(x, y, c="b", s=5)
# plt.title("Population over time")
# plt.xlabel("Years")
# plt.ylabel("Count")
# plt.show()

# Split training & testing sets
x_train = x[:int(0.8 * x.size(dim=0))].float()
x_test = x[int(0.8 * x.size(dim=0)):].float()
y_train = y[:int(0.8 * y.size(dim=0))].float()
y_test = y[int(0.8 * y.size(dim=0)):].float()

# Build neural network
class Model(nn.Module):
    # Initialize weights & bias
    def __init__(self):
        super().__init__()
        self.weight = nn.Parameter(torch.randn(1, requires_grad=True))
        self.bias = nn.Parameter(torch.randn(1, requires_grad=True))

    # Define forward propagation
    def forward(self, x):
        return self.weight * x + self.bias

# Instantiate neural network
model = Model()

# Define loss & optimizer functions
loss_func = nn.L1Loss()
optimizer = optim.SGD(model.parameters(), lr=0.01)

# Store experiment data
epochs = []
losses = []
# test_losses = []

# Train model
model.train()

# Set training epochs
for epoch in range(1000):
    # Count epoch
    epochs.append(epoch + 1)
    # Forward pass
    train_preds = model(x_train)
    # print(train_preds)
    # Calculate loss
    loss = loss_func(train_preds, y_train)
    losses.append(loss.detach().numpy())
    # Reset optimizer
    optimizer.zero_grad()
    # Backpropagation
    loss.backward()
    # Update parameters
    optimizer.step()

print(model.state_dict())
print(loss)

# # Evaluate model
# model.eval()
# # Set inference mode
# with torch.inference_mode():
#     # Forward pass
#     y_test_preds = model(x_test)
#     # Calculate loss
#     test_loss = loss_func(y_test_preds, y_test)

# Visualize performance
plt.plot(epochs, losses, "b")
# plt.plot(epochs, test_losses, "g")
plt.title("Losses over time")
plt.xlabel("Epochs")
plt.ylabel("Mean Absolute Error")
plt.show()

# # Visualize predictions
# preds = model(x)
# plt.scatter(x, y, c="b", s=5)
# plt.scatter(x, preds.detach().numpy(), c="g", s=5)
# plt.title("Population predictions vs actual")
# plt.xlabel("Years")
# plt.ylabel("Count")
# plt.show()

# Display weights
params = list(model.parameters())
print(f"Actual Formula: 0.3x+0.9")
print(f"Predicted Formula: {params[0].data.numpy()[0]}x+{params[1].data.numpy()[0]}")
```

See the notebook here: https://colab.research.google.com/drive/1VyZzXv7DUTqFxx9lScIsWl9ZtfKen6Kg?usp=sharing

In general, there are a few things you can do to improve your models.
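For example, one way to act on the "values between 0 and 1" point when the raw data is not already in that range (say, years running 0–99 and populations following 3x + 9) is min-max scaling. A minimal sketch; x_raw, y_raw, and min_max_scale are illustrative names, not taken from the notebook above:

```python
import torch

# Hypothetical raw data that is NOT already in the 0-1 range
x_raw = torch.arange(0, 100, 1.0)   # e.g. years
y_raw = 3 * x_raw + 9               # e.g. population counts

def min_max_scale(t):
    # (t - min) / (max - min) squeezes any tensor into [0, 1]
    return (t - t.min()) / (t.max() - t.min())

x_scaled = min_max_scale(x_raw)
y_scaled = min_max_scale(y_raw)

print(x_scaled.min().item(), x_scaled.max().item())  # 0.0 1.0
print(y_scaled.min().item(), y_scaled.max().item())  # 0.0 1.0
```

If you scale the targets this way, keep y_raw.min() and y_raw.max() around so predictions can be mapped back to the original units afterwards.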
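As for the title question: a loss curve that bounces up and down from epoch to epoch usually means the optimizer's step size is too large relative to the scale of the data, so the parameters keep overshooting the minimum instead of settling into it. A minimal sketch that reproduces the effect, reusing the Model class, data, and loss from the reply above with a deliberately oversized learning rate (lr=1.0 is just an illustrative choice):

```python
# Provoke an oscillating loss curve with a step size that is far too large
osc_model = Model()
osc_optimizer = optim.SGD(osc_model.parameters(), lr=1.0)

osc_losses = []
for epoch in range(100):
    preds = osc_model(x_train)
    loss = loss_func(preds, y_train)
    osc_losses.append(loss.item())
    osc_optimizer.zero_grad()
    loss.backward()
    osc_optimizer.step()

# The curve typically drops at first, then keeps bouncing around the minimum
# instead of flattening out; that bouncing is what an oscillating loss looks like.
plt.plot(range(1, 101), osc_losses, "r")
plt.title("Oscillating loss from a too-large learning rate")
plt.xlabel("Epochs")
plt.ylabel("Mean Absolute Error")
plt.show()
```

Dropping the learning rate (or scaling the data, as above) shrinks each update and lets the loss settle instead of oscillating.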