# Gradient descent (the hard way): linear regression
import torch

# Since requires_grad=True, PyTorch tracks operations on w so it can compute gradients for it
w = torch.tensor(-10.0, requires_grad=True)

# Create values from -3 to 3 in increments of 0.1; view(-1, 1) adds an extra dimension (a column of samples)
X = torch.arange(-3, 3, 0.1).view(-1, 1)
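
Before using w, here is a quick illustrative check (a minimal sketch, not part of the original cell) of what requires_grad=True provides: calling backward() on a scalar built from w fills in w.grad automatically.

z = w ** 2           # a scalar built from w
z.backward()         # fills in w.grad with dz/dw
print(w.grad)        # tensor(-20.), since dz/dw = 2*w = -20 at w = -10
w.grad.data.zero_()  # reset the gradient so this demo does not leak into the training loop below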
# The true (noise-free) line: f(X) = -3X
f = -3 * X
import matplotlib.pyplot as plt
# The numpy() method converts a tensor to a NumPy array, which lets matplotlib plot it
plt.plot(X.numpy(), f.numpy())
plt.show()
# Add some random noise around the line to create the training targets
Y = f + 0.1 * torch.randn(X.size())
plt.plot(X.numpy(), Y.numpy(), 'ro')
plt.show()

# Define the prediction (forward pass): yhat = w * x
def forward(x):
    return w * x
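
As a quick illustrative call (the input value here is hypothetical): with the initial w = -10, predictions are still far from the true line.

print(forward(torch.tensor([1.0])))  # predicts -10 at x = 1, while the true line gives -3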

# Define the cost function: mean squared error
def criterion(yhat, y):
    return torch.mean((yhat - y) ** 2)
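
As an illustrative check (the values are made up), the criterion averages the squared differences elementwise:

yhat_demo = torch.tensor([1.0, 2.0])
y_demo = torch.tensor([0.0, 0.0])
print(criterion(yhat_demo, y_demo))  # ((1-0)**2 + (2-0)**2) / 2 = tensor(2.5000)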

lr = 0.1  # Learning rate
COST = []  # Store the loss from each of the 4 iterations
for epoch in range(4):
    Yhat = forward(X)
    loss = criterion(Yhat, Y)
    loss.backward()  # Computes the derivative of the loss with respect to every tensor that requires gradients
    # The .data attribute gives access to the values held in the tensor; .grad holds the derivative with respect to w
    w.data = w.data - lr * w.grad.data
    w.grad.data.zero_()  # Reset the gradient to zero, since PyTorch accumulates gradients across backward() calls
    COST.append(loss.item())  # item() extracts the loss as a plain Python number
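
A minimal follow-up sketch (not in the original cell) to inspect the run: plot the recorded losses, check the learned weight, and compare the fitted line with the data. Note that forward(X) tracks gradients, so detach() is needed before converting it to a NumPy array.

plt.plot(COST)
plt.xlabel('epoch')
plt.ylabel('loss (MSE)')
plt.show()

print(w.data)  # should be approaching tensor(-3.), the true slope

plt.plot(X.numpy(), Y.numpy(), 'ro', label='data')
plt.plot(X.numpy(), forward(X).detach().numpy(), label='fitted line')
plt.legend()
plt.show()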
   
import jovian
jovian.commit()