# Learnable scalar weight for the linear model, initialized to 1.0.
# Gradients are tracked so autograd can compute d(loss)/dw.
w = torch.tensor(1.0)
w.requires_grad = True


def forward(x):
    """Bias-free linear model: return the prediction x * w."""
    return x * w


def loss(x, y):
    """Return the squared error between the prediction for x and target y."""
    y_pred = forward(x)
    return (y_pred - y) ** 2
# Per-epoch histories, recorded for the loss-curve plot at the end.
epoch_list, loss_list = [], []
# Train with plain per-sample SGD (lr = 0.01) for epochs 1..99, then plot
# the recorded loss curve.
# NOTE(review): original indentation was lost; the epoch-level appends and
# the post-loop plotting follow the standard tutorial structure — confirm.
for epoch in range(1, 100):
    for x, y in zip(x_data, y_data):
        sample_loss = loss(x, y)
        sample_loss.backward()  # accumulate d(loss)/dw into w.grad
        print(x, y, w.grad.item())
        w.data = w.data - 0.01 * w.grad.data  # manual SGD step on raw data
        w.grad.data.zero_()  # after update, remember set the grad to zero
    epoch_list.append(epoch)
    # Records the loss of the LAST sample seen this epoch, not an average.
    loss_list.append(sample_loss.item())
    print('progress:', epoch, sample_loss.item())

plt.plot(epoch_list, loss_list)
plt.xlabel('epoch')
plt.ylabel('loss')
plt.show()