Implementing the Simplest Linear Regression in PyTorch
Enter the following in your runtime environment:
import torch
from torch.autograd import Variable

# Prepare the data
x_data = [5.0, 7.0, 3.0]
y_data = [15.0, 21.0, 9.0]

# Initialize the weight with an arbitrary value and track its gradient
w = Variable(torch.Tensor([1.0]), requires_grad=True)
r = 0.001  # learning rate

# Define the model: a line through the origin, y = x * w
def forward(x):
    return x * w

# Define the loss: squared error for a single sample
def loss(x, y):
    y_pred = forward(x)
    return (y_pred - y) * (y_pred - y)

# Prediction for input 10 before training
print("predict (before training)", 10, forward(10).data[0])

# Training loop
for epoch in range(100):
    for x_val, y_val in zip(x_data, y_data):
        l = loss(x_val, y_val)
        l.backward()
        print("\tgrad: ", x_val, y_val, w.grad.data[0])
        w.data = w.data - r * w.grad.data
        # Manually zero the gradient after updating the weight
        w.grad.data.zero_()
    print("progress:", epoch, l.data[0])

# Prediction for input 10 after training
print("predict (after training)", 10, forward(10).data[0])
Program output (last two epochs shown):
progress: 98 tensor(1.4552e-11)
    grad: 5.0 15.0 tensor(-5.7220e-05)
    grad: 7.0 21.0 tensor(-0.0001)
    grad: 3.0 9.0 tensor(-2.2888e-05)
progress: 99 tensor(1.4552e-11)
predict (after training) 10 tensor(30.0000)
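For reference, the Variable wrapper has been merged into the Tensor class since PyTorch 0.4, so the same experiment can be written without it. The following is only a minimal sketch under that assumption: a plain tensor with requires_grad=True replaces Variable, the weight update runs inside torch.no_grad() so it is not itself tracked by autograd, and .item() replaces indexing into .data.

import torch

# Same data and learning rate as above
x_data = torch.tensor([5.0, 7.0, 3.0])
y_data = torch.tensor([15.0, 21.0, 9.0])
w = torch.tensor([1.0], requires_grad=True)  # weight, tracked by autograd
lr = 0.001

for epoch in range(100):
    for x_val, y_val in zip(x_data, y_data):
        l = (x_val * w - y_val) ** 2  # squared error for one sample
        l.backward()                  # accumulate dl/dw into w.grad
        with torch.no_grad():         # update the weight outside the graph
            w -= lr * w.grad
        w.grad.zero_()                # reset the gradient for the next sample

print("predict (after training)", 10, (10 * w).item())

Since the data, update rule, and learning rate are unchanged, the result should match the output above: w converges towards 3.0 and the prediction for input 10 approaches 30.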