Implementing Linear Regression in PyTorch
import numpy as np
import torch
import torch.nn as nn

# Synthetic data: y = 4x plus uniform noise, stored as float32 column vectors
x = np.linspace(0, 20, 100).astype(np.float32).reshape((-1, 1))
y = x * 4 + np.random.rand(*x.shape).astype(np.float32)
class LinearRegressionModel(nn.Module):
    def __init__(self, input_dim, output_dim):
        super(LinearRegressionModel, self).__init__()
        # A single fully connected layer: out = x * w + b
        self.linear = nn.Linear(input_dim, output_dim)

    def forward(self, x):
        out = self.linear(x)
        return out
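For reference, nn.Linear(1, 1) holds exactly one weight and one bias and computes out = x * w + b for each input row. A quick sanity check of the untrained module might look like the following sketch (hypothetical, not part of the original post):

m = LinearRegressionModel(1, 1)
sample = torch.tensor([[2.0]])
print(m(sample))                                    # one (random, untrained) output per input row
print(m.linear.weight.shape, m.linear.bias.shape)   # torch.Size([1, 1]) torch.Size([1])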
input_dim = 1
output_dim = 1
net = LinearRegressionModel(input_dim, output_dim)

# Move the model to the GPU when one is available, otherwise stay on the CPU
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
net = net.to(device)

criterion = nn.MSELoss()  # mean squared error loss
l_rate = 0.001
optimizer = torch.optim.SGD(net.parameters(), lr=l_rate)  # stochastic gradient descent
epochs = 10000
# Convert the numpy arrays to tensors once, outside the loop
inputs = torch.from_numpy(x).to(device)
labels = torch.from_numpy(y).to(device)

for epoch in range(1, epochs + 1):
    outputs = net(inputs)               # forward pass
    optimizer.zero_grad()               # clear gradients from the previous step
    loss = criterion(outputs, labels)
    loss.backward()                     # backpropagate
    optimizer.step()                    # update w and b
    if epoch % 200 == 0:
        print('epoch {}, loss {}, w {}, b {}'.format(
            epoch, loss.item(), net.linear.weight.item(), net.linear.bias.item()))
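After training, the learned parameters should land close to the slope of 4 used to generate the data, plus a small intercept from the uniform noise. A minimal evaluation sketch, assuming the net, x, y, and device defined above; the np.polyfit comparison is an extra sanity check, not part of the original post:

net.eval()
with torch.no_grad():
    preds = net(torch.from_numpy(x).to(device)).cpu().numpy()  # fitted line over x

# Learned parameters vs. an ordinary least-squares fit computed with numpy
print('learned w = {:.4f}, b = {:.4f}'.format(net.linear.weight.item(), net.linear.bias.item()))
w_ls, b_ls = np.polyfit(x.ravel(), y.ravel(), deg=1)
print('least-squares w = {:.4f}, b = {:.4f}'.format(w_ls, b_ls))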
Reposted from: https://www.jianshu.com/p/817d9b3137d1