PyTorch 求取梯度并实现一元线性回归

numpy和tensor相互转化并求取梯度

  • numpy 和 tensor 相互转化
    tensor 即张量，张量指多维矩阵（多维数组）

import torch
import numpy as np
from torch.autograd import Variable

# A tensor is a multi-dimensional matrix; torch.Tensor builds a float tensor
# from nested Python lists.
a = torch.Tensor([[2, 3], [4, 8], [7, 9]])
b_ = torch.LongTensor([[2, 3], [4, 8], [7, 9]])  # 64-bit integer tensor
c = torch.zeros((3, 2))   # 3x2 tensor filled with zeros
d = torch.randn((3, 2))   # 3x2 tensor drawn from a standard normal distribution

# Tensors support in-place element assignment: overwrite row 0, column 1 of a.
a[0, 1] = 100

# Convert a NumPy array into a torch tensor (the two share the same memory).
e = np.array([[2, 3], [4, 5]])
torch_e = torch.from_numpy(e)
  • 梯度计算
    梯度即目标函数对各变量求导后得到的数值
    Variable变量可以自动实现梯度求导
# Leaf tensors created with requires_grad=True are tracked by autograd so that
# d(output)/d(leaf) can be computed.  torch.tensor replaces the deprecated
# Variable wrapper (Variable was merged into Tensor in PyTorch 0.4); the
# numeric behavior is identical.
x = torch.tensor([1.0], requires_grad=True)
w = torch.tensor([2.0], requires_grad=True)
b = torch.tensor([3.0], requires_grad=True)

# Build the computation graph
y = w * x + b

# Compute the gradients

y.backward()  # autograd fills .grad with dy/dx = w, dy/dw = x, dy/db = 1
print(x.grad)
print(w.grad)
print(b.grad)

tensor([2.])
tensor([1.])
tensor([1.])

实现一元线性回归

  • 代码

import numpy as np
import torch
import torch.nn as nn
from torch import optim
from torch.autograd import Variable
import matplotlib.pyplot as plt

# Training data: 15 scalar (x, y) samples stored as float32 column vectors of
# shape (15, 1), the layout nn.Linear(1, 1) expects.
x_train = np.array(
    [[3.3], [4.4], [5.5], [6.71], [6.93], [4.168], [9.779], [6.182], [7.59], [2.167], [7.042], [10.791], [5.313],
     [7.997], [3.1]], dtype=np.float32)


y_train = np.array(
    [[1.7], [2.76], [2.09], [3.19], [1.694], [1.573], [3.366], [2.596], [2.53], [1.221], [2.827], [3.465], [1.65],
     [2.904], [1.3]], dtype=np.float32)

# Convert the NumPy arrays into torch tensors for training.
x_train=torch.from_numpy(x_train)
y_train=torch.from_numpy(y_train)


class LinearRegression(nn.Module):
    """One-variable linear regression, y = w*x + b, as a single Linear layer."""

    def __init__(self):
        super(LinearRegression, self).__init__()
        # Both the input and the output are 1-dimensional.
        self.linear = nn.Linear(1, 1)

    def forward(self, x):
        return self.linear(x)

# Pick the device once and move BOTH the model and the data to it.  The
# original moved only the model, so the inference call below received a CPU
# tensor and crashed whenever CUDA was available.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model = LinearRegression().to(device)

criterion = nn.MSELoss()                          # mean-squared-error loss
optimizer = optim.SGD(model.parameters(), lr=1e-3)

num_epochs = 1000
# The training tensors never change, so move them to the device once instead
# of re-wrapping them on every epoch (the Variable wrapper is also deprecated).
inputs = x_train.to(device)
target = y_train.to(device)
for epoch in range(num_epochs):
    #forward
    out = model(inputs)
    loss = criterion(out, target)
    #backward
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

    if (epoch+1)%20==0:
        print('Epoch[{}/{}], loss: {:.6f}'.format(epoch+1,num_epochs,loss.item()))


# Inference: switch to eval mode and disable gradient tracking; .cpu() before
# .numpy() so this also works when the model trained on the GPU.
model.eval()
with torch.no_grad():
    predict = model(inputs).cpu().numpy()
plt.plot(x_train.numpy(), y_train.numpy(), 'ro', label='Original data')
plt.plot(x_train.numpy(), predict, label='Fitting Line')
plt.legend()  # labels were set above but never displayed without this call
plt.show()

PyTorch 求取梯度并实现一元线性回归