A Variable created in the usual way does not require gradients by default.

import torch
from torch.autograd import Variable
x_tensor = torch.randn(2, 3)

# convert the tensor to a Variable
x = Variable(x_tensor)
print(x.requires_grad)  # False
x = Variable(x_tensor, requires_grad=True)  # Variable does not require gradients by default; to track gradients, pass requires_grad=True explicitly
print(x)
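
As a side note, since PyTorch 0.4 the Variable wrapper has been merged into torch.Tensor, so requires_grad can be set on the tensor directly. The following is a minimal sketch of the same behavior without Variable (variable names y and z are just for illustration).

import torch

# plain tensors also default to requires_grad=False
y = torch.randn(2, 3)
print(y.requires_grad)  # False

# request gradient tracking at creation time ...
y = torch.randn(2, 3, requires_grad=True)
print(y.requires_grad)  # True

# ... or enable it in place on an existing tensor
z = torch.randn(2, 3)
z.requires_grad_(True)
print(z.requires_grad)  # True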