%reset -f

import torch
import torch.nn as nn
import torchvision
import torchvision.transforms as transforms
import numpy as np
import matplotlib.pyplot as plt
import torch.utils.data as data_utils
import torch.nn.functional as F

# XOR inputs and labels
x1 = np.array([0, 0])
x2 = np.array([0, 1])
x3 = np.array([1, 0])
x4 = np.array([1, 1])

num_epochs = 200

x = torch.tensor([x1, x2, x3, x4]).float()
y = torch.tensor([0, 1, 1, 0]).long()

train = data_utils.TensorDataset(x, y)
train_loader = data_utils.DataLoader(train, batch_size=2, shuffle=True)

device = 'cpu'

input_size = 2
hidden_size = 100
num_classes = 2
learning_rate = .0001

torch.manual_seed(24)

def weights_init(m):
    m.weight.data.normal_(0.0, 1)

class NeuralNet(nn.Module):
    def __init__(self, input_size, hidden_size, num_classes):
        super(NeuralNet, self).__init__()
        self.fc1 = nn.Linear(input_size, hidden_size)
        self.relu = nn.ReLU()
        self.fc2 = nn.Linear(hidden_size, num_classes)

    def forward(self, x):
        out = self.fc1(x)
        out = self.relu(out)
        out = self.fc2(out)
        return out

model = NeuralNet(input_size, hidden_size, num_classes).to(device)
model.apply(weights_init)

criterionCE = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)

# Training loop
for i in range(0, 1):
    total_step = len(train_loader)
    for epoch in range(num_epochs):
        for i, (images, labels) in enumerate(train_loader):
            images = images.to(device)
            labels = labels.to(device)

            outputs = model(images)
            loss = criterionCE(outputs, labels)

            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

    outputs = model(x)
    print(outputs.data.max(1)[1])
I use the following function to initialize the weights:
def weights_init(m):
    m.weight.data.normal_(0.0, 1)
but I get the following error:
~/anaconda3/envs/pytorch/lib/python3.7/site-packages/torch/nn/modules/module.py in __getattr__(self, name)
    533                 return modules[name]
    534         raise AttributeError("'{}' object has no attribute '{}'".format(
--> 535             type(self).__name__, name))
    536 
    537     def __setattr__(self, name, value):

AttributeError: 'ReLU' object has no attribute 'weight'
Is this the correct way to initialize the weights?
Also, shouldn't the object passed to the function be of type nn.Module rather than ReLU?
Answer:
You are trying to set the weights of a layer that has no weights (ReLU).
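The reason the error reaches ReLU at all is that model.apply() recursively calls the given function on every submodule, not only on the Linear layers. A throwaway sketch (using a small nn.Sequential stand-in, not your NeuralNet) makes the traversal visible:

import torch.nn as nn

# apply() visits every submodule (children first, then the module itself),
# so an init function is also handed the parameter-free ReLU, which has no .weight.
net = nn.Sequential(nn.Linear(2, 100), nn.ReLU(), nn.Linear(100, 2))
net.apply(lambda m: print(type(m).__name__, hasattr(m, 'weight')))
# prints:
#   Linear True
#   ReLU False        <- accessing m.weight here is what raises the AttributeError
#   Linear True
#   Sequential False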
Inside weights_init, you should check the type of the layer before touching its weights. For example:
def weights_init(m):
    if type(m) == nn.Linear:
        m.weight.data.normal_(0.0, 1)
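The same check can also be written with isinstance (which additionally matches subclasses of nn.Linear) and the torch.nn.init helpers. A sketch, assuming you also want the biases zeroed (that part is not in the original question):

import torch.nn as nn

def weights_init(m):
    # Only touch layers that actually carry weights; ReLU and the
    # container module itself are skipped.
    if isinstance(m, nn.Linear):
        nn.init.normal_(m.weight, mean=0.0, std=1.0)
        if m.bias is not None:
            nn.init.zeros_(m.bias)

model.apply(weights_init)  # same call as in the question; apply() returns the model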