By zll On 2018-03-25 13:30:45 Under Deep Learning Tags: #Pytorch

Finetune: update only the fc layer's parameters

Update only the parameters of the last layer:

```python
import torch.nn as nn
import torch.optim as optim
from torchvision import models

model = models.vgg19(pretrained=True)
# Freeze all parameters; they will not be updated during finetuning
for param in model.parameters():
    param.requires_grad = False
# Note: VGG19 has no model.fc attribute; its fully connected layers live in
# model.classifier, and the final one is model.classifier[6].
# torch.nn.Linear(in_features, out_features, bias=True) -- read off
# in_features to get num_ftrs
num_ftrs = model.classifier[6].in_features
# The training set has only two classes.
# nn.Linear()'s weight and bias have requires_grad=True by default.
model.classifier[6] = nn.Linear(num_ftrs, 2)
# Only update the new final layer's parameters
optimizer = optim.SGD(model.classifier[6].parameters(), lr=0.001, momentum=0.9)
```

Alternatively, leave the original network unchanged and append a few layers after it, like this:

```python
class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.vgg = models.vgg19(pretrained=True)
        # Freeze all of VGG's parameters
        for para in self.vgg.parameters():
            para.requires_grad = False
        self.relu = nn.ReLU(inplace=True)
        self.fc_1000_2 = nn.Linear(1000, 2)  # requires_grad is True by default

    def forward(self, x):
        vgg_out = self.vgg(x)
        out = self.relu(vgg_out)
        out = self.fc_1000_2(out)  # one extra layer on VGG's output, 1000 --> 2
        return out
```
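The wrapper above does not show the optimizer. A minimal sketch of a matching setup (an assumption, not from the original post): pass the optimizer only the trainable parameters, e.g. by filtering on requires_grad, since older versions of torch.optim raise a ValueError when handed parameters that do not require gradients.

```python
# Minimal sketch (assumed setup): give the optimizer only the parameters
# that require gradients.
net = Net()
trainable = filter(lambda p: p.requires_grad, net.parameters())
# Only fc_1000_2's weight and bias end up here; the frozen VGG is skipped.
optimizer = optim.SGD(trainable, lr=0.001, momentum=0.9)
```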
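With either approach, it is easy to confirm that only the new layer will be updated. A quick illustrative check:

```python
# Illustrative sanity check: count trainable vs. total parameters.
num_trainable = sum(p.numel() for p in net.parameters() if p.requires_grad)
num_total = sum(p.numel() for p in net.parameters())
# For the Net above, num_trainable is 2002 (1000*2 weights + 2 biases),
# while num_total also includes all of VGG19's frozen weights.
print(num_trainable, num_total)
```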