Finetuning: update only the final fully-connected (fc) layer's parameters

Update only the parameters of the last layer.

import torch.nn as nn
import torch.optim as optim
from torchvision import models

model = models.vgg19(pretrained=True)
# Freeze all parameters; they will not be updated during finetuning
for param in model.parameters():
    param.requires_grad = False
# torchvision's VGG has no .fc attribute; its fully-connected layers live in
# model.classifier, and the last entry (index 6) is the final nn.Linear.
# torch.nn.Linear(in_features, out_features, bias=True) -- read its in_features (num_ftrs)
num_ftrs = model.classifier[6].in_features
# The training set has only two classes.
# A fresh nn.Linear's weights and bias have requires_grad=True by default.
model.classifier[6] = nn.Linear(num_ftrs, 2)
# Update only the new layer's parameters by passing just model.classifier[6].parameters()
optimizer = optim.SGD(model.classifier[6].parameters(), lr=0.001, momentum=0.9)
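
As a quick sanity check (a minimal sketch, not part of the original snippet), you can list the parameters that still have requires_grad=True and confirm that only the replaced layer will be trained:

# Sanity check: only the new final layer's weight and bias should be trainable
trainable = [name for name, p in model.named_parameters() if p.requires_grad]
print(trainable)  # expected: ['classifier.6.weight', 'classifier.6.bias']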

Alternatively, leave the original network unchanged and append extra layers after it, like this:

import torch.nn as nn
from torchvision import models

class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.vgg = models.vgg19(pretrained=True)
        # Freeze the whole pretrained backbone
        for para in self.vgg.parameters():
            para.requires_grad = False
        self.relu = nn.ReLU(inplace=True)
        self.fc_1000_2 = nn.Linear(1000, 2)  # requires_grad is True by default

    def forward(self, x):
        vgg_out = self.vgg(x)        # VGG's original 1000-way output
        out = self.relu(vgg_out)
        out = self.fc_1000_2(out)    # extra layer on top of VGG's output: 1000 --> 2
        return out
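
A minimal usage sketch (the optimizer call and the input shape are assumptions for illustration, not from the original): pass only the trainable parameters to the optimizer, since everything inside self.vgg is frozen.

import torch
import torch.optim as optim

net = Net()
# filter() keeps only parameters with requires_grad=True, i.e. the new fc_1000_2 layer
optimizer = optim.SGD(filter(lambda p: p.requires_grad, net.parameters()),
                      lr=0.001, momentum=0.9)

x = torch.randn(4, 3, 224, 224)  # VGG expects 224x224 RGB input
out = net(x)
print(out.shape)                 # torch.Size([4, 2])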