Append a pretrained MLP head (model.mlp) to the end of the network and freeze its parameters:
# Load the pretrained weights into the MLP head. Note that assigning into
# model.mlp.state_dict()[key] only rebinds a dict entry and never updates the
# model; use load_state_dict instead (strict=False tolerates missing keys).
mlp_weights = {k: v for k, v in backbone_file.items() if k in model.mlp.state_dict()}
model.mlp.load_state_dict(mlp_weights, strict=False)

# Freeze the parameters that were just loaded from the checkpoint.
for name, param in model.mlp.named_parameters():
    if name in mlp_weights:
        param.requires_grad = False
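# Build the optimizer over trainable parameters only, so the frozen MLP
# weights are excluded from updates.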
optimizer = AdamW(filter(lambda p: p.requires_grad, model.parameters()), lr=args.LR, weight_decay=args.WEIGHT_DECAY)
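To sanity-check the freeze, one can confirm that the frozen MLP parameters do not appear in the optimizer's parameter groups. Below is a minimal, self-contained sketch; the toy model, the fake backbone_file checkpoint, and the learning rate are hypothetical stand-ins, not the actual training code.

import torch
import torch.nn as nn
from torch.optim import AdamW

class ToyModel(nn.Module):
    def __init__(self):
        super().__init__()
        self.backbone = nn.Linear(16, 8)           # trainable part
        self.mlp = nn.Sequential(nn.Linear(8, 4))  # pretrained head, to be frozen

model = ToyModel()
# Fake checkpoint whose keys match model.mlp.state_dict() ("0.weight", "0.bias").
backbone_file = {"0.weight": torch.randn(4, 8), "0.bias": torch.zeros(4)}

# Same loading / freezing pattern as above.
mlp_weights = {k: v for k, v in backbone_file.items() if k in model.mlp.state_dict()}
model.mlp.load_state_dict(mlp_weights, strict=False)
for name, param in model.mlp.named_parameters():
    if name in mlp_weights:
        param.requires_grad = False

optimizer = AdamW(filter(lambda p: p.requires_grad, model.parameters()), lr=1e-3)

# The frozen MLP weights should not appear in any optimizer param group.
trainable_ids = {id(p) for g in optimizer.param_groups for p in g["params"]}
assert all(id(p) not in trainable_ids for p in model.mlp.parameters())
print("frozen MLP params excluded from optimizer")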