def get_optimizer(self, optimizer_cfg):
    self.msg_mgr.log_info(optimizer_cfg)
    # Resolve the solver class (e.g. SGD, AdamW) from torch.optim by name,
    # and keep only the config keys that the solver's constructor accepts.
    optimizer = get_attr_from([optim], optimizer_cfg['solver'])
    valid_arg = get_valid_args(optimizer, optimizer_cfg, ['solver'])

    # Swin parameters that receive no weight decay.
    transformer_no_decay = ['patch_embed', 'norm', 'relative_position_bias_table']
    transformer_params = list(self.transformer.named_parameters())
    params_list = [
        # Swin backbone, no-decay subset: full learning rate, weight decay disabled.
        {'params': [p for n, p in transformer_params if any(nd in n for nd in transformer_no_decay)],
         'lr': optimizer_cfg['lr'], 'weight_decay': 0.},
        # Remaining Swin backbone parameters: full learning rate and weight decay.
        {'params': [p for n, p in transformer_params if not any(nd in n for nd in transformer_no_decay)],
         'lr': optimizer_cfg['lr'], 'weight_decay': optimizer_cfg['weight_decay']},
        # FC and BNNeck heads: 10x smaller learning rate.
        {'params': self.FCs.parameters(), 'lr': optimizer_cfg['lr'] * 0.1, 'weight_decay': optimizer_cfg['weight_decay']},
        {'params': self.BNNecks.parameters(), 'lr': optimizer_cfg['lr'] * 0.1, 'weight_decay': optimizer_cfg['weight_decay']},
    ]
    # Convolutional stages (layer0 ... layer4), if present, also use the reduced learning rate.
    for i in range(5):
        if hasattr(self, 'layer%d' % i):
            params_list.append(
                {'params': getattr(self, 'layer%d' % i).parameters(),
                 'lr': optimizer_cfg['lr'] * 0.1, 'weight_decay': optimizer_cfg['weight_decay']}
            )
    # The remaining solver arguments (e.g. momentum) are passed as group defaults.
    optimizer = optimizer(params_list, **valid_arg)
    return optimizer
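
For context only (not part of the quoted code): PyTorch optimizers accept a list of parameter-group dicts, and any hyperparameter set inside a group overrides the constructor defaults for that group. Below is a minimal, self-contained sketch of that mechanism; the module names `backbone` and `head` are made-up stand-ins for the Swin backbone and the FC/BNNeck heads.

import torch.nn as nn
import torch.optim as optim

# Hypothetical stand-ins for the backbone and the heads.
backbone = nn.Linear(16, 16)
head = nn.Linear(16, 8)

optimizer = optim.SGD(
    [
        # Backbone group: full learning rate, weight decay disabled.
        {'params': backbone.parameters(), 'lr': 1e-3, 'weight_decay': 0.},
        # Head group: 10x smaller learning rate, with weight decay.
        {'params': head.parameters(), 'lr': 1e-4, 'weight_decay': 5e-4},
    ],
    momentum=0.9,  # constructor kwargs become defaults for keys a group does not set
)

for group in optimizer.param_groups:
    print(group['lr'], group['weight_decay'])  # 0.001 0.0, then 0.0001 0.0005

Learning-rate schedulers also operate per group, so each group's lr is scaled independently during training.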
Why does the optimizer in SwinGait need such a fine-grained, separate grouping of its parameters?