From 4c19b3bb4271c0568ee629981b828f9ff84829d6 Mon Sep 17 00:00:00 2001 From: Andy <929910266@qq.com> Date: Thu, 14 Nov 2019 16:51:10 +0800 Subject: [PATCH] fix initial_lr is not specified in param_groups[0] when resuming --- trainer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trainer.py b/trainer.py index 4f6c78c..37d0812 100644 --- a/trainer.py +++ b/trainer.py @@ -113,7 +113,7 @@ def main(): model.half() criterion.half() - optimizer = torch.optim.SGD(model.parameters(), args.lr, + optimizer = torch.optim.SGD([{"params":model.parameters(),"initial_lr":args.lr}], lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay)