glenn-jocher committed
Commit b7fe1d0
2 Parent(s): 762b06d 956511d

Merge pull request #300 from Laughing-q/master

Files changed (1): train.py +4 -3
train.py CHANGED
@@ -101,6 +101,9 @@ def train(hyp):
         optim.SGD(pg0, lr=hyp['lr0'], momentum=hyp['momentum'], nesterov=True)
     optimizer.add_param_group({'params': pg1, 'weight_decay': hyp['weight_decay']})  # add pg1 with weight_decay
     optimizer.add_param_group({'params': pg2})  # add pg2 (biases)
+    # Scheduler https://arxiv.org/pdf/1812.01187.pdf
+    lf = lambda x: (((1 + math.cos(x * math.pi / epochs)) / 2) ** 1.0) * 0.9 + 0.1  # cosine
+    scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf)
     print('Optimizer groups: %g .bias, %g conv.weight, %g other' % (len(pg2), len(pg1), len(pg0)))
     del pg0, pg1, pg2
 
@@ -144,9 +147,7 @@ def train(hyp):
     if mixed_precision:
         model, optimizer = amp.initialize(model, optimizer, opt_level='O1', verbosity=0)
 
-    # Scheduler https://arxiv.org/pdf/1812.01187.pdf
-    lf = lambda x: (((1 + math.cos(x * math.pi / epochs)) / 2) ** 1.0) * 0.9 + 0.1  # cosine
-    scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf)
+
     scheduler.last_epoch = start_epoch - 1  # do not move
     # https://discuss.pytorch.org/t/a-problem-occured-when-resuming-an-optimizer/28822
     # plot_lr_scheduler(optimizer, scheduler, epochs)
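For reference, the relocated block builds the cosine learning-rate schedule from arXiv 1812.01187, which decays the learning rate from lr0 down to 0.1 × lr0 over `epochs`; the commit hoists its construction above the resume logic so that `scheduler` exists before `scheduler.last_epoch = start_epoch - 1` runs. Below is a minimal runnable sketch of the same schedule in isolation; the `epochs`, learning rate, momentum, and toy model are assumed stand-ins (in train.py these come from `hyp` and the real model), not values from the diff.

```python
# Minimal sketch of the cosine LR schedule moved in this commit.
# epochs, lr, momentum, and the model below are assumed stand-ins.
import math

import torch
from torch import optim
from torch.optim import lr_scheduler

epochs = 300                                   # assumed total training epochs
model = torch.nn.Linear(10, 2)                 # toy model for illustration
optimizer = optim.SGD(model.parameters(), lr=0.01, momentum=0.9, nesterov=True)

# Same lambda as the diff: cosine decay from 1.0x to 0.1x of the base lr.
lf = lambda x: (((1 + math.cos(x * math.pi / epochs)) / 2) ** 1.0) * 0.9 + 0.1
scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf)

# The scheduler must already exist before last_epoch is rewound on resume,
# which is why the commit moves its construction above this line.
start_epoch = 0
scheduler.last_epoch = start_epoch - 1

for epoch in range(start_epoch, 3):            # shortened loop for the demo
    optimizer.step()                           # optimizer.step() before scheduler.step()
    scheduler.step()
    print(epoch, scheduler.get_last_lr())      # lr follows the cosine curve
```

In the pre-patch code the scheduler was only defined after `amp.initialize`, so any path that reached `scheduler.last_epoch = start_epoch - 1` without executing those lines would raise a NameError; defining it immediately after the optimizer groups removes that ordering hazard.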