Skip to content

Commit

Permalink
Load optimizer state from the checkpoint only when the training mode is enabled (#632)
Browse files Browse the repository at this point in the history
  • Loading branch information
andrey-churkin committed Apr 5, 2021
1 parent f635ee6 commit 83580b3
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions examples/classification/staged_quantization_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -183,9 +183,9 @@ def autoq_eval_fn(model, eval_loader):
best_acc1 = resuming_checkpoint['best_acc1']
kd_loss_calculator.original_model.load_state_dict(resuming_checkpoint['original_model_state_dict'])
compression_ctrl.scheduler.load_state(resuming_checkpoint['compression_scheduler'])
optimizer.load_state_dict(resuming_checkpoint['optimizer'])
optimizer_scheduler.load_state_dict(resuming_checkpoint['optimizer_scheduler'])
if config.mode.lower() == 'train':
optimizer.load_state_dict(resuming_checkpoint['optimizer'])
optimizer_scheduler.load_state_dict(resuming_checkpoint['optimizer_scheduler'])
logger.info("=> loaded checkpoint '{}' (epoch: {}, best_acc1: {:.3f})"
.format(resuming_checkpoint_path, resuming_checkpoint['epoch'], best_acc1))
else:
Expand Down

0 comments on commit 83580b3

Please sign in to comment.