diff --git a/opencood/tools/train.py b/opencood/tools/train.py
index fd612f59..ec6c6f07 100644
--- a/opencood/tools/train.py
+++ b/opencood/tools/train.py
@@ -111,7 +111,7 @@ def main():
     optimizer = train_utils.setup_optimizer(hypes, model_without_ddp)
     # lr scheduler setup
     num_steps = len(train_loader)
-    scheduler = train_utils.setup_lr_schedular(hypes, optimizer, num_steps)
+    scheduler = train_utils.setup_lr_scheduler(hypes, optimizer, num_steps)

     # record training
     writer = SummaryWriter(saved_path)
diff --git a/opencood/tools/train_utils.py b/opencood/tools/train_utils.py
index b88de8cf..39162795 100644
--- a/opencood/tools/train_utils.py
+++ b/opencood/tools/train_utils.py
@@ -199,7 +199,7 @@ def setup_optimizer(hypes, model):
                                 lr=method_dict['lr'])


-def setup_lr_schedular(hypes, optimizer, n_iter_per_epoch):
+def setup_lr_scheduler(hypes, optimizer, n_iter_per_epoch):
     """
     Set up the learning rate schedular.