From 7ccad7d8fe416099bffaf6c2d2fefc3631d12167 Mon Sep 17 00:00:00 2001
From: proanimer <1322767102@qq.com>
Date: Thu, 28 Dec 2023 16:56:30 +0800
Subject: [PATCH] fix typo

---
 opencood/tools/train.py       | 2 +-
 opencood/tools/train_utils.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/opencood/tools/train.py b/opencood/tools/train.py
index fd612f59..ec6c6f07 100644
--- a/opencood/tools/train.py
+++ b/opencood/tools/train.py
@@ -111,7 +111,7 @@ def main():
     optimizer = train_utils.setup_optimizer(hypes, model_without_ddp)
     # lr scheduler setup
     num_steps = len(train_loader)
-    scheduler = train_utils.setup_lr_schedular(hypes, optimizer, num_steps)
+    scheduler = train_utils.setup_lr_scheduler(hypes, optimizer, num_steps)

     # record training
     writer = SummaryWriter(saved_path)
diff --git a/opencood/tools/train_utils.py b/opencood/tools/train_utils.py
index b88de8cf..39162795 100644
--- a/opencood/tools/train_utils.py
+++ b/opencood/tools/train_utils.py
@@ -199,7 +199,7 @@ def setup_optimizer(hypes, model):
                                 lr=method_dict['lr'])


-def setup_lr_schedular(hypes, optimizer, n_iter_per_epoch):
+def setup_lr_scheduler(hypes, optimizer, n_iter_per_epoch):
     """
     Set up the learning rate schedular.