diff --git a/ignite/handlers/lr_finder.py b/ignite/handlers/lr_finder.py
index 2b3e58c05ae..e3840d5da7d 100644
--- a/ignite/handlers/lr_finder.py
+++ b/ignite/handlers/lr_finder.py
@@ -542,7 +542,7 @@ def __init__(
         # override base_lrs
         self.base_lrs = start_lrs
 
-    def get_lr(self) -> List[float]:  # type: ignore[override]
+    def get_lr(self) -> List[float]:
         curr_iter = self.last_epoch + 1
         r = curr_iter / self.num_iter
         return [base_lr * (end_lr / base_lr) ** r for end_lr, base_lr in zip(self.end_lrs, self.base_lrs)]
diff --git a/ignite/handlers/param_scheduler.py b/ignite/handlers/param_scheduler.py
index d0d0cba4fd8..dee9a4116b8 100644
--- a/ignite/handlers/param_scheduler.py
+++ b/ignite/handlers/param_scheduler.py
@@ -7,7 +7,7 @@
 from collections import OrderedDict
 from copy import copy
 from pathlib import Path
-from typing import Any, cast, Dict, List, Mapping, Optional, Sequence, Tuple, Type, Union
+from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple, Type, Union
 
 import torch
 from torch.optim.lr_scheduler import CosineAnnealingWarmRestarts, ReduceLROnPlateau
@@ -992,7 +992,7 @@ def get_param(self) -> Union[float, List[float]]:
         """Method to get current optimizer's parameter value"""
         # Emulate context manager for pytorch>=1.4
         self.lr_scheduler._get_lr_called_within_step = True  # type: ignore[union-attr]
-        lr_list = cast(List[float], self.lr_scheduler.get_lr())
+        lr_list = self.lr_scheduler.get_lr()
         self.lr_scheduler._get_lr_called_within_step = False  # type: ignore[union-attr]
         if len(lr_list) == 1:
             return lr_list[0]
@@ -1670,7 +1670,7 @@ def __init__(
             _scheduler_kwargs["verbose"] = False
 
         self.scheduler = ReduceLROnPlateau(optimizer, **_scheduler_kwargs)
-        self.scheduler._reduce_lr = self._reduce_lr  # type: ignore[attr-defined]
+        self.scheduler._reduce_lr = self._reduce_lr  # type: ignore[method-assign]
 
         self._state_attrs += ["metric_name", "scheduler"]