# lr_scheduler.pyi

from typing import Iterable, Any, Optional, Callable, Union, List, Dict

from .optimizer import Optimizer

class LRScheduler:
    optimizer: Optimizer = ...
    base_lrs: List[float] = ...
    last_epoch: int = ...
    verbose: bool = ...
    def __init__(self, optimizer: Optimizer, last_epoch: int = ..., verbose: bool = ...) -> None: ...
    def state_dict(self) -> Dict[str, Any]: ...
    def load_state_dict(self, state_dict: Dict[str, Any]) -> None: ...
    def get_last_lr(self) -> List[float]: ...
    def get_lr(self) -> List[float]: ...
    def step(self, epoch: Optional[int] = ...) -> None: ...
    def print_lr(self, is_verbose: bool, group: Dict[str, Any], lr: float, epoch: Optional[int] = ...) -> None: ...
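
# Usage sketch (assumption, not part of the stub): the LRScheduler subclasses
# below all follow the same protocol: wrap an optimizer, then call step() once
# per epoch after optimizer.step(). `model` and `train_one_epoch` are
# hypothetical placeholders.
#
#     optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
#     scheduler = StepLR(optimizer, step_size=30, gamma=0.1)
#     for epoch in range(100):
#         train_one_epoch(model, optimizer)
#         scheduler.step()
#     print(scheduler.get_last_lr())  # current lr, one entry per param group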

# Deprecated alias kept for backward compatibility; new code should subclass
# LRScheduler directly.
class _LRScheduler(LRScheduler):
    ...

class LambdaLR(LRScheduler):
    lr_lambdas: List[Callable[[int], float]] = ...
    def __init__(self, optimizer: Optimizer, lr_lambda: Union[Callable[[int], float], List[Callable[[int], float]]], last_epoch: int = ..., verbose: bool = ...) -> None: ...

class MultiplicativeLR(LRScheduler):
    lr_lambdas: List[Callable[[int], float]] = ...
    def __init__(self, optimizer: Optimizer, lr_lambda: Union[Callable[[int], float], List[Callable[[int], float]]], last_epoch: int = ..., verbose: bool = ...) -> None: ...

class StepLR(LRScheduler):
    step_size: int = ...
    gamma: float = ...
    def __init__(self, optimizer: Optimizer, step_size: int, gamma: float = ..., last_epoch: int = ..., verbose: bool = ...) -> None: ...

class MultiStepLR(LRScheduler):
    milestones: Iterable[int] = ...
    gamma: float = ...
    def __init__(self, optimizer: Optimizer, milestones: Iterable[int], gamma: float = ..., last_epoch: int = ..., verbose: bool = ...) -> None: ...

class ConstantLR(LRScheduler):
    factor: float = ...
    total_iters: int = ...
    def __init__(self, optimizer: Optimizer, factor: float = ..., total_iters: int = ..., last_epoch: int = ..., verbose: bool = ...) -> None: ...

class LinearLR(LRScheduler):
    start_factor: float = ...
    end_factor: float = ...
    total_iters: int = ...
    def __init__(self, optimizer: Optimizer, start_factor: float = ..., end_factor: float = ..., total_iters: int = ..., last_epoch: int = ..., verbose: bool = ...) -> None: ...

class ExponentialLR(LRScheduler):
    gamma: float = ...
    def __init__(self, optimizer: Optimizer, gamma: float, last_epoch: int = ..., verbose: bool = ...) -> None: ...

class ChainedScheduler(LRScheduler):
    def __init__(self, schedulers: List[LRScheduler]) -> None: ...

class SequentialLR(LRScheduler):
    def __init__(self, optimizer: Optimizer, schedulers: List[LRScheduler], milestones: List[int], last_epoch: int = ..., verbose: bool = ...) -> None: ...
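
# Composition sketch (assumption): SequentialLR runs one child scheduler at a
# time, switching at the given milestones, while ChainedScheduler steps all of
# its children together on every call. Example: 5 warmup epochs, then decay;
# `train_one_epoch` and `model` are hypothetical placeholders.
#
#     warmup = LinearLR(optimizer, start_factor=0.1, total_iters=5)
#     decay = ExponentialLR(optimizer, gamma=0.95)
#     scheduler = SequentialLR(optimizer, schedulers=[warmup, decay], milestones=[5])
#     for epoch in range(100):
#         train_one_epoch(model, optimizer)
#         scheduler.step()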

class CosineAnnealingLR(LRScheduler):
    T_max: int = ...
    eta_min: float = ...
    def __init__(self, optimizer: Optimizer, T_max: int, eta_min: float = ..., last_epoch: int = ..., verbose: bool = ...) -> None: ...

class ReduceLROnPlateau:
    factor: float = ...
    optimizer: Optimizer = ...
    min_lrs: List[float] = ...
    patience: int = ...
    verbose: bool = ...
    cooldown: int = ...
    cooldown_counter: int = ...
    mode: str = ...
    threshold: float = ...
    threshold_mode: str = ...
    best: Optional[float] = ...
    num_bad_epochs: Optional[int] = ...
    mode_worse: Optional[float] = ...
    eps: float = ...
    last_epoch: int = ...
    def __init__(self, optimizer: Optimizer, mode: str = ..., factor: float = ..., patience: int = ..., threshold: float = ..., threshold_mode: str = ..., cooldown: int = ..., min_lr: Union[List[float], float] = ..., eps: float = ..., verbose: bool = ...) -> None: ...
    def step(self, metrics: Any, epoch: Optional[int] = ...) -> None: ...
    @property
    def in_cooldown(self) -> bool: ...
    def is_better(self, a: Any, best: Any) -> bool: ...
    def state_dict(self) -> Dict[str, Any]: ...
    def load_state_dict(self, state_dict: Dict[str, Any]) -> None: ...
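
# Usage sketch (assumption): unlike the schedulers above, ReduceLROnPlateau
# has no LRScheduler base class and its step() takes the monitored metric.
# `validate` and `model` are hypothetical placeholders.
#
#     scheduler = ReduceLROnPlateau(optimizer, mode="min", factor=0.1, patience=10)
#     for epoch in range(100):
#         val_loss = validate(model)
#         scheduler.step(val_loss)  # lr is reduced after `patience` bad epochs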

class CyclicLR(LRScheduler):
    max_lrs: List[float] = ...
    total_size: float = ...
    step_ratio: float = ...
    mode: str = ...
    gamma: float = ...
    scale_mode: str = ...
    cycle_momentum: bool = ...
    base_momentums: List[float] = ...
    max_momentums: List[float] = ...
    def __init__(self, optimizer: Optimizer, base_lr: Union[float, List[float]], max_lr: Union[float, List[float]], step_size_up: int = ..., step_size_down: Optional[int] = ..., mode: str = ..., gamma: float = ..., scale_fn: Optional[Callable[[float], float]] = ..., scale_mode: str = ..., cycle_momentum: bool = ..., base_momentum: float = ..., max_momentum: float = ..., last_epoch: int = ..., verbose: bool = ...) -> None: ...
    def scale_fn(self, x: Any) -> float: ...

class CosineAnnealingWarmRestarts(LRScheduler):
    T_0: int = ...
    T_i: int = ...
    T_mult: Optional[int] = ...
    eta_min: Optional[float] = ...
    T_cur: Any = ...
    def __init__(self, optimizer: Optimizer, T_0: int, T_mult: int = ..., eta_min: float = ..., last_epoch: int = ..., verbose: bool = ...) -> None: ...
    def step(self, epoch: Optional[Any] = ...) -> None: ...
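
# Usage sketch (assumption): this step() also accepts fractional epoch values,
# so the restart schedule can be advanced per batch. `loader` and `train_step`
# are hypothetical placeholders.
#
#     scheduler = CosineAnnealingWarmRestarts(optimizer, T_0=10, T_mult=2)
#     for epoch in range(40):
#         for i, batch in enumerate(loader):
#             train_step(batch)
#             scheduler.step(epoch + i / len(loader))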

class OneCycleLR(LRScheduler):
    total_steps: int = ...
    anneal_func: Callable[[float, float, float], float] = ...
    cycle_momentum: bool = ...
    use_beta1: bool = ...
    def __init__(self, optimizer: Optimizer, max_lr: Union[float, List[float]], total_steps: int = ..., epochs: int = ..., steps_per_epoch: int = ..., pct_start: float = ..., anneal_strategy: str = ..., cycle_momentum: bool = ..., base_momentum: Union[float, List[float]] = ..., max_momentum: Union[float, List[float]] = ..., div_factor: float = ..., final_div_factor: float = ..., three_phase: bool = ..., last_epoch: int = ..., verbose: bool = ...) -> None: ...
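
# Usage sketch (assumption): OneCycleLR is designed to be stepped after every
# batch rather than every epoch. `loader` and `train_step` are hypothetical
# placeholders.
#
#     scheduler = OneCycleLR(optimizer, max_lr=0.01, epochs=10, steps_per_epoch=len(loader))
#     for epoch in range(10):
#         for batch in loader:
#             train_step(batch)
#             scheduler.step()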

class PolynomialLR(LRScheduler):
    total_iters: int = ...
    power: float = ...
    def __init__(self, optimizer: Optimizer, total_iters: int = ..., power: float = ..., last_epoch: int = ..., verbose: bool = ...) -> None: ...