reagent.optimizer package

Submodules

reagent.optimizer.optimizer module

reagent.optimizer.scheduler module

class reagent.optimizer.scheduler.LearningRateSchedulerConfig

Bases: object

REGISTRY = {
    'CosineAnnealingLR': <class 'reagent.optimizer.uninferrable_schedulers.CosineAnnealingLR'>,
    'CosineAnnealingWarmRestarts': <class 'reagent.optimizer.uninferrable_schedulers.CosineAnnealingWarmRestarts'>,
    'CyclicLR': <class 'reagent.optimizer.uninferrable_schedulers.CyclicLR'>,
    'ExponentialLR': <class 'reagent.optimizer.uninferrable_schedulers.ExponentialLR'>,
    'LambdaLR': <class 'reagent.optimizer.uninferrable_schedulers.LambdaLR'>,
    'MultiStepLR': <class 'reagent.optimizer.uninferrable_schedulers.MultiStepLR'>,
    'MultiplicativeLR': <class 'reagent.optimizer.uninferrable_schedulers.MultiplicativeLR'>,
    'OneCycleLR': <class 'reagent.optimizer.uninferrable_schedulers.OneCycleLR'>,
    'StepLR': <class 'reagent.optimizer.uninferrable_schedulers.StepLR'>}
REGISTRY_FROZEN = True
REGISTRY_NAME = 'LearningRateSchedulerConfig'
make_from_optimizer(optimizer: torch.optim.Optimizer) → torch.optim.lr_scheduler._LRScheduler
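
Subclasses register themselves in REGISTRY under their class name, and make_from_optimizer() instantiates the corresponding torch.optim.lr_scheduler class for a given optimizer. A minimal sketch of the intended usage, assuming the StepLR config documented below (the model/optimizer setup is purely illustrative):

    import torch
    from reagent.optimizer.uninferrable_schedulers import StepLR

    model = torch.nn.Linear(4, 2)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

    # Build the typed config, then materialize the actual torch scheduler.
    config = StepLR(step_size=10, gamma=0.5)
    scheduler = config.make_from_optimizer(optimizer)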

reagent.optimizer.scheduler_union module

class reagent.optimizer.scheduler_union.LearningRateScheduler__Union(LambdaLR: Optional[reagent.optimizer.uninferrable_schedulers.LambdaLR] = None, MultiplicativeLR: Optional[reagent.optimizer.uninferrable_schedulers.MultiplicativeLR] = None, StepLR: Optional[reagent.optimizer.uninferrable_schedulers.StepLR] = None, MultiStepLR: Optional[reagent.optimizer.uninferrable_schedulers.MultiStepLR] = None, ExponentialLR: Optional[reagent.optimizer.uninferrable_schedulers.ExponentialLR] = None, CosineAnnealingLR: Optional[reagent.optimizer.uninferrable_schedulers.CosineAnnealingLR] = None, CyclicLR: Optional[reagent.optimizer.uninferrable_schedulers.CyclicLR] = None, OneCycleLR: Optional[reagent.optimizer.uninferrable_schedulers.OneCycleLR] = None, CosineAnnealingWarmRestarts: Optional[reagent.optimizer.uninferrable_schedulers.CosineAnnealingWarmRestarts] = None)

Bases: reagent.core.tagged_union.TaggedUnion

CosineAnnealingLR = None
CosineAnnealingWarmRestarts = None
CyclicLR = None
ExponentialLR = None
LambdaLR = None
MultiStepLR = None
MultiplicativeLR = None
OneCycleLR = None
StepLR = None
make_from_optimizer(optimizer: torch.optim.Optimizer) → torch.optim.lr_scheduler._LRScheduler
make_union_instance(instance_class=None)
reagent.optimizer.scheduler_union.get_torch_lr_schedulers() → List[str]
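
Exactly one field of the union is expected to be set; make_from_optimizer() then delegates to the selected config. A hedged sketch, assuming the ExponentialLR config documented below (the optimizer setup is illustrative):

    import torch
    from reagent.optimizer.scheduler_union import (
        LearningRateScheduler__Union,
        get_torch_lr_schedulers,
    )
    from reagent.optimizer.uninferrable_schedulers import ExponentialLR

    # Names of the torch lr_schedulers covered by the union.
    print(get_torch_lr_schedulers())

    optimizer = torch.optim.Adam(torch.nn.Linear(4, 2).parameters(), lr=1e-3)

    # Select one scheduler variant by setting exactly one union field.
    union = LearningRateScheduler__Union(ExponentialLR=ExponentialLR(gamma=0.99))
    scheduler = union.make_from_optimizer(optimizer)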

reagent.optimizer.uninferrable_optimizers module

reagent.optimizer.uninferrable_schedulers module

This module contains configs that could not be inferred from the default values provided by PyTorch. If PyTorch optimizers and lr_schedulers had type annotations, everything could be inferred. Default values that cannot be inferred:

- tuple
- None
- required parameters (no default value)

Sometimes there are no defaults to infer from, so those parameters have to be declared explicitly here. TODO: remove this file once everything can be inferred.
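
As an illustration of the gap (the names below are hypothetical, not part of this module): torch.optim.lr_scheduler.StepLR accepts step_size with neither a default nor a type annotation, so its type cannot be inferred; the config classes in this module re-declare such parameters with explicit types:

    from dataclasses import dataclass

    # Hypothetical mirror of the StepLR config documented below. PyTorch's
    # StepLR.__init__(optimizer, step_size, gamma=0.1, last_epoch=-1)
    # gives step_size no default and no annotation.
    @dataclass
    class StepLRConfig:
        step_size: int        # required parameter: nothing to infer from
        gamma: float = 0.1    # inferrable from its float default
        last_epoch: int = -1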

class reagent.optimizer.uninferrable_schedulers.CosineAnnealingLR(T_max: int, eta_min: float = 0, last_epoch: int = -1)

Bases: reagent.optimizer.scheduler.LearningRateSchedulerConfig

eta_min = 0
last_epoch = -1
class reagent.optimizer.uninferrable_schedulers.CosineAnnealingWarmRestarts(T_0: int, T_mult: int = 1, eta_min: float = 0, last_epoch: int = -1)

Bases: reagent.optimizer.scheduler.LearningRateSchedulerConfig

T_mult = 1
eta_min = 0
last_epoch = -1
class reagent.optimizer.uninferrable_schedulers.CyclicLR

Bases: reagent.optimizer.scheduler.LearningRateSchedulerConfig

class reagent.optimizer.uninferrable_schedulers.ExponentialLR(gamma: float, last_epoch: int = -1)

Bases: reagent.optimizer.scheduler.LearningRateSchedulerConfig

last_epoch = -1
class reagent.optimizer.uninferrable_schedulers.LambdaLR

Bases: reagent.optimizer.scheduler.LearningRateSchedulerConfig

class reagent.optimizer.uninferrable_schedulers.MultiStepLR(milestones: List[int], gamma: float = 0.1, last_epoch: int = -1)

Bases: reagent.optimizer.scheduler.LearningRateSchedulerConfig

gamma = 0.1
last_epoch = -1
class reagent.optimizer.uninferrable_schedulers.MultiplicativeLR

Bases: reagent.optimizer.scheduler.LearningRateSchedulerConfig

class reagent.optimizer.uninferrable_schedulers.OneCycleLR(max_lr: Union[float, List[float]], total_steps: Optional[int] = None, epochs: Optional[int] = None, steps_per_epoch: Optional[int] = None, pct_start: float = 0.3, anneal_strategy: str = 'cos', cycle_momentum: bool = True, base_momentum: float = 0.85, max_momentum: float = 0.95, div_factor: float = 25.0, final_div_factor: float = 10000.0, last_epoch: int = -1)

Bases: reagent.optimizer.scheduler.LearningRateSchedulerConfig

anneal_strategy = 'cos'
base_momentum = 0.85
cycle_momentum = True
div_factor = 25.0
epochs = None
final_div_factor = 10000.0
last_epoch = -1
max_momentum = 0.95
pct_start = 0.3
steps_per_epoch = None
total_steps = None
class reagent.optimizer.uninferrable_schedulers.StepLR(step_size: int, gamma: float = 0.1, last_epoch: int = -1)

Bases: reagent.optimizer.scheduler.LearningRateSchedulerConfig

gamma = 0.1
last_epoch = -1
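
Each of these configs is constructed like an ordinary dataclass and turned into a torch scheduler via the inherited make_from_optimizer(). A brief sketch with MultiStepLR, whose container-typed milestones parameter is exactly the kind of field PyTorch's untyped defaults cannot express (the optimizer setup is illustrative):

    import torch
    from reagent.optimizer.uninferrable_schedulers import MultiStepLR

    optimizer = torch.optim.SGD(torch.nn.Linear(4, 2).parameters(), lr=0.1)

    # milestones is a List[int]: a container type that cannot be inferred
    # from PyTorch's default values.
    config = MultiStepLR(milestones=[30, 80], gamma=0.1)
    scheduler = config.make_from_optimizer(optimizer)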

reagent.optimizer.union module

reagent.optimizer.utils module

reagent.optimizer.utils.is_strict_subclass(a, b)
reagent.optimizer.utils.is_torch_lr_scheduler(cls)
reagent.optimizer.utils.is_torch_optimizer(cls)
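
A sketch of how these predicates might behave, assuming they test subclassing against torch.optim.Optimizer and torch.optim.lr_scheduler._LRScheduler, and that "strict" excludes the class itself (these semantics are assumptions, not confirmed by the source):

    import torch
    from reagent.optimizer.utils import (
        is_strict_subclass,
        is_torch_lr_scheduler,
        is_torch_optimizer,
    )

    assert is_torch_optimizer(torch.optim.Adam)
    assert is_torch_lr_scheduler(torch.optim.lr_scheduler.StepLR)
    assert is_strict_subclass(torch.optim.Adam, torch.optim.Optimizer)
    # "Strict" presumably excludes the class itself.
    assert not is_strict_subclass(torch.optim.Adam, torch.optim.Adam)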

Module contents