1"""
2:mod:`torch.optim` is a package implementing various optimization algorithms.
3
4Most commonly used methods are already supported, and the interface is general
5enough, so that more sophisticated ones can also be easily integrated in the
6future.
7"""
8
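# Typical usage, shown here as a minimal sketch: construct an optimizer over a
# model's parameters, then step it inside the training loop. The names
# ``model``, ``loss_fn``, and ``dataset`` are assumed to be defined by the
# caller, and any optimizer class exported below can stand in for SGD::
#
#     optimizer = torch.optim.SGD(model.parameters(), lr=0.01, momentum=0.9)
#     for input, target in dataset:
#         optimizer.zero_grad()
#         loss = loss_fn(model(input), target)
#         loss.backward()
#         optimizer.step()
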
from torch.optim import lr_scheduler as lr_scheduler, swa_utils as swa_utils
from torch.optim._adafactor import Adafactor as Adafactor
from torch.optim.adadelta import Adadelta as Adadelta
from torch.optim.adagrad import Adagrad as Adagrad
from torch.optim.adam import Adam as Adam
from torch.optim.adamax import Adamax as Adamax
from torch.optim.adamw import AdamW as AdamW
from torch.optim.asgd import ASGD as ASGD
from torch.optim.lbfgs import LBFGS as LBFGS
from torch.optim.nadam import NAdam as NAdam
from torch.optim.optimizer import Optimizer as Optimizer
from torch.optim.radam import RAdam as RAdam
from torch.optim.rmsprop import RMSprop as RMSprop
from torch.optim.rprop import Rprop as Rprop
from torch.optim.sgd import SGD as SGD
from torch.optim.sparse_adam import SparseAdam as SparseAdam


# Adafactor is defined in the private ``torch.optim._adafactor`` module;
# setting ``__module__`` here presents it as part of the public
# ``torch.optim`` namespace instead.
Adafactor.__module__ = "torch.optim"


# The ``from torch.optim.<module> import <Class>`` statements above also bind
# each optimizer submodule (e.g. ``adam``) as an attribute of this package;
# remove those bindings so the submodules are not exposed as
# ``torch.optim.<module>``.
del adadelta  # type: ignore[name-defined] # noqa: F821
del adagrad  # type: ignore[name-defined] # noqa: F821
del adam  # type: ignore[name-defined] # noqa: F821
del adamw  # type: ignore[name-defined] # noqa: F821
del sparse_adam  # type: ignore[name-defined] # noqa: F821
del adamax  # type: ignore[name-defined] # noqa: F821
del asgd  # type: ignore[name-defined] # noqa: F821
del sgd  # type: ignore[name-defined] # noqa: F821
del radam  # type: ignore[name-defined] # noqa: F821
del rprop  # type: ignore[name-defined] # noqa: F821
del rmsprop  # type: ignore[name-defined] # noqa: F821
del optimizer  # type: ignore[name-defined] # noqa: F821
del nadam  # type: ignore[name-defined] # noqa: F821
del lbfgs  # type: ignore[name-defined] # noqa: F821

__all__ = [
    "Adafactor",
    "Adadelta",
    "Adagrad",
    "Adam",
    "Adamax",
    "AdamW",
    "ASGD",
    "LBFGS",
    "lr_scheduler",
    "NAdam",
    "Optimizer",
    "RAdam",
    "RMSprop",
    "Rprop",
    "SGD",
    "SparseAdam",
    "swa_utils",
]