xref: /aosp_15_r20/external/pytorch/torch/distributed/tensor/experimental/__init__.py (revision da0073e96a02ea20f0ac840b70461e3646d07c45)
1# mypy: allow-untyped-defs
2# Copyright (c) Meta Platforms, Inc. and affiliates
3from contextlib import contextmanager
4
5from torch.distributed.tensor._api import DTensor
6from torch.distributed.tensor.experimental._func_map import local_map
7from torch.distributed.tensor.experimental._register_sharding import register_sharding
8
9
# Names re-exported as the public surface of this experimental subpackage.
__all__ = ["implicit_replication", "local_map", "register_sharding"]
11
12
13@contextmanager
14def implicit_replication():
15    """
16    This context manager allows :class:`DTensor` to implicitly treat all non-DTensors (``torch.Tensor``)
17    in the program be replicate :class:`DTensor` s during the operator computation.
18
19    .. warning:: This might possible lead to incorrect results if ``torch.Tensor`` s are not replicated
20        in practice, please use it at your discretion.
21    """
22    try:
23        DTensor._op_dispatcher._allow_implicit_replication = True
24        yield
25    finally:
26        DTensor._op_dispatcher._allow_implicit_replication = False
27
28
29# Set namespace for exposed private names
30implicit_replication.__module__ = "torch.distributed.tensor.experimental"
31local_map.__module__ = "torch.distributed.tensor.experimental"
32register_sharding.__module__ = "torch.distributed.tensor.experimental"
33