/aosp_15_r20/external/pytorch/test/distributed/fsdp/
  test_fsdp_use_orig_params.py | 17 FullyShardedDataParallel as FSDP; 168 fsdp_model: FSDP; 199 def _check_ddp_fsdp_param_parity(self, ddp_model: DDP, fsdp_model: FSDP); 598 def _check_fsdp_parameter_parity(self, fsdp1: FSDP, fsdp2: FSDP) -> None; 785 ddp_model: DDP, fsdp_model: FSDP, between_fwd_and_bwd: bool; 884 def _check_param_parity(self, ddp_model: DDP, fsdp_model: FSDP)
  test_fsdp_tp_integration.py | 19 FullyShardedDataParallel as FSDP; 132 tp_fsdp_model: FSDP; 178 model: FSDP
  test_fsdp_unshard_params.py | 14 FullyShardedDataParallel as FSDP; 492 fsdp_model: FSDP; 544 def _get_fsdp_grads(fsdp_model: FSDP, is_supported: bool)
  test_fsdp_traversal.py | 6 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
  test_distributed_checkpoint.py | 13 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP, StateDictType
  test_fsdp_uneven.py | 7 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
  test_fsdp_multiple_wrapping.py | 7 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
  test_fsdp_input.py | 7 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
  test_fsdp_multiple_forward.py | 7 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
  test_fsdp_pure_fp16.py | 10 FullyShardedDataParallel as FSDP
  test_fsdp_apply.py | 8 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
  test_fsdp_backward_prefetch.py | 10 from torch.distributed.fsdp import BackwardPrefetch, FullyShardedDataParallel as FSDP
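Every file in the listing above pulls the wrapper in through the same aliased import, from torch.distributed.fsdp import FullyShardedDataParallel as FSDP. The sketch below is a minimal, hypothetical single-rank wrap touching two of the options these tests exercise (use_orig_params and backward prefetching); the toy model, rendezvous settings, and single-GPU NCCL setup are assumptions for illustration, not taken from the tests.

    # Minimal sketch of the aliased import and wrap pattern shared by these tests.
    # Assumes one CUDA device and the NCCL backend; everything concrete here
    # (model, addresses, flags) is illustrative only.
    import os

    import torch
    import torch.distributed as dist
    import torch.nn as nn
    from torch.distributed.fsdp import BackwardPrefetch, FullyShardedDataParallel as FSDP

    os.environ.setdefault("MASTER_ADDR", "127.0.0.1")
    os.environ.setdefault("MASTER_PORT", "29500")
    dist.init_process_group("nccl", rank=0, world_size=1)
    torch.cuda.set_device(0)

    model = nn.Sequential(nn.Linear(8, 8), nn.ReLU(), nn.Linear(8, 8))
    fsdp_model = FSDP(
        model,
        device_id=torch.cuda.current_device(),
        use_orig_params=True,  # the behavior test_fsdp_use_orig_params.py exercises
        backward_prefetch=BackwardPrefetch.BACKWARD_PRE,  # see test_fsdp_backward_prefetch.py
    )

    loss = fsdp_model(torch.randn(4, 8, device="cuda")).sum()
    loss.backward()
    dist.destroy_process_group()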
/aosp_15_r20/external/pytorch/test/distributed/_composable/fully_shard/
  test_fully_shard_init.py | 12 from torch.distributed.fsdp import BackwardPrefetch, FullyShardedDataParallel as FSDP; 112 fsdp_wrapped_model: FSDP
  test_fully_shard_optim_checkpoint.py | 10 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
/aosp_15_r20/external/pytorch/test/distributed/checkpoint/e2e/
  test_e2e_save_and_load.py | 28 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP; 86 FSDP = auto() (variable in ModelType)
  test_pipeline.py | 11 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
/aosp_15_r20/external/pytorch/test/distributed/checkpoint/
  test_fsdp_tp_checkpoint_conversion.py | 7 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
  test_save_load_api.py | 8 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
  test_fsdp_model_state.py | 10 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
  test_fsdp_optim_state.py | 8 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
  test_format_utils.py | 15 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
  test_fsspec.py | 15 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
  test_hsdp_checkpoint.py | 13 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
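The checkpoint tests above pair that import with StateDictType and torch.distributed.checkpoint (DCP). Below is a hedged sketch of the sharded save/load round trip they revolve around, continuing from the single-rank wrap sketch earlier in this listing; fsdp_model is assumed to already exist inside an initialized process group, and the checkpoint path is illustrative.

    # Hedged sketch: take a sharded state dict under FSDP.state_dict_type and
    # hand it to DCP; each rank reads and writes its own shards.
    import torch.distributed.checkpoint as dcp
    from torch.distributed.fsdp import FullyShardedDataParallel as FSDP, StateDictType

    with FSDP.state_dict_type(fsdp_model, StateDictType.SHARDED_STATE_DICT):
        state_dict = {"model": fsdp_model.state_dict()}
    dcp.save(state_dict, checkpoint_id="/tmp/fsdp_ckpt")  # illustrative path

    # Loading mirrors the save: rebuild a sharded state dict, load it in place.
    with FSDP.state_dict_type(fsdp_model, StateDictType.SHARDED_STATE_DICT):
        state_dict = {"model": fsdp_model.state_dict()}
        dcp.load(state_dict, checkpoint_id="/tmp/fsdp_ckpt")
        fsdp_model.load_state_dict(state_dict["model"])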
/aosp_15_r20/external/pytorch/torch/distributed/checkpoint/examples/
  fsdp_checkpoint_example.py | 19 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
  async_checkpointing_example.py | 18 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
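fsdp_checkpoint_example.py walks through the same DCP save/load round trip as the sketch above; async_checkpointing_example.py additionally overlaps the checkpoint write with training. A hedged sketch of that overlap, assuming dcp.async_save is available in this PyTorch snapshot and reusing the sharded state_dict from the previous sketch:

    # Hedged sketch in the spirit of async_checkpointing_example.py: stage the
    # checkpoint, let DCP write it in the background, and only block when the
    # checkpoint must actually be durable. The path is illustrative.
    import torch.distributed.checkpoint as dcp

    future = dcp.async_save(state_dict, checkpoint_id="/tmp/fsdp_async_ckpt")

    # ... run the next training steps while the write proceeds ...

    future.result()  # wait for the background write to finish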