[BE] document distributed apis (#165194)
This PR documents some `torch.distributed.distributed_c10d` APIs. Below are screenshots of the rendered docs.

![Screenshot 2025-10-10 at 10 18 40 PM](https://github.com/user-attachments/assets/555ae886-bead-47f3-8c67-9bc91c14bd11)
![Screenshot 2025-10-10 at 10 18 47 PM](https://github.com/user-attachments/assets/1d6f7af1-db28-40f9-927e-5c47668a1a88)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/165194
Approved by: https://github.com/janeyx99
commit fa95882093 (parent a71ca4dcb9), committed by PyTorch MergeBot
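For orientation, here is a minimal sketch (not part of the diff) exercising a few of the lifecycle and availability APIs this change documents; the single-process `gloo` group and the TCP address are illustrative assumptions.

```python
# Sketch only: exercises a few of the newly documented c10d APIs using a
# one-process "gloo" group (address/port chosen arbitrarily for the demo).
import torch.distributed as dist

def main() -> None:
    # Backend-availability queries are plain booleans and need no group.
    print("gloo available:", dist.is_gloo_available())
    print("nccl available:", dist.is_nccl_available())

    dist.init_process_group(
        backend="gloo",
        init_method="tcp://127.0.0.1:29500",
        rank=0,
        world_size=1,
    )
    assert dist.is_initialized()
    print("rank:", dist.get_rank(), "world size:", dist.get_world_size())

    dist.destroy_process_group()
    assert not dist.is_initialized()

if __name__ == "__main__":
    main()
```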
@@ -534,42 +534,6 @@ coverage_ignore_functions = [
    # torch.distributed.checkpoint.utils
    "find_state_dict_object",
    "find_tensor_shard",
    # torch.distributed.collective_utils
    "all_gather",
    "all_gather_object_enforce_type",
    "broadcast",
    # torch.distributed.distributed_c10d
    "all_gather",
    "all_gather_coalesced",
    "all_gather_into_tensor",
    "all_gather_object",
    "all_reduce",
    "all_reduce_coalesced",
    "all_to_all",
    "all_to_all_single",
    "barrier",
    "batch_isend_irecv",
    "broadcast",
    "broadcast_object_list",
    "destroy_process_group",
    "gather",
    "gather_object",
    "get_backend",
    "get_backend_config",
    "get_global_rank",
    "get_group_rank",
    "get_process_group_ranks",
    "get_rank",
    "get_world_size",
    "init_process_group",
    "irecv",
    "is_backend_available",
    "is_gloo_available",
    "is_initialized",
    "is_mpi_available",
    "is_nccl_available",
    "is_torchelastic_launched",
    "is_ucc_available",
    "isend",
    "monitored_barrier",
    "new_group",
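The `distributed_c10d` collectives dropped from the ignore list above now get rendered documentation. As an illustration (not from the PR), here is how two of them are typically called, assuming the script runs under `torchrun` so a default group already exists.

```python
# Sketch only: assumes dist.init_process_group(...) has already run on
# every rank (e.g. launched via torchrun).
import torch
import torch.distributed as dist

def demo_collectives() -> list[torch.Tensor]:
    t = torch.ones(4) * dist.get_rank()

    # all_reduce sums the tensor across all ranks, in place.
    dist.all_reduce(t, op=dist.ReduceOp.SUM)

    # all_gather collects every rank's tensor into a pre-sized list.
    gathered = [torch.empty(4) for _ in range(dist.get_world_size())]
    dist.all_gather(gathered, t)
    return gathered
```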
@@ -643,15 +607,8 @@ coverage_ignore_functions = [
    "transformer_auto_wrap_policy",
    "wrap",
    # torch.distributed.nn.functional
    "all_gather",
    "all_reduce",
    "all_to_all",
    "all_to_all_single",
    "broadcast",
    "gather",
    "reduce",
    "reduce_scatter",
    "scatter",
    # torch.distributed.nn.jit.instantiator
    "get_arg_return_types_from_interface",
    "instantiate_non_scriptable_remote_module_template",
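The `torch.distributed.nn.functional` entries are the autograd-aware counterparts of the c10d collectives. A sketch of the difference, under the same assumption of an already-initialized group (the function name is illustrative):

```python
# Sketch only: the wrapper returns a new tensor and its backward pass also
# communicates, so gradients flow across ranks; dist.all_reduce, by
# contrast, mutates its input in place and is not differentiable.
import torch
import torch.distributed.nn.functional as dist_nn_f

def loss_with_all_reduce(x: torch.Tensor) -> torch.Tensor:
    reduced = dist_nn_f.all_reduce(x)  # defaults to a SUM over the world group
    return reduced.sum()
```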
@@ -10,6 +10,7 @@ torch.cpu
    current_device
    current_stream
    is_available
    is_initialized
    synchronize
    stream
    set_device
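The `torch.cpu` page now lists these entries, which mirror the `torch.cuda` interface so device-generic code can call either namespace. A quick sketch (streams and `synchronize` are effectively no-ops on CPU):

```python
# Sketch only: tours the torch.cpu entries listed in the hunk above.
import torch

assert torch.cpu.is_available()      # always True
print(torch.cpu.current_device())    # "cpu"

s = torch.cpu.Stream()               # kept for API parity with torch.cuda
with torch.cpu.stream(s):
    y = torch.ones(8) * 2            # ordinary eager CPU execution
print(torch.cpu.current_stream())
torch.cpu.synchronize()              # no-op on CPU, but safe to call
```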
@@ -221,6 +221,16 @@ inconsistent 'UUID' assignment across ranks, and to prevent races during initial

```{eval-rst}
.. autofunction:: torch.distributed.distributed_c10d.is_xccl_available
.. autofunction:: torch.distributed.distributed_c10d.batch_isend_irecv
.. autofunction:: torch.distributed.distributed_c10d.destroy_process_group
.. autofunction:: torch.distributed.distributed_c10d.is_backend_available
.. autofunction:: torch.distributed.distributed_c10d.irecv
.. autofunction:: torch.distributed.distributed_c10d.is_gloo_available
.. autofunction:: torch.distributed.distributed_c10d.is_initialized
.. autofunction:: torch.distributed.distributed_c10d.is_mpi_available
.. autofunction:: torch.distributed.distributed_c10d.is_nccl_available
.. autofunction:: torch.distributed.distributed_c10d.is_torchelastic_launched
.. autofunction:: torch.distributed.distributed_c10d.is_ucc_available
```
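Of the functions covered by the new `autofunction` entries, `batch_isend_irecv` benefits most from an example. A minimal ring-exchange sketch, again assuming an initialized group (e.g. under `torchrun`):

```python
# Sketch only: each rank sends its buffer to the right neighbor and
# receives from the left one, batching both point-to-point ops together.
import torch
import torch.distributed as dist

def ring_exchange() -> torch.Tensor:
    rank, world = dist.get_rank(), dist.get_world_size()
    send_buf = torch.full((4,), float(rank))
    recv_buf = torch.empty(4)

    ops = [
        dist.P2POp(dist.isend, send_buf, (rank + 1) % world),
        dist.P2POp(dist.irecv, recv_buf, (rank - 1) % world),
    ]
    for req in dist.batch_isend_irecv(ops):
        req.wait()
    return recv_buf
```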