Fixed comment to match logic in distributed_c10d.py (#162158)

The comment was inconsistent with the logic introduced in #162157 and modified in #142216. This update ensures the documentation matches the actual behavior of the code.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/162158
Approved by: https://github.com/wconstab
Author: Codeboi007
Date: 2025-09-06 05:37:43 +00:00
Committed by: PyTorch MergeBot
Parent: bc505977fb
Commit: c98ddaca6d


@@ -1929,9 +1929,9 @@ def _new_process_group_helper(
     if "," not in str(backend) and ":" not in str(backend):
         assert backend in Backend.backend_type_map, f"Unknown backend type {backend}"
         if backend == Backend.UNDEFINED:
-            # Currently when backend is UNDEFINED, both ``gloo`` and ``nccl`` backends
-            # will be created, we use nccl(if cuda is available) or gloo as default
-            # backend so we can correctly call getDefaultBackend which in ProcessGroup.
+            # Currently when backend is UNDEFINED, only one backend will be initialized;
+            # we use nccl (if cuda is available) or gloo as the default backend
+            # so we can correctly call getDefaultBackend, which is in ProcessGroup.
             if Backend.NCCL in backend_config.get_device_backend_map().values():
                 pg._set_default_backend(ProcessGroup.BackendType.NCCL)
             else:
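
For context, the behavior the updated comment documents can be observed from user code. The sketch below is not part of the PR; it assumes a single-node `torchrun` launch and uses only public `torch.distributed` APIs (`init_process_group`, `all_reduce`, `get_backend`). When no `backend=` is passed, CPU collectives are served by gloo and CUDA collectives (when CUDA is available) by nccl.

```python
# Minimal sketch (not from the PR), assuming a single-node launch such as:
#   torchrun --nproc_per_node=1 default_backend_demo.py
import torch
import torch.distributed as dist


def main() -> None:
    # No backend= argument: distributed_c10d treats the backend as UNDEFINED and,
    # per the comment above, sets the default backend to nccl (if CUDA is
    # available) or gloo.
    dist.init_process_group()

    # CPU collectives are routed to gloo; CUDA collectives (when available)
    # are routed to nccl, following the device -> backend map.
    t = torch.ones(1)
    dist.all_reduce(t)
    if torch.cuda.is_available():
        g = torch.ones(1, device="cuda")
        dist.all_reduce(g)

    print("backend reported by get_backend():", dist.get_backend())
    dist.destroy_process_group()


if __name__ == "__main__":
    main()
```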