[CI] Fix doctest job when built without distributed (#165449)
Guard the test with `TORCH_DOCTEST_DISTRIBUTED` and set it to true in run_test.py so the doctest job passes for PyTorch builds without distributed support. This is a regression introduced by https://github.com/pytorch/pytorch/pull/164806

Fixes https://github.com/pytorch/pytorch/issues/165343

Pull Request resolved: https://github.com/pytorch/pytorch/pull/165449
Approved by: https://github.com/seemethere
Committed by: PyTorch MergeBot
Parent: d18e068fd6
Commit: cbf212e9c7
@@ -1123,6 +1123,9 @@ def run_doctests(test_module, test_directory, options):
     if torch.mps.is_available():
         os.environ["TORCH_DOCTEST_MPS"] = "1"
 
+    if torch.distributed.is_available():
+        os.environ["TORCH_DOCTEST_DISTRIBUTED"] = "1"
+
     if 0:
         # TODO: could try to enable some of these
         os.environ["TORCH_DOCTEST_QUANTIZED_DYNAMIC"] = "1"
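For context, a minimal self-contained sketch of the gating pattern the hunk above adds (this is not the real run_test.py, just the same idea): the runner exports TORCH_DOCTEST_DISTRIBUTED only when the build actually has distributed support, and xdoctest later skips any example carrying the matching +REQUIRES(env:...) directive when the variable is unset.

# Sketch of the env-var gating used above; assumes a local PyTorch
# install. Not the actual test harness.
import os

import torch


def enable_optional_doctests() -> None:
    # Only advertise distributed doctests when the build supports them;
    # torch.distributed.is_available() returns False for builds compiled
    # without distributed support.
    if torch.distributed.is_available():
        os.environ["TORCH_DOCTEST_DISTRIBUTED"] = "1"


if __name__ == "__main__":
    enable_optional_doctests()
    print("TORCH_DOCTEST_DISTRIBUTED =",
          os.environ.get("TORCH_DOCTEST_DISTRIBUTED", "<unset>"))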
@@ -27,6 +27,7 @@ class ShardOrderEntry(NamedTuple):
         second, etc. This tuple is guaranteed to be non-empty.
 
     Examples:
+        >>> # xdoctest: +REQUIRES(env:TORCH_DOCTEST_DISTRIBUTED)
         >>> # Tensor dim 1 sharded across mesh dim 2, then mesh dim 0
         >>> ShardOrderEntry(tensor_dim=1, mesh_dims=(2, 0))
 
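To illustrate the directive's effect at doctest-collection time, here is a hypothetical stand-in module (example_shard.py; the real ShardOrderEntry lives in PyTorch's DTensor code and is not imported here). Running it through xdoctest, e.g. `python -m xdoctest example_shard.py`, skips the example unless TORCH_DOCTEST_DISTRIBUTED is set, which is how a build without distributed support now avoids the doctest failure.

# example_shard.py -- hypothetical stand-in used only to demonstrate the
# xdoctest REQUIRES directive added in the hunk above.
from typing import NamedTuple


class ShardOrderEntry(NamedTuple):
    """Stand-in mirroring the docstring pattern patched above.

    Examples:
        >>> # xdoctest: +REQUIRES(env:TORCH_DOCTEST_DISTRIBUTED)
        >>> ShardOrderEntry(tensor_dim=1, mesh_dims=(2, 0))
        ShardOrderEntry(tensor_dim=1, mesh_dims=(2, 0))
    """

    tensor_dim: int
    mesh_dims: tuple[int, ...]

With the variable unset, xdoctest reports the example as skipped rather than failed, matching the intent of the run_test.py change.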