[c10d] Comment out ddp_hook_with_optimizer_parity tests (#100215)

This is a mirror PR of D45339293

Summary:
These tests cause the following errors internally for an unknown reason:
```
AttributeError: type object 'TestDistBackendWithSpawn' has no attribute 'test_ddp_hook_with_optimizer_parity_adam'
AttributeError: type object 'TestDistBackendWithSpawn' has no attribute 'test_ddp_hook_with_optimizer_parity_adamw'
AttributeError: type object 'TestDistBackendWithSpawn' has no attribute 'test_ddp_hook_with_optimizer_parity_sgd'
```
Commenting these tests out to unblock other PRs.

Test Plan: Sandcastle

Pull Request resolved: https://github.com/pytorch/pytorch/pull/100215
Approved by: https://github.com/wz337, https://github.com/fduwjj
This commit is contained in:
Ke Wen
2023-04-28 17:38:12 +00:00
committed by PyTorch MergeBot
parent efed5e1c47
commit 628a8df1c9

View File

@@ -74,7 +74,6 @@ from torch.testing._internal.common_utils import (
IS_FBCODE,
NO_MULTIPROCESSING_SPAWN,
IS_SANDCASTLE,
parametrize,
skip_but_pass_in_sandcastle,
skip_but_pass_in_sandcastle_if,
)
@@ -4905,6 +4904,13 @@ class DistributedTest:
)
dist.barrier()
"""
# Commenting out the following 3 tests as they cause Sandcastle jobs to fail
# Failure signature:
# AttributeError: type object 'TestDistBackendWithSpawn' has no attribute 'test_ddp_hook_with_optimizer_parity_adamw
from torch.testing._internal.common_utils import parametrize
@skip_but_pass_in_sandcastle_if(
BACKEND == "nccl" or BACKEND == "ucc",
"Issues with async error handling, see https://github.com/pytorch/pytorch/issues/73259",
@@ -4973,6 +4979,7 @@ class DistributedTest:
momentum=sgd_momentum,
weight_decay=sgd_weight_decay,
)
"""
@skip_if_lt_x_gpu(2)
def test_get_data_parallel_params(self):