"Remove BLOCK_LIST" (#135729)

Summary:
Skip test_prepare_qat_conv_bn_fusion_getitem_placeholder when using the training IR, since it only exercises the bn-getitem pattern, and that pattern doesn't exist in the training IR.

Remove BLOCK_LIST since it is empty.
All internal unittests will now use the training IR.
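
For context, a minimal sketch of the gating pattern this change removes. The names BLOCK_LIST and should_use_training_ir below are illustrative only, not the upstream implementation (the real helper, per the diff below, is capture_pre_autograd_graph_using_training_ir()):

```python
# Illustrative sketch, not the upstream code: a blocklist that let
# specific internal unittests opt out of the training IR fallback.
BLOCK_LIST = set()  # empty, which is why this commit deletes it


def should_use_training_ir(test_id: str) -> bool:
    # With BLOCK_LIST empty, this always returns True: every internal
    # unittest falls back to torch.export.export_for_training.
    return test_id not in BLOCK_LIST
```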

Test Plan:
```
buck2 run 'fbcode//mode/dev-nosan'  caffe2/test/quantization:test_quantization -- -r test_prepare_qat_conv_bn_fusion_getitem_placeholder
buck2 run 'fbcode//mode/dev-nosan'  caffe2/test:quantization_pt2e_qat -- -r test_prepare_qat_conv_bn_fusion_getitem_placeholder
```

Differential Revision: D62387987

Pull Request resolved: https://github.com/pytorch/pytorch/pull/135729
Approved by: https://github.com/tugsbayasgalan
Author: Shangdi Yu
Date: 2024-09-12 01:22:06 +00:00
Committed by: PyTorch MergeBot
Parent: a130ed828a
Commit: 1a74952925
2 changed files with 4 additions and 1 deletion


```diff
@@ -604,6 +604,9 @@ class TestQuantizePT2EQAT_ConvBn_Base(PT2EQATTestCase):
         is returned as part of the match anyway (as a placeholder).
         """
+        if capture_pre_autograd_graph_using_training_ir():
+            self.skipTest("Not applicable to training IR")
+
         class M(torch.nn.Module):
             def __init__(self, conv_class, bn_class):
                 super().__init__()
```


```diff
@@ -67,7 +67,7 @@ def capture_pre_autograd_graph_warning():
     log.warning("capture_pre_autograd_graph() is deprecated and doesn't provide any function guarantee moving forward.")
     log.warning("Please switch to use torch.export.export_for_training instead.")
     if config.is_fbcode():
-        log.warning("Unless the unittest is in the blocklist, capture_pre_autograd_graph() will fallback to torch.export.export_for_training.")  # noqa: B950
+        log.warning("For unittest, capture_pre_autograd_graph() will fallback to torch.export.export_for_training.")  # noqa: B950
 
 
 @compatibility(is_backward_compatible=False)
```
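
For reference, a minimal usage sketch of the torch.export.export_for_training API that capture_pre_autograd_graph() now falls back to; the toy module and inputs are made up for illustration:

```python
import torch


class Toy(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.conv = torch.nn.Conv2d(3, 8, 3)
        self.bn = torch.nn.BatchNorm2d(8)

    def forward(self, x):
        return self.bn(self.conv(x))


# export_for_training captures a training-IR graph; with BLOCK_LIST
# removed, this is the path all internal unittests now take.
ep = torch.export.export_for_training(Toy(), (torch.randn(1, 3, 16, 16),))
print(ep.module())
```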