Revert "Graph break with error message (#158800)"

This reverts commit cae4746952afbb6d26ecf7599cb7c6c449c69ef4.

Reverted https://github.com/pytorch/pytorch/pull/158800 on behalf of https://github.com/clee2000 because it broke some tests on main: inductor/test_distributed_patterns.py::DistributedPatternTests::test_nn_param_return4 [GH job link](https://github.com/pytorch/pytorch/actions/runs/16507837934/job/46685704688) [HUD commit link](cae4746952). Note to self: bad TD, but dynamo/test_repros also failed and did not get skipped by TD, so this may be a landrace, or I may just be blaming the wrong commit entirely. ([comment](https://github.com/pytorch/pytorch/pull/158800#issuecomment-3115224608))
PyTorch MergeBot
2025-07-24 22:45:58 +00:00
parent 751285cb22
commit 8d2a1d6e18
5 changed files with 0 additions and 60 deletions


@@ -1339,7 +1339,6 @@ SeqNr|OrigAten|SrcFn|FwdSrcFn
        FileCheck().check("bw_donated_idxs=[1]").run("\n".join(captured.output))

    @torch._functorch.config.patch("donated_buffer", True)
    @torch._dynamo.config.patch("graph_break_on_nn_param_ctor", False)
    def test_donated_buffer6(self):
        if is_dynamic_shape_test(self._testMethodName):
            # parameters should not be dynamic shape


@@ -42,7 +42,6 @@ import torch.distributed as dist
import torch.library
import torch.utils._pytree as pytree
from torch import nn
from torch._dynamo.backends.debugging import ExplainWithBackend
from torch._dynamo.debug_utils import same_two_models
from torch._dynamo.testing import (
    CompileCounter,
@@ -7135,28 +7134,6 @@ def forward(self, s77 : torch.SymInt, s27 : torch.SymInt, L_x_ : torch.Tensor):
        torch.compile(f, backend="eager", fullgraph=True)(eye, out_res)
        self.assertEqual(out_ref, out_res)

    def test_nn_parameter_ctor_graph_breaks(self):
        def fn():
            param = torch.nn.Parameter(torch.ones(10))
            return param * 2

        self.maxDiff = None
        eb = ExplainWithBackend("eager")
        optimized_fn = torch.compile(fn, backend=eb)
        _ = optimized_fn()
        explain_output = eb.output()
        self.assertEqual(explain_output.graph_break_count, 1)
        expected_msg = (
            "Attempted to use `torch.nn.Parameter()` constructor with Dynamo\n"
            " Explanation: Dynamo does not support this\n"
            " Hint: Try to construct `torch.nn.Parameter()` outside the compiled region.\n"
            " Hint: If this is not possible, turn `graph_break_on_nn_param_ctor` off\n"
            " Hint: It may be possible to write Dynamo tracing rules for this code. "
            "Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues.\n\n"
            " Developer debug context: \n"
        )
        self.assertEqual(explain_output.break_reasons[0].reason, expected_msg)


class ReproTestsDevice(torch._dynamo.test_case.TestCase):
    def test_sub_alpha_scalar_repro(self, device):
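
The removed test drives `ExplainWithBackend` directly; the same counters are also exposed through the public `torch._dynamo.explain` helper. A rough sketch of that route, assuming a build where the nn.Parameter graph break still exists (i.e. before this revert); on a post-revert build it may report zero breaks:

import torch
import torch._dynamo

def fn():
    param = torch.nn.Parameter(torch.ones(10))
    return param * 2

# explain() runs fn under Dynamo and collects graph breaks instead of raising.
explanation = torch._dynamo.explain(fn)()
print(explanation.graph_break_count)
print([reason.reason for reason in explanation.break_reasons])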


@@ -562,13 +562,6 @@ caching_precompile = os.environ.get("TORCH_CACHING_PRECOMPILE", "0") == "1"
# registering backward hooks on tensors contained within the compiled region.
compiled_autograd = False

# Checks if we should graph break when seeing nn parameter constructors
# in dynamo; this is so that we clearly fail and ask users to move outside
# the function as opposed to trying to support the ctor with unclear semantics
# See https://github.com/pytorch/pytorch/issues/157452 for more context
graph_break_on_nn_param_ctor = True

# Overrides torch.compile() kwargs for Compiled Autograd:
compiled_autograd_kwargs_override: dict[str, Any] = {}
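
The comment block above describes the flag this revert removes. For illustration, a rough sketch of how such a Dynamo config flag is toggled, assuming a build where `graph_break_on_nn_param_ctor` still exists (i.e. with #158800 applied); on a post-revert build the attribute is gone and these calls would raise:

import torch
import torch._dynamo  # ensure torch._dynamo.config is loaded

# Process-wide toggle, matching the "turn `graph_break_on_nn_param_ctor` off"
# hint in the graph break message (assumes the flag exists on this build).
torch._dynamo.config.graph_break_on_nn_param_ctor = False

# Scoped toggle: config.patch also works as a decorator, which is how the
# test_donated_buffer6 decorator earlier in this diff used it.
with torch._dynamo.config.patch("graph_break_on_nn_param_ctor", False):
    compiled = torch.compile(
        lambda x: torch.nn.Parameter(torch.ones(3)) * x, backend="eager"
    )
    compiled(torch.ones(3))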


@@ -2509,20 +2509,5 @@
        "It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
      ]
    }
  ],
  "GB0252": [
    {
      "Gb_type": "Attempted to use `torch.nn.Parameter()` constructor with Dynamo",
      "Context": "",
      "Explanation": "Dynamo does not support this",
      "Hints": [
        "Try to construct `torch.nn.Parameter()` outside the compiled region.",
        "If this is not possible, turn `graph_break_on_nn_param_ctor` off",
        "It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
      ],
      "Additional_Info": [
        "Try to construct nn.Parameter() outside the compiled region. If this is not possible, turn `graph_break_on_nn_param_ctor` off"
      ]
    }
  ]
}


@@ -1696,20 +1696,6 @@ For now, dynamo will explicitly graph break when it encounters user code with th
        if data.source:
            return cls._nn_param_via_prefix_insert(tx, data, requires_grad)

        if config.graph_break_on_nn_param_ctor:
            # Need user to manually move since we cannot
            unimplemented_v2(
                gb_type="Attempted to use `torch.nn.Parameter()` constructor with Dynamo",
                context="",
                explanation="Dynamo does not support this",
                hints=[
                    "Try to construct `torch.nn.Parameter()` outside the compiled region.",
                    "If this is not possible, turn `graph_break_on_nn_param_ctor` off",
                    *graph_break_hints.SUPPORTABLE,
                ],
            )

        # TODO[@lucaskabela]: Remove the behavior below since it is deprecated
        if isinstance(
            data, TensorWithTFOverrideVariable
        ) or is_traceable_wrapper_subclass_type(data.class_type):
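
The first hint in the removed `unimplemented_v2` call is the recommended fix: create the parameter ahead of time and only use it inside the compiled region. A minimal sketch of that workaround (the names here are illustrative, not taken from the PR):

import torch

# Construct the parameter outside the compiled region...
weight = torch.nn.Parameter(torch.ones(8))

def scale(x):
    # ...and only read it inside; no nn.Parameter() call is traced here, so
    # even fullgraph compilation does not hit the graph break.
    return weight * x

compiled_scale = torch.compile(scale, backend="eager", fullgraph=True)
print(compiled_scale(torch.arange(8.0)))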