Adjust ...mark_unbacked() -> ...decorators.mark_unbacked() in logs. (#164131)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/164131
Approved by: https://github.com/albanD, https://github.com/Skylion007
commit 8701f18bc0
parent a56e7a1920
Author: Fabian
Date: 2025-09-29 17:43:56 +00:00
Committed by: PyTorch MergeBot
3 changed files with 4 additions and 4 deletions


@@ -216,7 +216,7 @@ you never specialize.
 #### `mark_unbacked(tensor, dim)`
-The {func}`torch._dynamo.mark_unbacked` function marks a tensor dimension as unbacked. It is unlikely
+The {func}`torch._dynamo.decorators.mark_unbacked` function marks a tensor dimension as unbacked. It is unlikely
 to be the tool you need, but it could be useful if the specialization occurs inside
 a condition `guard_size_oblivious(x)`, and if using it removes the specialization.
 Ensure it fixes the specialization and does not introduce a data-dependent error
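
For context, a minimal sketch of how the renamed decorator is typically applied. The function body and tensor size below are illustrative (not taken from the PyTorch docs), and a purely element-wise function is chosen so the unbacked dimension does not trigger a data-dependent branch:

```python
import torch
import torch._dynamo as dynamo

def fn(x):
    # No branching on x.size(0), so an unbacked dimension is safe here.
    return x * 2 + 1

x = torch.randn(8)
# Mark dim 0 of x as unbacked: the compiled code may not assume x.size(0) >= 2,
# so no 0/1-specialization guard is installed for that dimension.
dynamo.decorators.mark_unbacked(x, 0)
out = torch.compile(fn)(x)
```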


@@ -733,7 +733,7 @@ TRACE FX call mul from test_logging.py:N in fn (LoggingTests.test_trace_call_pre
 +- __SHAPE_GUARD__: L['x'].size()[0] == 2*L['y'].size()[0] # return x + torch.cat([y, z]) # #:# in # #:# in #
 +- __SHAPE_GUARD__: L['z'].size()[0] == L['y'].size()[0] # duck sizing added this equality because these variables had the same size 3 (to avoid this specialization, set torch.fx.experimental._config.use_duck_shape = False)
 +- __SHAPE_GUARD__: ((2*L['y'].size()[0]) % 3) == 0 # if x.size(0) % 3 == 0: # #:# in # #:# in #
-+- __SHAPE_GUARD__: 2 <= L['y'].size()[0] # return x + torch.cat([y, z]) # #:# in # (user code shown is first use of this value--the guard itself is not due user code but due to 0/1 specialization in the framework; to avoid specialization try torch._dynamo.mark_unbacked(tensor, dim))""", # noqa: B950
++- __SHAPE_GUARD__: 2 <= L['y'].size()[0] # return x + torch.cat([y, z]) # #:# in # (user code shown is first use of this value--the guard itself is not due user code but due to 0/1 specialization in the framework; to avoid specialization try torch._dynamo.decorators.mark_unbacked(tensor, dim))""", # noqa: B950
 )
 @make_logging_test(guards=True)
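
The expected strings above come from the `guards` logging artifact. As a rough, hypothetical reproduction outside the test harness (the real test body is not shown in this hunk; the function below is reconstructed from the guard comments and the input sizes 6/3/3 are assumed):

```python
import torch
import torch._logging

# Enable shape-guard logging; equivalent to running with TORCH_LOGS="guards".
torch._logging.set_logs(guards=True)

def fn(x, y, z):
    if x.size(0) % 3 == 0:
        return x + torch.cat([y, z])
    return x

compiled = torch.compile(fn, dynamic=True)
compiled(torch.randn(6), torch.randn(3), torch.randn(3))
# Expected among the logged guards: the duck-sizing equality between y and z
# (avoidable via torch.fx.experimental._config.use_duck_shape = False) and the
# 0/1-specialization guard 2 <= y.size(0) whose comment now points at
# torch._dynamo.decorators.mark_unbacked.
```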
@@ -749,7 +749,7 @@ TRACE FX call mul from test_logging.py:N in fn (LoggingTests.test_trace_call_pre
 munge_shape_guards(record.getMessage()),
 """\
 +- __SHAPE_GUARD__: L['x'].size()[0] == 2*L['y'].size()[0] # return any([x.size(0) == y.size(0) * 2]) # #:# in # #:# in #
-+- __SHAPE_GUARD__: 2 <= L['y'].size()[0] # return any([x.size(0) == y.size(0) * 2]) # #:# in # (user code shown is first use of this value--the guard itself is not due user code but due to 0/1 specialization in the framework; to avoid specialization try torch._dynamo.mark_unbacked(tensor, dim))""", # noqa: B950
++- __SHAPE_GUARD__: 2 <= L['y'].size()[0] # return any([x.size(0) == y.size(0) * 2]) # #:# in # (user code shown is first use of this value--the guard itself is not due user code but due to 0/1 specialization in the framework; to avoid specialization try torch._dynamo.decorators.mark_unbacked(tensor, dim))""", # noqa: B950
 )
 @make_logging_test(guards=True)
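
Similarly, a hedged sketch of the scenario behind the second expectation; the guard comment's suggestion is now spelled `torch._dynamo.decorators.mark_unbacked`. Whether marking `y`'s dimension unbacked actually helps here depends on the caveat in the docs hunk above (it must not introduce a data-dependent error), so the call is only referenced in a comment:

```python
import torch
import torch._logging

torch._logging.set_logs(guards=True)

def fn(x, y):
    return any([x.size(0) == y.size(0) * 2])

x, y = torch.randn(6), torch.randn(3)
# Following the hint in the logged guard, one could try
#   torch._dynamo.decorators.mark_unbacked(y, 0)
# before compiling, keeping the docs' caveat in mind.
torch.compile(fn, dynamic=True)(x, y)
# The log should include the 0/1-specialization guard 2 <= L['y'].size()[0].
```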


@@ -5077,7 +5077,7 @@ class ShapeEnv:
 self._get_sloc(
 "user code shown is first use of this value--the guard itself is not "
 "due user code but due to 0/1 specialization in the framework; to "
-"avoid specialization try torch._dynamo.mark_unbacked(tensor, dim)"
+"avoid specialization try torch._dynamo.decorators.mark_unbacked(tensor, dim)"
 if self.specialize_zero_one
 else None
 ),