Revert "[Inductor] Add DeviceAssert op to enable device-side assertion in torch.compile (#160677)"

This reverts commit 378edb047f83dfb84c2d9c032bddebc5e0147b8f.

Reverted https://github.com/pytorch/pytorch/pull/160677 on behalf of https://github.com/atalman due to new test is failing internally ([comment](https://github.com/pytorch/pytorch/pull/160677#issuecomment-3230152168))
This commit is contained in:
PyTorch MergeBot
2025-08-27 23:45:12 +00:00
parent 903181bb6f
commit c55bdb26e1
11 changed files with 16 additions and 298 deletions

View File

@@ -1276,13 +1276,6 @@ class SchedulerNode(BaseSchedulerNode):
)
return buffers_store_as_atomic_add
@cache_on_self
def has_side_effects(self) -> bool:
# self._body is None sometimes that's why this check was added
if self._body is not None and self._body.has_op("device_assert_async"):
return True
return super().has_side_effects()
def refresh_group_node_dependencies(
group_snode: Union[FusedSchedulerNode, GroupedSchedulerNode],
@@ -1552,12 +1545,6 @@ class FusedSchedulerNode(BaseSchedulerNode):
return buf.getrawvalue().rstrip()
@cache_on_self
def has_side_effects(self) -> bool:
if self.snodes is not None:
return any(node.has_side_effects() for node in self.snodes)
return super().has_side_effects()
class ForeachKernelSchedulerNode(FusedSchedulerNode):
"""
@@ -3887,6 +3874,7 @@ class Scheduler:
Determine if it is possible to combine node1 and node2 into a
single fused node.
"""
if node1 is node2:
return False
@@ -3990,6 +3978,7 @@ class Scheduler:
):
why("fusion for buffer explicit disabled")
return False
device = node1.get_device()
device2 = node2.get_device()
if device != device2: