[inductor] don't reuse buffers if it affects peak (#145883) (#159530)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/159530
Approved by: https://github.com/eellison
This commit is contained in:
Markus Hoehnerbach
2025-08-15 12:00:45 -07:00
committed by PyTorch MergeBot
parent 62db8ec391
commit 65d21dae18
5 changed files with 608 additions and 2 deletions

View File

@ -2073,6 +2073,7 @@ class Scheduler:
)
self.nodes = [self.create_scheduler_node(n) for n in nodes]
self.current_node: Optional[BaseSchedulerNode] = None
self.update_zero_dim_cpu_tensor()
# some new constants could have been created above
self.available_buffer_names.update(V.graph.constants.keys())
@ -4989,6 +4990,7 @@ class Scheduler:
assert device.index is not None, "device should have an index"
V.graph.wrapper_code.codegen_device_guard_enter(device.index)
self.current_node = node
self.buffer_names_to_free.update(node.last_usage)
if node.is_template():