Revert "Fix different seq length (#167481)"

This reverts commit c78e64622e62eb93a03a9c3762df3290d6c65362.

Reverted https://github.com/pytorch/pytorch/pull/167481 on behalf of https://github.com/pytorch-auto-revert due to: reverted automatically by PyTorch's autorevert. To avoid this behaviour, add the tag `autorevert: disable` ([comment](https://github.com/pytorch/pytorch/pull/167481#issuecomment-3530992724))
This commit is contained in:
PyTorch MergeBot
2025-11-14 06:05:45 +00:00
parent 50bf1f0b81
commit 3522e0ce74

View File

@ -2714,22 +2714,12 @@ class Scheduler:
if (
used_non_deterministic_runtime_estimations()
and config_comms.runtime_estimations_align_across_all_distributed_ranks
and (
config.runtime_estimations_mms_benchmark
or config_comms.runtime_estimations_use_nccl_lib_estimations
)
):
has_collectives = False
for node in self.nodes:
if is_collective(node.node):
has_collectives = True
break
if has_collectives:
from .comms import (
align_runtime_estimations_across_all_distributed_ranks,
)
from .comms import (
align_runtime_estimations_across_all_distributed_ranks,
)
align_runtime_estimations_across_all_distributed_ranks(self.nodes)
align_runtime_estimations_across_all_distributed_ranks(self.nodes)
from torch._logging import trace_structured
@ -2765,7 +2755,6 @@ class Scheduler:
self.insert_memory_check_nodes()
log_ir_post_fusion(self.nodes)
# pyrefly: ignore[unbound-name]
V.debug.graph_diagram(self.nodes)
self.debug_draw_graph()