Mirror of https://github.com/pytorch/pytorch.git, synced 2025-10-20 21:14:14 +08:00
add private config to temporarily preserve old FSDP guard behavior (#142871)
Summary: https://github.com/pytorch/pytorch/pull/138819 wobbled dynamo guards in a way that caused some performance regression, so this PR temporarily adds a config to get the old behavior back while we investigate.

Test Plan: CI

Differential Revision: D67096751

Pull Request resolved: https://github.com/pytorch/pytorch/pull/142871
Approved by: https://github.com/yf225
committed by PyTorch MergeBot
parent 8fae4397b4
commit e19f493f02
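The diff below adds a check of the new private flag to GuardSource.is_specialized_nn_module. As a minimal sketch of how the old guard behavior could be opted into while the regression is investigated (this assumes a build that contains this change; the flag is private and may change or be removed without notice):

import torch._dynamo.config as dynamo_config

# Private escape hatch added by this PR: when set, FSDP module guard sources
# are treated like specialized nn.Module sources again (see the diff below),
# approximating the guard behavior prior to
# https://github.com/pytorch/pytorch/pull/138819.
dynamo_config._unsafe_skip_fsdp_module_guards = True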
@@ -114,6 +114,17 @@ class GuardSource(enum.Enum):
         return self in (GuardSource.GLOBAL_FSDP_MODULE, GuardSource.LOCAL_FSDP_MODULE)
 
     def is_specialized_nn_module(self) -> bool:
+        import torch._dynamo.config as config
+
+        if config._unsafe_skip_fsdp_module_guards:
+            return (
+                self
+                in (
+                    GuardSource.GLOBAL_SPECIALIZED_NN_MODULE,
+                    GuardSource.LOCAL_SPECIALIZED_NN_MODULE,
+                )
+                or self.is_fsdp_module()
+            )
         return self in (
             GuardSource.GLOBAL_SPECIALIZED_NN_MODULE,
             GuardSource.LOCAL_SPECIALIZED_NN_MODULE,
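For reference, a quick check of what the hunk above changes, assuming the installed build exposes GuardSource in torch._guards and includes the private flag shown in the diff (its default value is assumed here to be False):

from torch._guards import GuardSource
import torch._dynamo.config as dynamo_config

dynamo_config._unsafe_skip_fsdp_module_guards = True
print(GuardSource.LOCAL_FSDP_MODULE.is_specialized_nn_module())  # True: FSDP sources count as specialized nn modules (old behavior)

dynamo_config._unsafe_skip_fsdp_module_guards = False
print(GuardSource.LOCAL_FSDP_MODULE.is_specialized_nn_module())  # False: default (new) behavior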