Mirror of https://github.com/pytorch/pytorch.git, synced 2025-10-20 21:14:14 +08:00
[easy][logging] Remove dynamo_timed fwd_only param (#140993)
Summary: It's ignored; remove it.

Test Plan: CI

Pull Request resolved: https://github.com/pytorch/pytorch/pull/140993
Approved by: https://github.com/ezyang
committed by PyTorch MergeBot
parent 5e0c009a5a
commit ff17d2b83e
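The change is mechanical at every call site: fwd_only was already ignored inside dynamo_timed, so the parameter is dropped from the signature and callers stop passing it. A minimal sketch of the call-site pattern, using names taken from the hunks below:

    # Before: fwd_only is accepted but has no effect.
    with dynamo_timed("async_compile.wait", log_pt2_compile_event=True, fwd_only=False):
        ...

    # After: the dead argument is simply removed.
    with dynamo_timed("async_compile.wait", log_pt2_compile_event=True):
        ...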
@@ -298,8 +298,6 @@ def dynamo_timed(
     # TODO(masneral): Deprecate this param.
     phase_name: Optional[str] = None,
     log_pt2_compile_event: bool = False,
-    # TODO(masnesral): fwd_only is ignored. Remove it.
-    fwd_only: bool = True,
     metadata: Optional[Dict[str, object]] = None,
     dynamo_compile_column_us: Optional[str] = None,
 ) -> Generator[Any, None, None]:
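For reference, the post-change signature implied by the context lines above. Treating dynamo_timed as a context manager follows from the with-statements at the call sites, and the leading key parameter is inferred from the worker stub further down, so both are assumptions rather than part of this hunk:

    @contextmanager  # assumption: matches the `with dynamo_timed(...)` call sites
    def dynamo_timed(
        key: str,  # assumption: first positional parameter, per the key="..." call sites
        # TODO(masneral): Deprecate this param.
        phase_name: Optional[str] = None,
        log_pt2_compile_event: bool = False,
        metadata: Optional[Dict[str, object]] = None,
        dynamo_compile_column_us: Optional[str] = None,
    ) -> Generator[Any, None, None]:
        ...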
@@ -299,9 +299,7 @@ class AsyncCompile:
         return LambdaFuture(get_result)
 
     def wait(self, scope: Dict[str, Any]) -> None:
-        with dynamo_timed(
-            "async_compile.wait", log_pt2_compile_event=True, fwd_only=False
-        ):
+        with dynamo_timed("async_compile.wait", log_pt2_compile_event=True):
             num_kernels = len(
                 [
                     value
@@ -1220,9 +1220,7 @@ class FxGraphCache:
 
         try:
             with dynamo_timed(
-                "PyCodeCache.load_by_key_path",
-                log_pt2_compile_event=True,
-                fwd_only=False,
+                "PyCodeCache.load_by_key_path", log_pt2_compile_event=True
             ):
                 graph.current_callable = PyCodeCache.load_by_key_path(
                     graph.cache_key,
@@ -1985,9 +1985,7 @@ class GraphLowering(torch.fx.Interpreter):
             lambda: {"filename": path},
             payload_fn=lambda: code,
         )
-        with dynamo_timed(
-            "PyCodeCache.load_by_key_path", log_pt2_compile_event=True, fwd_only=False
-        ):
+        with dynamo_timed("PyCodeCache.load_by_key_path", log_pt2_compile_event=True):
             mod = PyCodeCache.load_by_key_path(
                 key,
                 path,
@@ -138,7 +138,6 @@ except AttributeError: # Compile workers only have a mock version of torch
     def dynamo_timed(
         key,
         phase_name=None,
-        fwd_only=True,
         metadata=None,
         dynamo_compile_column_us=None,
     ):
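Compile workers see only a mock torch, so this stub keeps dynamo_timed importable there with the same, now fwd_only-free, parameter list. A no-op shim consistent with this hunk might look like the sketch below; only the signature comes from the diff, and the contextmanager body is an assumption:

    from contextlib import contextmanager

    @contextmanager
    def dynamo_timed(
        key,
        phase_name=None,
        metadata=None,
        dynamo_compile_column_us=None,
    ):
        # Assumed body: workers skip timing/logging and just run the block.
        yield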
@@ -145,9 +145,7 @@ class TritonBundler:
             cls.end_compile()
             return [], None
 
-        with dynamo_timed(
-            key="TritonBundler.collect", fwd_only=False, log_pt2_compile_event=True
-        ):
+        with dynamo_timed(key="TritonBundler.collect", log_pt2_compile_event=True):
             entries = cls._entries
             if entries is not None:
                 result: List[TritonKernelArtifacts] = []
@@ -221,9 +219,7 @@ class TritonBundler:
             return None
 
         with dynamo_timed(
-            key="TritonBundler.read_and_emit",
-            fwd_only=False,
-            log_pt2_compile_event=True,
+            key="TritonBundler.read_and_emit", log_pt2_compile_event=True
         ):
             kernel_names: List[str] = []
 