Rename inductor cache (#156128)
Requested by Simon on a different PR.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/156128
Approved by: https://github.com/xmfan
Committed by: PyTorch MergeBot
Parent: 45382b284d
Commit: a2a75be0f8
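The change itself is mechanical: each benchmark runner in the diff below swaps its import and call sites of fresh_inductor_cache for the new name fresh_cache in torch._inductor.utils. A minimal sketch of the call-site update, assuming the renamed context manager keeps the old semantics (an isolated, temporary Inductor cache for the duration of the block); run_benchmark() here is a hypothetical stand-in for whatever work is timed, not a helper from these scripts:

    # Before the rename (hypothetical call site):
    from torch._inductor.utils import fresh_inductor_cache

    with fresh_inductor_cache():
        run_benchmark()  # hypothetical timed workload

    # After the rename:
    from torch._inductor.utils import fresh_cache

    with fresh_cache():
        run_benchmark()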
@@ -15,7 +15,7 @@ from benchmark_utils import (  # type: ignore[import-not-found]
 )
 
 import torch
-from torch._inductor.utils import fresh_inductor_cache
+from torch._inductor.utils import fresh_cache
 
 
 class BenchmarkRunnerMixedMM(BenchmarkRunner):  # type: ignore[misc, no-any-unimported]
@@ -59,7 +59,7 @@ class BenchmarkRunnerMixedMM(BenchmarkRunner):  # type: ignore[misc, no-any-unim
         )
         b = b.to(dtype=dtype_right)
 
-        with fresh_inductor_cache():
+        with fresh_cache():
 
             def mixed_mm(A, B):
                 return torch.mm(A, B.to(A.dtype))
@@ -16,7 +16,7 @@ from benchmark_utils import (  # type: ignore[import-not-found]
 )
 
 import torch
-from torch._inductor.utils import fresh_inductor_cache
+from torch._inductor.utils import fresh_cache
 
 
 class BenchmarkRunnerMM(BenchmarkRunner):  # type: ignore[misc, no-any-unimported]
@@ -57,7 +57,7 @@ class BenchmarkRunnerMM(BenchmarkRunner):  # type: ignore[misc, no-any-unimporte
             dtype_right=dtype,
         )
 
-        with fresh_inductor_cache():
+        with fresh_cache():
 
            def mixed_mm(A: Any, B: Any) -> Any:
                return torch.mm(A, B)
@@ -18,7 +18,7 @@ import torch
 from torch._inductor.fx_passes.pad_mm import (  # type: ignore[import-not-found]
     get_alignment_size_dtype,
 )
-from torch._inductor.utils import fresh_inductor_cache
+from torch._inductor.utils import fresh_cache
 
 
 class BenchmarkRunnerPadMM(BenchmarkRunner):  # type: ignore[misc, no-any-unimported]
@@ -74,7 +74,7 @@ class BenchmarkRunnerPadMM(BenchmarkRunner):  # type: ignore[misc, no-any-unimpo
         print(f"transpose_left={transpose_left} transpose_right={transpose_right}")
         print(f"prepadded_left={prepadded_left} prepadded_right={prepadded_right}")
 
-        with fresh_inductor_cache():
+        with fresh_cache():
 
             def mm(a: Any, b: Any) -> Any:
                 return torch.mm(a, b)
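For context, a self-contained sketch of the pattern these runners follow, assuming fresh_cache still scopes Inductor's compile artifacts to a temporary cache so each measurement starts cold; the shapes, the time_mixed_mm wrapper, and the use of torch.compile are illustrative only and not taken from the benchmark scripts:

    import torch
    from torch._inductor.utils import fresh_cache  # renamed from fresh_inductor_cache

    def time_mixed_mm(a: torch.Tensor, b: torch.Tensor) -> torch.Tensor:
        # Compile and run inside an isolated Inductor cache so earlier runs
        # cannot satisfy this compilation from cached artifacts.
        with fresh_cache():

            def mixed_mm(A: torch.Tensor, B: torch.Tensor) -> torch.Tensor:
                return torch.mm(A, B.to(A.dtype))

            compiled = torch.compile(mixed_mm)
            return compiled(a, b)

    if __name__ == "__main__":
        a = torch.randn(64, 64)                               # float32 left operand
        b = torch.randint(-8, 8, (64, 64), dtype=torch.int8)  # int8 right operand, cast inside the kernel
        time_mixed_mm(a, b)

Wrapping only the compile-and-run region keeps the isolation narrow, which matches how the diff places the with block immediately around each kernel definition.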