mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-20 21:14:14 +08:00
[TorchGen] Remove unused variables (#122576)
This PR removes some unused Python variables from TorchGen scripts. Pull Request resolved: https://github.com/pytorch/pytorch/pull/122576 Approved by: https://github.com/Skylion007
This commit is contained in:
@ -243,7 +243,6 @@ class GenLazyIR(ABC):
|
||||
# for now, we just want one IR class decl and soon after also the method defs
|
||||
# and we use the functional version not out/inplace.
|
||||
all_args = schema.filtered_args()
|
||||
value_args = schema.filtered_args(values=True, scalars=False)
|
||||
scalar_args = schema.filtered_args(values=False, scalars=True)
|
||||
|
||||
ctor_args = [
|
||||
@ -675,7 +674,6 @@ class GenLazyShapeInferenceDefinition:
|
||||
|
||||
@method_with_native_function
|
||||
def __call__(self, f: NativeFunction) -> List[str]:
|
||||
sig = kernel_signature(f, self.backend_index)
|
||||
metadata = self.backend_index.get_kernel(f)
|
||||
assert metadata is not None
|
||||
|
||||
|
@ -1,12 +1,10 @@
|
||||
import argparse
|
||||
import os
|
||||
import pathlib
|
||||
import re
|
||||
from collections import Counter, namedtuple
|
||||
from collections import namedtuple
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
Iterable,
|
||||
Iterator,
|
||||
List,
|
||||
@ -28,7 +26,7 @@ from torchgen.gen import get_grouped_native_functions, parse_native_yaml
|
||||
|
||||
from torchgen.model import NativeFunction, NativeFunctionsGroup, OperatorName
|
||||
from torchgen.selective_build.selector import SelectiveBuilder
|
||||
from torchgen.utils import concatMap, FileManager, NamespaceHelper
|
||||
from torchgen.utils import FileManager, NamespaceHelper
|
||||
from torchgen.yaml_utils import YamlLoader
|
||||
from .gen_backend_stubs import (
|
||||
error_on_missing_kernels,
|
||||
@ -107,14 +105,6 @@ def parse_native_functions_keys(
|
||||
backend_yaml_path: str,
|
||||
grouped_native_functions: Sequence[Union[NativeFunction, NativeFunctionsGroup]],
|
||||
) -> Tuple[List[OperatorName], List[Any], List[OperatorName]]:
|
||||
native_functions_map: Dict[OperatorName, NativeFunction] = {
|
||||
f.func.name: f
|
||||
for f in concatMap(
|
||||
lambda f: [f] if isinstance(f, NativeFunction) else list(f.functions()),
|
||||
grouped_native_functions,
|
||||
)
|
||||
}
|
||||
|
||||
with open(backend_yaml_path) as f:
|
||||
yaml_values = yaml.load(f, Loader=YamlLoader)
|
||||
assert isinstance(yaml_values, dict)
|
||||
@ -142,10 +132,6 @@ def validate_shape_inference_header(
|
||||
f"Unable to read from the specified shape_inference_hdr file: {shape_inference_hdr}"
|
||||
) from e
|
||||
|
||||
shape_infr_regex = r"compute_shape_(\w+)"
|
||||
actual_shape_infr_name_counts = Counter(
|
||||
re.findall(shape_infr_regex, shape_infr_decls)
|
||||
)
|
||||
# TODO(whc) add a check for shape inference functions that have meta kernels implemented and should be retired.
|
||||
|
||||
missing_decls = [
|
||||
|
Reference in New Issue
Block a user