mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-20 21:14:14 +08:00
Apply ufmt linter to all py files under tools (#81285)
With ufmt in place https://github.com/pytorch/pytorch/pull/81157, we can now use it to gradually format all files. I'm breaking this down into multiple smaller batches to avoid too many merge conflicts later on. This batch (as copied from the current BLACK linter config): * `tools/**/*.py` Upcoming batches: * `torchgen/**/*.py` * `torch/package/**/*.py` * `torch/onnx/**/*.py` * `torch/_refs/**/*.py` * `torch/_prims/**/*.py` * `torch/_meta_registrations.py` * `torch/_decomp/**/*.py` * `test/onnx/**/*.py` Once they are all formatted, the BLACK linter will be removed. Pull Request resolved: https://github.com/pytorch/pytorch/pull/81285 Approved by: https://github.com/suo
This commit is contained in:
@ -2,8 +2,9 @@ import argparse
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
from typing import Any, cast, Optional
|
||||
|
||||
import yaml
|
||||
from typing import Any, Optional, cast
|
||||
|
||||
try:
|
||||
# use faster C loader if available
|
||||
@ -25,10 +26,11 @@ def generate_code(
|
||||
force_schema_registration: bool = False,
|
||||
operator_selector: Any = None,
|
||||
) -> None:
|
||||
from tools.autograd.gen_autograd import gen_autograd, gen_autograd_python
|
||||
from tools.autograd.gen_annotated_fn_args import gen_annotated
|
||||
from torchgen.selective_build.selector import SelectiveBuilder
|
||||
|
||||
from tools.autograd.gen_annotated_fn_args import gen_annotated
|
||||
from tools.autograd.gen_autograd import gen_autograd, gen_autograd_python
|
||||
|
||||
# Build ATen based Variable classes
|
||||
if install_dir is None:
|
||||
install_dir = os.fspath(gen_dir / "torch/csrc")
|
||||
@ -207,8 +209,8 @@ def main() -> None:
|
||||
assert os.path.isfile(
|
||||
ts_native_functions
|
||||
), f"Unable to access {ts_native_functions}"
|
||||
from torchgen.gen_lazy_tensor import run_gen_lazy_tensor
|
||||
from torchgen.dest.lazy_ir import GenTSLazyIR
|
||||
from torchgen.gen_lazy_tensor import run_gen_lazy_tensor
|
||||
|
||||
run_gen_lazy_tensor(
|
||||
aten_path=aten_path,
|
||||
|
Reference in New Issue
Block a user