[BE][Easy] replace `import pathlib` with `from pathlib import Path` (#129426)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/129426
Approved by: https://github.com/malfet
Xuehai Pan
2024-06-29 23:35:02 +08:00
committed by PyTorch MergeBot
parent 7837a12474
commit 6d75604ef1
33 changed files with 159 additions and 140 deletions
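The refactor is mechanical: each touched file drops the module-level `import pathlib` in favor of importing the `Path` class directly, and every `pathlib.Path(...)` call site loses its `pathlib.` prefix. A minimal sketch of the pattern, shown with both styles side by side (the variable names here are illustrative only and are not taken from any of the changed files):

# Old style: import the module and qualify every call site.
import pathlib

repo_root_old = pathlib.Path(__file__).resolve().parent.parent

# New style: import the Path class once; call sites get shorter.
from pathlib import Path

repo_root_new = Path(__file__).resolve().parent.parent

# Both spellings construct the same Path object.
assert repo_root_old == repo_root_new
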

View File

@ -7,10 +7,11 @@ that opens libneuralnetworks.so with dlopen and finds the functions
we need with dlsym. We also generate a "check" wrapper that checks
return values and throws C++ exceptions on errors.
"""
import pathlib
import re
import sys
import textwrap
from pathlib import Path
PREFIX = """\
@ -231,7 +232,7 @@ def main(argv):
)
)
out_dir = pathlib.Path(__file__).parent
out_dir = Path(__file__).parent
(out_dir / "nnapi_wrapper.h").write_text(
PREFIX

View File

@ -18,12 +18,12 @@ Known limitations:
import argparse
import json
import os
import pathlib
import subprocess
import sys
import urllib
from io import BytesIO
from itertools import product
from pathlib import Path
from urllib.request import urlopen
from zipfile import ZipFile
@ -34,7 +34,7 @@ import requests
# https://console.rockset.com/lambdas/details/commons.artifacts
ARTIFACTS_QUERY_URL = "https://api.usw2a1.rockset.com/v1/public/shared_lambdas/4ca0033e-0117-41f5-b043-59cde19eff35"
CSV_LINTER = str(
pathlib.Path(__file__).absolute().parent.parent.parent.parent
Path(__file__).absolute().parent.parent.parent.parent
/ "tools/linter/adapters/no_merge_conflict_csv_linter.py"
)

View File

@ -2,7 +2,6 @@
from __future__ import annotations
import abc
import argparse
import collections
import contextlib
@ -14,7 +13,6 @@ import importlib
import itertools
import logging
import os
import pathlib
import shutil
import signal
import subprocess
@ -22,7 +20,7 @@ import sys
import time
import weakref
from contextlib import contextmanager
from pathlib import Path
from typing import (
Any,
Callable,
@ -60,6 +58,7 @@ from torch._dynamo.testing import (
same,
)
try:
from torch._dynamo.utils import (
clone_inputs,
@ -81,6 +80,7 @@ from torch._subclasses.fake_tensor import FakeTensorMode
from torch.utils import _pytree as pytree
from torch.utils._pytree import tree_map, tree_map_only
try:
import torch_xla
import torch_xla.core.xla_model as xm
@ -920,7 +920,7 @@ def speedup_experiment_onnx(
2. Running ORT with OnnxModel.
Writes to ./{output_filename}, which should be
`pathlib.Path(self.output_dir) / f"{self.compiler}_{suite}_{self.dtype}_{self.mode}_{self.device}_{self.testing}.csv".
`Path(self.output_dir) / f"{self.compiler}_{suite}_{self.dtype}_{self.mode}_{self.device}_{self.testing}.csv".
TODO(bowbao): Record export time and export peak memory usage.
"""
@ -1347,8 +1347,8 @@ class OnnxModel(abc.ABC):
@classmethod
def _generate_onnx_model_directory(
cls, output_directory: str, compiler_name: str, model_name: str
) -> pathlib.Path:
model_path = pathlib.Path(
) -> Path:
model_path = Path(
output_directory,
".onnx_models",
model_name,
@ -2389,7 +2389,6 @@ class BenchmarkRunner:
from diffusers.models.transformer_2d import Transformer2DModel
from torchbenchmark.models.nanogpt.model import Block
from transformers.models.llama.modeling_llama import LlamaDecoderLayer
from transformers.models.t5.modeling_t5 import T5Block
from transformers.models.whisper.modeling_whisper import WhisperEncoderLayer

View File

@ -1,8 +1,8 @@
#!/usr/bin/env python3
import argparse
import os
import pathlib
import subprocess
from pathlib import Path
from common import (
get_testcases,
@ -194,7 +194,7 @@ if __name__ == "__main__":
"filename",
nargs="?",
default=str(
pathlib.Path(__file__).absolute().parent.parent.parent
Path(__file__).absolute().parent.parent.parent
/ "torch/testing/_internal/dynamo_test_failures.py"
),
help="Optional path to dynamo_test_failures.py",
@ -203,7 +203,7 @@ if __name__ == "__main__":
parser.add_argument(
"test_dir",
nargs="?",
default=str(pathlib.Path(__file__).absolute().parent.parent.parent / "test"),
default=str(Path(__file__).absolute().parent.parent.parent / "test"),
help="Optional path to test folder",
)
parser.add_argument(
@ -219,7 +219,7 @@ if __name__ == "__main__":
action="store_true",
)
args = parser.parse_args()
assert pathlib.Path(args.filename).exists(), args.filename
assert pathlib.Path(args.test_dir).exists(), args.test_dir
assert Path(args.filename).exists(), args.filename
assert Path(args.test_dir).exists(), args.test_dir
dynamo38, dynamo311 = download_reports(args.commit, ("dynamo38", "dynamo311"))
update(args.filename, args.test_dir, dynamo38, dynamo311, args.also_remove_skips)

View File

@ -1,14 +1,15 @@
#!/usr/bin/env python3
# Owner(s): ["oncall: distributed"]
import pathlib
import sys
from pathlib import Path
from typing import Tuple
import torch
import torch.distributed as dist
from torch import nn, Tensor
if not dist.is_available():
print("Distributed not available, skipping tests", file=sys.stderr)
sys.exit(0)
@ -45,7 +46,7 @@ class TestInstantiator(TestCase):
self.assertEqual(return_type_str, "Tuple[Tensor, int, str]")
def test_instantiate_scripted_remote_module_template(self):
dir_path = pathlib.Path(instantiator.INSTANTIATED_TEMPLATE_DIR_PATH)
dir_path = Path(instantiator.INSTANTIATED_TEMPLATE_DIR_PATH)
# Cleanup.
file_paths = dir_path.glob(f"{instantiator._FILE_PREFIX}*.py")
@ -69,7 +70,7 @@ class TestInstantiator(TestCase):
self.assertEqual(num_files_after, 1)
def test_instantiate_non_scripted_remote_module_template(self):
dir_path = pathlib.Path(instantiator.INSTANTIATED_TEMPLATE_DIR_PATH)
dir_path = Path(instantiator.INSTANTIATED_TEMPLATE_DIR_PATH)
# Cleanup.
file_paths = dir_path.glob(f"{instantiator._FILE_PREFIX}*.py")

View File

@ -7,10 +7,10 @@ with test_sym_bool)
# Owner(s): ["oncall: export"]
import copy
import io
import pathlib
import tempfile
import unittest
import zipfile
from pathlib import Path
import torch
import torch._dynamo as torchdynamo
@ -38,7 +38,6 @@ from torch.testing._internal.common_utils import (
TemporaryFileName,
TestCase,
)
from torch.testing._internal.torchbind_impls import init_torchbind_implementations
@ -1052,7 +1051,7 @@ class TestSaveLoad(TestCase):
ep = export(f, inp)
with TemporaryFileName() as fname:
path = pathlib.Path(fname)
path = Path(fname)
save(ep, path)
loaded_ep = load(path)

View File

@ -1,16 +1,17 @@
# Owner(s): ["module: inductor"]
import logging
import os
import pathlib
import re
import shutil
import sys
import unittest
from pathlib import Path
import torch
from torch._inductor import config, test_operators
from torch.testing._internal.inductor_utils import GPU_TYPE, HAS_GPU
try:
try:
from . import test_torchinductor
@ -22,7 +23,7 @@ except unittest.SkipTest:
raise
def filesize(filename: pathlib.Path):
def filesize(filename: Path):
assert filename.exists(), f"{filename} is missing"
return os.stat(filename).st_size
@ -43,7 +44,7 @@ class TestDebugTrace(test_torchinductor.TestCase):
self.assertEqual(len(cm.output), 1)
m = re.match(r"WARNING.* debug trace: (.*)", cm.output[0])
self.assertTrue(m)
filename = pathlib.Path(m.group(1))
filename = Path(m.group(1))
self.assertTrue(filename.is_dir())
self.assertGreater(filesize(filename / "fx_graph_readable.py"), 512)
self.assertGreater(filesize(filename / "fx_graph_runnable.py"), 512)

View File

@ -2,14 +2,15 @@
import io
import os
import pathlib
import sys
from pathlib import Path
from typing import NamedTuple, Optional
import torch
from torch import Tensor
from torch.testing._internal.common_utils import skipIfTorchDynamo, TemporaryFileName
# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
@ -397,7 +398,7 @@ class TestSaveLoad(JitTestCase):
# Save then load.
with TemporaryFileName() as fname:
path = pathlib.Path(fname)
path = Path(fname)
m.save(path)
m2 = torch.jit.load(path)
@ -624,7 +625,7 @@ class TestSaveLoad(JitTestCase):
traced_module = torch.jit.trace(module, input1)
traced_inputs = list(traced_module.graph.inputs())
with TemporaryFileName() as fname:
path = pathlib.Path(fname)
path = Path(fname)
traced_module.save(path)
print(traced_module.graph)
loaded_module = torch.jit.load(path, _restore_shapes=True)
@ -640,7 +641,7 @@ class TestSaveLoad(JitTestCase):
traced_module._c._retrieve_traced_inputs()["forward"], [input_tensor]
)
with TemporaryFileName() as fname:
path = pathlib.Path(fname)
path = Path(fname)
traced_module.save(path)
loaded_module = torch.jit.load(path, _restore_shapes=True)
loaded_inputs = list(loaded_module.graph.inputs())
@ -659,7 +660,7 @@ class TestSaveLoad(JitTestCase):
self.assertEqual(len(traced_module._c._retrieve_traced_inputs()), 0)
with TemporaryFileName() as fname:
path = pathlib.Path(fname)
path = Path(fname)
traced_module.save(path)
loaded_module = torch.jit.load(path, _restore_shapes=True)
loaded_inputs = list(loaded_module.graph.inputs())
@ -1055,7 +1056,7 @@ class TestSaveLoadFlatbuffer(JitTestCase):
# Save then load.
with TemporaryFileName() as fname:
path = pathlib.Path(fname)
path = Path(fname)
torch.jit.save_jit_module_to_flatbuffer(m, path)
m2 = torch.jit.load(path)

View File

@ -3,7 +3,7 @@
import functools
import itertools
import os
import pathlib
from pathlib import Path
from typing import Sequence
from unittest import skip
@ -20,10 +20,10 @@ from torch.testing._internal.common_device_type import (
ops,
)
from torch.testing._internal.common_methods_invocations import op_db
from torch.testing._internal.common_utils import run_tests, TestCase
from torch.testing._internal.jit_utils import JitTestCase
torch._lazy.ts_backend.init()
@ -36,7 +36,7 @@ def remove_suffixes(l):
def init_lists():
path_to_script = pathlib.Path(os.path.abspath(os.path.dirname(__file__)))
path_to_script = Path(os.path.abspath(os.path.dirname(__file__)))
TS_NATIVE_FUNCTIONS_PATH = (
path_to_script.parent.parent / "aten/src/ATen/native/ts_native_functions.yaml"
)

View File

@ -5,7 +5,6 @@ import copy
import glob
import json
import os
import pathlib
import re
import shutil
import signal
@ -16,6 +15,7 @@ import time
from collections import defaultdict
from contextlib import ExitStack
from datetime import datetime
from pathlib import Path
from typing import Any, cast, Dict, List, NamedTuple, Optional, Sequence, Tuple, Union
import pkg_resources
@ -38,7 +38,7 @@ from torch.testing._internal.common_utils import (
TEST_WITH_SLOW_GRADCHECK,
)
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
REPO_ROOT = Path(__file__).resolve().parent.parent
# using tools/ to optimize test run.
sys.path.insert(0, str(REPO_ROOT))
@ -61,7 +61,6 @@ from tools.testing.target_determination.heuristics.previously_failed_in_pr impor
gen_additional_test_failures_file,
)
from tools.testing.target_determination.heuristics.utils import get_pr_number
from tools.testing.test_run import TestRun
from tools.testing.test_selections import (
calculate_shards,
@ -71,6 +70,7 @@ from tools.testing.test_selections import (
THRESHOLD,
)
HAVE_TEST_SELECTION_TOOLS = True
# Make sure to remove REPO_ROOT after import is done
sys.path.remove(str(REPO_ROOT))
@ -465,7 +465,7 @@ def run_test(
)
else:
cpp_test = os.path.join(
pathlib.Path(test_directory).parent,
Path(test_directory).parent,
CPP_TEST_PATH,
test_file.replace(f"{CPP_TEST_PREFIX}/", ""),
)
@ -800,11 +800,9 @@ def run_doctests(test_module, test_directory, options):
Assumes the incoming test module is called doctest, and simply executes the
xdoctest runner on the torch library itself.
"""
import pathlib
import xdoctest
pkgpath = pathlib.Path(torch.__file__).parent
pkgpath = Path(torch.__file__).parent
exclude_module_list = ["torch._vendor.*"]
enabled = {

View File

@ -1,40 +1,58 @@
# Owner(s): ["module: serialization"]
import torch
import unittest
import io
import tempfile
import os
import gc
import sys
import zipfile
import warnings
import gzip
import copy
import gc
import gzip
import io
import os
import pickle
import shutil
import pathlib
import platform
import shutil
import sys
import tempfile
import unittest
import warnings
import zipfile
from collections import namedtuple, OrderedDict
from copy import deepcopy
from itertools import product
from pathlib import Path
from torch._utils_internal import get_file_path_2
import torch
from torch._utils import _rebuild_tensor
from torch.utils._import_utils import import_dill
from torch.serialization import check_module_version_greater_or_equal, get_default_load_endianness, \
set_default_load_endianness, LoadEndianness, SourceChangeWarning
from torch.testing._internal.common_utils import (
IS_FILESYSTEM_UTF8_ENCODING, TemporaryDirectoryName,
TestCase, IS_FBCODE, IS_WINDOWS, TEST_DILL, run_tests, download_file, BytesIOContext, TemporaryFileName,
parametrize, instantiate_parametrized_tests, AlwaysWarnTypedStorageRemoval, serialTest, skipIfTorchDynamo)
from torch._utils_internal import get_file_path_2
from torch.serialization import (
check_module_version_greater_or_equal,
get_default_load_endianness,
LoadEndianness,
set_default_load_endianness,
SourceChangeWarning,
)
from torch.testing._internal.common_device_type import instantiate_device_type_tests
from torch.testing._internal.common_dtype import all_types_and_complex_and
from torch.testing._internal.common_utils import (
AlwaysWarnTypedStorageRemoval,
BytesIOContext,
download_file,
instantiate_parametrized_tests,
IS_FBCODE,
IS_FILESYSTEM_UTF8_ENCODING,
IS_WINDOWS,
parametrize,
run_tests,
serialTest,
skipIfTorchDynamo,
TemporaryDirectoryName,
TemporaryFileName,
TEST_DILL,
TestCase,
)
from torch.testing._internal.two_tensor import TwoTensor # noqa: F401
from torch.utils._import_utils import import_dill
if not IS_WINDOWS:
from mmap import MAP_SHARED, MAP_PRIVATE
from mmap import MAP_PRIVATE, MAP_SHARED
else:
MAP_SHARED, MAP_PRIVATE = None, None
@ -988,7 +1006,7 @@ class TestSerialization(TestCase, SerializationMixin):
model = torch.nn.Conv2d(20, 3200, kernel_size=3)
with TemporaryFileName() as fname:
path = pathlib.Path(fname)
path = Path(fname)
torch.save(model.state_dict(), path)
torch.load(path, weights_only=weights_only)
@ -4008,7 +4026,7 @@ class TestSerialization(TestCase, SerializationMixin):
finally:
set_default_load_endianness(current_load_endian)
@parametrize('path_type', (str, pathlib.Path))
@parametrize('path_type', (str, Path))
@parametrize('weights_only', (True, False))
@unittest.skipIf(IS_WINDOWS, "NamedTemporaryFile on windows")
def test_serialization_mmap_loading(self, weights_only, path_type):

View File

@ -1,13 +1,16 @@
# Owner(s): ["module: unknown"]
import expecttest
import io
import numpy as np
import os
import shutil
import sys
import tempfile
import unittest
from pathlib import Path
import expecttest
import numpy as np
TEST_TENSORBOARD = True
try:
@ -36,14 +39,15 @@ skipIfNoMatplotlib = unittest.skipIf(not TEST_MATPLOTLIB, "no matplotlib")
import torch
from torch.testing._internal.common_utils import (
instantiate_parametrized_tests,
IS_MACOS,
IS_WINDOWS,
parametrize,
TestCase,
run_tests,
TEST_WITH_CROSSREF,
IS_WINDOWS,
IS_MACOS,
TestCase,
)
def tensor_N(shape, dtype=float):
numel = np.prod(shape)
x = (np.arange(numel, dtype=dtype)).reshape(shape)
@ -75,15 +79,16 @@ class BaseTestCase(TestCase):
if TEST_TENSORBOARD:
from tensorboard.compat.proto.graph_pb2 import GraphDef
from torch.utils.tensorboard import summary, SummaryWriter
from torch.utils.tensorboard._utils import _prepare_video, convert_to_HWC
from tensorboard.compat.proto.types_pb2 import DataType
from torch.utils.tensorboard.summary import int_to_half, tensor_proto
from torch.utils.tensorboard._convert_np import make_np
from torch.utils.tensorboard._pytorch_graph import graph
from google.protobuf import text_format
from PIL import Image
from tensorboard.compat.proto.graph_pb2 import GraphDef
from tensorboard.compat.proto.types_pb2 import DataType
from torch.utils.tensorboard import summary, SummaryWriter
from torch.utils.tensorboard._convert_np import make_np
from torch.utils.tensorboard._pytorch_graph import graph
from torch.utils.tensorboard._utils import _prepare_video, convert_to_HWC
from torch.utils.tensorboard.summary import int_to_half, tensor_proto
class TestTensorBoardPyTorchNumpy(BaseTestCase):
def test_pytorch_np(self):
@ -289,9 +294,8 @@ class TestTensorBoardSummaryWriter(BaseTestCase):
self.assertTrue(passed)
def test_pathlib(self):
import pathlib
with tempfile.TemporaryDirectory(prefix="test_tensorboard_pathlib") as d:
p = pathlib.Path(d)
p = Path(d)
with SummaryWriter(p) as writer:
writer.add_scalar('test', 1)

View File

@ -1,6 +1,6 @@
import pathlib
import sys
import textwrap
from pathlib import Path
def check(path):
@ -62,7 +62,7 @@ if __name__ == "__main__":
if len(argv) != 2:
raise ValueError("Usage : python check_tests_conform path/to/file/or/dir")
path = pathlib.Path(argv[1])
path = Path(argv[1])
if path.is_dir():
# run for all files in the directory (no subdirs)

View File

@ -9,16 +9,14 @@ import itertools
import mmap
import operator
import os
import pathlib
import sys
import tempfile
import warnings
import weakref
from contextlib import contextmanager
from decimal import Decimal
from pathlib import Path
from tempfile import mkstemp
from unittest import expectedFailure as xfail, skipIf as skipif, SkipTest
import numpy
@ -37,6 +35,7 @@ from torch.testing._internal.common_utils import (
xpassIfTorchDynamo,
)
# If we are going to trace through these, we should use NumPy
# If testing on eager mode, we use torch._numpy
if TEST_WITH_TORCHDYNAMO:
@ -3866,7 +3865,7 @@ class TestIO(TestCase):
assert_array_equal(y, x.flat)
def test_roundtrip_dump_pathlib(self, x, tmp_filename):
p = pathlib.Path(tmp_filename)
p = Path(tmp_filename)
x.dump(p)
y = np.load(p, allow_pickle=True)
assert_array_equal(y, x)

View File

@ -12,10 +12,10 @@ Run with no arguments.
import argparse
import datetime
import os
import pathlib
import re
import subprocess
import sys
from pathlib import Path
from subprocess import DEVNULL
from typing import Any
@ -30,7 +30,7 @@ def read_sub_write(path: str, prefix_pat: str, new_default: int) -> None:
def main(args: Any) -> None:
pytorch_dir = pathlib.Path(__file__).parent.parent.parent.resolve()
pytorch_dir = Path(__file__).parent.parent.parent.resolve()
onnx_dir = pytorch_dir / "third_party" / "onnx"
os.chdir(onnx_dir)

View File

@ -2,8 +2,8 @@ from __future__ import annotations
import argparse
import os
import pathlib
import sys
from pathlib import Path
from typing import Any, cast
import yaml
@ -19,7 +19,7 @@ TAGS_PATH = "aten/src/ATen/native/tags.yaml"
def generate_code(
gen_dir: pathlib.Path,
gen_dir: Path,
native_functions_path: str | None = None,
tags_path: str | None = None,
install_dir: str | None = None,
@ -41,7 +41,7 @@ def generate_code(
autograd_gen_dir = os.path.join(install_dir, "autograd", "generated")
for d in (autograd_gen_dir, python_install_dir):
os.makedirs(d, exist_ok=True)
autograd_dir = os.fspath(pathlib.Path(__file__).parent.parent / "autograd")
autograd_dir = os.fspath(Path(__file__).parent.parent / "autograd")
if subset == "pybindings" or not subset:
gen_autograd_python(
@ -133,8 +133,8 @@ def main() -> None:
parser.add_argument("--tags-path")
parser.add_argument(
"--gen-dir",
type=pathlib.Path,
default=pathlib.Path("."),
type=Path,
default=Path("."),
help="Root directory where to install files. Defaults to the current working directory.",
)
parser.add_argument(

View File

@ -1,7 +1,7 @@
import pathlib
import sys
from pathlib import Path
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
REPO_ROOT = Path(__file__).resolve().parent.parent.parent
sys.path.append(str(REPO_ROOT))
from tools.stats.import_test_stats import get_test_class_times, get_test_times

View File

@ -5,12 +5,12 @@ from __future__ import annotations
import datetime
import json
import os
import pathlib
import shutil
from pathlib import Path
from typing import Any, Callable, cast, Dict
from urllib.request import urlopen
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
REPO_ROOT = Path(__file__).resolve().parent.parent.parent
def get_disabled_issues() -> list[str]:
@ -22,7 +22,7 @@ def get_disabled_issues() -> list[str]:
SLOW_TESTS_FILE = ".pytorch-slow-tests.json"
DISABLED_TESTS_FILE = ".pytorch-disabled-tests.json"
ADDITIONAL_CI_FILES_FOLDER = pathlib.Path(".additional_ci_files")
ADDITIONAL_CI_FILES_FOLDER = Path(".additional_ci_files")
TEST_TIMES_FILE = "test-times.json"
TEST_CLASS_TIMES_FILE = "test-class-times.json"
TEST_FILE_RATINGS_FILE = "test-file-ratings.json"
@ -36,7 +36,7 @@ FILE_CACHE_LIFESPAN_SECONDS = datetime.timedelta(hours=3).seconds
def fetch_and_cache(
dirpath: str | pathlib.Path,
dirpath: str | Path,
name: str,
url: str,
process_fn: Callable[[dict[str, Any]], dict[str, Any]],
@ -44,7 +44,7 @@ def fetch_and_cache(
"""
This fetch and cache utils allows sharing between different process.
"""
pathlib.Path(dirpath).mkdir(exist_ok=True)
Path(dirpath).mkdir(exist_ok=True)
path = os.path.join(dirpath, name)
print(f"Downloading {url} to {path}")
@ -52,7 +52,7 @@ def fetch_and_cache(
def is_cached_file_valid() -> bool:
# Check if the file is new enough (see: FILE_CACHE_LIFESPAN_SECONDS). A real check
# could make a HEAD request and check/store the file's ETag
fname = pathlib.Path(path)
fname = Path(path)
now = datetime.datetime.now()
mtime = datetime.datetime.fromtimestamp(fname.stat().st_mtime)
diff = now - mtime

View File

@ -3,13 +3,13 @@ from __future__ import annotations
import io
import json
import pathlib
import sys
import unittest
from pathlib import Path
from typing import Any
from unittest import mock
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent.parent
REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent
sys.path.append(str(REPO_ROOT))
from tools.test.heuristics.test_interface import TestTD
from tools.testing.target_determination.determinator import TestPrioritizations

View File

@ -1,11 +1,11 @@
from __future__ import annotations
import pathlib
import sys
import unittest
from pathlib import Path
from typing import Any
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent.parent
REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent
sys.path.append(str(REPO_ROOT))
import tools.testing.target_determination.heuristics.interface as interface
from tools.testing.test_run import TestRun

View File

@ -1,12 +1,12 @@
from __future__ import annotations
import pathlib
import sys
import unittest
from pathlib import Path
from typing import Any
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent.parent
REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent
sys.path.append(str(REPO_ROOT))
import tools.testing.target_determination.heuristics.utils as utils
from tools.testing.test_run import TestRun

View File

@ -1,8 +1,8 @@
import pathlib
import sys
import unittest
from pathlib import Path
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
REPO_ROOT = Path(__file__).resolve().parent.parent.parent
try:
# using tools/ to optimize test run.
sys.path.append(str(REPO_ROOT))

View File

@ -1,13 +1,13 @@
from __future__ import annotations
import functools
import pathlib
import random
import sys
import unittest
from collections import defaultdict
from pathlib import Path
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
REPO_ROOT = Path(__file__).resolve().parent.parent.parent
try:
# using tools/ to optimize test run.
sys.path.append(str(REPO_ROOT))

View File

@ -2,13 +2,13 @@ from __future__ import annotations
import decimal
import inspect
import pathlib
import sys
import unittest
from pathlib import Path
from typing import Any
from unittest import mock
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
REPO_ROOT = Path(__file__).resolve().parent.parent.parent
sys.path.insert(0, str(REPO_ROOT))
from tools.stats.upload_metrics import add_global_metric, emit_metric

View File

@ -1,10 +1,10 @@
import json
import os
import pathlib
import sys
from pathlib import Path
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
REPO_ROOT = Path(__file__).resolve().parent.parent.parent
sys.path.insert(0, str(REPO_ROOT))
from tools.stats.import_test_stats import (

View File

@ -4,15 +4,15 @@ from __future__ import annotations
import argparse
import fnmatch
import pathlib
import subprocess
import textwrap
from pathlib import Path
from typing import Any
import yaml
REPO_ROOT = pathlib.Path(__file__).parent.parent.parent
REPO_ROOT = Path(__file__).parent.parent.parent
CONFIG_YML = REPO_ROOT / ".circleci" / "config.yml"
WORKFLOWS_DIR = REPO_ROOT / ".github" / "workflows"

View File

@ -2,12 +2,12 @@ from __future__ import annotations
import modulefinder
import os
import pathlib
import sys
import warnings
from pathlib import Path
from typing import Any
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
REPO_ROOT = Path(__file__).resolve().parent.parent.parent
# These tests are slow enough that it's worth calculating whether the patch
# touched any related files first. This list was manually generated, but for every

View File

@ -2,10 +2,10 @@ from __future__ import annotations
import json
import os
import pathlib
from pathlib import Path
from typing import Any
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent.parent
REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent
def gen_ci_artifact(included: list[Any], excluded: list[Any]) -> None:

View File

@ -1,14 +1,15 @@
# mypy: allow-untyped-defs
import inspect
import pathlib
import sys
import typing
from collections import defaultdict
from pathlib import Path
from types import CodeType
from typing import Dict, Iterable, List, Optional
import torch
_IS_MONKEYTYPE_INSTALLED = True
try:
import monkeytype # type: ignore[import]
@ -189,5 +190,5 @@ def jit_code_filter(code: CodeType) -> bool:
):
return False
filename = pathlib.Path(code.co_filename).resolve()
filename = Path(code.co_filename).resolve()
return not any(_startswith(filename, lib_path) for lib_path in LIB_PATHS)

View File

@ -1,7 +1,7 @@
# mypy: allow-untyped-defs
import os
import pathlib
from collections import defaultdict
from pathlib import Path
from typing import Any, Dict, List, Set, Tuple, Union
@ -211,7 +211,7 @@ def get_method_definitions(
# 3. Remove first argument after self (unless it is "*datapipes"), default args, and spaces
"""
if root == "":
root = str(pathlib.Path(__file__).parent.resolve())
root = str(Path(__file__).parent.resolve())
file_path = [file_path] if isinstance(file_path, str) else file_path
file_path = [os.path.join(root, path) for path in file_path]
file_paths = find_file_paths(
@ -288,7 +288,7 @@ def main() -> None:
mapDP_method_to_special_output_type,
)
path = pathlib.Path(__file__).parent.resolve()
path = Path(__file__).parent.resolve()
replacements = [
("${IterDataPipeMethods}", iter_method_definitions, 4),
("${MapDataPipeMethods}", map_method_definitions, 4),

View File

@ -64,21 +64,18 @@ Possible improvements:
(they probably don't work at all right now).
"""
import sys
import os
import io
import pathlib
import re
import argparse
import zipfile
import io
import json
import os
import pickle
import pprint
import re
import sys
import urllib.parse
from typing import (
Dict,
)
import zipfile
from pathlib import Path
from typing import Dict
import torch.utils.show_pickle
@ -201,7 +198,7 @@ def get_model_info(
file_size = path_or_file.stat().st_size # type: ignore[attr-defined]
elif isinstance(path_or_file, str):
default_title = path_or_file
file_size = pathlib.Path(path_or_file).stat().st_size
file_size = Path(path_or_file).stat().st_size
else:
default_title = "buffer"
path_or_file.seek(0, io.SEEK_END)

View File

@ -2,9 +2,9 @@ from __future__ import annotations
import argparse
import os
import pathlib
import re
from collections import Counter, defaultdict, namedtuple
from pathlib import Path
from typing import Sequence
import yaml
@ -529,7 +529,7 @@ def run(
source_yaml: str, output_dir: str, dry_run: bool, impl_path: str | None = None
) -> None:
# Assumes that this file lives at PYTORCH_ROOT/torchgen/gen_backend_stubs.py
pytorch_root = pathlib.Path(__file__).parent.parent.absolute()
pytorch_root = Path(__file__).parent.parent.absolute()
template_dir = os.path.join(pytorch_root, "aten/src/ATen/templates")
def make_file_manager(install_dir: str) -> FileManager:

View File

@ -2,8 +2,8 @@ from __future__ import annotations
import argparse
import os
import pathlib
from collections import namedtuple
from pathlib import Path
from typing import Any, Callable, Iterable, Iterator, Sequence
import yaml
@ -252,7 +252,7 @@ def main() -> None:
options = parser.parse_args()
# Assumes that this file lives at PYTORCH_ROOT/torchgen/gen_backend_stubs.py
torch_root = pathlib.Path(__file__).parent.parent.parent.absolute()
torch_root = Path(__file__).parent.parent.parent.absolute()
aten_path = str(torch_root / "aten" / "src" / "ATen")
lazy_ir_generator: type[GenLazyIR] = default_args.lazy_ir_generator
if options.gen_ts_lowerings: