Use absolute path: path.resolve() -> path.absolute() (#129409)

Changes:
1. Always call `.absolute()` explicitly: `Path(__file__)` -> `Path(__file__).absolute()`
2. Replace `path.resolve()` with `path.absolute()` if the code is resolving the PyTorch repo root directory.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/129409
Approved by: https://github.com/albanD
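For reference, a minimal sketch of the behavioral difference this commit relies on (the printed paths are illustrative, not from the PR):

from pathlib import Path

# Both methods return an absolute path, but absolute() merely prepends
# the current working directory, while resolve() also normalizes ".."
# components and follows symlinks (touching the filesystem to do so).
# For locating the repo root from __file__, absolute() is cheaper and
# keeps a symlinked checkout pointing at the symlink, not its target.
p = Path("subdir/../script.py")

print(p.absolute())  # e.g. /home/user/repo/subdir/../script.py (no normalization)
print(p.resolve())   # e.g. /home/user/repo/script.py (canonicalized)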
committed by PyTorch MergeBot
parent e9e18a9617 · commit 45411d1fc9
.github/scripts/build_triton_wheel.py (vendored, 2 changed lines)

@@ -9,7 +9,7 @@ from tempfile import TemporaryDirectory
 from typing import Optional
 
 
-SCRIPT_DIR = Path(__file__).parent
+SCRIPT_DIR = Path(__file__).absolute().parent
 REPO_DIR = SCRIPT_DIR.parent.parent
 
 
@@ -12,8 +12,9 @@ import requests
 from gitutils import retries_decorator
 
 
-REPO_ROOT = Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).absolute().parents[2]
 sys.path.insert(0, str(REPO_ROOT))
 
 from tools.testing.clickhouse import query_clickhouse
 
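As a side note on the `parents[2]` spelling used above, a quick sketch (the file path is hypothetical) of the equivalence:

from pathlib import Path

f = Path("/repo/.github/scripts/fetch.py")  # hypothetical file

# Path.parents is a sequence of ancestors: parents[0] == parent,
# parents[1] == parent.parent, and so on, so parents[2] is the same
# walk as three chained .parent hops.
assert f.parents[2] == f.parent.parent.parent == Path("/repo")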
.github/scripts/collect_ciflow_labels.py (vendored, 2 changed lines)

@@ -7,7 +7,7 @@ from typing import Any, cast, Dict, List, Set
 import yaml
 
 
-GITHUB_DIR = Path(__file__).parent.parent
+GITHUB_DIR = Path(__file__).absolute().parent.parent
 
 
 def get_workflows_push_tags() -> Set[str]:
.github/scripts/delete_old_branches.py (vendored, 2 changed lines)

@@ -22,7 +22,7 @@ TOKEN = os.environ["GITHUB_TOKEN"]
 if not TOKEN:
     raise Exception("GITHUB_TOKEN is not set")  # noqa: TRY002
 
-REPO_ROOT = Path(__file__).parents[2]
+REPO_ROOT = Path(__file__).absolute().parents[2]
 
 # Query for all PRs instead of just closed/merged because it's faster
 GRAPHQL_ALL_PRS_BY_UPDATED_AT = """
@@ -6,7 +6,7 @@ from pathlib import Path
 import yaml
 
 
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = Path(__file__).absolute().parents[2]
 WORKFLOWS = REPO_ROOT / ".github" / "workflows"
 EXPECTED_GROUP_PREFIX = (
     "${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}"
.github/scripts/generate_ci_workflows.py (vendored, 2 changed lines)

@@ -13,7 +13,7 @@ import jinja2
 
 Arch = Literal["windows", "linux", "macos"]
 
-GITHUB_DIR = Path(__file__).resolve().parent.parent
+GITHUB_DIR = Path(__file__).absolute().parent.parent
 
 LABEL_CIFLOW_TRUNK = "ciflow/trunk"
 LABEL_CIFLOW_UNSTABLE = "ciflow/unstable"
.github/scripts/gitutils.py (vendored, 2 changed lines)

@@ -32,7 +32,7 @@ def get_git_remote_name() -> str:
 def get_git_repo_dir() -> str:
     from pathlib import Path
 
-    return os.getenv("GIT_REPO_DIR", str(Path(__file__).resolve().parents[2]))
+    return os.getenv("GIT_REPO_DIR", str(Path(__file__).absolute().parents[2]))
 
 
 def fuzzy_list_to_dict(items: List[Tuple[str, str]]) -> Dict[str, List[str]]:
.github/scripts/lint_native_functions.py (vendored, 2 changed lines)

@@ -26,7 +26,7 @@ def fn(base: str) -> str:
     return str(base / Path("aten/src/ATen/native/native_functions.yaml"))
 
 
-with open(Path(__file__).parents[2] / fn(".")) as f:
+with open(Path(__file__).absolute().parents[2] / fn(".")) as f:
     contents = f.read()
 
 yaml = ruamel.yaml.YAML()  # type: ignore[attr-defined]
.github/scripts/test_gitutils.py (vendored, 2 changed lines)

@@ -12,7 +12,7 @@ from gitutils import (
 )
 
 
-BASE_DIR = Path(__file__).parent
+BASE_DIR = Path(__file__).absolute().parent
 
 
 class TestPeekableIterator(TestCase):
.github/scripts/trymerge.py (vendored, 4 changed lines)

@@ -669,7 +669,7 @@ def get_ghstack_prs(
         if not open_only or not candidate.is_closed():
             return False
         print(
-            f"Skipping {idx+1} of {len(rev_list)} PR (#{candidate.pr_num}) as its already been merged"
+            f"Skipping {idx + 1} of {len(rev_list)} PR (#{candidate.pr_num}) as its already been merged"
         )
         return True
 
@@ -1596,7 +1596,7 @@ def save_merge_record(
            "_id": f"{project}-{pr_num}-{comment_id}-{os.environ.get('GITHUB_RUN_ID')}",
         }
     ]
-    repo_root = Path(__file__).resolve().parent.parent.parent
+    repo_root = Path(__file__).absolute().parents[2]
 
     with open(repo_root / "merge_record.json", "w") as f:
         json.dump(data, f)
@@ -85,7 +85,7 @@ def write_kernel(kernel: Kernel, autogen_dir: Path) -> None:
 
 def main(output_dir: Optional[str]) -> None:
     if output_dir is None:
-        output_dir = Path(__file__).parent
+        output_dir = Path(__file__).absolute().parent
     else:
         output_dir = Path(output_dir)
 
@@ -380,7 +380,7 @@ void dispatch_{family_name}(T cb, int cc = 0) {{
 
 def main(output_dir: Optional[str]) -> None:
     if output_dir is None:
-        output_dir = Path(__file__).parent
+        output_dir = Path(__file__).absolute().parent
     else:
         output_dir = Path(output_dir)
     write_decl_impl(
@@ -232,7 +232,7 @@ def main(argv):
         )
     )
 
-    out_dir = Path(__file__).parent
+    out_dir = Path(__file__).absolute().parent
 
     (out_dir / "nnapi_wrapper.h").write_text(
         PREFIX
@@ -330,7 +330,7 @@ def get_json_config(file_name: str, id: str):
         file_name (str): name of configuration file to load
         id (str): configuration that will be loaded
     """
-    with open(Path(__file__).parent / file_name) as f:
+    with open(Path(__file__).absolute().parent / file_name) as f:
         json_config = json.load(f)[id]
     return json_config
 
@@ -16,7 +16,7 @@ matplotlib.use("Agg")
 
 
 # Create a directory for the images, if it doesn't exist
-ACTIVATION_IMAGE_PATH = Path(__file__).parent / "activation_images"
+ACTIVATION_IMAGE_PATH = Path(__file__).absolute().parent / "activation_images"
 
 if not ACTIVATION_IMAGE_PATH.exists():
     ACTIVATION_IMAGE_PATH.mkdir()
@@ -10,7 +10,10 @@ def get_correct_mypy_version():
     (match,) = re.finditer(
         r"mypy==(\d+(?:\.\d+)*)",
         (
-            Path(__file__).parent.parent / ".ci" / "docker" / "requirements-ci.txt"
+            Path(__file__).absolute().parent.parent
+            / ".ci"
+            / "docker"
+            / "requirements-ci.txt"
         ).read_text(),
     )
     (version,) = match.groups()
@@ -2391,7 +2391,8 @@
         "Set",
         "Tuple",
         "Union",
-        "defaultdict"
+        "defaultdict",
+        "Path"
     ],
     "torch.utils.data.datapipes.utils.snapshot": [
         "IterDataPipe",
@@ -2,9 +2,9 @@
 # Owner(s): ["oncall: distributed"]
 
 import os
-import pathlib
 import tempfile
 import unittest
+from pathlib import Path
 
 from numpy.testing import assert_array_equal
 
@@ -559,7 +559,7 @@ class DTensorTest(DTensorTestBase):
         import_string = (
             "import torch.distributed.tensor;" if should_import else ""
         )
-        filename = pathlib.Path(f.name)
+        filename = Path(f.name)
         err_msg = (
             (
                 "_pickle.UnpicklingError: Weights only load failed. "
@@ -5,9 +5,10 @@
 #
 # This source code is licensed under the BSD-style license found in the
 # LICENSE file in the root directory of this source tree.
-import pathlib
 
 import sys
 import unittest
+from pathlib import Path
+
 import torch.distributed.elastic.rendezvous as rdvz
 
@@ -18,7 +19,7 @@ TEST_PACKAGE_PATH = "/out_of_tree_test_package/src"
 
 class OutOfTreeRendezvousTest(unittest.TestCase):
     def test_out_of_tree_handler_loading(self):
-        current_path = str(pathlib.Path(__file__).parent.resolve())
+        current_path = str(Path(__file__).absolute().parent)
         rdvz._register_out_of_tree_handlers()
         registry_dict = rdvz.rendezvous_handler_registry._registry
 
@@ -1,11 +1,11 @@
 # Owner(s): ["oncall: distributed"]
 
 import math
-import pathlib
 import sys
+from pathlib import Path
 
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent.parent
+REPO_ROOT = Path(__file__).absolute().parents[3]
 
 sys.path.insert(0, str(REPO_ROOT))
 from tools.flight_recorder.components.types import COLLECTIVES, MatchState
@@ -2,8 +2,8 @@
 # run time cuda tests, but with the allocator using expandable segments
 
 import os
-import pathlib
 import sys
+from pathlib import Path
 
 import torch
 from torch.testing._internal.common_cuda import IS_JETSON, IS_WINDOWS
@@ -16,9 +16,10 @@ sys.path.append(pytorch_test_dir)
 from dynamo.test_cudagraphs import TestAotCudagraphs  # noqa: F401
 
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).absolute().parents[2]
 
 sys.path.insert(0, str(REPO_ROOT))
 
 from tools.stats.import_test_stats import get_disabled_tests
 
@@ -2,8 +2,8 @@
 # run time cuda tests, but with the allocator using expandable segments
 
 import os
-import pathlib
 import sys
+from pathlib import Path
 
 import torch
 from torch.testing._internal.common_cuda import IS_JETSON, IS_WINDOWS
@@ -22,7 +22,7 @@ if HAS_CUDA and not TEST_WITH_ASAN:
         CudaGraphTreeTests,
     )
 
-    REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
+    REPO_ROOT = Path(__file__).absolute().parents[2]
 
     sys.path.insert(0, str(REPO_ROOT))
     from tools.stats.import_test_stats import get_disabled_tests  # @manual
@@ -111,7 +111,7 @@ Get the path to `test/jit/fixtures`, where all test models for operator changes
 
 
 def get_fixtures_path() -> Path:
-    pytorch_dir = Path(__file__).resolve().parents[3]
+    pytorch_dir = Path(__file__).absolute().parents[3]
     fixtures_path = pytorch_dir / "test" / "jit" / "fixtures"
     return fixtures_path
 
@@ -41,7 +41,7 @@ Inherits most tests from TestNNAPI, which loads Android NNAPI models
     without the delegate API.
     """
     # First skip is needed for IS_WINDOWS or IS_MACOS to skip the tests.
-    torch_root = Path(__file__).resolve().parents[2]
+    torch_root = Path(__file__).absolute().parents[2]
     lib_path = torch_root / "build" / "lib" / "libnnapi_backend.so"
 
@@ -21,7 +21,7 @@ from torch.jit.mobile import (
 from torch.testing._internal.common_utils import run_tests, TestCase
 
 
-pytorch_test_dir = Path(__file__).resolve().parents[1]
+pytorch_test_dir = Path(__file__).absolute().parents[1]
 
 # script_module_v4.ptl and script_module_v5.ptl source code
 # class TestModule(torch.nn.Module):
@@ -9,7 +9,7 @@ from torch.testing._internal.common_utils import run_tests, TestCase
 from torchgen.operator_versions.gen_mobile_upgraders import sort_upgrader, write_cpp
 
 
-pytorch_caffe2_dir = Path(__file__).resolve().parents[2]
+pytorch_caffe2_dir = Path(__file__).absolute().parents[2]
 
 
 class TestLiteScriptModule(TestCase):
@@ -10,7 +10,7 @@ from torch.jit.mobile import _load_for_lite_interpreter
 from torch.testing._internal.common_utils import run_tests, TestCase
 
 
-pytorch_test_dir = Path(__file__).resolve().parents[1]
+pytorch_test_dir = Path(__file__).absolute().parents[1]
 
 
 class TestLiteScriptModule(TestCase):
@@ -3,11 +3,11 @@
 from __future__ import annotations
 
 import difflib
-import pathlib
 import platform
 import sys
 import time
 import traceback
+from pathlib import Path
 
 import numpy as np
 
@@ -169,7 +169,7 @@ torch=={torch.__version__}"""
 
 
 def save_error_report(file_name: str, text: str):
-    reports_dir = pathlib.Path("error_reports")
+    reports_dir = Path("error_reports")
     reports_dir.mkdir(parents=True, exist_ok=True)
     file_path = reports_dir / file_name
     with open(file_path, "w", encoding="utf-8") as f:
@@ -6,7 +6,7 @@ from torch.package import PackageExporter
 from torch.testing._internal.common_utils import IS_FBCODE, IS_SANDCASTLE
 
 
-packaging_directory = f"{Path(__file__).parent}/package_bc"
+packaging_directory = str(Path(__file__).absolute().parent / "package_bc")
 torch.package.package_exporter._gate_torchscript_serialization = False
 
 
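The f-string-to-`/`-operator rewrite above follows this pattern; a small sketch (the directory name comes from the diff, the rest is illustrative):

from pathlib import Path

base = Path(__file__).absolute().parent

# f-string joining hard-codes "/" and immediately degrades to str:
as_str = f"{base}/package_bc"

# The "/" operator keeps a Path and uses platform-correct separators;
# str() is applied only at the end, where a string is actually needed.
as_path = str(base / "package_bc")

assert as_str == as_path  # holds on POSIX; the Path form is also portable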
@@ -2,6 +2,7 @@
 
 import os
 import zipfile
+from pathlib import Path
 from sys import version_info
 from tempfile import TemporaryDirectory
 from textwrap import dedent
@@ -32,10 +33,8 @@ except ImportError:
     # Support the case where we run this file directly.
     from common import PackageTestCase
 
-from pathlib import Path
-
 
-packaging_directory = Path(__file__).parent
+packaging_directory = Path(__file__).absolute().parent
 
 
 @skipIf(
@@ -13,7 +13,7 @@ except ImportError:
     # Support the case where we run this file directly.
     from common import PackageTestCase
 
-packaging_directory = f"{Path(__file__).parent}/package_bc"
+packaging_directory = str(Path(__file__).absolute().parent / "package_bc")
 
 
 class TestLoadBCPackages(PackageTestCase):
@@ -198,7 +198,7 @@ class TestMisc(PackageTestCase):
     def test_load_python_version_from_package(self):
         """Tests loading a package with a python version embdded"""
         importer1 = PackageImporter(
-            f"{Path(__file__).parent}/package_e/test_nn_module.pt"
+            str(Path(__file__).absolute().parent / "package_e" / "test_nn_module.pt")
         )
         self.assertEqual(importer1.python_version(), "3.9.7")
 
@@ -2,6 +2,7 @@
 
 import pickle
 from io import BytesIO
+from pathlib import Path
 from sys import version_info
 from textwrap import dedent
 from unittest import skipIf
@@ -17,10 +18,8 @@ except ImportError:
     # Support the case where we run this file directly.
     from common import PackageTestCase
 
-from pathlib import Path
-
 
-packaging_directory = Path(__file__).parent
+packaging_directory = Path(__file__).absolute().parent
 
 
 class TestSaveLoad(PackageTestCase):
@@ -44,7 +44,7 @@ class TestQuantizationDocs(QuantizationTestCase):
     Current working directory when CI is running test seems to vary, this function
     looks for docs relative to this test file.
     """
-    core_dir = Path(__file__).parent
+    core_dir = Path(__file__).absolute().parent
     assert core_dir.match("test/quantization/core/"), (
         "test_docs.py is in an unexpected location. If you've been "
         "moving files around, ensure that the test and build files have "
@@ -41,7 +41,7 @@ from torch.testing._internal.common_utils import (
 
 
 # using tools/ to optimize test run.
-REPO_ROOT = Path(__file__).resolve().parent.parent
+REPO_ROOT = Path(__file__).absolute().parent.parent
 sys.path.insert(0, str(REPO_ROOT))
 
 from tools.stats.import_test_stats import (
@@ -1,8 +1,8 @@
 # Owner(s): ["module: cuda"]
 # run time cuda tests, but with the allocator using expandable segments
 
-import pathlib
 import sys
+from pathlib import Path
 
 from test_cuda import (  # noqa: F401
     TestBlockStateAbsorption,
@@ -15,7 +15,7 @@ from torch.testing._internal.common_cuda import IS_JETSON, IS_WINDOWS
 from torch.testing._internal.common_utils import run_tests, TEST_WITH_ROCM
 
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
+REPO_ROOT = Path(__file__).absolute().parents[1]
 sys.path.insert(0, str(REPO_ROOT))
 
 from tools.stats.import_test_stats import get_disabled_tests
@@ -7,7 +7,6 @@ import gc
 import gzip
 import io
 import os
-import pathlib
 import pickle
 import platform
 import re
@@ -4445,7 +4444,7 @@ class TestSerialization(TestCase, SerializationMixin):
         with tempfile.NamedTemporaryFile() as f:
             njt = torch.nested.nested_tensor([[1, 2, 3], [4, 5]], layout=torch.jagged)
             torch.save(njt, f)
-            filename = pathlib.Path(f.name)
+            filename = Path(f.name)
             import_string = "import torch._dynamo;" if should_import else ""
             err_msg = (
                 "_pickle.UnpicklingError: Weights only load failed. ``torch.nested`` and ``torch._dynamo``"
@@ -89,7 +89,7 @@ class TestTypeHints(TestCase):
         """
         Run documentation examples through mypy.
         """
-        fn = Path(__file__).resolve().parent / "generated_type_hints_smoketest.py"
+        fn = Path(__file__).absolute().parent / "generated_type_hints_smoketest.py"
         fn.write_text(get_all_examples())
 
         # OK, so here's the deal. mypy treats installed packages
@@ -130,7 +130,7 @@ class TestTypeHints(TestCase):
             )
         except OSError:
             raise unittest.SkipTest("cannot symlink") from None
-        repo_rootdir = Path(__file__).resolve().parent.parent
+        repo_rootdir = Path(__file__).absolute().parent.parent
         # TODO: Would be better not to chdir here, this affects the
         # entire process!
         with set_cwd(str(repo_rootdir)):
@@ -13,7 +13,7 @@ from pathlib import Path
 from typing import Any
 
 
-PYTORCH_ROOTDIR = Path(__file__).resolve().parent.parent
+PYTORCH_ROOTDIR = Path(__file__).absolute().parent.parent
 TORCH_DIR = PYTORCH_ROOTDIR / "torch"
 TORCH_LIB_DIR = TORCH_DIR / "lib"
 BUILD_DIR = PYTORCH_ROOTDIR / "build"
@@ -110,7 +110,7 @@ def main() -> None:
         print("More than 100 items needs to be rebuild, run `ninja torch_python` first")
         sys.exit(-1)
     for idx, (name, cmd) in enumerate(build_plan):
-        print(f"[{idx + 1 } / {len(build_plan)}] Building {name}")
+        print(f"[{idx + 1} / {len(build_plan)}] Building {name}")
         if args.verbose:
             print(cmd)
         subprocess.check_call(["sh", "-c", cmd], cwd=BUILD_DIR)
@@ -7,7 +7,7 @@ from pathlib import Path
 
 # <project folder>
 HOME_DIR = os.environ["HOME"]
-TOOLS_FOLDER = str(Path(__file__).resolve().parents[2])
+TOOLS_FOLDER = str(Path(__file__).absolute().parents[2])
 
 
 # <profile folder>
@@ -85,7 +85,7 @@ if __name__ == "__main__":
     args.hip_version = None if args.hip_version == "" else args.hip_version
     args.xpu_version = None if args.xpu_version == "" else args.xpu_version
 
-    pytorch_root = Path(__file__).parent.parent
+    pytorch_root = Path(__file__).absolute().parent.parent
     version_path = pytorch_root / "torch" / "version.py"
     # Attempt to get tag first, fall back to sha if a tag was not found
     tagged_version = get_tag(pytorch_root)
@@ -280,7 +280,7 @@ def main(args: list[str]) -> None:
     gen_unboxing(native_functions=native_functions, cpu_fm=cpu_fm, selector=selector)
 
     if options.output_dependencies:
-        depfile_path = Path(options.output_dependencies).resolve()
+        depfile_path = Path(options.output_dependencies).absolute()
         depfile_name = depfile_path.name
         depfile_stem = depfile_path.stem
 
@@ -182,7 +182,7 @@ def check_file(
 
     for match in RESULTS_RE.finditer(proc.stdout.decode()):
         # Convert the reported path to an absolute path.
-        abs_path = str(Path(match["file"]).resolve())
+        abs_path = str(Path(match["file"]).absolute())
         message = LintMessage(
             path=abs_path,
             name=match["code"],
@@ -262,7 +262,7 @@ def main() -> None:
         print(json.dumps(err_msg._asdict()), flush=True)
         sys.exit(0)
 
-    abs_build_dir = Path(args.build_dir).resolve()
+    abs_build_dir = Path(args.build_dir).absolute()
 
     # Get the absolute path to clang-tidy and use this instead of the relative
     # path such as .lintbin/clang-tidy. The problem here is that os.chdir is
@@ -85,13 +85,13 @@ def extract_include_arg(include_dirs: list[Path], i: int, args: list[str]) -> No
     for name in PRE_INCLUDE_ARGS:
         path = extract_one(name, i, args)
         if path is not None:
-            include_dirs.insert(0, Path(path).resolve())
+            include_dirs.insert(0, Path(path).absolute())
             return
 
     for name in POST_INCLUDE_ARGS:
         path = extract_one(name, i, args)
         if path is not None:
-            include_dirs.append(Path(path).resolve())
+            include_dirs.append(Path(path).absolute())
             return
 
 
@@ -110,7 +110,7 @@ if __name__ == "__main__":
             depfile_path = Path(args[i + 1])
         elif arg == "-c":
             # Include the base path of the cuda file
-            include_dirs.append(Path(args[i + 1]).resolve().parent)
+            include_dirs.append(Path(args[i + 1]).absolute().parent)
         else:
             extract_include_arg(include_dirs, i, args)
 
@@ -30,7 +30,7 @@ def read_sub_write(path: str, prefix_pat: str, new_default: int) -> None:
 
 
 def main(args: Any) -> None:
-    pytorch_dir = Path(__file__).parents[2].resolve()
+    pytorch_dir = Path(__file__).absolute().parents[2]
     onnx_dir = pytorch_dir / "third_party" / "onnx"
     os.chdir(onnx_dir)
 
@@ -44,7 +44,7 @@ def generate_code(
     autograd_gen_dir = os.path.join(install_dir, "autograd", "generated")
     for d in (autograd_gen_dir, python_install_dir):
         os.makedirs(d, exist_ok=True)
-    autograd_dir = os.fspath(Path(__file__).parent.parent / "autograd")
+    autograd_dir = os.fspath(Path(__file__).absolute().parent.parent / "autograd")
 
     if subset == "pybindings" or not subset:
         gen_autograd_python(
@@ -2,7 +2,7 @@ import sys
 from pathlib import Path
 
 
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = Path(__file__).absolute().parents[2]
 sys.path.append(str(REPO_ROOT))
 
 from tools.stats.import_test_stats import get_test_class_times, get_test_times
@@ -11,7 +11,7 @@ from typing import Any, Callable, cast, Dict
 from urllib.request import urlopen
 
 
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = Path(__file__).absolute().parents[2]
 
 
 def get_disabled_issues() -> list[str]:
@@ -5,7 +5,7 @@ from pathlib import Path
 from typing import Any, Dict
 
 
-REPO_ROOT = Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).absolute().parents[2]
 
 
 def flatten_data(d: Dict[str, Any]) -> Dict[str, Any]:
@@ -32,7 +32,7 @@ def get_artifacts(repo: str, workflow_run_id: int, workflow_run_attempt: int) ->
         # pytorch/pytorch/WORKFLOW_ID/RUN_ATTEMPT/artifact/NAME-SUFFIX.zip
         s3_filename = re.sub(FILENAME_REGEX, "", artifact_path.name)
         upload_file_to_s3(
-            file_name=str(artifact_path.resolve()),
+            file_name=str(artifact_path.absolute()),
             bucket=BUCKET_NAME,
             key=f"{repo}/{workflow_run_id}/{workflow_run_attempt}/artifact/{s3_filename}",
         )
@@ -10,7 +10,7 @@ from typing import Any
 from unittest import mock
 
 
-REPO_ROOT = Path(__file__).resolve().parents[3]
+REPO_ROOT = Path(__file__).absolute().parents[3]
 sys.path.append(str(REPO_ROOT))
 
 from tools.test.heuristics.test_interface import TestTD
@@ -6,7 +6,7 @@ from pathlib import Path
 from typing import Any
 
 
-REPO_ROOT = Path(__file__).resolve().parents[3]
+REPO_ROOT = Path(__file__).absolute().parents[3]
 sys.path.append(str(REPO_ROOT))
 
 import tools.testing.target_determination.heuristics.interface as interface
@@ -6,7 +6,7 @@ from pathlib import Path
 from typing import Any
 
 
-REPO_ROOT = Path(__file__).resolve().parents[3]
+REPO_ROOT = Path(__file__).absolute().parents[3]
 sys.path.append(str(REPO_ROOT))
 
 import tools.testing.target_determination.heuristics.utils as utils
@@ -3,7 +3,7 @@ import unittest
 from pathlib import Path
 
 
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = Path(__file__).absolute().parents[2]
 try:
     # using tools/ to optimize test run.
     sys.path.append(str(REPO_ROOT))
@@ -8,7 +8,7 @@ from collections import defaultdict
 from pathlib import Path
 
 
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = Path(__file__).absolute().parents[2]
 try:
     # using tools/ to optimize test run.
     sys.path.append(str(REPO_ROOT))
@@ -10,7 +10,7 @@ from typing import Any
 from unittest import mock
 
 
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = Path(__file__).absolute().parents[2]
 sys.path.insert(0, str(REPO_ROOT))
 
 from tools.stats.upload_metrics import add_global_metric, emit_metric, global_metrics
@@ -9,7 +9,7 @@ from pathlib import Path
 CPP_TEST_PREFIX = "cpp"
 CPP_TEST_PATH = "build/bin"
 CPP_TESTS_DIR = os.path.abspath(os.getenv("CPP_TESTS_DIR", default=CPP_TEST_PATH))
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = Path(__file__).absolute().parents[2]
 
 
 def parse_test_module(test: str) -> str:
@@ -43,7 +43,7 @@ def discover_tests(
     ]
 
     cpp_tests_dir = (
-        f"{base_dir.parent}/{CPP_TEST_PATH}" if cpp_tests_dir is None else cpp_tests_dir
+        str(base_dir.parent / CPP_TEST_PATH) if cpp_tests_dir is None else cpp_tests_dir
     )
     # CPP test files are located under pytorch/build/bin. Unlike Python test, C++ tests
     # are just binaries and could have any name, i.e. basic or atest
@@ -4,7 +4,7 @@ import sys
 from pathlib import Path
 
 
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = Path(__file__).absolute().parents[2]
 sys.path.insert(0, str(REPO_ROOT))
 
 from tools.stats.import_test_stats import (
@@ -12,7 +12,7 @@ from typing import Any
 import yaml
 
 
-REPO_ROOT = Path(__file__).parents[2]
+REPO_ROOT = Path(__file__).absolute().parents[2]
 CONFIG_YML = REPO_ROOT / ".circleci" / "config.yml"
 WORKFLOWS_DIR = REPO_ROOT / ".github" / "workflows"
 
@@ -8,7 +8,7 @@ from pathlib import Path
 from typing import Any
 
 
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = Path(__file__).absolute().parents[2]
 
 # These tests are slow enough that it's worth calculating whether the patch
 # touched any related files first. This list was manually generated, but for every
@@ -6,7 +6,7 @@ from pathlib import Path
 from typing import Any
 
 
-REPO_ROOT = Path(__file__).resolve().parents[3]
+REPO_ROOT = Path(__file__).absolute().parents[3]
 
 
 def gen_ci_artifact(included: list[Any], excluded: list[Any]) -> None:
@@ -17,7 +17,7 @@ from tools.testing.target_determination.heuristics.utils import (
 from tools.testing.test_run import TestRun
 
 
-REPO_ROOT = Path(__file__).parents[3]
+REPO_ROOT = Path(__file__).absolute().parents[3]
 
 keyword_synonyms: dict[str, list[str]] = {
     "amp": ["mixed_precision"],
@@ -16,7 +16,7 @@ from tools.testing.target_determination.heuristics.utils import normalize_rating
 from tools.testing.test_run import TestRun
 
 
-REPO_ROOT = Path(__file__).resolve().parents[4]
+REPO_ROOT = Path(__file__).absolute().parents[4]
 
 
 class LLM(HeuristicInterface):
@@ -20,7 +20,7 @@ from tools.testing.target_determination.heuristics.utils import (
 from tools.testing.test_run import TestRun
 
 
-REPO_ROOT = Path(__file__).resolve().parents[4]
+REPO_ROOT = Path(__file__).absolute().parents[4]
 
 
 class PreviouslyFailedInPR(HeuristicInterface):
@@ -16,7 +16,7 @@ if TYPE_CHECKING:
     from tools.testing.test_run import TestRun
 
 
-REPO_ROOT = Path(__file__).resolve().parents[4]
+REPO_ROOT = Path(__file__).absolute().parents[4]
 
 
 def python_test_file_to_test_name(tests: set[str]) -> set[str]:
@@ -14,7 +14,7 @@ if TYPE_CHECKING:
     from collections.abc import Sequence
 
 
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = Path(__file__).absolute().parents[2]
 
 IS_MEM_LEAK_CHECK = os.getenv("PYTORCH_TEST_CUDA_MEM_LEAK_CHECK", "0") == "1"
 BUILD_ENVIRONMENT = os.getenv("BUILD_ENVIRONMENT", "")
@@ -9,7 +9,7 @@ import requests
 from clickhouse import query_clickhouse  # type: ignore[import]
 
 
-REPO_ROOT = Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).absolute().parents[2]
 QUERY = """
 WITH most_recent_strict_commits AS (
     SELECT
@@ -7,7 +7,7 @@ from pathlib import Path
 from typing import Any
 
 
-REPO_ROOT = Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).absolute().parents[2]
 LAST_UPDATED = 0.0
 
 
@@ -1807,7 +1807,7 @@ class AotCodeCompiler:
 @clear_on_fresh_inductor_cache
 @functools.lru_cache
 def cpp_prefix_path() -> str:
-    path = Path(__file__).parent / "codegen/cpp_prefix.h"
+    path = Path(__file__).absolute().parent / "codegen/cpp_prefix.h"
     with path.open() as f:
         content = f.read()
         _, filename = write(
@@ -2,7 +2,7 @@
 import functools
 import logging
 import os
-import pathlib
+from pathlib import Path
 from typing import Any, List
 
 from torch._inductor.metrics import get_metric_table, is_metric_table_enabled
@@ -322,7 +322,7 @@ class MultiKernelCall:
             )
         )
         _, _, path = get_path(key, "picked_kernel")
-        return pathlib.Path(path)
+        return Path(path)
 
     def load_cache(self):
         assert self.picked_kernel is None
@@ -1483,7 +1483,9 @@ def _serialize_pattern(
     return pattern
 
 
-SERIALIZED_PATTERN_PATH = Path(__file__).parent / "fx_passes" / "serialized_patterns"
+SERIALIZED_PATTERN_PATH = (
+    Path(__file__).absolute().parent / "fx_passes" / "serialized_patterns"
+)
 
 # This is the set of serialized patterns that we've registered. Used by
 # test_serialized_patterns_up_to_date() to ensure the patterns are up
@@ -7,13 +7,13 @@ import json
 import logging
 import os
 import os.path
-import pathlib
 import re
 import sys
 import tempfile
 import time
 from collections import defaultdict
 from dataclasses import dataclass, field
+from pathlib import Path
 from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
 from weakref import WeakSet
 
@@ -768,7 +768,7 @@ def make_module_path_relative(abs_path):
     a relative path relative to one of the Python search paths.
     """
 
-    abs_path = pathlib.Path(abs_path).resolve()
+    abs_path = Path(abs_path).absolute()
 
     for path in sys.path:
         try:
@@ -844,7 +844,7 @@ class TorchLogsFormatter(logging.Formatter):
         filepath = make_module_path_relative(record.pathname)
 
         prefix = (
-            f"{record.rankprefix}{shortlevel}{record.asctime}.{int(record.msecs*1000):06d} {record.process} "
+            f"{record.rankprefix}{shortlevel}{record.asctime}.{int(record.msecs * 1000):06d} {record.process} "
             f"{filepath}:"
             f"{record.lineno}]{record.traceid}{record.artifactprefix}"
         )
@@ -8,7 +8,7 @@ import contextlib
 import dataclasses
 import datetime
 import logging
-import pathlib
+from pathlib import Path
 from typing import Any, Callable, TYPE_CHECKING
 
 import torch
@@ -91,7 +91,7 @@ class CaptureStrategy(abc.ABC):
         """
         self._verbose_print = _verbose_printer(verbose)
         self._dump = dump
-        self._artifacts_dir = pathlib.Path(artifacts_dir)
+        self._artifacts_dir = Path(artifacts_dir)
         self._timestamp = timestamp or datetime.datetime.now().strftime(
             "%Y-%m-%d_%H-%M-%S-%f"
         )
@@ -8,10 +8,10 @@ import inspect
 import itertools
 import logging
 import operator
-import pathlib
 import textwrap
 import traceback
 import typing
+from pathlib import Path
 from typing import Any, Callable, Literal, Mapping, Sequence
 
 import onnxscript
@@ -1215,7 +1215,7 @@ def export(
     profiler = _maybe_start_profiler(profile)
 
     # Create the artifacts directory if it does not exist
-    artifacts_dir = pathlib.Path(artifacts_dir)
+    artifacts_dir = Path(artifacts_dir)
     if report or profile or dump_exported_program:
         artifacts_dir.mkdir(parents=True, exist_ok=True)
 
@@ -1011,7 +1011,7 @@ class PackageExporter:
 
     def _write_mock_file(self):
         if "_mock.py" not in self._written_files:
-            mock_file = str(Path(__file__).parent / "_mock.py")
+            mock_file = str(Path(__file__).absolute().parent / "_mock.py")
             self._write_source_string("_mock", _read_file(mock_file), is_package=False)
 
     def _execute_dependency_graph(self):
@@ -22,7 +22,6 @@ import logging
 import math
 import operator
 import os
-import pathlib
 import platform
 import random
 import re
@@ -989,9 +988,9 @@ UNITTEST_ARGS = [sys.argv[0]] + remaining
 torch.manual_seed(SEED)
 
 # CI Prefix path used only on CI environment
-CI_TEST_PREFIX = str(Path(os.getcwd()))
-CI_PT_ROOT = str(Path(os.getcwd()).parent)
-CI_FUNCTORCH_ROOT = str(os.path.join(Path(os.getcwd()).parent, "functorch"))
+CI_TEST_PREFIX = str(Path.cwd())
+CI_PT_ROOT = str(Path(CI_TEST_PREFIX).parent)
+CI_FUNCTORCH_ROOT = str(Path(CI_PT_ROOT) / "functorch")
 
 def wait_for_process(p, timeout=None):
     try:
@@ -4358,7 +4357,7 @@ class TestCase(expecttest.TestCase):
 
     def _attempt_load_from_subprocess(
         self,
-        file: pathlib.Path,
+        file: Path,
         import_string: str,
         expected_failure_message: Optional[str] = None
     ) -> None:
@@ -4367,7 +4366,7 @@ class TestCase(expecttest.TestCase):
         weights_only `torch.load` works as expected without global imports.
 
         Args:
-            file (pathlib.Path): The path to the checkpoint to load.
+            file (Path): The path to the checkpoint to load.
             import_string (str): import string to add to the script
             exected_failure_message (str, optional): The expected failure message if the
                 checkpoint fails to load. If None, the test will pass
@@ -5010,11 +5009,11 @@ def disable_gc():
 def find_library_location(lib_name: str) -> Path:
     # return the shared library file in the installed folder if exist,
     # else the file in the build folder
-    torch_root = Path(torch.__file__).resolve().parent
+    torch_root = Path(torch.__file__).absolute().parent
     path = torch_root / 'lib' / lib_name
     if os.path.exists(path):
         return path
-    torch_root = Path(__file__).resolve().parents[2]
+    torch_root = Path(__file__).absolute().parents[2]
     return torch_root / 'build' / 'lib' / lib_name
 
 def skip_but_pass_in_sandcastle(reason):
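A short sketch of the `Path.cwd()` equivalences the rewritten CI constants above rely on (names lowercased for illustration):

import os
from pathlib import Path

# Path.cwd() is the pathlib-native spelling of Path(os.getcwd()).
assert Path.cwd() == Path(os.getcwd())

# Deriving each constant from the previous one also avoids calling
# getcwd() three separate times.
ci_test_prefix = str(Path.cwd())
ci_pt_root = str(Path(ci_test_prefix).parent)
ci_functorch_root = str(Path(ci_pt_root) / "functorch")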
@@ -126,11 +126,11 @@ def _find_rocm_home() -> Optional[str]:
         # Guess #2
         hipcc_path = shutil.which('hipcc')
         if hipcc_path is not None:
-            rocm_home = os.path.dirname(os.path.dirname(
-                os.path.realpath(hipcc_path)))
+            rocm_home_path = Path(hipcc_path).resolve().parent.parent
             # can be either <ROCM_HOME>/hip/bin/hipcc or <ROCM_HOME>/bin/hipcc
-            if os.path.basename(rocm_home) == 'hip':
-                rocm_home = os.path.dirname(rocm_home)
+            if rocm_home_path.name == 'hip':
+                rocm_home_path = rocm_home_path.parent
+            rocm_home = str(rocm_home_path)
         else:
             # Guess #3
             fallback_path = '/opt/rocm'
@@ -147,8 +147,7 @@ def _find_sycl_home() -> Optional[str]:
     # Guess 1: for source code build developer/user, we'll have icpx in PATH,
     # which will tell us the SYCL_HOME location.
     if icpx_path is not None:
-        sycl_home = os.path.dirname(os.path.dirname(
-            os.path.realpath(icpx_path)))
+        sycl_home = str(Path(icpx_path).resolve().parent.parent)
 
     # Guess 2: for users install Pytorch with XPU support, the sycl runtime is
     # inside intel-sycl-rt, which is automatically installed via pip dependency.
@@ -157,7 +156,7 @@ def _find_sycl_home() -> Optional[str]:
         files = importlib.metadata.files('intel-sycl-rt') or []
         for f in files:
             if f.name == "libsycl.so":
-                sycl_home = os.path.dirname(Path(f.locate()).parent.resolve())
+                sycl_home = str(Path(f.locate()).resolve().parent.parent)
                 break
     except importlib.metadata.PackageNotFoundError:
         print("Trying to find SYCL_HOME from intel-sycl-rt package, but it is not installed.",
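A standalone sketch of the nested-`os.path` to `pathlib` translation applied in `_find_rocm_home` above (the hipcc location is hypothetical):

import os
from pathlib import Path

hipcc = "/opt/rocm/hip/bin/hipcc"  # hypothetical install location

# Old style: nested dirname() calls climb two directory levels, inside out.
old = os.path.dirname(os.path.dirname(os.path.realpath(hipcc)))

# New style: resolve() canonicalizes once, then each .parent climbs one
# level, reading left to right.
new = Path(hipcc).resolve().parent.parent

assert str(new) == old  # both yield /opt/rocm/hip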
@@ -1,7 +1,7 @@
 # mypy: allow-untyped-defs
 import os
-import pathlib
 from collections import defaultdict
+from pathlib import Path
 from typing import Any, Dict, List, Set, Tuple, Union
 
 
@@ -211,7 +211,7 @@ def get_method_definitions(
     # 3. Remove first argument after self (unless it is "*datapipes"), default args, and spaces
     """
     if root == "":
-        root = str(pathlib.Path(__file__).parent.resolve())
+        root = str(Path(__file__).absolute().parent)
     file_path = [file_path] if isinstance(file_path, str) else file_path
     file_path = [os.path.join(root, path) for path in file_path]
     file_paths = find_file_paths(
@@ -288,7 +288,7 @@ def main() -> None:
         mapDP_method_to_special_output_type,
     )
 
-    path = pathlib.Path(__file__).parent.resolve()
+    path = Path(__file__).absolute().parent
     replacements = [
         ("${IterDataPipeMethods}", iter_method_definitions, 4),
         ("${MapDataPipeMethods}", map_method_definitions, 4),
@@ -86,7 +86,7 @@ def write_decomposition_util_file(path: str) -> None:
 
 
 def main() -> None:
-    pytorch_dir = Path(__file__).resolve().parents[3]
+    pytorch_dir = Path(__file__).absolute().parents[3]
     upgrader_path = pytorch_dir / "torch" / "csrc" / "jit" / "runtime"
     write_decomposition_util_file(str(upgrader_path))
 
@@ -2752,7 +2752,7 @@ def get_torchgen_root() -> Path:
     If you're depending on torchgen out-of-tree, you can use the root to figure
     out the path to native_functions.yaml
     """
-    return Path(__file__).parent.resolve()
+    return Path(__file__).absolute().parent
 
 
 def main() -> None:
@@ -3043,7 +3043,7 @@ def main() -> None:
     gen_declarations_yaml(native_functions=native_functions, cpu_fm=cpu_fm)
 
     if options.output_dependencies:
-        depfile_path = Path(options.output_dependencies).resolve()
+        depfile_path = Path(options.output_dependencies).absolute()
         depfile_name = depfile_path.name
         depfile_stem = depfile_path.stem
 
@@ -984,7 +984,7 @@ def main() -> None:
     )
 
     if options.output_dependencies:
-        depfile_path = Path(options.output_dependencies).resolve()
+        depfile_path = Path(options.output_dependencies).absolute()
         depfile_name = depfile_path.name
         depfile_stem = depfile_path.stem
 
@@ -380,7 +380,7 @@ def main() -> None:
     for up in sorted_upgrader_list:
         print("after sort upgrader : ", next(iter(up)))
 
-    pytorch_dir = Path(__file__).resolve().parents[2]
+    pytorch_dir = Path(__file__).absolute().parents[2]
     upgrader_path = pytorch_dir / "torch" / "csrc" / "jit" / "mobile"
     write_cpp(str(upgrader_path), sorted_upgrader_list)
 
@@ -173,7 +173,7 @@ def write_decomposition_util_file(path: str) -> None:
 
 
 def main() -> None:
-    pytorch_dir = Path(__file__).resolve().parents[2]
+    pytorch_dir = Path(__file__).absolute().parents[2]
     upgrader_path = pytorch_dir / "torch" / "csrc" / "jit" / "runtime"
     write_decomposition_util_file(str(upgrader_path))
 