Mirror of https://github.com/pytorch/pytorch.git, synced 2025-10-20 21:14:14 +08:00
Revert "[BE][Easy] use pathlib.Path
instead of dirname
/ ".."
/ pardir
(#129374)"
This reverts commit 9e1f3ecaa710785a1ab03c6ad5093a5566d6c5e5. Reverted https://github.com/pytorch/pytorch/pull/129374 on behalf of https://github.com/huydhn due to Sorry for reverting your change but it is still failing with the same error ([comment](https://github.com/pytorch/pytorch/pull/129374#issuecomment-2197801405))
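The reverted change had replaced chained .parent attributes and os.path.dirname / os.pardir arithmetic with pathlib.Path.parents[N] indexing; this revert restores the older spellings throughout. The two idioms name the same directory, with an off-by-one worth keeping in mind: parents[N] climbs N + 1 levels, since parents[0] is already the containing directory. A minimal sketch of the equivalence (the example path is illustrative):

    import os.path
    from pathlib import Path

    p = Path("/repo/.github/scripts/gitutils.py")

    # parents[0] is the containing directory, so parents[2] climbs three levels.
    assert p.parents[2] == Path("/repo")
    assert p.parent.parent.parent == Path("/repo")

    # The equivalent os.path spelling needs three dirname calls.
    assert os.path.dirname(os.path.dirname(os.path.dirname(str(p)))) == "/repo"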
@@ -5,9 +5,8 @@ import sys
 import yaml
 
-
 # Need to import modules that lie on an upward-relative path
-sys.path.append(os.path.dirname(sys.path[0]))
+sys.path.append(os.path.join(sys.path[0], ".."))
 
 import cimodel.lib.miniyaml as miniyaml
.github/scripts/delete_old_branches.py (vendored, 3 changes)
@@ -9,7 +9,6 @@ from typing import Any, Callable, Dict, List, Set
 from github_utils import gh_fetch_json_dict, gh_graphql
 from gitutils import GitRepo
 
-
 SEC_IN_DAY = 24 * 60 * 60
 CLOSED_PR_RETENTION = 30 * SEC_IN_DAY
 NO_PR_RETENTION = 1.5 * 365 * SEC_IN_DAY
@@ -22,7 +21,7 @@ TOKEN = os.environ["GITHUB_TOKEN"]
 if not TOKEN:
     raise Exception("GITHUB_TOKEN is not set")  # noqa: TRY002
 
-REPO_ROOT = Path(__file__).parents[2]
+REPO_ROOT = Path(__file__).parent.parent.parent
 
 # Query for all PRs instead of just closed/merged because it's faster
 GRAPHQL_ALL_PRS_BY_UPDATED_AT = """
@@ -1,12 +1,13 @@
 #!/usr/bin/env python3
 
 import sys
 from pathlib import Path
 
 import yaml
 
-REPO_ROOT = Path(__file__).resolve().parents[2]
+
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent
 WORKFLOWS = REPO_ROOT / ".github" / "workflows"
 EXPECTED_GROUP_PREFIX = (
     "${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}"
@@ -13,7 +13,6 @@ architectures:
 import os
 from typing import Dict, List, Optional, Tuple
 
-
 CUDA_ARCHES = ["11.8", "12.1", "12.4"]
 
 
@@ -86,7 +85,7 @@ def get_nccl_submodule_version() -> str:
     from pathlib import Path
 
     nccl_version_mk = (
-        Path(__file__).absolute().parents[2]
+        Path(__file__).absolute().parent.parent.parent
         / "third_party"
         / "nccl"
         / "nccl"
.github/scripts/gitutils.py (vendored, 3 changes)
@@ -19,7 +19,6 @@ from typing import (
     Union,
 )
 
-
 T = TypeVar("T")
 
 RE_GITHUB_URL_MATCH = re.compile("^https://.*@?github.com/(.+)/(.+)$")
@@ -32,7 +31,7 @@ def get_git_remote_name() -> str:
 def get_git_repo_dir() -> str:
     from pathlib import Path
 
-    return os.getenv("GIT_REPO_DIR", str(Path(__file__).resolve().parents[2]))
+    return os.getenv("GIT_REPO_DIR", str(Path(__file__).resolve().parent.parent.parent))
 
 
 def fuzzy_list_to_dict(items: List[Tuple[str, str]]) -> Dict[str, List[str]]:
.github/scripts/lint_native_functions.py (vendored, 2 changes)
@@ -26,7 +26,7 @@ def fn(base: str) -> str:
     return str(base / Path("aten/src/ATen/native/native_functions.yaml"))
 
 
-with open(Path(__file__).parents[2] / fn(".")) as f:
+with open(Path(__file__).parent.parent.parent / fn(".")) as f:
     contents = f.read()
 
 yaml = ruamel.yaml.YAML()  # type: ignore[attr-defined]
.github/scripts/test_gitutils.py (vendored, 2 changes)
@@ -68,7 +68,7 @@ class TestRetriesDecorator(TestCase):
 
 class TestGitRepo(TestCase):
     def setUp(self) -> None:
-        repo_dir = BASE_DIR.absolute().parent.parent
+        repo_dir = BASE_DIR.parent.parent.absolute()
         if not (repo_dir / ".git").is_dir():
             raise SkipTest(
                 "Can't find git directory, make sure to run this test on real repo checkout"
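This hunk is subtler than the pure parents[N] rewrites: .absolute() and .parent do not commute on shallow relative paths, because Path("x").parent is Path(".") while Path("x").absolute().parent is the current working directory. With BASE_DIR pointing at a real directory both spellings agree; the general difference is easy to demonstrate (paths below are illustrative, not from the repo):

    from pathlib import Path

    # Assume the current working directory is /repo/.github/scripts.
    base = Path("test_gitutils.py")

    # Anchor first, then climb: .../scripts/test_gitutils.py -> /repo/.github
    anchored_first = base.absolute().parent.parent

    # Climb first, then anchor: a bare filename has no parents, so
    # .parent.parent collapses to "." and .absolute() returns the cwd.
    climbed_first = base.parent.parent.absolute()  # /repo/.github/scripts

    print(anchored_first, climbed_first, sep="\n")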
@@ -18,24 +18,23 @@ Known limitations:
 import argparse
 import json
 import os
+import pathlib
 import subprocess
 import sys
 import urllib
 from io import BytesIO
 from itertools import product
-from pathlib import Path
 from urllib.request import urlopen
 from zipfile import ZipFile
 
 import pandas as pd
 import requests
 
-
 # Note: the public query url targets this rockset lambda:
 # https://console.rockset.com/lambdas/details/commons.artifacts
 ARTIFACTS_QUERY_URL = "https://api.usw2a1.rockset.com/v1/public/shared_lambdas/4ca0033e-0117-41f5-b043-59cde19eff35"
 CSV_LINTER = str(
-    Path(__file__).absolute().parents[3]
+    pathlib.Path(__file__).absolute().parent.parent.parent.parent
     / "tools/linter/adapters/no_merge_conflict_csv_linter.py"
 )
@@ -4,12 +4,12 @@ from pathlib import Path
 
 import torch
 import torch._prims as prims
 
 from torchgen.gen import parse_native_yaml
 
 
-ROOT = Path(__file__).absolute().parents[3]
-NATIVE_FUNCTION_YAML_PATH = ROOT / "aten/src/ATen/native/native_functions.yaml"
-TAGS_YAML_PATH = ROOT / "aten/src/ATen/native/tags.yaml"
+ROOT = Path(__file__).absolute().parent.parent.parent.parent
+NATIVE_FUNCTION_YAML_PATH = ROOT / Path("aten/src/ATen/native/native_functions.yaml")
+TAGS_YAML_PATH = ROOT / Path("aten/src/ATen/native/tags.yaml")
 
 BUILD_DIR = "build/ir"
 ATEN_OPS_CSV_FILE = "aten_ops.csv"
@@ -15,7 +15,7 @@ from torch.ao.quantization.backend_config.utils import (
 
 # Create a directory for the images, if it doesn't exist
 QUANTIZATION_BACKEND_CONFIG_IMAGE_PATH = os.path.join(
-    os.path.realpath(os.path.dirname(__file__)), "quantization_backend_configs"
+    os.path.realpath(os.path.join(__file__, "..")), "quantization_backend_configs"
 )
 
 if not os.path.exists(QUANTIZATION_BACKEND_CONFIG_IMAGE_PATH):
@@ -5,15 +5,16 @@ from pathlib import Path
 
 import torch
 import torch._dynamo as torchdynamo
+
 from torch._export.db.case import ExportCase, normalize_inputs
 from torch._export.db.examples import all_examples
 from torch.export import export
 
 
 PWD = Path(__file__).absolute().parent
-ROOT = Path(__file__).absolute().parents[3]
-SOURCE = ROOT / "source"
-EXPORTDB_SOURCE = SOURCE / "generated" / "exportdb"
+ROOT = Path(__file__).absolute().parent.parent.parent.parent
+SOURCE = ROOT / Path("source")
+EXPORTDB_SOURCE = SOURCE / Path("generated") / Path("exportdb")
 
 
 def generate_example_rst(example_case: ExportCase):
@@ -1,8 +1,8 @@
 #!/usr/bin/env python3
 import argparse
 import os
+import pathlib
 import subprocess
-from pathlib import Path
 
 from common import (
     get_testcases,
@@ -12,8 +12,8 @@ from common import (
     key,
     open_test_results,
 )
-from download_reports import download_reports
 
+from download_reports import download_reports
 
 """
 Usage: update_failures.py /path/to/dynamo_test_failures.py /path/to/test commit_sha
@@ -194,7 +194,7 @@ if __name__ == "__main__":
         "filename",
         nargs="?",
         default=str(
-            Path(__file__).absolute().parents[2]
+            pathlib.Path(__file__).absolute().parent.parent.parent
            / "torch/testing/_internal/dynamo_test_failures.py"
         ),
         help="Optional path to dynamo_test_failures.py",
@@ -203,7 +203,7 @@ if __name__ == "__main__":
     parser.add_argument(
         "test_dir",
         nargs="?",
-        default=str(Path(__file__).absolute().parents[2] / "test"),
+        default=str(pathlib.Path(__file__).absolute().parent.parent.parent / "test"),
         help="Optional path to test folder",
     )
     parser.add_argument(
@@ -219,7 +219,7 @@ if __name__ == "__main__":
         action="store_true",
     )
     args = parser.parse_args()
-    assert Path(args.filename).exists(), args.filename
-    assert Path(args.test_dir).exists(), args.test_dir
+    assert pathlib.Path(args.filename).exists(), args.filename
+    assert pathlib.Path(args.test_dir).exists(), args.test_dir
     dynamo38, dynamo311 = download_reports(args.commit, ("dynamo38", "dynamo311"))
     update(args.filename, args.test_dir, dynamo38, dynamo311, args.also_remove_skips)
@@ -5,13 +5,12 @@ import zipfile
 from pathlib import Path
 from typing import Set
 
-import torch
-
 # Use asterisk symbol so developer doesn't need to import here when they add tests for upgraders.
 from test.jit.fixtures_srcs.fixtures_src import *  # noqa: F403
 
+import torch
 from torch.jit.mobile import _export_operator_list, _load_for_lite_interpreter
 
 logging.basicConfig(stream=sys.stdout, level=logging.INFO)
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.DEBUG)
@@ -9,7 +9,6 @@ import torch
 import torch._C
 from torch.testing._internal.common_utils import IS_FBCODE, skipIfTorchDynamo
 
-
 # hacky way to skip these tests in fbcode:
 # during test execution in fbcode, test_nnapi is available during test discovery,
 # but not during test execution. So we can't try-catch here, otherwise it'll think
@@ -41,7 +40,7 @@ Inherits most tests from TestNNAPI, which loads Android NNAPI models
 without the delegate API.
 """
 # First skip is needed for IS_WINDOWS or IS_MACOS to skip the tests.
-torch_root = Path(__file__).resolve().parents[2]
+torch_root = Path(__file__).resolve().parent.parent.parent
 lib_path = torch_root / "build" / "lib" / "libnnapi_backend.so"
@@ -20,7 +20,6 @@ from torch.jit.mobile import (
 )
 from torch.testing._internal.common_utils import run_tests, TestCase
 
-
 pytorch_test_dir = Path(__file__).resolve().parents[1]
 
 # script_module_v4.ptl and script_module_v5.ptl source code
@@ -6,8 +6,8 @@ from pathlib import Path
 
 from torch.jit.generate_bytecode import generate_upgraders_bytecode
 from torch.testing._internal.common_utils import run_tests, TestCase
-from torchgen.operator_versions.gen_mobile_upgraders import sort_upgrader, write_cpp
 
+from torchgen.operator_versions.gen_mobile_upgraders import sort_upgrader, write_cpp
 
 pytorch_caffe2_dir = Path(__file__).resolve().parents[2]
@@ -6,10 +6,10 @@ from pathlib import Path
 
 import torch
 import torch.utils.bundled_inputs
+
 from torch.jit.mobile import _load_for_lite_interpreter
 from torch.testing._internal.common_utils import run_tests, TestCase
 
-
 pytorch_test_dir = Path(__file__).resolve().parents[1]
@@ -3,6 +3,7 @@
 from __future__ import annotations
 
 import contextlib
+
 import copy
 import dataclasses
 import io
@@ -25,6 +26,7 @@ from typing import (
 )
 
 import numpy as np
+
 import onnxruntime
 import pytest
 import pytorch_test_common
@@ -38,7 +40,6 @@ from torch.testing._internal import common_utils
 from torch.testing._internal.opinfo import core as opinfo_core
 from torch.types import Number
 
-
 _NumericType = Union[Number, torch.Tensor, np.ndarray]
 _ModelType = Union[torch.nn.Module, Callable, torch_export.ExportedProgram]
 _InputArgsType = Optional[
@@ -47,7 +48,8 @@ _InputArgsType = Optional[
 _OutputsType = Sequence[_NumericType]
 
 onnx_model_dir = os.path.join(
-    os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
+    os.path.dirname(os.path.realpath(__file__)),
+    os.pardir,
     "repos",
     "onnx",
     "onnx",
@@ -55,7 +57,11 @@ onnx_model_dir = os.path.join(
     "test",
     "data",
 )
 
+
 pytorch_converted_dir = os.path.join(onnx_model_dir, "pytorch-converted")
+
+
 pytorch_operator_dir = os.path.join(onnx_model_dir, "pytorch-operator")
@@ -50,7 +50,7 @@ class TestQuantizationDocs(QuantizationTestCase):
                 "been updated to have the correct relative path between "
                 "test_docs.py and the docs."
             )
-            pytorch_root = core_dir.parents[2]
+            pytorch_root = core_dir.parent.parent.parent
             return pytorch_root / path_from_pytorch
 
         path_to_file = get_correct_path(path_from_pytorch)
@@ -5,6 +5,7 @@ import itertools
 import os
 import re
 import shutil
+
 import unittest
 from collections import defaultdict
 from threading import Lock
@@ -17,7 +18,6 @@ from torch.testing._internal.common_utils import (
     TestCase,
 )
 
-
 try:
     from mypy import api
 except ImportError:
@@ -30,7 +30,7 @@ DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "typing"))
 REVEAL_DIR = os.path.join(DATA_DIR, "reveal")
 PASS_DIR = os.path.join(DATA_DIR, "pass")
 FAIL_DIR = os.path.join(DATA_DIR, "fail")
-MYPY_INI = os.path.join(os.path.dirname(os.path.dirname(DATA_DIR)), "mypy.ini")
+MYPY_INI = os.path.join(DATA_DIR, os.pardir, os.pardir, "mypy.ini")
 CACHE_DIR = os.path.join(DATA_DIR, ".mypy_cache")
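The two MYPY_INI spellings locate the same file but produce different strings: os.path.dirname strips components eagerly, while joining with os.pardir leaves literal ".." segments until something normalizes them. A small sketch (directory layout assumed for illustration):

    import os.path

    data_dir = "/repo/test/typing"

    stripped = os.path.join(os.path.dirname(os.path.dirname(data_dir)), "mypy.ini")
    joined = os.path.join(data_dir, os.pardir, os.pardir, "mypy.ini")

    print(stripped)                  # /repo/mypy.ini
    print(joined)                    # /repo/test/typing/../../mypy.ini
    print(os.path.normpath(joined))  # /repo/mypy.ini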
@@ -4,15 +4,17 @@
 import argparse
 import os
 import sys
-from pathlib import Path
 
-
-REPO_ROOT = Path(__file__).absolute().parents[2]
-sys.path.append(str(REPO_ROOT / "torch" / "utils"))
+sys.path.append(
+    os.path.realpath(
+        os.path.join(
+            __file__, os.path.pardir, os.path.pardir, os.path.pardir, "torch", "utils"
+        )
+    )
+)
 
 from hipify import hipify_python  # type: ignore[import]
 
 parser = argparse.ArgumentParser(
     description="Top-level script for HIPifying, filling in most common parameters"
 )
@@ -50,7 +52,7 @@ parser.add_argument(
 args = parser.parse_args()
 
 amd_build_dir = os.path.dirname(os.path.realpath(__file__))
-proj_dir = os.path.dirname(os.path.dirname(amd_build_dir))
+proj_dir = os.path.join(os.path.dirname(os.path.dirname(amd_build_dir)))
 
 if args.project_directory:
     proj_dir = args.project_directory
@@ -1,18 +1,16 @@
 import argparse
 import sys
-from pathlib import Path
+from os.path import abspath, dirname
 
 
-# By appending REPO_ROOT to sys.path, this module can import other torch
+# By appending pytorch_root to sys.path, this module can import other torch
 # modules even when run as a standalone script. i.e., it's okay either you
 # do `python build_libtorch.py` or `python -m tools.build_libtorch`.
-REPO_ROOT = Path(__file__).absolute().parent.parent
-sys.path.append(str(REPO_ROOT))
+pytorch_root = dirname(dirname(abspath(__file__)))
+sys.path.append(pytorch_root)
 
 from tools.build_pytorch_libs import build_caffe2
 from tools.setup_helpers.cmake import CMake
 
 
 if __name__ == "__main__":
     # Placeholder for future interface. For now just gives a nice -h.
     parser = argparse.ArgumentParser(description="Build libtorch")
@@ -42,7 +42,9 @@ def get_llvm_tool_path() -> str:
 def get_pytorch_folder() -> str:
     # TOOLS_FOLDER in oss: pytorch/tools/code_coverage
     return os.path.abspath(
-        os.environ.get("PYTORCH_FOLDER", os.path.dirname(os.path.dirname(TOOLS_FOLDER)))
+        os.environ.get(
+            "PYTORCH_FOLDER", os.path.join(TOOLS_FOLDER, os.path.pardir, os.path.pardir)
+        )
     )
@@ -1,12 +1,13 @@
 import os
 from enum import Enum
-from pathlib import Path
 from typing import Dict, List, Set
 
 
 # <project folder>
 HOME_DIR = os.environ["HOME"]
-TOOLS_FOLDER = str(Path(__file__).resolve().parents[2])
+TOOLS_FOLDER = os.path.join(
+    os.path.dirname(os.path.realpath(__file__)), os.path.pardir, os.path.pardir
+)
 
 # <profile folder>
@@ -8,28 +8,24 @@ import glob
 import io
 import os
 import re
-import subprocess
 import sys
-from itertools import product
 
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
+import subprocess
 import textwrap
 from dataclasses import dataclass
+from itertools import product
 from pathlib import Path
 from typing import Any, Dict, List, Optional, Set, Tuple, Union
 
 import yaml
 from yaml.constructor import ConstructorError
 from yaml.nodes import MappingNode
 
 
 try:
     from yaml import CLoader as Loader
 except ImportError:
     from yaml import Loader  # type: ignore[assignment, misc]
 
-
-REPO_ROOT = Path(__file__).absolute().parent.parent
-sys.path.append(str(REPO_ROOT))
-
 CPP_H_NAME = "spv.h"
 CPP_SRC_NAME = "spv.cpp"
@@ -11,7 +11,6 @@ import urllib.error
 import urllib.request
 from pathlib import Path
 
-
 # String representing the host platform (e.g. Linux, Darwin).
 HOST_PLATFORM = platform.system()
 HOST_PLATFORM_ARCH = platform.system() + "-" + platform.processor()
@@ -26,7 +25,10 @@ try:
     PYTORCH_ROOT = result.stdout.decode("utf-8").strip()
 except subprocess.CalledProcessError:
     # If git is not installed, compute repo root as 3 folders up from this file
-    PYTORCH_ROOT = str(Path(__file__).absolute().parents[3])
+    path_ = os.path.abspath(__file__)
+    for _ in range(4):
+        path_ = os.path.dirname(path_)
+    PYTORCH_ROOT = path_
 
 DRY_RUN = False
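The pattern in this hunk (ask git for the repository root, fall back to a fixed number of directory hops) is self-contained enough to sketch. This is an illustrative reconstruction rather than the file's exact code; it additionally catches FileNotFoundError for a missing git binary, which the hunk does not show:

    import os
    import subprocess

    def repo_root() -> str:
        try:
            result = subprocess.run(
                ["git", "rev-parse", "--show-toplevel"],
                capture_output=True,
                check=True,
            )
            return result.stdout.decode("utf-8").strip()
        except (subprocess.CalledProcessError, FileNotFoundError):
            # Four dirname calls: one drops the file name, three climb
            # directories, matching str(Path(__file__).absolute().parents[3]).
            path = os.path.abspath(__file__)
            for _ in range(4):
                path = os.path.dirname(path)
            return path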
@@ -12,10 +12,10 @@ Run with no arguments.
 import argparse
 import datetime
 import os
+import pathlib
 import re
 import subprocess
 import sys
-from pathlib import Path
 from subprocess import DEVNULL
 from typing import Any
 
@@ -30,7 +30,7 @@ def read_sub_write(path: str, prefix_pat: str, new_default: int) -> None:
 
 
 def main(args: Any) -> None:
-    pytorch_dir = Path(__file__).parents[2].resolve()
+    pytorch_dir = pathlib.Path(__file__).parent.parent.parent.resolve()
     onnx_dir = pytorch_dir / "third_party" / "onnx"
     os.chdir(onnx_dir)
@@ -7,7 +7,6 @@ import platform
 import sys
 import sysconfig
 from distutils.version import LooseVersion
-from pathlib import Path
 from subprocess import CalledProcessError, check_call, check_output
 from typing import Any, cast, Dict, List, Optional
@@ -173,7 +172,9 @@ class CMake:
             toolset_expr = ",".join([f"{k}={v}" for k, v in toolset_dict.items()])
             args.append("-T" + toolset_expr)
 
-        base_dir = str(Path(__file__).absolute().parents[2])
+        base_dir = os.path.dirname(
+            os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+        )
         install_dir = os.path.join(base_dir, "torch")
 
         _mkdir_p(install_dir)
@@ -1,13 +1,11 @@
 # Little stub file to get BUILD.bazel to play along
 
+import os.path
 import sys
-from pathlib import Path
 
-
-REPO_ROOT = Path(__file__).absolute().parents[2]
-sys.path.insert(0, str(REPO_ROOT))
+root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+sys.path.insert(0, root)
 
 import torchgen.gen
 
 torchgen.gen.main()
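Both Bazel stubs implement the same bootstrap: put the repository root on sys.path before importing in-tree modules, so the stub also works when invoked as a plain script. A condensed sketch of the two spellings, assuming a layout like <root>/tools/<pkg>/stub.py:

    import os.path
    import sys
    from pathlib import Path

    # pathlib spelling (the state being reverted): parents[2] is three
    # levels above this file.
    sys.path.insert(0, str(Path(__file__).absolute().parents[2]))

    # os.path spelling (the state being restored): three nested dirname
    # calls on the absolute file path reach the same directory.
    sys.path.insert(
        0,
        os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
    )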
@@ -1,13 +1,11 @@
 # Little stub file to get BUILD.bazel to play along
 
+import os.path
 import sys
-from pathlib import Path
 
-
-REPO_ROOT = Path(__file__).absolute().parents[2]
-sys.path.insert(0, str(REPO_ROOT))
+root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+sys.path.insert(0, root)
 
 import tools.jit.gen_unboxing
 
 tools.jit.gen_unboxing.main(sys.argv[1:])
@@ -1,25 +1,23 @@
 import argparse
 import os
+import pathlib
 import sys
-from pathlib import Path
 from typing import Any, cast, Optional
 
 import yaml
 
-
 try:
     # use faster C loader if available
     from yaml import CSafeLoader as YamlLoader
 except ImportError:
     from yaml import SafeLoader as YamlLoader  # type: ignore[assignment, misc]
 
-
 NATIVE_FUNCTIONS_PATH = "aten/src/ATen/native/native_functions.yaml"
 TAGS_PATH = "aten/src/ATen/native/tags.yaml"
 
 
 def generate_code(
-    gen_dir: Path,
+    gen_dir: pathlib.Path,
     native_functions_path: Optional[str] = None,
     tags_path: Optional[str] = None,
     install_dir: Optional[str] = None,
@@ -30,7 +28,6 @@ def generate_code(
 ) -> None:
     from tools.autograd.gen_annotated_fn_args import gen_annotated
     from tools.autograd.gen_autograd import gen_autograd, gen_autograd_python
-
     from torchgen.selective_build.selector import SelectiveBuilder
 
     # Build ATen based Variable classes
@@ -42,7 +39,7 @@ def generate_code(
     autograd_gen_dir = os.path.join(install_dir, "autograd", "generated")
     for d in (autograd_gen_dir, python_install_dir):
         os.makedirs(d, exist_ok=True)
-    autograd_dir = os.fspath(Path(__file__).parent.parent / "autograd")
+    autograd_dir = os.fspath(pathlib.Path(__file__).parent.parent / "autograd")
 
     if subset == "pybindings" or not subset:
         gen_autograd_python(
@@ -109,9 +106,8 @@ def get_selector(
     operators_yaml_path: Optional[str],
 ) -> Any:
     # cwrap depends on pyyaml, so we can't import it earlier
-    REPO_ROOT = Path(__file__).absolute().parents[2]
-    sys.path.insert(0, str(REPO_ROOT))
-
+    root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+    sys.path.insert(0, root)
     from torchgen.selective_build.selector import SelectiveBuilder
 
     assert not (
@@ -135,8 +131,8 @@ def main() -> None:
     parser.add_argument("--tags-path")
     parser.add_argument(
         "--gen-dir",
-        type=Path,
-        default=Path("."),
+        type=pathlib.Path,
+        default=pathlib.Path("."),
         help="Root directory where to install files. Defaults to the current working directory.",
     )
     parser.add_argument(
@@ -1,10 +1,8 @@
+import pathlib
 import sys
-from pathlib import Path
 
-
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
 sys.path.append(str(REPO_ROOT))
 
 from tools.stats.import_test_stats import get_test_class_times, get_test_times
@@ -3,13 +3,12 @@
 import datetime
 import json
 import os
+import pathlib
 import shutil
-from pathlib import Path
 from typing import Any, Callable, cast, Dict, List, Optional, Union
 from urllib.request import urlopen
 
-
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
 
 
 def get_disabled_issues() -> List[str]:
@@ -21,7 +20,7 @@ def get_disabled_issues() -> List[str]:
 
 SLOW_TESTS_FILE = ".pytorch-slow-tests.json"
 DISABLED_TESTS_FILE = ".pytorch-disabled-tests.json"
-ADDITIONAL_CI_FILES_FOLDER = Path(".additional_ci_files")
+ADDITIONAL_CI_FILES_FOLDER = pathlib.Path(".additional_ci_files")
 TEST_TIMES_FILE = "test-times.json"
 TEST_CLASS_TIMES_FILE = "test-class-times.json"
 TEST_FILE_RATINGS_FILE = "test-file-ratings.json"
@@ -35,7 +34,7 @@ FILE_CACHE_LIFESPAN_SECONDS = datetime.timedelta(hours=3).seconds
 
 
 def fetch_and_cache(
-    dirpath: Union[str, Path],
+    dirpath: Union[str, pathlib.Path],
     name: str,
     url: str,
     process_fn: Callable[[Dict[str, Any]], Dict[str, Any]],
@@ -43,7 +42,7 @@ def fetch_and_cache(
     """
     This fetch and cache utils allows sharing between different process.
     """
-    Path(dirpath).mkdir(exist_ok=True)
+    pathlib.Path(dirpath).mkdir(exist_ok=True)
 
     path = os.path.join(dirpath, name)
     print(f"Downloading {url} to {path}")
@@ -51,7 +50,7 @@ def fetch_and_cache(
     def is_cached_file_valid() -> bool:
         # Check if the file is new enough (see: FILE_CACHE_LIFESPAN_SECONDS). A real check
         # could make a HEAD request and check/store the file's ETag
-        fname = Path(path)
+        fname = pathlib.Path(path)
         now = datetime.datetime.now()
         mtime = datetime.datetime.fromtimestamp(fname.stat().st_mtime)
         diff = now - mtime
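fetch_and_cache above gates re-downloads on file age. The essential mechanism, sketched independently of the PyTorch helpers (the 3-hour lifespan mirrors FILE_CACHE_LIFESPAN_SECONDS; everything else is illustrative):

    import datetime
    import pathlib
    import urllib.request

    FILE_CACHE_LIFESPAN_SECONDS = datetime.timedelta(hours=3).seconds

    def fetch_and_cache(dirpath: str, name: str, url: str) -> pathlib.Path:
        pathlib.Path(dirpath).mkdir(exist_ok=True)
        path = pathlib.Path(dirpath) / name

        # Reuse the cached copy while it is newer than the lifespan; a stronger
        # check could compare the server's ETag instead of the local mtime.
        if path.exists():
            age = datetime.datetime.now() - datetime.datetime.fromtimestamp(
                path.stat().st_mtime
            )
            if age.total_seconds() < FILE_CACHE_LIFESPAN_SECONDS:
                return path

        with urllib.request.urlopen(url) as response:
            path.write_bytes(response.read())
        return path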
@@ -1,16 +1,14 @@
 # For testing specific heuristics
 import io
 import json
+import pathlib
 import sys
 import unittest
-from pathlib import Path
 from typing import Any, Dict, List, Set
 from unittest import mock
 
-
-REPO_ROOT = Path(__file__).resolve().parents[3]
+REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent.parent
 sys.path.append(str(REPO_ROOT))
 
 from tools.test.heuristics.test_interface import TestTD
 from tools.testing.target_determination.determinator import TestPrioritizations
 from tools.testing.target_determination.heuristics.filepath import (
@@ -25,7 +23,6 @@ from tools.testing.target_determination.heuristics.previously_failed_in_pr import (
 )
 from tools.testing.test_run import TestRun
 
-
 sys.path.remove(str(REPO_ROOT))
 
 HEURISTIC_CLASS = "tools.testing.target_determination.heuristics.historical_class_failure_correlation."
@@ -1,16 +1,13 @@
+import pathlib
 import sys
 import unittest
-from pathlib import Path
 from typing import Any, Dict, List
 
-
-REPO_ROOT = Path(__file__).resolve().parents[3]
+REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent.parent
 sys.path.append(str(REPO_ROOT))
 
 import tools.testing.target_determination.heuristics.interface as interface
 from tools.testing.test_run import TestRun
 
-
 sys.path.remove(str(REPO_ROOT))
@@ -1,16 +1,14 @@
+import pathlib
 import sys
 import unittest
-from pathlib import Path
 from typing import Any, Dict
 
-
-REPO_ROOT = Path(__file__).resolve().parents[3]
+REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent.parent
 sys.path.append(str(REPO_ROOT))
 
 import tools.testing.target_determination.heuristics.utils as utils
 from tools.testing.test_run import TestRun
 
 
 sys.path.remove(str(REPO_ROOT))
@@ -1,5 +1,6 @@
 # Owner(s): ["module: codegen"]
 
+import os
 import tempfile
 import unittest
 from typing import Optional
@@ -7,8 +8,12 @@ from typing import Optional
 import expecttest
 
 from torchgen.gen import _GLOBAL_PARSE_NATIVE_YAML_CACHE  # noqa: F401
+
 from torchgen.gen_backend_stubs import run
 
+path = os.path.dirname(os.path.realpath(__file__))
+gen_backend_stubs_path = os.path.join(path, "../torchgen/gen_backend_stubs.py")
+
 
 # gen_backend_stubs.py is an integration point that is called directly by external backends.
 # The tests here are to confirm that badly formed inputs result in reasonable error messages.
@@ -1,9 +1,8 @@
+import pathlib
 import sys
 import unittest
-from pathlib import Path
 
-
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
 try:
     # using tools/ to optimize test run.
     sys.path.append(str(REPO_ROOT))
@@ -1,13 +1,12 @@
 import functools
+import pathlib
 import random
 import sys
 import unittest
 from collections import defaultdict
-from pathlib import Path
 from typing import Dict, List, Tuple
 
-
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
 try:
     # using tools/ to optimize test run.
     sys.path.append(str(REPO_ROOT))
@@ -1,18 +1,16 @@
 import decimal
 import inspect
+import pathlib
 import sys
 import unittest
-from pathlib import Path
 from typing import Any, Dict
 from unittest import mock
 
-
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
 sys.path.insert(0, str(REPO_ROOT))
 
 from tools.stats.upload_metrics import add_global_metric, emit_metric
-from tools.stats.upload_stats_lib import BATCH_SIZE, upload_to_rockset
 
+from tools.stats.upload_stats_lib import BATCH_SIZE, upload_to_rockset
 
 sys.path.remove(str(REPO_ROOT))
@@ -4,11 +4,10 @@ import sys
 from pathlib import Path
 from typing import List, Optional, Union
 
-
 CPP_TEST_PREFIX = "cpp"
 CPP_TEST_PATH = "build/bin"
 CPP_TESTS_DIR = os.path.abspath(os.getenv("CPP_TESTS_DIR", default=CPP_TEST_PATH))
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent
 
 
 def parse_test_module(test: str) -> str:
@@ -1,10 +1,10 @@
 import json
 import os
+import pathlib
 import sys
-from pathlib import Path
 
 
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
 sys.path.insert(0, str(REPO_ROOT))
 
 from tools.stats.import_test_stats import (
@@ -18,6 +18,7 @@ from tools.stats.import_test_stats import (
     get_test_times,
 )
 from tools.stats.upload_metrics import emit_metric
+
 from tools.testing.discover_tests import TESTS
 from tools.testing.target_determination.determinator import (
     AggregatedHeuristics,
@@ -25,7 +26,6 @@ from tools.testing.target_determination.determinator import (
     TestPrioritizations,
 )
 
-
 sys.path.remove(str(REPO_ROOT))
@@ -2,15 +2,16 @@
 
 import argparse
 import fnmatch
+import pathlib
 import subprocess
 import textwrap
-from pathlib import Path
 
+
 from typing import Any, Dict, List
 
 import yaml
 
 
-REPO_ROOT = Path(__file__).parents[2]
+REPO_ROOT = pathlib.Path(__file__).parent.parent.parent
 CONFIG_YML = REPO_ROOT / ".circleci" / "config.yml"
 WORKFLOWS_DIR = REPO_ROOT / ".github" / "workflows"
@@ -1,12 +1,11 @@
 import modulefinder
 import os
+import pathlib
 import sys
 import warnings
-from pathlib import Path
 from typing import Any, Dict, List, Set
 
 
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
 
 # These tests are slow enough that it's worth calculating whether the patch
 # touched any related files first. This list was manually generated, but for every
@@ -1,10 +1,9 @@
 import json
 import os
-from pathlib import Path
+import pathlib
 from typing import Any, List
 
-
-REPO_ROOT = Path(__file__).resolve().parents[3]
+REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent.parent
 
 
 def gen_ci_artifact(included: List[Any], excluded: List[Any]) -> None:
@@ -8,14 +8,14 @@ from tools.testing.target_determination.heuristics.interface import (
     HeuristicInterface,
     TestPrioritizations,
 )
-
 from tools.testing.target_determination.heuristics.utils import (
     normalize_ratings,
     query_changed_files,
 )
 from tools.testing.test_run import TestRun
 
 
-REPO_ROOT = Path(__file__).parents[3]
+REPO_ROOT = Path(__file__).parent.parent.parent.parent
+
 keyword_synonyms: Dict[str, List[str]] = {
     "amp": ["mixed_precision"],
@@ -14,7 +14,7 @@ from tools.testing.target_determination.heuristics.utils import normalize_ratings
 from tools.testing.test_run import TestRun
 
 
-REPO_ROOT = Path(__file__).resolve().parents[4]
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent.parent
 
 
 class LLM(HeuristicInterface):
@@ -8,6 +8,7 @@ from tools.stats.import_test_stats import (
     TD_HEURISTIC_PREVIOUSLY_FAILED,
     TD_HEURISTIC_PREVIOUSLY_FAILED_ADDITIONAL,
 )
+
 from tools.testing.target_determination.heuristics.interface import (
     HeuristicInterface,
     TestPrioritizations,
@@ -17,8 +18,7 @@ from tools.testing.target_determination.heuristics.utils import (
 )
 from tools.testing.test_run import TestRun
 
-
-REPO_ROOT = Path(__file__).resolve().parents[4]
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent.parent
 
 
 class PreviouslyFailedInPR(HeuristicInterface):
@@ -11,8 +11,7 @@ from warnings import warn
 
 from tools.testing.test_run import TestRun
 
-
-REPO_ROOT = Path(__file__).resolve().parents[4]
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent.parent
 
 
 def python_test_file_to_test_name(tests: Set[str]) -> Set[str]:
@@ -2,13 +2,13 @@ import math
 import os
 import subprocess
 from pathlib import Path
-
 from typing import Callable, Dict, FrozenSet, List, Optional, Sequence, Tuple
 
 from tools.stats.import_test_stats import get_disabled_tests, get_slow_tests
 from tools.testing.test_run import ShardedTest, TestRun
 
 
-REPO_ROOT = Path(__file__).resolve().parents[2]
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent
+
 IS_MEM_LEAK_CHECK = os.getenv("PYTORCH_TEST_CUDA_MEM_LEAK_CHECK", "0") == "1"
 BUILD_ENVIRONMENT = os.getenv("BUILD_ENVIRONMENT", "")
@@ -2,7 +2,6 @@
 
 from pathlib import Path
 
-
 try:
     # VS Code settings allow comments and trailing commas, which are not valid JSON.
     import json5 as json  # type: ignore[import]
@@ -5,7 +5,6 @@ import functools
 import os
 import sys
 import warnings
-from pathlib import Path
 from types import ModuleType
 from typing import Any, Callable, Dict
@@ -52,13 +51,15 @@ def _reload_python_module(key, path):
 def _set_triton_ptxas_path() -> None:
     if os.environ.get("TRITON_PTXAS_PATH") is not None:
         return
-    ptxas = Path(__file__).absolute().parents[1] / "bin" / "ptxas"
-    if not ptxas.exists():
+    ptxas_path = os.path.abspath(
+        os.path.join(os.path.dirname(__file__), "..", "bin", "ptxas")
+    )
+    if not os.path.exists(ptxas_path):
         return
-    if ptxas.is_file() and os.access(ptxas, os.X_OK):
-        os.environ["TRITON_PTXAS_PATH"] = str(ptxas)
+    if os.path.isfile(ptxas_path) and os.access(ptxas_path, os.X_OK):
+        os.environ["TRITON_PTXAS_PATH"] = ptxas_path
     else:
-        warnings.warn(f"{ptxas} exists but is not an executable")
+        warnings.warn(f"{ptxas_path} exists but is not an executable")
 
 
 def _worker_compile_triton(load_kernel: Callable[[], Any], extra_env: Dict[str, str]):
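Both versions of _set_triton_ptxas_path follow the same defensive pattern: an explicit TRITON_PTXAS_PATH setting always wins, otherwise a candidate is derived relative to the module and exported only if it is an executable regular file. A generic sketch of that pattern (the helper name is hypothetical):

    import os
    import warnings

    def maybe_set_env_path(var: str, candidate: str) -> None:
        if os.environ.get(var) is not None:
            return  # an explicit user setting always wins
        if not os.path.exists(candidate):
            return  # nothing shipped at the expected location; stay silent
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            os.environ[var] = candidate
        else:
            warnings.warn(f"{candidate} exists but is not an executable")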
@@ -4579,7 +4579,7 @@ def find_library_location(lib_name: str) -> Path:
     path = torch_root / 'lib' / lib_name
     if os.path.exists(path):
         return path
-    torch_root = Path(__file__).resolve().parents[2]
+    torch_root = Path(__file__).resolve().parent.parent.parent
     return torch_root / 'build' / 'lib' / lib_name
 
 def skip_but_pass_in_sandcastle(reason):
@@ -1,8 +1,8 @@
 import argparse
 import os
+import pathlib
 import re
 from collections import Counter, defaultdict, namedtuple
-from pathlib import Path
 from typing import Dict, List, Optional, Sequence, Set, Union
 
 import yaml
@@ -527,7 +527,7 @@ def run(
     source_yaml: str, output_dir: str, dry_run: bool, impl_path: Optional[str] = None
 ) -> None:
     # Assumes that this file lives at PYTORCH_ROOT/torchgen/gen_backend_stubs.py
-    pytorch_root = Path(__file__).absolute().parent.parent
+    pytorch_root = pathlib.Path(__file__).parent.parent.absolute()
     template_dir = os.path.join(pytorch_root, "aten/src/ATen/templates")
 
     def make_file_manager(install_dir: str) -> FileManager:
@@ -1,7 +1,7 @@
 import argparse
 import os
+import pathlib
 from collections import namedtuple
-from pathlib import Path
 from typing import (
     Any,
     Callable,
@@ -261,7 +261,7 @@ def main() -> None:
     options = parser.parse_args()
 
     # Assumes that this file lives at PYTORCH_ROOT/torchgen/gen_backend_stubs.py
-    torch_root = Path(__file__).absolute().parents[2]
+    torch_root = pathlib.Path(__file__).parent.parent.parent.absolute()
     aten_path = str(torch_root / "aten" / "src" / "ATen")
     lazy_ir_generator: Type[GenLazyIR] = default_args.lazy_ir_generator
     if options.gen_ts_lowerings: