mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-21 05:34:18 +08:00
Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/76275 In preparation for addressing https://github.com/pytorch/pytorch/issues/73212 Diff was generated with: ``` git mv tools/codegen torchgen git grep -l 'tools.codegen' | xargs sed -i 's/tools.codegen/torchgen/g' sed -i "s/\${TOOLS_PATH}\/codegen/\${TORCH_ROOT}\/torchgen/g" caffe2/CMakeLists.txt ``` and a manual edits to: * tools/test/test_gen_backend_stubs.py * torchgen/build.bzl * torchgen/gen_backend_stubs.py aka this diff: ``` diff --git a/tools/test/test_gen_backend_stubs.py b/tools/test/test_gen_backend_stubs.py index 3dc26c6d2d..104054575e 100644 --- a/tools/test/test_gen_backend_stubs.py +++ b/tools/test/test_gen_backend_stubs.py @@ -9,7 +9,7 @@ from torchgen.gen_backend_stubs import run from torchgen.gen import _GLOBAL_PARSE_NATIVE_YAML_CACHE # noqa: F401 path = os.path.dirname(os.path.realpath(__file__)) -gen_backend_stubs_path = os.path.join(path, '../torchgen/gen_backend_stubs.py') +gen_backend_stubs_path = os.path.join(path, '../../torchgen/gen_backend_stubs.py') # gen_backend_stubs.py is an integration point that is called directly by external backends. # The tests here are to confirm that badly formed inputs result in reasonable error messages. 
diff --git a/torchgen/build.bzl b/torchgen/build.bzl index ed04e35a43..d00078a3cf 100644 --- a/torchgen/build.bzl +++ b/torchgen/build.bzl @@ -1,6 +1,6 @@ def define_targets(rules): rules.py_library( - name = "codegen", + name = "torchgen", srcs = rules.glob(["**/*.py"]), deps = [ rules.requirement("PyYAML"), @@ -11,6 +11,6 @@ def define_targets(rules): rules.py_binary( name = "gen", - srcs = [":codegen"], + srcs = [":torchgen"], visibility = ["//visibility:public"], ) diff --git a/torchgen/gen_backend_stubs.py b/torchgen/gen_backend_stubs.py index c1a672a655..beee7a15e0 100644 --- a/torchgen/gen_backend_stubs.py +++ b/torchgen/gen_backend_stubs.py @@ -474,7 +474,7 @@ def run( ) -> None: # Assumes that this file lives at PYTORCH_ROOT/torchgen/gen_backend_stubs.py - pytorch_root = pathlib.Path(__file__).parent.parent.parent.absolute() + pytorch_root = pathlib.Path(__file__).parent.parent.absolute() template_dir = os.path.join(pytorch_root, "aten/src/ATen/templates") def make_file_manager(install_dir: str) -> FileManager: ``` run_all_fbandroid_tests Test Plan: sandcastle Reviewed By: albanD, ngimel Differential Revision: D35770317 fbshipit-source-id: 153ac4a7fef15b1e750812a90bfafdbc8f1ebcdf (cherry picked from commit c6d485d1d4648fa1c8a4c14c5bf3d8e899b9b4dd)
43 lines
1.5 KiB
Python
43 lines
1.5 KiB
Python
import threading
|
|
from contextlib import contextmanager
|
|
from typing import Optional, Iterator
|
|
|
|
# Simple dynamic scoping implementation. The name "parametrize" comes
# from Racket.
#
# WARNING WARNING: LOOKING TO EDIT THIS FILE? Think carefully about
# why you need to add a toggle to the global behavior of code
# generation. The parameters here should really only be used
# for "temporary" situations, where we need to temporarily change
# the codegen in some cases because we cannot conveniently update
# all call sites, and are slated to be eliminated once all call
# sites are eliminated. If you don't have a plan for how to get there,
# DON'T add a new entry here.
|
|
|
|
|
|
class Locals(threading.local):
    """Thread-local storage for codegen parametrization flags.

    Subclassing ``threading.local`` means each thread sees its own copy of
    the attributes below, so ``parametrize`` in one thread cannot leak its
    setting into codegen running concurrently in another thread.
    """

    # None means "unset"; use_const_ref_for_mutable_tensors() asserts on it,
    # so reads are only valid inside a parametrize() context.
    use_const_ref_for_mutable_tensors: Optional[bool] = None
|
|
|
|
|
|
# Module-level singleton holding the per-thread parametrization state.
_locals = Locals()
|
|
|
|
|
|
def use_const_ref_for_mutable_tensors() -> bool:
    """Return the current thread's ``use_const_ref_for_mutable_tensors`` flag.

    Returns:
        The value installed by the enclosing ``local.parametrize`` context.

    Raises:
        AssertionError: if called outside a ``local.parametrize`` context,
            i.e. the thread-local flag was never initialized.
    """
    # Explicit raise instead of an `assert` statement: asserts are stripped
    # under `python -O`, which would make this function silently return None
    # (falsy, and not a bool) instead of failing loudly on misuse.
    if _locals.use_const_ref_for_mutable_tensors is None:
        raise AssertionError(
            "need to initialize local.use_const_ref_for_mutable_tensors with "
            "local.parametrize"
        )
    return _locals.use_const_ref_for_mutable_tensors
|
|
|
|
|
|
@contextmanager
def parametrize(*, use_const_ref_for_mutable_tensors: bool) -> Iterator[None]:
    """Dynamically scope the codegen parametrization flag for this thread.

    While the ``with`` body runs, ``use_const_ref_for_mutable_tensors()``
    returns the given value; on exit — whether the body completed normally
    or raised — the previous thread-local value is restored.
    """
    saved = _locals.use_const_ref_for_mutable_tensors
    try:
        _locals.use_const_ref_for_mutable_tensors = use_const_ref_for_mutable_tensors
        yield
    finally:
        _locals.use_const_ref_for_mutable_tensors = saved
|