mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-20 21:14:14 +08:00
Stop checking in VmapGeneratedPlumbing.h (#82351)
This PR changes VmapGeneratedPlumbing.h to be generated by torchgen. The output file is ATen/VmapGeneratedPlumbing.h. Why generate this file inside PyTorch codegen instead of a separate step in functorch? - I can't figure out how to get functorch's fbcode target to generate it - functorch's build system will, in the mid-term, be absorbed into pytorch's build system, so I don't want to do the extra work of adding a step to the functorch build process. Test Plan: - build pytorch, build functorch Pull Request resolved: https://github.com/pytorch/pytorch/pull/82351 Approved by: https://github.com/ezyang
This commit is contained in:
committed by
PyTorch MergeBot
parent
d80fe49de0
commit
5c92777307
@ -79,6 +79,7 @@ generated_cpu_cpp = [
|
|||||||
"aten/src/ATen/MethodOperators.h",
|
"aten/src/ATen/MethodOperators.h",
|
||||||
"aten/src/ATen/NativeMetaFunctions.h",
|
"aten/src/ATen/NativeMetaFunctions.h",
|
||||||
"aten/src/ATen/RegistrationDeclarations.h",
|
"aten/src/ATen/RegistrationDeclarations.h",
|
||||||
|
"aten/src/ATen/VmapGeneratedPlumbing.h",
|
||||||
"aten/src/ATen/core/aten_interned_strings.h",
|
"aten/src/ATen/core/aten_interned_strings.h",
|
||||||
"aten/src/ATen/core/enum_tag.h",
|
"aten/src/ATen/core/enum_tag.h",
|
||||||
"aten/src/ATen/core/TensorBody.h",
|
"aten/src/ATen/core/TensorBody.h",
|
||||||
|
@ -144,6 +144,7 @@ GENERATED_H = [
|
|||||||
"FunctionalInverses.h",
|
"FunctionalInverses.h",
|
||||||
"RedispatchFunctions.h",
|
"RedispatchFunctions.h",
|
||||||
"RegistrationDeclarations.h",
|
"RegistrationDeclarations.h",
|
||||||
|
"VmapGeneratedPlumbing.h",
|
||||||
]
|
]
|
||||||
|
|
||||||
GENERATED_H_CORE = [
|
GENERATED_H_CORE = [
|
||||||
|
@ -1,53 +0,0 @@
|
|||||||
import os
|
|
||||||
import argparse
|
|
||||||
from torchgen.gen import FileManager, parse_native_yaml
|
|
||||||
from torchgen.gen import get_torchgen_root
|
|
||||||
from gen_vmap_plumbing import gen_all_vmap_plumbing
|
|
||||||
|
|
||||||
"""
|
|
||||||
INSTRUCTIONS
|
|
||||||
|
|
||||||
Step 1: You must have a PyTorch installation (in develop mode, i.e.
|
|
||||||
installed with python setup.py develop) in your current environment.
|
|
||||||
This script relies on the `tools` module from the PyTorch develop installation.
|
|
||||||
|
|
||||||
Step 2: Run this script.
|
|
||||||
|
|
||||||
python codegen/gen.py
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def main() -> None:
    """Parse command-line options and run the functorch codegen."""
    arg_parser = argparse.ArgumentParser(description='functorch codegen')
    arg_parser.add_argument(
        '-s', '--source-path',
        default=None,
        help='path to source directory for ATen',
    )
    arg_parser.add_argument(
        '-d', '--install_dir',
        default='functorch/csrc',
        help='output directory',
    )
    args = arg_parser.parse_args()
    generate_code(args.install_dir, args.source_path)
|
|
||||||
|
|
||||||
def generate_code(install_dir='functorch/csrc', source_path=None):
    """Generate VmapGeneratedPlumbing.h from the native-functions YAML.

    install_dir: directory the generated header is written into.
    source_path: ATen source directory; when None it is inferred from
        the torchgen installation.
    """
    if source_path is None:
        # No explicit source tree given: use the ATen sources that ship
        # packaged with torchgen.
        source_path = os.path.join(get_torchgen_root(), "packaged/ATen")

    parsed = parse_native_yaml(
        os.path.join(source_path, 'native/native_functions.yaml'),
        os.path.join(source_path, 'native/tags.yaml'),
    )
    funcs = parsed.native_functions

    fm = FileManager(
        install_dir=install_dir,
        template_dir=os.path.join(source_path, "templates"),
        dry_run=False,
    )
    fm.write('VmapGeneratedPlumbing.h', lambda: gen_all_vmap_plumbing(funcs))
|
|
||||||
|
|
||||||
# Script entry point.
if __name__ == "__main__":
    main()
|
@ -17,7 +17,7 @@
|
|||||||
#include <functorch/csrc/PlumbingHelper.h>
|
#include <functorch/csrc/PlumbingHelper.h>
|
||||||
#include <ATen/core/dispatch/Dispatcher.h>
|
#include <ATen/core/dispatch/Dispatcher.h>
|
||||||
#include <functorch/csrc/Constants.h>
|
#include <functorch/csrc/Constants.h>
|
||||||
#include <functorch/csrc/VmapGeneratedPlumbing.h>
|
#include <ATen/VmapGeneratedPlumbing.h>
|
||||||
|
|
||||||
namespace at { namespace functorch {
|
namespace at { namespace functorch {
|
||||||
Tensor reshape_dim_into(int64_t src, int64_t dst, const Tensor& x);
|
Tensor reshape_dim_into(int64_t src, int64_t dst, const Tensor& x);
|
||||||
|
@ -6,7 +6,7 @@
|
|||||||
|
|
||||||
#pragma once
|
#pragma once
|
||||||
#include <ATen/Tensor.h>
|
#include <ATen/Tensor.h>
|
||||||
#include <functorch/csrc/VmapGeneratedPlumbing.h>
|
#include <ATen/VmapGeneratedPlumbing.h>
|
||||||
|
|
||||||
namespace at {
|
namespace at {
|
||||||
namespace functorch {
|
namespace functorch {
|
||||||
|
File diff suppressed because it is too large
Load Diff
@ -38,6 +38,7 @@ from torchgen.gen_functionalization_type import (
|
|||||||
gen_functionalization_view_inverse_declaration,
|
gen_functionalization_view_inverse_declaration,
|
||||||
gen_symint_view_copy_kernel,
|
gen_symint_view_copy_kernel,
|
||||||
)
|
)
|
||||||
|
from torchgen.gen_vmap_plumbing import gen_all_vmap_plumbing
|
||||||
|
|
||||||
from torchgen.model import (
|
from torchgen.model import (
|
||||||
Argument,
|
Argument,
|
||||||
@ -1841,6 +1842,10 @@ def gen_headers(
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
cpu_fm.write(
|
||||||
|
"VmapGeneratedPlumbing.h", lambda: gen_all_vmap_plumbing(native_functions)
|
||||||
|
)
|
||||||
|
|
||||||
def gen_aten_interned_strings() -> Dict[str, str]:
|
def gen_aten_interned_strings() -> Dict[str, str]:
|
||||||
attrs = set() # All function argument names
|
attrs = set() # All function argument names
|
||||||
names = set() # All ATen function names
|
names = set() # All ATen function names
|
||||||
|
Reference in New Issue
Block a user