Add dynamo test configuration (#80342)

This test configuration runs PyTorch's test suite under torchdynamo.
Once stabilized, we will make this the default and remove this dedicated
CI job.

Signed-off-by: Edward Z. Yang <ezyang@fb.com>
Pull Request resolved: https://github.com/pytorch/pytorch/pull/80342
Approved by: https://github.com/anijain2305
This commit is contained in:
Edward Z. Yang
2022-06-27 10:11:13 -04:00
committed by PyTorch MergeBot
parent 5d595182c7
commit f3a5e364a0
4 changed files with 19 additions and 0 deletions

View File

@ -75,6 +75,7 @@ class ExperimentalFeatureConfigNode(TreeConfigNode):
"vulkan": VulkanConfigNode,
"parallel_tbb": ParallelTBBConfigNode,
"crossref": CrossRefConfigNode,
"dynamo": DynamoConfigNode,
"parallel_native": ParallelNativeConfigNode,
"onnx": ONNXConfigNode,
"libtorch": LibTorchConfigNode,
@ -179,6 +180,14 @@ class CrossRefConfigNode(TreeConfigNode):
return ImportantConfigNode
class DynamoConfigNode(TreeConfigNode):
    """Tree-config node that marks a build configuration as a torchdynamo run.

    Mirrors the sibling feature nodes (e.g. CrossRefConfigNode): it stores a
    prop that instantiate_configs later reads via fc.find_prop("is_dynamo").
    """

    def init2(self, node_name):
        # Stash the node name under "is_dynamo"; downstream code only tests
        # this value for truthiness (`fc.find_prop("is_dynamo") or False`).
        self.props["is_dynamo"] = node_name

    def child_constructor(self):
        # Dynamo nodes nest directly into the "important" leaf node, matching
        # the CrossRefConfigNode pattern.
        return ImportantConfigNode
class ParallelNativeConfigNode(TreeConfigNode):
    # NOTE(review): the diff hunk ends here — this class may define further
    # methods that are not visible in this chunk.
    def modify_label(self, label):
        """Render this node's label as a "PARALLELNATIVE=<label>" string."""
        return "PARALLELNATIVE=" + str(label)

View File

@ -240,6 +240,7 @@ def instantiate_configs(only_slow_gradcheck):
is_xla = fc.find_prop("is_xla") or False
is_asan = fc.find_prop("is_asan") or False
is_crossref = fc.find_prop("is_crossref") or False
is_dynamo = fc.find_prop("is_dynamo") or False
is_onnx = fc.find_prop("is_onnx") or False
is_pure_torch = fc.find_prop("is_pure_torch") or False
is_vulkan = fc.find_prop("is_vulkan") or False
@ -286,6 +287,9 @@ def instantiate_configs(only_slow_gradcheck):
if is_crossref:
parms_list_ignored_for_docker_image.append("crossref")
if is_dynamo:
parms_list_ignored_for_docker_image.append("dynamo")
if is_onnx:
parms_list.append("onnx")
python_version = fc.find_prop("pyver")

View File

@ -132,6 +132,8 @@ jobs:
{ config: "default", shard: 2, num_shards: 2, runner: "linux.2xlarge" },
{ config: "crossref", shard: 1, num_shards: 2, runner: "linux.2xlarge" },
{ config: "crossref", shard: 2, num_shards: 2, runner: "linux.2xlarge" },
{ config: "dynamo", shard: 1, num_shards: 2, runner: "linux.2xlarge" },
{ config: "dynamo", shard: 2, num_shards: 2, runner: "linux.2xlarge" },
]}
linux-bionic-cuda11_3-py3_7-clang9-build:

View File

@ -66,6 +66,10 @@ if [[ "$TEST_CONFIG" == *crossref* ]]; then
export PYTORCH_TEST_WITH_CROSSREF=1
fi
# Turn on torchdynamo wrapping of the test suite whenever the CI test-config
# name contains "dynamo" (same pattern as the crossref guard above).
if [[ "$TEST_CONFIG" == *dynamo* ]]; then
export PYTORCH_TEST_WITH_DYNAMO=1
fi
# TODO: this condition is never true, need to fix this.
if [[ -n "$PR_NUMBER" ]] && [[ -z "$CI_MASTER" || "$CI_MASTER" == "false" ]]; then
# skip expensive checks when on PR and CI_MASTER flag is not set