[torchdynamo hash update] update the pinned torchdynamo hash (#85225)

This PR is auto-generated nightly by [this action](https://github.com/pytorch/pytorch/blob/master/.github/workflows/_update-commit-hash.yml).
Update the pinned torchdynamo hash.
Pull Request resolved: https://github.com/pytorch/pytorch/pull/85225
Approved by: https://github.com/pytorchbot, https://github.com/voznesenskym
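
For context, the linked workflow's job amounts to resolving the newest upstream torchdynamo commit, rewriting the pinned-hash file, and opening a PR like this one when the value changes. The Python sketch below illustrates that idea only; the pin-file path, upstream URL, and helper names are assumptions for illustration, not taken from `_update-commit-hash.yml`.

```python
# Minimal sketch of a nightly "update the pinned hash" job (illustrative only;
# the pin-file path and upstream repo below are assumptions, not read from the PR).
import subprocess
from pathlib import Path

PIN_FILE = Path(".github/ci_commit_pins/torchdynamo.txt")  # assumed location
UPSTREAM = "https://github.com/pytorch/torchdynamo"         # assumed upstream repo


def latest_commit(repo_url: str, ref: str = "HEAD") -> str:
    # `git ls-remote <repo> <ref>` prints "<sha>\t<ref>"; take the sha.
    out = subprocess.run(
        ["git", "ls-remote", repo_url, ref],
        check=True, capture_output=True, text=True,
    ).stdout
    return out.split()[0]


def update_pin() -> bool:
    new_hash = latest_commit(UPSTREAM)
    old_hash = PIN_FILE.read_text().strip() if PIN_FILE.exists() else ""
    if new_hash == old_hash:
        return False  # pin already current; no PR would be opened
    PIN_FILE.write_text(new_hash + "\n")
    return True  # the real action then has the bot commit this and open a PR


if __name__ == "__main__":
    print("pin updated" if update_pin() else "pin already current")
```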
PyTorch MergeBot committed on 2022-09-26 20:07:13 +00:00
parent 89896b8778
commit 70cce9f8d1
2 changed files with 20 additions and 6 deletions


@@ -1,18 +1,31 @@
 # Owner(s): ["module: nn"]
 
 import tempfile
-import torch
 from copy import deepcopy
 from functools import partial
+from unittest import expectedFailure
+
+import torch
 from torch import nn
-from torch.nn.utils.parametrize import register_parametrization, remove_parametrizations
 from torch.nn.modules.lazy import LazyModuleMixin
+from torch.nn.utils.parametrize import (
+    register_parametrization,
+    remove_parametrizations,
+)
+from torch.testing._internal.common_subclass import (
+    DiagTensorBelow,
+    subclass_db,
+)
 from torch.testing._internal.common_utils import (
-    TestCase, run_tests, parametrize, skipIfTorchDynamo, subtest, instantiate_parametrized_tests)
-from torch.testing._internal.common_subclass import subclass_db, DiagTensorBelow
+    TestCase,
+    instantiate_parametrized_tests,
+    parametrize,
+    run_tests,
+    skipIfTorchDynamo,
+    subtest,
+)
 from torch.testing._internal.logging_tensor import LoggingTensor
 from torch.utils._pytree import tree_map
-from unittest import expectedFailure
 
 # The current test methodology in this file is to test a variety of real use cases
 # with a set of fully-fledged tensor subclasses. In the future, this may change
@@ -43,6 +56,7 @@ class TestSubclass(TestCase):
         self.assertNotIsInstance(x, nn.Parameter)
         self.assertEqual(x.requires_grad, tensor_requires_grad)
 
+    @skipIfTorchDynamo()
     @parametrize_tensor_cls
     @parametrize("as_param", [False, True])
     def test_deepcopy(self, tensor_cls, as_param):
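
The functional change in the hunk above is the new `@skipIfTorchDynamo()` decorator on `test_deepcopy`; the earlier hunk only resorts and reflows the imports (`skipIfTorchDynamo` was already imported). As a rough, self-contained illustration of how these helpers compose, here is a minimal parametrized test in the same style; it uses a plain tensor and `nn.Parameter` rather than the `subclass_db` entries, and its body is made up for illustration, not copied from the real `test_deepcopy`.

```python
# Hypothetical, self-contained test mirroring the decorator pattern in the diff:
# skipIfTorchDynamo + parametrize + instantiate_parametrized_tests.
from copy import deepcopy

import torch
from torch import nn
from torch.testing._internal.common_utils import (
    TestCase,
    instantiate_parametrized_tests,
    parametrize,
    run_tests,
    skipIfTorchDynamo,
)


class TestDeepcopyExample(TestCase):
    @skipIfTorchDynamo()  # skipped when the suite runs under TorchDynamo
    @parametrize("as_param", [False, True])  # expands into one test per value
    def test_deepcopy_plain(self, as_param):
        x = torch.randn(3)
        if as_param:
            x = nn.Parameter(x)
        y = deepcopy(x)
        # deepcopy is expected to return a distinct object of the same class
        # with the same requires_grad setting.
        self.assertIsNot(x, y)
        self.assertIs(type(y), type(x))
        self.assertEqual(x.requires_grad, y.requires_grad)


# Generates the concrete parametrized test_* methods on the class.
instantiate_parametrized_tests(TestDeepcopyExample)

if __name__ == "__main__":
    run_tests()
```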