Add None return type to init -- tests (#132352)

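Per PEP 484, __init__ should be annotated with an explicit -> None
return type; a fully unannotated __init__(self) counts as untyped, so
type checkers such as mypy skip checking its body. That is presumably
the motivation for this sweep over the test files. A minimal sketch of
the pattern, using one of the Foo modules touched in this diff:

    import torch.nn as nn

    class Foo(nn.Module):
        # With "-> None", mypy treats __init__ as typed and
        # type-checks the assignments in its body.
        def __init__(self) -> None:
            super().__init__()
            self.linear = nn.Linear(3, 3)
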
Pull Request resolved: https://github.com/pytorch/pytorch/pull/132352
Approved by: https://github.com/ezyang
ghstack dependencies: #132335, #132351
Author:    Oguz Ulgen
Date:      2024-08-01 00:22:48 -07:00
Committer: PyTorch MergeBot
Parent:    a6985c09cb
Commit:    221350e3a4

157 changed files with 1286 additions and 1286 deletions

@@ -3661,7 +3661,7 @@ class TestMakeFunctional(TestCase):
     @parametrize("disable_autograd_tracking", [True, False])
     def test_disable_autograd_tracking(self, disable_autograd_tracking):
         class Foo(nn.Module):
-            def __init__(self):
+            def __init__(self) -> None:
                 super().__init__()
                 self.linear = nn.Linear(3, 3)
@@ -3679,7 +3679,7 @@ class TestMakeFunctional(TestCase):
     def test_parameter_tying(self):
         class Foo(nn.Module):
-            def __init__(self):
+            def __init__(self) -> None:
                 super().__init__()
                 self.bias = nn.Parameter(torch.randn(3))
                 self.linear = nn.Linear(3, 3)
@@ -3708,7 +3708,7 @@ class TestMakeFunctional(TestCase):
     def test_buffer_tying(self):
         class Foo(nn.Module):
-            def __init__(self):
+            def __init__(self) -> None:
                 super().__init__()
                 self.bias = nn.Parameter(torch.randn(3))
                 self.linear = nn.Linear(3, 3)
@@ -3740,7 +3740,7 @@ class TestMakeFunctional(TestCase):
     @parametrize("disable_autograd_tracking", [True, False])
     def test_with_buffers_disable_autograd_tracking(self, disable_autograd_tracking):
         class Foo(nn.Module):
-            def __init__(self):
+            def __init__(self) -> None:
                 super().__init__()
                 self.linear = nn.Linear(3, 3)
                 self.buffer = nn.Buffer(torch.randn(3))
@@ -3762,7 +3762,7 @@ class TestMakeFunctional(TestCase):
     @parametrize("detach_params", [True, False])
     def test_using_detach_functional_call(self, detach_params):
         class Foo(nn.Module):
-            def __init__(self):
+            def __init__(self) -> None:
                 super().__init__()
                 self.linear = nn.Linear(3, 3)
                 self.buffer = nn.Buffer(torch.randn(3))
@@ -3788,7 +3788,7 @@ class TestMakeFunctional(TestCase):
     def test_parameter_tying_grad(self):
         class Foo(nn.Module):
-            def __init__(self):
+            def __init__(self) -> None:
                 super().__init__()
                 self.linear = nn.Linear(3, 3)
                 self.weight = self.linear.weight
@@ -3820,7 +3820,7 @@ class TestMakeFunctional(TestCase):
     def test_parameter_tying_ensemble(self):
         class Foo(nn.Module):
-            def __init__(self):
+            def __init__(self) -> None:
                 super().__init__()
                 self.linear = nn.Linear(3, 3)
                 self.weight = self.linear.weight
@@ -3854,7 +3854,7 @@ class TestMakeFunctional(TestCase):
     @parametrize("mechanism", ["make_functional", "functional_call"])
     def test_correctness_mnist(self, mechanism):
         class Net(nn.Module):
-            def __init__(self):
+            def __init__(self) -> None:
                 super().__init__()
                 self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
                 self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
@@ -3965,7 +3965,7 @@ class TestMakeFunctional(TestCase):
     @parametrize("mechanism", ["make_functional", "functional_call"])
     def test_make_functional_state_correctly_returned_after_forward(self, mechanism):
         class Net(nn.Module):
-            def __init__(self):
+            def __init__(self) -> None:
                 super().__init__()
                 self.linear = nn.Linear(3, 3)
@@ -4021,7 +4021,7 @@ class TestExamplesCorrectness(TestCase):
     @parametrize("mechanism", ["make_functional", "functional_call"])
     def test_maml_regression(self, device, mechanism):
         class ThreeLayerNet(nn.Module):
-            def __init__(self):
+            def __init__(self) -> None:
                 super().__init__()
                 self.fc1 = nn.Linear(1, 40)
                 self.relu1 = nn.ReLU()