Pyrefly suppressions 7/n (#164913)
Adds suppressions so pyrefly will typecheck clean: https://github.com/pytorch/pytorch/issues/163283
Almost there!

Test plan:
dmypy restart && python3 scripts/lintrunner.py -a
pyrefly check

Step 1: delete the relevant lines from the project-excludes field in the pyrefly.toml file
Step 2: run pyrefly check
Step 3: add suppressions and clean up unused suppressions

Before: https://gist.github.com/maggiemoss/4b3bf2037014e116bc00706a16aef199
After: INFO 0 errors (6,884 ignored)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/164913
Approved by: https://github.com/oulgen
Committed by: PyTorch MergeBot
Parent: 12d2ef557f
Commit: c855f8632e
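For readers following the stack: the suppressions added in this diff are plain comments that pyrefly picks up on the line directly above the reported error, optionally narrowed to a single error code after a second `#`. A minimal sketch of that form, using a hypothetical `scale` function rather than anything from this PR (the specific error code shown is illustrative, not verified against pyrefly's output):

    from typing import Optional


    def scale(dropout: Optional[float]) -> float:
        # The comment below silences only the named error code on the next line;
        # which code pyrefly actually reports for Optional[float] * int is an
        # assumption here, chosen to mirror the suppressions in the diff below.
        # pyrefly: ignore  # unsupported-operation
        return dropout * 2


    print(scale(0.5))  # 1.0

Step 1 in the test plan then removes the corresponding path from the project-excludes field of pyrefly.toml, so the newly un-excluded files are checked and any remaining errors are silenced with comments like the one above.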
@@ -111,6 +111,7 @@ class RNNBase(Module):
 
         if (
             not isinstance(dropout, numbers.Number)
+            # pyrefly: ignore  # unsupported-operation
             or not 0 <= dropout <= 1
             or isinstance(dropout, bool)
         ):
@@ -119,6 +120,7 @@ class RNNBase(Module):
                 "representing the probability of an element being "
                 "zeroed"
             )
+        # pyrefly: ignore  # unsupported-operation
         if dropout > 0 and num_layers == 1:
             warnings.warn(
                 "dropout option adds dropout after all but last "
@@ -639,15 +641,22 @@ class RNN(RNNBase):
 
     @overload
     @torch._jit_internal._overload_method  # noqa: F811
+    # pyrefly: ignore  # bad-override
     def forward(
-        self, input: Tensor, hx: Optional[Tensor] = None
+        self,
+        input: Tensor,
+        hx: Optional[Tensor] = None,
+        # pyrefly: ignore  # bad-return
     ) -> tuple[Tensor, Tensor]:
         pass
 
     @overload
     @torch._jit_internal._overload_method  # noqa: F811
     def forward(
-        self, input: PackedSequence, hx: Optional[Tensor] = None
+        self,
+        input: PackedSequence,
+        hx: Optional[Tensor] = None,
+        # pyrefly: ignore  # bad-return
     ) -> tuple[PackedSequence, Tensor]:
         pass
 
@@ -772,7 +781,11 @@ class RNN(RNNBase):
 
         if isinstance(orig_input, PackedSequence):
             output_packed = PackedSequence(
-                output, batch_sizes, sorted_indices, unsorted_indices
+                output,
+                # pyrefly: ignore  # bad-argument-type
+                batch_sizes,
+                sorted_indices,
+                unsorted_indices,
             )
             return output_packed, self.permute_hidden(hidden, unsorted_indices)
 
@@ -996,6 +1009,7 @@ class LSTM(RNNBase):
 
     # In the future, we should prevent mypy from applying contravariance rules here.
     # See torch/nn/modules/module.py::_forward_unimplemented
+    # pyrefly: ignore  # bad-override
     def check_forward_args(
         self,
         input: Tensor,
@@ -1029,8 +1043,12 @@ class LSTM(RNNBase):
     # Same as above, see torch/nn/modules/module.py::_forward_unimplemented
     @overload  # type: ignore[override]
     @torch._jit_internal._overload_method  # noqa: F811
+    # pyrefly: ignore  # bad-override
     def forward(
-        self, input: Tensor, hx: Optional[tuple[Tensor, Tensor]] = None
+        self,
+        input: Tensor,
+        hx: Optional[tuple[Tensor, Tensor]] = None,
+        # pyrefly: ignore  # bad-return
     ) -> tuple[Tensor, tuple[Tensor, Tensor]]:  # noqa: F811
         pass
 
@@ -1038,7 +1056,10 @@ class LSTM(RNNBase):
     @overload
     @torch._jit_internal._overload_method  # noqa: F811
     def forward(
-        self, input: PackedSequence, hx: Optional[tuple[Tensor, Tensor]] = None
+        self,
+        input: PackedSequence,
+        hx: Optional[tuple[Tensor, Tensor]] = None,
+        # pyrefly: ignore  # bad-return
     ) -> tuple[PackedSequence, tuple[Tensor, Tensor]]:  # noqa: F811
         pass
 
@@ -1152,7 +1173,11 @@ class LSTM(RNNBase):
         # xxx: isinstance check needs to be in conditional for TorchScript to compile
         if isinstance(orig_input, PackedSequence):
             output_packed = PackedSequence(
-                output, batch_sizes, sorted_indices, unsorted_indices
+                output,
+                # pyrefly: ignore  # bad-argument-type
+                batch_sizes,
+                sorted_indices,
+                unsorted_indices,
             )
             return output_packed, self.permute_hidden(hidden, unsorted_indices)
         else:
@@ -1318,15 +1343,22 @@ class GRU(RNNBase):
 
     @overload  # type: ignore[override]
     @torch._jit_internal._overload_method  # noqa: F811
+    # pyrefly: ignore  # bad-override
    def forward(
-        self, input: Tensor, hx: Optional[Tensor] = None
+        self,
+        input: Tensor,
+        hx: Optional[Tensor] = None,
+        # pyrefly: ignore  # bad-return
     ) -> tuple[Tensor, Tensor]:  # noqa: F811
         pass
 
     @overload
     @torch._jit_internal._overload_method  # noqa: F811
     def forward(
-        self, input: PackedSequence, hx: Optional[Tensor] = None
+        self,
+        input: PackedSequence,
+        hx: Optional[Tensor] = None,
+        # pyrefly: ignore  # bad-return
     ) -> tuple[PackedSequence, Tensor]:  # noqa: F811
         pass
 
@@ -1420,7 +1452,11 @@ class GRU(RNNBase):
         # xxx: isinstance check needs to be in conditional for TorchScript to compile
         if isinstance(orig_input, PackedSequence):
             output_packed = PackedSequence(
-                output, batch_sizes, sorted_indices, unsorted_indices
+                output,
+                # pyrefly: ignore  # bad-argument-type
+                batch_sizes,
+                sorted_indices,
+                unsorted_indices,
             )
             return output_packed, self.permute_hidden(hidden, unsorted_indices)
         else: