Mirror of https://github.com/pytorch/pytorch.git
Synced 2025-10-20 21:14:14 +08:00
Pyrefly suppressions 7/n (#164913)
Adds suppressions so that pyrefly will typecheck clean: https://github.com/pytorch/pytorch/issues/163283

Almost there!

Test plan:
dmypy restart && python3 scripts/lintrunner.py -a
pyrefly check

step 1: delete lines in the pyrefly.toml file from the project-excludes field
step 2: run pyrefly check
step 3: add suppressions, clean up unused suppressions

before: https://gist.github.com/maggiemoss/4b3bf2037014e116bc00706a16aef199
after: INFO 0 errors (6,884 ignored)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/164913
Approved by: https://github.com/oulgen
committed by PyTorch MergeBot
parent 12d2ef557f
commit c855f8632e
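For readers unfamiliar with pyrefly's suppression format: judging from the diff below, each suppression is an inline comment placed on the line directly above the flagged expression, with the error code named after a second `#`. A minimal, self-contained sketch of the syntax (the function and variable here are hypothetical; only the comment format is taken from the diff):

def takes_int(x: int) -> int:
    return x

value: object = 3
# pyrefly: ignore # bad-argument-type
takes_int(value)  # pyrefly would report bad-argument-type here without the comment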
@@ -72,6 +72,7 @@ class _NormBase(Module):
                 torch.tensor(
                     0,
                     dtype=torch.long,
+                    # pyrefly: ignore # bad-argument-type
                     **{k: v for k, v in factory_kwargs.items() if k != "dtype"},
                 ),
             )
@@ -221,6 +222,7 @@ class _LazyNormBase(LazyModuleMixin, _NormBase):
         dtype=None,
     ) -> None:
         factory_kwargs = {"device": device, "dtype": dtype}
+        # pyrefly: ignore # bad-argument-type
         super().__init__(
             # affine and track_running_stats are hardcoded to False to
             # avoid creating tensors that will soon be overwritten.
@@ -234,22 +236,29 @@ class _LazyNormBase(LazyModuleMixin, _NormBase):
         self.affine = affine
         self.track_running_stats = track_running_stats
         if self.affine:
+            # pyrefly: ignore # bad-argument-type
             self.weight = UninitializedParameter(**factory_kwargs)
+            # pyrefly: ignore # bad-argument-type
             self.bias = UninitializedParameter(**factory_kwargs)
         if self.track_running_stats:
+            # pyrefly: ignore # bad-argument-type
             self.running_mean = UninitializedBuffer(**factory_kwargs)
+            # pyrefly: ignore # bad-argument-type
             self.running_var = UninitializedBuffer(**factory_kwargs)
             self.num_batches_tracked = torch.tensor(
                 0,
                 dtype=torch.long,
+                # pyrefly: ignore # bad-argument-type
                 **{k: v for k, v in factory_kwargs.items() if k != "dtype"},
             )
 
     def reset_parameters(self) -> None:
+        # pyrefly: ignore # bad-argument-type
         if not self.has_uninitialized_params() and self.num_features != 0:
             super().reset_parameters()
 
     def initialize_parameters(self, input) -> None:  # type: ignore[override]
+        # pyrefly: ignore # bad-argument-type
         if self.has_uninitialized_params():
             self.num_features = input.shape[1]
             if self.affine:
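Every suppression in this diff targets the same pattern: factory_kwargs is built as {"device": device, "dtype": dtype} from parameters that default to None, so its values are Optional, and unpacking it into constructors such as UninitializedParameter or torch.tensor presumably trips bad-argument-type even though the calls are fine at runtime. A self-contained sketch of the same situation, using a hypothetical class rather than the torch API:

from typing import Optional


class Buf:
    """Hypothetical stand-in for UninitializedBuffer; dtype is typed non-Optional."""

    def __init__(self, device: Optional[str] = None, dtype: str = "float32") -> None:
        self.device = device
        self.dtype = dtype


def build(device: Optional[str] = None, dtype: Optional[str] = None) -> Buf:
    factory_kwargs = {"device": device, "dtype": dtype}
    # The dict's values are inferred as Optional[str], but Buf.__init__ annotates
    # dtype as str, so a checker reports bad-argument-type on the **-unpacking below.
    # pyrefly: ignore # bad-argument-type
    return Buf(**factory_kwargs)


buf = build()  # executes fine; Python does not enforce annotations at runtime

The suppressions acknowledge the mismatch at each call site rather than refactoring factory_kwargs into a typed structure, which keeps this PR mechanical and low-risk.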