Pyrefly suppressions 4/n (#164615)

Adds suppressions so that pyrefly will typecheck cleanly: https://github.com/pytorch/pytorch/issues/163283

Test plan:
dmypy restart && python3 scripts/lintrunner.py -a
pyrefly check

step 1: uncomment lines in the pyrefly.toml file
step 2: run pyrefly check
step 3: add suppressions, clean up unused suppressions
before: https://gist.github.com/maggiemoss/356645cf8cfe33123d9a27f23b30f7b1

after:

0 errors (2,753 ignored)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/164615
Approved by: https://github.com/oulgen
This commit is contained in:
Maggie Moss
2025-10-06 16:14:36 +00:00
committed by PyTorch MergeBot
parent 4bd1505f84
commit 4ab847bbc7
52 changed files with 293 additions and 21 deletions

View File

@ -415,6 +415,7 @@ def _single_tensor_adam(
if weight_decay.requires_grad:
grad = grad.addcmul_(param.clone(), weight_decay)
else:
# pyrefly: ignore # bad-argument-type
grad = grad.add(param, alpha=weight_decay)
else:
grad = grad.add(param, alpha=weight_decay)
@ -444,6 +445,7 @@ def _single_tensor_adam(
device_beta1 = beta1
# Decay the first and second moment running average coefficient
# pyrefly: ignore # no-matching-overload
exp_avg.lerp_(grad, 1 - device_beta1)
# Nested if is necessary to bypass jitscript rules
@ -692,6 +694,7 @@ def _multi_tensor_adam(
device_exp_avgs, device_grads, cast(float, 1 - device_beta1)
)
# pyrefly: ignore # no-matching-overload
torch._foreach_mul_(device_exp_avg_sqs, beta2)
# Due to the strictness of the _foreach_addcmul API, we can't have a single