Pyrefly suppressions 4/n (#164615)
Adds suppressions so pyrefly will typecheck clean: https://github.com/pytorch/pytorch/issues/163283

Test plan:
dmypy restart && python3 scripts/lintrunner.py -a
pyrefly check

Step 1: uncomment lines in the pyrefly.toml file
Step 2: run pyrefly check
Step 3: add suppressions, clean up unused suppressions

Before: https://gist.github.com/maggiemoss/356645cf8cfe33123d9a27f23b30f7b1
After: 0 errors (2,753 ignored)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/164615
Approved by: https://github.com/oulgen
Committed by: PyTorch MergeBot
Parent: 4bd1505f84
Commit: 4ab847bbc7
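For context on what each hunk below adds: a pyrefly suppression is an inline `# pyrefly: ignore` comment placed on the line directly above the flagged expression, with the error code after the second `#` naming the diagnostic being silenced. A minimal sketch of the mechanism; the function and values here are illustrative, not from this PR:

def double(x: int) -> int:
    return x * 2

# Passing a str where an int is expected would be reported as
# bad-argument-type; the comment below silences that one diagnostic.
# pyrefly: ignore # bad-argument-type
result = double("not an int")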
@@ -415,6 +415,7 @@ def _single_tensor_adam(
                 if weight_decay.requires_grad:
                     grad = grad.addcmul_(param.clone(), weight_decay)
                 else:
+                    # pyrefly: ignore # bad-argument-type
                     grad = grad.add(param, alpha=weight_decay)
             else:
                 grad = grad.add(param, alpha=weight_decay)
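Both branches in this hunk fold L2 weight decay into the gradient as grad + weight_decay * param; the `addcmul_` form is taken when `weight_decay` is a Tensor requiring grad so the update stays differentiable, while `add(..., alpha=...)` declares `alpha` as a plain number, which is presumably why pyrefly flags the Tensor-valued call. A quick equivalence check with illustrative values:

import torch

param = torch.tensor([1.0, -2.0])
grad = torch.tensor([0.1, 0.2])
wd = 0.01

# addcmul_ form: grad + param * wd, with wd as a tensor
a = grad.clone().addcmul_(param.clone(), torch.tensor(wd))
# add form: grad + wd * param, with wd as a plain float
b = grad.clone().add(param, alpha=wd)
assert torch.allclose(a, b)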
@@ -444,6 +445,7 @@ def _single_tensor_adam(
             device_beta1 = beta1

         # Decay the first and second moment running average coefficient
+        # pyrefly: ignore # no-matching-overload
         exp_avg.lerp_(grad, 1 - device_beta1)

         # Nested if is necessary to bypass jitscript rules
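The suppressed `lerp_` call is the Adam first-moment update: `Tensor.lerp_(end, weight)` computes `self + weight * (end - self)` in place, so with `weight = 1 - beta1` it equals `beta1 * exp_avg + (1 - beta1) * grad`. A quick check with illustrative values:

import torch

beta1 = 0.9
exp_avg = torch.tensor([1.0, 2.0])
grad = torch.tensor([0.5, 0.5])

expected = beta1 * exp_avg + (1 - beta1) * grad
exp_avg.lerp_(grad, 1 - beta1)  # in place: exp_avg += (1 - beta1) * (grad - exp_avg)
assert torch.allclose(exp_avg, expected)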
@@ -692,6 +694,7 @@ def _multi_tensor_adam(
                 device_exp_avgs, device_grads, cast(float, 1 - device_beta1)
             )

+        # pyrefly: ignore # no-matching-overload
         torch._foreach_mul_(device_exp_avg_sqs, beta2)

         # Due to the strictness of the _foreach_addcmul API, we can't have a single
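For readers unfamiliar with the foreach path: `torch._foreach_mul_` multiplies every tensor in a list in place by the given multiplier in one fused call. A rough pure-Python equivalent of the suppressed call, assuming a float multiplier; illustrative only:

import torch

def foreach_mul_(tensors: list, scalar: float) -> None:
    # In-place multiply each tensor by the scalar, mirroring
    # what the fused torch._foreach_mul_ call does.
    for t in tensors:
        t.mul_(scalar)

exp_avg_sqs = [torch.ones(2), torch.ones(3)]
foreach_mul_(exp_avg_sqs, 0.999)
assert all(torch.allclose(t, torch.full_like(t, 0.999)) for t in exp_avg_sqs)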