Files
pytorch/torch/distributions/logistic_normal.py
Maggie Moss b13cd141b3 Add pyrefly suppressions (#164748)
Adds suppressions so that pyrefly will typecheck clean: https://github.com/pytorch/pytorch/issues/163283

Test plan:
dmypy restart && python3 scripts/lintrunner.py -a
pyrefly check

step 1: delete lines in the pyrefly.toml file from the `project-excludes` field
step 2: run pyrefly check
step 3: add suppressions, clean up unused suppressions
before: https://gist.github.com/maggiemoss/4b3bf2037014e116bc00706a16aef199

after:

0 errors (4,263 ignored)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/164748
Approved by: https://github.com/oulgen
2025-10-07 17:31:18 +00:00

69 lines
2.2 KiB
Python

# mypy: allow-untyped-defs
from typing import Optional, Union
from torch import Tensor
from torch.distributions import constraints, Independent
from torch.distributions.normal import Normal
from torch.distributions.transformed_distribution import TransformedDistribution
from torch.distributions.transforms import StickBreakingTransform
__all__ = ["LogisticNormal"]
class LogisticNormal(TransformedDistribution):
r"""
Creates a logistic-normal distribution parameterized by :attr:`loc` and :attr:`scale`
that define the base `Normal` distribution transformed with the
`StickBreakingTransform` such that::
X ~ LogisticNormal(loc, scale)
Y = log(X / (1 - X.cumsum(-1)))[..., :-1] ~ Normal(loc, scale)
Args:
loc (float or Tensor): mean of the base distribution
scale (float or Tensor): standard deviation of the base distribution
Example::
>>> # logistic-normal distributed with mean=(0, 0, 0) and stddev=(1, 1, 1)
>>> # of the base Normal distribution
>>> # xdoctest: +IGNORE_WANT("non-deterministic")
>>> m = LogisticNormal(torch.tensor([0.0] * 3), torch.tensor([1.0] * 3))
>>> m.sample()
tensor([ 0.7653, 0.0341, 0.0579, 0.1427])
"""
arg_constraints = {"loc": constraints.real, "scale": constraints.positive}
# pyrefly: ignore # bad-override
support = constraints.simplex
has_rsample = True
# pyrefly: ignore # bad-override
base_dist: Independent[Normal]
def __init__(
self,
loc: Union[Tensor, float],
scale: Union[Tensor, float],
validate_args: Optional[bool] = None,
) -> None:
base_dist = Normal(loc, scale, validate_args=validate_args)
if not base_dist.batch_shape:
base_dist = base_dist.expand([1])
super().__init__(
base_dist, StickBreakingTransform(), validate_args=validate_args
)
def expand(self, batch_shape, _instance=None):
new = self._get_checked_instance(LogisticNormal, _instance)
return super().expand(batch_shape, _instance=new)
@property
def loc(self) -> Tensor:
return self.base_dist.base_dist.loc
@property
def scale(self) -> Tensor:
return self.base_dist.base_dist.scale