Source code for torch.distributions.normal

# mypy: allow-untyped-defs
import math

import torch
from torch import Tensor
from torch.distributions import constraints
from torch.distributions.exp_family import ExponentialFamily
from torch.distributions.utils import _standard_normal, broadcast_all
from torch.types import _Number, _size


__all__ = ["Normal"]


class Normal(ExponentialFamily):
    r"""
    Creates a normal (also called Gaussian) distribution parameterized by
    :attr:`loc` and :attr:`scale`.

    Example::

        >>> # xdoctest: +IGNORE_WANT("non-deterministic")
        >>> m = Normal(torch.tensor([0.0]), torch.tensor([1.0]))
        >>> m.sample()  # normally distributed with loc=0 and scale=1
        tensor([ 0.1046])

    Args:
        loc (float or Tensor): mean of the distribution (often referred to as mu)
        scale (float or Tensor): standard deviation of the distribution
            (often referred to as sigma)
    """

    arg_constraints = {"loc": constraints.real, "scale": constraints.positive}
    support = constraints.real
    has_rsample = True
    _mean_carrier_measure = 0

    @property
    def mean(self) -> Tensor:
        return self.loc

    @property
    def mode(self) -> Tensor:
        return self.loc

    @property
    def stddev(self) -> Tensor:
        return self.scale

    @property
    def variance(self) -> Tensor:
        return self.stddev.pow(2)

    def __init__(self, loc, scale, validate_args=None):
        self.loc, self.scale = broadcast_all(loc, scale)
        if isinstance(loc, _Number) and isinstance(scale, _Number):
            batch_shape = torch.Size()
        else:
            batch_shape = self.loc.size()
        super().__init__(batch_shape, validate_args=validate_args)
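
A minimal sketch of the constructor's broadcasting behavior (illustrative values, not part of the upstream source): Python-number arguments give an empty batch_shape, while tensor arguments are broadcast together by broadcast_all.

import torch
from torch.distributions import Normal

# Plain Python numbers: no batch dimensions.
d0 = Normal(0.0, 1.0)
print(d0.batch_shape)      # torch.Size([])

# Tensor arguments: parameters are broadcast to a common shape.
locs = torch.zeros(3)      # shape (3,)
scales = torch.ones(2, 1)  # shape (2, 1)
d1 = Normal(locs, scales)  # parameters broadcast to shape (2, 3)
print(d1.batch_shape)      # torch.Size([2, 3])
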
    def expand(self, batch_shape, _instance=None):
        new = self._get_checked_instance(Normal, _instance)
        batch_shape = torch.Size(batch_shape)
        new.loc = self.loc.expand(batch_shape)
        new.scale = self.scale.expand(batch_shape)
        super(Normal, new).__init__(batch_shape, validate_args=False)
        new._validate_args = self._validate_args
        return new
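
A hedged usage sketch for expand (the target batch shape here is arbitrary): the parameters become expanded views rather than copies.

import torch
from torch.distributions import Normal

base = Normal(torch.tensor(0.0), torch.tensor(1.0))  # batch_shape ()
wide = base.expand(torch.Size([4, 2]))               # batch_shape (4, 2)
print(wide.batch_shape)  # torch.Size([4, 2])
print(wide.loc.shape)    # torch.Size([4, 2]); expanded view of the original loc
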
    def sample(self, sample_shape=torch.Size()):
        shape = self._extended_shape(sample_shape)
        with torch.no_grad():
            return torch.normal(self.loc.expand(shape), self.scale.expand(shape))
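
Shape semantics of sample, with arbitrary shapes chosen for illustration: results have shape sample_shape + batch_shape, and the draw runs under torch.no_grad().

import torch
from torch.distributions import Normal

m = Normal(torch.zeros(3), torch.ones(3))  # batch_shape (3,)
x = m.sample(torch.Size([5]))              # sample_shape (5,) -> shape (5, 3)
print(x.shape)                             # torch.Size([5, 3])
print(x.requires_grad)                     # False: sample() is not differentiable
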
    def rsample(self, sample_shape: _size = torch.Size()) -> Tensor:
        shape = self._extended_shape(sample_shape)
        eps = _standard_normal(shape, dtype=self.loc.dtype, device=self.loc.device)
        return self.loc + eps * self.scale
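
rsample implements the reparameterization trick, x = loc + eps * scale with eps ~ N(0, 1), so gradients flow through samples back to the parameters. A sketch with an arbitrary loss:

import torch
from torch.distributions import Normal

loc = torch.tensor(0.5, requires_grad=True)
scale = torch.tensor(2.0, requires_grad=True)
m = Normal(loc, scale)

x = m.rsample(torch.Size([1000]))  # x = loc + eps * scale, eps ~ N(0, 1)
loss = (x ** 2).mean()
loss.backward()                    # gradients reach loc and scale through the samples
print(loc.grad, scale.grad)        # stochastic estimates of roughly 2*loc and 2*scale
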
    def log_prob(self, value):
        if self._validate_args:
            self._validate_sample(value)
        # compute the variance
        var = self.scale**2
        log_scale = (
            math.log(self.scale)
            if isinstance(self.scale, _Number)
            else self.scale.log()
        )
        return (
            -((value - self.loc) ** 2) / (2 * var)
            - log_scale
            - math.log(math.sqrt(2 * math.pi))
        )
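
This is the Gaussian log-density, log N(x; mu, sigma) = -(x - mu)^2 / (2 sigma^2) - log sigma - (1/2) log(2 pi). A quick numerical check with arbitrary values:

import math
import torch
from torch.distributions import Normal

mu, sigma, x = 0.0, 2.0, 1.0
m = Normal(torch.tensor(mu), torch.tensor(sigma))

manual = -((x - mu) ** 2) / (2 * sigma ** 2) - math.log(sigma) - 0.5 * math.log(2 * math.pi)
print(torch.allclose(m.log_prob(torch.tensor(x)), torch.tensor(manual)))  # True
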
    def cdf(self, value):
        if self._validate_args:
            self._validate_sample(value)
        return 0.5 * (
            1 + torch.erf((value - self.loc) * self.scale.reciprocal() / math.sqrt(2))
        )
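
cdf evaluates Phi((x - loc) / scale) via the error function. A small sketch against familiar reference points (outputs are approximate):

import torch
from torch.distributions import Normal

m = Normal(torch.tensor(0.0), torch.tensor(1.0))
print(m.cdf(torch.tensor(0.0)))   # 0.5 at the mean
print(m.cdf(torch.tensor(1.96)))  # approximately 0.975
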
    def icdf(self, value):
        return self.loc + self.scale * torch.erfinv(2 * value - 1) * math.sqrt(2)
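
icdf is the quantile function, the inverse of cdf up to floating-point error. A round-trip sketch:

import torch
from torch.distributions import Normal

m = Normal(torch.tensor(0.0), torch.tensor(1.0))
p = torch.tensor(0.975)
z = m.icdf(p)                                  # approximately 1.96
print(torch.allclose(m.cdf(z), p, atol=1e-6))  # True, up to float error
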
    def entropy(self):
        return 0.5 + 0.5 * math.log(2 * math.pi) + torch.log(self.scale)
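
This equals the closed form H = 0.5 * log(2 * pi * e * sigma^2), written as 0.5 + 0.5 * log(2 pi) + log(sigma). A quick check with an arbitrary scale:

import math
import torch
from torch.distributions import Normal

sigma = 2.0
m = Normal(torch.tensor(0.0), torch.tensor(sigma))
closed_form = 0.5 * math.log(2 * math.pi * math.e * sigma ** 2)
print(torch.allclose(m.entropy(), torch.tensor(closed_form)))  # True
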
    @property
    def _natural_params(self) -> tuple[Tensor, Tensor]:
        return (self.loc / self.scale.pow(2), -0.5 * self.scale.pow(2).reciprocal())

    def _log_normalizer(self, x, y):
        return -0.25 * x.pow(2) / y + 0.5 * torch.log(-math.pi / y)
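
In exponential-family form the natural parameters are eta1 = loc / scale^2 and eta2 = -1 / (2 * scale^2), and _log_normalizer returns A(eta) such that log p(x) = eta1 * x + eta2 * x^2 - A(eta). A sketch that checks this against log_prob (these are private helpers, used here only for illustration; the parameter values are arbitrary):

import torch
from torch.distributions import Normal

m = Normal(torch.tensor(1.5), torch.tensor(0.7))
x = torch.tensor(0.3)

eta1, eta2 = m._natural_params
log_norm = m._log_normalizer(eta1, eta2)

recon = eta1 * x + eta2 * x ** 2 - log_norm  # natural-parameter form of the log-density
print(torch.allclose(recon, m.log_prob(x)))  # True
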
