# gamma.py
from numbers import Number

import torch
from torch.distributions import constraints
from torch.distributions.exp_family import ExponentialFamily
from torch.distributions.utils import broadcast_all

# Public API of this module.
__all__ = ['Gamma']
  7. def _standard_gamma(concentration):
  8. return torch._standard_gamma(concentration)
  9. class Gamma(ExponentialFamily):
  10. r"""
  11. Creates a Gamma distribution parameterized by shape :attr:`concentration` and :attr:`rate`.
  12. Example::
  13. >>> # xdoctest: +IGNORE_WANT("non-deterinistic")
  14. >>> m = Gamma(torch.tensor([1.0]), torch.tensor([1.0]))
  15. >>> m.sample() # Gamma distributed with concentration=1 and rate=1
  16. tensor([ 0.1046])
  17. Args:
  18. concentration (float or Tensor): shape parameter of the distribution
  19. (often referred to as alpha)
  20. rate (float or Tensor): rate = 1 / scale of the distribution
  21. (often referred to as beta)
  22. """
  23. arg_constraints = {'concentration': constraints.positive, 'rate': constraints.positive}
  24. support = constraints.nonnegative
  25. has_rsample = True
  26. _mean_carrier_measure = 0
  27. @property
  28. def mean(self):
  29. return self.concentration / self.rate
  30. @property
  31. def mode(self):
  32. return ((self.concentration - 1) / self.rate).clamp(min=0)
  33. @property
  34. def variance(self):
  35. return self.concentration / self.rate.pow(2)
  36. def __init__(self, concentration, rate, validate_args=None):
  37. self.concentration, self.rate = broadcast_all(concentration, rate)
  38. if isinstance(concentration, Number) and isinstance(rate, Number):
  39. batch_shape = torch.Size()
  40. else:
  41. batch_shape = self.concentration.size()
  42. super().__init__(batch_shape, validate_args=validate_args)
  43. def expand(self, batch_shape, _instance=None):
  44. new = self._get_checked_instance(Gamma, _instance)
  45. batch_shape = torch.Size(batch_shape)
  46. new.concentration = self.concentration.expand(batch_shape)
  47. new.rate = self.rate.expand(batch_shape)
  48. super(Gamma, new).__init__(batch_shape, validate_args=False)
  49. new._validate_args = self._validate_args
  50. return new
  51. def rsample(self, sample_shape=torch.Size()):
  52. shape = self._extended_shape(sample_shape)
  53. value = _standard_gamma(self.concentration.expand(shape)) / self.rate.expand(shape)
  54. value.detach().clamp_(min=torch.finfo(value.dtype).tiny) # do not record in autograd graph
  55. return value
  56. def log_prob(self, value):
  57. value = torch.as_tensor(value, dtype=self.rate.dtype, device=self.rate.device)
  58. if self._validate_args:
  59. self._validate_sample(value)
  60. return (torch.xlogy(self.concentration, self.rate) +
  61. torch.xlogy(self.concentration - 1, value) -
  62. self.rate * value - torch.lgamma(self.concentration))
  63. def entropy(self):
  64. return (self.concentration - torch.log(self.rate) + torch.lgamma(self.concentration) +
  65. (1.0 - self.concentration) * torch.digamma(self.concentration))
  66. @property
  67. def _natural_params(self):
  68. return (self.concentration - 1, -self.rate)
  69. def _log_normalizer(self, x, y):
  70. return torch.lgamma(x + 1) + (x + 1) * torch.log(-y.reciprocal())
  71. def cdf(self, value):
  72. if self._validate_args:
  73. self._validate_sample(value)
  74. return torch.special.gammainc(self.concentration, self.rate * value)