# uniform.py
  1. from numbers import Number
  2. import torch
  3. from torch import nan
  4. from torch.distributions import constraints
  5. from torch.distributions.distribution import Distribution
  6. from torch.distributions.utils import broadcast_all
  7. __all__ = ['Uniform']
  8. class Uniform(Distribution):
  9. r"""
  10. Generates uniformly distributed random samples from the half-open interval
  11. ``[low, high)``.
  12. Example::
  13. >>> m = Uniform(torch.tensor([0.0]), torch.tensor([5.0]))
  14. >>> m.sample() # uniformly distributed in the range [0.0, 5.0)
  15. >>> # xdoctest: +SKIP
  16. tensor([ 2.3418])
  17. Args:
  18. low (float or Tensor): lower range (inclusive).
  19. high (float or Tensor): upper range (exclusive).
  20. """
  21. # TODO allow (loc,scale) parameterization to allow independent constraints.
  22. arg_constraints = {'low': constraints.dependent(is_discrete=False, event_dim=0),
  23. 'high': constraints.dependent(is_discrete=False, event_dim=0)}
  24. has_rsample = True
  25. @property
  26. def mean(self):
  27. return (self.high + self.low) / 2
  28. @property
  29. def mode(self):
  30. return nan * self.high
  31. @property
  32. def stddev(self):
  33. return (self.high - self.low) / 12**0.5
  34. @property
  35. def variance(self):
  36. return (self.high - self.low).pow(2) / 12
  37. def __init__(self, low, high, validate_args=None):
  38. self.low, self.high = broadcast_all(low, high)
  39. if isinstance(low, Number) and isinstance(high, Number):
  40. batch_shape = torch.Size()
  41. else:
  42. batch_shape = self.low.size()
  43. super().__init__(batch_shape, validate_args=validate_args)
  44. if self._validate_args and not torch.lt(self.low, self.high).all():
  45. raise ValueError("Uniform is not defined when low>= high")
  46. def expand(self, batch_shape, _instance=None):
  47. new = self._get_checked_instance(Uniform, _instance)
  48. batch_shape = torch.Size(batch_shape)
  49. new.low = self.low.expand(batch_shape)
  50. new.high = self.high.expand(batch_shape)
  51. super(Uniform, new).__init__(batch_shape, validate_args=False)
  52. new._validate_args = self._validate_args
  53. return new
  54. @constraints.dependent_property(is_discrete=False, event_dim=0)
  55. def support(self):
  56. return constraints.interval(self.low, self.high)
  57. def rsample(self, sample_shape=torch.Size()):
  58. shape = self._extended_shape(sample_shape)
  59. rand = torch.rand(shape, dtype=self.low.dtype, device=self.low.device)
  60. return self.low + rand * (self.high - self.low)
  61. def log_prob(self, value):
  62. if self._validate_args:
  63. self._validate_sample(value)
  64. lb = self.low.le(value).type_as(self.low)
  65. ub = self.high.gt(value).type_as(self.low)
  66. return torch.log(lb.mul(ub)) - torch.log(self.high - self.low)
  67. def cdf(self, value):
  68. if self._validate_args:
  69. self._validate_sample(value)
  70. result = (value - self.low) / (self.high - self.low)
  71. return result.clamp(min=0, max=1)
  72. def icdf(self, value):
  73. result = value * (self.high - self.low) + self.low
  74. return result
  75. def entropy(self):
  76. return torch.log(self.high - self.low)