distributions.py

import torch
import numpy as np


class AbstractDistribution:
    def sample(self):
        raise NotImplementedError()

    def mode(self):
        raise NotImplementedError()


class DiracDistribution(AbstractDistribution):
    """Degenerate 'distribution' that always returns a fixed value."""

    def __init__(self, value):
        self.value = value

    def sample(self):
        return self.value

    def mode(self):
        return self.value
class DiagonalGaussianDistribution(object):
    """Gaussian with diagonal covariance, parameterized by mean and
    log-variance stacked along dim=1 of `parameters`."""

    def __init__(self, parameters, deterministic=False):
        self.parameters = parameters
        # Split the channel dim into mean and log-variance halves.
        self.mean, self.logvar = torch.chunk(parameters, 2, dim=1)
        # Clamp log-variance for numerical stability in the exp() calls below.
        self.logvar = torch.clamp(self.logvar, -30.0, 20.0)
        self.deterministic = deterministic
        self.std = torch.exp(0.5 * self.logvar)
        self.var = torch.exp(self.logvar)
        if self.deterministic:
            # Zero variance collapses the distribution onto its mean.
            self.var = self.std = torch.zeros_like(self.mean).to(device=self.parameters.device)

    def sample(self):
        # Reparameterized sample: mean + std * eps, with eps ~ N(0, I).
        x = self.mean + self.std * torch.randn(self.mean.shape).to(device=self.parameters.device)
        return x

    def kl(self, other=None):
        if self.deterministic:
            return torch.Tensor([0.])
        else:
            if other is None:
                # KL divergence against the standard normal N(0, I).
                return 0.5 * torch.sum(torch.pow(self.mean, 2)
                                       + self.var - 1.0 - self.logvar,
                                       dim=[1, 2, 3])
            else:
                # KL divergence against another diagonal Gaussian.
                return 0.5 * torch.sum(
                    torch.pow(self.mean - other.mean, 2) / other.var
                    + self.var / other.var - 1.0 - self.logvar + other.logvar,
                    dim=[1, 2, 3])

    def nll(self, sample, dims=[1, 2, 3]):
        if self.deterministic:
            return torch.Tensor([0.])
        logtwopi = np.log(2.0 * np.pi)
        # Negative log-likelihood of `sample` under this Gaussian, summed over `dims`.
        return 0.5 * torch.sum(
            logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var,
            dim=dims)

    def mode(self):
        return self.mean
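

def _example_posterior_usage():
    # Illustrative usage sketch, not part of the original file. In a
    # latent-diffusion-style autoencoder this class wraps the encoder output,
    # where mean and log-variance are stacked along the channel dim, so a
    # 4-channel latent posterior takes an 8-channel `parameters` tensor. The
    # shapes and the function name here are assumptions for the demo only.
    params = torch.randn(2, 8, 32, 32)   # (B, 2*C, H, W)
    posterior = DiagonalGaussianDistribution(params)
    z = posterior.sample()               # reparameterized draw, shape (2, 4, 32, 32)
    kl_loss = posterior.kl().mean()      # mean KL to N(0, I) over the batch
    rec_nll = posterior.nll(z)           # per-example negative log-likelihood, shape (2,)
    return z, kl_loss, rec_nll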


def normal_kl(mean1, logvar1, mean2, logvar2):
    """
    source: https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/losses.py#L12
    Compute the KL divergence between two gaussians.
    Shapes are automatically broadcasted, so batches can be compared to
    scalars, among other use cases.
    """
    tensor = None
    for obj in (mean1, logvar1, mean2, logvar2):
        if isinstance(obj, torch.Tensor):
            tensor = obj
            break
    assert tensor is not None, "at least one argument must be a Tensor"

    # Force variances to be Tensors. Broadcasting helps convert scalars to
    # Tensors, but it does not work for torch.exp().
    logvar1, logvar2 = [
        x if isinstance(x, torch.Tensor) else torch.tensor(x).to(tensor)
        for x in (logvar1, logvar2)
    ]

    return 0.5 * (
        -1.0
        + logvar2
        - logvar1
        + torch.exp(logvar1 - logvar2)
        + ((mean1 - mean2) ** 2) * torch.exp(-logvar2)
    )
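

if __name__ == "__main__":
    # Minimal self-check sketch, not part of the original file: normal_kl,
    # summed over the non-batch dims, should agree with
    # DiagonalGaussianDistribution.kl for two posteriors of the same shape.
    torch.manual_seed(0)
    p = DiagonalGaussianDistribution(torch.randn(2, 8, 4, 4))
    q = DiagonalGaussianDistribution(torch.randn(2, 8, 4, 4))
    kl_class = p.kl(q)
    kl_fn = normal_kl(p.mean, p.logvar, q.mean, q.logvar).sum(dim=[1, 2, 3])
    print(torch.allclose(kl_class, kl_fn))  # expected: True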