ema.py

import torch
from torch import nn


class LitEma(nn.Module):
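    """Maintains an exponential moving average (EMA) of a model's trainable
    parameters. The averaged copies are registered as buffers, so they are
    saved and restored with this module's state dict."""
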
    def __init__(self, model, decay=0.9999, use_num_updates=True):
        super().__init__()
        if decay < 0.0 or decay > 1.0:
            raise ValueError('Decay must be between 0 and 1')

        self.m_name2s_name = {}
        self.register_buffer('decay', torch.tensor(decay, dtype=torch.float32))
        self.register_buffer('num_updates', torch.tensor(0, dtype=torch.int) if use_num_updates
                             else torch.tensor(-1, dtype=torch.int))

        for name, p in model.named_parameters():
            if p.requires_grad:
                # '.' is not allowed in buffer names, so strip it
                s_name = name.replace('.', '')
                self.m_name2s_name.update({name: s_name})
                self.register_buffer(s_name, p.clone().detach().data)

        self.collected_params = []

    def reset_num_updates(self):
        del self.num_updates
        self.register_buffer('num_updates', torch.tensor(0, dtype=torch.int))

    def forward(self, model):
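        """Perform one EMA update, pulling the shadow parameters towards the
        model's current trainable parameters."""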
        decay = self.decay

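        # Warm-up: while num_updates is being tracked (>= 0), use the smaller of
        # the configured decay and (1 + n) / (10 + n), so early updates follow
        # the live weights more closely.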
        if self.num_updates >= 0:
            self.num_updates += 1
            decay = min(self.decay, (1 + self.num_updates) / (10 + self.num_updates))

        one_minus_decay = 1.0 - decay

        with torch.no_grad():
            m_param = dict(model.named_parameters())
            shadow_params = dict(self.named_buffers())

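            # In-place update: shadow <- shadow - (1 - decay) * (shadow - param),
            # i.e. shadow <- decay * shadow + (1 - decay) * param.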
            for key in m_param:
                if m_param[key].requires_grad:
                    sname = self.m_name2s_name[key]
                    shadow_params[sname] = shadow_params[sname].type_as(m_param[key])
                    shadow_params[sname].sub_(one_minus_decay * (shadow_params[sname] - m_param[key]))
                else:
                    assert key not in self.m_name2s_name

    def copy_to(self, model):
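        """Copy the EMA (shadow) parameters into the model's trainable parameters."""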
        m_param = dict(model.named_parameters())
        shadow_params = dict(self.named_buffers())
        for key in m_param:
            if m_param[key].requires_grad:
                m_param[key].data.copy_(shadow_params[self.m_name2s_name[key]].data)
            else:
                assert key not in self.m_name2s_name

    def store(self, parameters):
        """
        Save the current parameters for restoring later.

        Args:
            parameters: Iterable of `torch.nn.Parameter`; the parameters to be
                temporarily stored.
        """
        self.collected_params = [param.clone() for param in parameters]

    def restore(self, parameters):
        """
        Restore the parameters stored with the `store` method.
        Useful to validate the model with EMA parameters without affecting the
        original optimization process. Store the parameters before the
        `copy_to` method. After validation (or model saving), use this to
        restore the former parameters.

        Args:
            parameters: Iterable of `torch.nn.Parameter`; the parameters to be
                updated with the stored parameters.
        """
        for c_param, param in zip(self.collected_params, parameters):
            param.data.copy_(c_param.data)
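

# ---------------------------------------------------------------------------
# Usage sketch (added for illustration; not part of the original file).
# It shows the typical pattern: update the EMA after each optimizer step,
# then temporarily swap in the averaged weights for evaluation. The tiny
# model, data, and hyperparameters below are placeholders, not values taken
# from this repository.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    torch.manual_seed(0)
    model = nn.Linear(4, 2)
    ema = LitEma(model, decay=0.999)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

    for _ in range(5):
        x = torch.randn(8, 4)
        loss = model(x).pow(2).mean()
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        ema(model)  # one EMA update of the shadow parameters

    # Evaluate with the averaged weights, then put the raw weights back.
    ema.store(model.parameters())
    ema.copy_to(model)
    with torch.no_grad():
        print("EMA output:", model(torch.randn(2, 4)))
    ema.restore(model.parameters())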