progressive_layer_drop.py

import numpy as np
from deepspeed.utils import log_dist


class ProgressiveLayerDrop(object):
    r""" Progressive Layer Dropping (PLD) for model training.
        This implements the PLD technique for compressed model training
        from this paper: https://arxiv.org/pdf/2010.13369.pdf
    Args:
        theta (float): a hyper-parameter that controls the trade-off between training time and robustness.
                       The lower the theta value, the faster the training speed. Default value: 0.5.
        gamma (float): a hyper-parameter that controls how fast the drop ratio increases. Default value: 0.001.
    """

    def __init__(self, theta=0.5, gamma=0.001):
        super().__init__()

        self.theta = theta
        self.gamma = gamma
        # The keep probability starts at 1.0 (no layers dropped) and decays toward theta.
        self.current_theta = 1.0
        log_dist(f'Enabled progressive layer dropping (theta = {self.theta})', ranks=[0])

    def get_state(self):
        # Expose the current PLD state as kwargs (e.g., to pass into a model's forward).
        kwargs = {'progressive_layer_drop': True, 'pld_theta': self.get_theta()}
        return kwargs

    def get_theta(self):
        return self.current_theta

    def update_state(self, global_step):
        # Schedule: theta(t) = (1 - theta) * exp(-gamma * t) + theta, an
        # exponential decay from 1.0 toward the configured theta over training.
        def _prob(x, gamma, p):
            return (1. - p) * np.exp(-gamma * x) + p

        self.current_theta = _prob(global_step, self.gamma, self.theta)
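
# ---------------------------------------------------------------------------
# Usage sketch (an illustrative addition, not part of the upstream file): in a
# DeepSpeed run the engine is expected to call update_state() with the global
# step and feed get_state() kwargs into the model's forward pass; the
# standalone loop below simply traces the keep-probability schedule
#     theta(t) = (1 - theta) * exp(-gamma * t) + theta,
# which decays from 1.0 (keep all layers) toward the configured theta.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    pld = ProgressiveLayerDrop(theta=0.5, gamma=0.001)
    for step in (0, 100, 1000, 10000):
        pld.update_state(step)
        print(f'step {step:>6}: theta = {pld.get_theta():.4f}')
    # Expected output:
    # step      0: theta = 1.0000
    # step    100: theta = 0.9524
    # step   1000: theta = 0.6839
    # step  10000: theta = 0.5000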