progressive_layer_drop.py

'''Copyright The Microsoft DeepSpeed Team'''

import numpy as np

from deepspeed.utils import log_dist


class ProgressiveLayerDrop(object):
    r"""Progressive Layer Dropping (PLD) for model training.

    This implements the PLD technique for compressed model training
    from this paper: https://arxiv.org/pdf/2010.13369.pdf

    Args:
        theta (float): a hyper-parameter that controls the trade-off between
            training time and robustness. The lower the theta value, the
            faster the training speed. Default value: 0.5.
        gamma (float): a hyper-parameter that controls how fast the drop
            ratio increases. Default value: 0.001.
    """
    def __init__(self, theta=0.5, gamma=0.001):
        super().__init__()

        self.theta = theta
        self.gamma = gamma
        self.current_theta = 1.0
        log_dist(f'Enabled progressive layer dropping (theta = {self.theta})', ranks=[0])

    def get_state(self):
        kwargs = {'progressive_layer_drop': True, 'pld_theta': self.get_theta()}
        return kwargs

    def get_theta(self):
        return self.current_theta

    def update_state(self, global_step):
        def _prob(x, gamma, p):
            return (1. - p) * np.exp(-gamma * x) + p

        self.current_theta = _prob(global_step, self.gamma, self.theta)
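

# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the original file): it assumes a
# hypothetical training loop and only exercises the ProgressiveLayerDrop API
# defined above. The keep-ratio theta follows the schedule
# theta(t) = (1 - theta) * exp(-gamma * t) + theta, so it decays from 1.0
# toward the configured `theta` as `global_step` grows.
if __name__ == "__main__":
    pld = ProgressiveLayerDrop(theta=0.5, gamma=0.001)
    for step in (0, 100, 1000, 10000):
        pld.update_state(step)
        # get_state() returns the kwargs a layer-drop aware forward pass
        # would consume, e.g. {'progressive_layer_drop': True, 'pld_theta': ...}
        print(step, pld.get_state())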