# Copyright (c) Microsoft Corporation.
# SPDX-License-Identifier: Apache-2.0

# DeepSpeed Team

import json

from .constants import *


class ElasticityError(Exception):
    """
    Base exception for all elasticity related errors
    """


class ElasticityConfigError(ElasticityError):
    """
    Elasticity configuration error
    """


class ElasticityIncompatibleWorldSize(ElasticityError):
    """
    Attempting to run a world size that is incompatible with a given elastic config
    """


class ElasticityConfig:
    """
    Elastic config object, constructed from a param dictionary that only contains elastic
    config parameters, example below:

    If elasticity is enabled, the user must specify (at least) max_train_batch_size
    and micro_batch_sizes.

    {
        "enabled": true,
        "max_train_batch_size": 2000,
        "micro_batch_sizes": [2, 4, 6],
        "min_gpus": 1,
        "max_gpus": 10000,
        "min_time": 20,
        "ignore_non_elastic_batch_info": false,
        "version": 0.1
    }
    """

    def __init__(self, param_dict):
        self.enabled = param_dict.get(ENABLED, ENABLED_DEFAULT)
        if self.enabled:
            # When elasticity is enabled, both fields are required.
            if MAX_ACCEPTABLE_BATCH_SIZE in param_dict:
                self.max_acceptable_batch_size = param_dict[MAX_ACCEPTABLE_BATCH_SIZE]
            else:
                raise ElasticityConfigError(f"Elasticity config missing {MAX_ACCEPTABLE_BATCH_SIZE}")
            if MICRO_BATCHES in param_dict:
                self.micro_batches = param_dict[MICRO_BATCHES]
            else:
                raise ElasticityConfigError(f"Elasticity config missing {MICRO_BATCHES}")
        else:
            self.max_acceptable_batch_size = param_dict.get(MAX_ACCEPTABLE_BATCH_SIZE,
                                                            MAX_ACCEPTABLE_BATCH_SIZE_DEFAULT)
            self.micro_batches = param_dict.get(MICRO_BATCHES, MICRO_BATCHES_DEFAULT)

        # Validate micro batches: must be a list of positive integers.
        if not isinstance(self.micro_batches, list):
            raise ElasticityConfigError(f"Elasticity expected value of {MICRO_BATCHES} to be a "
                                        f"list of micro batches, instead is: {type(self.micro_batches)}, "
                                        f"containing: {self.micro_batches}")

        if not all(map(lambda m: isinstance(m, int), self.micro_batches)):
            raise ElasticityConfigError(f"Elasticity expected {MICRO_BATCHES} to only contain a list of integers, "
                                        f"instead contains: {self.micro_batches}")

        if not all(map(lambda m: m > 0, self.micro_batches)):
            raise ElasticityConfigError(f"Elasticity expected {MICRO_BATCHES} to only contain positive integers, "
                                        f"instead contains: {self.micro_batches}")
        self.min_gpus = param_dict.get(MIN_GPUS, MIN_GPUS_DEFAULT)
        self.max_gpus = param_dict.get(MAX_GPUS, MAX_GPUS_DEFAULT)
        if self.min_gpus < 1 or self.max_gpus < 1:
            raise ElasticityConfigError("Elasticity min/max gpus must be > 0, "
                                        f"given min_gpus: {self.min_gpus}, max_gpus: {self.max_gpus}")
        if self.max_gpus < self.min_gpus:
            raise ElasticityConfigError("Elasticity min_gpus cannot be greater than max_gpus, "
                                        f"given min_gpus: {self.min_gpus}, max_gpus: {self.max_gpus}")

        self.model_parallel_size = param_dict.get(MODEL_PARALLEL_SIZE, MODEL_PARALLEL_SIZE_DEFAULT)
        if self.model_parallel_size < 1:
            raise ElasticityConfigError("Model-Parallel size cannot be less than 1, "
                                        f"given model-parallel size: {self.model_parallel_size}")

        self.num_gpus_per_node = param_dict.get(NUM_GPUS_PER_NODE, NUM_GPUS_PER_NODE_DEFAULT)
        if self.num_gpus_per_node < 1:
            raise ElasticityConfigError("Number of GPUs per node cannot be less than 1, "
                                        f"given number of GPUs per node: {self.num_gpus_per_node}")

        self.min_time = param_dict.get(MIN_TIME, MIN_TIME_DEFAULT)
        if self.min_time < 0:
            raise ElasticityConfigError(f"Elasticity min time needs to be >= 0: given {self.min_time}")

        self.version = param_dict.get(VERSION, VERSION_DEFAULT)
        self.prefer_larger_batch_size = param_dict.get(PREFER_LARGER_BATCH, PREFER_LARGER_BATCH_DEFAULT)
        self.ignore_non_elastic_batch_info = param_dict.get(IGNORE_NON_ELASTIC_BATCH_INFO,
                                                            IGNORE_NON_ELASTIC_BATCH_INFO_DEFAULT)

    def repr(self):
        return self.__dict__

    def __repr__(self):
        return json.dumps(self.__dict__, sort_keys=True, indent=4)
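

# A minimal usage sketch, not part of the original module. It shows how the
# "elasticity" sub-dictionary of a DeepSpeed config might be parsed and
# validated by ElasticityConfig. The string keys below follow the docstring
# example above and assume the constants in .constants map accordingly
# (e.g. MAX_ACCEPTABLE_BATCH_SIZE -> "max_train_batch_size",
# MICRO_BATCHES -> "micro_batch_sizes").
if __name__ == "__main__":
    sample_elastic_dict = {
        "enabled": True,
        "max_train_batch_size": 2000,
        "micro_batch_sizes": [2, 4, 6],
        "min_gpus": 1,
        "max_gpus": 10000,
        "min_time": 20,
        "version": 0.1,
    }
    config = ElasticityConfig(sample_elastic_dict)
    print(config)  # __repr__ dumps the parsed fields as indented JSON

    # Invalid input: micro_batch_sizes must be a list of positive integers,
    # so this raises ElasticityConfigError during construction.
    try:
        ElasticityConfig({**sample_elastic_dict, "micro_batch_sizes": [2, -4]})
    except ElasticityConfigError as e:
        print(f"rejected as expected: {e}")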