# utils.py
'''Copyright The Microsoft DeepSpeed Team'''

import re
import collections.abc
import os
import json
import itertools
import copy

from deepspeed.runtime.constants import GRADIENT_ACCUMULATION_STEPS, TRAIN_MICRO_BATCH_SIZE_PER_GPU
from ..utils import logger

def search_error(filename):
    """Returns the message after the first "Error"/"error"/"ERROR" marker in the log file, or None if no error is found."""
    if not os.path.exists(filename):
        return "stderr.log does not exist"
    with open(filename) as f:
        for line in f:
            for s in ["Error", "error", "ERROR"]:
                idx = line.find(s)
                if idx != -1:
                    return line[idx + len(s):].lstrip(": ")
    return None

def was_interruptted(filename):
    """Returns True if the log file contains a KeyboardInterrupt."""
    if not os.path.exists(filename):
        return "stderr.log does not exist"
    with open(filename) as f:
        for line in f:
            s = "KeyboardInterrupt"
            idx = line.find(s)
            if idx != -1:
                return True
    return False

def find_replace_str(value, replace_dict):
    if not isinstance(value, str):
        return str(value)

    matches = re.findall(r"\$[A-Za-z0-9_]+", value)
    for var in matches:
        var_key = var.replace("$", "").lower()
        if var_key == "nvme_path":
            continue
        assert var_key in replace_dict, f"unknown var key: {var_key}, in {replace_dict}"
        if isinstance(replace_dict[var_key], str):
            value = value.replace(var, replace_dict[var_key])
        else:
            assert len(matches) == 1, "unable to replace multiple non-string matches"
            value = replace_dict[var_key]
    return value
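
# Illustrative usage of find_replace_str (values below are hypothetical):
#   find_replace_str("$MP_SIZE", {"mp_size": "2"})   -> "2"
#   find_replace_str("$NUM_GPUS", {"num_gpus": 8})   -> 8  (a single non-string match returns the raw value)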

def find_replace(target, replace_dict):
    if isinstance(target, dict):
        for key, value in target.items():
            if isinstance(value, str):
                target[key] = find_replace_str(value, replace_dict)
            if isinstance(value, list):
                for i in range(len(value)):
                    value[i] = find_replace_str(value[i], replace_dict)
            if isinstance(value, dict):
                find_replace(value, replace_dict)
    elif isinstance(target, list):
        for i in range(len(target)):
            target[i] = str(find_replace_str(target[i], replace_dict))

def get_list(val):
    if not isinstance(val, list):
        return [val]
    else:
        return val

def combine_dict(d, u):
    for k, v in u.items():
        if isinstance(v, collections.abc.Mapping):
            d[k] = combine_dict(d.get(k, {}), v)
        else:
            if k not in d:
                d[k] = v
            else:
                if not isinstance(d[k], list):
                    d[k] = [d[k]]
                d[k].extend(i for i in get_list(v) if i not in d[k])
    return d
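
# Illustrative usage of combine_dict (values below are hypothetical):
#   combine_dict({"zero_optimization": {"stage": 1}}, {"zero_optimization": {"stage": 3}})
#   -> {"zero_optimization": {"stage": [1, 3]}}  (existing scalars are promoted to lists and extended)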

def del_if_exists(t, d):
    """Deletes a key from a dictionary if it exists.

    Args:
        t (string): target key to delete
        d (dict): dictionary to delete from
    """
    if t in d:
        del d[t]
        return
    for k, v in d.items():
        if isinstance(v, collections.abc.Mapping):
            del_if_exists(t, v)

def replace_dict(d, u, ignored_keys=[]):
    """Replaces values in dict d with values in dict u.

    Args:
        d (dict): the target dict to overwrite
        u (dict): the dict containing the values to overwrite the target dict

    Returns:
        dict d with values overwritten by the corresponding ones in dict u.
    """
    if u is not None:
        for k, v in u.items():
            if k not in ignored_keys:
                if v is None:
                    del_if_exists(k, d)
                    continue
                if isinstance(v, collections.abc.Mapping):
                    d[k] = replace_dict(d.get(k, {}), v, ignored_keys)
                else:
                    d[k] = v
    return d
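
# Illustrative usage of replace_dict (values below are hypothetical):
#   replace_dict({"train_batch_size": 8, "fp16": {"enabled": False}},
#                {"fp16": {"enabled": True}, "train_batch_size": None})
#   -> {"fp16": {"enabled": True}}  (a None value in u deletes the corresponding key from d)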

def get_val_by_key(d: dict, k):
    if k in d:
        return d[k]
    for v in d.values():
        if isinstance(v, dict):
            return get_val_by_key(v, k)
    return None


def set_val_by_key(d: dict, k, vv):
    if k in d:
        d[k] = vv
    for v in d.values():
        if isinstance(v, dict):
            set_val_by_key(v, k, vv)

def fetch_hostfile(hostfile_path):
    if not os.path.isfile(hostfile_path):
        logger.warning("Unable to find hostfile, will proceed with training "
                       "with local resources only.")
        return None

    # e.g., worker-0 slots=16
    with open(hostfile_path, 'r') as fd:
        resource_pool = collections.OrderedDict()
        for line in fd.readlines():
            line = line.strip()
            if line == '':
                # skip empty lines
                continue
            try:
                hostname, slots = line.split()
                _, slot_count = slots.split("=")
                slot_count = int(slot_count)
            except ValueError as err:
                logger.error("Hostfile is not formatted correctly, unable to "
                             "proceed with training.")
                raise err
            if hostname in resource_pool:
                logger.error("Hostfile contains duplicate hosts, unable to "
                             "proceed with training.")
                raise ValueError("host {} is already defined".format(hostname))
            resource_pool[hostname] = slot_count

    return resource_pool
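
# Illustrative hostfile (host names below are hypothetical): a file containing the two lines
#   worker-0 slots=16
#   worker-1 slots=16
# yields OrderedDict([("worker-0", 16), ("worker-1", 16)]).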

def validate_ds_config(config: dict):
    def is_False(config: dict, key):
        if config is None:
            return False
        return bool(config.get(key))

    config_zero = config.get("zero_optimization", {})
    if not config_zero:
        return True
    stage = config_zero.get("stage")
    offload = False
    if stage == 1:
        return True
    elif stage == 2:
        if is_False(config_zero, "cpu_offload") and is_False(config_zero, "cpu_offload_params"):
            return False
    elif stage == 3:
        offload_devices = ["cpu", "nvme"]
        if config_zero.get("offload_optimizer", {}).get("device") in offload_devices:
            offload = True
        if config_zero.get("offload_param", {}).get("device") in offload_devices:
            offload = True
    else:
        return True

    # HF requires that "ZeRO Offload can only work with DeepSpeed optimizers"
    if offload and not config.get("optimizer"):
        return False

    return True
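
# Illustrative usage of validate_ds_config (config values below are hypothetical):
#   validate_ds_config({"zero_optimization": {"stage": 3, "offload_optimizer": {"device": "cpu"}}})
#   -> False  (offloading without a DeepSpeed optimizer section is rejected)
#   validate_ds_config({"zero_optimization": {"stage": 3, "offload_optimizer": {"device": "cpu"}},
#                       "optimizer": {"type": "Adam"}})
#   -> True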

def remove_dupe_dicts(l):
    """ Removes duplicate dictionaries from a list. Uses list comprehension and the json library to sort and stringify each dictionary and the set data type to ensure unique values. Works with nested data structures.

    Args:
        l (list): a list of (nested) data structures.

    Returns:
        A list of unique values.
    """
    list_of_strings = [json.dumps(d, sort_keys=True) for d in l]
    list_of_strings = set(list_of_strings)
    return [json.loads(s) for s in list_of_strings]
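
# Illustrative usage of remove_dupe_dicts (values below are hypothetical):
#   remove_dupe_dicts([{"stage": 2}, {"stage": 2}, {"stage": 3}])
#   -> [{"stage": 2}, {"stage": 3}]  (order is not guaranteed because a set is used)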

def prune_config(config, ignored_keys=[]):
    """ Prunes the input configuration in place by deleting the ignored sections.

    Args:
        config (dict): A configuration dictionary.
        ignored_keys (list, optional): the keys of the sections to delete. Defaults to [].
    """
    if ignored_keys:
        for k in ignored_keys:

            def find_del_key(d: dict, k: str):
                if k in d:
                    del d[k]
                else:
                    for dd in d.values():
                        if isinstance(dd, dict):
                            find_del_key(dd, k)

            find_del_key(config, k)

def prune_configs(configs, ignored_keys=[]):
    """ Prunes the input list of configurations

    Args:
        configs (list): A list of configuration dictionaries.
        ignored_keys (list, optional): the keys of the sections to delete. Defaults to [].

    Returns:
        A list of valid and unique configuration dictionaries.
    """
    pruned_list = []
    for config in configs:
        prune_config(config, ignored_keys)
        pruned_list.append(config)
    return remove_dupe_dicts(pruned_list)

def get_tuning_keys(tuning_space: dict):
    """Outputs the list of tunable parameters in the tuning space dict.

    Args:
        tuning_space (dict): a configuration dictionary containing tunable parameters as lists of values.

    Returns:
        A list of strings
    """
    tuning_keys = []
    for key, val in tuning_space.items():
        if isinstance(val, dict):
            tuning_keys.extend(get_tuning_keys(val))
        if isinstance(val, list) and len(val) > 1:
            tuning_keys.append(key)
    return tuning_keys
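
# Illustrative usage of get_tuning_keys (values below are hypothetical):
#   get_tuning_keys({"zero_optimization": {"stage": [1, 2, 3]}, "train_micro_batch_size_per_gpu": [4, 8]})
#   -> ["stage", "train_micro_batch_size_per_gpu"]  (only lists with more than one value are tunable)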

def get_all_configs(tuning_space: dict, ignore_keys=None):
    """ Splits the tuning space dictionary to result in all combinations of values.

    Args:
        tuning_space (dict): the tuning space where tunable parameters are lists of values.
    """
    def gen_combinations(d: dict):
        keys, values = d.keys(), d.values()
        for v in values:
            if not isinstance(v, list):
                v = [v]
        values_choices = (gen_combinations(v) if isinstance(v, dict) else get_list(v)
                          for v in values)
        for comb in itertools.product(*values_choices):
            yield dict(zip(keys, comb))

    all_configs = []
    ignored_key_vals = {}
    for ik in (ignore_keys or []):  # guard against the default of None
        ignored_key_vals[ik] = tuning_space.get(ik, {})
        del_if_exists(ik, tuning_space)
    for c in gen_combinations(tuning_space):
        replace_dict(c, ignored_key_vals)
        all_configs.append(c)
    return all_configs
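
# Illustrative usage of get_all_configs (values below are hypothetical):
#   get_all_configs({"zero_optimization": {"stage": [2, 3]}, "train_micro_batch_size_per_gpu": [4, 8]},
#                   ignore_keys=[])
#   -> 4 configs, one for every combination of stage in (2, 3) and micro batch size in (4, 8)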

def canonical_name(config: dict, tuning_keys=None, prefix="", omit_val=False):
    """ Generates a name from the acronyms of the tuning keys in the config dict. TRAIN_MICRO_BATCH_SIZE_PER_GPU and GRADIENT_ACCUMULATION_STEPS are always included in the tuning keys.

    Args:
        config (dict): the config dict used to generate the name
        tuning_keys (list, optional): the tuning keys used to generate the name. Defaults to None.
        prefix (str, optional): a string added to the beginning of the name. Defaults to "".
    """
    if TRAIN_MICRO_BATCH_SIZE_PER_GPU not in tuning_keys:
        tuning_keys.append(TRAIN_MICRO_BATCH_SIZE_PER_GPU)
    if GRADIENT_ACCUMULATION_STEPS not in tuning_keys:
        tuning_keys.append(GRADIENT_ACCUMULATION_STEPS)
    tuning_keys.sort()

    def get_offload_name(offload_config):
        cname = ""
        if offload_config is None:
            return "None_"
        for key, val in offload_config.items():
            key = "".join(map(lambda c: c[0], key.split('_')))
            if (isinstance(val, int) or isinstance(val, float)) and val > 9000:
                cname += key + '{:.1e}'.format(val) + "_"
            else:
                if isinstance(val, bool):
                    val = "T" if val else "F"
                cname += f"{key}{val}_"
        return cname

    def get_name_by_keys(config: dict, tuning_keys=None, omit_val=False):
        cname = ""
        if not tuning_keys or config is None:
            return cname
        for key, val in config.items():
            # skip the arg_mappings section when naming the exp file
            if key == "arg_mappings":
                continue
            if key == "offload_param":
                cname += "op_"
                if not omit_val:
                    cname += get_offload_name(val)
                continue
            if key == "offload_optimizer":
                cname += "oo_"
                if not omit_val:
                    cname += get_offload_name(val)
                continue
            # recursively call the func to get name for the child dicts
            if isinstance(val, dict):
                n = get_name_by_keys(val, tuning_keys, omit_val=omit_val)
                if n != "":
                    cname += n + "_"
            if tuning_keys and key not in tuning_keys:
                continue
            key_str = "".join(map(lambda c: c[0], key.split('_')))
            if not omit_val:
                if (isinstance(val, int) or isinstance(val, float)) and val > 9000:
                    cname += key_str + '{:.1e}'.format(val) + "_"
                else:
                    if isinstance(val, bool):
                        val = "T" if val else "F"
                    cname += f"{key_str}{val}_"
            else:
                cname += key_str + "_"
        return cname[:-1]

    name = get_name_by_keys(config, tuning_keys, omit_val=omit_val)
    return prefix + (name if name != "" else "exp")
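
# Illustrative usage of canonical_name (values below are hypothetical; assumes the constants
# TRAIN_MICRO_BATCH_SIZE_PER_GPU and GRADIENT_ACCUMULATION_STEPS resolve to
# "train_micro_batch_size_per_gpu" and "gradient_accumulation_steps"):
#   canonical_name({"train_micro_batch_size_per_gpu": 4, "gradient_accumulation_steps": 1,
#                   "zero_optimization": {"stage": 3}}, tuning_keys=["stage"], prefix="z3_")
#   -> "z3_tmbspg4_gas1_s3"  (each key is abbreviated to the first letter of every underscore-separated word)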

def get_first_config(config: dict):
    if not config:
        return None
    cfg = copy.deepcopy(config)
    for key, val in cfg.items():
        if isinstance(val, dict):
            if key == "optimizer":  # use user defined optimizer which might have lists of values as params
                cfg[key] = val
            else:
                cfg[key] = get_first_config(val)
        if isinstance(val, list) and len(val) > 0:
            cfg[key] = val[0]
    return cfg
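
# Illustrative usage of get_first_config (values below are hypothetical):
#   get_first_config({"train_micro_batch_size_per_gpu": [4, 8, 16], "zero_optimization": {"stage": [2, 3]}})
#   -> {"train_micro_batch_size_per_gpu": 4, "zero_optimization": {"stage": 2}}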

def write_experiments(exps: list, exps_dir: str):
    exp_paths = []
    for exp in exps:
        exp_name = exp['name']
        # write the expr config to a json file
        exp_path = os.path.join(exps_dir, f'{exp_name}.json')
        with open(exp_path, 'w') as fd:
            json.dump(exp, fd)
        exp_paths.append(exp_path)
    return exp_paths

def memory_to_string(n, postfix="", units=None, precision=2):
    if units is None:
        if n // 10**12 > 0:
            return str(round(n / 1024**4, precision)) + " T" + postfix
        if n // 10**9 > 0:
            return str(round(n / 1024**3, precision)) + " G" + postfix
        elif n // 10**6 > 0:
            return str(round(n / 1024**2, precision)) + " M" + postfix
        elif n // 10**3 > 0:
            return str(round(n / 1024, precision)) + " K" + postfix
        else:
            return str(n) + " "
    else:
        if units == "T":
            return str(round(n / 1024**4, precision)) + " " + units
        if units == "G" + postfix:
            return str(round(n / 1024**3, precision)) + " " + units
        elif units == "M" + postfix:
            return str(round(n / 1024**2, precision)) + " " + units
        elif units == "K" + postfix:
            return str(round(n / 1024, precision)) + " " + units
        else:
            return str(n) + " "
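
# Illustrative usage of memory_to_string (values below are hypothetical):
#   memory_to_string(3 * 1024**3, postfix="B")  -> "3.0 GB"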

def number_to_string(n, postfix="", units=None, precision=2):
    if units is None:
        if n // 10**9 > 0:
            return str(round(n / 1000**3, precision)) + " B" + postfix
        if n // 10**6 > 0:
            return str(round(n / 1000**2, precision)) + " M" + postfix
        elif n // 10**3 > 0:
            return str(round(n / 1000**1, precision)) + " K" + postfix
        else:
            return str(n) + " "
    else:
        if units == "B" + postfix:
            return str(round(n / 1000**3, precision)) + " " + units
        elif units == "M" + postfix:
            return str(round(n / 1000**2, precision)) + " " + units
        elif units == "K" + postfix:
            return str(round(n / 1000**1, precision)) + " " + units
        else:
            return str(n) + " "
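
# Illustrative usage of number_to_string (values below are hypothetical):
#   number_to_string(1_300_000_000)  -> "1.3 B"  (decimal units, unlike memory_to_string which uses powers of 1024)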