# utils.py
  1. # Copyright (c) Microsoft Corporation.
  2. # SPDX-License-Identifier: Apache-2.0
  3. # DeepSpeed Team
  4. import numpy as np
  5. import itertools
  6. from ..utils import *
  7. import collections.abc
  8. def index_to_feature(p, dims):
  9. """convert index form (single integer) to feature form (vector)"""
  10. feature = []
  11. for dim in dims:
  12. feature.append(p % dim)
  13. p //= dim
  14. return feature
  15. def feature_to_index(feature, dims):
  16. """convert feature form (vector) to index form (single integer)"""
  17. p = 0
  18. for j, k in enumerate(feature):
  19. print("j:", "k:", k, "dims", dims[:j])
  20. p += int(np.prod(dims[:j])) * k
  21. return p
  22. def dict_to_dims(tuning_space):
  23. dims = []
  24. for key, val in tuning_space.items():
  25. if isinstance(val, dict):
  26. dims.extend(dict_to_dims(val))
  27. elif isinstance(val, list):
  28. dims.append(len(val))
  29. else:
  30. dims.append(1)
  31. return dims
  32. def gen_combinations(d: dict):
  33. keys, values = d.keys(), d.values()
  34. for v in values:
  35. if not isinstance(v, list):
  36. v = [v]
  37. values_choices = (gen_combinations(v) if isinstance(v, dict) else get_list(v) for v in values)
  38. for comb in itertools.product(*values_choices):
  39. yield dict(zip(keys, comb))
  40. def flatten(d, parent_key='', sep='_'):
  41. items = []
  42. for k, v in d.items():
  43. new_key = parent_key + sep + k if parent_key else k
  44. if isinstance(v, collections.abc.MutableMapping):
  45. items.extend(flatten(v, new_key, sep=sep).items())
  46. else:
  47. items.append((new_key, v))
  48. return dict(items)
  49. def dict_to_feature(feature_dict, keys, max_value=None):
  50. """Extract values from dict"""
  51. feature = []
  52. for key, val in feature_dict.items(): # First level
  53. if key not in keys:
  54. continue
  55. if val is None or val == "auto" or key == "autotuning" or val == "":
  56. continue
  57. if isinstance(val, dict):
  58. feature.append(dict_to_feature(val, max_value))
  59. else:
  60. feature.append(float(val))
  61. # normalization, should not matter in tree models
  62. if max_value is not None:
  63. norm_feature = []
  64. for f, mv in zip(feature, max_value):
  65. norm_feature.append(f / mv)
  66. feature = norm_feature
  67. return feature