error.py

from ray.rllib.utils.annotations import PublicAPI


@PublicAPI
class UnsupportedSpaceException(Exception):
    """Error for an unsupported action or observation space."""

    pass


@PublicAPI
class EnvError(Exception):
    """Error if we encounter an error during RL environment validation."""

    pass
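
# NOTE: Illustrative usage sketch only, not part of the original module. It shows
# how callers might raise the exceptions above during env/space validation; the
# space check is a hypothetical placeholder, not RLlib's actual validation logic.
#
#     import gym
#
#     def check_action_space(space):
#         if not isinstance(space, (gym.spaces.Box, gym.spaces.Discrete)):
#             raise UnsupportedSpaceException(
#                 "Action space {} is not supported here.".format(space))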

# -------
# Error messages
# -------

# Message explaining there are no GPUs available for the
# num_gpus=n or num_gpus_per_worker=m settings.
ERR_MSG_NO_GPUS = \
    """Found {} GPUs on your machine (GPU devices found: {})! If your machine
    does not have any GPUs, you should set the config keys `num_gpus` and
    `num_gpus_per_worker` to 0 (they may be set to 1 by default for your
    particular RL algorithm)."""
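
# NOTE: Illustrative sketch only, not part of the original module. This is the
# config change the message above asks for, shown for a plain Trainer config
# dict (the two keys are real RLlib config keys; the dict itself is a placeholder):
#
#     config = {
#         "num_gpus": 0,             # no GPUs for the local (driver) process
#         "num_gpus_per_worker": 0,  # no GPUs for the rollout workers
#     }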

ERR_MSG_INVALID_ENV_DESCRIPTOR = \
    """The env string you provided ('{}') is:
a) Not a supported/installed environment.
b) Not a tune-registered environment creator.
c) Not a valid env class string.

Try one of the following:
a) For Atari support: `pip install gym[atari] autorom[accept-rom-license]`.
   For VizDoom support: Install VizDoom
   (https://github.com/mwydmuch/ViZDoom/blob/master/doc/Building.md) and
   `pip install vizdoomgym`.
   For PyBullet support: `pip install pybullet`.
b) To register your custom env, do `from ray import tune;
   tune.register_env('[name]', lambda cfg: [return env obj from here using cfg])`.
   Then in your config, do `config['env'] = [name]`.
c) Make sure you provide a fully qualified classpath, e.g.:
   `ray.rllib.examples.env.repeat_after_me_env.RepeatAfterMeEnv`
"""

# -------
# HOWTO_ strings can be added to any error/warning/info message
# to explain to the user how to actually fix the encountered problem.
# -------

# HOWTO change the RLlib config, depending on how the user runs the job.
HOWTO_CHANGE_CONFIG = """
To change the config for the `rllib train|rollout` command, use
  `--config={'[key]': '[value]'}` on the command line.
To change the config for `tune.run()` in a script: Modify the python dict
  passed to `tune.run(config=[...])`.
To change the config for an RLlib Trainer instance: Modify the python dict
  passed to the Trainer's constructor, e.g. `PPOTrainer(config=[...])`.
"""