wheel_urls.py 3.0 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687
  1. """Test downloading Ray wheels for currently running commit
  2. This test runs on a single node and verifies that wheel URLs on all platforms
  3. for the currently running Ray commit are valid. This test is necessary to
  4. catch changes in the format or location of uploaded wheels. A test like this
  5. is not straightforward to add in pre-merge CI because at pre-merge time, there
  6. is no commit to master yet and no uploaded wheels.
  7. Runtime environments use these URLs to download the currently running Ray wheel
  8. into isolated conda environments on each worker.
  9. Test owner: architkulkarni
  10. Acceptance criteria: Should run through and print "PASSED"
  11. """
  12. import ray
  13. import time
  14. import requests
  15. import pprint
  16. import ray._private.ray_constants as ray_constants
  17. from ray._private.utils import get_master_wheel_url, get_release_wheel_url
  18. from ray._private.test_utils import safe_write_to_results_json
  19. def update_progress(result):
  20. result["last_update"] = time.time()
  21. safe_write_to_results_json(result)
  22. if __name__ == "__main__":
  23. # Fail if running on a build from source that doesn't have a commit and
  24. # hasn't been uploaded as a wheel to AWS.
  25. assert "RAY_COMMIT_SHA" not in ray.__commit__, ray.__commit__
  26. retry = set()
  27. for sys_platform in ["darwin", "linux", "win32"]:
  28. for py_version in ray_constants.RUNTIME_ENV_CONDA_PY_VERSIONS:
  29. if "dev" in ray.__version__:
  30. url = get_master_wheel_url(
  31. ray_commit=ray.__commit__,
  32. sys_platform=sys_platform,
  33. ray_version=ray.__version__,
  34. py_version=py_version,
  35. )
  36. else:
  37. url = get_release_wheel_url(
  38. ray_commit=ray.__commit__,
  39. sys_platform=sys_platform,
  40. ray_version=ray.__version__,
  41. py_version=py_version,
  42. )
  43. if requests.head(url).status_code != 200:
  44. print("URL not found (yet?):", url)
  45. retry.add(url)
  46. continue
  47. print("Successfully tested URL: ", url)
  48. update_progress({"url": url})
  49. num_retries = 0
  50. MAX_NUM_RETRIES = 12
  51. while retry and num_retries < MAX_NUM_RETRIES:
  52. print(
  53. f"There are {len(retry)} URLs to retry. Sleeping 10 minutes "
  54. f"to give some time for wheels to be built. "
  55. f"Trial {num_retries + 1}/{MAX_NUM_RETRIES}."
  56. )
  57. print("List of URLs to retry:", retry)
  58. time.sleep(600)
  59. print("Retrying now...")
  60. for url in list(retry):
  61. if requests.head(url).status_code != 200:
  62. print(f"URL still not found: {url}")
  63. else:
  64. print("Successfully tested URL: ", url)
  65. update_progress({"url": url})
  66. retry.remove(url)
  67. num_retries = num_retries + 1
  68. if retry:
  69. print("FAILED")
  70. print("List of URLs not available after all retries: ")
  71. pprint.pprint(list(retry))
  72. else:
  73. print("PASSED")