# copy_files.py — upload build artifacts (wheels/jars/logs) to S3 via presigned URLs.
import argparse
import functools
import os
import subprocess
import sys
import time
from collections import OrderedDict

import requests
from aws_requests_auth.boto_utils import BotoAWSRequestsAuth
  9. def retry(f):
  10. def inner():
  11. resp = None
  12. for _ in range(5):
  13. resp = f()
  14. print("Getting Presigned URL, status_code", resp.status_code)
  15. if resp.status_code >= 500:
  16. print("errored, retrying...")
  17. print(resp.text)
  18. time.sleep(5)
  19. else:
  20. return resp
  21. if resp is None or resp.status_code >= 500:
  22. print("still errorred after many retries")
  23. sys.exit(1)
  24. return inner
  25. @retry
  26. def perform_auth():
  27. auth = BotoAWSRequestsAuth(
  28. aws_host="vop4ss7n22.execute-api.us-west-2.amazonaws.com",
  29. aws_region="us-west-2",
  30. aws_service="execute-api",
  31. )
  32. resp = requests.get(
  33. "https://vop4ss7n22.execute-api.us-west-2.amazonaws.com/endpoint/",
  34. auth=auth,
  35. params={"job_id": os.environ["BUILDKITE_JOB_ID"]},
  36. )
  37. return resp
  38. def handle_docker_login(resp):
  39. pwd = resp.json()["docker_password"]
  40. subprocess.check_call(
  41. ["docker", "login", "--username", "raytravisbot", "--password", pwd]
  42. )
  43. def gather_paths(dir_path):
  44. dir_path = dir_path.replace("/", os.path.sep)
  45. assert os.path.exists(dir_path)
  46. if os.path.isdir(dir_path):
  47. paths = [os.path.join(dir_path, f) for f in os.listdir(dir_path)]
  48. else:
  49. paths = [dir_path]
  50. return paths
  51. dest_resp_mapping = {
  52. "wheels": "presigned_resp_prod_wheels",
  53. "branch_wheels": "presigned_resp_prod_wheels",
  54. "jars": "presigned_resp_prod_wheels",
  55. "branch_jars": "presigned_resp_prod_wheels",
  56. "logs": "presigned_logs",
  57. }
  58. def upload_paths(paths, resp, destination):
  59. dest_key = dest_resp_mapping[destination]
  60. c = resp.json()[dest_key]
  61. of = OrderedDict(c["fields"])
  62. sha = os.environ["BUILDKITE_COMMIT"]
  63. branch = os.environ["BUILDKITE_BRANCH"]
  64. bk_job_id = os.environ["BUILDKITE_JOB_ID"]
  65. current_os = sys.platform
  66. for path in paths:
  67. fn = os.path.split(path)[-1]
  68. of["key"] = {
  69. "wheels": f"latest/{fn}",
  70. "branch_wheels": f"{branch}/{sha}/{fn}",
  71. "jars": f"jars/latest/{current_os}/{fn}",
  72. "branch_jars": f"jars/{branch}/{sha}/{current_os}/{fn}",
  73. "logs": f"bazel_events/{branch}/{sha}/{bk_job_id}/{fn}",
  74. }[destination]
  75. of["file"] = open(path, "rb")
  76. r = requests.post(c["url"], files=of)
  77. print(f"Uploaded {path} to {of['key']}", r.status_code)
  78. if __name__ == "__main__":
  79. parser = argparse.ArgumentParser(
  80. description="Helper script to upload files to S3 bucket"
  81. )
  82. parser.add_argument("--path", type=str, required=False)
  83. parser.add_argument("--destination", type=str)
  84. args = parser.parse_args()
  85. if os.environ.get("RAYCI_SKIP_UPLOAD", "false") == "true":
  86. print("Skipping upload.")
  87. sys.exit(0)
  88. assert args.destination in {
  89. "branch_jars",
  90. "branch_wheels",
  91. "jars",
  92. "logs",
  93. "wheels",
  94. "docker_login",
  95. }
  96. assert "BUILDKITE_JOB_ID" in os.environ
  97. assert "BUILDKITE_COMMIT" in os.environ
  98. resp = perform_auth()
  99. if args.destination == "docker_login":
  100. handle_docker_login(resp)
  101. else:
  102. paths = gather_paths(args.path)
  103. print("Planning to upload", paths)
  104. upload_paths(paths, resp, args.destination)