copy_files.py

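"""Helper script run in Buildkite CI.

It authenticates against an API Gateway endpoint using the job's AWS
credentials, then either uploads build artifacts (wheels, jars, Bazel event
logs) to S3 via presigned POST URLs, or logs in to Docker Hub with the
password returned by that endpoint.

Example invocation (assumed; the actual arguments are supplied by the
Buildkite pipeline):

    python copy_files.py --destination wheels --path ./.whl
"""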
import argparse
import os
import subprocess
import sys
import time
from collections import OrderedDict
from typing import List

import requests
from aws_requests_auth.boto_utils import BotoAWSRequestsAuth
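
# Retry helper: call the wrapped function up to 5 times, sleeping 5 seconds
# after each HTTP 5xx response, and exit non-zero if it never succeeds.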
def retry(f):
    def inner():
        resp = None
        for _ in range(5):
            resp = f()
            print("Getting Presigned URL, status_code", resp.status_code)
            if resp.status_code >= 500:
                print("errored, retrying...")
                print(resp.text)
                time.sleep(5)
            else:
                return resp
        if resp is None or resp.status_code >= 500:
            print("still errored after many retries")
            sys.exit(1)

    return inner
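
# Fetch the presigned upload URLs (and the Docker password) for this Buildkite
# job from the CI endpoint, signing the request with AWS credentials resolved
# by boto (SigV4).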
@retry
def perform_auth():
    auth = BotoAWSRequestsAuth(
        aws_host="vop4ss7n22.execute-api.us-west-2.amazonaws.com",
        aws_region="us-west-2",
        aws_service="execute-api",
    )
    resp = requests.get(
        "https://vop4ss7n22.execute-api.us-west-2.amazonaws.com/endpoint/",
        auth=auth,
        params={"job_id": os.environ["BUILDKITE_JOB_ID"]},
    )
    return resp
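
# Log in to Docker Hub with the password returned by the auth endpoint.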
def handle_docker_login(resp):
    pwd = resp.json()["docker_password"]
    subprocess.call(
        ["docker", "login", "--username", "raytravisbot", "--password", pwd]
    )
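
# Expand a file or directory path into a flat list of files to upload
# (non-recursive for directories).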
def gather_paths(dir_path) -> List[str]:
    dir_path = dir_path.replace("/", os.path.sep)
    assert os.path.exists(dir_path)
    if os.path.isdir(dir_path):
        paths = [os.path.join(dir_path, f) for f in os.listdir(dir_path)]
    else:
        paths = [dir_path]
    return paths
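
# Map each --destination value to the key in the auth response that holds the
# presigned POST data for the corresponding S3 bucket.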
dest_resp_mapping = {
    "wheels": "presigned_resp_prod_wheels",
    "branch_wheels": "presigned_resp_prod_wheels",
    "jars": "presigned_resp_prod_wheels",
    "branch_jars": "presigned_resp_prod_wheels",
    "logs": "presigned_logs",
}
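
# Upload each file via S3 presigned POST. The form fields come from the auth
# response; an OrderedDict is used so the "file" field is appended last, since
# S3 ignores form fields that follow the file content in a POST upload.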
def upload_paths(paths, resp, destination):
    dest_key = dest_resp_mapping[destination]
    c = resp.json()[dest_key]
    of = OrderedDict(c["fields"])
    sha = os.environ["BUILDKITE_COMMIT"]
    branch = os.environ["BUILDKITE_BRANCH"]
    bk_job_id = os.environ["BUILDKITE_JOB_ID"]
    current_os = sys.platform
    for path in paths:
        fn = os.path.split(path)[-1]
        of["key"] = {
            "wheels": f"latest/{fn}",
            "branch_wheels": f"{branch}/{sha}/{fn}",
            "jars": f"jars/latest/{current_os}/{fn}",
            "branch_jars": f"jars/{branch}/{sha}/{current_os}/{fn}",
            "logs": f"bazel_events/{branch}/{sha}/{bk_job_id}/{fn}",
        }[destination]
        of["file"] = open(path, "rb")
        r = requests.post(c["url"], files=of)
        print(f"Uploaded {path} to {of['key']}", r.status_code)
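
# CLI entry point: validate the destination and required Buildkite environment
# variables, fetch credentials, then either perform the Docker login or upload
# the gathered files to the requested destination.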
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Helper script to upload files to S3 bucket"
    )
    parser.add_argument("--path", type=str, required=False)
    parser.add_argument("--destination", type=str)
    args = parser.parse_args()

    assert args.destination in {
        "branch_jars",
        "branch_wheels",
        "jars",
        "logs",
        "wheels",
        "docker_login",
    }
    assert "BUILDKITE_JOB_ID" in os.environ
    assert "BUILDKITE_COMMIT" in os.environ

    resp = perform_auth()

    if args.destination == "docker_login":
        handle_docker_login(resp)
    else:
        paths = gather_paths(args.path)
        print("Planning to upload", paths)
        upload_paths(paths, resp, args.destination)