# aws.py
import functools
import io
import os
import sys
import time
from copy import deepcopy
from typing import Optional

import boto3
import requests
from aws_requests_auth.boto_utils import BotoAWSRequestsAuth
from botocore.exceptions import ClientError

from ray_release.logger import logger
from ray_release.util import DeferredEnvVar
  13. RELEASE_AWS_BUCKET = DeferredEnvVar(
  14. "RELEASE_AWS_BUCKET", "ray-release-automation-results"
  15. )
  16. RELEASE_AWS_DB_NAME = DeferredEnvVar("RELEASE_AWS_DB_NAME", "ray_ci")
  17. RELEASE_AWS_DB_TABLE = DeferredEnvVar("RELEASE_AWS_DB_TABLE", "release_test_result")
  18. RELEASE_AWS_ANYSCALE_SECRET_ARN = DeferredEnvVar(
  19. "RELEASE_AWS_ANYSCALE_SECRET_ARN",
  20. "arn:aws:secretsmanager:us-west-2:029272617770:secret:"
  21. "release-automation/"
  22. "anyscale-token20210505220406333800000001-BcUuKB",
  23. )
  24. # If changed, update
  25. # test_cluster_manager::MinimalSessionManagerTest.testClusterComputeExtraTags
  26. RELEASE_AWS_RESOURCE_TYPES_TO_TRACK_FOR_BILLING = [
  27. "instance",
  28. "volume",
  29. ]
  30. S3_PRESIGNED_CACHE = None
  31. S3_PRESIGNED_KEY = "rayci_result_bucket"
  32. def get_secret_token(secret_id: str) -> str:
  33. return boto3.client("secretsmanager", region_name="us-west-2").get_secret_value(
  34. SecretId=secret_id
  35. )["SecretString"]
  36. def maybe_fetch_api_token():
  37. from anyscale.authenticate import AuthenticationBlock
  38. if not os.environ.get("ANYSCALE_CLI_TOKEN"):
  39. try:
  40. token, _ = AuthenticationBlock._load_credentials()
  41. logger.info("Loaded anyscale credentials from local storage.")
  42. os.environ["ANYSCALE_CLI_TOKEN"] = token
  43. return
  44. except Exception:
  45. pass # Ignore errors
  46. logger.info("Missing ANYSCALE_CLI_TOKEN, retrieving from AWS secrets store")
  47. # NOTE(simon) This should automatically retrieve
  48. # release-automation@anyscale.com's anyscale token
  49. cli_token = boto3.client(
  50. "secretsmanager", region_name="us-west-2"
  51. ).get_secret_value(SecretId=str(RELEASE_AWS_ANYSCALE_SECRET_ARN))[
  52. "SecretString"
  53. ]
  54. os.environ["ANYSCALE_CLI_TOKEN"] = cli_token
  55. def add_tags_to_aws_config(aws_config: dict, tags_to_add: dict, resource_types: list):
  56. aws_config = deepcopy(aws_config)
  57. tag_specifications = aws_config.setdefault("TagSpecifications", [])
  58. for resource in resource_types:
  59. # Check if there is already a tag specification for the resource.
  60. # If so, return first item.
  61. resource_tags: dict = next(
  62. (x for x in tag_specifications if x.get("ResourceType", "") == resource),
  63. None,
  64. )
  65. # If no tag specification exists, add
  66. if resource_tags is None:
  67. resource_tags = {"ResourceType": resource, "Tags": []}
  68. tag_specifications.append(resource_tags)
  69. # Add our tags to the specification
  70. tags = resource_tags["Tags"]
  71. for key, value in tags_to_add.items():
  72. tags.append({"Key": key, "Value": value})
  73. return aws_config
  74. def upload_to_s3(src_path: str, bucket: str, key_path: str) -> Optional[str]:
  75. """Upload a file to a S3 bucket
  76. This assumes the bucket has public read access on the objects uploaded.
  77. Args:
  78. src_path: local file path.
  79. bucket: S3 bucket name.
  80. key_path: destination url of the uploaded object.
  81. Return:
  82. HTTP URL where the uploaded object could be downloaded if successful,
  83. or None if fails.
  84. Raises:
  85. ClientError if upload fails
  86. """
  87. s3_client = boto3.client("s3")
  88. try:
  89. s3_client.upload_file(Filename=src_path, Bucket=bucket, Key=key_path)
  90. except ClientError as e:
  91. logger.warning(f"Failed to upload to s3: {e}")
  92. return None
  93. return f"https://{bucket}.s3.us-west-2.amazonaws.com/{key_path}"
  94. def _retry(f):
  95. def inner():
  96. resp = None
  97. for _ in range(5):
  98. resp = f()
  99. print("Getting Presigned URL, status_code", resp.status_code)
  100. if resp.status_code >= 500:
  101. print("errored, retrying...")
  102. print(resp.text)
  103. time.sleep(5)
  104. else:
  105. return resp
  106. if resp is None or resp.status_code >= 500:
  107. print("still errorred after many retries")
  108. sys.exit(1)
  109. return inner
  110. @_retry
  111. def _get_s3_rayci_test_data_presigned():
  112. global S3_PRESIGNED_CACHE
  113. if not S3_PRESIGNED_CACHE:
  114. auth = BotoAWSRequestsAuth(
  115. aws_host="vop4ss7n22.execute-api.us-west-2.amazonaws.com",
  116. aws_region="us-west-2",
  117. aws_service="execute-api",
  118. )
  119. S3_PRESIGNED_CACHE = requests.get(
  120. "https://vop4ss7n22.execute-api.us-west-2.amazonaws.com/endpoint/",
  121. auth=auth,
  122. params={"job_id": os.environ["BUILDKITE_JOB_ID"]},
  123. )
  124. return S3_PRESIGNED_CACHE
  125. def s3_put_rayci_test_data(Bucket: str, Key: str, Body: str):
  126. try:
  127. boto3.client("s3").put_object(
  128. Bucket=Bucket,
  129. Key=Key,
  130. Body=Body,
  131. )
  132. except ClientError:
  133. # or use presigned URL
  134. resp = _get_s3_rayci_test_data_presigned().json()[S3_PRESIGNED_KEY]
  135. data = resp["fields"]
  136. data.update(
  137. {
  138. "key": Key,
  139. "file": io.StringIO(Body),
  140. }
  141. )
  142. requests.post(resp["url"], files=data)