# aws.py

import os
from copy import deepcopy
from typing import Optional

import boto3
from botocore.exceptions import ClientError

from ray_release.logger import logger
from ray_release.util import DeferredEnvVar

RELEASE_AWS_BUCKET = DeferredEnvVar(
    "RELEASE_AWS_BUCKET", "ray-release-automation-results"
)
RELEASE_AWS_DB_NAME = DeferredEnvVar("RELEASE_AWS_DB_NAME", "ray_ci")
RELEASE_AWS_DB_TABLE = DeferredEnvVar("RELEASE_AWS_DB_TABLE", "release_test_result")

RELEASE_AWS_ANYSCALE_SECRET_ARN = DeferredEnvVar(
    "RELEASE_AWS_ANYSCALE_SECRET_ARN",
    "arn:aws:secretsmanager:us-west-2:029272617770:secret:"
    "release-automation/"
    "anyscale-token20210505220406333800000001-BcUuKB",
)

# If changed, update
# test_cluster_manager::MinimalSessionManagerTest.testClusterComputeExtraTags
RELEASE_AWS_RESOURCE_TYPES_TO_TRACK_FOR_BILLING = [
    "instance",
    "volume",
]
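
# Illustrative note (assumption about ray_release.util internals): DeferredEnvVar
# appears to resolve its environment variable lazily, falling back to the default
# passed above; the str(...) cast on the secret ARN below suggests resolution
# happens on conversion. A hypothetical override would look like:
#
#   os.environ["RELEASE_AWS_DB_NAME"] = "my_ci_db"  # hypothetical value
#   str(RELEASE_AWS_DB_NAME)  # -> "my_ci_db"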


def maybe_fetch_api_token():
    from anyscale.authenticate import AuthenticationBlock

    if not os.environ.get("ANYSCALE_CLI_TOKEN"):
        try:
            token, _ = AuthenticationBlock._load_credentials()
            logger.info("Loaded anyscale credentials from local storage.")
            os.environ["ANYSCALE_CLI_TOKEN"] = token
            return
        except Exception:
            pass  # Ignore errors

        logger.info("Missing ANYSCALE_CLI_TOKEN, retrieving from AWS secrets store")
        # NOTE(simon) This should automatically retrieve
        # release-automation@anyscale.com's anyscale token
        cli_token = boto3.client(
            "secretsmanager", region_name="us-west-2"
        ).get_secret_value(SecretId=str(RELEASE_AWS_ANYSCALE_SECRET_ARN))[
            "SecretString"
        ]
        os.environ["ANYSCALE_CLI_TOKEN"] = cli_token
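
# Illustrative usage (assumption: called once during setup, before any code
# that needs Anyscale credentials):
#
#   maybe_fetch_api_token()
#   token = os.environ["ANYSCALE_CLI_TOKEN"]  # set from local creds or AWS secret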


def add_tags_to_aws_config(
    aws_config: dict, tags_to_add: dict, resource_types: list
) -> dict:
    # Work on a copy so the caller's config is not mutated.
    aws_config = deepcopy(aws_config)
    tag_specifications = aws_config.setdefault("TagSpecifications", [])

    for resource in resource_types:
        # Check if there is already a tag specification for the resource.
        # If so, use the first matching item.
        resource_tags: Optional[dict] = next(
            (x for x in tag_specifications if x.get("ResourceType", "") == resource),
            None,
        )

        # If no tag specification exists, add one.
        if resource_tags is None:
            resource_tags = {"ResourceType": resource, "Tags": []}
            tag_specifications.append(resource_tags)

        # Add our tags to the specification.
        tags = resource_tags["Tags"]
        for key, value in tags_to_add.items():
            tags.append({"Key": key, "Value": value})

    return aws_config
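
# Illustrative example (hypothetical config and tag values):
#
#   config = add_tags_to_aws_config(
#       {"InstanceType": "m5.large"},
#       {"ray-test-name": "my_test"},
#       RELEASE_AWS_RESOURCE_TYPES_TO_TRACK_FOR_BILLING,
#   )
#   # config["TagSpecifications"] now holds one entry per resource type:
#   #   [{"ResourceType": "instance",
#   #     "Tags": [{"Key": "ray-test-name", "Value": "my_test"}]},
#   #    {"ResourceType": "volume",
#   #     "Tags": [{"Key": "ray-test-name", "Value": "my_test"}]}]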


def upload_to_s3(src_path: str, bucket: str, key_path: str) -> Optional[str]:
    """Upload a file to an S3 bucket.

    This assumes the bucket has public read access on the objects uploaded.

    Args:
        src_path: Local file path.
        bucket: S3 bucket name.
        key_path: Destination key of the uploaded object.

    Returns:
        HTTP URL where the uploaded object can be downloaded if successful,
        or None if the upload fails. A ClientError raised by the upload is
        caught and logged rather than re-raised.
    """
    s3_client = boto3.client("s3")

    try:
        s3_client.upload_file(Filename=src_path, Bucket=bucket, Key=key_path)
    except ClientError as e:
        logger.warning(f"Failed to upload to s3: {e}")
        return None

    return f"https://{bucket}.s3.us-west-2.amazonaws.com/{key_path}"
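
# Illustrative usage (hypothetical file path and key; str() cast assumes
# DeferredEnvVar resolves on conversion, as with the secret ARN above):
#
#   url = upload_to_s3(
#       "/tmp/result.json", str(RELEASE_AWS_BUCKET), "results/result.json"
#   )
#   if url:
#       logger.info(f"Result available at {url}")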