S3 CloudFormation update (#3199)

* First cut of S3 CloudFormation update support: the encryption property.

* Update-type support for S3. Introduced an abstract base class for CloudFormation-aware models, as designed by @bblommers, to decentralize CloudFormation resource-type and name-property values into the model objects (see the sketch below).

* Blackened...

* Reverted the parameter rename in s3.models.update_from_cloudformation_json() and its call site to stay compatible with other modules.

Co-authored-by: Bert Blommers <bblommers@users.noreply.github.com>
Co-authored-by: Joseph Weitekamp <jweite@amazon.com>
Co-authored-by: Bert Blommers <info@bertblommers.nl>
Authored by jweite on 2020-08-01 17:43:03 -04:00, committed by GitHub
parent 06ed67a8e5
commit 3342d49a43
5 changed files with 241 additions and 19 deletions
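
For context, a minimal sketch of the contract the CloudFormationModel base class expects from a backend model. The ExampleQueue class and example_backend below are illustrative stand-ins only, not part of this change; FakeBucket in the S3 diff below is the real implementation.

from moto.core import CloudFormationModel

class ExampleQueue(CloudFormationModel):
    @staticmethod
    def cloudformation_name_type():
        # Template property that names the resource; return None if the type has no name property.
        return "QueueName"

    @staticmethod
    def cloudformation_type():
        # The "AWS::Service::Resource" type this model backs.
        return "AWS::SQS::Queue"

    @classmethod
    def create_from_cloudformation_json(
        cls, resource_name, cloudformation_json, region_name
    ):
        properties = cloudformation_json.get("Properties", {})
        # example_backend is a hypothetical moto backend instance.
        return example_backend.create_queue(resource_name, region_name, properties)

    @classmethod
    def update_from_cloudformation_json(
        cls, original_resource, new_resource_name, cloudformation_json, region_name
    ):
        # Mutate original_resource in place ("No Interruption") or create a replacement
        # and delete the original ("Replacement"), then return the live resource.
        return original_resource

    @classmethod
    def delete_from_cloudformation_json(
        cls, resource_name, cloudformation_json, region_name
    ):
        example_backend.delete_queue(resource_name)  # hypothetical backend call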

View File

@@ -21,6 +21,7 @@ from moto.batch import models as batch_models # noqa
from moto.cloudwatch import models as cloudwatch_models # noqa
from moto.datapipeline import models as datapipeline_models # noqa
from moto.dynamodb2 import models as dynamodb2_models # noqa
from moto.ec2 import models as ec2_models
from moto.ecr import models as ecr_models # noqa
from moto.ecs import models as ecs_models # noqa
from moto.elb import models as elb_models # noqa
@@ -33,15 +34,13 @@ from moto.rds import models as rds_models # noqa
from moto.rds2 import models as rds2_models # noqa
from moto.redshift import models as redshift_models # noqa
from moto.route53 import models as route53_models # noqa
from moto.s3 import models as s3_models # noqa
from moto.s3 import models as s3_models, s3_backend # noqa
from moto.s3.utils import bucket_and_name_from_url
from moto.sns import models as sns_models # noqa
from moto.sqs import models as sqs_models # noqa
# End ugly list of imports
from moto.ec2 import models as ec2_models
from moto.s3 import models as _, s3_backend # noqa
from moto.s3.utils import bucket_and_name_from_url
from moto.core import ACCOUNT_ID, CloudFormationModel
from .utils import random_suffix
from .exceptions import (
@@ -212,7 +211,6 @@ def clean_json(resource_json, resources_map):
def resource_class_from_type(resource_type):
if resource_type in NULL_MODELS:
return None
if resource_type not in MODEL_MAP:
logger.warning("No Moto CloudFormation support for %s", resource_type)
return None
@@ -221,6 +219,9 @@ def resource_class_from_type(resource_type):
def resource_name_property_from_type(resource_type):
for model in MODEL_LIST:
if model.cloudformation_type() == resource_type:
return model.cloudformation_name_type()
return NAME_TYPE_MAP.get(resource_type)
@@ -249,7 +250,9 @@ def generate_resource_name(resource_type, stack_name, logical_id):
return "{0}-{1}-{2}".format(stack_name, logical_id, random_suffix())
def parse_resource(logical_id, resource_json, resources_map):
def parse_resource(
logical_id, resource_json, resources_map, add_name_to_resource_json=True
):
resource_type = resource_json["Type"]
resource_class = resource_class_from_type(resource_type)
if not resource_class:
@@ -261,21 +264,20 @@ def parse_resource(logical_id, resource_json, resources_map):
return None
resource_json = clean_json(resource_json, resources_map)
resource_name = generate_resource_name(
resource_type, resources_map.get("AWS::StackName"), logical_id
)
resource_name_property = resource_name_property_from_type(resource_type)
if resource_name_property:
if "Properties" not in resource_json:
resource_json["Properties"] = dict()
if resource_name_property not in resource_json["Properties"]:
resource_json["Properties"][
resource_name_property
] = generate_resource_name(
resource_type, resources_map.get("AWS::StackName"), logical_id
)
resource_name = resource_json["Properties"][resource_name_property]
else:
resource_name = generate_resource_name(
resource_type, resources_map.get("AWS::StackName"), logical_id
)
if (
add_name_to_resource_json
and resource_name_property not in resource_json["Properties"]
):
resource_json["Properties"][resource_name_property] = resource_name
if resource_name_property in resource_json["Properties"]:
resource_name = resource_json["Properties"][resource_name_property]
return resource_class, resource_json, resource_name
@@ -301,7 +303,7 @@ def parse_and_create_resource(logical_id, resource_json, resources_map, region_name):
def parse_and_update_resource(logical_id, resource_json, resources_map, region_name):
resource_class, new_resource_json, new_resource_name = parse_resource(
logical_id, resource_json, resources_map
logical_id, resource_json, resources_map, False
)
original_resource = resources_map[logical_id]
new_resource = resource_class.update_from_cloudformation_json(

View File

@@ -540,6 +540,7 @@ class BaseModel(object):
class CloudFormationModel(BaseModel):
@abstractmethod
def cloudformation_name_type(self):
# https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-name.html
# This must be implemented as a staticmethod with no parameters
# Return None for resources that do not have a name property
pass

View File

@@ -0,0 +1,33 @@
from collections import OrderedDict
def cfn_to_api_encryption(bucket_encryption_properties):
sse_algorithm = bucket_encryption_properties["ServerSideEncryptionConfiguration"][
0
]["ServerSideEncryptionByDefault"]["SSEAlgorithm"]
kms_master_key_id = bucket_encryption_properties[
"ServerSideEncryptionConfiguration"
][0]["ServerSideEncryptionByDefault"].get("KMSMasterKeyID")
apply_server_side_encryption_by_default = OrderedDict()
apply_server_side_encryption_by_default["SSEAlgorithm"] = sse_algorithm
if kms_master_key_id:
apply_server_side_encryption_by_default["KMSMasterKeyID"] = kms_master_key_id
rule = OrderedDict(
{"ApplyServerSideEncryptionByDefault": apply_server_side_encryption_by_default}
)
bucket_encryption = OrderedDict(
{"@xmlns": "http://s3.amazonaws.com/doc/2006-03-01/"}
)
bucket_encryption["Rule"] = rule
return bucket_encryption
def is_replacement_update(properties):
properties_requiring_replacement_update = ["BucketName", "ObjectLockEnabled"]
return any(
[
property_requiring_replacement in properties
for property_requiring_replacement in properties_requiring_replacement_update
]
)
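
Not part of the diff: a quick usage sketch of the two helpers above, assuming the new module is importable as moto.s3.cloud_formation (path inferred from the relative import in models.py below) and using made-up example values.

from moto.s3.cloud_formation import cfn_to_api_encryption, is_replacement_update

# CloudFormation-style BucketEncryption property value (example data)
cfn_encryption = {
    "ServerSideEncryptionConfiguration": [
        {
            "ServerSideEncryptionByDefault": {
                "SSEAlgorithm": "aws:kms",
                "KMSMasterKeyID": "1234abcd-12ab-34cd-56ef-1234567890ab",
            }
        }
    ]
}

bucket_encryption = cfn_to_api_encryption(cfn_encryption)
# bucket_encryption is an OrderedDict in the S3 API/XML shape:
#   {"@xmlns": "http://s3.amazonaws.com/doc/2006-03-01/",
#    "Rule": {"ApplyServerSideEncryptionByDefault":
#             {"SSEAlgorithm": "aws:kms",
#              "KMSMasterKeyID": "1234abcd-12ab-34cd-56ef-1234567890ab"}}}
# FakeBucket passes it on as encryption=[bucket_encryption] to put_bucket_encryption.

is_replacement_update({"BucketEncryption": cfn_encryption})  # False: update in place
is_replacement_update({"BucketName": "new-name"})            # True: bucket must be replaced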

View File

@@ -43,6 +43,7 @@ from .exceptions import (
WrongPublicAccessBlockAccountIdError,
NoSuchUpload,
)
from .cloud_formation import cfn_to_api_encryption, is_replacement_update
from .utils import clean_key_name, _VersionedKeyStore
MAX_BUCKET_NAME_LENGTH = 63
@@ -1084,8 +1085,54 @@ class FakeBucket(CloudFormationModel):
cls, resource_name, cloudformation_json, region_name
):
bucket = s3_backend.create_bucket(resource_name, region_name)
properties = cloudformation_json["Properties"]
if "BucketEncryption" in properties:
bucket_encryption = cfn_to_api_encryption(properties["BucketEncryption"])
s3_backend.put_bucket_encryption(
bucket_name=resource_name, encryption=[bucket_encryption]
)
return bucket
@classmethod
def update_from_cloudformation_json(
cls, original_resource, new_resource_name, cloudformation_json, region_name,
):
properties = cloudformation_json["Properties"]
if is_replacement_update(properties):
resource_name_property = cls.cloudformation_name_type()
if resource_name_property not in properties:
properties[resource_name_property] = new_resource_name
new_resource = cls.create_from_cloudformation_json(
properties[resource_name_property], cloudformation_json, region_name
)
properties[resource_name_property] = original_resource.name
cls.delete_from_cloudformation_json(
original_resource.name, cloudformation_json, region_name
)
return new_resource
else: # No Interruption
if "BucketEncryption" in properties:
bucket_encryption = cfn_to_api_encryption(
properties["BucketEncryption"]
)
s3_backend.put_bucket_encryption(
bucket_name=original_resource.name, encryption=[bucket_encryption]
)
return original_resource
@classmethod
def delete_from_cloudformation_json(
cls, resource_name, cloudformation_json, region_name
):
properties = cloudformation_json["Properties"]
bucket_name = properties[cls.cloudformation_name_type()]
s3_backend.delete_bucket(bucket_name)
def to_config_dict(self):
"""Return the AWS Config JSON format of this S3 bucket.

View File

@@ -36,7 +36,7 @@ from nose.tools import assert_raises
import sure # noqa
from moto import settings, mock_s3, mock_s3_deprecated, mock_config
from moto import settings, mock_s3, mock_s3_deprecated, mock_config, mock_cloudformation
import moto.s3.models as s3model
from moto.core.exceptions import InvalidNextTokenException
from moto.core.utils import py2_strip_unicode_keys
@@ -4686,3 +4686,142 @@ def test_presigned_put_url_with_custom_headers():
s3.delete_object(Bucket=bucket, Key=key)
s3.delete_bucket(Bucket=bucket)
@mock_s3
@mock_cloudformation
def test_s3_bucket_cloudformation_basic():
s3 = boto3.client("s3", region_name="us-east-1")
cf = boto3.client("cloudformation", region_name="us-east-1")
template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {"testInstance": {"Type": "AWS::S3::Bucket", "Properties": {},}},
"Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
}
template_json = json.dumps(template)
stack_id = cf.create_stack(StackName="test_stack", TemplateBody=template_json)[
"StackId"
]
stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])
@mock_s3
@mock_cloudformation
def test_s3_bucket_cloudformation_with_properties():
s3 = boto3.client("s3", region_name="us-east-1")
cf = boto3.client("cloudformation", region_name="us-east-1")
bucket_name = "MyBucket"
template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"testInstance": {
"Type": "AWS::S3::Bucket",
"Properties": {
"BucketName": bucket_name,
"BucketEncryption": {
"ServerSideEncryptionConfiguration": [
{
"ServerSideEncryptionByDefault": {
"SSEAlgorithm": "AES256"
}
}
]
},
},
}
},
"Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
}
template_json = json.dumps(template)
stack_id = cf.create_stack(StackName="test_stack", TemplateBody=template_json)[
"StackId"
]
stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
s3.head_bucket(Bucket=bucket_name)
encryption = s3.get_bucket_encryption(Bucket=bucket_name)
encryption["ServerSideEncryptionConfiguration"]["Rules"][0][
"ApplyServerSideEncryptionByDefault"
]["SSEAlgorithm"].should.equal("AES256")
@mock_s3
@mock_cloudformation
def test_s3_bucket_cloudformation_update_no_interruption():
s3 = boto3.client("s3", region_name="us-east-1")
cf = boto3.client("cloudformation", region_name="us-east-1")
template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {"testInstance": {"Type": "AWS::S3::Bucket"}},
"Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
}
template_json = json.dumps(template)
cf.create_stack(StackName="test_stack", TemplateBody=template_json)
stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])
template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"testInstance": {
"Type": "AWS::S3::Bucket",
"Properties": {
"BucketEncryption": {
"ServerSideEncryptionConfiguration": [
{
"ServerSideEncryptionByDefault": {
"SSEAlgorithm": "AES256"
}
}
]
}
},
}
},
"Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
}
template_json = json.dumps(template)
cf.update_stack(StackName="test_stack", TemplateBody=template_json)
encryption = s3.get_bucket_encryption(
Bucket=stack_description["Outputs"][0]["OutputValue"]
)
encryption["ServerSideEncryptionConfiguration"]["Rules"][0][
"ApplyServerSideEncryptionByDefault"
]["SSEAlgorithm"].should.equal("AES256")
@mock_s3
@mock_cloudformation
def test_s3_bucket_cloudformation_update_replacement():
s3 = boto3.client("s3", region_name="us-east-1")
cf = boto3.client("cloudformation", region_name="us-east-1")
template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {"testInstance": {"Type": "AWS::S3::Bucket"}},
"Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
}
template_json = json.dumps(template)
cf.create_stack(StackName="test_stack", TemplateBody=template_json)
stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])
template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"testInstance": {
"Type": "AWS::S3::Bucket",
"Properties": {"BucketName": "MyNewBucketName"},
}
},
"Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
}
template_json = json.dumps(template)
cf.update_stack(StackName="test_stack", TemplateBody=template_json)
stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])