From 3b06ce689e533f24831cb225306716c123942b52 Mon Sep 17 00:00:00 2001
From: Ciaran Evans <9111975+ciaranevans@users.noreply.github.com>
Date: Thu, 27 Aug 2020 08:22:44 +0100
Subject: [PATCH 01/21] Address SFN.Client.exceptions.ExecutionAlreadyExists
 Not implemented (#3263)

* Add check for existing execution, fix issue with make init

* Remove f-string usage

* Remove fstring usage in test

* Pin black and run formatting on test_stepfunction

* Reverse changes made by black 20.8b1
---
 moto/stepfunctions/exceptions.py              |  5 +++++
 moto/stepfunctions/models.py                  |  9 ++++++++
 requirements-dev.txt                          |  2 +-
 setup.py                                      |  1 +
 .../test_stepfunctions/test_stepfunctions.py  | 21 +++++++++++++++++++
 5 files changed, 37 insertions(+), 1 deletion(-)

diff --git a/moto/stepfunctions/exceptions.py b/moto/stepfunctions/exceptions.py
index 704e4ea83..6000bab4e 100644
--- a/moto/stepfunctions/exceptions.py
+++ b/moto/stepfunctions/exceptions.py
@@ -18,6 +18,11 @@ class AWSError(Exception):
         )
 
 
+class ExecutionAlreadyExists(AWSError):
+    TYPE = "ExecutionAlreadyExists"
+    STATUS = 400
+
+
 class ExecutionDoesNotExist(AWSError):
     TYPE = "ExecutionDoesNotExist"
     STATUS = 400
diff --git a/moto/stepfunctions/models.py b/moto/stepfunctions/models.py
index e36598f23..58b6bb434 100644
--- a/moto/stepfunctions/models.py
+++ b/moto/stepfunctions/models.py
@@ -8,6 +8,7 @@ from moto.core.utils import iso_8601_datetime_without_milliseconds
 from moto.sts.models import ACCOUNT_ID
 from uuid import uuid4
 from .exceptions import (
+    ExecutionAlreadyExists,
     ExecutionDoesNotExist,
     InvalidArn,
     InvalidName,
@@ -205,6 +206,7 @@ class StepFunctionBackend(BaseBackend):
 
     def start_execution(self, state_machine_arn, name=None):
         state_machine_name = self.describe_state_machine(state_machine_arn).name
+        self._ensure_execution_name_doesnt_exist(name)
         execution = Execution(
             region_name=self.region_name,
             account_id=self._get_account_id(),
@@ -278,6 +280,13 @@ class StepFunctionBackend(BaseBackend):
         if not arn or not match:
             raise InvalidArn(invalid_msg)
 
+    def _ensure_execution_name_doesnt_exist(self, name):
+        for execution in self.executions:
+            if execution.name == name:
+                raise ExecutionAlreadyExists(
+                    "Execution Already Exists: '" + execution.execution_arn + "'"
+                )
+
     def _get_account_id(self):
         return ACCOUNT_ID
 
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 313f2dfb6..e40a568a5 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,6 +1,6 @@
 -r requirements.txt
 nose
-black; python_version >= '3.6'
+black==19.10b0; python_version >= '3.6'
 regex==2019.11.1; python_version >= '3.6'  # Needed for black
 sure==1.4.11
 coverage==4.5.4
diff --git a/setup.py b/setup.py
index 707a56212..ffaa8b273 100755
--- a/setup.py
+++ b/setup.py
@@ -40,6 +40,7 @@ install_requires = [
     "werkzeug",
     "PyYAML>=5.1",
     "pytz",
+    "ecdsa<0.15",
     "python-dateutil<3.0.0,>=2.1",
     "python-jose[cryptography]>=3.1.0,<4.0.0",
     "docker>=2.5.1",
diff --git a/tests/test_stepfunctions/test_stepfunctions.py b/tests/test_stepfunctions/test_stepfunctions.py
index 4324964d8..043fd9bfb 100644
--- a/tests/test_stepfunctions/test_stepfunctions.py
+++ b/tests/test_stepfunctions/test_stepfunctions.py
@@ -404,6 +404,27 @@ def test_state_machine_start_execution_with_custom_name():
     execution["startDate"].should.be.a(datetime)
 
 
+@mock_stepfunctions
+@mock_sts
+def test_state_machine_start_execution_fails_on_duplicate_execution_name():
+    client = boto3.client("stepfunctions", region_name=region)
+    #
+    sm = client.create_state_machine(
+        name="name", definition=str(simple_definition), roleArn=_get_default_role()
+    )
+    execution_one = client.start_execution(
+        stateMachineArn=sm["stateMachineArn"], name="execution_name"
+    )
+    #
+    with assert_raises(ClientError) as exc:
+        _ = client.start_execution(
+            stateMachineArn=sm["stateMachineArn"], name="execution_name"
+        )
+    exc.exception.response["Error"]["Message"].should.equal(
+        "Execution Already Exists: '" + execution_one["executionArn"] + "'"
+    )
+
+
 @mock_stepfunctions
 @mock_sts
 def test_state_machine_list_executions():

From 49d92861c0acaa006052a7a94355a5870cdc92d5 Mon Sep 17 00:00:00 2001
From: jweite
Date: Thu, 27 Aug 2020 05:11:47 -0400
Subject: [PATCH 02/21] Iam cloudformation update, significant cloudformation
 refactoring (#3218)

* IAM User Cloudformation Enhancements: update, delete, getatt.

* AWS::IAM::Policy Support

* Added unit tests for AWS:IAM:Policy for roles and groups.  Fixed bug related to groups.

* AWS:IAM:AccessKey CloudFormation support.

* Refactor of CloudFormation parsing.py methods to simplify and standardize how they call to the models.  Adjusted some models accordingly.

* Further model CloudFormation support changes to align with revised CloudFormation logic.  Mostly avoidance of getting resource name from properties.

* Support for Kinesis Stream RetentionPeriodHours param.

* Kinesis Stream Cloudformation Tag Support.

* Added omitted 'region' param to boto3.client() calls in new tests.

Co-authored-by: Joseph Weitekamp
---
 moto/awslambda/models.py                      |    7 +-
 moto/cloudformation/parsing.py                |  140 +-
 moto/cloudwatch/models.py                     |    3 +-
 moto/datapipeline/models.py                   |    4 +-
 moto/dynamodb2/models/__init__.py             |    8 +-
 moto/ecr/models.py                            |    6 +-
 moto/ecs/models.py                            |   26 +-
 moto/elbv2/models.py                          |    6 +-
 moto/events/models.py                         |   10 +-
 moto/iam/models.py                            |  403 +++++-
 moto/kinesis/models.py                        |   37 +-
 moto/kinesis/responses.py                     |    5 +-
 moto/rds/models.py                            |   12 +-
 moto/rds2/models.py                           |   13 +-
 moto/route53/models.py                        |    7 +-
 moto/s3/models.py                             |    6 +-
 moto/sns/models.py                            |    2 +-
 moto/sqs/models.py                            |   11 +-
 .../test_cloudformation_stack_crud_boto3.py   |   10 +-
 tests/test_cloudformation/test_validate.py    |    2 +-
 tests/test_iam/test_iam.py                    |    3 -
 tests/test_iam/test_iam_cloudformation.py     | 1196 +++++++++++++++++
 .../test_kinesis_cloudformation.py            |   29 +
 tests/test_s3/test_s3.py                      |  143 +-
 tests/test_s3/test_s3_cloudformation.py       |  145 ++
 25 files changed, 1914 insertions(+), 320 deletions(-)
 create mode 100644 tests/test_iam/test_iam_cloudformation.py
 create mode 100644 tests/test_s3/test_s3_cloudformation.py

diff --git a/moto/awslambda/models.py b/moto/awslambda/models.py
index 2aa207da9..ce9c78fc6 100644
--- a/moto/awslambda/models.py
+++ b/moto/awslambda/models.py
@@ -702,10 +702,13 @@ class EventSourceMapping(CloudFormationModel):
         )
 
         for esm in esms:
-            if esm.logical_resource_id in resource_name:
-                lambda_backend.delete_event_source_mapping
+            if esm.uuid == resource_name:
                 esm.delete(region_name)
 
+    @property
+    def physical_resource_id(self):
+        return self.uuid
+
 
 class LambdaVersion(CloudFormationModel):
     def __init__(self, spec):
diff --git a/moto/cloudformation/parsing.py b/moto/cloudformation/parsing.py
index 272856367..760142033 100644
--- a/moto/cloudformation/parsing.py
+++ b/moto/cloudformation/parsing.py
@@ -246,12 +246,14 @@ def generate_resource_name(resource_type, stack_name, logical_id):
         return "{0}{1}".format(
             stack_name[:max_stack_name_portion_len], right_hand_part_of_name
         ).lower()
+    elif resource_type == "AWS::IAM::Policy":
+        return "{0}-{1}-{2}".format(stack_name[:5], logical_id[:4], random_suffix())
     else:
"{0}-{1}-{2}".format(stack_name, logical_id, random_suffix()) def parse_resource( - logical_id, resource_json, resources_map, add_name_to_resource_json=True + resource_json, resources_map, ): resource_type = resource_json["Type"] resource_class = resource_class_from_type(resource_type) @@ -263,21 +265,37 @@ def parse_resource( ) return None + if "Properties" not in resource_json: + resource_json["Properties"] = {} + resource_json = clean_json(resource_json, resources_map) - resource_name = generate_resource_name( + + return resource_class, resource_json, resource_type + + +def parse_resource_and_generate_name( + logical_id, resource_json, resources_map, +): + resource_tuple = parse_resource(resource_json, resources_map) + if not resource_tuple: + return None + resource_class, resource_json, resource_type = resource_tuple + + generated_resource_name = generate_resource_name( resource_type, resources_map.get("AWS::StackName"), logical_id ) + resource_name_property = resource_name_property_from_type(resource_type) if resource_name_property: - if "Properties" not in resource_json: - resource_json["Properties"] = dict() if ( - add_name_to_resource_json - and resource_name_property not in resource_json["Properties"] + "Properties" in resource_json + and resource_name_property in resource_json["Properties"] ): - resource_json["Properties"][resource_name_property] = resource_name - if resource_name_property in resource_json["Properties"]: resource_name = resource_json["Properties"][resource_name_property] + else: + resource_name = generated_resource_name + else: + resource_name = generated_resource_name return resource_class, resource_json, resource_name @@ -289,12 +307,14 @@ def parse_and_create_resource(logical_id, resource_json, resources_map, region_n return None resource_type = resource_json["Type"] - resource_tuple = parse_resource(logical_id, resource_json, resources_map) + resource_tuple = parse_resource_and_generate_name( + logical_id, resource_json, resources_map + ) if not resource_tuple: return None - resource_class, resource_json, resource_name = resource_tuple + resource_class, resource_json, resource_physical_name = resource_tuple resource = resource_class.create_from_cloudformation_json( - resource_name, resource_json, region_name + resource_physical_name, resource_json, region_name ) resource.type = resource_type resource.logical_resource_id = logical_id @@ -302,28 +322,34 @@ def parse_and_create_resource(logical_id, resource_json, resources_map, region_n def parse_and_update_resource(logical_id, resource_json, resources_map, region_name): - resource_class, new_resource_json, new_resource_name = parse_resource( - logical_id, resource_json, resources_map, False - ) - original_resource = resources_map[logical_id] - new_resource = resource_class.update_from_cloudformation_json( - original_resource=original_resource, - new_resource_name=new_resource_name, - cloudformation_json=new_resource_json, - region_name=region_name, - ) - new_resource.type = resource_json["Type"] - new_resource.logical_resource_id = logical_id - return new_resource - - -def parse_and_delete_resource(logical_id, resource_json, resources_map, region_name): - resource_class, resource_json, resource_name = parse_resource( + resource_class, resource_json, new_resource_name = parse_resource_and_generate_name( logical_id, resource_json, resources_map ) - resource_class.delete_from_cloudformation_json( - resource_name, resource_json, region_name - ) + original_resource = resources_map[logical_id] + if not hasattr( + 
+        resource_class.update_from_cloudformation_json, "__isabstractmethod__"
+    ):
+        new_resource = resource_class.update_from_cloudformation_json(
+            original_resource=original_resource,
+            new_resource_name=new_resource_name,
+            cloudformation_json=resource_json,
+            region_name=region_name,
+        )
+        new_resource.type = resource_json["Type"]
+        new_resource.logical_resource_id = logical_id
+        return new_resource
+    else:
+        return None
+
+
+def parse_and_delete_resource(resource_name, resource_json, resources_map, region_name):
+    resource_class, resource_json, _ = parse_resource(resource_json, resources_map)
+    if not hasattr(
+        resource_class.delete_from_cloudformation_json, "__isabstractmethod__"
+    ):
+        resource_class.delete_from_cloudformation_json(
+            resource_name, resource_json, region_name
+        )
 
 
 def parse_condition(condition, resources_map, condition_map):
@@ -614,28 +640,36 @@ class ResourceMap(collections_abc.Mapping):
             )
             self._parsed_resources[resource_name] = new_resource
 
-        for resource_name, resource in resources_by_action["Remove"].items():
-            resource_json = old_template[resource_name]
+        for logical_name, _ in resources_by_action["Remove"].items():
+            resource_json = old_template[logical_name]
+            resource = self._parsed_resources[logical_name]
+            # ToDo: Standardize this.
+            if hasattr(resource, "physical_resource_id"):
+                resource_name = self._parsed_resources[
+                    logical_name
+                ].physical_resource_id
+            else:
+                resource_name = None
             parse_and_delete_resource(
                 resource_name, resource_json, self, self._region_name
             )
-            self._parsed_resources.pop(resource_name)
+            self._parsed_resources.pop(logical_name)
 
         tries = 1
         while resources_by_action["Modify"] and tries < 5:
-            for resource_name, resource in resources_by_action["Modify"].copy().items():
-                resource_json = new_template[resource_name]
+            for logical_name, _ in resources_by_action["Modify"].copy().items():
+                resource_json = new_template[logical_name]
                 try:
                     changed_resource = parse_and_update_resource(
-                        resource_name, resource_json, self, self._region_name
+                        logical_name, resource_json, self, self._region_name
                     )
                 except Exception as e:
                     # skip over dependency violations, and try again in a
                     # second pass
                     last_exception = e
                 else:
-                    self._parsed_resources[resource_name] = changed_resource
-                    del resources_by_action["Modify"][resource_name]
+                    self._parsed_resources[logical_name] = changed_resource
+                    del resources_by_action["Modify"][logical_name]
             tries += 1
         if tries == 5:
             raise last_exception
@@ -650,22 +684,20 @@ class ResourceMap(collections_abc.Mapping):
                     if parsed_resource and hasattr(parsed_resource, "delete"):
                         parsed_resource.delete(self._region_name)
                     else:
-                        resource_name_attribute = (
-                            parsed_resource.cloudformation_name_type()
-                            if hasattr(parsed_resource, "cloudformation_name_type")
-                            else resource_name_property_from_type(parsed_resource.type)
+                        if hasattr(parsed_resource, "physical_resource_id"):
+                            resource_name = parsed_resource.physical_resource_id
+                        else:
+                            resource_name = None
+
+                        resource_json = self._resource_json_map[
+                            parsed_resource.logical_resource_id
+                        ]
+
+                        parse_and_delete_resource(
+                            resource_name, resource_json, self, self._region_name,
                         )
-                        if resource_name_attribute:
-                            resource_json = self._resource_json_map[
-                                parsed_resource.logical_resource_id
-                            ]
-                            resource_name = resource_json["Properties"][
-                                resource_name_attribute
-                            ]
-                            parse_and_delete_resource(
-                                resource_name, resource_json, self, self._region_name
-                            )
-                            self._parsed_resources.pop(parsed_resource.logical_resource_id)
+
+                        self._parsed_resources.pop(parsed_resource.logical_resource_id)
 
                 except Exception as e:
                     # skip over dependency violations, and try again in a
                     # second pass
diff --git a/moto/cloudwatch/models.py b/moto/cloudwatch/models.py
index d8b28bc97..5d956215c 100644
--- a/moto/cloudwatch/models.py
+++ b/moto/cloudwatch/models.py
@@ -511,10 +511,9 @@ class LogGroup(CloudFormationModel):
         cls, resource_name, cloudformation_json, region_name
     ):
         properties = cloudformation_json["Properties"]
-        log_group_name = properties["LogGroupName"]
         tags = properties.get("Tags", {})
         return logs_backends[region_name].create_log_group(
-            log_group_name, tags, **properties
+            resource_name, tags, **properties
         )
 
diff --git a/moto/datapipeline/models.py b/moto/datapipeline/models.py
index b17da1f09..e517b8f3e 100644
--- a/moto/datapipeline/models.py
+++ b/moto/datapipeline/models.py
@@ -90,9 +90,9 @@ class Pipeline(CloudFormationModel):
         datapipeline_backend = datapipeline_backends[region_name]
         properties = cloudformation_json["Properties"]
 
-        cloudformation_unique_id = "cf-" + properties["Name"]
+        cloudformation_unique_id = "cf-" + resource_name
         pipeline = datapipeline_backend.create_pipeline(
-            properties["Name"], cloudformation_unique_id
+            resource_name, cloudformation_unique_id
         )
         datapipeline_backend.put_pipeline_definition(
             pipeline.pipeline_id, properties["PipelineObjects"]
diff --git a/moto/dynamodb2/models/__init__.py b/moto/dynamodb2/models/__init__.py
index 175ed64f8..6757a6859 100644
--- a/moto/dynamodb2/models/__init__.py
+++ b/moto/dynamodb2/models/__init__.py
@@ -461,7 +461,7 @@ class Table(CloudFormationModel):
             params["streams"] = properties["StreamSpecification"]
 
         table = dynamodb_backends[region_name].create_table(
-            name=properties["TableName"], **params
+            name=resource_name, **params
         )
         return table
 
@@ -469,11 +469,7 @@ class Table(CloudFormationModel):
     def delete_from_cloudformation_json(
         cls, resource_name, cloudformation_json, region_name
     ):
-        properties = cloudformation_json["Properties"]
-
-        table = dynamodb_backends[region_name].delete_table(
-            name=properties["TableName"]
-        )
+        table = dynamodb_backends[region_name].delete_table(name=resource_name)
         return table
 
     def _generate_arn(self, name):
diff --git a/moto/ecr/models.py b/moto/ecr/models.py
index a1d5aa6e5..33a0201fd 100644
--- a/moto/ecr/models.py
+++ b/moto/ecr/models.py
@@ -80,15 +80,11 @@ class Repository(BaseObject, CloudFormationModel):
     def create_from_cloudformation_json(
         cls, resource_name, cloudformation_json, region_name
     ):
-        properties = cloudformation_json["Properties"]
-
         ecr_backend = ecr_backends[region_name]
         return ecr_backend.create_repository(
             # RepositoryName is optional in CloudFormation, thus create a random
             # name if necessary
-            repository_name=properties.get(
-                "RepositoryName", "ecrrepository{0}".format(int(random() * 10 ** 6))
-            )
+            repository_name=resource_name
         )
 
diff --git a/moto/ecs/models.py b/moto/ecs/models.py
index bf20c2245..7041a322b 100644
--- a/moto/ecs/models.py
+++ b/moto/ecs/models.py
@@ -82,36 +82,24 @@ class Cluster(BaseObject, CloudFormationModel):
     def create_from_cloudformation_json(
         cls, resource_name, cloudformation_json, region_name
     ):
-        # if properties is not provided, cloudformation will use the default values for all properties
-        if "Properties" in cloudformation_json:
-            properties = cloudformation_json["Properties"]
-        else:
-            properties = {}
-
         ecs_backend = ecs_backends[region_name]
         return ecs_backend.create_cluster(
             # ClusterName is optional in CloudFormation, thus create a random
             # name if necessary
-            cluster_name=properties.get(
-                "ClusterName", "ecscluster{0}".format(int(random() * 10 ** 6))
-            )
+            cluster_name=resource_name
         )
 
     @classmethod
     def update_from_cloudformation_json(
         cls, original_resource, new_resource_name, cloudformation_json, region_name
     ):
-        properties = cloudformation_json["Properties"]
-
-        if original_resource.name != properties["ClusterName"]:
+        if original_resource.name != new_resource_name:
             ecs_backend = ecs_backends[region_name]
             ecs_backend.delete_cluster(original_resource.arn)
             return ecs_backend.create_cluster(
                 # ClusterName is optional in CloudFormation, thus create a
                 # random name if necessary
-                cluster_name=properties.get(
-                    "ClusterName", "ecscluster{0}".format(int(random() * 10 ** 6))
-                )
+                cluster_name=new_resource_name
             )
         else:
             # no-op when nothing changed between old and new resources
@@ -355,14 +343,13 @@ class Service(BaseObject, CloudFormationModel):
             task_definition = properties["TaskDefinition"].family
         else:
             task_definition = properties["TaskDefinition"]
-        service_name = "{0}Service{1}".format(cluster, int(random() * 10 ** 6))
         desired_count = properties["DesiredCount"]
         # TODO: LoadBalancers
         # TODO: Role
 
         ecs_backend = ecs_backends[region_name]
         return ecs_backend.create_service(
-            cluster, service_name, desired_count, task_definition_str=task_definition
+            cluster, resource_name, desired_count, task_definition_str=task_definition
         )
 
     @classmethod
@@ -386,12 +373,9 @@ class Service(BaseObject, CloudFormationModel):
             # TODO: LoadBalancers
             # TODO: Role
             ecs_backend.delete_service(cluster_name, service_name)
-            new_service_name = "{0}Service{1}".format(
-                cluster_name, int(random() * 10 ** 6)
-            )
             return ecs_backend.create_service(
                 cluster_name,
-                new_service_name,
+                new_resource_name,
                 desired_count,
                 task_definition_str=task_definition,
             )
diff --git a/moto/elbv2/models.py b/moto/elbv2/models.py
index 1deaac9c4..cafdc28e4 100644
--- a/moto/elbv2/models.py
+++ b/moto/elbv2/models.py
@@ -160,7 +160,6 @@ class FakeTargetGroup(CloudFormationModel):
 
         elbv2_backend = elbv2_backends[region_name]
 
-        name = properties.get("Name")
         vpc_id = properties.get("VpcId")
         protocol = properties.get("Protocol")
         port = properties.get("Port")
@@ -175,7 +174,7 @@ class FakeTargetGroup(CloudFormationModel):
         target_type = properties.get("TargetType")
 
         target_group = elbv2_backend.create_target_group(
-            name=name,
+            name=resource_name,
             vpc_id=vpc_id,
             protocol=protocol,
             port=port,
@@ -437,13 +436,12 @@ class FakeLoadBalancer(CloudFormationModel):
 
         elbv2_backend = elbv2_backends[region_name]
 
-        name = properties.get("Name", resource_name)
         security_groups = properties.get("SecurityGroups")
         subnet_ids = properties.get("Subnets")
         scheme = properties.get("Scheme", "internet-facing")
 
         load_balancer = elbv2_backend.create_load_balancer(
-            name, security_groups, subnet_ids, scheme=scheme
+            resource_name, security_groups, subnet_ids, scheme=scheme
         )
         return load_balancer
 
diff --git a/moto/events/models.py b/moto/events/models.py
index 7fa7d225f..9c27fbb33 100644
--- a/moto/events/models.py
+++ b/moto/events/models.py
@@ -88,7 +88,7 @@ class Rule(CloudFormationModel):
     ):
         properties = cloudformation_json["Properties"]
         event_backend = events_backends[region_name]
-        event_name = properties.get("Name") or resource_name
+        event_name = resource_name
         return event_backend.put_rule(name=event_name, **properties)
 
     @classmethod
@@ -104,9 +104,8 @@ class Rule(CloudFormationModel):
     def delete_from_cloudformation_json(
         cls, resource_name, cloudformation_json, region_name
     ):
cloudformation_json["Properties"] event_backend = events_backends[region_name] - event_name = properties.get("Name") or resource_name + event_name = resource_name event_backend.delete_rule(name=event_name) @@ -176,7 +175,7 @@ class EventBus(CloudFormationModel): ): properties = cloudformation_json["Properties"] event_backend = events_backends[region_name] - event_name = properties["Name"] + event_name = resource_name event_source_name = properties.get("EventSourceName") return event_backend.create_event_bus( name=event_name, event_source_name=event_source_name @@ -195,9 +194,8 @@ class EventBus(CloudFormationModel): def delete_from_cloudformation_json( cls, resource_name, cloudformation_json, region_name ): - properties = cloudformation_json["Properties"] event_backend = events_backends[region_name] - event_bus_name = properties["Name"] + event_bus_name = resource_name event_backend.delete_event_bus(event_bus_name) diff --git a/moto/iam/models.py b/moto/iam/models.py index 16b3ac0ab..3a174e17b 100755 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -12,7 +12,6 @@ import re from cryptography import x509 from cryptography.hazmat.backends import default_backend from six.moves.urllib.parse import urlparse -from uuid import uuid4 from moto.core.exceptions import RESTError from moto.core import BaseBackend, BaseModel, ACCOUNT_ID, CloudFormationModel @@ -84,7 +83,11 @@ class VirtualMfaDevice(object): return iso_8601_datetime_without_milliseconds(self.enable_date) -class Policy(BaseModel): +class Policy(CloudFormationModel): + + # Note: This class does not implement the CloudFormation support for AWS::IAM::Policy, as that CF resource + # is for creating *inline* policies. That is done in class InlinePolicy. + is_attachable = False def __init__( @@ -295,8 +298,149 @@ aws_managed_policies = [ ] -class InlinePolicy(Policy): - """TODO: is this needed?""" +class InlinePolicy(CloudFormationModel): + # Represents an Inline Policy created by CloudFormation + def __init__( + self, + resource_name, + policy_name, + policy_document, + group_names, + role_names, + user_names, + ): + self.name = resource_name + self.policy_name = None + self.policy_document = None + self.group_names = None + self.role_names = None + self.user_names = None + self.update(policy_name, policy_document, group_names, role_names, user_names) + + def update( + self, policy_name, policy_document, group_names, role_names, user_names, + ): + self.policy_name = policy_name + self.policy_document = ( + json.dumps(policy_document) + if isinstance(policy_document, dict) + else policy_document + ) + self.group_names = group_names + self.role_names = role_names + self.user_names = user_names + + @staticmethod + def cloudformation_name_type(): + return None # Resource never gets named after by template PolicyName! 
+
+    @staticmethod
+    def cloudformation_type():
+        return "AWS::IAM::Policy"
+
+    @classmethod
+    def create_from_cloudformation_json(
+        cls, resource_physical_name, cloudformation_json, region_name
+    ):
+        properties = cloudformation_json.get("Properties", {})
+        policy_document = properties.get("PolicyDocument")
+        policy_name = properties.get("PolicyName")
+        user_names = properties.get("Users")
+        role_names = properties.get("Roles")
+        group_names = properties.get("Groups")
+
+        return iam_backend.create_inline_policy(
+            resource_physical_name,
+            policy_name,
+            policy_document,
+            group_names,
+            role_names,
+            user_names,
+        )
+
+    @classmethod
+    def update_from_cloudformation_json(
+        cls, original_resource, new_resource_name, cloudformation_json, region_name,
+    ):
+        properties = cloudformation_json["Properties"]
+
+        if cls.is_replacement_update(properties):
+            resource_name_property = cls.cloudformation_name_type()
+            if resource_name_property not in properties:
+                properties[resource_name_property] = new_resource_name
+            new_resource = cls.create_from_cloudformation_json(
+                properties[resource_name_property], cloudformation_json, region_name
+            )
+            properties[resource_name_property] = original_resource.name
+            cls.delete_from_cloudformation_json(
+                original_resource.name, cloudformation_json, region_name
+            )
+            return new_resource
+
+        else:  # No Interruption
+            properties = cloudformation_json.get("Properties", {})
+            policy_document = properties.get("PolicyDocument")
+            policy_name = properties.get("PolicyName", original_resource.name)
+            user_names = properties.get("Users")
+            role_names = properties.get("Roles")
+            group_names = properties.get("Groups")
+
+            return iam_backend.update_inline_policy(
+                original_resource.name,
+                policy_name,
+                policy_document,
+                group_names,
+                role_names,
+                user_names,
+            )
+
+    @classmethod
+    def delete_from_cloudformation_json(
+        cls, resource_name, cloudformation_json, region_name
+    ):
+        iam_backend.delete_inline_policy(resource_name)
+
+    @staticmethod
+    def is_replacement_update(properties):
+        properties_requiring_replacement_update = []
+        return any(
+            [
+                property_requiring_replacement in properties
+                for property_requiring_replacement in properties_requiring_replacement_update
+            ]
+        )
+
+    @property
+    def physical_resource_id(self):
+        return self.name
+
+    def apply_policy(self, backend):
+        if self.user_names:
+            for user_name in self.user_names:
+                backend.put_user_policy(
+                    user_name, self.policy_name, self.policy_document
+                )
+        if self.role_names:
+            for role_name in self.role_names:
+                backend.put_role_policy(
+                    role_name, self.policy_name, self.policy_document
+                )
+        if self.group_names:
+            for group_name in self.group_names:
+                backend.put_group_policy(
+                    group_name, self.policy_name, self.policy_document
+                )
+
+    def unapply_policy(self, backend):
+        if self.user_names:
+            for user_name in self.user_names:
+                backend.delete_user_policy(user_name, self.policy_name)
+        if self.role_names:
+            for role_name in self.role_names:
+                backend.delete_role_policy(role_name, self.policy_name)
+        if self.group_names:
+            for group_name in self.group_names:
+                backend.delete_group_policy(group_name, self.policy_name)
 
 
 class Role(CloudFormationModel):
@@ -338,11 +482,13 @@ class Role(CloudFormationModel):
 
     @classmethod
     def create_from_cloudformation_json(
-        cls, resource_name, cloudformation_json, region_name
+        cls, resource_physical_name, cloudformation_json, region_name
     ):
         properties = cloudformation_json["Properties"]
         role_name = (
-            properties["RoleName"] if "RoleName" in properties else str(uuid4())[0:5]
+            properties["RoleName"]
+            if "RoleName" in properties
+            else resource_physical_name
         )
 
         role = iam_backend.create_role(
@@ -416,13 +562,15 @@ class InstanceProfile(CloudFormationModel):
 
     @classmethod
     def create_from_cloudformation_json(
-        cls, resource_name, cloudformation_json, region_name
+        cls, resource_physical_name, cloudformation_json, region_name
     ):
         properties = cloudformation_json["Properties"]
 
         role_ids = properties["Roles"]
         return iam_backend.create_instance_profile(
-            name=resource_name, path=properties.get("Path", "/"), role_ids=role_ids
+            name=resource_physical_name,
+            path=properties.get("Path", "/"),
+            role_ids=role_ids,
         )
 
     @property
@@ -475,12 +623,12 @@ class SigningCertificate(BaseModel):
         return iso_8601_datetime_without_milliseconds(self.upload_date)
 
 
-class AccessKey(BaseModel):
-    def __init__(self, user_name):
+class AccessKey(CloudFormationModel):
+    def __init__(self, user_name, status="Active"):
         self.user_name = user_name
         self.access_key_id = "AKIA" + random_access_key()
         self.secret_access_key = random_alphanumeric(40)
-        self.status = "Active"
+        self.status = status
         self.create_date = datetime.utcnow()
         self.last_used = None
 
@@ -499,6 +647,66 @@ class AccessKey(BaseModel):
             return self.secret_access_key
         raise UnformattedGetAttTemplateException()
 
+    @staticmethod
+    def cloudformation_name_type():
+        return None  # Resource never gets named after by template PolicyName!
+
+    @staticmethod
+    def cloudformation_type():
+        return "AWS::IAM::AccessKey"
+
+    @classmethod
+    def create_from_cloudformation_json(
+        cls, resource_physical_name, cloudformation_json, region_name
+    ):
+        properties = cloudformation_json.get("Properties", {})
+        user_name = properties.get("UserName")
+        status = properties.get("Status", "Active")
+
+        return iam_backend.create_access_key(user_name, status=status,)
+
+    @classmethod
+    def update_from_cloudformation_json(
+        cls, original_resource, new_resource_name, cloudformation_json, region_name,
+    ):
+        properties = cloudformation_json["Properties"]
+
+        if cls.is_replacement_update(properties):
+            new_resource = cls.create_from_cloudformation_json(
+                new_resource_name, cloudformation_json, region_name
+            )
+            cls.delete_from_cloudformation_json(
+                original_resource.physical_resource_id, cloudformation_json, region_name
+            )
+            return new_resource
+
+        else:  # No Interruption
+            properties = cloudformation_json.get("Properties", {})
+            status = properties.get("Status")
+            return iam_backend.update_access_key(
+                original_resource.user_name, original_resource.access_key_id, status
+            )
+
+    @classmethod
+    def delete_from_cloudformation_json(
+        cls, resource_name, cloudformation_json, region_name
+    ):
+        iam_backend.delete_access_key_by_name(resource_name)
+
+    @staticmethod
+    def is_replacement_update(properties):
+        properties_requiring_replacement_update = ["Serial", "UserName"]
+        return any(
+            [
+                property_requiring_replacement in properties
+                for property_requiring_replacement in properties_requiring_replacement_update
+            ]
+        )
+
+    @property
+    def physical_resource_id(self):
+        return self.access_key_id
+
 
 class SshPublicKey(BaseModel):
     def __init__(self, user_name, ssh_public_key_body):
@@ -564,8 +772,14 @@ class Group(BaseModel):
     def list_policies(self):
         return self.policies.keys()
 
+    def delete_policy(self, policy_name):
+        if policy_name not in self.policies:
+            raise IAMNotFoundException("Policy {0} not found".format(policy_name))
 
-class User(BaseModel):
+        del self.policies[policy_name]
+
+
+class User(CloudFormationModel):
     def __init__(self, name, path=None, tags=None):
        self.name = name
         self.id = random_resource_id()
@@ -614,8 +828,8 @@ class User(BaseModel):
 
         del self.policies[policy_name]
 
-    def create_access_key(self):
-        access_key = AccessKey(self.name)
+    def create_access_key(self, status="Active"):
+        access_key = AccessKey(self.name, status)
         self.access_keys.append(access_key)
         return access_key
 
@@ -633,9 +847,11 @@ class User(BaseModel):
         key = self.get_access_key_by_id(access_key_id)
         self.access_keys.remove(key)
 
-    def update_access_key(self, access_key_id, status):
+    def update_access_key(self, access_key_id, status=None):
         key = self.get_access_key_by_id(access_key_id)
-        key.status = status
+        if status is not None:
+            key.status = status
+        return key
 
     def get_access_key_by_id(self, access_key_id):
         for key in self.access_keys:
@@ -646,6 +862,15 @@ class User(BaseModel):
             "The Access Key with id {0} cannot be found".format(access_key_id)
         )
 
+    def has_access_key(self, access_key_id):
+        return any(
+            [
+                access_key
+                for access_key in self.access_keys
+                if access_key.access_key_id == access_key_id
+            ]
+        )
+
     def upload_ssh_public_key(self, ssh_public_key_body):
         pubkey = SshPublicKey(self.name, ssh_public_key_body)
         self.ssh_public_keys.append(pubkey)
@@ -677,7 +902,7 @@ class User(BaseModel):
         from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
 
         if attribute_name == "Arn":
-            raise NotImplementedError('"Fn::GetAtt" : [ "{0}" , "Arn" ]"')
+            return self.arn
         raise UnformattedGetAttTemplateException()
 
     def to_csv(self):
@@ -752,6 +977,66 @@ class User(BaseModel):
             access_key_2_last_used,
         )
 
+    @staticmethod
+    def cloudformation_name_type():
+        return "UserName"
+
+    @staticmethod
+    def cloudformation_type():
+        return "AWS::IAM::User"
+
+    @classmethod
+    def create_from_cloudformation_json(
+        cls, resource_physical_name, cloudformation_json, region_name
+    ):
+        properties = cloudformation_json.get("Properties", {})
+        path = properties.get("Path")
+        return iam_backend.create_user(resource_physical_name, path)
+
+    @classmethod
+    def update_from_cloudformation_json(
+        cls, original_resource, new_resource_name, cloudformation_json, region_name,
+    ):
+        properties = cloudformation_json["Properties"]
+
+        if cls.is_replacement_update(properties):
+            resource_name_property = cls.cloudformation_name_type()
+            if resource_name_property not in properties:
+                properties[resource_name_property] = new_resource_name
+            new_resource = cls.create_from_cloudformation_json(
+                properties[resource_name_property], cloudformation_json, region_name
+            )
+            properties[resource_name_property] = original_resource.name
+            cls.delete_from_cloudformation_json(
+                original_resource.name, cloudformation_json, region_name
+            )
+            return new_resource
+
+        else:  # No Interruption
+            if "Path" in properties:
+                original_resource.path = properties["Path"]
+            return original_resource
+
+    @classmethod
+    def delete_from_cloudformation_json(
+        cls, resource_name, cloudformation_json, region_name
+    ):
+        iam_backend.delete_user(resource_name)
+
+    @staticmethod
+    def is_replacement_update(properties):
+        properties_requiring_replacement_update = ["UserName"]
+        return any(
+            [
+                property_requiring_replacement in properties
+                for property_requiring_replacement in properties_requiring_replacement_update
+            ]
+        )
+
+    @property
+    def physical_resource_id(self):
+        return self.name
+
 
 class AccountPasswordPolicy(BaseModel):
     def __init__(
@@ -984,6 +1269,8 @@ class IAMBackend(BaseBackend):
         self.virtual_mfa_devices = {}
         self.account_password_policy = None
         self.account_summary = AccountSummary(self)
+        self.inline_policies = {}
+        self.access_keys = {}
         super(IAMBackend, self).__init__()
 
     def _init_managed_policies(self):
@@ -1478,6 +1765,10 @@ class IAMBackend(BaseBackend):
         group = self.get_group(group_name)
         return group.list_policies()
 
+    def delete_group_policy(self, group_name, policy_name):
+        group = self.get_group(group_name)
+        group.delete_policy(policy_name)
+
     def get_group_policy(self, group_name, policy_name):
         group = self.get_group(group_name)
         return group.get_policy(policy_name)
@@ -1674,14 +1965,15 @@ class IAMBackend(BaseBackend):
     def delete_policy(self, policy_arn):
         del self.managed_policies[policy_arn]
 
-    def create_access_key(self, user_name=None):
+    def create_access_key(self, user_name=None, status="Active"):
         user = self.get_user(user_name)
-        key = user.create_access_key()
+        key = user.create_access_key(status)
+        self.access_keys[key.physical_resource_id] = key
         return key
 
-    def update_access_key(self, user_name, access_key_id, status):
+    def update_access_key(self, user_name, access_key_id, status=None):
         user = self.get_user(user_name)
-        user.update_access_key(access_key_id, status)
+        return user.update_access_key(access_key_id, status)
 
     def get_access_key_last_used(self, access_key_id):
         access_keys_list = self.get_all_access_keys_for_all_users()
@@ -1706,7 +1998,17 @@ class IAMBackend(BaseBackend):
 
     def delete_access_key(self, access_key_id, user_name):
         user = self.get_user(user_name)
-        user.delete_access_key(access_key_id)
+        access_key = user.get_access_key_by_id(access_key_id)
+        self.delete_access_key_by_name(access_key.access_key_id)
+
+    def delete_access_key_by_name(self, name):
+        key = self.access_keys[name]
+        try:  # User may have been deleted before their access key...
+            user = self.get_user(key.user_name)
+            user.delete_access_key(key.access_key_id)
+        except IAMNotFoundException:
+            pass
+        del self.access_keys[name]
 
     def upload_ssh_public_key(self, user_name, ssh_public_key_body):
         user = self.get_user(user_name)
@@ -2017,5 +2319,62 @@ class IAMBackend(BaseBackend):
     def get_account_summary(self):
         return self.account_summary
 
+    def create_inline_policy(
+        self,
+        resource_name,
+        policy_name,
+        policy_document,
+        group_names,
+        role_names,
+        user_names,
+    ):
+        if resource_name in self.inline_policies:
+            raise IAMConflictException(
+                "EntityAlreadyExists",
+                "Inline Policy {0} already exists".format(resource_name),
+            )
+
+        inline_policy = InlinePolicy(
+            resource_name,
+            policy_name,
+            policy_document,
+            group_names,
+            role_names,
+            user_names,
+        )
+        self.inline_policies[resource_name] = inline_policy
+        inline_policy.apply_policy(self)
+        return inline_policy
+
+    def get_inline_policy(self, policy_id):
+        inline_policy = None
+        try:
+            inline_policy = self.inline_policies[policy_id]
+        except KeyError:
+            raise IAMNotFoundException("Inline policy {0} not found".format(policy_id))
+        return inline_policy
+
+    def update_inline_policy(
+        self,
+        resource_name,
+        policy_name,
+        policy_document,
+        group_names,
+        role_names,
+        user_names,
+    ):
+        inline_policy = self.get_inline_policy(resource_name)
+        inline_policy.unapply_policy(self)
+        inline_policy.update(
+            policy_name, policy_document, group_names, role_names, user_names,
+        )
+        inline_policy.apply_policy(self)
+        return inline_policy
+
+    def delete_inline_policy(self, policy_id):
+        inline_policy = self.get_inline_policy(policy_id)
+        inline_policy.unapply_policy(self)
+        del self.inline_policies[policy_id]
+
 
 iam_backend = IAMBackend()
diff --git a/moto/kinesis/models.py b/moto/kinesis/models.py
index a9c4f5476..280402d5f 100644
--- a/moto/kinesis/models.py
+++ b/moto/kinesis/models.py
@@ -135,7 +135,7 @@ class Shard(BaseModel):
 
 
 class Stream(CloudFormationModel):
-    def __init__(self, stream_name, shard_count, region_name):
+    def __init__(self, stream_name, shard_count, retention_period_hours, region_name):
         self.stream_name = stream_name
         self.creation_datetime = datetime.datetime.now()
         self.region = region_name
@@ -145,6 +145,7 @@ class Stream(CloudFormationModel):
         self.status = "ACTIVE"
         self.shard_count = None
         self.update_shard_count(shard_count)
+        self.retention_period_hours = retention_period_hours
 
     def update_shard_count(self, shard_count):
         # ToDo: This was extracted from init.  It's only accurate for new streams.
@@ -213,6 +214,7 @@ class Stream(CloudFormationModel):
                 "StreamName": self.stream_name,
                 "StreamStatus": self.status,
                 "HasMoreShards": False,
+                "RetentionPeriodHours": self.retention_period_hours,
                 "Shards": [shard.to_json() for shard in self.shards.values()],
             }
         }
@@ -243,9 +245,19 @@ class Stream(CloudFormationModel):
     ):
         properties = cloudformation_json.get("Properties", {})
         shard_count = properties.get("ShardCount", 1)
-        name = properties.get("Name", resource_name)
+        retention_period_hours = properties.get("RetentionPeriodHours", resource_name)
+        tags = {
+            tag_item["Key"]: tag_item["Value"]
+            for tag_item in properties.get("Tags", [])
+        }
+
         backend = kinesis_backends[region_name]
-        return backend.create_stream(name, shard_count, region_name)
+        stream = backend.create_stream(
+            resource_name, shard_count, retention_period_hours, region_name
+        )
+        if any(tags):
+            backend.add_tags_to_stream(stream.stream_name, tags)
+        return stream
 
     @classmethod
     def update_from_cloudformation_json(
@@ -269,6 +281,15 @@ class Stream(CloudFormationModel):
         else:  # No Interruption
             if "ShardCount" in properties:
                 original_resource.update_shard_count(properties["ShardCount"])
+            if "RetentionPeriodHours" in properties:
+                original_resource.retention_period_hours = properties[
+                    "RetentionPeriodHours"
+                ]
+            if "Tags" in properties:
+                original_resource.tags = {
+                    tag_item["Key"]: tag_item["Value"]
+                    for tag_item in properties.get("Tags", [])
+                }
             return original_resource
 
     @classmethod
@@ -276,9 +297,7 @@ class Stream(CloudFormationModel):
         cls, resource_name, cloudformation_json, region_name
     ):
         backend = kinesis_backends[region_name]
-        properties = cloudformation_json.get("Properties", {})
-        stream_name = properties.get(cls.cloudformation_name_type(), resource_name)
-        backend.delete_stream(stream_name)
+        backend.delete_stream(resource_name)
 
     @staticmethod
     def is_replacement_update(properties):
@@ -398,10 +417,12 @@ class KinesisBackend(BaseBackend):
         self.streams = OrderedDict()
         self.delivery_streams = {}
 
-    def create_stream(self, stream_name, shard_count, region_name):
+    def create_stream(
+        self, stream_name, shard_count, retention_period_hours, region_name
+    ):
         if stream_name in self.streams:
             raise ResourceInUseError(stream_name)
-        stream = Stream(stream_name, shard_count, region_name)
+        stream = Stream(stream_name, shard_count, retention_period_hours, region_name)
         self.streams[stream_name] = stream
         return stream
 
diff --git a/moto/kinesis/responses.py b/moto/kinesis/responses.py
index 500f7855d..8e7fc3941 100644
--- a/moto/kinesis/responses.py
+++ b/moto/kinesis/responses.py
@@ -25,7 +25,10 @@ class KinesisResponse(BaseResponse):
     def create_stream(self):
         stream_name = self.parameters.get("StreamName")
         shard_count = self.parameters.get("ShardCount")
-        self.kinesis_backend.create_stream(stream_name, shard_count, self.region)
+        retention_period_hours = self.parameters.get("RetentionPeriodHours")
+        self.kinesis_backend.create_stream(
+            stream_name, shard_count, retention_period_hours, self.region
+        )
         return ""
 
     def describe_stream(self):
diff --git a/moto/rds/models.py b/moto/rds/models.py
index 440da34d2..33be04e8c 100644
--- a/moto/rds/models.py
+++ b/moto/rds/models.py
@@ -4,7 +4,6 @@
 import boto.rds
 from jinja2 import Template
 
 from moto.core import BaseBackend, CloudFormationModel
-from moto.core.utils import get_random_hex
 from moto.ec2.models import ec2_backends
 from moto.rds.exceptions import UnformattedGetAttTemplateException
 from moto.rds2.models import rds2_backends
@@ -33,9 +32,6 @@ class Database(CloudFormationModel):
     ):
         properties = cloudformation_json["Properties"]
 
-        db_instance_identifier = properties.get(cls.cloudformation_name_type())
-        if not db_instance_identifier:
-            db_instance_identifier = resource_name.lower() + get_random_hex(12)
         db_security_groups = properties.get("DBSecurityGroups")
         if not db_security_groups:
             db_security_groups = []
@@ -48,7 +44,7 @@ class Database(CloudFormationModel):
             "availability_zone": properties.get("AvailabilityZone"),
             "backup_retention_period": properties.get("BackupRetentionPeriod"),
             "db_instance_class": properties.get("DBInstanceClass"),
-            "db_instance_identifier": db_instance_identifier,
+            "db_instance_identifier": resource_name,
             "db_name": properties.get("DBName"),
             "db_subnet_group_name": db_subnet_group_name,
             "engine": properties.get("Engine"),
@@ -229,7 +225,7 @@ class SecurityGroup(CloudFormationModel):
         cls, resource_name, cloudformation_json, region_name
     ):
         properties = cloudformation_json["Properties"]
-        group_name = resource_name.lower() + get_random_hex(12)
+        group_name = resource_name.lower()
         description = properties["GroupDescription"]
         security_group_ingress_rules = properties.get("DBSecurityGroupIngress", [])
         tags = properties.get("Tags")
@@ -303,9 +299,7 @@ class SubnetGroup(CloudFormationModel):
         cls, resource_name, cloudformation_json, region_name
     ):
         properties = cloudformation_json["Properties"]
-        subnet_name = properties.get(cls.cloudformation_name_type())
-        if not subnet_name:
-            subnet_name = resource_name.lower() + get_random_hex(12)
+        subnet_name = resource_name.lower()
         description = properties["DBSubnetGroupDescription"]
         subnet_ids = properties["SubnetIds"]
         tags = properties.get("Tags")
diff --git a/moto/rds2/models.py b/moto/rds2/models.py
index 5f46311ec..6efbf8492 100644
--- a/moto/rds2/models.py
+++ b/moto/rds2/models.py
@@ -10,7 +10,6 @@
 from jinja2 import Template
 from re import compile as re_compile
 from moto.compat import OrderedDict
 from moto.core import BaseBackend, BaseModel, CloudFormationModel
-from moto.core.utils import get_random_hex
 from moto.core.utils import iso_8601_datetime_with_milliseconds
 from moto.ec2.models import ec2_backends
 from .exceptions import (
@@ -371,9 +370,6 @@ class Database(CloudFormationModel):
     ):
         properties = cloudformation_json["Properties"]
 
-        db_instance_identifier = properties.get(cls.cloudformation_name_type())
-        if not db_instance_identifier:
-            db_instance_identifier = resource_name.lower() + get_random_hex(12)
         db_security_groups = properties.get("DBSecurityGroups")
         if not db_security_groups:
             db_security_groups = []
@@ -386,7 +382,7 @@ class Database(CloudFormationModel):
             "availability_zone": properties.get("AvailabilityZone"),
             "backup_retention_period": properties.get("BackupRetentionPeriod"),
             "db_instance_class": properties.get("DBInstanceClass"),
-            "db_instance_identifier": db_instance_identifier,
+            "db_instance_identifier": resource_name,
             "db_name": properties.get("DBName"),
             "db_subnet_group_name": db_subnet_group_name,
             "engine": properties.get("Engine"),
@@ -650,7 +646,7 @@ class SecurityGroup(CloudFormationModel):
         cls, resource_name, cloudformation_json, region_name
     ):
         properties = cloudformation_json["Properties"]
-        group_name = resource_name.lower() + get_random_hex(12)
+        group_name = resource_name.lower()
         description = properties["GroupDescription"]
         security_group_ingress_rules = properties.get("DBSecurityGroupIngress", [])
         tags = properties.get("Tags")
@@ -759,9 +755,6 @@ class SubnetGroup(CloudFormationModel):
     ):
         properties = cloudformation_json["Properties"]
 
-        subnet_name = properties.get(cls.cloudformation_name_type())
-        if not subnet_name:
-            subnet_name = resource_name.lower() + get_random_hex(12)
         description = properties["DBSubnetGroupDescription"]
         subnet_ids = properties["SubnetIds"]
         tags = properties.get("Tags")
@@ -770,7 +763,7 @@ class SubnetGroup(CloudFormationModel):
         subnets = [ec2_backend.get_subnet(subnet_id) for subnet_id in subnet_ids]
         rds2_backend = rds2_backends[region_name]
         subnet_group = rds2_backend.create_subnet_group(
-            subnet_name, description, subnets, tags
+            resource_name, description, subnets, tags
         )
         return subnet_group
 
diff --git a/moto/route53/models.py b/moto/route53/models.py
index 52f60d971..eb73f2bfb 100644
--- a/moto/route53/models.py
+++ b/moto/route53/models.py
@@ -298,10 +298,9 @@ class FakeZone(CloudFormationModel):
     def create_from_cloudformation_json(
         cls, resource_name, cloudformation_json, region_name
    ):
-        properties = cloudformation_json["Properties"]
-        name = properties["Name"]
-
-        hosted_zone = route53_backend.create_hosted_zone(name, private_zone=False)
+        hosted_zone = route53_backend.create_hosted_zone(
+            resource_name, private_zone=False
+        )
         return hosted_zone
 
diff --git a/moto/s3/models.py b/moto/s3/models.py
index 70e33fdfb..4230479af 100644
--- a/moto/s3/models.py
+++ b/moto/s3/models.py
@@ -1086,7 +1086,7 @@ class FakeBucket(CloudFormationModel):
     ):
         bucket = s3_backend.create_bucket(resource_name, region_name)
 
-        properties = cloudformation_json["Properties"]
+        properties = cloudformation_json.get("Properties", {})
 
         if "BucketEncryption" in properties:
             bucket_encryption = cfn_to_api_encryption(properties["BucketEncryption"])
@@ -1129,9 +1129,7 @@ class FakeBucket(CloudFormationModel):
     def delete_from_cloudformation_json(
         cls, resource_name, cloudformation_json, region_name
     ):
-        properties = cloudformation_json["Properties"]
-        bucket_name = properties[cls.cloudformation_name_type()]
-        s3_backend.delete_bucket(bucket_name)
+        s3_backend.delete_bucket(resource_name)
 
     def to_config_dict(self):
         """Return the AWS Config JSON format of this S3 bucket.
diff --git a/moto/sns/models.py b/moto/sns/models.py
index 779a0fb06..1d956ffde 100644
--- a/moto/sns/models.py
+++ b/moto/sns/models.py
@@ -104,7 +104,7 @@ class Topic(CloudFormationModel):
         sns_backend = sns_backends[region_name]
         properties = cloudformation_json["Properties"]
 
-        topic = sns_backend.create_topic(properties.get(cls.cloudformation_name_type()))
+        topic = sns_backend.create_topic(resource_name)
         for subscription in properties.get("Subscription", []):
             sns_backend.subscribe(
                 topic.arn, subscription["Endpoint"], subscription["Protocol"]
             )
diff --git a/moto/sqs/models.py b/moto/sqs/models.py
index a34e95c4f..039224f5b 100644
--- a/moto/sqs/models.py
+++ b/moto/sqs/models.py
@@ -374,10 +374,7 @@ class Queue(CloudFormationModel):
         sqs_backend = sqs_backends[region_name]
         return sqs_backend.create_queue(
-            name=properties["QueueName"],
-            tags=tags_dict,
-            region=region_name,
-            **properties
+            name=resource_name, tags=tags_dict, region=region_name, **properties
         )
 
     @classmethod
@@ -385,7 +382,7 @@ class Queue(CloudFormationModel):
         cls, original_resource, new_resource_name, cloudformation_json, region_name
     ):
         properties = cloudformation_json["Properties"]
-        queue_name = properties["QueueName"]
+        queue_name = original_resource.name
 
         sqs_backend = sqs_backends[region_name]
         queue = sqs_backend.get_queue(queue_name)
@@ -402,10 +399,8 @@ class Queue(CloudFormationModel):
     def delete_from_cloudformation_json(
         cls, resource_name, cloudformation_json, region_name
     ):
-        properties = cloudformation_json["Properties"]
-        queue_name = properties["QueueName"]
         sqs_backend = sqs_backends[region_name]
-        sqs_backend.delete_queue(queue_name)
+        sqs_backend.delete_queue(resource_name)
 
     @property
     def approximate_number_of_messages_delayed(self):
diff --git a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py
index 41d3fad3e..65469f1b3 100644
--- a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py
+++ b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py
@@ -592,7 +592,7 @@ def test_boto3_create_stack_set_with_yaml():
 @mock_cloudformation
 @mock_s3
 def test_create_stack_set_from_s3_url():
-    s3 = boto3.client("s3")
+    s3 = boto3.client("s3", region_name="us-east-1")
     s3_conn = boto3.resource("s3", region_name="us-east-1")
     s3_conn.create_bucket(Bucket="foobar")
 
@@ -704,7 +704,7 @@ def test_boto3_create_stack_with_short_form_func_yaml():
 @mock_s3
 @mock_cloudformation
 def test_get_template_summary():
-    s3 = boto3.client("s3")
+    s3 = boto3.client("s3", region_name="us-east-1")
     s3_conn = boto3.resource("s3", region_name="us-east-1")
     conn = boto3.client("cloudformation", region_name="us-east-1")
 
@@ -802,7 +802,7 @@ def test_create_stack_with_role_arn():
 @mock_cloudformation
 @mock_s3
 def test_create_stack_from_s3_url():
-    s3 = boto3.client("s3")
+    s3 = boto3.client("s3", region_name="us-east-1")
     s3_conn = boto3.resource("s3", region_name="us-east-1")
     s3_conn.create_bucket(Bucket="foobar")
 
@@ -857,7 +857,7 @@ def test_update_stack_with_previous_value():
 @mock_s3
 @mock_ec2
 def test_update_stack_from_s3_url():
-    s3 = boto3.client("s3")
+    s3 = boto3.client("s3", region_name="us-east-1")
     s3_conn = boto3.resource("s3", region_name="us-east-1")
     cf_conn = boto3.client("cloudformation", region_name="us-east-1")
 
@@ -886,7 +886,7 @@ def test_update_stack_from_s3_url():
 @mock_cloudformation
 @mock_s3
 def test_create_change_set_from_s3_url():
-    s3 = boto3.client("s3")
+    s3 = boto3.client("s3", region_name="us-east-1")
boto3.resource("s3", region_name="us-east-1") s3_conn.create_bucket(Bucket="foobar") diff --git a/tests/test_cloudformation/test_validate.py b/tests/test_cloudformation/test_validate.py index 081ceee54..ea14fceea 100644 --- a/tests/test_cloudformation/test_validate.py +++ b/tests/test_cloudformation/test_validate.py @@ -118,7 +118,7 @@ def test_boto3_yaml_validate_successful(): @mock_cloudformation @mock_s3 def test_boto3_yaml_validate_template_url_successful(): - s3 = boto3.client("s3") + s3 = boto3.client("s3", region_name="us-east-1") s3_conn = boto3.resource("s3", region_name="us-east-1") s3_conn.create_bucket(Bucket="foobar") diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index 610333303..288825d6e 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -5,12 +5,9 @@ import json import boto import boto3 import csv -import os import sure # noqa -import sys from boto.exception import BotoServerError from botocore.exceptions import ClientError -from dateutil.tz import tzutc from moto import mock_iam, mock_iam_deprecated, settings from moto.core import ACCOUNT_ID diff --git a/tests/test_iam/test_iam_cloudformation.py b/tests/test_iam/test_iam_cloudformation.py new file mode 100644 index 000000000..aa063273f --- /dev/null +++ b/tests/test_iam/test_iam_cloudformation.py @@ -0,0 +1,1196 @@ +import boto3 +import yaml +import sure # noqa + +from nose.tools import assert_raises +from botocore.exceptions import ClientError + +from moto import mock_iam, mock_cloudformation, mock_s3, mock_sts + +# AWS::IAM::User Tests +@mock_iam +@mock_cloudformation +def test_iam_cloudformation_create_user(): + cf_client = boto3.client("cloudformation", region_name="us-east-1") + + stack_name = "MyStack" + user_name = "MyUser" + + template = """ +Resources: + TheUser: + Type: AWS::IAM::User + Properties: + UserName: {0} +""".strip().format( + user_name + ) + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + + provisioned_resource = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ][0] + provisioned_resource["LogicalResourceId"].should.equal("TheUser") + provisioned_resource["PhysicalResourceId"].should.equal(user_name) + + +@mock_iam +@mock_cloudformation +def test_iam_cloudformation_update_user_no_interruption(): + cf_client = boto3.client("cloudformation", region_name="us-east-1") + + stack_name = "MyStack" + + template = """ +Resources: + TheUser: + Type: AWS::IAM::User +""".strip() + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + provisioned_resource = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ][0] + user_name = provisioned_resource["PhysicalResourceId"] + + iam_client = boto3.client("iam", region_name="us-east-1") + user = iam_client.get_user(UserName=user_name)["User"] + user["Path"].should.equal("/") + + path = "/MyPath/" + template = """ +Resources: + TheUser: + Type: AWS::IAM::User + Properties: + Path: {0} +""".strip().format( + path + ) + + cf_client.update_stack(StackName=stack_name, TemplateBody=template) + + user = iam_client.get_user(UserName=user_name)["User"] + user["Path"].should.equal(path) + + +@mock_iam +@mock_cloudformation +def test_iam_cloudformation_update_user_replacement(): + cf_client = boto3.client("cloudformation", region_name="us-east-1") + + stack_name = "MyStack" + + template = """ +Resources: + TheUser: + Type: AWS::IAM::User +""".strip() + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + 
+    provisioned_resource = cf_client.list_stack_resources(StackName=stack_name)[
+        "StackResourceSummaries"
+    ][0]
+    original_user_name = provisioned_resource["PhysicalResourceId"]
+
+    iam_client = boto3.client("iam", region_name="us-east-1")
+    user = iam_client.get_user(UserName=original_user_name)["User"]
+    user["Path"].should.equal("/")
+
+    new_user_name = "MyUser"
+    template = """
+Resources:
+  TheUser:
+    Type: AWS::IAM::User
+    Properties:
+      UserName: {0}
+""".strip().format(
+        new_user_name
+    )
+
+    cf_client.update_stack(StackName=stack_name, TemplateBody=template)
+
+    with assert_raises(ClientError) as e:
+        iam_client.get_user(UserName=original_user_name)
+    e.exception.response["Error"]["Code"].should.equal("NoSuchEntity")
+
+    iam_client.get_user(UserName=new_user_name)
+
+
+@mock_iam
+@mock_cloudformation
+def test_iam_cloudformation_update_drop_user():
+    cf_client = boto3.client("cloudformation", region_name="us-east-1")
+
+    stack_name = "MyStack"
+
+    template = """
+Resources:
+  TheFirstUser:
+    Type: AWS::IAM::User
+  TheSecondUser:
+    Type: AWS::IAM::User
+""".strip()
+
+    cf_client.create_stack(StackName=stack_name, TemplateBody=template)
+
+    provisioned_resources = cf_client.list_stack_resources(StackName=stack_name)[
+        "StackResourceSummaries"
+    ]
+    first_provisioned_user = [
+        resource
+        for resource in provisioned_resources
+        if resource["LogicalResourceId"] == "TheFirstUser"
+    ][0]
+    second_provisioned_user = [
+        resource
+        for resource in provisioned_resources
+        if resource["LogicalResourceId"] == "TheSecondUser"
+    ][0]
+    first_user_name = first_provisioned_user["PhysicalResourceId"]
+    second_user_name = second_provisioned_user["PhysicalResourceId"]
+
+    iam_client = boto3.client("iam", region_name="us-east-1")
+    iam_client.get_user(UserName=first_user_name)
+    iam_client.get_user(UserName=second_user_name)
+
+    template = """
+Resources:
+  TheSecondUser:
+    Type: AWS::IAM::User
+""".strip()
+
+    cf_client.update_stack(StackName=stack_name, TemplateBody=template)
+
+    provisioned_resources = cf_client.list_stack_resources(StackName=stack_name)[
+        "StackResourceSummaries"
+    ]
+    len(provisioned_resources).should.equal(1)
+    second_provisioned_user = [
+        resource
+        for resource in provisioned_resources
+        if resource["LogicalResourceId"] == "TheSecondUser"
+    ][0]
+    second_user_name.should.equal(second_provisioned_user["PhysicalResourceId"])
+
+    iam_client.get_user(UserName=second_user_name)
+    with assert_raises(ClientError) as e:
+        iam_client.get_user(UserName=first_user_name)
+    e.exception.response["Error"]["Code"].should.equal("NoSuchEntity")
+
+
+@mock_iam
+@mock_cloudformation
+def test_iam_cloudformation_delete_user():
+    cf_client = boto3.client("cloudformation", region_name="us-east-1")
+
+    stack_name = "MyStack"
+    user_name = "MyUser"
+
+    template = """
+Resources:
+  TheUser:
+    Type: AWS::IAM::User
+    Properties:
+      UserName: {}
+""".strip().format(
+        user_name
+    )
+
+    cf_client.create_stack(StackName=stack_name, TemplateBody=template)
+
+    iam_client = boto3.client("iam", region_name="us-east-1")
+    user = iam_client.get_user(UserName=user_name)
+
+    cf_client.delete_stack(StackName=stack_name)
+
+    with assert_raises(ClientError) as e:
+        user = iam_client.get_user(UserName=user_name)
+    e.exception.response["Error"]["Code"].should.equal("NoSuchEntity")
+
+
+@mock_iam
+@mock_cloudformation
+def test_iam_cloudformation_delete_user_having_generated_name():
+    cf_client = boto3.client("cloudformation", region_name="us-east-1")
+
+    stack_name = "MyStack"
+
+    template = """
+Resources:
TheUser: + Type: AWS::IAM::User +""".strip() + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + provisioned_resource = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ][0] + provisioned_resource["LogicalResourceId"].should.equal("TheUser") + user_name = provisioned_resource["PhysicalResourceId"] + + iam_client = boto3.client("iam", region_name="us-east-1") + user = iam_client.get_user(UserName=user_name) + + cf_client.delete_stack(StackName=stack_name) + + with assert_raises(ClientError) as e: + user = iam_client.get_user(UserName=user_name) + e.exception.response["Error"]["Code"].should.equal("NoSuchEntity") + + +@mock_iam +@mock_cloudformation +def test_iam_cloudformation_user_get_attr(): + cf_client = boto3.client("cloudformation", region_name="us-east-1") + + stack_name = "MyStack" + user_name = "MyUser" + + template = """ +Resources: + TheUser: + Type: AWS::IAM::User + Properties: + UserName: {0} +Outputs: + UserName: + Value: !Ref TheUser + UserArn: + Value: !GetAtt TheUser.Arn +""".strip().format( + user_name + ) + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + stack_description = cf_client.describe_stacks(StackName=stack_name)["Stacks"][0] + output_user_name = [ + output["OutputValue"] + for output in stack_description["Outputs"] + if output["OutputKey"] == "UserName" + ][0] + output_user_arn = [ + output["OutputValue"] + for output in stack_description["Outputs"] + if output["OutputKey"] == "UserArn" + ][0] + + iam_client = boto3.client("iam", region_name="us-east-1") + user_description = iam_client.get_user(UserName=output_user_name)["User"] + output_user_arn.should.equal(user_description["Arn"]) + + +# AWS::IAM::Policy Tests +@mock_s3 +@mock_iam +@mock_cloudformation +def test_iam_cloudformation_create_user_policy(): + iam_client = boto3.client("iam", region_name="us-east-1") + user_name = "MyUser" + iam_client.create_user(UserName=user_name) + + s3_client = boto3.client("s3", region_name="us-east-1") + bucket_name = "my-bucket" + bucket = s3_client.create_bucket(Bucket=bucket_name) + bucket_arn = "arn:aws:s3:::{0}".format(bucket_name) + + cf_client = boto3.client("cloudformation", region_name="us-east-1") + stack_name = "MyStack" + policy_name = "MyPolicy" + + template = """ +Resources: + ThePolicy: + Type: AWS::IAM::Policy + Properties: + PolicyName: {0} + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: s3:* + Resource: {1} + Users: + - {2} +""".strip().format( + policy_name, bucket_arn, user_name + ) + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + + provisioned_resource = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ][0] + logical_resource_id = provisioned_resource["LogicalResourceId"] + logical_resource_id.should.equal("ThePolicy") + + original_policy_document = yaml.load(template, Loader=yaml.FullLoader)["Resources"][ + logical_resource_id + ]["Properties"]["PolicyDocument"] + policy = iam_client.get_user_policy(UserName=user_name, PolicyName=policy_name) + policy["PolicyDocument"].should.equal(original_policy_document) + + +@mock_s3 +@mock_iam +@mock_cloudformation +def test_iam_cloudformation_update_user_policy(): + iam_client = boto3.client("iam", region_name="us-east-1") + user_name_1 = "MyUser1" + iam_client.create_user(UserName=user_name_1) + user_name_2 = "MyUser2" + iam_client.create_user(UserName=user_name_2) + + s3_client = boto3.client("s3", region_name="us-east-1") + bucket_name = 
"my-bucket" + s3_client.create_bucket(Bucket=bucket_name) + bucket_arn = "arn:aws:s3:::{0}".format(bucket_name) + + cf_client = boto3.client("cloudformation", region_name="us-east-1") + stack_name = "MyStack" + policy_name = "MyPolicy" + + template = """ +Resources: + ThePolicy: + Type: AWS::IAM::Policy + Properties: + PolicyName: {0} + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: s3:* + Resource: {1} + Users: + - {2} +""".strip().format( + policy_name, bucket_arn, user_name_1 + ) + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + + provisioned_resource = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ][0] + logical_resource_id = provisioned_resource["LogicalResourceId"] + logical_resource_id.should.equal("ThePolicy") + + original_policy_document = yaml.load(template, Loader=yaml.FullLoader)["Resources"][ + logical_resource_id + ]["Properties"]["PolicyDocument"] + policy = iam_client.get_user_policy(UserName=user_name_1, PolicyName=policy_name) + policy["PolicyDocument"].should.equal(original_policy_document) + + # Change template and user + template = """ +Resources: + ThePolicy: + Type: AWS::IAM::Policy + Properties: + PolicyName: {0} + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: s3:ListBuckets + Resource: {1} + Users: + - {2} +""".strip().format( + policy_name, bucket_arn, user_name_2 + ) + + cf_client.update_stack(StackName=stack_name, TemplateBody=template) + + provisioned_resource = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ][0] + logical_resource_id = provisioned_resource["LogicalResourceId"] + logical_resource_id.should.equal("ThePolicy") + + original_policy_document = yaml.load(template, Loader=yaml.FullLoader)["Resources"][ + logical_resource_id + ]["Properties"]["PolicyDocument"] + policy = iam_client.get_user_policy(UserName=user_name_2, PolicyName=policy_name) + policy["PolicyDocument"].should.equal(original_policy_document) + + iam_client.get_user_policy.when.called_with( + UserName=user_name_1, PolicyName=policy_name + ).should.throw(iam_client.exceptions.NoSuchEntityException) + + +@mock_s3 +@mock_iam +@mock_cloudformation +def test_iam_cloudformation_delete_user_policy_having_generated_name(): + iam_client = boto3.client("iam", region_name="us-east-1") + user_name = "MyUser" + iam_client.create_user(UserName=user_name) + + s3_client = boto3.client("s3", region_name="us-east-1") + bucket_name = "my-bucket" + bucket = s3_client.create_bucket(Bucket=bucket_name) + bucket_arn = "arn:aws:s3:::{0}".format(bucket_name) + + cf_client = boto3.client("cloudformation", region_name="us-east-1") + stack_name = "MyStack" + policy_name = "MyPolicy" + + template = """ +Resources: + ThePolicy: + Type: AWS::IAM::Policy + Properties: + PolicyName: MyPolicy + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: s3:* + Resource: {0} + Users: + - {1} +""".strip().format( + bucket_arn, user_name + ) + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + + provisioned_resource = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ][0] + logical_resource_id = provisioned_resource["LogicalResourceId"] + logical_resource_id.should.equal("ThePolicy") + + original_policy_document = yaml.load(template, Loader=yaml.FullLoader)["Resources"][ + logical_resource_id + ]["Properties"]["PolicyDocument"] + policy = iam_client.get_user_policy(UserName=user_name, 
PolicyName=policy_name) + policy["PolicyDocument"].should.equal(original_policy_document) + + cf_client.delete_stack(StackName=stack_name) + iam_client.get_user_policy.when.called_with( + UserName=user_name, PolicyName=policy_name + ).should.throw(iam_client.exceptions.NoSuchEntityException) + + +@mock_s3 +@mock_iam +@mock_cloudformation +def test_iam_cloudformation_create_role_policy(): + iam_client = boto3.client("iam", region_name="us-east-1") + role_name = "MyRole" + iam_client.create_role(RoleName=role_name, AssumeRolePolicyDocument="{}") + + s3_client = boto3.client("s3", region_name="us-east-1") + bucket_name = "my-bucket" + s3_client.create_bucket(Bucket=bucket_name) + bucket_arn = "arn:aws:s3:::{0}".format(bucket_name) + + cf_client = boto3.client("cloudformation", region_name="us-east-1") + stack_name = "MyStack" + policy_name = "MyPolicy" + + template = """ +Resources: + ThePolicy: + Type: AWS::IAM::Policy + Properties: + PolicyName: {0} + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: s3:* + Resource: {1} + Roles: + - {2} +""".strip().format( + policy_name, bucket_arn, role_name + ) + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + + provisioned_resource = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ][0] + logical_resource_id = provisioned_resource["LogicalResourceId"] + logical_resource_id.should.equal("ThePolicy") + + original_policy_document = yaml.load(template, Loader=yaml.FullLoader)["Resources"][ + logical_resource_id + ]["Properties"]["PolicyDocument"] + policy = iam_client.get_role_policy(RoleName=role_name, PolicyName=policy_name) + policy["PolicyDocument"].should.equal(original_policy_document) + + +@mock_s3 +@mock_iam +@mock_cloudformation +def test_iam_cloudformation_update_role_policy(): + iam_client = boto3.client("iam", region_name="us-east-1") + role_name_1 = "MyRole1" + iam_client.create_role(RoleName=role_name_1, AssumeRolePolicyDocument="{}") + role_name_2 = "MyRole2" + iam_client.create_role(RoleName=role_name_2, AssumeRolePolicyDocument="{}") + + s3_client = boto3.client("s3", region_name="us-east-1") + bucket_name = "my-bucket" + s3_client.create_bucket(Bucket=bucket_name) + bucket_arn = "arn:aws:s3:::{0}".format(bucket_name) + + cf_client = boto3.client("cloudformation", region_name="us-east-1") + stack_name = "MyStack" + policy_name = "MyPolicy" + + template = """ +Resources: + ThePolicy: + Type: AWS::IAM::Policy + Properties: + PolicyName: {0} + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: s3:* + Resource: {1} + Roles: + - {2} +""".strip().format( + policy_name, bucket_arn, role_name_1 + ) + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + + provisioned_resource = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ][0] + logical_resource_id = provisioned_resource["LogicalResourceId"] + logical_resource_id.should.equal("ThePolicy") + + original_policy_document = yaml.load(template, Loader=yaml.FullLoader)["Resources"][ + logical_resource_id + ]["Properties"]["PolicyDocument"] + policy = iam_client.get_role_policy(RoleName=role_name_1, PolicyName=policy_name) + policy["PolicyDocument"].should.equal(original_policy_document) + + # Change template and user + template = """ +Resources: + ThePolicy: + Type: AWS::IAM::Policy + Properties: + PolicyName: {0} + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: s3:ListBuckets + Resource: {1} + 
Roles: + - {2} +""".strip().format( + policy_name, bucket_arn, role_name_2 + ) + + cf_client.update_stack(StackName=stack_name, TemplateBody=template) + + provisioned_resource = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ][0] + logical_resource_id = provisioned_resource["LogicalResourceId"] + logical_resource_id.should.equal("ThePolicy") + + original_policy_document = yaml.load(template, Loader=yaml.FullLoader)["Resources"][ + logical_resource_id + ]["Properties"]["PolicyDocument"] + policy = iam_client.get_role_policy(RoleName=role_name_2, PolicyName=policy_name) + policy["PolicyDocument"].should.equal(original_policy_document) + + iam_client.get_role_policy.when.called_with( + RoleName=role_name_1, PolicyName=policy_name + ).should.throw(iam_client.exceptions.NoSuchEntityException) + + +@mock_s3 +@mock_iam +@mock_cloudformation +def test_iam_cloudformation_delete_role_policy_having_generated_name(): + iam_client = boto3.client("iam", region_name="us-east-1") + role_name = "MyRole" + iam_client.create_role(RoleName=role_name, AssumeRolePolicyDocument="{}") + + s3_client = boto3.client("s3", region_name="us-east-1") + bucket_name = "my-bucket" + s3_client.create_bucket(Bucket=bucket_name) + bucket_arn = "arn:aws:s3:::{0}".format(bucket_name) + + cf_client = boto3.client("cloudformation", region_name="us-east-1") + stack_name = "MyStack" + policy_name = "MyPolicy" + + template = """ +Resources: + ThePolicy: + Type: AWS::IAM::Policy + Properties: + PolicyName: MyPolicy + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: s3:* + Resource: {0} + Roles: + - {1} +""".strip().format( + bucket_arn, role_name + ) + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + + provisioned_resource = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ][0] + logical_resource_id = provisioned_resource["LogicalResourceId"] + logical_resource_id.should.equal("ThePolicy") + + original_policy_document = yaml.load(template, Loader=yaml.FullLoader)["Resources"][ + logical_resource_id + ]["Properties"]["PolicyDocument"] + policy = iam_client.get_role_policy(RoleName=role_name, PolicyName=policy_name) + policy["PolicyDocument"].should.equal(original_policy_document) + + cf_client.delete_stack(StackName=stack_name) + iam_client.get_role_policy.when.called_with( + RoleName=role_name, PolicyName=policy_name + ).should.throw(iam_client.exceptions.NoSuchEntityException) + + +@mock_s3 +@mock_iam +@mock_cloudformation +def test_iam_cloudformation_create_group_policy(): + iam_client = boto3.client("iam", region_name="us-east-1") + group_name = "MyGroup" + iam_client.create_group(GroupName=group_name) + + s3_client = boto3.client("s3", region_name="us-east-1") + bucket_name = "my-bucket" + s3_client.create_bucket(Bucket=bucket_name) + bucket_arn = "arn:aws:s3:::{0}".format(bucket_name) + + cf_client = boto3.client("cloudformation", region_name="us-east-1") + stack_name = "MyStack" + policy_name = "MyPolicy" + + template = """ +Resources: + ThePolicy: + Type: AWS::IAM::Policy + Properties: + PolicyName: {0} + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: s3:* + Resource: {1} + Groups: + - {2} +""".strip().format( + policy_name, bucket_arn, group_name + ) + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + + provisioned_resource = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ][0] + logical_resource_id = 
provisioned_resource["LogicalResourceId"] + logical_resource_id.should.equal("ThePolicy") + + original_policy_document = yaml.load(template, Loader=yaml.FullLoader)["Resources"][ + logical_resource_id + ]["Properties"]["PolicyDocument"] + policy = iam_client.get_group_policy(GroupName=group_name, PolicyName=policy_name) + policy["PolicyDocument"].should.equal(original_policy_document) + + +@mock_s3 +@mock_iam +@mock_cloudformation +def test_iam_cloudformation_update_group_policy(): + iam_client = boto3.client("iam", region_name="us-east-1") + group_name_1 = "MyGroup1" + iam_client.create_group(GroupName=group_name_1) + group_name_2 = "MyGroup2" + iam_client.create_group(GroupName=group_name_2) + + s3_client = boto3.client("s3", region_name="us-east-1") + bucket_name = "my-bucket" + s3_client.create_bucket(Bucket=bucket_name) + bucket_arn = "arn:aws:s3:::{0}".format(bucket_name) + + cf_client = boto3.client("cloudformation", region_name="us-east-1") + stack_name = "MyStack" + policy_name = "MyPolicy" + + template = """ +Resources: + ThePolicy: + Type: AWS::IAM::Policy + Properties: + PolicyName: {0} + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: s3:* + Resource: {1} + Groups: + - {2} +""".strip().format( + policy_name, bucket_arn, group_name_1 + ) + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + + provisioned_resource = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ][0] + logical_resource_id = provisioned_resource["LogicalResourceId"] + logical_resource_id.should.equal("ThePolicy") + + original_policy_document = yaml.load(template, Loader=yaml.FullLoader)["Resources"][ + logical_resource_id + ]["Properties"]["PolicyDocument"] + policy = iam_client.get_group_policy(GroupName=group_name_1, PolicyName=policy_name) + policy["PolicyDocument"].should.equal(original_policy_document) + + # Change template and user + template = """ +Resources: + ThePolicy: + Type: AWS::IAM::Policy + Properties: + PolicyName: {0} + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: s3:ListBuckets + Resource: {1} + Groups: + - {2} +""".strip().format( + policy_name, bucket_arn, group_name_2 + ) + + cf_client.update_stack(StackName=stack_name, TemplateBody=template) + + provisioned_resource = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ][0] + logical_resource_id = provisioned_resource["LogicalResourceId"] + logical_resource_id.should.equal("ThePolicy") + + original_policy_document = yaml.load(template, Loader=yaml.FullLoader)["Resources"][ + logical_resource_id + ]["Properties"]["PolicyDocument"] + policy = iam_client.get_group_policy(GroupName=group_name_2, PolicyName=policy_name) + policy["PolicyDocument"].should.equal(original_policy_document) + + iam_client.get_group_policy.when.called_with( + GroupName=group_name_1, PolicyName=policy_name + ).should.throw(iam_client.exceptions.NoSuchEntityException) + + +@mock_s3 +@mock_iam +@mock_cloudformation +def test_iam_cloudformation_delete_group_policy_having_generated_name(): + iam_client = boto3.client("iam", region_name="us-east-1") + group_name = "MyGroup" + iam_client.create_group(GroupName=group_name) + + s3_client = boto3.client("s3", region_name="us-east-1") + bucket_name = "my-bucket" + s3_client.create_bucket(Bucket=bucket_name) + bucket_arn = "arn:aws:s3:::{0}".format(bucket_name) + + cf_client = boto3.client("cloudformation", region_name="us-east-1") + stack_name = "MyStack" + policy_name = 
"MyPolicy" + + template = """ +Resources: + ThePolicy: + Type: AWS::IAM::Policy + Properties: + PolicyName: MyPolicy + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: s3:* + Resource: {0} + Groups: + - {1} +""".strip().format( + bucket_arn, group_name + ) + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + + provisioned_resource = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ][0] + logical_resource_id = provisioned_resource["LogicalResourceId"] + logical_resource_id.should.equal("ThePolicy") + + original_policy_document = yaml.load(template, Loader=yaml.FullLoader)["Resources"][ + logical_resource_id + ]["Properties"]["PolicyDocument"] + policy = iam_client.get_group_policy(GroupName=group_name, PolicyName=policy_name) + policy["PolicyDocument"].should.equal(original_policy_document) + + cf_client.delete_stack(StackName=stack_name) + iam_client.get_group_policy.when.called_with( + GroupName=group_name, PolicyName=policy_name + ).should.throw(iam_client.exceptions.NoSuchEntityException) + + +# AWS::IAM::User AccessKeys +@mock_iam +@mock_cloudformation +def test_iam_cloudformation_create_user_with_access_key(): + cf_client = boto3.client("cloudformation", region_name="us-east-1") + + stack_name = "MyStack" + + template = """ +Resources: + TheUser: + Type: AWS::IAM::User + TheAccessKey: + Type: AWS::IAM::AccessKey + Properties: + UserName: !Ref TheUser +""".strip() + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + + provisioned_resources = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ] + + provisioned_user = [ + resource + for resource in provisioned_resources + if resource["LogicalResourceId"] == "TheUser" + ][0] + user_name = provisioned_user["PhysicalResourceId"] + + provisioned_access_keys = [ + resource + for resource in provisioned_resources + if resource["LogicalResourceId"] == "TheAccessKey" + ] + len(provisioned_access_keys).should.equal(1) + + iam_client = boto3.client("iam", region_name="us-east-1") + user = iam_client.get_user(UserName=user_name)["User"] + user["UserName"].should.equal(user_name) + access_keys = iam_client.list_access_keys(UserName=user_name) + access_keys["AccessKeyMetadata"][0]["UserName"].should.equal(user_name) + + +@mock_sts +@mock_iam +@mock_cloudformation +def test_iam_cloudformation_access_key_get_attr(): + cf_client = boto3.client("cloudformation", region_name="us-east-1") + + stack_name = "MyStack" + + template = """ +Resources: + TheUser: + Type: AWS::IAM::User + TheAccessKey: + Type: AWS::IAM::AccessKey + Properties: + UserName: !Ref TheUser +Outputs: + AccessKeyId: + Value: !Ref TheAccessKey + SecretKey: + Value: !GetAtt TheAccessKey.SecretAccessKey +""".strip() + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + + provisioned_resources = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ] + provisioned_user = [ + resource + for resource in provisioned_resources + if resource["LogicalResourceId"] == "TheUser" + ][0] + user_name = provisioned_user["PhysicalResourceId"] + + stack_description = cf_client.describe_stacks(StackName=stack_name)["Stacks"][0] + output_access_key_id = [ + output["OutputValue"] + for output in stack_description["Outputs"] + if output["OutputKey"] == "AccessKeyId" + ][0] + output_secret_key = [ + output["OutputValue"] + for output in stack_description["Outputs"] + if output["OutputKey"] == "SecretKey" + ][0] + + sts_client 
= boto3.client(
+        "sts",
+        aws_access_key_id=output_access_key_id,
+        aws_secret_access_key=output_secret_key,
+        region_name="us-east-1",
+    )
+    caller_identity = sts_client.get_caller_identity()
+    caller_identity["Arn"].split("/")[1].should.equal(user_name)
+
+
+@mock_iam
+@mock_cloudformation
+def test_iam_cloudformation_delete_users_access_key():
+    cf_client = boto3.client("cloudformation", region_name="us-east-1")
+
+    stack_name = "MyStack"
+
+    template = """
+    Resources:
+      TheUser:
+        Type: AWS::IAM::User
+      TheAccessKey:
+        Type: AWS::IAM::AccessKey
+        Properties:
+          UserName: !Ref TheUser
+    """.strip()
+
+    cf_client.create_stack(StackName=stack_name, TemplateBody=template)
+
+    provisioned_resources = cf_client.list_stack_resources(StackName=stack_name)[
+        "StackResourceSummaries"
+    ]
+
+    provisioned_user = [
+        resource
+        for resource in provisioned_resources
+        if resource["LogicalResourceId"] == "TheUser"
+    ][0]
+    user_name = provisioned_user["PhysicalResourceId"]
+
+    provisioned_access_key = [
+        resource
+        for resource in provisioned_resources
+        if resource["LogicalResourceId"] == "TheAccessKey"
+    ][0]
+    access_key_id = provisioned_access_key["PhysicalResourceId"]
+
+    iam_client = boto3.client("iam", region_name="us-east-1")
+    user = iam_client.get_user(UserName=user_name)
+    access_keys = iam_client.list_access_keys(UserName=user_name)
+
+    access_key_id.should.equal(access_keys["AccessKeyMetadata"][0]["AccessKeyId"])
+
+    cf_client.delete_stack(StackName=stack_name)
+
+    iam_client.get_user.when.called_with(UserName=user_name).should.throw(
+        iam_client.exceptions.NoSuchEntityException
+    )
+    iam_client.list_access_keys.when.called_with(UserName=user_name).should.throw(
+        iam_client.exceptions.NoSuchEntityException
+    )
+
+
+@mock_iam
+@mock_cloudformation
+def test_iam_cloudformation_delete_users_access_key_and_user():
+    cf_client = boto3.client("cloudformation", region_name="us-east-1")
+
+    stack_name = "MyStack"
+
+    template = """
+    Resources:
+      TheUser:
+        Type: AWS::IAM::User
+      TheAccessKey:
+        Type: AWS::IAM::AccessKey
+        Properties:
+          UserName: !Ref TheUser
+    """.strip()
+
+    cf_client.create_stack(StackName=stack_name, TemplateBody=template)
+
+    provisioned_resources = cf_client.list_stack_resources(StackName=stack_name)[
+        "StackResourceSummaries"
+    ]
+
+    provisioned_user = [
+        resource
+        for resource in provisioned_resources
+        if resource["LogicalResourceId"] == "TheUser"
+    ][0]
+    user_name = provisioned_user["PhysicalResourceId"]
+
+    provisioned_access_keys = [
+        resource
+        for resource in provisioned_resources
+        if resource["LogicalResourceId"] == "TheAccessKey"
+    ]
+    len(provisioned_access_keys).should.equal(1)
+
+    iam_client = boto3.client("iam", region_name="us-east-1")
+    user = iam_client.get_user(UserName=user_name)["User"]
+    user["UserName"].should.equal(user_name)
+    access_keys = iam_client.list_access_keys(UserName=user_name)
+    access_keys["AccessKeyMetadata"][0]["UserName"].should.equal(user_name)
+
+    cf_client.delete_stack(StackName=stack_name)
+
+    iam_client.get_user.when.called_with(UserName=user_name).should.throw(
+        iam_client.exceptions.NoSuchEntityException
+    )
+    iam_client.list_access_keys.when.called_with(UserName=user_name).should.throw(
+        iam_client.exceptions.NoSuchEntityException
+    )
+
+
+@mock_iam
+@mock_cloudformation
+def test_iam_cloudformation_update_users_access_key_no_interruption():
+    cf_client = boto3.client("cloudformation", region_name="us-east-1")
+
+    stack_name = "MyStack"
+
+    template = """
+Resources:
+  TheUser:
+    Type: AWS::IAM::User
+  TheAccessKey:
+ Type: AWS::IAM::AccessKey + Properties: + UserName: !Ref TheUser +""".strip() + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + + provisioned_resources = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ] + + provisioned_user = [ + resource + for resource in provisioned_resources + if resource["LogicalResourceId"] == "TheUser" + ][0] + user_name = provisioned_user["PhysicalResourceId"] + + provisioned_access_key = [ + resource + for resource in provisioned_resources + if resource["LogicalResourceId"] == "TheAccessKey" + ][0] + access_key_id = provisioned_access_key["PhysicalResourceId"] + + iam_client = boto3.client("iam", region_name="us-east-1") + user = iam_client.get_user(UserName=user_name) + access_keys = iam_client.list_access_keys(UserName=user_name) + access_key_id.should.equal(access_keys["AccessKeyMetadata"][0]["AccessKeyId"]) + + template = """ +Resources: + TheUser: + Type: AWS::IAM::User + TheAccessKey: + Type: AWS::IAM::AccessKey + Properties: + Status: Inactive +""".strip() + + cf_client.update_stack(StackName=stack_name, TemplateBody=template) + access_keys = iam_client.list_access_keys(UserName=user_name) + access_keys["AccessKeyMetadata"][0]["Status"].should.equal("Inactive") + + +@mock_iam +@mock_cloudformation +def test_iam_cloudformation_update_users_access_key_replacement(): + cf_client = boto3.client("cloudformation", region_name="us-east-1") + + stack_name = "MyStack" + + template = """ +Resources: + TheUser: + Type: AWS::IAM::User + TheAccessKey: + Type: AWS::IAM::AccessKey + Properties: + UserName: !Ref TheUser +""".strip() + + cf_client.create_stack(StackName=stack_name, TemplateBody=template) + + provisioned_resources = cf_client.list_stack_resources(StackName=stack_name)[ + "StackResourceSummaries" + ] + + provisioned_user = [ + resource + for resource in provisioned_resources + if resource["LogicalResourceId"] == "TheUser" + ][0] + user_name = provisioned_user["PhysicalResourceId"] + + provisioned_access_key = [ + resource + for resource in provisioned_resources + if resource["LogicalResourceId"] == "TheAccessKey" + ][0] + access_key_id = provisioned_access_key["PhysicalResourceId"] + + iam_client = boto3.client("iam", region_name="us-east-1") + user = iam_client.get_user(UserName=user_name) + access_keys = iam_client.list_access_keys(UserName=user_name) + access_key_id.should.equal(access_keys["AccessKeyMetadata"][0]["AccessKeyId"]) + + other_user_name = "MyUser" + iam_client.create_user(UserName=other_user_name) + + template = """ +Resources: + TheUser: + Type: AWS::IAM::User + TheAccessKey: + Type: AWS::IAM::AccessKey + Properties: + UserName: {0} +""".strip().format( + other_user_name + ) + + cf_client.update_stack(StackName=stack_name, TemplateBody=template) + + access_keys = iam_client.list_access_keys(UserName=user_name) + len(access_keys["AccessKeyMetadata"]).should.equal(0) + + access_keys = iam_client.list_access_keys(UserName=other_user_name) + access_key_id.should_not.equal(access_keys["AccessKeyMetadata"][0]["AccessKeyId"]) diff --git a/tests/test_kinesis/test_kinesis_cloudformation.py b/tests/test_kinesis/test_kinesis_cloudformation.py index 7f3aef0de..59f73b888 100644 --- a/tests/test_kinesis/test_kinesis_cloudformation.py +++ b/tests/test_kinesis/test_kinesis_cloudformation.py @@ -73,6 +73,12 @@ Resources: Properties: Name: MyStream ShardCount: 4 + RetentionPeriodHours: 48 + Tags: + - Key: TagKey1 + Value: TagValue1 + - Key: TagKey2 + Value: TagValue2 """.strip() 
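+    # RetentionPeriodHours and the two Tags entries are the properties this
+    # patch adds; the assertions after create_stack verify that moto surfaces
+    # both through describe_stream and list_tags_for_stream.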
cf_conn.create_stack(StackName=stack_name, TemplateBody=template) @@ -83,6 +89,14 @@ Resources: stream_description = kinesis_conn.describe_stream(StreamName="MyStream")[ "StreamDescription" ] + stream_description["RetentionPeriodHours"].should.equal(48) + + tags = kinesis_conn.list_tags_for_stream(StreamName="MyStream")["Tags"] + tag1_value = [tag for tag in tags if tag["Key"] == "TagKey1"][0]["Value"] + tag2_value = [tag for tag in tags if tag["Key"] == "TagKey2"][0]["Value"] + tag1_value.should.equal("TagValue1") + tag2_value.should.equal("TagValue2") + shards_provisioned = len( [ shard @@ -98,12 +112,27 @@ Resources: Type: AWS::Kinesis::Stream Properties: ShardCount: 6 + RetentionPeriodHours: 24 + Tags: + - Key: TagKey1 + Value: TagValue1a + - Key: TagKey2 + Value: TagValue2a + """.strip() cf_conn.update_stack(StackName=stack_name, TemplateBody=template) stream_description = kinesis_conn.describe_stream(StreamName="MyStream")[ "StreamDescription" ] + stream_description["RetentionPeriodHours"].should.equal(24) + + tags = kinesis_conn.list_tags_for_stream(StreamName="MyStream")["Tags"] + tag1_value = [tag for tag in tags if tag["Key"] == "TagKey1"][0]["Value"] + tag2_value = [tag for tag in tags if tag["Key"] == "TagKey2"][0]["Value"] + tag1_value.should.equal("TagValue1a") + tag2_value.should.equal("TagValue2a") + shards_provisioned = len( [ shard diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index c8e3ed4de..6622b2f41 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -2,7 +2,6 @@ from __future__ import unicode_literals import datetime -import os import sys from boto3 import Session @@ -11,7 +10,6 @@ from six.moves.urllib.error import HTTPError from functools import wraps from gzip import GzipFile from io import BytesIO -import mimetypes import zlib import pickle import uuid @@ -36,7 +34,7 @@ from nose.tools import assert_raises import sure # noqa -from moto import settings, mock_s3, mock_s3_deprecated, mock_config, mock_cloudformation +from moto import settings, mock_s3, mock_s3_deprecated, mock_config import moto.s3.models as s3model from moto.core.exceptions import InvalidNextTokenException from moto.core.utils import py2_strip_unicode_keys @@ -4686,142 +4684,3 @@ def test_presigned_put_url_with_custom_headers(): s3.delete_object(Bucket=bucket, Key=key) s3.delete_bucket(Bucket=bucket) - - -@mock_s3 -@mock_cloudformation -def test_s3_bucket_cloudformation_basic(): - s3 = boto3.client("s3", region_name="us-east-1") - cf = boto3.client("cloudformation", region_name="us-east-1") - - template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": {"testInstance": {"Type": "AWS::S3::Bucket", "Properties": {},}}, - "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}}, - } - template_json = json.dumps(template) - stack_id = cf.create_stack(StackName="test_stack", TemplateBody=template_json)[ - "StackId" - ] - stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0] - - s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"]) - - -@mock_s3 -@mock_cloudformation -def test_s3_bucket_cloudformation_with_properties(): - s3 = boto3.client("s3", region_name="us-east-1") - cf = boto3.client("cloudformation", region_name="us-east-1") - - bucket_name = "MyBucket" - template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "testInstance": { - "Type": "AWS::S3::Bucket", - "Properties": { - "BucketName": bucket_name, - "BucketEncryption": { - "ServerSideEncryptionConfiguration": [ - { - 
"ServerSideEncryptionByDefault": { - "SSEAlgorithm": "AES256" - } - } - ] - }, - }, - } - }, - "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}}, - } - template_json = json.dumps(template) - stack_id = cf.create_stack(StackName="test_stack", TemplateBody=template_json)[ - "StackId" - ] - stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0] - s3.head_bucket(Bucket=bucket_name) - - encryption = s3.get_bucket_encryption(Bucket=bucket_name) - encryption["ServerSideEncryptionConfiguration"]["Rules"][0][ - "ApplyServerSideEncryptionByDefault" - ]["SSEAlgorithm"].should.equal("AES256") - - -@mock_s3 -@mock_cloudformation -def test_s3_bucket_cloudformation_update_no_interruption(): - s3 = boto3.client("s3", region_name="us-east-1") - cf = boto3.client("cloudformation", region_name="us-east-1") - - template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": {"testInstance": {"Type": "AWS::S3::Bucket"}}, - "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}}, - } - template_json = json.dumps(template) - cf.create_stack(StackName="test_stack", TemplateBody=template_json) - stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0] - s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"]) - - template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "testInstance": { - "Type": "AWS::S3::Bucket", - "Properties": { - "BucketEncryption": { - "ServerSideEncryptionConfiguration": [ - { - "ServerSideEncryptionByDefault": { - "SSEAlgorithm": "AES256" - } - } - ] - } - }, - } - }, - "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}}, - } - template_json = json.dumps(template) - cf.update_stack(StackName="test_stack", TemplateBody=template_json) - encryption = s3.get_bucket_encryption( - Bucket=stack_description["Outputs"][0]["OutputValue"] - ) - encryption["ServerSideEncryptionConfiguration"]["Rules"][0][ - "ApplyServerSideEncryptionByDefault" - ]["SSEAlgorithm"].should.equal("AES256") - - -@mock_s3 -@mock_cloudformation -def test_s3_bucket_cloudformation_update_replacement(): - s3 = boto3.client("s3", region_name="us-east-1") - cf = boto3.client("cloudformation", region_name="us-east-1") - - template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": {"testInstance": {"Type": "AWS::S3::Bucket"}}, - "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}}, - } - template_json = json.dumps(template) - cf.create_stack(StackName="test_stack", TemplateBody=template_json) - stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0] - s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"]) - - template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "testInstance": { - "Type": "AWS::S3::Bucket", - "Properties": {"BucketName": "MyNewBucketName"}, - } - }, - "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}}, - } - template_json = json.dumps(template) - cf.update_stack(StackName="test_stack", TemplateBody=template_json) - stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0] - s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"]) diff --git a/tests/test_s3/test_s3_cloudformation.py b/tests/test_s3/test_s3_cloudformation.py new file mode 100644 index 000000000..69d0c9f98 --- /dev/null +++ b/tests/test_s3/test_s3_cloudformation.py @@ -0,0 +1,145 @@ +import json +import boto3 + +import sure # noqa + +from moto import mock_s3, mock_cloudformation + + +@mock_s3 +@mock_cloudformation +def 
test_s3_bucket_cloudformation_basic(): + s3 = boto3.client("s3", region_name="us-east-1") + cf = boto3.client("cloudformation", region_name="us-east-1") + + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": {"testInstance": {"Type": "AWS::S3::Bucket", "Properties": {},}}, + "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}}, + } + template_json = json.dumps(template) + stack_id = cf.create_stack(StackName="test_stack", TemplateBody=template_json)[ + "StackId" + ] + stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0] + + s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"]) + + +@mock_s3 +@mock_cloudformation +def test_s3_bucket_cloudformation_with_properties(): + s3 = boto3.client("s3", region_name="us-east-1") + cf = boto3.client("cloudformation", region_name="us-east-1") + + bucket_name = "MyBucket" + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "testInstance": { + "Type": "AWS::S3::Bucket", + "Properties": { + "BucketName": bucket_name, + "BucketEncryption": { + "ServerSideEncryptionConfiguration": [ + { + "ServerSideEncryptionByDefault": { + "SSEAlgorithm": "AES256" + } + } + ] + }, + }, + } + }, + "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}}, + } + template_json = json.dumps(template) + stack_id = cf.create_stack(StackName="test_stack", TemplateBody=template_json)[ + "StackId" + ] + stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0] + s3.head_bucket(Bucket=bucket_name) + + encryption = s3.get_bucket_encryption(Bucket=bucket_name) + encryption["ServerSideEncryptionConfiguration"]["Rules"][0][ + "ApplyServerSideEncryptionByDefault" + ]["SSEAlgorithm"].should.equal("AES256") + + +@mock_s3 +@mock_cloudformation +def test_s3_bucket_cloudformation_update_no_interruption(): + s3 = boto3.client("s3", region_name="us-east-1") + cf = boto3.client("cloudformation", region_name="us-east-1") + + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": {"testInstance": {"Type": "AWS::S3::Bucket"}}, + "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}}, + } + template_json = json.dumps(template) + cf.create_stack(StackName="test_stack", TemplateBody=template_json) + stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0] + s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"]) + + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "testInstance": { + "Type": "AWS::S3::Bucket", + "Properties": { + "BucketEncryption": { + "ServerSideEncryptionConfiguration": [ + { + "ServerSideEncryptionByDefault": { + "SSEAlgorithm": "AES256" + } + } + ] + } + }, + } + }, + "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}}, + } + template_json = json.dumps(template) + cf.update_stack(StackName="test_stack", TemplateBody=template_json) + encryption = s3.get_bucket_encryption( + Bucket=stack_description["Outputs"][0]["OutputValue"] + ) + encryption["ServerSideEncryptionConfiguration"]["Rules"][0][ + "ApplyServerSideEncryptionByDefault" + ]["SSEAlgorithm"].should.equal("AES256") + + +@mock_s3 +@mock_cloudformation +def test_s3_bucket_cloudformation_update_replacement(): + s3 = boto3.client("s3", region_name="us-east-1") + cf = boto3.client("cloudformation", region_name="us-east-1") + + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": {"testInstance": {"Type": "AWS::S3::Bucket"}}, + "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}}, + } + template_json = 
json.dumps(template) + cf.create_stack(StackName="test_stack", TemplateBody=template_json) + stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0] + s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"]) + + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "testInstance": { + "Type": "AWS::S3::Bucket", + "Properties": {"BucketName": "MyNewBucketName"}, + } + }, + "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}}, + } + template_json = json.dumps(template) + cf.update_stack(StackName="test_stack", TemplateBody=template_json) + stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0] + s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"]) From 1c939a5f069d9a082f9d0fb8cbb83557b0ed8dbe Mon Sep 17 00:00:00 2001 From: usmangani1 Date: Thu, 27 Aug 2020 21:01:39 +0530 Subject: [PATCH 03/21] Fix:EC2-Create-Subnet availability Zone Id support (#3198) * Fix:EC2-Create-Subnet availability Zone Id support * Linting * Fix:fixed build errors * linting Co-authored-by: Bert Blommers Co-authored-by: Bert Blommers Co-authored-by: usmankb --- moto/ec2/models.py | 33 +++++++++++++++++++++++++-------- moto/ec2/responses/subnets.py | 12 +++++++----- tests/test_ec2/test_subnets.py | 26 +++++++++++++++++++++++--- 3 files changed, 55 insertions(+), 16 deletions(-) diff --git a/moto/ec2/models.py b/moto/ec2/models.py index f0ce89d8a..07a05bbda 100644 --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -3402,7 +3402,14 @@ class SubnetBackend(object): return subnets[subnet_id] raise InvalidSubnetIdError(subnet_id) - def create_subnet(self, vpc_id, cidr_block, availability_zone, context=None): + def create_subnet( + self, + vpc_id, + cidr_block, + availability_zone=None, + availability_zone_id=None, + context=None, + ): subnet_id = random_subnet_id() vpc = self.get_vpc( vpc_id @@ -3430,15 +3437,25 @@ class SubnetBackend(object): # consider it the default default_for_az = str(availability_zone not in self.subnets).lower() map_public_ip_on_launch = default_for_az - if availability_zone is None: + + if availability_zone is None and not availability_zone_id: availability_zone = "us-east-1a" try: - availability_zone_data = next( - zone - for zones in RegionsAndZonesBackend.zones.values() - for zone in zones - if zone.name == availability_zone - ) + if availability_zone: + availability_zone_data = next( + zone + for zones in RegionsAndZonesBackend.zones.values() + for zone in zones + if zone.name == availability_zone + ) + elif availability_zone_id: + availability_zone_data = next( + zone + for zones in RegionsAndZonesBackend.zones.values() + for zone in zones + if zone.zone_id == availability_zone_id + ) + except StopIteration: raise InvalidAvailabilityZoneError( availability_zone, diff --git a/moto/ec2/responses/subnets.py b/moto/ec2/responses/subnets.py index e11984e52..3bad8e12f 100644 --- a/moto/ec2/responses/subnets.py +++ b/moto/ec2/responses/subnets.py @@ -9,12 +9,14 @@ class Subnets(BaseResponse): def create_subnet(self): vpc_id = self._get_param("VpcId") cidr_block = self._get_param("CidrBlock") - availability_zone = self._get_param( - "AvailabilityZone", - if_none=random.choice(self.ec2_backend.describe_availability_zones()).name, - ) + availability_zone = self._get_param("AvailabilityZone") + availability_zone_id = self._get_param("AvailabilityZoneId") + if not availability_zone and not availability_zone_id: + availability_zone = random.choice( + self.ec2_backend.describe_availability_zones() + ).name 
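+        # Exactly one of AvailabilityZone / AvailabilityZoneId is expected;
+        # when the caller supplies neither, a random zone name is chosen above
+        # and the backend resolves it just like an explicit AvailabilityZone.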
subnet = self.ec2_backend.create_subnet( - vpc_id, cidr_block, availability_zone, context=self + vpc_id, cidr_block, availability_zone, availability_zone_id, context=self ) template = self.response_template(CREATE_SUBNET_RESPONSE) return template.render(subnet=subnet) diff --git a/tests/test_ec2/test_subnets.py b/tests/test_ec2/test_subnets.py index eae0bc468..08d404b97 100644 --- a/tests/test_ec2/test_subnets.py +++ b/tests/test_ec2/test_subnets.py @@ -75,6 +75,18 @@ def test_subnet_should_have_proper_availability_zone_set(): subnetA.availability_zone.should.equal("us-west-1b") +@mock_ec2 +def test_availability_zone_in_create_subnet(): + ec2 = boto3.resource("ec2", region_name="us-west-1") + + vpc = ec2.create_vpc(CidrBlock="172.31.0.0/16") + + subnet = ec2.create_subnet( + VpcId=vpc.id, CidrBlock="172.31.48.0/20", AvailabilityZoneId="use1-az6" + ) + subnet.availability_zone_id.should.equal("use1-az6") + + @mock_ec2 def test_default_subnet(): ec2 = boto3.resource("ec2", region_name="us-west-1") @@ -612,7 +624,15 @@ def test_run_instances_should_attach_to_default_subnet(): # Assert subnet is created appropriately subnets = client.describe_subnets()["Subnets"] default_subnet_id = subnets[0]["SubnetId"] - instances["Instances"][0]["NetworkInterfaces"][0]["SubnetId"].should.equal( - default_subnet_id + if len(subnets) > 1: + default_subnet_id1 = subnets[1]["SubnetId"] + assert ( + instances["Instances"][0]["NetworkInterfaces"][0]["SubnetId"] + == default_subnet_id + or instances["Instances"][0]["NetworkInterfaces"][0]["SubnetId"] + == default_subnet_id1 + ) + assert ( + subnets[0]["AvailableIpAddressCount"] == 4090 + or subnets[1]["AvailableIpAddressCount"] == 4090 ) - subnets[0]["AvailableIpAddressCount"].should.equal(4090) From 0a89f9d1dfc3dacea3791533fbc27b7d08a276e9 Mon Sep 17 00:00:00 2001 From: usmangani1 Date: Thu, 27 Aug 2020 22:01:20 +0530 Subject: [PATCH 04/21] Fix:SQS:Added Non existent queue name in ERROR RESPONSE (#3261) * Fix:SQS:Added Non existent queue name in ERROR RESPONSE * Linting Co-authored-by: Bert Blommers --- moto/sqs/responses.py | 11 +++++++++-- tests/test_sqs/test_sqs.py | 8 ++++++-- 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/moto/sqs/responses.py b/moto/sqs/responses.py index 29804256c..5cc77e9fb 100644 --- a/moto/sqs/responses.py +++ b/moto/sqs/responses.py @@ -70,7 +70,10 @@ class SQSResponse(BaseResponse): def call_action(self): status_code, headers, body = super(SQSResponse, self).call_action() if status_code == 404: - return 404, headers, ERROR_INEXISTENT_QUEUE + queue_name = self.querystring.get("QueueName", [""])[0] + template = self.response_template(ERROR_INEXISTENT_QUEUE) + response = template.render(queue_name=queue_name) + return 404, headers, response return status_code, headers, body def _error(self, code, message, status=400): @@ -718,7 +721,11 @@ ERROR_INEXISTENT_QUEUE = """ Date: Tue, 1 Sep 2020 12:55:59 +0530 Subject: [PATCH 05/21] Fix: Api-Gateway ApiKeyAlreadyExists headers change. (#3162) * Fix: Api-Gateway ApiKeyAlreadyExists headers change. 
* Added test for non decorator * Fixed cli errors * Fix:fixed build errors * Fix: assert only in case of non server mode Co-authored-by: usmankb --- moto/apigateway/responses.py | 2 +- tests/test_apigateway/test_apigateway.py | 17 +++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/moto/apigateway/responses.py b/moto/apigateway/responses.py index d8f3ed505..0454ae58e 100644 --- a/moto/apigateway/responses.py +++ b/moto/apigateway/responses.py @@ -449,7 +449,7 @@ class APIGatewayResponse(BaseResponse): except ApiKeyAlreadyExists as error: return ( error.code, - self.headers, + {}, '{{"message":"{0}","code":"{1}"}}'.format( error.message, error.error_type ), diff --git a/tests/test_apigateway/test_apigateway.py b/tests/test_apigateway/test_apigateway.py index d79851ab0..c58d644fa 100644 --- a/tests/test_apigateway/test_apigateway.py +++ b/tests/test_apigateway/test_apigateway.py @@ -1858,6 +1858,23 @@ def test_create_api_key(): client.create_api_key.when.called_with(**payload).should.throw(ClientError) +@mock_apigateway +def test_create_api_headers(): + region_name = "us-west-2" + client = boto3.client("apigateway", region_name=region_name) + + apikey_value = "12345" + apikey_name = "TESTKEY1" + payload = {"value": apikey_value, "name": apikey_name} + + client.create_api_key(**payload) + with assert_raises(ClientError) as ex: + client.create_api_key(**payload) + ex.exception.response["Error"]["Code"].should.equal("ConflictException") + if not settings.TEST_SERVER_MODE: + ex.exception.response["ResponseMetadata"]["HTTPHeaders"].should.equal({}) + + @mock_apigateway def test_api_keys(): region_name = "us-west-2" From 236ab59afeb94a5bffa1447cab3404ae9c32aee7 Mon Sep 17 00:00:00 2001 From: xsphrx <34844540+xsphrx@users.noreply.github.com> Date: Tue, 1 Sep 2020 10:20:31 +0200 Subject: [PATCH 06/21] added cognito-idp initiate_auth and PASSWORD_VERIFIER challenge to respond_to_auth_challenge (#3260) * added cognito-idp initiate_auth and PASSWORD_VERIFIER challenge to respond_to_auth_challenge * fixed for python2 * added mfa, REFRESH_TOKEN to initiate_auth, SOFTWARE_TOKEN_MFA to respond_to_auth_challenge * added negative tests * test --- moto/cognitoidp/exceptions.py | 8 + moto/cognitoidp/models.py | 231 +++++++++++++- moto/cognitoidp/responses.py | 61 +++- moto/cognitoidp/utils.py | 11 + tests/test_cognitoidp/test_cognitoidp.py | 372 +++++++++++++++++++++++ 5 files changed, 681 insertions(+), 2 deletions(-) diff --git a/moto/cognitoidp/exceptions.py b/moto/cognitoidp/exceptions.py index c9b6368ca..baf5f6526 100644 --- a/moto/cognitoidp/exceptions.py +++ b/moto/cognitoidp/exceptions.py @@ -45,6 +45,14 @@ class NotAuthorizedError(BadRequest): ) +class UserNotConfirmedException(BadRequest): + def __init__(self, message): + super(UserNotConfirmedException, self).__init__() + self.description = json.dumps( + {"message": message, "__type": "UserNotConfirmedException"} + ) + + class InvalidParameterException(JsonRESTError): def __init__(self, msg=None): self.code = 400 diff --git a/moto/cognitoidp/models.py b/moto/cognitoidp/models.py index a3cb69084..bfa7177f1 100644 --- a/moto/cognitoidp/models.py +++ b/moto/cognitoidp/models.py @@ -21,13 +21,15 @@ from .exceptions import ( ResourceNotFoundError, UserNotFoundError, UsernameExistsException, + UserNotConfirmedException, InvalidParameterException, ) -from .utils import create_id +from .utils import create_id, check_secret_hash UserStatus = { "FORCE_CHANGE_PASSWORD": "FORCE_CHANGE_PASSWORD", "CONFIRMED": "CONFIRMED", + 
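+    # "UNCONFIRMED" is new in this patch: sign_up() creates users in this
+    # state until confirm_sign_up() promotes them to "CONFIRMED".
+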
"UNCONFIRMED": "UNCONFIRMED", } @@ -300,6 +302,9 @@ class CognitoIdpUser(BaseModel): self.attributes = attributes self.create_date = datetime.datetime.utcnow() self.last_modified_date = datetime.datetime.utcnow() + self.sms_mfa_enabled = False + self.software_token_mfa_enabled = False + self.token_verified = False # Groups this user is a member of. # Note that these links are bidirectional. @@ -316,6 +321,11 @@ class CognitoIdpUser(BaseModel): # list_users brings back "Attributes" while admin_get_user brings back "UserAttributes". def to_json(self, extended=False, attributes_key="Attributes"): + user_mfa_setting_list = [] + if self.software_token_mfa_enabled: + user_mfa_setting_list.append("SOFTWARE_TOKEN_MFA") + elif self.sms_mfa_enabled: + user_mfa_setting_list.append("SMS_MFA") user_json = self._base_json() if extended: user_json.update( @@ -323,6 +333,7 @@ class CognitoIdpUser(BaseModel): "Enabled": self.enabled, attributes_key: self.attributes, "MFAOptions": [], + "UserMFASettingList": user_mfa_setting_list, } ) @@ -731,6 +742,9 @@ class CognitoIdpBackend(BaseBackend): def respond_to_auth_challenge( self, session, client_id, challenge_name, challenge_responses ): + if challenge_name == "PASSWORD_VERIFIER": + session = challenge_responses.get("PASSWORD_CLAIM_SECRET_BLOCK") + user_pool = self.sessions.get(session) if not user_pool: raise ResourceNotFoundError(session) @@ -751,6 +765,62 @@ class CognitoIdpBackend(BaseBackend): del self.sessions[session] return self._log_user_in(user_pool, client, username) + elif challenge_name == "PASSWORD_VERIFIER": + username = challenge_responses.get("USERNAME") + user = user_pool.users.get(username) + if not user: + raise UserNotFoundError(username) + + password_claim_signature = challenge_responses.get( + "PASSWORD_CLAIM_SIGNATURE" + ) + if not password_claim_signature: + raise ResourceNotFoundError(password_claim_signature) + password_claim_secret_block = challenge_responses.get( + "PASSWORD_CLAIM_SECRET_BLOCK" + ) + if not password_claim_secret_block: + raise ResourceNotFoundError(password_claim_secret_block) + timestamp = challenge_responses.get("TIMESTAMP") + if not timestamp: + raise ResourceNotFoundError(timestamp) + + if user.software_token_mfa_enabled: + return { + "ChallengeName": "SOFTWARE_TOKEN_MFA", + "Session": session, + "ChallengeParameters": {}, + } + + if user.sms_mfa_enabled: + return { + "ChallengeName": "SMS_MFA", + "Session": session, + "ChallengeParameters": {}, + } + + del self.sessions[session] + return self._log_user_in(user_pool, client, username) + elif challenge_name == "SOFTWARE_TOKEN_MFA": + username = challenge_responses.get("USERNAME") + user = user_pool.users.get(username) + if not user: + raise UserNotFoundError(username) + + software_token_mfa_code = challenge_responses.get("SOFTWARE_TOKEN_MFA_CODE") + if not software_token_mfa_code: + raise ResourceNotFoundError(software_token_mfa_code) + + if client.generate_secret: + secret_hash = challenge_responses.get("SECRET_HASH") + if not check_secret_hash( + client.secret, client.id, username, secret_hash + ): + raise NotAuthorizedError(secret_hash) + + del self.sessions[session] + return self._log_user_in(user_pool, client, username) + else: return {} @@ -806,6 +876,165 @@ class CognitoIdpBackend(BaseBackend): user_pool.resource_servers[identifier] = resource_server return resource_server + def sign_up(self, client_id, username, password, attributes): + user_pool = None + for p in self.user_pools.values(): + if client_id in p.clients: + user_pool = p + if user_pool is 
None: + raise ResourceNotFoundError(client_id) + + user = CognitoIdpUser( + user_pool_id=user_pool.id, + username=username, + password=password, + attributes=attributes, + status=UserStatus["UNCONFIRMED"], + ) + user_pool.users[user.username] = user + return user + + def confirm_sign_up(self, client_id, username, confirmation_code): + user_pool = None + for p in self.user_pools.values(): + if client_id in p.clients: + user_pool = p + if user_pool is None: + raise ResourceNotFoundError(client_id) + + if username not in user_pool.users: + raise UserNotFoundError(username) + + user = user_pool.users[username] + user.status = UserStatus["CONFIRMED"] + return "" + + def initiate_auth(self, client_id, auth_flow, auth_parameters): + user_pool = None + for p in self.user_pools.values(): + if client_id in p.clients: + user_pool = p + if user_pool is None: + raise ResourceNotFoundError(client_id) + + client = p.clients.get(client_id) + + if auth_flow == "USER_SRP_AUTH": + username = auth_parameters.get("USERNAME") + srp_a = auth_parameters.get("SRP_A") + if not srp_a: + raise ResourceNotFoundError(srp_a) + if client.generate_secret: + secret_hash = auth_parameters.get("SECRET_HASH") + if not check_secret_hash( + client.secret, client.id, username, secret_hash + ): + raise NotAuthorizedError(secret_hash) + + user = user_pool.users.get(username) + if not user: + raise UserNotFoundError(username) + + if user.status == UserStatus["UNCONFIRMED"]: + raise UserNotConfirmedException("User is not confirmed.") + + session = str(uuid.uuid4()) + self.sessions[session] = user_pool + + return { + "ChallengeName": "PASSWORD_VERIFIER", + "Session": session, + "ChallengeParameters": { + "SALT": str(uuid.uuid4()), + "SRP_B": str(uuid.uuid4()), + "USERNAME": user.id, + "USER_ID_FOR_SRP": user.id, + "SECRET_BLOCK": session, + }, + } + elif auth_flow == "REFRESH_TOKEN": + refresh_token = auth_parameters.get("REFRESH_TOKEN") + if not refresh_token: + raise ResourceNotFoundError(refresh_token) + + client_id, username = user_pool.refresh_tokens[refresh_token] + if not username: + raise ResourceNotFoundError(username) + + if client.generate_secret: + secret_hash = auth_parameters.get("SECRET_HASH") + if not check_secret_hash( + client.secret, client.id, username, secret_hash + ): + raise NotAuthorizedError(secret_hash) + + ( + id_token, + access_token, + expires_in, + ) = user_pool.create_tokens_from_refresh_token(refresh_token) + + return { + "AuthenticationResult": { + "IdToken": id_token, + "AccessToken": access_token, + "ExpiresIn": expires_in, + } + } + else: + return None + + def associate_software_token(self, access_token): + for user_pool in self.user_pools.values(): + if access_token in user_pool.access_tokens: + _, username = user_pool.access_tokens[access_token] + user = user_pool.users.get(username) + if not user: + raise UserNotFoundError(username) + + return {"SecretCode": str(uuid.uuid4())} + else: + raise NotAuthorizedError(access_token) + + def verify_software_token(self, access_token, user_code): + for user_pool in self.user_pools.values(): + if access_token in user_pool.access_tokens: + _, username = user_pool.access_tokens[access_token] + user = user_pool.users.get(username) + if not user: + raise UserNotFoundError(username) + + user.token_verified = True + + return {"Status": "SUCCESS"} + else: + raise NotAuthorizedError(access_token) + + def set_user_mfa_preference( + self, access_token, software_token_mfa_settings, sms_mfa_settings + ): + for user_pool in self.user_pools.values(): + if access_token in 
user_pool.access_tokens: + _, username = user_pool.access_tokens[access_token] + user = user_pool.users.get(username) + if not user: + raise UserNotFoundError(username) + + if software_token_mfa_settings["Enabled"]: + if user.token_verified: + user.software_token_mfa_enabled = True + else: + raise InvalidParameterException( + "User has not verified software token mfa" + ) + + elif sms_mfa_settings["Enabled"]: + user.sms_mfa_enabled = True + + return None + else: + raise NotAuthorizedError(access_token) + cognitoidp_backends = {} for region in Session().get_available_regions("cognito-idp"): diff --git a/moto/cognitoidp/responses.py b/moto/cognitoidp/responses.py index 972ba883a..f3c005ff5 100644 --- a/moto/cognitoidp/responses.py +++ b/moto/cognitoidp/responses.py @@ -4,7 +4,7 @@ import json import os from moto.core.responses import BaseResponse -from .models import cognitoidp_backends, find_region_by_value +from .models import cognitoidp_backends, find_region_by_value, UserStatus class CognitoIdpResponse(BaseResponse): @@ -390,6 +390,65 @@ class CognitoIdpResponse(BaseResponse): ) return json.dumps({"ResourceServer": resource_server.to_json()}) + def sign_up(self): + client_id = self._get_param("ClientId") + username = self._get_param("Username") + password = self._get_param("Password") + user = cognitoidp_backends[self.region].sign_up( + client_id=client_id, + username=username, + password=password, + attributes=self._get_param("UserAttributes", []), + ) + return json.dumps( + { + "UserConfirmed": user.status == UserStatus["CONFIRMED"], + "UserSub": user.id, + } + ) + + def confirm_sign_up(self): + client_id = self._get_param("ClientId") + username = self._get_param("Username") + confirmation_code = self._get_param("ConfirmationCode") + cognitoidp_backends[self.region].confirm_sign_up( + client_id=client_id, username=username, confirmation_code=confirmation_code, + ) + return "" + + def initiate_auth(self): + client_id = self._get_param("ClientId") + auth_flow = self._get_param("AuthFlow") + auth_parameters = self._get_param("AuthParameters") + + auth_result = cognitoidp_backends[self.region].initiate_auth( + client_id, auth_flow, auth_parameters + ) + + return json.dumps(auth_result) + + def associate_software_token(self): + access_token = self._get_param("AccessToken") + result = cognitoidp_backends[self.region].associate_software_token(access_token) + return json.dumps(result) + + def verify_software_token(self): + access_token = self._get_param("AccessToken") + user_code = self._get_param("UserCode") + result = cognitoidp_backends[self.region].verify_software_token( + access_token, user_code + ) + return json.dumps(result) + + def set_user_mfa_preference(self): + access_token = self._get_param("AccessToken") + software_token_mfa_settings = self._get_param("SoftwareTokenMfaSettings") + sms_mfa_settings = self._get_param("SMSMfaSettings") + cognitoidp_backends[self.region].set_user_mfa_preference( + access_token, software_token_mfa_settings, sms_mfa_settings + ) + return "" + class CognitoIdpJsonWebKeyResponse(BaseResponse): def __init__(self): diff --git a/moto/cognitoidp/utils.py b/moto/cognitoidp/utils.py index 5f5fe4f8f..11f34bcae 100644 --- a/moto/cognitoidp/utils.py +++ b/moto/cognitoidp/utils.py @@ -2,9 +2,20 @@ from __future__ import unicode_literals import six import random import string +import hashlib +import hmac +import base64 def create_id(): size = 26 chars = list(range(10)) + list(string.ascii_lowercase) return "".join(six.text_type(random.choice(chars)) for x in 
range(size)) + + +def check_secret_hash(app_client_secret, app_client_id, username, secret_hash): + key = bytes(str(app_client_secret).encode("latin-1")) + msg = bytes(str(username + app_client_id).encode("latin-1")) + new_digest = hmac.new(key, msg, hashlib.sha256).digest() + SECRET_HASH = base64.b64encode(new_digest).decode() + return SECRET_HASH == secret_hash diff --git a/tests/test_cognitoidp/test_cognitoidp.py b/tests/test_cognitoidp/test_cognitoidp.py index 39875aeb4..65c5151e3 100644 --- a/tests/test_cognitoidp/test_cognitoidp.py +++ b/tests/test_cognitoidp/test_cognitoidp.py @@ -4,6 +4,9 @@ import json import os import random import re +import hmac +import hashlib +import base64 import requests import uuid @@ -1248,6 +1251,137 @@ def test_authentication_flow(): authentication_flow(conn) +def user_authentication_flow(conn): + username = str(uuid.uuid4()) + password = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + user_attribute_name = str(uuid.uuid4()) + user_attribute_value = str(uuid.uuid4()) + client_id = conn.create_user_pool_client( + UserPoolId=user_pool_id, + ClientName=str(uuid.uuid4()), + ReadAttributes=[user_attribute_name], + GenerateSecret=True, + )["UserPoolClient"]["ClientId"] + + conn.sign_up( + ClientId=client_id, Username=username, Password=password, + ) + + client_secret = conn.describe_user_pool_client( + UserPoolId=user_pool_id, ClientId=client_id, + )["UserPoolClient"]["ClientSecret"] + + conn.confirm_sign_up( + ClientId=client_id, Username=username, ConfirmationCode="123456", + ) + + # generating secret hash + key = bytes(str(client_secret).encode("latin-1")) + msg = bytes(str(username + client_id).encode("latin-1")) + new_digest = hmac.new(key, msg, hashlib.sha256).digest() + secret_hash = base64.b64encode(new_digest).decode() + + result = conn.initiate_auth( + ClientId=client_id, + AuthFlow="USER_SRP_AUTH", + AuthParameters={ + "USERNAME": username, + "SRP_A": str(uuid.uuid4()), + "SECRET_HASH": secret_hash, + }, + ) + + result = conn.respond_to_auth_challenge( + ClientId=client_id, + ChallengeName=result["ChallengeName"], + ChallengeResponses={ + "PASSWORD_CLAIM_SIGNATURE": str(uuid.uuid4()), + "PASSWORD_CLAIM_SECRET_BLOCK": result["Session"], + "TIMESTAMP": str(uuid.uuid4()), + "USERNAME": username, + }, + ) + + refresh_token = result["AuthenticationResult"]["RefreshToken"] + + # add mfa token + conn.associate_software_token( + AccessToken=result["AuthenticationResult"]["AccessToken"], + ) + + conn.verify_software_token( + AccessToken=result["AuthenticationResult"]["AccessToken"], UserCode="123456", + ) + + conn.set_user_mfa_preference( + AccessToken=result["AuthenticationResult"]["AccessToken"], + SoftwareTokenMfaSettings={"Enabled": True, "PreferredMfa": True,}, + ) + + result = conn.initiate_auth( + ClientId=client_id, + AuthFlow="REFRESH_TOKEN", + AuthParameters={"SECRET_HASH": secret_hash, "REFRESH_TOKEN": refresh_token,}, + ) + + result["AuthenticationResult"]["IdToken"].should_not.be.none + result["AuthenticationResult"]["AccessToken"].should_not.be.none + + # authenticate user once again this time with mfa token + result = conn.initiate_auth( + ClientId=client_id, + AuthFlow="USER_SRP_AUTH", + AuthParameters={ + "USERNAME": username, + "SRP_A": str(uuid.uuid4()), + "SECRET_HASH": secret_hash, + }, + ) + + result = conn.respond_to_auth_challenge( + ClientId=client_id, + ChallengeName=result["ChallengeName"], + ChallengeResponses={ + "PASSWORD_CLAIM_SIGNATURE": str(uuid.uuid4()), + 
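+            # moto does not implement real SRP, so the claim signature and
+            # timestamp in this dict are arbitrary placeholders; the mock
+            # backend only tracks the Session it issued with the
+            # PASSWORD_VERIFIER challenge.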
"PASSWORD_CLAIM_SECRET_BLOCK": result["Session"], + "TIMESTAMP": str(uuid.uuid4()), + "USERNAME": username, + }, + ) + + result = conn.respond_to_auth_challenge( + ClientId=client_id, + Session=result["Session"], + ChallengeName=result["ChallengeName"], + ChallengeResponses={ + "SOFTWARE_TOKEN_MFA_CODE": "123456", + "USERNAME": username, + "SECRET_HASH": secret_hash, + }, + ) + + return { + "user_pool_id": user_pool_id, + "client_id": client_id, + "client_secret": client_secret, + "secret_hash": secret_hash, + "id_token": result["AuthenticationResult"]["IdToken"], + "access_token": result["AuthenticationResult"]["AccessToken"], + "refresh_token": refresh_token, + "username": username, + "password": password, + "additional_fields": {user_attribute_name: user_attribute_value}, + } + + +@mock_cognitoidp +def test_user_authentication_flow(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_authentication_flow(conn) + + @mock_cognitoidp def test_token_legitimacy(): conn = boto3.client("cognito-idp", "us-west-2") @@ -1437,6 +1571,244 @@ def test_resource_server(): ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400) +@mock_cognitoidp +def test_sign_up(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + client_id = conn.create_user_pool_client( + UserPoolId=user_pool_id, ClientName=str(uuid.uuid4()), + )["UserPoolClient"]["ClientId"] + username = str(uuid.uuid4()) + password = str(uuid.uuid4()) + result = conn.sign_up(ClientId=client_id, Username=username, Password=password) + result["UserConfirmed"].should.be.false + result["UserSub"].should_not.be.none + + +@mock_cognitoidp +def test_confirm_sign_up(): + conn = boto3.client("cognito-idp", "us-west-2") + username = str(uuid.uuid4()) + password = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + client_id = conn.create_user_pool_client( + UserPoolId=user_pool_id, ClientName=str(uuid.uuid4()), GenerateSecret=True, + )["UserPoolClient"]["ClientId"] + conn.sign_up(ClientId=client_id, Username=username, Password=password) + + conn.confirm_sign_up( + ClientId=client_id, Username=username, ConfirmationCode="123456", + ) + + result = conn.admin_get_user(UserPoolId=user_pool_id, Username=username) + result["UserStatus"].should.equal("CONFIRMED") + + +@mock_cognitoidp +def test_initiate_auth_USER_SRP_AUTH(): + conn = boto3.client("cognito-idp", "us-west-2") + username = str(uuid.uuid4()) + password = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + client_id = conn.create_user_pool_client( + UserPoolId=user_pool_id, ClientName=str(uuid.uuid4()), GenerateSecret=True, + )["UserPoolClient"]["ClientId"] + conn.sign_up(ClientId=client_id, Username=username, Password=password) + client_secret = conn.describe_user_pool_client( + UserPoolId=user_pool_id, ClientId=client_id, + )["UserPoolClient"]["ClientSecret"] + conn.confirm_sign_up( + ClientId=client_id, Username=username, ConfirmationCode="123456", + ) + + key = bytes(str(client_secret).encode("latin-1")) + msg = bytes(str(username + client_id).encode("latin-1")) + new_digest = hmac.new(key, msg, hashlib.sha256).digest() + secret_hash = base64.b64encode(new_digest).decode() + + result = conn.initiate_auth( + ClientId=client_id, + AuthFlow="USER_SRP_AUTH", + AuthParameters={ + "USERNAME": username, + "SRP_A": str(uuid.uuid4()), + "SECRET_HASH": secret_hash, + }, + ) + + 
result["ChallengeName"].should.equal("PASSWORD_VERIFIER") + + +@mock_cognitoidp +def test_initiate_auth_REFRESH_TOKEN(): + conn = boto3.client("cognito-idp", "us-west-2") + result = user_authentication_flow(conn) + result = conn.initiate_auth( + ClientId=result["client_id"], + AuthFlow="REFRESH_TOKEN", + AuthParameters={ + "REFRESH_TOKEN": result["refresh_token"], + "SECRET_HASH": result["secret_hash"], + }, + ) + + result["AuthenticationResult"]["AccessToken"].should_not.be.none + + +@mock_cognitoidp +def test_initiate_auth_for_unconfirmed_user(): + conn = boto3.client("cognito-idp", "us-west-2") + username = str(uuid.uuid4()) + password = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + client_id = conn.create_user_pool_client( + UserPoolId=user_pool_id, ClientName=str(uuid.uuid4()), GenerateSecret=True, + )["UserPoolClient"]["ClientId"] + conn.sign_up(ClientId=client_id, Username=username, Password=password) + client_secret = conn.describe_user_pool_client( + UserPoolId=user_pool_id, ClientId=client_id, + )["UserPoolClient"]["ClientSecret"] + + key = bytes(str(client_secret).encode("latin-1")) + msg = bytes(str(username + client_id).encode("latin-1")) + new_digest = hmac.new(key, msg, hashlib.sha256).digest() + secret_hash = base64.b64encode(new_digest).decode() + + caught = False + try: + result = conn.initiate_auth( + ClientId=client_id, + AuthFlow="USER_SRP_AUTH", + AuthParameters={ + "USERNAME": username, + "SRP_A": str(uuid.uuid4()), + "SECRET_HASH": secret_hash, + }, + ) + except conn.exceptions.UserNotConfirmedException: + caught = True + + caught.should.be.true + + +@mock_cognitoidp +def test_initiate_auth_with_invalid_secret_hash(): + conn = boto3.client("cognito-idp", "us-west-2") + username = str(uuid.uuid4()) + password = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + client_id = conn.create_user_pool_client( + UserPoolId=user_pool_id, ClientName=str(uuid.uuid4()), GenerateSecret=True, + )["UserPoolClient"]["ClientId"] + conn.sign_up(ClientId=client_id, Username=username, Password=password) + client_secret = conn.describe_user_pool_client( + UserPoolId=user_pool_id, ClientId=client_id, + )["UserPoolClient"]["ClientSecret"] + conn.confirm_sign_up( + ClientId=client_id, Username=username, ConfirmationCode="123456", + ) + + invalid_secret_hash = str(uuid.uuid4()) + + caught = False + try: + result = conn.initiate_auth( + ClientId=client_id, + AuthFlow="USER_SRP_AUTH", + AuthParameters={ + "USERNAME": username, + "SRP_A": str(uuid.uuid4()), + "SECRET_HASH": invalid_secret_hash, + }, + ) + except conn.exceptions.NotAuthorizedException: + caught = True + + caught.should.be.true + + +@mock_cognitoidp +def test_setting_mfa(): + conn = boto3.client("cognito-idp", "us-west-2") + result = authentication_flow(conn) + conn.associate_software_token(AccessToken=result["access_token"]) + conn.verify_software_token(AccessToken=result["access_token"], UserCode="123456") + conn.set_user_mfa_preference( + AccessToken=result["access_token"], + SoftwareTokenMfaSettings={"Enabled": True, "PreferredMfa": True}, + ) + result = conn.admin_get_user( + UserPoolId=result["user_pool_id"], Username=result["username"] + ) + + result["UserMFASettingList"].should.have.length_of(1) + + +@mock_cognitoidp +def test_setting_mfa_when_token_not_verified(): + conn = boto3.client("cognito-idp", "us-west-2") + result = authentication_flow(conn) + 
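+    # Associate a software token but deliberately skip verify_software_token(),
+    # so user.token_verified is never set; set_user_mfa_preference() below
+    # must then raise InvalidParameterException.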
conn.associate_software_token(AccessToken=result["access_token"]) + + caught = False + try: + conn.set_user_mfa_preference( + AccessToken=result["access_token"], + SoftwareTokenMfaSettings={"Enabled": True, "PreferredMfa": True}, + ) + except conn.exceptions.InvalidParameterException: + caught = True + + caught.should.be.true + + +@mock_cognitoidp +def test_respond_to_auth_challenge_with_invalid_secret_hash(): + conn = boto3.client("cognito-idp", "us-west-2") + result = user_authentication_flow(conn) + + valid_secret_hash = result["secret_hash"] + invalid_secret_hash = str(uuid.uuid4()) + + challenge = conn.initiate_auth( + ClientId=result["client_id"], + AuthFlow="USER_SRP_AUTH", + AuthParameters={ + "USERNAME": result["username"], + "SRP_A": str(uuid.uuid4()), + "SECRET_HASH": valid_secret_hash, + }, + ) + + challenge = conn.respond_to_auth_challenge( + ClientId=result["client_id"], + ChallengeName=challenge["ChallengeName"], + ChallengeResponses={ + "PASSWORD_CLAIM_SIGNATURE": str(uuid.uuid4()), + "PASSWORD_CLAIM_SECRET_BLOCK": challenge["Session"], + "TIMESTAMP": str(uuid.uuid4()), + "USERNAME": result["username"], + }, + ) + + caught = False + try: + conn.respond_to_auth_challenge( + ClientId=result["client_id"], + Session=challenge["Session"], + ChallengeName=challenge["ChallengeName"], + ChallengeResponses={ + "SOFTWARE_TOKEN_MFA_CODE": "123456", + "USERNAME": result["username"], + "SECRET_HASH": invalid_secret_hash, + }, + ) + except conn.exceptions.NotAuthorizedException: + caught = True + + caught.should.be.true + + # Test will retrieve public key from cognito.amazonaws.com/.well-known/jwks.json, # which isnt mocked in ServerMode if not settings.TEST_SERVER_MODE: From 94c676b9cf19dba4a67fefbce0bc3bc885b8b5fa Mon Sep 17 00:00:00 2001 From: Peter Baumgartner Date: Tue, 1 Sep 2020 03:24:08 -0600 Subject: [PATCH 07/21] include=["TAGS"] for describe_task_definition (#3265) * include=["TAGS"] for describe_task_definition * Different approach * describe_services tags and tests --- moto/ecs/responses.py | 22 ++++++++++++++-------- tests/test_ecs/test_ecs_boto3.py | 19 +++++++++++++++++++ 2 files changed, 33 insertions(+), 8 deletions(-) diff --git a/moto/ecs/responses.py b/moto/ecs/responses.py index e911bb943..15d2f0c4b 100644 --- a/moto/ecs/responses.py +++ b/moto/ecs/responses.py @@ -87,7 +87,10 @@ class EC2ContainerServiceResponse(BaseResponse): def describe_task_definition(self): task_definition_str = self._get_param("taskDefinition") data = self.ecs_backend.describe_task_definition(task_definition_str) - return json.dumps({"taskDefinition": data.response_object, "failures": []}) + resp = {"taskDefinition": data.response_object, "failures": []} + if "TAGS" in self._get_param("include", []): + resp["tags"] = self.ecs_backend.list_tags_for_resource(data.arn) + return json.dumps(resp) def deregister_task_definition(self): task_definition_str = self._get_param("taskDefinition") @@ -191,13 +194,16 @@ class EC2ContainerServiceResponse(BaseResponse): cluster_str = self._get_param("cluster") service_names = self._get_param("services") services = self.ecs_backend.describe_services(cluster_str, service_names) - - return json.dumps( - { - "services": [service.response_object for service in services], - "failures": [], - } - ) + resp = { + "services": [service.response_object for service in services], + "failures": [], + } + if "TAGS" in self._get_param("include", []): + for i, service in enumerate(services): + resp["services"][i]["tags"] = self.ecs_backend.list_tags_for_resource( + service.arn 
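+                    # list_tags_for_resource returns [] for services that
+                    # were created without tags (asserted in the updated
+                    # describe_services test below)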
+ ) + return json.dumps(resp) def update_service(self): cluster_str = self._get_param("cluster") diff --git a/tests/test_ecs/test_ecs_boto3.py b/tests/test_ecs/test_ecs_boto3.py index d9360df92..d46c8b983 100644 --- a/tests/test_ecs/test_ecs_boto3.py +++ b/tests/test_ecs/test_ecs_boto3.py @@ -254,6 +254,7 @@ def test_describe_task_definition(): "logConfiguration": {"logDriver": "json-file"}, } ], + tags=[{"key": "Name", "value": "test_ecs_task"}], ) _ = client.register_task_definition( family="test_ecs_task", @@ -297,6 +298,11 @@ def test_describe_task_definition(): "arn:aws:ecs:us-east-1:012345678910:task-definition/test_ecs_task:2" ) + response = client.describe_task_definition( + taskDefinition="test_ecs_task:1", include=["TAGS"] + ) + response["tags"].should.equal([{"key": "Name", "value": "test_ecs_task"}]) + @mock_ecs def test_deregister_task_definition(): @@ -512,6 +518,7 @@ def test_describe_services(): serviceName="test_ecs_service1", taskDefinition="test_ecs_task", desiredCount=2, + tags=[{"key": "Name", "value": "test_ecs_service1"}], ) _ = client.create_service( cluster="test_ecs_cluster", @@ -554,6 +561,18 @@ def test_describe_services(): datetime.now() - response["services"][0]["deployments"][0]["updatedAt"].replace(tzinfo=None) ).seconds.should.be.within(0, 10) + response = client.describe_services( + cluster="test_ecs_cluster", + services=[ + "test_ecs_service1", + "arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service2", + ], + include=["TAGS"], + ) + response["services"][0]["tags"].should.equal( + [{"key": "Name", "value": "test_ecs_service1"}] + ) + response["services"][1]["tags"].should.equal([]) @mock_ecs From 127b3e73e91bebf470a53efc52888c341b95f6d8 Mon Sep 17 00:00:00 2001 From: Toshiya Kawasaki Date: Tue, 1 Sep 2020 19:44:13 +0900 Subject: [PATCH 08/21] Fix scaffold.py (#3270) * upgrade prompt-toolkit to make scaffold.py work * update append_mock_to_init_py * enable to run when method name is not upper camel case * support new moto/backend.py format * use prompt-toolkit 2.x.x to support python2 * fix invalid initialization of moto/backends.py --- requirements-dev.txt | 2 +- scripts/scaffold.py | 52 +++++++++++++++++++++----------------------- 2 files changed, 26 insertions(+), 28 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index e40a568a5..8a91eb14f 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -12,7 +12,7 @@ boto3>=1.4.4 botocore>=1.15.13 six>=1.9 parameterized>=0.7.0 -prompt-toolkit==1.0.14 +prompt-toolkit==2.0.10 # 3.x is not available with python2 click==6.7 inflection==0.3.1 lxml==4.2.3 diff --git a/scripts/scaffold.py b/scripts/scaffold.py index 43a648b48..de6781b3f 100755 --- a/scripts/scaffold.py +++ b/scripts/scaffold.py @@ -114,12 +114,12 @@ def append_mock_to_init_py(service): with open(path) as f: lines = [_.replace('\n', '') for _ in f.readlines()] - if any(_ for _ in lines if re.match('^from.*mock_{}.*$'.format(service), _)): + if any(_ for _ in lines if re.match('^mock_{}.*lazy_load(.*)$'.format(service), _)): return - filtered_lines = [_ for _ in lines if re.match('^from.*mock.*$', _)] + filtered_lines = [_ for _ in lines if re.match('^mock_.*lazy_load(.*)$', _)] last_import_line_index = lines.index(filtered_lines[-1]) - new_line = 'from .{} import mock_{} # noqa'.format(get_escaped_service(service), get_escaped_service(service)) + new_line = 'mock_{} = lazy_load(".{}", "mock_{}")'.format(get_escaped_service(service), get_escaped_service(service), get_escaped_service(service)) 
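+    # e.g. for a service named "codedeploy" this yields:
+    #     mock_codedeploy = lazy_load(".codedeploy", "mock_codedeploy")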
lines.insert(last_import_line_index + 1, new_line) body = '\n'.join(lines) + '\n' @@ -127,23 +127,6 @@ def append_mock_to_init_py(service): f.write(body) -def append_mock_import_to_backends_py(service): - path = os.path.join(os.path.dirname(__file__), '..', 'moto', 'backends.py') - with open(path) as f: - lines = [_.replace('\n', '') for _ in f.readlines()] - - if any(_ for _ in lines if re.match('^from moto\.{}.*{}_backends.*$'.format(service, service), _)): - return - filtered_lines = [_ for _ in lines if re.match('^from.*backends.*$', _)] - last_import_line_index = lines.index(filtered_lines[-1]) - - new_line = 'from moto.{} import {}_backends'.format(get_escaped_service(service), get_escaped_service(service)) - lines.insert(last_import_line_index + 1, new_line) - - body = '\n'.join(lines) + '\n' - with open(path, 'w') as f: - f.write(body) - def append_mock_dict_to_backends_py(service): path = os.path.join(os.path.dirname(__file__), '..', 'moto', 'backends.py') with open(path) as f: @@ -154,7 +137,7 @@ def append_mock_dict_to_backends_py(service): filtered_lines = [_ for _ in lines if re.match(".*\".*\":.*_backends.*", _)] last_elem_line_index = lines.index(filtered_lines[-1]) - new_line = " \"{}\": {}_backends,".format(service, get_escaped_service(service)) + new_line = " \"{}\": (\"{}\", \"{}_backends\"),".format(service, get_escaped_service(service), get_escaped_service(service)) prev_line = lines[last_elem_line_index] if not prev_line.endswith('{') and not prev_line.endswith(','): lines[last_elem_line_index] += ',' @@ -212,7 +195,6 @@ def initialize_service(service, operation, api_protocol): # append mock to init files append_mock_to_init_py(service) - append_mock_import_to_backends_py(service) append_mock_dict_to_backends_py(service) @@ -229,6 +211,9 @@ def to_snake_case(s): s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', s) return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower() +def get_operation_name_in_keys(operation_name, operation_keys): + index = [_.lower() for _ in operation_keys].index(operation_name.lower()) + return operation_keys[index] def get_function_in_responses(service, operation, protocol): """refers to definition of API in botocore, and autogenerates function @@ -237,7 +222,11 @@ def get_function_in_responses(service, operation, protocol): """ client = boto3.client(service) - aws_operation_name = to_upper_camel_case(operation) + aws_operation_name = get_operation_name_in_keys( + to_upper_camel_case(operation), + list(client._service_model._service_description['operations'].keys()) + ) + op_model = client._service_model.operation_model(aws_operation_name) if not hasattr(op_model.output_shape, 'members'): outputs = {} @@ -282,7 +271,10 @@ def get_function_in_models(service, operation): https://github.com/boto/botocore/blob/develop/botocore/data/elbv2/2015-12-01/service-2.json """ client = boto3.client(service) - aws_operation_name = to_upper_camel_case(operation) + aws_operation_name = get_operation_name_in_keys( + to_upper_camel_case(operation), + list(client._service_model._service_description['operations'].keys()) + ) op_model = client._service_model.operation_model(aws_operation_name) inputs = op_model.input_shape.members if not hasattr(op_model.output_shape, 'members'): @@ -329,7 +321,11 @@ def get_response_query_template(service, operation): https://github.com/boto/botocore/blob/develop/botocore/data/elbv2/2015-12-01/service-2.json """ client = boto3.client(service) - aws_operation_name = to_upper_camel_case(operation) + aws_operation_name = 
get_operation_name_in_keys( + to_upper_camel_case(operation), + list(client._service_model._service_description['operations'].keys()) + ) + op_model = client._service_model.operation_model(aws_operation_name) result_wrapper = op_model.output_shape.serialization['resultWrapper'] response_wrapper = result_wrapper.replace('Result', 'Response') @@ -403,11 +399,13 @@ def insert_code_to_class(path, base_class, new_code): with open(path, 'w') as f: f.write(body) - def insert_url(service, operation, api_protocol): client = boto3.client(service) service_class = client.__class__.__name__ - aws_operation_name = to_upper_camel_case(operation) + aws_operation_name = get_operation_name_in_keys( + to_upper_camel_case(operation), + list(client._service_model._service_description['operations'].keys()) + ) uri = client._service_model.operation_model(aws_operation_name).http['requestUri'] path = os.path.join(os.path.dirname(__file__), '..', 'moto', get_escaped_service(service), 'urls.py') From 3ea46617d93ff38f7c5adf76a3af6cf733fad564 Mon Sep 17 00:00:00 2001 From: usmangani1 Date: Tue, 1 Sep 2020 22:35:25 +0530 Subject: [PATCH 09/21] Fix:sqs get-queue-attributes response template (#3255) * Fix:sqs get-queue-attributes response template * Fix:removed debug statements * Modified the template * "fixed build issues" * Linting Co-authored-by: usmankb Co-authored-by: Bert Blommers --- moto/sqs/models.py | 3 ++- moto/sqs/responses.py | 10 ++++---- tests/test_sqs/test_sqs.py | 49 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 57 insertions(+), 5 deletions(-) diff --git a/moto/sqs/models.py b/moto/sqs/models.py index 039224f5b..71ca62941 100644 --- a/moto/sqs/models.py +++ b/moto/sqs/models.py @@ -626,7 +626,8 @@ class SQSBackend(BaseBackend): attributes = queue.attributes else: for name in (name for name in attribute_names if name in queue.attributes): - attributes[name] = queue.attributes.get(name) + if queue.attributes.get(name) is not None: + attributes[name] = queue.attributes.get(name) return attributes diff --git a/moto/sqs/responses.py b/moto/sqs/responses.py index 5cc77e9fb..e28fbca8a 100644 --- a/moto/sqs/responses.py +++ b/moto/sqs/responses.py @@ -490,10 +490,12 @@ DELETE_QUEUE_RESPONSE = """ GET_QUEUE_ATTRIBUTES_RESPONSE = """ {% for key, value in attributes.items() %} - - {{ key }} - {{ value }} - + {% if value is not none %} + + {{ key }} + {{ value }} + + {% endif %} {% endfor %} diff --git a/tests/test_sqs/test_sqs.py b/tests/test_sqs/test_sqs.py index 945fe86ae..b072e8b94 100644 --- a/tests/test_sqs/test_sqs.py +++ b/tests/test_sqs/test_sqs.py @@ -45,6 +45,25 @@ sqs_template_with_tags = """ } }""" +TEST_POLICY = """ +{ + "Version":"2012-10-17", + "Statement":[ + { + "Effect": "Allow", + "Principal": { "AWS": "*" }, + "Action": "sqs:SendMessage", + "Resource": "'$sqs_queue_arn'", + "Condition":{ + "ArnEquals":{ + "aws:SourceArn":"'$sns_topic_arn'" + } + } + } + ] +} +""" + @mock_sqs def test_create_fifo_queue_fail(): @@ -1451,6 +1470,36 @@ def test_permissions(): ) +@mock_sqs +def test_get_queue_attributes_template_response_validation(): + client = boto3.client("sqs", region_name="us-east-1") + + resp = client.create_queue( + QueueName="test-dlr-queue.fifo", Attributes={"FifoQueue": "true"} + ) + queue_url = resp["QueueUrl"] + + attrs = client.get_queue_attributes(QueueUrl=queue_url, AttributeNames=["All"]) + assert attrs.get("Attributes").get("Policy") is None + + attributes = {"Policy": TEST_POLICY} + + client.set_queue_attributes(QueueUrl=queue_url, Attributes=attributes) + attrs = 
client.get_queue_attributes(QueueUrl=queue_url, AttributeNames=["Policy"]) + assert attrs.get("Attributes").get("Policy") is not None + + assert ( + json.loads(attrs.get("Attributes").get("Policy")).get("Version") == "2012-10-17" + ) + assert len(json.loads(attrs.get("Attributes").get("Policy")).get("Statement")) == 1 + assert ( + json.loads(attrs.get("Attributes").get("Policy")) + .get("Statement")[0] + .get("Action") + == "sqs:SendMessage" + ) + + @mock_sqs def test_add_permission_errors(): client = boto3.client("sqs", region_name="us-east-1") From 00a5641cb9e6205ca13064ff7184720299fdab64 Mon Sep 17 00:00:00 2001 From: usmangani1 Date: Wed, 2 Sep 2020 11:40:56 +0530 Subject: [PATCH 10/21] Fix:s3 Presign Put Request with File upload (#3235) * Fix:s3 Presign Put Request with File upload * Added imports in test Co-authored-by: usmankb --- moto/s3/responses.py | 5 +++++ tests/test_s3/test_s3.py | 25 ++++++++++++++++++++++++- 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/moto/s3/responses.py b/moto/s3/responses.py index 603571c0d..364ae4623 100644 --- a/moto/s3/responses.py +++ b/moto/s3/responses.py @@ -1092,6 +1092,11 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): else: # Flask server body = request.data + # when the data is being passed as a file + if request.files and not body: + for _, value in request.files.items(): + body = value.stream.read() + if body is None: body = b"" diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index 6622b2f41..078abfa3b 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -3,7 +3,7 @@ from __future__ import unicode_literals import datetime import sys - +import os from boto3 import Session from six.moves.urllib.request import urlopen from six.moves.urllib.error import HTTPError @@ -1054,6 +1054,29 @@ def test_streaming_upload_from_file_to_presigned_url(): assert response.status_code == 200 +@mock_s3 +def test_multipart_upload_from_file_to_presigned_url(): + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) + s3.create_bucket(Bucket="mybucket") + + params = {"Bucket": "mybucket", "Key": "file_upload"} + presigned_url = boto3.client("s3").generate_presigned_url( + "put_object", params, ExpiresIn=900 + ) + + file = open("text.txt", "w") + file.write("test") + file.close() + files = {"upload_file": open("text.txt", "rb")} + + requests.put(presigned_url, files=files) + resp = s3.get_object(Bucket="mybucket", Key="file_upload") + data = resp["Body"].read() + assert data == b"test" + # cleanup + os.remove("text.txt") + + @mock_s3 def test_s3_object_in_private_bucket(): s3 = boto3.resource("s3") From 25161c0c18252b4ab53af9453fa878253dadfc1f Mon Sep 17 00:00:00 2001 From: Toshiya Kawasaki Date: Wed, 2 Sep 2020 16:51:51 +0900 Subject: [PATCH 11/21] Add kinesisvideo (#3271) * kinesisvideo create_stream * add kinesis video stream description * add kinesisvideo describe_stream * add kinesisvideo list_streams * add kinesisvideo delete_stream * remove unused comment * remove duplicated definition * add kinesis video exceptions * pass region_name to kinesisvideo client in test * fix kinesisvideo url path * resolve conflict of kinesisvideo url and kinesis url * specify region name to kinesisvideobackend * Add get-dataendpoint to kinesisvideo * include stream name in ResourceInUseException of kinesisvideo * use ACCOUNT_ID from moto.core in kinesisvideo * add server test for kinesisvideo * split up kinesisvideo test --- moto/__init__.py | 1 + moto/backends.py | 1 + moto/kinesis/urls.py | 3 
+- moto/kinesisvideo/__init__.py | 6 + moto/kinesisvideo/exceptions.py | 24 +++ moto/kinesisvideo/models.py | 147 +++++++++++++++++++ moto/kinesisvideo/responses.py | 70 +++++++++ moto/kinesisvideo/urls.py | 18 +++ tests/test_kinesisvideo/test_kinesisvideo.py | 140 ++++++++++++++++++ tests/test_kinesisvideo/test_server.py | 18 +++ 10 files changed, 427 insertions(+), 1 deletion(-) create mode 100644 moto/kinesisvideo/__init__.py create mode 100644 moto/kinesisvideo/exceptions.py create mode 100644 moto/kinesisvideo/models.py create mode 100644 moto/kinesisvideo/responses.py create mode 100644 moto/kinesisvideo/urls.py create mode 100644 tests/test_kinesisvideo/test_kinesisvideo.py create mode 100644 tests/test_kinesisvideo/test_server.py diff --git a/moto/__init__.py b/moto/__init__.py index 7d841fbbc..da66d9c61 100644 --- a/moto/__init__.py +++ b/moto/__init__.py @@ -113,6 +113,7 @@ mock_swf_deprecated = lazy_load(".swf", "mock_swf_deprecated") XRaySegment = lazy_load(".xray", "XRaySegment") mock_xray = lazy_load(".xray", "mock_xray") mock_xray_client = lazy_load(".xray", "mock_xray_client") +mock_kinesisvideo = lazy_load(".kinesisvideo", "mock_kinesisvideo") # import logging # logging.getLogger('boto').setLevel(logging.CRITICAL) diff --git a/moto/backends.py b/moto/backends.py index 4252bfd95..9216d4615 100644 --- a/moto/backends.py +++ b/moto/backends.py @@ -69,6 +69,7 @@ BACKENDS = { "sts": ("sts", "sts_backends"), "swf": ("swf", "swf_backends"), "xray": ("xray", "xray_backends"), + "kinesisvideo": ("kinesisvideo", "kinesisvideo_backends"), } diff --git a/moto/kinesis/urls.py b/moto/kinesis/urls.py index c95f03190..a33225d60 100644 --- a/moto/kinesis/urls.py +++ b/moto/kinesis/urls.py @@ -2,7 +2,8 @@ from __future__ import unicode_literals from .responses import KinesisResponse url_bases = [ - "https?://kinesis.(.+).amazonaws.com", + # Need to avoid conflicting with kinesisvideo + r"https?://kinesis\.(.+).amazonaws.com", "https?://firehose.(.+).amazonaws.com", ] diff --git a/moto/kinesisvideo/__init__.py b/moto/kinesisvideo/__init__.py new file mode 100644 index 000000000..ee79d957b --- /dev/null +++ b/moto/kinesisvideo/__init__.py @@ -0,0 +1,6 @@ +from __future__ import unicode_literals +from .models import kinesisvideo_backends +from ..core.models import base_decorator + +kinesisvideo_backend = kinesisvideo_backends["us-east-1"] +mock_kinesisvideo = base_decorator(kinesisvideo_backends) diff --git a/moto/kinesisvideo/exceptions.py b/moto/kinesisvideo/exceptions.py new file mode 100644 index 000000000..e2e119b37 --- /dev/null +++ b/moto/kinesisvideo/exceptions.py @@ -0,0 +1,24 @@ +from __future__ import unicode_literals + +from moto.core.exceptions import RESTError + + +class KinesisvideoClientError(RESTError): + code = 400 + + +class ResourceNotFoundException(KinesisvideoClientError): + def __init__(self): + self.code = 404 + super(ResourceNotFoundException, self).__init__( + "ResourceNotFoundException", + "The requested stream is not found or not active.", + ) + + +class ResourceInUseException(KinesisvideoClientError): + def __init__(self, message): + self.code = 400 + super(ResourceInUseException, self).__init__( + "ResourceInUseException", message, + ) diff --git a/moto/kinesisvideo/models.py b/moto/kinesisvideo/models.py new file mode 100644 index 000000000..90d84ac02 --- /dev/null +++ b/moto/kinesisvideo/models.py @@ -0,0 +1,147 @@ +from __future__ import unicode_literals +from boto3 import Session +from moto.core import BaseBackend, BaseModel +from datetime import datetime 
+from .exceptions import ( + ResourceNotFoundException, + ResourceInUseException, +) +import random +import string +from moto.core.utils import get_random_hex +from moto.core import ACCOUNT_ID + + +class Stream(BaseModel): + def __init__( + self, + region_name, + device_name, + stream_name, + media_type, + kms_key_id, + data_retention_in_hours, + tags, + ): + self.region_name = region_name + self.stream_name = stream_name + self.device_name = device_name + self.media_type = media_type + self.kms_key_id = kms_key_id + self.data_retention_in_hours = data_retention_in_hours + self.tags = tags + self.status = "ACTIVE" + self.version = self._get_random_string() + self.creation_time = datetime.utcnow() + stream_arn = "arn:aws:kinesisvideo:{}:{}:stream/{}/1598784211076".format( + self.region_name, ACCOUNT_ID, self.stream_name + ) + self.data_endpoint_number = get_random_hex() + self.arn = stream_arn + + def _get_random_string(self, length=20): + letters = string.ascii_lowercase + result_str = "".join([random.choice(letters) for _ in range(length)]) + return result_str + + def get_data_endpoint(self, api_name): + data_endpoint_prefix = "s-" if api_name in ("PUT_MEDIA", "GET_MEDIA") else "b-" + return "https://{}{}.kinesisvideo.{}.amazonaws.com".format( + data_endpoint_prefix, self.data_endpoint_number, self.region_name + ) + + def to_dict(self): + return { + "DeviceName": self.device_name, + "StreamName": self.stream_name, + "StreamARN": self.arn, + "MediaType": self.media_type, + "KmsKeyId": self.kms_key_id, + "Version": self.version, + "Status": self.status, + "CreationTime": self.creation_time.isoformat(), + "DataRetentionInHours": self.data_retention_in_hours, + } + + +class KinesisVideoBackend(BaseBackend): + def __init__(self, region_name=None): + super(KinesisVideoBackend, self).__init__() + self.region_name = region_name + self.streams = {} + + def reset(self): + region_name = self.region_name + self.__dict__ = {} + self.__init__(region_name) + + def create_stream( + self, + device_name, + stream_name, + media_type, + kms_key_id, + data_retention_in_hours, + tags, + ): + streams = [_ for _ in self.streams.values() if _.stream_name == stream_name] + if len(streams) > 0: + raise ResourceInUseException( + "The stream {} already exists.".format(stream_name) + ) + stream = Stream( + self.region_name, + device_name, + stream_name, + media_type, + kms_key_id, + data_retention_in_hours, + tags, + ) + self.streams[stream.arn] = stream + return stream.arn + + def _get_stream(self, stream_name, stream_arn): + if stream_name: + streams = [_ for _ in self.streams.values() if _.stream_name == stream_name] + if len(streams) == 0: + raise ResourceNotFoundException() + stream = streams[0] + elif stream_arn: + stream = self.streams.get(stream_arn) + if stream is None: + raise ResourceNotFoundException() + return stream + + def describe_stream(self, stream_name, stream_arn): + stream = self._get_stream(stream_name, stream_arn) + stream_info = stream.to_dict() + return stream_info + + def list_streams(self, max_results, next_token, stream_name_condition): + stream_info_list = [_.to_dict() for _ in self.streams.values()] + next_token = None + return stream_info_list, next_token + + def delete_stream(self, stream_arn, current_version): + stream = self.streams.get(stream_arn) + if stream is None: + raise ResourceNotFoundException() + del self.streams[stream_arn] + + def get_data_endpoint(self, stream_name, stream_arn, api_name): + stream = self._get_stream(stream_name, stream_arn) + return 
stream.get_data_endpoint(api_name) + + # add methods from here + + +kinesisvideo_backends = {} +for region in Session().get_available_regions("kinesisvideo"): + kinesisvideo_backends[region] = KinesisVideoBackend(region) +for region in Session().get_available_regions( + "kinesisvideo", partition_name="aws-us-gov" +): + kinesisvideo_backends[region] = KinesisVideoBackend(region) +for region in Session().get_available_regions("kinesisvideo", partition_name="aws-cn"): + kinesisvideo_backends[region] = KinesisVideoBackend(region) diff --git a/moto/kinesisvideo/responses.py b/moto/kinesisvideo/responses.py new file mode 100644 index 000000000..376e5b5fe --- /dev/null +++ b/moto/kinesisvideo/responses.py @@ -0,0 +1,70 @@ +from __future__ import unicode_literals +from moto.core.responses import BaseResponse +from .models import kinesisvideo_backends +import json + + +class KinesisVideoResponse(BaseResponse): + SERVICE_NAME = "kinesisvideo" + + @property + def kinesisvideo_backend(self): + return kinesisvideo_backends[self.region] + + def create_stream(self): + device_name = self._get_param("DeviceName") + stream_name = self._get_param("StreamName") + media_type = self._get_param("MediaType") + kms_key_id = self._get_param("KmsKeyId") + data_retention_in_hours = self._get_int_param("DataRetentionInHours") + tags = self._get_param("Tags") + stream_arn = self.kinesisvideo_backend.create_stream( + device_name=device_name, + stream_name=stream_name, + media_type=media_type, + kms_key_id=kms_key_id, + data_retention_in_hours=data_retention_in_hours, + tags=tags, + ) + return json.dumps(dict(StreamARN=stream_arn)) + + def describe_stream(self): + stream_name = self._get_param("StreamName") + stream_arn = self._get_param("StreamARN") + stream_info = self.kinesisvideo_backend.describe_stream( + stream_name=stream_name, stream_arn=stream_arn, + ) + return json.dumps(dict(StreamInfo=stream_info)) + + def list_streams(self): + max_results = self._get_int_param("MaxResults") + next_token = self._get_param("NextToken") + stream_name_condition = self._get_param("StreamNameCondition") + stream_info_list, next_token = self.kinesisvideo_backend.list_streams( + max_results=max_results, + next_token=next_token, + stream_name_condition=stream_name_condition, + ) + return json.dumps(dict(StreamInfoList=stream_info_list, NextToken=next_token)) + + def delete_stream(self): + stream_arn = self._get_param("StreamARN") + current_version = self._get_param("CurrentVersion") + self.kinesisvideo_backend.delete_stream( + stream_arn=stream_arn, current_version=current_version, + ) + return json.dumps(dict()) + + def get_data_endpoint(self): + stream_name = self._get_param("StreamName") + stream_arn = self._get_param("StreamARN") + api_name = self._get_param("APIName") + data_endpoint = self.kinesisvideo_backend.get_data_endpoint( + stream_name=stream_name, stream_arn=stream_arn, api_name=api_name, + ) + return json.dumps(dict(DataEndpoint=data_endpoint)) + + # add methods from here + + +# add templates from here diff --git a/moto/kinesisvideo/urls.py b/moto/kinesisvideo/urls.py new file mode 100644 index 000000000..9aab7f8e2 --- /dev/null +++ b/moto/kinesisvideo/urls.py @@ -0,0 +1,18 @@ +from __future__ import unicode_literals +from .responses import KinesisVideoResponse + +url_bases = [ + "https?://kinesisvideo.(.+).amazonaws.com", +] + + +response = KinesisVideoResponse() + + +url_paths = { + "{0}/createStream$": response.dispatch, + "{0}/describeStream$": response.dispatch, + "{0}/deleteStream$": response.dispatch, + 
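+    # "{0}" is substituted with each pattern in url_bases, so ListStreams,
+    # for example, is served at
+    # https://kinesisvideo.<region>.amazonaws.com/listStreams (exercised by
+    # the POST to /listStreams in test_server.py below).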
"{0}/listStreams$": response.dispatch, + "{0}/getDataEndpoint$": response.dispatch, +} diff --git a/tests/test_kinesisvideo/test_kinesisvideo.py b/tests/test_kinesisvideo/test_kinesisvideo.py new file mode 100644 index 000000000..de3d9ebbb --- /dev/null +++ b/tests/test_kinesisvideo/test_kinesisvideo.py @@ -0,0 +1,140 @@ +from __future__ import unicode_literals + +import boto3 +import sure # noqa +from nose.tools import assert_raises +from moto import mock_kinesisvideo +from botocore.exceptions import ClientError +import json + + +@mock_kinesisvideo +def test_create_stream(): + client = boto3.client("kinesisvideo", region_name="ap-northeast-1") + stream_name = "my-stream" + device_name = "random-device" + + # stream can be created + res = client.create_stream(StreamName=stream_name, DeviceName=device_name) + res.should.have.key("StreamARN").which.should.contain(stream_name) + + +@mock_kinesisvideo +def test_create_stream_with_same_name(): + client = boto3.client("kinesisvideo", region_name="ap-northeast-1") + stream_name = "my-stream" + device_name = "random-device" + + client.create_stream(StreamName=stream_name, DeviceName=device_name) + + # cannot create with same stream name + with assert_raises(ClientError): + client.create_stream(StreamName=stream_name, DeviceName=device_name) + + +@mock_kinesisvideo +def test_describe_stream(): + client = boto3.client("kinesisvideo", region_name="ap-northeast-1") + stream_name = "my-stream" + device_name = "random-device" + + res = client.create_stream(StreamName=stream_name, DeviceName=device_name) + res.should.have.key("StreamARN").which.should.contain(stream_name) + stream_arn = res["StreamARN"] + + # cannot create with existing stream name + with assert_raises(ClientError): + client.create_stream(StreamName=stream_name, DeviceName=device_name) + + # stream can be described with name + res = client.describe_stream(StreamName=stream_name) + res.should.have.key("StreamInfo") + stream_info = res["StreamInfo"] + stream_info.should.have.key("StreamARN").which.should.contain(stream_name) + stream_info.should.have.key("StreamName").which.should.equal(stream_name) + stream_info.should.have.key("DeviceName").which.should.equal(device_name) + + # stream can be described with arn + res = client.describe_stream(StreamARN=stream_arn) + res.should.have.key("StreamInfo") + stream_info = res["StreamInfo"] + stream_info.should.have.key("StreamARN").which.should.contain(stream_name) + stream_info.should.have.key("StreamName").which.should.equal(stream_name) + stream_info.should.have.key("DeviceName").which.should.equal(device_name) + + +@mock_kinesisvideo +def test_describe_stream_with_name_not_exist(): + client = boto3.client("kinesisvideo", region_name="ap-northeast-1") + stream_name_not_exist = "not-exist-stream" + + # cannot describe with not exist stream name + with assert_raises(ClientError): + client.describe_stream(StreamName=stream_name_not_exist) + + +@mock_kinesisvideo +def test_list_streams(): + client = boto3.client("kinesisvideo", region_name="ap-northeast-1") + stream_name = "my-stream" + stream_name_2 = "my-stream-2" + device_name = "random-device" + + client.create_stream(StreamName=stream_name, DeviceName=device_name) + client.create_stream(StreamName=stream_name_2, DeviceName=device_name) + + # streams can be listed + res = client.list_streams() + res.should.have.key("StreamInfoList") + streams = res["StreamInfoList"] + streams.should.have.length_of(2) + + +@mock_kinesisvideo +def test_delete_stream(): + client = boto3.client("kinesisvideo", 
region_name="ap-northeast-1") + stream_name = "my-stream" + stream_name_2 = "my-stream-2" + device_name = "random-device" + + client.create_stream(StreamName=stream_name, DeviceName=device_name) + res = client.create_stream(StreamName=stream_name_2, DeviceName=device_name) + stream_2_arn = res["StreamARN"] + + # stream can be deleted + client.delete_stream(StreamARN=stream_2_arn) + res = client.list_streams() + streams = res["StreamInfoList"] + streams.should.have.length_of(1) + + +@mock_kinesisvideo +def test_delete_stream_with_arn_not_exist(): + client = boto3.client("kinesisvideo", region_name="ap-northeast-1") + stream_name = "my-stream" + stream_name_2 = "my-stream-2" + device_name = "random-device" + + client.create_stream(StreamName=stream_name, DeviceName=device_name) + res = client.create_stream(StreamName=stream_name_2, DeviceName=device_name) + stream_2_arn = res["StreamARN"] + + client.delete_stream(StreamARN=stream_2_arn) + + # cannot delete with not exist stream + stream_arn_not_exist = stream_2_arn + with assert_raises(ClientError): + client.delete_stream(StreamARN=stream_arn_not_exist) + + +@mock_kinesisvideo +def test_data_endpoint(): + client = boto3.client("kinesisvideo", region_name="ap-northeast-1") + stream_name = "my-stream" + device_name = "random-device" + + # data-endpoint can be created + api_name = "GET_MEDIA" + client.create_stream(StreamName=stream_name, DeviceName=device_name) + res = client.get_data_endpoint(StreamName=stream_name, APIName=api_name) + res.should.have.key("DataEndpoint") diff --git a/tests/test_kinesisvideo/test_server.py b/tests/test_kinesisvideo/test_server.py new file mode 100644 index 000000000..20301353f --- /dev/null +++ b/tests/test_kinesisvideo/test_server.py @@ -0,0 +1,18 @@ +from __future__ import unicode_literals + +import sure # noqa + +import moto.server as server +from moto import mock_kinesisvideo + +""" +Test the different server responses +""" + + +@mock_kinesisvideo +def test_kinesisvideo_server_is_up(): + backend = server.create_backend_app("kinesisvideo") + test_client = backend.test_client() + res = test_client.post("/listStreams") + res.status_code.should.equal(200) From 6c4a60d0376d11fa7f5128f6b586ae65ed4ab3fa Mon Sep 17 00:00:00 2001 From: Toshiya Kawasaki Date: Wed, 2 Sep 2020 17:59:26 +0900 Subject: [PATCH 12/21] Add introduction of using scaffold.py (#3274) --- CONTRIBUTING.md | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index edcc46561..e4a189e5e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -50,6 +50,41 @@ Note the `urls.py` that redirects all incoming URL requests to a generic `dispat If you want more control over incoming requests or their bodies, it is possible to redirect specific requests to a custom method. See this PR for an example: https://github.com/spulec/moto/pull/2957/files +### Generating template code of services. + +By using `scripts/scaffold.py`, you can automatically generate template code of new services and new method of existing service. The script looks up API specification of given boto3 method and adds necessary codes includng request parameters and response parameters. In some cases, it fails to generate codes. +Please try out by runninig `python scripts/scaffold.py` + +```bash +$ python scripts/scaffold.py +Select service: codedeploy + +==Current Implementation Status== +[ ] add_tags_to_on_premises_instances +... 
+[ ] create_deployment +...[ +[ ] update_deployment_group +================================= +Select Operation: create_deployment + + + Initializing service codedeploy + creating moto/codedeploy + creating moto/codedeploy/models.py + creating moto/codedeploy/exceptions.py + creating moto/codedeploy/__init__.py + creating moto/codedeploy/responses.py + creating moto/codedeploy/urls.py + creating tests/test_codedeploy + creating tests/test_codedeploy/test_server.py + creating tests/test_codedeploy/test_codedeploy.py + inserting code moto/codedeploy/responses.py + inserting code moto/codedeploy/models.py +You will still need to add the mock into "__init__.py" +``` + + ## Maintainers ### Releasing a new version of Moto From 3fb7cf75d43562ba0dc494be6021a86fb0bc3436 Mon Sep 17 00:00:00 2001 From: Karthikeyan Singaravelan Date: Wed, 2 Sep 2020 15:40:29 +0530 Subject: [PATCH 13/21] Fix deprecation warning due to base64.decodestring in Python 3. (#3272) --- tests/test_ec2/test_instances.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/test_ec2/test_instances.py b/tests/test_ec2/test_instances.py index 1310b3a1d..7ec385973 100644 --- a/tests/test_ec2/test_instances.py +++ b/tests/test_ec2/test_instances.py @@ -23,6 +23,11 @@ from moto import mock_ec2_deprecated, mock_ec2, mock_cloudformation from tests.helpers import requires_boto_gte +if six.PY2: + decode_method = base64.decodestring +else: + decode_method = base64.decodebytes + ################ Test Readme ############### def add_servers(ami_id, count): conn = boto.connect_ec2() @@ -908,7 +913,7 @@ def test_user_data_with_run_instance(): instance_attribute = instance.get_attribute("userData") instance_attribute.should.be.a(InstanceAttribute) retrieved_user_data = instance_attribute.get("userData").encode("utf-8") - decoded_user_data = base64.decodestring(retrieved_user_data) + decoded_user_data = decode_method(retrieved_user_data) decoded_user_data.should.equal(b"some user data") From d2e16ecc2eadc5d1d751a4e8daf27ba96d827d65 Mon Sep 17 00:00:00 2001 From: usmangani1 Date: Wed, 2 Sep 2020 23:05:53 +0530 Subject: [PATCH 14/21] Fix:s3 Presign Post with object acl (#3246) * Fix:s3 Presign Post with object acl * Added import in tests Co-authored-by: usmankb --- moto/s3/responses.py | 4 ++++ tests/test_s3/test_s3.py | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 37 insertions(+) diff --git a/moto/s3/responses.py b/moto/s3/responses.py index 364ae4623..395cb5736 100644 --- a/moto/s3/responses.py +++ b/moto/s3/responses.py @@ -860,6 +860,10 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): new_key = self.backend.set_object(bucket_name, key, f) + if form.get("acl"): + acl = get_canned_acl(form.get("acl")) + new_key.set_acl(acl) + # Metadata metadata = metadata_from_headers(form) new_key.set_metadata(metadata) diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index 078abfa3b..960594801 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -2800,6 +2800,39 @@ def test_put_bucket_acl_body(): assert not result.get("Grants") +@mock_s3 +def test_object_acl_with_presigned_post(): + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) + + bucket_name = "imageS3Bucket" + object_name = "text.txt" + fields = {"acl": "public-read"} + file = open("text.txt", "w") + file.write("test") + file.close() + + s3.create_bucket(Bucket=bucket_name) + response = s3.generate_presigned_post( + bucket_name, object_name, Fields=fields, ExpiresIn=60000 + ) + + with 
open(object_name, "rb") as f: + files = {"file": (object_name, f)} + requests.post(response["url"], data=response["fields"], files=files) + + response = s3.get_object_acl(Bucket=bucket_name, Key=object_name) + + assert "Grants" in response + assert len(response["Grants"]) == 2 + assert response["Grants"][1]["Permission"] == "READ" + + response = s3.get_object(Bucket=bucket_name, Key=object_name) + + assert "ETag" in response + assert "Body" in response + os.remove("text.txt") + + @mock_s3 def test_put_bucket_notification(): s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) From 49b12ab7f567f82573b9c3d006f0be2bc40f44b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C5=82awek=20Ehlert?= Date: Sat, 27 Jul 2019 12:52:23 +0200 Subject: [PATCH 15/21] First stab at extracting deps in setup.py to extras --- requirements.txt | 2 +- setup.py | 55 ++++++++++++++++++++++++++++++++++++++---------- 2 files changed, 45 insertions(+), 12 deletions(-) diff --git a/requirements.txt b/requirements.txt index 4de489f8c..f5a476248 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ # Please add requirements to setup.py --e . +-e .[all] diff --git a/setup.py b/setup.py index ffaa8b273..92ab7a5de 100755 --- a/setup.py +++ b/setup.py @@ -33,22 +33,13 @@ install_requires = [ "boto>=2.36.0", "boto3>=1.9.201", "botocore>=1.12.201", - "cryptography>=2.3.0", "requests>=2.5", "xmltodict", "six>1.9", "werkzeug", - "PyYAML>=5.1", "pytz", - "ecdsa<0.15", "python-dateutil<3.0.0,>=2.1", - "python-jose[cryptography]>=3.1.0,<4.0.0", - "docker>=2.5.1", - "jsondiff>=1.1.2", - "aws-xray-sdk!=0.96,>=0.93", "responses>=0.9.0", - "idna<3,>=2.5", - "cfn-lint>=0.4.0", "MarkupSafe<2.0", # This is a Jinja2 dependency, 2.0.0a1 currently seems broken ] @@ -72,7 +63,6 @@ if PY2: "mock<=3.0.5", "more-itertools==5.0.0", "setuptools==44.0.0", - "sshpubkeys>=3.1.0,<4.0", "zipp==0.6.0", ] else: @@ -81,14 +71,57 @@ else: "mock", "more-itertools", "setuptools", - "sshpubkeys>=3.1.0", "zipp", ] +_dep_cryptography = "cryptography>=2.3.0" +_dep_PyYAML = "PyYAML>=5.1" +_dep_python_jose = "python-jose[cryptography]>=3.1.0,<4.0.0" +_dep_python_jose_ecdsa_pin = "ecdsa<0.15" # https://github.com/spulec/moto/pull/3263#discussion_r477404984 +_dep_docker = "docker>=2.5.1" +_dep_jsondiff = "jsondiff>=1.1.2" +_dep_aws_xray_sdk = "aws-xray-sdk!=0.96,>=0.93" +_dep_idna = "idna<3,>=2.5" +_dep_cfn_lint = "cfn-lint>=0.4.0" +_dep_sshpubkeys_py2 = "sshpubkeys>=3.1.0,<4.0; python_version<'3'" +_dep_sshpubkeys_py3 = "sshpubkeys>=3.1.0; python_version>'3'" + +all_extra_deps = [ + _dep_cryptography, + _dep_PyYAML, + _dep_python_jose, + _dep_python_jose_ecdsa_pin, + _dep_docker, + _dep_jsondiff, + _dep_aws_xray_sdk, + _dep_idna, + _dep_cfn_lint, + _dep_sshpubkeys_py2, + _dep_sshpubkeys_py3, +] + +# TODO: do we want to add ALL services here? +# i.e. even those without extra dependencies. +# Would be good for future-compatibility, I guess. 
+extras_per_service = { + "ec2": [_dep_cryptography, _dep_sshpubkeys_py2, _dep_sshpubkeys_py3], + 'acm': [_dep_cryptography], + 'iam': [_dep_cryptography], + 'cloudformation': [_dep_PyYAML, _dep_cfn_lint], + 'cognitoidp': [_dep_python_jose, _dep_python_jose_ecdsa_pin], + 'awslambda': [_dep_docker], + 'batch': [_dep_docker], + 'iotdata': [_dep_jsondiff], + 'xray': [_dep_aws_xray_sdk], +} + extras_require = { + 'all': all_extra_deps, 'server': ['flask'], } +extras_require.update(extras_per_service) + # https://hynek.me/articles/conditional-python-dependencies/ if int(setuptools.__version__.split(".", 1)[0]) < 18: if sys.version_info[0:2] < (3, 3): From 6c73def64a3f9d27a018412194cffde33a7e55be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C5=82awek=20Ehlert?= Date: Tue, 7 Jan 2020 13:25:20 +0100 Subject: [PATCH 16/21] Use extras when running the test server Make sure that `travis_moto_server.sh` script actually installs `all` and `server` extras. --- travis_moto_server.sh | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/travis_moto_server.sh b/travis_moto_server.sh index 4be26073e..c764d1cd1 100755 --- a/travis_moto_server.sh +++ b/travis_moto_server.sh @@ -1,9 +1,8 @@ #!/usr/bin/env bash set -e -pip install flask # TravisCI on bionic dist uses old version of Docker Engine # which is incompatibile with newer docker-py # See https://github.com/docker/docker-py/issues/2639 pip install "docker>=2.5.1,<=4.2.2" -pip install /moto/dist/moto*.gz -moto_server -H 0.0.0.0 -p 5000 \ No newline at end of file +pip install $(ls /moto/dist/moto*.gz)[server,all] +moto_server -H 0.0.0.0 -p 5000 From 8854fd06e855f71256b7c6a63b38a2d200666ed9 Mon Sep 17 00:00:00 2001 From: zhil3 <41350057+zhil3@users.noreply.github.com> Date: Fri, 4 Sep 2020 04:11:17 -0400 Subject: [PATCH 17/21] Add describe_endpoint and register_certificate_without_ca in iot_mock module with unittest (#3279) Co-authored-by: Zhi Li --- IMPLEMENTATION_COVERAGE.md | 4 +-- moto/iot/models.py | 60 ++++++++++++++++++++++++++++++++++++++ moto/iot/responses.py | 16 ++++++++++ moto/utilities/utils.py | 10 +++++++ tests/test_iot/test_iot.py | 60 ++++++++++++++++++++++++++++++++++++++ 5 files changed, 148 insertions(+), 2 deletions(-) create mode 100644 moto/utilities/utils.py diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index 721c9c977..3246c2615 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -4447,7 +4447,7 @@ - [ ] describe_default_authorizer - [ ] describe_dimension - [ ] describe_domain_configuration -- [ ] describe_endpoint +- [X] describe_endpoint - [ ] describe_event_configurations - [ ] describe_index - [X] describe_job @@ -4533,7 +4533,7 @@ - [ ] list_violation_events - [ ] register_ca_certificate - [X] register_certificate -- [ ] register_certificate_without_ca +- [X] register_certificate_without_ca - [ ] register_thing - [ ] reject_certificate_transfer - [ ] remove_thing_from_billing_group diff --git a/moto/iot/models.py b/moto/iot/models.py index 5b74b353c..ebd15d10a 100644 --- a/moto/iot/models.py +++ b/moto/iot/models.py @@ -20,6 +20,7 @@ from .exceptions import ( InvalidStateTransitionException, VersionConflictException, ) +from moto.utilities.utils import random_string class FakeThing(BaseModel): @@ -374,6 +375,55 @@ class FakeJobExecution(BaseModel): return obj +class FakeEndpoint(BaseModel): + def __init__(self, endpoint_type, region_name): + if endpoint_type not in [ + "iot:Data", + "iot:Data-ATS", + "iot:CredentialProvider", + "iot:Jobs", + ]: + raise 
InvalidRequestException( + " An error occurred (InvalidRequestException) when calling the DescribeEndpoint " + "operation: Endpoint type %s not recognized." % endpoint_type + ) + self.region_name = region_name + data_identifier = random_string(14) + if endpoint_type == "iot:Data": + self.endpoint = "{i}.iot.{r}.amazonaws.com".format( + i=data_identifier, r=self.region_name + ) + elif "iot:Data-ATS" in endpoint_type: + self.endpoint = "{i}-ats.iot.{r}.amazonaws.com".format( + i=data_identifier, r=self.region_name + ) + elif "iot:CredentialProvider" in endpoint_type: + identifier = random_string(14) + self.endpoint = "{i}.credentials.iot.{r}.amazonaws.com".format( + i=identifier, r=self.region_name + ) + elif "iot:Jobs" in endpoint_type: + identifier = random_string(14) + self.endpoint = "{i}.jobs.iot.{r}.amazonaws.com".format( + i=identifier, r=self.region_name + ) + self.endpoint_type = endpoint_type + + def to_get_dict(self): + obj = { + "endpointAddress": self.endpoint, + } + + return obj + + def to_dict(self): + obj = { + "endpointAddress": self.endpoint, + } + + return obj + + class IoTBackend(BaseBackend): def __init__(self, region_name=None): super(IoTBackend, self).__init__() @@ -387,6 +437,7 @@ class IoTBackend(BaseBackend): self.policies = OrderedDict() self.principal_policies = OrderedDict() self.principal_things = OrderedDict() + self.endpoint = None def reset(self): region_name = self.region_name @@ -495,6 +546,10 @@ class IoTBackend(BaseBackend): raise ResourceNotFoundException() return thing_types[0] + def describe_endpoint(self, endpoint_type): + self.endpoint = FakeEndpoint(endpoint_type, self.region_name) + return self.endpoint + def delete_thing(self, thing_name, expected_version): # TODO: handle expected_version @@ -625,6 +680,11 @@ class IoTBackend(BaseBackend): self.certificates[certificate.certificate_id] = certificate return certificate + def register_certificate_without_ca(self, certificate_pem, status): + certificate = FakeCertificate(certificate_pem, status, self.region_name) + self.certificates[certificate.certificate_id] = certificate + return certificate + def update_certificate(self, certificate_id, new_status): cert = self.describe_certificate(certificate_id) # TODO: validate new_status diff --git a/moto/iot/responses.py b/moto/iot/responses.py index 07a8c10c2..15c62d91e 100644 --- a/moto/iot/responses.py +++ b/moto/iot/responses.py @@ -88,6 +88,11 @@ class IoTResponse(BaseResponse): ) return json.dumps(thing_type.to_dict()) + def describe_endpoint(self): + endpoint_type = self._get_param("endpointType") + endpoint = self.iot_backend.describe_endpoint(endpoint_type=endpoint_type) + return json.dumps(endpoint.to_dict()) + def delete_thing(self): thing_name = self._get_param("thingName") expected_version = self._get_param("expectedVersion") @@ -330,6 +335,17 @@ class IoTResponse(BaseResponse): dict(certificateId=cert.certificate_id, certificateArn=cert.arn) ) + def register_certificate_without_ca(self): + certificate_pem = self._get_param("certificatePem") + status = self._get_param("status") + + cert = self.iot_backend.register_certificate_without_ca( + certificate_pem=certificate_pem, status=status, + ) + return json.dumps( + dict(certificateId=cert.certificate_id, certificateArn=cert.arn) + ) + def update_certificate(self): certificate_id = self._get_param("certificateId") new_status = self._get_param("newStatus") diff --git a/moto/utilities/utils.py b/moto/utilities/utils.py new file mode 100644 index 000000000..6bd5e8b86 --- /dev/null +++ 
b/moto/utilities/utils.py
@@ -0,0 +1,10 @@
+import random
+import string
+
+
+def random_string(length=None):
+    n = length or 20
+    random_str = "".join(
+        [random.choice(string.ascii_letters + string.digits) for i in range(n)]
+    )
+    return random_str
diff --git a/tests/test_iot/test_iot.py b/tests/test_iot/test_iot.py
index c3ee4c96d..12e1ff7b0 100644
--- a/tests/test_iot/test_iot.py
+++ b/tests/test_iot/test_iot.py
@@ -463,6 +463,46 @@ def test_list_things_with_attribute_and_thing_type_filter_and_next_token():
     )
 
 
+@mock_iot
+def test_endpoints():
+    region_name = "ap-northeast-1"
+    client = boto3.client("iot", region_name=region_name)
+
+    # iot:Data
+    endpoint = client.describe_endpoint(endpointType="iot:Data")
+    endpoint.should.have.key("endpointAddress").which.should_not.contain("ats")
+    endpoint.should.have.key("endpointAddress").which.should.contain(
+        "iot.{}.amazonaws.com".format(region_name)
+    )
+
+    # iot:Data-ATS
+    endpoint = client.describe_endpoint(endpointType="iot:Data-ATS")
+    endpoint.should.have.key("endpointAddress").which.should.contain(
+        "ats.iot.{}.amazonaws.com".format(region_name)
+    )
+
+    # iot:CredentialProvider
+    endpoint = client.describe_endpoint(endpointType="iot:CredentialProvider")
+    endpoint.should.have.key("endpointAddress").which.should.contain(
+        "credentials.iot.{}.amazonaws.com".format(region_name)
+    )
+
+    # iot:Jobs
+    endpoint = client.describe_endpoint(endpointType="iot:Jobs")
+    endpoint.should.have.key("endpointAddress").which.should.contain(
+        "jobs.iot.{}.amazonaws.com".format(region_name)
+    )
+
+    # raise InvalidRequestException
+    try:
+        client.describe_endpoint(endpointType="iot:Abc")
+    except client.exceptions.InvalidRequestException as exc:
+        error_code = exc.response["Error"]["Code"]
+        error_code.should.equal("InvalidRequestException")
+    else:
+        raise Exception("Should have raised error")
+
+
 @mock_iot
 def test_certs():
     client = boto3.client("iot", region_name="us-east-1")
@@ -523,6 +563,26 @@ def test_certs():
     res = client.list_certificates()
     res.should.have.key("certificates")
 
+    # Test register_certificate without CA flow
+    cert = client.register_certificate_without_ca(
+        certificatePem=cert_pem, status="INACTIVE"
+    )
+    cert.should.have.key("certificateId").which.should_not.be.none
+    cert.should.have.key("certificateArn").which.should_not.be.none
+    cert_id = cert["certificateId"]
+
+    res = client.list_certificates()
+    res.should.have.key("certificates").which.should.have.length_of(1)
+    for cert in res["certificates"]:
+        cert.should.have.key("certificateArn").which.should_not.be.none
+        cert.should.have.key("certificateId").which.should_not.be.none
+        cert.should.have.key("status").which.should_not.be.none
+        cert.should.have.key("creationDate").which.should_not.be.none
+
+    client.delete_certificate(certificateId=cert_id)
+    res = client.list_certificates()
+    res.should.have.key("certificates")
+
 
 @mock_iot
 def test_delete_policy_validation():

From ca64d8fc7a0719e66a03d106ad312fef5bd477ee Mon Sep 17 00:00:00 2001
From: Ciaran Evans <9111975+ciaranevans@users.noreply.github.com>
Date: Fri, 4 Sep 2020 09:58:16 +0100
Subject: [PATCH 18/21] Implement Execution inputs for Step Functions (#3284)

* Add input attribute to Execution and test with describe_execution

* Switch back method name
---
 moto/stepfunctions/models.py                 |  5 ++-
 moto/stepfunctions/responses.py              |  7 ++-
 .../test_stepfunctions/test_stepfunctions.py | 44 ++++++++++++++-----
 3 files changed, 43 insertions(+), 13 deletions(-)

diff --git a/moto/stepfunctions/models.py b/moto/stepfunctions/models.py
index
58b6bb434..19fb4561d 100644 --- a/moto/stepfunctions/models.py +++ b/moto/stepfunctions/models.py @@ -34,6 +34,7 @@ class Execution: state_machine_name, execution_name, state_machine_arn, + execution_input, ): execution_arn = "arn:aws:states:{}:{}:execution:{}:{}" execution_arn = execution_arn.format( @@ -43,6 +44,7 @@ class Execution: self.name = execution_name self.start_date = iso_8601_datetime_without_milliseconds(datetime.now()) self.state_machine_arn = state_machine_arn + self.execution_input = execution_input self.status = "RUNNING" self.stop_date = None @@ -204,7 +206,7 @@ class StepFunctionBackend(BaseBackend): if sm: self.state_machines.remove(sm) - def start_execution(self, state_machine_arn, name=None): + def start_execution(self, state_machine_arn, name=None, execution_input=None): state_machine_name = self.describe_state_machine(state_machine_arn).name self._ensure_execution_name_doesnt_exist(name) execution = Execution( @@ -213,6 +215,7 @@ class StepFunctionBackend(BaseBackend): state_machine_name=state_machine_name, execution_name=name or str(uuid4()), state_machine_arn=state_machine_arn, + execution_input=execution_input, ) self.executions.append(execution) return execution diff --git a/moto/stepfunctions/responses.py b/moto/stepfunctions/responses.py index 7083167b6..d9e438892 100644 --- a/moto/stepfunctions/responses.py +++ b/moto/stepfunctions/responses.py @@ -95,8 +95,11 @@ class StepFunctionResponse(BaseResponse): def start_execution(self): arn = self._get_param("stateMachineArn") name = self._get_param("name") + execution_input = self._get_param("input", if_none="{}") try: - execution = self.stepfunction_backend.start_execution(arn, name) + execution = self.stepfunction_backend.start_execution( + arn, name, execution_input + ) except AWSError as err: return err.response() response = { @@ -129,7 +132,7 @@ class StepFunctionResponse(BaseResponse): execution = self.stepfunction_backend.describe_execution(arn) response = { "executionArn": arn, - "input": "{}", + "input": execution.execution_input, "name": execution.name, "startDate": execution.start_date, "stateMachineArn": execution.state_machine_arn, diff --git a/tests/test_stepfunctions/test_stepfunctions.py b/tests/test_stepfunctions/test_stepfunctions.py index 043fd9bfb..d94867719 100644 --- a/tests/test_stepfunctions/test_stepfunctions.py +++ b/tests/test_stepfunctions/test_stepfunctions.py @@ -1,8 +1,8 @@ from __future__ import unicode_literals import boto3 +import json import sure # noqa -import datetime from datetime import datetime from botocore.exceptions import ClientError @@ -134,7 +134,7 @@ def test_state_machine_creation_fails_with_invalid_names(): # for invalid_name in invalid_names: - with assert_raises(ClientError) as exc: + with assert_raises(ClientError): client.create_state_machine( name=invalid_name, definition=str(simple_definition), @@ -147,7 +147,7 @@ def test_state_machine_creation_requires_valid_role_arn(): client = boto3.client("stepfunctions", region_name=region) name = "example_step_function" # - with assert_raises(ClientError) as exc: + with assert_raises(ClientError): client.create_state_machine( name=name, definition=str(simple_definition), @@ -242,7 +242,7 @@ def test_state_machine_creation_can_be_described(): def test_state_machine_throws_error_when_describing_unknown_machine(): client = boto3.client("stepfunctions", region_name=region) # - with assert_raises(ClientError) as exc: + with assert_raises(ClientError): unknown_state_machine = ( "arn:aws:states:" + region @@ -258,7 +258,7 @@ 
def test_state_machine_throws_error_when_describing_unknown_machine(): def test_state_machine_throws_error_when_describing_bad_arn(): client = boto3.client("stepfunctions", region_name=region) # - with assert_raises(ClientError) as exc: + with assert_raises(ClientError): client.describe_state_machine(stateMachineArn="bad") @@ -267,7 +267,7 @@ def test_state_machine_throws_error_when_describing_bad_arn(): def test_state_machine_throws_error_when_describing_machine_in_different_account(): client = boto3.client("stepfunctions", region_name=region) # - with assert_raises(ClientError) as exc: + with assert_raises(ClientError): unknown_state_machine = ( "arn:aws:states:" + region + ":000000000000:stateMachine:unknown" ) @@ -376,7 +376,7 @@ def test_state_machine_start_execution(): def test_state_machine_start_execution_bad_arn_raises_exception(): client = boto3.client("stepfunctions", region_name=region) # - with assert_raises(ClientError) as exc: + with assert_raises(ClientError): client.start_execution(stateMachineArn="bad") @@ -464,7 +464,7 @@ def test_state_machine_list_executions_when_none_exist(): @mock_stepfunctions @mock_sts -def test_state_machine_describe_execution(): +def test_state_machine_describe_execution_with_no_input(): client = boto3.client("stepfunctions", region_name=region) # sm = client.create_state_machine( @@ -483,12 +483,36 @@ def test_state_machine_describe_execution(): description.shouldnt.have("stopDate") +@mock_stepfunctions +@mock_sts +def test_state_machine_describe_execution_with_custom_input(): + client = boto3.client("stepfunctions", region_name=region) + # + execution_input = json.dumps({"input_key": "input_val"}) + sm = client.create_state_machine( + name="name", definition=str(simple_definition), roleArn=_get_default_role() + ) + execution = client.start_execution( + stateMachineArn=sm["stateMachineArn"], input=execution_input + ) + description = client.describe_execution(executionArn=execution["executionArn"]) + # + description["ResponseMetadata"]["HTTPStatusCode"].should.equal(200) + description["executionArn"].should.equal(execution["executionArn"]) + description["input"].should.equal(execution_input) + description["name"].shouldnt.be.empty + description["startDate"].should.equal(execution["startDate"]) + description["stateMachineArn"].should.equal(sm["stateMachineArn"]) + description["status"].should.equal("RUNNING") + description.shouldnt.have("stopDate") + + @mock_stepfunctions @mock_sts def test_execution_throws_error_when_describing_unknown_execution(): client = boto3.client("stepfunctions", region_name=region) # - with assert_raises(ClientError) as exc: + with assert_raises(ClientError): unknown_execution = ( "arn:aws:states:" + region + ":" + _get_account_id() + ":execution:unknown" ) @@ -519,7 +543,7 @@ def test_state_machine_can_be_described_by_execution(): def test_state_machine_throws_error_when_describing_unknown_execution(): client = boto3.client("stepfunctions", region_name=region) # - with assert_raises(ClientError) as exc: + with assert_raises(ClientError): unknown_execution = ( "arn:aws:states:" + region + ":" + _get_account_id() + ":execution:unknown" ) From 6a467ec50f50a14ad67845b3d46eadab919c8719 Mon Sep 17 00:00:00 2001 From: Ciaran Evans <9111975+ciaranevans@users.noreply.github.com> Date: Fri, 4 Sep 2020 10:22:21 +0100 Subject: [PATCH 19/21] Add validation on execution input --- moto/stepfunctions/exceptions.py | 5 +++ moto/stepfunctions/models.py | 11 +++++ .../test_stepfunctions/test_stepfunctions.py | 41 +++++++++++++++++++ 3 
files changed, 57 insertions(+) diff --git a/moto/stepfunctions/exceptions.py b/moto/stepfunctions/exceptions.py index 6000bab4e..4abb6a8af 100644 --- a/moto/stepfunctions/exceptions.py +++ b/moto/stepfunctions/exceptions.py @@ -38,6 +38,11 @@ class InvalidName(AWSError): STATUS = 400 +class InvalidExecutionInput(AWSError): + TYPE = "InvalidExecutionInput" + STATUS = 400 + + class StateMachineDoesNotExist(AWSError): TYPE = "StateMachineDoesNotExist" STATUS = 400 diff --git a/moto/stepfunctions/models.py b/moto/stepfunctions/models.py index 19fb4561d..3184d6456 100644 --- a/moto/stepfunctions/models.py +++ b/moto/stepfunctions/models.py @@ -1,3 +1,4 @@ +import json import re from datetime import datetime @@ -11,6 +12,7 @@ from .exceptions import ( ExecutionAlreadyExists, ExecutionDoesNotExist, InvalidArn, + InvalidExecutionInput, InvalidName, StateMachineDoesNotExist, ) @@ -209,6 +211,7 @@ class StepFunctionBackend(BaseBackend): def start_execution(self, state_machine_arn, name=None, execution_input=None): state_machine_name = self.describe_state_machine(state_machine_arn).name self._ensure_execution_name_doesnt_exist(name) + self._validate_execution_input(execution_input) execution = Execution( region_name=self.region_name, account_id=self._get_account_id(), @@ -290,6 +293,14 @@ class StepFunctionBackend(BaseBackend): "Execution Already Exists: '" + execution.execution_arn + "'" ) + def _validate_execution_input(self, execution_input): + try: + json.loads(execution_input) + except Exception as ex: + raise InvalidExecutionInput( + "Invalid State Machine Execution Input: '" + str(ex) + "'" + ) + def _get_account_id(self): return ACCOUNT_ID diff --git a/tests/test_stepfunctions/test_stepfunctions.py b/tests/test_stepfunctions/test_stepfunctions.py index d94867719..36b08487c 100644 --- a/tests/test_stepfunctions/test_stepfunctions.py +++ b/tests/test_stepfunctions/test_stepfunctions.py @@ -425,6 +425,47 @@ def test_state_machine_start_execution_fails_on_duplicate_execution_name(): ) +@mock_stepfunctions +@mock_sts +def test_state_machine_start_execution_with_custom_input(): + client = boto3.client("stepfunctions", region_name=region) + # + sm = client.create_state_machine( + name="name", definition=str(simple_definition), roleArn=_get_default_role() + ) + execution_input = json.dumps({"input_key": "input_value"}) + execution = client.start_execution( + stateMachineArn=sm["stateMachineArn"], input=execution_input + ) + # + execution["ResponseMetadata"]["HTTPStatusCode"].should.equal(200) + uuid_regex = "[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}" + expected_exec_name = ( + "arn:aws:states:" + + region + + ":" + + _get_account_id() + + ":execution:name:" + + uuid_regex + ) + execution["executionArn"].should.match(expected_exec_name) + execution["startDate"].should.be.a(datetime) + + +@mock_stepfunctions +@mock_sts +def test_state_machine_start_execution_with_invalid_input(): + client = boto3.client("stepfunctions", region_name=region) + # + sm = client.create_state_machine( + name="name", definition=str(simple_definition), roleArn=_get_default_role() + ) + with assert_raises(ClientError): + _ = client.start_execution(stateMachineArn=sm["stateMachineArn"], input="") + with assert_raises(ClientError): + _ = client.start_execution(stateMachineArn=sm["stateMachineArn"], input="{") + + @mock_stepfunctions @mock_sts def test_state_machine_list_executions(): From c66812edbaca4b2a9535552ee022262d02513996 Mon Sep 17 00:00:00 2001 From: Toshiya Kawasaki Date: Fri, 4 Sep 2020 20:14:48 
+0900 Subject: [PATCH 20/21] Add kinesisvideo archived media (#3280) * add get_hls_streaming_session_url * add get_dash_streaming_session_url * add get_clip * add test_server for kinesisvideo archived media * fix for lint * fix for lint * avoid testing kinesisvideoarchivedmedia with TEST_SERVER_MODE=true --- Makefile | 6 +- moto/__init__.py | 3 + moto/backends.py | 4 + moto/kinesisvideo/responses.py | 5 -- moto/kinesisvideoarchivedmedia/__init__.py | 6 ++ moto/kinesisvideoarchivedmedia/exceptions.py | 3 + moto/kinesisvideoarchivedmedia/models.py | 88 +++++++++++++++++++ moto/kinesisvideoarchivedmedia/responses.py | 70 +++++++++++++++ moto/kinesisvideoarchivedmedia/urls.py | 14 +++ .../test_kinesisvideoarchivedmedia.py | 86 ++++++++++++++++++ .../test_server.py | 19 ++++ 11 files changed, 298 insertions(+), 6 deletions(-) create mode 100644 moto/kinesisvideoarchivedmedia/__init__.py create mode 100644 moto/kinesisvideoarchivedmedia/exceptions.py create mode 100644 moto/kinesisvideoarchivedmedia/models.py create mode 100644 moto/kinesisvideoarchivedmedia/responses.py create mode 100644 moto/kinesisvideoarchivedmedia/urls.py create mode 100644 tests/test_kinesisvideoarchivedmedia/test_kinesisvideoarchivedmedia.py create mode 100644 tests/test_kinesisvideoarchivedmedia/test_server.py diff --git a/Makefile b/Makefile index e84d036b7..acc5b2037 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,11 @@ SHELL := /bin/bash ifeq ($(TEST_SERVER_MODE), true) # exclude test_iot and test_iotdata for now # because authentication of iot is very complicated - TEST_EXCLUDE := --exclude='test_iot.*' + + # exclude test_kinesisvideoarchivedmedia + # because testing with moto_server is difficult with data-endpoint + + TEST_EXCLUDE := --exclude='test_iot.*' --exclude="test_kinesisvideoarchivedmedia.*" else TEST_EXCLUDE := endif diff --git a/moto/__init__.py b/moto/__init__.py index da66d9c61..e21d3f894 100644 --- a/moto/__init__.py +++ b/moto/__init__.py @@ -114,6 +114,9 @@ XRaySegment = lazy_load(".xray", "XRaySegment") mock_xray = lazy_load(".xray", "mock_xray") mock_xray_client = lazy_load(".xray", "mock_xray_client") mock_kinesisvideo = lazy_load(".kinesisvideo", "mock_kinesisvideo") +mock_kinesisvideoarchivedmedia = lazy_load( + ".kinesisvideoarchivedmedia", "mock_kinesisvideoarchivedmedia" +) # import logging # logging.getLogger('boto').setLevel(logging.CRITICAL) diff --git a/moto/backends.py b/moto/backends.py index 9216d4615..7b1c1d08d 100644 --- a/moto/backends.py +++ b/moto/backends.py @@ -70,6 +70,10 @@ BACKENDS = { "swf": ("swf", "swf_backends"), "xray": ("xray", "xray_backends"), "kinesisvideo": ("kinesisvideo", "kinesisvideo_backends"), + "kinesis-video-archived-media": ( + "kinesisvideoarchivedmedia", + "kinesisvideoarchivedmedia_backends", + ), } diff --git a/moto/kinesisvideo/responses.py b/moto/kinesisvideo/responses.py index 376e5b5fe..d1e386f2e 100644 --- a/moto/kinesisvideo/responses.py +++ b/moto/kinesisvideo/responses.py @@ -63,8 +63,3 @@ class KinesisVideoResponse(BaseResponse): stream_name=stream_name, stream_arn=stream_arn, api_name=api_name, ) return json.dumps(dict(DataEndpoint=data_endpoint)) - - # add methods from here - - -# add templates from here diff --git a/moto/kinesisvideoarchivedmedia/__init__.py b/moto/kinesisvideoarchivedmedia/__init__.py new file mode 100644 index 000000000..c1676c871 --- /dev/null +++ b/moto/kinesisvideoarchivedmedia/__init__.py @@ -0,0 +1,6 @@ +from __future__ import unicode_literals +from .models import kinesisvideoarchivedmedia_backends +from 
..core.models import base_decorator
+
+kinesisvideoarchivedmedia_backend = kinesisvideoarchivedmedia_backends["us-east-1"]
+mock_kinesisvideoarchivedmedia = base_decorator(kinesisvideoarchivedmedia_backends)
diff --git a/moto/kinesisvideoarchivedmedia/exceptions.py b/moto/kinesisvideoarchivedmedia/exceptions.py
new file mode 100644
index 000000000..38c60cea2
--- /dev/null
+++ b/moto/kinesisvideoarchivedmedia/exceptions.py
@@ -0,0 +1,3 @@
+from __future__ import unicode_literals
+
+# Not implemented exceptions for now
diff --git a/moto/kinesisvideoarchivedmedia/models.py b/moto/kinesisvideoarchivedmedia/models.py
new file mode 100644
index 000000000..46fddf567
--- /dev/null
+++ b/moto/kinesisvideoarchivedmedia/models.py
@@ -0,0 +1,88 @@
+from __future__ import unicode_literals
+from boto3 import Session
+from moto.core import BaseBackend
+from moto.kinesisvideo import kinesisvideo_backends
+from moto.sts.utils import random_session_token
+
+
+class KinesisVideoArchivedMediaBackend(BaseBackend):
+    def __init__(self, region_name=None):
+        super(KinesisVideoArchivedMediaBackend, self).__init__()
+        self.region_name = region_name
+
+    def reset(self):
+        region_name = self.region_name
+        self.__dict__ = {}
+        self.__init__(region_name)
+
+    def _get_streaming_url(self, stream_name, stream_arn, api_name):
+        stream = kinesisvideo_backends[self.region_name]._get_stream(
+            stream_name, stream_arn
+        )
+        data_endpoint = stream.get_data_endpoint(api_name)
+        session_token = random_session_token()
+        api_to_relative_path = {
+            "GET_HLS_STREAMING_SESSION_URL": "/hls/v1/getHLSMasterPlaylist.m3u8",
+            "GET_DASH_STREAMING_SESSION_URL": "/dash/v1/getDASHManifest.mpd",
+        }
+        relative_path = api_to_relative_path[api_name]
+        url = "{}{}?SessionToken={}".format(data_endpoint, relative_path, session_token)
+        return url
+
+    def get_hls_streaming_session_url(
+        self,
+        stream_name,
+        stream_arn,
+        playback_mode,
+        hls_fragment_selector,
+        container_format,
+        discontinuity_mode,
+        display_fragment_timestamp,
+        expires,
+        max_media_playlist_fragment_results,
+    ):
+        # Ignore optional parameters, as the format of the HLS URL doesn't depend on them
+        api_name = "GET_HLS_STREAMING_SESSION_URL"
+        url = self._get_streaming_url(stream_name, stream_arn, api_name)
+        return url
+
+    def get_dash_streaming_session_url(
+        self,
+        stream_name,
+        stream_arn,
+        playback_mode,
+        display_fragment_timestamp,
+        display_fragment_number,
+        dash_fragment_selector,
+        expires,
+        max_manifest_fragment_results,
+    ):
+        # Ignore optional parameters, as the format of the DASH URL doesn't depend on them
+        api_name = "GET_DASH_STREAMING_SESSION_URL"
+        url = self._get_streaming_url(stream_name, stream_arn, api_name)
+        return url
+
+    def get_clip(self, stream_name, stream_arn, clip_fragment_selector):
+        kinesisvideo_backends[self.region_name]._get_stream(stream_name, stream_arn)
+        content_type = "video/mp4"  # Fixed for the mock; in reality it depends on the input stream
+        payload = b"sample-mp4-video"
+        return content_type, payload
+
+
+kinesisvideoarchivedmedia_backends = {}
+for region in Session().get_available_regions("kinesis-video-archived-media"):
+    kinesisvideoarchivedmedia_backends[region] = KinesisVideoArchivedMediaBackend(
+        region
+    )
+for region in Session().get_available_regions(
+    "kinesis-video-archived-media", partition_name="aws-us-gov"
+):
+    kinesisvideoarchivedmedia_backends[region] = KinesisVideoArchivedMediaBackend(
+        region
+    )
+for region in Session().get_available_regions(
+    "kinesis-video-archived-media", partition_name="aws-cn"
+):
+
kinesisvideoarchivedmedia_backends[region] = KinesisVideoArchivedMediaBackend( + region + ) diff --git a/moto/kinesisvideoarchivedmedia/responses.py b/moto/kinesisvideoarchivedmedia/responses.py new file mode 100644 index 000000000..d021ced0e --- /dev/null +++ b/moto/kinesisvideoarchivedmedia/responses.py @@ -0,0 +1,70 @@ +from __future__ import unicode_literals +from moto.core.responses import BaseResponse +from .models import kinesisvideoarchivedmedia_backends +import json + + +class KinesisVideoArchivedMediaResponse(BaseResponse): + SERVICE_NAME = "kinesis-video-archived-media" + + @property + def kinesisvideoarchivedmedia_backend(self): + return kinesisvideoarchivedmedia_backends[self.region] + + def get_hls_streaming_session_url(self): + stream_name = self._get_param("StreamName") + stream_arn = self._get_param("StreamARN") + playback_mode = self._get_param("PlaybackMode") + hls_fragment_selector = self._get_param("HLSFragmentSelector") + container_format = self._get_param("ContainerFormat") + discontinuity_mode = self._get_param("DiscontinuityMode") + display_fragment_timestamp = self._get_param("DisplayFragmentTimestamp") + expires = self._get_int_param("Expires") + max_media_playlist_fragment_results = self._get_param( + "MaxMediaPlaylistFragmentResults" + ) + hls_streaming_session_url = self.kinesisvideoarchivedmedia_backend.get_hls_streaming_session_url( + stream_name=stream_name, + stream_arn=stream_arn, + playback_mode=playback_mode, + hls_fragment_selector=hls_fragment_selector, + container_format=container_format, + discontinuity_mode=discontinuity_mode, + display_fragment_timestamp=display_fragment_timestamp, + expires=expires, + max_media_playlist_fragment_results=max_media_playlist_fragment_results, + ) + return json.dumps(dict(HLSStreamingSessionURL=hls_streaming_session_url)) + + def get_dash_streaming_session_url(self): + stream_name = self._get_param("StreamName") + stream_arn = self._get_param("StreamARN") + playback_mode = self._get_param("PlaybackMode") + display_fragment_timestamp = self._get_param("DisplayFragmentTimestamp") + display_fragment_number = self._get_param("DisplayFragmentNumber") + dash_fragment_selector = self._get_param("DASHFragmentSelector") + expires = self._get_int_param("Expires") + max_manifest_fragment_results = self._get_param("MaxManifestFragmentResults") + dash_streaming_session_url = self.kinesisvideoarchivedmedia_backend.get_dash_streaming_session_url( + stream_name=stream_name, + stream_arn=stream_arn, + playback_mode=playback_mode, + display_fragment_timestamp=display_fragment_timestamp, + display_fragment_number=display_fragment_number, + dash_fragment_selector=dash_fragment_selector, + expires=expires, + max_manifest_fragment_results=max_manifest_fragment_results, + ) + return json.dumps(dict(DASHStreamingSessionURL=dash_streaming_session_url)) + + def get_clip(self): + stream_name = self._get_param("StreamName") + stream_arn = self._get_param("StreamARN") + clip_fragment_selector = self._get_param("ClipFragmentSelector") + content_type, payload = self.kinesisvideoarchivedmedia_backend.get_clip( + stream_name=stream_name, + stream_arn=stream_arn, + clip_fragment_selector=clip_fragment_selector, + ) + new_headers = {"Content-Type": content_type} + return payload, new_headers diff --git a/moto/kinesisvideoarchivedmedia/urls.py b/moto/kinesisvideoarchivedmedia/urls.py new file mode 100644 index 000000000..88c2d59f0 --- /dev/null +++ b/moto/kinesisvideoarchivedmedia/urls.py @@ -0,0 +1,14 @@ +from __future__ import unicode_literals +from 
.responses import KinesisVideoArchivedMediaResponse + +url_bases = [ + r"https?://.*\.kinesisvideo.(.+).amazonaws.com", +] + + +response = KinesisVideoArchivedMediaResponse() + + +url_paths = { + "{0}/.*$": response.dispatch, +} diff --git a/tests/test_kinesisvideoarchivedmedia/test_kinesisvideoarchivedmedia.py b/tests/test_kinesisvideoarchivedmedia/test_kinesisvideoarchivedmedia.py new file mode 100644 index 000000000..ee4439197 --- /dev/null +++ b/tests/test_kinesisvideoarchivedmedia/test_kinesisvideoarchivedmedia.py @@ -0,0 +1,86 @@ +from __future__ import unicode_literals + +import boto3 +import sure # noqa +from moto import mock_kinesisvideoarchivedmedia +from moto import mock_kinesisvideo +from datetime import datetime, timedelta + + +@mock_kinesisvideo +@mock_kinesisvideoarchivedmedia +def test_get_hls_streaming_session_url(): + region_name = "ap-northeast-1" + kvs_client = boto3.client("kinesisvideo", region_name=region_name) + stream_name = "my-stream" + kvs_client.create_stream(StreamName=stream_name) + + api_name = "GET_HLS_STREAMING_SESSION_URL" + res = kvs_client.get_data_endpoint(StreamName=stream_name, APIName=api_name) + data_endpoint = res["DataEndpoint"] + + client = boto3.client( + "kinesis-video-archived-media", + region_name=region_name, + endpoint_url=data_endpoint, + ) + res = client.get_hls_streaming_session_url(StreamName=stream_name,) + reg_exp = "^{}/hls/v1/getHLSMasterPlaylist.m3u8\?SessionToken\=.+$".format( + data_endpoint + ) + res.should.have.key("HLSStreamingSessionURL").which.should.match(reg_exp) + + +@mock_kinesisvideo +@mock_kinesisvideoarchivedmedia +def test_get_dash_streaming_session_url(): + region_name = "ap-northeast-1" + kvs_client = boto3.client("kinesisvideo", region_name=region_name) + stream_name = "my-stream" + kvs_client.create_stream(StreamName=stream_name) + + api_name = "GET_DASH_STREAMING_SESSION_URL" + res = kvs_client.get_data_endpoint(StreamName=stream_name, APIName=api_name) + data_endpoint = res["DataEndpoint"] + + client = boto3.client( + "kinesis-video-archived-media", + region_name=region_name, + endpoint_url=data_endpoint, + ) + res = client.get_dash_streaming_session_url(StreamName=stream_name,) + reg_exp = "^{}/dash/v1/getDASHManifest.mpd\?SessionToken\=.+$".format(data_endpoint) + res.should.have.key("DASHStreamingSessionURL").which.should.match(reg_exp) + + +@mock_kinesisvideo +@mock_kinesisvideoarchivedmedia +def test_get_clip(): + region_name = "ap-northeast-1" + kvs_client = boto3.client("kinesisvideo", region_name=region_name) + stream_name = "my-stream" + kvs_client.create_stream(StreamName=stream_name) + + api_name = "GET_DASH_STREAMING_SESSION_URL" + res = kvs_client.get_data_endpoint(StreamName=stream_name, APIName=api_name) + data_endpoint = res["DataEndpoint"] + + client = boto3.client( + "kinesis-video-archived-media", + region_name=region_name, + endpoint_url=data_endpoint, + ) + end_timestamp = datetime.utcnow() - timedelta(hours=1) + start_timestamp = end_timestamp - timedelta(minutes=5) + res = client.get_clip( + StreamName=stream_name, + ClipFragmentSelector={ + "FragmentSelectorType": "PRODUCER_TIMESTAMP", + "TimestampRange": { + "StartTimestamp": start_timestamp, + "EndTimestamp": end_timestamp, + }, + }, + ) + res.should.have.key("ContentType").which.should.match("video/mp4") + res.should.have.key("Payload") diff --git a/tests/test_kinesisvideoarchivedmedia/test_server.py b/tests/test_kinesisvideoarchivedmedia/test_server.py new file mode 100644 index 000000000..482c7bb1b --- /dev/null +++ 
b/tests/test_kinesisvideoarchivedmedia/test_server.py @@ -0,0 +1,19 @@ +from __future__ import unicode_literals + +import sure # noqa + +import moto.server as server +from moto import mock_kinesisvideoarchivedmedia + +""" +Test the different server responses +""" + + +@mock_kinesisvideoarchivedmedia +def test_kinesisvideoarchivedmedia_server_is_up(): + backend = server.create_backend_app("kinesis-video-archived-media") + test_client = backend.test_client() + res = test_client.post("/getHLSStreamingSessionURL") + # Just checking server is up + res.status_code.should.equal(404) From 9b7ee6687151fb56c26c42851739b2aefa7248ef Mon Sep 17 00:00:00 2001 From: Steve Pulec Date: Sat, 5 Sep 2020 11:43:58 -0500 Subject: [PATCH 21/21] update implementation coverage. --- IMPLEMENTATION_COVERAGE.md | 169 ++++++++++++++++++------------------- 1 file changed, 82 insertions(+), 87 deletions(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index 3246c2615..90ebf9a57 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -464,13 +464,13 @@ - [ ] delete_scaling_policy - [ ] delete_scheduled_action - [ ] deregister_scalable_target -- [x] describe_scalable_targets +- [X] describe_scalable_targets - [ ] describe_scaling_activities - [ ] describe_scaling_policies - [ ] describe_scheduled_actions - [ ] put_scaling_policy - [ ] put_scheduled_action -- [x] register_scalable_target - includes enhanced validation support for ECS targets +- [X] register_scalable_target ## application-insights @@ -642,15 +642,15 @@ ## athena
-26% implemented +36% implemented - [ ] batch_get_named_query - [ ] batch_get_query_execution -- [ ] create_named_query +- [X] create_named_query - [X] create_work_group - [ ] delete_named_query - [ ] delete_work_group -- [ ] get_named_query +- [X] get_named_query - [ ] get_query_execution - [ ] get_query_results - [X] get_work_group @@ -1664,7 +1664,7 @@ ## cognito-idp
-38% implemented +44% implemented - [ ] add_custom_attributes - [X] admin_add_user_to_group @@ -1693,11 +1693,11 @@ - [ ] admin_update_device_status - [X] admin_update_user_attributes - [ ] admin_user_global_sign_out -- [ ] associate_software_token +- [X] associate_software_token - [X] change_password - [ ] confirm_device - [X] confirm_forgot_password -- [ ] confirm_sign_up +- [X] confirm_sign_up - [X] create_group - [X] create_identity_provider - [X] create_resource_server @@ -1732,7 +1732,7 @@ - [ ] get_user_attribute_verification_code - [ ] get_user_pool_mfa_config - [ ] global_sign_out -- [ ] initiate_auth +- [X] initiate_auth - [ ] list_devices - [X] list_groups - [X] list_identity_providers @@ -1747,10 +1747,10 @@ - [X] respond_to_auth_challenge - [ ] set_risk_configuration - [ ] set_ui_customization -- [ ] set_user_mfa_preference +- [X] set_user_mfa_preference - [ ] set_user_pool_mfa_config - [ ] set_user_settings -- [ ] sign_up +- [X] sign_up - [ ] start_user_import_job - [ ] stop_user_import_job - [ ] tag_resource @@ -1764,7 +1764,7 @@ - [ ] update_user_pool - [X] update_user_pool_client - [X] update_user_pool_domain -- [ ] verify_software_token +- [X] verify_software_token - [ ] verify_user_attribute
@@ -1889,7 +1889,7 @@ ## config
-26% implemented +32% implemented - [X] batch_get_aggregate_resource_config - [X] batch_get_resource_config @@ -1901,7 +1901,7 @@ - [X] delete_delivery_channel - [ ] delete_evaluation_results - [ ] delete_organization_config_rule -- [ ] delete_organization_conformance_pack +- [X] delete_organization_conformance_pack - [ ] delete_pending_aggregation_request - [ ] delete_remediation_configuration - [ ] delete_remediation_exceptions @@ -1925,8 +1925,8 @@ - [X] describe_delivery_channels - [ ] describe_organization_config_rule_statuses - [ ] describe_organization_config_rules -- [ ] describe_organization_conformance_pack_statuses -- [ ] describe_organization_conformance_packs +- [X] describe_organization_conformance_pack_statuses +- [X] describe_organization_conformance_packs - [ ] describe_pending_aggregation_requests - [ ] describe_remediation_configurations - [ ] describe_remediation_exceptions @@ -1944,7 +1944,7 @@ - [ ] get_conformance_pack_compliance_summary - [ ] get_discovered_resource_counts - [ ] get_organization_config_rule_detailed_status -- [ ] get_organization_conformance_pack_detailed_status +- [X] get_organization_conformance_pack_detailed_status - [X] get_resource_config_history - [X] list_aggregate_discovered_resources - [X] list_discovered_resources @@ -1957,7 +1957,7 @@ - [X] put_delivery_channel - [X] put_evaluations - [ ] put_organization_config_rule -- [ ] put_organization_conformance_pack +- [X] put_organization_conformance_pack - [ ] put_remediation_configurations - [ ] put_remediation_exceptions - [ ] put_resource_config @@ -2580,7 +2580,7 @@ ## ec2
-26% implemented +27% implemented - [ ] accept_reserved_instances_exchange_quote - [ ] accept_transit_gateway_peering_attachment @@ -2639,7 +2639,7 @@ - [X] create_internet_gateway - [X] create_key_pair - [X] create_launch_template -- [x] create_launch_template_version +- [ ] create_launch_template_version - [ ] create_local_gateway_route - [ ] create_local_gateway_route_table_vpc_association - [X] create_nat_gateway @@ -2939,7 +2939,7 @@ - [ ] purchase_reserved_instances_offering - [ ] purchase_scheduled_instances - [X] reboot_instances -- [ ] register_image +- [X] register_image - [ ] register_instance_event_notification_attributes - [ ] register_transit_gateway_multicast_group_members - [ ] register_transit_gateway_multicast_group_sources @@ -3031,7 +3031,7 @@ ## ecs
-73% implemented +72% implemented - [ ] create_capacity_provider - [X] create_cluster @@ -4118,7 +4118,7 @@ ## iam
-69% implemented +70% implemented - [ ] add_client_id_to_open_id_connect_provider - [X] add_role_to_instance_profile @@ -4146,7 +4146,7 @@ - [X] delete_account_alias - [X] delete_account_password_policy - [X] delete_group -- [ ] delete_group_policy +- [X] delete_group_policy - [X] delete_instance_profile - [X] delete_login_profile - [X] delete_open_id_connect_provider @@ -4367,7 +4367,7 @@ ## iot
-27% implemented +28% implemented - [ ] accept_certificate_transfer - [ ] add_thing_to_billing_group @@ -4837,7 +4837,6 @@ - [ ] describe_configuration - [ ] describe_configuration_revision - [ ] get_bootstrap_brokers -- [ ] get_compatible_kafka_versions - [ ] list_cluster_operations - [ ] list_clusters - [ ] list_configuration_revisions @@ -4850,7 +4849,6 @@ - [ ] update_broker_count - [ ] update_broker_storage - [ ] update_cluster_configuration -- [ ] update_cluster_kafka_version - [ ] update_monitoring
@@ -4920,11 +4918,11 @@ ## kinesis-video-archived-media
-0% implemented +60% implemented -- [ ] get_clip -- [ ] get_dash_streaming_session_url -- [ ] get_hls_streaming_session_url +- [X] get_clip +- [X] get_dash_streaming_session_url +- [X] get_hls_streaming_session_url - [ ] get_media_for_fragment_list - [ ] list_fragments
@@ -5004,18 +5002,18 @@ ## kinesisvideo
-0% implemented +26% implemented - [ ] create_signaling_channel -- [ ] create_stream +- [X] create_stream - [ ] delete_signaling_channel -- [ ] delete_stream +- [X] delete_stream - [ ] describe_signaling_channel -- [ ] describe_stream -- [ ] get_data_endpoint +- [X] describe_stream +- [X] get_data_endpoint - [ ] get_signaling_channel_endpoint - [ ] list_signaling_channels -- [ ] list_streams +- [X] list_streams - [ ] list_tags_for_resource - [ ] list_tags_for_stream - [ ] tag_resource @@ -5100,7 +5098,7 @@ ## lambda
-38% implemented +44% implemented - [ ] add_layer_version_permission - [X] add_permission @@ -6100,7 +6098,7 @@ ## organizations
-47% implemented +68% implemented - [ ] accept_handshake - [X] attach_policy @@ -6114,7 +6112,7 @@ - [ ] delete_organization - [ ] delete_organizational_unit - [X] delete_policy -- [ ] deregister_delegated_administrator +- [X] deregister_delegated_administrator - [X] describe_account - [X] describe_create_account_status - [ ] describe_effective_policy @@ -6123,20 +6121,20 @@ - [X] describe_organizational_unit - [X] describe_policy - [ ] detach_policy -- [ ] disable_aws_service_access -- [ ] disable_policy_type +- [X] disable_aws_service_access +- [X] disable_policy_type - [ ] enable_all_features -- [ ] enable_aws_service_access -- [ ] enable_policy_type +- [X] enable_aws_service_access +- [X] enable_policy_type - [ ] invite_account_to_organization - [ ] leave_organization - [X] list_accounts - [X] list_accounts_for_parent -- [ ] list_aws_service_access_for_organization +- [X] list_aws_service_access_for_organization - [X] list_children - [ ] list_create_account_status -- [ ] list_delegated_administrators -- [ ] list_delegated_services_for_account +- [X] list_delegated_administrators +- [X] list_delegated_services_for_account - [ ] list_handshakes_for_account - [ ] list_handshakes_for_organization - [X] list_organizational_units_for_parent @@ -6147,7 +6145,7 @@ - [X] list_tags_for_resource - [X] list_targets_for_policy - [X] move_account -- [ ] register_delegated_administrator +- [X] register_delegated_administrator - [ ] remove_account_from_organization - [X] tag_resource - [X] untag_resource @@ -6545,21 +6543,21 @@ ## ram
-0% implemented +20% implemented - [ ] accept_resource_share_invitation - [ ] associate_resource_share - [ ] associate_resource_share_permission -- [ ] create_resource_share -- [ ] delete_resource_share +- [X] create_resource_share +- [X] delete_resource_share - [ ] disassociate_resource_share - [ ] disassociate_resource_share_permission -- [ ] enable_sharing_with_aws_organization +- [X] enable_sharing_with_aws_organization - [ ] get_permission - [ ] get_resource_policies - [ ] get_resource_share_associations - [ ] get_resource_share_invitations -- [ ] get_resource_shares +- [X] get_resource_shares - [ ] list_pending_invitation_resources - [ ] list_permissions - [ ] list_principals @@ -6570,7 +6568,7 @@ - [ ] reject_resource_share_invitation - [ ] tag_resource - [ ] untag_resource -- [ ] update_resource_share +- [X] update_resource_share
## rds @@ -7074,7 +7072,7 @@ ## s3
-25% implemented +26% implemented - [ ] abort_multipart_upload - [ ] complete_multipart_upload @@ -7093,7 +7091,7 @@ - [X] delete_bucket_tagging - [ ] delete_bucket_website - [X] delete_object -- [x] delete_object_tagging +- [X] delete_object_tagging - [ ] delete_objects - [ ] delete_public_access_block - [ ] get_bucket_accelerate_configuration @@ -7193,7 +7191,7 @@ ## sagemaker
-0% implemented +12% implemented - [ ] add_tags - [ ] associate_trial_component @@ -7203,22 +7201,22 @@ - [ ] create_code_repository - [ ] create_compilation_job - [ ] create_domain -- [ ] create_endpoint -- [ ] create_endpoint_config +- [X] create_endpoint +- [X] create_endpoint_config - [ ] create_experiment - [ ] create_flow_definition - [ ] create_human_task_ui - [ ] create_hyper_parameter_tuning_job - [ ] create_labeling_job -- [ ] create_model +- [X] create_model - [ ] create_model_package - [ ] create_monitoring_schedule -- [ ] create_notebook_instance +- [X] create_notebook_instance - [ ] create_notebook_instance_lifecycle_config - [ ] create_presigned_domain_url - [ ] create_presigned_notebook_instance_url - [ ] create_processing_job -- [ ] create_training_job +- [X] create_training_job - [ ] create_transform_job - [ ] create_trial - [ ] create_trial_component @@ -7228,14 +7226,14 @@ - [ ] delete_app - [ ] delete_code_repository - [ ] delete_domain -- [ ] delete_endpoint -- [ ] delete_endpoint_config +- [X] delete_endpoint +- [X] delete_endpoint_config - [ ] delete_experiment - [ ] delete_flow_definition -- [ ] delete_model +- [X] delete_model - [ ] delete_model_package - [ ] delete_monitoring_schedule -- [ ] delete_notebook_instance +- [X] delete_notebook_instance - [ ] delete_notebook_instance_lifecycle_config - [ ] delete_tags - [ ] delete_trial @@ -7248,21 +7246,21 @@ - [ ] describe_code_repository - [ ] describe_compilation_job - [ ] describe_domain -- [ ] describe_endpoint -- [ ] describe_endpoint_config +- [X] describe_endpoint +- [X] describe_endpoint_config - [ ] describe_experiment - [ ] describe_flow_definition - [ ] describe_human_task_ui - [ ] describe_hyper_parameter_tuning_job - [ ] describe_labeling_job -- [ ] describe_model +- [X] describe_model - [ ] describe_model_package - [ ] describe_monitoring_schedule - [ ] describe_notebook_instance - [ ] describe_notebook_instance_lifecycle_config - [ ] describe_processing_job - [ ] describe_subscribed_workteam -- [ ] describe_training_job +- [X] describe_training_job - [ ] describe_transform_job - [ ] describe_trial - [ ] describe_trial_component @@ -7287,7 +7285,7 @@ - [ ] list_labeling_jobs - [ ] list_labeling_jobs_for_workteam - [ ] list_model_packages -- [ ] list_models +- [X] list_models - [ ] list_monitoring_executions - [ ] list_monitoring_schedules - [ ] list_notebook_instance_lifecycle_configs @@ -7305,13 +7303,13 @@ - [ ] render_ui_template - [ ] search - [ ] start_monitoring_schedule -- [ ] start_notebook_instance +- [X] start_notebook_instance - [ ] stop_auto_ml_job - [ ] stop_compilation_job - [ ] stop_hyper_parameter_tuning_job - [ ] stop_labeling_job - [ ] stop_monitoring_schedule -- [ ] stop_notebook_instance +- [X] stop_notebook_instance - [ ] stop_processing_job - [ ] stop_training_job - [ ] stop_transform_job @@ -7645,7 +7643,7 @@ ## ses
-21% implemented +23% implemented - [ ] clone_receipt_rule_set - [X] create_configuration_set @@ -7653,8 +7651,8 @@ - [ ] create_configuration_set_tracking_options - [ ] create_custom_verification_email_template - [ ] create_receipt_filter -- [ ] create_receipt_rule -- [ ] create_receipt_rule_set +- [X] create_receipt_rule +- [X] create_receipt_rule_set - [ ] create_template - [ ] delete_configuration_set - [ ] delete_configuration_set_event_destination @@ -7959,7 +7957,7 @@ ## ssm
-12% implemented +18% implemented - [X] add_tags_to_resource - [ ] cancel_command @@ -7967,14 +7965,14 @@ - [ ] create_activation - [ ] create_association - [ ] create_association_batch -- [ ] create_document +- [X] create_document - [ ] create_maintenance_window - [ ] create_ops_item - [ ] create_patch_baseline - [ ] create_resource_data_sync - [ ] delete_activation - [ ] delete_association -- [ ] delete_document +- [X] delete_document - [ ] delete_inventory - [ ] delete_maintenance_window - [X] delete_parameter @@ -7992,7 +7990,7 @@ - [ ] describe_automation_executions - [ ] describe_automation_step_executions - [ ] describe_available_patches -- [ ] describe_document +- [X] describe_document - [ ] describe_document_permission - [ ] describe_effective_instance_associations - [ ] describe_effective_patches_for_patch_baseline @@ -8023,7 +8021,7 @@ - [ ] get_connection_status - [ ] get_default_patch_baseline - [ ] get_deployable_patch_snapshot_for_instance -- [ ] get_document +- [X] get_document - [ ] get_inventory - [ ] get_inventory_schema - [ ] get_maintenance_window @@ -8048,7 +8046,7 @@ - [ ] list_compliance_items - [ ] list_compliance_summaries - [ ] list_document_versions -- [ ] list_documents +- [X] list_documents - [ ] list_inventory_entries - [ ] list_resource_compliance_summaries - [ ] list_resource_data_sync @@ -8073,8 +8071,8 @@ - [ ] terminate_session - [ ] update_association - [ ] update_association_status -- [ ] update_document -- [ ] update_document_default_version +- [X] update_document +- [X] update_document_default_version - [ ] update_maintenance_window - [ ] update_maintenance_window_target - [ ] update_maintenance_window_task @@ -8706,7 +8704,6 @@ - [ ] delete_group - [ ] delete_mailbox_permissions - [ ] delete_resource -- [ ] delete_retention_policy - [ ] delete_user - [ ] deregister_from_work_mail - [ ] describe_group @@ -8716,7 +8713,6 @@ - [ ] disassociate_delegate_from_resource - [ ] disassociate_member_from_group - [ ] get_access_control_effect -- [ ] get_default_retention_policy - [ ] get_mailbox_details - [ ] list_access_control_rules - [ ] list_aliases @@ -8730,7 +8726,6 @@ - [ ] list_users - [ ] put_access_control_rule - [ ] put_mailbox_permissions -- [ ] put_retention_policy - [ ] register_to_work_mail - [ ] reset_password - [ ] tag_resource
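
The behaviour added in this series is easiest to sanity-check from the client side. As a usage sketch for the `describe_endpoint` support added in PATCH 17 above (the region, function name, and assertions are illustrative, not part of the patch):

```python
import boto3
from moto import mock_iot


@mock_iot
def check_describe_endpoint():
    client = boto3.client("iot", region_name="eu-west-1")
    # FakeEndpoint builds the address from a random 14-character
    # identifier plus the real service's naming scheme.
    endpoint = client.describe_endpoint(endpointType="iot:Data-ATS")
    assert "ats.iot.eu-west-1.amazonaws.com" in endpoint["endpointAddress"]

    # Unrecognized endpoint types are rejected with InvalidRequestException.
    try:
        client.describe_endpoint(endpointType="iot:Nonsense")
        raise AssertionError("expected InvalidRequestException")
    except client.exceptions.InvalidRequestException:
        pass


check_describe_endpoint()
```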
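PATCH 17 also adds `register_certificate_without_ca`, which stores a certificate without any CA validation. A minimal sketch, assuming the mock accepts an arbitrary PEM body (the backend never parses it; the placeholder string below is not from the patch):

```python
import boto3
from moto import mock_iot

# Hypothetical placeholder; any PEM-shaped string should do for the mock.
PLACEHOLDER_PEM = (
    "-----BEGIN CERTIFICATE-----\nnot-a-real-certificate\n-----END CERTIFICATE-----"
)


@mock_iot
def check_register_certificate_without_ca():
    client = boto3.client("iot", region_name="us-east-1")
    cert = client.register_certificate_without_ca(
        certificatePem=PLACEHOLDER_PEM, status="INACTIVE"
    )
    assert cert["certificateId"]
    assert cert["certificateArn"].startswith("arn:aws:iot:")
    # The certificate then shows up in list_certificates like any other.
    certs = client.list_certificates()["certificates"]
    assert any(c["certificateId"] == cert["certificateId"] for c in certs)


check_register_certificate_without_ca()
```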
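For Step Functions, PATCH 18 threads the caller's `input` through to `describe_execution`, and PATCH 19 rejects anything that is not valid JSON. A sketch of both paths (the machine name, role ARN, and error-code check are illustrative assumptions; moto validates the role ARN only by shape):

```python
import json

import boto3
from botocore.exceptions import ClientError
from moto import mock_stepfunctions


@mock_stepfunctions
def check_execution_input():
    client = boto3.client("stepfunctions", region_name="us-east-1")
    sm = client.create_state_machine(
        name="demo",
        definition="{}",  # the definition body is not validated by the mock
        roleArn="arn:aws:iam::123456789012:role/demo-role",
    )
    execution = client.start_execution(
        stateMachineArn=sm["stateMachineArn"],
        input=json.dumps({"input_key": "input_value"}),
    )
    # The input round-trips through describe_execution unchanged.
    described = client.describe_execution(executionArn=execution["executionArn"])
    assert json.loads(described["input"]) == {"input_key": "input_value"}

    # Malformed JSON is rejected up front by _validate_execution_input.
    try:
        client.start_execution(stateMachineArn=sm["stateMachineArn"], input="{")
        raise AssertionError("expected InvalidExecutionInput")
    except ClientError as err:
        assert err.response["Error"]["Code"] == "InvalidExecutionInput"


check_execution_input()
```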
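Finally, the archived-media backend from PATCH 20 piggybacks on the `kinesisvideo` mock: you first ask the stream for its data endpoint, then point a `kinesis-video-archived-media` client at that endpoint, mirroring the real two-step flow. A sketch of the HLS case (stream name and region are arbitrary; the asserted path comes from `api_to_relative_path` in the models above):

```python
import boto3
from moto import mock_kinesisvideo, mock_kinesisvideoarchivedmedia


@mock_kinesisvideo
@mock_kinesisvideoarchivedmedia
def check_hls_streaming_session_url():
    region = "ap-northeast-1"
    kvs = boto3.client("kinesisvideo", region_name=region)
    kvs.create_stream(StreamName="my-stream")
    data_endpoint = kvs.get_data_endpoint(
        StreamName="my-stream", APIName="GET_HLS_STREAMING_SESSION_URL"
    )["DataEndpoint"]

    media = boto3.client(
        "kinesis-video-archived-media",
        region_name=region,
        endpoint_url=data_endpoint,
    )
    res = media.get_hls_streaming_session_url(StreamName="my-stream")
    # The mocked URL is the data endpoint plus a fixed playlist path
    # and a random session token.
    assert (
        "/hls/v1/getHLSMasterPlaylist.m3u8?SessionToken="
        in res["HLSStreamingSessionURL"]
    )


check_hls_streaming_session_url()
```

As the Makefile change in PATCH 20 notes, this flow only works via the decorators; against a standalone moto_server the data-endpoint indirection makes the mock hard to reach, which is why those tests are excluded in server mode.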