diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index f039425f6..f99d86df3 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -2878,15 +2878,15 @@ - [ ] test_failover ## elasticbeanstalk -0% implemented +13% implemented - [ ] abort_environment_update - [ ] apply_environment_managed_action - [ ] check_dns_availability - [ ] compose_environments -- [ ] create_application +- [X] create_application - [ ] create_application_version - [ ] create_configuration_template -- [ ] create_environment +- [X] create_environment - [ ] create_platform_version - [ ] create_storage_location - [ ] delete_application @@ -2903,13 +2903,13 @@ - [ ] describe_environment_managed_action_history - [ ] describe_environment_managed_actions - [ ] describe_environment_resources -- [ ] describe_environments +- [X] describe_environments - [ ] describe_events - [ ] describe_instances_health - [ ] describe_platform_version -- [ ] list_available_solution_stacks +- [X] list_available_solution_stacks - [ ] list_platform_versions -- [ ] list_tags_for_resource +- [X] list_tags_for_resource - [ ] rebuild_environment - [ ] request_environment_info - [ ] restart_app_server @@ -2921,7 +2921,7 @@ - [ ] update_application_version - [ ] update_configuration_template - [ ] update_environment -- [ ] update_tags_for_resource +- [X] update_tags_for_resource - [ ] validate_configuration_settings ## elastictranscoder diff --git a/MANIFEST.in b/MANIFEST.in index bd7eb968a..51d1b223c 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -3,5 +3,6 @@ include requirements.txt requirements-dev.txt tox.ini include moto/ec2/resources/instance_types.json include moto/ec2/resources/amis.json include moto/cognitoidp/resources/*.json +include moto/dynamodb2/parsing/reserved_keywords.txt recursive-include moto/templates * recursive-include tests * diff --git a/moto/__init__.py b/moto/__init__.py index 44b25f41e..4c9d4753c 100644 --- a/moto/__init__.py +++ b/moto/__init__.py @@ -21,6 +21,7 @@ 
from .datasync import mock_datasync # noqa from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated # noqa from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated # noqa from .dynamodbstreams import mock_dynamodbstreams # noqa +from .elasticbeanstalk import mock_elasticbeanstalk # noqa from .ec2 import mock_ec2, mock_ec2_deprecated # noqa from .ec2_instance_connect import mock_ec2_instance_connect # noqa from .ecr import mock_ecr, mock_ecr_deprecated # noqa diff --git a/moto/apigateway/models.py b/moto/apigateway/models.py index e5e5e3bfd..e011af601 100644 --- a/moto/apigateway/models.py +++ b/moto/apigateway/models.py @@ -461,6 +461,7 @@ class RestAPI(BaseModel): self.description = description self.create_date = int(time.time()) self.api_key_source = kwargs.get("api_key_source") or "HEADER" + self.policy = kwargs.get("policy") or None self.endpoint_configuration = kwargs.get("endpoint_configuration") or { "types": ["EDGE"] } @@ -485,6 +486,7 @@ class RestAPI(BaseModel): "apiKeySource": self.api_key_source, "endpointConfiguration": self.endpoint_configuration, "tags": self.tags, + "policy": self.policy, } def add_child(self, path, parent_id=None): @@ -713,6 +715,7 @@ class APIGatewayBackend(BaseBackend): api_key_source=None, endpoint_configuration=None, tags=None, + policy=None, ): api_id = create_id() rest_api = RestAPI( @@ -723,6 +726,7 @@ class APIGatewayBackend(BaseBackend): api_key_source=api_key_source, endpoint_configuration=endpoint_configuration, tags=tags, + policy=policy, ) self.apis[api_id] = rest_api return rest_api diff --git a/moto/apigateway/responses.py b/moto/apigateway/responses.py index 822d4c0ce..a3c41a6d4 100644 --- a/moto/apigateway/responses.py +++ b/moto/apigateway/responses.py @@ -59,6 +59,7 @@ class APIGatewayResponse(BaseResponse): api_key_source = self._get_param("apiKeySource") endpoint_configuration = self._get_param("endpointConfiguration") tags = self._get_param("tags") + policy = self._get_param("policy") # Param 
validation if api_key_source and api_key_source not in API_KEY_SOURCES: @@ -94,6 +95,7 @@ class APIGatewayResponse(BaseResponse): api_key_source=api_key_source, endpoint_configuration=endpoint_configuration, tags=tags, + policy=policy, ) return 200, {}, json.dumps(rest_api.to_dict()) diff --git a/moto/awslambda/models.py b/moto/awslambda/models.py index 9cdf2397c..589a790ae 100644 --- a/moto/awslambda/models.py +++ b/moto/awslambda/models.py @@ -1006,11 +1006,11 @@ class LambdaBackend(BaseBackend): return True return False - def add_policy_statement(self, function_name, raw): + def add_permission(self, function_name, raw): fn = self.get_function(function_name) fn.policy.add_statement(raw) - def del_policy_statement(self, function_name, sid, revision=""): + def remove_permission(self, function_name, sid, revision=""): fn = self.get_function(function_name) fn.policy.del_statement(sid, revision) diff --git a/moto/awslambda/responses.py b/moto/awslambda/responses.py index 28b0e74fd..d81bd55d9 100644 --- a/moto/awslambda/responses.py +++ b/moto/awslambda/responses.py @@ -146,7 +146,7 @@ class LambdaResponse(BaseResponse): function_name = path.split("/")[-2] if self.lambda_backend.get_function(function_name): statement = self.body - self.lambda_backend.add_policy_statement(function_name, statement) + self.lambda_backend.add_permission(function_name, statement) return 200, {}, json.dumps({"Statement": statement}) else: return 404, {}, "{}" @@ -166,9 +166,7 @@ class LambdaResponse(BaseResponse): statement_id = path.split("/")[-1].split("?")[0] revision = querystring.get("RevisionId", "") if self.lambda_backend.get_function(function_name): - self.lambda_backend.del_policy_statement( - function_name, statement_id, revision - ) + self.lambda_backend.remove_permission(function_name, statement_id, revision) return 204, {}, "{}" else: return 404, {}, "{}" diff --git a/moto/backends.py b/moto/backends.py index a358b8fd2..a48df74a4 100644 --- a/moto/backends.py +++ 
b/moto/backends.py @@ -23,6 +23,7 @@ from moto.ec2 import ec2_backends from moto.ec2_instance_connect import ec2_instance_connect_backends from moto.ecr import ecr_backends from moto.ecs import ecs_backends +from moto.elasticbeanstalk import eb_backends from moto.elb import elb_backends from moto.elbv2 import elbv2_backends from moto.emr import emr_backends @@ -77,6 +78,7 @@ BACKENDS = { "ec2_instance_connect": ec2_instance_connect_backends, "ecr": ecr_backends, "ecs": ecs_backends, + "elasticbeanstalk": eb_backends, "elb": elb_backends, "elbv2": elbv2_backends, "events": events_backends, diff --git a/moto/cloudformation/models.py b/moto/cloudformation/models.py index 8136e353d..281ab5e19 100644 --- a/moto/cloudformation/models.py +++ b/moto/cloudformation/models.py @@ -239,8 +239,11 @@ class FakeStack(BaseModel): self.cross_stack_resources = cross_stack_resources or {} self.resource_map = self._create_resource_map() self.output_map = self._create_output_map() - self._add_stack_event("CREATE_COMPLETE") - self.status = "CREATE_COMPLETE" + if create_change_set: + self.status = "REVIEW_IN_PROGRESS" + else: + self.create_resources() + self._add_stack_event("CREATE_COMPLETE") self.creation_time = datetime.utcnow() def _create_resource_map(self): @@ -253,7 +256,7 @@ class FakeStack(BaseModel): self.template_dict, self.cross_stack_resources, ) - resource_map.create() + resource_map.load() return resource_map def _create_output_map(self): @@ -326,6 +329,10 @@ class FakeStack(BaseModel): def exports(self): return self.output_map.exports + def create_resources(self): + self.resource_map.create() + self.status = "CREATE_COMPLETE" + def update(self, template, role_arn=None, parameters=None, tags=None): self._add_stack_event( "UPDATE_IN_PROGRESS", resource_status_reason="User Initiated" @@ -640,6 +647,7 @@ class CloudFormationBackend(BaseBackend): else: stack._add_stack_event("UPDATE_IN_PROGRESS") stack._add_stack_event("UPDATE_COMPLETE") + stack.create_resources() return True 
def describe_stacks(self, name_or_stack_id): diff --git a/moto/cloudformation/parsing.py b/moto/cloudformation/parsing.py index cc4daf9ce..a32ff6736 100644 --- a/moto/cloudformation/parsing.py +++ b/moto/cloudformation/parsing.py @@ -531,14 +531,16 @@ class ResourceMap(collections_abc.Mapping): for condition_name in self.lazy_condition_map: self.lazy_condition_map[condition_name] - def create(self): + def load(self): self.load_mapping() self.transform_mapping() self.load_parameters() self.load_conditions() + def create(self): # Since this is a lazy map, to create every object we just need to # iterate through self. + # Assumes that self.load() has been called before self.tags.update( { "aws:cloudformation:stack-name": self.get("AWS::StackName"), diff --git a/moto/cloudwatch/models.py b/moto/cloudwatch/models.py index bc941809b..19b1efa5f 100644 --- a/moto/cloudwatch/models.py +++ b/moto/cloudwatch/models.py @@ -22,6 +22,14 @@ class Dimension(object): self.name = name self.value = value + def __eq__(self, item): + if isinstance(item, Dimension): + return self.name == item.name and self.value == item.value + return False + + def __ne__(self, item): # Only needed on Py2; Py3 defines it implicitly + return self != item + def daterange(start, stop, step=timedelta(days=1), inclusive=False): """ @@ -124,6 +132,17 @@ class MetricDatum(BaseModel): Dimension(dimension["Name"], dimension["Value"]) for dimension in dimensions ] + def filter(self, namespace, name, dimensions): + if namespace and namespace != self.namespace: + return False + if name and name != self.name: + return False + if dimensions and any( + Dimension(d["Name"], d["Value"]) not in self.dimensions for d in dimensions + ): + return False + return True + class Dashboard(BaseModel): def __init__(self, name, body): @@ -202,6 +221,15 @@ class CloudWatchBackend(BaseBackend): self.metric_data = [] self.paged_metric_data = {} + @property + # Retrieve a list of all OOTB metrics that are provided by metrics providers 
+ # Computed on the fly + def aws_metric_data(self): + md = [] + for name, service in metric_providers.items(): + md.extend(service.get_cloudwatch_metrics()) + return md + def put_metric_alarm( self, name, @@ -295,6 +323,43 @@ class CloudWatchBackend(BaseBackend): ) ) + def get_metric_data(self, queries, start_time, end_time): + period_data = [ + md for md in self.metric_data if start_time <= md.timestamp <= end_time + ] + results = [] + for query in queries: + query_ns = query["metric_stat._metric._namespace"] + query_name = query["metric_stat._metric._metric_name"] + query_data = [ + md + for md in period_data + if md.namespace == query_ns and md.name == query_name + ] + metric_values = [m.value for m in query_data] + result_vals = [] + stat = query["metric_stat._stat"] + if len(metric_values) > 0: + if stat == "Average": + result_vals.append(sum(metric_values) / len(metric_values)) + elif stat == "Minimum": + result_vals.append(min(metric_values)) + elif stat == "Maximum": + result_vals.append(max(metric_values)) + elif stat == "Sum": + result_vals.append(sum(metric_values)) + + label = query["metric_stat._metric._metric_name"] + " " + stat + results.append( + { + "id": query["id"], + "label": label, + "vals": result_vals, + "timestamps": [datetime.now() for _ in result_vals], + } + ) + return results + def get_metric_statistics( self, namespace, metric_name, start_time, end_time, period, stats ): @@ -334,7 +399,7 @@ class CloudWatchBackend(BaseBackend): return data def get_all_metrics(self): - return self.metric_data + return self.metric_data + self.aws_metric_data def put_dashboard(self, name, body): self.dashboards[name] = Dashboard(name, body) @@ -386,7 +451,7 @@ class CloudWatchBackend(BaseBackend): self.alarms[alarm_name].update_state(reason, reason_data, state_value) - def list_metrics(self, next_token, namespace, metric_name): + def list_metrics(self, next_token, namespace, metric_name, dimensions): if next_token: if next_token not in 
self.paged_metric_data: raise RESTError( @@ -397,15 +462,16 @@ class CloudWatchBackend(BaseBackend): del self.paged_metric_data[next_token] # Cant reuse same token twice return self._get_paginated(metrics) else: - metrics = self.get_filtered_metrics(metric_name, namespace) + metrics = self.get_filtered_metrics(metric_name, namespace, dimensions) return self._get_paginated(metrics) - def get_filtered_metrics(self, metric_name, namespace): + def get_filtered_metrics(self, metric_name, namespace, dimensions): metrics = self.get_all_metrics() - if namespace: - metrics = [md for md in metrics if md.namespace == namespace] - if metric_name: - metrics = [md for md in metrics if md.name == metric_name] + metrics = [ + md + for md in metrics + if md.filter(namespace=namespace, name=metric_name, dimensions=dimensions) + ] return metrics def _get_paginated(self, metrics): @@ -445,3 +511,8 @@ for region in Session().get_available_regions( cloudwatch_backends[region] = CloudWatchBackend() for region in Session().get_available_regions("cloudwatch", partition_name="aws-cn"): cloudwatch_backends[region] = CloudWatchBackend() + +# List of services that provide OOTB CW metrics +# See the S3Backend constructor for an example +# TODO: We might have to separate this out per region for non-global services +metric_providers = {} diff --git a/moto/cloudwatch/responses.py b/moto/cloudwatch/responses.py index 7993c9f06..56ba68bb9 100644 --- a/moto/cloudwatch/responses.py +++ b/moto/cloudwatch/responses.py @@ -92,6 +92,18 @@ class CloudWatchResponse(BaseResponse): template = self.response_template(PUT_METRIC_DATA_TEMPLATE) return template.render() + @amzn_request_id + def get_metric_data(self): + start = dtparse(self._get_param("StartTime")) + end = dtparse(self._get_param("EndTime")) + queries = self._get_list_prefix("MetricDataQueries.member") + results = self.cloudwatch_backend.get_metric_data( + start_time=start, end_time=end, queries=queries + ) + + template = 
self.response_template(GET_METRIC_DATA_TEMPLATE) + return template.render(results=results) + @amzn_request_id def get_metric_statistics(self): namespace = self._get_param("Namespace") @@ -124,9 +136,10 @@ class CloudWatchResponse(BaseResponse): def list_metrics(self): namespace = self._get_param("Namespace") metric_name = self._get_param("MetricName") + dimensions = self._get_multi_param("Dimensions.member") next_token = self._get_param("NextToken") next_token, metrics = self.cloudwatch_backend.list_metrics( - next_token, namespace, metric_name + next_token, namespace, metric_name, dimensions ) template = self.response_template(LIST_METRICS_TEMPLATE) return template.render(metrics=metrics, next_token=next_token) @@ -285,6 +298,35 @@ PUT_METRIC_DATA_TEMPLATE = """ + + + {{ request_id }} + + + + + {% for result in results %} + + {{ result.id }} + + Complete + + {% for val in result.timestamps %} + {{ val }} + {% endfor %} + + + {% for val in result.vals %} + {{ val }} + {% endfor %} + + + {% endfor %} + + +""" + GET_METRIC_STATISTICS_TEMPLATE = """ diff --git a/moto/cognitoidentity/utils.py b/moto/cognitoidentity/utils.py index 6143d5121..54016ad17 100644 --- a/moto/cognitoidentity/utils.py +++ b/moto/cognitoidentity/utils.py @@ -1,5 +1,5 @@ -from moto.core.utils import get_random_hex +from uuid import uuid4 def get_random_identity_id(region): - return "{0}:{1}".format(region, get_random_hex(length=19)) + return "{0}:{1}".format(region, uuid4()) diff --git a/moto/core/utils.py b/moto/core/utils.py index f61b040e0..dce9f675c 100644 --- a/moto/core/utils.py +++ b/moto/core/utils.py @@ -328,3 +328,25 @@ def py2_strip_unicode_keys(blob): blob = new_set return blob + + +def tags_from_query_string( + querystring_dict, prefix="Tag", key_suffix="Key", value_suffix="Value" +): + response_values = {} + for key, value in querystring_dict.items(): + if key.startswith(prefix) and key.endswith(key_suffix): + tag_index = key.replace(prefix + ".", "").replace("." 
+ key_suffix, "") + tag_key = querystring_dict.get( + "{prefix}.{index}.{key_suffix}".format( + prefix=prefix, index=tag_index, key_suffix=key_suffix, + ) + )[0] + tag_value_key = "{prefix}.{index}.{value_suffix}".format( + prefix=prefix, index=tag_index, value_suffix=value_suffix, + ) + if tag_value_key in querystring_dict: + response_values[tag_key] = querystring_dict.get(tag_value_key)[0] + else: + response_values[tag_key] = None + return response_values diff --git a/moto/dynamodb2/exceptions.py b/moto/dynamodb2/exceptions.py index 5dd87ef6b..18e498a90 100644 --- a/moto/dynamodb2/exceptions.py +++ b/moto/dynamodb2/exceptions.py @@ -39,6 +39,17 @@ class AttributeDoesNotExist(MockValidationException): super(AttributeDoesNotExist, self).__init__(self.attr_does_not_exist_msg) +class ProvidedKeyDoesNotExist(MockValidationException): + provided_key_does_not_exist_msg = ( + "The provided key element does not match the schema" + ) + + def __init__(self): + super(ProvidedKeyDoesNotExist, self).__init__( + self.provided_key_does_not_exist_msg + ) + + class ExpressionAttributeNameNotDefined(InvalidUpdateExpression): name_not_defined_msg = "An expression attribute name used in the document path is not defined; attribute name: {n}" @@ -131,3 +142,10 @@ class IncorrectOperandType(InvalidUpdateExpression): super(IncorrectOperandType, self).__init__( self.inv_operand_msg.format(f=operator_or_function, t=operand_type) ) + + +class IncorrectDataType(MockValidationException): + inc_data_type_msg = "An operand in the update expression has an incorrect data type" + + def __init__(self): + super(IncorrectDataType, self).__init__(self.inc_data_type_msg) diff --git a/moto/dynamodb2/models/__init__.py b/moto/dynamodb2/models/__init__.py index 00825e06a..ea16f456f 100644 --- a/moto/dynamodb2/models/__init__.py +++ b/moto/dynamodb2/models/__init__.py @@ -8,7 +8,6 @@ import re import uuid from boto3 import Session -from botocore.exceptions import ParamValidationError from moto.compat 
import OrderedDict from moto.core import BaseBackend, BaseModel from moto.core.utils import unix_time @@ -20,8 +19,9 @@ from moto.dynamodb2.exceptions import ( ItemSizeTooLarge, ItemSizeToUpdateTooLarge, ) -from moto.dynamodb2.models.utilities import bytesize, attribute_is_list +from moto.dynamodb2.models.utilities import bytesize from moto.dynamodb2.models.dynamo_type import DynamoType +from moto.dynamodb2.parsing.executors import UpdateExpressionExecutor from moto.dynamodb2.parsing.expressions import UpdateExpressionParser from moto.dynamodb2.parsing.validators import UpdateExpressionValidator @@ -71,9 +71,23 @@ class Item(BaseModel): for key, value in attrs.items(): self.attrs[key] = DynamoType(value) + def __eq__(self, other): + return all( + [ + self.hash_key == other.hash_key, + self.hash_key_type == other.hash_key_type, + self.range_key == other.range_key, + self.range_key_type == other.range_key_type, + self.attrs == other.attrs, + ] + ) + def __repr__(self): return "Item: {0}".format(self.to_json()) + def size(self): + return sum(bytesize(key) + value.size() for key, value in self.attrs.items()) + def to_json(self): attributes = {} for attribute_key, attribute in self.attrs.items(): @@ -91,192 +105,6 @@ class Item(BaseModel): included = self.attrs return {"Item": included} - def update( - self, update_expression, expression_attribute_names, expression_attribute_values - ): - # Update subexpressions are identifiable by the operator keyword, so split on that and - # get rid of the empty leading string. - parts = [ - p - for p in re.split( - r"\b(SET|REMOVE|ADD|DELETE)\b", update_expression, flags=re.I - ) - if p - ] - # make sure that we correctly found only operator/value pairs - assert ( - len(parts) % 2 == 0 - ), "Mismatched operators and values in update expression: '{}'".format( - update_expression - ) - for action, valstr in zip(parts[:-1:2], parts[1::2]): - action = action.upper() - - # "Should" retain arguments in side (...) 
- values = re.split(r",(?![^(]*\))", valstr) - for value in values: - # A Real value - value = value.lstrip(":").rstrip(",").strip() - for k, v in expression_attribute_names.items(): - value = re.sub(r"{0}\b".format(k), v, value) - - if action == "REMOVE": - key = value - attr, list_index = attribute_is_list(key.split(".")[0]) - if "." not in key: - if list_index: - new_list = DynamoType(self.attrs[attr]) - new_list.delete(None, list_index) - self.attrs[attr] = new_list - else: - self.attrs.pop(value, None) - else: - # Handle nested dict updates - self.attrs[attr].delete(".".join(key.split(".")[1:])) - elif action == "SET": - key, value = value.split("=", 1) - key = key.strip() - value = value.strip() - - # check whether key is a list - attr, list_index = attribute_is_list(key.split(".")[0]) - # If value not exists, changes value to a default if needed, else its the same as it was - value = self._get_default(value) - # If operation == list_append, get the original value and append it - value = self._get_appended_list(value, expression_attribute_values) - - if type(value) != DynamoType: - if value in expression_attribute_values: - dyn_value = DynamoType(expression_attribute_values[value]) - else: - dyn_value = DynamoType({"S": value}) - else: - dyn_value = value - - if "." 
in key and attr not in self.attrs: - raise ValueError # Setting nested attr not allowed if first attr does not exist yet - elif attr not in self.attrs: - try: - self.attrs[attr] = dyn_value # set new top-level attribute - except ItemSizeTooLarge: - raise ItemSizeToUpdateTooLarge() - else: - self.attrs[attr].set( - ".".join(key.split(".")[1:]), dyn_value, list_index - ) # set value recursively - - elif action == "ADD": - key, value = value.split(" ", 1) - key = key.strip() - value_str = value.strip() - if value_str in expression_attribute_values: - dyn_value = DynamoType(expression_attribute_values[value]) - else: - raise TypeError - - # Handle adding numbers - value gets added to existing value, - # or added to 0 if it doesn't exist yet - if dyn_value.is_number(): - existing = self.attrs.get(key, DynamoType({"N": "0"})) - if not existing.same_type(dyn_value): - raise TypeError() - self.attrs[key] = DynamoType( - { - "N": str( - decimal.Decimal(existing.value) - + decimal.Decimal(dyn_value.value) - ) - } - ) - - # Handle adding sets - value is added to the set, or set is - # created with only this value if it doesn't exist yet - # New value must be of same set type as previous value - elif dyn_value.is_set(): - key_head = key.split(".")[0] - key_tail = ".".join(key.split(".")[1:]) - if key_head not in self.attrs: - self.attrs[key_head] = DynamoType({dyn_value.type: {}}) - existing = self.attrs.get(key_head) - existing = existing.get(key_tail) - if existing.value and not existing.same_type(dyn_value): - raise TypeError() - new_set = set(existing.value or []).union(dyn_value.value) - existing.set( - key=None, - new_value=DynamoType({dyn_value.type: list(new_set)}), - ) - else: # Number and Sets are the only supported types for ADD - raise TypeError - - elif action == "DELETE": - key, value = value.split(" ", 1) - key = key.strip() - value_str = value.strip() - if value_str in expression_attribute_values: - dyn_value = DynamoType(expression_attribute_values[value]) - 
else: - raise TypeError - - if not dyn_value.is_set(): - raise TypeError - key_head = key.split(".")[0] - key_tail = ".".join(key.split(".")[1:]) - existing = self.attrs.get(key_head) - existing = existing.get(key_tail) - if existing: - if not existing.same_type(dyn_value): - raise TypeError - new_set = set(existing.value).difference(dyn_value.value) - existing.set( - key=None, - new_value=DynamoType({existing.type: list(new_set)}), - ) - else: - raise NotImplementedError( - "{} update action not yet supported".format(action) - ) - - def _get_appended_list(self, value, expression_attribute_values): - if type(value) != DynamoType: - list_append_re = re.match("list_append\\((.+),(.+)\\)", value) - if list_append_re: - new_value = expression_attribute_values[list_append_re.group(2).strip()] - old_list_key = list_append_re.group(1) - # old_key could be a function itself (if_not_exists) - if old_list_key.startswith("if_not_exists"): - old_list = self._get_default(old_list_key) - if not isinstance(old_list, DynamoType): - old_list = DynamoType(expression_attribute_values[old_list]) - else: - old_list = self.attrs[old_list_key.split(".")[0]] - if "." 
in old_list_key: - # Value is nested inside a map - find the appropriate child attr - old_list = old_list.child_attr( - ".".join(old_list_key.split(".")[1:]) - ) - if not old_list.is_list(): - raise ParamValidationError - old_list.value.extend([DynamoType(v) for v in new_value["L"]]) - value = old_list - return value - - def _get_default(self, value): - if value.startswith("if_not_exists"): - # Function signature - match = re.match( - r".*if_not_exists\s*\((?P.+),\s*(?P.+)\).*", value - ) - if not match: - raise TypeError - - path, value = match.groups() - - # If it already exists, get its value so we dont overwrite it - if path in self.attrs: - value = self.attrs[path] - return value - def update_with_attribute_updates(self, attribute_updates): for attribute_name, update_action in attribute_updates.items(): action = update_action["Action"] @@ -921,6 +749,14 @@ class Table(BaseModel): break last_evaluated_key = None + size_limit = 1000000 # DynamoDB has a 1MB size limit + item_size = sum(res.size() for res in results) + if item_size > size_limit: + item_size = idx = 0 + while item_size + results[idx].size() < size_limit: + item_size += results[idx].size() + idx += 1 + limit = min(limit, idx) if limit else idx if limit and len(results) > limit: results = results[:limit] last_evaluated_key = {self.hash_key_attr: results[-1].hash_key} @@ -1198,9 +1034,9 @@ class DynamoDBBackend(BaseBackend): table_name, key, update_expression, - attribute_updates, expression_attribute_names, expression_attribute_values, + attribute_updates=None, expected=None, condition_expression=None, ): @@ -1255,17 +1091,18 @@ class DynamoDBBackend(BaseBackend): item = table.get_item(hash_value, range_value) if update_expression: - UpdateExpressionValidator( + validated_ast = UpdateExpressionValidator( update_expression_ast, expression_attribute_names=expression_attribute_names, expression_attribute_values=expression_attribute_values, item=item, ).validate() - item.update( - update_expression, - 
expression_attribute_names, - expression_attribute_values, - ) + try: + UpdateExpressionExecutor( + validated_ast, item, expression_attribute_names + ).execute() + except ItemSizeTooLarge: + raise ItemSizeToUpdateTooLarge() else: item.update_with_attribute_updates(attribute_updates) if table.stream_shard is not None: @@ -1321,6 +1158,94 @@ class DynamoDBBackend(BaseBackend): return table.ttl + def transact_write_items(self, transact_items): + # Create a backup in case any of the transactions fail + original_table_state = copy.deepcopy(self.tables) + try: + for item in transact_items: + if "ConditionCheck" in item: + item = item["ConditionCheck"] + key = item["Key"] + table_name = item["TableName"] + condition_expression = item.get("ConditionExpression", None) + expression_attribute_names = item.get( + "ExpressionAttributeNames", None + ) + expression_attribute_values = item.get( + "ExpressionAttributeValues", None + ) + current = self.get_item(table_name, key) + + condition_op = get_filter_expression( + condition_expression, + expression_attribute_names, + expression_attribute_values, + ) + if not condition_op.expr(current): + raise ValueError("The conditional request failed") + elif "Put" in item: + item = item["Put"] + attrs = item["Item"] + table_name = item["TableName"] + condition_expression = item.get("ConditionExpression", None) + expression_attribute_names = item.get( + "ExpressionAttributeNames", None + ) + expression_attribute_values = item.get( + "ExpressionAttributeValues", None + ) + self.put_item( + table_name, + attrs, + condition_expression=condition_expression, + expression_attribute_names=expression_attribute_names, + expression_attribute_values=expression_attribute_values, + ) + elif "Delete" in item: + item = item["Delete"] + key = item["Key"] + table_name = item["TableName"] + condition_expression = item.get("ConditionExpression", None) + expression_attribute_names = item.get( + "ExpressionAttributeNames", None + ) + expression_attribute_values 
= item.get( + "ExpressionAttributeValues", None + ) + self.delete_item( + table_name, + key, + condition_expression=condition_expression, + expression_attribute_names=expression_attribute_names, + expression_attribute_values=expression_attribute_values, + ) + elif "Update" in item: + item = item["Update"] + key = item["Key"] + table_name = item["TableName"] + update_expression = item["UpdateExpression"] + condition_expression = item.get("ConditionExpression", None) + expression_attribute_names = item.get( + "ExpressionAttributeNames", None + ) + expression_attribute_values = item.get( + "ExpressionAttributeValues", None + ) + self.update_item( + table_name, + key, + update_expression=update_expression, + condition_expression=condition_expression, + expression_attribute_names=expression_attribute_names, + expression_attribute_values=expression_attribute_values, + ) + else: + raise ValueError + except: # noqa: E722 Do not use bare except + # Rollback to the original state, and reraise the error + self.tables = original_table_state + raise + dynamodb_backends = {} for region in Session().get_available_regions("dynamodb"): diff --git a/moto/dynamodb2/models/dynamo_type.py b/moto/dynamodb2/models/dynamo_type.py index a3199dcaa..1fc1bcef3 100644 --- a/moto/dynamodb2/models/dynamo_type.py +++ b/moto/dynamodb2/models/dynamo_type.py @@ -1,10 +1,53 @@ import six from moto.dynamodb2.comparisons import get_comparison_func -from moto.dynamodb2.exceptions import InvalidUpdateExpression +from moto.dynamodb2.exceptions import InvalidUpdateExpression, IncorrectDataType from moto.dynamodb2.models.utilities import attribute_is_list, bytesize +class DDBType(object): + """ + Official documentation at https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_AttributeValue.html + """ + + BINARY_SET = "BS" + NUMBER_SET = "NS" + STRING_SET = "SS" + STRING = "S" + NUMBER = "N" + MAP = "M" + LIST = "L" + BOOLEAN = "BOOL" + BINARY = "B" + NULL = "NULL" + + +class 
DDBTypeConversion(object): + _human_type_mapping = { + val: key.replace("_", " ") + for key, val in DDBType.__dict__.items() + if key.upper() == key + } + + @classmethod + def get_human_type(cls, abbreviated_type): + """ + Args: + abbreviated_type(str): An attribute of DDBType + + Returns: + str: The human readable form of the DDBType. + """ + try: + human_type_str = cls._human_type_mapping[abbreviated_type] + except KeyError: + raise ValueError( + "Invalid abbreviated_type {at}".format(at=abbreviated_type) + ) + + return human_type_str + + class DynamoType(object): """ http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html#DataModelDataTypes @@ -50,13 +93,22 @@ class DynamoType(object): self.value = new_value.value else: if attr not in self.value: # nonexistingattribute - type_of_new_attr = "M" if "." in key else new_value.type + type_of_new_attr = DDBType.MAP if "." in key else new_value.type self.value[attr] = DynamoType({type_of_new_attr: {}}) # {'M': {'foo': DynamoType}} ==> DynamoType.set(new_value) self.value[attr].set( ".".join(key.split(".")[1:]), new_value, list_index ) + def __contains__(self, item): + if self.type == DDBType.STRING: + return False + try: + self.__getitem__(item) + return True + except KeyError: + return False + def delete(self, key, index=None): if index: if not key: @@ -126,27 +178,35 @@ class DynamoType(object): def __add__(self, other): if self.type != other.type: raise TypeError("Different types of operandi is not allowed.") - if self.type == "N": - return DynamoType({"N": "{v}".format(v=int(self.value) + int(other.value))}) + if self.is_number(): + self_value = float(self.value) if "." in self.value else int(self.value) + other_value = float(other.value) if "." 
in other.value else int(other.value) + return DynamoType( + {DDBType.NUMBER: "{v}".format(v=self_value + other_value)} + ) else: - raise TypeError("Sum only supported for Numbers.") + raise IncorrectDataType() def __sub__(self, other): if self.type != other.type: raise TypeError("Different types of operandi is not allowed.") - if self.type == "N": - return DynamoType({"N": "{v}".format(v=int(self.value) - int(other.value))}) + if self.type == DDBType.NUMBER: + self_value = float(self.value) if "." in self.value else int(self.value) + other_value = float(other.value) if "." in other.value else int(other.value) + return DynamoType( + {DDBType.NUMBER: "{v}".format(v=self_value - other_value)} + ) else: raise TypeError("Sum only supported for Numbers.") def __getitem__(self, item): if isinstance(item, six.string_types): # If our DynamoType is a map it should be subscriptable with a key - if self.type == "M": + if self.type == DDBType.MAP: return self.value[item] elif isinstance(item, int): # If our DynamoType is a list is should be subscriptable with an index - if self.type == "L": + if self.type == DDBType.LIST: return self.value[item] raise TypeError( "This DynamoType {dt} is not subscriptable by a {it}".format( @@ -154,6 +214,20 @@ class DynamoType(object): ) ) + def __setitem__(self, key, value): + if isinstance(key, int): + if self.is_list(): + if key >= len(self.value): + # DynamoDB doesn't care you are out of box just add it to the end. 
+ self.value.append(value) + else: + self.value[key] = value + elif isinstance(key, six.string_types): + if self.is_map(): + self.value[key] = value + else: + raise NotImplementedError("No set_item for {t}".format(t=type(key))) + @property def cast_value(self): if self.is_number(): @@ -222,16 +296,22 @@ class DynamoType(object): return comparison_func(self.cast_value, *range_values) def is_number(self): - return self.type == "N" + return self.type == DDBType.NUMBER def is_set(self): - return self.type == "SS" or self.type == "NS" or self.type == "BS" + return self.type in (DDBType.STRING_SET, DDBType.NUMBER_SET, DDBType.BINARY_SET) def is_list(self): - return self.type == "L" + return self.type == DDBType.LIST def is_map(self): - return self.type == "M" + return self.type == DDBType.MAP def same_type(self, other): return self.type == other.type + + def pop(self, key, *args, **kwargs): + if self.is_map() or self.is_list(): + self.value.pop(key, *args, **kwargs) + else: + raise TypeError("pop not supported for DynamoType {t}".format(t=self.type)) diff --git a/moto/dynamodb2/parsing/executors.py b/moto/dynamodb2/parsing/executors.py new file mode 100644 index 000000000..8c51c9cec --- /dev/null +++ b/moto/dynamodb2/parsing/executors.py @@ -0,0 +1,262 @@ +from abc import abstractmethod + +from moto.dynamodb2.exceptions import IncorrectOperandType, IncorrectDataType +from moto.dynamodb2.models import DynamoType +from moto.dynamodb2.models.dynamo_type import DDBTypeConversion, DDBType +from moto.dynamodb2.parsing.ast_nodes import ( + UpdateExpressionSetAction, + UpdateExpressionDeleteAction, + UpdateExpressionRemoveAction, + UpdateExpressionAddAction, + UpdateExpressionPath, + DDBTypedValue, + ExpressionAttribute, + ExpressionSelector, + ExpressionAttributeName, +) +from moto.dynamodb2.parsing.validators import ExpressionPathResolver + + +class NodeExecutor(object): + def __init__(self, ast_node, expression_attribute_names): + self.node = ast_node + 
self.expression_attribute_names = expression_attribute_names + + @abstractmethod + def execute(self, item): + pass + + def get_item_part_for_path_nodes(self, item, path_nodes): + """ + For a list of path nodes travers the item by following the path_nodes + Args: + item(Item): + path_nodes(list): + + Returns: + + """ + if len(path_nodes) == 0: + return item.attrs + else: + return ExpressionPathResolver( + self.expression_attribute_names + ).resolve_expression_path_nodes_to_dynamo_type(item, path_nodes) + + def get_item_before_end_of_path(self, item): + """ + Get the part ot the item where the item will perform the action. For most actions this should be the parent. As + that element will need to be modified by the action. + Args: + item(Item): + + Returns: + DynamoType or dict: The path to be set + """ + return self.get_item_part_for_path_nodes( + item, self.get_path_expression_nodes()[:-1] + ) + + def get_item_at_end_of_path(self, item): + """ + For a DELETE the path points at the stringset so we need to evaluate the full path. + Args: + item(Item): + + Returns: + DynamoType or dict: The path to be set + """ + return self.get_item_part_for_path_nodes(item, self.get_path_expression_nodes()) + + # Get the part ot the item where the item will perform the action. For most actions this should be the parent. As + # that element will need to be modified by the action. 
+ get_item_part_in_which_to_perform_action = get_item_before_end_of_path + + def get_path_expression_nodes(self): + update_expression_path = self.node.children[0] + assert isinstance(update_expression_path, UpdateExpressionPath) + return update_expression_path.children + + def get_element_to_action(self): + return self.get_path_expression_nodes()[-1] + + def get_action_value(self): + """ + + Returns: + DynamoType: The value to be set + """ + ddb_typed_value = self.node.children[1] + assert isinstance(ddb_typed_value, DDBTypedValue) + dynamo_type_value = ddb_typed_value.children[0] + assert isinstance(dynamo_type_value, DynamoType) + return dynamo_type_value + + +class SetExecutor(NodeExecutor): + def execute(self, item): + self.set( + item_part_to_modify_with_set=self.get_item_part_in_which_to_perform_action( + item + ), + element_to_set=self.get_element_to_action(), + value_to_set=self.get_action_value(), + expression_attribute_names=self.expression_attribute_names, + ) + + @classmethod + def set( + cls, + item_part_to_modify_with_set, + element_to_set, + value_to_set, + expression_attribute_names, + ): + if isinstance(element_to_set, ExpressionAttribute): + attribute_name = element_to_set.get_attribute_name() + item_part_to_modify_with_set[attribute_name] = value_to_set + elif isinstance(element_to_set, ExpressionSelector): + index = element_to_set.get_index() + item_part_to_modify_with_set[index] = value_to_set + elif isinstance(element_to_set, ExpressionAttributeName): + attribute_name = expression_attribute_names[ + element_to_set.get_attribute_name_placeholder() + ] + item_part_to_modify_with_set[attribute_name] = value_to_set + else: + raise NotImplementedError( + "Moto does not support setting {t} yet".format(t=type(element_to_set)) + ) + + +class DeleteExecutor(NodeExecutor): + operator = "operator: DELETE" + + def execute(self, item): + string_set_to_remove = self.get_action_value() + assert isinstance(string_set_to_remove, DynamoType) + if not 
string_set_to_remove.is_set(): + raise IncorrectOperandType( + self.operator, + DDBTypeConversion.get_human_type(string_set_to_remove.type), + ) + + string_set = self.get_item_at_end_of_path(item) + assert isinstance(string_set, DynamoType) + if string_set.type != string_set_to_remove.type: + raise IncorrectDataType() + # String set is currently implemented as a list + string_set_list = string_set.value + + stringset_to_remove_list = string_set_to_remove.value + + for value in stringset_to_remove_list: + try: + string_set_list.remove(value) + except (KeyError, ValueError): + # DynamoDB does not mind if value is not present + pass + + +class RemoveExecutor(NodeExecutor): + def execute(self, item): + element_to_remove = self.get_element_to_action() + if isinstance(element_to_remove, ExpressionAttribute): + attribute_name = element_to_remove.get_attribute_name() + self.get_item_part_in_which_to_perform_action(item).pop( + attribute_name, None + ) + elif isinstance(element_to_remove, ExpressionAttributeName): + attribute_name = self.expression_attribute_names[ + element_to_remove.get_attribute_name_placeholder() + ] + self.get_item_part_in_which_to_perform_action(item).pop( + attribute_name, None + ) + elif isinstance(element_to_remove, ExpressionSelector): + index = element_to_remove.get_index() + try: + self.get_item_part_in_which_to_perform_action(item).pop(index) + except IndexError: + # DynamoDB does not care that index is out of bounds, it will just do nothing. 
+ pass + else: + raise NotImplementedError( + "Moto does not support setting {t} yet".format( + t=type(element_to_remove) + ) + ) + + +class AddExecutor(NodeExecutor): + def execute(self, item): + value_to_add = self.get_action_value() + if isinstance(value_to_add, DynamoType): + if value_to_add.is_set(): + current_string_set = self.get_item_at_end_of_path(item) + assert isinstance(current_string_set, DynamoType) + if not current_string_set.type == value_to_add.type: + raise IncorrectDataType() + # Sets are implemented as list + for value in value_to_add.value: + if value in current_string_set.value: + continue + else: + current_string_set.value.append(value) + elif value_to_add.type == DDBType.NUMBER: + existing_value = self.get_item_at_end_of_path(item) + assert isinstance(existing_value, DynamoType) + if not existing_value.type == DDBType.NUMBER: + raise IncorrectDataType() + new_value = existing_value + value_to_add + SetExecutor.set( + item_part_to_modify_with_set=self.get_item_before_end_of_path(item), + element_to_set=self.get_element_to_action(), + value_to_set=new_value, + expression_attribute_names=self.expression_attribute_names, + ) + else: + raise IncorrectDataType() + + +class UpdateExpressionExecutor(object): + execution_map = { + UpdateExpressionSetAction: SetExecutor, + UpdateExpressionAddAction: AddExecutor, + UpdateExpressionRemoveAction: RemoveExecutor, + UpdateExpressionDeleteAction: DeleteExecutor, + } + + def __init__(self, update_ast, item, expression_attribute_names): + self.update_ast = update_ast + self.item = item + self.expression_attribute_names = expression_attribute_names + + def execute(self, node=None): + """ + As explained in moto.dynamodb2.parsing.expressions.NestableExpressionParserMixin._create_node the order of nodes + in the AST can be translated of the order of statements in the expression. As such we can start at the root node + and process the nodes 1-by-1. 
If no specific execution for the node type is defined we can execute the children + in order since it will be a container node that is expandable and left child will be first in the statement. + + Args: + node(Node): + + Returns: + None + """ + if node is None: + node = self.update_ast + + node_executor = self.get_specific_execution(node) + if node_executor is None: + for node in node.children: + self.execute(node) + else: + node_executor(node, self.expression_attribute_names).execute(self.item) + + def get_specific_execution(self, node): + for node_class in self.execution_map: + if isinstance(node, node_class): + return self.execution_map[node_class] + return None diff --git a/moto/dynamodb2/parsing/validators.py b/moto/dynamodb2/parsing/validators.py index 180c7a874..f924a713c 100644 --- a/moto/dynamodb2/parsing/validators.py +++ b/moto/dynamodb2/parsing/validators.py @@ -11,6 +11,7 @@ from moto.dynamodb2.exceptions import ( ExpressionAttributeNameNotDefined, IncorrectOperandType, InvalidUpdateExpressionInvalidDocumentPath, + ProvidedKeyDoesNotExist, ) from moto.dynamodb2.models import DynamoType from moto.dynamodb2.parsing.ast_nodes import ( @@ -56,6 +57,76 @@ class ExpressionAttributeValueProcessor(DepthFirstTraverser): return DDBTypedValue(DynamoType(target)) +class ExpressionPathResolver(object): + def __init__(self, expression_attribute_names): + self.expression_attribute_names = expression_attribute_names + + @classmethod + def raise_exception_if_keyword(cls, attribute): + if attribute.upper() in ReservedKeywords.get_reserved_keywords(): + raise AttributeIsReservedKeyword(attribute) + + def resolve_expression_path(self, item, update_expression_path): + assert isinstance(update_expression_path, UpdateExpressionPath) + return self.resolve_expression_path_nodes(item, update_expression_path.children) + + def resolve_expression_path_nodes(self, item, update_expression_path_nodes): + target = item.attrs + + for child in update_expression_path_nodes: + # First 
replace placeholder with attribute_name + attr_name = None + if isinstance(child, ExpressionAttributeName): + attr_placeholder = child.get_attribute_name_placeholder() + try: + attr_name = self.expression_attribute_names[attr_placeholder] + except KeyError: + raise ExpressionAttributeNameNotDefined(attr_placeholder) + elif isinstance(child, ExpressionAttribute): + attr_name = child.get_attribute_name() + self.raise_exception_if_keyword(attr_name) + if attr_name is not None: + # Resolv attribute_name + try: + target = target[attr_name] + except (KeyError, TypeError): + if child == update_expression_path_nodes[-1]: + return NoneExistingPath(creatable=True) + return NoneExistingPath() + else: + if isinstance(child, ExpressionPathDescender): + continue + elif isinstance(child, ExpressionSelector): + index = child.get_index() + if target.is_list(): + try: + target = target[index] + except IndexError: + # When a list goes out of bounds when assigning that is no problem when at the assignment + # side. It will just append to the list. 
+ if child == update_expression_path_nodes[-1]: + return NoneExistingPath(creatable=True) + return NoneExistingPath() + else: + raise InvalidUpdateExpressionInvalidDocumentPath + else: + raise NotImplementedError( + "Path resolution for {t}".format(t=type(child)) + ) + if not isinstance(target, DynamoType): + print(target) + return DDBTypedValue(target) + + def resolve_expression_path_nodes_to_dynamo_type( + self, item, update_expression_path_nodes + ): + node = self.resolve_expression_path_nodes(item, update_expression_path_nodes) + if isinstance(node, NoneExistingPath): + raise ProvidedKeyDoesNotExist() + assert isinstance(node, DDBTypedValue) + return node.get_value() + + class ExpressionAttributeResolvingProcessor(DepthFirstTraverser): def _processing_map(self): return { @@ -107,55 +178,9 @@ class ExpressionAttributeResolvingProcessor(DepthFirstTraverser): return node def resolve_expression_path(self, node): - assert isinstance(node, UpdateExpressionPath) - - target = deepcopy(self.item.attrs) - for child in node.children: - # First replace placeholder with attribute_name - attr_name = None - if isinstance(child, ExpressionAttributeName): - attr_placeholder = child.get_attribute_name_placeholder() - try: - attr_name = self.expression_attribute_names[attr_placeholder] - except KeyError: - raise ExpressionAttributeNameNotDefined(attr_placeholder) - elif isinstance(child, ExpressionAttribute): - attr_name = child.get_attribute_name() - self.raise_exception_if_keyword(attr_name) - if attr_name is not None: - # Resolv attribute_name - try: - target = target[attr_name] - except (KeyError, TypeError): - if child == node.children[-1]: - return NoneExistingPath(creatable=True) - return NoneExistingPath() - else: - if isinstance(child, ExpressionPathDescender): - continue - elif isinstance(child, ExpressionSelector): - index = child.get_index() - if target.is_list(): - try: - target = target[index] - except IndexError: - # When a list goes out of bounds when assigning 
that is no problem when at the assignment - # side. It will just append to the list. - if child == node.children[-1]: - return NoneExistingPath(creatable=True) - return NoneExistingPath() - else: - raise InvalidUpdateExpressionInvalidDocumentPath - else: - raise NotImplementedError( - "Path resolution for {t}".format(t=type(child)) - ) - return DDBTypedValue(DynamoType(target)) - - @classmethod - def raise_exception_if_keyword(cls, attribute): - if attribute.upper() in ReservedKeywords.get_reserved_keywords(): - raise AttributeIsReservedKeyword(attribute) + return ExpressionPathResolver( + self.expression_attribute_names + ).resolve_expression_path(self.item, node) class UpdateExpressionFunctionEvaluator(DepthFirstTraverser): @@ -183,7 +208,9 @@ class UpdateExpressionFunctionEvaluator(DepthFirstTraverser): assert isinstance(result, (DDBTypedValue, NoneExistingPath)) return result elif function_name == "list_append": - first_arg = self.get_list_from_ddb_typed_value(first_arg, function_name) + first_arg = deepcopy( + self.get_list_from_ddb_typed_value(first_arg, function_name) + ) second_arg = self.get_list_from_ddb_typed_value(second_arg, function_name) for list_element in second_arg.value: first_arg.value.append(list_element) diff --git a/moto/dynamodb2/responses.py b/moto/dynamodb2/responses.py index bb0e597c9..b703f2935 100644 --- a/moto/dynamodb2/responses.py +++ b/moto/dynamodb2/responses.py @@ -470,8 +470,10 @@ class DynamoHandler(BaseResponse): for k, v in six.iteritems(self.body.get("ExpressionAttributeNames", {})) ) - if " AND " in key_condition_expression: - expressions = key_condition_expression.split(" AND ", 1) + if " and " in key_condition_expression.lower(): + expressions = re.split( + " AND ", key_condition_expression, maxsplit=1, flags=re.IGNORECASE + ) index_hash_key = [key for key in index if key["KeyType"] == "HASH"][0] hash_key_var = reverse_attribute_lookup.get( @@ -760,12 +762,12 @@ class DynamoHandler(BaseResponse): item = 
self.dynamodb_backend.update_item( name, key, - update_expression, - attribute_updates, - expression_attribute_names, - expression_attribute_values, - expected, - condition_expression, + update_expression=update_expression, + attribute_updates=attribute_updates, + expression_attribute_names=expression_attribute_names, + expression_attribute_values=expression_attribute_values, + expected=expected, + condition_expression=condition_expression, ) except MockValidationException as mve: er = "com.amazonaws.dynamodb.v20111205#ValidationException" @@ -922,3 +924,15 @@ class DynamoHandler(BaseResponse): result.update({"ConsumedCapacity": [v for v in consumed_capacity.values()]}) return dynamo_json_dump(result) + + def transact_write_items(self): + transact_items = self.body["TransactItems"] + try: + self.dynamodb_backend.transact_write_items(transact_items) + except ValueError: + er = "com.amazonaws.dynamodb.v20111205#ConditionalCheckFailedException" + return self.error( + er, "A condition specified in the operation could not be evaluated." + ) + response = {"ConsumedCapacity": [], "ItemCollectionMetrics": {}} + return dynamo_json_dump(response) diff --git a/moto/ec2/models.py b/moto/ec2/models.py index 9ceb06f6f..ce9c3ef5b 100644 --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -557,6 +557,10 @@ class Instance(TaggedEC2Resource, BotoInstance): # worst case we'll get IP address exaustion... rarely pass + def add_block_device(self, size, device_path): + volume = self.ec2_backend.create_volume(size, self.region_name) + self.ec2_backend.attach_volume(volume.id, self.id, device_path) + def setup_defaults(self): # Default have an instance with root volume should you not wish to # override with attach volume cmd. 
@@ -564,9 +568,10 @@ class Instance(TaggedEC2Resource, BotoInstance): self.ec2_backend.attach_volume(volume.id, self.id, "/dev/sda1") def teardown_defaults(self): - volume_id = self.block_device_mapping["/dev/sda1"].volume_id - self.ec2_backend.detach_volume(volume_id, self.id, "/dev/sda1") - self.ec2_backend.delete_volume(volume_id) + if "/dev/sda1" in self.block_device_mapping: + volume_id = self.block_device_mapping["/dev/sda1"].volume_id + self.ec2_backend.detach_volume(volume_id, self.id, "/dev/sda1") + self.ec2_backend.delete_volume(volume_id) @property def get_block_device_mapping(self): @@ -621,6 +626,7 @@ class Instance(TaggedEC2Resource, BotoInstance): subnet_id=properties.get("SubnetId"), key_name=properties.get("KeyName"), private_ip=properties.get("PrivateIpAddress"), + block_device_mappings=properties.get("BlockDeviceMappings", {}), ) instance = reservation.instances[0] for tag in properties.get("Tags", []): @@ -880,7 +886,14 @@ class InstanceBackend(object): ) new_reservation.instances.append(new_instance) new_instance.add_tags(instance_tags) - new_instance.setup_defaults() + if "block_device_mappings" in kwargs: + for block_device in kwargs["block_device_mappings"]: + new_instance.add_block_device( + block_device["Ebs"]["VolumeSize"], block_device["DeviceName"] + ) + else: + new_instance.setup_defaults() + return new_reservation def start_instances(self, instance_ids): diff --git a/moto/ec2/responses/instances.py b/moto/ec2/responses/instances.py index 490ffb642..de17f0609 100644 --- a/moto/ec2/responses/instances.py +++ b/moto/ec2/responses/instances.py @@ -52,7 +52,7 @@ class InstanceResponse(BaseResponse): private_ip = self._get_param("PrivateIpAddress") associate_public_ip = self._get_param("AssociatePublicIpAddress") key_name = self._get_param("KeyName") - ebs_optimized = self._get_param("EbsOptimized") + ebs_optimized = self._get_param("EbsOptimized") or False instance_initiated_shutdown_behavior = self._get_param( 
"InstanceInitiatedShutdownBehavior" ) diff --git a/moto/ec2/responses/tags.py b/moto/ec2/responses/tags.py index 5290b7409..0c7d89a41 100644 --- a/moto/ec2/responses/tags.py +++ b/moto/ec2/responses/tags.py @@ -2,7 +2,8 @@ from __future__ import unicode_literals from moto.core.responses import BaseResponse from moto.ec2.models import validate_resource_ids -from moto.ec2.utils import tags_from_query_string, filters_from_querystring +from moto.ec2.utils import filters_from_querystring +from moto.core.utils import tags_from_query_string class TagResponse(BaseResponse): diff --git a/moto/ec2/utils.py b/moto/ec2/utils.py index 74fe3d27b..61d22d8b2 100644 --- a/moto/ec2/utils.py +++ b/moto/ec2/utils.py @@ -196,22 +196,6 @@ def split_route_id(route_id): return values[0], values[1] -def tags_from_query_string(querystring_dict): - prefix = "Tag" - suffix = "Key" - response_values = {} - for key, value in querystring_dict.items(): - if key.startswith(prefix) and key.endswith(suffix): - tag_index = key.replace(prefix + ".", "").replace("." 
+ suffix, "") - tag_key = querystring_dict.get("Tag.{0}.Key".format(tag_index))[0] - tag_value_key = "Tag.{0}.Value".format(tag_index) - if tag_value_key in querystring_dict: - response_values[tag_key] = querystring_dict.get(tag_value_key)[0] - else: - response_values[tag_key] = None - return response_values - - def dhcp_configuration_from_querystring(querystring, option="DhcpConfiguration"): """ turn: diff --git a/moto/elasticbeanstalk/__init__.py b/moto/elasticbeanstalk/__init__.py new file mode 100644 index 000000000..851fa445b --- /dev/null +++ b/moto/elasticbeanstalk/__init__.py @@ -0,0 +1,4 @@ +from .models import eb_backends +from moto.core.models import base_decorator + +mock_elasticbeanstalk = base_decorator(eb_backends) diff --git a/moto/elasticbeanstalk/exceptions.py b/moto/elasticbeanstalk/exceptions.py new file mode 100644 index 000000000..f1e27c564 --- /dev/null +++ b/moto/elasticbeanstalk/exceptions.py @@ -0,0 +1,15 @@ +from moto.core.exceptions import RESTError + + +class InvalidParameterValueError(RESTError): + def __init__(self, message): + super(InvalidParameterValueError, self).__init__( + "InvalidParameterValue", message + ) + + +class ResourceNotFoundException(RESTError): + def __init__(self, message): + super(ResourceNotFoundException, self).__init__( + "ResourceNotFoundException", message + ) diff --git a/moto/elasticbeanstalk/models.py b/moto/elasticbeanstalk/models.py new file mode 100644 index 000000000..3767846c1 --- /dev/null +++ b/moto/elasticbeanstalk/models.py @@ -0,0 +1,152 @@ +import weakref + +from boto3 import Session + +from moto.core import BaseBackend, BaseModel +from .exceptions import InvalidParameterValueError, ResourceNotFoundException + + +class FakeEnvironment(BaseModel): + def __init__( + self, application, environment_name, solution_stack_name, tags, + ): + self.application = weakref.proxy( + application + ) # weakref to break circular dependencies + self.environment_name = environment_name + self.solution_stack_name = 
solution_stack_name + self.tags = tags + + @property + def application_name(self): + return self.application.application_name + + @property + def environment_arn(self): + return ( + "arn:aws:elasticbeanstalk:{region}:{account_id}:" + "environment/{application_name}/{environment_name}".format( + region=self.region, + account_id="123456789012", + application_name=self.application_name, + environment_name=self.environment_name, + ) + ) + + @property + def platform_arn(self): + return "TODO" # TODO + + @property + def region(self): + return self.application.region + + +class FakeApplication(BaseModel): + def __init__(self, backend, application_name): + self.backend = weakref.proxy(backend) # weakref to break cycles + self.application_name = application_name + self.environments = dict() + + def create_environment( + self, environment_name, solution_stack_name, tags, + ): + if environment_name in self.environments: + raise InvalidParameterValueError + + env = FakeEnvironment( + application=self, + environment_name=environment_name, + solution_stack_name=solution_stack_name, + tags=tags, + ) + self.environments[environment_name] = env + + return env + + @property + def region(self): + return self.backend.region + + +class EBBackend(BaseBackend): + def __init__(self, region): + self.region = region + self.applications = dict() + + def reset(self): + # preserve region + region = self.region + self._reset_model_refs() + self.__dict__ = {} + self.__init__(region) + + def create_application(self, application_name): + if application_name in self.applications: + raise InvalidParameterValueError( + "Application {} already exists.".format(application_name) + ) + new_app = FakeApplication(backend=self, application_name=application_name,) + self.applications[application_name] = new_app + return new_app + + def create_environment(self, app, environment_name, stack_name, tags): + return app.create_environment( + environment_name=environment_name, + solution_stack_name=stack_name, + 
tags=tags, + ) + + def describe_environments(self): + envs = [] + for app in self.applications.values(): + for env in app.environments.values(): + envs.append(env) + return envs + + def list_available_solution_stacks(self): + # Implemented in response.py + pass + + def update_tags_for_resource(self, resource_arn, tags_to_add, tags_to_remove): + try: + res = self._find_environment_by_arn(resource_arn) + except KeyError: + raise ResourceNotFoundException( + "Resource not found for ARN '{}'.".format(resource_arn) + ) + + for key, value in tags_to_add.items(): + res.tags[key] = value + + for key in tags_to_remove: + del res.tags[key] + + def list_tags_for_resource(self, resource_arn): + try: + res = self._find_environment_by_arn(resource_arn) + except KeyError: + raise ResourceNotFoundException( + "Resource not found for ARN '{}'.".format(resource_arn) + ) + return res.tags + + def _find_environment_by_arn(self, arn): + for app in self.applications.keys(): + for env in self.applications[app].environments.values(): + if env.environment_arn == arn: + return env + raise KeyError() + + +eb_backends = {} +for region in Session().get_available_regions("elasticbeanstalk"): + eb_backends[region] = EBBackend(region) +for region in Session().get_available_regions( + "elasticbeanstalk", partition_name="aws-us-gov" +): + eb_backends[region] = EBBackend(region) +for region in Session().get_available_regions( + "elasticbeanstalk", partition_name="aws-cn" +): + eb_backends[region] = EBBackend(region) diff --git a/moto/elasticbeanstalk/responses.py b/moto/elasticbeanstalk/responses.py new file mode 100644 index 000000000..387cbb3ea --- /dev/null +++ b/moto/elasticbeanstalk/responses.py @@ -0,0 +1,1386 @@ +from moto.core.responses import BaseResponse +from moto.core.utils import tags_from_query_string +from .models import eb_backends +from .exceptions import InvalidParameterValueError + + +class EBResponse(BaseResponse): + @property + def backend(self): + """ + :rtype: EBBackend + """ 
+ return eb_backends[self.region] + + def create_application(self): + app = self.backend.create_application( + application_name=self._get_param("ApplicationName"), + ) + + template = self.response_template(EB_CREATE_APPLICATION) + return template.render(region_name=self.backend.region, application=app,) + + def describe_applications(self): + template = self.response_template(EB_DESCRIBE_APPLICATIONS) + return template.render(applications=self.backend.applications.values(),) + + def create_environment(self): + application_name = self._get_param("ApplicationName") + try: + app = self.backend.applications[application_name] + except KeyError: + raise InvalidParameterValueError( + "No Application named '{}' found.".format(application_name) + ) + + tags = tags_from_query_string(self.querystring, prefix="Tags.member") + env = self.backend.create_environment( + app, + environment_name=self._get_param("EnvironmentName"), + stack_name=self._get_param("SolutionStackName"), + tags=tags, + ) + + template = self.response_template(EB_CREATE_ENVIRONMENT) + return template.render(environment=env, region=self.backend.region,) + + def describe_environments(self): + envs = self.backend.describe_environments() + + template = self.response_template(EB_DESCRIBE_ENVIRONMENTS) + return template.render(environments=envs,) + + def list_available_solution_stacks(self): + return EB_LIST_AVAILABLE_SOLUTION_STACKS + + def update_tags_for_resource(self): + resource_arn = self._get_param("ResourceArn") + tags_to_add = tags_from_query_string( + self.querystring, prefix="TagsToAdd.member" + ) + tags_to_remove = self._get_multi_param("TagsToRemove.member") + self.backend.update_tags_for_resource(resource_arn, tags_to_add, tags_to_remove) + + return EB_UPDATE_TAGS_FOR_RESOURCE + + def list_tags_for_resource(self): + resource_arn = self._get_param("ResourceArn") + tags = self.backend.list_tags_for_resource(resource_arn) + + template = self.response_template(EB_LIST_TAGS_FOR_RESOURCE) + return 
template.render(tags=tags, arn=resource_arn,) + + +EB_CREATE_APPLICATION = """ + + + + + 2019-09-03T13:08:29.049Z + + + + false + 180 + false + + + false + 200 + false + + + + arn:aws:elasticbeanstalk:{{ region_name }}:111122223333:application/{{ application_name }} + {{ application.application_name }} + 2019-09-03T13:08:29.049Z + + + + 1b6173c8-13aa-4b0a-99e9-eb36a1fb2778 + + +""" + + +EB_DESCRIBE_APPLICATIONS = """ + + + + {% for application in applications %} + + + 2019-09-03T13:08:29.049Z + + + + 180 + false + false + + + false + 200 + false + + + + arn:aws:elasticbeanstalk:{{ region_name }}:111122223333:application/{{ application.name }} + {{ application.application_name }} + 2019-09-03T13:08:29.049Z + + {% endfor %} + + + + 015a05eb-282e-4b76-bd18-663fdfaf42e4 + + +""" + + +EB_CREATE_ENVIRONMENT = """ + + + {{ environment.solution_stack_name }} + Grey + {{ environment.environment_arn }} + 2019-09-04T09:41:24.222Z + 2019-09-04T09:41:24.222Z + {{ environment_id }} + {{ environment.platform_arn }} + + WebServer + Standard + 1.0 + + {{ environment.environment_name }} + {{ environment.application_name }} + Launching + + + 18dc8158-f5d7-4d5a-82ef-07fcaadf81c6 + + +""" + + +EB_DESCRIBE_ENVIRONMENTS = """ + + + + {% for env in environments %} + + {{ env.solution_stack_name }} + Grey + {{ env.environment_arn }} + false + 2019-08-30T09:35:10.913Z + false + + 2019-08-22T07:02:47.332Z + {{ env.environment_id }} + 1 + {{ env.platform_arn }} + + WebServer + Standard + 1.0 + + No Data + {{ env.environment_name }} + + + + {{ env.application_name }} + Ready + + {% endfor %} + + + + dd56b215-01a0-40b2-bd1e-57589c39424f + + +""" + + +# Current list as of 2019-09-04 +EB_LIST_AVAILABLE_SOLUTION_STACKS = """ + + + + 64bit Amazon Linux 2018.03 v4.10.1 running Node.js + 64bit Amazon Linux 2018.03 v4.9.2 running Node.js + 64bit Amazon Linux 2018.03 v4.8.0 running Node.js + 64bit Amazon Linux 2018.03 v4.6.0 running Node.js + 64bit Amazon Linux 2018.03 v4.5.3 running Node.js + 64bit 
Amazon Linux 2018.03 v4.5.1 running Node.js + 64bit Amazon Linux 2018.03 v4.5.0 running Node.js + 64bit Amazon Linux 2017.09 v4.4.6 running Node.js + 64bit Amazon Linux 2017.09 v4.4.5 running Node.js + 64bit Amazon Linux 2017.09 v4.4.4 running Node.js + 64bit Amazon Linux 2017.09 v4.4.2 running Node.js + 64bit Amazon Linux 2017.09 v4.4.0 running Node.js + 64bit Amazon Linux 2017.03 v4.3.0 running Node.js + 64bit Amazon Linux 2017.03 v4.2.2 running Node.js + 64bit Amazon Linux 2017.03 v4.2.1 running Node.js + 64bit Amazon Linux 2017.03 v4.2.0 running Node.js + 64bit Amazon Linux 2017.03 v4.1.1 running Node.js + 64bit Amazon Linux 2017.03 v4.1.0 running Node.js + 64bit Amazon Linux 2016.09 v4.0.1 running Node.js + 64bit Amazon Linux 2016.09 v4.0.0 running Node.js + 64bit Amazon Linux 2016.09 v3.3.1 running Node.js + 64bit Amazon Linux 2016.09 v3.1.0 running Node.js + 64bit Amazon Linux 2018.03 v2.8.14 running PHP 5.4 + 64bit Amazon Linux 2018.03 v2.8.14 running PHP 5.5 + 64bit Amazon Linux 2018.03 v2.8.14 running PHP 5.6 + 64bit Amazon Linux 2018.03 v2.8.14 running PHP 7.0 + 64bit Amazon Linux 2018.03 v2.8.14 running PHP 7.1 + 64bit Amazon Linux 2018.03 v2.8.14 running PHP 7.2 + 64bit Amazon Linux 2018.03 v2.8.12 running PHP 7.2 + 64bit Amazon Linux 2018.03 v2.8.7 running PHP 7.1 + 64bit Amazon Linux 2018.03 v2.8.6 running PHP 7.1 + 64bit Amazon Linux 2018.03 v2.8.6 running PHP 7.2 + 64bit Amazon Linux 2018.03 v2.8.5 running PHP 7.2 + 64bit Amazon Linux 2018.03 v2.8.4 running PHP 7.2 + 64bit Amazon Linux 2018.03 v2.8.3 running PHP 7.2 + 64bit Amazon Linux 2018.03 v2.8.2 running PHP 7.2 + 64bit Amazon Linux 2018.03 v2.8.1 running PHP 7.2 + 64bit Amazon Linux 2018.03 v2.8.0 running PHP 7.1 + 64bit Amazon Linux 2018.03 v2.7.1 running PHP 5.6 + 64bit Amazon Linux 2018.03 v2.7.1 running PHP 7.0 + 64bit Amazon Linux 2018.03 v2.7.1 running PHP 7.1 + 64bit Amazon Linux 2018.03 v2.7.0 running PHP 7.0 + 64bit Amazon Linux 2018.03 v2.7.0 running PHP 7.1 + 64bit Amazon Linux 
2017.09 v2.6.6 running PHP 5.4 + 64bit Amazon Linux 2017.09 v2.6.6 running PHP 5.6 + 64bit Amazon Linux 2017.09 v2.6.6 running PHP 7.0 + 64bit Amazon Linux 2017.09 v2.6.5 running PHP 7.0 + 64bit Amazon Linux 2017.09 v2.6.4 running PHP 5.4 + 64bit Amazon Linux 2017.09 v2.6.4 running PHP 5.5 + 64bit Amazon Linux 2017.09 v2.6.4 running PHP 5.6 + 64bit Amazon Linux 2017.09 v2.6.4 running PHP 7.0 + 64bit Amazon Linux 2017.09 v2.6.4 running PHP 7.1 + 64bit Amazon Linux 2017.09 v2.6.3 running PHP 5.4 + 64bit Amazon Linux 2017.09 v2.6.3 running PHP 5.5 + 64bit Amazon Linux 2017.09 v2.6.3 running PHP 5.6 + 64bit Amazon Linux 2017.09 v2.6.3 running PHP 7.0 + 64bit Amazon Linux 2017.09 v2.6.3 running PHP 7.1 + 64bit Amazon Linux 2017.09 v2.6.2 running PHP 5.6 + 64bit Amazon Linux 2017.09 v2.6.2 running PHP 7.0 + 64bit Amazon Linux 2017.09 v2.6.1 running PHP 7.0 + 64bit Amazon Linux 2017.09 v2.6.0 running PHP 5.4 + 64bit Amazon Linux 2017.09 v2.6.0 running PHP 5.5 + 64bit Amazon Linux 2017.09 v2.6.0 running PHP 5.6 + 64bit Amazon Linux 2017.09 v2.6.0 running PHP 7.0 + 64bit Amazon Linux 2017.09 v2.6.0 running PHP 7.1 + 64bit Amazon Linux 2017.03 v2.5.0 running PHP 7.0 + 64bit Amazon Linux 2017.03 v2.5.0 running PHP 7.1 + 64bit Amazon Linux 2017.03 v2.4.4 running PHP 5.5 + 64bit Amazon Linux 2017.03 v2.4.4 running PHP 5.6 + 64bit Amazon Linux 2017.03 v2.4.4 running PHP 7.0 + 64bit Amazon Linux 2017.03 v2.4.3 running PHP 7.0 + 64bit Amazon Linux 2017.03 v2.4.2 running PHP 5.4 + 64bit Amazon Linux 2017.03 v2.4.2 running PHP 5.5 + 64bit Amazon Linux 2017.03 v2.4.2 running PHP 5.6 + 64bit Amazon Linux 2017.03 v2.4.2 running PHP 7.0 + 64bit Amazon Linux 2017.03 v2.4.1 running PHP 7.0 + 64bit Amazon Linux 2017.03 v2.4.0 running PHP 7.0 + 64bit Amazon Linux 2016.09 v2.3.2 running PHP 7.0 + 64bit Amazon Linux 2016.09 v2.3.1 running PHP 7.0 + 64bit Amazon Linux 2018.03 v2.9.1 running Python 3.6 + 64bit Amazon Linux 2018.03 v2.9.1 running Python 3.4 + 64bit Amazon Linux 2018.03 v2.9.1 
running Python + 64bit Amazon Linux 2018.03 v2.9.1 running Python 2.7 + 64bit Amazon Linux 2018.03 v2.7.5 running Python 3.6 + 64bit Amazon Linux 2018.03 v2.7.1 running Python 3.6 + 64bit Amazon Linux 2018.03 v2.7.0 running Python 3.6 + 64bit Amazon Linux 2017.09 v2.6.4 running Python 3.6 + 64bit Amazon Linux 2017.09 v2.6.1 running Python 3.6 + 64bit Amazon Linux 2017.03 v2.4.0 running Python 3.4 + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.6 (Puma) + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.5 (Puma) + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.4 (Puma) + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.3 (Puma) + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.2 (Puma) + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.1 (Puma) + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.0 (Puma) + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.6 (Passenger Standalone) + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.5 (Passenger Standalone) + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.4 (Passenger Standalone) + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.3 (Passenger Standalone) + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.2 (Passenger Standalone) + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.1 (Passenger Standalone) + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.0 (Passenger Standalone) + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 1.9.3 + 64bit Amazon Linux 2018.03 v2.8.0 running Ruby 2.5 (Passenger Standalone) + 64bit Amazon Linux 2017.03 v2.4.4 running Ruby 2.3 (Puma) + 64bit Amazon Linux 2017.03 v2.4.4 running Ruby 2.3 (Passenger Standalone) + 64bit Amazon Linux 2018.03 v3.2.1 running Tomcat 8.5 Java 8 + 64bit Amazon Linux 2018.03 v3.2.1 running Tomcat 8 Java 8 + 64bit Amazon Linux 2018.03 v3.2.1 running Tomcat 7 Java 7 + 64bit Amazon Linux 2018.03 v3.2.1 running Tomcat 7 Java 6 + 64bit Amazon Linux 2018.03 v3.1.1 running Tomcat 8.5 Java 8 + 64bit Amazon Linux 2017.03 v2.6.5 running Tomcat 8 Java 8 + 
64bit Amazon Linux 2017.03 v2.6.2 running Tomcat 8 Java 8 + 64bit Amazon Linux 2017.03 v2.6.1 running Tomcat 8 Java 8 + 64bit Amazon Linux 2017.03 v2.6.0 running Tomcat 8 Java 8 + 64bit Amazon Linux 2016.09 v2.5.4 running Tomcat 8 Java 8 + 64bit Amazon Linux 2016.03 v2.1.0 running Tomcat 8 Java 8 + 64bit Windows Server Core 2016 v2.2.1 running IIS 10.0 + 64bit Windows Server 2016 v2.2.1 running IIS 10.0 + 64bit Windows Server Core 2012 R2 v2.2.1 running IIS 8.5 + 64bit Windows Server 2012 R2 v2.2.1 running IIS 8.5 + 64bit Windows Server Core 2016 v1.2.0 running IIS 10.0 + 64bit Windows Server 2016 v1.2.0 running IIS 10.0 + 64bit Windows Server Core 2012 R2 v1.2.0 running IIS 8.5 + 64bit Windows Server 2012 R2 v1.2.0 running IIS 8.5 + 64bit Windows Server 2012 v1.2.0 running IIS 8 + 64bit Windows Server 2008 R2 v1.2.0 running IIS 7.5 + 64bit Windows Server Core 2012 R2 running IIS 8.5 + 64bit Windows Server 2012 R2 running IIS 8.5 + 64bit Windows Server 2012 running IIS 8 + 64bit Windows Server 2008 R2 running IIS 7.5 + 64bit Amazon Linux 2018.03 v2.12.16 running Docker 18.06.1-ce + 64bit Amazon Linux 2016.09 v2.5.2 running Docker 1.12.6 + 64bit Amazon Linux 2018.03 v2.15.2 running Multi-container Docker 18.06.1-ce (Generic) + 64bit Debian jessie v2.12.16 running Go 1.4 (Preconfigured - Docker) + 64bit Debian jessie v2.12.16 running Go 1.3 (Preconfigured - Docker) + 64bit Debian jessie v2.12.16 running Python 3.4 (Preconfigured - Docker) + 64bit Debian jessie v2.10.0 running Python 3.4 (Preconfigured - Docker) + 64bit Amazon Linux 2018.03 v2.9.1 running Java 8 + 64bit Amazon Linux 2018.03 v2.9.1 running Java 7 + 64bit Amazon Linux 2018.03 v2.8.0 running Java 8 + 64bit Amazon Linux 2018.03 v2.7.6 running Java 8 + 64bit Amazon Linux 2018.03 v2.7.5 running Java 8 + 64bit Amazon Linux 2018.03 v2.7.4 running Java 8 + 64bit Amazon Linux 2018.03 v2.7.2 running Java 8 + 64bit Amazon Linux 2018.03 v2.7.1 running Java 8 + 64bit Amazon Linux 2017.09 v2.6.8 running Java 8 + 
64bit Amazon Linux 2017.09 v2.6.5 running Java 8 + 64bit Amazon Linux 2017.09 v2.6.4 running Java 8 + 64bit Amazon Linux 2017.09 v2.6.3 running Java 8 + 64bit Amazon Linux 2017.09 v2.6.0 running Java 8 + 64bit Amazon Linux 2017.03 v2.5.4 running Java 8 + 64bit Amazon Linux 2017.03 v2.5.3 running Java 8 + 64bit Amazon Linux 2017.03 v2.5.2 running Java 8 + 64bit Amazon Linux 2016.09 v2.4.4 running Java 8 + 64bit Amazon Linux 2018.03 v2.12.1 running Go 1.12.7 + 64bit Amazon Linux 2018.03 v2.6.14 running Packer 1.0.3 + 64bit Amazon Linux 2018.03 v2.12.16 running GlassFish 5.0 Java 8 (Preconfigured - Docker) + + + + 64bit Amazon Linux 2018.03 v4.10.1 running Node.js + + zip + + + + 64bit Amazon Linux 2018.03 v4.9.2 running Node.js + + zip + + + + 64bit Amazon Linux 2018.03 v4.8.0 running Node.js + + zip + + + + 64bit Amazon Linux 2018.03 v4.6.0 running Node.js + + zip + + + + 64bit Amazon Linux 2018.03 v4.5.3 running Node.js + + zip + + + + 64bit Amazon Linux 2018.03 v4.5.1 running Node.js + + zip + + + + 64bit Amazon Linux 2018.03 v4.5.0 running Node.js + + zip + + + + 64bit Amazon Linux 2017.09 v4.4.6 running Node.js + + zip + + + + 64bit Amazon Linux 2017.09 v4.4.5 running Node.js + + zip + + + + 64bit Amazon Linux 2017.09 v4.4.4 running Node.js + + zip + + + + 64bit Amazon Linux 2017.09 v4.4.2 running Node.js + + zip + + + + 64bit Amazon Linux 2017.09 v4.4.0 running Node.js + + zip + + + + 64bit Amazon Linux 2017.03 v4.3.0 running Node.js + + zip + + + + 64bit Amazon Linux 2017.03 v4.2.2 running Node.js + + zip + + + + 64bit Amazon Linux 2017.03 v4.2.1 running Node.js + + zip + + + + 64bit Amazon Linux 2017.03 v4.2.0 running Node.js + + zip + + + + 64bit Amazon Linux 2017.03 v4.1.1 running Node.js + + zip + + + + 64bit Amazon Linux 2017.03 v4.1.0 running Node.js + + zip + + + + 64bit Amazon Linux 2016.09 v4.0.1 running Node.js + + zip + + + + 64bit Amazon Linux 2016.09 v4.0.0 running Node.js + + zip + + + + 64bit Amazon Linux 2016.09 v3.3.1 running Node.js + + zip + 
+ + + 64bit Amazon Linux 2016.09 v3.1.0 running Node.js + + zip + + + + 64bit Amazon Linux 2018.03 v2.8.14 running PHP 5.4 + + zip + + + + 64bit Amazon Linux 2018.03 v2.8.14 running PHP 5.5 + + zip + + + + 64bit Amazon Linux 2018.03 v2.8.14 running PHP 5.6 + + zip + + + + 64bit Amazon Linux 2018.03 v2.8.14 running PHP 7.0 + + zip + + + + 64bit Amazon Linux 2018.03 v2.8.14 running PHP 7.1 + + zip + + + + 64bit Amazon Linux 2018.03 v2.8.14 running PHP 7.2 + + zip + + + + 64bit Amazon Linux 2018.03 v2.8.12 running PHP 7.2 + + zip + + + + 64bit Amazon Linux 2018.03 v2.8.7 running PHP 7.1 + + zip + + + + 64bit Amazon Linux 2018.03 v2.8.6 running PHP 7.1 + + zip + + + + 64bit Amazon Linux 2018.03 v2.8.6 running PHP 7.2 + + zip + + + + 64bit Amazon Linux 2018.03 v2.8.5 running PHP 7.2 + + zip + + + + 64bit Amazon Linux 2018.03 v2.8.4 running PHP 7.2 + + zip + + + + 64bit Amazon Linux 2018.03 v2.8.3 running PHP 7.2 + + zip + + + + 64bit Amazon Linux 2018.03 v2.8.2 running PHP 7.2 + + zip + + + + 64bit Amazon Linux 2018.03 v2.8.1 running PHP 7.2 + + zip + + + + 64bit Amazon Linux 2018.03 v2.8.0 running PHP 7.1 + + zip + + + + 64bit Amazon Linux 2018.03 v2.7.1 running PHP 5.6 + + zip + + + + 64bit Amazon Linux 2018.03 v2.7.1 running PHP 7.0 + + zip + + + + 64bit Amazon Linux 2018.03 v2.7.1 running PHP 7.1 + + zip + + + + 64bit Amazon Linux 2018.03 v2.7.0 running PHP 7.0 + + zip + + + + 64bit Amazon Linux 2018.03 v2.7.0 running PHP 7.1 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.6 running PHP 5.4 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.6 running PHP 5.6 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.6 running PHP 7.0 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.5 running PHP 7.0 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.4 running PHP 5.4 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.4 running PHP 5.5 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.4 running PHP 5.6 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.4 running PHP 7.0 + + zip + + + + 64bit 
Amazon Linux 2017.09 v2.6.4 running PHP 7.1 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.3 running PHP 5.4 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.3 running PHP 5.5 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.3 running PHP 5.6 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.3 running PHP 7.0 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.3 running PHP 7.1 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.2 running PHP 5.6 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.2 running PHP 7.0 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.1 running PHP 7.0 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.0 running PHP 5.4 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.0 running PHP 5.5 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.0 running PHP 5.6 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.0 running PHP 7.0 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.0 running PHP 7.1 + + zip + + + + 64bit Amazon Linux 2017.03 v2.5.0 running PHP 7.0 + + zip + + + + 64bit Amazon Linux 2017.03 v2.5.0 running PHP 7.1 + + zip + + + + 64bit Amazon Linux 2017.03 v2.4.4 running PHP 5.5 + + zip + + + + 64bit Amazon Linux 2017.03 v2.4.4 running PHP 5.6 + + zip + + + + 64bit Amazon Linux 2017.03 v2.4.4 running PHP 7.0 + + zip + + + + 64bit Amazon Linux 2017.03 v2.4.3 running PHP 7.0 + + zip + + + + 64bit Amazon Linux 2017.03 v2.4.2 running PHP 5.4 + + zip + + + + 64bit Amazon Linux 2017.03 v2.4.2 running PHP 5.5 + + zip + + + + 64bit Amazon Linux 2017.03 v2.4.2 running PHP 5.6 + + zip + + + + 64bit Amazon Linux 2017.03 v2.4.2 running PHP 7.0 + + zip + + + + 64bit Amazon Linux 2017.03 v2.4.1 running PHP 7.0 + + zip + + + + 64bit Amazon Linux 2017.03 v2.4.0 running PHP 7.0 + + zip + + + + 64bit Amazon Linux 2016.09 v2.3.2 running PHP 7.0 + + zip + + + + 64bit Amazon Linux 2016.09 v2.3.1 running PHP 7.0 + + zip + + + + 64bit Amazon Linux 2018.03 v2.9.1 running Python 3.6 + + zip + + + + 64bit Amazon Linux 2018.03 v2.9.1 running Python 3.4 + + zip + + + + 64bit Amazon Linux 
2018.03 v2.9.1 running Python + + zip + + + + 64bit Amazon Linux 2018.03 v2.9.1 running Python 2.7 + + zip + + + + 64bit Amazon Linux 2018.03 v2.7.5 running Python 3.6 + + zip + + + + 64bit Amazon Linux 2018.03 v2.7.1 running Python 3.6 + + zip + + + + 64bit Amazon Linux 2018.03 v2.7.0 running Python 3.6 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.4 running Python 3.6 + + zip + + + + 64bit Amazon Linux 2017.09 v2.6.1 running Python 3.6 + + zip + + + + 64bit Amazon Linux 2017.03 v2.4.0 running Python 3.4 + + zip + + + + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.6 (Puma) + + zip + + + + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.5 (Puma) + + zip + + + + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.4 (Puma) + + zip + + + + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.3 (Puma) + + zip + + + + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.2 (Puma) + + zip + + + + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.1 (Puma) + + zip + + + + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.0 (Puma) + + zip + + + + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.6 (Passenger Standalone) + + zip + + + + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.5 (Passenger Standalone) + + zip + + + + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.4 (Passenger Standalone) + + zip + + + + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.3 (Passenger Standalone) + + zip + + + + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.2 (Passenger Standalone) + + zip + + + + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.1 (Passenger Standalone) + + zip + + + + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 2.0 (Passenger Standalone) + + zip + + + + 64bit Amazon Linux 2018.03 v2.10.1 running Ruby 1.9.3 + + zip + + + + 64bit Amazon Linux 2018.03 v2.8.0 running Ruby 2.5 (Passenger Standalone) + + zip + + + + 64bit Amazon Linux 2017.03 v2.4.4 running Ruby 2.3 (Puma) + + zip + + + + 64bit Amazon Linux 2017.03 v2.4.4 running Ruby 2.3 (Passenger Standalone) 
+ + zip + + + + 64bit Amazon Linux 2018.03 v3.2.1 running Tomcat 8.5 Java 8 + + war + zip + + + + 64bit Amazon Linux 2018.03 v3.2.1 running Tomcat 8 Java 8 + + war + zip + + + + 64bit Amazon Linux 2018.03 v3.2.1 running Tomcat 7 Java 7 + + war + zip + + + + 64bit Amazon Linux 2018.03 v3.2.1 running Tomcat 7 Java 6 + + war + zip + + + + 64bit Amazon Linux 2018.03 v3.1.1 running Tomcat 8.5 Java 8 + + war + zip + + + + 64bit Amazon Linux 2017.03 v2.6.5 running Tomcat 8 Java 8 + + war + zip + + + + 64bit Amazon Linux 2017.03 v2.6.2 running Tomcat 8 Java 8 + + war + zip + + + + 64bit Amazon Linux 2017.03 v2.6.1 running Tomcat 8 Java 8 + + war + zip + + + + 64bit Amazon Linux 2017.03 v2.6.0 running Tomcat 8 Java 8 + + war + zip + + + + 64bit Amazon Linux 2016.09 v2.5.4 running Tomcat 8 Java 8 + + war + zip + + + + 64bit Amazon Linux 2016.03 v2.1.0 running Tomcat 8 Java 8 + + war + zip + + + + 64bit Windows Server Core 2016 v2.2.1 running IIS 10.0 + + zip + + + + 64bit Windows Server 2016 v2.2.1 running IIS 10.0 + + zip + + + + 64bit Windows Server Core 2012 R2 v2.2.1 running IIS 8.5 + + zip + + + + 64bit Windows Server 2012 R2 v2.2.1 running IIS 8.5 + + zip + + + + 64bit Windows Server Core 2016 v1.2.0 running IIS 10.0 + + zip + + + + 64bit Windows Server 2016 v1.2.0 running IIS 10.0 + + zip + + + + 64bit Windows Server Core 2012 R2 v1.2.0 running IIS 8.5 + + zip + + + + 64bit Windows Server 2012 R2 v1.2.0 running IIS 8.5 + + zip + + + + 64bit Windows Server 2012 v1.2.0 running IIS 8 + + zip + + + + 64bit Windows Server 2008 R2 v1.2.0 running IIS 7.5 + + zip + + + + 64bit Windows Server Core 2012 R2 running IIS 8.5 + + zip + + + + 64bit Windows Server 2012 R2 running IIS 8.5 + + zip + + + + 64bit Windows Server 2012 running IIS 8 + + zip + + + + 64bit Windows Server 2008 R2 running IIS 7.5 + + zip + + + + 64bit Amazon Linux 2018.03 v2.12.16 running Docker 18.06.1-ce + + + + 64bit Amazon Linux 2016.09 v2.5.2 running Docker 1.12.6 + + + + 64bit Amazon Linux 2018.03 v2.15.2 
running Multi-container Docker 18.06.1-ce (Generic) + + zip + json + + + + 64bit Debian jessie v2.12.16 running Go 1.4 (Preconfigured - Docker) + + zip + + + + 64bit Debian jessie v2.12.16 running Go 1.3 (Preconfigured - Docker) + + zip + + + + 64bit Debian jessie v2.12.16 running Python 3.4 (Preconfigured - Docker) + + zip + + + + 64bit Debian jessie v2.10.0 running Python 3.4 (Preconfigured - Docker) + + zip + + + + 64bit Amazon Linux 2018.03 v2.9.1 running Java 8 + + jar + zip + + + + 64bit Amazon Linux 2018.03 v2.9.1 running Java 7 + + jar + zip + + + + 64bit Amazon Linux 2018.03 v2.8.0 running Java 8 + + jar + zip + + + + 64bit Amazon Linux 2018.03 v2.7.6 running Java 8 + + jar + zip + + + + 64bit Amazon Linux 2018.03 v2.7.5 running Java 8 + + jar + zip + + + + 64bit Amazon Linux 2018.03 v2.7.4 running Java 8 + + jar + zip + + + + 64bit Amazon Linux 2018.03 v2.7.2 running Java 8 + + jar + zip + + + + 64bit Amazon Linux 2018.03 v2.7.1 running Java 8 + + jar + zip + + + + 64bit Amazon Linux 2017.09 v2.6.8 running Java 8 + + jar + zip + + + + 64bit Amazon Linux 2017.09 v2.6.5 running Java 8 + + jar + zip + + + + 64bit Amazon Linux 2017.09 v2.6.4 running Java 8 + + jar + zip + + + + 64bit Amazon Linux 2017.09 v2.6.3 running Java 8 + + jar + zip + + + + 64bit Amazon Linux 2017.09 v2.6.0 running Java 8 + + jar + zip + + + + 64bit Amazon Linux 2017.03 v2.5.4 running Java 8 + + jar + zip + + + + 64bit Amazon Linux 2017.03 v2.5.3 running Java 8 + + jar + zip + + + + 64bit Amazon Linux 2017.03 v2.5.2 running Java 8 + + jar + zip + + + + 64bit Amazon Linux 2016.09 v2.4.4 running Java 8 + + jar + zip + + + + 64bit Amazon Linux 2018.03 v2.12.1 running Go 1.12.7 + + zip + + + + 64bit Amazon Linux 2018.03 v2.6.14 running Packer 1.0.3 + + + + 64bit Amazon Linux 2018.03 v2.12.16 running GlassFish 5.0 Java 8 (Preconfigured - Docker) + + zip + + + + + + bd6bd2b2-9983-4845-b53b-fe53e8a5e1e7 + + +""" + + +EB_UPDATE_TAGS_FOR_RESOURCE = """ + + + f355d788-e67e-440f-b915-99e35254ffee 
+ + +""" + + +EB_LIST_TAGS_FOR_RESOURCE = """ + + + + {% for key, value in tags.items() %} + + {{ key }} + {{ value }} + + {% endfor %} + + {{ arn }} + + + 178e410f-3b57-456f-a64c-a3b6a16da9ab + + +""" diff --git a/moto/elasticbeanstalk/urls.py b/moto/elasticbeanstalk/urls.py new file mode 100644 index 000000000..2d57f7f9d --- /dev/null +++ b/moto/elasticbeanstalk/urls.py @@ -0,0 +1,11 @@ +from __future__ import unicode_literals + +from .responses import EBResponse + +url_bases = [ + r"https?://elasticbeanstalk.(?P[a-zA-Z0-9\-_]+).amazonaws.com", +] + +url_paths = { + "{0}/$": EBResponse.dispatch, +} diff --git a/moto/emr/responses.py b/moto/emr/responses.py index 3708db0ed..d2b234ced 100644 --- a/moto/emr/responses.py +++ b/moto/emr/responses.py @@ -10,9 +10,10 @@ from six.moves.urllib.parse import urlparse from moto.core.responses import AWSServiceSpec from moto.core.responses import BaseResponse from moto.core.responses import xml_to_json_response +from moto.core.utils import tags_from_query_string from .exceptions import EmrError from .models import emr_backends -from .utils import steps_from_query_string, tags_from_query_string +from .utils import steps_from_query_string def generate_boto3_response(operation): @@ -91,7 +92,7 @@ class ElasticMapReduceResponse(BaseResponse): @generate_boto3_response("AddTags") def add_tags(self): cluster_id = self._get_param("ResourceId") - tags = tags_from_query_string(self.querystring) + tags = tags_from_query_string(self.querystring, prefix="Tags") self.backend.add_tags(cluster_id, tags) template = self.response_template(ADD_TAGS_TEMPLATE) return template.render() diff --git a/moto/emr/utils.py b/moto/emr/utils.py index 0f75995b8..fb33214c8 100644 --- a/moto/emr/utils.py +++ b/moto/emr/utils.py @@ -22,22 +22,6 @@ def random_instance_group_id(size=13): return "i-{0}".format(random_id()) -def tags_from_query_string(querystring_dict): - prefix = "Tags" - suffix = "Key" - response_values = {} - for key, value in 
querystring_dict.items(): - if key.startswith(prefix) and key.endswith(suffix): - tag_index = key.replace(prefix + ".", "").replace("." + suffix, "") - tag_key = querystring_dict.get("Tags.{0}.Key".format(tag_index))[0] - tag_value_key = "Tags.{0}.Value".format(tag_index) - if tag_value_key in querystring_dict: - response_values[tag_key] = querystring_dict.get(tag_value_key)[0] - else: - response_values[tag_key] = None - return response_values - - def steps_from_query_string(querystring_dict): steps = [] for step in querystring_dict: diff --git a/moto/resourcegroupstaggingapi/models.py b/moto/resourcegroupstaggingapi/models.py index d05a53f81..b6e35d586 100644 --- a/moto/resourcegroupstaggingapi/models.py +++ b/moto/resourcegroupstaggingapi/models.py @@ -145,10 +145,7 @@ class ResourceGroupsTaggingAPIBackend(BaseBackend): # Do S3, resource type s3 if not resource_type_filters or "s3" in resource_type_filters: for bucket in self.s3_backend.buckets.values(): - tags = [] - for tag in bucket.tags.tag_set.tags: - tags.append({"Key": tag.key, "Value": tag.value}) - + tags = self.s3_backend.tagger.list_tags_for_resource(bucket.arn)["Tags"] if not tags or not tag_filter( tags ): # Skip if no tags, or invalid filter @@ -362,8 +359,9 @@ class ResourceGroupsTaggingAPIBackend(BaseBackend): # Do S3, resource type s3 for bucket in self.s3_backend.buckets.values(): - for tag in bucket.tags.tag_set.tags: - yield tag.key + tags = self.s3_backend.tagger.get_tag_dict_for_resource(bucket.arn) + for key, _ in tags.items(): + yield key # EC2 tags def get_ec2_keys(res_id): @@ -414,9 +412,10 @@ class ResourceGroupsTaggingAPIBackend(BaseBackend): # Do S3, resource type s3 for bucket in self.s3_backend.buckets.values(): - for tag in bucket.tags.tag_set.tags: - if tag.key == tag_key: - yield tag.value + tags = self.s3_backend.tagger.get_tag_dict_for_resource(bucket.arn) + for key, value in tags.items(): + if key == tag_key: + yield value # EC2 tags def get_ec2_values(res_id): diff --git 
a/moto/s3/models.py b/moto/s3/models.py index 8c2a86f41..7373dc9e3 100644 --- a/moto/s3/models.py +++ b/moto/s3/models.py @@ -22,6 +22,8 @@ import six from bisect import insort from moto.core import ACCOUNT_ID, BaseBackend, BaseModel from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime +from moto.cloudwatch.models import MetricDatum +from moto.utilities.tagging_service import TaggingService from .exceptions import ( BucketAlreadyExists, MissingBucket, @@ -34,7 +36,6 @@ from .exceptions import ( MalformedXML, InvalidStorageClass, InvalidTargetBucketForLogging, - DuplicateTagKeys, CrossLocationLoggingProhibitted, NoSuchPublicAccessBlockConfiguration, InvalidPublicAccessBlockConfiguration, @@ -94,6 +95,7 @@ class FakeKey(BaseModel): version_id=0, max_buffer_size=DEFAULT_KEY_BUFFER_SIZE, multipart=None, + bucket_name=None, ): self.name = name self.last_modified = datetime.datetime.utcnow() @@ -105,8 +107,8 @@ class FakeKey(BaseModel): self._etag = etag self._version_id = version_id self._is_versioned = is_versioned - self._tagging = FakeTagging() self.multipart = multipart + self.bucket_name = bucket_name self._value_buffer = tempfile.SpooledTemporaryFile(max_size=max_buffer_size) self._max_buffer_size = max_buffer_size @@ -126,6 +128,13 @@ class FakeKey(BaseModel): self.lock.release() return r + @property + def arn(self): + # S3 Objects don't have an ARN, but we do need something unique when creating tags against this resource + return "arn:aws:s3:::{}/{}/{}".format( + self.bucket_name, self.name, self.version_id + ) + @value.setter def value(self, new_value): self._value_buffer.seek(0) @@ -152,9 +161,6 @@ class FakeKey(BaseModel): self._metadata = {} self._metadata.update(metadata) - def set_tagging(self, tagging): - self._tagging = tagging - def set_storage_class(self, storage): if storage is not None and storage not in STORAGE_CLASS: raise InvalidStorageClass(storage=storage) @@ -210,10 +216,6 @@ class FakeKey(BaseModel): def 
metadata(self): return self._metadata - @property - def tagging(self): - return self._tagging - @property def response_dict(self): res = { @@ -471,26 +473,10 @@ def get_canned_acl(acl): return FakeAcl(grants=grants) -class FakeTagging(BaseModel): - def __init__(self, tag_set=None): - self.tag_set = tag_set or FakeTagSet() - - -class FakeTagSet(BaseModel): - def __init__(self, tags=None): - self.tags = tags or [] - - -class FakeTag(BaseModel): - def __init__(self, key, value=None): - self.key = key - self.value = value - - class LifecycleFilter(BaseModel): def __init__(self, prefix=None, tag=None, and_filter=None): self.prefix = prefix - self.tag = tag + (self.tag_key, self.tag_value) = tag if tag else (None, None) self.and_filter = and_filter def to_config_dict(self): @@ -499,11 +485,11 @@ class LifecycleFilter(BaseModel): "predicate": {"type": "LifecyclePrefixPredicate", "prefix": self.prefix} } - elif self.tag: + elif self.tag_key: return { "predicate": { "type": "LifecycleTagPredicate", - "tag": {"key": self.tag.key, "value": self.tag.value}, + "tag": {"key": self.tag_key, "value": self.tag_value}, } } @@ -527,12 +513,9 @@ class LifecycleAndFilter(BaseModel): if self.prefix is not None: data.append({"type": "LifecyclePrefixPredicate", "prefix": self.prefix}) - for tag in self.tags: + for key, value in self.tags.items(): data.append( - { - "type": "LifecycleTagPredicate", - "tag": {"key": tag.key, "value": tag.value}, - } + {"type": "LifecycleTagPredicate", "tag": {"key": key, "value": value},} ) return data @@ -787,7 +770,6 @@ class FakeBucket(BaseModel): self.policy = None self.website_configuration = None self.acl = get_canned_acl("private") - self.tags = FakeTagging() self.cors = [] self.logging = {} self.notification_configuration = None @@ -879,7 +861,7 @@ class FakeBucket(BaseModel): and_filter = None if rule["Filter"].get("And"): filters += 1 - and_tags = [] + and_tags = {} if rule["Filter"]["And"].get("Tag"): if not 
isinstance(rule["Filter"]["And"]["Tag"], list): rule["Filter"]["And"]["Tag"] = [ @@ -887,7 +869,7 @@ class FakeBucket(BaseModel): ] for t in rule["Filter"]["And"]["Tag"]: - and_tags.append(FakeTag(t["Key"], t.get("Value", ""))) + and_tags[t["Key"]] = t.get("Value", "") try: and_prefix = ( @@ -901,7 +883,7 @@ class FakeBucket(BaseModel): filter_tag = None if rule["Filter"].get("Tag"): filters += 1 - filter_tag = FakeTag( + filter_tag = ( rule["Filter"]["Tag"]["Key"], rule["Filter"]["Tag"].get("Value", ""), ) @@ -988,16 +970,6 @@ class FakeBucket(BaseModel): def delete_cors(self): self.cors = [] - def set_tags(self, tagging): - self.tags = tagging - - def delete_tags(self): - self.tags = FakeTagging() - - @property - def tagging(self): - return self.tags - def set_logging(self, logging_config, bucket_backend): if not logging_config: self.logging = {} @@ -1085,6 +1057,10 @@ class FakeBucket(BaseModel): def set_acl(self, acl): self.acl = acl + @property + def arn(self): + return "arn:aws:s3:::{}".format(self.name) + @property def physical_resource_id(self): return self.name @@ -1110,7 +1086,7 @@ class FakeBucket(BaseModel): int(time.mktime(self.creation_date.timetuple())) ), # PY2 and 3 compatible "configurationItemMD5Hash": "", - "arn": "arn:aws:s3:::{}".format(self.name), + "arn": self.arn, "resourceType": "AWS::S3::Bucket", "resourceId": self.name, "resourceName": self.name, @@ -1119,7 +1095,7 @@ class FakeBucket(BaseModel): "resourceCreationTime": str(self.creation_date), "relatedEvents": [], "relationships": [], - "tags": {tag.key: tag.value for tag in self.tagging.tag_set.tags}, + "tags": s3_backend.tagger.get_tag_dict_for_resource(self.arn), "configuration": { "name": self.name, "owner": {"id": OWNER}, @@ -1181,6 +1157,42 @@ class S3Backend(BaseBackend): def __init__(self): self.buckets = {} self.account_public_access_block = None + self.tagger = TaggingService() + + # TODO: This is broken! 
DO NOT IMPORT MUTABLE DATA TYPES FROM OTHER AREAS -- THIS BREAKS UNMOCKING! + # WRAP WITH A GETTER/SETTER FUNCTION + # Register this class as a CloudWatch Metric Provider + # Must provide a method 'get_cloudwatch_metrics' that will return a list of metrics, based on the data available + # metric_providers["S3"] = self + + def get_cloudwatch_metrics(self): + metrics = [] + for name, bucket in self.buckets.items(): + metrics.append( + MetricDatum( + namespace="AWS/S3", + name="BucketSizeBytes", + value=bucket.keys.item_size(), + dimensions=[ + {"Name": "StorageType", "Value": "StandardStorage"}, + {"Name": "BucketName", "Value": name}, + ], + timestamp=datetime.datetime.now(), + ) + ) + metrics.append( + MetricDatum( + namespace="AWS/S3", + name="NumberOfObjects", + value=len(bucket.keys), + dimensions=[ + {"Name": "StorageType", "Value": "AllStorageTypes"}, + {"Name": "BucketName", "Value": name}, + ], + timestamp=datetime.datetime.now(), + ) + ) + return metrics def create_bucket(self, bucket_name, region_name): if bucket_name in self.buckets: @@ -1350,23 +1362,32 @@ class S3Backend(BaseBackend): else: return None - def set_key_tagging(self, bucket_name, key_name, tagging, version_id=None): - key = self.get_key(bucket_name, key_name, version_id) + def get_key_tags(self, key): + return self.tagger.list_tags_for_resource(key.arn) + + def set_key_tags(self, key, tags, key_name=None): if key is None: raise MissingKey(key_name) - key.set_tagging(tagging) + self.tagger.delete_all_tags_for_resource(key.arn) + self.tagger.tag_resource( + key.arn, [{"Key": key, "Value": value} for key, value in tags.items()], + ) return key - def put_bucket_tagging(self, bucket_name, tagging): - tag_keys = [tag.key for tag in tagging.tag_set.tags] - if len(tag_keys) != len(set(tag_keys)): - raise DuplicateTagKeys() + def get_bucket_tags(self, bucket_name): bucket = self.get_bucket(bucket_name) - bucket.set_tags(tagging) + return self.tagger.list_tags_for_resource(bucket.arn) + + def 
put_bucket_tags(self, bucket_name, tags): + bucket = self.get_bucket(bucket_name) + self.tagger.delete_all_tags_for_resource(bucket.arn) + self.tagger.tag_resource( + bucket.arn, [{"Key": key, "Value": value} for key, value in tags.items()], + ) def delete_bucket_tagging(self, bucket_name): bucket = self.get_bucket(bucket_name) - bucket.delete_tags() + self.tagger.delete_all_tags_for_resource(bucket.arn) def put_bucket_cors(self, bucket_name, cors_rules): bucket = self.get_bucket(bucket_name) @@ -1574,6 +1595,7 @@ class S3Backend(BaseBackend): key = self.get_key(src_bucket_name, src_key_name, version_id=src_version_id) new_key = key.copy(dest_key_name, dest_bucket.is_versioned) + self.tagger.copy_tags(key.arn, new_key.arn) if storage is not None: new_key.set_storage_class(storage) diff --git a/moto/s3/responses.py b/moto/s3/responses.py index 442489a8a..e1ab93860 100644 --- a/moto/s3/responses.py +++ b/moto/s3/responses.py @@ -24,6 +24,7 @@ from moto.s3bucket_path.utils import ( from .exceptions import ( BucketAlreadyExists, + DuplicateTagKeys, S3ClientError, MissingBucket, MissingKey, @@ -43,9 +44,6 @@ from .models import ( FakeGrant, FakeAcl, FakeKey, - FakeTagging, - FakeTagSet, - FakeTag, ) from .utils import ( bucket_name_from_url, @@ -134,7 +132,8 @@ ACTION_MAP = { def parse_key_name(pth): - return pth.lstrip("/") + # strip the first '/' left by urlparse + return pth[1:] if pth.startswith("/") else pth def is_delete_keys(request, path, bucket_name): @@ -378,13 +377,13 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): template = self.response_template(S3_OBJECT_ACL_RESPONSE) return template.render(obj=bucket) elif "tagging" in querystring: - bucket = self.backend.get_bucket(bucket_name) + tags = self.backend.get_bucket_tags(bucket_name)["Tags"] # "Special Error" if no tags: - if len(bucket.tagging.tag_set.tags) == 0: + if len(tags) == 0: template = self.response_template(S3_NO_BUCKET_TAGGING) return 404, {}, 
template.render(bucket_name=bucket_name) - template = self.response_template(S3_BUCKET_TAGGING_RESPONSE) - return template.render(bucket=bucket) + template = self.response_template(S3_OBJECT_TAGGING_RESPONSE) + return template.render(tags=tags) elif "logging" in querystring: bucket = self.backend.get_bucket(bucket_name) if not bucket.logging: @@ -652,7 +651,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): return "" elif "tagging" in querystring: tagging = self._bucket_tagging_from_xml(body) - self.backend.put_bucket_tagging(bucket_name, tagging) + self.backend.put_bucket_tags(bucket_name, tagging) return "" elif "website" in querystring: self.backend.set_bucket_website_configuration(bucket_name, body) @@ -839,27 +838,35 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): def _bucket_response_delete_keys(self, request, body, bucket_name): template = self.response_template(S3_DELETE_KEYS_RESPONSE) + body_dict = xmltodict.parse(body) - keys = minidom.parseString(body).getElementsByTagName("Key") - deleted_names = [] - error_names = [] - if len(keys) == 0: + objects = body_dict["Delete"].get("Object", []) + if not isinstance(objects, list): + # We expect a list of objects, but when there is a single node xmltodict does not + # return a list. 
+ objects = [objects] + if len(objects) == 0: raise MalformedXML() - for k in keys: - key_name = k.firstChild.nodeValue + deleted_objects = [] + error_names = [] + + for object_ in objects: + key_name = object_["Key"] + version_id = object_.get("VersionId", None) + success = self.backend.delete_key( - bucket_name, undo_clean_key_name(key_name) + bucket_name, undo_clean_key_name(key_name), version_id=version_id ) if success: - deleted_names.append(key_name) + deleted_objects.append((key_name, version_id)) else: error_names.append(key_name) return ( 200, {}, - template.render(deleted=deleted_names, delete_errors=error_names), + template.render(deleted=deleted_objects, delete_errors=error_names), ) def _handle_range_header(self, request, headers, response_content): @@ -1098,8 +1105,9 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): template = self.response_template(S3_OBJECT_ACL_RESPONSE) return 200, response_headers, template.render(obj=key) if "tagging" in query: + tags = self.backend.get_key_tags(key)["Tags"] template = self.response_template(S3_OBJECT_TAGGING_RESPONSE) - return 200, response_headers, template.render(obj=key) + return 200, response_headers, template.render(tags=tags) response_headers.update(key.metadata) response_headers.update(key.response_dict) @@ -1171,8 +1179,9 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): version_id = query["versionId"][0] else: version_id = None + key = self.backend.get_key(bucket_name, key_name, version_id=version_id) tagging = self._tagging_from_xml(body) - self.backend.set_key_tagging(bucket_name, key_name, tagging, version_id) + self.backend.set_key_tags(key, tagging, key_name) return 200, response_headers, "" if "x-amz-copy-source" in request.headers: @@ -1213,7 +1222,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): tdirective = request.headers.get("x-amz-tagging-directive") if tdirective == "REPLACE": tagging = 
self._tagging_from_headers(request.headers) - new_key.set_tagging(tagging) + self.backend.set_key_tags(new_key, tagging) template = self.response_template(S3_OBJECT_COPY_RESPONSE) response_headers.update(new_key.response_dict) return 200, response_headers, template.render(key=new_key) @@ -1237,7 +1246,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): new_key.website_redirect_location = request.headers.get( "x-amz-website-redirect-location" ) - new_key.set_tagging(tagging) + self.backend.set_key_tags(new_key, tagging) response_headers.update(new_key.response_dict) return 200, response_headers, "" @@ -1365,55 +1374,45 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): return None def _tagging_from_headers(self, headers): + tags = {} if headers.get("x-amz-tagging"): parsed_header = parse_qs(headers["x-amz-tagging"], keep_blank_values=True) - tags = [] for tag in parsed_header.items(): - tags.append(FakeTag(tag[0], tag[1][0])) - - tag_set = FakeTagSet(tags) - tagging = FakeTagging(tag_set) - return tagging - else: - return FakeTagging() + tags[tag[0]] = tag[1][0] + return tags def _tagging_from_xml(self, xml): parsed_xml = xmltodict.parse(xml, force_list={"Tag": True}) - tags = [] + tags = {} for tag in parsed_xml["Tagging"]["TagSet"]["Tag"]: - tags.append(FakeTag(tag["Key"], tag["Value"])) + tags[tag["Key"]] = tag["Value"] - tag_set = FakeTagSet(tags) - tagging = FakeTagging(tag_set) - return tagging + return tags def _bucket_tagging_from_xml(self, xml): parsed_xml = xmltodict.parse(xml) - tags = [] + tags = {} # Optional if no tags are being sent: if parsed_xml["Tagging"].get("TagSet"): # If there is only 1 tag, then it's not a list: if not isinstance(parsed_xml["Tagging"]["TagSet"]["Tag"], list): - tags.append( - FakeTag( - parsed_xml["Tagging"]["TagSet"]["Tag"]["Key"], - parsed_xml["Tagging"]["TagSet"]["Tag"]["Value"], - ) - ) + tags[parsed_xml["Tagging"]["TagSet"]["Tag"]["Key"]] = parsed_xml[ + "Tagging" + 
]["TagSet"]["Tag"]["Value"] else: for tag in parsed_xml["Tagging"]["TagSet"]["Tag"]: - tags.append(FakeTag(tag["Key"], tag["Value"])) + if tag["Key"] in tags: + raise DuplicateTagKeys() + tags[tag["Key"]] = tag["Value"] # Verify that "aws:" is not in the tags. If so, then this is a problem: - for tag in tags: - if tag.key.startswith("aws:"): + for key, _ in tags.items(): + if key.startswith("aws:"): raise NoSystemTags() - tag_set = FakeTagSet(tags) - tagging = FakeTagging(tag_set) - return tagging + return tags def _cors_from_xml(self, xml): parsed_xml = xmltodict.parse(xml) @@ -1733,10 +1732,10 @@ S3_BUCKET_LIFECYCLE_CONFIGURATION = """ {% if rule.filter.prefix != None %} {{ rule.filter.prefix }} {% endif %} - {% if rule.filter.tag %} + {% if rule.filter.tag_key %} - {{ rule.filter.tag.key }} - {{ rule.filter.tag.value }} + {{ rule.filter.tag_key }} + {{ rule.filter.tag_value }} {% endif %} {% if rule.filter.and_filter %} @@ -1744,10 +1743,10 @@ S3_BUCKET_LIFECYCLE_CONFIGURATION = """ {% if rule.filter.and_filter.prefix != None %} {{ rule.filter.and_filter.prefix }} {% endif %} - {% for tag in rule.filter.and_filter.tags %} + {% for key, value in rule.filter.and_filter.tags.items() %} - {{ tag.key }} - {{ tag.value }} + {{ key }} + {{ value }} {% endfor %} @@ -1861,9 +1860,10 @@ S3_BUCKET_GET_VERSIONS = """ S3_DELETE_KEYS_RESPONSE = """ -{% for k in deleted %} +{% for k, v in deleted %} {{k}} +{% if v %}{{v}}{% endif %} {% endfor %} {% for k in delete_errors %} @@ -1908,22 +1908,10 @@ S3_OBJECT_TAGGING_RESPONSE = """\ - {% for tag in obj.tagging.tag_set.tags %} + {% for tag in tags %} - {{ tag.key }} - {{ tag.value }} - - {% endfor %} - -""" - -S3_BUCKET_TAGGING_RESPONSE = """ - - - {% for tag in bucket.tagging.tag_set.tags %} - - {{ tag.key }} - {{ tag.value }} + {{ tag.Key }} + {{ tag.Value }} {% endfor %} diff --git a/moto/s3/urls.py b/moto/s3/urls.py index 752762184..4c4e9ea76 100644 --- a/moto/s3/urls.py +++ b/moto/s3/urls.py @@ -15,5 +15,5 @@ url_paths = { # 
path-based bucket + key "{0}/(?P[^/]+)/(?P.+)": S3ResponseInstance.key_or_control_response, # subdomain bucket + key with empty first part of path - "{0}//(?P.*)$": S3ResponseInstance.key_or_control_response, + "{0}/(?P/.*)$": S3ResponseInstance.key_or_control_response, } diff --git a/moto/s3/utils.py b/moto/s3/utils.py index 6ddcfa63e..014e98ca9 100644 --- a/moto/s3/utils.py +++ b/moto/s3/utils.py @@ -146,6 +146,12 @@ class _VersionedKeyStore(dict): for key in self: yield key, self.getlist(key) + def item_size(self): + size = 0 + for val in self.values(): + size += sys.getsizeof(val) + return size + items = iteritems = _iteritems lists = iterlists = _iterlists values = itervalues = _itervalues diff --git a/moto/secretsmanager/models.py b/moto/secretsmanager/models.py index 11a024be6..29bd6c96e 100644 --- a/moto/secretsmanager/models.py +++ b/moto/secretsmanager/models.py @@ -121,8 +121,16 @@ class SecretsManagerBackend(BaseBackend): "You can't perform this operation on the secret because it was marked for deletion." 
) + secret = self.secrets[secret_id] + tags = secret["tags"] + description = secret["description"] + version_id = self._add_secret( - secret_id, secret_string=secret_string, secret_binary=secret_binary + secret_id, + secret_string=secret_string, + secret_binary=secret_binary, + description=description, + tags=tags, ) response = json.dumps( @@ -136,7 +144,13 @@ class SecretsManagerBackend(BaseBackend): return response def create_secret( - self, name, secret_string=None, secret_binary=None, tags=[], **kwargs + self, + name, + secret_string=None, + secret_binary=None, + description=None, + tags=[], + **kwargs ): # error if secret exists @@ -146,7 +160,11 @@ class SecretsManagerBackend(BaseBackend): ) version_id = self._add_secret( - name, secret_string=secret_string, secret_binary=secret_binary, tags=tags + name, + secret_string=secret_string, + secret_binary=secret_binary, + description=description, + tags=tags, ) response = json.dumps( @@ -164,6 +182,7 @@ class SecretsManagerBackend(BaseBackend): secret_id, secret_string=None, secret_binary=None, + description=None, tags=[], version_id=None, version_stages=None, @@ -216,13 +235,27 @@ class SecretsManagerBackend(BaseBackend): secret["rotation_lambda_arn"] = "" secret["auto_rotate_after_days"] = 0 secret["tags"] = tags + secret["description"] = description return version_id def put_secret_value(self, secret_id, secret_string, secret_binary, version_stages): + if secret_id in self.secrets.keys(): + secret = self.secrets[secret_id] + tags = secret["tags"] + description = secret["description"] + else: + tags = [] + description = "" + version_id = self._add_secret( - secret_id, secret_string, secret_binary, version_stages=version_stages + secret_id, + secret_string, + secret_binary, + description=description, + tags=tags, + version_stages=version_stages, ) response = json.dumps( @@ -246,7 +279,7 @@ class SecretsManagerBackend(BaseBackend): { "ARN": secret_arn(self.region, secret["secret_id"]), "Name": secret["name"], - 
"Description": "", + "Description": secret.get("description", ""), "KmsKeyId": "", "RotationEnabled": secret["rotation_enabled"], "RotationLambdaARN": secret["rotation_lambda_arn"], @@ -310,6 +343,7 @@ class SecretsManagerBackend(BaseBackend): self._add_secret( secret_id, old_secret_version["secret_string"], + secret["description"], secret["tags"], version_id=new_version_id, version_stages=["AWSCURRENT"], @@ -416,7 +450,7 @@ class SecretsManagerBackend(BaseBackend): { "ARN": secret_arn(self.region, secret["secret_id"]), "DeletedDate": secret.get("deleted_date", None), - "Description": "", + "Description": secret.get("description", ""), "KmsKeyId": "", "LastAccessedDate": None, "LastChangedDate": None, diff --git a/moto/secretsmanager/responses.py b/moto/secretsmanager/responses.py index 757b888a3..9a899c90d 100644 --- a/moto/secretsmanager/responses.py +++ b/moto/secretsmanager/responses.py @@ -21,11 +21,13 @@ class SecretsManagerResponse(BaseResponse): name = self._get_param("Name") secret_string = self._get_param("SecretString") secret_binary = self._get_param("SecretBinary") + description = self._get_param("Description", if_none="") tags = self._get_param("Tags", if_none=[]) return secretsmanager_backends[self.region].create_secret( name=name, secret_string=secret_string, secret_binary=secret_binary, + description=description, tags=tags, ) diff --git a/moto/server.py b/moto/server.py index 92fe6f229..498f6c504 100644 --- a/moto/server.py +++ b/moto/server.py @@ -1,6 +1,7 @@ from __future__ import unicode_literals import argparse +import io import json import re import sys @@ -29,6 +30,7 @@ UNSIGNED_REQUESTS = { "AWSCognitoIdentityService": ("cognito-identity", "us-east-1"), "AWSCognitoIdentityProviderService": ("cognito-idp", "us-east-1"), } +UNSIGNED_ACTIONS = {"AssumeRoleWithSAML": ("sts", "us-east-1")} class DomainDispatcherApplication(object): @@ -77,9 +79,13 @@ class DomainDispatcherApplication(object): else: # Unsigned request target = 
environ.get("HTTP_X_AMZ_TARGET") + action = self.get_action_from_body(environ) if target: service, _ = target.split(".", 1) service, region = UNSIGNED_REQUESTS.get(service, DEFAULT_SERVICE_REGION) + elif action and action in UNSIGNED_ACTIONS: + # See if we can match the Action to a known service + service, region = UNSIGNED_ACTIONS.get(action) else: # S3 is the last resort when the target is also unknown service, region = DEFAULT_SERVICE_REGION @@ -130,6 +136,26 @@ class DomainDispatcherApplication(object): self.app_instances[backend] = app return app + def get_action_from_body(self, environ): + body = None + try: + # AWS requests use querystrings as the body (Action=x&Data=y&...) + simple_form = environ["CONTENT_TYPE"].startswith( + "application/x-www-form-urlencoded" + ) + request_body_size = int(environ["CONTENT_LENGTH"]) + if simple_form and request_body_size: + body = environ["wsgi.input"].read(request_body_size).decode("utf-8") + body_dict = dict(x.split("=") for x in body.split("&")) + return body_dict["Action"] + except (KeyError, ValueError): + pass + finally: + if body: + # We've consumed the body = need to reset it + environ["wsgi.input"] = io.StringIO(body) + return None + def __call__(self, environ, start_response): backend_app = self.get_application(environ) return backend_app(environ, start_response) diff --git a/moto/sts/models.py b/moto/sts/models.py index 12824b2ed..b274b1acd 100644 --- a/moto/sts/models.py +++ b/moto/sts/models.py @@ -1,5 +1,7 @@ from __future__ import unicode_literals +from base64 import b64decode import datetime +import xmltodict from moto.core import BaseBackend, BaseModel from moto.core.utils import iso_8601_datetime_with_milliseconds from moto.core import ACCOUNT_ID @@ -79,5 +81,24 @@ class STSBackend(BaseBackend): def assume_role_with_web_identity(self, **kwargs): return self.assume_role(**kwargs) + def assume_role_with_saml(self, **kwargs): + del kwargs["principal_arn"] + saml_assertion_encoded = 
kwargs.pop("saml_assertion") + saml_assertion_decoded = b64decode(saml_assertion_encoded) + saml_assertion = xmltodict.parse(saml_assertion_decoded.decode("utf-8")) + kwargs["duration"] = int( + saml_assertion["samlp:Response"]["Assertion"]["AttributeStatement"][ + "Attribute" + ][2]["AttributeValue"] + ) + kwargs["role_session_name"] = saml_assertion["samlp:Response"]["Assertion"][ + "AttributeStatement" + ]["Attribute"][0]["AttributeValue"] + kwargs["external_id"] = None + kwargs["policy"] = None + role = AssumedRole(**kwargs) + self.assumed_roles.append(role) + return role + sts_backend = STSBackend() diff --git a/moto/sts/responses.py b/moto/sts/responses.py index f36799b03..9af2c3e12 100644 --- a/moto/sts/responses.py +++ b/moto/sts/responses.py @@ -71,6 +71,19 @@ class TokenResponse(BaseResponse): template = self.response_template(ASSUME_ROLE_WITH_WEB_IDENTITY_RESPONSE) return template.render(role=role) + def assume_role_with_saml(self): + role_arn = self.querystring.get("RoleArn")[0] + principal_arn = self.querystring.get("PrincipalArn")[0] + saml_assertion = self.querystring.get("SAMLAssertion")[0] + + role = sts_backend.assume_role_with_saml( + role_arn=role_arn, + principal_arn=principal_arn, + saml_assertion=saml_assertion, + ) + template = self.response_template(ASSUME_ROLE_WITH_SAML_RESPONSE) + return template.render(role=role) + def get_caller_identity(self): template = self.response_template(GET_CALLER_IDENTITY_RESPONSE) @@ -168,6 +181,30 @@ ASSUME_ROLE_WITH_WEB_IDENTITY_RESPONSE = """""" +ASSUME_ROLE_WITH_SAML_RESPONSE = """ + + https://signin.aws.amazon.com/saml + + {{ role.user_id }} + {{ role.arn }} + + + {{ role.access_key_id }} + {{ role.secret_access_key }} + {{ role.session_token }} + {{ role.expiration_ISO8601 }} + + {{ role.user_id }} + B64EncodedStringOfHashOfIssuerAccountIdAndUserId= + persistent + http://localhost:3000/ + + + c6104cbe-af31-11e0-8154-cbc7ccf896c7 + +""" + + GET_CALLER_IDENTITY_RESPONSE = """ {{ arn }} diff --git 
a/moto/utilities/tagging_service.py b/moto/utilities/tagging_service.py index 89b857277..2d6ac99c9 100644 --- a/moto/utilities/tagging_service.py +++ b/moto/utilities/tagging_service.py @@ -5,15 +5,23 @@ class TaggingService: self.valueName = valueName self.tags = {} + def get_tag_dict_for_resource(self, arn): + result = {} + if self.has_tags(arn): + for k, v in self.tags[arn].items(): + result[k] = v + return result + def list_tags_for_resource(self, arn): result = [] - if arn in self.tags: + if self.has_tags(arn): for k, v in self.tags[arn].items(): result.append({self.keyName: k, self.valueName: v}) return {self.tagName: result} def delete_all_tags_for_resource(self, arn): - del self.tags[arn] + if self.has_tags(arn): + del self.tags[arn] def has_tags(self, arn): return arn in self.tags @@ -27,6 +35,12 @@ class TaggingService: else: self.tags[arn][t[self.keyName]] = None + def copy_tags(self, from_arn, to_arn): + if self.has_tags(from_arn): + self.tag_resource( + to_arn, self.list_tags_for_resource(from_arn)[self.tagName] + ) + def untag_resource_using_names(self, arn, tag_names): for name in tag_names: if name in self.tags.get(arn, {}): diff --git a/setup.py b/setup.py index adc5e4bb9..684c0dcea 100755 --- a/setup.py +++ b/setup.py @@ -101,5 +101,4 @@ setup( project_urls={ "Documentation": "http://docs.getmoto.org/en/latest/", }, - data_files=[('', ['moto/dynamodb2/parsing/reserved_keywords.txt'])], ) diff --git a/tests/test_apigateway/test_apigateway.py b/tests/test_apigateway/test_apigateway.py index 596ed2dd4..b04328a03 100644 --- a/tests/test_apigateway/test_apigateway.py +++ b/tests/test_apigateway/test_apigateway.py @@ -69,6 +69,22 @@ def test_create_rest_api_with_tags(): response["tags"].should.equal({"MY_TAG1": "MY_VALUE1"}) +@mock_apigateway +def test_create_rest_api_with_policy(): + client = boto3.client("apigateway", region_name="us-west-2") + + policy = '{"Version": "2012-10-17","Statement": []}' + response = client.create_rest_api( + name="my_api", 
description="this is my api", policy=policy + ) + api_id = response["id"] + + response = client.get_rest_api(restApiId=api_id) + + assert "policy" in response + response["policy"].should.equal(policy) + + @mock_apigateway def test_create_rest_api_invalid_apikeysource(): client = boto3.client("apigateway", region_name="us-west-2") diff --git a/tests/test_awslambda/test_lambda.py b/tests/test_awslambda/test_lambda.py index eb8453e43..e67576518 100644 --- a/tests/test_awslambda/test_lambda.py +++ b/tests/test_awslambda/test_lambda.py @@ -1677,6 +1677,42 @@ def test_create_function_with_unknown_arn(): ) +@mock_lambda +def test_remove_function_permission(): + conn = boto3.client("lambda", _lambda_region) + zip_content = get_test_zip_file1() + conn.create_function( + FunctionName="testFunction", + Runtime="python2.7", + Role=(get_role_name()), + Handler="lambda_function.handler", + Code={"ZipFile": zip_content}, + Description="test lambda function", + Timeout=3, + MemorySize=128, + Publish=True, + ) + + conn.add_permission( + FunctionName="testFunction", + StatementId="1", + Action="lambda:InvokeFunction", + Principal="432143214321", + SourceArn="arn:aws:lambda:us-west-2:account-id:function:helloworld", + SourceAccount="123412341234", + EventSourceToken="blah", + Qualifier="2", + ) + + remove = conn.remove_permission( + FunctionName="testFunction", StatementId="1", Qualifier="2", + ) + remove["ResponseMetadata"]["HTTPStatusCode"].should.equal(204) + policy = conn.get_policy(FunctionName="testFunction", Qualifier="2")["Policy"] + policy = json.loads(policy) + policy["Statement"].should.equal([]) + + def create_invalid_lambda(role): conn = boto3.client("lambda", _lambda_region) zip_content = get_test_zip_file1() diff --git a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py index 5444c2278..4df1ff5d2 100644 --- a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py +++ 
b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py @@ -835,8 +835,10 @@ def test_describe_change_set(): ) stack = cf_conn.describe_change_set(ChangeSetName="NewChangeSet") + stack["ChangeSetName"].should.equal("NewChangeSet") stack["StackName"].should.equal("NewStack") + stack["Status"].should.equal("REVIEW_IN_PROGRESS") cf_conn.create_change_set( StackName="NewStack", @@ -851,15 +853,30 @@ def test_describe_change_set(): @mock_cloudformation +@mock_ec2 def test_execute_change_set_w_arn(): cf_conn = boto3.client("cloudformation", region_name="us-east-1") + ec2 = boto3.client("ec2", region_name="us-east-1") + # Verify no instances exist at the moment + ec2.describe_instances()["Reservations"].should.have.length_of(0) + # Create a Change set, and verify no resources have been created yet change_set = cf_conn.create_change_set( StackName="NewStack", TemplateBody=dummy_template_json, ChangeSetName="NewChangeSet", ChangeSetType="CREATE", ) + ec2.describe_instances()["Reservations"].should.have.length_of(0) + cf_conn.describe_change_set(ChangeSetName="NewChangeSet")["Status"].should.equal( + "REVIEW_IN_PROGRESS" + ) + # Execute change set cf_conn.execute_change_set(ChangeSetName=change_set["Id"]) + # Verify that the status has changed, and the appropriate resources have been created + cf_conn.describe_change_set(ChangeSetName="NewChangeSet")["Status"].should.equal( + "CREATE_COMPLETE" + ) + ec2.describe_instances()["Reservations"].should.have.length_of(1) @mock_cloudformation diff --git a/tests/test_cloudwatch/test_cloudwatch.py b/tests/test_cloudwatch/test_cloudwatch.py index 5a05a55e1..60b6898bd 100644 --- a/tests/test_cloudwatch/test_cloudwatch.py +++ b/tests/test_cloudwatch/test_cloudwatch.py @@ -1,9 +1,10 @@ import boto from boto.ec2.cloudwatch.alarm import MetricAlarm +from boto.s3.key import Key from datetime import datetime import sure # noqa -from moto import mock_cloudwatch_deprecated +from moto import mock_cloudwatch_deprecated, 
mock_s3_deprecated def alarm_fixture(name="tester", action=None): @@ -83,7 +84,8 @@ def test_put_metric_data(): ) metrics = conn.list_metrics() - metrics.should.have.length_of(1) + metric_names = [m for m in metrics if m.name == "metric"] + metric_names.should.have(1) metric = metrics[0] metric.namespace.should.equal("tester") metric.name.should.equal("metric") @@ -153,3 +155,36 @@ def test_get_metric_statistics(): datapoint = datapoints[0] datapoint.should.have.key("Minimum").which.should.equal(1.5) datapoint.should.have.key("Timestamp").which.should.equal(metric_timestamp) + + +# TODO: THIS IS CURRENTLY BROKEN! +# @mock_s3_deprecated +# @mock_cloudwatch_deprecated +# def test_cloudwatch_return_s3_metrics(): +# +# region = "us-east-1" +# +# cw = boto.ec2.cloudwatch.connect_to_region(region) +# s3 = boto.s3.connect_to_region(region) +# +# bucket_name_1 = "test-bucket-1" +# bucket_name_2 = "test-bucket-2" +# +# bucket1 = s3.create_bucket(bucket_name=bucket_name_1) +# key = Key(bucket1) +# key.key = "the-key" +# key.set_contents_from_string("foobar" * 4) +# s3.create_bucket(bucket_name=bucket_name_2) +# +# metrics_s3_bucket_1 = cw.list_metrics(dimensions={"BucketName": bucket_name_1}) +# # Verify that the OOTB S3 metrics are available for the created buckets +# len(metrics_s3_bucket_1).should.be(2) +# metric_names = [m.name for m in metrics_s3_bucket_1] +# sorted(metric_names).should.equal( +# ["Metric:BucketSizeBytes", "Metric:NumberOfObjects"] +# ) +# +# # Explicit clean up - the metrics for these buckets are messing with subsequent tests +# key.delete() +# s3.delete_bucket(bucket_name_1) +# s3.delete_bucket(bucket_name_2) diff --git a/tests/test_cloudwatch/test_cloudwatch_boto3.py b/tests/test_cloudwatch/test_cloudwatch_boto3.py index 7fe144052..0c814ee44 100644 --- a/tests/test_cloudwatch/test_cloudwatch_boto3.py +++ b/tests/test_cloudwatch/test_cloudwatch_boto3.py @@ -3,6 +3,7 @@ import boto3 from botocore.exceptions import ClientError from datetime import 
datetime, timedelta +from freezegun import freeze_time from nose.tools import assert_raises from uuid import uuid4 import pytz @@ -211,6 +212,35 @@ def test_get_metric_statistics(): datapoint["Sum"].should.equal(1.5) +@mock_cloudwatch +@freeze_time("2020-02-10 18:44:05") +def test_custom_timestamp(): + utc_now = datetime.now(tz=pytz.utc) + time = "2020-02-10T18:44:09Z" + cw = boto3.client("cloudwatch", "eu-west-1") + + cw.put_metric_data( + Namespace="tester", + MetricData=[dict(MetricName="metric1", Value=1.5, Timestamp=time)], + ) + + cw.put_metric_data( + Namespace="tester", + MetricData=[ + dict(MetricName="metric2", Value=1.5, Timestamp=datetime(2020, 2, 10)) + ], + ) + + stats = cw.get_metric_statistics( + Namespace="tester", + MetricName="metric", + StartTime=utc_now - timedelta(seconds=60), + EndTime=utc_now + timedelta(seconds=60), + Period=60, + Statistics=["SampleCount", "Sum"], + ) + + @mock_cloudwatch def test_list_metrics(): cloudwatch = boto3.client("cloudwatch", "eu-west-1") @@ -233,8 +263,16 @@ def test_list_metrics(): # Verify format res.should.equal( [ - {u"Namespace": "list_test_1/", u"Dimensions": [], u"MetricName": "metric1"}, - {u"Namespace": "list_test_1/", u"Dimensions": [], u"MetricName": "metric1"}, + { + u"Namespace": "list_test_1/", + u"Dimensions": [], + u"MetricName": "metric1", + }, + { + u"Namespace": "list_test_1/", + u"Dimensions": [], + u"MetricName": "metric1", + }, ] ) # Verify unknown namespace still has no results @@ -292,3 +330,232 @@ def create_metrics(cloudwatch, namespace, metrics=5, data_points=5): Namespace=namespace, MetricData=[{"MetricName": metric_name, "Value": j, "Unit": "Seconds"}], ) + + +@mock_cloudwatch +def test_get_metric_data_within_timeframe(): + utc_now = datetime.now(tz=pytz.utc) + cloudwatch = boto3.client("cloudwatch", "eu-west-1") + namespace1 = "my_namespace/" + # put metric data + values = [0, 2, 4, 3.5, 7, 100] + cloudwatch.put_metric_data( + Namespace=namespace1, + MetricData=[ + {"MetricName": 
"metric1", "Value": val, "Unit": "Seconds"} for val in values + ], + ) + # get_metric_data + stats = ["Average", "Sum", "Minimum", "Maximum"] + response = cloudwatch.get_metric_data( + MetricDataQueries=[ + { + "Id": "result_" + stat, + "MetricStat": { + "Metric": {"Namespace": namespace1, "MetricName": "metric1"}, + "Period": 60, + "Stat": stat, + }, + } + for stat in stats + ], + StartTime=utc_now - timedelta(seconds=60), + EndTime=utc_now + timedelta(seconds=60), + ) + # + # Assert Average/Min/Max/Sum is returned as expected + avg = [ + res for res in response["MetricDataResults"] if res["Id"] == "result_Average" + ][0] + avg["Label"].should.equal("metric1 Average") + avg["StatusCode"].should.equal("Complete") + [int(val) for val in avg["Values"]].should.equal([19]) + + sum_ = [res for res in response["MetricDataResults"] if res["Id"] == "result_Sum"][ + 0 + ] + sum_["Label"].should.equal("metric1 Sum") + sum_["StatusCode"].should.equal("Complete") + [val for val in sum_["Values"]].should.equal([sum(values)]) + + min_ = [ + res for res in response["MetricDataResults"] if res["Id"] == "result_Minimum" + ][0] + min_["Label"].should.equal("metric1 Minimum") + min_["StatusCode"].should.equal("Complete") + [int(val) for val in min_["Values"]].should.equal([0]) + + max_ = [ + res for res in response["MetricDataResults"] if res["Id"] == "result_Maximum" + ][0] + max_["Label"].should.equal("metric1 Maximum") + max_["StatusCode"].should.equal("Complete") + [int(val) for val in max_["Values"]].should.equal([100]) + + +@mock_cloudwatch +def test_get_metric_data_partially_within_timeframe(): + utc_now = datetime.now(tz=pytz.utc) + yesterday = utc_now - timedelta(days=1) + last_week = utc_now - timedelta(days=7) + cloudwatch = boto3.client("cloudwatch", "eu-west-1") + namespace1 = "my_namespace/" + # put metric data + values = [0, 2, 4, 3.5, 7, 100] + cloudwatch.put_metric_data( + Namespace=namespace1, + MetricData=[ + { + "MetricName": "metric1", + "Value": 10, + "Unit": 
"Seconds", + "Timestamp": utc_now, + } + ], + ) + cloudwatch.put_metric_data( + Namespace=namespace1, + MetricData=[ + { + "MetricName": "metric1", + "Value": 20, + "Unit": "Seconds", + "Timestamp": yesterday, + } + ], + ) + cloudwatch.put_metric_data( + Namespace=namespace1, + MetricData=[ + { + "MetricName": "metric1", + "Value": 50, + "Unit": "Seconds", + "Timestamp": last_week, + } + ], + ) + # get_metric_data + response = cloudwatch.get_metric_data( + MetricDataQueries=[ + { + "Id": "result", + "MetricStat": { + "Metric": {"Namespace": namespace1, "MetricName": "metric1"}, + "Period": 60, + "Stat": "Sum", + }, + } + ], + StartTime=yesterday - timedelta(seconds=60), + EndTime=utc_now + timedelta(seconds=60), + ) + # + # Assert Last week's data is not returned + len(response["MetricDataResults"]).should.equal(1) + sum_ = response["MetricDataResults"][0] + sum_["Label"].should.equal("metric1 Sum") + sum_["StatusCode"].should.equal("Complete") + sum_["Values"].should.equal([30.0]) + + +@mock_cloudwatch +def test_get_metric_data_outside_timeframe(): + utc_now = datetime.now(tz=pytz.utc) + last_week = utc_now - timedelta(days=7) + cloudwatch = boto3.client("cloudwatch", "eu-west-1") + namespace1 = "my_namespace/" + # put metric data + cloudwatch.put_metric_data( + Namespace=namespace1, + MetricData=[ + { + "MetricName": "metric1", + "Value": 50, + "Unit": "Seconds", + "Timestamp": last_week, + } + ], + ) + # get_metric_data + response = cloudwatch.get_metric_data( + MetricDataQueries=[ + { + "Id": "result", + "MetricStat": { + "Metric": {"Namespace": namespace1, "MetricName": "metric1"}, + "Period": 60, + "Stat": "Sum", + }, + } + ], + StartTime=utc_now - timedelta(seconds=60), + EndTime=utc_now + timedelta(seconds=60), + ) + # + # Assert Last week's data is not returned + len(response["MetricDataResults"]).should.equal(1) + response["MetricDataResults"][0]["Id"].should.equal("result") + response["MetricDataResults"][0]["StatusCode"].should.equal("Complete") + 
response["MetricDataResults"][0]["Values"].should.equal([]) + + +@mock_cloudwatch +def test_get_metric_data_for_multiple_metrics(): + utc_now = datetime.now(tz=pytz.utc) + cloudwatch = boto3.client("cloudwatch", "eu-west-1") + namespace = "my_namespace/" + # put metric data + cloudwatch.put_metric_data( + Namespace=namespace, + MetricData=[ + { + "MetricName": "metric1", + "Value": 50, + "Unit": "Seconds", + "Timestamp": utc_now, + } + ], + ) + cloudwatch.put_metric_data( + Namespace=namespace, + MetricData=[ + { + "MetricName": "metric2", + "Value": 25, + "Unit": "Seconds", + "Timestamp": utc_now, + } + ], + ) + # get_metric_data + response = cloudwatch.get_metric_data( + MetricDataQueries=[ + { + "Id": "result1", + "MetricStat": { + "Metric": {"Namespace": namespace, "MetricName": "metric1"}, + "Period": 60, + "Stat": "Sum", + }, + }, + { + "Id": "result2", + "MetricStat": { + "Metric": {"Namespace": namespace, "MetricName": "metric2"}, + "Period": 60, + "Stat": "Sum", + }, + }, + ], + StartTime=utc_now - timedelta(seconds=60), + EndTime=utc_now + timedelta(seconds=60), + ) + # + len(response["MetricDataResults"]).should.equal(2) + + res1 = [res for res in response["MetricDataResults"] if res["Id"] == "result1"][0] + res1["Values"].should.equal([50.0]) + + res2 = [res for res in response["MetricDataResults"] if res["Id"] == "result2"][0] + res2["Values"].should.equal([25.0]) diff --git a/tests/test_cognitoidentity/test_cognitoidentity.py b/tests/test_cognitoidentity/test_cognitoidentity.py index 8eae183c6..0ec7acfb0 100644 --- a/tests/test_cognitoidentity/test_cognitoidentity.py +++ b/tests/test_cognitoidentity/test_cognitoidentity.py @@ -7,6 +7,7 @@ from nose.tools import assert_raises from moto import mock_cognitoidentity from moto.cognitoidentity.utils import get_random_identity_id from moto.core import ACCOUNT_ID +from uuid import UUID @mock_cognitoidentity @@ -83,8 +84,10 @@ def test_describe_identity_pool_with_invalid_id_raises_error(): # testing a helper 
function def test_get_random_identity_id(): - assert len(get_random_identity_id("us-west-2")) > 0 - assert len(get_random_identity_id("us-west-2").split(":")[1]) == 19 + identity_id = get_random_identity_id("us-west-2") + region, id = identity_id.split(":") + region.should.equal("us-west-2") + UUID(id, version=4) # Will throw an error if it's not a valid UUID @mock_cognitoidentity @@ -96,7 +99,6 @@ def test_get_id(): IdentityPoolId="us-west-2:12345", Logins={"someurl": "12345"}, ) - print(result) assert ( result.get("IdentityId", "").startswith("us-west-2") or result.get("ResponseMetadata").get("HTTPStatusCode") == 200 diff --git a/tests/test_cognitoidentity/test_server.py b/tests/test_cognitoidentity/test_server.py index 903dae290..8c4229f06 100644 --- a/tests/test_cognitoidentity/test_server.py +++ b/tests/test_cognitoidentity/test_server.py @@ -48,6 +48,5 @@ def test_get_id(): }, ) - print(res.data) json_data = json.loads(res.data.decode("utf-8")) assert ":" in json_data["IdentityId"] diff --git a/tests/test_config/test_config.py b/tests/test_config/test_config.py index 1ffd52a2c..1bf39428e 100644 --- a/tests/test_config/test_config.py +++ b/tests/test_config/test_config.py @@ -11,6 +11,8 @@ from moto import mock_s3 from moto.config import mock_config from moto.core import ACCOUNT_ID +import sure # noqa + @mock_config def test_put_configuration_recorder(): diff --git a/tests/test_dynamodb2/test_dynamodb.py b/tests/test_dynamodb2/test_dynamodb.py index baea25c1b..0f45e0244 100644 --- a/tests/test_dynamodb2/test_dynamodb.py +++ b/tests/test_dynamodb2/test_dynamodb.py @@ -1,21 +1,17 @@ from __future__ import unicode_literals, print_function -import re from decimal import Decimal -import six import boto import boto3 from boto3.dynamodb.conditions import Attr, Key import re -import requests import sure # noqa from moto import mock_dynamodb2, mock_dynamodb2_deprecated from moto.dynamodb2 import dynamodb_backend2, dynamodb_backends2 from boto.exception import 
JSONResponseError from botocore.exceptions import ClientError, ParamValidationError from tests.helpers import requires_boto_gte -import tests.backport_assert_raises import moto.dynamodb2.comparisons import moto.dynamodb2.models @@ -1454,6 +1450,13 @@ def test_filter_expression(): filter_expr.expr(row1).should.be(True) filter_expr.expr(row2).should.be(False) + # lowercase AND test + filter_expr = moto.dynamodb2.comparisons.get_filter_expression( + "Id > :v0 and Subs < :v1", {}, {":v0": {"N": "5"}, ":v1": {"N": "7"}} + ) + filter_expr.expr(row1).should.be(True) + filter_expr.expr(row2).should.be(False) + # OR test filter_expr = moto.dynamodb2.comparisons.get_filter_expression( "Id = :v0 OR Id=:v1", {}, {":v0": {"N": "5"}, ":v1": {"N": "8"}} @@ -2785,7 +2788,7 @@ def test_query_gsi_with_range_key(): res = dynamodb.query( TableName="test", IndexName="test_gsi", - KeyConditionExpression="gsi_hash_key = :gsi_hash_key AND gsi_range_key = :gsi_range_key", + KeyConditionExpression="gsi_hash_key = :gsi_hash_key and gsi_range_key = :gsi_range_key", ExpressionAttributeValues={ ":gsi_hash_key": {"S": "key1"}, ":gsi_range_key": {"S": "range1"}, @@ -3214,6 +3217,25 @@ def test_remove_top_level_attribute(): result.should.equal({"id": {"S": "foo"}}) +@mock_dynamodb2 +def test_remove_top_level_attribute_non_existent(): + """ + Remove statements do not require attribute to exist they silently pass + """ + table_name = "test_remove" + client = create_table_with_list(table_name) + ddb_item = {"id": {"S": "foo"}, "item": {"S": "bar"}} + client.put_item(TableName=table_name, Item=ddb_item) + client.update_item( + TableName=table_name, + Key={"id": {"S": "foo"}}, + UpdateExpression="REMOVE non_existent_attribute", + ExpressionAttributeNames={"#i": "item"}, + ) + result = client.get_item(TableName=table_name, Key={"id": {"S": "foo"}})["Item"] + result.should.equal(ddb_item) + + @mock_dynamodb2 def test_remove_list_index__remove_existing_index(): table_name = "test_list_index_access" @@ 
-4212,6 +4234,396 @@ def test_gsi_verify_negative_number_order(): ) +@mock_dynamodb2 +def test_transact_write_items_put(): + table_schema = { + "KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}], + "AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},], + } + dynamodb = boto3.client("dynamodb", region_name="us-east-1") + dynamodb.create_table( + TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema + ) + # Put multiple items + dynamodb.transact_write_items( + TransactItems=[ + { + "Put": { + "Item": {"id": {"S": "foo{}".format(str(i))}, "foo": {"S": "bar"},}, + "TableName": "test-table", + } + } + for i in range(0, 5) + ] + ) + # Assert all are present + items = dynamodb.scan(TableName="test-table")["Items"] + items.should.have.length_of(5) + + +@mock_dynamodb2 +def test_transact_write_items_put_conditional_expressions(): + table_schema = { + "KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}], + "AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},], + } + dynamodb = boto3.client("dynamodb", region_name="us-east-1") + dynamodb.create_table( + TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema + ) + dynamodb.put_item( + TableName="test-table", Item={"id": {"S": "foo2"},}, + ) + # Put multiple items + with assert_raises(ClientError) as ex: + dynamodb.transact_write_items( + TransactItems=[ + { + "Put": { + "Item": { + "id": {"S": "foo{}".format(str(i))}, + "foo": {"S": "bar"}, + }, + "TableName": "test-table", + "ConditionExpression": "#i <> :i", + "ExpressionAttributeNames": {"#i": "id"}, + "ExpressionAttributeValues": { + ":i": { + "S": "foo2" + } # This item already exist, so the ConditionExpression should fail + }, + } + } + for i in range(0, 5) + ] + ) + # Assert the exception is correct + ex.exception.response["Error"]["Code"].should.equal( + "ConditionalCheckFailedException" + ) + ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400) + 
ex.exception.response["Error"]["Message"].should.equal( + "A condition specified in the operation could not be evaluated." + ) + # Assert all are present + items = dynamodb.scan(TableName="test-table")["Items"] + items.should.have.length_of(1) + items[0].should.equal({"id": {"S": "foo2"}}) + + +@mock_dynamodb2 +def test_transact_write_items_conditioncheck_passes(): + table_schema = { + "KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}], + "AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},], + } + dynamodb = boto3.client("dynamodb", region_name="us-east-1") + dynamodb.create_table( + TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema + ) + # Insert an item without email address + dynamodb.put_item( + TableName="test-table", Item={"id": {"S": "foo"},}, + ) + # Put an email address, after verifying it doesn't exist yet + dynamodb.transact_write_items( + TransactItems=[ + { + "ConditionCheck": { + "Key": {"id": {"S": "foo"}}, + "TableName": "test-table", + "ConditionExpression": "attribute_not_exists(#e)", + "ExpressionAttributeNames": {"#e": "email_address"}, + } + }, + { + "Put": { + "Item": { + "id": {"S": "foo"}, + "email_address": {"S": "test@moto.com"}, + }, + "TableName": "test-table", + } + }, + ] + ) + # Assert all are present + items = dynamodb.scan(TableName="test-table")["Items"] + items.should.have.length_of(1) + items[0].should.equal({"email_address": {"S": "test@moto.com"}, "id": {"S": "foo"}}) + + +@mock_dynamodb2 +def test_transact_write_items_conditioncheck_fails(): + table_schema = { + "KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}], + "AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},], + } + dynamodb = boto3.client("dynamodb", region_name="us-east-1") + dynamodb.create_table( + TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema + ) + # Insert an item with email address + dynamodb.put_item( + TableName="test-table", + Item={"id": {"S": "foo"}, 
"email_address": {"S": "test@moto.com"}}, + ) + # Try to put an email address, but verify whether it exists + # ConditionCheck should fail + with assert_raises(ClientError) as ex: + dynamodb.transact_write_items( + TransactItems=[ + { + "ConditionCheck": { + "Key": {"id": {"S": "foo"}}, + "TableName": "test-table", + "ConditionExpression": "attribute_not_exists(#e)", + "ExpressionAttributeNames": {"#e": "email_address"}, + } + }, + { + "Put": { + "Item": { + "id": {"S": "foo"}, + "email_address": {"S": "update@moto.com"}, + }, + "TableName": "test-table", + } + }, + ] + ) + # Assert the exception is correct + ex.exception.response["Error"]["Code"].should.equal( + "ConditionalCheckFailedException" + ) + ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400) + ex.exception.response["Error"]["Message"].should.equal( + "A condition specified in the operation could not be evaluated." + ) + + # Assert the original email address is still present + items = dynamodb.scan(TableName="test-table")["Items"] + items.should.have.length_of(1) + items[0].should.equal({"email_address": {"S": "test@moto.com"}, "id": {"S": "foo"}}) + + +@mock_dynamodb2 +def test_transact_write_items_delete(): + table_schema = { + "KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}], + "AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},], + } + dynamodb = boto3.client("dynamodb", region_name="us-east-1") + dynamodb.create_table( + TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema + ) + # Insert an item + dynamodb.put_item( + TableName="test-table", Item={"id": {"S": "foo"},}, + ) + # Delete the item + dynamodb.transact_write_items( + TransactItems=[ + {"Delete": {"Key": {"id": {"S": "foo"}}, "TableName": "test-table",}} + ] + ) + # Assert the item is deleted + items = dynamodb.scan(TableName="test-table")["Items"] + items.should.have.length_of(0) + + +@mock_dynamodb2 +def 
test_transact_write_items_delete_with_successful_condition_expression(): + table_schema = { + "KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}], + "AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},], + } + dynamodb = boto3.client("dynamodb", region_name="us-east-1") + dynamodb.create_table( + TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema + ) + # Insert an item without email address + dynamodb.put_item( + TableName="test-table", Item={"id": {"S": "foo"},}, + ) + # ConditionExpression will pass - no email address has been specified yet + dynamodb.transact_write_items( + TransactItems=[ + { + "Delete": { + "Key": {"id": {"S": "foo"},}, + "TableName": "test-table", + "ConditionExpression": "attribute_not_exists(#e)", + "ExpressionAttributeNames": {"#e": "email_address"}, + } + } + ] + ) + # Assert the item is deleted + items = dynamodb.scan(TableName="test-table")["Items"] + items.should.have.length_of(0) + + +@mock_dynamodb2 +def test_transact_write_items_delete_with_failed_condition_expression(): + table_schema = { + "KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}], + "AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},], + } + dynamodb = boto3.client("dynamodb", region_name="us-east-1") + dynamodb.create_table( + TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema + ) + # Insert an item with email address + dynamodb.put_item( + TableName="test-table", + Item={"id": {"S": "foo"}, "email_address": {"S": "test@moto.com"}}, + ) + # Try to delete an item that does not have an email address + # ConditionCheck should fail + with assert_raises(ClientError) as ex: + dynamodb.transact_write_items( + TransactItems=[ + { + "Delete": { + "Key": {"id": {"S": "foo"},}, + "TableName": "test-table", + "ConditionExpression": "attribute_not_exists(#e)", + "ExpressionAttributeNames": {"#e": "email_address"}, + } + } + ] + ) + # Assert the exception is correct + 
ex.exception.response["Error"]["Code"].should.equal( + "ConditionalCheckFailedException" + ) + ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400) + ex.exception.response["Error"]["Message"].should.equal( + "A condition specified in the operation could not be evaluated." + ) + # Assert the original item is still present + items = dynamodb.scan(TableName="test-table")["Items"] + items.should.have.length_of(1) + items[0].should.equal({"email_address": {"S": "test@moto.com"}, "id": {"S": "foo"}}) + + +@mock_dynamodb2 +def test_transact_write_items_update(): + table_schema = { + "KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}], + "AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},], + } + dynamodb = boto3.client("dynamodb", region_name="us-east-1") + dynamodb.create_table( + TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema + ) + # Insert an item + dynamodb.put_item(TableName="test-table", Item={"id": {"S": "foo"}}) + # Update the item + dynamodb.transact_write_items( + TransactItems=[ + { + "Update": { + "Key": {"id": {"S": "foo"}}, + "TableName": "test-table", + "UpdateExpression": "SET #e = :v", + "ExpressionAttributeNames": {"#e": "email_address"}, + "ExpressionAttributeValues": {":v": {"S": "test@moto.com"}}, + } + } + ] + ) + # Assert the item is updated + items = dynamodb.scan(TableName="test-table")["Items"] + items.should.have.length_of(1) + items[0].should.equal({"id": {"S": "foo"}, "email_address": {"S": "test@moto.com"}}) + + +@mock_dynamodb2 +def test_transact_write_items_update_with_failed_condition_expression(): + table_schema = { + "KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}], + "AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},], + } + dynamodb = boto3.client("dynamodb", region_name="us-east-1") + dynamodb.create_table( + TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema + ) + # Insert an item with email address + 
dynamodb.put_item( + TableName="test-table", + Item={"id": {"S": "foo"}, "email_address": {"S": "test@moto.com"}}, + ) + # Try to update an item that does not have an email address + # ConditionCheck should fail + with assert_raises(ClientError) as ex: + dynamodb.transact_write_items( + TransactItems=[ + { + "Update": { + "Key": {"id": {"S": "foo"}}, + "TableName": "test-table", + "UpdateExpression": "SET #e = :v", + "ConditionExpression": "attribute_not_exists(#e)", + "ExpressionAttributeNames": {"#e": "email_address"}, + "ExpressionAttributeValues": {":v": {"S": "update@moto.com"}}, + } + } + ] + ) + # Assert the exception is correct + ex.exception.response["Error"]["Code"].should.equal( + "ConditionalCheckFailedException" + ) + ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400) + ex.exception.response["Error"]["Message"].should.equal( + "A condition specified in the operation could not be evaluated." + ) + # Assert the original item is still present + items = dynamodb.scan(TableName="test-table")["Items"] + items.should.have.length_of(1) + items[0].should.equal({"email_address": {"S": "test@moto.com"}, "id": {"S": "foo"}}) + + +@mock_dynamodb2 +def test_dynamodb_max_1mb_limit(): + ddb = boto3.resource("dynamodb", region_name="eu-west-1") + + table_name = "populated-mock-table" + table = ddb.create_table( + TableName=table_name, + KeySchema=[ + {"AttributeName": "partition_key", "KeyType": "HASH"}, + {"AttributeName": "sort_key", "KeyType": "RANGE"}, + ], + AttributeDefinitions=[ + {"AttributeName": "partition_key", "AttributeType": "S"}, + {"AttributeName": "sort_key", "AttributeType": "S"}, + ], + BillingMode="PAY_PER_REQUEST", + ) + + # Populate the table + items = [ + { + "partition_key": "partition_key_val", # size=30 + "sort_key": "sort_key_value____" + str(i), # size=30 + } + for i in range(10000, 29999) + ] + with table.batch_writer() as batch: + for item in items: + batch.put_item(Item=item) + + response = table.query( + 
KeyConditionExpression=Key("partition_key").eq("partition_key_val") + ) + # We shouldn't get everything back - the total result set is well over 1MB + len(items).should.be.greater_than(response["Count"]) + response["LastEvaluatedKey"].shouldnt.be(None) + + def assert_raise_syntax_error(client_error, token, near): """ Assert whether a client_error is as expected Syntax error. Syntax error looks like: `syntax_error_template` @@ -4286,3 +4698,251 @@ def test_list_tables_exclusive_start_table_name_empty(): resp = client.list_tables(Limit=1, ExclusiveStartTableName="whatever") len(resp["TableNames"]).should.equal(0) + + +def assert_correct_client_error( + client_error, code, message_template, message_values=None, braces=None +): + """ + Assert whether a client_error is as expected. Allow for a list of values to be passed into the message + + Args: + client_error(ClientError): The ClientError exception that was raised + code(str): The code for the error (e.g. ValidationException) + message_template(str): Error message template. if message_values is not None then this template has a {values} + as placeholder. For example: + 'Value provided in ExpressionAttributeValues unused in expressions: keys: {values}' + message_values(list of str|None): The values that are passed in the error message + braces(list of str|None): List of length 2 with opening and closing brace for the values. 
By default it will be + surrounded by curly brackets + """ + braces = braces or ["{", "}"] + assert client_error.response["Error"]["Code"] == code + if message_values is not None: + values_string = "{open_brace}(?P.*){close_brace}".format( + open_brace=braces[0], close_brace=braces[1] + ) + re_msg = re.compile(message_template.format(values=values_string)) + match_result = re_msg.match(client_error.response["Error"]["Message"]) + assert match_result is not None + values_string = match_result.groupdict()["values"] + values = [key for key in values_string.split(", ")] + assert len(message_values) == len(values) + for value in message_values: + assert value in values + else: + assert client_error.response["Error"]["Message"] == message_template + + +def create_simple_table_and_return_client(): + dynamodb = boto3.client("dynamodb", region_name="eu-west-1") + dynamodb.create_table( + TableName="moto-test", + KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}], + AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"},], + ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1}, + ) + dynamodb.put_item( + TableName="moto-test", + Item={"id": {"S": "1"}, "myNum": {"N": "1"}, "MyStr": {"S": "1"},}, + ) + return dynamodb + + +# https://github.com/spulec/moto/issues/2806 +# https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateItem.html +# #DDB-UpdateItem-request-UpdateExpression +@mock_dynamodb2 +def test_update_item_with_attribute_in_right_hand_side_and_operation(): + dynamodb = create_simple_table_and_return_client() + + dynamodb.update_item( + TableName="moto-test", + Key={"id": {"S": "1"}}, + UpdateExpression="SET myNum = myNum+:val", + ExpressionAttributeValues={":val": {"N": "3"}}, + ) + + result = dynamodb.get_item(TableName="moto-test", Key={"id": {"S": "1"}}) + assert result["Item"]["myNum"]["N"] == "4" + + dynamodb.update_item( + TableName="moto-test", + Key={"id": {"S": "1"}}, + UpdateExpression="SET myNum = myNum 
- :val", + ExpressionAttributeValues={":val": {"N": "1"}}, + ) + result = dynamodb.get_item(TableName="moto-test", Key={"id": {"S": "1"}}) + assert result["Item"]["myNum"]["N"] == "3" + + +@mock_dynamodb2 +def test_non_existing_attribute_should_raise_exception(): + """ + Does error message get correctly raised if attribute is referenced but it does not exist for the item. + """ + dynamodb = create_simple_table_and_return_client() + + try: + dynamodb.update_item( + TableName="moto-test", + Key={"id": {"S": "1"}}, + UpdateExpression="SET MyStr = no_attr + MyStr", + ) + assert False, "Validation exception not thrown" + except dynamodb.exceptions.ClientError as e: + assert_correct_client_error( + e, + "ValidationException", + "The provided expression refers to an attribute that does not exist in the item", + ) + + +@mock_dynamodb2 +def test_update_expression_with_plus_in_attribute_name(): + """ + Does error message get correctly raised if attribute contains a plus and is passed in without an AttributeName. And + lhs & rhs are not attribute IDs by themselve. + """ + dynamodb = create_simple_table_and_return_client() + + dynamodb.put_item( + TableName="moto-test", + Item={"id": {"S": "1"}, "my+Num": {"S": "1"}, "MyStr": {"S": "aaa"},}, + ) + try: + dynamodb.update_item( + TableName="moto-test", + Key={"id": {"S": "1"}}, + UpdateExpression="SET MyStr = my+Num", + ) + assert False, "Validation exception not thrown" + except dynamodb.exceptions.ClientError as e: + assert_correct_client_error( + e, + "ValidationException", + "The provided expression refers to an attribute that does not exist in the item", + ) + + +@mock_dynamodb2 +def test_update_expression_with_minus_in_attribute_name(): + """ + Does error message get correctly raised if attribute contains a minus and is passed in without an AttributeName. And + lhs & rhs are not attribute IDs by themselve. 
+ """ + dynamodb = create_simple_table_and_return_client() + + dynamodb.put_item( + TableName="moto-test", + Item={"id": {"S": "1"}, "my-Num": {"S": "1"}, "MyStr": {"S": "aaa"},}, + ) + try: + dynamodb.update_item( + TableName="moto-test", + Key={"id": {"S": "1"}}, + UpdateExpression="SET MyStr = my-Num", + ) + assert False, "Validation exception not thrown" + except dynamodb.exceptions.ClientError as e: + assert_correct_client_error( + e, + "ValidationException", + "The provided expression refers to an attribute that does not exist in the item", + ) + + +@mock_dynamodb2 +def test_update_expression_with_space_in_attribute_name(): + """ + Does error message get correctly raised if attribute contains a space and is passed in without an AttributeName. And + lhs & rhs are not attribute IDs by themselves. + """ + dynamodb = create_simple_table_and_return_client() + + dynamodb.put_item( + TableName="moto-test", + Item={"id": {"S": "1"}, "my Num": {"S": "1"}, "MyStr": {"S": "aaa"},}, + ) + + try: + dynamodb.update_item( + TableName="moto-test", + Key={"id": {"S": "1"}}, + UpdateExpression="SET MyStr = my Num", + ) + assert False, "Validation exception not thrown" + except dynamodb.exceptions.ClientError as e: + assert_raise_syntax_error(e, "Num", "my Num") + + +@mock_dynamodb2 +def test_summing_up_2_strings_raises_exception(): + """ + Update set supports different DynamoDB types but some operations are not supported. For example summing up 2 strings + raises an exception. 
It results in ClientError with code ValidationException: + Saying An operand in the update expression has an incorrect data type + """ + dynamodb = create_simple_table_and_return_client() + + try: + dynamodb.update_item( + TableName="moto-test", + Key={"id": {"S": "1"}}, + UpdateExpression="SET MyStr = MyStr + MyStr", + ) + assert False, "Validation exception not thrown" + except dynamodb.exceptions.ClientError as e: + assert_correct_client_error( + e, + "ValidationException", + "An operand in the update expression has an incorrect data type", + ) + + +# https://github.com/spulec/moto/issues/2806 +@mock_dynamodb2 +def test_update_item_with_attribute_in_right_hand_side(): + """ + After tokenization and building expression make sure referenced attributes are replaced with their current value + """ + dynamodb = create_simple_table_and_return_client() + + # Make sure there are 2 values + dynamodb.put_item( + TableName="moto-test", + Item={"id": {"S": "1"}, "myVal1": {"S": "Value1"}, "myVal2": {"S": "Value2"}}, + ) + + dynamodb.update_item( + TableName="moto-test", + Key={"id": {"S": "1"}}, + UpdateExpression="SET myVal1 = myVal2", + ) + + result = dynamodb.get_item(TableName="moto-test", Key={"id": {"S": "1"}}) + assert result["Item"]["myVal1"]["S"] == result["Item"]["myVal2"]["S"] == "Value2" + + +@mock_dynamodb2 +def test_multiple_updates(): + dynamodb = create_simple_table_and_return_client() + dynamodb.put_item( + TableName="moto-test", + Item={"id": {"S": "1"}, "myNum": {"N": "1"}, "path": {"N": "6"}}, + ) + dynamodb.update_item( + TableName="moto-test", + Key={"id": {"S": "1"}}, + UpdateExpression="SET myNum = #p + :val, newAttr = myNum", + ExpressionAttributeValues={":val": {"N": "1"}}, + ExpressionAttributeNames={"#p": "path"}, + ) + result = dynamodb.get_item(TableName="moto-test", Key={"id": {"S": "1"}})["Item"] + expected_result = { + "myNum": {"N": "7"}, + "newAttr": {"N": "1"}, + "path": {"N": "6"}, + "id": {"S": "1"}, + } + assert result == 
expected_result diff --git a/tests/test_dynamodb2/test_dynamodb_executor.py b/tests/test_dynamodb2/test_dynamodb_executor.py new file mode 100644 index 000000000..4ef0bb423 --- /dev/null +++ b/tests/test_dynamodb2/test_dynamodb_executor.py @@ -0,0 +1,446 @@ +from moto.dynamodb2.exceptions import IncorrectOperandType, IncorrectDataType +from moto.dynamodb2.models import Item, DynamoType +from moto.dynamodb2.parsing.executors import UpdateExpressionExecutor +from moto.dynamodb2.parsing.expressions import UpdateExpressionParser +from moto.dynamodb2.parsing.validators import UpdateExpressionValidator +from parameterized import parameterized + + +def test_execution_of_if_not_exists_not_existing_value(): + update_expression = "SET a = if_not_exists(b, a)" + update_expression_ast = UpdateExpressionParser.make(update_expression) + item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "1"}, "a": {"S": "A"}}, + ) + validated_ast = UpdateExpressionValidator( + update_expression_ast, + expression_attribute_names=None, + expression_attribute_values=None, + item=item, + ).validate() + UpdateExpressionExecutor(validated_ast, item, None).execute() + expected_item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "1"}, "a": {"S": "A"}}, + ) + assert expected_item == item + + +def test_execution_of_if_not_exists_with_existing_attribute_should_return_attribute(): + update_expression = "SET a = if_not_exists(b, a)" + update_expression_ast = UpdateExpressionParser.make(update_expression) + item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "1"}, "a": {"S": "A"}, "b": {"S": "B"}}, + ) + validated_ast = UpdateExpressionValidator( + update_expression_ast, + expression_attribute_names=None, + expression_attribute_values=None, + item=item, + 
).validate() + UpdateExpressionExecutor(validated_ast, item, None).execute() + expected_item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "1"}, "a": {"S": "B"}, "b": {"S": "B"}}, + ) + assert expected_item == item + + +def test_execution_of_if_not_exists_with_existing_attribute_should_return_value(): + update_expression = "SET a = if_not_exists(b, :val)" + update_expression_values = {":val": {"N": "4"}} + update_expression_ast = UpdateExpressionParser.make(update_expression) + item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "1"}, "b": {"N": "3"}}, + ) + validated_ast = UpdateExpressionValidator( + update_expression_ast, + expression_attribute_names=None, + expression_attribute_values=update_expression_values, + item=item, + ).validate() + UpdateExpressionExecutor(validated_ast, item, None).execute() + expected_item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "1"}, "b": {"N": "3"}, "a": {"N": "3"}}, + ) + assert expected_item == item + + +def test_execution_of_if_not_exists_with_non_existing_attribute_should_return_value(): + update_expression = "SET a = if_not_exists(b, :val)" + update_expression_values = {":val": {"N": "4"}} + update_expression_ast = UpdateExpressionParser.make(update_expression) + item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "1"}}, + ) + validated_ast = UpdateExpressionValidator( + update_expression_ast, + expression_attribute_names=None, + expression_attribute_values=update_expression_values, + item=item, + ).validate() + UpdateExpressionExecutor(validated_ast, item, None).execute() + expected_item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + 
range_key_type=None, + attrs={"id": {"S": "1"}, "a": {"N": "4"}}, + ) + assert expected_item == item + + +def test_execution_of_sum_operation(): + update_expression = "SET a = a + b" + update_expression_ast = UpdateExpressionParser.make(update_expression) + item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "1"}, "a": {"N": "3"}, "b": {"N": "4"}}, + ) + validated_ast = UpdateExpressionValidator( + update_expression_ast, + expression_attribute_names=None, + expression_attribute_values=None, + item=item, + ).validate() + UpdateExpressionExecutor(validated_ast, item, None).execute() + expected_item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "1"}, "a": {"N": "7"}, "b": {"N": "4"}}, + ) + assert expected_item == item + + +def test_execution_of_remove(): + update_expression = "Remove a" + update_expression_ast = UpdateExpressionParser.make(update_expression) + item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "1"}, "a": {"N": "3"}, "b": {"N": "4"}}, + ) + validated_ast = UpdateExpressionValidator( + update_expression_ast, + expression_attribute_names=None, + expression_attribute_values=None, + item=item, + ).validate() + UpdateExpressionExecutor(validated_ast, item, None).execute() + expected_item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "1"}, "b": {"N": "4"}}, + ) + assert expected_item == item + + +def test_execution_of_remove_in_map(): + update_expression = "Remove itemmap.itemlist[1].foo11" + update_expression_ast = UpdateExpressionParser.make(update_expression) + item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={ + "id": {"S": "foo2"}, + "itemmap": { + 
"M": { + "itemlist": { + "L": [ + {"M": {"foo00": {"S": "bar1"}, "foo01": {"S": "bar2"}}}, + {"M": {"foo10": {"S": "bar1"}, "foo11": {"S": "bar2"}}}, + ] + } + } + }, + }, + ) + validated_ast = UpdateExpressionValidator( + update_expression_ast, + expression_attribute_names=None, + expression_attribute_values=None, + item=item, + ).validate() + UpdateExpressionExecutor(validated_ast, item, None).execute() + expected_item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={ + "id": {"S": "foo2"}, + "itemmap": { + "M": { + "itemlist": { + "L": [ + {"M": {"foo00": {"S": "bar1"}, "foo01": {"S": "bar2"}}}, + {"M": {"foo10": {"S": "bar1"},}}, + ] + } + } + }, + }, + ) + assert expected_item == item + + +def test_execution_of_remove_in_list(): + update_expression = "Remove itemmap.itemlist[1]" + update_expression_ast = UpdateExpressionParser.make(update_expression) + item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={ + "id": {"S": "foo2"}, + "itemmap": { + "M": { + "itemlist": { + "L": [ + {"M": {"foo00": {"S": "bar1"}, "foo01": {"S": "bar2"}}}, + {"M": {"foo10": {"S": "bar1"}, "foo11": {"S": "bar2"}}}, + ] + } + } + }, + }, + ) + validated_ast = UpdateExpressionValidator( + update_expression_ast, + expression_attribute_names=None, + expression_attribute_values=None, + item=item, + ).validate() + UpdateExpressionExecutor(validated_ast, item, None).execute() + expected_item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={ + "id": {"S": "foo2"}, + "itemmap": { + "M": { + "itemlist": { + "L": [{"M": {"foo00": {"S": "bar1"}, "foo01": {"S": "bar2"}}},] + } + } + }, + }, + ) + assert expected_item == item + + +def test_execution_of_delete_element_from_set(): + update_expression = "delete s :value" + update_expression_ast = UpdateExpressionParser.make(update_expression) + 
item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "foo2"}, "s": {"SS": ["value1", "value2", "value3"]},}, + ) + validated_ast = UpdateExpressionValidator( + update_expression_ast, + expression_attribute_names=None, + expression_attribute_values={":value": {"SS": ["value2", "value5"]}}, + item=item, + ).validate() + UpdateExpressionExecutor(validated_ast, item, None).execute() + expected_item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "foo2"}, "s": {"SS": ["value1", "value3"]},}, + ) + assert expected_item == item + + +def test_execution_of_add_number(): + update_expression = "add s :value" + update_expression_ast = UpdateExpressionParser.make(update_expression) + item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "foo2"}, "s": {"N": "5"},}, + ) + validated_ast = UpdateExpressionValidator( + update_expression_ast, + expression_attribute_names=None, + expression_attribute_values={":value": {"N": "10"}}, + item=item, + ).validate() + UpdateExpressionExecutor(validated_ast, item, None).execute() + expected_item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "foo2"}, "s": {"N": "15"}}, + ) + assert expected_item == item + + +def test_execution_of_add_set_to_a_number(): + update_expression = "add s :value" + update_expression_ast = UpdateExpressionParser.make(update_expression) + item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "foo2"}, "s": {"N": "5"},}, + ) + try: + validated_ast = UpdateExpressionValidator( + update_expression_ast, + expression_attribute_names=None, + expression_attribute_values={":value": {"SS": ["s1"]}}, + item=item, + ).validate() 
+ UpdateExpressionExecutor(validated_ast, item, None).execute() + expected_item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "foo2"}, "s": {"N": "15"}}, + ) + assert expected_item == item + assert False + except IncorrectDataType: + assert True + + +def test_execution_of_add_to_a_set(): + update_expression = "ADD s :value" + update_expression_ast = UpdateExpressionParser.make(update_expression) + item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "foo2"}, "s": {"SS": ["value1", "value2", "value3"]},}, + ) + validated_ast = UpdateExpressionValidator( + update_expression_ast, + expression_attribute_names=None, + expression_attribute_values={":value": {"SS": ["value2", "value5"]}}, + item=item, + ).validate() + UpdateExpressionExecutor(validated_ast, item, None).execute() + expected_item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={ + "id": {"S": "foo2"}, + "s": {"SS": ["value1", "value2", "value3", "value5"]}, + }, + ) + assert expected_item == item + + +@parameterized( + [ + ({":value": {"S": "10"}}, "STRING",), + ({":value": {"N": "10"}}, "NUMBER",), + ({":value": {"B": "10"}}, "BINARY",), + ({":value": {"BOOL": True}}, "BOOLEAN",), + ({":value": {"NULL": True}}, "NULL",), + ({":value": {"M": {"el0": {"S": "10"}}}}, "MAP",), + ({":value": {"L": []}}, "LIST",), + ] +) +def test_execution_of__delete_element_from_set_invalid_value( + expression_attribute_values, unexpected_data_type +): + """A delete statement must use a value of type SS in order to delete elements from a set.""" + update_expression = "delete s :value" + update_expression_ast = UpdateExpressionParser.make(update_expression) + item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": 
"foo2"}, "s": {"SS": ["value1", "value2", "value3"]},}, + ) + try: + validated_ast = UpdateExpressionValidator( + update_expression_ast, + expression_attribute_names=None, + expression_attribute_values=expression_attribute_values, + item=item, + ).validate() + UpdateExpressionExecutor(validated_ast, item, None).execute() + assert False, "Must raise exception" + except IncorrectOperandType as e: + assert e.operator_or_function == "operator: DELETE" + assert e.operand_type == unexpected_data_type + + +def test_execution_of_delete_element_from_a_string_attribute(): + """A delete statement must use a value of type SS in order to delete elements from a set.""" + update_expression = "delete s :value" + update_expression_ast = UpdateExpressionParser.make(update_expression) + item = Item( + hash_key=DynamoType({"S": "id"}), + hash_key_type="TYPE", + range_key=None, + range_key_type=None, + attrs={"id": {"S": "foo2"}, "s": {"S": "5"},}, + ) + try: + validated_ast = UpdateExpressionValidator( + update_expression_ast, + expression_attribute_names=None, + expression_attribute_values={":value": {"SS": ["value2"]}}, + item=item, + ).validate() + UpdateExpressionExecutor(validated_ast, item, None).execute() + assert False, "Must raise exception" + except IncorrectDataType: + assert True diff --git a/tests/test_dynamodb2/test_dynamodb_table_with_range_key.py b/tests/test_dynamodb2/test_dynamodb_table_with_range_key.py index 1aa2175c1..6fba713ec 100644 --- a/tests/test_dynamodb2/test_dynamodb_table_with_range_key.py +++ b/tests/test_dynamodb2/test_dynamodb_table_with_range_key.py @@ -8,6 +8,8 @@ from boto3.dynamodb.conditions import Key from botocore.exceptions import ClientError import sure # noqa from freezegun import freeze_time +from nose.tools import assert_raises + from moto import mock_dynamodb2, mock_dynamodb2_deprecated from boto.exception import JSONResponseError from tests.helpers import requires_boto_gte @@ -1273,6 +1275,15 @@ def test_update_item_with_expression(): ) 
+def assert_failure_due_to_key_not_in_schema(func, **kwargs): + with assert_raises(ClientError) as ex: + func(**kwargs) + ex.exception.response["Error"]["Code"].should.equal("ValidationException") + ex.exception.response["Error"]["Message"].should.equal( + "The provided key element does not match the schema" + ) + + @mock_dynamodb2 def test_update_item_add_with_expression(): table = _create_table_with_range_key() @@ -1299,14 +1310,13 @@ def test_update_item_add_with_expression(): dict(table.get_item(Key=item_key)["Item"]).should.equal(current_item) # Update item to add a string value to a non-existing set - # Should just create the set in the background - table.update_item( + # Should throw: 'The provided key element does not match the schema' + assert_failure_due_to_key_not_in_schema( + table.update_item, Key=item_key, UpdateExpression="ADD non_existing_str_set :v", ExpressionAttributeValues={":v": {"item4"}}, ) - current_item["non_existing_str_set"] = {"item4"} - dict(table.get_item(Key=item_key)["Item"]).should.equal(current_item) # Update item to add a num value to a num set table.update_item( @@ -1381,15 +1391,14 @@ def test_update_item_add_with_nested_sets(): dict(table.get_item(Key=item_key)["Item"]).should.equal(current_item) # Update item to add a string value to a non-existing set - # Should just create the set in the background - table.update_item( + # Should raise + assert_failure_due_to_key_not_in_schema( + table.update_item, Key=item_key, UpdateExpression="ADD #ns.#ne :v", ExpressionAttributeNames={"#ns": "nested", "#ne": "non_existing_str_set"}, ExpressionAttributeValues={":v": {"new_item"}}, ) - current_item["nested"]["non_existing_str_set"] = {"new_item"} - dict(table.get_item(Key=item_key)["Item"]).should.equal(current_item) @mock_dynamodb2 diff --git a/tests/test_eb/test_eb.py b/tests/test_eb/test_eb.py new file mode 100644 index 000000000..42eb09be3 --- /dev/null +++ b/tests/test_eb/test_eb.py @@ -0,0 +1,130 @@ +import boto3 +import sure # noqa 
+from botocore.exceptions import ClientError + +from moto import mock_elasticbeanstalk + + +@mock_elasticbeanstalk +def test_create_application(): + # Create Elastic Beanstalk Application + conn = boto3.client("elasticbeanstalk", region_name="us-east-1") + app = conn.create_application(ApplicationName="myapp",) + app["Application"]["ApplicationName"].should.equal("myapp") + + +@mock_elasticbeanstalk +def test_create_application_dup(): + conn = boto3.client("elasticbeanstalk", region_name="us-east-1") + conn.create_application(ApplicationName="myapp",) + conn.create_application.when.called_with(ApplicationName="myapp",).should.throw( + ClientError + ) + + +@mock_elasticbeanstalk +def test_describe_applications(): + # Create Elastic Beanstalk Application + conn = boto3.client("elasticbeanstalk", region_name="us-east-1") + conn.create_application(ApplicationName="myapp",) + + apps = conn.describe_applications() + len(apps["Applications"]).should.equal(1) + apps["Applications"][0]["ApplicationName"].should.equal("myapp") + + +@mock_elasticbeanstalk +def test_create_environment(): + # Create Elastic Beanstalk Environment + conn = boto3.client("elasticbeanstalk", region_name="us-east-1") + app = conn.create_application(ApplicationName="myapp",) + env = conn.create_environment(ApplicationName="myapp", EnvironmentName="myenv",) + env["EnvironmentName"].should.equal("myenv") + + +@mock_elasticbeanstalk +def test_describe_environments(): + # List Elastic Beanstalk Envs + conn = boto3.client("elasticbeanstalk", region_name="us-east-1") + conn.create_application(ApplicationName="myapp",) + conn.create_environment( + ApplicationName="myapp", EnvironmentName="myenv", + ) + + envs = conn.describe_environments() + envs = envs["Environments"] + len(envs).should.equal(1) + envs[0]["ApplicationName"].should.equal("myapp") + envs[0]["EnvironmentName"].should.equal("myenv") + + +def tags_dict_to_list(tag_dict): + tag_list = [] + for key, value in tag_dict.items(): + 
tag_list.append({"Key": key, "Value": value}) + return tag_list + + +def tags_list_to_dict(tag_list): + tag_dict = {} + for tag in tag_list: + tag_dict[tag["Key"]] = tag["Value"] + return tag_dict + + +@mock_elasticbeanstalk +def test_create_environment_tags(): + conn = boto3.client("elasticbeanstalk", region_name="us-east-1") + conn.create_application(ApplicationName="myapp",) + env_tags = {"initial key": "initial value"} + env = conn.create_environment( + ApplicationName="myapp", + EnvironmentName="myenv", + Tags=tags_dict_to_list(env_tags), + ) + + tags = conn.list_tags_for_resource(ResourceArn=env["EnvironmentArn"],) + tags["ResourceArn"].should.equal(env["EnvironmentArn"]) + tags_list_to_dict(tags["ResourceTags"]).should.equal(env_tags) + + +@mock_elasticbeanstalk +def test_update_tags(): + conn = boto3.client("elasticbeanstalk", region_name="us-east-1") + conn.create_application(ApplicationName="myapp",) + env_tags = { + "initial key": "initial value", + "to remove": "delete me", + "to update": "original", + } + env = conn.create_environment( + ApplicationName="myapp", + EnvironmentName="myenv", + Tags=tags_dict_to_list(env_tags), + ) + + extra_env_tags = { + "to update": "new", + "extra key": "extra value", + } + conn.update_tags_for_resource( + ResourceArn=env["EnvironmentArn"], + TagsToAdd=tags_dict_to_list(extra_env_tags), + TagsToRemove=["to remove"], + ) + + total_env_tags = env_tags.copy() + total_env_tags.update(extra_env_tags) + del total_env_tags["to remove"] + + tags = conn.list_tags_for_resource(ResourceArn=env["EnvironmentArn"],) + tags["ResourceArn"].should.equal(env["EnvironmentArn"]) + tags_list_to_dict(tags["ResourceTags"]).should.equal(total_env_tags) + + +@mock_elasticbeanstalk +def test_list_available_solution_stacks(): + conn = boto3.client("elasticbeanstalk", region_name="us-east-1") + stacks = conn.list_available_solution_stacks() + len(stacks["SolutionStacks"]).should.be.greater_than(0) + 
len(stacks["SolutionStacks"]).should.be.equal(len(stacks["SolutionStackDetails"])) diff --git a/tests/test_ec2/test_instances.py b/tests/test_ec2/test_instances.py index f7d58f9fa..0509e1a45 100644 --- a/tests/test_ec2/test_instances.py +++ b/tests/test_ec2/test_instances.py @@ -9,6 +9,7 @@ from nose.tools import assert_raises import base64 import datetime import ipaddress +import json import six import boto @@ -18,7 +19,7 @@ from boto.exception import EC2ResponseError, EC2ResponseError from freezegun import freeze_time import sure # noqa -from moto import mock_ec2_deprecated, mock_ec2 +from moto import mock_ec2_deprecated, mock_ec2, mock_cloudformation from tests.helpers import requires_boto_gte @@ -1334,6 +1335,12 @@ def test_create_instance_ebs_optimized(): instance.load() instance.ebs_optimized.should.be(False) + instance = ec2_resource.create_instances( + ImageId="ami-12345678", MaxCount=1, MinCount=1, + )[0] + instance.load() + instance.ebs_optimized.should.be(False) + @mock_ec2 def test_run_multiple_instances_in_same_command(): @@ -1414,3 +1421,40 @@ def test_describe_instance_attribute(): invalid_instance_attribute=invalid_instance_attribute ) ex.exception.response["Error"]["Message"].should.equal(message) + + +@mock_ec2 +@mock_cloudformation +def test_volume_size_through_cloudformation(): + ec2 = boto3.client("ec2", region_name="us-east-1") + cf = boto3.client("cloudformation", region_name="us-east-1") + + volume_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "testInstance": { + "Type": "AWS::EC2::Instance", + "Properties": { + "ImageId": "ami-d3adb33f", + "KeyName": "dummy", + "InstanceType": "t2.micro", + "BlockDeviceMappings": [ + {"DeviceName": "/dev/sda2", "Ebs": {"VolumeSize": "50"}} + ], + "Tags": [ + {"Key": "foo", "Value": "bar"}, + {"Key": "blah", "Value": "baz"}, + ], + }, + } + }, + } + template_json = json.dumps(volume_template) + cf.create_stack(StackName="test_stack", TemplateBody=template_json) + instances = 
ec2.describe_instances() + volume = instances["Reservations"][0]["Instances"][0]["BlockDeviceMappings"][0][ + "Ebs" + ] + + volumes = ec2.describe_volumes(VolumeIds=[volume["VolumeId"]]) + volumes["Volumes"][0]["Size"].should.equal(50) diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index ffbd73966..86b892315 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -2218,6 +2218,29 @@ def test_boto3_deleted_versionings_list(): assert len(listed["Contents"]) == 1 +@mock_s3 +def test_boto3_delete_objects_for_specific_version_id(): + client = boto3.client("s3", region_name=DEFAULT_REGION_NAME) + client.create_bucket(Bucket="blah") + client.put_bucket_versioning( + Bucket="blah", VersioningConfiguration={"Status": "Enabled"} + ) + + client.put_object(Bucket="blah", Key="test1", Body=b"test1a") + client.put_object(Bucket="blah", Key="test1", Body=b"test1b") + + response = client.list_object_versions(Bucket="blah", Prefix="test1") + id_to_delete = [v["VersionId"] for v in response["Versions"] if v["IsLatest"]][0] + + response = client.delete_objects( + Bucket="blah", Delete={"Objects": [{"Key": "test1", "VersionId": id_to_delete}]} + ) + assert response["Deleted"] == [{"Key": "test1", "VersionId": id_to_delete}] + + listed = client.list_objects_v2(Bucket="blah") + assert len(listed["Contents"]) == 1 + + @mock_s3 def test_boto3_delete_versioned_bucket(): client = boto3.client("s3", region_name=DEFAULT_REGION_NAME) @@ -3256,7 +3279,8 @@ def test_boto3_put_object_tagging_on_earliest_version(): # Older version has tags while the most recent does not resp = s3.get_object_tagging(Bucket=bucket_name, Key=key, VersionId=first_object.id) resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200) - resp["TagSet"].should.equal( + sorted_tagset = sorted(resp["TagSet"], key=lambda t: t["Key"]) + sorted_tagset.should.equal( [{"Key": "item1", "Value": "foo"}, {"Key": "item2", "Value": "bar"}] ) @@ -3334,7 +3358,8 @@ def 
test_boto3_put_object_tagging_on_both_version(): resp = s3.get_object_tagging(Bucket=bucket_name, Key=key, VersionId=first_object.id) resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200) - resp["TagSet"].should.equal( + sorted_tagset = sorted(resp["TagSet"], key=lambda t: t["Key"]) + sorted_tagset.should.equal( [{"Key": "item1", "Value": "foo"}, {"Key": "item2", "Value": "bar"}] ) @@ -3342,7 +3367,8 @@ def test_boto3_put_object_tagging_on_both_version(): Bucket=bucket_name, Key=key, VersionId=second_object.id ) resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200) - resp["TagSet"].should.equal( + sorted_tagset = sorted(resp["TagSet"], key=lambda t: t["Key"]) + sorted_tagset.should.equal( [{"Key": "item1", "Value": "baz"}, {"Key": "item2", "Value": "bin"}] ) @@ -3744,6 +3770,28 @@ def test_root_dir_with_empty_name_works(): store_and_read_back_a_key("/") +@parameterized(["mybucket", "my.bucket"]) +@mock_s3 +def test_leading_slashes_not_removed(bucket_name): + """Make sure that leading slashes are not removed internally.""" + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) + s3.create_bucket(Bucket=bucket_name) + + uploaded_key = "/key" + invalid_key_1 = "key" + invalid_key_2 = "//key" + + s3.put_object(Bucket=bucket_name, Key=uploaded_key, Body=b"Some body") + + with assert_raises(ClientError) as e: + s3.get_object(Bucket=bucket_name, Key=invalid_key_1) + e.exception.response["Error"]["Code"].should.equal("NoSuchKey") + + with assert_raises(ClientError) as e: + s3.get_object(Bucket=bucket_name, Key=invalid_key_2) + e.exception.response["Error"]["Code"].should.equal("NoSuchKey") + + @parameterized( [("foo/bar/baz",), ("foo",), ("foo/run_dt%3D2019-01-01%252012%253A30%253A00",)] ) @@ -4293,24 +4341,17 @@ def test_s3_config_dict(): FakeAcl, FakeGrant, FakeGrantee, - FakeTag, - FakeTagging, - FakeTagSet, OWNER, ) # Without any buckets: assert not s3_config_query.get_config_resource("some_bucket") - tags = FakeTagging( - FakeTagSet( - 
[FakeTag("someTag", "someValue"), FakeTag("someOtherTag", "someOtherValue")] - ) - ) + tags = {"someTag": "someValue", "someOtherTag": "someOtherValue"} # With 1 bucket in us-west-2: s3_config_query.backends["global"].create_bucket("bucket1", "us-west-2") - s3_config_query.backends["global"].put_bucket_tagging("bucket1", tags) + s3_config_query.backends["global"].put_bucket_tags("bucket1", tags) # With a log bucket: s3_config_query.backends["global"].create_bucket("logbucket", "us-west-2") diff --git a/tests/test_secretsmanager/test_secretsmanager.py b/tests/test_secretsmanager/test_secretsmanager.py index 49d1dc925..6ec53460a 100644 --- a/tests/test_secretsmanager/test_secretsmanager.py +++ b/tests/test_secretsmanager/test_secretsmanager.py @@ -137,6 +137,45 @@ def test_create_secret_with_tags(): ] +@mock_secretsmanager +def test_create_secret_with_description(): + conn = boto3.client("secretsmanager", region_name="us-east-1") + secret_name = "test-secret-with-tags" + + result = conn.create_secret( + Name=secret_name, SecretString="foosecret", Description="desc" + ) + assert result["ARN"] + assert result["Name"] == secret_name + secret_value = conn.get_secret_value(SecretId=secret_name) + assert secret_value["SecretString"] == "foosecret" + secret_details = conn.describe_secret(SecretId=secret_name) + assert secret_details["Description"] == "desc" + + +@mock_secretsmanager +def test_create_secret_with_tags_and_description(): + conn = boto3.client("secretsmanager", region_name="us-east-1") + secret_name = "test-secret-with-tags" + + result = conn.create_secret( + Name=secret_name, + SecretString="foosecret", + Description="desc", + Tags=[{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}], + ) + assert result["ARN"] + assert result["Name"] == secret_name + secret_value = conn.get_secret_value(SecretId=secret_name) + assert secret_value["SecretString"] == "foosecret" + secret_details = conn.describe_secret(SecretId=secret_name) + assert 
secret_details["Tags"] == [ + {"Key": "Foo", "Value": "Bar"}, + {"Key": "Mykey", "Value": "Myvalue"}, + ] + assert secret_details["Description"] == "desc" + + @mock_secretsmanager def test_delete_secret(): conn = boto3.client("secretsmanager", region_name="us-west-2") @@ -690,6 +729,31 @@ def test_put_secret_value_versions_differ_if_same_secret_put_twice(): assert first_version_id != second_version_id +@mock_secretsmanager +def test_put_secret_value_maintains_description_and_tags(): + conn = boto3.client("secretsmanager", region_name="us-west-2") + + conn.create_secret( + Name=DEFAULT_SECRET_NAME, + SecretString="foosecret", + Description="desc", + Tags=[{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}], + ) + + conn = boto3.client("secretsmanager", region_name="us-west-2") + conn.put_secret_value( + SecretId=DEFAULT_SECRET_NAME, + SecretString="dupe_secret", + VersionStages=["AWSCURRENT"], + ) + secret_details = conn.describe_secret(SecretId=DEFAULT_SECRET_NAME) + assert secret_details["Tags"] == [ + {"Key": "Foo", "Value": "Bar"}, + {"Key": "Mykey", "Value": "Myvalue"}, + ] + assert secret_details["Description"] == "desc" + + @mock_secretsmanager def test_can_list_secret_version_ids(): conn = boto3.client("secretsmanager", region_name="us-west-2") @@ -739,6 +803,43 @@ def test_update_secret(): assert created_secret["VersionId"] != updated_secret["VersionId"] +@mock_secretsmanager +def test_update_secret_with_tags_and_description(): + conn = boto3.client("secretsmanager", region_name="us-west-2") + + created_secret = conn.create_secret( + Name="test-secret", + SecretString="foosecret", + Description="desc", + Tags=[{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}], + ) + + assert created_secret["ARN"] + assert created_secret["Name"] == "test-secret" + assert created_secret["VersionId"] != "" + + secret = conn.get_secret_value(SecretId="test-secret") + assert secret["SecretString"] == "foosecret" + + updated_secret = 
conn.update_secret( + SecretId="test-secret", SecretString="barsecret" + ) + + assert updated_secret["ARN"] + assert updated_secret["Name"] == "test-secret" + assert updated_secret["VersionId"] != "" + + secret = conn.get_secret_value(SecretId="test-secret") + assert secret["SecretString"] == "barsecret" + assert created_secret["VersionId"] != updated_secret["VersionId"] + secret_details = conn.describe_secret(SecretId="test-secret") + assert secret_details["Tags"] == [ + {"Key": "Foo", "Value": "Bar"}, + {"Key": "Mykey", "Value": "Myvalue"}, + ] + assert secret_details["Description"] == "desc" + + @mock_secretsmanager def test_update_secret_which_does_not_exit(): conn = boto3.client("secretsmanager", region_name="us-west-2") diff --git a/tests/test_sts/test_sts.py b/tests/test_sts/test_sts.py index 4dee9184f..efc04beb4 100644 --- a/tests/test_sts/test_sts.py +++ b/tests/test_sts/test_sts.py @@ -1,4 +1,5 @@ from __future__ import unicode_literals +from base64 import b64encode import json import boto @@ -103,6 +104,128 @@ def test_assume_role(): ) +@freeze_time("2012-01-01 12:00:00") +@mock_sts +def test_assume_role_with_saml(): + client = boto3.client("sts", region_name="us-east-1") + + session_name = "session-name" + policy = json.dumps( + { + "Statement": [ + { + "Sid": "Stmt13690092345534", + "Action": ["S3:ListBucket"], + "Effect": "Allow", + "Resource": ["arn:aws:s3:::foobar-tester"], + } + ] + } + ) + role_name = "test-role" + provider_name = "TestProvFed" + user_name = "testuser" + role_input = "arn:aws:iam::{account_id}:role/{role_name}".format( + account_id=ACCOUNT_ID, role_name=role_name + ) + principal_role = "arn:aws:iam:{account_id}:saml-provider/{provider_name}".format( + account_id=ACCOUNT_ID, provider_name=provider_name + ) + saml_assertion = """ + + + http://localhost/ + + + + + http://localhost:3000/ + + + + + + + + + + + NTIyMzk0ZGI4MjI0ZjI5ZGNhYjkyOGQyZGQ1NTZjODViZjk5YTY4ODFjOWRjNjkyYzZmODY2ZDQ4NjlkZjY3YSAgLQo= + + + 
NTIyMzk0ZGI4MjI0ZjI5ZGNhYjkyOGQyZGQ1NTZjODViZjk5YTY4ODFjOWRjNjkyYzZmODY2ZDQ4NjlkZjY3YSAgLQo= + + + NTIyMzk0ZGI4MjI0ZjI5ZGNhYjkyOGQyZGQ1NTZjODViZjk5YTY4ODFjOWRjNjkyYzZmODY2ZDQ4NjlkZjY3YSAgLQo= + + + + + {username} + + + + + + + urn:amazon:webservices + + + + + {username}@localhost + + + arn:aws:iam::{account_id}:saml-provider/{provider_name},arn:aws:iam::{account_id}:role/{role_name} + + + 900 + + + + + urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport + + + +""".format( + account_id=ACCOUNT_ID, + role_name=role_name, + provider_name=provider_name, + username=user_name, + ).replace( + "\n", "" + ) + + assume_role_response = client.assume_role_with_saml( + RoleArn=role_input, + PrincipalArn=principal_role, + SAMLAssertion=b64encode(saml_assertion.encode("utf-8")).decode("utf-8"), + ) + + credentials = assume_role_response["Credentials"] + if not settings.TEST_SERVER_MODE: + credentials["Expiration"].isoformat().should.equal("2012-01-01T12:15:00+00:00") + credentials["SessionToken"].should.have.length_of(356) + assert credentials["SessionToken"].startswith("FQoGZXIvYXdzE") + credentials["AccessKeyId"].should.have.length_of(20) + assert credentials["AccessKeyId"].startswith("ASIA") + credentials["SecretAccessKey"].should.have.length_of(40) + + assume_role_response["AssumedRoleUser"]["Arn"].should.equal( + "arn:aws:sts::{account_id}:assumed-role/{role_name}/{fed_name}@localhost".format( + account_id=ACCOUNT_ID, role_name=role_name, fed_name=user_name + ) + ) + assert assume_role_response["AssumedRoleUser"]["AssumedRoleId"].startswith("AROA") + assert assume_role_response["AssumedRoleUser"]["AssumedRoleId"].endswith( + ":{fed_name}@localhost".format(fed_name=user_name) + ) + assume_role_response["AssumedRoleUser"]["AssumedRoleId"].should.have.length_of( + 21 + 1 + len("{fed_name}@localhost".format(fed_name=user_name)) + ) + + @freeze_time("2012-01-01 12:00:00") @mock_sts_deprecated def test_assume_role_with_web_identity(): diff --git 
a/tests/test_utilities/test_tagging_service.py b/tests/test_utilities/test_tagging_service.py
index 249e903fe..1eac276a1 100644
--- a/tests/test_utilities/test_tagging_service.py
+++ b/tests/test_utilities/test_tagging_service.py
@@ -77,3 +77,34 @@ def test_extract_tag_names():
     expected = ["key1", "key2"]
 
     expected.should.be.equal(actual)
+
+
+def test_copy_non_existing_arn():
+    svc = TaggingService()
+    tags = [{"Key": "key1", "Value": "value1"}, {"Key": "key2", "Value": "value2"}]
+    svc.tag_resource("new_arn", tags)
+    #
+    svc.copy_tags("non_existing_arn", "new_arn")
+    # Copying from a non-existing ARN should be a NOOP
+    # Assert the old tags still exist
+    actual = sorted(
+        svc.list_tags_for_resource("new_arn")["Tags"], key=lambda t: t["Key"]
+    )
+    actual.should.equal(tags)
+
+
+def test_copy_existing_arn():
+    svc = TaggingService()
+    tags_old_arn = [{"Key": "key1", "Value": "value1"}]
+    tags_new_arn = [{"Key": "key2", "Value": "value2"}]
+    svc.tag_resource("old_arn", tags_old_arn)
+    svc.tag_resource("new_arn", tags_new_arn)
+    #
+    svc.copy_tags("old_arn", "new_arn")
+    # Assert the copied tags were merged with the tags already on the target ARN
+    actual = sorted(
+        svc.list_tags_for_resource("new_arn")["Tags"], key=lambda t: t["Key"]
+    )
+    actual.should.equal(
+        [{"Key": "key1", "Value": "value1"}, {"Key": "key2", "Value": "value2"}]
+    )