Merge https://github.com/spulec/moto into support_optin_regions
This commit is contained in:
commit
ff1beea280
@ -2878,15 +2878,15 @@
|
||||
- [ ] test_failover
|
||||
|
||||
## elasticbeanstalk
|
||||
0% implemented
|
||||
13% implemented
|
||||
- [ ] abort_environment_update
|
||||
- [ ] apply_environment_managed_action
|
||||
- [ ] check_dns_availability
|
||||
- [ ] compose_environments
|
||||
- [ ] create_application
|
||||
- [X] create_application
|
||||
- [ ] create_application_version
|
||||
- [ ] create_configuration_template
|
||||
- [ ] create_environment
|
||||
- [X] create_environment
|
||||
- [ ] create_platform_version
|
||||
- [ ] create_storage_location
|
||||
- [ ] delete_application
|
||||
@ -2903,13 +2903,13 @@
|
||||
- [ ] describe_environment_managed_action_history
|
||||
- [ ] describe_environment_managed_actions
|
||||
- [ ] describe_environment_resources
|
||||
- [ ] describe_environments
|
||||
- [X] describe_environments
|
||||
- [ ] describe_events
|
||||
- [ ] describe_instances_health
|
||||
- [ ] describe_platform_version
|
||||
- [ ] list_available_solution_stacks
|
||||
- [X] list_available_solution_stacks
|
||||
- [ ] list_platform_versions
|
||||
- [ ] list_tags_for_resource
|
||||
- [X] list_tags_for_resource
|
||||
- [ ] rebuild_environment
|
||||
- [ ] request_environment_info
|
||||
- [ ] restart_app_server
|
||||
@ -2921,7 +2921,7 @@
|
||||
- [ ] update_application_version
|
||||
- [ ] update_configuration_template
|
||||
- [ ] update_environment
|
||||
- [ ] update_tags_for_resource
|
||||
- [X] update_tags_for_resource
|
||||
- [ ] validate_configuration_settings
|
||||
|
||||
## elastictranscoder
|
||||
|
@ -3,5 +3,6 @@ include requirements.txt requirements-dev.txt tox.ini
|
||||
include moto/ec2/resources/instance_types.json
|
||||
include moto/ec2/resources/amis.json
|
||||
include moto/cognitoidp/resources/*.json
|
||||
include moto/dynamodb2/parsing/reserved_keywords.txt
|
||||
recursive-include moto/templates *
|
||||
recursive-include tests *
|
||||
|
@ -21,6 +21,7 @@ from .datasync import mock_datasync # noqa
|
||||
from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated # noqa
|
||||
from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated # noqa
|
||||
from .dynamodbstreams import mock_dynamodbstreams # noqa
|
||||
from .elasticbeanstalk import mock_elasticbeanstalk # noqa
|
||||
from .ec2 import mock_ec2, mock_ec2_deprecated # noqa
|
||||
from .ec2_instance_connect import mock_ec2_instance_connect # noqa
|
||||
from .ecr import mock_ecr, mock_ecr_deprecated # noqa
|
||||
|
@ -461,6 +461,7 @@ class RestAPI(BaseModel):
|
||||
self.description = description
|
||||
self.create_date = int(time.time())
|
||||
self.api_key_source = kwargs.get("api_key_source") or "HEADER"
|
||||
self.policy = kwargs.get("policy") or None
|
||||
self.endpoint_configuration = kwargs.get("endpoint_configuration") or {
|
||||
"types": ["EDGE"]
|
||||
}
|
||||
@ -485,6 +486,7 @@ class RestAPI(BaseModel):
|
||||
"apiKeySource": self.api_key_source,
|
||||
"endpointConfiguration": self.endpoint_configuration,
|
||||
"tags": self.tags,
|
||||
"policy": self.policy,
|
||||
}
|
||||
|
||||
def add_child(self, path, parent_id=None):
|
||||
@ -713,6 +715,7 @@ class APIGatewayBackend(BaseBackend):
|
||||
api_key_source=None,
|
||||
endpoint_configuration=None,
|
||||
tags=None,
|
||||
policy=None,
|
||||
):
|
||||
api_id = create_id()
|
||||
rest_api = RestAPI(
|
||||
@ -723,6 +726,7 @@ class APIGatewayBackend(BaseBackend):
|
||||
api_key_source=api_key_source,
|
||||
endpoint_configuration=endpoint_configuration,
|
||||
tags=tags,
|
||||
policy=policy,
|
||||
)
|
||||
self.apis[api_id] = rest_api
|
||||
return rest_api
|
||||
|
@ -59,6 +59,7 @@ class APIGatewayResponse(BaseResponse):
|
||||
api_key_source = self._get_param("apiKeySource")
|
||||
endpoint_configuration = self._get_param("endpointConfiguration")
|
||||
tags = self._get_param("tags")
|
||||
policy = self._get_param("policy")
|
||||
|
||||
# Param validation
|
||||
if api_key_source and api_key_source not in API_KEY_SOURCES:
|
||||
@ -94,6 +95,7 @@ class APIGatewayResponse(BaseResponse):
|
||||
api_key_source=api_key_source,
|
||||
endpoint_configuration=endpoint_configuration,
|
||||
tags=tags,
|
||||
policy=policy,
|
||||
)
|
||||
return 200, {}, json.dumps(rest_api.to_dict())
|
||||
|
||||
|
@ -1006,11 +1006,11 @@ class LambdaBackend(BaseBackend):
|
||||
return True
|
||||
return False
|
||||
|
||||
def add_policy_statement(self, function_name, raw):
|
||||
def add_permission(self, function_name, raw):
|
||||
fn = self.get_function(function_name)
|
||||
fn.policy.add_statement(raw)
|
||||
|
||||
def del_policy_statement(self, function_name, sid, revision=""):
|
||||
def remove_permission(self, function_name, sid, revision=""):
|
||||
fn = self.get_function(function_name)
|
||||
fn.policy.del_statement(sid, revision)
|
||||
|
||||
|
@ -146,7 +146,7 @@ class LambdaResponse(BaseResponse):
|
||||
function_name = path.split("/")[-2]
|
||||
if self.lambda_backend.get_function(function_name):
|
||||
statement = self.body
|
||||
self.lambda_backend.add_policy_statement(function_name, statement)
|
||||
self.lambda_backend.add_permission(function_name, statement)
|
||||
return 200, {}, json.dumps({"Statement": statement})
|
||||
else:
|
||||
return 404, {}, "{}"
|
||||
@ -166,9 +166,7 @@ class LambdaResponse(BaseResponse):
|
||||
statement_id = path.split("/")[-1].split("?")[0]
|
||||
revision = querystring.get("RevisionId", "")
|
||||
if self.lambda_backend.get_function(function_name):
|
||||
self.lambda_backend.del_policy_statement(
|
||||
function_name, statement_id, revision
|
||||
)
|
||||
self.lambda_backend.remove_permission(function_name, statement_id, revision)
|
||||
return 204, {}, "{}"
|
||||
else:
|
||||
return 404, {}, "{}"
|
||||
|
@ -23,6 +23,7 @@ from moto.ec2 import ec2_backends
|
||||
from moto.ec2_instance_connect import ec2_instance_connect_backends
|
||||
from moto.ecr import ecr_backends
|
||||
from moto.ecs import ecs_backends
|
||||
from moto.elasticbeanstalk import eb_backends
|
||||
from moto.elb import elb_backends
|
||||
from moto.elbv2 import elbv2_backends
|
||||
from moto.emr import emr_backends
|
||||
@ -77,6 +78,7 @@ BACKENDS = {
|
||||
"ec2_instance_connect": ec2_instance_connect_backends,
|
||||
"ecr": ecr_backends,
|
||||
"ecs": ecs_backends,
|
||||
"elasticbeanstalk": eb_backends,
|
||||
"elb": elb_backends,
|
||||
"elbv2": elbv2_backends,
|
||||
"events": events_backends,
|
||||
|
@ -239,8 +239,11 @@ class FakeStack(BaseModel):
|
||||
self.cross_stack_resources = cross_stack_resources or {}
|
||||
self.resource_map = self._create_resource_map()
|
||||
self.output_map = self._create_output_map()
|
||||
self._add_stack_event("CREATE_COMPLETE")
|
||||
self.status = "CREATE_COMPLETE"
|
||||
if create_change_set:
|
||||
self.status = "REVIEW_IN_PROGRESS"
|
||||
else:
|
||||
self.create_resources()
|
||||
self._add_stack_event("CREATE_COMPLETE")
|
||||
self.creation_time = datetime.utcnow()
|
||||
|
||||
def _create_resource_map(self):
|
||||
@ -253,7 +256,7 @@ class FakeStack(BaseModel):
|
||||
self.template_dict,
|
||||
self.cross_stack_resources,
|
||||
)
|
||||
resource_map.create()
|
||||
resource_map.load()
|
||||
return resource_map
|
||||
|
||||
def _create_output_map(self):
|
||||
@ -326,6 +329,10 @@ class FakeStack(BaseModel):
|
||||
def exports(self):
|
||||
return self.output_map.exports
|
||||
|
||||
def create_resources(self):
|
||||
self.resource_map.create()
|
||||
self.status = "CREATE_COMPLETE"
|
||||
|
||||
def update(self, template, role_arn=None, parameters=None, tags=None):
|
||||
self._add_stack_event(
|
||||
"UPDATE_IN_PROGRESS", resource_status_reason="User Initiated"
|
||||
@ -640,6 +647,7 @@ class CloudFormationBackend(BaseBackend):
|
||||
else:
|
||||
stack._add_stack_event("UPDATE_IN_PROGRESS")
|
||||
stack._add_stack_event("UPDATE_COMPLETE")
|
||||
stack.create_resources()
|
||||
return True
|
||||
|
||||
def describe_stacks(self, name_or_stack_id):
|
||||
|
@ -531,14 +531,16 @@ class ResourceMap(collections_abc.Mapping):
|
||||
for condition_name in self.lazy_condition_map:
|
||||
self.lazy_condition_map[condition_name]
|
||||
|
||||
def create(self):
|
||||
def load(self):
|
||||
self.load_mapping()
|
||||
self.transform_mapping()
|
||||
self.load_parameters()
|
||||
self.load_conditions()
|
||||
|
||||
def create(self):
|
||||
# Since this is a lazy map, to create every object we just need to
|
||||
# iterate through self.
|
||||
# Assumes that self.load() has been called before
|
||||
self.tags.update(
|
||||
{
|
||||
"aws:cloudformation:stack-name": self.get("AWS::StackName"),
|
||||
|
@ -22,6 +22,14 @@ class Dimension(object):
|
||||
self.name = name
|
||||
self.value = value
|
||||
|
||||
def __eq__(self, item):
|
||||
if isinstance(item, Dimension):
|
||||
return self.name == item.name and self.value == item.value
|
||||
return False
|
||||
|
||||
def __ne__(self, item): # Only needed on Py2; Py3 defines it implicitly
|
||||
return self != item
|
||||
|
||||
|
||||
def daterange(start, stop, step=timedelta(days=1), inclusive=False):
|
||||
"""
|
||||
@ -124,6 +132,17 @@ class MetricDatum(BaseModel):
|
||||
Dimension(dimension["Name"], dimension["Value"]) for dimension in dimensions
|
||||
]
|
||||
|
||||
def filter(self, namespace, name, dimensions):
|
||||
if namespace and namespace != self.namespace:
|
||||
return False
|
||||
if name and name != self.name:
|
||||
return False
|
||||
if dimensions and any(
|
||||
Dimension(d["Name"], d["Value"]) not in self.dimensions for d in dimensions
|
||||
):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
class Dashboard(BaseModel):
|
||||
def __init__(self, name, body):
|
||||
@ -202,6 +221,15 @@ class CloudWatchBackend(BaseBackend):
|
||||
self.metric_data = []
|
||||
self.paged_metric_data = {}
|
||||
|
||||
@property
|
||||
# Retrieve a list of all OOTB metrics that are provided by metrics providers
|
||||
# Computed on the fly
|
||||
def aws_metric_data(self):
|
||||
md = []
|
||||
for name, service in metric_providers.items():
|
||||
md.extend(service.get_cloudwatch_metrics())
|
||||
return md
|
||||
|
||||
def put_metric_alarm(
|
||||
self,
|
||||
name,
|
||||
@ -295,6 +323,43 @@ class CloudWatchBackend(BaseBackend):
|
||||
)
|
||||
)
|
||||
|
||||
def get_metric_data(self, queries, start_time, end_time):
|
||||
period_data = [
|
||||
md for md in self.metric_data if start_time <= md.timestamp <= end_time
|
||||
]
|
||||
results = []
|
||||
for query in queries:
|
||||
query_ns = query["metric_stat._metric._namespace"]
|
||||
query_name = query["metric_stat._metric._metric_name"]
|
||||
query_data = [
|
||||
md
|
||||
for md in period_data
|
||||
if md.namespace == query_ns and md.name == query_name
|
||||
]
|
||||
metric_values = [m.value for m in query_data]
|
||||
result_vals = []
|
||||
stat = query["metric_stat._stat"]
|
||||
if len(metric_values) > 0:
|
||||
if stat == "Average":
|
||||
result_vals.append(sum(metric_values) / len(metric_values))
|
||||
elif stat == "Minimum":
|
||||
result_vals.append(min(metric_values))
|
||||
elif stat == "Maximum":
|
||||
result_vals.append(max(metric_values))
|
||||
elif stat == "Sum":
|
||||
result_vals.append(sum(metric_values))
|
||||
|
||||
label = query["metric_stat._metric._metric_name"] + " " + stat
|
||||
results.append(
|
||||
{
|
||||
"id": query["id"],
|
||||
"label": label,
|
||||
"vals": result_vals,
|
||||
"timestamps": [datetime.now() for _ in result_vals],
|
||||
}
|
||||
)
|
||||
return results
|
||||
|
||||
def get_metric_statistics(
|
||||
self, namespace, metric_name, start_time, end_time, period, stats
|
||||
):
|
||||
@ -334,7 +399,7 @@ class CloudWatchBackend(BaseBackend):
|
||||
return data
|
||||
|
||||
def get_all_metrics(self):
|
||||
return self.metric_data
|
||||
return self.metric_data + self.aws_metric_data
|
||||
|
||||
def put_dashboard(self, name, body):
|
||||
self.dashboards[name] = Dashboard(name, body)
|
||||
@ -386,7 +451,7 @@ class CloudWatchBackend(BaseBackend):
|
||||
|
||||
self.alarms[alarm_name].update_state(reason, reason_data, state_value)
|
||||
|
||||
def list_metrics(self, next_token, namespace, metric_name):
|
||||
def list_metrics(self, next_token, namespace, metric_name, dimensions):
|
||||
if next_token:
|
||||
if next_token not in self.paged_metric_data:
|
||||
raise RESTError(
|
||||
@ -397,15 +462,16 @@ class CloudWatchBackend(BaseBackend):
|
||||
del self.paged_metric_data[next_token] # Cant reuse same token twice
|
||||
return self._get_paginated(metrics)
|
||||
else:
|
||||
metrics = self.get_filtered_metrics(metric_name, namespace)
|
||||
metrics = self.get_filtered_metrics(metric_name, namespace, dimensions)
|
||||
return self._get_paginated(metrics)
|
||||
|
||||
def get_filtered_metrics(self, metric_name, namespace):
|
||||
def get_filtered_metrics(self, metric_name, namespace, dimensions):
|
||||
metrics = self.get_all_metrics()
|
||||
if namespace:
|
||||
metrics = [md for md in metrics if md.namespace == namespace]
|
||||
if metric_name:
|
||||
metrics = [md for md in metrics if md.name == metric_name]
|
||||
metrics = [
|
||||
md
|
||||
for md in metrics
|
||||
if md.filter(namespace=namespace, name=metric_name, dimensions=dimensions)
|
||||
]
|
||||
return metrics
|
||||
|
||||
def _get_paginated(self, metrics):
|
||||
@ -445,3 +511,8 @@ for region in Session().get_available_regions(
|
||||
cloudwatch_backends[region] = CloudWatchBackend()
|
||||
for region in Session().get_available_regions("cloudwatch", partition_name="aws-cn"):
|
||||
cloudwatch_backends[region] = CloudWatchBackend()
|
||||
|
||||
# List of services that provide OOTB CW metrics
|
||||
# See the S3Backend constructor for an example
|
||||
# TODO: We might have to separate this out per region for non-global services
|
||||
metric_providers = {}
|
||||
|
@ -92,6 +92,18 @@ class CloudWatchResponse(BaseResponse):
|
||||
template = self.response_template(PUT_METRIC_DATA_TEMPLATE)
|
||||
return template.render()
|
||||
|
||||
@amzn_request_id
|
||||
def get_metric_data(self):
|
||||
start = dtparse(self._get_param("StartTime"))
|
||||
end = dtparse(self._get_param("EndTime"))
|
||||
queries = self._get_list_prefix("MetricDataQueries.member")
|
||||
results = self.cloudwatch_backend.get_metric_data(
|
||||
start_time=start, end_time=end, queries=queries
|
||||
)
|
||||
|
||||
template = self.response_template(GET_METRIC_DATA_TEMPLATE)
|
||||
return template.render(results=results)
|
||||
|
||||
@amzn_request_id
|
||||
def get_metric_statistics(self):
|
||||
namespace = self._get_param("Namespace")
|
||||
@ -124,9 +136,10 @@ class CloudWatchResponse(BaseResponse):
|
||||
def list_metrics(self):
|
||||
namespace = self._get_param("Namespace")
|
||||
metric_name = self._get_param("MetricName")
|
||||
dimensions = self._get_multi_param("Dimensions.member")
|
||||
next_token = self._get_param("NextToken")
|
||||
next_token, metrics = self.cloudwatch_backend.list_metrics(
|
||||
next_token, namespace, metric_name
|
||||
next_token, namespace, metric_name, dimensions
|
||||
)
|
||||
template = self.response_template(LIST_METRICS_TEMPLATE)
|
||||
return template.render(metrics=metrics, next_token=next_token)
|
||||
@ -285,6 +298,35 @@ PUT_METRIC_DATA_TEMPLATE = """<PutMetricDataResponse xmlns="http://monitoring.am
|
||||
</ResponseMetadata>
|
||||
</PutMetricDataResponse>"""
|
||||
|
||||
GET_METRIC_DATA_TEMPLATE = """<GetMetricDataResponse xmlns="http://monitoring.amazonaws.com/doc/2010-08-01/">
|
||||
<ResponseMetadata>
|
||||
<RequestId>
|
||||
{{ request_id }}
|
||||
</RequestId>
|
||||
</ResponseMetadata>
|
||||
<GetMetricDataResult>
|
||||
<MetricDataResults>
|
||||
{% for result in results %}
|
||||
<member>
|
||||
<Id>{{ result.id }}</Id>
|
||||
<Label>{{ result.label }}</Label>
|
||||
<StatusCode>Complete</StatusCode>
|
||||
<Timestamps>
|
||||
{% for val in result.timestamps %}
|
||||
<member>{{ val }}</member>
|
||||
{% endfor %}
|
||||
</Timestamps>
|
||||
<Values>
|
||||
{% for val in result.vals %}
|
||||
<member>{{ val }}</member>
|
||||
{% endfor %}
|
||||
</Values>
|
||||
</member>
|
||||
{% endfor %}
|
||||
</MetricDataResults>
|
||||
</GetMetricDataResult>
|
||||
</GetMetricDataResponse>"""
|
||||
|
||||
GET_METRIC_STATISTICS_TEMPLATE = """<GetMetricStatisticsResponse xmlns="http://monitoring.amazonaws.com/doc/2010-08-01/">
|
||||
<ResponseMetadata>
|
||||
<RequestId>
|
||||
|
@ -1,5 +1,5 @@
|
||||
from moto.core.utils import get_random_hex
|
||||
from uuid import uuid4
|
||||
|
||||
|
||||
def get_random_identity_id(region):
|
||||
return "{0}:{1}".format(region, get_random_hex(length=19))
|
||||
return "{0}:{1}".format(region, uuid4())
|
||||
|
@ -328,3 +328,25 @@ def py2_strip_unicode_keys(blob):
|
||||
blob = new_set
|
||||
|
||||
return blob
|
||||
|
||||
|
||||
def tags_from_query_string(
|
||||
querystring_dict, prefix="Tag", key_suffix="Key", value_suffix="Value"
|
||||
):
|
||||
response_values = {}
|
||||
for key, value in querystring_dict.items():
|
||||
if key.startswith(prefix) and key.endswith(key_suffix):
|
||||
tag_index = key.replace(prefix + ".", "").replace("." + key_suffix, "")
|
||||
tag_key = querystring_dict.get(
|
||||
"{prefix}.{index}.{key_suffix}".format(
|
||||
prefix=prefix, index=tag_index, key_suffix=key_suffix,
|
||||
)
|
||||
)[0]
|
||||
tag_value_key = "{prefix}.{index}.{value_suffix}".format(
|
||||
prefix=prefix, index=tag_index, value_suffix=value_suffix,
|
||||
)
|
||||
if tag_value_key in querystring_dict:
|
||||
response_values[tag_key] = querystring_dict.get(tag_value_key)[0]
|
||||
else:
|
||||
response_values[tag_key] = None
|
||||
return response_values
|
||||
|
@ -39,6 +39,17 @@ class AttributeDoesNotExist(MockValidationException):
|
||||
super(AttributeDoesNotExist, self).__init__(self.attr_does_not_exist_msg)
|
||||
|
||||
|
||||
class ProvidedKeyDoesNotExist(MockValidationException):
|
||||
provided_key_does_not_exist_msg = (
|
||||
"The provided key element does not match the schema"
|
||||
)
|
||||
|
||||
def __init__(self):
|
||||
super(ProvidedKeyDoesNotExist, self).__init__(
|
||||
self.provided_key_does_not_exist_msg
|
||||
)
|
||||
|
||||
|
||||
class ExpressionAttributeNameNotDefined(InvalidUpdateExpression):
|
||||
name_not_defined_msg = "An expression attribute name used in the document path is not defined; attribute name: {n}"
|
||||
|
||||
@ -131,3 +142,10 @@ class IncorrectOperandType(InvalidUpdateExpression):
|
||||
super(IncorrectOperandType, self).__init__(
|
||||
self.inv_operand_msg.format(f=operator_or_function, t=operand_type)
|
||||
)
|
||||
|
||||
|
||||
class IncorrectDataType(MockValidationException):
|
||||
inc_data_type_msg = "An operand in the update expression has an incorrect data type"
|
||||
|
||||
def __init__(self):
|
||||
super(IncorrectDataType, self).__init__(self.inc_data_type_msg)
|
||||
|
@ -8,7 +8,6 @@ import re
|
||||
import uuid
|
||||
|
||||
from boto3 import Session
|
||||
from botocore.exceptions import ParamValidationError
|
||||
from moto.compat import OrderedDict
|
||||
from moto.core import BaseBackend, BaseModel
|
||||
from moto.core.utils import unix_time
|
||||
@ -20,8 +19,9 @@ from moto.dynamodb2.exceptions import (
|
||||
ItemSizeTooLarge,
|
||||
ItemSizeToUpdateTooLarge,
|
||||
)
|
||||
from moto.dynamodb2.models.utilities import bytesize, attribute_is_list
|
||||
from moto.dynamodb2.models.utilities import bytesize
|
||||
from moto.dynamodb2.models.dynamo_type import DynamoType
|
||||
from moto.dynamodb2.parsing.executors import UpdateExpressionExecutor
|
||||
from moto.dynamodb2.parsing.expressions import UpdateExpressionParser
|
||||
from moto.dynamodb2.parsing.validators import UpdateExpressionValidator
|
||||
|
||||
@ -71,9 +71,23 @@ class Item(BaseModel):
|
||||
for key, value in attrs.items():
|
||||
self.attrs[key] = DynamoType(value)
|
||||
|
||||
def __eq__(self, other):
|
||||
return all(
|
||||
[
|
||||
self.hash_key == other.hash_key,
|
||||
self.hash_key_type == other.hash_key_type,
|
||||
self.range_key == other.range_key,
|
||||
self.range_key_type == other.range_key_type,
|
||||
self.attrs == other.attrs,
|
||||
]
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "Item: {0}".format(self.to_json())
|
||||
|
||||
def size(self):
|
||||
return sum(bytesize(key) + value.size() for key, value in self.attrs.items())
|
||||
|
||||
def to_json(self):
|
||||
attributes = {}
|
||||
for attribute_key, attribute in self.attrs.items():
|
||||
@ -91,192 +105,6 @@ class Item(BaseModel):
|
||||
included = self.attrs
|
||||
return {"Item": included}
|
||||
|
||||
def update(
|
||||
self, update_expression, expression_attribute_names, expression_attribute_values
|
||||
):
|
||||
# Update subexpressions are identifiable by the operator keyword, so split on that and
|
||||
# get rid of the empty leading string.
|
||||
parts = [
|
||||
p
|
||||
for p in re.split(
|
||||
r"\b(SET|REMOVE|ADD|DELETE)\b", update_expression, flags=re.I
|
||||
)
|
||||
if p
|
||||
]
|
||||
# make sure that we correctly found only operator/value pairs
|
||||
assert (
|
||||
len(parts) % 2 == 0
|
||||
), "Mismatched operators and values in update expression: '{}'".format(
|
||||
update_expression
|
||||
)
|
||||
for action, valstr in zip(parts[:-1:2], parts[1::2]):
|
||||
action = action.upper()
|
||||
|
||||
# "Should" retain arguments in side (...)
|
||||
values = re.split(r",(?![^(]*\))", valstr)
|
||||
for value in values:
|
||||
# A Real value
|
||||
value = value.lstrip(":").rstrip(",").strip()
|
||||
for k, v in expression_attribute_names.items():
|
||||
value = re.sub(r"{0}\b".format(k), v, value)
|
||||
|
||||
if action == "REMOVE":
|
||||
key = value
|
||||
attr, list_index = attribute_is_list(key.split(".")[0])
|
||||
if "." not in key:
|
||||
if list_index:
|
||||
new_list = DynamoType(self.attrs[attr])
|
||||
new_list.delete(None, list_index)
|
||||
self.attrs[attr] = new_list
|
||||
else:
|
||||
self.attrs.pop(value, None)
|
||||
else:
|
||||
# Handle nested dict updates
|
||||
self.attrs[attr].delete(".".join(key.split(".")[1:]))
|
||||
elif action == "SET":
|
||||
key, value = value.split("=", 1)
|
||||
key = key.strip()
|
||||
value = value.strip()
|
||||
|
||||
# check whether key is a list
|
||||
attr, list_index = attribute_is_list(key.split(".")[0])
|
||||
# If value not exists, changes value to a default if needed, else its the same as it was
|
||||
value = self._get_default(value)
|
||||
# If operation == list_append, get the original value and append it
|
||||
value = self._get_appended_list(value, expression_attribute_values)
|
||||
|
||||
if type(value) != DynamoType:
|
||||
if value in expression_attribute_values:
|
||||
dyn_value = DynamoType(expression_attribute_values[value])
|
||||
else:
|
||||
dyn_value = DynamoType({"S": value})
|
||||
else:
|
||||
dyn_value = value
|
||||
|
||||
if "." in key and attr not in self.attrs:
|
||||
raise ValueError # Setting nested attr not allowed if first attr does not exist yet
|
||||
elif attr not in self.attrs:
|
||||
try:
|
||||
self.attrs[attr] = dyn_value # set new top-level attribute
|
||||
except ItemSizeTooLarge:
|
||||
raise ItemSizeToUpdateTooLarge()
|
||||
else:
|
||||
self.attrs[attr].set(
|
||||
".".join(key.split(".")[1:]), dyn_value, list_index
|
||||
) # set value recursively
|
||||
|
||||
elif action == "ADD":
|
||||
key, value = value.split(" ", 1)
|
||||
key = key.strip()
|
||||
value_str = value.strip()
|
||||
if value_str in expression_attribute_values:
|
||||
dyn_value = DynamoType(expression_attribute_values[value])
|
||||
else:
|
||||
raise TypeError
|
||||
|
||||
# Handle adding numbers - value gets added to existing value,
|
||||
# or added to 0 if it doesn't exist yet
|
||||
if dyn_value.is_number():
|
||||
existing = self.attrs.get(key, DynamoType({"N": "0"}))
|
||||
if not existing.same_type(dyn_value):
|
||||
raise TypeError()
|
||||
self.attrs[key] = DynamoType(
|
||||
{
|
||||
"N": str(
|
||||
decimal.Decimal(existing.value)
|
||||
+ decimal.Decimal(dyn_value.value)
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
# Handle adding sets - value is added to the set, or set is
|
||||
# created with only this value if it doesn't exist yet
|
||||
# New value must be of same set type as previous value
|
||||
elif dyn_value.is_set():
|
||||
key_head = key.split(".")[0]
|
||||
key_tail = ".".join(key.split(".")[1:])
|
||||
if key_head not in self.attrs:
|
||||
self.attrs[key_head] = DynamoType({dyn_value.type: {}})
|
||||
existing = self.attrs.get(key_head)
|
||||
existing = existing.get(key_tail)
|
||||
if existing.value and not existing.same_type(dyn_value):
|
||||
raise TypeError()
|
||||
new_set = set(existing.value or []).union(dyn_value.value)
|
||||
existing.set(
|
||||
key=None,
|
||||
new_value=DynamoType({dyn_value.type: list(new_set)}),
|
||||
)
|
||||
else: # Number and Sets are the only supported types for ADD
|
||||
raise TypeError
|
||||
|
||||
elif action == "DELETE":
|
||||
key, value = value.split(" ", 1)
|
||||
key = key.strip()
|
||||
value_str = value.strip()
|
||||
if value_str in expression_attribute_values:
|
||||
dyn_value = DynamoType(expression_attribute_values[value])
|
||||
else:
|
||||
raise TypeError
|
||||
|
||||
if not dyn_value.is_set():
|
||||
raise TypeError
|
||||
key_head = key.split(".")[0]
|
||||
key_tail = ".".join(key.split(".")[1:])
|
||||
existing = self.attrs.get(key_head)
|
||||
existing = existing.get(key_tail)
|
||||
if existing:
|
||||
if not existing.same_type(dyn_value):
|
||||
raise TypeError
|
||||
new_set = set(existing.value).difference(dyn_value.value)
|
||||
existing.set(
|
||||
key=None,
|
||||
new_value=DynamoType({existing.type: list(new_set)}),
|
||||
)
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
"{} update action not yet supported".format(action)
|
||||
)
|
||||
|
||||
def _get_appended_list(self, value, expression_attribute_values):
|
||||
if type(value) != DynamoType:
|
||||
list_append_re = re.match("list_append\\((.+),(.+)\\)", value)
|
||||
if list_append_re:
|
||||
new_value = expression_attribute_values[list_append_re.group(2).strip()]
|
||||
old_list_key = list_append_re.group(1)
|
||||
# old_key could be a function itself (if_not_exists)
|
||||
if old_list_key.startswith("if_not_exists"):
|
||||
old_list = self._get_default(old_list_key)
|
||||
if not isinstance(old_list, DynamoType):
|
||||
old_list = DynamoType(expression_attribute_values[old_list])
|
||||
else:
|
||||
old_list = self.attrs[old_list_key.split(".")[0]]
|
||||
if "." in old_list_key:
|
||||
# Value is nested inside a map - find the appropriate child attr
|
||||
old_list = old_list.child_attr(
|
||||
".".join(old_list_key.split(".")[1:])
|
||||
)
|
||||
if not old_list.is_list():
|
||||
raise ParamValidationError
|
||||
old_list.value.extend([DynamoType(v) for v in new_value["L"]])
|
||||
value = old_list
|
||||
return value
|
||||
|
||||
def _get_default(self, value):
|
||||
if value.startswith("if_not_exists"):
|
||||
# Function signature
|
||||
match = re.match(
|
||||
r".*if_not_exists\s*\((?P<path>.+),\s*(?P<default>.+)\).*", value
|
||||
)
|
||||
if not match:
|
||||
raise TypeError
|
||||
|
||||
path, value = match.groups()
|
||||
|
||||
# If it already exists, get its value so we dont overwrite it
|
||||
if path in self.attrs:
|
||||
value = self.attrs[path]
|
||||
return value
|
||||
|
||||
def update_with_attribute_updates(self, attribute_updates):
|
||||
for attribute_name, update_action in attribute_updates.items():
|
||||
action = update_action["Action"]
|
||||
@ -921,6 +749,14 @@ class Table(BaseModel):
|
||||
break
|
||||
|
||||
last_evaluated_key = None
|
||||
size_limit = 1000000 # DynamoDB has a 1MB size limit
|
||||
item_size = sum(res.size() for res in results)
|
||||
if item_size > size_limit:
|
||||
item_size = idx = 0
|
||||
while item_size + results[idx].size() < size_limit:
|
||||
item_size += results[idx].size()
|
||||
idx += 1
|
||||
limit = min(limit, idx) if limit else idx
|
||||
if limit and len(results) > limit:
|
||||
results = results[:limit]
|
||||
last_evaluated_key = {self.hash_key_attr: results[-1].hash_key}
|
||||
@ -1198,9 +1034,9 @@ class DynamoDBBackend(BaseBackend):
|
||||
table_name,
|
||||
key,
|
||||
update_expression,
|
||||
attribute_updates,
|
||||
expression_attribute_names,
|
||||
expression_attribute_values,
|
||||
attribute_updates=None,
|
||||
expected=None,
|
||||
condition_expression=None,
|
||||
):
|
||||
@ -1255,17 +1091,18 @@ class DynamoDBBackend(BaseBackend):
|
||||
item = table.get_item(hash_value, range_value)
|
||||
|
||||
if update_expression:
|
||||
UpdateExpressionValidator(
|
||||
validated_ast = UpdateExpressionValidator(
|
||||
update_expression_ast,
|
||||
expression_attribute_names=expression_attribute_names,
|
||||
expression_attribute_values=expression_attribute_values,
|
||||
item=item,
|
||||
).validate()
|
||||
item.update(
|
||||
update_expression,
|
||||
expression_attribute_names,
|
||||
expression_attribute_values,
|
||||
)
|
||||
try:
|
||||
UpdateExpressionExecutor(
|
||||
validated_ast, item, expression_attribute_names
|
||||
).execute()
|
||||
except ItemSizeTooLarge:
|
||||
raise ItemSizeToUpdateTooLarge()
|
||||
else:
|
||||
item.update_with_attribute_updates(attribute_updates)
|
||||
if table.stream_shard is not None:
|
||||
@ -1321,6 +1158,94 @@ class DynamoDBBackend(BaseBackend):
|
||||
|
||||
return table.ttl
|
||||
|
||||
def transact_write_items(self, transact_items):
|
||||
# Create a backup in case any of the transactions fail
|
||||
original_table_state = copy.deepcopy(self.tables)
|
||||
try:
|
||||
for item in transact_items:
|
||||
if "ConditionCheck" in item:
|
||||
item = item["ConditionCheck"]
|
||||
key = item["Key"]
|
||||
table_name = item["TableName"]
|
||||
condition_expression = item.get("ConditionExpression", None)
|
||||
expression_attribute_names = item.get(
|
||||
"ExpressionAttributeNames", None
|
||||
)
|
||||
expression_attribute_values = item.get(
|
||||
"ExpressionAttributeValues", None
|
||||
)
|
||||
current = self.get_item(table_name, key)
|
||||
|
||||
condition_op = get_filter_expression(
|
||||
condition_expression,
|
||||
expression_attribute_names,
|
||||
expression_attribute_values,
|
||||
)
|
||||
if not condition_op.expr(current):
|
||||
raise ValueError("The conditional request failed")
|
||||
elif "Put" in item:
|
||||
item = item["Put"]
|
||||
attrs = item["Item"]
|
||||
table_name = item["TableName"]
|
||||
condition_expression = item.get("ConditionExpression", None)
|
||||
expression_attribute_names = item.get(
|
||||
"ExpressionAttributeNames", None
|
||||
)
|
||||
expression_attribute_values = item.get(
|
||||
"ExpressionAttributeValues", None
|
||||
)
|
||||
self.put_item(
|
||||
table_name,
|
||||
attrs,
|
||||
condition_expression=condition_expression,
|
||||
expression_attribute_names=expression_attribute_names,
|
||||
expression_attribute_values=expression_attribute_values,
|
||||
)
|
||||
elif "Delete" in item:
|
||||
item = item["Delete"]
|
||||
key = item["Key"]
|
||||
table_name = item["TableName"]
|
||||
condition_expression = item.get("ConditionExpression", None)
|
||||
expression_attribute_names = item.get(
|
||||
"ExpressionAttributeNames", None
|
||||
)
|
||||
expression_attribute_values = item.get(
|
||||
"ExpressionAttributeValues", None
|
||||
)
|
||||
self.delete_item(
|
||||
table_name,
|
||||
key,
|
||||
condition_expression=condition_expression,
|
||||
expression_attribute_names=expression_attribute_names,
|
||||
expression_attribute_values=expression_attribute_values,
|
||||
)
|
||||
elif "Update" in item:
|
||||
item = item["Update"]
|
||||
key = item["Key"]
|
||||
table_name = item["TableName"]
|
||||
update_expression = item["UpdateExpression"]
|
||||
condition_expression = item.get("ConditionExpression", None)
|
||||
expression_attribute_names = item.get(
|
||||
"ExpressionAttributeNames", None
|
||||
)
|
||||
expression_attribute_values = item.get(
|
||||
"ExpressionAttributeValues", None
|
||||
)
|
||||
self.update_item(
|
||||
table_name,
|
||||
key,
|
||||
update_expression=update_expression,
|
||||
condition_expression=condition_expression,
|
||||
expression_attribute_names=expression_attribute_names,
|
||||
expression_attribute_values=expression_attribute_values,
|
||||
)
|
||||
else:
|
||||
raise ValueError
|
||||
except: # noqa: E722 Do not use bare except
|
||||
# Rollback to the original state, and reraise the error
|
||||
self.tables = original_table_state
|
||||
raise
|
||||
|
||||
|
||||
dynamodb_backends = {}
|
||||
for region in Session().get_available_regions("dynamodb"):
|
||||
|
@ -1,10 +1,53 @@
|
||||
import six
|
||||
|
||||
from moto.dynamodb2.comparisons import get_comparison_func
|
||||
from moto.dynamodb2.exceptions import InvalidUpdateExpression
|
||||
from moto.dynamodb2.exceptions import InvalidUpdateExpression, IncorrectDataType
|
||||
from moto.dynamodb2.models.utilities import attribute_is_list, bytesize
|
||||
|
||||
|
||||
class DDBType(object):
|
||||
"""
|
||||
Official documentation at https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_AttributeValue.html
|
||||
"""
|
||||
|
||||
BINARY_SET = "BS"
|
||||
NUMBER_SET = "NS"
|
||||
STRING_SET = "SS"
|
||||
STRING = "S"
|
||||
NUMBER = "N"
|
||||
MAP = "M"
|
||||
LIST = "L"
|
||||
BOOLEAN = "BOOL"
|
||||
BINARY = "B"
|
||||
NULL = "NULL"
|
||||
|
||||
|
||||
class DDBTypeConversion(object):
|
||||
_human_type_mapping = {
|
||||
val: key.replace("_", " ")
|
||||
for key, val in DDBType.__dict__.items()
|
||||
if key.upper() == key
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def get_human_type(cls, abbreviated_type):
|
||||
"""
|
||||
Args:
|
||||
abbreviated_type(str): An attribute of DDBType
|
||||
|
||||
Returns:
|
||||
str: The human readable form of the DDBType.
|
||||
"""
|
||||
try:
|
||||
human_type_str = cls._human_type_mapping[abbreviated_type]
|
||||
except KeyError:
|
||||
raise ValueError(
|
||||
"Invalid abbreviated_type {at}".format(at=abbreviated_type)
|
||||
)
|
||||
|
||||
return human_type_str
|
||||
|
||||
|
||||
class DynamoType(object):
|
||||
"""
|
||||
http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html#DataModelDataTypes
|
||||
@ -50,13 +93,22 @@ class DynamoType(object):
|
||||
self.value = new_value.value
|
||||
else:
|
||||
if attr not in self.value: # nonexistingattribute
|
||||
type_of_new_attr = "M" if "." in key else new_value.type
|
||||
type_of_new_attr = DDBType.MAP if "." in key else new_value.type
|
||||
self.value[attr] = DynamoType({type_of_new_attr: {}})
|
||||
# {'M': {'foo': DynamoType}} ==> DynamoType.set(new_value)
|
||||
self.value[attr].set(
|
||||
".".join(key.split(".")[1:]), new_value, list_index
|
||||
)
|
||||
|
||||
def __contains__(self, item):
|
||||
if self.type == DDBType.STRING:
|
||||
return False
|
||||
try:
|
||||
self.__getitem__(item)
|
||||
return True
|
||||
except KeyError:
|
||||
return False
|
||||
|
||||
def delete(self, key, index=None):
|
||||
if index:
|
||||
if not key:
|
||||
@ -126,27 +178,35 @@ class DynamoType(object):
|
||||
def __add__(self, other):
|
||||
if self.type != other.type:
|
||||
raise TypeError("Different types of operandi is not allowed.")
|
||||
if self.type == "N":
|
||||
return DynamoType({"N": "{v}".format(v=int(self.value) + int(other.value))})
|
||||
if self.is_number():
|
||||
self_value = float(self.value) if "." in self.value else int(self.value)
|
||||
other_value = float(other.value) if "." in other.value else int(other.value)
|
||||
return DynamoType(
|
||||
{DDBType.NUMBER: "{v}".format(v=self_value + other_value)}
|
||||
)
|
||||
else:
|
||||
raise TypeError("Sum only supported for Numbers.")
|
||||
raise IncorrectDataType()
|
||||
|
||||
def __sub__(self, other):
|
||||
if self.type != other.type:
|
||||
raise TypeError("Different types of operandi is not allowed.")
|
||||
if self.type == "N":
|
||||
return DynamoType({"N": "{v}".format(v=int(self.value) - int(other.value))})
|
||||
if self.type == DDBType.NUMBER:
|
||||
self_value = float(self.value) if "." in self.value else int(self.value)
|
||||
other_value = float(other.value) if "." in other.value else int(other.value)
|
||||
return DynamoType(
|
||||
{DDBType.NUMBER: "{v}".format(v=self_value - other_value)}
|
||||
)
|
||||
else:
|
||||
raise TypeError("Sum only supported for Numbers.")
|
||||
|
||||
def __getitem__(self, item):
|
||||
if isinstance(item, six.string_types):
|
||||
# If our DynamoType is a map it should be subscriptable with a key
|
||||
if self.type == "M":
|
||||
if self.type == DDBType.MAP:
|
||||
return self.value[item]
|
||||
elif isinstance(item, int):
|
||||
# If our DynamoType is a list is should be subscriptable with an index
|
||||
if self.type == "L":
|
||||
if self.type == DDBType.LIST:
|
||||
return self.value[item]
|
||||
raise TypeError(
|
||||
"This DynamoType {dt} is not subscriptable by a {it}".format(
|
||||
@ -154,6 +214,20 @@ class DynamoType(object):
|
||||
)
|
||||
)
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
if isinstance(key, int):
|
||||
if self.is_list():
|
||||
if key >= len(self.value):
|
||||
# DynamoDB doesn't care you are out of box just add it to the end.
|
||||
self.value.append(value)
|
||||
else:
|
||||
self.value[key] = value
|
||||
elif isinstance(key, six.string_types):
|
||||
if self.is_map():
|
||||
self.value[key] = value
|
||||
else:
|
||||
raise NotImplementedError("No set_item for {t}".format(t=type(key)))
|
||||
|
||||
@property
|
||||
def cast_value(self):
|
||||
if self.is_number():
|
||||
@ -222,16 +296,22 @@ class DynamoType(object):
|
||||
return comparison_func(self.cast_value, *range_values)
|
||||
|
||||
def is_number(self):
|
||||
return self.type == "N"
|
||||
return self.type == DDBType.NUMBER
|
||||
|
||||
def is_set(self):
|
||||
return self.type == "SS" or self.type == "NS" or self.type == "BS"
|
||||
return self.type in (DDBType.STRING_SET, DDBType.NUMBER_SET, DDBType.BINARY_SET)
|
||||
|
||||
def is_list(self):
|
||||
return self.type == "L"
|
||||
return self.type == DDBType.LIST
|
||||
|
||||
def is_map(self):
|
||||
return self.type == "M"
|
||||
return self.type == DDBType.MAP
|
||||
|
||||
def same_type(self, other):
|
||||
return self.type == other.type
|
||||
|
||||
def pop(self, key, *args, **kwargs):
|
||||
if self.is_map() or self.is_list():
|
||||
self.value.pop(key, *args, **kwargs)
|
||||
else:
|
||||
raise TypeError("pop not supported for DynamoType {t}".format(t=self.type))
|
||||
|
262
moto/dynamodb2/parsing/executors.py
Normal file
262
moto/dynamodb2/parsing/executors.py
Normal file
@ -0,0 +1,262 @@
|
||||
from abc import abstractmethod
|
||||
|
||||
from moto.dynamodb2.exceptions import IncorrectOperandType, IncorrectDataType
|
||||
from moto.dynamodb2.models import DynamoType
|
||||
from moto.dynamodb2.models.dynamo_type import DDBTypeConversion, DDBType
|
||||
from moto.dynamodb2.parsing.ast_nodes import (
|
||||
UpdateExpressionSetAction,
|
||||
UpdateExpressionDeleteAction,
|
||||
UpdateExpressionRemoveAction,
|
||||
UpdateExpressionAddAction,
|
||||
UpdateExpressionPath,
|
||||
DDBTypedValue,
|
||||
ExpressionAttribute,
|
||||
ExpressionSelector,
|
||||
ExpressionAttributeName,
|
||||
)
|
||||
from moto.dynamodb2.parsing.validators import ExpressionPathResolver
|
||||
|
||||
|
||||
class NodeExecutor(object):
|
||||
def __init__(self, ast_node, expression_attribute_names):
|
||||
self.node = ast_node
|
||||
self.expression_attribute_names = expression_attribute_names
|
||||
|
||||
@abstractmethod
|
||||
def execute(self, item):
|
||||
pass
|
||||
|
||||
def get_item_part_for_path_nodes(self, item, path_nodes):
|
||||
"""
|
||||
For a list of path nodes travers the item by following the path_nodes
|
||||
Args:
|
||||
item(Item):
|
||||
path_nodes(list):
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
if len(path_nodes) == 0:
|
||||
return item.attrs
|
||||
else:
|
||||
return ExpressionPathResolver(
|
||||
self.expression_attribute_names
|
||||
).resolve_expression_path_nodes_to_dynamo_type(item, path_nodes)
|
||||
|
||||
def get_item_before_end_of_path(self, item):
|
||||
"""
|
||||
Get the part ot the item where the item will perform the action. For most actions this should be the parent. As
|
||||
that element will need to be modified by the action.
|
||||
Args:
|
||||
item(Item):
|
||||
|
||||
Returns:
|
||||
DynamoType or dict: The path to be set
|
||||
"""
|
||||
return self.get_item_part_for_path_nodes(
|
||||
item, self.get_path_expression_nodes()[:-1]
|
||||
)
|
||||
|
||||
def get_item_at_end_of_path(self, item):
|
||||
"""
|
||||
For a DELETE the path points at the stringset so we need to evaluate the full path.
|
||||
Args:
|
||||
item(Item):
|
||||
|
||||
Returns:
|
||||
DynamoType or dict: The path to be set
|
||||
"""
|
||||
return self.get_item_part_for_path_nodes(item, self.get_path_expression_nodes())
|
||||
|
||||
# Get the part ot the item where the item will perform the action. For most actions this should be the parent. As
|
||||
# that element will need to be modified by the action.
|
||||
get_item_part_in_which_to_perform_action = get_item_before_end_of_path
|
||||
|
||||
def get_path_expression_nodes(self):
|
||||
update_expression_path = self.node.children[0]
|
||||
assert isinstance(update_expression_path, UpdateExpressionPath)
|
||||
return update_expression_path.children
|
||||
|
||||
def get_element_to_action(self):
|
||||
return self.get_path_expression_nodes()[-1]
|
||||
|
||||
def get_action_value(self):
|
||||
"""
|
||||
|
||||
Returns:
|
||||
DynamoType: The value to be set
|
||||
"""
|
||||
ddb_typed_value = self.node.children[1]
|
||||
assert isinstance(ddb_typed_value, DDBTypedValue)
|
||||
dynamo_type_value = ddb_typed_value.children[0]
|
||||
assert isinstance(dynamo_type_value, DynamoType)
|
||||
return dynamo_type_value
|
||||
|
||||
|
||||
class SetExecutor(NodeExecutor):
|
||||
def execute(self, item):
|
||||
self.set(
|
||||
item_part_to_modify_with_set=self.get_item_part_in_which_to_perform_action(
|
||||
item
|
||||
),
|
||||
element_to_set=self.get_element_to_action(),
|
||||
value_to_set=self.get_action_value(),
|
||||
expression_attribute_names=self.expression_attribute_names,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def set(
|
||||
cls,
|
||||
item_part_to_modify_with_set,
|
||||
element_to_set,
|
||||
value_to_set,
|
||||
expression_attribute_names,
|
||||
):
|
||||
if isinstance(element_to_set, ExpressionAttribute):
|
||||
attribute_name = element_to_set.get_attribute_name()
|
||||
item_part_to_modify_with_set[attribute_name] = value_to_set
|
||||
elif isinstance(element_to_set, ExpressionSelector):
|
||||
index = element_to_set.get_index()
|
||||
item_part_to_modify_with_set[index] = value_to_set
|
||||
elif isinstance(element_to_set, ExpressionAttributeName):
|
||||
attribute_name = expression_attribute_names[
|
||||
element_to_set.get_attribute_name_placeholder()
|
||||
]
|
||||
item_part_to_modify_with_set[attribute_name] = value_to_set
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
"Moto does not support setting {t} yet".format(t=type(element_to_set))
|
||||
)
|
||||
|
||||
|
||||
class DeleteExecutor(NodeExecutor):
|
||||
operator = "operator: DELETE"
|
||||
|
||||
def execute(self, item):
|
||||
string_set_to_remove = self.get_action_value()
|
||||
assert isinstance(string_set_to_remove, DynamoType)
|
||||
if not string_set_to_remove.is_set():
|
||||
raise IncorrectOperandType(
|
||||
self.operator,
|
||||
DDBTypeConversion.get_human_type(string_set_to_remove.type),
|
||||
)
|
||||
|
||||
string_set = self.get_item_at_end_of_path(item)
|
||||
assert isinstance(string_set, DynamoType)
|
||||
if string_set.type != string_set_to_remove.type:
|
||||
raise IncorrectDataType()
|
||||
# String set is currently implemented as a list
|
||||
string_set_list = string_set.value
|
||||
|
||||
stringset_to_remove_list = string_set_to_remove.value
|
||||
|
||||
for value in stringset_to_remove_list:
|
||||
try:
|
||||
string_set_list.remove(value)
|
||||
except (KeyError, ValueError):
|
||||
# DynamoDB does not mind if value is not present
|
||||
pass
|
||||
|
||||
|
||||
class RemoveExecutor(NodeExecutor):
|
||||
def execute(self, item):
|
||||
element_to_remove = self.get_element_to_action()
|
||||
if isinstance(element_to_remove, ExpressionAttribute):
|
||||
attribute_name = element_to_remove.get_attribute_name()
|
||||
self.get_item_part_in_which_to_perform_action(item).pop(
|
||||
attribute_name, None
|
||||
)
|
||||
elif isinstance(element_to_remove, ExpressionAttributeName):
|
||||
attribute_name = self.expression_attribute_names[
|
||||
element_to_remove.get_attribute_name_placeholder()
|
||||
]
|
||||
self.get_item_part_in_which_to_perform_action(item).pop(
|
||||
attribute_name, None
|
||||
)
|
||||
elif isinstance(element_to_remove, ExpressionSelector):
|
||||
index = element_to_remove.get_index()
|
||||
try:
|
||||
self.get_item_part_in_which_to_perform_action(item).pop(index)
|
||||
except IndexError:
|
||||
# DynamoDB does not care that index is out of bounds, it will just do nothing.
|
||||
pass
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
"Moto does not support setting {t} yet".format(
|
||||
t=type(element_to_remove)
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class AddExecutor(NodeExecutor):
|
||||
def execute(self, item):
|
||||
value_to_add = self.get_action_value()
|
||||
if isinstance(value_to_add, DynamoType):
|
||||
if value_to_add.is_set():
|
||||
current_string_set = self.get_item_at_end_of_path(item)
|
||||
assert isinstance(current_string_set, DynamoType)
|
||||
if not current_string_set.type == value_to_add.type:
|
||||
raise IncorrectDataType()
|
||||
# Sets are implemented as list
|
||||
for value in value_to_add.value:
|
||||
if value in current_string_set.value:
|
||||
continue
|
||||
else:
|
||||
current_string_set.value.append(value)
|
||||
elif value_to_add.type == DDBType.NUMBER:
|
||||
existing_value = self.get_item_at_end_of_path(item)
|
||||
assert isinstance(existing_value, DynamoType)
|
||||
if not existing_value.type == DDBType.NUMBER:
|
||||
raise IncorrectDataType()
|
||||
new_value = existing_value + value_to_add
|
||||
SetExecutor.set(
|
||||
item_part_to_modify_with_set=self.get_item_before_end_of_path(item),
|
||||
element_to_set=self.get_element_to_action(),
|
||||
value_to_set=new_value,
|
||||
expression_attribute_names=self.expression_attribute_names,
|
||||
)
|
||||
else:
|
||||
raise IncorrectDataType()
|
||||
|
||||
|
||||
class UpdateExpressionExecutor(object):
|
||||
execution_map = {
|
||||
UpdateExpressionSetAction: SetExecutor,
|
||||
UpdateExpressionAddAction: AddExecutor,
|
||||
UpdateExpressionRemoveAction: RemoveExecutor,
|
||||
UpdateExpressionDeleteAction: DeleteExecutor,
|
||||
}
|
||||
|
||||
def __init__(self, update_ast, item, expression_attribute_names):
|
||||
self.update_ast = update_ast
|
||||
self.item = item
|
||||
self.expression_attribute_names = expression_attribute_names
|
||||
|
||||
def execute(self, node=None):
|
||||
"""
|
||||
As explained in moto.dynamodb2.parsing.expressions.NestableExpressionParserMixin._create_node the order of nodes
|
||||
in the AST can be translated of the order of statements in the expression. As such we can start at the root node
|
||||
and process the nodes 1-by-1. If no specific execution for the node type is defined we can execute the children
|
||||
in order since it will be a container node that is expandable and left child will be first in the statement.
|
||||
|
||||
Args:
|
||||
node(Node):
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
if node is None:
|
||||
node = self.update_ast
|
||||
|
||||
node_executor = self.get_specific_execution(node)
|
||||
if node_executor is None:
|
||||
for node in node.children:
|
||||
self.execute(node)
|
||||
else:
|
||||
node_executor(node, self.expression_attribute_names).execute(self.item)
|
||||
|
||||
def get_specific_execution(self, node):
|
||||
for node_class in self.execution_map:
|
||||
if isinstance(node, node_class):
|
||||
return self.execution_map[node_class]
|
||||
return None
|
@ -11,6 +11,7 @@ from moto.dynamodb2.exceptions import (
|
||||
ExpressionAttributeNameNotDefined,
|
||||
IncorrectOperandType,
|
||||
InvalidUpdateExpressionInvalidDocumentPath,
|
||||
ProvidedKeyDoesNotExist,
|
||||
)
|
||||
from moto.dynamodb2.models import DynamoType
|
||||
from moto.dynamodb2.parsing.ast_nodes import (
|
||||
@ -56,6 +57,76 @@ class ExpressionAttributeValueProcessor(DepthFirstTraverser):
|
||||
return DDBTypedValue(DynamoType(target))
|
||||
|
||||
|
||||
class ExpressionPathResolver(object):
|
||||
def __init__(self, expression_attribute_names):
|
||||
self.expression_attribute_names = expression_attribute_names
|
||||
|
||||
@classmethod
|
||||
def raise_exception_if_keyword(cls, attribute):
|
||||
if attribute.upper() in ReservedKeywords.get_reserved_keywords():
|
||||
raise AttributeIsReservedKeyword(attribute)
|
||||
|
||||
def resolve_expression_path(self, item, update_expression_path):
|
||||
assert isinstance(update_expression_path, UpdateExpressionPath)
|
||||
return self.resolve_expression_path_nodes(item, update_expression_path.children)
|
||||
|
||||
def resolve_expression_path_nodes(self, item, update_expression_path_nodes):
|
||||
target = item.attrs
|
||||
|
||||
for child in update_expression_path_nodes:
|
||||
# First replace placeholder with attribute_name
|
||||
attr_name = None
|
||||
if isinstance(child, ExpressionAttributeName):
|
||||
attr_placeholder = child.get_attribute_name_placeholder()
|
||||
try:
|
||||
attr_name = self.expression_attribute_names[attr_placeholder]
|
||||
except KeyError:
|
||||
raise ExpressionAttributeNameNotDefined(attr_placeholder)
|
||||
elif isinstance(child, ExpressionAttribute):
|
||||
attr_name = child.get_attribute_name()
|
||||
self.raise_exception_if_keyword(attr_name)
|
||||
if attr_name is not None:
|
||||
# Resolv attribute_name
|
||||
try:
|
||||
target = target[attr_name]
|
||||
except (KeyError, TypeError):
|
||||
if child == update_expression_path_nodes[-1]:
|
||||
return NoneExistingPath(creatable=True)
|
||||
return NoneExistingPath()
|
||||
else:
|
||||
if isinstance(child, ExpressionPathDescender):
|
||||
continue
|
||||
elif isinstance(child, ExpressionSelector):
|
||||
index = child.get_index()
|
||||
if target.is_list():
|
||||
try:
|
||||
target = target[index]
|
||||
except IndexError:
|
||||
# When a list goes out of bounds when assigning that is no problem when at the assignment
|
||||
# side. It will just append to the list.
|
||||
if child == update_expression_path_nodes[-1]:
|
||||
return NoneExistingPath(creatable=True)
|
||||
return NoneExistingPath()
|
||||
else:
|
||||
raise InvalidUpdateExpressionInvalidDocumentPath
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
"Path resolution for {t}".format(t=type(child))
|
||||
)
|
||||
if not isinstance(target, DynamoType):
|
||||
print(target)
|
||||
return DDBTypedValue(target)
|
||||
|
||||
def resolve_expression_path_nodes_to_dynamo_type(
|
||||
self, item, update_expression_path_nodes
|
||||
):
|
||||
node = self.resolve_expression_path_nodes(item, update_expression_path_nodes)
|
||||
if isinstance(node, NoneExistingPath):
|
||||
raise ProvidedKeyDoesNotExist()
|
||||
assert isinstance(node, DDBTypedValue)
|
||||
return node.get_value()
|
||||
|
||||
|
||||
class ExpressionAttributeResolvingProcessor(DepthFirstTraverser):
|
||||
def _processing_map(self):
|
||||
return {
|
||||
@ -107,55 +178,9 @@ class ExpressionAttributeResolvingProcessor(DepthFirstTraverser):
|
||||
return node
|
||||
|
||||
def resolve_expression_path(self, node):
|
||||
assert isinstance(node, UpdateExpressionPath)
|
||||
|
||||
target = deepcopy(self.item.attrs)
|
||||
for child in node.children:
|
||||
# First replace placeholder with attribute_name
|
||||
attr_name = None
|
||||
if isinstance(child, ExpressionAttributeName):
|
||||
attr_placeholder = child.get_attribute_name_placeholder()
|
||||
try:
|
||||
attr_name = self.expression_attribute_names[attr_placeholder]
|
||||
except KeyError:
|
||||
raise ExpressionAttributeNameNotDefined(attr_placeholder)
|
||||
elif isinstance(child, ExpressionAttribute):
|
||||
attr_name = child.get_attribute_name()
|
||||
self.raise_exception_if_keyword(attr_name)
|
||||
if attr_name is not None:
|
||||
# Resolv attribute_name
|
||||
try:
|
||||
target = target[attr_name]
|
||||
except (KeyError, TypeError):
|
||||
if child == node.children[-1]:
|
||||
return NoneExistingPath(creatable=True)
|
||||
return NoneExistingPath()
|
||||
else:
|
||||
if isinstance(child, ExpressionPathDescender):
|
||||
continue
|
||||
elif isinstance(child, ExpressionSelector):
|
||||
index = child.get_index()
|
||||
if target.is_list():
|
||||
try:
|
||||
target = target[index]
|
||||
except IndexError:
|
||||
# When a list goes out of bounds when assigning that is no problem when at the assignment
|
||||
# side. It will just append to the list.
|
||||
if child == node.children[-1]:
|
||||
return NoneExistingPath(creatable=True)
|
||||
return NoneExistingPath()
|
||||
else:
|
||||
raise InvalidUpdateExpressionInvalidDocumentPath
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
"Path resolution for {t}".format(t=type(child))
|
||||
)
|
||||
return DDBTypedValue(DynamoType(target))
|
||||
|
||||
@classmethod
|
||||
def raise_exception_if_keyword(cls, attribute):
|
||||
if attribute.upper() in ReservedKeywords.get_reserved_keywords():
|
||||
raise AttributeIsReservedKeyword(attribute)
|
||||
return ExpressionPathResolver(
|
||||
self.expression_attribute_names
|
||||
).resolve_expression_path(self.item, node)
|
||||
|
||||
|
||||
class UpdateExpressionFunctionEvaluator(DepthFirstTraverser):
|
||||
@ -183,7 +208,9 @@ class UpdateExpressionFunctionEvaluator(DepthFirstTraverser):
|
||||
assert isinstance(result, (DDBTypedValue, NoneExistingPath))
|
||||
return result
|
||||
elif function_name == "list_append":
|
||||
first_arg = self.get_list_from_ddb_typed_value(first_arg, function_name)
|
||||
first_arg = deepcopy(
|
||||
self.get_list_from_ddb_typed_value(first_arg, function_name)
|
||||
)
|
||||
second_arg = self.get_list_from_ddb_typed_value(second_arg, function_name)
|
||||
for list_element in second_arg.value:
|
||||
first_arg.value.append(list_element)
|
||||
|
@ -470,8 +470,10 @@ class DynamoHandler(BaseResponse):
|
||||
for k, v in six.iteritems(self.body.get("ExpressionAttributeNames", {}))
|
||||
)
|
||||
|
||||
if " AND " in key_condition_expression:
|
||||
expressions = key_condition_expression.split(" AND ", 1)
|
||||
if " and " in key_condition_expression.lower():
|
||||
expressions = re.split(
|
||||
" AND ", key_condition_expression, maxsplit=1, flags=re.IGNORECASE
|
||||
)
|
||||
|
||||
index_hash_key = [key for key in index if key["KeyType"] == "HASH"][0]
|
||||
hash_key_var = reverse_attribute_lookup.get(
|
||||
@ -760,12 +762,12 @@ class DynamoHandler(BaseResponse):
|
||||
item = self.dynamodb_backend.update_item(
|
||||
name,
|
||||
key,
|
||||
update_expression,
|
||||
attribute_updates,
|
||||
expression_attribute_names,
|
||||
expression_attribute_values,
|
||||
expected,
|
||||
condition_expression,
|
||||
update_expression=update_expression,
|
||||
attribute_updates=attribute_updates,
|
||||
expression_attribute_names=expression_attribute_names,
|
||||
expression_attribute_values=expression_attribute_values,
|
||||
expected=expected,
|
||||
condition_expression=condition_expression,
|
||||
)
|
||||
except MockValidationException as mve:
|
||||
er = "com.amazonaws.dynamodb.v20111205#ValidationException"
|
||||
@ -922,3 +924,15 @@ class DynamoHandler(BaseResponse):
|
||||
result.update({"ConsumedCapacity": [v for v in consumed_capacity.values()]})
|
||||
|
||||
return dynamo_json_dump(result)
|
||||
|
||||
def transact_write_items(self):
|
||||
transact_items = self.body["TransactItems"]
|
||||
try:
|
||||
self.dynamodb_backend.transact_write_items(transact_items)
|
||||
except ValueError:
|
||||
er = "com.amazonaws.dynamodb.v20111205#ConditionalCheckFailedException"
|
||||
return self.error(
|
||||
er, "A condition specified in the operation could not be evaluated."
|
||||
)
|
||||
response = {"ConsumedCapacity": [], "ItemCollectionMetrics": {}}
|
||||
return dynamo_json_dump(response)
|
||||
|
@ -557,6 +557,10 @@ class Instance(TaggedEC2Resource, BotoInstance):
|
||||
# worst case we'll get IP address exaustion... rarely
|
||||
pass
|
||||
|
||||
def add_block_device(self, size, device_path):
|
||||
volume = self.ec2_backend.create_volume(size, self.region_name)
|
||||
self.ec2_backend.attach_volume(volume.id, self.id, device_path)
|
||||
|
||||
def setup_defaults(self):
|
||||
# Default have an instance with root volume should you not wish to
|
||||
# override with attach volume cmd.
|
||||
@ -564,9 +568,10 @@ class Instance(TaggedEC2Resource, BotoInstance):
|
||||
self.ec2_backend.attach_volume(volume.id, self.id, "/dev/sda1")
|
||||
|
||||
def teardown_defaults(self):
|
||||
volume_id = self.block_device_mapping["/dev/sda1"].volume_id
|
||||
self.ec2_backend.detach_volume(volume_id, self.id, "/dev/sda1")
|
||||
self.ec2_backend.delete_volume(volume_id)
|
||||
if "/dev/sda1" in self.block_device_mapping:
|
||||
volume_id = self.block_device_mapping["/dev/sda1"].volume_id
|
||||
self.ec2_backend.detach_volume(volume_id, self.id, "/dev/sda1")
|
||||
self.ec2_backend.delete_volume(volume_id)
|
||||
|
||||
@property
|
||||
def get_block_device_mapping(self):
|
||||
@ -621,6 +626,7 @@ class Instance(TaggedEC2Resource, BotoInstance):
|
||||
subnet_id=properties.get("SubnetId"),
|
||||
key_name=properties.get("KeyName"),
|
||||
private_ip=properties.get("PrivateIpAddress"),
|
||||
block_device_mappings=properties.get("BlockDeviceMappings", {}),
|
||||
)
|
||||
instance = reservation.instances[0]
|
||||
for tag in properties.get("Tags", []):
|
||||
@ -880,7 +886,14 @@ class InstanceBackend(object):
|
||||
)
|
||||
new_reservation.instances.append(new_instance)
|
||||
new_instance.add_tags(instance_tags)
|
||||
new_instance.setup_defaults()
|
||||
if "block_device_mappings" in kwargs:
|
||||
for block_device in kwargs["block_device_mappings"]:
|
||||
new_instance.add_block_device(
|
||||
block_device["Ebs"]["VolumeSize"], block_device["DeviceName"]
|
||||
)
|
||||
else:
|
||||
new_instance.setup_defaults()
|
||||
|
||||
return new_reservation
|
||||
|
||||
def start_instances(self, instance_ids):
|
||||
|
@ -52,7 +52,7 @@ class InstanceResponse(BaseResponse):
|
||||
private_ip = self._get_param("PrivateIpAddress")
|
||||
associate_public_ip = self._get_param("AssociatePublicIpAddress")
|
||||
key_name = self._get_param("KeyName")
|
||||
ebs_optimized = self._get_param("EbsOptimized")
|
||||
ebs_optimized = self._get_param("EbsOptimized") or False
|
||||
instance_initiated_shutdown_behavior = self._get_param(
|
||||
"InstanceInitiatedShutdownBehavior"
|
||||
)
|
||||
|
@ -2,7 +2,8 @@ from __future__ import unicode_literals
|
||||
|
||||
from moto.core.responses import BaseResponse
|
||||
from moto.ec2.models import validate_resource_ids
|
||||
from moto.ec2.utils import tags_from_query_string, filters_from_querystring
|
||||
from moto.ec2.utils import filters_from_querystring
|
||||
from moto.core.utils import tags_from_query_string
|
||||
|
||||
|
||||
class TagResponse(BaseResponse):
|
||||
|
@ -196,22 +196,6 @@ def split_route_id(route_id):
|
||||
return values[0], values[1]
|
||||
|
||||
|
||||
def tags_from_query_string(querystring_dict):
|
||||
prefix = "Tag"
|
||||
suffix = "Key"
|
||||
response_values = {}
|
||||
for key, value in querystring_dict.items():
|
||||
if key.startswith(prefix) and key.endswith(suffix):
|
||||
tag_index = key.replace(prefix + ".", "").replace("." + suffix, "")
|
||||
tag_key = querystring_dict.get("Tag.{0}.Key".format(tag_index))[0]
|
||||
tag_value_key = "Tag.{0}.Value".format(tag_index)
|
||||
if tag_value_key in querystring_dict:
|
||||
response_values[tag_key] = querystring_dict.get(tag_value_key)[0]
|
||||
else:
|
||||
response_values[tag_key] = None
|
||||
return response_values
|
||||
|
||||
|
||||
def dhcp_configuration_from_querystring(querystring, option="DhcpConfiguration"):
|
||||
"""
|
||||
turn:
|
||||
|
4
moto/elasticbeanstalk/__init__.py
Normal file
4
moto/elasticbeanstalk/__init__.py
Normal file
@ -0,0 +1,4 @@
|
||||
from .models import eb_backends
|
||||
from moto.core.models import base_decorator
|
||||
|
||||
mock_elasticbeanstalk = base_decorator(eb_backends)
|
15
moto/elasticbeanstalk/exceptions.py
Normal file
15
moto/elasticbeanstalk/exceptions.py
Normal file
@ -0,0 +1,15 @@
|
||||
from moto.core.exceptions import RESTError
|
||||
|
||||
|
||||
class InvalidParameterValueError(RESTError):
|
||||
def __init__(self, message):
|
||||
super(InvalidParameterValueError, self).__init__(
|
||||
"InvalidParameterValue", message
|
||||
)
|
||||
|
||||
|
||||
class ResourceNotFoundException(RESTError):
|
||||
def __init__(self, message):
|
||||
super(ResourceNotFoundException, self).__init__(
|
||||
"ResourceNotFoundException", message
|
||||
)
|
152
moto/elasticbeanstalk/models.py
Normal file
152
moto/elasticbeanstalk/models.py
Normal file
@ -0,0 +1,152 @@
|
||||
import weakref
|
||||
|
||||
from boto3 import Session
|
||||
|
||||
from moto.core import BaseBackend, BaseModel
|
||||
from .exceptions import InvalidParameterValueError, ResourceNotFoundException
|
||||
|
||||
|
||||
class FakeEnvironment(BaseModel):
|
||||
def __init__(
|
||||
self, application, environment_name, solution_stack_name, tags,
|
||||
):
|
||||
self.application = weakref.proxy(
|
||||
application
|
||||
) # weakref to break circular dependencies
|
||||
self.environment_name = environment_name
|
||||
self.solution_stack_name = solution_stack_name
|
||||
self.tags = tags
|
||||
|
||||
@property
|
||||
def application_name(self):
|
||||
return self.application.application_name
|
||||
|
||||
@property
|
||||
def environment_arn(self):
|
||||
return (
|
||||
"arn:aws:elasticbeanstalk:{region}:{account_id}:"
|
||||
"environment/{application_name}/{environment_name}".format(
|
||||
region=self.region,
|
||||
account_id="123456789012",
|
||||
application_name=self.application_name,
|
||||
environment_name=self.environment_name,
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def platform_arn(self):
|
||||
return "TODO" # TODO
|
||||
|
||||
@property
|
||||
def region(self):
|
||||
return self.application.region
|
||||
|
||||
|
||||
class FakeApplication(BaseModel):
|
||||
def __init__(self, backend, application_name):
|
||||
self.backend = weakref.proxy(backend) # weakref to break cycles
|
||||
self.application_name = application_name
|
||||
self.environments = dict()
|
||||
|
||||
def create_environment(
|
||||
self, environment_name, solution_stack_name, tags,
|
||||
):
|
||||
if environment_name in self.environments:
|
||||
raise InvalidParameterValueError
|
||||
|
||||
env = FakeEnvironment(
|
||||
application=self,
|
||||
environment_name=environment_name,
|
||||
solution_stack_name=solution_stack_name,
|
||||
tags=tags,
|
||||
)
|
||||
self.environments[environment_name] = env
|
||||
|
||||
return env
|
||||
|
||||
@property
|
||||
def region(self):
|
||||
return self.backend.region
|
||||
|
||||
|
||||
class EBBackend(BaseBackend):
    """In-memory backend for a single region of the Elastic Beanstalk API."""

    def __init__(self, region):
        self.region = region
        self.applications = dict()

    def reset(self):
        """Wipe all state but keep the region (standard moto reset pattern)."""
        # preserve region
        region = self.region
        self._reset_model_refs()
        self.__dict__ = {}
        self.__init__(region)

    def create_application(self, application_name):
        """Create an application; error if the name is already taken."""
        if application_name in self.applications:
            raise InvalidParameterValueError(
                "Application {} already exists.".format(application_name)
            )
        new_app = FakeApplication(backend=self, application_name=application_name)
        self.applications[application_name] = new_app
        return new_app

    def create_environment(self, app, environment_name, stack_name, tags):
        """Create an environment within the given application."""
        return app.create_environment(
            environment_name=environment_name,
            solution_stack_name=stack_name,
            tags=tags,
        )

    def describe_environments(self):
        """Return every environment across all applications in this region."""
        return [
            env
            for app in self.applications.values()
            for env in app.environments.values()
        ]

    def list_available_solution_stacks(self):
        # Implemented in response.py; the stack list is a static template and
        # needs no backend state.
        pass

    def update_tags_for_resource(self, resource_arn, tags_to_add, tags_to_remove):
        """Add and/or remove tags on the environment identified by ARN.

        Raises ResourceNotFoundException if no environment matches the ARN.
        """
        try:
            res = self._find_environment_by_arn(resource_arn)
        except KeyError:
            raise ResourceNotFoundException(
                "Resource not found for ARN '{}'.".format(resource_arn)
            )

        for key, value in tags_to_add.items():
            res.tags[key] = value

        for key in tags_to_remove:
            # Ignore keys that are not present instead of raising KeyError;
            # removing an absent tag should be a no-op.
            res.tags.pop(key, None)

    def list_tags_for_resource(self, resource_arn):
        """Return the tag dict of the environment identified by ARN.

        Raises ResourceNotFoundException if no environment matches the ARN.
        """
        try:
            res = self._find_environment_by_arn(resource_arn)
        except KeyError:
            raise ResourceNotFoundException(
                "Resource not found for ARN '{}'.".format(resource_arn)
            )
        return res.tags

    def _find_environment_by_arn(self, arn):
        """Return the environment with the given ARN, or raise KeyError."""
        for app in self.applications.values():
            for env in app.environments.values():
                if env.environment_arn == arn:
                    return env
        raise KeyError()
|
||||
|
||||
eb_backends = {}
# One backend instance per region, across the standard, GovCloud and China
# partitions.
for _partition_name in ("aws", "aws-us-gov", "aws-cn"):
    for region in Session().get_available_regions(
        "elasticbeanstalk", partition_name=_partition_name
    ):
        eb_backends[region] = EBBackend(region)
|
1386
moto/elasticbeanstalk/responses.py
Normal file
1386
moto/elasticbeanstalk/responses.py
Normal file
File diff suppressed because it is too large
Load Diff
11
moto/elasticbeanstalk/urls.py
Normal file
11
moto/elasticbeanstalk/urls.py
Normal file
@ -0,0 +1,11 @@
|
||||
from __future__ import unicode_literals

from .responses import EBResponse

# Hostname pattern for the regional Elastic Beanstalk endpoints; the region
# is captured so the dispatcher can pick the matching backend.
url_bases = [
    r"https?://elasticbeanstalk.(?P<region>[a-zA-Z0-9\-_]+).amazonaws.com",
]

# All Elastic Beanstalk actions are POSTed to the service root.
url_paths = {
    "{0}/$": EBResponse.dispatch,
}
|
@ -10,9 +10,10 @@ from six.moves.urllib.parse import urlparse
|
||||
from moto.core.responses import AWSServiceSpec
|
||||
from moto.core.responses import BaseResponse
|
||||
from moto.core.responses import xml_to_json_response
|
||||
from moto.core.utils import tags_from_query_string
|
||||
from .exceptions import EmrError
|
||||
from .models import emr_backends
|
||||
from .utils import steps_from_query_string, tags_from_query_string
|
||||
from .utils import steps_from_query_string
|
||||
|
||||
|
||||
def generate_boto3_response(operation):
|
||||
@ -91,7 +92,7 @@ class ElasticMapReduceResponse(BaseResponse):
|
||||
@generate_boto3_response("AddTags")
|
||||
def add_tags(self):
|
||||
cluster_id = self._get_param("ResourceId")
|
||||
tags = tags_from_query_string(self.querystring)
|
||||
tags = tags_from_query_string(self.querystring, prefix="Tags")
|
||||
self.backend.add_tags(cluster_id, tags)
|
||||
template = self.response_template(ADD_TAGS_TEMPLATE)
|
||||
return template.render()
|
||||
|
@ -22,22 +22,6 @@ def random_instance_group_id(size=13):
|
||||
return "i-{0}".format(random_id())
|
||||
|
||||
|
||||
def tags_from_query_string(querystring_dict):
|
||||
prefix = "Tags"
|
||||
suffix = "Key"
|
||||
response_values = {}
|
||||
for key, value in querystring_dict.items():
|
||||
if key.startswith(prefix) and key.endswith(suffix):
|
||||
tag_index = key.replace(prefix + ".", "").replace("." + suffix, "")
|
||||
tag_key = querystring_dict.get("Tags.{0}.Key".format(tag_index))[0]
|
||||
tag_value_key = "Tags.{0}.Value".format(tag_index)
|
||||
if tag_value_key in querystring_dict:
|
||||
response_values[tag_key] = querystring_dict.get(tag_value_key)[0]
|
||||
else:
|
||||
response_values[tag_key] = None
|
||||
return response_values
|
||||
|
||||
|
||||
def steps_from_query_string(querystring_dict):
|
||||
steps = []
|
||||
for step in querystring_dict:
|
||||
|
@ -145,10 +145,7 @@ class ResourceGroupsTaggingAPIBackend(BaseBackend):
|
||||
# Do S3, resource type s3
|
||||
if not resource_type_filters or "s3" in resource_type_filters:
|
||||
for bucket in self.s3_backend.buckets.values():
|
||||
tags = []
|
||||
for tag in bucket.tags.tag_set.tags:
|
||||
tags.append({"Key": tag.key, "Value": tag.value})
|
||||
|
||||
tags = self.s3_backend.tagger.list_tags_for_resource(bucket.arn)["Tags"]
|
||||
if not tags or not tag_filter(
|
||||
tags
|
||||
): # Skip if no tags, or invalid filter
|
||||
@ -362,8 +359,9 @@ class ResourceGroupsTaggingAPIBackend(BaseBackend):
|
||||
|
||||
# Do S3, resource type s3
|
||||
for bucket in self.s3_backend.buckets.values():
|
||||
for tag in bucket.tags.tag_set.tags:
|
||||
yield tag.key
|
||||
tags = self.s3_backend.tagger.get_tag_dict_for_resource(bucket.arn)
|
||||
for key, _ in tags.items():
|
||||
yield key
|
||||
|
||||
# EC2 tags
|
||||
def get_ec2_keys(res_id):
|
||||
@ -414,9 +412,10 @@ class ResourceGroupsTaggingAPIBackend(BaseBackend):
|
||||
|
||||
# Do S3, resource type s3
|
||||
for bucket in self.s3_backend.buckets.values():
|
||||
for tag in bucket.tags.tag_set.tags:
|
||||
if tag.key == tag_key:
|
||||
yield tag.value
|
||||
tags = self.s3_backend.tagger.get_tag_dict_for_resource(bucket.arn)
|
||||
for key, value in tags.items():
|
||||
if key == tag_key:
|
||||
yield value
|
||||
|
||||
# EC2 tags
|
||||
def get_ec2_values(res_id):
|
||||
|
@ -22,6 +22,8 @@ import six
|
||||
from bisect import insort
|
||||
from moto.core import ACCOUNT_ID, BaseBackend, BaseModel
|
||||
from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime
|
||||
from moto.cloudwatch.models import MetricDatum
|
||||
from moto.utilities.tagging_service import TaggingService
|
||||
from .exceptions import (
|
||||
BucketAlreadyExists,
|
||||
MissingBucket,
|
||||
@ -34,7 +36,6 @@ from .exceptions import (
|
||||
MalformedXML,
|
||||
InvalidStorageClass,
|
||||
InvalidTargetBucketForLogging,
|
||||
DuplicateTagKeys,
|
||||
CrossLocationLoggingProhibitted,
|
||||
NoSuchPublicAccessBlockConfiguration,
|
||||
InvalidPublicAccessBlockConfiguration,
|
||||
@ -94,6 +95,7 @@ class FakeKey(BaseModel):
|
||||
version_id=0,
|
||||
max_buffer_size=DEFAULT_KEY_BUFFER_SIZE,
|
||||
multipart=None,
|
||||
bucket_name=None,
|
||||
):
|
||||
self.name = name
|
||||
self.last_modified = datetime.datetime.utcnow()
|
||||
@ -105,8 +107,8 @@ class FakeKey(BaseModel):
|
||||
self._etag = etag
|
||||
self._version_id = version_id
|
||||
self._is_versioned = is_versioned
|
||||
self._tagging = FakeTagging()
|
||||
self.multipart = multipart
|
||||
self.bucket_name = bucket_name
|
||||
|
||||
self._value_buffer = tempfile.SpooledTemporaryFile(max_size=max_buffer_size)
|
||||
self._max_buffer_size = max_buffer_size
|
||||
@ -126,6 +128,13 @@ class FakeKey(BaseModel):
|
||||
self.lock.release()
|
||||
return r
|
||||
|
||||
@property
|
||||
def arn(self):
|
||||
# S3 Objects don't have an ARN, but we do need something unique when creating tags against this resource
|
||||
return "arn:aws:s3:::{}/{}/{}".format(
|
||||
self.bucket_name, self.name, self.version_id
|
||||
)
|
||||
|
||||
@value.setter
|
||||
def value(self, new_value):
|
||||
self._value_buffer.seek(0)
|
||||
@ -152,9 +161,6 @@ class FakeKey(BaseModel):
|
||||
self._metadata = {}
|
||||
self._metadata.update(metadata)
|
||||
|
||||
def set_tagging(self, tagging):
|
||||
self._tagging = tagging
|
||||
|
||||
def set_storage_class(self, storage):
|
||||
if storage is not None and storage not in STORAGE_CLASS:
|
||||
raise InvalidStorageClass(storage=storage)
|
||||
@ -210,10 +216,6 @@ class FakeKey(BaseModel):
|
||||
def metadata(self):
|
||||
return self._metadata
|
||||
|
||||
@property
|
||||
def tagging(self):
|
||||
return self._tagging
|
||||
|
||||
@property
|
||||
def response_dict(self):
|
||||
res = {
|
||||
@ -471,26 +473,10 @@ def get_canned_acl(acl):
|
||||
return FakeAcl(grants=grants)
|
||||
|
||||
|
||||
class FakeTagging(BaseModel):
|
||||
def __init__(self, tag_set=None):
|
||||
self.tag_set = tag_set or FakeTagSet()
|
||||
|
||||
|
||||
class FakeTagSet(BaseModel):
|
||||
def __init__(self, tags=None):
|
||||
self.tags = tags or []
|
||||
|
||||
|
||||
class FakeTag(BaseModel):
|
||||
def __init__(self, key, value=None):
|
||||
self.key = key
|
||||
self.value = value
|
||||
|
||||
|
||||
class LifecycleFilter(BaseModel):
|
||||
def __init__(self, prefix=None, tag=None, and_filter=None):
|
||||
self.prefix = prefix
|
||||
self.tag = tag
|
||||
(self.tag_key, self.tag_value) = tag if tag else (None, None)
|
||||
self.and_filter = and_filter
|
||||
|
||||
def to_config_dict(self):
|
||||
@ -499,11 +485,11 @@ class LifecycleFilter(BaseModel):
|
||||
"predicate": {"type": "LifecyclePrefixPredicate", "prefix": self.prefix}
|
||||
}
|
||||
|
||||
elif self.tag:
|
||||
elif self.tag_key:
|
||||
return {
|
||||
"predicate": {
|
||||
"type": "LifecycleTagPredicate",
|
||||
"tag": {"key": self.tag.key, "value": self.tag.value},
|
||||
"tag": {"key": self.tag_key, "value": self.tag_value},
|
||||
}
|
||||
}
|
||||
|
||||
@ -527,12 +513,9 @@ class LifecycleAndFilter(BaseModel):
|
||||
if self.prefix is not None:
|
||||
data.append({"type": "LifecyclePrefixPredicate", "prefix": self.prefix})
|
||||
|
||||
for tag in self.tags:
|
||||
for key, value in self.tags.items():
|
||||
data.append(
|
||||
{
|
||||
"type": "LifecycleTagPredicate",
|
||||
"tag": {"key": tag.key, "value": tag.value},
|
||||
}
|
||||
{"type": "LifecycleTagPredicate", "tag": {"key": key, "value": value},}
|
||||
)
|
||||
|
||||
return data
|
||||
@ -787,7 +770,6 @@ class FakeBucket(BaseModel):
|
||||
self.policy = None
|
||||
self.website_configuration = None
|
||||
self.acl = get_canned_acl("private")
|
||||
self.tags = FakeTagging()
|
||||
self.cors = []
|
||||
self.logging = {}
|
||||
self.notification_configuration = None
|
||||
@ -879,7 +861,7 @@ class FakeBucket(BaseModel):
|
||||
and_filter = None
|
||||
if rule["Filter"].get("And"):
|
||||
filters += 1
|
||||
and_tags = []
|
||||
and_tags = {}
|
||||
if rule["Filter"]["And"].get("Tag"):
|
||||
if not isinstance(rule["Filter"]["And"]["Tag"], list):
|
||||
rule["Filter"]["And"]["Tag"] = [
|
||||
@ -887,7 +869,7 @@ class FakeBucket(BaseModel):
|
||||
]
|
||||
|
||||
for t in rule["Filter"]["And"]["Tag"]:
|
||||
and_tags.append(FakeTag(t["Key"], t.get("Value", "")))
|
||||
and_tags[t["Key"]] = t.get("Value", "")
|
||||
|
||||
try:
|
||||
and_prefix = (
|
||||
@ -901,7 +883,7 @@ class FakeBucket(BaseModel):
|
||||
filter_tag = None
|
||||
if rule["Filter"].get("Tag"):
|
||||
filters += 1
|
||||
filter_tag = FakeTag(
|
||||
filter_tag = (
|
||||
rule["Filter"]["Tag"]["Key"],
|
||||
rule["Filter"]["Tag"].get("Value", ""),
|
||||
)
|
||||
@ -988,16 +970,6 @@ class FakeBucket(BaseModel):
|
||||
def delete_cors(self):
|
||||
self.cors = []
|
||||
|
||||
def set_tags(self, tagging):
|
||||
self.tags = tagging
|
||||
|
||||
def delete_tags(self):
|
||||
self.tags = FakeTagging()
|
||||
|
||||
@property
|
||||
def tagging(self):
|
||||
return self.tags
|
||||
|
||||
def set_logging(self, logging_config, bucket_backend):
|
||||
if not logging_config:
|
||||
self.logging = {}
|
||||
@ -1085,6 +1057,10 @@ class FakeBucket(BaseModel):
|
||||
def set_acl(self, acl):
|
||||
self.acl = acl
|
||||
|
||||
@property
|
||||
def arn(self):
|
||||
return "arn:aws:s3:::{}".format(self.name)
|
||||
|
||||
@property
|
||||
def physical_resource_id(self):
|
||||
return self.name
|
||||
@ -1110,7 +1086,7 @@ class FakeBucket(BaseModel):
|
||||
int(time.mktime(self.creation_date.timetuple()))
|
||||
), # PY2 and 3 compatible
|
||||
"configurationItemMD5Hash": "",
|
||||
"arn": "arn:aws:s3:::{}".format(self.name),
|
||||
"arn": self.arn,
|
||||
"resourceType": "AWS::S3::Bucket",
|
||||
"resourceId": self.name,
|
||||
"resourceName": self.name,
|
||||
@ -1119,7 +1095,7 @@ class FakeBucket(BaseModel):
|
||||
"resourceCreationTime": str(self.creation_date),
|
||||
"relatedEvents": [],
|
||||
"relationships": [],
|
||||
"tags": {tag.key: tag.value for tag in self.tagging.tag_set.tags},
|
||||
"tags": s3_backend.tagger.get_tag_dict_for_resource(self.arn),
|
||||
"configuration": {
|
||||
"name": self.name,
|
||||
"owner": {"id": OWNER},
|
||||
@ -1181,6 +1157,42 @@ class S3Backend(BaseBackend):
|
||||
def __init__(self):
|
||||
self.buckets = {}
|
||||
self.account_public_access_block = None
|
||||
self.tagger = TaggingService()
|
||||
|
||||
# TODO: This is broken! DO NOT IMPORT MUTABLE DATA TYPES FROM OTHER AREAS -- THIS BREAKS UNMOCKING!
|
||||
# WRAP WITH A GETTER/SETTER FUNCTION
|
||||
# Register this class as a CloudWatch Metric Provider
|
||||
# Must provide a method 'get_cloudwatch_metrics' that will return a list of metrics, based on the data available
|
||||
# metric_providers["S3"] = self
|
||||
|
||||
def get_cloudwatch_metrics(self):
|
||||
metrics = []
|
||||
for name, bucket in self.buckets.items():
|
||||
metrics.append(
|
||||
MetricDatum(
|
||||
namespace="AWS/S3",
|
||||
name="BucketSizeBytes",
|
||||
value=bucket.keys.item_size(),
|
||||
dimensions=[
|
||||
{"Name": "StorageType", "Value": "StandardStorage"},
|
||||
{"Name": "BucketName", "Value": name},
|
||||
],
|
||||
timestamp=datetime.datetime.now(),
|
||||
)
|
||||
)
|
||||
metrics.append(
|
||||
MetricDatum(
|
||||
namespace="AWS/S3",
|
||||
name="NumberOfObjects",
|
||||
value=len(bucket.keys),
|
||||
dimensions=[
|
||||
{"Name": "StorageType", "Value": "AllStorageTypes"},
|
||||
{"Name": "BucketName", "Value": name},
|
||||
],
|
||||
timestamp=datetime.datetime.now(),
|
||||
)
|
||||
)
|
||||
return metrics
|
||||
|
||||
def create_bucket(self, bucket_name, region_name):
|
||||
if bucket_name in self.buckets:
|
||||
@ -1350,23 +1362,32 @@ class S3Backend(BaseBackend):
|
||||
else:
|
||||
return None
|
||||
|
||||
def set_key_tagging(self, bucket_name, key_name, tagging, version_id=None):
|
||||
key = self.get_key(bucket_name, key_name, version_id)
|
||||
def get_key_tags(self, key):
|
||||
return self.tagger.list_tags_for_resource(key.arn)
|
||||
|
||||
def set_key_tags(self, key, tags, key_name=None):
|
||||
if key is None:
|
||||
raise MissingKey(key_name)
|
||||
key.set_tagging(tagging)
|
||||
self.tagger.delete_all_tags_for_resource(key.arn)
|
||||
self.tagger.tag_resource(
|
||||
key.arn, [{"Key": key, "Value": value} for key, value in tags.items()],
|
||||
)
|
||||
return key
|
||||
|
||||
def put_bucket_tagging(self, bucket_name, tagging):
|
||||
tag_keys = [tag.key for tag in tagging.tag_set.tags]
|
||||
if len(tag_keys) != len(set(tag_keys)):
|
||||
raise DuplicateTagKeys()
|
||||
def get_bucket_tags(self, bucket_name):
|
||||
bucket = self.get_bucket(bucket_name)
|
||||
bucket.set_tags(tagging)
|
||||
return self.tagger.list_tags_for_resource(bucket.arn)
|
||||
|
||||
def put_bucket_tags(self, bucket_name, tags):
|
||||
bucket = self.get_bucket(bucket_name)
|
||||
self.tagger.delete_all_tags_for_resource(bucket.arn)
|
||||
self.tagger.tag_resource(
|
||||
bucket.arn, [{"Key": key, "Value": value} for key, value in tags.items()],
|
||||
)
|
||||
|
||||
def delete_bucket_tagging(self, bucket_name):
|
||||
bucket = self.get_bucket(bucket_name)
|
||||
bucket.delete_tags()
|
||||
self.tagger.delete_all_tags_for_resource(bucket.arn)
|
||||
|
||||
def put_bucket_cors(self, bucket_name, cors_rules):
|
||||
bucket = self.get_bucket(bucket_name)
|
||||
@ -1574,6 +1595,7 @@ class S3Backend(BaseBackend):
|
||||
key = self.get_key(src_bucket_name, src_key_name, version_id=src_version_id)
|
||||
|
||||
new_key = key.copy(dest_key_name, dest_bucket.is_versioned)
|
||||
self.tagger.copy_tags(key.arn, new_key.arn)
|
||||
|
||||
if storage is not None:
|
||||
new_key.set_storage_class(storage)
|
||||
|
@ -24,6 +24,7 @@ from moto.s3bucket_path.utils import (
|
||||
|
||||
from .exceptions import (
|
||||
BucketAlreadyExists,
|
||||
DuplicateTagKeys,
|
||||
S3ClientError,
|
||||
MissingBucket,
|
||||
MissingKey,
|
||||
@ -43,9 +44,6 @@ from .models import (
|
||||
FakeGrant,
|
||||
FakeAcl,
|
||||
FakeKey,
|
||||
FakeTagging,
|
||||
FakeTagSet,
|
||||
FakeTag,
|
||||
)
|
||||
from .utils import (
|
||||
bucket_name_from_url,
|
||||
@ -134,7 +132,8 @@ ACTION_MAP = {
|
||||
|
||||
|
||||
def parse_key_name(pth):
    """Drop the single leading '/' that urlparse leaves on the path, if any."""
    if pth.startswith("/"):
        return pth[1:]
    return pth
||||
|
||||
|
||||
def is_delete_keys(request, path, bucket_name):
|
||||
@ -378,13 +377,13 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
|
||||
template = self.response_template(S3_OBJECT_ACL_RESPONSE)
|
||||
return template.render(obj=bucket)
|
||||
elif "tagging" in querystring:
|
||||
bucket = self.backend.get_bucket(bucket_name)
|
||||
tags = self.backend.get_bucket_tags(bucket_name)["Tags"]
|
||||
# "Special Error" if no tags:
|
||||
if len(bucket.tagging.tag_set.tags) == 0:
|
||||
if len(tags) == 0:
|
||||
template = self.response_template(S3_NO_BUCKET_TAGGING)
|
||||
return 404, {}, template.render(bucket_name=bucket_name)
|
||||
template = self.response_template(S3_BUCKET_TAGGING_RESPONSE)
|
||||
return template.render(bucket=bucket)
|
||||
template = self.response_template(S3_OBJECT_TAGGING_RESPONSE)
|
||||
return template.render(tags=tags)
|
||||
elif "logging" in querystring:
|
||||
bucket = self.backend.get_bucket(bucket_name)
|
||||
if not bucket.logging:
|
||||
@ -652,7 +651,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
|
||||
return ""
|
||||
elif "tagging" in querystring:
|
||||
tagging = self._bucket_tagging_from_xml(body)
|
||||
self.backend.put_bucket_tagging(bucket_name, tagging)
|
||||
self.backend.put_bucket_tags(bucket_name, tagging)
|
||||
return ""
|
||||
elif "website" in querystring:
|
||||
self.backend.set_bucket_website_configuration(bucket_name, body)
|
||||
@ -839,27 +838,35 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
|
||||
|
||||
def _bucket_response_delete_keys(self, request, body, bucket_name):
|
||||
template = self.response_template(S3_DELETE_KEYS_RESPONSE)
|
||||
body_dict = xmltodict.parse(body)
|
||||
|
||||
keys = minidom.parseString(body).getElementsByTagName("Key")
|
||||
deleted_names = []
|
||||
error_names = []
|
||||
if len(keys) == 0:
|
||||
objects = body_dict["Delete"].get("Object", [])
|
||||
if not isinstance(objects, list):
|
||||
# We expect a list of objects, but when there is a single <Object> node xmltodict does not
|
||||
# return a list.
|
||||
objects = [objects]
|
||||
if len(objects) == 0:
|
||||
raise MalformedXML()
|
||||
|
||||
for k in keys:
|
||||
key_name = k.firstChild.nodeValue
|
||||
deleted_objects = []
|
||||
error_names = []
|
||||
|
||||
for object_ in objects:
|
||||
key_name = object_["Key"]
|
||||
version_id = object_.get("VersionId", None)
|
||||
|
||||
success = self.backend.delete_key(
|
||||
bucket_name, undo_clean_key_name(key_name)
|
||||
bucket_name, undo_clean_key_name(key_name), version_id=version_id
|
||||
)
|
||||
if success:
|
||||
deleted_names.append(key_name)
|
||||
deleted_objects.append((key_name, version_id))
|
||||
else:
|
||||
error_names.append(key_name)
|
||||
|
||||
return (
|
||||
200,
|
||||
{},
|
||||
template.render(deleted=deleted_names, delete_errors=error_names),
|
||||
template.render(deleted=deleted_objects, delete_errors=error_names),
|
||||
)
|
||||
|
||||
def _handle_range_header(self, request, headers, response_content):
|
||||
@ -1098,8 +1105,9 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
|
||||
template = self.response_template(S3_OBJECT_ACL_RESPONSE)
|
||||
return 200, response_headers, template.render(obj=key)
|
||||
if "tagging" in query:
|
||||
tags = self.backend.get_key_tags(key)["Tags"]
|
||||
template = self.response_template(S3_OBJECT_TAGGING_RESPONSE)
|
||||
return 200, response_headers, template.render(obj=key)
|
||||
return 200, response_headers, template.render(tags=tags)
|
||||
|
||||
response_headers.update(key.metadata)
|
||||
response_headers.update(key.response_dict)
|
||||
@ -1171,8 +1179,9 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
|
||||
version_id = query["versionId"][0]
|
||||
else:
|
||||
version_id = None
|
||||
key = self.backend.get_key(bucket_name, key_name, version_id=version_id)
|
||||
tagging = self._tagging_from_xml(body)
|
||||
self.backend.set_key_tagging(bucket_name, key_name, tagging, version_id)
|
||||
self.backend.set_key_tags(key, tagging, key_name)
|
||||
return 200, response_headers, ""
|
||||
|
||||
if "x-amz-copy-source" in request.headers:
|
||||
@ -1213,7 +1222,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
|
||||
tdirective = request.headers.get("x-amz-tagging-directive")
|
||||
if tdirective == "REPLACE":
|
||||
tagging = self._tagging_from_headers(request.headers)
|
||||
new_key.set_tagging(tagging)
|
||||
self.backend.set_key_tags(new_key, tagging)
|
||||
template = self.response_template(S3_OBJECT_COPY_RESPONSE)
|
||||
response_headers.update(new_key.response_dict)
|
||||
return 200, response_headers, template.render(key=new_key)
|
||||
@ -1237,7 +1246,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
|
||||
new_key.website_redirect_location = request.headers.get(
|
||||
"x-amz-website-redirect-location"
|
||||
)
|
||||
new_key.set_tagging(tagging)
|
||||
self.backend.set_key_tags(new_key, tagging)
|
||||
|
||||
response_headers.update(new_key.response_dict)
|
||||
return 200, response_headers, ""
|
||||
@ -1365,55 +1374,45 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
|
||||
return None
|
||||
|
||||
def _tagging_from_headers(self, headers):
|
||||
tags = {}
|
||||
if headers.get("x-amz-tagging"):
|
||||
parsed_header = parse_qs(headers["x-amz-tagging"], keep_blank_values=True)
|
||||
tags = []
|
||||
for tag in parsed_header.items():
|
||||
tags.append(FakeTag(tag[0], tag[1][0]))
|
||||
|
||||
tag_set = FakeTagSet(tags)
|
||||
tagging = FakeTagging(tag_set)
|
||||
return tagging
|
||||
else:
|
||||
return FakeTagging()
|
||||
tags[tag[0]] = tag[1][0]
|
||||
return tags
|
||||
|
||||
def _tagging_from_xml(self, xml):
|
||||
parsed_xml = xmltodict.parse(xml, force_list={"Tag": True})
|
||||
|
||||
tags = []
|
||||
tags = {}
|
||||
for tag in parsed_xml["Tagging"]["TagSet"]["Tag"]:
|
||||
tags.append(FakeTag(tag["Key"], tag["Value"]))
|
||||
tags[tag["Key"]] = tag["Value"]
|
||||
|
||||
tag_set = FakeTagSet(tags)
|
||||
tagging = FakeTagging(tag_set)
|
||||
return tagging
|
||||
return tags
|
||||
|
||||
def _bucket_tagging_from_xml(self, xml):
|
||||
parsed_xml = xmltodict.parse(xml)
|
||||
|
||||
tags = []
|
||||
tags = {}
|
||||
# Optional if no tags are being sent:
|
||||
if parsed_xml["Tagging"].get("TagSet"):
|
||||
# If there is only 1 tag, then it's not a list:
|
||||
if not isinstance(parsed_xml["Tagging"]["TagSet"]["Tag"], list):
|
||||
tags.append(
|
||||
FakeTag(
|
||||
parsed_xml["Tagging"]["TagSet"]["Tag"]["Key"],
|
||||
parsed_xml["Tagging"]["TagSet"]["Tag"]["Value"],
|
||||
)
|
||||
)
|
||||
tags[parsed_xml["Tagging"]["TagSet"]["Tag"]["Key"]] = parsed_xml[
|
||||
"Tagging"
|
||||
]["TagSet"]["Tag"]["Value"]
|
||||
else:
|
||||
for tag in parsed_xml["Tagging"]["TagSet"]["Tag"]:
|
||||
tags.append(FakeTag(tag["Key"], tag["Value"]))
|
||||
if tag["Key"] in tags:
|
||||
raise DuplicateTagKeys()
|
||||
tags[tag["Key"]] = tag["Value"]
|
||||
|
||||
# Verify that "aws:" is not in the tags. If so, then this is a problem:
|
||||
for tag in tags:
|
||||
if tag.key.startswith("aws:"):
|
||||
for key, _ in tags.items():
|
||||
if key.startswith("aws:"):
|
||||
raise NoSystemTags()
|
||||
|
||||
tag_set = FakeTagSet(tags)
|
||||
tagging = FakeTagging(tag_set)
|
||||
return tagging
|
||||
return tags
|
||||
|
||||
def _cors_from_xml(self, xml):
|
||||
parsed_xml = xmltodict.parse(xml)
|
||||
@ -1733,10 +1732,10 @@ S3_BUCKET_LIFECYCLE_CONFIGURATION = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
{% if rule.filter.prefix != None %}
|
||||
<Prefix>{{ rule.filter.prefix }}</Prefix>
|
||||
{% endif %}
|
||||
{% if rule.filter.tag %}
|
||||
{% if rule.filter.tag_key %}
|
||||
<Tag>
|
||||
<Key>{{ rule.filter.tag.key }}</Key>
|
||||
<Value>{{ rule.filter.tag.value }}</Value>
|
||||
<Key>{{ rule.filter.tag_key }}</Key>
|
||||
<Value>{{ rule.filter.tag_value }}</Value>
|
||||
</Tag>
|
||||
{% endif %}
|
||||
{% if rule.filter.and_filter %}
|
||||
@ -1744,10 +1743,10 @@ S3_BUCKET_LIFECYCLE_CONFIGURATION = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
{% if rule.filter.and_filter.prefix != None %}
|
||||
<Prefix>{{ rule.filter.and_filter.prefix }}</Prefix>
|
||||
{% endif %}
|
||||
{% for tag in rule.filter.and_filter.tags %}
|
||||
{% for key, value in rule.filter.and_filter.tags.items() %}
|
||||
<Tag>
|
||||
<Key>{{ tag.key }}</Key>
|
||||
<Value>{{ tag.value }}</Value>
|
||||
<Key>{{ key }}</Key>
|
||||
<Value>{{ value }}</Value>
|
||||
</Tag>
|
||||
{% endfor %}
|
||||
</And>
|
||||
@ -1861,9 +1860,10 @@ S3_BUCKET_GET_VERSIONS = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
|
||||
S3_DELETE_KEYS_RESPONSE = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01">
|
||||
{% for k in deleted %}
|
||||
{% for k, v in deleted %}
|
||||
<Deleted>
|
||||
<Key>{{k}}</Key>
|
||||
{% if v %}<VersionId>{{v}}</VersionId>{% endif %}
|
||||
</Deleted>
|
||||
{% endfor %}
|
||||
{% for k in delete_errors %}
|
||||
@ -1908,22 +1908,10 @@ S3_OBJECT_TAGGING_RESPONSE = """\
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Tagging xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
|
||||
<TagSet>
|
||||
{% for tag in obj.tagging.tag_set.tags %}
|
||||
{% for tag in tags %}
|
||||
<Tag>
|
||||
<Key>{{ tag.key }}</Key>
|
||||
<Value>{{ tag.value }}</Value>
|
||||
</Tag>
|
||||
{% endfor %}
|
||||
</TagSet>
|
||||
</Tagging>"""
|
||||
|
||||
S3_BUCKET_TAGGING_RESPONSE = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Tagging>
|
||||
<TagSet>
|
||||
{% for tag in bucket.tagging.tag_set.tags %}
|
||||
<Tag>
|
||||
<Key>{{ tag.key }}</Key>
|
||||
<Value>{{ tag.value }}</Value>
|
||||
<Key>{{ tag.Key }}</Key>
|
||||
<Value>{{ tag.Value }}</Value>
|
||||
</Tag>
|
||||
{% endfor %}
|
||||
</TagSet>
|
||||
|
@ -15,5 +15,5 @@ url_paths = {
|
||||
# path-based bucket + key
|
||||
"{0}/(?P<bucket_name_path>[^/]+)/(?P<key_name>.+)": S3ResponseInstance.key_or_control_response,
|
||||
# subdomain bucket + key with empty first part of path
|
||||
"{0}//(?P<key_name>.*)$": S3ResponseInstance.key_or_control_response,
|
||||
"{0}/(?P<key_name>/.*)$": S3ResponseInstance.key_or_control_response,
|
||||
}
|
||||
|
@ -146,6 +146,12 @@ class _VersionedKeyStore(dict):
|
||||
for key in self:
|
||||
yield key, self.getlist(key)
|
||||
|
||||
def item_size(self):
|
||||
size = 0
|
||||
for val in self.values():
|
||||
size += sys.getsizeof(val)
|
||||
return size
|
||||
|
||||
items = iteritems = _iteritems
|
||||
lists = iterlists = _iterlists
|
||||
values = itervalues = _itervalues
|
||||
|
@ -121,8 +121,16 @@ class SecretsManagerBackend(BaseBackend):
|
||||
"You can't perform this operation on the secret because it was marked for deletion."
|
||||
)
|
||||
|
||||
secret = self.secrets[secret_id]
|
||||
tags = secret["tags"]
|
||||
description = secret["description"]
|
||||
|
||||
version_id = self._add_secret(
|
||||
secret_id, secret_string=secret_string, secret_binary=secret_binary
|
||||
secret_id,
|
||||
secret_string=secret_string,
|
||||
secret_binary=secret_binary,
|
||||
description=description,
|
||||
tags=tags,
|
||||
)
|
||||
|
||||
response = json.dumps(
|
||||
@ -136,7 +144,13 @@ class SecretsManagerBackend(BaseBackend):
|
||||
return response
|
||||
|
||||
def create_secret(
|
||||
self, name, secret_string=None, secret_binary=None, tags=[], **kwargs
|
||||
self,
|
||||
name,
|
||||
secret_string=None,
|
||||
secret_binary=None,
|
||||
description=None,
|
||||
tags=[],
|
||||
**kwargs
|
||||
):
|
||||
|
||||
# error if secret exists
|
||||
@ -146,7 +160,11 @@ class SecretsManagerBackend(BaseBackend):
|
||||
)
|
||||
|
||||
version_id = self._add_secret(
|
||||
name, secret_string=secret_string, secret_binary=secret_binary, tags=tags
|
||||
name,
|
||||
secret_string=secret_string,
|
||||
secret_binary=secret_binary,
|
||||
description=description,
|
||||
tags=tags,
|
||||
)
|
||||
|
||||
response = json.dumps(
|
||||
@ -164,6 +182,7 @@ class SecretsManagerBackend(BaseBackend):
|
||||
secret_id,
|
||||
secret_string=None,
|
||||
secret_binary=None,
|
||||
description=None,
|
||||
tags=[],
|
||||
version_id=None,
|
||||
version_stages=None,
|
||||
@ -216,13 +235,27 @@ class SecretsManagerBackend(BaseBackend):
|
||||
secret["rotation_lambda_arn"] = ""
|
||||
secret["auto_rotate_after_days"] = 0
|
||||
secret["tags"] = tags
|
||||
secret["description"] = description
|
||||
|
||||
return version_id
|
||||
|
||||
def put_secret_value(self, secret_id, secret_string, secret_binary, version_stages):
|
||||
|
||||
if secret_id in self.secrets.keys():
|
||||
secret = self.secrets[secret_id]
|
||||
tags = secret["tags"]
|
||||
description = secret["description"]
|
||||
else:
|
||||
tags = []
|
||||
description = ""
|
||||
|
||||
version_id = self._add_secret(
|
||||
secret_id, secret_string, secret_binary, version_stages=version_stages
|
||||
secret_id,
|
||||
secret_string,
|
||||
secret_binary,
|
||||
description=description,
|
||||
tags=tags,
|
||||
version_stages=version_stages,
|
||||
)
|
||||
|
||||
response = json.dumps(
|
||||
@ -246,7 +279,7 @@ class SecretsManagerBackend(BaseBackend):
|
||||
{
|
||||
"ARN": secret_arn(self.region, secret["secret_id"]),
|
||||
"Name": secret["name"],
|
||||
"Description": "",
|
||||
"Description": secret.get("description", ""),
|
||||
"KmsKeyId": "",
|
||||
"RotationEnabled": secret["rotation_enabled"],
|
||||
"RotationLambdaARN": secret["rotation_lambda_arn"],
|
||||
@ -310,6 +343,7 @@ class SecretsManagerBackend(BaseBackend):
|
||||
self._add_secret(
|
||||
secret_id,
|
||||
old_secret_version["secret_string"],
|
||||
secret["description"],
|
||||
secret["tags"],
|
||||
version_id=new_version_id,
|
||||
version_stages=["AWSCURRENT"],
|
||||
@ -416,7 +450,7 @@ class SecretsManagerBackend(BaseBackend):
|
||||
{
|
||||
"ARN": secret_arn(self.region, secret["secret_id"]),
|
||||
"DeletedDate": secret.get("deleted_date", None),
|
||||
"Description": "",
|
||||
"Description": secret.get("description", ""),
|
||||
"KmsKeyId": "",
|
||||
"LastAccessedDate": None,
|
||||
"LastChangedDate": None,
|
||||
|
@ -21,11 +21,13 @@ class SecretsManagerResponse(BaseResponse):
|
||||
name = self._get_param("Name")
|
||||
secret_string = self._get_param("SecretString")
|
||||
secret_binary = self._get_param("SecretBinary")
|
||||
description = self._get_param("Description", if_none="")
|
||||
tags = self._get_param("Tags", if_none=[])
|
||||
return secretsmanager_backends[self.region].create_secret(
|
||||
name=name,
|
||||
secret_string=secret_string,
|
||||
secret_binary=secret_binary,
|
||||
description=description,
|
||||
tags=tags,
|
||||
)
|
||||
|
||||
|
@ -1,6 +1,7 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import argparse
|
||||
import io
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
@ -29,6 +30,7 @@ UNSIGNED_REQUESTS = {
|
||||
"AWSCognitoIdentityService": ("cognito-identity", "us-east-1"),
|
||||
"AWSCognitoIdentityProviderService": ("cognito-idp", "us-east-1"),
|
||||
}
|
||||
UNSIGNED_ACTIONS = {"AssumeRoleWithSAML": ("sts", "us-east-1")}
|
||||
|
||||
|
||||
class DomainDispatcherApplication(object):
|
||||
@ -77,9 +79,13 @@ class DomainDispatcherApplication(object):
|
||||
else:
|
||||
# Unsigned request
|
||||
target = environ.get("HTTP_X_AMZ_TARGET")
|
||||
action = self.get_action_from_body(environ)
|
||||
if target:
|
||||
service, _ = target.split(".", 1)
|
||||
service, region = UNSIGNED_REQUESTS.get(service, DEFAULT_SERVICE_REGION)
|
||||
elif action and action in UNSIGNED_ACTIONS:
|
||||
# See if we can match the Action to a known service
|
||||
service, region = UNSIGNED_ACTIONS.get(action)
|
||||
else:
|
||||
# S3 is the last resort when the target is also unknown
|
||||
service, region = DEFAULT_SERVICE_REGION
|
||||
@ -130,6 +136,26 @@ class DomainDispatcherApplication(object):
|
||||
self.app_instances[backend] = app
|
||||
return app
|
||||
|
||||
def get_action_from_body(self, environ):
|
||||
body = None
|
||||
try:
|
||||
# AWS requests use querystrings as the body (Action=x&Data=y&...)
|
||||
simple_form = environ["CONTENT_TYPE"].startswith(
|
||||
"application/x-www-form-urlencoded"
|
||||
)
|
||||
request_body_size = int(environ["CONTENT_LENGTH"])
|
||||
if simple_form and request_body_size:
|
||||
body = environ["wsgi.input"].read(request_body_size).decode("utf-8")
|
||||
body_dict = dict(x.split("=") for x in body.split("&"))
|
||||
return body_dict["Action"]
|
||||
except (KeyError, ValueError):
|
||||
pass
|
||||
finally:
|
||||
if body:
|
||||
# We've consumed the body = need to reset it
|
||||
environ["wsgi.input"] = io.StringIO(body)
|
||||
return None
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
backend_app = self.get_application(environ)
|
||||
return backend_app(environ, start_response)
|
||||
|
@ -1,5 +1,7 @@
|
||||
from __future__ import unicode_literals
|
||||
from base64 import b64decode
|
||||
import datetime
|
||||
import xmltodict
|
||||
from moto.core import BaseBackend, BaseModel
|
||||
from moto.core.utils import iso_8601_datetime_with_milliseconds
|
||||
from moto.core import ACCOUNT_ID
|
||||
@ -79,5 +81,24 @@ class STSBackend(BaseBackend):
|
||||
def assume_role_with_web_identity(self, **kwargs):
|
||||
return self.assume_role(**kwargs)
|
||||
|
||||
def assume_role_with_saml(self, **kwargs):
|
||||
del kwargs["principal_arn"]
|
||||
saml_assertion_encoded = kwargs.pop("saml_assertion")
|
||||
saml_assertion_decoded = b64decode(saml_assertion_encoded)
|
||||
saml_assertion = xmltodict.parse(saml_assertion_decoded.decode("utf-8"))
|
||||
kwargs["duration"] = int(
|
||||
saml_assertion["samlp:Response"]["Assertion"]["AttributeStatement"][
|
||||
"Attribute"
|
||||
][2]["AttributeValue"]
|
||||
)
|
||||
kwargs["role_session_name"] = saml_assertion["samlp:Response"]["Assertion"][
|
||||
"AttributeStatement"
|
||||
]["Attribute"][0]["AttributeValue"]
|
||||
kwargs["external_id"] = None
|
||||
kwargs["policy"] = None
|
||||
role = AssumedRole(**kwargs)
|
||||
self.assumed_roles.append(role)
|
||||
return role
|
||||
|
||||
|
||||
sts_backend = STSBackend()
|
||||
|
@ -71,6 +71,19 @@ class TokenResponse(BaseResponse):
|
||||
template = self.response_template(ASSUME_ROLE_WITH_WEB_IDENTITY_RESPONSE)
|
||||
return template.render(role=role)
|
||||
|
||||
def assume_role_with_saml(self):
|
||||
role_arn = self.querystring.get("RoleArn")[0]
|
||||
principal_arn = self.querystring.get("PrincipalArn")[0]
|
||||
saml_assertion = self.querystring.get("SAMLAssertion")[0]
|
||||
|
||||
role = sts_backend.assume_role_with_saml(
|
||||
role_arn=role_arn,
|
||||
principal_arn=principal_arn,
|
||||
saml_assertion=saml_assertion,
|
||||
)
|
||||
template = self.response_template(ASSUME_ROLE_WITH_SAML_RESPONSE)
|
||||
return template.render(role=role)
|
||||
|
||||
def get_caller_identity(self):
|
||||
template = self.response_template(GET_CALLER_IDENTITY_RESPONSE)
|
||||
|
||||
@ -168,6 +181,30 @@ ASSUME_ROLE_WITH_WEB_IDENTITY_RESPONSE = """<AssumeRoleWithWebIdentityResponse x
|
||||
</AssumeRoleWithWebIdentityResponse>"""
|
||||
|
||||
|
||||
ASSUME_ROLE_WITH_SAML_RESPONSE = """<AssumeRoleWithSAMLResponse xmlns="https://sts.amazonaws.com/doc/2011-06-15/">
|
||||
<AssumeRoleWithSAMLResult>
|
||||
<Audience>https://signin.aws.amazon.com/saml</Audience>
|
||||
<AssumedRoleUser>
|
||||
<AssumedRoleId>{{ role.user_id }}</AssumedRoleId>
|
||||
<Arn>{{ role.arn }}</Arn>
|
||||
</AssumedRoleUser>
|
||||
<Credentials>
|
||||
<AccessKeyId>{{ role.access_key_id }}</AccessKeyId>
|
||||
<SecretAccessKey>{{ role.secret_access_key }}</SecretAccessKey>
|
||||
<SessionToken>{{ role.session_token }}</SessionToken>
|
||||
<Expiration>{{ role.expiration_ISO8601 }}</Expiration>
|
||||
</Credentials>
|
||||
<Subject>{{ role.user_id }}</Subject>
|
||||
<NameQualifier>B64EncodedStringOfHashOfIssuerAccountIdAndUserId=</NameQualifier>
|
||||
<SubjectType>persistent</SubjectType>
|
||||
<Issuer>http://localhost:3000/</Issuer>
|
||||
</AssumeRoleWithSAMLResult>
|
||||
<ResponseMetadata>
|
||||
<RequestId>c6104cbe-af31-11e0-8154-cbc7ccf896c7</RequestId>
|
||||
</ResponseMetadata>
|
||||
</AssumeRoleWithSAMLResponse>"""
|
||||
|
||||
|
||||
GET_CALLER_IDENTITY_RESPONSE = """<GetCallerIdentityResponse xmlns="https://sts.amazonaws.com/doc/2011-06-15/">
|
||||
<GetCallerIdentityResult>
|
||||
<Arn>{{ arn }}</Arn>
|
||||
|
@ -5,15 +5,23 @@ class TaggingService:
|
||||
self.valueName = valueName
|
||||
self.tags = {}
|
||||
|
||||
def get_tag_dict_for_resource(self, arn):
|
||||
result = {}
|
||||
if self.has_tags(arn):
|
||||
for k, v in self.tags[arn].items():
|
||||
result[k] = v
|
||||
return result
|
||||
|
||||
def list_tags_for_resource(self, arn):
|
||||
result = []
|
||||
if arn in self.tags:
|
||||
if self.has_tags(arn):
|
||||
for k, v in self.tags[arn].items():
|
||||
result.append({self.keyName: k, self.valueName: v})
|
||||
return {self.tagName: result}
|
||||
|
||||
def delete_all_tags_for_resource(self, arn):
|
||||
del self.tags[arn]
|
||||
if self.has_tags(arn):
|
||||
del self.tags[arn]
|
||||
|
||||
def has_tags(self, arn):
|
||||
return arn in self.tags
|
||||
@ -27,6 +35,12 @@ class TaggingService:
|
||||
else:
|
||||
self.tags[arn][t[self.keyName]] = None
|
||||
|
||||
def copy_tags(self, from_arn, to_arn):
|
||||
if self.has_tags(from_arn):
|
||||
self.tag_resource(
|
||||
to_arn, self.list_tags_for_resource(from_arn)[self.tagName]
|
||||
)
|
||||
|
||||
def untag_resource_using_names(self, arn, tag_names):
|
||||
for name in tag_names:
|
||||
if name in self.tags.get(arn, {}):
|
||||
|
1
setup.py
1
setup.py
@ -101,5 +101,4 @@ setup(
|
||||
project_urls={
|
||||
"Documentation": "http://docs.getmoto.org/en/latest/",
|
||||
},
|
||||
data_files=[('', ['moto/dynamodb2/parsing/reserved_keywords.txt'])],
|
||||
)
|
||||
|
@ -69,6 +69,22 @@ def test_create_rest_api_with_tags():
|
||||
response["tags"].should.equal({"MY_TAG1": "MY_VALUE1"})
|
||||
|
||||
|
||||
@mock_apigateway
|
||||
def test_create_rest_api_with_policy():
|
||||
client = boto3.client("apigateway", region_name="us-west-2")
|
||||
|
||||
policy = '{"Version": "2012-10-17","Statement": []}'
|
||||
response = client.create_rest_api(
|
||||
name="my_api", description="this is my api", policy=policy
|
||||
)
|
||||
api_id = response["id"]
|
||||
|
||||
response = client.get_rest_api(restApiId=api_id)
|
||||
|
||||
assert "policy" in response
|
||||
response["policy"].should.equal(policy)
|
||||
|
||||
|
||||
@mock_apigateway
|
||||
def test_create_rest_api_invalid_apikeysource():
|
||||
client = boto3.client("apigateway", region_name="us-west-2")
|
||||
|
@ -1677,6 +1677,42 @@ def test_create_function_with_unknown_arn():
|
||||
)
|
||||
|
||||
|
||||
@mock_lambda
|
||||
def test_remove_function_permission():
|
||||
conn = boto3.client("lambda", _lambda_region)
|
||||
zip_content = get_test_zip_file1()
|
||||
conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role=(get_role_name()),
|
||||
Handler="lambda_function.handler",
|
||||
Code={"ZipFile": zip_content},
|
||||
Description="test lambda function",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
Publish=True,
|
||||
)
|
||||
|
||||
conn.add_permission(
|
||||
FunctionName="testFunction",
|
||||
StatementId="1",
|
||||
Action="lambda:InvokeFunction",
|
||||
Principal="432143214321",
|
||||
SourceArn="arn:aws:lambda:us-west-2:account-id:function:helloworld",
|
||||
SourceAccount="123412341234",
|
||||
EventSourceToken="blah",
|
||||
Qualifier="2",
|
||||
)
|
||||
|
||||
remove = conn.remove_permission(
|
||||
FunctionName="testFunction", StatementId="1", Qualifier="2",
|
||||
)
|
||||
remove["ResponseMetadata"]["HTTPStatusCode"].should.equal(204)
|
||||
policy = conn.get_policy(FunctionName="testFunction", Qualifier="2")["Policy"]
|
||||
policy = json.loads(policy)
|
||||
policy["Statement"].should.equal([])
|
||||
|
||||
|
||||
def create_invalid_lambda(role):
|
||||
conn = boto3.client("lambda", _lambda_region)
|
||||
zip_content = get_test_zip_file1()
|
||||
|
@ -835,8 +835,10 @@ def test_describe_change_set():
|
||||
)
|
||||
|
||||
stack = cf_conn.describe_change_set(ChangeSetName="NewChangeSet")
|
||||
|
||||
stack["ChangeSetName"].should.equal("NewChangeSet")
|
||||
stack["StackName"].should.equal("NewStack")
|
||||
stack["Status"].should.equal("REVIEW_IN_PROGRESS")
|
||||
|
||||
cf_conn.create_change_set(
|
||||
StackName="NewStack",
|
||||
@ -851,15 +853,30 @@ def test_describe_change_set():
|
||||
|
||||
|
||||
@mock_cloudformation
|
||||
@mock_ec2
|
||||
def test_execute_change_set_w_arn():
|
||||
cf_conn = boto3.client("cloudformation", region_name="us-east-1")
|
||||
ec2 = boto3.client("ec2", region_name="us-east-1")
|
||||
# Verify no instances exist at the moment
|
||||
ec2.describe_instances()["Reservations"].should.have.length_of(0)
|
||||
# Create a Change set, and verify no resources have been created yet
|
||||
change_set = cf_conn.create_change_set(
|
||||
StackName="NewStack",
|
||||
TemplateBody=dummy_template_json,
|
||||
ChangeSetName="NewChangeSet",
|
||||
ChangeSetType="CREATE",
|
||||
)
|
||||
ec2.describe_instances()["Reservations"].should.have.length_of(0)
|
||||
cf_conn.describe_change_set(ChangeSetName="NewChangeSet")["Status"].should.equal(
|
||||
"REVIEW_IN_PROGRESS"
|
||||
)
|
||||
# Execute change set
|
||||
cf_conn.execute_change_set(ChangeSetName=change_set["Id"])
|
||||
# Verify that the status has changed, and the appropriate resources have been created
|
||||
cf_conn.describe_change_set(ChangeSetName="NewChangeSet")["Status"].should.equal(
|
||||
"CREATE_COMPLETE"
|
||||
)
|
||||
ec2.describe_instances()["Reservations"].should.have.length_of(1)
|
||||
|
||||
|
||||
@mock_cloudformation
|
||||
|
@ -1,9 +1,10 @@
|
||||
import boto
|
||||
from boto.ec2.cloudwatch.alarm import MetricAlarm
|
||||
from boto.s3.key import Key
|
||||
from datetime import datetime
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_cloudwatch_deprecated
|
||||
from moto import mock_cloudwatch_deprecated, mock_s3_deprecated
|
||||
|
||||
|
||||
def alarm_fixture(name="tester", action=None):
|
||||
@ -83,7 +84,8 @@ def test_put_metric_data():
|
||||
)
|
||||
|
||||
metrics = conn.list_metrics()
|
||||
metrics.should.have.length_of(1)
|
||||
metric_names = [m for m in metrics if m.name == "metric"]
|
||||
metric_names.should.have(1)
|
||||
metric = metrics[0]
|
||||
metric.namespace.should.equal("tester")
|
||||
metric.name.should.equal("metric")
|
||||
@ -153,3 +155,36 @@ def test_get_metric_statistics():
|
||||
datapoint = datapoints[0]
|
||||
datapoint.should.have.key("Minimum").which.should.equal(1.5)
|
||||
datapoint.should.have.key("Timestamp").which.should.equal(metric_timestamp)
|
||||
|
||||
|
||||
# TODO: THIS IS CURRENTLY BROKEN!
|
||||
# @mock_s3_deprecated
|
||||
# @mock_cloudwatch_deprecated
|
||||
# def test_cloudwatch_return_s3_metrics():
|
||||
#
|
||||
# region = "us-east-1"
|
||||
#
|
||||
# cw = boto.ec2.cloudwatch.connect_to_region(region)
|
||||
# s3 = boto.s3.connect_to_region(region)
|
||||
#
|
||||
# bucket_name_1 = "test-bucket-1"
|
||||
# bucket_name_2 = "test-bucket-2"
|
||||
#
|
||||
# bucket1 = s3.create_bucket(bucket_name=bucket_name_1)
|
||||
# key = Key(bucket1)
|
||||
# key.key = "the-key"
|
||||
# key.set_contents_from_string("foobar" * 4)
|
||||
# s3.create_bucket(bucket_name=bucket_name_2)
|
||||
#
|
||||
# metrics_s3_bucket_1 = cw.list_metrics(dimensions={"BucketName": bucket_name_1})
|
||||
# # Verify that the OOTB S3 metrics are available for the created buckets
|
||||
# len(metrics_s3_bucket_1).should.be(2)
|
||||
# metric_names = [m.name for m in metrics_s3_bucket_1]
|
||||
# sorted(metric_names).should.equal(
|
||||
# ["Metric:BucketSizeBytes", "Metric:NumberOfObjects"]
|
||||
# )
|
||||
#
|
||||
# # Explicit clean up - the metrics for these buckets are messing with subsequent tests
|
||||
# key.delete()
|
||||
# s3.delete_bucket(bucket_name_1)
|
||||
# s3.delete_bucket(bucket_name_2)
|
||||
|
@ -3,6 +3,7 @@
|
||||
import boto3
|
||||
from botocore.exceptions import ClientError
|
||||
from datetime import datetime, timedelta
|
||||
from freezegun import freeze_time
|
||||
from nose.tools import assert_raises
|
||||
from uuid import uuid4
|
||||
import pytz
|
||||
@ -211,6 +212,35 @@ def test_get_metric_statistics():
|
||||
datapoint["Sum"].should.equal(1.5)
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
@freeze_time("2020-02-10 18:44:05")
|
||||
def test_custom_timestamp():
|
||||
utc_now = datetime.now(tz=pytz.utc)
|
||||
time = "2020-02-10T18:44:09Z"
|
||||
cw = boto3.client("cloudwatch", "eu-west-1")
|
||||
|
||||
cw.put_metric_data(
|
||||
Namespace="tester",
|
||||
MetricData=[dict(MetricName="metric1", Value=1.5, Timestamp=time)],
|
||||
)
|
||||
|
||||
cw.put_metric_data(
|
||||
Namespace="tester",
|
||||
MetricData=[
|
||||
dict(MetricName="metric2", Value=1.5, Timestamp=datetime(2020, 2, 10))
|
||||
],
|
||||
)
|
||||
|
||||
stats = cw.get_metric_statistics(
|
||||
Namespace="tester",
|
||||
MetricName="metric",
|
||||
StartTime=utc_now - timedelta(seconds=60),
|
||||
EndTime=utc_now + timedelta(seconds=60),
|
||||
Period=60,
|
||||
Statistics=["SampleCount", "Sum"],
|
||||
)
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_list_metrics():
|
||||
cloudwatch = boto3.client("cloudwatch", "eu-west-1")
|
||||
@ -233,8 +263,16 @@ def test_list_metrics():
|
||||
# Verify format
|
||||
res.should.equal(
|
||||
[
|
||||
{u"Namespace": "list_test_1/", u"Dimensions": [], u"MetricName": "metric1"},
|
||||
{u"Namespace": "list_test_1/", u"Dimensions": [], u"MetricName": "metric1"},
|
||||
{
|
||||
u"Namespace": "list_test_1/",
|
||||
u"Dimensions": [],
|
||||
u"MetricName": "metric1",
|
||||
},
|
||||
{
|
||||
u"Namespace": "list_test_1/",
|
||||
u"Dimensions": [],
|
||||
u"MetricName": "metric1",
|
||||
},
|
||||
]
|
||||
)
|
||||
# Verify unknown namespace still has no results
|
||||
@ -292,3 +330,232 @@ def create_metrics(cloudwatch, namespace, metrics=5, data_points=5):
|
||||
Namespace=namespace,
|
||||
MetricData=[{"MetricName": metric_name, "Value": j, "Unit": "Seconds"}],
|
||||
)
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_get_metric_data_within_timeframe():
|
||||
utc_now = datetime.now(tz=pytz.utc)
|
||||
cloudwatch = boto3.client("cloudwatch", "eu-west-1")
|
||||
namespace1 = "my_namespace/"
|
||||
# put metric data
|
||||
values = [0, 2, 4, 3.5, 7, 100]
|
||||
cloudwatch.put_metric_data(
|
||||
Namespace=namespace1,
|
||||
MetricData=[
|
||||
{"MetricName": "metric1", "Value": val, "Unit": "Seconds"} for val in values
|
||||
],
|
||||
)
|
||||
# get_metric_data
|
||||
stats = ["Average", "Sum", "Minimum", "Maximum"]
|
||||
response = cloudwatch.get_metric_data(
|
||||
MetricDataQueries=[
|
||||
{
|
||||
"Id": "result_" + stat,
|
||||
"MetricStat": {
|
||||
"Metric": {"Namespace": namespace1, "MetricName": "metric1"},
|
||||
"Period": 60,
|
||||
"Stat": stat,
|
||||
},
|
||||
}
|
||||
for stat in stats
|
||||
],
|
||||
StartTime=utc_now - timedelta(seconds=60),
|
||||
EndTime=utc_now + timedelta(seconds=60),
|
||||
)
|
||||
#
|
||||
# Assert Average/Min/Max/Sum is returned as expected
|
||||
avg = [
|
||||
res for res in response["MetricDataResults"] if res["Id"] == "result_Average"
|
||||
][0]
|
||||
avg["Label"].should.equal("metric1 Average")
|
||||
avg["StatusCode"].should.equal("Complete")
|
||||
[int(val) for val in avg["Values"]].should.equal([19])
|
||||
|
||||
sum_ = [res for res in response["MetricDataResults"] if res["Id"] == "result_Sum"][
|
||||
0
|
||||
]
|
||||
sum_["Label"].should.equal("metric1 Sum")
|
||||
sum_["StatusCode"].should.equal("Complete")
|
||||
[val for val in sum_["Values"]].should.equal([sum(values)])
|
||||
|
||||
min_ = [
|
||||
res for res in response["MetricDataResults"] if res["Id"] == "result_Minimum"
|
||||
][0]
|
||||
min_["Label"].should.equal("metric1 Minimum")
|
||||
min_["StatusCode"].should.equal("Complete")
|
||||
[int(val) for val in min_["Values"]].should.equal([0])
|
||||
|
||||
max_ = [
|
||||
res for res in response["MetricDataResults"] if res["Id"] == "result_Maximum"
|
||||
][0]
|
||||
max_["Label"].should.equal("metric1 Maximum")
|
||||
max_["StatusCode"].should.equal("Complete")
|
||||
[int(val) for val in max_["Values"]].should.equal([100])
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_get_metric_data_partially_within_timeframe():
|
||||
utc_now = datetime.now(tz=pytz.utc)
|
||||
yesterday = utc_now - timedelta(days=1)
|
||||
last_week = utc_now - timedelta(days=7)
|
||||
cloudwatch = boto3.client("cloudwatch", "eu-west-1")
|
||||
namespace1 = "my_namespace/"
|
||||
# put metric data
|
||||
values = [0, 2, 4, 3.5, 7, 100]
|
||||
cloudwatch.put_metric_data(
|
||||
Namespace=namespace1,
|
||||
MetricData=[
|
||||
{
|
||||
"MetricName": "metric1",
|
||||
"Value": 10,
|
||||
"Unit": "Seconds",
|
||||
"Timestamp": utc_now,
|
||||
}
|
||||
],
|
||||
)
|
||||
cloudwatch.put_metric_data(
|
||||
Namespace=namespace1,
|
||||
MetricData=[
|
||||
{
|
||||
"MetricName": "metric1",
|
||||
"Value": 20,
|
||||
"Unit": "Seconds",
|
||||
"Timestamp": yesterday,
|
||||
}
|
||||
],
|
||||
)
|
||||
cloudwatch.put_metric_data(
|
||||
Namespace=namespace1,
|
||||
MetricData=[
|
||||
{
|
||||
"MetricName": "metric1",
|
||||
"Value": 50,
|
||||
"Unit": "Seconds",
|
||||
"Timestamp": last_week,
|
||||
}
|
||||
],
|
||||
)
|
||||
# get_metric_data
|
||||
response = cloudwatch.get_metric_data(
|
||||
MetricDataQueries=[
|
||||
{
|
||||
"Id": "result",
|
||||
"MetricStat": {
|
||||
"Metric": {"Namespace": namespace1, "MetricName": "metric1"},
|
||||
"Period": 60,
|
||||
"Stat": "Sum",
|
||||
},
|
||||
}
|
||||
],
|
||||
StartTime=yesterday - timedelta(seconds=60),
|
||||
EndTime=utc_now + timedelta(seconds=60),
|
||||
)
|
||||
#
|
||||
# Assert Last week's data is not returned
|
||||
len(response["MetricDataResults"]).should.equal(1)
|
||||
sum_ = response["MetricDataResults"][0]
|
||||
sum_["Label"].should.equal("metric1 Sum")
|
||||
sum_["StatusCode"].should.equal("Complete")
|
||||
sum_["Values"].should.equal([30.0])
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_get_metric_data_outside_timeframe():
|
||||
utc_now = datetime.now(tz=pytz.utc)
|
||||
last_week = utc_now - timedelta(days=7)
|
||||
cloudwatch = boto3.client("cloudwatch", "eu-west-1")
|
||||
namespace1 = "my_namespace/"
|
||||
# put metric data
|
||||
cloudwatch.put_metric_data(
|
||||
Namespace=namespace1,
|
||||
MetricData=[
|
||||
{
|
||||
"MetricName": "metric1",
|
||||
"Value": 50,
|
||||
"Unit": "Seconds",
|
||||
"Timestamp": last_week,
|
||||
}
|
||||
],
|
||||
)
|
||||
# get_metric_data
|
||||
response = cloudwatch.get_metric_data(
|
||||
MetricDataQueries=[
|
||||
{
|
||||
"Id": "result",
|
||||
"MetricStat": {
|
||||
"Metric": {"Namespace": namespace1, "MetricName": "metric1"},
|
||||
"Period": 60,
|
||||
"Stat": "Sum",
|
||||
},
|
||||
}
|
||||
],
|
||||
StartTime=utc_now - timedelta(seconds=60),
|
||||
EndTime=utc_now + timedelta(seconds=60),
|
||||
)
|
||||
#
|
||||
# Assert Last week's data is not returned
|
||||
len(response["MetricDataResults"]).should.equal(1)
|
||||
response["MetricDataResults"][0]["Id"].should.equal("result")
|
||||
response["MetricDataResults"][0]["StatusCode"].should.equal("Complete")
|
||||
response["MetricDataResults"][0]["Values"].should.equal([])
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_get_metric_data_for_multiple_metrics():
|
||||
utc_now = datetime.now(tz=pytz.utc)
|
||||
cloudwatch = boto3.client("cloudwatch", "eu-west-1")
|
||||
namespace = "my_namespace/"
|
||||
# put metric data
|
||||
cloudwatch.put_metric_data(
|
||||
Namespace=namespace,
|
||||
MetricData=[
|
||||
{
|
||||
"MetricName": "metric1",
|
||||
"Value": 50,
|
||||
"Unit": "Seconds",
|
||||
"Timestamp": utc_now,
|
||||
}
|
||||
],
|
||||
)
|
||||
cloudwatch.put_metric_data(
|
||||
Namespace=namespace,
|
||||
MetricData=[
|
||||
{
|
||||
"MetricName": "metric2",
|
||||
"Value": 25,
|
||||
"Unit": "Seconds",
|
||||
"Timestamp": utc_now,
|
||||
}
|
||||
],
|
||||
)
|
||||
# get_metric_data
|
||||
response = cloudwatch.get_metric_data(
|
||||
MetricDataQueries=[
|
||||
{
|
||||
"Id": "result1",
|
||||
"MetricStat": {
|
||||
"Metric": {"Namespace": namespace, "MetricName": "metric1"},
|
||||
"Period": 60,
|
||||
"Stat": "Sum",
|
||||
},
|
||||
},
|
||||
{
|
||||
"Id": "result2",
|
||||
"MetricStat": {
|
||||
"Metric": {"Namespace": namespace, "MetricName": "metric2"},
|
||||
"Period": 60,
|
||||
"Stat": "Sum",
|
||||
},
|
||||
},
|
||||
],
|
||||
StartTime=utc_now - timedelta(seconds=60),
|
||||
EndTime=utc_now + timedelta(seconds=60),
|
||||
)
|
||||
#
|
||||
len(response["MetricDataResults"]).should.equal(2)
|
||||
|
||||
res1 = [res for res in response["MetricDataResults"] if res["Id"] == "result1"][0]
|
||||
res1["Values"].should.equal([50.0])
|
||||
|
||||
res2 = [res for res in response["MetricDataResults"] if res["Id"] == "result2"][0]
|
||||
res2["Values"].should.equal([25.0])
|
||||
|
@ -7,6 +7,7 @@ from nose.tools import assert_raises
|
||||
from moto import mock_cognitoidentity
|
||||
from moto.cognitoidentity.utils import get_random_identity_id
|
||||
from moto.core import ACCOUNT_ID
|
||||
from uuid import UUID
|
||||
|
||||
|
||||
@mock_cognitoidentity
|
||||
@ -83,8 +84,10 @@ def test_describe_identity_pool_with_invalid_id_raises_error():
|
||||
|
||||
# testing a helper function
|
||||
def test_get_random_identity_id():
|
||||
assert len(get_random_identity_id("us-west-2")) > 0
|
||||
assert len(get_random_identity_id("us-west-2").split(":")[1]) == 19
|
||||
identity_id = get_random_identity_id("us-west-2")
|
||||
region, id = identity_id.split(":")
|
||||
region.should.equal("us-west-2")
|
||||
UUID(id, version=4) # Will throw an error if it's not a valid UUID
|
||||
|
||||
|
||||
@mock_cognitoidentity
|
||||
@ -96,7 +99,6 @@ def test_get_id():
|
||||
IdentityPoolId="us-west-2:12345",
|
||||
Logins={"someurl": "12345"},
|
||||
)
|
||||
print(result)
|
||||
assert (
|
||||
result.get("IdentityId", "").startswith("us-west-2")
|
||||
or result.get("ResponseMetadata").get("HTTPStatusCode") == 200
|
||||
|
@ -48,6 +48,5 @@ def test_get_id():
|
||||
},
|
||||
)
|
||||
|
||||
print(res.data)
|
||||
json_data = json.loads(res.data.decode("utf-8"))
|
||||
assert ":" in json_data["IdentityId"]
|
||||
|
@ -11,6 +11,8 @@ from moto import mock_s3
|
||||
from moto.config import mock_config
|
||||
from moto.core import ACCOUNT_ID
|
||||
|
||||
import sure # noqa
|
||||
|
||||
|
||||
@mock_config
|
||||
def test_put_configuration_recorder():
|
||||
|
@ -1,21 +1,17 @@
|
||||
from __future__ import unicode_literals, print_function
|
||||
|
||||
import re
|
||||
from decimal import Decimal
|
||||
|
||||
import six
|
||||
import boto
|
||||
import boto3
|
||||
from boto3.dynamodb.conditions import Attr, Key
|
||||
import re
|
||||
import requests
|
||||
import sure # noqa
|
||||
from moto import mock_dynamodb2, mock_dynamodb2_deprecated
|
||||
from moto.dynamodb2 import dynamodb_backend2, dynamodb_backends2
|
||||
from boto.exception import JSONResponseError
|
||||
from botocore.exceptions import ClientError, ParamValidationError
|
||||
from tests.helpers import requires_boto_gte
|
||||
import tests.backport_assert_raises
|
||||
|
||||
import moto.dynamodb2.comparisons
|
||||
import moto.dynamodb2.models
|
||||
@ -1454,6 +1450,13 @@ def test_filter_expression():
|
||||
filter_expr.expr(row1).should.be(True)
|
||||
filter_expr.expr(row2).should.be(False)
|
||||
|
||||
# lowercase AND test
|
||||
filter_expr = moto.dynamodb2.comparisons.get_filter_expression(
|
||||
"Id > :v0 and Subs < :v1", {}, {":v0": {"N": "5"}, ":v1": {"N": "7"}}
|
||||
)
|
||||
filter_expr.expr(row1).should.be(True)
|
||||
filter_expr.expr(row2).should.be(False)
|
||||
|
||||
# OR test
|
||||
filter_expr = moto.dynamodb2.comparisons.get_filter_expression(
|
||||
"Id = :v0 OR Id=:v1", {}, {":v0": {"N": "5"}, ":v1": {"N": "8"}}
|
||||
@ -2785,7 +2788,7 @@ def test_query_gsi_with_range_key():
|
||||
res = dynamodb.query(
|
||||
TableName="test",
|
||||
IndexName="test_gsi",
|
||||
KeyConditionExpression="gsi_hash_key = :gsi_hash_key AND gsi_range_key = :gsi_range_key",
|
||||
KeyConditionExpression="gsi_hash_key = :gsi_hash_key and gsi_range_key = :gsi_range_key",
|
||||
ExpressionAttributeValues={
|
||||
":gsi_hash_key": {"S": "key1"},
|
||||
":gsi_range_key": {"S": "range1"},
|
||||
@ -3214,6 +3217,25 @@ def test_remove_top_level_attribute():
|
||||
result.should.equal({"id": {"S": "foo"}})
|
||||
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_remove_top_level_attribute_non_existent():
|
||||
"""
|
||||
Remove statements do not require attribute to exist they silently pass
|
||||
"""
|
||||
table_name = "test_remove"
|
||||
client = create_table_with_list(table_name)
|
||||
ddb_item = {"id": {"S": "foo"}, "item": {"S": "bar"}}
|
||||
client.put_item(TableName=table_name, Item=ddb_item)
|
||||
client.update_item(
|
||||
TableName=table_name,
|
||||
Key={"id": {"S": "foo"}},
|
||||
UpdateExpression="REMOVE non_existent_attribute",
|
||||
ExpressionAttributeNames={"#i": "item"},
|
||||
)
|
||||
result = client.get_item(TableName=table_name, Key={"id": {"S": "foo"}})["Item"]
|
||||
result.should.equal(ddb_item)
|
||||
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_remove_list_index__remove_existing_index():
|
||||
table_name = "test_list_index_access"
|
||||
@ -4212,6 +4234,396 @@ def test_gsi_verify_negative_number_order():
|
||||
)
|
||||
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_transact_write_items_put():
|
||||
table_schema = {
|
||||
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
|
||||
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
|
||||
}
|
||||
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
|
||||
dynamodb.create_table(
|
||||
TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema
|
||||
)
|
||||
# Put multiple items
|
||||
dynamodb.transact_write_items(
|
||||
TransactItems=[
|
||||
{
|
||||
"Put": {
|
||||
"Item": {"id": {"S": "foo{}".format(str(i))}, "foo": {"S": "bar"},},
|
||||
"TableName": "test-table",
|
||||
}
|
||||
}
|
||||
for i in range(0, 5)
|
||||
]
|
||||
)
|
||||
# Assert all are present
|
||||
items = dynamodb.scan(TableName="test-table")["Items"]
|
||||
items.should.have.length_of(5)
|
||||
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_transact_write_items_put_conditional_expressions():
|
||||
table_schema = {
|
||||
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
|
||||
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
|
||||
}
|
||||
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
|
||||
dynamodb.create_table(
|
||||
TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema
|
||||
)
|
||||
dynamodb.put_item(
|
||||
TableName="test-table", Item={"id": {"S": "foo2"},},
|
||||
)
|
||||
# Put multiple items
|
||||
with assert_raises(ClientError) as ex:
|
||||
dynamodb.transact_write_items(
|
||||
TransactItems=[
|
||||
{
|
||||
"Put": {
|
||||
"Item": {
|
||||
"id": {"S": "foo{}".format(str(i))},
|
||||
"foo": {"S": "bar"},
|
||||
},
|
||||
"TableName": "test-table",
|
||||
"ConditionExpression": "#i <> :i",
|
||||
"ExpressionAttributeNames": {"#i": "id"},
|
||||
"ExpressionAttributeValues": {
|
||||
":i": {
|
||||
"S": "foo2"
|
||||
} # This item already exist, so the ConditionExpression should fail
|
||||
},
|
||||
}
|
||||
}
|
||||
for i in range(0, 5)
|
||||
]
|
||||
)
|
||||
# Assert the exception is correct
|
||||
ex.exception.response["Error"]["Code"].should.equal(
|
||||
"ConditionalCheckFailedException"
|
||||
)
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
"A condition specified in the operation could not be evaluated."
|
||||
)
|
||||
# Assert all are present
|
||||
items = dynamodb.scan(TableName="test-table")["Items"]
|
||||
items.should.have.length_of(1)
|
||||
items[0].should.equal({"id": {"S": "foo2"}})
|
||||
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_transact_write_items_conditioncheck_passes():
|
||||
table_schema = {
|
||||
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
|
||||
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
|
||||
}
|
||||
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
|
||||
dynamodb.create_table(
|
||||
TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema
|
||||
)
|
||||
# Insert an item without email address
|
||||
dynamodb.put_item(
|
||||
TableName="test-table", Item={"id": {"S": "foo"},},
|
||||
)
|
||||
# Put an email address, after verifying it doesn't exist yet
|
||||
dynamodb.transact_write_items(
|
||||
TransactItems=[
|
||||
{
|
||||
"ConditionCheck": {
|
||||
"Key": {"id": {"S": "foo"}},
|
||||
"TableName": "test-table",
|
||||
"ConditionExpression": "attribute_not_exists(#e)",
|
||||
"ExpressionAttributeNames": {"#e": "email_address"},
|
||||
}
|
||||
},
|
||||
{
|
||||
"Put": {
|
||||
"Item": {
|
||||
"id": {"S": "foo"},
|
||||
"email_address": {"S": "test@moto.com"},
|
||||
},
|
||||
"TableName": "test-table",
|
||||
}
|
||||
},
|
||||
]
|
||||
)
|
||||
# Assert all are present
|
||||
items = dynamodb.scan(TableName="test-table")["Items"]
|
||||
items.should.have.length_of(1)
|
||||
items[0].should.equal({"email_address": {"S": "test@moto.com"}, "id": {"S": "foo"}})
|
||||
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_transact_write_items_conditioncheck_fails():
|
||||
table_schema = {
|
||||
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
|
||||
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
|
||||
}
|
||||
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
|
||||
dynamodb.create_table(
|
||||
TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema
|
||||
)
|
||||
# Insert an item with email address
|
||||
dynamodb.put_item(
|
||||
TableName="test-table",
|
||||
Item={"id": {"S": "foo"}, "email_address": {"S": "test@moto.com"}},
|
||||
)
|
||||
# Try to put an email address, but verify whether it exists
|
||||
# ConditionCheck should fail
|
||||
with assert_raises(ClientError) as ex:
|
||||
dynamodb.transact_write_items(
|
||||
TransactItems=[
|
||||
{
|
||||
"ConditionCheck": {
|
||||
"Key": {"id": {"S": "foo"}},
|
||||
"TableName": "test-table",
|
||||
"ConditionExpression": "attribute_not_exists(#e)",
|
||||
"ExpressionAttributeNames": {"#e": "email_address"},
|
||||
}
|
||||
},
|
||||
{
|
||||
"Put": {
|
||||
"Item": {
|
||||
"id": {"S": "foo"},
|
||||
"email_address": {"S": "update@moto.com"},
|
||||
},
|
||||
"TableName": "test-table",
|
||||
}
|
||||
},
|
||||
]
|
||||
)
|
||||
# Assert the exception is correct
|
||||
ex.exception.response["Error"]["Code"].should.equal(
|
||||
"ConditionalCheckFailedException"
|
||||
)
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
"A condition specified in the operation could not be evaluated."
|
||||
)
|
||||
|
||||
# Assert the original email address is still present
|
||||
items = dynamodb.scan(TableName="test-table")["Items"]
|
||||
items.should.have.length_of(1)
|
||||
items[0].should.equal({"email_address": {"S": "test@moto.com"}, "id": {"S": "foo"}})
|
||||
|
||||
|
||||
@mock_dynamodb2
def test_transact_write_items_delete():
    """A transactional Delete removes the targeted item."""
    client = boto3.client("dynamodb", region_name="us-east-1")
    client.create_table(
        TableName="test-table",
        BillingMode="PAY_PER_REQUEST",
        KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
    )
    # Seed a single item, then delete it transactionally.
    client.put_item(TableName="test-table", Item={"id": {"S": "foo"}})
    client.transact_write_items(
        TransactItems=[
            {"Delete": {"Key": {"id": {"S": "foo"}}, "TableName": "test-table"}}
        ]
    )
    # The table must now be empty.
    client.scan(TableName="test-table")["Items"].should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_dynamodb2
def test_transact_write_items_delete_with_successful_condition_expression():
    """A transactional Delete whose ConditionExpression holds removes the item."""
    client = boto3.client("dynamodb", region_name="us-east-1")
    client.create_table(
        TableName="test-table",
        BillingMode="PAY_PER_REQUEST",
        KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
    )
    # The seeded item deliberately has no email address.
    client.put_item(TableName="test-table", Item={"id": {"S": "foo"}})
    # attribute_not_exists(email_address) is satisfied, so the delete succeeds.
    delete = {
        "Delete": {
            "Key": {"id": {"S": "foo"}},
            "TableName": "test-table",
            "ConditionExpression": "attribute_not_exists(#e)",
            "ExpressionAttributeNames": {"#e": "email_address"},
        }
    }
    client.transact_write_items(TransactItems=[delete])
    # The item is gone.
    client.scan(TableName="test-table")["Items"].should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_dynamodb2
def test_transact_write_items_delete_with_failed_condition_expression():
    """A transactional Delete whose ConditionExpression fails leaves the item in place."""
    client = boto3.client("dynamodb", region_name="us-east-1")
    client.create_table(
        TableName="test-table",
        BillingMode="PAY_PER_REQUEST",
        KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
    )
    # Seed an item that does have an email address.
    client.put_item(
        TableName="test-table",
        Item={"id": {"S": "foo"}, "email_address": {"S": "test@moto.com"}},
    )
    # The delete requires that no email address exists - one does, so it fails.
    delete = {
        "Delete": {
            "Key": {"id": {"S": "foo"}},
            "TableName": "test-table",
            "ConditionExpression": "attribute_not_exists(#e)",
            "ExpressionAttributeNames": {"#e": "email_address"},
        }
    }
    with assert_raises(ClientError) as ex:
        client.transact_write_items(TransactItems=[delete])
    # Verify the error details.
    error = ex.exception.response["Error"]
    error["Code"].should.equal("ConditionalCheckFailedException")
    ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
    error["Message"].should.equal(
        "A condition specified in the operation could not be evaluated."
    )
    # The original item must be untouched.
    items = client.scan(TableName="test-table")["Items"]
    items.should.have.length_of(1)
    items[0].should.equal({"email_address": {"S": "test@moto.com"}, "id": {"S": "foo"}})
|
||||
|
||||
|
||||
@mock_dynamodb2
def test_transact_write_items_update():
    """A transactional Update applies its UpdateExpression to the item."""
    client = boto3.client("dynamodb", region_name="us-east-1")
    client.create_table(
        TableName="test-table",
        BillingMode="PAY_PER_REQUEST",
        KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
    )
    client.put_item(TableName="test-table", Item={"id": {"S": "foo"}})
    # Set the email address through a transaction.
    update = {
        "Update": {
            "Key": {"id": {"S": "foo"}},
            "TableName": "test-table",
            "UpdateExpression": "SET #e = :v",
            "ExpressionAttributeNames": {"#e": "email_address"},
            "ExpressionAttributeValues": {":v": {"S": "test@moto.com"}},
        }
    }
    client.transact_write_items(TransactItems=[update])
    # The attribute was added to the existing item.
    items = client.scan(TableName="test-table")["Items"]
    items.should.have.length_of(1)
    items[0].should.equal({"id": {"S": "foo"}, "email_address": {"S": "test@moto.com"}})
|
||||
|
||||
|
||||
@mock_dynamodb2
def test_transact_write_items_update_with_failed_condition_expression():
    """A transactional Update whose ConditionExpression fails leaves the item unchanged."""
    client = boto3.client("dynamodb", region_name="us-east-1")
    client.create_table(
        TableName="test-table",
        BillingMode="PAY_PER_REQUEST",
        KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
    )
    # Seed an item that already has an email address.
    client.put_item(
        TableName="test-table",
        Item={"id": {"S": "foo"}, "email_address": {"S": "test@moto.com"}},
    )
    # The update is guarded by attribute_not_exists(email_address) - it fails.
    update = {
        "Update": {
            "Key": {"id": {"S": "foo"}},
            "TableName": "test-table",
            "UpdateExpression": "SET #e = :v",
            "ConditionExpression": "attribute_not_exists(#e)",
            "ExpressionAttributeNames": {"#e": "email_address"},
            "ExpressionAttributeValues": {":v": {"S": "update@moto.com"}},
        }
    }
    with assert_raises(ClientError) as ex:
        client.transact_write_items(TransactItems=[update])
    # Verify the error details.
    error = ex.exception.response["Error"]
    error["Code"].should.equal("ConditionalCheckFailedException")
    ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
    error["Message"].should.equal(
        "A condition specified in the operation could not be evaluated."
    )
    # The original item must be unchanged.
    items = client.scan(TableName="test-table")["Items"]
    items.should.have.length_of(1)
    items[0].should.equal({"email_address": {"S": "test@moto.com"}, "id": {"S": "foo"}})
|
||||
|
||||
|
||||
@mock_dynamodb2
def test_dynamodb_max_1mb_limit():
    """A single query page is capped at 1MB, so a LastEvaluatedKey must be returned."""
    ddb = boto3.resource("dynamodb", region_name="eu-west-1")

    table = ddb.create_table(
        TableName="populated-mock-table",
        KeySchema=[
            {"AttributeName": "partition_key", "KeyType": "HASH"},
            {"AttributeName": "sort_key", "KeyType": "RANGE"},
        ],
        AttributeDefinitions=[
            {"AttributeName": "partition_key", "AttributeType": "S"},
            {"AttributeName": "sort_key", "AttributeType": "S"},
        ],
        BillingMode="PAY_PER_REQUEST",
    )

    # Write enough small items into one partition that the total size is well
    # over 1MB (each item is roughly 60 bytes of key material).
    items = [
        {
            "partition_key": "partition_key_val",  # size=30
            "sort_key": "sort_key_value____" + str(i),  # size=30
        }
        for i in range(10000, 29999)
    ]
    with table.batch_writer() as writer:
        for entry in items:
            writer.put_item(Item=entry)

    response = table.query(
        KeyConditionExpression=Key("partition_key").eq("partition_key_val")
    )
    # Not everything fits in one page, and pagination info must be present.
    len(items).should.be.greater_than(response["Count"])
    response["LastEvaluatedKey"].shouldnt.be(None)
|
||||
|
||||
|
||||
def assert_raise_syntax_error(client_error, token, near):
|
||||
"""
|
||||
Assert whether a client_error is as expected Syntax error. Syntax error looks like: `syntax_error_template`
|
||||
@ -4286,3 +4698,251 @@ def test_list_tables_exclusive_start_table_name_empty():
|
||||
resp = client.list_tables(Limit=1, ExclusiveStartTableName="whatever")
|
||||
|
||||
len(resp["TableNames"]).should.equal(0)
|
||||
|
||||
|
||||
def assert_correct_client_error(
    client_error, code, message_template, message_values=None, braces=None
):
    """
    Assert whether a client_error is as expected. Allow for a list of values to be passed into the message

    Args:
        client_error(ClientError): The ClientError exception that was raised
        code(str): The code for the error (e.g. ValidationException)
        message_template(str): Error message template. if message_values is not None then this template has a {values}
            as placeholder. For example:
            'Value provided in ExpressionAttributeValues unused in expressions: keys: {values}'
        message_values(list of str|None): The values that are passed in the error message
        braces(list of str|None): List of length 2 with opening and closing brace for the values. By default it will be
            surrounded by curly brackets
    """
    braces = braces or ["{", "}"]
    assert client_error.response["Error"]["Code"] == code
    if message_values is None:
        # No value list expected: the message must match the template verbatim.
        assert client_error.response["Error"]["Message"] == message_template
        return
    # Build a regex that captures the comma-separated values between the braces.
    values_string = "{open_brace}(?P<values>.*){close_brace}".format(
        open_brace=braces[0], close_brace=braces[1]
    )
    re_msg = re.compile(message_template.format(values=values_string))
    match_result = re_msg.match(client_error.response["Error"]["Message"])
    assert match_result is not None
    values = match_result.groupdict()["values"].split(", ")
    # The message must mention exactly the expected values, in any order.
    assert len(values) == len(message_values)
    for value in message_values:
        assert value in values
|
||||
|
||||
|
||||
def create_simple_table_and_return_client():
    """Create table "moto-test" (hash key "id"), seed one row, and return the client."""
    client = boto3.client("dynamodb", region_name="eu-west-1")
    client.create_table(
        TableName="moto-test",
        KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
        ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1},
    )
    # Seed the single row the update-expression tests operate on.
    client.put_item(
        TableName="moto-test",
        Item={"id": {"S": "1"}, "myNum": {"N": "1"}, "MyStr": {"S": "1"}},
    )
    return client
|
||||
|
||||
|
||||
# https://github.com/spulec/moto/issues/2806
|
||||
# https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateItem.html
|
||||
# #DDB-UpdateItem-request-UpdateExpression
|
||||
@mock_dynamodb2
def test_update_item_with_attribute_in_right_hand_side_and_operation():
    """An attribute may appear on the right-hand side of + and - inside SET."""
    dynamodb = create_simple_table_and_return_client()

    # myNum starts at 1; adding 3 gives 4.
    dynamodb.update_item(
        TableName="moto-test",
        Key={"id": {"S": "1"}},
        UpdateExpression="SET myNum = myNum+:val",
        ExpressionAttributeValues={":val": {"N": "3"}},
    )
    item = dynamodb.get_item(TableName="moto-test", Key={"id": {"S": "1"}})["Item"]
    assert item["myNum"]["N"] == "4"

    # Subtracting 1 brings it back down to 3.
    dynamodb.update_item(
        TableName="moto-test",
        Key={"id": {"S": "1"}},
        UpdateExpression="SET myNum = myNum - :val",
        ExpressionAttributeValues={":val": {"N": "1"}},
    )
    item = dynamodb.get_item(TableName="moto-test", Key={"id": {"S": "1"}})["Item"]
    assert item["myNum"]["N"] == "3"
|
||||
|
||||
|
||||
@mock_dynamodb2
def test_non_existing_attribute_should_raise_exception():
    """
    Referencing an attribute that is absent from the item must raise a ValidationException.
    """
    dynamodb = create_simple_table_and_return_client()

    raised = False
    try:
        dynamodb.update_item(
            TableName="moto-test",
            Key={"id": {"S": "1"}},
            UpdateExpression="SET MyStr = no_attr + MyStr",
        )
    except dynamodb.exceptions.ClientError as e:
        raised = True
        assert_correct_client_error(
            e,
            "ValidationException",
            "The provided expression refers to an attribute that does not exist in the item",
        )
    assert raised, "Validation exception not thrown"
|
||||
|
||||
|
||||
@mock_dynamodb2
def test_update_expression_with_plus_in_attribute_name():
    """
    An unescaped attribute name containing '+' is parsed as an addition of two
    unknown attributes, which must raise a ValidationException.
    """
    dynamodb = create_simple_table_and_return_client()

    dynamodb.put_item(
        TableName="moto-test",
        Item={"id": {"S": "1"}, "my+Num": {"S": "1"}, "MyStr": {"S": "aaa"}},
    )
    raised = False
    try:
        dynamodb.update_item(
            TableName="moto-test",
            Key={"id": {"S": "1"}},
            UpdateExpression="SET MyStr = my+Num",
        )
    except dynamodb.exceptions.ClientError as e:
        raised = True
        assert_correct_client_error(
            e,
            "ValidationException",
            "The provided expression refers to an attribute that does not exist in the item",
        )
    assert raised, "Validation exception not thrown"
|
||||
|
||||
|
||||
@mock_dynamodb2
def test_update_expression_with_minus_in_attribute_name():
    """
    An unescaped attribute name containing '-' is parsed as a subtraction of two
    unknown attributes, which must raise a ValidationException.
    """
    dynamodb = create_simple_table_and_return_client()

    dynamodb.put_item(
        TableName="moto-test",
        Item={"id": {"S": "1"}, "my-Num": {"S": "1"}, "MyStr": {"S": "aaa"}},
    )
    raised = False
    try:
        dynamodb.update_item(
            TableName="moto-test",
            Key={"id": {"S": "1"}},
            UpdateExpression="SET MyStr = my-Num",
        )
    except dynamodb.exceptions.ClientError as e:
        raised = True
        assert_correct_client_error(
            e,
            "ValidationException",
            "The provided expression refers to an attribute that does not exist in the item",
        )
    assert raised, "Validation exception not thrown"
|
||||
|
||||
|
||||
@mock_dynamodb2
def test_update_expression_with_space_in_attribute_name():
    """
    An unescaped attribute name containing a space cannot be parsed and must
    raise a syntax error pointing at the offending token.
    """
    dynamodb = create_simple_table_and_return_client()

    dynamodb.put_item(
        TableName="moto-test",
        Item={"id": {"S": "1"}, "my Num": {"S": "1"}, "MyStr": {"S": "aaa"}},
    )
    raised = False
    try:
        dynamodb.update_item(
            TableName="moto-test",
            Key={"id": {"S": "1"}},
            UpdateExpression="SET MyStr = my Num",
        )
    except dynamodb.exceptions.ClientError as e:
        raised = True
        assert_raise_syntax_error(e, "Num", "my Num")
    assert raised, "Validation exception not thrown"
|
||||
|
||||
|
||||
@mock_dynamodb2
def test_summing_up_2_strings_raises_exception():
    """
    SET with '+' only supports numeric operands; applying it to two strings must
    raise a ValidationException about the incorrect data type.
    """
    dynamodb = create_simple_table_and_return_client()

    raised = False
    try:
        dynamodb.update_item(
            TableName="moto-test",
            Key={"id": {"S": "1"}},
            UpdateExpression="SET MyStr = MyStr + MyStr",
        )
    except dynamodb.exceptions.ClientError as e:
        raised = True
        assert_correct_client_error(
            e,
            "ValidationException",
            "An operand in the update expression has an incorrect data type",
        )
    assert raised, "Validation exception not thrown"
|
||||
|
||||
|
||||
# https://github.com/spulec/moto/issues/2806
|
||||
@mock_dynamodb2
def test_update_item_with_attribute_in_right_hand_side():
    """A SET right-hand side that names another attribute copies its current value."""
    dynamodb = create_simple_table_and_return_client()

    # Start from two distinct values.
    dynamodb.put_item(
        TableName="moto-test",
        Item={"id": {"S": "1"}, "myVal1": {"S": "Value1"}, "myVal2": {"S": "Value2"}},
    )
    dynamodb.update_item(
        TableName="moto-test",
        Key={"id": {"S": "1"}},
        UpdateExpression="SET myVal1 = myVal2",
    )
    # myVal1 now carries myVal2's value.
    item = dynamodb.get_item(TableName="moto-test", Key={"id": {"S": "1"}})["Item"]
    assert item["myVal1"]["S"] == "Value2"
    assert item["myVal2"]["S"] == "Value2"
|
||||
|
||||
|
||||
@mock_dynamodb2
def test_multiple_updates():
    """All clauses in one SET read the item state from before the update."""
    dynamodb = create_simple_table_and_return_client()
    dynamodb.put_item(
        TableName="moto-test",
        Item={"id": {"S": "1"}, "myNum": {"N": "1"}, "path": {"N": "6"}},
    )
    dynamodb.update_item(
        TableName="moto-test",
        Key={"id": {"S": "1"}},
        UpdateExpression="SET myNum = #p + :val, newAttr = myNum",
        ExpressionAttributeValues={":val": {"N": "1"}},
        ExpressionAttributeNames={"#p": "path"},
    )
    result = dynamodb.get_item(TableName="moto-test", Key={"id": {"S": "1"}})["Item"]
    # newAttr received the OLD myNum (1), while myNum became path + 1 = 7.
    assert result == {
        "myNum": {"N": "7"},
        "newAttr": {"N": "1"},
        "path": {"N": "6"},
        "id": {"S": "1"},
    }
|
||||
|
446
tests/test_dynamodb2/test_dynamodb_executor.py
Normal file
446
tests/test_dynamodb2/test_dynamodb_executor.py
Normal file
@ -0,0 +1,446 @@
|
||||
from moto.dynamodb2.exceptions import IncorrectOperandType, IncorrectDataType
|
||||
from moto.dynamodb2.models import Item, DynamoType
|
||||
from moto.dynamodb2.parsing.executors import UpdateExpressionExecutor
|
||||
from moto.dynamodb2.parsing.expressions import UpdateExpressionParser
|
||||
from moto.dynamodb2.parsing.validators import UpdateExpressionValidator
|
||||
from parameterized import parameterized
|
||||
|
||||
|
||||
def test_execution_of_if_not_exists_not_existing_value():
    """if_not_exists falls back to the second operand when the first attribute is absent."""
    ast = UpdateExpressionParser.make("SET a = if_not_exists(b, a)")

    def make_item():
        return Item(
            hash_key=DynamoType({"S": "id"}),
            hash_key_type="TYPE",
            range_key=None,
            range_key_type=None,
            attrs={"id": {"S": "1"}, "a": {"S": "A"}},
        )

    item = make_item()
    validated = UpdateExpressionValidator(
        ast,
        expression_attribute_names=None,
        expression_attribute_values=None,
        item=item,
    ).validate()
    UpdateExpressionExecutor(validated, item, None).execute()
    # b does not exist, so a keeps its own value - the item is unchanged.
    assert item == make_item()
|
||||
|
||||
|
||||
def test_execution_of_if_not_exists_with_existing_attribute_should_return_attribute():
    """if_not_exists returns the first operand when that attribute exists."""
    ast = UpdateExpressionParser.make("SET a = if_not_exists(b, a)")

    def make_item(attrs):
        return Item(
            hash_key=DynamoType({"S": "id"}),
            hash_key_type="TYPE",
            range_key=None,
            range_key_type=None,
            attrs=attrs,
        )

    item = make_item({"id": {"S": "1"}, "a": {"S": "A"}, "b": {"S": "B"}})
    validated = UpdateExpressionValidator(
        ast,
        expression_attribute_names=None,
        expression_attribute_values=None,
        item=item,
    ).validate()
    UpdateExpressionExecutor(validated, item, None).execute()
    # b exists, so its value "B" is copied into a.
    assert item == make_item({"id": {"S": "1"}, "a": {"S": "B"}, "b": {"S": "B"}})
|
||||
|
||||
|
||||
def test_execution_of_if_not_exists_with_existing_attribute_should_return_value():
    """if_not_exists prefers the existing attribute over the fallback value."""
    ast = UpdateExpressionParser.make("SET a = if_not_exists(b, :val)")

    def make_item(attrs):
        return Item(
            hash_key=DynamoType({"S": "id"}),
            hash_key_type="TYPE",
            range_key=None,
            range_key_type=None,
            attrs=attrs,
        )

    item = make_item({"id": {"S": "1"}, "b": {"N": "3"}})
    validated = UpdateExpressionValidator(
        ast,
        expression_attribute_names=None,
        expression_attribute_values={":val": {"N": "4"}},
        item=item,
    ).validate()
    UpdateExpressionExecutor(validated, item, None).execute()
    # b exists, so a receives b's value rather than :val.
    assert item == make_item({"id": {"S": "1"}, "b": {"N": "3"}, "a": {"N": "3"}})
|
||||
|
||||
|
||||
def test_execution_of_if_not_exists_with_non_existing_attribute_should_return_value():
    """if_not_exists stores the fallback value when the attribute is absent."""
    ast = UpdateExpressionParser.make("SET a = if_not_exists(b, :val)")

    def make_item(attrs):
        return Item(
            hash_key=DynamoType({"S": "id"}),
            hash_key_type="TYPE",
            range_key=None,
            range_key_type=None,
            attrs=attrs,
        )

    item = make_item({"id": {"S": "1"}})
    validated = UpdateExpressionValidator(
        ast,
        expression_attribute_names=None,
        expression_attribute_values={":val": {"N": "4"}},
        item=item,
    ).validate()
    UpdateExpressionExecutor(validated, item, None).execute()
    # b is absent, so the fallback :val is stored in a.
    assert item == make_item({"id": {"S": "1"}, "a": {"N": "4"}})
|
||||
|
||||
|
||||
def test_execution_of_sum_operation():
    """SET a = a + b stores the numeric sum of both attributes in a."""
    ast = UpdateExpressionParser.make("SET a = a + b")

    def make_item(attrs):
        return Item(
            hash_key=DynamoType({"S": "id"}),
            hash_key_type="TYPE",
            range_key=None,
            range_key_type=None,
            attrs=attrs,
        )

    item = make_item({"id": {"S": "1"}, "a": {"N": "3"}, "b": {"N": "4"}})
    validated = UpdateExpressionValidator(
        ast,
        expression_attribute_names=None,
        expression_attribute_values=None,
        item=item,
    ).validate()
    UpdateExpressionExecutor(validated, item, None).execute()
    # 3 + 4 = 7; b is untouched.
    assert item == make_item({"id": {"S": "1"}, "a": {"N": "7"}, "b": {"N": "4"}})
|
||||
|
||||
|
||||
def test_execution_of_remove():
    """REMOVE deletes the named top-level attribute from the item."""
    ast = UpdateExpressionParser.make("Remove a")

    def make_item(attrs):
        return Item(
            hash_key=DynamoType({"S": "id"}),
            hash_key_type="TYPE",
            range_key=None,
            range_key_type=None,
            attrs=attrs,
        )

    item = make_item({"id": {"S": "1"}, "a": {"N": "3"}, "b": {"N": "4"}})
    validated = UpdateExpressionValidator(
        ast,
        expression_attribute_names=None,
        expression_attribute_values=None,
        item=item,
    ).validate()
    UpdateExpressionExecutor(validated, item, None).execute()
    # Only a was removed.
    assert item == make_item({"id": {"S": "1"}, "b": {"N": "4"}})
|
||||
|
||||
|
||||
def test_execution_of_remove_in_map():
    """REMOVE can target a key nested inside a map/list document path."""
    ast = UpdateExpressionParser.make("Remove itemmap.itemlist[1].foo11")

    def make_item(second_entry):
        return Item(
            hash_key=DynamoType({"S": "id"}),
            hash_key_type="TYPE",
            range_key=None,
            range_key_type=None,
            attrs={
                "id": {"S": "foo2"},
                "itemmap": {
                    "M": {
                        "itemlist": {
                            "L": [
                                {"M": {"foo00": {"S": "bar1"}, "foo01": {"S": "bar2"}}},
                                second_entry,
                            ]
                        }
                    }
                },
            },
        )

    item = make_item({"M": {"foo10": {"S": "bar1"}, "foo11": {"S": "bar2"}}})
    validated = UpdateExpressionValidator(
        ast,
        expression_attribute_names=None,
        expression_attribute_values=None,
        item=item,
    ).validate()
    UpdateExpressionExecutor(validated, item, None).execute()
    # Only foo11 inside the second list entry was removed.
    assert item == make_item({"M": {"foo10": {"S": "bar1"}}})
|
||||
|
||||
|
||||
def test_execution_of_remove_in_list():
    """REMOVE with a list index deletes that element from the nested list."""
    ast = UpdateExpressionParser.make("Remove itemmap.itemlist[1]")

    def make_item(entries):
        return Item(
            hash_key=DynamoType({"S": "id"}),
            hash_key_type="TYPE",
            range_key=None,
            range_key_type=None,
            attrs={
                "id": {"S": "foo2"},
                "itemmap": {"M": {"itemlist": {"L": entries}}},
            },
        )

    item = make_item(
        [
            {"M": {"foo00": {"S": "bar1"}, "foo01": {"S": "bar2"}}},
            {"M": {"foo10": {"S": "bar1"}, "foo11": {"S": "bar2"}}},
        ]
    )
    validated = UpdateExpressionValidator(
        ast,
        expression_attribute_names=None,
        expression_attribute_values=None,
        item=item,
    ).validate()
    UpdateExpressionExecutor(validated, item, None).execute()
    # Index 1 was removed, leaving only the first entry.
    assert item == make_item([{"M": {"foo00": {"S": "bar1"}, "foo01": {"S": "bar2"}}}])
|
||||
|
||||
|
||||
def test_execution_of_delete_element_from_set():
    """DELETE removes only the set members that are present; unknown ones are ignored."""
    ast = UpdateExpressionParser.make("delete s :value")

    def make_item(members):
        return Item(
            hash_key=DynamoType({"S": "id"}),
            hash_key_type="TYPE",
            range_key=None,
            range_key_type=None,
            attrs={"id": {"S": "foo2"}, "s": {"SS": members}},
        )

    item = make_item(["value1", "value2", "value3"])
    validated = UpdateExpressionValidator(
        ast,
        expression_attribute_names=None,
        expression_attribute_values={":value": {"SS": ["value2", "value5"]}},
        item=item,
    ).validate()
    UpdateExpressionExecutor(validated, item, None).execute()
    # value2 was removed; value5 was never in the set.
    assert item == make_item(["value1", "value3"])
|
||||
|
||||
|
||||
def test_execution_of_add_number():
    """ADD on a number attribute increments it by the supplied value."""
    ast = UpdateExpressionParser.make("add s :value")

    def make_item(number):
        return Item(
            hash_key=DynamoType({"S": "id"}),
            hash_key_type="TYPE",
            range_key=None,
            range_key_type=None,
            attrs={"id": {"S": "foo2"}, "s": {"N": number}},
        )

    item = make_item("5")
    validated = UpdateExpressionValidator(
        ast,
        expression_attribute_names=None,
        expression_attribute_values={":value": {"N": "10"}},
        item=item,
    ).validate()
    UpdateExpressionExecutor(validated, item, None).execute()
    # 5 + 10 = 15.
    assert item == make_item("15")
|
||||
|
||||
|
||||
def test_execution_of_add_set_to_a_number():
    """ADD with a set value on a number attribute must raise IncorrectDataType."""
    update_expression = "add s :value"
    update_expression_ast = UpdateExpressionParser.make(update_expression)
    item = Item(
        hash_key=DynamoType({"S": "id"}),
        hash_key_type="TYPE",
        range_key=None,
        range_key_type=None,
        attrs={"id": {"S": "foo2"}, "s": {"N": "5"}},
    )
    try:
        validated_ast = UpdateExpressionValidator(
            update_expression_ast,
            expression_attribute_names=None,
            expression_attribute_values={":value": {"SS": ["s1"]}},
            item=item,
        ).validate()
        UpdateExpressionExecutor(validated_ast, item, None).execute()
        # Previously this path asserted the item against a bogus expected value
        # before failing, which masked the real problem. If we get here at all,
        # the type mismatch was not detected - fail with a clear message.
        assert False, "Must raise IncorrectDataType"
    except IncorrectDataType:
        pass
|
||||
|
||||
|
||||
def test_execution_of_add_to_a_set():
    """ADD on a string set unions the supplied members into the set."""
    ast = UpdateExpressionParser.make("ADD s :value")

    def make_item(members):
        return Item(
            hash_key=DynamoType({"S": "id"}),
            hash_key_type="TYPE",
            range_key=None,
            range_key_type=None,
            attrs={"id": {"S": "foo2"}, "s": {"SS": members}},
        )

    item = make_item(["value1", "value2", "value3"])
    validated = UpdateExpressionValidator(
        ast,
        expression_attribute_names=None,
        expression_attribute_values={":value": {"SS": ["value2", "value5"]}},
        item=item,
    ).validate()
    UpdateExpressionExecutor(validated, item, None).execute()
    # value5 was added; value2 was already present.
    assert item == make_item(["value1", "value2", "value3", "value5"])
|
||||
|
||||
|
||||
@parameterized(
    [
        ({":value": {"S": "10"}}, "STRING",),
        ({":value": {"N": "10"}}, "NUMBER",),
        ({":value": {"B": "10"}}, "BINARY",),
        ({":value": {"BOOL": True}}, "BOOLEAN",),
        ({":value": {"NULL": True}}, "NULL",),
        ({":value": {"M": {"el0": {"S": "10"}}}}, "MAP",),
        ({":value": {"L": []}}, "LIST",),
    ]
)
def test_execution_of__delete_element_from_set_invalid_value(
    expression_attribute_values, unexpected_data_type
):
    """A delete statement must use a value of type SS in order to delete elements from a set."""
    ast = UpdateExpressionParser.make("delete s :value")
    item = Item(
        hash_key=DynamoType({"S": "id"}),
        hash_key_type="TYPE",
        range_key=None,
        range_key_type=None,
        attrs={"id": {"S": "foo2"}, "s": {"SS": ["value1", "value2", "value3"]}},
    )
    try:
        validated = UpdateExpressionValidator(
            ast,
            expression_attribute_names=None,
            expression_attribute_values=expression_attribute_values,
            item=item,
        ).validate()
        UpdateExpressionExecutor(validated, item, None).execute()
        assert False, "Must raise exception"
    except IncorrectOperandType as e:
        # The error reports both the operation and the offending value type.
        assert e.operator_or_function == "operator: DELETE"
        assert e.operand_type == unexpected_data_type
|
||||
|
||||
|
||||
def test_execution_of_delete_element_from_a_string_attribute():
|
||||
"""A delete statement must use a value of type SS in order to delete elements from a set."""
|
||||
update_expression = "delete s :value"
|
||||
update_expression_ast = UpdateExpressionParser.make(update_expression)
|
||||
item = Item(
|
||||
hash_key=DynamoType({"S": "id"}),
|
||||
hash_key_type="TYPE",
|
||||
range_key=None,
|
||||
range_key_type=None,
|
||||
attrs={"id": {"S": "foo2"}, "s": {"S": "5"},},
|
||||
)
|
||||
try:
|
||||
validated_ast = UpdateExpressionValidator(
|
||||
update_expression_ast,
|
||||
expression_attribute_names=None,
|
||||
expression_attribute_values={":value": {"SS": ["value2"]}},
|
||||
item=item,
|
||||
).validate()
|
||||
UpdateExpressionExecutor(validated_ast, item, None).execute()
|
||||
assert False, "Must raise exception"
|
||||
except IncorrectDataType:
|
||||
assert True
|
@ -8,6 +8,8 @@ from boto3.dynamodb.conditions import Key
|
||||
from botocore.exceptions import ClientError
|
||||
import sure # noqa
|
||||
from freezegun import freeze_time
|
||||
from nose.tools import assert_raises
|
||||
|
||||
from moto import mock_dynamodb2, mock_dynamodb2_deprecated
|
||||
from boto.exception import JSONResponseError
|
||||
from tests.helpers import requires_boto_gte
|
||||
@ -1273,6 +1275,15 @@ def test_update_item_with_expression():
|
||||
)
|
||||
|
||||
|
||||
def assert_failure_due_to_key_not_in_schema(func, **kwargs):
|
||||
with assert_raises(ClientError) as ex:
|
||||
func(**kwargs)
|
||||
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
"The provided key element does not match the schema"
|
||||
)
|
||||
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_update_item_add_with_expression():
|
||||
table = _create_table_with_range_key()
|
||||
@ -1299,14 +1310,13 @@ def test_update_item_add_with_expression():
|
||||
dict(table.get_item(Key=item_key)["Item"]).should.equal(current_item)
|
||||
|
||||
# Update item to add a string value to a non-existing set
|
||||
# Should just create the set in the background
|
||||
table.update_item(
|
||||
# Should throw: 'The provided key element does not match the schema'
|
||||
assert_failure_due_to_key_not_in_schema(
|
||||
table.update_item,
|
||||
Key=item_key,
|
||||
UpdateExpression="ADD non_existing_str_set :v",
|
||||
ExpressionAttributeValues={":v": {"item4"}},
|
||||
)
|
||||
current_item["non_existing_str_set"] = {"item4"}
|
||||
dict(table.get_item(Key=item_key)["Item"]).should.equal(current_item)
|
||||
|
||||
# Update item to add a num value to a num set
|
||||
table.update_item(
|
||||
@ -1381,15 +1391,14 @@ def test_update_item_add_with_nested_sets():
|
||||
dict(table.get_item(Key=item_key)["Item"]).should.equal(current_item)
|
||||
|
||||
# Update item to add a string value to a non-existing set
|
||||
# Should just create the set in the background
|
||||
table.update_item(
|
||||
# Should raise
|
||||
assert_failure_due_to_key_not_in_schema(
|
||||
table.update_item,
|
||||
Key=item_key,
|
||||
UpdateExpression="ADD #ns.#ne :v",
|
||||
ExpressionAttributeNames={"#ns": "nested", "#ne": "non_existing_str_set"},
|
||||
ExpressionAttributeValues={":v": {"new_item"}},
|
||||
)
|
||||
current_item["nested"]["non_existing_str_set"] = {"new_item"}
|
||||
dict(table.get_item(Key=item_key)["Item"]).should.equal(current_item)
|
||||
|
||||
|
||||
@mock_dynamodb2
|
||||
|
130
tests/test_eb/test_eb.py
Normal file
130
tests/test_eb/test_eb.py
Normal file
@ -0,0 +1,130 @@
|
||||
import boto3
|
||||
import sure # noqa
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
from moto import mock_elasticbeanstalk
|
||||
|
||||
|
||||
@mock_elasticbeanstalk
|
||||
def test_create_application():
|
||||
# Create Elastic Beanstalk Application
|
||||
conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
|
||||
app = conn.create_application(ApplicationName="myapp",)
|
||||
app["Application"]["ApplicationName"].should.equal("myapp")
|
||||
|
||||
|
||||
@mock_elasticbeanstalk
|
||||
def test_create_application_dup():
|
||||
conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
|
||||
conn.create_application(ApplicationName="myapp",)
|
||||
conn.create_application.when.called_with(ApplicationName="myapp",).should.throw(
|
||||
ClientError
|
||||
)
|
||||
|
||||
|
||||
@mock_elasticbeanstalk
|
||||
def test_describe_applications():
|
||||
# Create Elastic Beanstalk Application
|
||||
conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
|
||||
conn.create_application(ApplicationName="myapp",)
|
||||
|
||||
apps = conn.describe_applications()
|
||||
len(apps["Applications"]).should.equal(1)
|
||||
apps["Applications"][0]["ApplicationName"].should.equal("myapp")
|
||||
|
||||
|
||||
@mock_elasticbeanstalk
|
||||
def test_create_environment():
|
||||
# Create Elastic Beanstalk Environment
|
||||
conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
|
||||
app = conn.create_application(ApplicationName="myapp",)
|
||||
env = conn.create_environment(ApplicationName="myapp", EnvironmentName="myenv",)
|
||||
env["EnvironmentName"].should.equal("myenv")
|
||||
|
||||
|
||||
@mock_elasticbeanstalk
|
||||
def test_describe_environments():
|
||||
# List Elastic Beanstalk Envs
|
||||
conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
|
||||
conn.create_application(ApplicationName="myapp",)
|
||||
conn.create_environment(
|
||||
ApplicationName="myapp", EnvironmentName="myenv",
|
||||
)
|
||||
|
||||
envs = conn.describe_environments()
|
||||
envs = envs["Environments"]
|
||||
len(envs).should.equal(1)
|
||||
envs[0]["ApplicationName"].should.equal("myapp")
|
||||
envs[0]["EnvironmentName"].should.equal("myenv")
|
||||
|
||||
|
||||
def tags_dict_to_list(tag_dict):
|
||||
tag_list = []
|
||||
for key, value in tag_dict.items():
|
||||
tag_list.append({"Key": key, "Value": value})
|
||||
return tag_list
|
||||
|
||||
|
||||
def tags_list_to_dict(tag_list):
|
||||
tag_dict = {}
|
||||
for tag in tag_list:
|
||||
tag_dict[tag["Key"]] = tag["Value"]
|
||||
return tag_dict
|
||||
|
||||
|
||||
@mock_elasticbeanstalk
|
||||
def test_create_environment_tags():
|
||||
conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
|
||||
conn.create_application(ApplicationName="myapp",)
|
||||
env_tags = {"initial key": "initial value"}
|
||||
env = conn.create_environment(
|
||||
ApplicationName="myapp",
|
||||
EnvironmentName="myenv",
|
||||
Tags=tags_dict_to_list(env_tags),
|
||||
)
|
||||
|
||||
tags = conn.list_tags_for_resource(ResourceArn=env["EnvironmentArn"],)
|
||||
tags["ResourceArn"].should.equal(env["EnvironmentArn"])
|
||||
tags_list_to_dict(tags["ResourceTags"]).should.equal(env_tags)
|
||||
|
||||
|
||||
@mock_elasticbeanstalk
|
||||
def test_update_tags():
|
||||
conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
|
||||
conn.create_application(ApplicationName="myapp",)
|
||||
env_tags = {
|
||||
"initial key": "initial value",
|
||||
"to remove": "delete me",
|
||||
"to update": "original",
|
||||
}
|
||||
env = conn.create_environment(
|
||||
ApplicationName="myapp",
|
||||
EnvironmentName="myenv",
|
||||
Tags=tags_dict_to_list(env_tags),
|
||||
)
|
||||
|
||||
extra_env_tags = {
|
||||
"to update": "new",
|
||||
"extra key": "extra value",
|
||||
}
|
||||
conn.update_tags_for_resource(
|
||||
ResourceArn=env["EnvironmentArn"],
|
||||
TagsToAdd=tags_dict_to_list(extra_env_tags),
|
||||
TagsToRemove=["to remove"],
|
||||
)
|
||||
|
||||
total_env_tags = env_tags.copy()
|
||||
total_env_tags.update(extra_env_tags)
|
||||
del total_env_tags["to remove"]
|
||||
|
||||
tags = conn.list_tags_for_resource(ResourceArn=env["EnvironmentArn"],)
|
||||
tags["ResourceArn"].should.equal(env["EnvironmentArn"])
|
||||
tags_list_to_dict(tags["ResourceTags"]).should.equal(total_env_tags)
|
||||
|
||||
|
||||
@mock_elasticbeanstalk
|
||||
def test_list_available_solution_stacks():
|
||||
conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
|
||||
stacks = conn.list_available_solution_stacks()
|
||||
len(stacks["SolutionStacks"]).should.be.greater_than(0)
|
||||
len(stacks["SolutionStacks"]).should.be.equal(len(stacks["SolutionStackDetails"]))
|
@ -9,6 +9,7 @@ from nose.tools import assert_raises
|
||||
import base64
|
||||
import datetime
|
||||
import ipaddress
|
||||
import json
|
||||
|
||||
import six
|
||||
import boto
|
||||
@ -18,7 +19,7 @@ from boto.exception import EC2ResponseError, EC2ResponseError
|
||||
from freezegun import freeze_time
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2_deprecated, mock_ec2
|
||||
from moto import mock_ec2_deprecated, mock_ec2, mock_cloudformation
|
||||
from tests.helpers import requires_boto_gte
|
||||
|
||||
|
||||
@ -1334,6 +1335,12 @@ def test_create_instance_ebs_optimized():
|
||||
instance.load()
|
||||
instance.ebs_optimized.should.be(False)
|
||||
|
||||
instance = ec2_resource.create_instances(
|
||||
ImageId="ami-12345678", MaxCount=1, MinCount=1,
|
||||
)[0]
|
||||
instance.load()
|
||||
instance.ebs_optimized.should.be(False)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_run_multiple_instances_in_same_command():
|
||||
@ -1414,3 +1421,40 @@ def test_describe_instance_attribute():
|
||||
invalid_instance_attribute=invalid_instance_attribute
|
||||
)
|
||||
ex.exception.response["Error"]["Message"].should.equal(message)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
@mock_cloudformation
|
||||
def test_volume_size_through_cloudformation():
|
||||
ec2 = boto3.client("ec2", region_name="us-east-1")
|
||||
cf = boto3.client("cloudformation", region_name="us-east-1")
|
||||
|
||||
volume_template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Resources": {
|
||||
"testInstance": {
|
||||
"Type": "AWS::EC2::Instance",
|
||||
"Properties": {
|
||||
"ImageId": "ami-d3adb33f",
|
||||
"KeyName": "dummy",
|
||||
"InstanceType": "t2.micro",
|
||||
"BlockDeviceMappings": [
|
||||
{"DeviceName": "/dev/sda2", "Ebs": {"VolumeSize": "50"}}
|
||||
],
|
||||
"Tags": [
|
||||
{"Key": "foo", "Value": "bar"},
|
||||
{"Key": "blah", "Value": "baz"},
|
||||
],
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
template_json = json.dumps(volume_template)
|
||||
cf.create_stack(StackName="test_stack", TemplateBody=template_json)
|
||||
instances = ec2.describe_instances()
|
||||
volume = instances["Reservations"][0]["Instances"][0]["BlockDeviceMappings"][0][
|
||||
"Ebs"
|
||||
]
|
||||
|
||||
volumes = ec2.describe_volumes(VolumeIds=[volume["VolumeId"]])
|
||||
volumes["Volumes"][0]["Size"].should.equal(50)
|
||||
|
@ -2218,6 +2218,29 @@ def test_boto3_deleted_versionings_list():
|
||||
assert len(listed["Contents"]) == 1
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_boto3_delete_objects_for_specific_version_id():
|
||||
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
|
||||
client.create_bucket(Bucket="blah")
|
||||
client.put_bucket_versioning(
|
||||
Bucket="blah", VersioningConfiguration={"Status": "Enabled"}
|
||||
)
|
||||
|
||||
client.put_object(Bucket="blah", Key="test1", Body=b"test1a")
|
||||
client.put_object(Bucket="blah", Key="test1", Body=b"test1b")
|
||||
|
||||
response = client.list_object_versions(Bucket="blah", Prefix="test1")
|
||||
id_to_delete = [v["VersionId"] for v in response["Versions"] if v["IsLatest"]][0]
|
||||
|
||||
response = client.delete_objects(
|
||||
Bucket="blah", Delete={"Objects": [{"Key": "test1", "VersionId": id_to_delete}]}
|
||||
)
|
||||
assert response["Deleted"] == [{"Key": "test1", "VersionId": id_to_delete}]
|
||||
|
||||
listed = client.list_objects_v2(Bucket="blah")
|
||||
assert len(listed["Contents"]) == 1
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_boto3_delete_versioned_bucket():
|
||||
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
|
||||
@ -3256,7 +3279,8 @@ def test_boto3_put_object_tagging_on_earliest_version():
|
||||
# Older version has tags while the most recent does not
|
||||
resp = s3.get_object_tagging(Bucket=bucket_name, Key=key, VersionId=first_object.id)
|
||||
resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
|
||||
resp["TagSet"].should.equal(
|
||||
sorted_tagset = sorted(resp["TagSet"], key=lambda t: t["Key"])
|
||||
sorted_tagset.should.equal(
|
||||
[{"Key": "item1", "Value": "foo"}, {"Key": "item2", "Value": "bar"}]
|
||||
)
|
||||
|
||||
@ -3334,7 +3358,8 @@ def test_boto3_put_object_tagging_on_both_version():
|
||||
|
||||
resp = s3.get_object_tagging(Bucket=bucket_name, Key=key, VersionId=first_object.id)
|
||||
resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
|
||||
resp["TagSet"].should.equal(
|
||||
sorted_tagset = sorted(resp["TagSet"], key=lambda t: t["Key"])
|
||||
sorted_tagset.should.equal(
|
||||
[{"Key": "item1", "Value": "foo"}, {"Key": "item2", "Value": "bar"}]
|
||||
)
|
||||
|
||||
@ -3342,7 +3367,8 @@ def test_boto3_put_object_tagging_on_both_version():
|
||||
Bucket=bucket_name, Key=key, VersionId=second_object.id
|
||||
)
|
||||
resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
|
||||
resp["TagSet"].should.equal(
|
||||
sorted_tagset = sorted(resp["TagSet"], key=lambda t: t["Key"])
|
||||
sorted_tagset.should.equal(
|
||||
[{"Key": "item1", "Value": "baz"}, {"Key": "item2", "Value": "bin"}]
|
||||
)
|
||||
|
||||
@ -3744,6 +3770,28 @@ def test_root_dir_with_empty_name_works():
|
||||
store_and_read_back_a_key("/")
|
||||
|
||||
|
||||
@parameterized(["mybucket", "my.bucket"])
|
||||
@mock_s3
|
||||
def test_leading_slashes_not_removed(bucket_name):
|
||||
"""Make sure that leading slashes are not removed internally."""
|
||||
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
|
||||
s3.create_bucket(Bucket=bucket_name)
|
||||
|
||||
uploaded_key = "/key"
|
||||
invalid_key_1 = "key"
|
||||
invalid_key_2 = "//key"
|
||||
|
||||
s3.put_object(Bucket=bucket_name, Key=uploaded_key, Body=b"Some body")
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
s3.get_object(Bucket=bucket_name, Key=invalid_key_1)
|
||||
e.exception.response["Error"]["Code"].should.equal("NoSuchKey")
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
s3.get_object(Bucket=bucket_name, Key=invalid_key_2)
|
||||
e.exception.response["Error"]["Code"].should.equal("NoSuchKey")
|
||||
|
||||
|
||||
@parameterized(
|
||||
[("foo/bar/baz",), ("foo",), ("foo/run_dt%3D2019-01-01%252012%253A30%253A00",)]
|
||||
)
|
||||
@ -4293,24 +4341,17 @@ def test_s3_config_dict():
|
||||
FakeAcl,
|
||||
FakeGrant,
|
||||
FakeGrantee,
|
||||
FakeTag,
|
||||
FakeTagging,
|
||||
FakeTagSet,
|
||||
OWNER,
|
||||
)
|
||||
|
||||
# Without any buckets:
|
||||
assert not s3_config_query.get_config_resource("some_bucket")
|
||||
|
||||
tags = FakeTagging(
|
||||
FakeTagSet(
|
||||
[FakeTag("someTag", "someValue"), FakeTag("someOtherTag", "someOtherValue")]
|
||||
)
|
||||
)
|
||||
tags = {"someTag": "someValue", "someOtherTag": "someOtherValue"}
|
||||
|
||||
# With 1 bucket in us-west-2:
|
||||
s3_config_query.backends["global"].create_bucket("bucket1", "us-west-2")
|
||||
s3_config_query.backends["global"].put_bucket_tagging("bucket1", tags)
|
||||
s3_config_query.backends["global"].put_bucket_tags("bucket1", tags)
|
||||
|
||||
# With a log bucket:
|
||||
s3_config_query.backends["global"].create_bucket("logbucket", "us-west-2")
|
||||
|
@ -137,6 +137,45 @@ def test_create_secret_with_tags():
|
||||
]
|
||||
|
||||
|
||||
@mock_secretsmanager
|
||||
def test_create_secret_with_description():
|
||||
conn = boto3.client("secretsmanager", region_name="us-east-1")
|
||||
secret_name = "test-secret-with-tags"
|
||||
|
||||
result = conn.create_secret(
|
||||
Name=secret_name, SecretString="foosecret", Description="desc"
|
||||
)
|
||||
assert result["ARN"]
|
||||
assert result["Name"] == secret_name
|
||||
secret_value = conn.get_secret_value(SecretId=secret_name)
|
||||
assert secret_value["SecretString"] == "foosecret"
|
||||
secret_details = conn.describe_secret(SecretId=secret_name)
|
||||
assert secret_details["Description"] == "desc"
|
||||
|
||||
|
||||
@mock_secretsmanager
|
||||
def test_create_secret_with_tags_and_description():
|
||||
conn = boto3.client("secretsmanager", region_name="us-east-1")
|
||||
secret_name = "test-secret-with-tags"
|
||||
|
||||
result = conn.create_secret(
|
||||
Name=secret_name,
|
||||
SecretString="foosecret",
|
||||
Description="desc",
|
||||
Tags=[{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}],
|
||||
)
|
||||
assert result["ARN"]
|
||||
assert result["Name"] == secret_name
|
||||
secret_value = conn.get_secret_value(SecretId=secret_name)
|
||||
assert secret_value["SecretString"] == "foosecret"
|
||||
secret_details = conn.describe_secret(SecretId=secret_name)
|
||||
assert secret_details["Tags"] == [
|
||||
{"Key": "Foo", "Value": "Bar"},
|
||||
{"Key": "Mykey", "Value": "Myvalue"},
|
||||
]
|
||||
assert secret_details["Description"] == "desc"
|
||||
|
||||
|
||||
@mock_secretsmanager
|
||||
def test_delete_secret():
|
||||
conn = boto3.client("secretsmanager", region_name="us-west-2")
|
||||
@ -690,6 +729,31 @@ def test_put_secret_value_versions_differ_if_same_secret_put_twice():
|
||||
assert first_version_id != second_version_id
|
||||
|
||||
|
||||
@mock_secretsmanager
|
||||
def test_put_secret_value_maintains_description_and_tags():
|
||||
conn = boto3.client("secretsmanager", region_name="us-west-2")
|
||||
|
||||
conn.create_secret(
|
||||
Name=DEFAULT_SECRET_NAME,
|
||||
SecretString="foosecret",
|
||||
Description="desc",
|
||||
Tags=[{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}],
|
||||
)
|
||||
|
||||
conn = boto3.client("secretsmanager", region_name="us-west-2")
|
||||
conn.put_secret_value(
|
||||
SecretId=DEFAULT_SECRET_NAME,
|
||||
SecretString="dupe_secret",
|
||||
VersionStages=["AWSCURRENT"],
|
||||
)
|
||||
secret_details = conn.describe_secret(SecretId=DEFAULT_SECRET_NAME)
|
||||
assert secret_details["Tags"] == [
|
||||
{"Key": "Foo", "Value": "Bar"},
|
||||
{"Key": "Mykey", "Value": "Myvalue"},
|
||||
]
|
||||
assert secret_details["Description"] == "desc"
|
||||
|
||||
|
||||
@mock_secretsmanager
|
||||
def test_can_list_secret_version_ids():
|
||||
conn = boto3.client("secretsmanager", region_name="us-west-2")
|
||||
@ -739,6 +803,43 @@ def test_update_secret():
|
||||
assert created_secret["VersionId"] != updated_secret["VersionId"]
|
||||
|
||||
|
||||
@mock_secretsmanager
|
||||
def test_update_secret_with_tags_and_description():
|
||||
conn = boto3.client("secretsmanager", region_name="us-west-2")
|
||||
|
||||
created_secret = conn.create_secret(
|
||||
Name="test-secret",
|
||||
SecretString="foosecret",
|
||||
Description="desc",
|
||||
Tags=[{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}],
|
||||
)
|
||||
|
||||
assert created_secret["ARN"]
|
||||
assert created_secret["Name"] == "test-secret"
|
||||
assert created_secret["VersionId"] != ""
|
||||
|
||||
secret = conn.get_secret_value(SecretId="test-secret")
|
||||
assert secret["SecretString"] == "foosecret"
|
||||
|
||||
updated_secret = conn.update_secret(
|
||||
SecretId="test-secret", SecretString="barsecret"
|
||||
)
|
||||
|
||||
assert updated_secret["ARN"]
|
||||
assert updated_secret["Name"] == "test-secret"
|
||||
assert updated_secret["VersionId"] != ""
|
||||
|
||||
secret = conn.get_secret_value(SecretId="test-secret")
|
||||
assert secret["SecretString"] == "barsecret"
|
||||
assert created_secret["VersionId"] != updated_secret["VersionId"]
|
||||
secret_details = conn.describe_secret(SecretId="test-secret")
|
||||
assert secret_details["Tags"] == [
|
||||
{"Key": "Foo", "Value": "Bar"},
|
||||
{"Key": "Mykey", "Value": "Myvalue"},
|
||||
]
|
||||
assert secret_details["Description"] == "desc"
|
||||
|
||||
|
||||
@mock_secretsmanager
|
||||
def test_update_secret_which_does_not_exit():
|
||||
conn = boto3.client("secretsmanager", region_name="us-west-2")
|
||||
|
@ -1,4 +1,5 @@
|
||||
from __future__ import unicode_literals
|
||||
from base64 import b64encode
|
||||
import json
|
||||
|
||||
import boto
|
||||
@ -103,6 +104,128 @@ def test_assume_role():
|
||||
)
|
||||
|
||||
|
||||
@freeze_time("2012-01-01 12:00:00")
|
||||
@mock_sts
|
||||
def test_assume_role_with_saml():
|
||||
client = boto3.client("sts", region_name="us-east-1")
|
||||
|
||||
session_name = "session-name"
|
||||
policy = json.dumps(
|
||||
{
|
||||
"Statement": [
|
||||
{
|
||||
"Sid": "Stmt13690092345534",
|
||||
"Action": ["S3:ListBucket"],
|
||||
"Effect": "Allow",
|
||||
"Resource": ["arn:aws:s3:::foobar-tester"],
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
role_name = "test-role"
|
||||
provider_name = "TestProvFed"
|
||||
user_name = "testuser"
|
||||
role_input = "arn:aws:iam::{account_id}:role/{role_name}".format(
|
||||
account_id=ACCOUNT_ID, role_name=role_name
|
||||
)
|
||||
principal_role = "arn:aws:iam:{account_id}:saml-provider/{provider_name}".format(
|
||||
account_id=ACCOUNT_ID, provider_name=provider_name
|
||||
)
|
||||
saml_assertion = """
|
||||
<?xml version="1.0"?>
|
||||
<samlp:Response xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol" ID="_00000000-0000-0000-0000-000000000000" Version="2.0" IssueInstant="2012-01-01T12:00:00.000Z" Destination="https://signin.aws.amazon.com/saml" Consent="urn:oasis:names:tc:SAML:2.0:consent:unspecified">
|
||||
<Issuer xmlns="urn:oasis:names:tc:SAML:2.0:assertion">http://localhost/</Issuer>
|
||||
<samlp:Status>
|
||||
<samlp:StatusCode Value="urn:oasis:names:tc:SAML:2.0:status:Success"/>
|
||||
</samlp:Status>
|
||||
<Assertion xmlns="urn:oasis:names:tc:SAML:2.0:assertion" ID="_00000000-0000-0000-0000-000000000000" IssueInstant="2012-12-01T12:00:00.000Z" Version="2.0">
|
||||
<Issuer>http://localhost:3000/</Issuer>
|
||||
<ds:Signature xmlns:ds="http://www.w3.org/2000/09/xmldsig#">
|
||||
<ds:SignedInfo>
|
||||
<ds:CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/>
|
||||
<ds:SignatureMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha256"/>
|
||||
<ds:Reference URI="#_00000000-0000-0000-0000-000000000000">
|
||||
<ds:Transforms>
|
||||
<ds:Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"/>
|
||||
<ds:Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/>
|
||||
</ds:Transforms>
|
||||
<ds:DigestMethod Algorithm="http://www.w3.org/2001/04/xmlenc#sha256"/>
|
||||
<ds:DigestValue>NTIyMzk0ZGI4MjI0ZjI5ZGNhYjkyOGQyZGQ1NTZjODViZjk5YTY4ODFjOWRjNjkyYzZmODY2ZDQ4NjlkZjY3YSAgLQo=</ds:DigestValue>
|
||||
</ds:Reference>
|
||||
</ds:SignedInfo>
|
||||
<ds:SignatureValue>NTIyMzk0ZGI4MjI0ZjI5ZGNhYjkyOGQyZGQ1NTZjODViZjk5YTY4ODFjOWRjNjkyYzZmODY2ZDQ4NjlkZjY3YSAgLQo=</ds:SignatureValue>
|
||||
<KeyInfo xmlns="http://www.w3.org/2000/09/xmldsig#">
|
||||
<ds:X509Data>
|
||||
<ds:X509Certificate>NTIyMzk0ZGI4MjI0ZjI5ZGNhYjkyOGQyZGQ1NTZjODViZjk5YTY4ODFjOWRjNjkyYzZmODY2ZDQ4NjlkZjY3YSAgLQo=</ds:X509Certificate>
|
||||
</ds:X509Data>
|
||||
</KeyInfo>
|
||||
</ds:Signature>
|
||||
<Subject>
|
||||
<NameID Format="urn:oasis:names:tc:SAML:2.0:nameid-format:persistent">{username}</NameID>
|
||||
<SubjectConfirmation Method="urn:oasis:names:tc:SAML:2.0:cm:bearer">
|
||||
<SubjectConfirmationData NotOnOrAfter="2012-01-01T13:00:00.000Z" Recipient="https://signin.aws.amazon.com/saml"/>
|
||||
</SubjectConfirmation>
|
||||
</Subject>
|
||||
<Conditions NotBefore="2012-01-01T12:00:00.000Z" NotOnOrAfter="2012-01-01T13:00:00.000Z">
|
||||
<AudienceRestriction>
|
||||
<Audience>urn:amazon:webservices</Audience>
|
||||
</AudienceRestriction>
|
||||
</Conditions>
|
||||
<AttributeStatement>
|
||||
<Attribute Name="https://aws.amazon.com/SAML/Attributes/RoleSessionName">
|
||||
<AttributeValue>{username}@localhost</AttributeValue>
|
||||
</Attribute>
|
||||
<Attribute Name="https://aws.amazon.com/SAML/Attributes/Role">
|
||||
<AttributeValue>arn:aws:iam::{account_id}:saml-provider/{provider_name},arn:aws:iam::{account_id}:role/{role_name}</AttributeValue>
|
||||
</Attribute>
|
||||
<Attribute Name="https://aws.amazon.com/SAML/Attributes/SessionDuration">
|
||||
<AttributeValue>900</AttributeValue>
|
||||
</Attribute>
|
||||
</AttributeStatement>
|
||||
<AuthnStatement AuthnInstant="2012-01-01T12:00:00.000Z" SessionIndex="_00000000-0000-0000-0000-000000000000">
|
||||
<AuthnContext>
|
||||
<AuthnContextClassRef>urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport</AuthnContextClassRef>
|
||||
</AuthnContext>
|
||||
</AuthnStatement>
|
||||
</Assertion>
|
||||
</samlp:Response>""".format(
|
||||
account_id=ACCOUNT_ID,
|
||||
role_name=role_name,
|
||||
provider_name=provider_name,
|
||||
username=user_name,
|
||||
).replace(
|
||||
"\n", ""
|
||||
)
|
||||
|
||||
assume_role_response = client.assume_role_with_saml(
|
||||
RoleArn=role_input,
|
||||
PrincipalArn=principal_role,
|
||||
SAMLAssertion=b64encode(saml_assertion.encode("utf-8")).decode("utf-8"),
|
||||
)
|
||||
|
||||
credentials = assume_role_response["Credentials"]
|
||||
if not settings.TEST_SERVER_MODE:
|
||||
credentials["Expiration"].isoformat().should.equal("2012-01-01T12:15:00+00:00")
|
||||
credentials["SessionToken"].should.have.length_of(356)
|
||||
assert credentials["SessionToken"].startswith("FQoGZXIvYXdzE")
|
||||
credentials["AccessKeyId"].should.have.length_of(20)
|
||||
assert credentials["AccessKeyId"].startswith("ASIA")
|
||||
credentials["SecretAccessKey"].should.have.length_of(40)
|
||||
|
||||
assume_role_response["AssumedRoleUser"]["Arn"].should.equal(
|
||||
"arn:aws:sts::{account_id}:assumed-role/{role_name}/{fed_name}@localhost".format(
|
||||
account_id=ACCOUNT_ID, role_name=role_name, fed_name=user_name
|
||||
)
|
||||
)
|
||||
assert assume_role_response["AssumedRoleUser"]["AssumedRoleId"].startswith("AROA")
|
||||
assert assume_role_response["AssumedRoleUser"]["AssumedRoleId"].endswith(
|
||||
":{fed_name}@localhost".format(fed_name=user_name)
|
||||
)
|
||||
assume_role_response["AssumedRoleUser"]["AssumedRoleId"].should.have.length_of(
|
||||
21 + 1 + len("{fed_name}@localhost".format(fed_name=user_name))
|
||||
)
|
||||
|
||||
|
||||
@freeze_time("2012-01-01 12:00:00")
|
||||
@mock_sts_deprecated
|
||||
def test_assume_role_with_web_identity():
|
||||
|
@ -77,3 +77,34 @@ def test_extract_tag_names():
|
||||
expected = ["key1", "key2"]
|
||||
|
||||
expected.should.be.equal(actual)
|
||||
|
||||
|
||||
def test_copy_non_existing_arn():
|
||||
svc = TaggingService()
|
||||
tags = [{"Key": "key1", "Value": "value1"}, {"Key": "key2", "Value": "value2"}]
|
||||
svc.tag_resource("new_arn", tags)
|
||||
#
|
||||
svc.copy_tags("non_existing_arn", "new_arn")
|
||||
# Copying from a non-existing ARN should a NOOP
|
||||
# Assert the old tags still exist
|
||||
actual = sorted(
|
||||
svc.list_tags_for_resource("new_arn")["Tags"], key=lambda t: t["Key"]
|
||||
)
|
||||
actual.should.equal(tags)
|
||||
|
||||
|
||||
def test_copy_existing_arn():
|
||||
svc = TaggingService()
|
||||
tags_old_arn = [{"Key": "key1", "Value": "value1"}]
|
||||
tags_new_arn = [{"Key": "key2", "Value": "value2"}]
|
||||
svc.tag_resource("old_arn", tags_old_arn)
|
||||
svc.tag_resource("new_arn", tags_new_arn)
|
||||
#
|
||||
svc.copy_tags("old_arn", "new_arn")
|
||||
# Assert the old tags still exist
|
||||
actual = sorted(
|
||||
svc.list_tags_for_resource("new_arn")["Tags"], key=lambda t: t["Key"]
|
||||
)
|
||||
actual.should.equal(
|
||||
[{"Key": "key1", "Value": "value1"}, {"Key": "key2", "Value": "value2"}]
|
||||
)
|
||||
|
Loading…
x
Reference in New Issue
Block a user