Merge pull request #41 from spulec/master

Merge upstream
This commit is contained in:
Bert Blommers 2020-04-28 13:29:57 +01:00 committed by GitHub
commit 55d7a28968
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
41 changed files with 2473 additions and 527 deletions

View File

@@ -3,5 +3,6 @@ include requirements.txt requirements-dev.txt tox.ini
 include moto/ec2/resources/instance_types.json
 include moto/ec2/resources/amis.json
 include moto/cognitoidp/resources/*.json
+include moto/dynamodb2/parsing/reserved_keywords.txt
 recursive-include moto/templates *
 recursive-include tests *

View File

@@ -1,62 +1,114 @@
 from __future__ import unicode_literals
-from .acm import mock_acm  # noqa
-from .apigateway import mock_apigateway, mock_apigateway_deprecated  # noqa
-from .athena import mock_athena  # noqa
-from .autoscaling import mock_autoscaling, mock_autoscaling_deprecated  # noqa
-from .awslambda import mock_lambda, mock_lambda_deprecated  # noqa
-from .batch import mock_batch  # noqa
-from .cloudformation import mock_cloudformation  # noqa
-from .cloudformation import mock_cloudformation_deprecated  # noqa
-from .cloudwatch import mock_cloudwatch, mock_cloudwatch_deprecated  # noqa
-from .codecommit import mock_codecommit  # noqa
-from .codepipeline import mock_codepipeline  # noqa
-from .cognitoidentity import mock_cognitoidentity  # noqa
-from .cognitoidentity import mock_cognitoidentity_deprecated  # noqa
-from .cognitoidp import mock_cognitoidp, mock_cognitoidp_deprecated  # noqa
-from .config import mock_config  # noqa
-from .datapipeline import mock_datapipeline  # noqa
-from .datapipeline import mock_datapipeline_deprecated  # noqa
-from .datasync import mock_datasync  # noqa
-from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated  # noqa
-from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated  # noqa
-from .dynamodbstreams import mock_dynamodbstreams  # noqa
-from .elasticbeanstalk import mock_elasticbeanstalk  # noqa
-from .ec2 import mock_ec2, mock_ec2_deprecated  # noqa
-from .ec2_instance_connect import mock_ec2_instance_connect  # noqa
-from .ecr import mock_ecr, mock_ecr_deprecated  # noqa
-from .ecs import mock_ecs, mock_ecs_deprecated  # noqa
-from .elb import mock_elb, mock_elb_deprecated  # noqa
-from .elbv2 import mock_elbv2  # noqa
-from .emr import mock_emr, mock_emr_deprecated  # noqa
-from .events import mock_events  # noqa
-from .glacier import mock_glacier, mock_glacier_deprecated  # noqa
-from .glue import mock_glue  # noqa
-from .iam import mock_iam, mock_iam_deprecated  # noqa
-from .iot import mock_iot  # noqa
-from .iotdata import mock_iotdata  # noqa
-from .kinesis import mock_kinesis, mock_kinesis_deprecated  # noqa
-from .kms import mock_kms, mock_kms_deprecated  # noqa
-from .logs import mock_logs, mock_logs_deprecated  # noqa
-from .opsworks import mock_opsworks, mock_opsworks_deprecated  # noqa
-from .organizations import mock_organizations  # noqa
-from .polly import mock_polly  # noqa
-from .rds import mock_rds, mock_rds_deprecated  # noqa
-from .rds2 import mock_rds2, mock_rds2_deprecated  # noqa
-from .redshift import mock_redshift, mock_redshift_deprecated  # noqa
-from .resourcegroups import mock_resourcegroups  # noqa
-from .resourcegroupstaggingapi import mock_resourcegroupstaggingapi  # noqa
-from .route53 import mock_route53, mock_route53_deprecated  # noqa
-from .s3 import mock_s3, mock_s3_deprecated  # noqa
-from .secretsmanager import mock_secretsmanager  # noqa
-from .ses import mock_ses, mock_ses_deprecated  # noqa
-from .sns import mock_sns, mock_sns_deprecated  # noqa
-from .sqs import mock_sqs, mock_sqs_deprecated  # noqa
-from .ssm import mock_ssm  # noqa
-from .stepfunctions import mock_stepfunctions  # noqa
-from .sts import mock_sts, mock_sts_deprecated  # noqa
-from .swf import mock_swf, mock_swf_deprecated  # noqa
-from .xray import XRaySegment, mock_xray, mock_xray_client  # noqa
+import importlib
+
+
+def lazy_load(module_name, element):
+    def f(*args, **kwargs):
+        module = importlib.import_module(module_name, "moto")
+        return getattr(module, element)(*args, **kwargs)
+
+    return f
+
+
+mock_acm = lazy_load(".acm", "mock_acm")
+mock_apigateway = lazy_load(".apigateway", "mock_apigateway")
+mock_apigateway_deprecated = lazy_load(".apigateway", "mock_apigateway_deprecated")
+mock_athena = lazy_load(".athena", "mock_athena")
+mock_autoscaling = lazy_load(".autoscaling", "mock_autoscaling")
+mock_autoscaling_deprecated = lazy_load(".autoscaling", "mock_autoscaling_deprecated")
+mock_lambda = lazy_load(".awslambda", "mock_lambda")
+mock_lambda_deprecated = lazy_load(".awslambda", "mock_lambda_deprecated")
+mock_batch = lazy_load(".batch", "mock_batch")
+mock_batch = lazy_load(".batch", "mock_batch")
+mock_cloudformation = lazy_load(".cloudformation", "mock_cloudformation")
+mock_cloudformation_deprecated = lazy_load(
+    ".cloudformation", "mock_cloudformation_deprecated"
+)
+mock_cloudwatch = lazy_load(".cloudwatch", "mock_cloudwatch")
+mock_cloudwatch_deprecated = lazy_load(".cloudwatch", "mock_cloudwatch_deprecated")
+mock_codecommit = lazy_load(".codecommit", "mock_codecommit")
+mock_codepipeline = lazy_load(".codepipeline", "mock_codepipeline")
+mock_cognitoidentity = lazy_load(".cognitoidentity", "mock_cognitoidentity")
+mock_cognitoidentity_deprecated = lazy_load(
+    ".cognitoidentity", "mock_cognitoidentity_deprecated"
+)
+mock_cognitoidp = lazy_load(".cognitoidp", "mock_cognitoidp")
+mock_cognitoidp_deprecated = lazy_load(".cognitoidp", "mock_cognitoidp_deprecated")
+mock_config = lazy_load(".config", "mock_config")
+mock_datapipeline = lazy_load(".datapipeline", "mock_datapipeline")
+mock_datapipeline_deprecated = lazy_load(
+    ".datapipeline", "mock_datapipeline_deprecated"
+)
+mock_datasync = lazy_load(".datasync", "mock_datasync")
+mock_dynamodb = lazy_load(".dynamodb", "mock_dynamodb")
+mock_dynamodb_deprecated = lazy_load(".dynamodb", "mock_dynamodb_deprecated")
+mock_dynamodb2 = lazy_load(".dynamodb2", "mock_dynamodb2")
+mock_dynamodb2_deprecated = lazy_load(".dynamodb2", "mock_dynamodb2_deprecated")
+mock_dynamodbstreams = lazy_load(".dynamodbstreams", "mock_dynamodbstreams")
+mock_elasticbeanstalk = lazy_load(".elasticbeanstalk", "mock_elasticbeanstalk")
+mock_ec2 = lazy_load(".ec2", "mock_ec2")
+mock_ec2_deprecated = lazy_load(".ec2", "mock_ec2_deprecated")
+mock_ec2_instance_connect = lazy_load(
+    ".ec2_instance_connect", "mock_ec2_instance_connect"
+)
+mock_ecr = lazy_load(".ecr", "mock_ecr")
+mock_ecr_deprecated = lazy_load(".ecr", "mock_ecr_deprecated")
+mock_ecs = lazy_load(".ecs", "mock_ecs")
+mock_ecs_deprecated = lazy_load(".ecs", "mock_ecs_deprecated")
+mock_elb = lazy_load(".elb", "mock_elb")
+mock_elb_deprecated = lazy_load(".elb", "mock_elb_deprecated")
+mock_elbv2 = lazy_load(".elbv2", "mock_elbv2")
+mock_emr = lazy_load(".emr", "mock_emr")
+mock_emr_deprecated = lazy_load(".emr", "mock_emr_deprecated")
+mock_events = lazy_load(".events", "mock_events")
+mock_glacier = lazy_load(".glacier", "mock_glacier")
+mock_glacier_deprecated = lazy_load(".glacier", "mock_glacier_deprecated")
+mock_glue = lazy_load(".glue", "mock_glue")
+mock_iam = lazy_load(".iam", "mock_iam")
+mock_iam_deprecated = lazy_load(".iam", "mock_iam_deprecated")
+mock_iot = lazy_load(".iot", "mock_iot")
+mock_iotdata = lazy_load(".iotdata", "mock_iotdata")
+mock_kinesis = lazy_load(".kinesis", "mock_kinesis")
+mock_kinesis_deprecated = lazy_load(".kinesis", "mock_kinesis_deprecated")
+mock_kms = lazy_load(".kms", "mock_kms")
+mock_kms_deprecated = lazy_load(".kms", "mock_kms_deprecated")
+mock_logs = lazy_load(".logs", "mock_logs")
+mock_logs_deprecated = lazy_load(".logs", "mock_logs_deprecated")
+mock_opsworks = lazy_load(".opsworks", "mock_opsworks")
+mock_opsworks_deprecated = lazy_load(".opsworks", "mock_opsworks_deprecated")
+mock_organizations = lazy_load(".organizations", "mock_organizations")
+mock_polly = lazy_load(".polly", "mock_polly")
+mock_rds = lazy_load(".rds", "mock_rds")
+mock_rds_deprecated = lazy_load(".rds", "mock_rds_deprecated")
+mock_rds2 = lazy_load(".rds2", "mock_rds2")
+mock_rds2_deprecated = lazy_load(".rds2", "mock_rds2_deprecated")
+mock_redshift = lazy_load(".redshift", "mock_redshift")
+mock_redshift_deprecated = lazy_load(".redshift", "mock_redshift_deprecated")
+mock_resourcegroups = lazy_load(".resourcegroups", "mock_resourcegroups")
+mock_resourcegroupstaggingapi = lazy_load(
+    ".resourcegroupstaggingapi", "mock_resourcegroupstaggingapi"
+)
+mock_route53 = lazy_load(".route53", "mock_route53")
+mock_route53_deprecated = lazy_load(".route53", "mock_route53_deprecated")
+mock_s3 = lazy_load(".s3", "mock_s3")
+mock_s3_deprecated = lazy_load(".s3", "mock_s3_deprecated")
+mock_secretsmanager = lazy_load(".secretsmanager", "mock_secretsmanager")
+mock_ses = lazy_load(".ses", "mock_ses")
+mock_ses_deprecated = lazy_load(".ses", "mock_ses_deprecated")
+mock_sns = lazy_load(".sns", "mock_sns")
+mock_sns_deprecated = lazy_load(".sns", "mock_sns_deprecated")
+mock_sqs = lazy_load(".sqs", "mock_sqs")
+mock_sqs_deprecated = lazy_load(".sqs", "mock_sqs_deprecated")
+mock_ssm = lazy_load(".ssm", "mock_ssm")
+mock_stepfunctions = lazy_load(".stepfunctions", "mock_stepfunctions")
+mock_sts = lazy_load(".sts", "mock_sts")
+mock_sts_deprecated = lazy_load(".sts", "mock_sts_deprecated")
+mock_swf = lazy_load(".swf", "mock_swf")
+mock_swf_deprecated = lazy_load(".swf", "mock_swf_deprecated")
+XRaySegment = lazy_load(".xray", "XRaySegment")
+mock_xray = lazy_load(".xray", "mock_xray")
+mock_xray_client = lazy_load(".xray", "mock_xray_client")
 # import logging
 # logging.getLogger('boto').setLevel(logging.CRITICAL)
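
The rewritten moto/__init__.py above swaps the eager per-service imports for lazy_load wrappers: each mock_* name now imports its submodule only the first time it is called, the same keep-imports-cheap idea that this commit also applies to cfnlint and pkg_resources further down. A minimal, self-contained sketch of the pattern (the hashlib/sha256 target is only an illustrative stand-in, not part of moto):

    import importlib

    def lazy_load(module_name, element, package=None):
        # Defer the real import until the returned wrapper is first invoked.
        def f(*args, **kwargs):
            module = importlib.import_module(module_name, package)
            return getattr(module, element)(*args, **kwargs)
        return f

    # "hashlib" is imported only when sha256() is actually called:
    sha256 = lazy_load("hashlib", "sha256")
    print(sha256(b"hello").hexdigest())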

View File

@@ -112,6 +112,15 @@ class ApiKeyNotFoundException(RESTError):
         )
 
 
+class UsagePlanNotFoundException(RESTError):
+    code = 404
+
+    def __init__(self):
+        super(UsagePlanNotFoundException, self).__init__(
+            "NotFoundException", "Invalid Usage Plan ID specified"
+        )
+
+
 class ApiKeyAlreadyExists(RESTError):
     code = 409

View File

@@ -20,6 +20,7 @@ from moto.core.utils import path_url
 from moto.sts.models import ACCOUNT_ID
 from .exceptions import (
     ApiKeyNotFoundException,
+    UsagePlanNotFoundException,
     AwsProxyNotAllowed,
     CrossAccountNotAllowed,
     IntegrationMethodNotDefined,
@@ -1045,6 +1046,9 @@ class APIGatewayBackend(BaseBackend):
         return plans
 
     def get_usage_plan(self, usage_plan_id):
+        if usage_plan_id not in self.usage_plans:
+            raise UsagePlanNotFoundException()
+
         return self.usage_plans[usage_plan_id]
 
     def delete_usage_plan(self, usage_plan_id):
@@ -1077,6 +1081,17 @@ class APIGatewayBackend(BaseBackend):
         return list(self.usage_plan_keys[usage_plan_id].values())
 
     def get_usage_plan_key(self, usage_plan_id, key_id):
+        # first check if is a valid api key
+        if key_id not in self.keys:
+            raise ApiKeyNotFoundException()
+
+        # then check if is a valid api key and that the key is in the plan
+        if (
+            usage_plan_id not in self.usage_plan_keys
+            or key_id not in self.usage_plan_keys[usage_plan_id]
+        ):
+            raise UsagePlanNotFoundException()
+
         return self.usage_plan_keys[usage_plan_id][key_id]
 
     def delete_usage_plan_key(self, usage_plan_id, key_id):

View File

@@ -6,6 +6,7 @@ from moto.core.responses import BaseResponse
 from .models import apigateway_backends
 from .exceptions import (
     ApiKeyNotFoundException,
+    UsagePlanNotFoundException,
     BadRequestException,
     CrossAccountNotAllowed,
     AuthorizerNotFoundException,
@@ -490,7 +491,16 @@ class APIGatewayResponse(BaseResponse):
         usage_plan = url_path_parts[2]
         if self.method == "GET":
-            usage_plan_response = self.backend.get_usage_plan(usage_plan)
+            try:
+                usage_plan_response = self.backend.get_usage_plan(usage_plan)
+            except (UsagePlanNotFoundException) as error:
+                return (
+                    error.code,
+                    {},
+                    '{{"message":"{0}","code":"{1}"}}'.format(
+                        error.message, error.error_type
+                    ),
+                )
         elif self.method == "DELETE":
             usage_plan_response = self.backend.delete_usage_plan(usage_plan)
         return 200, {}, json.dumps(usage_plan_response)
@@ -529,7 +539,18 @@ class APIGatewayResponse(BaseResponse):
         key_id = url_path_parts[4]
         if self.method == "GET":
-            usage_plan_response = self.backend.get_usage_plan_key(usage_plan_id, key_id)
+            try:
+                usage_plan_response = self.backend.get_usage_plan_key(
+                    usage_plan_id, key_id
+                )
+            except (UsagePlanNotFoundException, ApiKeyNotFoundException) as error:
+                return (
+                    error.code,
+                    {},
+                    '{{"message":"{0}","code":"{1}"}}'.format(
+                        error.message, error.error_type
+                    ),
+                )
         elif self.method == "DELETE":
             usage_plan_response = self.backend.delete_usage_plan_key(
                 usage_plan_id, key_id

View File

@@ -1,124 +1,99 @@
 from __future__ import unicode_literals
-from moto.acm import acm_backends
-from moto.apigateway import apigateway_backends
-from moto.athena import athena_backends
-from moto.autoscaling import autoscaling_backends
-from moto.awslambda import lambda_backends
-from moto.batch import batch_backends
-from moto.cloudformation import cloudformation_backends
-from moto.cloudwatch import cloudwatch_backends
-from moto.codecommit import codecommit_backends
-from moto.codepipeline import codepipeline_backends
-from moto.cognitoidentity import cognitoidentity_backends
-from moto.cognitoidp import cognitoidp_backends
-from moto.config import config_backends
-from moto.core import moto_api_backends
-from moto.datapipeline import datapipeline_backends
-from moto.datasync import datasync_backends
-from moto.dynamodb import dynamodb_backends
-from moto.dynamodb2 import dynamodb_backends2
-from moto.dynamodbstreams import dynamodbstreams_backends
-from moto.ec2 import ec2_backends
-from moto.ec2_instance_connect import ec2_instance_connect_backends
-from moto.ecr import ecr_backends
-from moto.ecs import ecs_backends
-from moto.elasticbeanstalk import eb_backends
-from moto.elb import elb_backends
-from moto.elbv2 import elbv2_backends
-from moto.emr import emr_backends
-from moto.events import events_backends
-from moto.glacier import glacier_backends
-from moto.glue import glue_backends
-from moto.iam import iam_backends
-from moto.instance_metadata import instance_metadata_backends
-from moto.iot import iot_backends
-from moto.iotdata import iotdata_backends
-from moto.kinesis import kinesis_backends
-from moto.kms import kms_backends
-from moto.logs import logs_backends
-from moto.opsworks import opsworks_backends
-from moto.organizations import organizations_backends
-from moto.polly import polly_backends
-from moto.rds2 import rds2_backends
-from moto.redshift import redshift_backends
-from moto.resourcegroups import resourcegroups_backends
-from moto.resourcegroupstaggingapi import resourcegroupstaggingapi_backends
-from moto.route53 import route53_backends
-from moto.s3 import s3_backends
-from moto.secretsmanager import secretsmanager_backends
-from moto.ses import ses_backends
-from moto.sns import sns_backends
-from moto.sqs import sqs_backends
-from moto.ssm import ssm_backends
-from moto.stepfunctions import stepfunction_backends
-from moto.sts import sts_backends
-from moto.swf import swf_backends
-from moto.xray import xray_backends
+import importlib
 
 BACKENDS = {
-    "acm": acm_backends,
-    "apigateway": apigateway_backends,
-    "athena": athena_backends,
-    "autoscaling": autoscaling_backends,
-    "batch": batch_backends,
-    "cloudformation": cloudformation_backends,
-    "cloudwatch": cloudwatch_backends,
-    "codecommit": codecommit_backends,
-    "codepipeline": codepipeline_backends,
-    "cognito-identity": cognitoidentity_backends,
-    "cognito-idp": cognitoidp_backends,
-    "config": config_backends,
-    "datapipeline": datapipeline_backends,
-    "datasync": datasync_backends,
-    "dynamodb": dynamodb_backends,
-    "dynamodb2": dynamodb_backends2,
-    "dynamodbstreams": dynamodbstreams_backends,
-    "ec2": ec2_backends,
-    "ec2_instance_connect": ec2_instance_connect_backends,
-    "ecr": ecr_backends,
-    "ecs": ecs_backends,
-    "elasticbeanstalk": eb_backends,
-    "elb": elb_backends,
-    "elbv2": elbv2_backends,
-    "events": events_backends,
-    "emr": emr_backends,
-    "glacier": glacier_backends,
-    "glue": glue_backends,
-    "iam": iam_backends,
-    "moto_api": moto_api_backends,
-    "instance_metadata": instance_metadata_backends,
-    "logs": logs_backends,
-    "kinesis": kinesis_backends,
-    "kms": kms_backends,
-    "opsworks": opsworks_backends,
-    "organizations": organizations_backends,
-    "polly": polly_backends,
-    "redshift": redshift_backends,
-    "resource-groups": resourcegroups_backends,
-    "rds": rds2_backends,
-    "s3": s3_backends,
-    "s3bucket_path": s3_backends,
-    "ses": ses_backends,
-    "secretsmanager": secretsmanager_backends,
-    "sns": sns_backends,
-    "sqs": sqs_backends,
-    "ssm": ssm_backends,
-    "stepfunctions": stepfunction_backends,
-    "sts": sts_backends,
-    "swf": swf_backends,
-    "route53": route53_backends,
-    "lambda": lambda_backends,
-    "xray": xray_backends,
-    "resourcegroupstaggingapi": resourcegroupstaggingapi_backends,
-    "iot": iot_backends,
-    "iot-data": iotdata_backends,
+    "acm": ("acm", "acm_backends"),
+    "apigateway": ("apigateway", "apigateway_backends"),
+    "athena": ("athena", "athena_backends"),
+    "autoscaling": ("autoscaling", "autoscaling_backends"),
+    "batch": ("batch", "batch_backends"),
+    "cloudformation": ("cloudformation", "cloudformation_backends"),
+    "cloudwatch": ("cloudwatch", "cloudwatch_backends"),
+    "codecommit": ("codecommit", "codecommit_backends"),
+    "codepipeline": ("codepipeline", "codepipeline_backends"),
+    "cognito-identity": ("cognitoidentity", "cognitoidentity_backends"),
+    "cognito-idp": ("cognitoidp", "cognitoidp_backends"),
+    "config": ("config", "config_backends"),
+    "datapipeline": ("datapipeline", "datapipeline_backends"),
+    "datasync": ("datasync", "datasync_backends"),
+    "dynamodb": ("dynamodb", "dynamodb_backends"),
+    "dynamodb2": ("dynamodb2", "dynamodb_backends2"),
+    "dynamodbstreams": ("dynamodbstreams", "dynamodbstreams_backends"),
+    "ec2": ("ec2", "ec2_backends"),
+    "ec2_instance_connect": ("ec2_instance_connect", "ec2_instance_connect_backends"),
+    "ecr": ("ecr", "ecr_backends"),
+    "ecs": ("ecs", "ecs_backends"),
+    "elasticbeanstalk": ("elasticbeanstalk", "eb_backends"),
+    "elb": ("elb", "elb_backends"),
+    "elbv2": ("elbv2", "elbv2_backends"),
+    "emr": ("emr", "emr_backends"),
+    "events": ("events", "events_backends"),
+    "glacier": ("glacier", "glacier_backends"),
+    "glue": ("glue", "glue_backends"),
+    "iam": ("iam", "iam_backends"),
+    "instance_metadata": ("instance_metadata", "instance_metadata_backends"),
+    "iot": ("iot", "iot_backends"),
+    "iot-data": ("iotdata", "iotdata_backends"),
+    "kinesis": ("kinesis", "kinesis_backends"),
+    "kms": ("kms", "kms_backends"),
+    "lambda": ("awslambda", "lambda_backends"),
+    "logs": ("logs", "logs_backends"),
+    "moto_api": ("core", "moto_api_backends"),
+    "opsworks": ("opsworks", "opsworks_backends"),
+    "organizations": ("organizations", "organizations_backends"),
+    "polly": ("polly", "polly_backends"),
+    "rds": ("rds2", "rds2_backends"),
+    "redshift": ("redshift", "redshift_backends"),
+    "resource-groups": ("resourcegroups", "resourcegroups_backends"),
+    "resourcegroupstaggingapi": (
+        "resourcegroupstaggingapi",
+        "resourcegroupstaggingapi_backends",
+    ),
+    "route53": ("route53", "route53_backends"),
+    "s3": ("s3", "s3_backends"),
+    "s3bucket_path": ("s3", "s3_backends"),
+    "secretsmanager": ("secretsmanager", "secretsmanager_backends"),
+    "ses": ("ses", "ses_backends"),
+    "sns": ("sns", "sns_backends"),
+    "sqs": ("sqs", "sqs_backends"),
+    "ssm": ("ssm", "ssm_backends"),
+    "stepfunctions": ("stepfunctions", "stepfunction_backends"),
+    "sts": ("sts", "sts_backends"),
+    "swf": ("swf", "swf_backends"),
+    "xray": ("xray", "xray_backends"),
 }
 
+
+def _import_backend(module_name, backends_name):
+    module = importlib.import_module("moto." + module_name)
+    return getattr(module, backends_name)
+
+
+def backends():
+    for module_name, backends_name in BACKENDS.values():
+        yield _import_backend(module_name, backends_name)
+
+
+def named_backends():
+    for name, (module_name, backends_name) in BACKENDS.items():
+        yield name, _import_backend(module_name, backends_name)
+
+
+def get_backend(name):
+    module_name, backends_name = BACKENDS[name]
+    return _import_backend(module_name, backends_name)
+
+
+def search_backend(predicate):
+    for name, backend in named_backends():
+        if predicate(backend):
+            return name
+
+
 def get_model(name, region_name):
-    for backends in BACKENDS.values():
-        for region, backend in backends.items():
+    for backends_ in backends():
+        for region, backend in backends_.items():
             if region == region_name:
                 models = getattr(backend.__class__, "__models__", {})
                 if name in models:
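
The tuple-valued BACKENDS mapping above applies the same deferral on the server side: a service module is imported only when one of the new helpers (backends(), named_backends(), get_backend()) actually asks for it. A small, self-contained sketch of that indirection, using a stdlib stand-in instead of a real moto module:

    import importlib

    # service name -> (module to import, attribute holding the backend object)
    BACKENDS = {"json-decoder": ("json", "JSONDecoder")}

    def _import_backend(module_name, attribute_name):
        module = importlib.import_module(module_name)
        return getattr(module, attribute_name)

    def get_backend(name):
        module_name, attribute_name = BACKENDS[name]
        return _import_backend(module_name, attribute_name)

    print(get_backend("json-decoder"))  # the json module is imported here, on first use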

View File

@@ -6,7 +6,6 @@ import yaml
 import os
 import string
-from cfnlint import decode, core
 
 from moto.core import ACCOUNT_ID
 
@@ -62,6 +61,8 @@ def yaml_tag_constructor(loader, tag, node):
 
 
 def validate_template_cfn_lint(template):
+    # Importing cfnlint adds a significant overhead, so we keep it local
+    from cfnlint import decode, core
+
     # Save the template to a temporary file -- cfn-lint requires a file
     filename = "file.tmp"

View File

@@ -384,7 +384,7 @@ LIST_METRICS_TEMPLATE = """<ListMetricsResponse xmlns="http://monitoring.amazona
                 </member>
                 {% endfor %}
             </Dimensions>
-            <MetricName>Metric:{{ metric.name }}</MetricName>
+            <MetricName>{{ metric.name }}</MetricName>
             <Namespace>{{ metric.namespace }}</Namespace>
         </member>
         {% endfor %}

View File

@@ -33,14 +33,15 @@ class BaseMockAWS(object):
     nested_count = 0
 
     def __init__(self, backends):
+        from moto.instance_metadata import instance_metadata_backend
+        from moto.core import moto_api_backend
+
         self.backends = backends
         self.backends_for_urls = {}
-        from moto.backends import BACKENDS
-
         default_backends = {
-            "instance_metadata": BACKENDS["instance_metadata"]["global"],
-            "moto_api": BACKENDS["moto_api"]["global"],
+            "instance_metadata": instance_metadata_backend,
+            "moto_api": moto_api_backend,
         }
         self.backends_for_urls.update(self.backends)
         self.backends_for_urls.update(default_backends)
@@ -721,12 +722,12 @@ class deprecated_base_decorator(base_decorator):
 
 class MotoAPIBackend(BaseBackend):
     def reset(self):
-        from moto.backends import BACKENDS
+        import moto.backends as backends
 
-        for name, backends in BACKENDS.items():
+        for name, backends_ in backends.named_backends():
             if name == "moto_api":
                 continue
-            for region_name, backend in backends.items():
+            for region_name, backend in backends_.items():
                 backend.reset()
         self.__init__()

View File

@@ -20,7 +20,6 @@ import six
 from six.moves.urllib.parse import parse_qs, urlparse
 
 import xmltodict
-from pkg_resources import resource_filename
 from werkzeug.exceptions import HTTPException
 
 import boto3
@@ -766,6 +765,9 @@ class AWSServiceSpec(object):
     """
 
     def __init__(self, path):
+        # Importing pkg_resources takes ~60ms; keep it local
+        from pkg_resources import resource_filename  # noqa
+
         self.path = resource_filename("botocore", path)
         with io.open(self.path, "r", encoding="utf-8") as f:
             spec = json.load(f)

View File

@@ -39,6 +39,17 @@ class AttributeDoesNotExist(MockValidationException):
         super(AttributeDoesNotExist, self).__init__(self.attr_does_not_exist_msg)
 
 
+class ProvidedKeyDoesNotExist(MockValidationException):
+    provided_key_does_not_exist_msg = (
+        "The provided key element does not match the schema"
+    )
+
+    def __init__(self):
+        super(ProvidedKeyDoesNotExist, self).__init__(
+            self.provided_key_does_not_exist_msg
+        )
+
+
 class ExpressionAttributeNameNotDefined(InvalidUpdateExpression):
     name_not_defined_msg = "An expression attribute name used in the document path is not defined; attribute name: {n}"
 
@@ -131,3 +142,10 @@ class IncorrectOperandType(InvalidUpdateExpression):
         super(IncorrectOperandType, self).__init__(
             self.inv_operand_msg.format(f=operator_or_function, t=operand_type)
         )
+
+
+class IncorrectDataType(MockValidationException):
+    inc_data_type_msg = "An operand in the update expression has an incorrect data type"
+
+    def __init__(self):
+        super(IncorrectDataType, self).__init__(self.inc_data_type_msg)

View File

@@ -8,7 +8,6 @@ import re
 import uuid
 
 from boto3 import Session
-from botocore.exceptions import ParamValidationError
 from moto.compat import OrderedDict
 from moto.core import BaseBackend, BaseModel
 from moto.core.utils import unix_time
@@ -20,8 +19,9 @@ from moto.dynamodb2.exceptions import (
     ItemSizeTooLarge,
     ItemSizeToUpdateTooLarge,
 )
-from moto.dynamodb2.models.utilities import bytesize, attribute_is_list
+from moto.dynamodb2.models.utilities import bytesize
 from moto.dynamodb2.models.dynamo_type import DynamoType
+from moto.dynamodb2.parsing.executors import UpdateExpressionExecutor
 from moto.dynamodb2.parsing.expressions import UpdateExpressionParser
 from moto.dynamodb2.parsing.validators import UpdateExpressionValidator
@@ -71,6 +71,17 @@ class Item(BaseModel):
             for key, value in attrs.items():
                 self.attrs[key] = DynamoType(value)
 
+    def __eq__(self, other):
+        return all(
+            [
+                self.hash_key == other.hash_key,
+                self.hash_key_type == other.hash_key_type,
+                self.range_key == other.range_key,
+                self.range_key_type == other.range_key_type,
+                self.attrs == other.attrs,
+            ]
+        )
+
     def __repr__(self):
         return "Item: {0}".format(self.to_json())
@@ -94,192 +105,6 @@
             included = self.attrs
         return {"Item": included}
def update(
self, update_expression, expression_attribute_names, expression_attribute_values
):
# Update subexpressions are identifiable by the operator keyword, so split on that and
# get rid of the empty leading string.
parts = [
p
for p in re.split(
r"\b(SET|REMOVE|ADD|DELETE)\b", update_expression, flags=re.I
)
if p
]
# make sure that we correctly found only operator/value pairs
assert (
len(parts) % 2 == 0
), "Mismatched operators and values in update expression: '{}'".format(
update_expression
)
for action, valstr in zip(parts[:-1:2], parts[1::2]):
action = action.upper()
# "Should" retain arguments in side (...)
values = re.split(r",(?![^(]*\))", valstr)
for value in values:
# A Real value
value = value.lstrip(":").rstrip(",").strip()
for k, v in expression_attribute_names.items():
value = re.sub(r"{0}\b".format(k), v, value)
if action == "REMOVE":
key = value
attr, list_index = attribute_is_list(key.split(".")[0])
if "." not in key:
if list_index:
new_list = DynamoType(self.attrs[attr])
new_list.delete(None, list_index)
self.attrs[attr] = new_list
else:
self.attrs.pop(value, None)
else:
# Handle nested dict updates
self.attrs[attr].delete(".".join(key.split(".")[1:]))
elif action == "SET":
key, value = value.split("=", 1)
key = key.strip()
value = value.strip()
# check whether key is a list
attr, list_index = attribute_is_list(key.split(".")[0])
# If value not exists, changes value to a default if needed, else its the same as it was
value = self._get_default(value)
# If operation == list_append, get the original value and append it
value = self._get_appended_list(value, expression_attribute_values)
if type(value) != DynamoType:
if value in expression_attribute_values:
dyn_value = DynamoType(expression_attribute_values[value])
else:
dyn_value = DynamoType({"S": value})
else:
dyn_value = value
if "." in key and attr not in self.attrs:
raise ValueError # Setting nested attr not allowed if first attr does not exist yet
elif attr not in self.attrs:
try:
self.attrs[attr] = dyn_value # set new top-level attribute
except ItemSizeTooLarge:
raise ItemSizeToUpdateTooLarge()
else:
self.attrs[attr].set(
".".join(key.split(".")[1:]), dyn_value, list_index
) # set value recursively
elif action == "ADD":
key, value = value.split(" ", 1)
key = key.strip()
value_str = value.strip()
if value_str in expression_attribute_values:
dyn_value = DynamoType(expression_attribute_values[value])
else:
raise TypeError
# Handle adding numbers - value gets added to existing value,
# or added to 0 if it doesn't exist yet
if dyn_value.is_number():
existing = self.attrs.get(key, DynamoType({"N": "0"}))
if not existing.same_type(dyn_value):
raise TypeError()
self.attrs[key] = DynamoType(
{
"N": str(
decimal.Decimal(existing.value)
+ decimal.Decimal(dyn_value.value)
)
}
)
# Handle adding sets - value is added to the set, or set is
# created with only this value if it doesn't exist yet
# New value must be of same set type as previous value
elif dyn_value.is_set():
key_head = key.split(".")[0]
key_tail = ".".join(key.split(".")[1:])
if key_head not in self.attrs:
self.attrs[key_head] = DynamoType({dyn_value.type: {}})
existing = self.attrs.get(key_head)
existing = existing.get(key_tail)
if existing.value and not existing.same_type(dyn_value):
raise TypeError()
new_set = set(existing.value or []).union(dyn_value.value)
existing.set(
key=None,
new_value=DynamoType({dyn_value.type: list(new_set)}),
)
else: # Number and Sets are the only supported types for ADD
raise TypeError
elif action == "DELETE":
key, value = value.split(" ", 1)
key = key.strip()
value_str = value.strip()
if value_str in expression_attribute_values:
dyn_value = DynamoType(expression_attribute_values[value])
else:
raise TypeError
if not dyn_value.is_set():
raise TypeError
key_head = key.split(".")[0]
key_tail = ".".join(key.split(".")[1:])
existing = self.attrs.get(key_head)
existing = existing.get(key_tail)
if existing:
if not existing.same_type(dyn_value):
raise TypeError
new_set = set(existing.value).difference(dyn_value.value)
existing.set(
key=None,
new_value=DynamoType({existing.type: list(new_set)}),
)
else:
raise NotImplementedError(
"{} update action not yet supported".format(action)
)
def _get_appended_list(self, value, expression_attribute_values):
if type(value) != DynamoType:
list_append_re = re.match("list_append\\((.+),(.+)\\)", value)
if list_append_re:
new_value = expression_attribute_values[list_append_re.group(2).strip()]
old_list_key = list_append_re.group(1)
# old_key could be a function itself (if_not_exists)
if old_list_key.startswith("if_not_exists"):
old_list = self._get_default(old_list_key)
if not isinstance(old_list, DynamoType):
old_list = DynamoType(expression_attribute_values[old_list])
else:
old_list = self.attrs[old_list_key.split(".")[0]]
if "." in old_list_key:
# Value is nested inside a map - find the appropriate child attr
old_list = old_list.child_attr(
".".join(old_list_key.split(".")[1:])
)
if not old_list.is_list():
raise ParamValidationError
old_list.value.extend([DynamoType(v) for v in new_value["L"]])
value = old_list
return value
def _get_default(self, value):
if value.startswith("if_not_exists"):
# Function signature
match = re.match(
r".*if_not_exists\s*\((?P<path>.+),\s*(?P<default>.+)\).*", value
)
if not match:
raise TypeError
path, value = match.groups()
# If it already exists, get its value so we dont overwrite it
if path in self.attrs:
value = self.attrs[path]
return value
     def update_with_attribute_updates(self, attribute_updates):
         for attribute_name, update_action in attribute_updates.items():
             action = update_action["Action"]
@@ -1209,9 +1034,9 @@ class DynamoDBBackend(BaseBackend):
         table_name,
         key,
         update_expression,
-        attribute_updates,
         expression_attribute_names,
         expression_attribute_values,
+        attribute_updates=None,
         expected=None,
         condition_expression=None,
     ):
@@ -1266,17 +1091,18 @@
         item = table.get_item(hash_value, range_value)
         if update_expression:
-            UpdateExpressionValidator(
+            validated_ast = UpdateExpressionValidator(
                 update_expression_ast,
                 expression_attribute_names=expression_attribute_names,
                 expression_attribute_values=expression_attribute_values,
                 item=item,
             ).validate()
-            item.update(
-                update_expression,
-                expression_attribute_names,
-                expression_attribute_values,
-            )
+            try:
+                UpdateExpressionExecutor(
+                    validated_ast, item, expression_attribute_names
+                ).execute()
+            except ItemSizeTooLarge:
+                raise ItemSizeToUpdateTooLarge()
         else:
             item.update_with_attribute_updates(attribute_updates)
         if table.stream_shard is not None:
@@ -1332,6 +1158,94 @@
         return table.ttl
def transact_write_items(self, transact_items):
# Create a backup in case any of the transactions fail
original_table_state = copy.deepcopy(self.tables)
try:
for item in transact_items:
if "ConditionCheck" in item:
item = item["ConditionCheck"]
key = item["Key"]
table_name = item["TableName"]
condition_expression = item.get("ConditionExpression", None)
expression_attribute_names = item.get(
"ExpressionAttributeNames", None
)
expression_attribute_values = item.get(
"ExpressionAttributeValues", None
)
current = self.get_item(table_name, key)
condition_op = get_filter_expression(
condition_expression,
expression_attribute_names,
expression_attribute_values,
)
if not condition_op.expr(current):
raise ValueError("The conditional request failed")
elif "Put" in item:
item = item["Put"]
attrs = item["Item"]
table_name = item["TableName"]
condition_expression = item.get("ConditionExpression", None)
expression_attribute_names = item.get(
"ExpressionAttributeNames", None
)
expression_attribute_values = item.get(
"ExpressionAttributeValues", None
)
self.put_item(
table_name,
attrs,
condition_expression=condition_expression,
expression_attribute_names=expression_attribute_names,
expression_attribute_values=expression_attribute_values,
)
elif "Delete" in item:
item = item["Delete"]
key = item["Key"]
table_name = item["TableName"]
condition_expression = item.get("ConditionExpression", None)
expression_attribute_names = item.get(
"ExpressionAttributeNames", None
)
expression_attribute_values = item.get(
"ExpressionAttributeValues", None
)
self.delete_item(
table_name,
key,
condition_expression=condition_expression,
expression_attribute_names=expression_attribute_names,
expression_attribute_values=expression_attribute_values,
)
elif "Update" in item:
item = item["Update"]
key = item["Key"]
table_name = item["TableName"]
update_expression = item["UpdateExpression"]
condition_expression = item.get("ConditionExpression", None)
expression_attribute_names = item.get(
"ExpressionAttributeNames", None
)
expression_attribute_values = item.get(
"ExpressionAttributeValues", None
)
self.update_item(
table_name,
key,
update_expression=update_expression,
condition_expression=condition_expression,
expression_attribute_names=expression_attribute_names,
expression_attribute_values=expression_attribute_values,
)
else:
raise ValueError
except: # noqa: E722 Do not use bare except
# Rollback to the original state, and reraise the error
self.tables = original_table_state
raise
 dynamodb_backends = {}
 for region in Session().get_available_regions("dynamodb"):

View File

@@ -1,10 +1,53 @@
 import six
 
 from moto.dynamodb2.comparisons import get_comparison_func
-from moto.dynamodb2.exceptions import InvalidUpdateExpression
+from moto.dynamodb2.exceptions import InvalidUpdateExpression, IncorrectDataType
 from moto.dynamodb2.models.utilities import attribute_is_list, bytesize
 
 
+class DDBType(object):
+    """
+    Official documentation at https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_AttributeValue.html
+    """
+
+    BINARY_SET = "BS"
+    NUMBER_SET = "NS"
+    STRING_SET = "SS"
+    STRING = "S"
+    NUMBER = "N"
+    MAP = "M"
+    LIST = "L"
+    BOOLEAN = "BOOL"
+    BINARY = "B"
+    NULL = "NULL"
+
+
+class DDBTypeConversion(object):
+    _human_type_mapping = {
+        val: key.replace("_", " ")
+        for key, val in DDBType.__dict__.items()
+        if key.upper() == key
+    }
+
+    @classmethod
+    def get_human_type(cls, abbreviated_type):
+        """
+        Args:
+            abbreviated_type(str): An attribute of DDBType
+
+        Returns:
+            str: The human readable form of the DDBType.
+        """
+        try:
+            human_type_str = cls._human_type_mapping[abbreviated_type]
+        except KeyError:
+            raise ValueError(
+                "Invalid abbreviated_type {at}".format(at=abbreviated_type)
+            )
+
+        return human_type_str
+
+
 class DynamoType(object):
     """
     http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html#DataModelDataTypes
@@ -50,13 +93,22 @@ class DynamoType(object):
                 self.value = new_value.value
             else:
                 if attr not in self.value:  # nonexistingattribute
-                    type_of_new_attr = "M" if "." in key else new_value.type
+                    type_of_new_attr = DDBType.MAP if "." in key else new_value.type
                     self.value[attr] = DynamoType({type_of_new_attr: {}})
                 # {'M': {'foo': DynamoType}} ==> DynamoType.set(new_value)
                 self.value[attr].set(
                     ".".join(key.split(".")[1:]), new_value, list_index
                 )
 
+    def __contains__(self, item):
+        if self.type == DDBType.STRING:
+            return False
+        try:
+            self.__getitem__(item)
+            return True
+        except KeyError:
+            return False
+
     def delete(self, key, index=None):
         if index:
             if not key:
@@ -126,27 +178,35 @@
     def __add__(self, other):
         if self.type != other.type:
             raise TypeError("Different types of operandi is not allowed.")
-        if self.type == "N":
-            return DynamoType({"N": "{v}".format(v=int(self.value) + int(other.value))})
+        if self.is_number():
+            self_value = float(self.value) if "." in self.value else int(self.value)
+            other_value = float(other.value) if "." in other.value else int(other.value)
+            return DynamoType(
+                {DDBType.NUMBER: "{v}".format(v=self_value + other_value)}
+            )
         else:
-            raise TypeError("Sum only supported for Numbers.")
+            raise IncorrectDataType()
 
     def __sub__(self, other):
         if self.type != other.type:
             raise TypeError("Different types of operandi is not allowed.")
-        if self.type == "N":
-            return DynamoType({"N": "{v}".format(v=int(self.value) - int(other.value))})
+        if self.type == DDBType.NUMBER:
+            self_value = float(self.value) if "." in self.value else int(self.value)
+            other_value = float(other.value) if "." in other.value else int(other.value)
+            return DynamoType(
+                {DDBType.NUMBER: "{v}".format(v=self_value - other_value)}
+            )
         else:
             raise TypeError("Sum only supported for Numbers.")
 
     def __getitem__(self, item):
         if isinstance(item, six.string_types):
             # If our DynamoType is a map it should be subscriptable with a key
-            if self.type == "M":
+            if self.type == DDBType.MAP:
                 return self.value[item]
         elif isinstance(item, int):
             # If our DynamoType is a list is should be subscriptable with an index
-            if self.type == "L":
+            if self.type == DDBType.LIST:
                 return self.value[item]
         raise TypeError(
             "This DynamoType {dt} is not subscriptable by a {it}".format(
@@ -154,6 +214,20 @@
             )
         )
 
+    def __setitem__(self, key, value):
+        if isinstance(key, int):
+            if self.is_list():
+                if key >= len(self.value):
+                    # DynamoDB doesn't care you are out of box just add it to the end.
+                    self.value.append(value)
+                else:
+                    self.value[key] = value
+        elif isinstance(key, six.string_types):
+            if self.is_map():
+                self.value[key] = value
+        else:
+            raise NotImplementedError("No set_item for {t}".format(t=type(key)))
+
     @property
     def cast_value(self):
         if self.is_number():
@@ -222,16 +296,22 @@
         return comparison_func(self.cast_value, *range_values)
 
     def is_number(self):
-        return self.type == "N"
+        return self.type == DDBType.NUMBER
 
     def is_set(self):
-        return self.type == "SS" or self.type == "NS" or self.type == "BS"
+        return self.type in (DDBType.STRING_SET, DDBType.NUMBER_SET, DDBType.BINARY_SET)
 
     def is_list(self):
-        return self.type == "L"
+        return self.type == DDBType.LIST
 
     def is_map(self):
-        return self.type == "M"
+        return self.type == DDBType.MAP
 
     def same_type(self, other):
         return self.type == other.type
+
+    def pop(self, key, *args, **kwargs):
+        if self.is_map() or self.is_list():
+            self.value.pop(key, *args, **kwargs)
+        else:
+            raise TypeError("pop not supported for DynamoType {t}".format(t=self.type))

View File

@@ -0,0 +1,262 @@
from abc import abstractmethod
from moto.dynamodb2.exceptions import IncorrectOperandType, IncorrectDataType
from moto.dynamodb2.models import DynamoType
from moto.dynamodb2.models.dynamo_type import DDBTypeConversion, DDBType
from moto.dynamodb2.parsing.ast_nodes import (
UpdateExpressionSetAction,
UpdateExpressionDeleteAction,
UpdateExpressionRemoveAction,
UpdateExpressionAddAction,
UpdateExpressionPath,
DDBTypedValue,
ExpressionAttribute,
ExpressionSelector,
ExpressionAttributeName,
)
from moto.dynamodb2.parsing.validators import ExpressionPathResolver
class NodeExecutor(object):
def __init__(self, ast_node, expression_attribute_names):
self.node = ast_node
self.expression_attribute_names = expression_attribute_names
@abstractmethod
def execute(self, item):
pass
def get_item_part_for_path_nodes(self, item, path_nodes):
"""
For a list of path nodes travers the item by following the path_nodes
Args:
item(Item):
path_nodes(list):
Returns:
"""
if len(path_nodes) == 0:
return item.attrs
else:
return ExpressionPathResolver(
self.expression_attribute_names
).resolve_expression_path_nodes_to_dynamo_type(item, path_nodes)
def get_item_before_end_of_path(self, item):
"""
Get the part ot the item where the item will perform the action. For most actions this should be the parent. As
that element will need to be modified by the action.
Args:
item(Item):
Returns:
DynamoType or dict: The path to be set
"""
return self.get_item_part_for_path_nodes(
item, self.get_path_expression_nodes()[:-1]
)
def get_item_at_end_of_path(self, item):
"""
For a DELETE the path points at the stringset so we need to evaluate the full path.
Args:
item(Item):
Returns:
DynamoType or dict: The path to be set
"""
return self.get_item_part_for_path_nodes(item, self.get_path_expression_nodes())
# Get the part ot the item where the item will perform the action. For most actions this should be the parent. As
# that element will need to be modified by the action.
get_item_part_in_which_to_perform_action = get_item_before_end_of_path
def get_path_expression_nodes(self):
update_expression_path = self.node.children[0]
assert isinstance(update_expression_path, UpdateExpressionPath)
return update_expression_path.children
def get_element_to_action(self):
return self.get_path_expression_nodes()[-1]
def get_action_value(self):
"""
Returns:
DynamoType: The value to be set
"""
ddb_typed_value = self.node.children[1]
assert isinstance(ddb_typed_value, DDBTypedValue)
dynamo_type_value = ddb_typed_value.children[0]
assert isinstance(dynamo_type_value, DynamoType)
return dynamo_type_value
class SetExecutor(NodeExecutor):
def execute(self, item):
self.set(
item_part_to_modify_with_set=self.get_item_part_in_which_to_perform_action(
item
),
element_to_set=self.get_element_to_action(),
value_to_set=self.get_action_value(),
expression_attribute_names=self.expression_attribute_names,
)
@classmethod
def set(
cls,
item_part_to_modify_with_set,
element_to_set,
value_to_set,
expression_attribute_names,
):
if isinstance(element_to_set, ExpressionAttribute):
attribute_name = element_to_set.get_attribute_name()
item_part_to_modify_with_set[attribute_name] = value_to_set
elif isinstance(element_to_set, ExpressionSelector):
index = element_to_set.get_index()
item_part_to_modify_with_set[index] = value_to_set
elif isinstance(element_to_set, ExpressionAttributeName):
attribute_name = expression_attribute_names[
element_to_set.get_attribute_name_placeholder()
]
item_part_to_modify_with_set[attribute_name] = value_to_set
else:
raise NotImplementedError(
"Moto does not support setting {t} yet".format(t=type(element_to_set))
)
class DeleteExecutor(NodeExecutor):
operator = "operator: DELETE"
def execute(self, item):
string_set_to_remove = self.get_action_value()
assert isinstance(string_set_to_remove, DynamoType)
if not string_set_to_remove.is_set():
raise IncorrectOperandType(
self.operator,
DDBTypeConversion.get_human_type(string_set_to_remove.type),
)
string_set = self.get_item_at_end_of_path(item)
assert isinstance(string_set, DynamoType)
if string_set.type != string_set_to_remove.type:
raise IncorrectDataType()
# String set is currently implemented as a list
string_set_list = string_set.value
stringset_to_remove_list = string_set_to_remove.value
for value in stringset_to_remove_list:
try:
string_set_list.remove(value)
except (KeyError, ValueError):
# DynamoDB does not mind if value is not present
pass
class RemoveExecutor(NodeExecutor):
def execute(self, item):
element_to_remove = self.get_element_to_action()
if isinstance(element_to_remove, ExpressionAttribute):
attribute_name = element_to_remove.get_attribute_name()
self.get_item_part_in_which_to_perform_action(item).pop(
attribute_name, None
)
elif isinstance(element_to_remove, ExpressionAttributeName):
attribute_name = self.expression_attribute_names[
element_to_remove.get_attribute_name_placeholder()
]
self.get_item_part_in_which_to_perform_action(item).pop(
attribute_name, None
)
elif isinstance(element_to_remove, ExpressionSelector):
index = element_to_remove.get_index()
try:
self.get_item_part_in_which_to_perform_action(item).pop(index)
except IndexError:
# DynamoDB does not care that index is out of bounds, it will just do nothing.
pass
else:
raise NotImplementedError(
"Moto does not support setting {t} yet".format(
t=type(element_to_remove)
)
)
class AddExecutor(NodeExecutor):
def execute(self, item):
value_to_add = self.get_action_value()
if isinstance(value_to_add, DynamoType):
if value_to_add.is_set():
current_string_set = self.get_item_at_end_of_path(item)
assert isinstance(current_string_set, DynamoType)
if not current_string_set.type == value_to_add.type:
raise IncorrectDataType()
# Sets are implemented as list
for value in value_to_add.value:
if value in current_string_set.value:
continue
else:
current_string_set.value.append(value)
elif value_to_add.type == DDBType.NUMBER:
existing_value = self.get_item_at_end_of_path(item)
assert isinstance(existing_value, DynamoType)
if not existing_value.type == DDBType.NUMBER:
raise IncorrectDataType()
new_value = existing_value + value_to_add
SetExecutor.set(
item_part_to_modify_with_set=self.get_item_before_end_of_path(item),
element_to_set=self.get_element_to_action(),
value_to_set=new_value,
expression_attribute_names=self.expression_attribute_names,
)
else:
raise IncorrectDataType()
class UpdateExpressionExecutor(object):
execution_map = {
UpdateExpressionSetAction: SetExecutor,
UpdateExpressionAddAction: AddExecutor,
UpdateExpressionRemoveAction: RemoveExecutor,
UpdateExpressionDeleteAction: DeleteExecutor,
}
def __init__(self, update_ast, item, expression_attribute_names):
self.update_ast = update_ast
self.item = item
self.expression_attribute_names = expression_attribute_names
def execute(self, node=None):
"""
As explained in moto.dynamodb2.parsing.expressions.NestableExpressionParserMixin._create_node the order of nodes
in the AST can be translated of the order of statements in the expression. As such we can start at the root node
and process the nodes 1-by-1. If no specific execution for the node type is defined we can execute the children
in order since it will be a container node that is expandable and left child will be first in the statement.
Args:
node(Node):
Returns:
None
"""
if node is None:
node = self.update_ast
node_executor = self.get_specific_execution(node)
if node_executor is None:
for node in node.children:
self.execute(node)
else:
node_executor(node, self.expression_attribute_names).execute(self.item)
def get_specific_execution(self, node):
for node_class in self.execution_map:
if isinstance(node, node_class):
return self.execution_map[node_class]
return None

View File

@@ -11,6 +11,7 @@ from moto.dynamodb2.exceptions import (
     ExpressionAttributeNameNotDefined,
     IncorrectOperandType,
     InvalidUpdateExpressionInvalidDocumentPath,
+    ProvidedKeyDoesNotExist,
 )
 from moto.dynamodb2.models import DynamoType
 from moto.dynamodb2.parsing.ast_nodes import (
@@ -56,6 +57,76 @@ class ExpressionAttributeValueProcessor(DepthFirstTraverser):
         return DDBTypedValue(DynamoType(target))
class ExpressionPathResolver(object):
def __init__(self, expression_attribute_names):
self.expression_attribute_names = expression_attribute_names
@classmethod
def raise_exception_if_keyword(cls, attribute):
if attribute.upper() in ReservedKeywords.get_reserved_keywords():
raise AttributeIsReservedKeyword(attribute)
def resolve_expression_path(self, item, update_expression_path):
assert isinstance(update_expression_path, UpdateExpressionPath)
return self.resolve_expression_path_nodes(item, update_expression_path.children)
def resolve_expression_path_nodes(self, item, update_expression_path_nodes):
target = item.attrs
for child in update_expression_path_nodes:
# First replace placeholder with attribute_name
attr_name = None
if isinstance(child, ExpressionAttributeName):
attr_placeholder = child.get_attribute_name_placeholder()
try:
attr_name = self.expression_attribute_names[attr_placeholder]
except KeyError:
raise ExpressionAttributeNameNotDefined(attr_placeholder)
elif isinstance(child, ExpressionAttribute):
attr_name = child.get_attribute_name()
self.raise_exception_if_keyword(attr_name)
if attr_name is not None:
# Resolv attribute_name
try:
target = target[attr_name]
except (KeyError, TypeError):
if child == update_expression_path_nodes[-1]:
return NoneExistingPath(creatable=True)
return NoneExistingPath()
else:
if isinstance(child, ExpressionPathDescender):
continue
elif isinstance(child, ExpressionSelector):
index = child.get_index()
if target.is_list():
try:
target = target[index]
except IndexError:
# When a list goes out of bounds when assigning that is no problem when at the assignment
# side. It will just append to the list.
if child == update_expression_path_nodes[-1]:
return NoneExistingPath(creatable=True)
return NoneExistingPath()
else:
raise InvalidUpdateExpressionInvalidDocumentPath
else:
raise NotImplementedError(
"Path resolution for {t}".format(t=type(child))
)
if not isinstance(target, DynamoType):
print(target)
return DDBTypedValue(target)
def resolve_expression_path_nodes_to_dynamo_type(
self, item, update_expression_path_nodes
):
node = self.resolve_expression_path_nodes(item, update_expression_path_nodes)
if isinstance(node, NoneExistingPath):
raise ProvidedKeyDoesNotExist()
assert isinstance(node, DDBTypedValue)
return node.get_value()
 class ExpressionAttributeResolvingProcessor(DepthFirstTraverser):
     def _processing_map(self):
         return {
@@ -107,55 +178,9 @@ class ExpressionAttributeResolvingProcessor(DepthFirstTraverser):
         return node
 
     def resolve_expression_path(self, node):
-        assert isinstance(node, UpdateExpressionPath)
-
-        target = deepcopy(self.item.attrs)
-        for child in node.children:
-            # First replace placeholder with attribute_name
-            attr_name = None
-            if isinstance(child, ExpressionAttributeName):
-                attr_placeholder = child.get_attribute_name_placeholder()
-                try:
-                    attr_name = self.expression_attribute_names[attr_placeholder]
-                except KeyError:
-                    raise ExpressionAttributeNameNotDefined(attr_placeholder)
-            elif isinstance(child, ExpressionAttribute):
-                attr_name = child.get_attribute_name()
-                self.raise_exception_if_keyword(attr_name)
-            if attr_name is not None:
-                # Resolv attribute_name
-                try:
-                    target = target[attr_name]
-                except (KeyError, TypeError):
-                    if child == node.children[-1]:
-                        return NoneExistingPath(creatable=True)
-                    return NoneExistingPath()
-            else:
-                if isinstance(child, ExpressionPathDescender):
-                    continue
-                elif isinstance(child, ExpressionSelector):
-                    index = child.get_index()
-                    if target.is_list():
-                        try:
-                            target = target[index]
-                        except IndexError:
-                            # When a list goes out of bounds when assigning that is no problem when at the assignment
-                            # side. It will just append to the list.
-                            if child == node.children[-1]:
-                                return NoneExistingPath(creatable=True)
-                            return NoneExistingPath()
-                    else:
-                        raise InvalidUpdateExpressionInvalidDocumentPath
-                else:
-                    raise NotImplementedError(
-                        "Path resolution for {t}".format(t=type(child))
-                    )
-        return DDBTypedValue(DynamoType(target))
-
-    @classmethod
-    def raise_exception_if_keyword(cls, attribute):
-        if attribute.upper() in ReservedKeywords.get_reserved_keywords():
-            raise AttributeIsReservedKeyword(attribute)
+        return ExpressionPathResolver(
+            self.expression_attribute_names
+        ).resolve_expression_path(self.item, node)
 
 
 class UpdateExpressionFunctionEvaluator(DepthFirstTraverser):
@@ -183,7 +208,9 @@ class UpdateExpressionFunctionEvaluator(DepthFirstTraverser):
             assert isinstance(result, (DDBTypedValue, NoneExistingPath))
             return result
         elif function_name == "list_append":
-            first_arg = self.get_list_from_ddb_typed_value(first_arg, function_name)
+            first_arg = deepcopy(
+                self.get_list_from_ddb_typed_value(first_arg, function_name)
+            )
             second_arg = self.get_list_from_ddb_typed_value(second_arg, function_name)
             for list_element in second_arg.value:
                 first_arg.value.append(list_element)

View File

@ -762,12 +762,12 @@ class DynamoHandler(BaseResponse):
item = self.dynamodb_backend.update_item( item = self.dynamodb_backend.update_item(
name, name,
key, key,
update_expression, update_expression=update_expression,
attribute_updates, attribute_updates=attribute_updates,
expression_attribute_names, expression_attribute_names=expression_attribute_names,
expression_attribute_values, expression_attribute_values=expression_attribute_values,
expected, expected=expected,
condition_expression, condition_expression=condition_expression,
) )
except MockValidationException as mve: except MockValidationException as mve:
er = "com.amazonaws.dynamodb.v20111205#ValidationException" er = "com.amazonaws.dynamodb.v20111205#ValidationException"
@ -924,3 +924,15 @@ class DynamoHandler(BaseResponse):
result.update({"ConsumedCapacity": [v for v in consumed_capacity.values()]}) result.update({"ConsumedCapacity": [v for v in consumed_capacity.values()]})
return dynamo_json_dump(result) return dynamo_json_dump(result)
def transact_write_items(self):
transact_items = self.body["TransactItems"]
try:
self.dynamodb_backend.transact_write_items(transact_items)
except ValueError:
er = "com.amazonaws.dynamodb.v20111205#ConditionalCheckFailedException"
return self.error(
er, "A condition specified in the operation could not be evaluated."
)
response = {"ConsumedCapacity": [], "ItemCollectionMetrics": {}}
return dynamo_json_dump(response)

View File

@ -1503,9 +1503,10 @@ class AmiBackend(object):
class Region(object): class Region(object):
def __init__(self, name, endpoint): def __init__(self, name, endpoint, opt_in_status):
self.name = name self.name = name
self.endpoint = endpoint self.endpoint = endpoint
self.opt_in_status = opt_in_status
class Zone(object): class Zone(object):
@ -1516,13 +1517,49 @@ class Zone(object):
class RegionsAndZonesBackend(object): class RegionsAndZonesBackend(object):
regions_opt_in_not_required = [
"af-south-1",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
"ap-south-1",
"ap-southeast-1",
"ap-southeast-2",
"ca-central-1",
"eu-central-1",
"eu-north-1",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"sa-east-1",
"us-east-1",
"us-east-2",
"us-west-1",
"us-west-2",
]
regions = [] regions = []
for region in Session().get_available_regions("ec2"): for region in Session().get_available_regions("ec2"):
regions.append(Region(region, "ec2.{}.amazonaws.com".format(region))) if region in regions_opt_in_not_required:
regions.append(
Region(
region, "ec2.{}.amazonaws.com".format(region), "opt-in-not-required"
)
)
else:
regions.append(
Region(region, "ec2.{}.amazonaws.com".format(region), "not-opted-in")
)
for region in Session().get_available_regions("ec2", partition_name="aws-us-gov"): for region in Session().get_available_regions("ec2", partition_name="aws-us-gov"):
regions.append(Region(region, "ec2.{}.amazonaws.com".format(region))) regions.append(
Region(region, "ec2.{}.amazonaws.com".format(region), "opt-in-not-required")
)
for region in Session().get_available_regions("ec2", partition_name="aws-cn"): for region in Session().get_available_regions("ec2", partition_name="aws-cn"):
regions.append(Region(region, "ec2.{}.amazonaws.com.cn".format(region))) regions.append(
Region(
region, "ec2.{}.amazonaws.com.cn".format(region), "opt-in-not-required"
)
)
zones = { zones = {
"af-south-1": [ "af-south-1": [

View File

@ -22,6 +22,7 @@ DESCRIBE_REGIONS_RESPONSE = """<DescribeRegionsResponse xmlns="http://ec2.amazon
<item> <item>
<regionName>{{ region.name }}</regionName> <regionName>{{ region.name }}</regionName>
<regionEndpoint>{{ region.endpoint }}</regionEndpoint> <regionEndpoint>{{ region.endpoint }}</regionEndpoint>
<optInStatus>{{ region.opt_in_status }}</optInStatus>
</item> </item>
{% endfor %} {% endfor %}
</regionInfo> </regionInfo>

View File

@ -10,8 +10,6 @@ import six
from cryptography.hazmat.primitives import serialization from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.backends import default_backend from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa from cryptography.hazmat.primitives.asymmetric import rsa
import sshpubkeys.exceptions
from sshpubkeys.keys import SSHKey
EC2_RESOURCE_TO_PREFIX = { EC2_RESOURCE_TO_PREFIX = {
@ -544,6 +542,10 @@ def generate_instance_identity_document(instance):
def rsa_public_key_parse(key_material): def rsa_public_key_parse(key_material):
# These imports take ~.5s; let's keep them local
import sshpubkeys.exceptions
from sshpubkeys.keys import SSHKey
try: try:
if not isinstance(key_material, six.binary_type): if not isinstance(key_material, six.binary_type):
key_material = key_material.encode("ascii") key_material = key_material.encode("ascii")
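The comment above explains the change: sshpubkeys is slow to import, so it is now loaded inside the only function that needs it. A minimal sketch of the same deferred-import pattern, assuming the sshpubkeys SSHKey API (parse() and the bits attribute) behaves as the surrounding code expects:

import time


def public_key_bits(key_material):
    # Deferred import: sshpubkeys is only loaded on the first call, so code paths
    # that never touch key pairs do not pay the ~0.5s import cost up front.
    started = time.time()
    from sshpubkeys.keys import SSHKey

    key = SSHKey(key_material)
    key.parse()
    print("import + parse took {:.3f}s".format(time.time() - started))
    return key.bits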

View File

@ -1,4 +1,4 @@
import boto import boto.ec2
from moto.core import BaseBackend from moto.core import BaseBackend

View File

@ -288,6 +288,7 @@ class AWSManagedPolicy(ManagedPolicy):
# AWS defines some of its own managed policies and we periodically # AWS defines some of its own managed policies and we periodically
# import them via `make aws_managed_policies` # import them via `make aws_managed_policies`
# FIXME: Takes about 40ms at import time
aws_managed_policies = [ aws_managed_policies = [
AWSManagedPolicy.from_data(name, d) AWSManagedPolicy.from_data(name, d)
for name, d in json.loads(aws_managed_policies_data).items() for name, d in json.loads(aws_managed_policies_data).items()
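One way to act on that FIXME (a sketch only, not part of this change) would be to defer the JSON parse until the policies are first needed, reusing the module's own AWSManagedPolicy.from_data and aws_managed_policies_data:

_aws_managed_policies_cache = None


def get_aws_managed_policies():
    # Parse the bundled policy JSON lazily and cache the result, instead of
    # paying the ~40ms during "import moto".
    global _aws_managed_policies_cache
    if _aws_managed_policies_cache is None:
        _aws_managed_policies_cache = [
            AWSManagedPolicy.from_data(name, d)
            for name, d in json.loads(aws_managed_policies_data).items()
        ]
    return _aws_managed_policies_cache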

View File

@ -8,7 +8,6 @@ from collections import defaultdict
from boto3 import Session from boto3 import Session
from jinja2 import Template from jinja2 import Template
from re import compile as re_compile from re import compile as re_compile
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
from moto.compat import OrderedDict from moto.compat import OrderedDict
from moto.core import BaseBackend, BaseModel from moto.core import BaseBackend, BaseModel
from moto.core.utils import get_random_hex from moto.core.utils import get_random_hex
@ -308,6 +307,9 @@ class Database(BaseModel):
setattr(self, key, value) setattr(self, key, value)
def get_cfn_attribute(self, attribute_name): def get_cfn_attribute(self, attribute_name):
# Local import to avoid circular dependency with cloudformation.parsing
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
if attribute_name == "Endpoint.Address": if attribute_name == "Endpoint.Address":
return self.address return self.address
elif attribute_name == "Endpoint.Port": elif attribute_name == "Endpoint.Port":

View File

@ -22,7 +22,7 @@ import six
from bisect import insort from bisect import insort
from moto.core import ACCOUNT_ID, BaseBackend, BaseModel from moto.core import ACCOUNT_ID, BaseBackend, BaseModel
from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime
from moto.cloudwatch.models import metric_providers, MetricDatum from moto.cloudwatch.models import MetricDatum
from moto.utilities.tagging_service import TaggingService from moto.utilities.tagging_service import TaggingService
from .exceptions import ( from .exceptions import (
BucketAlreadyExists, BucketAlreadyExists,
@ -1159,9 +1159,11 @@ class S3Backend(BaseBackend):
self.account_public_access_block = None self.account_public_access_block = None
self.tagger = TaggingService() self.tagger = TaggingService()
# TODO: This is broken! DO NOT IMPORT MUTABLE DATA TYPES FROM OTHER AREAS -- THIS BREAKS UNMOCKING!
# WRAP WITH A GETTER/SETTER FUNCTION
# Register this class as a CloudWatch Metric Provider # Register this class as a CloudWatch Metric Provider
# Must provide a method 'get_cloudwatch_metrics' that will return a list of metrics, based on the data available # Must provide a method 'get_cloudwatch_metrics' that will return a list of metrics, based on the data available
metric_providers["S3"] = self # metric_providers["S3"] = self
def get_cloudwatch_metrics(self): def get_cloudwatch_metrics(self):
metrics = [] metrics = []
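The commented-out registration points at the underlying problem: mutating a dict imported from another module leaks state across unmocking. One possible shape for the getter/setter the TODO asks for, sketched here as an assumption rather than the eventual fix:

_metric_providers = {}


def register_metric_provider(name, provider):
    # Registration goes through a function so backends never hold a shared
    # reference to the raw dict; unmocking can then reset the registry cleanly.
    _metric_providers[name] = provider


def get_metric_providers():
    return dict(_metric_providers)


def reset_metric_providers():
    _metric_providers.clear()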

View File

@ -838,27 +838,35 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
def _bucket_response_delete_keys(self, request, body, bucket_name): def _bucket_response_delete_keys(self, request, body, bucket_name):
template = self.response_template(S3_DELETE_KEYS_RESPONSE) template = self.response_template(S3_DELETE_KEYS_RESPONSE)
body_dict = xmltodict.parse(body)
keys = minidom.parseString(body).getElementsByTagName("Key") objects = body_dict["Delete"].get("Object", [])
deleted_names = [] if not isinstance(objects, list):
error_names = [] # We expect a list of objects, but when there is a single <Object> node xmltodict does not
if len(keys) == 0: # return a list.
objects = [objects]
if len(objects) == 0:
raise MalformedXML() raise MalformedXML()
for k in keys: deleted_objects = []
key_name = k.firstChild.nodeValue error_names = []
for object_ in objects:
key_name = object_["Key"]
version_id = object_.get("VersionId", None)
success = self.backend.delete_key( success = self.backend.delete_key(
bucket_name, undo_clean_key_name(key_name) bucket_name, undo_clean_key_name(key_name), version_id=version_id
) )
if success: if success:
deleted_names.append(key_name) deleted_objects.append((key_name, version_id))
else: else:
error_names.append(key_name) error_names.append(key_name)
return ( return (
200, 200,
{}, {},
template.render(deleted=deleted_names, delete_errors=error_names), template.render(deleted=deleted_objects, delete_errors=error_names),
) )
def _handle_range_header(self, request, headers, response_content): def _handle_range_header(self, request, headers, response_content):
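The normalisation above is needed because xmltodict only produces a list when an element is repeated. A standalone illustration of that quirk:

import xmltodict

single = xmltodict.parse("<Delete><Object><Key>a</Key></Object></Delete>")
multi = xmltodict.parse(
    "<Delete><Object><Key>a</Key></Object><Object><Key>b</Key></Object></Delete>"
)

print(type(single["Delete"]["Object"]))  # a lone <Object> comes back as a dict
print(type(multi["Delete"]["Object"]))   # repeated <Object> nodes come back as a list

objects = single["Delete"]["Object"]
if not isinstance(objects, list):  # normalise, as the handler does
    objects = [objects]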
@ -1852,9 +1860,10 @@ S3_BUCKET_GET_VERSIONS = """<?xml version="1.0" encoding="UTF-8"?>
S3_DELETE_KEYS_RESPONSE = """<?xml version="1.0" encoding="UTF-8"?> S3_DELETE_KEYS_RESPONSE = """<?xml version="1.0" encoding="UTF-8"?>
<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01"> <DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01">
{% for k in deleted %} {% for k, v in deleted %}
<Deleted> <Deleted>
<Key>{{k}}</Key> <Key>{{k}}</Key>
{% if v %}<VersionId>{{v}}</VersionId>{% endif %}
</Deleted> </Deleted>
{% endfor %} {% endfor %}
{% for k in delete_errors %} {% for k in delete_errors %}

View File

@ -121,8 +121,16 @@ class SecretsManagerBackend(BaseBackend):
"You can't perform this operation on the secret because it was marked for deletion." "You can't perform this operation on the secret because it was marked for deletion."
) )
secret = self.secrets[secret_id]
tags = secret["tags"]
description = secret["description"]
version_id = self._add_secret( version_id = self._add_secret(
secret_id, secret_string=secret_string, secret_binary=secret_binary secret_id,
secret_string=secret_string,
secret_binary=secret_binary,
description=description,
tags=tags,
) )
response = json.dumps( response = json.dumps(
@ -136,7 +144,13 @@ class SecretsManagerBackend(BaseBackend):
return response return response
def create_secret( def create_secret(
self, name, secret_string=None, secret_binary=None, tags=[], **kwargs self,
name,
secret_string=None,
secret_binary=None,
description=None,
tags=[],
**kwargs
): ):
# error if secret exists # error if secret exists
@ -146,7 +160,11 @@ class SecretsManagerBackend(BaseBackend):
) )
version_id = self._add_secret( version_id = self._add_secret(
name, secret_string=secret_string, secret_binary=secret_binary, tags=tags name,
secret_string=secret_string,
secret_binary=secret_binary,
description=description,
tags=tags,
) )
response = json.dumps( response = json.dumps(
@ -164,6 +182,7 @@ class SecretsManagerBackend(BaseBackend):
secret_id, secret_id,
secret_string=None, secret_string=None,
secret_binary=None, secret_binary=None,
description=None,
tags=[], tags=[],
version_id=None, version_id=None,
version_stages=None, version_stages=None,
@ -216,13 +235,27 @@ class SecretsManagerBackend(BaseBackend):
secret["rotation_lambda_arn"] = "" secret["rotation_lambda_arn"] = ""
secret["auto_rotate_after_days"] = 0 secret["auto_rotate_after_days"] = 0
secret["tags"] = tags secret["tags"] = tags
secret["description"] = description
return version_id return version_id
def put_secret_value(self, secret_id, secret_string, secret_binary, version_stages): def put_secret_value(self, secret_id, secret_string, secret_binary, version_stages):
if secret_id in self.secrets.keys():
secret = self.secrets[secret_id]
tags = secret["tags"]
description = secret["description"]
else:
tags = []
description = ""
version_id = self._add_secret( version_id = self._add_secret(
secret_id, secret_string, secret_binary, version_stages=version_stages secret_id,
secret_string,
secret_binary,
description=description,
tags=tags,
version_stages=version_stages,
) )
response = json.dumps( response = json.dumps(
@ -246,7 +279,7 @@ class SecretsManagerBackend(BaseBackend):
{ {
"ARN": secret_arn(self.region, secret["secret_id"]), "ARN": secret_arn(self.region, secret["secret_id"]),
"Name": secret["name"], "Name": secret["name"],
"Description": "", "Description": secret.get("description", ""),
"KmsKeyId": "", "KmsKeyId": "",
"RotationEnabled": secret["rotation_enabled"], "RotationEnabled": secret["rotation_enabled"],
"RotationLambdaARN": secret["rotation_lambda_arn"], "RotationLambdaARN": secret["rotation_lambda_arn"],
@ -310,6 +343,7 @@ class SecretsManagerBackend(BaseBackend):
self._add_secret( self._add_secret(
secret_id, secret_id,
old_secret_version["secret_string"], old_secret_version["secret_string"],
secret["description"],
secret["tags"], secret["tags"],
version_id=new_version_id, version_id=new_version_id,
version_stages=["AWSCURRENT"], version_stages=["AWSCURRENT"],
@ -416,7 +450,7 @@ class SecretsManagerBackend(BaseBackend):
{ {
"ARN": secret_arn(self.region, secret["secret_id"]), "ARN": secret_arn(self.region, secret["secret_id"]),
"DeletedDate": secret.get("deleted_date", None), "DeletedDate": secret.get("deleted_date", None),
"Description": "", "Description": secret.get("description", ""),
"KmsKeyId": "", "KmsKeyId": "",
"LastAccessedDate": None, "LastAccessedDate": None,
"LastChangedDate": None, "LastChangedDate": None,

View File

@ -21,11 +21,13 @@ class SecretsManagerResponse(BaseResponse):
name = self._get_param("Name") name = self._get_param("Name")
secret_string = self._get_param("SecretString") secret_string = self._get_param("SecretString")
secret_binary = self._get_param("SecretBinary") secret_binary = self._get_param("SecretBinary")
description = self._get_param("Description", if_none="")
tags = self._get_param("Tags", if_none=[]) tags = self._get_param("Tags", if_none=[])
return secretsmanager_backends[self.region].create_secret( return secretsmanager_backends[self.region].create_secret(
name=name, name=name,
secret_string=secret_string, secret_string=secret_string,
secret_binary=secret_binary, secret_binary=secret_binary,
description=description,
tags=tags, tags=tags,
) )

View File

@ -1,6 +1,7 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import argparse import argparse
import io
import json import json
import re import re
import sys import sys
@ -14,7 +15,7 @@ from six.moves.urllib.parse import urlencode
from werkzeug.routing import BaseConverter from werkzeug.routing import BaseConverter
from werkzeug.serving import run_simple from werkzeug.serving import run_simple
from moto.backends import BACKENDS import moto.backends as backends
from moto.core.utils import convert_flask_to_httpretty_response from moto.core.utils import convert_flask_to_httpretty_response
@ -29,6 +30,7 @@ UNSIGNED_REQUESTS = {
"AWSCognitoIdentityService": ("cognito-identity", "us-east-1"), "AWSCognitoIdentityService": ("cognito-identity", "us-east-1"),
"AWSCognitoIdentityProviderService": ("cognito-idp", "us-east-1"), "AWSCognitoIdentityProviderService": ("cognito-idp", "us-east-1"),
} }
UNSIGNED_ACTIONS = {"AssumeRoleWithSAML": ("sts", "us-east-1")}
class DomainDispatcherApplication(object): class DomainDispatcherApplication(object):
@ -50,13 +52,15 @@ class DomainDispatcherApplication(object):
if self.service: if self.service:
return self.service return self.service
if host in BACKENDS: if host in backends.BACKENDS:
return host return host
for backend_name, backend in BACKENDS.items(): return backends.search_backend(
for url_base in list(backend.values())[0].url_bases: lambda backend: any(
if re.match(url_base, "http://%s" % host): re.match(url_base, "http://%s" % host)
return backend_name for url_base in list(backend.values())[0].url_bases
)
)
def infer_service_region_host(self, environ): def infer_service_region_host(self, environ):
auth = environ.get("HTTP_AUTHORIZATION") auth = environ.get("HTTP_AUTHORIZATION")
@ -77,9 +81,13 @@ class DomainDispatcherApplication(object):
else: else:
# Unsigned request # Unsigned request
target = environ.get("HTTP_X_AMZ_TARGET") target = environ.get("HTTP_X_AMZ_TARGET")
action = self.get_action_from_body(environ)
if target: if target:
service, _ = target.split(".", 1) service, _ = target.split(".", 1)
service, region = UNSIGNED_REQUESTS.get(service, DEFAULT_SERVICE_REGION) service, region = UNSIGNED_REQUESTS.get(service, DEFAULT_SERVICE_REGION)
elif action and action in UNSIGNED_ACTIONS:
# See if we can match the Action to a known service
service, region = UNSIGNED_ACTIONS.get(action)
else: else:
# S3 is the last resort when the target is also unknown # S3 is the last resort when the target is also unknown
service, region = DEFAULT_SERVICE_REGION service, region = DEFAULT_SERVICE_REGION
@ -130,6 +138,26 @@ class DomainDispatcherApplication(object):
self.app_instances[backend] = app self.app_instances[backend] = app
return app return app
def get_action_from_body(self, environ):
body = None
try:
# AWS requests use querystrings as the body (Action=x&Data=y&...)
simple_form = environ["CONTENT_TYPE"].startswith(
"application/x-www-form-urlencoded"
)
request_body_size = int(environ["CONTENT_LENGTH"])
if simple_form and request_body_size:
body = environ["wsgi.input"].read(request_body_size).decode("utf-8")
body_dict = dict(x.split("=") for x in body.split("&"))
return body_dict["Action"]
except (KeyError, ValueError):
pass
finally:
if body:
                # We've consumed the body, so we need to reset it
environ["wsgi.input"] = io.StringIO(body)
return None
def __call__(self, environ, start_response): def __call__(self, environ, start_response):
backend_app = self.get_application(environ) backend_app = self.get_application(environ)
return backend_app(environ, start_response) return backend_app(environ, start_response)
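For unsigned requests that carry no X-Amz-Target header, the dispatcher now peeks at the form-encoded body to recover the Action. A small illustration of that lookup; the body below is made up, real ones come from the SDK:

body = "Action=AssumeRoleWithSAML&Version=2011-06-15&RoleArn=arn%3Aaws%3Aiam%3A%3A123456789012%3Arole%2Ftest"

# Same parsing the dispatcher uses: treat the body as a querystring of key=value pairs
body_dict = dict(pair.split("=") for pair in body.split("&"))
print(body_dict["Action"])  # AssumeRoleWithSAML, resolved to ("sts", "us-east-1") via UNSIGNED_ACTIONS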
@ -178,7 +206,7 @@ def create_backend_app(service):
backend_app.view_functions = {} backend_app.view_functions = {}
backend_app.url_map = Map() backend_app.url_map = Map()
backend_app.url_map.converters["regex"] = RegexConverter backend_app.url_map.converters["regex"] = RegexConverter
backend = list(BACKENDS[service].values())[0] backend = list(backends.get_backend(service).values())[0]
for url_path, handler in backend.flask_paths.items(): for url_path, handler in backend.flask_paths.items():
view_func = convert_flask_to_httpretty_response(handler) view_func = convert_flask_to_httpretty_response(handler)
if handler.__name__ == "dispatch": if handler.__name__ == "dispatch":

View File

@ -1,5 +1,7 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from base64 import b64decode
import datetime import datetime
import xmltodict
from moto.core import BaseBackend, BaseModel from moto.core import BaseBackend, BaseModel
from moto.core.utils import iso_8601_datetime_with_milliseconds from moto.core.utils import iso_8601_datetime_with_milliseconds
from moto.core import ACCOUNT_ID from moto.core import ACCOUNT_ID
@ -79,5 +81,24 @@ class STSBackend(BaseBackend):
def assume_role_with_web_identity(self, **kwargs): def assume_role_with_web_identity(self, **kwargs):
return self.assume_role(**kwargs) return self.assume_role(**kwargs)
def assume_role_with_saml(self, **kwargs):
del kwargs["principal_arn"]
saml_assertion_encoded = kwargs.pop("saml_assertion")
saml_assertion_decoded = b64decode(saml_assertion_encoded)
saml_assertion = xmltodict.parse(saml_assertion_decoded.decode("utf-8"))
kwargs["duration"] = int(
saml_assertion["samlp:Response"]["Assertion"]["AttributeStatement"][
"Attribute"
][2]["AttributeValue"]
)
kwargs["role_session_name"] = saml_assertion["samlp:Response"]["Assertion"][
"AttributeStatement"
]["Attribute"][0]["AttributeValue"]
kwargs["external_id"] = None
kwargs["policy"] = None
role = AssumedRole(**kwargs)
self.assumed_roles.append(role)
return role
sts_backend = STSBackend() sts_backend = STSBackend()
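The backend reads the session name and duration straight out of the decoded assertion via xmltodict. A minimal, hand-rolled assertion (not a real IdP response) showing the paths it relies on; the attribute order mirrors what the backend assumes, with the session name first and the duration third:

from base64 import b64decode, b64encode

import xmltodict

assertion = """<samlp:Response xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol">
  <Assertion>
    <AttributeStatement>
      <Attribute Name="https://aws.amazon.com/SAML/Attributes/RoleSessionName">
        <AttributeValue>my-session</AttributeValue>
      </Attribute>
      <Attribute Name="https://aws.amazon.com/SAML/Attributes/Role">
        <AttributeValue>arn:aws:iam::123456789012:role/test,arn:aws:iam::123456789012:saml-provider/test</AttributeValue>
      </Attribute>
      <Attribute Name="https://aws.amazon.com/SAML/Attributes/SessionDuration">
        <AttributeValue>900</AttributeValue>
      </Attribute>
    </AttributeStatement>
  </Assertion>
</samlp:Response>"""

encoded = b64encode(assertion.encode("utf-8"))  # what a client would send as SAMLAssertion
parsed = xmltodict.parse(b64decode(encoded).decode("utf-8"))
attributes = parsed["samlp:Response"]["Assertion"]["AttributeStatement"]["Attribute"]
print(attributes[0]["AttributeValue"])       # my-session  -> role_session_name
print(int(attributes[2]["AttributeValue"]))  # 900         -> duration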

View File

@ -71,6 +71,19 @@ class TokenResponse(BaseResponse):
template = self.response_template(ASSUME_ROLE_WITH_WEB_IDENTITY_RESPONSE) template = self.response_template(ASSUME_ROLE_WITH_WEB_IDENTITY_RESPONSE)
return template.render(role=role) return template.render(role=role)
def assume_role_with_saml(self):
role_arn = self.querystring.get("RoleArn")[0]
principal_arn = self.querystring.get("PrincipalArn")[0]
saml_assertion = self.querystring.get("SAMLAssertion")[0]
role = sts_backend.assume_role_with_saml(
role_arn=role_arn,
principal_arn=principal_arn,
saml_assertion=saml_assertion,
)
template = self.response_template(ASSUME_ROLE_WITH_SAML_RESPONSE)
return template.render(role=role)
def get_caller_identity(self): def get_caller_identity(self):
template = self.response_template(GET_CALLER_IDENTITY_RESPONSE) template = self.response_template(GET_CALLER_IDENTITY_RESPONSE)
@ -168,6 +181,30 @@ ASSUME_ROLE_WITH_WEB_IDENTITY_RESPONSE = """<AssumeRoleWithWebIdentityResponse x
</AssumeRoleWithWebIdentityResponse>""" </AssumeRoleWithWebIdentityResponse>"""
ASSUME_ROLE_WITH_SAML_RESPONSE = """<AssumeRoleWithSAMLResponse xmlns="https://sts.amazonaws.com/doc/2011-06-15/">
<AssumeRoleWithSAMLResult>
<Audience>https://signin.aws.amazon.com/saml</Audience>
<AssumedRoleUser>
<AssumedRoleId>{{ role.user_id }}</AssumedRoleId>
<Arn>{{ role.arn }}</Arn>
</AssumedRoleUser>
<Credentials>
<AccessKeyId>{{ role.access_key_id }}</AccessKeyId>
<SecretAccessKey>{{ role.secret_access_key }}</SecretAccessKey>
<SessionToken>{{ role.session_token }}</SessionToken>
<Expiration>{{ role.expiration_ISO8601 }}</Expiration>
</Credentials>
<Subject>{{ role.user_id }}</Subject>
<NameQualifier>B64EncodedStringOfHashOfIssuerAccountIdAndUserId=</NameQualifier>
<SubjectType>persistent</SubjectType>
<Issuer>http://localhost:3000/</Issuer>
</AssumeRoleWithSAMLResult>
<ResponseMetadata>
<RequestId>c6104cbe-af31-11e0-8154-cbc7ccf896c7</RequestId>
</ResponseMetadata>
</AssumeRoleWithSAMLResponse>"""
GET_CALLER_IDENTITY_RESPONSE = """<GetCallerIdentityResponse xmlns="https://sts.amazonaws.com/doc/2011-06-15/"> GET_CALLER_IDENTITY_RESPONSE = """<GetCallerIdentityResponse xmlns="https://sts.amazonaws.com/doc/2011-06-15/">
<GetCallerIdentityResult> <GetCallerIdentityResult>
<Arn>{{ arn }}</Arn> <Arn>{{ arn }}</Arn>

View File

@ -101,5 +101,4 @@ setup(
project_urls={ project_urls={
"Documentation": "http://docs.getmoto.org/en/latest/", "Documentation": "http://docs.getmoto.org/en/latest/",
}, },
data_files=[('', ['moto/dynamodb2/parsing/reserved_keywords.txt'])],
) )

View File

@ -1797,6 +1797,14 @@ def test_usage_plans():
response = client.get_usage_plans() response = client.get_usage_plans()
len(response["items"]).should.equal(0) len(response["items"]).should.equal(0)
    # Try to get info about a non-existing usage plan
with assert_raises(ClientError) as ex:
client.get_usage_plan(usagePlanId="not_existing")
ex.exception.response["Error"]["Code"].should.equal("NotFoundException")
ex.exception.response["Error"]["Message"].should.equal(
"Invalid Usage Plan ID specified"
)
usage_plan_name = "TEST-PLAN" usage_plan_name = "TEST-PLAN"
payload = {"name": usage_plan_name} payload = {"name": usage_plan_name}
response = client.create_usage_plan(**payload) response = client.create_usage_plan(**payload)
@ -1879,6 +1887,30 @@ def test_usage_plan_keys():
response = client.get_usage_plan_keys(usagePlanId=usage_plan_id) response = client.get_usage_plan_keys(usagePlanId=usage_plan_id)
len(response["items"]).should.equal(0) len(response["items"]).should.equal(0)
# Try to get info about a non existing api key
with assert_raises(ClientError) as ex:
client.get_usage_plan_key(usagePlanId=usage_plan_id, keyId="not_existing_key")
ex.exception.response["Error"]["Code"].should.equal("NotFoundException")
ex.exception.response["Error"]["Message"].should.equal(
"Invalid API Key identifier specified"
)
    # Try to get info about an existing api key that has not yet been added to a valid usage plan
with assert_raises(ClientError) as ex:
client.get_usage_plan_key(usagePlanId=usage_plan_id, keyId=key_id)
ex.exception.response["Error"]["Code"].should.equal("NotFoundException")
ex.exception.response["Error"]["Message"].should.equal(
"Invalid Usage Plan ID specified"
)
    # Try to get info about an existing api key through a non-existing usage plan
with assert_raises(ClientError) as ex:
client.get_usage_plan_key(usagePlanId="not_existing_plan_id", keyId=key_id)
ex.exception.response["Error"]["Code"].should.equal("NotFoundException")
ex.exception.response["Error"]["Message"].should.equal(
"Invalid Usage Plan ID specified"
)
@mock_apigateway @mock_apigateway
def test_create_usage_plan_key_non_existent_api_key(): def test_create_usage_plan_key_non_existent_api_key():

View File

@ -39,6 +39,10 @@ def test_usage_plans_apis():
fetched_plan = json.loads(res.data) fetched_plan = json.loads(res.data)
fetched_plan.should.equal(created_plan) fetched_plan.should.equal(created_plan)
    # Non-existing usage plan
res = test_client.get("/usageplans/{0}".format("not_existing"))
res.status_code.should.equal(404)
# Delete usage plan # Delete usage plan
res = test_client.delete("/usageplans/{0}".format(created_plan["id"])) res = test_client.delete("/usageplans/{0}".format(created_plan["id"]))
res.data.should.equal(b"{}") res.data.should.equal(b"{}")
@ -61,6 +65,24 @@ def test_usage_plans_keys():
res = test_client.get("/usageplans/{0}/keys".format(usage_plan_id)) res = test_client.get("/usageplans/{0}/keys".format(usage_plan_id))
json.loads(res.data)["item"].should.have.length_of(0) json.loads(res.data)["item"].should.have.length_of(0)
    # Invalid api key (does not exist at all)
res = test_client.get(
"/usageplans/{0}/keys/{1}".format(usage_plan_id, "not_existing")
)
res.status_code.should.equal(404)
    # Non-existing usage plan with an existing api key
res = test_client.get(
"/usageplans/{0}/keys/{1}".format("not_existing", created_api_key["id"])
)
res.status_code.should.equal(404)
    # Existing api key that has not yet been added to the usage plan
res = test_client.get(
"/usageplans/{0}/keys/{1}".format(usage_plan_id, created_api_key["id"])
)
res.status_code.should.equal(404)
# Create usage plan key # Create usage plan key
res = test_client.post( res = test_client.post(
"/usageplans/{0}/keys".format(usage_plan_id), "/usageplans/{0}/keys".format(usage_plan_id),

View File

@ -88,7 +88,7 @@ def test_put_metric_data():
metric_names.should.have(1) metric_names.should.have(1)
metric = metrics[0] metric = metrics[0]
metric.namespace.should.equal("tester") metric.namespace.should.equal("tester")
metric.name.should.equal("Metric:metric") metric.name.should.equal("metric")
dict(metric.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]}) dict(metric.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]})
@ -157,33 +157,34 @@ def test_get_metric_statistics():
datapoint.should.have.key("Timestamp").which.should.equal(metric_timestamp) datapoint.should.have.key("Timestamp").which.should.equal(metric_timestamp)
@mock_s3_deprecated # TODO: THIS IS CURRENTLY BROKEN!
@mock_cloudwatch_deprecated # @mock_s3_deprecated
def test_cloudwatch_return_s3_metrics(): # @mock_cloudwatch_deprecated
# def test_cloudwatch_return_s3_metrics():
region = "us-east-1" #
# region = "us-east-1"
cw = boto.ec2.cloudwatch.connect_to_region(region) #
s3 = boto.s3.connect_to_region(region) # cw = boto.ec2.cloudwatch.connect_to_region(region)
# s3 = boto.s3.connect_to_region(region)
bucket_name_1 = "test-bucket-1" #
bucket_name_2 = "test-bucket-2" # bucket_name_1 = "test-bucket-1"
# bucket_name_2 = "test-bucket-2"
bucket1 = s3.create_bucket(bucket_name=bucket_name_1) #
key = Key(bucket1) # bucket1 = s3.create_bucket(bucket_name=bucket_name_1)
key.key = "the-key" # key = Key(bucket1)
key.set_contents_from_string("foobar" * 4) # key.key = "the-key"
s3.create_bucket(bucket_name=bucket_name_2) # key.set_contents_from_string("foobar" * 4)
# s3.create_bucket(bucket_name=bucket_name_2)
metrics_s3_bucket_1 = cw.list_metrics(dimensions={"BucketName": bucket_name_1}) #
# Verify that the OOTB S3 metrics are available for the created buckets # metrics_s3_bucket_1 = cw.list_metrics(dimensions={"BucketName": bucket_name_1})
len(metrics_s3_bucket_1).should.be(2) # # Verify that the OOTB S3 metrics are available for the created buckets
metric_names = [m.name for m in metrics_s3_bucket_1] # len(metrics_s3_bucket_1).should.be(2)
sorted(metric_names).should.equal( # metric_names = [m.name for m in metrics_s3_bucket_1]
["Metric:BucketSizeBytes", "Metric:NumberOfObjects"] # sorted(metric_names).should.equal(
) # ["Metric:BucketSizeBytes", "Metric:NumberOfObjects"]
# )
# Explicit clean up - the metrics for these buckets are messing with subsequent tests #
key.delete() # # Explicit clean up - the metrics for these buckets are messing with subsequent tests
s3.delete_bucket(bucket_name_1) # key.delete()
s3.delete_bucket(bucket_name_2) # s3.delete_bucket(bucket_name_1)
# s3.delete_bucket(bucket_name_2)

View File

@ -155,7 +155,7 @@ def test_put_metric_data_no_dimensions():
metrics.should.have.length_of(1) metrics.should.have.length_of(1)
metric = metrics[0] metric = metrics[0]
metric["Namespace"].should.equal("tester") metric["Namespace"].should.equal("tester")
metric["MetricName"].should.equal("Metric:metric") metric["MetricName"].should.equal("metric")
@mock_cloudwatch @mock_cloudwatch
@ -183,7 +183,7 @@ def test_put_metric_data_with_statistics():
metrics.should.have.length_of(1) metrics.should.have.length_of(1)
metric = metrics[0] metric = metrics[0]
metric["Namespace"].should.equal("tester") metric["Namespace"].should.equal("tester")
metric["MetricName"].should.equal("Metric:statmetric") metric["MetricName"].should.equal("statmetric")
# TODO: test statistics - https://github.com/spulec/moto/issues/1615 # TODO: test statistics - https://github.com/spulec/moto/issues/1615
@ -266,12 +266,12 @@ def test_list_metrics():
{ {
u"Namespace": "list_test_1/", u"Namespace": "list_test_1/",
u"Dimensions": [], u"Dimensions": [],
u"MetricName": "Metric:metric1", u"MetricName": "metric1",
}, },
{ {
u"Namespace": "list_test_1/", u"Namespace": "list_test_1/",
u"Dimensions": [], u"Dimensions": [],
u"MetricName": "Metric:metric1", u"MetricName": "metric1",
}, },
] ]
) )

View File

@ -1,21 +1,17 @@
from __future__ import unicode_literals, print_function from __future__ import unicode_literals, print_function
import re
from decimal import Decimal from decimal import Decimal
import six
import boto import boto
import boto3 import boto3
from boto3.dynamodb.conditions import Attr, Key from boto3.dynamodb.conditions import Attr, Key
import re import re
import requests
import sure # noqa import sure # noqa
from moto import mock_dynamodb2, mock_dynamodb2_deprecated from moto import mock_dynamodb2, mock_dynamodb2_deprecated
from moto.dynamodb2 import dynamodb_backend2, dynamodb_backends2 from moto.dynamodb2 import dynamodb_backend2, dynamodb_backends2
from boto.exception import JSONResponseError from boto.exception import JSONResponseError
from botocore.exceptions import ClientError, ParamValidationError from botocore.exceptions import ClientError, ParamValidationError
from tests.helpers import requires_boto_gte from tests.helpers import requires_boto_gte
import tests.backport_assert_raises
import moto.dynamodb2.comparisons import moto.dynamodb2.comparisons
import moto.dynamodb2.models import moto.dynamodb2.models
@ -3221,6 +3217,25 @@ def test_remove_top_level_attribute():
result.should.equal({"id": {"S": "foo"}}) result.should.equal({"id": {"S": "foo"}})
@mock_dynamodb2
def test_remove_top_level_attribute_non_existent():
"""
    Remove statements do not require the attribute to exist; they silently pass
"""
table_name = "test_remove"
client = create_table_with_list(table_name)
ddb_item = {"id": {"S": "foo"}, "item": {"S": "bar"}}
client.put_item(TableName=table_name, Item=ddb_item)
client.update_item(
TableName=table_name,
Key={"id": {"S": "foo"}},
UpdateExpression="REMOVE non_existent_attribute",
ExpressionAttributeNames={"#i": "item"},
)
result = client.get_item(TableName=table_name, Key={"id": {"S": "foo"}})["Item"]
result.should.equal(ddb_item)
@mock_dynamodb2 @mock_dynamodb2
def test_remove_list_index__remove_existing_index(): def test_remove_list_index__remove_existing_index():
table_name = "test_list_index_access" table_name = "test_list_index_access"
@ -4219,6 +4234,358 @@ def test_gsi_verify_negative_number_order():
) )
@mock_dynamodb2
def test_transact_write_items_put():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema
)
# Put multiple items
dynamodb.transact_write_items(
TransactItems=[
{
"Put": {
"Item": {"id": {"S": "foo{}".format(str(i))}, "foo": {"S": "bar"},},
"TableName": "test-table",
}
}
for i in range(0, 5)
]
)
# Assert all are present
items = dynamodb.scan(TableName="test-table")["Items"]
items.should.have.length_of(5)
@mock_dynamodb2
def test_transact_write_items_put_conditional_expressions():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema
)
dynamodb.put_item(
TableName="test-table", Item={"id": {"S": "foo2"},},
)
# Put multiple items
with assert_raises(ClientError) as ex:
dynamodb.transact_write_items(
TransactItems=[
{
"Put": {
"Item": {
"id": {"S": "foo{}".format(str(i))},
"foo": {"S": "bar"},
},
"TableName": "test-table",
"ConditionExpression": "#i <> :i",
"ExpressionAttributeNames": {"#i": "id"},
"ExpressionAttributeValues": {
":i": {
"S": "foo2"
                            }  # This item already exists, so the ConditionExpression should fail
},
}
}
for i in range(0, 5)
]
)
# Assert the exception is correct
ex.exception.response["Error"]["Code"].should.equal(
"ConditionalCheckFailedException"
)
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
"A condition specified in the operation could not be evaluated."
)
# Assert all are present
items = dynamodb.scan(TableName="test-table")["Items"]
items.should.have.length_of(1)
items[0].should.equal({"id": {"S": "foo2"}})
@mock_dynamodb2
def test_transact_write_items_conditioncheck_passes():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema
)
# Insert an item without email address
dynamodb.put_item(
TableName="test-table", Item={"id": {"S": "foo"},},
)
# Put an email address, after verifying it doesn't exist yet
dynamodb.transact_write_items(
TransactItems=[
{
"ConditionCheck": {
"Key": {"id": {"S": "foo"}},
"TableName": "test-table",
"ConditionExpression": "attribute_not_exists(#e)",
"ExpressionAttributeNames": {"#e": "email_address"},
}
},
{
"Put": {
"Item": {
"id": {"S": "foo"},
"email_address": {"S": "test@moto.com"},
},
"TableName": "test-table",
}
},
]
)
# Assert all are present
items = dynamodb.scan(TableName="test-table")["Items"]
items.should.have.length_of(1)
items[0].should.equal({"email_address": {"S": "test@moto.com"}, "id": {"S": "foo"}})
@mock_dynamodb2
def test_transact_write_items_conditioncheck_fails():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema
)
# Insert an item with email address
dynamodb.put_item(
TableName="test-table",
Item={"id": {"S": "foo"}, "email_address": {"S": "test@moto.com"}},
)
# Try to put an email address, but verify whether it exists
# ConditionCheck should fail
with assert_raises(ClientError) as ex:
dynamodb.transact_write_items(
TransactItems=[
{
"ConditionCheck": {
"Key": {"id": {"S": "foo"}},
"TableName": "test-table",
"ConditionExpression": "attribute_not_exists(#e)",
"ExpressionAttributeNames": {"#e": "email_address"},
}
},
{
"Put": {
"Item": {
"id": {"S": "foo"},
"email_address": {"S": "update@moto.com"},
},
"TableName": "test-table",
}
},
]
)
# Assert the exception is correct
ex.exception.response["Error"]["Code"].should.equal(
"ConditionalCheckFailedException"
)
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
"A condition specified in the operation could not be evaluated."
)
# Assert the original email address is still present
items = dynamodb.scan(TableName="test-table")["Items"]
items.should.have.length_of(1)
items[0].should.equal({"email_address": {"S": "test@moto.com"}, "id": {"S": "foo"}})
@mock_dynamodb2
def test_transact_write_items_delete():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema
)
# Insert an item
dynamodb.put_item(
TableName="test-table", Item={"id": {"S": "foo"},},
)
# Delete the item
dynamodb.transact_write_items(
TransactItems=[
{"Delete": {"Key": {"id": {"S": "foo"}}, "TableName": "test-table",}}
]
)
# Assert the item is deleted
items = dynamodb.scan(TableName="test-table")["Items"]
items.should.have.length_of(0)
@mock_dynamodb2
def test_transact_write_items_delete_with_successful_condition_expression():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema
)
# Insert an item without email address
dynamodb.put_item(
TableName="test-table", Item={"id": {"S": "foo"},},
)
# ConditionExpression will pass - no email address has been specified yet
dynamodb.transact_write_items(
TransactItems=[
{
"Delete": {
"Key": {"id": {"S": "foo"},},
"TableName": "test-table",
"ConditionExpression": "attribute_not_exists(#e)",
"ExpressionAttributeNames": {"#e": "email_address"},
}
}
]
)
# Assert the item is deleted
items = dynamodb.scan(TableName="test-table")["Items"]
items.should.have.length_of(0)
@mock_dynamodb2
def test_transact_write_items_delete_with_failed_condition_expression():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema
)
# Insert an item with email address
dynamodb.put_item(
TableName="test-table",
Item={"id": {"S": "foo"}, "email_address": {"S": "test@moto.com"}},
)
# Try to delete an item that does not have an email address
# ConditionCheck should fail
with assert_raises(ClientError) as ex:
dynamodb.transact_write_items(
TransactItems=[
{
"Delete": {
"Key": {"id": {"S": "foo"},},
"TableName": "test-table",
"ConditionExpression": "attribute_not_exists(#e)",
"ExpressionAttributeNames": {"#e": "email_address"},
}
}
]
)
# Assert the exception is correct
ex.exception.response["Error"]["Code"].should.equal(
"ConditionalCheckFailedException"
)
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
"A condition specified in the operation could not be evaluated."
)
# Assert the original item is still present
items = dynamodb.scan(TableName="test-table")["Items"]
items.should.have.length_of(1)
items[0].should.equal({"email_address": {"S": "test@moto.com"}, "id": {"S": "foo"}})
@mock_dynamodb2
def test_transact_write_items_update():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema
)
# Insert an item
dynamodb.put_item(TableName="test-table", Item={"id": {"S": "foo"}})
# Update the item
dynamodb.transact_write_items(
TransactItems=[
{
"Update": {
"Key": {"id": {"S": "foo"}},
"TableName": "test-table",
"UpdateExpression": "SET #e = :v",
"ExpressionAttributeNames": {"#e": "email_address"},
"ExpressionAttributeValues": {":v": {"S": "test@moto.com"}},
}
}
]
)
# Assert the item is updated
items = dynamodb.scan(TableName="test-table")["Items"]
items.should.have.length_of(1)
items[0].should.equal({"id": {"S": "foo"}, "email_address": {"S": "test@moto.com"}})
@mock_dynamodb2
def test_transact_write_items_update_with_failed_condition_expression():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema
)
# Insert an item with email address
dynamodb.put_item(
TableName="test-table",
Item={"id": {"S": "foo"}, "email_address": {"S": "test@moto.com"}},
)
# Try to update an item that does not have an email address
# ConditionCheck should fail
with assert_raises(ClientError) as ex:
dynamodb.transact_write_items(
TransactItems=[
{
"Update": {
"Key": {"id": {"S": "foo"}},
"TableName": "test-table",
"UpdateExpression": "SET #e = :v",
"ConditionExpression": "attribute_not_exists(#e)",
"ExpressionAttributeNames": {"#e": "email_address"},
"ExpressionAttributeValues": {":v": {"S": "update@moto.com"}},
}
}
]
)
# Assert the exception is correct
ex.exception.response["Error"]["Code"].should.equal(
"ConditionalCheckFailedException"
)
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
"A condition specified in the operation could not be evaluated."
)
# Assert the original item is still present
items = dynamodb.scan(TableName="test-table")["Items"]
items.should.have.length_of(1)
items[0].should.equal({"email_address": {"S": "test@moto.com"}, "id": {"S": "foo"}})
@mock_dynamodb2 @mock_dynamodb2
def test_dynamodb_max_1mb_limit(): def test_dynamodb_max_1mb_limit():
ddb = boto3.resource("dynamodb", region_name="eu-west-1") ddb = boto3.resource("dynamodb", region_name="eu-west-1")
@ -4331,3 +4698,251 @@ def test_list_tables_exclusive_start_table_name_empty():
resp = client.list_tables(Limit=1, ExclusiveStartTableName="whatever") resp = client.list_tables(Limit=1, ExclusiveStartTableName="whatever")
len(resp["TableNames"]).should.equal(0) len(resp["TableNames"]).should.equal(0)
def assert_correct_client_error(
client_error, code, message_template, message_values=None, braces=None
):
"""
    Assert that a ClientError is as expected, allowing a list of values to appear in the message in any order.

    Args:
        client_error(ClientError): The ClientError exception that was raised
        code(str): The code for the error (e.g. ValidationException)
        message_template(str): Error message template. If message_values is not None, this template must contain a
            {values} placeholder. For example:
            'Value provided in ExpressionAttributeValues unused in expressions: keys: {values}'
        message_values(list of str|None): The values that are expected to appear in the error message
        braces(list of str|None): List of length 2 with the opening and closing brace around the values. By default
            the values are expected to be surrounded by curly brackets
"""
braces = braces or ["{", "}"]
assert client_error.response["Error"]["Code"] == code
if message_values is not None:
values_string = "{open_brace}(?P<values>.*){close_brace}".format(
open_brace=braces[0], close_brace=braces[1]
)
re_msg = re.compile(message_template.format(values=values_string))
match_result = re_msg.match(client_error.response["Error"]["Message"])
assert match_result is not None
values_string = match_result.groupdict()["values"]
        values = values_string.split(", ")
assert len(message_values) == len(values)
for value in message_values:
assert value in values
else:
assert client_error.response["Error"]["Message"] == message_template
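A quick usage example for the helper above, built around a hand-constructed ClientError so the order-insensitive matching of the values inside the braces is visible; the message text follows the template quoted in the docstring:

from botocore.exceptions import ClientError  # already imported at the top of this module

fake_error = ClientError(
    {
        "Error": {
            "Code": "ValidationException",
            "Message": "Value provided in ExpressionAttributeValues unused in expressions: keys: {:v2, :v1}",
        }
    },
    "UpdateItem",
)
# Passes even though :v2 and :v1 appear in a different order than in message_values
assert_correct_client_error(
    fake_error,
    "ValidationException",
    "Value provided in ExpressionAttributeValues unused in expressions: keys: {values}",
    message_values=[":v1", ":v2"],
)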
def create_simple_table_and_return_client():
dynamodb = boto3.client("dynamodb", region_name="eu-west-1")
dynamodb.create_table(
TableName="moto-test",
KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"},],
ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1},
)
dynamodb.put_item(
TableName="moto-test",
Item={"id": {"S": "1"}, "myNum": {"N": "1"}, "MyStr": {"S": "1"},},
)
return dynamodb
# https://github.com/spulec/moto/issues/2806
# https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateItem.html
# #DDB-UpdateItem-request-UpdateExpression
@mock_dynamodb2
def test_update_item_with_attribute_in_right_hand_side_and_operation():
dynamodb = create_simple_table_and_return_client()
dynamodb.update_item(
TableName="moto-test",
Key={"id": {"S": "1"}},
UpdateExpression="SET myNum = myNum+:val",
ExpressionAttributeValues={":val": {"N": "3"}},
)
result = dynamodb.get_item(TableName="moto-test", Key={"id": {"S": "1"}})
assert result["Item"]["myNum"]["N"] == "4"
dynamodb.update_item(
TableName="moto-test",
Key={"id": {"S": "1"}},
UpdateExpression="SET myNum = myNum - :val",
ExpressionAttributeValues={":val": {"N": "1"}},
)
result = dynamodb.get_item(TableName="moto-test", Key={"id": {"S": "1"}})
assert result["Item"]["myNum"]["N"] == "3"
@mock_dynamodb2
def test_non_existing_attribute_should_raise_exception():
"""
    Check that the correct error message is raised when the update expression references an attribute that does not exist on the item.
"""
dynamodb = create_simple_table_and_return_client()
try:
dynamodb.update_item(
TableName="moto-test",
Key={"id": {"S": "1"}},
UpdateExpression="SET MyStr = no_attr + MyStr",
)
assert False, "Validation exception not thrown"
except dynamodb.exceptions.ClientError as e:
assert_correct_client_error(
e,
"ValidationException",
"The provided expression refers to an attribute that does not exist in the item",
)
@mock_dynamodb2
def test_update_expression_with_plus_in_attribute_name():
"""
    Check that the correct error message is raised when an attribute name contains a plus and is passed in without using
    ExpressionAttributeNames, so that neither the lhs nor the rhs is an attribute identifier on its own.
"""
dynamodb = create_simple_table_and_return_client()
dynamodb.put_item(
TableName="moto-test",
Item={"id": {"S": "1"}, "my+Num": {"S": "1"}, "MyStr": {"S": "aaa"},},
)
try:
dynamodb.update_item(
TableName="moto-test",
Key={"id": {"S": "1"}},
UpdateExpression="SET MyStr = my+Num",
)
assert False, "Validation exception not thrown"
except dynamodb.exceptions.ClientError as e:
assert_correct_client_error(
e,
"ValidationException",
"The provided expression refers to an attribute that does not exist in the item",
)
@mock_dynamodb2
def test_update_expression_with_minus_in_attribute_name():
"""
    Check that the correct error message is raised when an attribute name contains a minus and is passed in without using
    ExpressionAttributeNames, so that neither the lhs nor the rhs is an attribute identifier on its own.
"""
dynamodb = create_simple_table_and_return_client()
dynamodb.put_item(
TableName="moto-test",
Item={"id": {"S": "1"}, "my-Num": {"S": "1"}, "MyStr": {"S": "aaa"},},
)
try:
dynamodb.update_item(
TableName="moto-test",
Key={"id": {"S": "1"}},
UpdateExpression="SET MyStr = my-Num",
)
assert False, "Validation exception not thrown"
except dynamodb.exceptions.ClientError as e:
assert_correct_client_error(
e,
"ValidationException",
"The provided expression refers to an attribute that does not exist in the item",
)
@mock_dynamodb2
def test_update_expression_with_space_in_attribute_name():
"""
    Check that an error is raised when an attribute name contains a space and is passed in without using
    ExpressionAttributeNames, so that neither the lhs nor the rhs is an attribute identifier on its own.
"""
dynamodb = create_simple_table_and_return_client()
dynamodb.put_item(
TableName="moto-test",
Item={"id": {"S": "1"}, "my Num": {"S": "1"}, "MyStr": {"S": "aaa"},},
)
try:
dynamodb.update_item(
TableName="moto-test",
Key={"id": {"S": "1"}},
UpdateExpression="SET MyStr = my Num",
)
assert False, "Validation exception not thrown"
except dynamodb.exceptions.ClientError as e:
assert_raise_syntax_error(e, "Num", "my Num")
@mock_dynamodb2
def test_summing_up_2_strings_raises_exception():
"""
    SET supports several DynamoDB types, but not every operation is valid for every type. Summing two strings, for
    example, raises a ClientError with code ValidationException and the message
    'An operand in the update expression has an incorrect data type'.
"""
dynamodb = create_simple_table_and_return_client()
try:
dynamodb.update_item(
TableName="moto-test",
Key={"id": {"S": "1"}},
UpdateExpression="SET MyStr = MyStr + MyStr",
)
assert False, "Validation exception not thrown"
except dynamodb.exceptions.ClientError as e:
assert_correct_client_error(
e,
"ValidationException",
"An operand in the update expression has an incorrect data type",
)
# https://github.com/spulec/moto/issues/2806
@mock_dynamodb2
def test_update_item_with_attribute_in_right_hand_side():
"""
    After tokenization and building of the expression, make sure referenced attributes are replaced with their current values
"""
dynamodb = create_simple_table_and_return_client()
# Make sure there are 2 values
dynamodb.put_item(
TableName="moto-test",
Item={"id": {"S": "1"}, "myVal1": {"S": "Value1"}, "myVal2": {"S": "Value2"}},
)
dynamodb.update_item(
TableName="moto-test",
Key={"id": {"S": "1"}},
UpdateExpression="SET myVal1 = myVal2",
)
result = dynamodb.get_item(TableName="moto-test", Key={"id": {"S": "1"}})
assert result["Item"]["myVal1"]["S"] == result["Item"]["myVal2"]["S"] == "Value2"
@mock_dynamodb2
def test_multiple_updates():
dynamodb = create_simple_table_and_return_client()
dynamodb.put_item(
TableName="moto-test",
Item={"id": {"S": "1"}, "myNum": {"N": "1"}, "path": {"N": "6"}},
)
dynamodb.update_item(
TableName="moto-test",
Key={"id": {"S": "1"}},
UpdateExpression="SET myNum = #p + :val, newAttr = myNum",
ExpressionAttributeValues={":val": {"N": "1"}},
ExpressionAttributeNames={"#p": "path"},
)
result = dynamodb.get_item(TableName="moto-test", Key={"id": {"S": "1"}})["Item"]
expected_result = {
"myNum": {"N": "7"},
"newAttr": {"N": "1"},
"path": {"N": "6"},
"id": {"S": "1"},
}
assert result == expected_result

View File

@ -0,0 +1,446 @@
from moto.dynamodb2.exceptions import IncorrectOperandType, IncorrectDataType
from moto.dynamodb2.models import Item, DynamoType
from moto.dynamodb2.parsing.executors import UpdateExpressionExecutor
from moto.dynamodb2.parsing.expressions import UpdateExpressionParser
from moto.dynamodb2.parsing.validators import UpdateExpressionValidator
from parameterized import parameterized
def test_execution_of_if_not_exists_not_existing_value():
update_expression = "SET a = if_not_exists(b, a)"
update_expression_ast = UpdateExpressionParser.make(update_expression)
item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "1"}, "a": {"S": "A"}},
)
validated_ast = UpdateExpressionValidator(
update_expression_ast,
expression_attribute_names=None,
expression_attribute_values=None,
item=item,
).validate()
UpdateExpressionExecutor(validated_ast, item, None).execute()
expected_item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "1"}, "a": {"S": "A"}},
)
assert expected_item == item
def test_execution_of_if_not_exists_with_existing_attribute_should_return_attribute():
update_expression = "SET a = if_not_exists(b, a)"
update_expression_ast = UpdateExpressionParser.make(update_expression)
item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "1"}, "a": {"S": "A"}, "b": {"S": "B"}},
)
validated_ast = UpdateExpressionValidator(
update_expression_ast,
expression_attribute_names=None,
expression_attribute_values=None,
item=item,
).validate()
UpdateExpressionExecutor(validated_ast, item, None).execute()
expected_item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "1"}, "a": {"S": "B"}, "b": {"S": "B"}},
)
assert expected_item == item
def test_execution_of_if_not_exists_with_existing_attribute_should_return_value():
update_expression = "SET a = if_not_exists(b, :val)"
update_expression_values = {":val": {"N": "4"}}
update_expression_ast = UpdateExpressionParser.make(update_expression)
item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "1"}, "b": {"N": "3"}},
)
validated_ast = UpdateExpressionValidator(
update_expression_ast,
expression_attribute_names=None,
expression_attribute_values=update_expression_values,
item=item,
).validate()
UpdateExpressionExecutor(validated_ast, item, None).execute()
expected_item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "1"}, "b": {"N": "3"}, "a": {"N": "3"}},
)
assert expected_item == item
def test_execution_of_if_not_exists_with_non_existing_attribute_should_return_value():
update_expression = "SET a = if_not_exists(b, :val)"
update_expression_values = {":val": {"N": "4"}}
update_expression_ast = UpdateExpressionParser.make(update_expression)
item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "1"}},
)
validated_ast = UpdateExpressionValidator(
update_expression_ast,
expression_attribute_names=None,
expression_attribute_values=update_expression_values,
item=item,
).validate()
UpdateExpressionExecutor(validated_ast, item, None).execute()
expected_item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "1"}, "a": {"N": "4"}},
)
assert expected_item == item
def test_execution_of_sum_operation():
update_expression = "SET a = a + b"
update_expression_ast = UpdateExpressionParser.make(update_expression)
item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "1"}, "a": {"N": "3"}, "b": {"N": "4"}},
)
validated_ast = UpdateExpressionValidator(
update_expression_ast,
expression_attribute_names=None,
expression_attribute_values=None,
item=item,
).validate()
UpdateExpressionExecutor(validated_ast, item, None).execute()
expected_item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "1"}, "a": {"N": "7"}, "b": {"N": "4"}},
)
assert expected_item == item
def test_execution_of_remove():
update_expression = "Remove a"
update_expression_ast = UpdateExpressionParser.make(update_expression)
item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "1"}, "a": {"N": "3"}, "b": {"N": "4"}},
)
validated_ast = UpdateExpressionValidator(
update_expression_ast,
expression_attribute_names=None,
expression_attribute_values=None,
item=item,
).validate()
UpdateExpressionExecutor(validated_ast, item, None).execute()
expected_item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "1"}, "b": {"N": "4"}},
)
assert expected_item == item
def test_execution_of_remove_in_map():
update_expression = "Remove itemmap.itemlist[1].foo11"
update_expression_ast = UpdateExpressionParser.make(update_expression)
item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={
"id": {"S": "foo2"},
"itemmap": {
"M": {
"itemlist": {
"L": [
{"M": {"foo00": {"S": "bar1"}, "foo01": {"S": "bar2"}}},
{"M": {"foo10": {"S": "bar1"}, "foo11": {"S": "bar2"}}},
]
}
}
},
},
)
validated_ast = UpdateExpressionValidator(
update_expression_ast,
expression_attribute_names=None,
expression_attribute_values=None,
item=item,
).validate()
UpdateExpressionExecutor(validated_ast, item, None).execute()
expected_item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={
"id": {"S": "foo2"},
"itemmap": {
"M": {
"itemlist": {
"L": [
{"M": {"foo00": {"S": "bar1"}, "foo01": {"S": "bar2"}}},
{"M": {"foo10": {"S": "bar1"},}},
]
}
}
},
},
)
assert expected_item == item
def test_execution_of_remove_in_list():
update_expression = "Remove itemmap.itemlist[1]"
update_expression_ast = UpdateExpressionParser.make(update_expression)
item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={
"id": {"S": "foo2"},
"itemmap": {
"M": {
"itemlist": {
"L": [
{"M": {"foo00": {"S": "bar1"}, "foo01": {"S": "bar2"}}},
{"M": {"foo10": {"S": "bar1"}, "foo11": {"S": "bar2"}}},
]
}
}
},
},
)
validated_ast = UpdateExpressionValidator(
update_expression_ast,
expression_attribute_names=None,
expression_attribute_values=None,
item=item,
).validate()
UpdateExpressionExecutor(validated_ast, item, None).execute()
expected_item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={
"id": {"S": "foo2"},
"itemmap": {
"M": {
"itemlist": {
"L": [{"M": {"foo00": {"S": "bar1"}, "foo01": {"S": "bar2"}}},]
}
}
},
},
)
assert expected_item == item
def test_execution_of_delete_element_from_set():
update_expression = "delete s :value"
update_expression_ast = UpdateExpressionParser.make(update_expression)
item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "foo2"}, "s": {"SS": ["value1", "value2", "value3"]},},
)
validated_ast = UpdateExpressionValidator(
update_expression_ast,
expression_attribute_names=None,
expression_attribute_values={":value": {"SS": ["value2", "value5"]}},
item=item,
).validate()
UpdateExpressionExecutor(validated_ast, item, None).execute()
expected_item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "foo2"}, "s": {"SS": ["value1", "value3"]},},
)
assert expected_item == item
def test_execution_of_add_number():
update_expression = "add s :value"
update_expression_ast = UpdateExpressionParser.make(update_expression)
item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "foo2"}, "s": {"N": "5"},},
)
validated_ast = UpdateExpressionValidator(
update_expression_ast,
expression_attribute_names=None,
expression_attribute_values={":value": {"N": "10"}},
item=item,
).validate()
UpdateExpressionExecutor(validated_ast, item, None).execute()
expected_item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "foo2"}, "s": {"N": "15"}},
)
assert expected_item == item
def test_execution_of_add_set_to_a_number():
update_expression = "add s :value"
update_expression_ast = UpdateExpressionParser.make(update_expression)
item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "foo2"}, "s": {"N": "5"},},
)
try:
validated_ast = UpdateExpressionValidator(
update_expression_ast,
expression_attribute_names=None,
expression_attribute_values={":value": {"SS": ["s1"]}},
item=item,
).validate()
UpdateExpressionExecutor(validated_ast, item, None).execute()
assert False, "Must raise exception"
except IncorrectDataType:
assert True
def test_execution_of_add_to_a_set():
update_expression = "ADD s :value"
update_expression_ast = UpdateExpressionParser.make(update_expression)
item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "foo2"}, "s": {"SS": ["value1", "value2", "value3"]},},
)
validated_ast = UpdateExpressionValidator(
update_expression_ast,
expression_attribute_names=None,
expression_attribute_values={":value": {"SS": ["value2", "value5"]}},
item=item,
).validate()
UpdateExpressionExecutor(validated_ast, item, None).execute()
expected_item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={
"id": {"S": "foo2"},
"s": {"SS": ["value1", "value2", "value3", "value5"]},
},
)
assert expected_item == item
@parameterized(
[
({":value": {"S": "10"}}, "STRING",),
({":value": {"N": "10"}}, "NUMBER",),
({":value": {"B": "10"}}, "BINARY",),
({":value": {"BOOL": True}}, "BOOLEAN",),
({":value": {"NULL": True}}, "NULL",),
({":value": {"M": {"el0": {"S": "10"}}}}, "MAP",),
({":value": {"L": []}}, "LIST",),
]
)
def test_execution_of_delete_element_from_set_invalid_value(
expression_attribute_values, unexpected_data_type
):
"""A delete statement must use a value of type SS in order to delete elements from a set."""
update_expression = "delete s :value"
update_expression_ast = UpdateExpressionParser.make(update_expression)
item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "foo2"}, "s": {"SS": ["value1", "value2", "value3"]},},
)
try:
validated_ast = UpdateExpressionValidator(
update_expression_ast,
expression_attribute_names=None,
expression_attribute_values=expression_attribute_values,
item=item,
).validate()
UpdateExpressionExecutor(validated_ast, item, None).execute()
assert False, "Must raise exception"
except IncorrectOperandType as e:
assert e.operator_or_function == "operator: DELETE"
assert e.operand_type == unexpected_data_type
def test_execution_of_delete_element_from_a_string_attribute():
"""A delete statement must use a value of type SS in order to delete elements from a set."""
update_expression = "delete s :value"
update_expression_ast = UpdateExpressionParser.make(update_expression)
item = Item(
hash_key=DynamoType({"S": "id"}),
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "foo2"}, "s": {"S": "5"},},
)
try:
validated_ast = UpdateExpressionValidator(
update_expression_ast,
expression_attribute_names=None,
expression_attribute_values={":value": {"SS": ["value2"]}},
item=item,
).validate()
UpdateExpressionExecutor(validated_ast, item, None).execute()
assert False, "Must raise exception"
except IncorrectDataType:
assert True
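# Illustrative sketch only, not part of this module: the DELETE action that the tests
# above exercise internally, expressed through the public boto3 API. As the docstrings
# state, DELETE removes elements from a set and requires a set-typed operand, which is
# what IncorrectOperandType/IncorrectDataType guard above; operand values not present in
# the attribute are simply ignored. The table name, key schema and attribute names
# ("sketch-table", "id", "colours") are assumptions.
import boto3
from moto import mock_dynamodb2


@mock_dynamodb2
def sketch_delete_from_set_via_public_api():
    dynamodb = boto3.resource("dynamodb", region_name="us-east-1")
    table = dynamodb.create_table(
        TableName="sketch-table",
        KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
        ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1},
    )
    table.put_item(Item={"id": "1", "colours": {"red", "green", "blue"}})
    # :v is a string set; "green" is removed, the absent "yellow" is ignored.
    table.update_item(
        Key={"id": "1"},
        UpdateExpression="DELETE colours :v",
        ExpressionAttributeValues={":v": {"green", "yellow"}},
    )
    assert table.get_item(Key={"id": "1"})["Item"]["colours"] == {"red", "blue"}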

View File

@ -8,6 +8,8 @@ from boto3.dynamodb.conditions import Key
from botocore.exceptions import ClientError
import sure  # noqa
from freezegun import freeze_time
from nose.tools import assert_raises
from moto import mock_dynamodb2, mock_dynamodb2_deprecated
from boto.exception import JSONResponseError
from tests.helpers import requires_boto_gte
@ -1273,6 +1275,15 @@ def test_update_item_with_expression():
)
def assert_failure_due_to_key_not_in_schema(func, **kwargs):
with assert_raises(ClientError) as ex:
func(**kwargs)
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.exception.response["Error"]["Message"].should.equal(
"The provided key element does not match the schema"
)
@mock_dynamodb2
def test_update_item_add_with_expression():
table = _create_table_with_range_key()
@ -1299,14 +1310,13 @@ def test_update_item_add_with_expression():
dict(table.get_item(Key=item_key)["Item"]).should.equal(current_item)
# Update item to add a string value to a non-existing set
# Should just create the set in the background
# Should throw: 'The provided key element does not match the schema'
table.update_item(
assert_failure_due_to_key_not_in_schema(
table.update_item,
Key=item_key,
UpdateExpression="ADD non_existing_str_set :v",
ExpressionAttributeValues={":v": {"item4"}},
)
current_item["non_existing_str_set"] = {"item4"}
dict(table.get_item(Key=item_key)["Item"]).should.equal(current_item)
# Update item to add a num value to a num set
table.update_item(
@ -1381,15 +1391,14 @@ def test_update_item_add_with_nested_sets():
dict(table.get_item(Key=item_key)["Item"]).should.equal(current_item)
# Update item to add a string value to a non-existing set
# Should just create the set in the background
# Should raise
table.update_item(
assert_failure_due_to_key_not_in_schema(
table.update_item,
Key=item_key,
UpdateExpression="ADD #ns.#ne :v",
ExpressionAttributeNames={"#ns": "nested", "#ne": "non_existing_str_set"},
ExpressionAttributeValues={":v": {"new_item"}},
)
current_item["nested"]["non_existing_str_set"] = {"new_item"}
dict(table.get_item(Key=item_key)["Item"]).should.equal(current_item)
@mock_dynamodb2

View File

@ -40,6 +40,15 @@ def test_boto3_describe_regions():
resp = ec2.describe_regions(RegionNames=[test_region])
resp["Regions"].should.have.length_of(1)
resp["Regions"][0].should.have.key("RegionName").which.should.equal(test_region)
resp["Regions"][0].should.have.key("OptInStatus").which.should.equal(
"opt-in-not-required"
)
test_region = "ap-east-1"
resp = ec2.describe_regions(RegionNames=[test_region])
resp["Regions"].should.have.length_of(1)
resp["Regions"][0].should.have.key("RegionName").which.should.equal(test_region)
resp["Regions"][0].should.have.key("OptInStatus").which.should.equal("not-opted-in")
@mock_ec2

View File

@ -2218,6 +2218,29 @@ def test_boto3_deleted_versionings_list():
assert len(listed["Contents"]) == 1
@mock_s3
def test_boto3_delete_objects_for_specific_version_id():
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
client.create_bucket(Bucket="blah")
client.put_bucket_versioning(
Bucket="blah", VersioningConfiguration={"Status": "Enabled"}
)
client.put_object(Bucket="blah", Key="test1", Body=b"test1a")
client.put_object(Bucket="blah", Key="test1", Body=b"test1b")
response = client.list_object_versions(Bucket="blah", Prefix="test1")
id_to_delete = [v["VersionId"] for v in response["Versions"] if v["IsLatest"]][0]
response = client.delete_objects(
Bucket="blah", Delete={"Objects": [{"Key": "test1", "VersionId": id_to_delete}]}
)
assert response["Deleted"] == [{"Key": "test1", "VersionId": id_to_delete}]
listed = client.list_objects_v2(Bucket="blah")
assert len(listed["Contents"]) == 1
@mock_s3
def test_boto3_delete_versioned_bucket():
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)

View File

@ -137,6 +137,45 @@ def test_create_secret_with_tags():
]
@mock_secretsmanager
def test_create_secret_with_description():
conn = boto3.client("secretsmanager", region_name="us-east-1")
secret_name = "test-secret-with-tags"
result = conn.create_secret(
Name=secret_name, SecretString="foosecret", Description="desc"
)
assert result["ARN"]
assert result["Name"] == secret_name
secret_value = conn.get_secret_value(SecretId=secret_name)
assert secret_value["SecretString"] == "foosecret"
secret_details = conn.describe_secret(SecretId=secret_name)
assert secret_details["Description"] == "desc"
@mock_secretsmanager
def test_create_secret_with_tags_and_description():
conn = boto3.client("secretsmanager", region_name="us-east-1")
secret_name = "test-secret-with-tags"
result = conn.create_secret(
Name=secret_name,
SecretString="foosecret",
Description="desc",
Tags=[{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}],
)
assert result["ARN"]
assert result["Name"] == secret_name
secret_value = conn.get_secret_value(SecretId=secret_name)
assert secret_value["SecretString"] == "foosecret"
secret_details = conn.describe_secret(SecretId=secret_name)
assert secret_details["Tags"] == [
{"Key": "Foo", "Value": "Bar"},
{"Key": "Mykey", "Value": "Myvalue"},
]
assert secret_details["Description"] == "desc"
@mock_secretsmanager
def test_delete_secret():
conn = boto3.client("secretsmanager", region_name="us-west-2")
@ -690,6 +729,31 @@ def test_put_secret_value_versions_differ_if_same_secret_put_twice():
assert first_version_id != second_version_id
@mock_secretsmanager
def test_put_secret_value_maintains_description_and_tags():
conn = boto3.client("secretsmanager", region_name="us-west-2")
conn.create_secret(
Name=DEFAULT_SECRET_NAME,
SecretString="foosecret",
Description="desc",
Tags=[{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}],
)
conn = boto3.client("secretsmanager", region_name="us-west-2")
conn.put_secret_value(
SecretId=DEFAULT_SECRET_NAME,
SecretString="dupe_secret",
VersionStages=["AWSCURRENT"],
)
secret_details = conn.describe_secret(SecretId=DEFAULT_SECRET_NAME)
assert secret_details["Tags"] == [
{"Key": "Foo", "Value": "Bar"},
{"Key": "Mykey", "Value": "Myvalue"},
]
assert secret_details["Description"] == "desc"
@mock_secretsmanager
def test_can_list_secret_version_ids():
conn = boto3.client("secretsmanager", region_name="us-west-2")
@ -739,6 +803,43 @@ def test_update_secret():
assert created_secret["VersionId"] != updated_secret["VersionId"]
@mock_secretsmanager
def test_update_secret_with_tags_and_description():
conn = boto3.client("secretsmanager", region_name="us-west-2")
created_secret = conn.create_secret(
Name="test-secret",
SecretString="foosecret",
Description="desc",
Tags=[{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}],
)
assert created_secret["ARN"]
assert created_secret["Name"] == "test-secret"
assert created_secret["VersionId"] != ""
secret = conn.get_secret_value(SecretId="test-secret")
assert secret["SecretString"] == "foosecret"
updated_secret = conn.update_secret(
SecretId="test-secret", SecretString="barsecret"
)
assert updated_secret["ARN"]
assert updated_secret["Name"] == "test-secret"
assert updated_secret["VersionId"] != ""
secret = conn.get_secret_value(SecretId="test-secret")
assert secret["SecretString"] == "barsecret"
assert created_secret["VersionId"] != updated_secret["VersionId"]
secret_details = conn.describe_secret(SecretId="test-secret")
assert secret_details["Tags"] == [
{"Key": "Foo", "Value": "Bar"},
{"Key": "Mykey", "Value": "Myvalue"},
]
assert secret_details["Description"] == "desc"
@mock_secretsmanager
def test_update_secret_which_does_not_exit():
conn = boto3.client("secretsmanager", region_name="us-west-2")

View File

@ -1,4 +1,5 @@
from __future__ import unicode_literals
from base64 import b64encode
import json
import boto
@ -103,6 +104,128 @@ def test_assume_role():
)
@freeze_time("2012-01-01 12:00:00")
@mock_sts
def test_assume_role_with_saml():
client = boto3.client("sts", region_name="us-east-1")
session_name = "session-name"
policy = json.dumps(
{
"Statement": [
{
"Sid": "Stmt13690092345534",
"Action": ["S3:ListBucket"],
"Effect": "Allow",
"Resource": ["arn:aws:s3:::foobar-tester"],
}
]
}
)
role_name = "test-role"
provider_name = "TestProvFed"
user_name = "testuser"
role_input = "arn:aws:iam::{account_id}:role/{role_name}".format(
account_id=ACCOUNT_ID, role_name=role_name
)
principal_role = "arn:aws:iam:{account_id}:saml-provider/{provider_name}".format(
account_id=ACCOUNT_ID, provider_name=provider_name
)
saml_assertion = """
<?xml version="1.0"?>
<samlp:Response xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol" ID="_00000000-0000-0000-0000-000000000000" Version="2.0" IssueInstant="2012-01-01T12:00:00.000Z" Destination="https://signin.aws.amazon.com/saml" Consent="urn:oasis:names:tc:SAML:2.0:consent:unspecified">
<Issuer xmlns="urn:oasis:names:tc:SAML:2.0:assertion">http://localhost/</Issuer>
<samlp:Status>
<samlp:StatusCode Value="urn:oasis:names:tc:SAML:2.0:status:Success"/>
</samlp:Status>
<Assertion xmlns="urn:oasis:names:tc:SAML:2.0:assertion" ID="_00000000-0000-0000-0000-000000000000" IssueInstant="2012-12-01T12:00:00.000Z" Version="2.0">
<Issuer>http://localhost:3000/</Issuer>
<ds:Signature xmlns:ds="http://www.w3.org/2000/09/xmldsig#">
<ds:SignedInfo>
<ds:CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/>
<ds:SignatureMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha256"/>
<ds:Reference URI="#_00000000-0000-0000-0000-000000000000">
<ds:Transforms>
<ds:Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"/>
<ds:Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/>
</ds:Transforms>
<ds:DigestMethod Algorithm="http://www.w3.org/2001/04/xmlenc#sha256"/>
<ds:DigestValue>NTIyMzk0ZGI4MjI0ZjI5ZGNhYjkyOGQyZGQ1NTZjODViZjk5YTY4ODFjOWRjNjkyYzZmODY2ZDQ4NjlkZjY3YSAgLQo=</ds:DigestValue>
</ds:Reference>
</ds:SignedInfo>
<ds:SignatureValue>NTIyMzk0ZGI4MjI0ZjI5ZGNhYjkyOGQyZGQ1NTZjODViZjk5YTY4ODFjOWRjNjkyYzZmODY2ZDQ4NjlkZjY3YSAgLQo=</ds:SignatureValue>
<KeyInfo xmlns="http://www.w3.org/2000/09/xmldsig#">
<ds:X509Data>
<ds:X509Certificate>NTIyMzk0ZGI4MjI0ZjI5ZGNhYjkyOGQyZGQ1NTZjODViZjk5YTY4ODFjOWRjNjkyYzZmODY2ZDQ4NjlkZjY3YSAgLQo=</ds:X509Certificate>
</ds:X509Data>
</KeyInfo>
</ds:Signature>
<Subject>
<NameID Format="urn:oasis:names:tc:SAML:2.0:nameid-format:persistent">{username}</NameID>
<SubjectConfirmation Method="urn:oasis:names:tc:SAML:2.0:cm:bearer">
<SubjectConfirmationData NotOnOrAfter="2012-01-01T13:00:00.000Z" Recipient="https://signin.aws.amazon.com/saml"/>
</SubjectConfirmation>
</Subject>
<Conditions NotBefore="2012-01-01T12:00:00.000Z" NotOnOrAfter="2012-01-01T13:00:00.000Z">
<AudienceRestriction>
<Audience>urn:amazon:webservices</Audience>
</AudienceRestriction>
</Conditions>
<AttributeStatement>
<Attribute Name="https://aws.amazon.com/SAML/Attributes/RoleSessionName">
<AttributeValue>{username}@localhost</AttributeValue>
</Attribute>
<Attribute Name="https://aws.amazon.com/SAML/Attributes/Role">
<AttributeValue>arn:aws:iam::{account_id}:saml-provider/{provider_name},arn:aws:iam::{account_id}:role/{role_name}</AttributeValue>
</Attribute>
<Attribute Name="https://aws.amazon.com/SAML/Attributes/SessionDuration">
<AttributeValue>900</AttributeValue>
</Attribute>
</AttributeStatement>
<AuthnStatement AuthnInstant="2012-01-01T12:00:00.000Z" SessionIndex="_00000000-0000-0000-0000-000000000000">
<AuthnContext>
<AuthnContextClassRef>urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport</AuthnContextClassRef>
</AuthnContext>
</AuthnStatement>
</Assertion>
</samlp:Response>""".format(
account_id=ACCOUNT_ID,
role_name=role_name,
provider_name=provider_name,
username=user_name,
).replace(
"\n", ""
)
assume_role_response = client.assume_role_with_saml(
RoleArn=role_input,
PrincipalArn=principal_role,
SAMLAssertion=b64encode(saml_assertion.encode("utf-8")).decode("utf-8"),
)
credentials = assume_role_response["Credentials"]
if not settings.TEST_SERVER_MODE:
credentials["Expiration"].isoformat().should.equal("2012-01-01T12:15:00+00:00")
credentials["SessionToken"].should.have.length_of(356)
assert credentials["SessionToken"].startswith("FQoGZXIvYXdzE")
credentials["AccessKeyId"].should.have.length_of(20)
assert credentials["AccessKeyId"].startswith("ASIA")
credentials["SecretAccessKey"].should.have.length_of(40)
assume_role_response["AssumedRoleUser"]["Arn"].should.equal(
"arn:aws:sts::{account_id}:assumed-role/{role_name}/{fed_name}@localhost".format(
account_id=ACCOUNT_ID, role_name=role_name, fed_name=user_name
)
)
assert assume_role_response["AssumedRoleUser"]["AssumedRoleId"].startswith("AROA")
assert assume_role_response["AssumedRoleUser"]["AssumedRoleId"].endswith(
":{fed_name}@localhost".format(fed_name=user_name)
)
assume_role_response["AssumedRoleUser"]["AssumedRoleId"].should.have.length_of(
21 + 1 + len("{fed_name}@localhost".format(fed_name=user_name))
)
@freeze_time("2012-01-01 12:00:00")
@mock_sts_deprecated
def test_assume_role_with_web_identity():