diff --git a/.gitignore b/.gitignore index 0282e3caf..fb9bd51de 100644 --- a/.gitignore +++ b/.gitignore @@ -20,3 +20,5 @@ env/ .vscode/ tests/file.tmp .eggs/ +.mypy_cache/ +*.tmp diff --git a/.travis.yml b/.travis.yml index ac9322211..8f218134b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -26,11 +26,12 @@ install: fi docker run --rm -t --name motoserver -e TEST_SERVER_MODE=true -e AWS_SECRET_ACCESS_KEY=server_secret -e AWS_ACCESS_KEY_ID=server_key -v `pwd`:/moto -p 5000:5000 -v /var/run/docker.sock:/var/run/docker.sock python:${PYTHON_DOCKER_TAG} /moto/travis_moto_server.sh & fi + travis_retry pip install -r requirements-dev.txt travis_retry pip install boto==2.45.0 travis_retry pip install boto3 travis_retry pip install dist/moto*.gz travis_retry pip install coveralls==1.1 - travis_retry pip install -r requirements-dev.txt + travis_retry pip install coverage==4.5.4 if [ "$TEST_SERVER_MODE" = "true" ]; then python wait_for.py diff --git a/README.md b/README.md index 4024328a9..6fb942aef 100644 --- a/README.md +++ b/README.md @@ -283,14 +283,14 @@ def test_describe_instances_allowed(): ] } access_key = ... - # create access key for an IAM user/assumed role that has the policy above. + # create access key for an IAM user/assumed role that has the policy above. # this part should call __exactly__ 4 AWS actions, so that authentication and authorization starts exactly after this - + client = boto3.client('ec2', region_name='us-east-1', aws_access_key_id=access_key['AccessKeyId'], aws_secret_access_key=access_key['SecretAccessKey']) - - # if the IAM principal whose access key is used, does not have the permission to describe instances, this will fail + + # if the IAM principal whose access key is used, does not have the permission to describe instances, this will fail instances = client.describe_instances()['Reservations'][0]['Instances'] assert len(instances) == 0 ``` @@ -310,16 +310,16 @@ You need to ensure that the mocks are actually in place. 
Changes made to recent have altered some of the mock behavior. In short, you need to ensure that you _always_ do the following: 1. Ensure that your tests have dummy environment variables set up: - + export AWS_ACCESS_KEY_ID='testing' export AWS_SECRET_ACCESS_KEY='testing' export AWS_SECURITY_TOKEN='testing' export AWS_SESSION_TOKEN='testing' - -1. __VERY IMPORTANT__: ensure that you have your mocks set up __BEFORE__ your `boto3` client is established. + +1. __VERY IMPORTANT__: ensure that you have your mocks set up __BEFORE__ your `boto3` client is established. This can typically happen if you import a module that has a `boto3` client instantiated outside of a function. See the pesky imports section below on how to work around this. - + ### Example on usage? If you are a user of [pytest](https://pytest.org/en/latest/), you can leverage [pytest fixtures](https://pytest.org/en/latest/fixture.html#fixture) to help set up your mocks and other AWS resources that you would need. @@ -354,7 +354,7 @@ def cloudwatch(aws_credentials): ... etc. ``` -In the code sample above, all of the AWS/mocked fixtures take in a parameter of `aws_credentials`, +In the code sample above, all of the AWS/mocked fixtures take in a parameter of `aws_credentials`, which sets the proper fake environment variables. The fake environment variables are used so that `botocore` doesn't try to locate real credentials on your system. @@ -364,7 +364,7 @@ def test_create_bucket(s3): # s3 is a fixture defined above that yields a boto3 s3 client. # Feel free to instantiate another boto3 S3 client -- Keep note of the region though. s3.create_bucket(Bucket="somebucket") - + result = s3.list_buckets() assert len(result['Buckets']) == 1 assert result['Buckets'][0]['Name'] == 'somebucket' @@ -373,7 +373,7 @@ def test_create_bucket(s3): ### What about those pesky imports? Recall earlier, it was mentioned that mocks should be established __BEFORE__ the clients are set up. 
One way to avoid import issues is to make use of local Python imports -- i.e. import the module inside of the unit -test you want to run vs. importing at the top of the file. +test you want to run vs. importing at the top of the file. Example: ```python @@ -381,12 +381,12 @@ def test_something(s3): from some.package.that.does.something.with.s3 import some_func # <-- Local import for unit test # ^^ Importing here ensures that the mock has been established. - sume_func() # The mock has been established from the "s3" pytest fixture, so this function that uses + some_func() # The mock has been established from the "s3" pytest fixture, so this function that uses # a package-level S3 client will properly use the mock and not reach out to AWS. ``` ### Other caveats -For Tox, Travis CI, and other build systems, you might need to also perform a `touch ~/.aws/credentials` +For Tox, Travis CI, and other build systems, you might need to also perform a `touch ~/.aws/credentials` command before running the tests. As long as that file is present (empty preferably) and the environment variables above are set, you should be good to go. @@ -450,6 +450,16 @@ boto3.resource( ) ``` +### Caveats +The standalone server has some caveats with some services. The following services +require that you update your hosts file for your code to work properly: + +1. `s3-control` + +For the above services, this is required because the hostname is in the form of `AWS_ACCOUNT_ID.localhost`. +As a result, you need to add that entry to your host file for your tests to function properly. + + ## Install diff --git a/docs/conf.py b/docs/conf.py index 28a4b4e6b..a902d0ecf 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -56,9 +56,10 @@ author = 'Steve Pulec' # built documents. # # The short X.Y version. -version = '0.4.10' +import moto +version = moto.__version__ # The full version, including alpha/beta/rc tags. 
-release = '0.4.10' +release = moto.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/docs/docs/getting_started.rst b/docs/docs/getting_started.rst index d52e76235..ffe37f3a0 100644 --- a/docs/docs/getting_started.rst +++ b/docs/docs/getting_started.rst @@ -24,8 +24,7 @@ For example, we have the following code we want to test: .. sourcecode:: python - import boto - from boto.s3.key import Key + import boto3 class MyModel(object): def __init__(self, name, value): @@ -33,11 +32,8 @@ For example, we have the following code we want to test: self.value = value def save(self): - conn = boto.connect_s3() - bucket = conn.get_bucket('mybucket') - k = Key(bucket) - k.key = self.name - k.set_contents_from_string(self.value) + s3 = boto3.client('s3', region_name='us-east-1') + s3.put_object(Bucket='mybucket', Key=self.name, Body=self.value) There are several ways to do this, but you should keep in mind that Moto creates a full, blank environment. @@ -48,20 +44,23 @@ With a decorator wrapping, all the calls to S3 are automatically mocked out. .. sourcecode:: python - import boto + import boto3 from moto import mock_s3 from mymodule import MyModel @mock_s3 def test_my_model_save(): - conn = boto.connect_s3() + conn = boto3.resource('s3', region_name='us-east-1') # We need to create the bucket since this is all in Moto's 'virtual' AWS account - conn.create_bucket('mybucket') + conn.create_bucket(Bucket='mybucket') model_instance = MyModel('steve', 'is awesome') model_instance.save() - assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome' + body = conn.Object('mybucket', 'steve').get()[ + 'Body'].read().decode("utf-8") + + assert body == 'is awesome' Context manager ~~~~~~~~~~~~~~~ @@ -72,13 +71,16 @@ Same as the Decorator, every call inside the ``with`` statement is mocked out. 
def test_my_model_save(): with mock_s3(): - conn = boto.connect_s3() - conn.create_bucket('mybucket') + conn = boto3.resource('s3', region_name='us-east-1') + conn.create_bucket(Bucket='mybucket') model_instance = MyModel('steve', 'is awesome') model_instance.save() - assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome' + body = conn.Object('mybucket', 'steve').get()[ + 'Body'].read().decode("utf-8") + + assert body == 'is awesome' Raw ~~~ @@ -91,13 +93,16 @@ You can also start and stop the mocking manually. mock = mock_s3() mock.start() - conn = boto.connect_s3() - conn.create_bucket('mybucket') + conn = boto3.resource('s3', region_name='us-east-1') + conn.create_bucket(Bucket='mybucket') model_instance = MyModel('steve', 'is awesome') model_instance.save() - assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome' + body = conn.Object('mybucket', 'steve').get()[ + 'Body'].read().decode("utf-8") + + assert body == 'is awesome' mock.stop() diff --git a/docs/index.rst b/docs/index.rst index 6311597fe..22ac97228 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -76,7 +76,7 @@ Currently implemented Services: +---------------------------+-----------------------+------------------------------------+ | Logs | @mock_logs | basic endpoints done | +---------------------------+-----------------------+------------------------------------+ -| Organizations | @mock_organizations | some core edpoints done | +| Organizations | @mock_organizations | some core endpoints done | +---------------------------+-----------------------+------------------------------------+ | Polly | @mock_polly | all endpoints done | +---------------------------+-----------------------+------------------------------------+ diff --git a/moto/apigateway/exceptions.py b/moto/apigateway/exceptions.py index 434ebc467..2a306ab99 100644 --- a/moto/apigateway/exceptions.py +++ b/moto/apigateway/exceptions.py @@ -39,7 +39,7 @@ class 
InvalidResourcePathException(BadRequestException): def __init__(self): super(InvalidResourcePathException, self).__init__( "BadRequestException", - "Resource's path part only allow a-zA-Z0-9._- and curly braces at the beginning and the end.", + "Resource's path part only allow a-zA-Z0-9._- and curly braces at the beginning and the end and an optional plus sign before the closing brace.", ) diff --git a/moto/apigateway/models.py b/moto/apigateway/models.py index fd2fb7064..dcc38efc9 100644 --- a/moto/apigateway/models.py +++ b/moto/apigateway/models.py @@ -83,14 +83,14 @@ class MethodResponse(BaseModel, dict): class Method(BaseModel, dict): - def __init__(self, method_type, authorization_type): + def __init__(self, method_type, authorization_type, **kwargs): super(Method, self).__init__() self.update( dict( httpMethod=method_type, authorizationType=authorization_type, authorizerId=None, - apiKeyRequired=None, + apiKeyRequired=kwargs.get("api_key_required") or False, requestParameters=None, requestModels=None, methodIntegration=None, @@ -117,14 +117,15 @@ class Resource(BaseModel): self.api_id = api_id self.path_part = path_part self.parent_id = parent_id - self.resource_methods = {"GET": {}} + self.resource_methods = {} def to_dict(self): response = { "path": self.get_path(), "id": self.id, - "resourceMethods": self.resource_methods, } + if self.resource_methods: + response["resourceMethods"] = self.resource_methods if self.parent_id: response["parentId"] = self.parent_id response["pathPart"] = self.path_part @@ -158,8 +159,12 @@ class Resource(BaseModel): ) return response.status_code, response.text - def add_method(self, method_type, authorization_type): - method = Method(method_type=method_type, authorization_type=authorization_type) + def add_method(self, method_type, authorization_type, api_key_required): + method = Method( + method_type=method_type, + authorization_type=authorization_type, + api_key_required=api_key_required, + ) 
self.resource_methods[method_type] = method return method @@ -394,12 +399,17 @@ class UsagePlanKey(BaseModel, dict): class RestAPI(BaseModel): - def __init__(self, id, region_name, name, description): + def __init__(self, id, region_name, name, description, **kwargs): self.id = id self.region_name = region_name self.name = name self.description = description self.create_date = int(time.time()) + self.api_key_source = kwargs.get("api_key_source") or "HEADER" + self.endpoint_configuration = kwargs.get("endpoint_configuration") or { + "types": ["EDGE"] + } + self.tags = kwargs.get("tags") or {} self.deployments = {} self.stages = {} @@ -416,6 +426,9 @@ class RestAPI(BaseModel): "name": self.name, "description": self.description, "createdDate": int(time.time()), + "apiKeySource": self.api_key_source, + "endpointConfiguration": self.endpoint_configuration, + "tags": self.tags, } def add_child(self, path, parent_id=None): @@ -529,9 +542,24 @@ class APIGatewayBackend(BaseBackend): self.__dict__ = {} self.__init__(region_name) - def create_rest_api(self, name, description): + def create_rest_api( + self, + name, + description, + api_key_source=None, + endpoint_configuration=None, + tags=None, + ): api_id = create_id() - rest_api = RestAPI(api_id, self.region_name, name, description) + rest_api = RestAPI( + api_id, + self.region_name, + name, + description, + api_key_source=api_key_source, + endpoint_configuration=endpoint_configuration, + tags=tags, + ) self.apis[api_id] = rest_api return rest_api @@ -556,7 +584,7 @@ class APIGatewayBackend(BaseBackend): return resource def create_resource(self, function_id, parent_resource_id, path_part): - if not re.match("^\\{?[a-zA-Z0-9._-]+\\}?$", path_part): + if not re.match("^\\{?[a-zA-Z0-9._-]+\\+?\\}?$", path_part): raise InvalidResourcePathException() api = self.get_rest_api(function_id) child = api.add_child(path=path_part, parent_id=parent_resource_id) @@ -571,9 +599,18 @@ class APIGatewayBackend(BaseBackend): resource = 
self.get_resource(function_id, resource_id) return resource.get_method(method_type) - def create_method(self, function_id, resource_id, method_type, authorization_type): + def create_method( + self, + function_id, + resource_id, + method_type, + authorization_type, + api_key_required=None, + ): resource = self.get_resource(function_id, resource_id) - method = resource.add_method(method_type, authorization_type) + method = resource.add_method( + method_type, authorization_type, api_key_required=api_key_required + ) return method def get_stage(self, function_id, stage_name): diff --git a/moto/apigateway/responses.py b/moto/apigateway/responses.py index c4c7b403e..6a22a4708 100644 --- a/moto/apigateway/responses.py +++ b/moto/apigateway/responses.py @@ -12,6 +12,9 @@ from .exceptions import ( ApiKeyAlreadyExists, ) +API_KEY_SOURCES = ["AUTHORIZER", "HEADER"] +ENDPOINT_CONFIGURATION_TYPES = ["PRIVATE", "EDGE", "REGIONAL"] + class APIGatewayResponse(BaseResponse): def error(self, type_, message, status=400): @@ -45,7 +48,45 @@ class APIGatewayResponse(BaseResponse): elif self.method == "POST": name = self._get_param("name") description = self._get_param("description") - rest_api = self.backend.create_rest_api(name, description) + api_key_source = self._get_param("apiKeySource") + endpoint_configuration = self._get_param("endpointConfiguration") + tags = self._get_param("tags") + + # Param validation + if api_key_source and api_key_source not in API_KEY_SOURCES: + return self.error( + "ValidationException", + ( + "1 validation error detected: " + "Value '{api_key_source}' at 'createRestApiInput.apiKeySource' failed " + "to satisfy constraint: Member must satisfy enum value set: " + "[AUTHORIZER, HEADER]" + ).format(api_key_source=api_key_source), + ) + + if endpoint_configuration and "types" in endpoint_configuration: + invalid_types = list( + set(endpoint_configuration["types"]) + - set(ENDPOINT_CONFIGURATION_TYPES) + ) + if invalid_types: + return self.error( + 
"ValidationException", + ( + "1 validation error detected: Value '{endpoint_type}' " + "at 'createRestApiInput.endpointConfiguration.types' failed " + "to satisfy constraint: Member must satisfy enum value set: " + "[PRIVATE, EDGE, REGIONAL]" + ).format(endpoint_type=invalid_types[0]), + ) + + rest_api = self.backend.create_rest_api( + name, + description, + api_key_source=api_key_source, + endpoint_configuration=endpoint_configuration, + tags=tags, + ) return 200, {}, json.dumps(rest_api.to_dict()) def restapis_individual(self, request, full_url, headers): @@ -104,8 +145,13 @@ class APIGatewayResponse(BaseResponse): return 200, {}, json.dumps(method) elif self.method == "PUT": authorization_type = self._get_param("authorizationType") + api_key_required = self._get_param("apiKeyRequired") method = self.backend.create_method( - function_id, resource_id, method_type, authorization_type + function_id, + resource_id, + method_type, + authorization_type, + api_key_required, ) return 200, {}, json.dumps(method) diff --git a/moto/awslambda/exceptions.py b/moto/awslambda/exceptions.py index 1a82977c3..08d13dce5 100644 --- a/moto/awslambda/exceptions.py +++ b/moto/awslambda/exceptions.py @@ -1,4 +1,5 @@ from botocore.client import ClientError +from moto.core.exceptions import JsonRESTError class LambdaClientError(ClientError): @@ -29,3 +30,12 @@ class InvalidRoleFormat(LambdaClientError): role, InvalidRoleFormat.pattern ) super(InvalidRoleFormat, self).__init__("ValidationException", message) + + +class PreconditionFailedException(JsonRESTError): + code = 412 + + def __init__(self, message): + super(PreconditionFailedException, self).__init__( + "PreconditionFailedException", message + ) diff --git a/moto/awslambda/models.py b/moto/awslambda/models.py index 95a5c4ad5..939952d5e 100644 --- a/moto/awslambda/models.py +++ b/moto/awslambda/models.py @@ -25,6 +25,7 @@ import requests.adapters from boto3 import Session +from moto.awslambda.policy import Policy from moto.core 
import BaseBackend, BaseModel from moto.core.exceptions import RESTError from moto.iam.models import iam_backend @@ -47,15 +48,11 @@ from moto.core import ACCOUNT_ID logger = logging.getLogger(__name__) - try: from tempfile import TemporaryDirectory except ImportError: from backports.tempfile import TemporaryDirectory -# The lambci container is returning a special escape character for the "RequestID" fields. Unicode 033: -# _stderr_regex = re.compile(r"START|END|REPORT RequestId: .*") -_stderr_regex = re.compile(r"\033\[\d+.*") _orig_adapter_send = requests.adapters.HTTPAdapter.send docker_3 = docker.__version__[0] >= "3" @@ -164,7 +161,8 @@ class LambdaFunction(BaseModel): self.logs_backend = logs_backends[self.region] self.environment_vars = spec.get("Environment", {}).get("Variables", {}) self.docker_client = docker.from_env() - self.policy = "" + self.policy = None + self.state = "Active" # Unfortunately mocking replaces this method w/o fallback enabled, so we # need to replace it if we detect it's been mocked @@ -274,11 +272,11 @@ class LambdaFunction(BaseModel): "MemorySize": self.memory_size, "Role": self.role, "Runtime": self.run_time, + "State": self.state, "Timeout": self.timeout, "Version": str(self.version), "VpcConfig": self.vpc_config, } - if self.environment_vars: config["Environment"] = {"Variables": self.environment_vars} @@ -385,7 +383,7 @@ class LambdaFunction(BaseModel): try: # TODO: I believe we can keep the container running and feed events as needed # also need to hook it up to the other services so it can make kws/s3 etc calls - # Should get invoke_id /RequestId from invovation + # Should get invoke_id /RequestId from invocation env_vars = { "AWS_LAMBDA_FUNCTION_TIMEOUT": self.timeout, "AWS_LAMBDA_FUNCTION_NAME": self.function_name, @@ -397,6 +395,7 @@ class LambdaFunction(BaseModel): env_vars.update(self.environment_vars) container = output = exit_code = None + log_config = docker.types.LogConfig(type=docker.types.LogConfig.types.JSON) with 
_DockerDataVolumeContext(self) as data_vol: try: run_kwargs = ( @@ -412,6 +411,7 @@ class LambdaFunction(BaseModel): volumes=["{}:/var/task".format(data_vol.name)], environment=env_vars, detach=True, + log_config=log_config, **run_kwargs ) finally: @@ -453,14 +453,9 @@ class LambdaFunction(BaseModel): if exit_code != 0: raise Exception("lambda invoke failed output: {}".format(output)) - # strip out RequestId lines (TODO: This will return an additional '\n' in the response) - output = os.linesep.join( - [ - line - for line in self.convert(output).splitlines() - if not _stderr_regex.match(line) - ] - ) + # We only care about the response from the lambda + # Which is the last line of the output, according to https://github.com/lambci/docker-lambda/issues/25 + output = output.splitlines()[-1] return output, False except BaseException as e: traceback.print_exc() @@ -480,7 +475,7 @@ class LambdaFunction(BaseModel): payload["result"] = response_headers["x-amz-log-result"] result = res.encode("utf-8") else: - result = json.dumps(payload) + result = res if errored: response_headers["x-amz-function-error"] = "Handled" @@ -709,7 +704,8 @@ class LambdaStorage(object): "versions": [], "alias": weakref.WeakValueDictionary(), } - + # instantiate a new policy for this version of the lambda + fn.policy = Policy(fn) self._arns[fn.function_arn] = fn def publish_function(self, name): @@ -1010,8 +1006,21 @@ class LambdaBackend(BaseBackend): return True return False - def add_policy(self, function_name, policy): - self.get_function(function_name).policy = policy + def add_policy_statement(self, function_name, raw): + fn = self.get_function(function_name) + fn.policy.add_statement(raw) + + def del_policy_statement(self, function_name, sid, revision=""): + fn = self.get_function(function_name) + fn.policy.del_statement(sid, revision) + + def get_policy(self, function_name): + fn = self.get_function(function_name) + return fn.policy.get_policy() + + def get_policy_wire_format(self, 
function_name): + fn = self.get_function(function_name) + return fn.policy.wire_format() def update_function_code(self, function_name, qualifier, body): fn = self.get_function(function_name, qualifier) diff --git a/moto/awslambda/policy.py b/moto/awslambda/policy.py new file mode 100644 index 000000000..495e2cee6 --- /dev/null +++ b/moto/awslambda/policy.py @@ -0,0 +1,134 @@ +from __future__ import unicode_literals + +import json +import uuid + +from six import string_types + +from moto.awslambda.exceptions import PreconditionFailedException + + +class Policy: + def __init__(self, parent): + self.revision = str(uuid.uuid4()) + self.statements = [] + self.parent = parent + + def wire_format(self): + p = self.get_policy() + p["Policy"] = json.dumps(p["Policy"]) + return json.dumps(p) + + def get_policy(self): + return { + "Policy": { + "Version": "2012-10-17", + "Id": "default", + "Statement": self.statements, + }, + "RevisionId": self.revision, + } + + # adds the raw JSON statement to the policy + def add_statement(self, raw): + policy = json.loads(raw, object_hook=self.decode_policy) + if len(policy.revision) > 0 and self.revision != policy.revision: + raise PreconditionFailedException( + "The RevisionId provided does not match the latest RevisionId" + " for the Lambda function or alias. Call the GetFunction or the GetAlias API to retrieve" + " the latest RevisionId for your resource." + ) + self.statements.append(policy.statements[0]) + self.revision = str(uuid.uuid4()) + + # removes the statement that matches 'sid' from the policy + def del_statement(self, sid, revision=""): + if len(revision) > 0 and self.revision != revision: + raise PreconditionFailedException( + "The RevisionId provided does not match the latest RevisionId" + " for the Lambda function or alias. Call the GetFunction or the GetAlias API to retrieve" + " the latest RevisionId for your resource." 
+ ) + for statement in self.statements: + if "Sid" in statement and statement["Sid"] == sid: + self.statements.remove(statement) + + # converts AddPermission request to PolicyStatement + # https://docs.aws.amazon.com/lambda/latest/dg/API_AddPermission.html + def decode_policy(self, obj): + # import pydevd + # pydevd.settrace("localhost", port=5678) + policy = Policy(self.parent) + policy.revision = obj.get("RevisionId", "") + + # set some default values if these keys are not set + self.ensure_set(obj, "Effect", "Allow") + self.ensure_set(obj, "Resource", self.parent.function_arn + ":$LATEST") + self.ensure_set(obj, "StatementId", str(uuid.uuid4())) + + # transform field names and values + self.transform_property(obj, "StatementId", "Sid", self.nop_formatter) + self.transform_property(obj, "Principal", "Principal", self.principal_formatter) + + self.transform_property( + obj, "SourceArn", "SourceArn", self.source_arn_formatter + ) + self.transform_property( + obj, "SourceAccount", "SourceAccount", self.source_account_formatter + ) + + # remove RevisionId and EventSourceToken if they are set + self.remove_if_set(obj, ["RevisionId", "EventSourceToken"]) + + # merge conditional statements into a single map under the Condition key + self.condition_merge(obj) + + # append resulting statement to policy.statements + policy.statements.append(obj) + + return policy + + def nop_formatter(self, obj): + return obj + + def ensure_set(self, obj, key, value): + if key not in obj: + obj[key] = value + + def principal_formatter(self, obj): + if isinstance(obj, string_types): + if obj.endswith(".amazonaws.com"): + return {"Service": obj} + if obj.endswith(":root"): + return {"AWS": obj} + return obj + + def source_account_formatter(self, obj): + return {"StringEquals": {"AWS:SourceAccount": obj}} + + def source_arn_formatter(self, obj): + return {"ArnLike": {"AWS:SourceArn": obj}} + + def transform_property(self, obj, old_name, new_name, formatter): + if old_name in obj: + 
obj[new_name] = formatter(obj[old_name]) + if new_name != old_name: + del obj[old_name] + + def remove_if_set(self, obj, keys): + for key in keys: + if key in obj: + del obj[key] + + def condition_merge(self, obj): + if "SourceArn" in obj: + if "Condition" not in obj: + obj["Condition"] = {} + obj["Condition"].update(obj["SourceArn"]) + del obj["SourceArn"] + + if "SourceAccount" in obj: + if "Condition" not in obj: + obj["Condition"] = {} + obj["Condition"].update(obj["SourceAccount"]) + del obj["SourceAccount"] diff --git a/moto/awslambda/responses.py b/moto/awslambda/responses.py index 46203c10d..ce6c93f16 100644 --- a/moto/awslambda/responses.py +++ b/moto/awslambda/responses.py @@ -120,8 +120,12 @@ class LambdaResponse(BaseResponse): self.setup_class(request, full_url, headers) if request.method == "GET": return self._get_policy(request, full_url, headers) - if request.method == "POST": + elif request.method == "POST": return self._add_policy(request, full_url, headers) + elif request.method == "DELETE": + return self._del_policy(request, full_url, headers, self.querystring) + else: + raise ValueError("Cannot handle {0} request".format(request.method)) def configuration(self, request, full_url, headers): self.setup_class(request, full_url, headers) @@ -141,9 +145,9 @@ class LambdaResponse(BaseResponse): path = request.path if hasattr(request, "path") else path_url(request.url) function_name = path.split("/")[-2] if self.lambda_backend.get_function(function_name): - policy = self.body - self.lambda_backend.add_policy(function_name, policy) - return 200, {}, json.dumps(dict(Statement=policy)) + statement = self.body + self.lambda_backend.add_policy_statement(function_name, statement) + return 200, {}, json.dumps({"Statement": statement}) else: return 404, {}, "{}" @@ -151,28 +155,42 @@ class LambdaResponse(BaseResponse): path = request.path if hasattr(request, "path") else path_url(request.url) function_name = path.split("/")[-2] if 
self.lambda_backend.get_function(function_name): - lambda_function = self.lambda_backend.get_function(function_name) - return ( - 200, - {}, - json.dumps( - dict(Policy='{"Statement":[' + lambda_function.policy + "]}") - ), + out = self.lambda_backend.get_policy_wire_format(function_name) + return 200, {}, out + else: + return 404, {}, "{}" + + def _del_policy(self, request, full_url, headers, querystring): + path = request.path if hasattr(request, "path") else path_url(request.url) + function_name = path.split("/")[-3] + statement_id = path.split("/")[-1].split("?")[0] + revision = querystring.get("RevisionId", "") + if self.lambda_backend.get_function(function_name): + self.lambda_backend.del_policy_statement( + function_name, statement_id, revision ) + return 204, {}, "{}" else: return 404, {}, "{}" def _invoke(self, request, full_url): response_headers = {} - function_name = self.path.rsplit("/", 2)[-2] + # URL Decode in case it's a ARN: + function_name = unquote(self.path.rsplit("/", 2)[-2]) qualifier = self._get_param("qualifier") response_header, payload = self.lambda_backend.invoke( function_name, qualifier, self.body, self.headers, response_headers ) if payload: - return 202, response_headers, payload + if request.headers["X-Amz-Invocation-Type"] == "Event": + status_code = 202 + elif request.headers["X-Amz-Invocation-Type"] == "DryRun": + status_code = 204 + else: + status_code = 200 + return status_code, response_headers, payload else: return 404, response_headers, "{}" @@ -283,7 +301,7 @@ class LambdaResponse(BaseResponse): code["Configuration"]["FunctionArn"] += ":$LATEST" return 200, {}, json.dumps(code) else: - return 404, {}, "{}" + return 404, {"x-amzn-ErrorType": "ResourceNotFoundException"}, "{}" def _get_aws_region(self, full_url): region = self.region_regex.search(full_url) diff --git a/moto/awslambda/urls.py b/moto/awslambda/urls.py index da7346817..c25e58dba 100644 --- a/moto/awslambda/urls.py +++ b/moto/awslambda/urls.py @@ -6,14 +6,16 @@ 
url_bases = ["https?://lambda.(.+).amazonaws.com"] response = LambdaResponse() url_paths = { - "{0}/(?P[^/]+)/functions/?$": response.root, + r"{0}/(?P[^/]+)/functions/?$": response.root, r"{0}/(?P[^/]+)/functions/(?P[\w_:%-]+)/?$": response.function, r"{0}/(?P[^/]+)/functions/(?P[\w_-]+)/versions/?$": response.versions, r"{0}/(?P[^/]+)/event-source-mappings/?$": response.event_source_mappings, r"{0}/(?P[^/]+)/event-source-mappings/(?P[\w_-]+)/?$": response.event_source_mapping, r"{0}/(?P[^/]+)/functions/(?P[\w_-]+)/invocations/?$": response.invoke, + r"{0}/(?P[^/]+)/functions/(?P.+)/invocations/?$": response.invoke, r"{0}/(?P[^/]+)/functions/(?P[\w_-]+)/invoke-async/?$": response.invoke_async, r"{0}/(?P[^/]+)/tags/(?P.+)": response.tag, + r"{0}/(?P[^/]+)/functions/(?P[\w_-]+)/policy/(?P[\w_-]+)$": response.policy, r"{0}/(?P[^/]+)/functions/(?P[\w_-]+)/policy/?$": response.policy, r"{0}/(?P[^/]+)/functions/(?P[\w_-]+)/configuration/?$": response.configuration, r"{0}/(?P[^/]+)/functions/(?P[\w_-]+)/code/?$": response.code, diff --git a/moto/cloudformation/models.py b/moto/cloudformation/models.py index 0ae5d1ae4..b32d63b32 100644 --- a/moto/cloudformation/models.py +++ b/moto/cloudformation/models.py @@ -677,6 +677,8 @@ class CloudFormationBackend(BaseBackend): def list_stack_resources(self, stack_name_or_id): stack = self.get_stack(stack_name_or_id) + if stack is None: + return None return stack.stack_resources def delete_stack(self, name_or_stack_id): diff --git a/moto/cloudformation/responses.py b/moto/cloudformation/responses.py index bf68a6325..77a3051fd 100644 --- a/moto/cloudformation/responses.py +++ b/moto/cloudformation/responses.py @@ -229,6 +229,9 @@ class CloudFormationResponse(BaseResponse): stack_name_or_id = self._get_param("StackName") resources = self.cloudformation_backend.list_stack_resources(stack_name_or_id) + if resources is None: + raise ValidationError(stack_name_or_id) + template = self.response_template(LIST_STACKS_RESOURCES_RESPONSE) 
return template.render(resources=resources) diff --git a/moto/cognitoidp/models.py b/moto/cognitoidp/models.py index 478ceffb2..93e297551 100644 --- a/moto/cognitoidp/models.py +++ b/moto/cognitoidp/models.py @@ -14,6 +14,7 @@ from jose import jws from moto.compat import OrderedDict from moto.core import BaseBackend, BaseModel +from moto.core import ACCOUNT_ID as DEFAULT_ACCOUNT_ID from .exceptions import ( GroupExistsException, NotAuthorizedError, @@ -69,6 +70,9 @@ class CognitoIdpUserPool(BaseModel): def __init__(self, region, name, extended_config): self.region = region self.id = "{}_{}".format(self.region, str(uuid.uuid4().hex)) + self.arn = "arn:aws:cognito-idp:{}:{}:userpool/{}".format( + self.region, DEFAULT_ACCOUNT_ID, self.id + ) self.name = name self.status = None self.extended_config = extended_config or {} @@ -91,6 +95,7 @@ class CognitoIdpUserPool(BaseModel): def _base_json(self): return { "Id": self.id, + "Arn": self.arn, "Name": self.name, "Status": self.status, "CreationDate": time.mktime(self.creation_date.timetuple()), @@ -108,7 +113,9 @@ class CognitoIdpUserPool(BaseModel): return user_pool_json - def create_jwt(self, client_id, username, expires_in=60 * 60, extra_data={}): + def create_jwt( + self, client_id, username, token_use, expires_in=60 * 60, extra_data={} + ): now = int(time.time()) payload = { "iss": "https://cognito-idp.{}.amazonaws.com/{}".format( @@ -116,7 +123,7 @@ class CognitoIdpUserPool(BaseModel): ), "sub": self.users[username].id, "aud": client_id, - "token_use": "id", + "token_use": token_use, "auth_time": now, "exp": now + expires_in, } @@ -125,7 +132,10 @@ class CognitoIdpUserPool(BaseModel): return jws.sign(payload, self.json_web_key, algorithm="RS256"), expires_in def create_id_token(self, client_id, username): - id_token, expires_in = self.create_jwt(client_id, username) + extra_data = self.get_user_extra_data_by_client_id(client_id, username) + id_token, expires_in = self.create_jwt( + client_id, username, "id", 
extra_data=extra_data + ) self.id_tokens[id_token] = (client_id, username) return id_token, expires_in @@ -135,10 +145,7 @@ class CognitoIdpUserPool(BaseModel): return refresh_token def create_access_token(self, client_id, username): - extra_data = self.get_user_extra_data_by_client_id(client_id, username) - access_token, expires_in = self.create_jwt( - client_id, username, extra_data=extra_data - ) + access_token, expires_in = self.create_jwt(client_id, username, "access") self.access_tokens[access_token] = (client_id, username) return access_token, expires_in @@ -562,12 +569,17 @@ class CognitoIdpBackend(BaseBackend): user.groups.discard(group) # User - def admin_create_user(self, user_pool_id, username, temporary_password, attributes): + def admin_create_user( + self, user_pool_id, username, message_action, temporary_password, attributes + ): user_pool = self.user_pools.get(user_pool_id) if not user_pool: raise ResourceNotFoundError(user_pool_id) - if username in user_pool.users: + if message_action and message_action == "RESEND": + if username not in user_pool.users: + raise UserNotFoundError(username) + elif username in user_pool.users: raise UsernameExistsException(username) user = CognitoIdpUser( diff --git a/moto/cognitoidp/responses.py b/moto/cognitoidp/responses.py index 80247b076..6c89c4806 100644 --- a/moto/cognitoidp/responses.py +++ b/moto/cognitoidp/responses.py @@ -259,10 +259,12 @@ class CognitoIdpResponse(BaseResponse): def admin_create_user(self): user_pool_id = self._get_param("UserPoolId") username = self._get_param("Username") + message_action = self._get_param("MessageAction") temporary_password = self._get_param("TemporaryPassword") user = cognitoidp_backends[self.region].admin_create_user( user_pool_id, username, + message_action, temporary_password, self._get_param("UserAttributes", []), ) @@ -279,9 +281,18 @@ class CognitoIdpResponse(BaseResponse): user_pool_id = self._get_param("UserPoolId") limit = self._get_param("Limit") token = 
self._get_param("PaginationToken") + filt = self._get_param("Filter") users, token = cognitoidp_backends[self.region].list_users( user_pool_id, limit=limit, pagination_token=token ) + if filt: + name, value = filt.replace('"', "").split("=") + users = [ + user + for user in users + for attribute in user.attributes + if attribute["Name"] == name and attribute["Value"] == value + ] response = {"Users": [user.to_json(extended=True) for user in users]} if token: response["PaginationToken"] = str(token) diff --git a/moto/config/models.py b/moto/config/models.py index 45dccd1ba..a66576979 100644 --- a/moto/config/models.py +++ b/moto/config/models.py @@ -43,7 +43,7 @@ from moto.config.exceptions import ( ) from moto.core import BaseBackend, BaseModel -from moto.s3.config import s3_config_query +from moto.s3.config import s3_account_public_access_block_query, s3_config_query from moto.core import ACCOUNT_ID as DEFAULT_ACCOUNT_ID @@ -58,7 +58,10 @@ POP_STRINGS = [ DEFAULT_PAGE_SIZE = 100 # Map the Config resource type to a backend: -RESOURCE_MAP = {"AWS::S3::Bucket": s3_config_query} +RESOURCE_MAP = { + "AWS::S3::Bucket": s3_config_query, + "AWS::S3::AccountPublicAccessBlock": s3_account_public_access_block_query, +} def datetime2int(date): @@ -867,16 +870,17 @@ class ConfigBackend(BaseBackend): backend_region=backend_query_region, ) - result = { - "resourceIdentifiers": [ - { - "resourceType": identifier["type"], - "resourceId": identifier["id"], - "resourceName": identifier["name"], - } - for identifier in identifiers - ] - } + resource_identifiers = [] + for identifier in identifiers: + item = {"resourceType": identifier["type"], "resourceId": identifier["id"]} + + # Some resource types lack names: + if identifier.get("name"): + item["resourceName"] = identifier["name"] + + resource_identifiers.append(item) + + result = {"resourceIdentifiers": resource_identifiers} if new_token: result["nextToken"] = new_token @@ -927,18 +931,21 @@ class ConfigBackend(BaseBackend): 
resource_region=resource_region, ) - result = { - "ResourceIdentifiers": [ - { - "SourceAccountId": DEFAULT_ACCOUNT_ID, - "SourceRegion": identifier["region"], - "ResourceType": identifier["type"], - "ResourceId": identifier["id"], - "ResourceName": identifier["name"], - } - for identifier in identifiers - ] - } + resource_identifiers = [] + for identifier in identifiers: + item = { + "SourceAccountId": DEFAULT_ACCOUNT_ID, + "SourceRegion": identifier["region"], + "ResourceType": identifier["type"], + "ResourceId": identifier["id"], + } + + if identifier.get("name"): + item["ResourceName"] = identifier["name"] + + resource_identifiers.append(item) + + result = {"ResourceIdentifiers": resource_identifiers} if new_token: result["NextToken"] = new_token diff --git a/moto/core/models.py b/moto/core/models.py index 3be3bbd8e..ffb2ffd9f 100644 --- a/moto/core/models.py +++ b/moto/core/models.py @@ -606,12 +606,13 @@ class ConfigQueryModel(object): As such, the proper way to implement is to first obtain a full list of results from all the region backends, and then filter from there. It may be valuable to make this a concatenation of the region and resource name. - :param resource_region: - :param resource_ids: - :param resource_name: - :param limit: - :param next_token: + :param resource_ids: A list of resource IDs + :param resource_name: The individual name of a resource + :param limit: How many per page + :param next_token: The item that will page on :param backend_region: The region for the backend to pull results from. Set to `None` if this is an aggregated query. + :param resource_region: The region for where the resources reside to pull results from. Set to `None` if this is a + non-aggregated query. 
:return: This should return a list of Dicts that have the following fields: [ { diff --git a/moto/dynamodb2/comparisons.py b/moto/dynamodb2/comparisons.py index 69d7f74e0..29951d92d 100644 --- a/moto/dynamodb2/comparisons.py +++ b/moto/dynamodb2/comparisons.py @@ -977,10 +977,8 @@ class OpLessThan(Op): lhs = self.lhs.expr(item) rhs = self.rhs.expr(item) # In python3 None is not a valid comparator when using < or > so must be handled specially - if lhs and rhs: + if lhs is not None and rhs is not None: return lhs < rhs - elif lhs is None and rhs: - return True else: return False @@ -992,10 +990,8 @@ class OpGreaterThan(Op): lhs = self.lhs.expr(item) rhs = self.rhs.expr(item) # In python3 None is not a valid comparator when using < or > so must be handled specially - if lhs and rhs: + if lhs is not None and rhs is not None: return lhs > rhs - elif lhs and rhs is None: - return True else: return False @@ -1025,10 +1021,8 @@ class OpLessThanOrEqual(Op): lhs = self.lhs.expr(item) rhs = self.rhs.expr(item) # In python3 None is not a valid comparator when using < or > so must be handled specially - if lhs and rhs: + if lhs is not None and rhs is not None: return lhs <= rhs - elif lhs is None and rhs or lhs is None and rhs is None: - return True else: return False @@ -1040,10 +1034,8 @@ class OpGreaterThanOrEqual(Op): lhs = self.lhs.expr(item) rhs = self.rhs.expr(item) # In python3 None is not a valid comparator when using < or > so must be handled specially - if lhs and rhs: + if lhs is not None and rhs is not None: return lhs >= rhs - elif lhs and rhs is None or lhs is None and rhs is None: - return True else: return False diff --git a/moto/dynamodb2/models.py b/moto/dynamodb2/models.py index d4907cba5..82c3559ea 100644 --- a/moto/dynamodb2/models.py +++ b/moto/dynamodb2/models.py @@ -448,16 +448,21 @@ class Item(BaseModel): if list_append_re: new_value = expression_attribute_values[list_append_re.group(2).strip()] old_list_key = list_append_re.group(1) - # Get the 
existing value - old_list = self.attrs[old_list_key.split(".")[0]] - if "." in old_list_key: - # Value is nested inside a map - find the appropriate child attr - old_list = old_list.child_attr( - ".".join(old_list_key.split(".")[1:]) + # old_key could be a function itself (if_not_exists) + if old_list_key.startswith("if_not_exists"): + old_list = DynamoType( + expression_attribute_values[self._get_default(old_list_key)] ) + else: + old_list = self.attrs[old_list_key.split(".")[0]] + if "." in old_list_key: + # Value is nested inside a map - find the appropriate child attr + old_list = old_list.child_attr( + ".".join(old_list_key.split(".")[1:]) + ) if not old_list.is_list(): raise ParamValidationError - old_list.value.extend(new_value["L"]) + old_list.value.extend([DynamoType(v) for v in new_value["L"]]) value = old_list return value diff --git a/moto/dynamodb2/responses.py b/moto/dynamodb2/responses.py index c9f3529a9..d3767c3fd 100644 --- a/moto/dynamodb2/responses.py +++ b/moto/dynamodb2/responses.py @@ -508,6 +508,13 @@ class DynamoHandler(BaseResponse): # 'KeyConditions': {u'forum_name': {u'ComparisonOperator': u'EQ', u'AttributeValueList': [{u'S': u'the-key'}]}} key_conditions = self.body.get("KeyConditions") query_filters = self.body.get("QueryFilter") + + if not (key_conditions or query_filters): + return self.error( + "com.amazonaws.dynamodb.v20111205#ValidationException", + "Either KeyConditions or QueryFilter should be present", + ) + if key_conditions: ( hash_key_name, diff --git a/moto/ec2/models.py b/moto/ec2/models.py index 93a350914..166d8e646 100644 --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -27,6 +27,7 @@ from moto.core.utils import ( iso_8601_datetime_with_milliseconds, camelcase_to_underscores, ) +from moto.iam.models import ACCOUNT_ID from .exceptions import ( CidrLimitExceeded, DependencyViolationError, @@ -139,18 +140,23 @@ from .utils import ( rsa_public_key_fingerprint, ) -INSTANCE_TYPES = json.load( - 
open(resource_filename(__name__, "resources/instance_types.json"), "r") -) -AMIS = json.load( - open( - os.environ.get("MOTO_AMIS_PATH") - or resource_filename(__name__, "resources/amis.json"), - "r", - ) + +def _load_resource(filename): + with open(filename, "r") as f: + return json.load(f) + + +INSTANCE_TYPES = _load_resource( + resource_filename(__name__, "resources/instance_types.json") ) -OWNER_ID = "111122223333" +AMIS = _load_resource( + os.environ.get("MOTO_AMIS_PATH") + or resource_filename(__name__, "resources/amis.json"), +) + + +OWNER_ID = ACCOUNT_ID def utc_date_and_time(): @@ -1336,7 +1342,7 @@ class AmiBackend(object): source_ami=None, name=name, description=description, - owner_id=context.get_current_user() if context else OWNER_ID, + owner_id=OWNER_ID, ) self.amis[ami_id] = ami return ami @@ -1387,14 +1393,7 @@ class AmiBackend(object): # Limit by owner ids if owners: # support filtering by Owners=['self'] - owners = list( - map( - lambda o: context.get_current_user() - if context and o == "self" - else o, - owners, - ) - ) + owners = list(map(lambda o: OWNER_ID if o == "self" else o, owners,)) images = [ami for ami in images if ami.owner_id in owners] # Generic filters diff --git a/moto/ec2/responses/security_groups.py b/moto/ec2/responses/security_groups.py index 6f2926f61..f0002d5bd 100644 --- a/moto/ec2/responses/security_groups.py +++ b/moto/ec2/responses/security_groups.py @@ -104,7 +104,7 @@ class SecurityGroups(BaseResponse): if self.is_not_dryrun("GrantSecurityGroupIngress"): for args in self._process_rules_from_querystring(): self.ec2_backend.authorize_security_group_ingress(*args) - return AUTHORIZE_SECURITY_GROUP_INGRESS_REPONSE + return AUTHORIZE_SECURITY_GROUP_INGRESS_RESPONSE def create_security_group(self): name = self._get_param("GroupName") @@ -158,7 +158,7 @@ class SecurityGroups(BaseResponse): if self.is_not_dryrun("RevokeSecurityGroupIngress"): for args in self._process_rules_from_querystring(): 
self.ec2_backend.revoke_security_group_ingress(*args) - return REVOKE_SECURITY_GROUP_INGRESS_REPONSE + return REVOKE_SECURITY_GROUP_INGRESS_RESPONSE CREATE_SECURITY_GROUP_RESPONSE = """ @@ -265,12 +265,12 @@ DESCRIBE_SECURITY_GROUPS_RESPONSE = ( """ ) -AUTHORIZE_SECURITY_GROUP_INGRESS_REPONSE = """ +AUTHORIZE_SECURITY_GROUP_INGRESS_RESPONSE = """ 59dbff89-35bd-4eac-99ed-be587EXAMPLE true """ -REVOKE_SECURITY_GROUP_INGRESS_REPONSE = """ +REVOKE_SECURITY_GROUP_INGRESS_RESPONSE = """ 59dbff89-35bd-4eac-99ed-be587EXAMPLE true """ diff --git a/moto/ecs/models.py b/moto/ecs/models.py index 845bdf650..30e4687c4 100644 --- a/moto/ecs/models.py +++ b/moto/ecs/models.py @@ -118,6 +118,7 @@ class TaskDefinition(BaseObject): revision, container_definitions, region_name, + network_mode=None, volumes=None, tags=None, ): @@ -132,6 +133,10 @@ class TaskDefinition(BaseObject): self.volumes = [] else: self.volumes = volumes + if network_mode is None: + self.network_mode = "bridge" + else: + self.network_mode = network_mode @property def response_object(self): @@ -553,7 +558,7 @@ class EC2ContainerServiceBackend(BaseBackend): raise Exception("{0} is not a cluster".format(cluster_name)) def register_task_definition( - self, family, container_definitions, volumes, tags=None + self, family, container_definitions, volumes=None, network_mode=None, tags=None ): if family in self.task_definitions: last_id = self._get_last_task_definition_revision_id(family) @@ -562,7 +567,13 @@ class EC2ContainerServiceBackend(BaseBackend): self.task_definitions[family] = {} revision = 1 task_definition = TaskDefinition( - family, revision, container_definitions, self.region_name, volumes, tags + family, + revision, + container_definitions, + self.region_name, + volumes=volumes, + network_mode=network_mode, + tags=tags, ) self.task_definitions[family][revision] = task_definition diff --git a/moto/ecs/responses.py b/moto/ecs/responses.py index d08bded2c..49bf022b4 100644 --- a/moto/ecs/responses.py +++ 
b/moto/ecs/responses.py @@ -62,8 +62,13 @@ class EC2ContainerServiceResponse(BaseResponse): container_definitions = self._get_param("containerDefinitions") volumes = self._get_param("volumes") tags = self._get_param("tags") + network_mode = self._get_param("networkMode") task_definition = self.ecs_backend.register_task_definition( - family, container_definitions, volumes, tags + family, + container_definitions, + volumes=volumes, + network_mode=network_mode, + tags=tags, ) return json.dumps({"taskDefinition": task_definition.response_object}) diff --git a/moto/events/models.py b/moto/events/models.py index 84a663b6d..c400677df 100644 --- a/moto/events/models.py +++ b/moto/events/models.py @@ -143,6 +143,9 @@ class EventsBackend(BaseBackend): def delete_rule(self, name): self.rules_order.pop(self.rules_order.index(name)) + arn = self.rules.get(name).arn + if self.tagger.has_tags(arn): + self.tagger.delete_all_tags_for_resource(arn) return self.rules.pop(name) is not None def describe_rule(self, name): @@ -362,32 +365,41 @@ class EventsBackend(BaseBackend): ) self.event_buses.pop(name, None) - + def list_tags_for_resource(self, arn): name = arn.split('/')[-1] if name in self.rules: return self.tagger.list_tags_for_resource(self.rules[name].arn) raise JsonRESTError( - "ResourceNotFoundException", "An entity that you specified does not exist." - ) + "ResourceNotFoundException", "An entity that you specified does not exist." + ) + + def list_tags_for_resource(self, arn): + name = arn.split("/")[-1] + if name in self.rules: + return self.tagger.list_tags_for_resource(self.rules[name].arn) + raise JsonRESTError( + "ResourceNotFoundException", "An entity that you specified does not exist." + ) def tag_resource(self, arn, tags): - name = arn.split('/')[-1] + name = arn.split("/")[-1] if name in self.rules: self.tagger.tag_resource(self.rules[name].arn, tags) return {} raise JsonRESTError( - "ResourceNotFoundException", "An entity that you specified does not exist." 
- ) + "ResourceNotFoundException", "An entity that you specified does not exist." + ) def untag_resource(self, arn, tag_names): - name = arn.split('/')[-1] + name = arn.split("/")[-1] if name in self.rules: self.tagger.untag_resource_using_names(self.rules[name].arn, tag_names) return {} raise JsonRESTError( - "ResourceNotFoundException", "An entity that you specified does not exist." - ) + "ResourceNotFoundException", "An entity that you specified does not exist." + ) + events_backends = {} for region in Session().get_available_regions("events"): diff --git a/moto/iam/responses.py b/moto/iam/responses.py index 06561d4c4..12501769e 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -563,6 +563,10 @@ class IamResponse(BaseResponse): def create_access_key(self): user_name = self._get_param("UserName") + if not user_name: + access_key_id = self.get_current_user() + access_key = iam_backend.get_access_key_last_used(access_key_id) + user_name = access_key["user_name"] key = iam_backend.create_access_key(user_name) template = self.response_template(CREATE_ACCESS_KEY_TEMPLATE) @@ -572,6 +576,10 @@ class IamResponse(BaseResponse): user_name = self._get_param("UserName") access_key_id = self._get_param("AccessKeyId") status = self._get_param("Status") + if not user_name: + access_key = iam_backend.get_access_key_last_used(access_key_id) + user_name = access_key["user_name"] + iam_backend.update_access_key(user_name, access_key_id, status) template = self.response_template(GENERIC_EMPTY_TEMPLATE) return template.render(name="UpdateAccessKey") @@ -587,6 +595,11 @@ class IamResponse(BaseResponse): def list_access_keys(self): user_name = self._get_param("UserName") + if not user_name: + access_key_id = self.get_current_user() + access_key = iam_backend.get_access_key_last_used(access_key_id) + user_name = access_key["user_name"] + keys = iam_backend.get_all_access_keys(user_name) template = self.response_template(LIST_ACCESS_KEYS_TEMPLATE) return 
template.render(user_name=user_name, keys=keys) @@ -594,6 +607,9 @@ class IamResponse(BaseResponse): def delete_access_key(self): user_name = self._get_param("UserName") access_key_id = self._get_param("AccessKeyId") + if not user_name: + access_key = iam_backend.get_access_key_last_used(access_key_id) + user_name = access_key["user_name"] iam_backend.delete_access_key(access_key_id, user_name) template = self.response_template(GENERIC_EMPTY_TEMPLATE) diff --git a/moto/iot/exceptions.py b/moto/iot/exceptions.py index 14d577389..d114a12ad 100644 --- a/moto/iot/exceptions.py +++ b/moto/iot/exceptions.py @@ -22,6 +22,15 @@ class InvalidRequestException(IoTClientError): ) +class InvalidStateTransitionException(IoTClientError): + def __init__(self, msg=None): + self.code = 409 + super(InvalidStateTransitionException, self).__init__( + "InvalidStateTransitionException", + msg or "An attempt was made to change to an invalid state.", + ) + + class VersionConflictException(IoTClientError): def __init__(self, name): self.code = 409 diff --git a/moto/iot/models.py b/moto/iot/models.py index d59d7533c..de4383b96 100644 --- a/moto/iot/models.py +++ b/moto/iot/models.py @@ -17,6 +17,7 @@ from .exceptions import ( DeleteConflictException, ResourceNotFoundException, InvalidRequestException, + InvalidStateTransitionException, VersionConflictException, ) @@ -29,7 +30,7 @@ class FakeThing(BaseModel): self.attributes = attributes self.arn = "arn:aws:iot:%s:1:thing/%s" % (self.region_name, thing_name) self.version = 1 - # TODO: we need to handle 'version'? + # TODO: we need to handle "version"? 
# for iot-data self.thing_shadow = None @@ -174,18 +175,19 @@ class FakeCertificate(BaseModel): class FakePolicy(BaseModel): - def __init__(self, name, document, region_name): + def __init__(self, name, document, region_name, default_version_id="1"): self.name = name self.document = document self.arn = "arn:aws:iot:%s:1:policy/%s" % (region_name, name) - self.version = "1" # TODO: handle version + self.default_version_id = default_version_id + self.versions = [FakePolicyVersion(self.name, document, True, region_name)] def to_get_dict(self): return { "policyName": self.name, "policyArn": self.arn, "policyDocument": self.document, - "defaultVersionId": self.version, + "defaultVersionId": self.default_version_id, } def to_dict_at_creation(self): @@ -193,13 +195,52 @@ class FakePolicy(BaseModel): "policyName": self.name, "policyArn": self.arn, "policyDocument": self.document, - "policyVersionId": self.version, + "policyVersionId": self.default_version_id, } def to_dict(self): return {"policyName": self.name, "policyArn": self.arn} +class FakePolicyVersion(object): + def __init__(self, policy_name, document, is_default, region_name): + self.name = policy_name + self.arn = "arn:aws:iot:%s:1:policy/%s" % (region_name, policy_name) + self.document = document or {} + self.is_default = is_default + self.version_id = "1" + + self.create_datetime = time.mktime(datetime(2015, 1, 1).timetuple()) + self.last_modified_datetime = time.mktime(datetime(2015, 1, 2).timetuple()) + + def to_get_dict(self): + return { + "policyName": self.name, + "policyArn": self.arn, + "policyDocument": self.document, + "policyVersionId": self.version_id, + "isDefaultVersion": self.is_default, + "creationDate": self.create_datetime, + "lastModifiedDate": self.last_modified_datetime, + "generationId": self.version_id, + } + + def to_dict_at_creation(self): + return { + "policyArn": self.arn, + "policyDocument": self.document, + "policyVersionId": self.version_id, + "isDefaultVersion": self.is_default, + 
} + + def to_dict(self): + return { + "versionId": self.version_id, + "isDefaultVersion": self.is_default, + "createDate": self.create_datetime, + } + + class FakeJob(BaseModel): JOB_ID_REGEX_PATTERN = "[a-zA-Z0-9_-]" JOB_ID_REGEX = re.compile(JOB_ID_REGEX_PATTERN) @@ -226,12 +267,14 @@ class FakeJob(BaseModel): self.targets = targets self.document_source = document_source self.document = document + self.force = False self.description = description self.presigned_url_config = presigned_url_config self.target_selection = target_selection self.job_executions_rollout_config = job_executions_rollout_config - self.status = None # IN_PROGRESS | CANCELED | COMPLETED + self.status = "QUEUED" # IN_PROGRESS | CANCELED | COMPLETED self.comment = None + self.reason_code = None self.created_at = time.mktime(datetime(2015, 1, 1).timetuple()) self.last_updated_at = time.mktime(datetime(2015, 1, 1).timetuple()) self.completed_at = None @@ -258,9 +301,11 @@ class FakeJob(BaseModel): "jobExecutionsRolloutConfig": self.job_executions_rollout_config, "status": self.status, "comment": self.comment, + "forceCanceled": self.force, + "reasonCode": self.reason_code, "createdAt": self.created_at, "lastUpdatedAt": self.last_updated_at, - "completedAt": self.completedAt, + "completedAt": self.completed_at, "jobProcessDetails": self.job_process_details, "documentParameters": self.document_parameters, "document": self.document, @@ -275,12 +320,67 @@ class FakeJob(BaseModel): return regex_match and length_match +class FakeJobExecution(BaseModel): + def __init__( + self, + job_id, + thing_arn, + status="QUEUED", + force_canceled=False, + status_details_map={}, + ): + self.job_id = job_id + self.status = status # IN_PROGRESS | CANCELED | COMPLETED + self.force_canceled = force_canceled + self.status_details_map = status_details_map + self.thing_arn = thing_arn + self.queued_at = time.mktime(datetime(2015, 1, 1).timetuple()) + self.started_at = time.mktime(datetime(2015, 1, 1).timetuple()) + 
self.last_updated_at = time.mktime(datetime(2015, 1, 1).timetuple()) + self.execution_number = 123 + self.version_number = 123 + self.approximate_seconds_before_time_out = 123 + + def to_get_dict(self): + obj = { + "jobId": self.job_id, + "status": self.status, + "forceCanceled": self.force_canceled, + "statusDetails": {"detailsMap": self.status_details_map}, + "thingArn": self.thing_arn, + "queuedAt": self.queued_at, + "startedAt": self.started_at, + "lastUpdatedAt": self.last_updated_at, + "executionNumber": self.execution_number, + "versionNumber": self.version_number, + "approximateSecondsBeforeTimedOut": self.approximate_seconds_before_time_out, + } + + return obj + + def to_dict(self): + obj = { + "jobId": self.job_id, + "thingArn": self.thing_arn, + "jobExecutionSummary": { + "status": self.status, + "queuedAt": self.queued_at, + "startedAt": self.started_at, + "lastUpdatedAt": self.last_updated_at, + "executionNumber": self.execution_number, + }, + } + + return obj + + class IoTBackend(BaseBackend): def __init__(self, region_name=None): super(IoTBackend, self).__init__() self.region_name = region_name self.things = OrderedDict() self.jobs = OrderedDict() + self.job_executions = OrderedDict() self.thing_types = OrderedDict() self.thing_groups = OrderedDict() self.certificates = OrderedDict() @@ -535,6 +635,28 @@ class IoTBackend(BaseBackend): self.policies[policy.name] = policy return policy + def attach_policy(self, policy_name, target): + principal = self._get_principal(target) + policy = self.get_policy(policy_name) + k = (target, policy_name) + if k in self.principal_policies: + return + self.principal_policies[k] = (principal, policy) + + def detach_policy(self, policy_name, target): + # this may raises ResourceNotFoundException + self._get_principal(target) + self.get_policy(policy_name) + + k = (target, policy_name) + if k not in self.principal_policies: + raise ResourceNotFoundException() + del self.principal_policies[k] + + def 
list_attached_policies(self, target): + policies = [v[1] for k, v in self.principal_policies.items() if k[0] == target] + return policies + def list_policies(self): policies = self.policies.values() return policies @@ -559,6 +681,60 @@ class IoTBackend(BaseBackend): policy = self.get_policy(policy_name) del self.policies[policy.name] + def create_policy_version(self, policy_name, policy_document, set_as_default): + policy = self.get_policy(policy_name) + if not policy: + raise ResourceNotFoundException() + version = FakePolicyVersion( + policy_name, policy_document, set_as_default, self.region_name + ) + policy.versions.append(version) + version.version_id = "{0}".format(len(policy.versions)) + if set_as_default: + self.set_default_policy_version(policy_name, version.version_id) + return version + + def set_default_policy_version(self, policy_name, version_id): + policy = self.get_policy(policy_name) + if not policy: + raise ResourceNotFoundException() + for version in policy.versions: + if version.version_id == version_id: + version.is_default = True + policy.default_version_id = version.version_id + policy.document = version.document + else: + version.is_default = False + + def get_policy_version(self, policy_name, version_id): + policy = self.get_policy(policy_name) + if not policy: + raise ResourceNotFoundException() + for version in policy.versions: + if version.version_id == version_id: + return version + raise ResourceNotFoundException() + + def list_policy_versions(self, policy_name): + policy = self.get_policy(policy_name) + if not policy: + raise ResourceNotFoundException() + return policy.versions + + def delete_policy_version(self, policy_name, version_id): + policy = self.get_policy(policy_name) + if not policy: + raise ResourceNotFoundException() + if version_id == policy.default_version_id: + raise InvalidRequestException( + "Cannot delete the default version of a policy" + ) + for i, v in enumerate(policy.versions): + if v.version_id == version_id: 
+ del policy.versions[i] + return + raise ResourceNotFoundException() + def _get_principal(self, principal_arn): """ raise ResourceNotFoundException @@ -574,14 +750,6 @@ class IoTBackend(BaseBackend): pass raise ResourceNotFoundException() - def attach_policy(self, policy_name, target): - principal = self._get_principal(target) - policy = self.get_policy(policy_name) - k = (target, policy_name) - if k in self.principal_policies: - return - self.principal_policies[k] = (principal, policy) - def attach_principal_policy(self, policy_name, principal_arn): principal = self._get_principal(principal_arn) policy = self.get_policy(policy_name) @@ -590,15 +758,6 @@ class IoTBackend(BaseBackend): return self.principal_policies[k] = (principal, policy) - def detach_policy(self, policy_name, target): - # this may raises ResourceNotFoundException - self._get_principal(target) - self.get_policy(policy_name) - k = (target, policy_name) - if k not in self.principal_policies: - raise ResourceNotFoundException() - del self.principal_policies[k] - def detach_principal_policy(self, policy_name, principal_arn): # this may raises ResourceNotFoundException self._get_principal(principal_arn) @@ -819,11 +978,187 @@ class IoTBackend(BaseBackend): self.region_name, ) self.jobs[job_id] = job + + for thing_arn in targets: + thing_name = thing_arn.split(":")[-1].split("/")[-1] + job_execution = FakeJobExecution(job_id, thing_arn) + self.job_executions[(job_id, thing_name)] = job_execution return job.job_arn, job_id, description def describe_job(self, job_id): + jobs = [_ for _ in self.jobs.values() if _.job_id == job_id] + if len(jobs) == 0: + raise ResourceNotFoundException() + return jobs[0] + + def delete_job(self, job_id, force): + job = self.jobs[job_id] + + if job.status == "IN_PROGRESS" and force: + del self.jobs[job_id] + elif job.status != "IN_PROGRESS": + del self.jobs[job_id] + else: + raise InvalidStateTransitionException() + + def cancel_job(self, job_id, reason_code, comment, 
force): + job = self.jobs[job_id] + + job.reason_code = reason_code if reason_code is not None else job.reason_code + job.comment = comment if comment is not None else job.comment + job.force = force if force is not None and force != job.force else job.force + job.status = "CANCELED" + + if job.status == "IN_PROGRESS" and force: + self.jobs[job_id] = job + elif job.status != "IN_PROGRESS": + self.jobs[job_id] = job + else: + raise InvalidStateTransitionException() + + return job + + def get_job_document(self, job_id): return self.jobs[job_id] + def list_jobs( + self, + status, + target_selection, + max_results, + token, + thing_group_name, + thing_group_id, + ): + # TODO: implement filters + all_jobs = [_.to_dict() for _ in self.jobs.values()] + filtered_jobs = all_jobs + + if token is None: + jobs = filtered_jobs[0:max_results] + next_token = str(max_results) if len(filtered_jobs) > max_results else None + else: + token = int(token) + jobs = filtered_jobs[token : token + max_results] + next_token = ( + str(token + max_results) + if len(filtered_jobs) > token + max_results + else None + ) + + return jobs, next_token + + def describe_job_execution(self, job_id, thing_name, execution_number): + try: + job_execution = self.job_executions[(job_id, thing_name)] + except KeyError: + raise ResourceNotFoundException() + + if job_execution is None or ( + execution_number is not None + and job_execution.execution_number != execution_number + ): + raise ResourceNotFoundException() + + return job_execution + + def cancel_job_execution( + self, job_id, thing_name, force, expected_version, status_details + ): + job_execution = self.job_executions[(job_id, thing_name)] + + if job_execution is None: + raise ResourceNotFoundException() + + job_execution.force_canceled = ( + force if force is not None else job_execution.force_canceled + ) + # TODO: implement expected_version and status_details (at most 10 can be specified) + + if job_execution.status == "IN_PROGRESS" and force: + 
job_execution.status = "CANCELED" + self.job_executions[(job_id, thing_name)] = job_execution + elif job_execution.status != "IN_PROGRESS": + job_execution.status = "CANCELED" + self.job_executions[(job_id, thing_name)] = job_execution + else: + raise InvalidStateTransitionException() + + def delete_job_execution(self, job_id, thing_name, execution_number, force): + job_execution = self.job_executions[(job_id, thing_name)] + + if job_execution.execution_number != execution_number: + raise ResourceNotFoundException() + + if job_execution.status == "IN_PROGRESS" and force: + del self.job_executions[(job_id, thing_name)] + elif job_execution.status != "IN_PROGRESS": + del self.job_executions[(job_id, thing_name)] + else: + raise InvalidStateTransitionException() + + def list_job_executions_for_job(self, job_id, status, max_results, next_token): + job_executions = [ + self.job_executions[je].to_dict() + for je in self.job_executions + if je[0] == job_id + ] + + if status is not None: + job_executions = list( + filter( + lambda elem: status in elem["status"] and elem["status"] == status, + job_executions, + ) + ) + + token = next_token + if token is None: + job_executions = job_executions[0:max_results] + next_token = str(max_results) if len(job_executions) > max_results else None + else: + token = int(token) + job_executions = job_executions[token : token + max_results] + next_token = ( + str(token + max_results) + if len(job_executions) > token + max_results + else None + ) + + return job_executions, next_token + + def list_job_executions_for_thing( + self, thing_name, status, max_results, next_token + ): + job_executions = [ + self.job_executions[je].to_dict() + for je in self.job_executions + if je[1] == thing_name + ] + + if status is not None: + job_executions = list( + filter( + lambda elem: status in elem["status"] and elem["status"] == status, + job_executions, + ) + ) + + token = next_token + if token is None: + job_executions = job_executions[0:max_results] + 
next_token = str(max_results) if len(job_executions) > max_results else None + else: + token = int(token) + job_executions = job_executions[token : token + max_results] + next_token = ( + str(token + max_results) + if len(job_executions) > token + max_results + else None + ) + + return job_executions, next_token + iot_backends = {} for region in Session().get_available_regions("iot"): diff --git a/moto/iot/responses.py b/moto/iot/responses.py index 5981eaa37..c12d4b5c5 100644 --- a/moto/iot/responses.py +++ b/moto/iot/responses.py @@ -1,6 +1,7 @@ from __future__ import unicode_literals import json +from six.moves.urllib.parse import unquote from moto.core.responses import BaseResponse from .models import iot_backends @@ -141,6 +142,8 @@ class IoTResponse(BaseResponse): createdAt=job.created_at, description=job.description, documentParameters=job.document_parameters, + forceCanceled=job.force, + reasonCode=job.reason_code, jobArn=job.job_arn, jobExecutionsRolloutConfig=job.job_executions_rollout_config, jobId=job.job_id, @@ -154,6 +157,127 @@ class IoTResponse(BaseResponse): ) ) + def delete_job(self): + job_id = self._get_param("jobId") + force = self._get_bool_param("force") + + self.iot_backend.delete_job(job_id=job_id, force=force) + + return json.dumps(dict()) + + def cancel_job(self): + job_id = self._get_param("jobId") + reason_code = self._get_param("reasonCode") + comment = self._get_param("comment") + force = self._get_bool_param("force") + + job = self.iot_backend.cancel_job( + job_id=job_id, reason_code=reason_code, comment=comment, force=force + ) + + return json.dumps(job.to_dict()) + + def get_job_document(self): + job = self.iot_backend.get_job_document(job_id=self._get_param("jobId")) + + if job.document is not None: + return json.dumps({"document": job.document}) + else: + # job.document_source is not None: + # TODO: needs to be implemented to get document_source's content from S3 + return json.dumps({"document": ""}) + + def list_jobs(self): + 
status = (self._get_param("status"),) + target_selection = (self._get_param("targetSelection"),) + max_results = self._get_int_param( + "maxResults", 50 + ) # not the default, but makes testing easier + previous_next_token = self._get_param("nextToken") + thing_group_name = (self._get_param("thingGroupName"),) + thing_group_id = self._get_param("thingGroupId") + jobs, next_token = self.iot_backend.list_jobs( + status=status, + target_selection=target_selection, + max_results=max_results, + token=previous_next_token, + thing_group_name=thing_group_name, + thing_group_id=thing_group_id, + ) + + return json.dumps(dict(jobs=jobs, nextToken=next_token)) + + def describe_job_execution(self): + job_id = self._get_param("jobId") + thing_name = self._get_param("thingName") + execution_number = self._get_int_param("executionNumber") + job_execution = self.iot_backend.describe_job_execution( + job_id=job_id, thing_name=thing_name, execution_number=execution_number + ) + + return json.dumps(dict(execution=job_execution.to_get_dict())) + + def cancel_job_execution(self): + job_id = self._get_param("jobId") + thing_name = self._get_param("thingName") + force = self._get_bool_param("force") + expected_version = self._get_int_param("expectedVersion") + status_details = self._get_param("statusDetails") + + self.iot_backend.cancel_job_execution( + job_id=job_id, + thing_name=thing_name, + force=force, + expected_version=expected_version, + status_details=status_details, + ) + + return json.dumps(dict()) + + def delete_job_execution(self): + job_id = self._get_param("jobId") + thing_name = self._get_param("thingName") + execution_number = self._get_int_param("executionNumber") + force = self._get_bool_param("force") + + self.iot_backend.delete_job_execution( + job_id=job_id, + thing_name=thing_name, + execution_number=execution_number, + force=force, + ) + + return json.dumps(dict()) + + def list_job_executions_for_job(self): + job_id = self._get_param("jobId") + status = 
self._get_param("status") + max_results = self._get_int_param( + "maxResults", 50 + ) # not the default, but makes testing easier + next_token = self._get_param("nextToken") + job_executions, next_token = self.iot_backend.list_job_executions_for_job( + job_id=job_id, status=status, max_results=max_results, next_token=next_token + ) + + return json.dumps(dict(executionSummaries=job_executions, nextToken=next_token)) + + def list_job_executions_for_thing(self): + thing_name = self._get_param("thingName") + status = self._get_param("status") + max_results = self._get_int_param( + "maxResults", 50 + ) # not the default, but makes testing easier + next_token = self._get_param("nextToken") + job_executions, next_token = self.iot_backend.list_job_executions_for_thing( + thing_name=thing_name, + status=status, + max_results=max_results, + next_token=next_token, + ) + + return json.dumps(dict(executionSummaries=job_executions, nextToken=next_token)) + def create_keys_and_certificate(self): set_as_active = self._get_bool_param("setAsActive") cert, key_pair = self.iot_backend.create_keys_and_certificate( @@ -241,12 +365,61 @@ class IoTResponse(BaseResponse): self.iot_backend.delete_policy(policy_name=policy_name) return json.dumps(dict()) + def create_policy_version(self): + policy_name = self._get_param("policyName") + policy_document = self._get_param("policyDocument") + set_as_default = self._get_bool_param("setAsDefault") + policy_version = self.iot_backend.create_policy_version( + policy_name, policy_document, set_as_default + ) + + return json.dumps(dict(policy_version.to_dict_at_creation())) + + def set_default_policy_version(self): + policy_name = self._get_param("policyName") + version_id = self._get_param("policyVersionId") + self.iot_backend.set_default_policy_version(policy_name, version_id) + + return json.dumps(dict()) + + def get_policy_version(self): + policy_name = self._get_param("policyName") + version_id = self._get_param("policyVersionId") + 
policy_version = self.iot_backend.get_policy_version(policy_name, version_id) + return json.dumps(dict(policy_version.to_get_dict())) + + def list_policy_versions(self): + policy_name = self._get_param("policyName") + policiy_versions = self.iot_backend.list_policy_versions( + policy_name=policy_name + ) + + return json.dumps(dict(policyVersions=[_.to_dict() for _ in policiy_versions])) + + def delete_policy_version(self): + policy_name = self._get_param("policyName") + version_id = self._get_param("policyVersionId") + self.iot_backend.delete_policy_version(policy_name, version_id) + + return json.dumps(dict()) + def attach_policy(self): policy_name = self._get_param("policyName") target = self._get_param("target") self.iot_backend.attach_policy(policy_name=policy_name, target=target) return json.dumps(dict()) + def list_attached_policies(self): + principal = unquote(self._get_param("target")) + # marker = self._get_param("marker") + # page_size = self._get_int_param("pageSize") + policies = self.iot_backend.list_attached_policies(target=principal) + # TODO: implement pagination in the future + next_marker = None + return json.dumps( + dict(policies=[_.to_dict() for _ in policies], nextMarker=next_marker) + ) + def attach_principal_policy(self): policy_name = self._get_param("policyName") principal = self.headers.get("x-amzn-iot-principal") diff --git a/moto/kms/models.py b/moto/kms/models.py index 32fcd23ae..9f61b275f 100644 --- a/moto/kms/models.py +++ b/moto/kms/models.py @@ -7,25 +7,42 @@ from datetime import datetime, timedelta from boto3 import Session from moto.core import BaseBackend, BaseModel +<<<<<<< HEAD from moto.core.exceptions import JsonRESTError from moto.core.utils import iso_8601_datetime_without_milliseconds from moto.utilities.tagging_service import TaggingService +======= +from moto.core.utils import unix_time + +from moto.iam.models import ACCOUNT_ID + +>>>>>>> 100dbd529f174f18d579a1dcc066d55409f2e38f from .utils import decrypt, encrypt, 
generate_key_id, generate_master_key class Key(BaseModel): +<<<<<<< HEAD def __init__(self, policy, key_usage, description, region): +======= + def __init__( + self, policy, key_usage, customer_master_key_spec, description, tags, region + ): +>>>>>>> 100dbd529f174f18d579a1dcc066d55409f2e38f self.id = generate_key_id() + self.creation_date = unix_time() self.policy = policy self.key_usage = key_usage self.key_state = "Enabled" self.description = description self.enabled = True self.region = region - self.account_id = "012345678912" + self.account_id = ACCOUNT_ID self.key_rotation_status = False self.deletion_date = None self.key_material = generate_master_key() + self.origin = "AWS_KMS" + self.key_manager = "CUSTOMER" + self.customer_master_key_spec = customer_master_key_spec or "SYMMETRIC_DEFAULT" @property def physical_resource_id(self): @@ -37,23 +54,55 @@ class Key(BaseModel): self.region, self.account_id, self.id ) + @property + def encryption_algorithms(self): + if self.key_usage == "SIGN_VERIFY": + return None + elif self.customer_master_key_spec == "SYMMETRIC_DEFAULT": + return ["SYMMETRIC_DEFAULT"] + else: + return ["RSAES_OAEP_SHA_1", "RSAES_OAEP_SHA_256"] + + @property + def signing_algorithms(self): + if self.key_usage == "ENCRYPT_DECRYPT": + return None + elif self.customer_master_key_spec in ["ECC_NIST_P256", "ECC_SECG_P256K1"]: + return ["ECDSA_SHA_256"] + elif self.customer_master_key_spec == "ECC_NIST_P384": + return ["ECDSA_SHA_384"] + elif self.customer_master_key_spec == "ECC_NIST_P521": + return ["ECDSA_SHA_512"] + else: + return [ + "RSASSA_PKCS1_V1_5_SHA_256", + "RSASSA_PKCS1_V1_5_SHA_384", + "RSASSA_PKCS1_V1_5_SHA_512", + "RSASSA_PSS_SHA_256", + "RSASSA_PSS_SHA_384", + "RSASSA_PSS_SHA_512", + ] + def to_dict(self): key_dict = { "KeyMetadata": { "AWSAccountId": self.account_id, "Arn": self.arn, - "CreationDate": iso_8601_datetime_without_milliseconds(datetime.now()), + "CreationDate": self.creation_date, + "CustomerMasterKeySpec": 
self.customer_master_key_spec, "Description": self.description, "Enabled": self.enabled, + "EncryptionAlgorithms": self.encryption_algorithms, "KeyId": self.id, + "KeyManager": self.key_manager, "KeyUsage": self.key_usage, "KeyState": self.key_state, + "Origin": self.origin, + "SigningAlgorithms": self.signing_algorithms, } } if self.key_state == "PendingDeletion": - key_dict["KeyMetadata"][ - "DeletionDate" - ] = iso_8601_datetime_without_milliseconds(self.deletion_date) + key_dict["KeyMetadata"]["DeletionDate"] = unix_time(self.deletion_date) return key_dict def delete(self, region_name): @@ -69,6 +118,7 @@ class Key(BaseModel): key = kms_backend.create_key( policy=properties["KeyPolicy"], key_usage="ENCRYPT_DECRYPT", + customer_master_key_spec="SYMMETRIC_DEFAULT", description=properties["Description"], region=region_name, ) @@ -92,8 +142,17 @@ class KmsBackend(BaseBackend): self.key_to_aliases = defaultdict(set) self.tagger = TaggingService(keyName='TagKey', valueName='TagValue') +<<<<<<< HEAD def create_key(self, policy, key_usage, description, tags, region): key = Key(policy, key_usage, description, region) +======= + def create_key( + self, policy, key_usage, customer_master_key_spec, description, tags, region + ): + key = Key( + policy, key_usage, customer_master_key_spec, description, tags, region + ) +>>>>>>> 100dbd529f174f18d579a1dcc066d55409f2e38f self.keys[key.id] = key if tags != None and len(tags) > 0: self.tag_resource(key.id, tags) @@ -211,9 +270,7 @@ class KmsBackend(BaseBackend): self.keys[key_id].deletion_date = datetime.now() + timedelta( days=pending_window_in_days ) - return iso_8601_datetime_without_milliseconds( - self.keys[key_id].deletion_date - ) + return unix_time(self.keys[key_id].deletion_date) def encrypt(self, key_id, plaintext, encryption_context): key_id = self.any_id_to_key_id(key_id) diff --git a/moto/kms/responses.py b/moto/kms/responses.py index 3658f0d37..995c097e0 100644 --- a/moto/kms/responses.py +++ b/moto/kms/responses.py 
@@ -118,11 +118,12 @@ class KmsResponse(BaseResponse): """https://docs.aws.amazon.com/kms/latest/APIReference/API_CreateKey.html""" policy = self.parameters.get("Policy") key_usage = self.parameters.get("KeyUsage") + customer_master_key_spec = self.parameters.get("CustomerMasterKeySpec") description = self.parameters.get("Description") tags = self.parameters.get("Tags") key = self.kms_backend.create_key( - policy, key_usage, description, tags, self.region + policy, key_usage, customer_master_key_spec, description, tags, self.region ) return json.dumps(key.to_dict()) diff --git a/moto/logs/responses.py b/moto/logs/responses.py index 072c76b71..4631da2f9 100644 --- a/moto/logs/responses.py +++ b/moto/logs/responses.py @@ -103,7 +103,7 @@ class LogsResponse(BaseResponse): ( events, next_backward_token, - next_foward_token, + next_forward_token, ) = self.logs_backend.get_log_events( log_group_name, log_stream_name, @@ -117,7 +117,7 @@ class LogsResponse(BaseResponse): { "events": events, "nextBackwardToken": next_backward_token, - "nextForwardToken": next_foward_token, + "nextForwardToken": next_forward_token, } ) diff --git a/moto/organizations/exceptions.py b/moto/organizations/exceptions.py index 01b98da7e..b40908862 100644 --- a/moto/organizations/exceptions.py +++ b/moto/organizations/exceptions.py @@ -10,3 +10,13 @@ class InvalidInputException(JsonRESTError): "InvalidInputException", "You provided a value that does not match the required pattern.", ) + + +class DuplicateOrganizationalUnitException(JsonRESTError): + code = 400 + + def __init__(self): + super(DuplicateOrganizationalUnitException, self).__init__( + "DuplicateOrganizationalUnitException", + "An OU with the same name already exists.", + ) diff --git a/moto/organizations/models.py b/moto/organizations/models.py index 42e4dd00a..0db069f9a 100644 --- a/moto/organizations/models.py +++ b/moto/organizations/models.py @@ -8,7 +8,10 @@ from moto.core import BaseBackend, BaseModel from moto.core.exceptions 
import RESTError from moto.core.utils import unix_time from moto.organizations import utils -from moto.organizations.exceptions import InvalidInputException +from moto.organizations.exceptions import ( + InvalidInputException, + DuplicateOrganizationalUnitException, +) class FakeOrganization(BaseModel): @@ -222,6 +225,14 @@ class OrganizationsBackend(BaseBackend): self.attach_policy(PolicyId=utils.DEFAULT_POLICY_ID, TargetId=new_ou.id) return new_ou.describe() + def update_organizational_unit(self, **kwargs): + for ou in self.ou: + if ou.name == kwargs["Name"]: + raise DuplicateOrganizationalUnitException + ou = self.get_organizational_unit_by_id(kwargs["OrganizationalUnitId"]) + ou.name = kwargs["Name"] + return ou.describe() + def get_organizational_unit_by_id(self, ou_id): ou = next((ou for ou in self.ou if ou.id == ou_id), None) if ou is None: diff --git a/moto/organizations/responses.py b/moto/organizations/responses.py index 7c42eb4ec..ba7dd4453 100644 --- a/moto/organizations/responses.py +++ b/moto/organizations/responses.py @@ -36,6 +36,11 @@ class OrganizationsResponse(BaseResponse): self.organizations_backend.create_organizational_unit(**self.request_params) ) + def update_organizational_unit(self): + return json.dumps( + self.organizations_backend.update_organizational_unit(**self.request_params) + ) + def describe_organizational_unit(self): return json.dumps( self.organizations_backend.describe_organizational_unit( diff --git a/moto/rds2/models.py b/moto/rds2/models.py index e648765b7..963af1c63 100644 --- a/moto/rds2/models.py +++ b/moto/rds2/models.py @@ -130,7 +130,9 @@ class Database(BaseModel): if not self.option_group_name and self.engine in self.default_option_groups: self.option_group_name = self.default_option_groups[self.engine] self.character_set_name = kwargs.get("character_set_name", None) - self.iam_database_authentication_enabled = False + self.enable_iam_database_authentication = kwargs.get( + "enable_iam_database_authentication", False 
+ ) self.dbi_resource_id = "db-M5ENSHXFPU6XHZ4G4ZEI5QIO2U" self.tags = kwargs.get("tags", []) @@ -214,7 +216,7 @@ class Database(BaseModel): {{ database.source_db_identifier }} {% endif %} {{ database.engine }} - {{database.iam_database_authentication_enabled }} + {{database.enable_iam_database_authentication|lower }} {{ database.license_model }} {{ database.engine_version }} @@ -542,7 +544,7 @@ class Snapshot(BaseModel): {{ database.kms_key_id }} {{ snapshot.snapshot_arn }} - false + {{ database.enable_iam_database_authentication|lower }} """ ) return template.render(snapshot=self, database=self.database) @@ -986,7 +988,7 @@ class RDS2Backend(BaseBackend): ) if option_group_kwargs["engine_name"] not in valid_option_group_engines.keys(): raise RDSClientError( - "InvalidParameterValue", "Invalid DB engine: non-existant" + "InvalidParameterValue", "Invalid DB engine: non-existent" ) if ( option_group_kwargs["major_engine_version"] diff --git a/moto/rds2/responses.py b/moto/rds2/responses.py index 625838d4d..b63e9f8b8 100644 --- a/moto/rds2/responses.py +++ b/moto/rds2/responses.py @@ -27,6 +27,9 @@ class RDS2Response(BaseResponse): "db_subnet_group_name": self._get_param("DBSubnetGroupName"), "engine": self._get_param("Engine"), "engine_version": self._get_param("EngineVersion"), + "enable_iam_database_authentication": self._get_bool_param( + "EnableIAMDatabaseAuthentication" + ), "license_model": self._get_param("LicenseModel"), "iops": self._get_int_param("Iops"), "kms_key_id": self._get_param("KmsKeyId"), @@ -367,14 +370,14 @@ class RDS2Response(BaseResponse): def modify_db_parameter_group(self): db_parameter_group_name = self._get_param("DBParameterGroupName") - db_parameter_group_parameters = self._get_db_parameter_group_paramters() + db_parameter_group_parameters = self._get_db_parameter_group_parameters() db_parameter_group = self.backend.modify_db_parameter_group( db_parameter_group_name, db_parameter_group_parameters ) template = 
self.response_template(MODIFY_DB_PARAMETER_GROUP_TEMPLATE) return template.render(db_parameter_group=db_parameter_group) - def _get_db_parameter_group_paramters(self): + def _get_db_parameter_group_parameters(self): parameter_group_parameters = defaultdict(dict) for param_name, value in self.querystring.items(): if not param_name.startswith("Parameters.Parameter"): diff --git a/moto/route53/responses.py b/moto/route53/responses.py index 3e688b65d..077c89a2c 100644 --- a/moto/route53/responses.py +++ b/moto/route53/responses.py @@ -271,6 +271,7 @@ LIST_RRSET_RESPONSE = """ diff --git a/moto/s3/config.py b/moto/s3/config.py index 8098addfc..04b4315f3 100644 --- a/moto/s3/config.py +++ b/moto/s3/config.py @@ -1,8 +1,13 @@ +import datetime import json +import time + +from boto3 import Session from moto.core.exceptions import InvalidNextTokenException from moto.core.models import ConfigQueryModel from moto.s3 import s3_backends +from moto.s3.models import get_moto_s3_account_id class S3ConfigQuery(ConfigQueryModel): @@ -118,4 +123,146 @@ class S3ConfigQuery(ConfigQueryModel): return config_data +class S3AccountPublicAccessBlockConfigQuery(ConfigQueryModel): + def list_config_service_resources( + self, + resource_ids, + resource_name, + limit, + next_token, + backend_region=None, + resource_region=None, + ): + # For the Account Public Access Block, they are the same for all regions. The resource ID is the AWS account ID + # There is no resource name -- it should be a blank string "" if provided. 
+ + # The resource name can only ever be None or an empty string: + if resource_name is not None and resource_name != "": + return [], None + + pab = None + account_id = get_moto_s3_account_id() + regions = [region for region in Session().get_available_regions("config")] + + # If a resource ID was passed in, then filter accordingly: + if resource_ids: + for id in resource_ids: + if account_id == id: + pab = self.backends["global"].account_public_access_block + break + + # Otherwise, just grab the one from the backend: + if not resource_ids: + pab = self.backends["global"].account_public_access_block + + # If it's not present, then return nothing + if not pab: + return [], None + + # Filter on regions (and paginate on them as well): + if backend_region: + pab_list = [backend_region] + elif resource_region: + # Invalid region? + if resource_region not in regions: + return [], None + + pab_list = [resource_region] + + # Aggregated query where no regions were supplied so return them all: + else: + pab_list = regions + + # Pagination logic: + sorted_regions = sorted(pab_list) + new_token = None + + # Get the start: + if not next_token: + start = 0 + else: + # Tokens for this moto feature is just the region-name: + # For OTHER non-global resource types, it's the region concatenated with the resource ID. + if next_token not in sorted_regions: + raise InvalidNextTokenException() + + start = sorted_regions.index(next_token) + + # Get the list of items to collect: + pab_list = sorted_regions[start : (start + limit)] + + if len(sorted_regions) > (start + limit): + new_token = sorted_regions[start + limit] + + return ( + [ + { + "type": "AWS::S3::AccountPublicAccessBlock", + "id": account_id, + "region": region, + } + for region in pab_list + ], + new_token, + ) + + def get_config_resource( + self, resource_id, resource_name=None, backend_region=None, resource_region=None + ): + # Do we even have this defined? 
+ if not self.backends["global"].account_public_access_block: + return None + + # Resource name can only ever be "" if it's supplied: + if resource_name is not None and resource_name != "": + return None + + # Are we filtering based on region? + account_id = get_moto_s3_account_id() + regions = [region for region in Session().get_available_regions("config")] + + # Is the resource ID correct?: + if account_id == resource_id: + if backend_region: + pab_region = backend_region + + # Invalid region? + elif resource_region not in regions: + return None + + else: + pab_region = resource_region + + else: + return None + + # Format the PAB to the AWS Config format: + creation_time = datetime.datetime.utcnow() + config_data = { + "version": "1.3", + "accountId": account_id, + "configurationItemCaptureTime": str(creation_time), + "configurationItemStatus": "OK", + "configurationStateId": str( + int(time.mktime(creation_time.timetuple())) + ), # PY2 and 3 compatible + "resourceType": "AWS::S3::AccountPublicAccessBlock", + "resourceId": account_id, + "awsRegion": pab_region, + "availabilityZone": "Not Applicable", + "configuration": self.backends[ + "global" + ].account_public_access_block.to_config_dict(), + "supplementaryConfiguration": {}, + } + + # The 'configuration' field is also a JSON string: + config_data["configuration"] = json.dumps(config_data["configuration"]) + + return config_data + + s3_config_query = S3ConfigQuery(s3_backends) +s3_account_public_access_block_query = S3AccountPublicAccessBlockConfigQuery( + s3_backends +) diff --git a/moto/s3/exceptions.py b/moto/s3/exceptions.py index 1f2ead639..e26f384d5 100644 --- a/moto/s3/exceptions.py +++ b/moto/s3/exceptions.py @@ -127,6 +127,18 @@ class InvalidRequest(S3ClientError): ) +class IllegalLocationConstraintException(S3ClientError): + code = 400 + + def __init__(self, *args, **kwargs): + super(IllegalLocationConstraintException, self).__init__( + "IllegalLocationConstraintException", + "The unspecified 
location constraint is incompatible for the region specific endpoint this request was sent to.", + *args, + **kwargs + ) + + class MalformedXML(S3ClientError): code = 400 @@ -347,3 +359,12 @@ class InvalidPublicAccessBlockConfiguration(S3ClientError): *args, **kwargs ) + + +class WrongPublicAccessBlockAccountIdError(S3ClientError): + code = 403 + + def __init__(self): + super(WrongPublicAccessBlockAccountIdError, self).__init__( + "AccessDenied", "Access Denied" + ) diff --git a/moto/s3/models.py b/moto/s3/models.py index fe8e908ef..5a665e27e 100644 --- a/moto/s3/models.py +++ b/moto/s3/models.py @@ -19,7 +19,7 @@ import uuid import six from bisect import insort -from moto.core import BaseBackend, BaseModel +from moto.core import ACCOUNT_ID, BaseBackend, BaseModel from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime from .exceptions import ( BucketAlreadyExists, @@ -37,6 +37,7 @@ from .exceptions import ( CrossLocationLoggingProhibitted, NoSuchPublicAccessBlockConfiguration, InvalidPublicAccessBlockConfiguration, + WrongPublicAccessBlockAccountIdError, ) from .utils import clean_key_name, _VersionedKeyStore @@ -58,6 +59,13 @@ DEFAULT_TEXT_ENCODING = sys.getdefaultencoding() OWNER = "75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a" +def get_moto_s3_account_id(): + """This makes it easy for mocking AWS Account IDs when using AWS Config + -- Simply mock.patch the ACCOUNT_ID here, and Config gets it for free. 
+ """ + return ACCOUNT_ID + + class FakeDeleteMarker(BaseModel): def __init__(self, key): self.key = key @@ -1163,6 +1171,7 @@ class FakeBucket(BaseModel): class S3Backend(BaseBackend): def __init__(self): self.buckets = {} + self.account_public_access_block = None def create_bucket(self, bucket_name, region_name): if bucket_name in self.buckets: @@ -1264,6 +1273,16 @@ class S3Backend(BaseBackend): return bucket.public_access_block + def get_account_public_access_block(self, account_id): + # The account ID should equal the account id that is set for Moto: + if account_id != ACCOUNT_ID: + raise WrongPublicAccessBlockAccountIdError() + + if not self.account_public_access_block: + raise NoSuchPublicAccessBlockConfiguration() + + return self.account_public_access_block + def set_key( self, bucket_name, key_name, value, storage=None, etag=None, multipart=None ): @@ -1356,6 +1375,13 @@ class S3Backend(BaseBackend): bucket = self.get_bucket(bucket_name) bucket.public_access_block = None + def delete_account_public_access_block(self, account_id): + # The account ID should equal the account id that is set for Moto: + if account_id != ACCOUNT_ID: + raise WrongPublicAccessBlockAccountIdError() + + self.account_public_access_block = None + def put_bucket_notification_configuration(self, bucket_name, notification_config): bucket = self.get_bucket(bucket_name) bucket.set_notification_configuration(notification_config) @@ -1384,6 +1410,21 @@ class S3Backend(BaseBackend): pub_block_config.get("RestrictPublicBuckets"), ) + def put_account_public_access_block(self, account_id, pub_block_config): + # The account ID should equal the account id that is set for Moto: + if account_id != ACCOUNT_ID: + raise WrongPublicAccessBlockAccountIdError() + + if not pub_block_config: + raise InvalidPublicAccessBlockConfiguration() + + self.account_public_access_block = PublicAccessBlock( + pub_block_config.get("BlockPublicAcls"), + pub_block_config.get("IgnorePublicAcls"), + 
pub_block_config.get("BlockPublicPolicy"), + pub_block_config.get("RestrictPublicBuckets"), + ) + def initiate_multipart(self, bucket_name, key_name, metadata): bucket = self.get_bucket(bucket_name) new_multipart = FakeMultipart(key_name, metadata) diff --git a/moto/s3/responses.py b/moto/s3/responses.py index 71f21c8e1..b74be9a63 100644 --- a/moto/s3/responses.py +++ b/moto/s3/responses.py @@ -4,6 +4,7 @@ import re import sys import six +from botocore.awsrequest import AWSPreparedRequest from moto.core.utils import str_to_rfc_1123_datetime, py2_strip_unicode_keys from six.moves.urllib.parse import parse_qs, urlparse, unquote @@ -29,6 +30,7 @@ from .exceptions import ( InvalidPartOrder, MalformedXML, MalformedACLError, + IllegalLocationConstraintException, InvalidNotificationARN, InvalidNotificationEvent, ObjectNotInActiveTierError, @@ -122,6 +124,11 @@ ACTION_MAP = { "uploadId": "PutObject", }, }, + "CONTROL": { + "GET": {"publicAccessBlock": "GetPublicAccessBlock"}, + "PUT": {"publicAccessBlock": "PutPublicAccessBlock"}, + "DELETE": {"publicAccessBlock": "DeletePublicAccessBlock"}, + }, } @@ -167,7 +174,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): or host.startswith("localhost") or host.startswith("localstack") or re.match(r"^[^.]+$", host) - or re.match(r"^.*\.svc\.cluster\.local$", host) + or re.match(r"^.*\.svc\.cluster\.local:?\d*$", host) ): # Default to path-based buckets for (1) localhost, (2) localstack hosts (e.g. localstack.dev), # (3) local host names that do not contain a "." 
(e.g., Docker container host names), or @@ -219,7 +226,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): # Depending on which calling format the client is using, we don't know # if this is a bucket or key request so we have to check if self.subdomain_based_buckets(request): - return self.key_response(request, full_url, headers) + return self.key_or_control_response(request, full_url, headers) else: # Using path-based buckets return self.bucket_response(request, full_url, headers) @@ -286,7 +293,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): return self._bucket_response_post(request, body, bucket_name) else: raise NotImplementedError( - "Method {0} has not been impelemented in the S3 backend yet".format( + "Method {0} has not been implemented in the S3 backend yet".format( method ) ) @@ -585,6 +592,29 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): next_continuation_token = None return result_keys, is_truncated, next_continuation_token + def _body_contains_location_constraint(self, body): + if body: + try: + xmltodict.parse(body)["CreateBucketConfiguration"]["LocationConstraint"] + return True + except KeyError: + pass + return False + + def _parse_pab_config(self, body): + parsed_xml = xmltodict.parse(body) + parsed_xml["PublicAccessBlockConfiguration"].pop("@xmlns", None) + + # If Python 2, fix the unicode strings: + if sys.version_info[0] < 3: + parsed_xml = { + "PublicAccessBlockConfiguration": py2_strip_unicode_keys( + dict(parsed_xml["PublicAccessBlockConfiguration"]) + ) + } + + return parsed_xml + def _bucket_response_put( self, request, body, region_name, bucket_name, querystring ): @@ -663,27 +693,23 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): raise e elif "publicAccessBlock" in querystring: - parsed_xml = xmltodict.parse(body) - parsed_xml["PublicAccessBlockConfiguration"].pop("@xmlns", None) - - # If Python 2, fix the unicode 
strings: - if sys.version_info[0] < 3: - parsed_xml = { - "PublicAccessBlockConfiguration": py2_strip_unicode_keys( - dict(parsed_xml["PublicAccessBlockConfiguration"]) - ) - } - + pab_config = self._parse_pab_config(body) self.backend.put_bucket_public_access_block( - bucket_name, parsed_xml["PublicAccessBlockConfiguration"] + bucket_name, pab_config["PublicAccessBlockConfiguration"] ) return "" else: + # us-east-1, the default AWS region behaves a bit differently + # - you should not use it as a location constraint --> it fails + # - querying the location constraint returns None + # - LocationConstraint has to be specified if outside us-east-1 + if ( + region_name != DEFAULT_REGION_NAME + and not self._body_contains_location_constraint(body) + ): + raise IllegalLocationConstraintException() if body: - # us-east-1, the default AWS region behaves a bit differently - # - you should not use it as a location constraint --> it fails - # - querying the location constraint returns None try: forced_region = xmltodict.parse(body)["CreateBucketConfiguration"][ "LocationConstraint" @@ -854,15 +880,21 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): ) return 206, response_headers, response_content[begin : end + 1] - def key_response(self, request, full_url, headers): + def key_or_control_response(self, request, full_url, headers): + # Key and Control are lumped in because splitting out the regex is too much of a pain :/ self.method = request.method self.path = self._get_path(request) self.headers = request.headers if "host" not in self.headers: self.headers["host"] = urlparse(full_url).netloc response_headers = {} + try: - response = self._key_response(request, full_url, headers) + # Is this an S3 control response? 
+ if isinstance(request, AWSPreparedRequest) and "s3-control" in request.url: + response = self._control_response(request, full_url, headers) + else: + response = self._key_response(request, full_url, headers) except S3ClientError as s3error: response = s3error.code, {}, s3error.description @@ -878,6 +910,94 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): ) return status_code, response_headers, response_content + def _control_response(self, request, full_url, headers): + parsed_url = urlparse(full_url) + query = parse_qs(parsed_url.query, keep_blank_values=True) + method = request.method + + if hasattr(request, "body"): + # Boto + body = request.body + if hasattr(body, "read"): + body = body.read() + else: + # Flask server + body = request.data + if body is None: + body = b"" + + if method == "GET": + return self._control_response_get(request, query, headers) + elif method == "PUT": + return self._control_response_put(request, body, query, headers) + elif method == "DELETE": + return self._control_response_delete(request, query, headers) + else: + raise NotImplementedError( + "Method {0} has not been implemented in the S3 backend yet".format( + method + ) + ) + + def _control_response_get(self, request, query, headers): + action = self.path.split("?")[0].split("/")[ + -1 + ] # Gets the action out of the URL sans query params. 
+ self._set_action("CONTROL", "GET", action) + self._authenticate_and_authorize_s3_action() + + response_headers = {} + if "publicAccessBlock" in action: + public_block_config = self.backend.get_account_public_access_block( + headers["x-amz-account-id"] + ) + template = self.response_template(S3_PUBLIC_ACCESS_BLOCK_CONFIGURATION) + return ( + 200, + response_headers, + template.render(public_block_config=public_block_config), + ) + + raise NotImplementedError( + "Method {0} has not been implemented in the S3 backend yet".format(action) + ) + + def _control_response_put(self, request, body, query, headers): + action = self.path.split("?")[0].split("/")[ + -1 + ] # Gets the action out of the URL sans query params. + self._set_action("CONTROL", "PUT", action) + self._authenticate_and_authorize_s3_action() + + response_headers = {} + if "publicAccessBlock" in action: + pab_config = self._parse_pab_config(body) + self.backend.put_account_public_access_block( + headers["x-amz-account-id"], + pab_config["PublicAccessBlockConfiguration"], + ) + return 200, response_headers, "" + + raise NotImplementedError( + "Method {0} has not been implemented in the S3 backend yet".format(action) + ) + + def _control_response_delete(self, request, query, headers): + action = self.path.split("?")[0].split("/")[ + -1 + ] # Gets the action out of the URL sans query params. 
+ self._set_action("CONTROL", "DELETE", action) + self._authenticate_and_authorize_s3_action() + + response_headers = {} + if "publicAccessBlock" in action: + self.backend.delete_account_public_access_block(headers["x-amz-account-id"]) + return 200, response_headers, "" + + raise NotImplementedError( + "Method {0} has not been implemented in the S3 backend yet".format(action) + ) + def _key_response(self, request, full_url, headers): parsed_url = urlparse(full_url) query = parse_qs(parsed_url.query, keep_blank_values=True) @@ -1082,6 +1202,10 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): if mdirective is not None and mdirective == "REPLACE": metadata = metadata_from_headers(request.headers) new_key.set_metadata(metadata, replace=True) + tdirective = request.headers.get("x-amz-tagging-directive") + if tdirective == "REPLACE": + tagging = self._tagging_from_headers(request.headers) + new_key.set_tagging(tagging) template = self.response_template(S3_OBJECT_COPY_RESPONSE) response_headers.update(new_key.response_dict) return 200, response_headers, template.render(key=new_key) @@ -1482,7 +1606,7 @@ S3_ALL_BUCKETS = """ 75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a webfile - STANDARD 1 {{ count }} {{ count }} diff --git a/moto/s3/urls.py b/moto/s3/urls.py index 7241dbef1..752762184 100644 --- a/moto/s3/urls.py +++ b/moto/s3/urls.py @@ -13,7 +13,7 @@ url_paths = { # subdomain key of path-based bucket "{0}/(?P[^/]+)/?$": S3ResponseInstance.ambiguous_response, # path-based bucket + key - "{0}/(?P[^/]+)/(?P.+)": S3ResponseInstance.key_response, + "{0}/(?P[^/]+)/(?P.+)": S3ResponseInstance.key_or_control_response, # subdomain bucket + key with empty first part of path - "{0}//(?P.*)$": S3ResponseInstance.key_response, + "{0}//(?P.*)$": S3ResponseInstance.key_or_control_response, } diff --git a/moto/s3/utils.py b/moto/s3/utils.py index e7d9e5580..e22b6b860 100644 --- a/moto/s3/utils.py +++ b/moto/s3/utils.py @@ -37,7 +37,7 @@ 
def bucket_name_from_url(url): REGION_URL_REGEX = re.compile( r"^https?://(s3[-\.](?P.+)\.amazonaws\.com/(.+)|" - r"(.+)\.s3-(?P.+)\.amazonaws\.com)/?" + r"(.+)\.s3[-\.](?P.+)\.amazonaws\.com)/?" ) diff --git a/moto/ses/models.py b/moto/ses/models.py index eacdd8458..4b6ce52c8 100644 --- a/moto/ses/models.py +++ b/moto/ses/models.py @@ -148,11 +148,15 @@ class SESBackend(BaseBackend): def __type_of_message__(self, destinations): """Checks the destination for any special address that could indicate delivery, complaint or bounce like in SES simulator""" - alladdress = ( - destinations.get("ToAddresses", []) - + destinations.get("CcAddresses", []) - + destinations.get("BccAddresses", []) - ) + if isinstance(destinations, list): + alladdress = destinations + else: + alladdress = ( + destinations.get("ToAddresses", []) + + destinations.get("CcAddresses", []) + + destinations.get("BccAddresses", []) + ) + for addr in alladdress: if SESFeedback.SUCCESS_ADDR in addr: return SESFeedback.DELIVERY diff --git a/moto/sqs/exceptions.py b/moto/sqs/exceptions.py index 01123d777..77d7b9fb2 100644 --- a/moto/sqs/exceptions.py +++ b/moto/sqs/exceptions.py @@ -99,3 +99,28 @@ class InvalidAttributeName(RESTError): super(InvalidAttributeName, self).__init__( "InvalidAttributeName", "Unknown Attribute {}.".format(attribute_name) ) + + +class InvalidParameterValue(RESTError): + code = 400 + + def __init__(self, message): + super(InvalidParameterValue, self).__init__("InvalidParameterValue", message) + + +class MissingParameter(RESTError): + code = 400 + + def __init__(self): + super(MissingParameter, self).__init__( + "MissingParameter", "The request must contain the parameter Actions." 
+ ) + + +class OverLimit(RESTError): + code = 403 + + def __init__(self, count): + super(OverLimit, self).__init__( + "OverLimit", "{} Actions were found, maximum allowed is 7.".format(count) + ) diff --git a/moto/sqs/models.py b/moto/sqs/models.py index 40dd6ba97..a54d91c43 100644 --- a/moto/sqs/models.py +++ b/moto/sqs/models.py @@ -30,6 +30,9 @@ from .exceptions import ( BatchEntryIdsNotDistinct, TooManyEntriesInBatchRequest, InvalidAttributeName, + InvalidParameterValue, + MissingParameter, + OverLimit, ) from moto.core import ACCOUNT_ID as DEFAULT_ACCOUNT_ID @@ -183,6 +186,8 @@ class Queue(BaseModel): "MaximumMessageSize", "MessageRetentionPeriod", "QueueArn", + "Policy", + "RedrivePolicy", "ReceiveMessageWaitTimeSeconds", "VisibilityTimeout", ] @@ -194,6 +199,8 @@ class Queue(BaseModel): "DeleteMessage", "GetQueueAttributes", "GetQueueUrl", + "ListDeadLetterSourceQueues", + "PurgeQueue", "ReceiveMessage", "SendMessage", ) @@ -272,7 +279,7 @@ class Queue(BaseModel): if key in bool_fields: value = value == "true" - if key == "RedrivePolicy" and value is not None: + if key in ["Policy", "RedrivePolicy"] and value is not None: continue setattr(self, camelcase_to_underscores(key), value) @@ -280,6 +287,9 @@ class Queue(BaseModel): if attributes.get("RedrivePolicy", None): self._setup_dlq(attributes["RedrivePolicy"]) + if attributes.get("Policy"): + self.policy = attributes["Policy"] + self.last_modified_timestamp = now def _setup_dlq(self, policy): @@ -471,6 +481,24 @@ class Queue(BaseModel): return self.name raise UnformattedGetAttTemplateException() + @property + def policy(self): + if self._policy_json.get("Statement"): + return json.dumps(self._policy_json) + else: + return None + + @policy.setter + def policy(self, policy): + if policy: + self._policy_json = json.loads(policy) + else: + self._policy_json = { + "Version": "2012-10-17", + "Id": "{}/SQSDefaultPolicy".format(self.queue_arn), + "Statement": [], + } + class SQSBackend(BaseBackend): def 
__init__(self, region_name): @@ -539,7 +567,7 @@ class SQSBackend(BaseBackend): for name, q in self.queues.items(): if prefix_re.search(name): qs.append(q) - return qs + return qs[:1000] def get_queue(self, queue_name): queue = self.queues.get(queue_name) @@ -801,25 +829,75 @@ class SQSBackend(BaseBackend): def add_permission(self, queue_name, actions, account_ids, label): queue = self.get_queue(queue_name) - if actions is None or len(actions) == 0: - raise RESTError("InvalidParameterValue", "Need at least one Action") - if account_ids is None or len(account_ids) == 0: - raise RESTError("InvalidParameterValue", "Need at least one Account ID") + if not actions: + raise MissingParameter() - if not all([item in Queue.ALLOWED_PERMISSIONS for item in actions]): - raise RESTError("InvalidParameterValue", "Invalid permissions") + if not account_ids: + raise InvalidParameterValue( + "Value [] for parameter PrincipalId is invalid. Reason: Unable to verify." + ) - queue.permissions[label] = (account_ids, actions) + count = len(actions) + if count > 7: + raise OverLimit(count) + + invalid_action = next( + (action for action in actions if action not in Queue.ALLOWED_PERMISSIONS), + None, + ) + if invalid_action: + raise InvalidParameterValue( + "Value SQS:{} for parameter ActionName is invalid. " + "Reason: Only the queue owner is allowed to invoke this action.".format( + invalid_action + ) + ) + + policy = queue._policy_json + statement = next( + ( + statement + for statement in policy["Statement"] + if statement["Sid"] == label + ), + None, + ) + if statement: + raise InvalidParameterValue( + "Value {} for parameter Label is invalid. 
" + "Reason: Already exists.".format(label) + ) + + principals = [ + "arn:aws:iam::{}:root".format(account_id) for account_id in account_ids + ] + actions = ["SQS:{}".format(action) for action in actions] + + statement = { + "Sid": label, + "Effect": "Allow", + "Principal": {"AWS": principals[0] if len(principals) == 1 else principals}, + "Action": actions[0] if len(actions) == 1 else actions, + "Resource": queue.queue_arn, + } + + queue._policy_json["Statement"].append(statement) def remove_permission(self, queue_name, label): queue = self.get_queue(queue_name) - if label not in queue.permissions: - raise RESTError( - "InvalidParameterValue", "Permission doesnt exist for the given label" + statements = queue._policy_json["Statement"] + statements_new = [ + statement for statement in statements if statement["Sid"] != label + ] + + if len(statements) == len(statements_new): + raise InvalidParameterValue( + "Value {} for parameter Label is invalid. " + "Reason: can't find label on existing policy.".format(label) ) - del queue.permissions[label] + queue._policy_json["Statement"] = statements_new def tag_queue(self, queue_name, tags): queue = self.get_queue(queue_name) diff --git a/moto/swf/models/workflow_execution.py b/moto/swf/models/workflow_execution.py index 4d91b1f6f..17ce819fb 100644 --- a/moto/swf/models/workflow_execution.py +++ b/moto/swf/models/workflow_execution.py @@ -127,6 +127,10 @@ class WorkflowExecution(BaseModel): "executionInfo": self.to_medium_dict(), "executionConfiguration": {"taskList": {"name": self.task_list}}, } + # info + if self.execution_status == "CLOSED": + hsh["executionInfo"]["closeStatus"] = self.close_status + hsh["executionInfo"]["closeTimestamp"] = self.close_timestamp # configuration for key in self._configuration_keys: attr = camelcase_to_underscores(key) diff --git a/moto/utilities/tagging_service.py b/moto/utilities/tagging_service.py index 5eae095ec..89b857277 100644 --- a/moto/utilities/tagging_service.py +++ 
b/moto/utilities/tagging_service.py @@ -1,5 +1,5 @@ class TaggingService: - def __init__(self, tagName='Tags', keyName='Key', valueName='Value'): + def __init__(self, tagName="Tags", keyName="Key", valueName="Value"): self.tagName = tagName self.keyName = keyName self.valueName = valueName @@ -12,6 +12,12 @@ class TaggingService: result.append({self.keyName: k, self.valueName: v}) return {self.tagName: result} + def delete_all_tags_for_resource(self, arn): + del self.tags[arn] + + def has_tags(self, arn): + return arn in self.tags + def tag_resource(self, arn, tags): if arn not in self.tags: self.tags[arn] = {} diff --git a/scripts/scaffold.py b/scripts/scaffold.py index be154f103..43a648b48 100755 --- a/scripts/scaffold.py +++ b/scripts/scaffold.py @@ -20,8 +20,8 @@ import jinja2 from prompt_toolkit import ( prompt ) -from prompt_toolkit.contrib.completers import WordCompleter -from prompt_toolkit.shortcuts import print_tokens +from prompt_toolkit.completion import WordCompleter +from prompt_toolkit.shortcuts import print_formatted_text from botocore import xform_name from botocore.session import Session @@ -149,12 +149,12 @@ def append_mock_dict_to_backends_py(service): with open(path) as f: lines = [_.replace('\n', '') for _ in f.readlines()] - if any(_ for _ in lines if re.match(".*'{}': {}_backends.*".format(service, service), _)): + if any(_ for _ in lines if re.match(".*\"{}\": {}_backends.*".format(service, service), _)): return - filtered_lines = [_ for _ in lines if re.match(".*'.*':.*_backends.*", _)] + filtered_lines = [_ for _ in lines if re.match(".*\".*\":.*_backends.*", _)] last_elem_line_index = lines.index(filtered_lines[-1]) - new_line = " '{}': {}_backends,".format(service, get_escaped_service(service)) + new_line = " \"{}\": {}_backends,".format(service, get_escaped_service(service)) prev_line = lines[last_elem_line_index] if not prev_line.endswith('{') and not prev_line.endswith(','): lines[last_elem_line_index] += ',' diff --git a/setup.py 
b/setup.py index 97a6341ff..1dde71ac7 100755 --- a/setup.py +++ b/setup.py @@ -39,11 +39,11 @@ install_requires = [ "werkzeug", "PyYAML>=5.1", "pytz", - "python-dateutil<2.8.1,>=2.1", + "python-dateutil<3.0.0,>=2.1", "python-jose<4.0.0", "mock", "docker>=2.5.1", - "jsondiff==1.1.2", + "jsondiff>=1.1.2", "aws-xray-sdk!=0.96,>=0.93", "responses>=0.9.0", "idna<2.9,>=2.5", diff --git a/tests/test_apigateway/test_apigateway.py b/tests/test_apigateway/test_apigateway.py index 59c0c07f6..8692ccc9b 100644 --- a/tests/test_apigateway/test_apigateway.py +++ b/tests/test_apigateway/test_apigateway.py @@ -26,7 +26,14 @@ def test_create_and_get_rest_api(): response.pop("ResponseMetadata") response.pop("createdDate") response.should.equal( - {"id": api_id, "name": "my_api", "description": "this is my api"} + { + "id": api_id, + "name": "my_api", + "description": "this is my api", + "apiKeySource": "HEADER", + "endpointConfiguration": {"types": ["EDGE"]}, + "tags": {}, + } ) @@ -47,6 +54,114 @@ def test_list_and_delete_apis(): len(response["items"]).should.equal(1) +@mock_apigateway +def test_create_rest_api_with_tags(): + client = boto3.client("apigateway", region_name="us-west-2") + + response = client.create_rest_api( + name="my_api", description="this is my api", tags={"MY_TAG1": "MY_VALUE1"} + ) + api_id = response["id"] + + response = client.get_rest_api(restApiId=api_id) + + assert "tags" in response + response["tags"].should.equal({"MY_TAG1": "MY_VALUE1"}) + + +@mock_apigateway +def test_create_rest_api_invalid_apikeysource(): + client = boto3.client("apigateway", region_name="us-west-2") + + with assert_raises(ClientError) as ex: + client.create_rest_api( + name="my_api", + description="this is my api", + apiKeySource="not a valid api key source", + ) + ex.exception.response["Error"]["Code"].should.equal("ValidationException") + + +@mock_apigateway +def test_create_rest_api_valid_apikeysources(): + client = boto3.client("apigateway", region_name="us-west-2") + + # 1. 
test creating rest api with HEADER apiKeySource + response = client.create_rest_api( + name="my_api", description="this is my api", apiKeySource="HEADER", + ) + api_id = response["id"] + + response = client.get_rest_api(restApiId=api_id) + response["apiKeySource"].should.equal("HEADER") + + # 2. test creating rest api with AUTHORIZER apiKeySource + response = client.create_rest_api( + name="my_api2", description="this is my api", apiKeySource="AUTHORIZER", + ) + api_id = response["id"] + + response = client.get_rest_api(restApiId=api_id) + response["apiKeySource"].should.equal("AUTHORIZER") + + +@mock_apigateway +def test_create_rest_api_invalid_endpointconfiguration(): + client = boto3.client("apigateway", region_name="us-west-2") + + with assert_raises(ClientError) as ex: + client.create_rest_api( + name="my_api", + description="this is my api", + endpointConfiguration={"types": ["INVALID"]}, + ) + ex.exception.response["Error"]["Code"].should.equal("ValidationException") + + +@mock_apigateway +def test_create_rest_api_valid_endpointconfigurations(): + client = boto3.client("apigateway", region_name="us-west-2") + + # 1. test creating rest api with PRIVATE endpointConfiguration + response = client.create_rest_api( + name="my_api", + description="this is my api", + endpointConfiguration={"types": ["PRIVATE"]}, + ) + api_id = response["id"] + + response = client.get_rest_api(restApiId=api_id) + response["endpointConfiguration"].should.equal( + {"types": ["PRIVATE"],} + ) + + # 2. test creating rest api with REGIONAL endpointConfiguration + response = client.create_rest_api( + name="my_api2", + description="this is my api", + endpointConfiguration={"types": ["REGIONAL"]}, + ) + api_id = response["id"] + + response = client.get_rest_api(restApiId=api_id) + response["endpointConfiguration"].should.equal( + {"types": ["REGIONAL"],} + ) + + # 3. 
test creating rest api with EDGE endpointConfiguration + response = client.create_rest_api( + name="my_api3", + description="this is my api", + endpointConfiguration={"types": ["EDGE"]}, + ) + api_id = response["id"] + + response = client.get_rest_api(restApiId=api_id) + response["endpointConfiguration"].should.equal( + {"types": ["EDGE"],} + ) + + @mock_apigateway def test_create_resource__validate_name(): client = boto3.client("apigateway", region_name="us-west-2") @@ -58,15 +173,15 @@ def test_create_resource__validate_name(): 0 ]["id"] - invalid_names = ["/users", "users/", "users/{user_id}", "us{er"] - valid_names = ["users", "{user_id}", "user_09", "good-dog"] + invalid_names = ["/users", "users/", "users/{user_id}", "us{er", "us+er"] + valid_names = ["users", "{user_id}", "{proxy+}", "user_09", "good-dog"] # All invalid names should throw an exception for name in invalid_names: with assert_raises(ClientError) as ex: client.create_resource(restApiId=api_id, parentId=root_id, pathPart=name) ex.exception.response["Error"]["Code"].should.equal("BadRequestException") ex.exception.response["Error"]["Message"].should.equal( - "Resource's path part only allow a-zA-Z0-9._- and curly braces at the beginning and the end." + "Resource's path part only allow a-zA-Z0-9._- and curly braces at the beginning and the end and an optional plus sign before the closing brace." 
) # All valid names should go through for name in valid_names: @@ -89,12 +204,7 @@ def test_create_resource(): root_resource["ResponseMetadata"].pop("HTTPHeaders", None) root_resource["ResponseMetadata"].pop("RetryAttempts", None) root_resource.should.equal( - { - "path": "/", - "id": root_id, - "ResponseMetadata": {"HTTPStatusCode": 200}, - "resourceMethods": {"GET": {}}, - } + {"path": "/", "id": root_id, "ResponseMetadata": {"HTTPStatusCode": 200},} ) client.create_resource(restApiId=api_id, parentId=root_id, pathPart="users") @@ -142,7 +252,6 @@ def test_child_resource(): "parentId": users_id, "id": tags_id, "ResponseMetadata": {"HTTPStatusCode": 200}, - "resourceMethods": {"GET": {}}, } ) @@ -171,6 +280,41 @@ def test_create_method(): { "httpMethod": "GET", "authorizationType": "none", + "apiKeyRequired": False, + "ResponseMetadata": {"HTTPStatusCode": 200}, + } + ) + + +@mock_apigateway +def test_create_method_apikeyrequired(): + client = boto3.client("apigateway", region_name="us-west-2") + response = client.create_rest_api(name="my_api", description="this is my api") + api_id = response["id"] + + resources = client.get_resources(restApiId=api_id) + root_id = [resource for resource in resources["items"] if resource["path"] == "/"][ + 0 + ]["id"] + + client.put_method( + restApiId=api_id, + resourceId=root_id, + httpMethod="GET", + authorizationType="none", + apiKeyRequired=True, + ) + + response = client.get_method(restApiId=api_id, resourceId=root_id, httpMethod="GET") + + # this is hard to match against, so remove it + response["ResponseMetadata"].pop("HTTPHeaders", None) + response["ResponseMetadata"].pop("RetryAttempts", None) + response.should.equal( + { + "httpMethod": "GET", + "authorizationType": "none", + "apiKeyRequired": True, "ResponseMetadata": {"HTTPStatusCode": 200}, } ) diff --git a/tests/test_autoscaling/test_autoscaling.py b/tests/test_autoscaling/test_autoscaling.py index c46bc7219..2e7255381 100644 --- 
a/tests/test_autoscaling/test_autoscaling.py +++ b/tests/test_autoscaling/test_autoscaling.py @@ -706,14 +706,14 @@ def test_create_autoscaling_group_boto3(): "ResourceId": "test_asg", "ResourceType": "auto-scaling-group", "Key": "propogated-tag-key", - "Value": "propogate-tag-value", + "Value": "propagate-tag-value", "PropagateAtLaunch": True, }, { "ResourceId": "test_asg", "ResourceType": "auto-scaling-group", "Key": "not-propogated-tag-key", - "Value": "not-propogate-tag-value", + "Value": "not-propagate-tag-value", "PropagateAtLaunch": False, }, ], @@ -744,14 +744,14 @@ def test_create_autoscaling_group_from_instance(): "ResourceId": "test_asg", "ResourceType": "auto-scaling-group", "Key": "propogated-tag-key", - "Value": "propogate-tag-value", + "Value": "propagate-tag-value", "PropagateAtLaunch": True, }, { "ResourceId": "test_asg", "ResourceType": "auto-scaling-group", "Key": "not-propogated-tag-key", - "Value": "not-propogate-tag-value", + "Value": "not-propagate-tag-value", "PropagateAtLaunch": False, }, ], @@ -1062,7 +1062,7 @@ def test_detach_one_instance_decrement(): "ResourceId": "test_asg", "ResourceType": "auto-scaling-group", "Key": "propogated-tag-key", - "Value": "propogate-tag-value", + "Value": "propagate-tag-value", "PropagateAtLaunch": True, } ], @@ -1116,7 +1116,7 @@ def test_detach_one_instance(): "ResourceId": "test_asg", "ResourceType": "auto-scaling-group", "Key": "propogated-tag-key", - "Value": "propogate-tag-value", + "Value": "propagate-tag-value", "PropagateAtLaunch": True, } ], @@ -1169,7 +1169,7 @@ def test_attach_one_instance(): "ResourceId": "test_asg", "ResourceType": "auto-scaling-group", "Key": "propogated-tag-key", - "Value": "propogate-tag-value", + "Value": "propagate-tag-value", "PropagateAtLaunch": True, } ], diff --git a/tests/test_awslambda/test_lambda.py b/tests/test_awslambda/test_lambda.py index 6fd97e325..4f0bc5063 100644 --- a/tests/test_awslambda/test_lambda.py +++ b/tests/test_awslambda/test_lambda.py @@ -58,8 
+58,7 @@ def lambda_handler(event, context): volume_id = event.get('volume_id') vol = ec2.Volume(volume_id) - print('get volume details for %s\\nVolume - %s state=%s, size=%s' % (volume_id, volume_id, vol.state, vol.size)) - return event + return {{'id': vol.id, 'state': vol.state, 'size': vol.size}} """.format( base_url="motoserver:5000" if settings.TEST_SERVER_MODE @@ -79,7 +78,7 @@ def lambda_handler(event, context): def get_test_zip_file4(): pfunc = """ -def lambda_handler(event, context): +def lambda_handler(event, context): raise Exception('I failed!') """ return _process_lambda(pfunc) @@ -87,14 +86,14 @@ def lambda_handler(event, context): @mock_lambda def test_list_functions(): - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) result = conn.list_functions() result["Functions"].should.have.length_of(0) @mock_lambda def test_invoke_requestresponse_function(): - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) conn.create_function( FunctionName="testFunction", Runtime="python2.7", @@ -114,7 +113,44 @@ def test_invoke_requestresponse_function(): Payload=json.dumps(in_data), ) - success_result["StatusCode"].should.equal(202) + success_result["StatusCode"].should.equal(200) + result_obj = json.loads( + base64.b64decode(success_result["LogResult"]).decode("utf-8") + ) + + result_obj.should.equal(in_data) + + payload = success_result["Payload"].read().decode("utf-8") + json.loads(payload).should.equal(in_data) + + +@mock_lambda +def test_invoke_requestresponse_function_with_arn(): + from moto.awslambda.models import ACCOUNT_ID + + conn = boto3.client("lambda", "us-west-2") + conn.create_function( + FunctionName="testFunction", + Runtime="python2.7", + Role=get_role_name(), + Handler="lambda_function.lambda_handler", + Code={"ZipFile": get_test_zip_file1()}, + Description="test lambda function", + Timeout=3, + MemorySize=128, + Publish=True, + ) + + in_data = {"msg": "So long and 
thanks for all the fish"} + success_result = conn.invoke( + FunctionName="arn:aws:lambda:us-west-2:{}:function:testFunction".format( + ACCOUNT_ID + ), + InvocationType="RequestResponse", + Payload=json.dumps(in_data), + ) + + success_result["StatusCode"].should.equal(200) result_obj = json.loads( base64.b64decode(success_result["LogResult"]).decode("utf-8") ) @@ -127,7 +163,7 @@ def test_invoke_requestresponse_function(): @mock_lambda def test_invoke_event_function(): - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) conn.create_function( FunctionName="testFunction", Runtime="python2.7", @@ -149,7 +185,35 @@ def test_invoke_event_function(): FunctionName="testFunction", InvocationType="Event", Payload=json.dumps(in_data) ) success_result["StatusCode"].should.equal(202) - json.loads(success_result["Payload"].read().decode("utf-8")).should.equal({}) + json.loads(success_result["Payload"].read().decode("utf-8")).should.equal(in_data) + + +@mock_lambda +def test_invoke_dryrun_function(): + conn = boto3.client("lambda", _lambda_region) + conn.create_function( + FunctionName="testFunction", + Runtime="python2.7", + Role=get_role_name(), + Handler="lambda_function.lambda_handler", + Code={"ZipFile": get_test_zip_file1(),}, + Description="test lambda function", + Timeout=3, + MemorySize=128, + Publish=True, + ) + + conn.invoke.when.called_with( + FunctionName="notAFunction", InvocationType="Event", Payload="{}" + ).should.throw(botocore.client.ClientError) + + in_data = {"msg": "So long and thanks for all the fish"} + success_result = conn.invoke( + FunctionName="testFunction", + InvocationType="DryRun", + Payload=json.dumps(in_data), + ) + success_result["StatusCode"].should.equal(204) if settings.TEST_SERVER_MODE: @@ -157,11 +221,11 @@ if settings.TEST_SERVER_MODE: @mock_ec2 @mock_lambda def test_invoke_function_get_ec2_volume(): - conn = boto3.resource("ec2", "us-west-2") - vol = conn.create_volume(Size=99, 
AvailabilityZone="us-west-2") + conn = boto3.resource("ec2", _lambda_region) + vol = conn.create_volume(Size=99, AvailabilityZone=_lambda_region) vol = conn.Volume(vol.id) - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) conn.create_function( FunctionName="testFunction", Runtime="python3.7", @@ -180,28 +244,10 @@ if settings.TEST_SERVER_MODE: InvocationType="RequestResponse", Payload=json.dumps(in_data), ) - result["StatusCode"].should.equal(202) - msg = "get volume details for %s\nVolume - %s state=%s, size=%s\n%s" % ( - vol.id, - vol.id, - vol.state, - vol.size, - json.dumps(in_data).replace( - " ", "" - ), # Makes the tests pass as the result is missing the whitespace - ) - - log_result = base64.b64decode(result["LogResult"]).decode("utf-8") - - # The Docker lambda invocation will return an additional '\n', so need to replace it: - log_result = log_result.replace("\n\n", "\n") - log_result.should.equal(msg) - - payload = result["Payload"].read().decode("utf-8") - - # The Docker lambda invocation will return an additional '\n', so need to replace it: - payload = payload.replace("\n\n", "\n") - payload.should.equal(msg) + result["StatusCode"].should.equal(200) + actual_payload = json.loads(result["Payload"].read().decode("utf-8")) + expected_payload = {"id": vol.id, "state": vol.state, "size": vol.size} + actual_payload.should.equal(expected_payload) @mock_logs @@ -209,14 +255,14 @@ if settings.TEST_SERVER_MODE: @mock_ec2 @mock_lambda def test_invoke_function_from_sns(): - logs_conn = boto3.client("logs", region_name="us-west-2") - sns_conn = boto3.client("sns", region_name="us-west-2") + logs_conn = boto3.client("logs", region_name=_lambda_region) + sns_conn = boto3.client("sns", region_name=_lambda_region) sns_conn.create_topic(Name="some-topic") topics_json = sns_conn.list_topics() topics = topics_json["Topics"] topic_arn = topics[0]["TopicArn"] - conn = boto3.client("lambda", "us-west-2") + conn = 
boto3.client("lambda", _lambda_region) result = conn.create_function( FunctionName="testFunction", Runtime="python2.7", @@ -259,7 +305,7 @@ def test_invoke_function_from_sns(): @mock_lambda def test_create_based_on_s3_with_missing_bucket(): - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) conn.create_function.when.called_with( FunctionName="testFunction", @@ -279,12 +325,15 @@ def test_create_based_on_s3_with_missing_bucket(): @mock_s3 @freeze_time("2015-01-01 00:00:00") def test_create_function_from_aws_bucket(): - s3_conn = boto3.client("s3", "us-west-2") - s3_conn.create_bucket(Bucket="test-bucket") + s3_conn = boto3.client("s3", _lambda_region) + s3_conn.create_bucket( + Bucket="test-bucket", + CreateBucketConfiguration={"LocationConstraint": _lambda_region}, + ) zip_content = get_test_zip_file2() s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) result = conn.create_function( FunctionName="testFunction", @@ -324,6 +373,7 @@ def test_create_function_from_aws_bucket(): "VpcId": "vpc-123abc", }, "ResponseMetadata": {"HTTPStatusCode": 201}, + "State": "Active", } ) @@ -331,7 +381,7 @@ def test_create_function_from_aws_bucket(): @mock_lambda @freeze_time("2015-01-01 00:00:00") def test_create_function_from_zipfile(): - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) zip_content = get_test_zip_file1() result = conn.create_function( FunctionName="testFunction", @@ -367,6 +417,7 @@ def test_create_function_from_zipfile(): "Version": "1", "VpcConfig": {"SecurityGroupIds": [], "SubnetIds": []}, "ResponseMetadata": {"HTTPStatusCode": 201}, + "State": "Active", } ) @@ -375,12 +426,15 @@ def test_create_function_from_zipfile(): @mock_s3 @freeze_time("2015-01-01 00:00:00") def test_get_function(): - s3_conn = boto3.client("s3", "us-west-2") - 
s3_conn.create_bucket(Bucket="test-bucket") + s3_conn = boto3.client("s3", _lambda_region) + s3_conn.create_bucket( + Bucket="test-bucket", + CreateBucketConfiguration={"LocationConstraint": _lambda_region}, + ) zip_content = get_test_zip_file1() s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) conn.create_function( FunctionName="testFunction", @@ -435,7 +489,7 @@ def test_get_function(): ) # Test get function when can't find function name - with assert_raises(ClientError): + with assert_raises(conn.exceptions.ResourceNotFoundException): conn.get_function(FunctionName="junk", Qualifier="$LATEST") @@ -444,7 +498,10 @@ def test_get_function(): def test_get_function_by_arn(): bucket_name = "test-bucket" s3_conn = boto3.client("s3", "us-east-1") - s3_conn.create_bucket(Bucket=bucket_name) + s3_conn.create_bucket( + Bucket=bucket_name, + CreateBucketConfiguration={"LocationConstraint": _lambda_region}, + ) zip_content = get_test_zip_file2() s3_conn.put_object(Bucket=bucket_name, Key="test.zip", Body=zip_content) @@ -469,12 +526,15 @@ def test_get_function_by_arn(): @mock_lambda @mock_s3 def test_delete_function(): - s3_conn = boto3.client("s3", "us-west-2") - s3_conn.create_bucket(Bucket="test-bucket") + s3_conn = boto3.client("s3", _lambda_region) + s3_conn.create_bucket( + Bucket="test-bucket", + CreateBucketConfiguration={"LocationConstraint": _lambda_region}, + ) zip_content = get_test_zip_file2() s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) conn.create_function( FunctionName="testFunction", @@ -505,7 +565,10 @@ def test_delete_function(): def test_delete_function_by_arn(): bucket_name = "test-bucket" s3_conn = boto3.client("s3", "us-east-1") - s3_conn.create_bucket(Bucket=bucket_name) + s3_conn.create_bucket( + 
Bucket=bucket_name, + CreateBucketConfiguration={"LocationConstraint": _lambda_region}, + ) zip_content = get_test_zip_file2() s3_conn.put_object(Bucket=bucket_name, Key="test.zip", Body=zip_content) @@ -530,7 +593,7 @@ def test_delete_function_by_arn(): @mock_lambda def test_delete_unknown_function(): - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) conn.delete_function.when.called_with( FunctionName="testFunctionThatDoesntExist" ).should.throw(botocore.client.ClientError) @@ -539,12 +602,15 @@ def test_delete_unknown_function(): @mock_lambda @mock_s3 def test_publish(): - s3_conn = boto3.client("s3", "us-west-2") - s3_conn.create_bucket(Bucket="test-bucket") + s3_conn = boto3.client("s3", _lambda_region) + s3_conn.create_bucket( + Bucket="test-bucket", + CreateBucketConfiguration={"LocationConstraint": _lambda_region}, + ) zip_content = get_test_zip_file2() s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) conn.create_function( FunctionName="testFunction", @@ -589,12 +655,15 @@ def test_list_create_list_get_delete_list(): test `list -> create -> list -> get -> delete -> list` integration """ - s3_conn = boto3.client("s3", "us-west-2") - s3_conn.create_bucket(Bucket="test-bucket") + s3_conn = boto3.client("s3", _lambda_region) + s3_conn.create_bucket( + Bucket="test-bucket", + CreateBucketConfiguration={"LocationConstraint": _lambda_region}, + ) zip_content = get_test_zip_file2() s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) conn.list_functions()["Functions"].should.have.length_of(0) @@ -631,6 +700,7 @@ def test_list_create_list_get_delete_list(): "Timeout": 3, "Version": "$LATEST", "VpcConfig": {"SecurityGroupIds": [], "SubnetIds": []}, + "State": "Active", }, "ResponseMetadata": 
{"HTTPStatusCode": 200}, } @@ -690,12 +760,15 @@ def test_tags(): """ test list_tags -> tag_resource -> list_tags -> tag_resource -> list_tags -> untag_resource -> list_tags integration """ - s3_conn = boto3.client("s3", "us-west-2") - s3_conn.create_bucket(Bucket="test-bucket") + s3_conn = boto3.client("s3", _lambda_region) + s3_conn.create_bucket( + Bucket="test-bucket", + CreateBucketConfiguration={"LocationConstraint": _lambda_region}, + ) zip_content = get_test_zip_file2() s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) function = conn.create_function( FunctionName="testFunction", @@ -747,7 +820,7 @@ def test_tags_not_found(): """ Test list_tags and tag_resource when the lambda with the given arn does not exist """ - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) conn.list_tags.when.called_with( Resource="arn:aws:lambda:{}:function:not-found".format(ACCOUNT_ID) ).should.throw(botocore.client.ClientError) @@ -765,7 +838,7 @@ def test_tags_not_found(): @mock_lambda def test_invoke_async_function(): - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) conn.create_function( FunctionName="testFunction", Runtime="python2.7", @@ -788,7 +861,7 @@ def test_invoke_async_function(): @mock_lambda @freeze_time("2015-01-01 00:00:00") def test_get_function_created_with_zipfile(): - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) zip_content = get_test_zip_file1() result = conn.create_function( FunctionName="testFunction", @@ -827,13 +900,14 @@ def test_get_function_created_with_zipfile(): "Timeout": 3, "Version": "$LATEST", "VpcConfig": {"SecurityGroupIds": [], "SubnetIds": []}, + "State": "Active", } ) @mock_lambda def test_add_function_permission(): - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", 
_lambda_region) zip_content = get_test_zip_file1() conn.create_function( FunctionName="testFunction", @@ -864,7 +938,7 @@ def test_add_function_permission(): @mock_lambda def test_get_function_policy(): - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) zip_content = get_test_zip_file1() conn.create_function( FunctionName="testFunction", @@ -899,12 +973,15 @@ def test_get_function_policy(): @mock_lambda @mock_s3 def test_list_versions_by_function(): - s3_conn = boto3.client("s3", "us-west-2") - s3_conn.create_bucket(Bucket="test-bucket") + s3_conn = boto3.client("s3", _lambda_region) + s3_conn.create_bucket( + Bucket="test-bucket", + CreateBucketConfiguration={"LocationConstraint": _lambda_region}, + ) zip_content = get_test_zip_file2() s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) conn.create_function( FunctionName="testFunction", @@ -955,12 +1032,15 @@ def test_list_versions_by_function(): @mock_lambda @mock_s3 def test_create_function_with_already_exists(): - s3_conn = boto3.client("s3", "us-west-2") - s3_conn.create_bucket(Bucket="test-bucket") + s3_conn = boto3.client("s3", _lambda_region) + s3_conn.create_bucket( + Bucket="test-bucket", + CreateBucketConfiguration={"LocationConstraint": _lambda_region}, + ) zip_content = get_test_zip_file2() s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) conn.create_function( FunctionName="testFunction", @@ -992,7 +1072,7 @@ def test_create_function_with_already_exists(): @mock_lambda @mock_s3 def test_list_versions_by_function_for_nonexistent_function(): - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) versions = conn.list_versions_by_function(FunctionName="testFunction") assert 
len(versions["Versions"]) == 0 @@ -1341,12 +1421,15 @@ def test_delete_event_source_mapping(): @mock_lambda @mock_s3 def test_update_configuration(): - s3_conn = boto3.client("s3", "us-west-2") - s3_conn.create_bucket(Bucket="test-bucket") + s3_conn = boto3.client("s3", _lambda_region) + s3_conn.create_bucket( + Bucket="test-bucket", + CreateBucketConfiguration={"LocationConstraint": _lambda_region}, + ) zip_content = get_test_zip_file2() s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) fxn = conn.create_function( FunctionName="testFunction", @@ -1389,7 +1472,7 @@ def test_update_configuration(): @mock_lambda def test_update_function_zip(): - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) zip_content_one = get_test_zip_file1() @@ -1436,6 +1519,7 @@ def test_update_function_zip(): "Timeout": 3, "Version": "2", "VpcConfig": {"SecurityGroupIds": [], "SubnetIds": []}, + "State": "Active", } ) @@ -1443,13 +1527,16 @@ def test_update_function_zip(): @mock_lambda @mock_s3 def test_update_function_s3(): - s3_conn = boto3.client("s3", "us-west-2") - s3_conn.create_bucket(Bucket="test-bucket") + s3_conn = boto3.client("s3", _lambda_region) + s3_conn.create_bucket( + Bucket="test-bucket", + CreateBucketConfiguration={"LocationConstraint": _lambda_region}, + ) zip_content = get_test_zip_file1() s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) - conn = boto3.client("lambda", "us-west-2") + conn = boto3.client("lambda", _lambda_region) fxn = conn.create_function( FunctionName="testFunctionS3", @@ -1498,6 +1585,7 @@ def test_update_function_s3(): "Timeout": 3, "Version": "2", "VpcConfig": {"SecurityGroupIds": [], "SubnetIds": []}, + "State": "Active", } ) @@ -1529,7 +1617,7 @@ def test_create_function_with_unknown_arn(): def create_invalid_lambda(role): - conn = boto3.client("lambda", 
"us-west-2") + conn = boto3.client("lambda", _lambda_region) zip_content = get_test_zip_file1() with assert_raises(ClientError) as err: conn.create_function( @@ -1548,7 +1636,7 @@ def create_invalid_lambda(role): def get_role_name(): with mock_iam(): - iam = boto3.client("iam", region_name="us-west-2") + iam = boto3.client("iam", region_name=_lambda_region) try: return iam.get_role(RoleName="my-role")["Role"]["Arn"] except ClientError: diff --git a/tests/test_awslambda/test_lambda_cloudformation.py b/tests/test_awslambda/test_lambda_cloudformation.py index a5d4d23fd..f57354d69 100644 --- a/tests/test_awslambda/test_lambda_cloudformation.py +++ b/tests/test_awslambda/test_lambda_cloudformation.py @@ -94,7 +94,7 @@ def test_lambda_can_be_deleted_by_cloudformation(): # Verify function was deleted with assert_raises(ClientError) as e: lmbda.get_function(FunctionName=created_fn_name) - e.exception.response["Error"]["Code"].should.equal("404") + e.exception.response["Error"]["Code"].should.equal("ResourceNotFoundException") def create_stack(cf, s3): diff --git a/tests/test_awslambda/test_policy.py b/tests/test_awslambda/test_policy.py new file mode 100644 index 000000000..92ef0087b --- /dev/null +++ b/tests/test_awslambda/test_policy.py @@ -0,0 +1,49 @@ +from __future__ import unicode_literals + +import json +import sure + +from moto.awslambda.policy import Policy + + +class MockLambdaFunction: + def __init__(self, arn): + self.function_arn = arn + self.policy = None + + +def test_policy(): + policy = Policy(MockLambdaFunction("arn")) + statement = { + "StatementId": "statement0", + "Action": "lambda:InvokeFunction", + "FunctionName": "function_name", + "Principal": "events.amazonaws.com", + "SourceArn": "arn:aws:events:us-east-1:111111111111:rule/rule_name", + "SourceAccount": "111111111111", + } + + expected = { + "Action": "lambda:InvokeFunction", + "FunctionName": "function_name", + "Principal": {"Service": "events.amazonaws.com"}, + "Effect": "Allow", + "Resource": 
"arn:$LATEST", + "Sid": "statement0", + "Condition": { + "ArnLike": { + "AWS:SourceArn": "arn:aws:events:us-east-1:111111111111:rule/rule_name", + }, + "StringEquals": {"AWS:SourceAccount": "111111111111"}, + }, + } + + policy.add_statement(json.dumps(statement)) + expected.should.be.equal(policy.statements[0]) + + sid = statement.get("StatementId", None) + if sid == None: + raise "TestCase.statement does not contain StatementId" + + policy.del_statement(sid) + [].should.be.equal(policy.statements) diff --git a/tests/test_cloudformation/fixtures/vpc_eip.py b/tests/test_cloudformation/fixtures/vpc_eip.py index b5bd48c01..154d4c2d4 100644 --- a/tests/test_cloudformation/fixtures/vpc_eip.py +++ b/tests/test_cloudformation/fixtures/vpc_eip.py @@ -1,5 +1,5 @@ -from __future__ import unicode_literals - -template = { - "Resources": {"VPCEIP": {"Type": "AWS::EC2::EIP", "Properties": {"Domain": "vpc"}}} -} +from __future__ import unicode_literals + +template = { + "Resources": {"VPCEIP": {"Type": "AWS::EC2::EIP", "Properties": {"Domain": "vpc"}}} +} diff --git a/tests/test_cloudformation/fixtures/vpc_single_instance_in_subnet.py b/tests/test_cloudformation/fixtures/vpc_single_instance_in_subnet.py index ff7b75518..546f68cb4 100644 --- a/tests/test_cloudformation/fixtures/vpc_single_instance_in_subnet.py +++ b/tests/test_cloudformation/fixtures/vpc_single_instance_in_subnet.py @@ -1,276 +1,276 @@ -from __future__ import unicode_literals - -template = { - "Description": "AWS CloudFormation Sample Template vpc_single_instance_in_subnet.template: Sample template showing how to create a VPC and add an EC2 instance with an Elastic IP address and a security group. **WARNING** This template creates an Amazon EC2 instance. 
You will be billed for the AWS resources used if you create a stack from this template.", - "Parameters": { - "SSHLocation": { - "ConstraintDescription": "must be a valid IP CIDR range of the form x.x.x.x/x.", - "Description": " The IP address range that can be used to SSH to the EC2 instances", - "Default": "0.0.0.0/0", - "MinLength": "9", - "AllowedPattern": "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,2})", - "MaxLength": "18", - "Type": "String", - }, - "KeyName": { - "Type": "String", - "Description": "Name of an existing EC2 KeyPair to enable SSH access to the instance", - "MinLength": "1", - "AllowedPattern": "[\\x20-\\x7E]*", - "MaxLength": "255", - "ConstraintDescription": "can contain only ASCII characters.", - }, - "InstanceType": { - "Default": "m1.small", - "ConstraintDescription": "must be a valid EC2 instance type.", - "Type": "String", - "Description": "WebServer EC2 instance type", - "AllowedValues": [ - "t1.micro", - "m1.small", - "m1.medium", - "m1.large", - "m1.xlarge", - "m2.xlarge", - "m2.2xlarge", - "m2.4xlarge", - "m3.xlarge", - "m3.2xlarge", - "c1.medium", - "c1.xlarge", - "cc1.4xlarge", - "cc2.8xlarge", - "cg1.4xlarge", - ], - }, - }, - "AWSTemplateFormatVersion": "2010-09-09", - "Outputs": { - "URL": { - "Description": "Newly created application URL", - "Value": { - "Fn::Join": [ - "", - ["http://", {"Fn::GetAtt": ["WebServerInstance", "PublicIp"]}], - ] - }, - } - }, - "Resources": { - "Subnet": { - "Type": "AWS::EC2::Subnet", - "Properties": { - "VpcId": {"Ref": "VPC"}, - "CidrBlock": "10.0.0.0/24", - "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}], - }, - }, - "WebServerWaitHandle": {"Type": "AWS::CloudFormation::WaitConditionHandle"}, - "Route": { - "Type": "AWS::EC2::Route", - "Properties": { - "GatewayId": {"Ref": "InternetGateway"}, - "DestinationCidrBlock": "0.0.0.0/0", - "RouteTableId": {"Ref": "RouteTable"}, - }, - "DependsOn": "AttachGateway", - }, - "SubnetRouteTableAssociation": { - "Type": 
"AWS::EC2::SubnetRouteTableAssociation", - "Properties": { - "SubnetId": {"Ref": "Subnet"}, - "RouteTableId": {"Ref": "RouteTable"}, - }, - }, - "InternetGateway": { - "Type": "AWS::EC2::InternetGateway", - "Properties": { - "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}] - }, - }, - "RouteTable": { - "Type": "AWS::EC2::RouteTable", - "Properties": { - "VpcId": {"Ref": "VPC"}, - "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}], - }, - }, - "WebServerWaitCondition": { - "Type": "AWS::CloudFormation::WaitCondition", - "Properties": {"Handle": {"Ref": "WebServerWaitHandle"}, "Timeout": "300"}, - "DependsOn": "WebServerInstance", - }, - "VPC": { - "Type": "AWS::EC2::VPC", - "Properties": { - "CidrBlock": "10.0.0.0/16", - "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}], - }, - }, - "InstanceSecurityGroup": { - "Type": "AWS::EC2::SecurityGroup", - "Properties": { - "SecurityGroupIngress": [ - { - "ToPort": "22", - "IpProtocol": "tcp", - "CidrIp": {"Ref": "SSHLocation"}, - "FromPort": "22", - }, - { - "ToPort": "80", - "IpProtocol": "tcp", - "CidrIp": "0.0.0.0/0", - "FromPort": "80", - }, - ], - "VpcId": {"Ref": "VPC"}, - "GroupDescription": "Enable SSH access via port 22", - }, - }, - "WebServerInstance": { - "Type": "AWS::EC2::Instance", - "Properties": { - "UserData": { - "Fn::Base64": { - "Fn::Join": [ - "", - [ - "#!/bin/bash\n", - "yum update -y aws-cfn-bootstrap\n", - "# Helper function\n", - "function error_exit\n", - "{\n", - ' /opt/aws/bin/cfn-signal -e 1 -r "$1" \'', - {"Ref": "WebServerWaitHandle"}, - "'\n", - " exit 1\n", - "}\n", - "# Install the simple web page\n", - "/opt/aws/bin/cfn-init -s ", - {"Ref": "AWS::StackId"}, - " -r WebServerInstance ", - " --region ", - {"Ref": "AWS::Region"}, - " || error_exit 'Failed to run cfn-init'\n", - "# Start up the cfn-hup daemon to listen for changes to the Web Server metadata\n", - "/opt/aws/bin/cfn-hup || error_exit 'Failed to start cfn-hup'\n", - "# All done 
so signal success\n", - '/opt/aws/bin/cfn-signal -e 0 -r "WebServer setup complete" \'', - {"Ref": "WebServerWaitHandle"}, - "'\n", - ], - ] - } - }, - "Tags": [ - {"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}, - {"Value": "Bar", "Key": "Foo"}, - ], - "SecurityGroupIds": [{"Ref": "InstanceSecurityGroup"}], - "KeyName": {"Ref": "KeyName"}, - "SubnetId": {"Ref": "Subnet"}, - "ImageId": { - "Fn::FindInMap": ["RegionMap", {"Ref": "AWS::Region"}, "AMI"] - }, - "InstanceType": {"Ref": "InstanceType"}, - }, - "Metadata": { - "Comment": "Install a simple PHP application", - "AWS::CloudFormation::Init": { - "config": { - "files": { - "/etc/cfn/cfn-hup.conf": { - "content": { - "Fn::Join": [ - "", - [ - "[main]\n", - "stack=", - {"Ref": "AWS::StackId"}, - "\n", - "region=", - {"Ref": "AWS::Region"}, - "\n", - ], - ] - }, - "owner": "root", - "group": "root", - "mode": "000400", - }, - "/etc/cfn/hooks.d/cfn-auto-reloader.conf": { - "content": { - "Fn::Join": [ - "", - [ - "[cfn-auto-reloader-hook]\n", - "triggers=post.update\n", - "path=Resources.WebServerInstance.Metadata.AWS::CloudFormation::Init\n", - "action=/opt/aws/bin/cfn-init -s ", - {"Ref": "AWS::StackId"}, - " -r WebServerInstance ", - " --region ", - {"Ref": "AWS::Region"}, - "\n", - "runas=root\n", - ], - ] - } - }, - "/var/www/html/index.php": { - "content": { - "Fn::Join": [ - "", - [ - "AWS CloudFormation sample PHP application';\n", - "?>\n", - ], - ] - }, - "owner": "apache", - "group": "apache", - "mode": "000644", - }, - }, - "services": { - "sysvinit": { - "httpd": {"ensureRunning": "true", "enabled": "true"}, - "sendmail": { - "ensureRunning": "false", - "enabled": "false", - }, - } - }, - "packages": {"yum": {"httpd": [], "php": []}}, - } - }, - }, - }, - "IPAddress": { - "Type": "AWS::EC2::EIP", - "Properties": {"InstanceId": {"Ref": "WebServerInstance"}, "Domain": "vpc"}, - "DependsOn": "AttachGateway", - }, - "AttachGateway": { - "Type": "AWS::EC2::VPCGatewayAttachment", - "Properties": { - 
"VpcId": {"Ref": "VPC"}, - "InternetGatewayId": {"Ref": "InternetGateway"}, - }, - }, - }, - "Mappings": { - "RegionMap": { - "ap-southeast-1": {"AMI": "ami-74dda626"}, - "ap-southeast-2": {"AMI": "ami-b3990e89"}, - "us-west-2": {"AMI": "ami-16fd7026"}, - "us-east-1": {"AMI": "ami-7f418316"}, - "ap-northeast-1": {"AMI": "ami-dcfa4edd"}, - "us-west-1": {"AMI": "ami-951945d0"}, - "eu-west-1": {"AMI": "ami-24506250"}, - "sa-east-1": {"AMI": "ami-3e3be423"}, - } - }, -} +from __future__ import unicode_literals + +template = { + "Description": "AWS CloudFormation Sample Template vpc_single_instance_in_subnet.template: Sample template showing how to create a VPC and add an EC2 instance with an Elastic IP address and a security group. **WARNING** This template creates an Amazon EC2 instance. You will be billed for the AWS resources used if you create a stack from this template.", + "Parameters": { + "SSHLocation": { + "ConstraintDescription": "must be a valid IP CIDR range of the form x.x.x.x/x.", + "Description": " The IP address range that can be used to SSH to the EC2 instances", + "Default": "0.0.0.0/0", + "MinLength": "9", + "AllowedPattern": "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,2})", + "MaxLength": "18", + "Type": "String", + }, + "KeyName": { + "Type": "String", + "Description": "Name of an existing EC2 KeyPair to enable SSH access to the instance", + "MinLength": "1", + "AllowedPattern": "[\\x20-\\x7E]*", + "MaxLength": "255", + "ConstraintDescription": "can contain only ASCII characters.", + }, + "InstanceType": { + "Default": "m1.small", + "ConstraintDescription": "must be a valid EC2 instance type.", + "Type": "String", + "Description": "WebServer EC2 instance type", + "AllowedValues": [ + "t1.micro", + "m1.small", + "m1.medium", + "m1.large", + "m1.xlarge", + "m2.xlarge", + "m2.2xlarge", + "m2.4xlarge", + "m3.xlarge", + "m3.2xlarge", + "c1.medium", + "c1.xlarge", + "cc1.4xlarge", + "cc2.8xlarge", + "cg1.4xlarge", + ], + }, + }, + 
"AWSTemplateFormatVersion": "2010-09-09", + "Outputs": { + "URL": { + "Description": "Newly created application URL", + "Value": { + "Fn::Join": [ + "", + ["http://", {"Fn::GetAtt": ["WebServerInstance", "PublicIp"]}], + ] + }, + } + }, + "Resources": { + "Subnet": { + "Type": "AWS::EC2::Subnet", + "Properties": { + "VpcId": {"Ref": "VPC"}, + "CidrBlock": "10.0.0.0/24", + "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}], + }, + }, + "WebServerWaitHandle": {"Type": "AWS::CloudFormation::WaitConditionHandle"}, + "Route": { + "Type": "AWS::EC2::Route", + "Properties": { + "GatewayId": {"Ref": "InternetGateway"}, + "DestinationCidrBlock": "0.0.0.0/0", + "RouteTableId": {"Ref": "RouteTable"}, + }, + "DependsOn": "AttachGateway", + }, + "SubnetRouteTableAssociation": { + "Type": "AWS::EC2::SubnetRouteTableAssociation", + "Properties": { + "SubnetId": {"Ref": "Subnet"}, + "RouteTableId": {"Ref": "RouteTable"}, + }, + }, + "InternetGateway": { + "Type": "AWS::EC2::InternetGateway", + "Properties": { + "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}] + }, + }, + "RouteTable": { + "Type": "AWS::EC2::RouteTable", + "Properties": { + "VpcId": {"Ref": "VPC"}, + "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}], + }, + }, + "WebServerWaitCondition": { + "Type": "AWS::CloudFormation::WaitCondition", + "Properties": {"Handle": {"Ref": "WebServerWaitHandle"}, "Timeout": "300"}, + "DependsOn": "WebServerInstance", + }, + "VPC": { + "Type": "AWS::EC2::VPC", + "Properties": { + "CidrBlock": "10.0.0.0/16", + "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}], + }, + }, + "InstanceSecurityGroup": { + "Type": "AWS::EC2::SecurityGroup", + "Properties": { + "SecurityGroupIngress": [ + { + "ToPort": "22", + "IpProtocol": "tcp", + "CidrIp": {"Ref": "SSHLocation"}, + "FromPort": "22", + }, + { + "ToPort": "80", + "IpProtocol": "tcp", + "CidrIp": "0.0.0.0/0", + "FromPort": "80", + }, + ], + "VpcId": {"Ref": "VPC"}, + 
"GroupDescription": "Enable SSH access via port 22", + }, + }, + "WebServerInstance": { + "Type": "AWS::EC2::Instance", + "Properties": { + "UserData": { + "Fn::Base64": { + "Fn::Join": [ + "", + [ + "#!/bin/bash\n", + "yum update -y aws-cfn-bootstrap\n", + "# Helper function\n", + "function error_exit\n", + "{\n", + ' /opt/aws/bin/cfn-signal -e 1 -r "$1" \'', + {"Ref": "WebServerWaitHandle"}, + "'\n", + " exit 1\n", + "}\n", + "# Install the simple web page\n", + "/opt/aws/bin/cfn-init -s ", + {"Ref": "AWS::StackId"}, + " -r WebServerInstance ", + " --region ", + {"Ref": "AWS::Region"}, + " || error_exit 'Failed to run cfn-init'\n", + "# Start up the cfn-hup daemon to listen for changes to the Web Server metadata\n", + "/opt/aws/bin/cfn-hup || error_exit 'Failed to start cfn-hup'\n", + "# All done so signal success\n", + '/opt/aws/bin/cfn-signal -e 0 -r "WebServer setup complete" \'', + {"Ref": "WebServerWaitHandle"}, + "'\n", + ], + ] + } + }, + "Tags": [ + {"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}, + {"Value": "Bar", "Key": "Foo"}, + ], + "SecurityGroupIds": [{"Ref": "InstanceSecurityGroup"}], + "KeyName": {"Ref": "KeyName"}, + "SubnetId": {"Ref": "Subnet"}, + "ImageId": { + "Fn::FindInMap": ["RegionMap", {"Ref": "AWS::Region"}, "AMI"] + }, + "InstanceType": {"Ref": "InstanceType"}, + }, + "Metadata": { + "Comment": "Install a simple PHP application", + "AWS::CloudFormation::Init": { + "config": { + "files": { + "/etc/cfn/cfn-hup.conf": { + "content": { + "Fn::Join": [ + "", + [ + "[main]\n", + "stack=", + {"Ref": "AWS::StackId"}, + "\n", + "region=", + {"Ref": "AWS::Region"}, + "\n", + ], + ] + }, + "owner": "root", + "group": "root", + "mode": "000400", + }, + "/etc/cfn/hooks.d/cfn-auto-reloader.conf": { + "content": { + "Fn::Join": [ + "", + [ + "[cfn-auto-reloader-hook]\n", + "triggers=post.update\n", + "path=Resources.WebServerInstance.Metadata.AWS::CloudFormation::Init\n", + "action=/opt/aws/bin/cfn-init -s ", + {"Ref": "AWS::StackId"}, + " 
-r WebServerInstance ", + " --region ", + {"Ref": "AWS::Region"}, + "\n", + "runas=root\n", + ], + ] + } + }, + "/var/www/html/index.php": { + "content": { + "Fn::Join": [ + "", + [ + "AWS CloudFormation sample PHP application';\n", + "?>\n", + ], + ] + }, + "owner": "apache", + "group": "apache", + "mode": "000644", + }, + }, + "services": { + "sysvinit": { + "httpd": {"ensureRunning": "true", "enabled": "true"}, + "sendmail": { + "ensureRunning": "false", + "enabled": "false", + }, + } + }, + "packages": {"yum": {"httpd": [], "php": []}}, + } + }, + }, + }, + "IPAddress": { + "Type": "AWS::EC2::EIP", + "Properties": {"InstanceId": {"Ref": "WebServerInstance"}, "Domain": "vpc"}, + "DependsOn": "AttachGateway", + }, + "AttachGateway": { + "Type": "AWS::EC2::VPCGatewayAttachment", + "Properties": { + "VpcId": {"Ref": "VPC"}, + "InternetGatewayId": {"Ref": "InternetGateway"}, + }, + }, + }, + "Mappings": { + "RegionMap": { + "ap-southeast-1": {"AMI": "ami-74dda626"}, + "ap-southeast-2": {"AMI": "ami-b3990e89"}, + "us-west-2": {"AMI": "ami-16fd7026"}, + "us-east-1": {"AMI": "ami-7f418316"}, + "ap-northeast-1": {"AMI": "ami-dcfa4edd"}, + "us-west-1": {"AMI": "ami-951945d0"}, + "eu-west-1": {"AMI": "ami-24506250"}, + "sa-east-1": {"AMI": "ami-3e3be423"}, + } + }, +} diff --git a/tests/test_cloudformation/test_cloudformation_stack_crud.py b/tests/test_cloudformation/test_cloudformation_stack_crud.py index 75f705ea7..3d1b2ab8c 100644 --- a/tests/test_cloudformation/test_cloudformation_stack_crud.py +++ b/tests/test_cloudformation/test_cloudformation_stack_crud.py @@ -143,7 +143,7 @@ def test_create_stack_with_notification_arn(): @mock_s3_deprecated def test_create_stack_from_s3_url(): s3_conn = boto.s3.connect_to_region("us-west-1") - bucket = s3_conn.create_bucket("foobar") + bucket = s3_conn.create_bucket("foobar", location="us-west-1") key = boto.s3.key.Key(bucket) key.key = "template-key" key.set_contents_from_string(dummy_template_json) diff --git 
a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py index 40fb2d669..b7e86a1d5 100644 --- a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py +++ b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py @@ -522,6 +522,13 @@ def test_boto3_list_stack_set_operations(): list_operation["Summaries"][-1]["Action"].should.equal("UPDATE") +@mock_cloudformation +def test_boto3_bad_list_stack_resources(): + cf_conn = boto3.client("cloudformation", region_name="us-east-1") + with assert_raises(ClientError): + cf_conn.list_stack_resources(StackName="test_stack_set") + + @mock_cloudformation def test_boto3_delete_stack_set(): cf_conn = boto3.client("cloudformation", region_name="us-east-1") diff --git a/tests/test_cloudwatch/test_cloudwatch.py b/tests/test_cloudwatch/test_cloudwatch.py index f1a2e3fd6..cc624e852 100644 --- a/tests/test_cloudwatch/test_cloudwatch.py +++ b/tests/test_cloudwatch/test_cloudwatch.py @@ -1,117 +1,117 @@ -import boto -from boto.ec2.cloudwatch.alarm import MetricAlarm -import sure # noqa - -from moto import mock_cloudwatch_deprecated - - -def alarm_fixture(name="tester", action=None): - action = action or ["arn:alarm"] - return MetricAlarm( - name=name, - namespace="{0}_namespace".format(name), - metric="{0}_metric".format(name), - comparison=">=", - threshold=2.0, - period=60, - evaluation_periods=5, - statistic="Average", - description="A test", - dimensions={"InstanceId": ["i-0123456,i-0123457"]}, - alarm_actions=action, - ok_actions=["arn:ok"], - insufficient_data_actions=["arn:insufficient"], - unit="Seconds", - ) - - -@mock_cloudwatch_deprecated -def test_create_alarm(): - conn = boto.connect_cloudwatch() - - alarm = alarm_fixture() - conn.create_alarm(alarm) - - alarms = conn.describe_alarms() - alarms.should.have.length_of(1) - alarm = alarms[0] - alarm.name.should.equal("tester") - alarm.namespace.should.equal("tester_namespace") - 
alarm.metric.should.equal("tester_metric") - alarm.comparison.should.equal(">=") - alarm.threshold.should.equal(2.0) - alarm.period.should.equal(60) - alarm.evaluation_periods.should.equal(5) - alarm.statistic.should.equal("Average") - alarm.description.should.equal("A test") - dict(alarm.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]}) - list(alarm.alarm_actions).should.equal(["arn:alarm"]) - list(alarm.ok_actions).should.equal(["arn:ok"]) - list(alarm.insufficient_data_actions).should.equal(["arn:insufficient"]) - alarm.unit.should.equal("Seconds") - - -@mock_cloudwatch_deprecated -def test_delete_alarm(): - conn = boto.connect_cloudwatch() - - alarms = conn.describe_alarms() - alarms.should.have.length_of(0) - - alarm = alarm_fixture() - conn.create_alarm(alarm) - - alarms = conn.describe_alarms() - alarms.should.have.length_of(1) - - alarms[0].delete() - - alarms = conn.describe_alarms() - alarms.should.have.length_of(0) - - -@mock_cloudwatch_deprecated -def test_put_metric_data(): - conn = boto.connect_cloudwatch() - - conn.put_metric_data( - namespace="tester", - name="metric", - value=1.5, - dimensions={"InstanceId": ["i-0123456,i-0123457"]}, - ) - - metrics = conn.list_metrics() - metrics.should.have.length_of(1) - metric = metrics[0] - metric.namespace.should.equal("tester") - metric.name.should.equal("metric") - dict(metric.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]}) - - -@mock_cloudwatch_deprecated -def test_describe_alarms(): - conn = boto.connect_cloudwatch() - - alarms = conn.describe_alarms() - alarms.should.have.length_of(0) - - conn.create_alarm(alarm_fixture(name="nfoobar", action="afoobar")) - conn.create_alarm(alarm_fixture(name="nfoobaz", action="afoobaz")) - conn.create_alarm(alarm_fixture(name="nbarfoo", action="abarfoo")) - conn.create_alarm(alarm_fixture(name="nbazfoo", action="abazfoo")) - - alarms = conn.describe_alarms() - alarms.should.have.length_of(4) - alarms = 
conn.describe_alarms(alarm_name_prefix="nfoo") - alarms.should.have.length_of(2) - alarms = conn.describe_alarms(alarm_names=["nfoobar", "nbarfoo", "nbazfoo"]) - alarms.should.have.length_of(3) - alarms = conn.describe_alarms(action_prefix="afoo") - alarms.should.have.length_of(2) - - for alarm in conn.describe_alarms(): - alarm.delete() - - alarms = conn.describe_alarms() - alarms.should.have.length_of(0) +import boto +from boto.ec2.cloudwatch.alarm import MetricAlarm +import sure # noqa + +from moto import mock_cloudwatch_deprecated + + +def alarm_fixture(name="tester", action=None): + action = action or ["arn:alarm"] + return MetricAlarm( + name=name, + namespace="{0}_namespace".format(name), + metric="{0}_metric".format(name), + comparison=">=", + threshold=2.0, + period=60, + evaluation_periods=5, + statistic="Average", + description="A test", + dimensions={"InstanceId": ["i-0123456,i-0123457"]}, + alarm_actions=action, + ok_actions=["arn:ok"], + insufficient_data_actions=["arn:insufficient"], + unit="Seconds", + ) + + +@mock_cloudwatch_deprecated +def test_create_alarm(): + conn = boto.connect_cloudwatch() + + alarm = alarm_fixture() + conn.create_alarm(alarm) + + alarms = conn.describe_alarms() + alarms.should.have.length_of(1) + alarm = alarms[0] + alarm.name.should.equal("tester") + alarm.namespace.should.equal("tester_namespace") + alarm.metric.should.equal("tester_metric") + alarm.comparison.should.equal(">=") + alarm.threshold.should.equal(2.0) + alarm.period.should.equal(60) + alarm.evaluation_periods.should.equal(5) + alarm.statistic.should.equal("Average") + alarm.description.should.equal("A test") + dict(alarm.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]}) + list(alarm.alarm_actions).should.equal(["arn:alarm"]) + list(alarm.ok_actions).should.equal(["arn:ok"]) + list(alarm.insufficient_data_actions).should.equal(["arn:insufficient"]) + alarm.unit.should.equal("Seconds") + + +@mock_cloudwatch_deprecated +def test_delete_alarm(): + 
conn = boto.connect_cloudwatch() + + alarms = conn.describe_alarms() + alarms.should.have.length_of(0) + + alarm = alarm_fixture() + conn.create_alarm(alarm) + + alarms = conn.describe_alarms() + alarms.should.have.length_of(1) + + alarms[0].delete() + + alarms = conn.describe_alarms() + alarms.should.have.length_of(0) + + +@mock_cloudwatch_deprecated +def test_put_metric_data(): + conn = boto.connect_cloudwatch() + + conn.put_metric_data( + namespace="tester", + name="metric", + value=1.5, + dimensions={"InstanceId": ["i-0123456,i-0123457"]}, + ) + + metrics = conn.list_metrics() + metrics.should.have.length_of(1) + metric = metrics[0] + metric.namespace.should.equal("tester") + metric.name.should.equal("metric") + dict(metric.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]}) + + +@mock_cloudwatch_deprecated +def test_describe_alarms(): + conn = boto.connect_cloudwatch() + + alarms = conn.describe_alarms() + alarms.should.have.length_of(0) + + conn.create_alarm(alarm_fixture(name="nfoobar", action="afoobar")) + conn.create_alarm(alarm_fixture(name="nfoobaz", action="afoobaz")) + conn.create_alarm(alarm_fixture(name="nbarfoo", action="abarfoo")) + conn.create_alarm(alarm_fixture(name="nbazfoo", action="abazfoo")) + + alarms = conn.describe_alarms() + alarms.should.have.length_of(4) + alarms = conn.describe_alarms(alarm_name_prefix="nfoo") + alarms.should.have.length_of(2) + alarms = conn.describe_alarms(alarm_names=["nfoobar", "nbarfoo", "nbazfoo"]) + alarms.should.have.length_of(3) + alarms = conn.describe_alarms(action_prefix="afoo") + alarms.should.have.length_of(2) + + for alarm in conn.describe_alarms(): + alarm.delete() + + alarms = conn.describe_alarms() + alarms.should.have.length_of(0) diff --git a/tests/test_cognitoidp/test_cognitoidp.py b/tests/test_cognitoidp/test_cognitoidp.py index 7ac1038b0..37e1a56a3 100644 --- a/tests/test_cognitoidp/test_cognitoidp.py +++ b/tests/test_cognitoidp/test_cognitoidp.py @@ -27,6 +27,11 @@ def 
test_create_user_pool(): result["UserPool"]["Id"].should_not.be.none result["UserPool"]["Id"].should.match(r"[\w-]+_[0-9a-zA-Z]+") + result["UserPool"]["Arn"].should.equal( + "arn:aws:cognito-idp:us-west-2:{}:userpool/{}".format( + ACCOUNT_ID, result["UserPool"]["Id"] + ) + ) result["UserPool"]["Name"].should.equal(name) result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value) @@ -911,6 +916,55 @@ def test_admin_create_existing_user(): caught.should.be.true +@mock_cognitoidp +def test_admin_resend_invitation_existing_user(): + conn = boto3.client("cognito-idp", "us-west-2") + + username = str(uuid.uuid4()) + value = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + conn.admin_create_user( + UserPoolId=user_pool_id, + Username=username, + UserAttributes=[{"Name": "thing", "Value": value}], + ) + + caught = False + try: + conn.admin_create_user( + UserPoolId=user_pool_id, + Username=username, + UserAttributes=[{"Name": "thing", "Value": value}], + MessageAction="RESEND", + ) + except conn.exceptions.UsernameExistsException: + caught = True + + caught.should.be.false + + +@mock_cognitoidp +def test_admin_resend_invitation_missing_user(): + conn = boto3.client("cognito-idp", "us-west-2") + + username = str(uuid.uuid4()) + value = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + caught = False + try: + conn.admin_create_user( + UserPoolId=user_pool_id, + Username=username, + UserAttributes=[{"Name": "thing", "Value": value}], + MessageAction="RESEND", + ) + except conn.exceptions.UserNotFoundException: + caught = True + + caught.should.be.true + + @mock_cognitoidp def test_admin_get_user(): conn = boto3.client("cognito-idp", "us-west-2") @@ -958,6 +1012,18 @@ def test_list_users(): result["Users"].should.have.length_of(1) result["Users"][0]["Username"].should.equal(username) + username_bis = str(uuid.uuid4()) + conn.admin_create_user( + 
UserPoolId=user_pool_id, + Username=username_bis, + UserAttributes=[{"Name": "phone_number", "Value": "+33666666666"}], + ) + result = conn.list_users( + UserPoolId=user_pool_id, Filter='phone_number="+33666666666' + ) + result["Users"].should.have.length_of(1) + result["Users"][0]["Username"].should.equal(username_bis) + @mock_cognitoidp def test_list_users_returns_limit_items(): @@ -1142,11 +1208,13 @@ def test_token_legitimacy(): id_claims = json.loads(jws.verify(id_token, json_web_key, "RS256")) id_claims["iss"].should.equal(issuer) id_claims["aud"].should.equal(client_id) + id_claims["token_use"].should.equal("id") + for k, v in outputs["additional_fields"].items(): + id_claims[k].should.equal(v) access_claims = json.loads(jws.verify(access_token, json_web_key, "RS256")) access_claims["iss"].should.equal(issuer) access_claims["aud"].should.equal(client_id) - for k, v in outputs["additional_fields"].items(): - access_claims[k].should.equal(v) + access_claims["token_use"].should.equal("access") @mock_cognitoidp diff --git a/tests/test_core/test_server.py b/tests/test_core/test_server.py index 5514223af..205a2ad0f 100644 --- a/tests/test_core/test_server.py +++ b/tests/test_core/test_server.py @@ -46,4 +46,4 @@ def test_domain_dispatched_with_service(): dispatcher = DomainDispatcherApplication(create_backend_app, service="s3") backend_app = dispatcher.get_application({"HTTP_HOST": "s3.us-east1.amazonaws.com"}) keys = set(backend_app.view_functions.keys()) - keys.should.contain("ResponseObject.key_response") + keys.should.contain("ResponseObject.key_or_control_response") diff --git a/tests/test_datapipeline/test_datapipeline.py b/tests/test_datapipeline/test_datapipeline.py index 42063b506..b540d120e 100644 --- a/tests/test_datapipeline/test_datapipeline.py +++ b/tests/test_datapipeline/test_datapipeline.py @@ -1,182 +1,182 @@ -from __future__ import unicode_literals - -import boto.datapipeline -import sure # noqa - -from moto import mock_datapipeline_deprecated 
-from moto.datapipeline.utils import remove_capitalization_of_dict_keys - - -def get_value_from_fields(key, fields): - for field in fields: - if field["key"] == key: - return field["stringValue"] - - -@mock_datapipeline_deprecated -def test_create_pipeline(): - conn = boto.datapipeline.connect_to_region("us-west-2") - - res = conn.create_pipeline("mypipeline", "some-unique-id") - - pipeline_id = res["pipelineId"] - pipeline_descriptions = conn.describe_pipelines([pipeline_id])[ - "pipelineDescriptionList" - ] - pipeline_descriptions.should.have.length_of(1) - - pipeline_description = pipeline_descriptions[0] - pipeline_description["name"].should.equal("mypipeline") - pipeline_description["pipelineId"].should.equal(pipeline_id) - fields = pipeline_description["fields"] - - get_value_from_fields("@pipelineState", fields).should.equal("PENDING") - get_value_from_fields("uniqueId", fields).should.equal("some-unique-id") - - -PIPELINE_OBJECTS = [ - { - "id": "Default", - "name": "Default", - "fields": [{"key": "workerGroup", "stringValue": "workerGroup"}], - }, - { - "id": "Schedule", - "name": "Schedule", - "fields": [ - {"key": "startDateTime", "stringValue": "2012-12-12T00:00:00"}, - {"key": "type", "stringValue": "Schedule"}, - {"key": "period", "stringValue": "1 hour"}, - {"key": "endDateTime", "stringValue": "2012-12-21T18:00:00"}, - ], - }, - { - "id": "SayHello", - "name": "SayHello", - "fields": [ - {"key": "type", "stringValue": "ShellCommandActivity"}, - {"key": "command", "stringValue": "echo hello"}, - {"key": "parent", "refValue": "Default"}, - {"key": "schedule", "refValue": "Schedule"}, - ], - }, -] - - -@mock_datapipeline_deprecated -def test_creating_pipeline_definition(): - conn = boto.datapipeline.connect_to_region("us-west-2") - res = conn.create_pipeline("mypipeline", "some-unique-id") - pipeline_id = res["pipelineId"] - - conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id) - - pipeline_definition = conn.get_pipeline_definition(pipeline_id) 
- pipeline_definition["pipelineObjects"].should.have.length_of(3) - default_object = pipeline_definition["pipelineObjects"][0] - default_object["name"].should.equal("Default") - default_object["id"].should.equal("Default") - default_object["fields"].should.equal( - [{"key": "workerGroup", "stringValue": "workerGroup"}] - ) - - -@mock_datapipeline_deprecated -def test_describing_pipeline_objects(): - conn = boto.datapipeline.connect_to_region("us-west-2") - res = conn.create_pipeline("mypipeline", "some-unique-id") - pipeline_id = res["pipelineId"] - - conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id) - - objects = conn.describe_objects(["Schedule", "Default"], pipeline_id)[ - "pipelineObjects" - ] - - objects.should.have.length_of(2) - default_object = [x for x in objects if x["id"] == "Default"][0] - default_object["name"].should.equal("Default") - default_object["fields"].should.equal( - [{"key": "workerGroup", "stringValue": "workerGroup"}] - ) - - -@mock_datapipeline_deprecated -def test_activate_pipeline(): - conn = boto.datapipeline.connect_to_region("us-west-2") - - res = conn.create_pipeline("mypipeline", "some-unique-id") - - pipeline_id = res["pipelineId"] - conn.activate_pipeline(pipeline_id) - - pipeline_descriptions = conn.describe_pipelines([pipeline_id])[ - "pipelineDescriptionList" - ] - pipeline_descriptions.should.have.length_of(1) - pipeline_description = pipeline_descriptions[0] - fields = pipeline_description["fields"] - - get_value_from_fields("@pipelineState", fields).should.equal("SCHEDULED") - - -@mock_datapipeline_deprecated -def test_delete_pipeline(): - conn = boto.datapipeline.connect_to_region("us-west-2") - res = conn.create_pipeline("mypipeline", "some-unique-id") - pipeline_id = res["pipelineId"] - - conn.delete_pipeline(pipeline_id) - - response = conn.list_pipelines() - - response["pipelineIdList"].should.have.length_of(0) - - -@mock_datapipeline_deprecated -def test_listing_pipelines(): - conn = 
boto.datapipeline.connect_to_region("us-west-2") - res1 = conn.create_pipeline("mypipeline1", "some-unique-id1") - res2 = conn.create_pipeline("mypipeline2", "some-unique-id2") - - response = conn.list_pipelines() - - response["hasMoreResults"].should.be(False) - response["marker"].should.be.none - response["pipelineIdList"].should.have.length_of(2) - response["pipelineIdList"].should.contain( - {"id": res1["pipelineId"], "name": "mypipeline1"} - ) - response["pipelineIdList"].should.contain( - {"id": res2["pipelineId"], "name": "mypipeline2"} - ) - - -@mock_datapipeline_deprecated -def test_listing_paginated_pipelines(): - conn = boto.datapipeline.connect_to_region("us-west-2") - for i in range(100): - conn.create_pipeline("mypipeline%d" % i, "some-unique-id%d" % i) - - response = conn.list_pipelines() - - response["hasMoreResults"].should.be(True) - response["marker"].should.equal(response["pipelineIdList"][-1]["id"]) - response["pipelineIdList"].should.have.length_of(50) - - -# testing a helper function -def test_remove_capitalization_of_dict_keys(): - result = remove_capitalization_of_dict_keys( - { - "Id": "IdValue", - "Fields": [{"Key": "KeyValue", "StringValue": "StringValueValue"}], - } - ) - - result.should.equal( - { - "id": "IdValue", - "fields": [{"key": "KeyValue", "stringValue": "StringValueValue"}], - } - ) +from __future__ import unicode_literals + +import boto.datapipeline +import sure # noqa + +from moto import mock_datapipeline_deprecated +from moto.datapipeline.utils import remove_capitalization_of_dict_keys + + +def get_value_from_fields(key, fields): + for field in fields: + if field["key"] == key: + return field["stringValue"] + + +@mock_datapipeline_deprecated +def test_create_pipeline(): + conn = boto.datapipeline.connect_to_region("us-west-2") + + res = conn.create_pipeline("mypipeline", "some-unique-id") + + pipeline_id = res["pipelineId"] + pipeline_descriptions = conn.describe_pipelines([pipeline_id])[ + "pipelineDescriptionList" + ] + 
pipeline_descriptions.should.have.length_of(1) + + pipeline_description = pipeline_descriptions[0] + pipeline_description["name"].should.equal("mypipeline") + pipeline_description["pipelineId"].should.equal(pipeline_id) + fields = pipeline_description["fields"] + + get_value_from_fields("@pipelineState", fields).should.equal("PENDING") + get_value_from_fields("uniqueId", fields).should.equal("some-unique-id") + + +PIPELINE_OBJECTS = [ + { + "id": "Default", + "name": "Default", + "fields": [{"key": "workerGroup", "stringValue": "workerGroup"}], + }, + { + "id": "Schedule", + "name": "Schedule", + "fields": [ + {"key": "startDateTime", "stringValue": "2012-12-12T00:00:00"}, + {"key": "type", "stringValue": "Schedule"}, + {"key": "period", "stringValue": "1 hour"}, + {"key": "endDateTime", "stringValue": "2012-12-21T18:00:00"}, + ], + }, + { + "id": "SayHello", + "name": "SayHello", + "fields": [ + {"key": "type", "stringValue": "ShellCommandActivity"}, + {"key": "command", "stringValue": "echo hello"}, + {"key": "parent", "refValue": "Default"}, + {"key": "schedule", "refValue": "Schedule"}, + ], + }, +] + + +@mock_datapipeline_deprecated +def test_creating_pipeline_definition(): + conn = boto.datapipeline.connect_to_region("us-west-2") + res = conn.create_pipeline("mypipeline", "some-unique-id") + pipeline_id = res["pipelineId"] + + conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id) + + pipeline_definition = conn.get_pipeline_definition(pipeline_id) + pipeline_definition["pipelineObjects"].should.have.length_of(3) + default_object = pipeline_definition["pipelineObjects"][0] + default_object["name"].should.equal("Default") + default_object["id"].should.equal("Default") + default_object["fields"].should.equal( + [{"key": "workerGroup", "stringValue": "workerGroup"}] + ) + + +@mock_datapipeline_deprecated +def test_describing_pipeline_objects(): + conn = boto.datapipeline.connect_to_region("us-west-2") + res = conn.create_pipeline("mypipeline", 
"some-unique-id") + pipeline_id = res["pipelineId"] + + conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id) + + objects = conn.describe_objects(["Schedule", "Default"], pipeline_id)[ + "pipelineObjects" + ] + + objects.should.have.length_of(2) + default_object = [x for x in objects if x["id"] == "Default"][0] + default_object["name"].should.equal("Default") + default_object["fields"].should.equal( + [{"key": "workerGroup", "stringValue": "workerGroup"}] + ) + + +@mock_datapipeline_deprecated +def test_activate_pipeline(): + conn = boto.datapipeline.connect_to_region("us-west-2") + + res = conn.create_pipeline("mypipeline", "some-unique-id") + + pipeline_id = res["pipelineId"] + conn.activate_pipeline(pipeline_id) + + pipeline_descriptions = conn.describe_pipelines([pipeline_id])[ + "pipelineDescriptionList" + ] + pipeline_descriptions.should.have.length_of(1) + pipeline_description = pipeline_descriptions[0] + fields = pipeline_description["fields"] + + get_value_from_fields("@pipelineState", fields).should.equal("SCHEDULED") + + +@mock_datapipeline_deprecated +def test_delete_pipeline(): + conn = boto.datapipeline.connect_to_region("us-west-2") + res = conn.create_pipeline("mypipeline", "some-unique-id") + pipeline_id = res["pipelineId"] + + conn.delete_pipeline(pipeline_id) + + response = conn.list_pipelines() + + response["pipelineIdList"].should.have.length_of(0) + + +@mock_datapipeline_deprecated +def test_listing_pipelines(): + conn = boto.datapipeline.connect_to_region("us-west-2") + res1 = conn.create_pipeline("mypipeline1", "some-unique-id1") + res2 = conn.create_pipeline("mypipeline2", "some-unique-id2") + + response = conn.list_pipelines() + + response["hasMoreResults"].should.be(False) + response["marker"].should.be.none + response["pipelineIdList"].should.have.length_of(2) + response["pipelineIdList"].should.contain( + {"id": res1["pipelineId"], "name": "mypipeline1"} + ) + response["pipelineIdList"].should.contain( + {"id": res2["pipelineId"], 
"name": "mypipeline2"} + ) + + +@mock_datapipeline_deprecated +def test_listing_paginated_pipelines(): + conn = boto.datapipeline.connect_to_region("us-west-2") + for i in range(100): + conn.create_pipeline("mypipeline%d" % i, "some-unique-id%d" % i) + + response = conn.list_pipelines() + + response["hasMoreResults"].should.be(True) + response["marker"].should.equal(response["pipelineIdList"][-1]["id"]) + response["pipelineIdList"].should.have.length_of(50) + + +# testing a helper function +def test_remove_capitalization_of_dict_keys(): + result = remove_capitalization_of_dict_keys( + { + "Id": "IdValue", + "Fields": [{"Key": "KeyValue", "StringValue": "StringValueValue"}], + } + ) + + result.should.equal( + { + "id": "IdValue", + "fields": [{"key": "KeyValue", "stringValue": "StringValueValue"}], + } + ) diff --git a/tests/test_dynamodb/test_dynamodb_table_with_range_key.py b/tests/test_dynamodb/test_dynamodb_table_with_range_key.py index 40301025f..6986ae9b3 100644 --- a/tests/test_dynamodb/test_dynamodb_table_with_range_key.py +++ b/tests/test_dynamodb/test_dynamodb_table_with_range_key.py @@ -1,470 +1,470 @@ -from __future__ import unicode_literals - -import boto -import sure # noqa -from freezegun import freeze_time - -from moto import mock_dynamodb_deprecated - -from boto.dynamodb import condition -from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError, DynamoDBValidationError -from boto.exception import DynamoDBResponseError - - -def create_table(conn): - message_table_schema = conn.create_schema( - hash_key_name="forum_name", - hash_key_proto_value=str, - range_key_name="subject", - range_key_proto_value=str, - ) - - table = conn.create_table( - name="messages", schema=message_table_schema, read_units=10, write_units=10 - ) - return table - - -@freeze_time("2012-01-14") -@mock_dynamodb_deprecated -def test_create_table(): - conn = boto.connect_dynamodb() - create_table(conn) - - expected = { - "Table": { - "CreationDateTime": 1326499200.0, - 
"ItemCount": 0, - "KeySchema": { - "HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"}, - "RangeKeyElement": {"AttributeName": "subject", "AttributeType": "S"}, - }, - "ProvisionedThroughput": { - "ReadCapacityUnits": 10, - "WriteCapacityUnits": 10, - }, - "TableName": "messages", - "TableSizeBytes": 0, - "TableStatus": "ACTIVE", - } - } - conn.describe_table("messages").should.equal(expected) - - -@mock_dynamodb_deprecated -def test_delete_table(): - conn = boto.connect_dynamodb() - create_table(conn) - conn.list_tables().should.have.length_of(1) - - conn.layer1.delete_table("messages") - conn.list_tables().should.have.length_of(0) - - conn.layer1.delete_table.when.called_with("messages").should.throw( - DynamoDBResponseError - ) - - -@mock_dynamodb_deprecated -def test_update_table_throughput(): - conn = boto.connect_dynamodb() - table = create_table(conn) - table.read_units.should.equal(10) - table.write_units.should.equal(10) - - table.update_throughput(5, 6) - table.refresh() - - table.read_units.should.equal(5) - table.write_units.should.equal(6) - - -@mock_dynamodb_deprecated -def test_item_add_and_describe_and_update(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item( - hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data - ) - item.put() - - table.has_item("LOLCat Forum", "Check this out!").should.equal(True) - - returned_item = table.get_item( - hash_key="LOLCat Forum", - range_key="Check this out!", - attributes_to_get=["Body", "SentBy"], - ) - dict(returned_item).should.equal( - { - "forum_name": "LOLCat Forum", - "subject": "Check this out!", - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - } - ) - - item["SentBy"] = "User B" - item.put() - - returned_item = table.get_item( - hash_key="LOLCat Forum", - range_key="Check this out!", - 
attributes_to_get=["Body", "SentBy"], - ) - dict(returned_item).should.equal( - { - "forum_name": "LOLCat Forum", - "subject": "Check this out!", - "Body": "http://url_to_lolcat.gif", - "SentBy": "User B", - } - ) - - -@mock_dynamodb_deprecated -def test_item_put_without_table(): - conn = boto.connect_dynamodb() - - conn.layer1.put_item.when.called_with( - table_name="undeclared-table", - item=dict(hash_key="LOLCat Forum", range_key="Check this out!"), - ).should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_get_missing_item(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - table.get_item.when.called_with(hash_key="tester", range_key="other").should.throw( - DynamoDBKeyNotFoundError - ) - table.has_item("foobar", "more").should.equal(False) - - -@mock_dynamodb_deprecated -def test_get_item_with_undeclared_table(): - conn = boto.connect_dynamodb() - - conn.layer1.get_item.when.called_with( - table_name="undeclared-table", - key={"HashKeyElement": {"S": "tester"}, "RangeKeyElement": {"S": "test-range"}}, - ).should.throw(DynamoDBKeyNotFoundError) - - -@mock_dynamodb_deprecated -def test_get_item_without_range_key(): - conn = boto.connect_dynamodb() - message_table_schema = conn.create_schema( - hash_key_name="test_hash", - hash_key_proto_value=int, - range_key_name="test_range", - range_key_proto_value=int, - ) - table = conn.create_table( - name="messages", schema=message_table_schema, read_units=10, write_units=10 - ) - - hash_key = 3241526475 - range_key = 1234567890987 - new_item = table.new_item(hash_key=hash_key, range_key=range_key) - new_item.put() - - table.get_item.when.called_with(hash_key=hash_key).should.throw( - DynamoDBValidationError - ) - - -@mock_dynamodb_deprecated -def test_delete_item(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item( - 
hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data - ) - item.put() - - table.refresh() - table.item_count.should.equal(1) - - response = item.delete() - response.should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5}) - table.refresh() - table.item_count.should.equal(0) - - item.delete.when.called_with().should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_delete_item_with_attribute_response(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item( - hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data - ) - item.put() - - table.refresh() - table.item_count.should.equal(1) - - response = item.delete(return_values="ALL_OLD") - response.should.equal( - { - "Attributes": { - "Body": "http://url_to_lolcat.gif", - "forum_name": "LOLCat Forum", - "ReceivedTime": "12/9/2011 11:36:03 PM", - "SentBy": "User A", - "subject": "Check this out!", - }, - "ConsumedCapacityUnits": 0.5, - } - ) - table.refresh() - table.item_count.should.equal(0) - - item.delete.when.called_with().should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_delete_item_with_undeclared_table(): - conn = boto.connect_dynamodb() - - conn.layer1.delete_item.when.called_with( - table_name="undeclared-table", - key={"HashKeyElement": {"S": "tester"}, "RangeKeyElement": {"S": "test-range"}}, - ).should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_query(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data) - item.put() - - item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data) - item.put() - - item = 
table.new_item(hash_key="the-key", range_key="789", attrs=item_data) - item.put() - - results = table.query(hash_key="the-key", range_key_condition=condition.GT("1")) - results.response["Items"].should.have.length_of(3) - - results = table.query(hash_key="the-key", range_key_condition=condition.GT("234")) - results.response["Items"].should.have.length_of(2) - - results = table.query(hash_key="the-key", range_key_condition=condition.GT("9999")) - results.response["Items"].should.have.length_of(0) - - results = table.query( - hash_key="the-key", range_key_condition=condition.CONTAINS("12") - ) - results.response["Items"].should.have.length_of(1) - - results = table.query( - hash_key="the-key", range_key_condition=condition.BEGINS_WITH("7") - ) - results.response["Items"].should.have.length_of(1) - - results = table.query( - hash_key="the-key", range_key_condition=condition.BETWEEN("567", "890") - ) - results.response["Items"].should.have.length_of(1) - - -@mock_dynamodb_deprecated -def test_query_with_undeclared_table(): - conn = boto.connect_dynamodb() - - conn.layer1.query.when.called_with( - table_name="undeclared-table", - hash_key_value={"S": "the-key"}, - range_key_conditions={ - "AttributeValueList": [{"S": "User B"}], - "ComparisonOperator": "EQ", - }, - ).should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_scan(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data) - item.put() - - item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data) - item.put() - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User B", - "ReceivedTime": "12/9/2011 11:36:03 PM", - "Ids": set([1, 2, 3]), - "PK": 7, - } - item = table.new_item(hash_key="the-key", range_key="789", attrs=item_data) - item.put() - - 
results = table.scan() - results.response["Items"].should.have.length_of(3) - - results = table.scan(scan_filter={"SentBy": condition.EQ("User B")}) - results.response["Items"].should.have.length_of(1) - - results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")}) - results.response["Items"].should.have.length_of(3) - - results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)}) - results.response["Items"].should.have.length_of(1) - - results = table.scan(scan_filter={"Ids": condition.NOT_NULL()}) - results.response["Items"].should.have.length_of(1) - - results = table.scan(scan_filter={"Ids": condition.NULL()}) - results.response["Items"].should.have.length_of(2) - - results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)}) - results.response["Items"].should.have.length_of(0) - - results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)}) - results.response["Items"].should.have.length_of(1) - - -@mock_dynamodb_deprecated -def test_scan_with_undeclared_table(): - conn = boto.connect_dynamodb() - - conn.layer1.scan.when.called_with( - table_name="undeclared-table", - scan_filter={ - "SentBy": { - "AttributeValueList": [{"S": "User B"}], - "ComparisonOperator": "EQ", - } - }, - ).should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_scan_after_has_item(): - conn = boto.connect_dynamodb() - table = create_table(conn) - list(table.scan()).should.equal([]) - - table.has_item(hash_key="the-key", range_key="123") - - list(table.scan()).should.equal([]) - - -@mock_dynamodb_deprecated -def test_write_batch(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - batch_list = conn.new_batch_write_list() - - items = [] - items.append( - table.new_item( - hash_key="the-key", - range_key="123", - attrs={ - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - }, - ) - ) - - items.append( - table.new_item( - hash_key="the-key", - range_key="789", - attrs={ - 
"Body": "http://url_to_lolcat.gif", - "SentBy": "User B", - "ReceivedTime": "12/9/2011 11:36:03 PM", - "Ids": set([1, 2, 3]), - "PK": 7, - }, - ) - ) - - batch_list.add_batch(table, puts=items) - conn.batch_write_item(batch_list) - - table.refresh() - table.item_count.should.equal(2) - - batch_list = conn.new_batch_write_list() - batch_list.add_batch(table, deletes=[("the-key", "789")]) - conn.batch_write_item(batch_list) - - table.refresh() - table.item_count.should.equal(1) - - -@mock_dynamodb_deprecated -def test_batch_read(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data) - item.put() - - item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data) - item.put() - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User B", - "ReceivedTime": "12/9/2011 11:36:03 PM", - "Ids": set([1, 2, 3]), - "PK": 7, - } - item = table.new_item(hash_key="another-key", range_key="789", attrs=item_data) - item.put() - - items = table.batch_get_item([("the-key", "123"), ("another-key", "789")]) - # Iterate through so that batch_item gets called - count = len([x for x in items]) - count.should.equal(2) +from __future__ import unicode_literals + +import boto +import sure # noqa +from freezegun import freeze_time + +from moto import mock_dynamodb_deprecated + +from boto.dynamodb import condition +from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError, DynamoDBValidationError +from boto.exception import DynamoDBResponseError + + +def create_table(conn): + message_table_schema = conn.create_schema( + hash_key_name="forum_name", + hash_key_proto_value=str, + range_key_name="subject", + range_key_proto_value=str, + ) + + table = conn.create_table( + name="messages", schema=message_table_schema, read_units=10, write_units=10 + ) + return 
table + + +@freeze_time("2012-01-14") +@mock_dynamodb_deprecated +def test_create_table(): + conn = boto.connect_dynamodb() + create_table(conn) + + expected = { + "Table": { + "CreationDateTime": 1326499200.0, + "ItemCount": 0, + "KeySchema": { + "HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"}, + "RangeKeyElement": {"AttributeName": "subject", "AttributeType": "S"}, + }, + "ProvisionedThroughput": { + "ReadCapacityUnits": 10, + "WriteCapacityUnits": 10, + }, + "TableName": "messages", + "TableSizeBytes": 0, + "TableStatus": "ACTIVE", + } + } + conn.describe_table("messages").should.equal(expected) + + +@mock_dynamodb_deprecated +def test_delete_table(): + conn = boto.connect_dynamodb() + create_table(conn) + conn.list_tables().should.have.length_of(1) + + conn.layer1.delete_table("messages") + conn.list_tables().should.have.length_of(0) + + conn.layer1.delete_table.when.called_with("messages").should.throw( + DynamoDBResponseError + ) + + +@mock_dynamodb_deprecated +def test_update_table_throughput(): + conn = boto.connect_dynamodb() + table = create_table(conn) + table.read_units.should.equal(10) + table.write_units.should.equal(10) + + table.update_throughput(5, 6) + table.refresh() + + table.read_units.should.equal(5) + table.write_units.should.equal(6) + + +@mock_dynamodb_deprecated +def test_item_add_and_describe_and_update(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item( + hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data + ) + item.put() + + table.has_item("LOLCat Forum", "Check this out!").should.equal(True) + + returned_item = table.get_item( + hash_key="LOLCat Forum", + range_key="Check this out!", + attributes_to_get=["Body", "SentBy"], + ) + dict(returned_item).should.equal( + { + "forum_name": "LOLCat Forum", + "subject": "Check this out!", + 
"Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + } + ) + + item["SentBy"] = "User B" + item.put() + + returned_item = table.get_item( + hash_key="LOLCat Forum", + range_key="Check this out!", + attributes_to_get=["Body", "SentBy"], + ) + dict(returned_item).should.equal( + { + "forum_name": "LOLCat Forum", + "subject": "Check this out!", + "Body": "http://url_to_lolcat.gif", + "SentBy": "User B", + } + ) + + +@mock_dynamodb_deprecated +def test_item_put_without_table(): + conn = boto.connect_dynamodb() + + conn.layer1.put_item.when.called_with( + table_name="undeclared-table", + item=dict(hash_key="LOLCat Forum", range_key="Check this out!"), + ).should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_get_missing_item(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + table.get_item.when.called_with(hash_key="tester", range_key="other").should.throw( + DynamoDBKeyNotFoundError + ) + table.has_item("foobar", "more").should.equal(False) + + +@mock_dynamodb_deprecated +def test_get_item_with_undeclared_table(): + conn = boto.connect_dynamodb() + + conn.layer1.get_item.when.called_with( + table_name="undeclared-table", + key={"HashKeyElement": {"S": "tester"}, "RangeKeyElement": {"S": "test-range"}}, + ).should.throw(DynamoDBKeyNotFoundError) + + +@mock_dynamodb_deprecated +def test_get_item_without_range_key(): + conn = boto.connect_dynamodb() + message_table_schema = conn.create_schema( + hash_key_name="test_hash", + hash_key_proto_value=int, + range_key_name="test_range", + range_key_proto_value=int, + ) + table = conn.create_table( + name="messages", schema=message_table_schema, read_units=10, write_units=10 + ) + + hash_key = 3241526475 + range_key = 1234567890987 + new_item = table.new_item(hash_key=hash_key, range_key=range_key) + new_item.put() + + table.get_item.when.called_with(hash_key=hash_key).should.throw( + DynamoDBValidationError + ) + + +@mock_dynamodb_deprecated +def test_delete_item(): + conn = 
boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item( + hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data + ) + item.put() + + table.refresh() + table.item_count.should.equal(1) + + response = item.delete() + response.should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5}) + table.refresh() + table.item_count.should.equal(0) + + item.delete.when.called_with().should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_delete_item_with_attribute_response(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item( + hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data + ) + item.put() + + table.refresh() + table.item_count.should.equal(1) + + response = item.delete(return_values="ALL_OLD") + response.should.equal( + { + "Attributes": { + "Body": "http://url_to_lolcat.gif", + "forum_name": "LOLCat Forum", + "ReceivedTime": "12/9/2011 11:36:03 PM", + "SentBy": "User A", + "subject": "Check this out!", + }, + "ConsumedCapacityUnits": 0.5, + } + ) + table.refresh() + table.item_count.should.equal(0) + + item.delete.when.called_with().should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_delete_item_with_undeclared_table(): + conn = boto.connect_dynamodb() + + conn.layer1.delete_item.when.called_with( + table_name="undeclared-table", + key={"HashKeyElement": {"S": "tester"}, "RangeKeyElement": {"S": "test-range"}}, + ).should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_query(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + 
item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data) + item.put() + + item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data) + item.put() + + item = table.new_item(hash_key="the-key", range_key="789", attrs=item_data) + item.put() + + results = table.query(hash_key="the-key", range_key_condition=condition.GT("1")) + results.response["Items"].should.have.length_of(3) + + results = table.query(hash_key="the-key", range_key_condition=condition.GT("234")) + results.response["Items"].should.have.length_of(2) + + results = table.query(hash_key="the-key", range_key_condition=condition.GT("9999")) + results.response["Items"].should.have.length_of(0) + + results = table.query( + hash_key="the-key", range_key_condition=condition.CONTAINS("12") + ) + results.response["Items"].should.have.length_of(1) + + results = table.query( + hash_key="the-key", range_key_condition=condition.BEGINS_WITH("7") + ) + results.response["Items"].should.have.length_of(1) + + results = table.query( + hash_key="the-key", range_key_condition=condition.BETWEEN("567", "890") + ) + results.response["Items"].should.have.length_of(1) + + +@mock_dynamodb_deprecated +def test_query_with_undeclared_table(): + conn = boto.connect_dynamodb() + + conn.layer1.query.when.called_with( + table_name="undeclared-table", + hash_key_value={"S": "the-key"}, + range_key_conditions={ + "AttributeValueList": [{"S": "User B"}], + "ComparisonOperator": "EQ", + }, + ).should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_scan(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data) + item.put() + + item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data) + item.put() + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": 
"User B", + "ReceivedTime": "12/9/2011 11:36:03 PM", + "Ids": set([1, 2, 3]), + "PK": 7, + } + item = table.new_item(hash_key="the-key", range_key="789", attrs=item_data) + item.put() + + results = table.scan() + results.response["Items"].should.have.length_of(3) + + results = table.scan(scan_filter={"SentBy": condition.EQ("User B")}) + results.response["Items"].should.have.length_of(1) + + results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")}) + results.response["Items"].should.have.length_of(3) + + results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)}) + results.response["Items"].should.have.length_of(1) + + results = table.scan(scan_filter={"Ids": condition.NOT_NULL()}) + results.response["Items"].should.have.length_of(1) + + results = table.scan(scan_filter={"Ids": condition.NULL()}) + results.response["Items"].should.have.length_of(2) + + results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)}) + results.response["Items"].should.have.length_of(0) + + results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)}) + results.response["Items"].should.have.length_of(1) + + +@mock_dynamodb_deprecated +def test_scan_with_undeclared_table(): + conn = boto.connect_dynamodb() + + conn.layer1.scan.when.called_with( + table_name="undeclared-table", + scan_filter={ + "SentBy": { + "AttributeValueList": [{"S": "User B"}], + "ComparisonOperator": "EQ", + } + }, + ).should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_scan_after_has_item(): + conn = boto.connect_dynamodb() + table = create_table(conn) + list(table.scan()).should.equal([]) + + table.has_item(hash_key="the-key", range_key="123") + + list(table.scan()).should.equal([]) + + +@mock_dynamodb_deprecated +def test_write_batch(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + batch_list = conn.new_batch_write_list() + + items = [] + items.append( + table.new_item( + hash_key="the-key", + range_key="123", + attrs={ + "Body": 
"http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + }, + ) + ) + + items.append( + table.new_item( + hash_key="the-key", + range_key="789", + attrs={ + "Body": "http://url_to_lolcat.gif", + "SentBy": "User B", + "ReceivedTime": "12/9/2011 11:36:03 PM", + "Ids": set([1, 2, 3]), + "PK": 7, + }, + ) + ) + + batch_list.add_batch(table, puts=items) + conn.batch_write_item(batch_list) + + table.refresh() + table.item_count.should.equal(2) + + batch_list = conn.new_batch_write_list() + batch_list.add_batch(table, deletes=[("the-key", "789")]) + conn.batch_write_item(batch_list) + + table.refresh() + table.item_count.should.equal(1) + + +@mock_dynamodb_deprecated +def test_batch_read(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data) + item.put() + + item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data) + item.put() + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User B", + "ReceivedTime": "12/9/2011 11:36:03 PM", + "Ids": set([1, 2, 3]), + "PK": 7, + } + item = table.new_item(hash_key="another-key", range_key="789", attrs=item_data) + item.put() + + items = table.batch_get_item([("the-key", "123"), ("another-key", "789")]) + # Iterate through so that batch_item gets called + count = len([x for x in items]) + count.should.equal(2) diff --git a/tests/test_dynamodb/test_dynamodb_table_without_range_key.py b/tests/test_dynamodb/test_dynamodb_table_without_range_key.py index e5a268c97..c5031b5d1 100644 --- a/tests/test_dynamodb/test_dynamodb_table_without_range_key.py +++ b/tests/test_dynamodb/test_dynamodb_table_without_range_key.py @@ -1,390 +1,390 @@ -from __future__ import unicode_literals - -import boto -import sure # noqa -from freezegun import freeze_time - -from moto 
import mock_dynamodb_deprecated - -from boto.dynamodb import condition -from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError -from boto.exception import DynamoDBResponseError - - -def create_table(conn): - message_table_schema = conn.create_schema( - hash_key_name="forum_name", hash_key_proto_value=str - ) - - table = conn.create_table( - name="messages", schema=message_table_schema, read_units=10, write_units=10 - ) - return table - - -@freeze_time("2012-01-14") -@mock_dynamodb_deprecated -def test_create_table(): - conn = boto.connect_dynamodb() - create_table(conn) - - expected = { - "Table": { - "CreationDateTime": 1326499200.0, - "ItemCount": 0, - "KeySchema": { - "HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"} - }, - "ProvisionedThroughput": { - "ReadCapacityUnits": 10, - "WriteCapacityUnits": 10, - }, - "TableName": "messages", - "TableSizeBytes": 0, - "TableStatus": "ACTIVE", - } - } - conn.describe_table("messages").should.equal(expected) - - -@mock_dynamodb_deprecated -def test_delete_table(): - conn = boto.connect_dynamodb() - create_table(conn) - conn.list_tables().should.have.length_of(1) - - conn.layer1.delete_table("messages") - conn.list_tables().should.have.length_of(0) - - conn.layer1.delete_table.when.called_with("messages").should.throw( - DynamoDBResponseError - ) - - -@mock_dynamodb_deprecated -def test_update_table_throughput(): - conn = boto.connect_dynamodb() - table = create_table(conn) - table.read_units.should.equal(10) - table.write_units.should.equal(10) - - table.update_throughput(5, 6) - table.refresh() - - table.read_units.should.equal(5) - table.write_units.should.equal(6) - - -@mock_dynamodb_deprecated -def test_item_add_and_describe_and_update(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="LOLCat Forum", attrs=item_data) - 
item.put() - - returned_item = table.get_item( - hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"] - ) - dict(returned_item).should.equal( - { - "forum_name": "LOLCat Forum", - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - } - ) - - item["SentBy"] = "User B" - item.put() - - returned_item = table.get_item( - hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"] - ) - dict(returned_item).should.equal( - { - "forum_name": "LOLCat Forum", - "Body": "http://url_to_lolcat.gif", - "SentBy": "User B", - } - ) - - -@mock_dynamodb_deprecated -def test_item_put_without_table(): - conn = boto.connect_dynamodb() - - conn.layer1.put_item.when.called_with( - table_name="undeclared-table", item=dict(hash_key="LOLCat Forum") - ).should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_get_missing_item(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - table.get_item.when.called_with(hash_key="tester").should.throw( - DynamoDBKeyNotFoundError - ) - - -@mock_dynamodb_deprecated -def test_get_item_with_undeclared_table(): - conn = boto.connect_dynamodb() - - conn.layer1.get_item.when.called_with( - table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}} - ).should.throw(DynamoDBKeyNotFoundError) - - -@mock_dynamodb_deprecated -def test_delete_item(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="LOLCat Forum", attrs=item_data) - item.put() - - table.refresh() - table.item_count.should.equal(1) - - response = item.delete() - response.should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5}) - table.refresh() - table.item_count.should.equal(0) - - item.delete.when.called_with().should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_delete_item_with_attribute_response(): - conn = boto.connect_dynamodb() 
- table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="LOLCat Forum", attrs=item_data) - item.put() - - table.refresh() - table.item_count.should.equal(1) - - response = item.delete(return_values="ALL_OLD") - response.should.equal( - { - "Attributes": { - "Body": "http://url_to_lolcat.gif", - "forum_name": "LOLCat Forum", - "ReceivedTime": "12/9/2011 11:36:03 PM", - "SentBy": "User A", - }, - "ConsumedCapacityUnits": 0.5, - } - ) - table.refresh() - table.item_count.should.equal(0) - - item.delete.when.called_with().should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_delete_item_with_undeclared_table(): - conn = boto.connect_dynamodb() - - conn.layer1.delete_item.when.called_with( - table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}} - ).should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_query(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="the-key", attrs=item_data) - item.put() - - results = table.query(hash_key="the-key") - results.response["Items"].should.have.length_of(1) - - -@mock_dynamodb_deprecated -def test_query_with_undeclared_table(): - conn = boto.connect_dynamodb() - - conn.layer1.query.when.called_with( - table_name="undeclared-table", hash_key_value={"S": "the-key"} - ).should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_scan(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="the-key", attrs=item_data) - item.put() - - item = table.new_item(hash_key="the-key2", attrs=item_data) - 
item.put() - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User B", - "ReceivedTime": "12/9/2011 11:36:03 PM", - "Ids": set([1, 2, 3]), - "PK": 7, - } - item = table.new_item(hash_key="the-key3", attrs=item_data) - item.put() - - results = table.scan() - results.response["Items"].should.have.length_of(3) - - results = table.scan(scan_filter={"SentBy": condition.EQ("User B")}) - results.response["Items"].should.have.length_of(1) - - results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")}) - results.response["Items"].should.have.length_of(3) - - results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)}) - results.response["Items"].should.have.length_of(1) - - results = table.scan(scan_filter={"Ids": condition.NOT_NULL()}) - results.response["Items"].should.have.length_of(1) - - results = table.scan(scan_filter={"Ids": condition.NULL()}) - results.response["Items"].should.have.length_of(2) - - results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)}) - results.response["Items"].should.have.length_of(0) - - results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)}) - results.response["Items"].should.have.length_of(1) - - -@mock_dynamodb_deprecated -def test_scan_with_undeclared_table(): - conn = boto.connect_dynamodb() - - conn.layer1.scan.when.called_with( - table_name="undeclared-table", - scan_filter={ - "SentBy": { - "AttributeValueList": [{"S": "User B"}], - "ComparisonOperator": "EQ", - } - }, - ).should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_scan_after_has_item(): - conn = boto.connect_dynamodb() - table = create_table(conn) - list(table.scan()).should.equal([]) - - table.has_item("the-key") - - list(table.scan()).should.equal([]) - - -@mock_dynamodb_deprecated -def test_write_batch(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - batch_list = conn.new_batch_write_list() - - items = [] - items.append( - table.new_item( - hash_key="the-key", - attrs={ - 
"Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - }, - ) - ) - - items.append( - table.new_item( - hash_key="the-key2", - attrs={ - "Body": "http://url_to_lolcat.gif", - "SentBy": "User B", - "ReceivedTime": "12/9/2011 11:36:03 PM", - "Ids": set([1, 2, 3]), - "PK": 7, - }, - ) - ) - - batch_list.add_batch(table, puts=items) - conn.batch_write_item(batch_list) - - table.refresh() - table.item_count.should.equal(2) - - batch_list = conn.new_batch_write_list() - batch_list.add_batch(table, deletes=[("the-key")]) - conn.batch_write_item(batch_list) - - table.refresh() - table.item_count.should.equal(1) - - -@mock_dynamodb_deprecated -def test_batch_read(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="the-key1", attrs=item_data) - item.put() - - item = table.new_item(hash_key="the-key2", attrs=item_data) - item.put() - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User B", - "ReceivedTime": "12/9/2011 11:36:03 PM", - "Ids": set([1, 2, 3]), - "PK": 7, - } - item = table.new_item(hash_key="another-key", attrs=item_data) - item.put() - - items = table.batch_get_item([("the-key1"), ("another-key")]) - # Iterate through so that batch_item gets called - count = len([x for x in items]) - count.should.have.equal(2) +from __future__ import unicode_literals + +import boto +import sure # noqa +from freezegun import freeze_time + +from moto import mock_dynamodb_deprecated + +from boto.dynamodb import condition +from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError +from boto.exception import DynamoDBResponseError + + +def create_table(conn): + message_table_schema = conn.create_schema( + hash_key_name="forum_name", hash_key_proto_value=str + ) + + table = conn.create_table( + name="messages", schema=message_table_schema, 
read_units=10, write_units=10 + ) + return table + + +@freeze_time("2012-01-14") +@mock_dynamodb_deprecated +def test_create_table(): + conn = boto.connect_dynamodb() + create_table(conn) + + expected = { + "Table": { + "CreationDateTime": 1326499200.0, + "ItemCount": 0, + "KeySchema": { + "HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"} + }, + "ProvisionedThroughput": { + "ReadCapacityUnits": 10, + "WriteCapacityUnits": 10, + }, + "TableName": "messages", + "TableSizeBytes": 0, + "TableStatus": "ACTIVE", + } + } + conn.describe_table("messages").should.equal(expected) + + +@mock_dynamodb_deprecated +def test_delete_table(): + conn = boto.connect_dynamodb() + create_table(conn) + conn.list_tables().should.have.length_of(1) + + conn.layer1.delete_table("messages") + conn.list_tables().should.have.length_of(0) + + conn.layer1.delete_table.when.called_with("messages").should.throw( + DynamoDBResponseError + ) + + +@mock_dynamodb_deprecated +def test_update_table_throughput(): + conn = boto.connect_dynamodb() + table = create_table(conn) + table.read_units.should.equal(10) + table.write_units.should.equal(10) + + table.update_throughput(5, 6) + table.refresh() + + table.read_units.should.equal(5) + table.write_units.should.equal(6) + + +@mock_dynamodb_deprecated +def test_item_add_and_describe_and_update(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item(hash_key="LOLCat Forum", attrs=item_data) + item.put() + + returned_item = table.get_item( + hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"] + ) + dict(returned_item).should.equal( + { + "forum_name": "LOLCat Forum", + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + } + ) + + item["SentBy"] = "User B" + item.put() + + returned_item = table.get_item( + hash_key="LOLCat Forum", attributes_to_get=["Body", 
"SentBy"] + ) + dict(returned_item).should.equal( + { + "forum_name": "LOLCat Forum", + "Body": "http://url_to_lolcat.gif", + "SentBy": "User B", + } + ) + + +@mock_dynamodb_deprecated +def test_item_put_without_table(): + conn = boto.connect_dynamodb() + + conn.layer1.put_item.when.called_with( + table_name="undeclared-table", item=dict(hash_key="LOLCat Forum") + ).should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_get_missing_item(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + table.get_item.when.called_with(hash_key="tester").should.throw( + DynamoDBKeyNotFoundError + ) + + +@mock_dynamodb_deprecated +def test_get_item_with_undeclared_table(): + conn = boto.connect_dynamodb() + + conn.layer1.get_item.when.called_with( + table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}} + ).should.throw(DynamoDBKeyNotFoundError) + + +@mock_dynamodb_deprecated +def test_delete_item(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item(hash_key="LOLCat Forum", attrs=item_data) + item.put() + + table.refresh() + table.item_count.should.equal(1) + + response = item.delete() + response.should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5}) + table.refresh() + table.item_count.should.equal(0) + + item.delete.when.called_with().should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_delete_item_with_attribute_response(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item(hash_key="LOLCat Forum", attrs=item_data) + item.put() + + table.refresh() + table.item_count.should.equal(1) + + response = item.delete(return_values="ALL_OLD") + response.should.equal( + { + "Attributes": { 
+ "Body": "http://url_to_lolcat.gif", + "forum_name": "LOLCat Forum", + "ReceivedTime": "12/9/2011 11:36:03 PM", + "SentBy": "User A", + }, + "ConsumedCapacityUnits": 0.5, + } + ) + table.refresh() + table.item_count.should.equal(0) + + item.delete.when.called_with().should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_delete_item_with_undeclared_table(): + conn = boto.connect_dynamodb() + + conn.layer1.delete_item.when.called_with( + table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}} + ).should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_query(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item(hash_key="the-key", attrs=item_data) + item.put() + + results = table.query(hash_key="the-key") + results.response["Items"].should.have.length_of(1) + + +@mock_dynamodb_deprecated +def test_query_with_undeclared_table(): + conn = boto.connect_dynamodb() + + conn.layer1.query.when.called_with( + table_name="undeclared-table", hash_key_value={"S": "the-key"} + ).should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_scan(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item(hash_key="the-key", attrs=item_data) + item.put() + + item = table.new_item(hash_key="the-key2", attrs=item_data) + item.put() + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User B", + "ReceivedTime": "12/9/2011 11:36:03 PM", + "Ids": set([1, 2, 3]), + "PK": 7, + } + item = table.new_item(hash_key="the-key3", attrs=item_data) + item.put() + + results = table.scan() + results.response["Items"].should.have.length_of(3) + + results = table.scan(scan_filter={"SentBy": 
condition.EQ("User B")}) + results.response["Items"].should.have.length_of(1) + + results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")}) + results.response["Items"].should.have.length_of(3) + + results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)}) + results.response["Items"].should.have.length_of(1) + + results = table.scan(scan_filter={"Ids": condition.NOT_NULL()}) + results.response["Items"].should.have.length_of(1) + + results = table.scan(scan_filter={"Ids": condition.NULL()}) + results.response["Items"].should.have.length_of(2) + + results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)}) + results.response["Items"].should.have.length_of(0) + + results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)}) + results.response["Items"].should.have.length_of(1) + + +@mock_dynamodb_deprecated +def test_scan_with_undeclared_table(): + conn = boto.connect_dynamodb() + + conn.layer1.scan.when.called_with( + table_name="undeclared-table", + scan_filter={ + "SentBy": { + "AttributeValueList": [{"S": "User B"}], + "ComparisonOperator": "EQ", + } + }, + ).should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_scan_after_has_item(): + conn = boto.connect_dynamodb() + table = create_table(conn) + list(table.scan()).should.equal([]) + + table.has_item("the-key") + + list(table.scan()).should.equal([]) + + +@mock_dynamodb_deprecated +def test_write_batch(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + batch_list = conn.new_batch_write_list() + + items = [] + items.append( + table.new_item( + hash_key="the-key", + attrs={ + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + }, + ) + ) + + items.append( + table.new_item( + hash_key="the-key2", + attrs={ + "Body": "http://url_to_lolcat.gif", + "SentBy": "User B", + "ReceivedTime": "12/9/2011 11:36:03 PM", + "Ids": set([1, 2, 3]), + "PK": 7, + }, + ) + ) + + batch_list.add_batch(table, 
puts=items) + conn.batch_write_item(batch_list) + + table.refresh() + table.item_count.should.equal(2) + + batch_list = conn.new_batch_write_list() + batch_list.add_batch(table, deletes=[("the-key")]) + conn.batch_write_item(batch_list) + + table.refresh() + table.item_count.should.equal(1) + + +@mock_dynamodb_deprecated +def test_batch_read(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item(hash_key="the-key1", attrs=item_data) + item.put() + + item = table.new_item(hash_key="the-key2", attrs=item_data) + item.put() + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User B", + "ReceivedTime": "12/9/2011 11:36:03 PM", + "Ids": set([1, 2, 3]), + "PK": 7, + } + item = table.new_item(hash_key="another-key", attrs=item_data) + item.put() + + items = table.batch_get_item([("the-key1"), ("another-key")]) + # Iterate through so that batch_item gets called + count = len([x for x in items]) + count.should.have.equal(2) diff --git a/tests/test_dynamodb2/test_dynamodb.py b/tests/test_dynamodb2/test_dynamodb.py index 831538054..180f460c0 100644 --- a/tests/test_dynamodb2/test_dynamodb.py +++ b/tests/test_dynamodb2/test_dynamodb.py @@ -1719,6 +1719,32 @@ def test_scan_filter4(): assert response["Count"] == 0 +@mock_dynamodb2 +def test_scan_filter_should_not_return_non_existing_attributes(): + table_name = "my-table" + item = {"partitionKey": "pk-2", "my-attr": 42} + # Create table + res = boto3.resource("dynamodb", region_name="us-east-1") + res.create_table( + TableName=table_name, + KeySchema=[{"AttributeName": "partitionKey", "KeyType": "HASH"}], + AttributeDefinitions=[{"AttributeName": "partitionKey", "AttributeType": "S"}], + BillingMode="PAY_PER_REQUEST", + ) + table = res.Table(table_name) + # Insert items + table.put_item(Item={"partitionKey": "pk-1"}) + table.put_item(Item=item) + # Verify 
a few operations + # Assert we only find the item that has this attribute + table.scan(FilterExpression=Attr("my-attr").lt(43))["Items"].should.equal([item]) + table.scan(FilterExpression=Attr("my-attr").lte(42))["Items"].should.equal([item]) + table.scan(FilterExpression=Attr("my-attr").gte(42))["Items"].should.equal([item]) + table.scan(FilterExpression=Attr("my-attr").gt(41))["Items"].should.equal([item]) + # Sanity check that we can't find the item if the FE is wrong + table.scan(FilterExpression=Attr("my-attr").gt(43))["Items"].should.equal([]) + + @mock_dynamodb2 def test_bad_scan_filter(): client = boto3.client("dynamodb", region_name="us-east-1") @@ -2505,6 +2531,48 @@ def test_condition_expressions(): ) +@mock_dynamodb2 +def test_condition_expression_numerical_attribute(): + dynamodb = boto3.resource("dynamodb", region_name="us-east-1") + dynamodb.create_table( + TableName="my-table", + KeySchema=[{"AttributeName": "partitionKey", "KeyType": "HASH"}], + AttributeDefinitions=[{"AttributeName": "partitionKey", "AttributeType": "S"}], + ) + table = dynamodb.Table("my-table") + table.put_item(Item={"partitionKey": "pk-pos", "myAttr": 5}) + table.put_item(Item={"partitionKey": "pk-neg", "myAttr": -5}) + + # try to update the item we put in the table using numerical condition expression + # Specifically, verify that we can compare with a zero-value + # First verify that > and >= work on positive numbers + update_numerical_con_expr( + key="pk-pos", con_expr="myAttr > :zero", res="6", table=table + ) + update_numerical_con_expr( + key="pk-pos", con_expr="myAttr >= :zero", res="7", table=table + ) + # Second verify that < and <= work on negative numbers + update_numerical_con_expr( + key="pk-neg", con_expr="myAttr < :zero", res="-4", table=table + ) + update_numerical_con_expr( + key="pk-neg", con_expr="myAttr <= :zero", res="-3", table=table + ) + + +def update_numerical_con_expr(key, con_expr, res, table): + table.update_item( + Key={"partitionKey": key}, + 
UpdateExpression="ADD myAttr :one", + ExpressionAttributeValues={":zero": 0, ":one": 1}, + ConditionExpression=con_expr, + ) + table.get_item(Key={"partitionKey": key})["Item"]["myAttr"].should.equal( + Decimal(res) + ) + + @mock_dynamodb2 def test_condition_expression__attr_doesnt_exist(): client = boto3.client("dynamodb", region_name="us-east-1") @@ -3489,6 +3557,83 @@ def test_update_supports_nested_list_append_onto_another_list(): ) +@mock_dynamodb2 +def test_update_supports_list_append_maps(): + client = boto3.client("dynamodb", region_name="us-west-1") + client.create_table( + AttributeDefinitions=[ + {"AttributeName": "id", "AttributeType": "S"}, + {"AttributeName": "rid", "AttributeType": "S"}, + ], + TableName="TestTable", + KeySchema=[ + {"AttributeName": "id", "KeyType": "HASH"}, + {"AttributeName": "rid", "KeyType": "RANGE"}, + ], + ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5}, + ) + client.put_item( + TableName="TestTable", + Item={ + "id": {"S": "nested_list_append"}, + "rid": {"S": "range_key"}, + "a": {"L": [{"M": {"b": {"S": "bar1"}}}]}, + }, + ) + + # Update item using list_append expression + client.update_item( + TableName="TestTable", + Key={"id": {"S": "nested_list_append"}, "rid": {"S": "range_key"}}, + UpdateExpression="SET a = list_append(a, :i)", + ExpressionAttributeValues={":i": {"L": [{"M": {"b": {"S": "bar2"}}}]}}, + ) + + # Verify item is appended to the existing list + result = client.query( + TableName="TestTable", + KeyConditionExpression="id = :i AND begins_with(rid, :r)", + ExpressionAttributeValues={ + ":i": {"S": "nested_list_append"}, + ":r": {"S": "range_key"}, + }, + )["Items"] + result.should.equal( + [ + { + "a": {"L": [{"M": {"b": {"S": "bar1"}}}, {"M": {"b": {"S": "bar2"}}}]}, + "rid": {"S": "range_key"}, + "id": {"S": "nested_list_append"}, + } + ] + ) + + +@mock_dynamodb2 +def test_update_supports_list_append_with_nested_if_not_exists_operation(): + dynamo = boto3.resource("dynamodb", 
region_name="us-west-1") + table_name = "test" + + dynamo.create_table( + TableName=table_name, + AttributeDefinitions=[{"AttributeName": "Id", "AttributeType": "S"}], + KeySchema=[{"AttributeName": "Id", "KeyType": "HASH"}], + ProvisionedThroughput={"ReadCapacityUnits": 20, "WriteCapacityUnits": 20}, + ) + + table = dynamo.Table(table_name) + + table.put_item(Item={"Id": "item-id", "nest1": {"nest2": {}}}) + table.update_item( + Key={"Id": "item-id"}, + UpdateExpression="SET nest1.nest2.event_history = list_append(if_not_exists(nest1.nest2.event_history, :empty_list), :new_value)", + ExpressionAttributeValues={":empty_list": [], ":new_value": ["some_value"]}, + ) + table.get_item(Key={"Id": "item-id"})["Item"].should.equal( + {"Id": "item-id", "nest1": {"nest2": {"event_history": ["some_value"]}}} + ) + + @mock_dynamodb2 def test_update_catches_invalid_list_append_operation(): client = boto3.client("dynamodb", region_name="us-east-1") @@ -3601,3 +3746,24 @@ def test_allow_update_to_item_with_different_type(): table.get_item(Key={"job_id": "b"})["Item"]["job_details"][ "job_name" ].should.be.equal({"nested": "yes"}) + + +@mock_dynamodb2 +def test_query_catches_when_no_filters(): + dynamo = boto3.resource("dynamodb", region_name="eu-central-1") + dynamo.create_table( + AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}], + TableName="origin-rbu-dev", + KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}], + ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1}, + ) + table = dynamo.Table("origin-rbu-dev") + + with assert_raises(ClientError) as ex: + table.query(TableName="original-rbu-dev") + + ex.exception.response["Error"]["Code"].should.equal("ValidationException") + ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400) + ex.exception.response["Error"]["Message"].should.equal( + "Either KeyConditions or QueryFilter should be present" + ) diff --git a/tests/test_ec2/test_account_attributes.py 
b/tests/test_ec2/test_account_attributes.py index a3135f22e..41c71def5 100644 --- a/tests/test_ec2/test_account_attributes.py +++ b/tests/test_ec2/test_account_attributes.py @@ -1,37 +1,37 @@ -from __future__ import unicode_literals -import boto3 -from moto import mock_ec2 -import sure # noqa - - -@mock_ec2 -def test_describe_account_attributes(): - conn = boto3.client("ec2", region_name="us-east-1") - response = conn.describe_account_attributes() - expected_attribute_values = [ - { - "AttributeValues": [{"AttributeValue": "5"}], - "AttributeName": "vpc-max-security-groups-per-interface", - }, - { - "AttributeValues": [{"AttributeValue": "20"}], - "AttributeName": "max-instances", - }, - { - "AttributeValues": [{"AttributeValue": "EC2"}, {"AttributeValue": "VPC"}], - "AttributeName": "supported-platforms", - }, - { - "AttributeValues": [{"AttributeValue": "none"}], - "AttributeName": "default-vpc", - }, - { - "AttributeValues": [{"AttributeValue": "5"}], - "AttributeName": "max-elastic-ips", - }, - { - "AttributeValues": [{"AttributeValue": "5"}], - "AttributeName": "vpc-max-elastic-ips", - }, - ] - response["AccountAttributes"].should.equal(expected_attribute_values) +from __future__ import unicode_literals +import boto3 +from moto import mock_ec2 +import sure # noqa + + +@mock_ec2 +def test_describe_account_attributes(): + conn = boto3.client("ec2", region_name="us-east-1") + response = conn.describe_account_attributes() + expected_attribute_values = [ + { + "AttributeValues": [{"AttributeValue": "5"}], + "AttributeName": "vpc-max-security-groups-per-interface", + }, + { + "AttributeValues": [{"AttributeValue": "20"}], + "AttributeName": "max-instances", + }, + { + "AttributeValues": [{"AttributeValue": "EC2"}, {"AttributeValue": "VPC"}], + "AttributeName": "supported-platforms", + }, + { + "AttributeValues": [{"AttributeValue": "none"}], + "AttributeName": "default-vpc", + }, + { + "AttributeValues": [{"AttributeValue": "5"}], + "AttributeName": 
"max-elastic-ips", + }, + { + "AttributeValues": [{"AttributeValue": "5"}], + "AttributeName": "vpc-max-elastic-ips", + }, + ] + response["AccountAttributes"].should.equal(expected_attribute_values) diff --git a/tests/test_ec2/test_amazon_dev_pay.py b/tests/test_ec2/test_amazon_dev_pay.py index 38e1eb751..1dd9cc74e 100644 --- a/tests/test_ec2/test_amazon_dev_pay.py +++ b/tests/test_ec2/test_amazon_dev_pay.py @@ -1,10 +1,10 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2 - - -@mock_ec2 -def test_amazon_dev_pay(): - pass +from __future__ import unicode_literals +import boto +import sure # noqa + +from moto import mock_ec2 + + +@mock_ec2 +def test_amazon_dev_pay(): + pass diff --git a/tests/test_ec2/test_amis.py b/tests/test_ec2/test_amis.py index f65352c7c..ad432bb78 100644 --- a/tests/test_ec2/test_amis.py +++ b/tests/test_ec2/test_amis.py @@ -12,6 +12,7 @@ import sure # noqa from moto import mock_ec2_deprecated, mock_ec2 from moto.ec2.models import AMIS, OWNER_ID +from moto.iam.models import ACCOUNT_ID from tests.helpers import requires_boto_gte @@ -251,6 +252,19 @@ def test_ami_pulls_attributes_from_instance(): image.kernel_id.should.equal("test-kernel") +@mock_ec2_deprecated +def test_ami_uses_account_id_if_valid_access_key_is_supplied(): + access_key = "AKIAXXXXXXXXXXXXXXXX" + conn = boto.connect_ec2(access_key, "the_secret") + reservation = conn.run_instances("ami-1234abcd") + instance = reservation.instances[0] + instance.modify_attribute("kernel", "test-kernel") + + image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") + images = conn.get_all_images(owners=["self"]) + [(ami.id, ami.owner_id) for ami in images].should.equal([(image_id, ACCOUNT_ID)]) + + @mock_ec2_deprecated def test_ami_filters(): conn = boto.connect_ec2("the_key", "the_secret") @@ -773,7 +787,7 @@ def test_ami_filter_wildcard(): instance.create_image(Name="not-matching-image") my_images = 
ec2_client.describe_images( - Owners=["111122223333"], Filters=[{"Name": "name", "Values": ["test*"]}] + Owners=[ACCOUNT_ID], Filters=[{"Name": "name", "Values": ["test*"]}] )["Images"] my_images.should.have.length_of(1) diff --git a/tests/test_ec2/test_ec2_core.py b/tests/test_ec2/test_ec2_core.py index baffc4882..78b780d97 100644 --- a/tests/test_ec2/test_ec2_core.py +++ b/tests/test_ec2/test_ec2_core.py @@ -1 +1 @@ -from __future__ import unicode_literals +from __future__ import unicode_literals diff --git a/tests/test_ec2/test_ip_addresses.py b/tests/test_ec2/test_ip_addresses.py index a8e927b00..60cf1cfc6 100644 --- a/tests/test_ec2/test_ip_addresses.py +++ b/tests/test_ec2/test_ip_addresses.py @@ -1,10 +1,10 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2 - - -@mock_ec2 -def test_ip_addresses(): - pass +from __future__ import unicode_literals +import boto +import sure # noqa + +from moto import mock_ec2 + + +@mock_ec2 +def test_ip_addresses(): + pass diff --git a/tests/test_ec2/test_monitoring.py b/tests/test_ec2/test_monitoring.py index 03be93adf..95bd36e6a 100644 --- a/tests/test_ec2/test_monitoring.py +++ b/tests/test_ec2/test_monitoring.py @@ -1,10 +1,10 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2 - - -@mock_ec2 -def test_monitoring(): - pass +from __future__ import unicode_literals +import boto +import sure # noqa + +from moto import mock_ec2 + + +@mock_ec2 +def test_monitoring(): + pass diff --git a/tests/test_ec2/test_placement_groups.py b/tests/test_ec2/test_placement_groups.py index c7494228a..bc389488b 100644 --- a/tests/test_ec2/test_placement_groups.py +++ b/tests/test_ec2/test_placement_groups.py @@ -1,10 +1,10 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2 - - -@mock_ec2 -def test_placement_groups(): - pass +from __future__ import unicode_literals +import boto +import 
sure # noqa + +from moto import mock_ec2 + + +@mock_ec2 +def test_placement_groups(): + pass diff --git a/tests/test_ec2/test_reserved_instances.py b/tests/test_ec2/test_reserved_instances.py index 437d3a95b..47456bc03 100644 --- a/tests/test_ec2/test_reserved_instances.py +++ b/tests/test_ec2/test_reserved_instances.py @@ -1,10 +1,10 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2 - - -@mock_ec2 -def test_reserved_instances(): - pass +from __future__ import unicode_literals +import boto +import sure # noqa + +from moto import mock_ec2 + + +@mock_ec2 +def test_reserved_instances(): + pass diff --git a/tests/test_ec2/test_route_tables.py b/tests/test_ec2/test_route_tables.py index dfb3292b6..347464691 100644 --- a/tests/test_ec2/test_route_tables.py +++ b/tests/test_ec2/test_route_tables.py @@ -236,8 +236,8 @@ def test_route_table_associations(): @mock_ec2_deprecated def test_route_table_replace_route_table_association(): """ - Note: Boto has deprecated replace_route_table_assocation (which returns status) - and now uses replace_route_table_assocation_with_assoc (which returns association ID). + Note: Boto has deprecated replace_route_table_association (which returns status) + and now uses replace_route_table_association_with_assoc (which returns association ID). 
""" conn = boto.connect_vpc("the_key", "the_secret") vpc = conn.create_vpc("10.0.0.0/16") diff --git a/tests/test_ec2/test_virtual_private_gateways.py b/tests/test_ec2/test_virtual_private_gateways.py index f778ac3e5..bb944df0b 100644 --- a/tests/test_ec2/test_virtual_private_gateways.py +++ b/tests/test_ec2/test_virtual_private_gateways.py @@ -1,96 +1,96 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2_deprecated - - -@mock_ec2_deprecated -def test_virtual_private_gateways(): - conn = boto.connect_vpc("the_key", "the_secret") - - vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") - vpn_gateway.should_not.be.none - vpn_gateway.id.should.match(r"vgw-\w+") - vpn_gateway.type.should.equal("ipsec.1") - vpn_gateway.state.should.equal("available") - vpn_gateway.availability_zone.should.equal("us-east-1a") - - -@mock_ec2_deprecated -def test_describe_vpn_gateway(): - conn = boto.connect_vpc("the_key", "the_secret") - vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") - - vgws = conn.get_all_vpn_gateways() - vgws.should.have.length_of(1) - - gateway = vgws[0] - gateway.id.should.match(r"vgw-\w+") - gateway.id.should.equal(vpn_gateway.id) - vpn_gateway.type.should.equal("ipsec.1") - vpn_gateway.state.should.equal("available") - vpn_gateway.availability_zone.should.equal("us-east-1a") - - -@mock_ec2_deprecated -def test_vpn_gateway_vpc_attachment(): - conn = boto.connect_vpc("the_key", "the_secret") - vpc = conn.create_vpc("10.0.0.0/16") - vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") - - conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id) - - gateway = conn.get_all_vpn_gateways()[0] - attachments = gateway.attachments - attachments.should.have.length_of(1) - attachments[0].vpc_id.should.equal(vpc.id) - attachments[0].state.should.equal("attached") - - -@mock_ec2_deprecated -def test_delete_vpn_gateway(): - conn = boto.connect_vpc("the_key", "the_secret") - 
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") - - conn.delete_vpn_gateway(vpn_gateway.id) - vgws = conn.get_all_vpn_gateways() - vgws.should.have.length_of(0) - - -@mock_ec2_deprecated -def test_vpn_gateway_tagging(): - conn = boto.connect_vpc("the_key", "the_secret") - vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") - vpn_gateway.add_tag("a key", "some value") - - tag = conn.get_all_tags()[0] - tag.name.should.equal("a key") - tag.value.should.equal("some value") - - # Refresh the subnet - vpn_gateway = conn.get_all_vpn_gateways()[0] - vpn_gateway.tags.should.have.length_of(1) - vpn_gateway.tags["a key"].should.equal("some value") - - -@mock_ec2_deprecated -def test_detach_vpn_gateway(): - - conn = boto.connect_vpc("the_key", "the_secret") - vpc = conn.create_vpc("10.0.0.0/16") - vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") - - conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id) - - gateway = conn.get_all_vpn_gateways()[0] - attachments = gateway.attachments - attachments.should.have.length_of(1) - attachments[0].vpc_id.should.equal(vpc.id) - attachments[0].state.should.equal("attached") - - conn.detach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id) - - gateway = conn.get_all_vpn_gateways()[0] - attachments = gateway.attachments - attachments.should.have.length_of(0) +from __future__ import unicode_literals +import boto +import sure # noqa + +from moto import mock_ec2_deprecated + + +@mock_ec2_deprecated +def test_virtual_private_gateways(): + conn = boto.connect_vpc("the_key", "the_secret") + + vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") + vpn_gateway.should_not.be.none + vpn_gateway.id.should.match(r"vgw-\w+") + vpn_gateway.type.should.equal("ipsec.1") + vpn_gateway.state.should.equal("available") + vpn_gateway.availability_zone.should.equal("us-east-1a") + + +@mock_ec2_deprecated +def test_describe_vpn_gateway(): + conn = boto.connect_vpc("the_key", "the_secret") + 
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") + + vgws = conn.get_all_vpn_gateways() + vgws.should.have.length_of(1) + + gateway = vgws[0] + gateway.id.should.match(r"vgw-\w+") + gateway.id.should.equal(vpn_gateway.id) + vpn_gateway.type.should.equal("ipsec.1") + vpn_gateway.state.should.equal("available") + vpn_gateway.availability_zone.should.equal("us-east-1a") + + +@mock_ec2_deprecated +def test_vpn_gateway_vpc_attachment(): + conn = boto.connect_vpc("the_key", "the_secret") + vpc = conn.create_vpc("10.0.0.0/16") + vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") + + conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id) + + gateway = conn.get_all_vpn_gateways()[0] + attachments = gateway.attachments + attachments.should.have.length_of(1) + attachments[0].vpc_id.should.equal(vpc.id) + attachments[0].state.should.equal("attached") + + +@mock_ec2_deprecated +def test_delete_vpn_gateway(): + conn = boto.connect_vpc("the_key", "the_secret") + vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") + + conn.delete_vpn_gateway(vpn_gateway.id) + vgws = conn.get_all_vpn_gateways() + vgws.should.have.length_of(0) + + +@mock_ec2_deprecated +def test_vpn_gateway_tagging(): + conn = boto.connect_vpc("the_key", "the_secret") + vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") + vpn_gateway.add_tag("a key", "some value") + + tag = conn.get_all_tags()[0] + tag.name.should.equal("a key") + tag.value.should.equal("some value") + + # Refresh the subnet + vpn_gateway = conn.get_all_vpn_gateways()[0] + vpn_gateway.tags.should.have.length_of(1) + vpn_gateway.tags["a key"].should.equal("some value") + + +@mock_ec2_deprecated +def test_detach_vpn_gateway(): + + conn = boto.connect_vpc("the_key", "the_secret") + vpc = conn.create_vpc("10.0.0.0/16") + vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") + + conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id) + + gateway = 
conn.get_all_vpn_gateways()[0] + attachments = gateway.attachments + attachments.should.have.length_of(1) + attachments[0].vpc_id.should.equal(vpc.id) + attachments[0].state.should.equal("attached") + + conn.detach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id) + + gateway = conn.get_all_vpn_gateways()[0] + attachments = gateway.attachments + attachments.should.have.length_of(0) diff --git a/tests/test_ec2/test_vm_export.py b/tests/test_ec2/test_vm_export.py index f8b24f6d4..08215d067 100644 --- a/tests/test_ec2/test_vm_export.py +++ b/tests/test_ec2/test_vm_export.py @@ -1,10 +1,10 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2 - - -@mock_ec2 -def test_vm_export(): - pass +from __future__ import unicode_literals +import boto +import sure # noqa + +from moto import mock_ec2 + + +@mock_ec2 +def test_vm_export(): + pass diff --git a/tests/test_ec2/test_vm_import.py b/tests/test_ec2/test_vm_import.py index 66c7561a7..0ebfaaa0c 100644 --- a/tests/test_ec2/test_vm_import.py +++ b/tests/test_ec2/test_vm_import.py @@ -1,10 +1,10 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2 - - -@mock_ec2 -def test_vm_import(): - pass +from __future__ import unicode_literals +import boto +import sure # noqa + +from moto import mock_ec2 + + +@mock_ec2 +def test_vm_import(): + pass diff --git a/tests/test_ec2/test_windows.py b/tests/test_ec2/test_windows.py index 364ac2f8a..ae2f7b29a 100644 --- a/tests/test_ec2/test_windows.py +++ b/tests/test_ec2/test_windows.py @@ -1,10 +1,10 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2 - - -@mock_ec2 -def test_windows(): - pass +from __future__ import unicode_literals +import boto +import sure # noqa + +from moto import mock_ec2 + + +@mock_ec2 +def test_windows(): + pass diff --git a/tests/test_ecr/test_ecr_boto3.py b/tests/test_ecr/test_ecr_boto3.py index 
9115e3fad..82a2c7521 100644 --- a/tests/test_ecr/test_ecr_boto3.py +++ b/tests/test_ecr/test_ecr_boto3.py @@ -77,7 +77,7 @@ def test_describe_repositories(): response = client.describe_repositories() len(response["repositories"]).should.equal(2) - respository_arns = [ + repository_arns = [ "arn:aws:ecr:us-east-1:012345678910:repository/test_repository1", "arn:aws:ecr:us-east-1:012345678910:repository/test_repository0", ] @@ -86,9 +86,9 @@ def test_describe_repositories(): response["repositories"][0]["repositoryArn"], response["repositories"][1]["repositoryArn"], ] - ).should.equal(set(respository_arns)) + ).should.equal(set(repository_arns)) - respository_uris = [ + repository_uris = [ "012345678910.dkr.ecr.us-east-1.amazonaws.com/test_repository1", "012345678910.dkr.ecr.us-east-1.amazonaws.com/test_repository0", ] @@ -97,7 +97,7 @@ def test_describe_repositories(): response["repositories"][0]["repositoryUri"], response["repositories"][1]["repositoryUri"], ] - ).should.equal(set(respository_uris)) + ).should.equal(set(repository_uris)) @mock_ecr @@ -108,7 +108,7 @@ def test_describe_repositories_1(): response = client.describe_repositories(registryId="012345678910") len(response["repositories"]).should.equal(2) - respository_arns = [ + repository_arns = [ "arn:aws:ecr:us-east-1:012345678910:repository/test_repository1", "arn:aws:ecr:us-east-1:012345678910:repository/test_repository0", ] @@ -117,9 +117,9 @@ def test_describe_repositories_1(): response["repositories"][0]["repositoryArn"], response["repositories"][1]["repositoryArn"], ] - ).should.equal(set(respository_arns)) + ).should.equal(set(repository_arns)) - respository_uris = [ + repository_uris = [ "012345678910.dkr.ecr.us-east-1.amazonaws.com/test_repository1", "012345678910.dkr.ecr.us-east-1.amazonaws.com/test_repository0", ] @@ -128,7 +128,7 @@ def test_describe_repositories_1(): response["repositories"][0]["repositoryUri"], response["repositories"][1]["repositoryUri"], ] - 
).should.equal(set(respository_uris)) + ).should.equal(set(repository_uris)) @mock_ecr @@ -147,11 +147,11 @@ def test_describe_repositories_3(): _ = client.create_repository(repositoryName="test_repository0") response = client.describe_repositories(repositoryNames=["test_repository1"]) len(response["repositories"]).should.equal(1) - respository_arn = "arn:aws:ecr:us-east-1:012345678910:repository/test_repository1" - response["repositories"][0]["repositoryArn"].should.equal(respository_arn) + repository_arn = "arn:aws:ecr:us-east-1:012345678910:repository/test_repository1" + response["repositories"][0]["repositoryArn"].should.equal(repository_arn) - respository_uri = "012345678910.dkr.ecr.us-east-1.amazonaws.com/test_repository1" - response["repositories"][0]["repositoryUri"].should.equal(respository_uri) + repository_uri = "012345678910.dkr.ecr.us-east-1.amazonaws.com/test_repository1" + response["repositories"][0]["repositoryUri"].should.equal(repository_uri) @mock_ecr diff --git a/tests/test_ecs/test_ecs_boto3.py b/tests/test_ecs/test_ecs_boto3.py index 973c95b81..69c920192 100644 --- a/tests/test_ecs/test_ecs_boto3.py +++ b/tests/test_ecs/test_ecs_boto3.py @@ -94,6 +94,7 @@ def test_register_task_definition(): "logConfiguration": {"logDriver": "json-file"}, } ], + networkMode="bridge", tags=[ {"key": "createdBy", "value": "moto-unittest"}, {"key": "foo", "value": "bar"}, @@ -124,6 +125,7 @@ def test_register_task_definition(): response["taskDefinition"]["containerDefinitions"][0]["logConfiguration"][ "logDriver" ].should.equal("json-file") + response["taskDefinition"]["networkMode"].should.equal("bridge") @mock_ecs @@ -724,7 +726,7 @@ def test_delete_service(): @mock_ecs -def test_update_non_existant_service(): +def test_update_non_existent_service(): client = boto3.client("ecs", region_name="us-east-1") try: client.update_service( diff --git a/tests/test_elbv2/test_elbv2.py b/tests/test_elbv2/test_elbv2.py index eb5df14c3..af1b19f09 100644 --- 
a/tests/test_elbv2/test_elbv2.py +++ b/tests/test_elbv2/test_elbv2.py @@ -1391,7 +1391,7 @@ def test_set_security_groups(): len(resp["LoadBalancers"][0]["SecurityGroups"]).should.equal(2) with assert_raises(ClientError): - client.set_security_groups(LoadBalancerArn=arn, SecurityGroups=["non_existant"]) + client.set_security_groups(LoadBalancerArn=arn, SecurityGroups=["non_existent"]) @mock_elbv2 diff --git a/tests/test_events/test_events.py b/tests/test_events/test_events.py index 6e9ca3a03..cf3743d34 100644 --- a/tests/test_events/test_events.py +++ b/tests/test_events/test_events.py @@ -1,14 +1,18 @@ -import random -import boto3 -import json -import sure # noqa - +from moto.events.models import EventsBackend from moto.events import mock_events +import json +import random +import unittest + +import boto3 from botocore.exceptions import ClientError from moto.core.exceptions import JsonRESTError from nose.tools import assert_raises + from moto.core import ACCOUNT_ID -from moto.events.models import EventsBackend +<< << << < HEAD +== == == = +>>>>>> > 100dbd529f174f18d579a1dcc066d55409f2e38f RULES = [ {"Name": "test1", "ScheduleExpression": "rate(5 minutes)"}, @@ -456,6 +460,11 @@ def test_delete_event_bus_errors(): ClientError, "Cannot delete event bus default." 
) + +<< << << < HEAD +== == == = + +>>>>>> > 100dbd529f174f18d579a1dcc066d55409f2e38f @mock_events def test_rule_tagging_happy(): client = generate_environment() @@ -466,7 +475,12 @@ def test_rule_tagging_happy(): client.tag_resource(ResourceARN=rule_arn, Tags=tags) actual = client.list_tags_for_resource(ResourceARN=rule_arn).get("Tags") - assert tags == actual + tc = unittest.TestCase("__init__") + expected = [{"Value": "value1", "Key": "key1"}, {"Value": "value2", "Key": "key2"}] + tc.assertTrue( + (expected[0] == actual[0] and expected[1] == actual[1]) + or (expected[1] == actual[0] and expected[0] == actual[1]) + ) client.untag_resource(ResourceARN=rule_arn, TagKeys=["key1"]) @@ -474,24 +488,25 @@ def test_rule_tagging_happy(): expected = [{"Key": "key2", "Value": "value2"}] assert expected == actual + @mock_events def test_rule_tagging_sad(): - b = EventsBackend("us-west-2") + back_end = EventsBackend("us-west-2") try: - b.tag_resource('unknown', []) - raise 'tag_resource should fail if ResourceARN is not known' + back_end.tag_resource("unknown", []) + raise "tag_resource should fail if ResourceARN is not known" except JsonRESTError: pass try: - b.untag_resource('unknown', []) - raise 'untag_resource should fail if ResourceARN is not known' + back_end.untag_resource("unknown", []) + raise "untag_resource should fail if ResourceARN is not known" except JsonRESTError: pass try: - b.list_tags_for_resource('unknown') - raise 'list_tags_for_resource should fail if ResourceARN is not known' + back_end.list_tags_for_resource("unknown") + raise "list_tags_for_resource should fail if ResourceARN is not known" except JsonRESTError: - pass \ No newline at end of file + pass diff --git a/tests/test_glacier/test_glacier_archives.py b/tests/test_glacier/test_glacier_archives.py index e8fa6045e..ec43e613c 100644 --- a/tests/test_glacier/test_glacier_archives.py +++ b/tests/test_glacier/test_glacier_archives.py @@ -1,21 +1,21 @@ -from __future__ import unicode_literals - 
-from tempfile import NamedTemporaryFile -import boto.glacier -import sure # noqa - -from moto import mock_glacier_deprecated - - -@mock_glacier_deprecated -def test_create_and_delete_archive(): - the_file = NamedTemporaryFile(delete=False) - the_file.write(b"some stuff") - the_file.close() - - conn = boto.glacier.connect_to_region("us-west-2") - vault = conn.create_vault("my_vault") - - archive_id = vault.upload_archive(the_file.name) - - vault.delete_archive(archive_id) +from __future__ import unicode_literals + +from tempfile import NamedTemporaryFile +import boto.glacier +import sure # noqa + +from moto import mock_glacier_deprecated + + +@mock_glacier_deprecated +def test_create_and_delete_archive(): + the_file = NamedTemporaryFile(delete=False) + the_file.write(b"some stuff") + the_file.close() + + conn = boto.glacier.connect_to_region("us-west-2") + vault = conn.create_vault("my_vault") + + archive_id = vault.upload_archive(the_file.name) + + vault.delete_archive(archive_id) diff --git a/tests/test_glacier/test_glacier_vaults.py b/tests/test_glacier/test_glacier_vaults.py index e64f40a90..93c79423e 100644 --- a/tests/test_glacier/test_glacier_vaults.py +++ b/tests/test_glacier/test_glacier_vaults.py @@ -1,31 +1,31 @@ -from __future__ import unicode_literals - -import boto.glacier -import sure # noqa - -from moto import mock_glacier_deprecated - - -@mock_glacier_deprecated -def test_create_vault(): - conn = boto.glacier.connect_to_region("us-west-2") - - conn.create_vault("my_vault") - - vaults = conn.list_vaults() - vaults.should.have.length_of(1) - vaults[0].name.should.equal("my_vault") - - -@mock_glacier_deprecated -def test_delete_vault(): - conn = boto.glacier.connect_to_region("us-west-2") - - conn.create_vault("my_vault") - - vaults = conn.list_vaults() - vaults.should.have.length_of(1) - - conn.delete_vault("my_vault") - vaults = conn.list_vaults() - vaults.should.have.length_of(0) +from __future__ import unicode_literals + +import boto.glacier 
+import sure # noqa + +from moto import mock_glacier_deprecated + + +@mock_glacier_deprecated +def test_create_vault(): + conn = boto.glacier.connect_to_region("us-west-2") + + conn.create_vault("my_vault") + + vaults = conn.list_vaults() + vaults.should.have.length_of(1) + vaults[0].name.should.equal("my_vault") + + +@mock_glacier_deprecated +def test_delete_vault(): + conn = boto.glacier.connect_to_region("us-west-2") + + conn.create_vault("my_vault") + + vaults = conn.list_vaults() + vaults.should.have.length_of(1) + + conn.delete_vault("my_vault") + vaults = conn.list_vaults() + vaults.should.have.length_of(0) diff --git a/tests/test_glue/__init__.py b/tests/test_glue/__init__.py index baffc4882..78b780d97 100644 --- a/tests/test_glue/__init__.py +++ b/tests/test_glue/__init__.py @@ -1 +1 @@ -from __future__ import unicode_literals +from __future__ import unicode_literals diff --git a/tests/test_glue/fixtures/__init__.py b/tests/test_glue/fixtures/__init__.py index baffc4882..78b780d97 100644 --- a/tests/test_glue/fixtures/__init__.py +++ b/tests/test_glue/fixtures/__init__.py @@ -1 +1 @@ -from __future__ import unicode_literals +from __future__ import unicode_literals diff --git a/tests/test_glue/helpers.py b/tests/test_glue/helpers.py index 130a879bc..9003a1358 100644 --- a/tests/test_glue/helpers.py +++ b/tests/test_glue/helpers.py @@ -1,97 +1,97 @@ -from __future__ import unicode_literals - -import copy - -from .fixtures.datacatalog import TABLE_INPUT, PARTITION_INPUT - - -def create_database(client, database_name): - return client.create_database(DatabaseInput={"Name": database_name}) - - -def get_database(client, database_name): - return client.get_database(Name=database_name) - - -def create_table_input(database_name, table_name, columns=[], partition_keys=[]): - table_input = copy.deepcopy(TABLE_INPUT) - table_input["Name"] = table_name - table_input["PartitionKeys"] = partition_keys - table_input["StorageDescriptor"]["Columns"] = columns - 
table_input["StorageDescriptor"][ - "Location" - ] = "s3://my-bucket/{database_name}/{table_name}".format( - database_name=database_name, table_name=table_name - ) - return table_input - - -def create_table(client, database_name, table_name, table_input=None, **kwargs): - if table_input is None: - table_input = create_table_input(database_name, table_name, **kwargs) - - return client.create_table(DatabaseName=database_name, TableInput=table_input) - - -def update_table(client, database_name, table_name, table_input=None, **kwargs): - if table_input is None: - table_input = create_table_input(database_name, table_name, **kwargs) - - return client.update_table(DatabaseName=database_name, TableInput=table_input) - - -def get_table(client, database_name, table_name): - return client.get_table(DatabaseName=database_name, Name=table_name) - - -def get_tables(client, database_name): - return client.get_tables(DatabaseName=database_name) - - -def get_table_versions(client, database_name, table_name): - return client.get_table_versions(DatabaseName=database_name, TableName=table_name) - - -def get_table_version(client, database_name, table_name, version_id): - return client.get_table_version( - DatabaseName=database_name, TableName=table_name, VersionId=version_id - ) - - -def create_partition_input(database_name, table_name, values=[], columns=[]): - root_path = "s3://my-bucket/{database_name}/{table_name}".format( - database_name=database_name, table_name=table_name - ) - - part_input = copy.deepcopy(PARTITION_INPUT) - part_input["Values"] = values - part_input["StorageDescriptor"]["Columns"] = columns - part_input["StorageDescriptor"]["SerdeInfo"]["Parameters"]["path"] = root_path - return part_input - - -def create_partition(client, database_name, table_name, partiton_input=None, **kwargs): - if partiton_input is None: - partiton_input = create_partition_input(database_name, table_name, **kwargs) - return client.create_partition( - DatabaseName=database_name, 
TableName=table_name, PartitionInput=partiton_input - ) - - -def update_partition( - client, database_name, table_name, old_values=[], partiton_input=None, **kwargs -): - if partiton_input is None: - partiton_input = create_partition_input(database_name, table_name, **kwargs) - return client.update_partition( - DatabaseName=database_name, - TableName=table_name, - PartitionInput=partiton_input, - PartitionValueList=old_values, - ) - - -def get_partition(client, database_name, table_name, values): - return client.get_partition( - DatabaseName=database_name, TableName=table_name, PartitionValues=values - ) +from __future__ import unicode_literals + +import copy + +from .fixtures.datacatalog import TABLE_INPUT, PARTITION_INPUT + + +def create_database(client, database_name): + return client.create_database(DatabaseInput={"Name": database_name}) + + +def get_database(client, database_name): + return client.get_database(Name=database_name) + + +def create_table_input(database_name, table_name, columns=[], partition_keys=[]): + table_input = copy.deepcopy(TABLE_INPUT) + table_input["Name"] = table_name + table_input["PartitionKeys"] = partition_keys + table_input["StorageDescriptor"]["Columns"] = columns + table_input["StorageDescriptor"][ + "Location" + ] = "s3://my-bucket/{database_name}/{table_name}".format( + database_name=database_name, table_name=table_name + ) + return table_input + + +def create_table(client, database_name, table_name, table_input=None, **kwargs): + if table_input is None: + table_input = create_table_input(database_name, table_name, **kwargs) + + return client.create_table(DatabaseName=database_name, TableInput=table_input) + + +def update_table(client, database_name, table_name, table_input=None, **kwargs): + if table_input is None: + table_input = create_table_input(database_name, table_name, **kwargs) + + return client.update_table(DatabaseName=database_name, TableInput=table_input) + + +def get_table(client, database_name, table_name): + 
return client.get_table(DatabaseName=database_name, Name=table_name) + + +def get_tables(client, database_name): + return client.get_tables(DatabaseName=database_name) + + +def get_table_versions(client, database_name, table_name): + return client.get_table_versions(DatabaseName=database_name, TableName=table_name) + + +def get_table_version(client, database_name, table_name, version_id): + return client.get_table_version( + DatabaseName=database_name, TableName=table_name, VersionId=version_id + ) + + +def create_partition_input(database_name, table_name, values=[], columns=[]): + root_path = "s3://my-bucket/{database_name}/{table_name}".format( + database_name=database_name, table_name=table_name + ) + + part_input = copy.deepcopy(PARTITION_INPUT) + part_input["Values"] = values + part_input["StorageDescriptor"]["Columns"] = columns + part_input["StorageDescriptor"]["SerdeInfo"]["Parameters"]["path"] = root_path + return part_input + + +def create_partition(client, database_name, table_name, partiton_input=None, **kwargs): + if partiton_input is None: + partiton_input = create_partition_input(database_name, table_name, **kwargs) + return client.create_partition( + DatabaseName=database_name, TableName=table_name, PartitionInput=partiton_input + ) + + +def update_partition( + client, database_name, table_name, old_values=[], partiton_input=None, **kwargs +): + if partiton_input is None: + partiton_input = create_partition_input(database_name, table_name, **kwargs) + return client.update_partition( + DatabaseName=database_name, + TableName=table_name, + PartitionInput=partiton_input, + PartitionValueList=old_values, + ) + + +def get_partition(client, database_name, table_name, values): + return client.get_partition( + DatabaseName=database_name, TableName=table_name, PartitionValues=values + ) diff --git a/tests/test_glue/test_datacatalog.py b/tests/test_glue/test_datacatalog.py index 28281b18f..31731e598 100644 --- a/tests/test_glue/test_datacatalog.py +++ 
b/tests/test_glue/test_datacatalog.py @@ -132,7 +132,7 @@ def test_get_table_versions(): helpers.update_table(client, database_name, table_name, table_input) version_inputs["2"] = table_input - # Updateing with an indentical input should still create a new version + # Updateing with an identical input should still create a new version helpers.update_table(client, database_name, table_name, table_input) version_inputs["3"] = table_input diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index 9a2c1f0dd..995895437 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -785,7 +785,7 @@ def test_delete_login_profile(): conn.delete_login_profile("my-user") -@mock_iam() +@mock_iam def test_create_access_key(): conn = boto3.client("iam", region_name="us-east-1") with assert_raises(ClientError): @@ -798,6 +798,19 @@ def test_create_access_key(): access_key["AccessKeyId"].should.have.length_of(20) access_key["SecretAccessKey"].should.have.length_of(40) assert access_key["AccessKeyId"].startswith("AKIA") + conn = boto3.client( + "iam", + region_name="us-east-1", + aws_access_key_id=access_key["AccessKeyId"], + aws_secret_access_key=access_key["SecretAccessKey"], + ) + access_key = conn.create_access_key()["AccessKey"] + ( + datetime.utcnow() - access_key["CreateDate"].replace(tzinfo=None) + ).seconds.should.be.within(0, 10) + access_key["AccessKeyId"].should.have.length_of(20) + access_key["SecretAccessKey"].should.have.length_of(40) + assert access_key["AccessKeyId"].startswith("AKIA") @mock_iam_deprecated() @@ -825,8 +838,35 @@ def test_get_all_access_keys(): ) +@mock_iam +def test_list_access_keys(): + conn = boto3.client("iam", region_name="us-east-1") + conn.create_user(UserName="my-user") + response = conn.list_access_keys(UserName="my-user") + assert_equals( + response["AccessKeyMetadata"], [], + ) + access_key = conn.create_access_key(UserName="my-user")["AccessKey"] + response = conn.list_access_keys(UserName="my-user") + 
assert_equals( + sorted(response["AccessKeyMetadata"][0].keys()), + sorted(["Status", "CreateDate", "UserName", "AccessKeyId"]), + ) + conn = boto3.client( + "iam", + region_name="us-east-1", + aws_access_key_id=access_key["AccessKeyId"], + aws_secret_access_key=access_key["SecretAccessKey"], + ) + response = conn.list_access_keys() + assert_equals( + sorted(response["AccessKeyMetadata"][0].keys()), + sorted(["Status", "CreateDate", "UserName", "AccessKeyId"]), + ) + + @mock_iam_deprecated() -def test_delete_access_key(): +def test_delete_access_key_deprecated(): conn = boto.connect_iam() conn.create_user("my-user") access_key_id = conn.create_access_key("my-user")["create_access_key_response"][ @@ -835,6 +875,16 @@ def test_delete_access_key(): conn.delete_access_key(access_key_id, "my-user") +@mock_iam +def test_delete_access_key(): + conn = boto3.client("iam", region_name="us-east-1") + conn.create_user(UserName="my-user") + key = conn.create_access_key(UserName="my-user")["AccessKey"] + conn.delete_access_key(AccessKeyId=key["AccessKeyId"], UserName="my-user") + key = conn.create_access_key(UserName="my-user")["AccessKey"] + conn.delete_access_key(AccessKeyId=key["AccessKeyId"]) + + @mock_iam() def test_mfa_devices(): # Test enable device @@ -1326,6 +1376,9 @@ def test_update_access_key(): ) resp = client.list_access_keys(UserName=username) resp["AccessKeyMetadata"][0]["Status"].should.equal("Inactive") + client.update_access_key(AccessKeyId=key["AccessKeyId"], Status="Active") + resp = client.list_access_keys(UserName=username) + resp["AccessKeyMetadata"][0]["Status"].should.equal("Active") @mock_iam diff --git a/tests/test_iot/test_iot.py b/tests/test_iot/test_iot.py index 0c0623a6f..f8c4f579c 100644 --- a/tests/test_iot/test_iot.py +++ b/tests/test_iot/test_iot.py @@ -9,6 +9,173 @@ from botocore.exceptions import ClientError from nose.tools import assert_raises +@mock_iot +def test_attach_policy(): + client = boto3.client("iot", region_name="ap-northeast-1") 
+ policy_name = "my-policy" + doc = "{}" + + cert = client.create_keys_and_certificate(setAsActive=True) + cert_arn = cert["certificateArn"] + client.create_policy(policyName=policy_name, policyDocument=doc) + client.attach_policy(policyName=policy_name, target=cert_arn) + + res = client.list_attached_policies(target=cert_arn) + res.should.have.key("policies").which.should.have.length_of(1) + res["policies"][0]["policyName"].should.equal("my-policy") + + +@mock_iot +def test_detach_policy(): + client = boto3.client("iot", region_name="ap-northeast-1") + policy_name = "my-policy" + doc = "{}" + + cert = client.create_keys_and_certificate(setAsActive=True) + cert_arn = cert["certificateArn"] + client.create_policy(policyName=policy_name, policyDocument=doc) + client.attach_policy(policyName=policy_name, target=cert_arn) + + res = client.list_attached_policies(target=cert_arn) + res.should.have.key("policies").which.should.have.length_of(1) + res["policies"][0]["policyName"].should.equal("my-policy") + + client.detach_policy(policyName=policy_name, target=cert_arn) + res = client.list_attached_policies(target=cert_arn) + res.should.have.key("policies").which.should.be.empty + + +@mock_iot +def test_list_attached_policies(): + client = boto3.client("iot", region_name="ap-northeast-1") + cert = client.create_keys_and_certificate(setAsActive=True) + policies = client.list_attached_policies(target=cert["certificateArn"]) + policies["policies"].should.be.empty + + +@mock_iot +def test_policy_versions(): + client = boto3.client("iot", region_name="ap-northeast-1") + policy_name = "my-policy" + doc = "{}" + + policy = client.create_policy(policyName=policy_name, policyDocument=doc) + policy.should.have.key("policyName").which.should.equal(policy_name) + policy.should.have.key("policyArn").which.should_not.be.none + policy.should.have.key("policyDocument").which.should.equal(json.dumps({})) + policy.should.have.key("policyVersionId").which.should.equal("1") + + policy = 
client.get_policy(policyName=policy_name) + policy.should.have.key("policyName").which.should.equal(policy_name) + policy.should.have.key("policyArn").which.should_not.be.none + policy.should.have.key("policyDocument").which.should.equal(json.dumps({})) + policy.should.have.key("defaultVersionId").which.should.equal( + policy["defaultVersionId"] + ) + + policy1 = client.create_policy_version( + policyName=policy_name, + policyDocument=json.dumps({"version": "version_1"}), + setAsDefault=True, + ) + policy1.should.have.key("policyArn").which.should_not.be.none + policy1.should.have.key("policyDocument").which.should.equal( + json.dumps({"version": "version_1"}) + ) + policy1.should.have.key("policyVersionId").which.should.equal("2") + policy1.should.have.key("isDefaultVersion").which.should.equal(True) + + policy2 = client.create_policy_version( + policyName=policy_name, + policyDocument=json.dumps({"version": "version_2"}), + setAsDefault=False, + ) + policy2.should.have.key("policyArn").which.should_not.be.none + policy2.should.have.key("policyDocument").which.should.equal( + json.dumps({"version": "version_2"}) + ) + policy2.should.have.key("policyVersionId").which.should.equal("3") + policy2.should.have.key("isDefaultVersion").which.should.equal(False) + + policy = client.get_policy(policyName=policy_name) + policy.should.have.key("policyName").which.should.equal(policy_name) + policy.should.have.key("policyArn").which.should_not.be.none + policy.should.have.key("policyDocument").which.should.equal( + json.dumps({"version": "version_1"}) + ) + policy.should.have.key("defaultVersionId").which.should.equal( + policy1["policyVersionId"] + ) + + policy_versions = client.list_policy_versions(policyName=policy_name) + policy_versions.should.have.key("policyVersions").which.should.have.length_of(3) + list( + map(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"]) + ).count(True).should.equal(1) + default_policy = list( + filter(lambda item: 
item["isDefaultVersion"], policy_versions["policyVersions"]) + ) + default_policy[0].should.have.key("versionId").should.equal( + policy1["policyVersionId"] + ) + + policy = client.get_policy(policyName=policy_name) + policy.should.have.key("policyName").which.should.equal(policy_name) + policy.should.have.key("policyArn").which.should_not.be.none + policy.should.have.key("policyDocument").which.should.equal( + json.dumps({"version": "version_1"}) + ) + policy.should.have.key("defaultVersionId").which.should.equal( + policy1["policyVersionId"] + ) + + client.set_default_policy_version( + policyName=policy_name, policyVersionId=policy2["policyVersionId"] + ) + policy_versions = client.list_policy_versions(policyName=policy_name) + policy_versions.should.have.key("policyVersions").which.should.have.length_of(3) + list( + map(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"]) + ).count(True).should.equal(1) + default_policy = list( + filter(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"]) + ) + default_policy[0].should.have.key("versionId").should.equal( + policy2["policyVersionId"] + ) + + policy = client.get_policy(policyName=policy_name) + policy.should.have.key("policyName").which.should.equal(policy_name) + policy.should.have.key("policyArn").which.should_not.be.none + policy.should.have.key("policyDocument").which.should.equal( + json.dumps({"version": "version_2"}) + ) + policy.should.have.key("defaultVersionId").which.should.equal( + policy2["policyVersionId"] + ) + + client.delete_policy_version(policyName=policy_name, policyVersionId="1") + policy_versions = client.list_policy_versions(policyName=policy_name) + policy_versions.should.have.key("policyVersions").which.should.have.length_of(2) + + client.delete_policy_version( + policyName=policy_name, policyVersionId=policy1["policyVersionId"] + ) + policy_versions = client.list_policy_versions(policyName=policy_name) + 
policy_versions.should.have.key("policyVersions").which.should.have.length_of(1) + + # should fail as it"s the default policy. Should use delete_policy instead + try: + client.delete_policy_version( + policyName=policy_name, policyVersionId=policy2["policyVersionId"] + ) + assert False, "Should have failed in previous call" + except Exception as exception: + exception.response["Error"]["Message"].should.equal( + "Cannot delete the default version of a policy" + ) + + @mock_iot def test_things(): client = boto3.client("iot", region_name="ap-northeast-1") @@ -994,7 +1161,10 @@ def test_create_job(): client = boto3.client("iot", region_name="eu-west-1") name = "my-thing" job_id = "TestJob" - # thing + # thing# job document + # job_document = { + # "field": "value" + # } thing = client.create_thing(thingName=name) thing.should.have.key("thingName").which.should.equal(name) thing.should.have.key("thingArn") @@ -1020,6 +1190,63 @@ def test_create_job(): job.should.have.key("description") +@mock_iot +def test_list_jobs(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing# job document + # job_document = { + # "field": "value" + # } + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + # job document + job_document = {"field": "value"} + + job1 = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + document=json.dumps(job_document), + description="Description", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job1.should.have.key("jobId").which.should.equal(job_id) + job1.should.have.key("jobArn") + job1.should.have.key("description") + + job2 = client.create_job( + jobId=job_id + "1", + targets=[thing["thingArn"]], + document=json.dumps(job_document), + 
description="Description", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job2.should.have.key("jobId").which.should.equal(job_id + "1") + job2.should.have.key("jobArn") + job2.should.have.key("description") + + jobs = client.list_jobs() + jobs.should.have.key("jobs") + jobs.should_not.have.key("nextToken") + jobs["jobs"][0].should.have.key("jobId").which.should.equal(job_id) + jobs["jobs"][1].should.have.key("jobId").which.should.equal(job_id + "1") + + @mock_iot def test_describe_job(): client = boto3.client("iot", region_name="eu-west-1") @@ -1124,3 +1351,387 @@ def test_describe_job_1(): job.should.have.key("job").which.should.have.key( "jobExecutionsRolloutConfig" ).which.should.have.key("maximumPerMinute").which.should.equal(10) + + +@mock_iot +def test_delete_job(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + + job = client.describe_job(jobId=job_id) + job.should.have.key("job") + job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id) + + client.delete_job(jobId=job_id) + + client.list_jobs()["jobs"].should.have.length_of(0) + + +@mock_iot +def test_cancel_job(): + client = boto3.client("iot", region_name="eu-west-1") 
+ name = "my-thing" + job_id = "TestJob" + # thing + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + + job = client.describe_job(jobId=job_id) + job.should.have.key("job") + job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id) + + job = client.cancel_job(jobId=job_id, reasonCode="Because", comment="You are") + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + + job = client.describe_job(jobId=job_id) + job.should.have.key("job") + job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("job").which.should.have.key("status").which.should.equal( + "CANCELED" + ) + job.should.have.key("job").which.should.have.key( + "forceCanceled" + ).which.should.equal(False) + job.should.have.key("job").which.should.have.key("reasonCode").which.should.equal( + "Because" + ) + job.should.have.key("job").which.should.have.key("comment").which.should.equal( + "You are" + ) + + +@mock_iot +def test_get_job_document_with_document_source(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + 
documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + + job_document = client.get_job_document(jobId=job_id) + job_document.should.have.key("document").which.should.equal("") + + +@mock_iot +def test_get_job_document_with_document(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + # job document + job_document = {"field": "value"} + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + document=json.dumps(job_document), + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + + job_document = client.get_job_document(jobId=job_id) + job_document.should.have.key("document").which.should.equal('{"field": "value"}') + + +@mock_iot +def test_describe_job_execution(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + # job document + job_document = {"field": "value"} + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + document=json.dumps(job_document), + description="Description", + presignedUrlConfig={ + "roleArn": 
"arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + job.should.have.key("description") + + job_execution = client.describe_job_execution(jobId=job_id, thingName=name) + job_execution.should.have.key("execution") + job_execution["execution"].should.have.key("jobId").which.should.equal(job_id) + job_execution["execution"].should.have.key("status").which.should.equal("QUEUED") + job_execution["execution"].should.have.key("forceCanceled").which.should.equal( + False + ) + job_execution["execution"].should.have.key("statusDetails").which.should.equal( + {"detailsMap": {}} + ) + job_execution["execution"].should.have.key("thingArn").which.should.equal( + thing["thingArn"] + ) + job_execution["execution"].should.have.key("queuedAt") + job_execution["execution"].should.have.key("startedAt") + job_execution["execution"].should.have.key("lastUpdatedAt") + job_execution["execution"].should.have.key("executionNumber").which.should.equal( + 123 + ) + job_execution["execution"].should.have.key("versionNumber").which.should.equal(123) + job_execution["execution"].should.have.key( + "approximateSecondsBeforeTimedOut" + ).which.should.equal(123) + + job_execution = client.describe_job_execution( + jobId=job_id, thingName=name, executionNumber=123 + ) + job_execution.should.have.key("execution") + job_execution["execution"].should.have.key("jobId").which.should.equal(job_id) + job_execution["execution"].should.have.key("status").which.should.equal("QUEUED") + job_execution["execution"].should.have.key("forceCanceled").which.should.equal( + False + ) + job_execution["execution"].should.have.key("statusDetails").which.should.equal( + {"detailsMap": {}} + ) + job_execution["execution"].should.have.key("thingArn").which.should.equal( + thing["thingArn"] + ) + 
job_execution["execution"].should.have.key("queuedAt") + job_execution["execution"].should.have.key("startedAt") + job_execution["execution"].should.have.key("lastUpdatedAt") + job_execution["execution"].should.have.key("executionNumber").which.should.equal( + 123 + ) + job_execution["execution"].should.have.key("versionNumber").which.should.equal(123) + job_execution["execution"].should.have.key( + "approximateSecondsBeforeTimedOut" + ).which.should.equal(123) + + try: + client.describe_job_execution(jobId=job_id, thingName=name, executionNumber=456) + except ClientError as exc: + error_code = exc.response["Error"]["Code"] + error_code.should.equal("ResourceNotFoundException") + else: + raise Exception("Should have raised error") + + +@mock_iot +def test_cancel_job_execution(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + # job document + job_document = {"field": "value"} + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + document=json.dumps(job_document), + description="Description", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + job.should.have.key("description") + + client.cancel_job_execution(jobId=job_id, thingName=name) + job_execution = client.describe_job_execution(jobId=job_id, thingName=name) + job_execution.should.have.key("execution") + job_execution["execution"].should.have.key("status").which.should.equal("CANCELED") + + +@mock_iot +def test_delete_job_execution(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing + 
thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + # job document + job_document = {"field": "value"} + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + document=json.dumps(job_document), + description="Description", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + job.should.have.key("description") + + client.delete_job_execution(jobId=job_id, thingName=name, executionNumber=123) + try: + client.describe_job_execution(jobId=job_id, thingName=name, executionNumber=123) + except ClientError as exc: + error_code = exc.response["Error"]["Code"] + error_code.should.equal("ResourceNotFoundException") + else: + raise Exception("Should have raised error") + + +@mock_iot +def test_list_job_executions_for_job(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + # job document + job_document = {"field": "value"} + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + document=json.dumps(job_document), + description="Description", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + job.should.have.key("description") + + job_execution = client.list_job_executions_for_job(jobId=job_id) + job_execution.should.have.key("executionSummaries") + 
job_execution["executionSummaries"][0].should.have.key( + "thingArn" + ).which.should.equal(thing["thingArn"]) + + +@mock_iot +def test_list_job_executions_for_thing(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + # job document + job_document = {"field": "value"} + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + document=json.dumps(job_document), + description="Description", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + job.should.have.key("description") + + job_execution = client.list_job_executions_for_thing(thingName=name) + job_execution.should.have.key("executionSummaries") + job_execution["executionSummaries"][0].should.have.key("jobId").which.should.equal( + job_id + ) diff --git a/tests/test_kinesis/test_firehose.py b/tests/test_kinesis/test_firehose.py index 5e8c4aa08..4f16dc82d 100644 --- a/tests/test_kinesis/test_firehose.py +++ b/tests/test_kinesis/test_firehose.py @@ -223,7 +223,7 @@ def test_create_stream_without_redshift(): @mock_kinesis -def test_deescribe_non_existant_stream(): +def test_deescribe_non_existent_stream(): client = boto3.client("firehose", region_name="us-east-1") client.describe_delivery_stream.when.called_with( diff --git a/tests/test_kinesis/test_kinesis.py b/tests/test_kinesis/test_kinesis.py index de1764892..b3251bb0f 100644 --- a/tests/test_kinesis/test_kinesis.py +++ b/tests/test_kinesis/test_kinesis.py @@ -32,7 +32,7 @@ def test_create_cluster(): @mock_kinesis_deprecated -def test_describe_non_existant_stream(): +def 
test_describe_non_existent_stream(): conn = boto.kinesis.connect_to_region("us-east-1") conn.describe_stream.when.called_with("not-a-stream").should.throw( ResourceNotFoundException diff --git a/tests/test_kms/test_kms.py b/tests/test_kms/test_kms.py index 6a35ee2c8..aaf09a6be 100644 --- a/tests/test_kms/test_kms.py +++ b/tests/test_kms/test_kms.py @@ -1,26 +1,19 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals -from datetime import date -from datetime import datetime -from dateutil.tz import tzutc import base64 -import os import re -import boto3 import boto.kms -import botocore.exceptions import six import sure # noqa from boto.exception import JSONResponseError from boto.kms.exceptions import AlreadyExistsException, NotFoundException -from freezegun import freeze_time from nose.tools import assert_raises from parameterized import parameterized from moto.core.exceptions import JsonRESTError from moto.kms.models import KmsBackend from moto.kms.exceptions import NotFoundException as MotoNotFoundException -from moto import mock_kms, mock_kms_deprecated +from moto import mock_kms_deprecated PLAINTEXT_VECTORS = ( (b"some encodeable plaintext",), @@ -36,23 +29,6 @@ def _get_encoded_value(plaintext): return plaintext.encode("utf-8") -@mock_kms -def test_create_key(): - conn = boto3.client("kms", region_name="us-east-1") - with freeze_time("2015-01-01 00:00:00"): - key = conn.create_key( - Policy="my policy", - Description="my key", - KeyUsage="ENCRYPT_DECRYPT", - Tags=[{"TagKey": "project", "TagValue": "moto"}], - ) - - key["KeyMetadata"]["Description"].should.equal("my key") - key["KeyMetadata"]["KeyUsage"].should.equal("ENCRYPT_DECRYPT") - key["KeyMetadata"]["Enabled"].should.equal(True) - key["KeyMetadata"]["CreationDate"].should.be.a(date) - - @mock_kms_deprecated def test_describe_key(): conn = boto.kms.connect_to_region("us-west-2") @@ -97,22 +73,6 @@ def test_describe_key_via_alias_not_found(): ) -@parameterized( - ( - ("alias/does-not-exist",), 
- ("arn:aws:kms:us-east-1:012345678912:alias/does-not-exist",), - ("invalid",), - ) -) -@mock_kms -def test_describe_key_via_alias_invalid_alias(key_id): - client = boto3.client("kms", region_name="us-east-1") - client.create_key(Description="key") - - with assert_raises(client.exceptions.NotFoundException): - client.describe_key(KeyId=key_id) - - @mock_kms_deprecated def test_describe_key_via_arn(): conn = boto.kms.connect_to_region("us-west-2") @@ -240,71 +200,6 @@ def test_generate_data_key(): response["KeyId"].should.equal(key_arn) -@mock_kms -def test_boto3_generate_data_key(): - kms = boto3.client("kms", region_name="us-west-2") - - key = kms.create_key() - key_id = key["KeyMetadata"]["KeyId"] - key_arn = key["KeyMetadata"]["Arn"] - - response = kms.generate_data_key(KeyId=key_id, NumberOfBytes=32) - - # CiphertextBlob must NOT be base64-encoded - with assert_raises(Exception): - base64.b64decode(response["CiphertextBlob"], validate=True) - # Plaintext must NOT be base64-encoded - with assert_raises(Exception): - base64.b64decode(response["Plaintext"], validate=True) - - response["KeyId"].should.equal(key_arn) - - -@parameterized(PLAINTEXT_VECTORS) -@mock_kms -def test_encrypt(plaintext): - client = boto3.client("kms", region_name="us-west-2") - - key = client.create_key(Description="key") - key_id = key["KeyMetadata"]["KeyId"] - key_arn = key["KeyMetadata"]["Arn"] - - response = client.encrypt(KeyId=key_id, Plaintext=plaintext) - response["CiphertextBlob"].should_not.equal(plaintext) - - # CiphertextBlob must NOT be base64-encoded - with assert_raises(Exception): - base64.b64decode(response["CiphertextBlob"], validate=True) - - response["KeyId"].should.equal(key_arn) - - -@parameterized(PLAINTEXT_VECTORS) -@mock_kms -def test_decrypt(plaintext): - client = boto3.client("kms", region_name="us-west-2") - - key = client.create_key(Description="key") - key_id = key["KeyMetadata"]["KeyId"] - key_arn = key["KeyMetadata"]["Arn"] - - encrypt_response = 
client.encrypt(KeyId=key_id, Plaintext=plaintext) - - client.create_key(Description="key") - # CiphertextBlob must NOT be base64-encoded - with assert_raises(Exception): - base64.b64decode(encrypt_response["CiphertextBlob"], validate=True) - - decrypt_response = client.decrypt(CiphertextBlob=encrypt_response["CiphertextBlob"]) - - # Plaintext must NOT be base64-encoded - with assert_raises(Exception): - base64.b64decode(decrypt_response["Plaintext"], validate=True) - - decrypt_response["Plaintext"].should.equal(_get_encoded_value(plaintext)) - decrypt_response["KeyId"].should.equal(key_arn) - - @mock_kms_deprecated def test_disable_key_rotation_with_missing_key(): conn = boto.kms.connect_to_region("us-west-2") @@ -775,25 +670,6 @@ def test__list_aliases(): len(aliases).should.equal(7) -@parameterized( - ( - ("not-a-uuid",), - ("alias/DoesNotExist",), - ("arn:aws:kms:us-east-1:012345678912:alias/DoesNotExist",), - ("d25652e4-d2d2-49f7-929a-671ccda580c6",), - ( - "arn:aws:kms:us-east-1:012345678912:key/d25652e4-d2d2-49f7-929a-671ccda580c6", - ), - ) -) -@mock_kms -def test_invalid_key_ids(key_id): - client = boto3.client("kms", region_name="us-east-1") - - with assert_raises(client.exceptions.NotFoundException): - client.generate_data_key(KeyId=key_id, NumberOfBytes=5) - - @mock_kms_deprecated def test__assert_default_policy(): from moto.kms.responses import _assert_default_policy @@ -804,431 +680,3 @@ def test__assert_default_policy(): _assert_default_policy.when.called_with("default").should_not.throw( MotoNotFoundException ) - - -@parameterized(PLAINTEXT_VECTORS) -@mock_kms -def test_kms_encrypt_boto3(plaintext): - client = boto3.client("kms", region_name="us-east-1") - key = client.create_key(Description="key") - response = client.encrypt(KeyId=key["KeyMetadata"]["KeyId"], Plaintext=plaintext) - - response = client.decrypt(CiphertextBlob=response["CiphertextBlob"]) - response["Plaintext"].should.equal(_get_encoded_value(plaintext)) - - -@mock_kms -def 
test_disable_key(): - client = boto3.client("kms", region_name="us-east-1") - key = client.create_key(Description="disable-key") - client.disable_key(KeyId=key["KeyMetadata"]["KeyId"]) - - result = client.describe_key(KeyId=key["KeyMetadata"]["KeyId"]) - assert result["KeyMetadata"]["Enabled"] == False - assert result["KeyMetadata"]["KeyState"] == "Disabled" - - -@mock_kms -def test_enable_key(): - client = boto3.client("kms", region_name="us-east-1") - key = client.create_key(Description="enable-key") - client.disable_key(KeyId=key["KeyMetadata"]["KeyId"]) - client.enable_key(KeyId=key["KeyMetadata"]["KeyId"]) - - result = client.describe_key(KeyId=key["KeyMetadata"]["KeyId"]) - assert result["KeyMetadata"]["Enabled"] == True - assert result["KeyMetadata"]["KeyState"] == "Enabled" - - -@mock_kms -def test_schedule_key_deletion(): - client = boto3.client("kms", region_name="us-east-1") - key = client.create_key(Description="schedule-key-deletion") - if os.environ.get("TEST_SERVER_MODE", "false").lower() == "false": - with freeze_time("2015-01-01 12:00:00"): - response = client.schedule_key_deletion(KeyId=key["KeyMetadata"]["KeyId"]) - assert response["KeyId"] == key["KeyMetadata"]["KeyId"] - assert response["DeletionDate"] == datetime( - 2015, 1, 31, 12, 0, tzinfo=tzutc() - ) - else: - # Can't manipulate time in server mode - response = client.schedule_key_deletion(KeyId=key["KeyMetadata"]["KeyId"]) - assert response["KeyId"] == key["KeyMetadata"]["KeyId"] - - result = client.describe_key(KeyId=key["KeyMetadata"]["KeyId"]) - assert result["KeyMetadata"]["Enabled"] == False - assert result["KeyMetadata"]["KeyState"] == "PendingDeletion" - assert "DeletionDate" in result["KeyMetadata"] - - -@mock_kms -def test_schedule_key_deletion_custom(): - client = boto3.client("kms", region_name="us-east-1") - key = client.create_key(Description="schedule-key-deletion") - if os.environ.get("TEST_SERVER_MODE", "false").lower() == "false": - with freeze_time("2015-01-01 
12:00:00"): - response = client.schedule_key_deletion( - KeyId=key["KeyMetadata"]["KeyId"], PendingWindowInDays=7 - ) - assert response["KeyId"] == key["KeyMetadata"]["KeyId"] - assert response["DeletionDate"] == datetime( - 2015, 1, 8, 12, 0, tzinfo=tzutc() - ) - else: - # Can't manipulate time in server mode - response = client.schedule_key_deletion( - KeyId=key["KeyMetadata"]["KeyId"], PendingWindowInDays=7 - ) - assert response["KeyId"] == key["KeyMetadata"]["KeyId"] - - result = client.describe_key(KeyId=key["KeyMetadata"]["KeyId"]) - assert result["KeyMetadata"]["Enabled"] == False - assert result["KeyMetadata"]["KeyState"] == "PendingDeletion" - assert "DeletionDate" in result["KeyMetadata"] - - -@mock_kms -def test_cancel_key_deletion(): - client = boto3.client("kms", region_name="us-east-1") - key = client.create_key(Description="cancel-key-deletion") - client.schedule_key_deletion(KeyId=key["KeyMetadata"]["KeyId"]) - response = client.cancel_key_deletion(KeyId=key["KeyMetadata"]["KeyId"]) - assert response["KeyId"] == key["KeyMetadata"]["KeyId"] - - result = client.describe_key(KeyId=key["KeyMetadata"]["KeyId"]) - assert result["KeyMetadata"]["Enabled"] == False - assert result["KeyMetadata"]["KeyState"] == "Disabled" - assert "DeletionDate" not in result["KeyMetadata"] - - -@mock_kms -def test_update_key_description(): - client = boto3.client("kms", region_name="us-east-1") - key = client.create_key(Description="old_description") - key_id = key["KeyMetadata"]["KeyId"] - - result = client.update_key_description(KeyId=key_id, Description="new_description") - assert "ResponseMetadata" in result - -@mock_kms -def test_key_tagging_happy(): - client = boto3.client("kms", region_name="us-east-1") - key = client.create_key(Description="test-key-tagging") - key_id = key["KeyMetadata"]["KeyId"] - - tags = [{"TagKey": "key1", "TagValue": "value1"}, {"TagKey": "key2", "TagValue": "value2"}] - client.tag_resource(KeyId=key_id, Tags=tags) - - result = 
client.list_resource_tags(KeyId=key_id) - actual = result.get("Tags", []) - assert tags == actual - - client.untag_resource(KeyId=key_id, TagKeys=["key1"]) - - actual = client.list_resource_tags(KeyId=key_id).get("Tags", []) - expected = [{"TagKey": "key2", "TagValue": "value2"}] - assert expected == actual - -@mock_kms -def test_key_tagging_sad(): - b = KmsBackend() - - try: - b.tag_resource('unknown', []) - raise 'tag_resource should fail if KeyId is not known' - except JsonRESTError: - pass - - try: - b.untag_resource('unknown', []) - raise 'untag_resource should fail if KeyId is not known' - except JsonRESTError: - pass - - try: - b.list_resource_tags('unknown') - raise 'list_resource_tags should fail if KeyId is not known' - except JsonRESTError: - pass - - -@parameterized( - ( - (dict(KeySpec="AES_256"), 32), - (dict(KeySpec="AES_128"), 16), - (dict(NumberOfBytes=64), 64), - (dict(NumberOfBytes=1), 1), - (dict(NumberOfBytes=1024), 1024), - ) -) -@mock_kms -def test_generate_data_key_sizes(kwargs, expected_key_length): - client = boto3.client("kms", region_name="us-east-1") - key = client.create_key(Description="generate-data-key-size") - - response = client.generate_data_key(KeyId=key["KeyMetadata"]["KeyId"], **kwargs) - - assert len(response["Plaintext"]) == expected_key_length - - -@mock_kms -def test_generate_data_key_decrypt(): - client = boto3.client("kms", region_name="us-east-1") - key = client.create_key(Description="generate-data-key-decrypt") - - resp1 = client.generate_data_key( - KeyId=key["KeyMetadata"]["KeyId"], KeySpec="AES_256" - ) - resp2 = client.decrypt(CiphertextBlob=resp1["CiphertextBlob"]) - - assert resp1["Plaintext"] == resp2["Plaintext"] - - -@parameterized( - ( - (dict(KeySpec="AES_257"),), - (dict(KeySpec="AES_128", NumberOfBytes=16),), - (dict(NumberOfBytes=2048),), - (dict(NumberOfBytes=0),), - (dict(),), - ) -) -@mock_kms -def test_generate_data_key_invalid_size_params(kwargs): - client = boto3.client("kms", 
region_name="us-east-1") - key = client.create_key(Description="generate-data-key-size") - - with assert_raises( - (botocore.exceptions.ClientError, botocore.exceptions.ParamValidationError) - ) as err: - client.generate_data_key(KeyId=key["KeyMetadata"]["KeyId"], **kwargs) - - -@parameterized( - ( - ("alias/DoesNotExist",), - ("arn:aws:kms:us-east-1:012345678912:alias/DoesNotExist",), - ("d25652e4-d2d2-49f7-929a-671ccda580c6",), - ( - "arn:aws:kms:us-east-1:012345678912:key/d25652e4-d2d2-49f7-929a-671ccda580c6", - ), - ) -) -@mock_kms -def test_generate_data_key_invalid_key(key_id): - client = boto3.client("kms", region_name="us-east-1") - - with assert_raises(client.exceptions.NotFoundException): - client.generate_data_key(KeyId=key_id, KeySpec="AES_256") - - -@parameterized( - ( - ("alias/DoesExist", False), - ("arn:aws:kms:us-east-1:012345678912:alias/DoesExist", False), - ("", True), - ("arn:aws:kms:us-east-1:012345678912:key/", True), - ) -) -@mock_kms -def test_generate_data_key_all_valid_key_ids(prefix, append_key_id): - client = boto3.client("kms", region_name="us-east-1") - key = client.create_key() - key_id = key["KeyMetadata"]["KeyId"] - client.create_alias(AliasName="alias/DoesExist", TargetKeyId=key_id) - - target_id = prefix - if append_key_id: - target_id += key_id - - client.generate_data_key(KeyId=key_id, NumberOfBytes=32) - - -@mock_kms -def test_generate_data_key_without_plaintext_decrypt(): - client = boto3.client("kms", region_name="us-east-1") - key = client.create_key(Description="generate-data-key-decrypt") - - resp1 = client.generate_data_key_without_plaintext( - KeyId=key["KeyMetadata"]["KeyId"], KeySpec="AES_256" - ) - - assert "Plaintext" not in resp1 - - -@parameterized(PLAINTEXT_VECTORS) -@mock_kms -def test_re_encrypt_decrypt(plaintext): - client = boto3.client("kms", region_name="us-west-2") - - key_1 = client.create_key(Description="key 1") - key_1_id = key_1["KeyMetadata"]["KeyId"] - key_1_arn = key_1["KeyMetadata"]["Arn"] - key_2 
= client.create_key(Description="key 2") - key_2_id = key_2["KeyMetadata"]["KeyId"] - key_2_arn = key_2["KeyMetadata"]["Arn"] - - encrypt_response = client.encrypt( - KeyId=key_1_id, Plaintext=plaintext, EncryptionContext={"encryption": "context"} - ) - - re_encrypt_response = client.re_encrypt( - CiphertextBlob=encrypt_response["CiphertextBlob"], - SourceEncryptionContext={"encryption": "context"}, - DestinationKeyId=key_2_id, - DestinationEncryptionContext={"another": "context"}, - ) - - # CiphertextBlob must NOT be base64-encoded - with assert_raises(Exception): - base64.b64decode(re_encrypt_response["CiphertextBlob"], validate=True) - - re_encrypt_response["SourceKeyId"].should.equal(key_1_arn) - re_encrypt_response["KeyId"].should.equal(key_2_arn) - - decrypt_response_1 = client.decrypt( - CiphertextBlob=encrypt_response["CiphertextBlob"], - EncryptionContext={"encryption": "context"}, - ) - decrypt_response_1["Plaintext"].should.equal(_get_encoded_value(plaintext)) - decrypt_response_1["KeyId"].should.equal(key_1_arn) - - decrypt_response_2 = client.decrypt( - CiphertextBlob=re_encrypt_response["CiphertextBlob"], - EncryptionContext={"another": "context"}, - ) - decrypt_response_2["Plaintext"].should.equal(_get_encoded_value(plaintext)) - decrypt_response_2["KeyId"].should.equal(key_2_arn) - - decrypt_response_1["Plaintext"].should.equal(decrypt_response_2["Plaintext"]) - - -@mock_kms -def test_re_encrypt_to_invalid_destination(): - client = boto3.client("kms", region_name="us-west-2") - - key = client.create_key(Description="key 1") - key_id = key["KeyMetadata"]["KeyId"] - - encrypt_response = client.encrypt(KeyId=key_id, Plaintext=b"some plaintext") - - with assert_raises(client.exceptions.NotFoundException): - client.re_encrypt( - CiphertextBlob=encrypt_response["CiphertextBlob"], - DestinationKeyId="alias/DoesNotExist", - ) - - -@parameterized(((12,), (44,), (91,), (1,), (1024,))) -@mock_kms -def test_generate_random(number_of_bytes): - client = 
boto3.client("kms", region_name="us-west-2") - - response = client.generate_random(NumberOfBytes=number_of_bytes) - - response["Plaintext"].should.be.a(bytes) - len(response["Plaintext"]).should.equal(number_of_bytes) - - -@parameterized( - ( - (2048, botocore.exceptions.ClientError), - (1025, botocore.exceptions.ClientError), - (0, botocore.exceptions.ParamValidationError), - (-1, botocore.exceptions.ParamValidationError), - (-1024, botocore.exceptions.ParamValidationError), - ) -) -@mock_kms -def test_generate_random_invalid_number_of_bytes(number_of_bytes, error_type): - client = boto3.client("kms", region_name="us-west-2") - - with assert_raises(error_type): - client.generate_random(NumberOfBytes=number_of_bytes) - - -@mock_kms -def test_enable_key_rotation_key_not_found(): - client = boto3.client("kms", region_name="us-east-1") - - with assert_raises(client.exceptions.NotFoundException): - client.enable_key_rotation(KeyId="12366f9b-1230-123d-123e-123e6ae60c02") - - -@mock_kms -def test_disable_key_rotation_key_not_found(): - client = boto3.client("kms", region_name="us-east-1") - - with assert_raises(client.exceptions.NotFoundException): - client.disable_key_rotation(KeyId="12366f9b-1230-123d-123e-123e6ae60c02") - - -@mock_kms -def test_enable_key_key_not_found(): - client = boto3.client("kms", region_name="us-east-1") - - with assert_raises(client.exceptions.NotFoundException): - client.enable_key(KeyId="12366f9b-1230-123d-123e-123e6ae60c02") - - -@mock_kms -def test_disable_key_key_not_found(): - client = boto3.client("kms", region_name="us-east-1") - - with assert_raises(client.exceptions.NotFoundException): - client.disable_key(KeyId="12366f9b-1230-123d-123e-123e6ae60c02") - - -@mock_kms -def test_cancel_key_deletion_key_not_found(): - client = boto3.client("kms", region_name="us-east-1") - - with assert_raises(client.exceptions.NotFoundException): - client.cancel_key_deletion(KeyId="12366f9b-1230-123d-123e-123e6ae60c02") - - -@mock_kms -def 
test_schedule_key_deletion_key_not_found(): - client = boto3.client("kms", region_name="us-east-1") - - with assert_raises(client.exceptions.NotFoundException): - client.schedule_key_deletion(KeyId="12366f9b-1230-123d-123e-123e6ae60c02") - - -@mock_kms -def test_get_key_rotation_status_key_not_found(): - client = boto3.client("kms", region_name="us-east-1") - - with assert_raises(client.exceptions.NotFoundException): - client.get_key_rotation_status(KeyId="12366f9b-1230-123d-123e-123e6ae60c02") - - -@mock_kms -def test_get_key_policy_key_not_found(): - client = boto3.client("kms", region_name="us-east-1") - - with assert_raises(client.exceptions.NotFoundException): - client.get_key_policy( - KeyId="12366f9b-1230-123d-123e-123e6ae60c02", PolicyName="default" - ) - - -@mock_kms -def test_list_key_policies_key_not_found(): - client = boto3.client("kms", region_name="us-east-1") - - with assert_raises(client.exceptions.NotFoundException): - client.list_key_policies(KeyId="12366f9b-1230-123d-123e-123e6ae60c02") - - -@mock_kms -def test_put_key_policy_key_not_found(): - client = boto3.client("kms", region_name="us-east-1") - - with assert_raises(client.exceptions.NotFoundException): - client.put_key_policy( - KeyId="00000000-0000-0000-0000-000000000000", - PolicyName="default", - Policy="new policy", - ) diff --git a/tests/test_kms/test_kms_boto3.py b/tests/test_kms/test_kms_boto3.py new file mode 100644 index 000000000..c125c0557 --- /dev/null +++ b/tests/test_kms/test_kms_boto3.py @@ -0,0 +1,638 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +from datetime import datetime +from dateutil.tz import tzutc +import base64 +import os + +import boto3 +import botocore.exceptions +import six +import sure # noqa +from freezegun import freeze_time +from nose.tools import assert_raises +from parameterized import parameterized + +from moto import mock_kms + +PLAINTEXT_VECTORS = ( + (b"some encodeable plaintext",), + (b"some unencodeable plaintext 
\xec\x8a\xcf\xb6r\xe9\xb5\xeb\xff\xa23\x16",), + ("some unicode characters ø˚∆øˆˆ∆ßçøˆˆçßøˆ¨¥",), +) + + +def _get_encoded_value(plaintext): + if isinstance(plaintext, six.binary_type): + return plaintext + + return plaintext.encode("utf-8") + + +@mock_kms +def test_create_key(): + conn = boto3.client("kms", region_name="us-east-1") + key = conn.create_key( + Policy="my policy", + Description="my key", + KeyUsage="ENCRYPT_DECRYPT", + Tags=[{"TagKey": "project", "TagValue": "moto"}], + ) + + key["KeyMetadata"]["Arn"].should.equal( + "arn:aws:kms:us-east-1:123456789012:key/{}".format(key["KeyMetadata"]["KeyId"]) + ) + key["KeyMetadata"]["AWSAccountId"].should.equal("123456789012") + key["KeyMetadata"]["CreationDate"].should.be.a(datetime) + key["KeyMetadata"]["CustomerMasterKeySpec"].should.equal("SYMMETRIC_DEFAULT") + key["KeyMetadata"]["Description"].should.equal("my key") + key["KeyMetadata"]["Enabled"].should.be.ok + key["KeyMetadata"]["EncryptionAlgorithms"].should.equal(["SYMMETRIC_DEFAULT"]) + key["KeyMetadata"]["KeyId"].should_not.be.empty + key["KeyMetadata"]["KeyManager"].should.equal("CUSTOMER") + key["KeyMetadata"]["KeyState"].should.equal("Enabled") + key["KeyMetadata"]["KeyUsage"].should.equal("ENCRYPT_DECRYPT") + key["KeyMetadata"]["Origin"].should.equal("AWS_KMS") + key["KeyMetadata"].should_not.have.key("SigningAlgorithms") + + key = conn.create_key(KeyUsage="ENCRYPT_DECRYPT", CustomerMasterKeySpec="RSA_2048",) + + sorted(key["KeyMetadata"]["EncryptionAlgorithms"]).should.equal( + ["RSAES_OAEP_SHA_1", "RSAES_OAEP_SHA_256"] + ) + key["KeyMetadata"].should_not.have.key("SigningAlgorithms") + + key = conn.create_key(KeyUsage="SIGN_VERIFY", CustomerMasterKeySpec="RSA_2048",) + + key["KeyMetadata"].should_not.have.key("EncryptionAlgorithms") + sorted(key["KeyMetadata"]["SigningAlgorithms"]).should.equal( + [ + "RSASSA_PKCS1_V1_5_SHA_256", + "RSASSA_PKCS1_V1_5_SHA_384", + "RSASSA_PKCS1_V1_5_SHA_512", + "RSASSA_PSS_SHA_256", + "RSASSA_PSS_SHA_384", + 
"RSASSA_PSS_SHA_512", + ] + ) + + key = conn.create_key( + KeyUsage="SIGN_VERIFY", CustomerMasterKeySpec="ECC_SECG_P256K1", + ) + + key["KeyMetadata"].should_not.have.key("EncryptionAlgorithms") + key["KeyMetadata"]["SigningAlgorithms"].should.equal(["ECDSA_SHA_256"]) + + key = conn.create_key( + KeyUsage="SIGN_VERIFY", CustomerMasterKeySpec="ECC_NIST_P384", + ) + + key["KeyMetadata"].should_not.have.key("EncryptionAlgorithms") + key["KeyMetadata"]["SigningAlgorithms"].should.equal(["ECDSA_SHA_384"]) + + key = conn.create_key( + KeyUsage="SIGN_VERIFY", CustomerMasterKeySpec="ECC_NIST_P521", + ) + + key["KeyMetadata"].should_not.have.key("EncryptionAlgorithms") + key["KeyMetadata"]["SigningAlgorithms"].should.equal(["ECDSA_SHA_512"]) + + +@mock_kms +def test_describe_key(): + client = boto3.client("kms", region_name="us-east-1") + response = client.create_key(Description="my key", KeyUsage="ENCRYPT_DECRYPT",) + key_id = response["KeyMetadata"]["KeyId"] + + response = client.describe_key(KeyId=key_id) + + response["KeyMetadata"]["AWSAccountId"].should.equal("123456789012") + response["KeyMetadata"]["CreationDate"].should.be.a(datetime) + response["KeyMetadata"]["CustomerMasterKeySpec"].should.equal("SYMMETRIC_DEFAULT") + response["KeyMetadata"]["Description"].should.equal("my key") + response["KeyMetadata"]["Enabled"].should.be.ok + response["KeyMetadata"]["EncryptionAlgorithms"].should.equal(["SYMMETRIC_DEFAULT"]) + response["KeyMetadata"]["KeyId"].should_not.be.empty + response["KeyMetadata"]["KeyManager"].should.equal("CUSTOMER") + response["KeyMetadata"]["KeyState"].should.equal("Enabled") + response["KeyMetadata"]["KeyUsage"].should.equal("ENCRYPT_DECRYPT") + response["KeyMetadata"]["Origin"].should.equal("AWS_KMS") + response["KeyMetadata"].should_not.have.key("SigningAlgorithms") + + +@parameterized( + ( + ("alias/does-not-exist",), + ("arn:aws:kms:us-east-1:012345678912:alias/does-not-exist",), + ("invalid",), + ) +) +@mock_kms +def 
test_describe_key_via_alias_invalid_alias(key_id): + client = boto3.client("kms", region_name="us-east-1") + client.create_key(Description="key") + + with assert_raises(client.exceptions.NotFoundException): + client.describe_key(KeyId=key_id) + + +@mock_kms +def test_generate_data_key(): + kms = boto3.client("kms", region_name="us-west-2") + + key = kms.create_key() + key_id = key["KeyMetadata"]["KeyId"] + key_arn = key["KeyMetadata"]["Arn"] + + response = kms.generate_data_key(KeyId=key_id, NumberOfBytes=32) + + # CiphertextBlob must NOT be base64-encoded + with assert_raises(Exception): + base64.b64decode(response["CiphertextBlob"], validate=True) + # Plaintext must NOT be base64-encoded + with assert_raises(Exception): + base64.b64decode(response["Plaintext"], validate=True) + + response["KeyId"].should.equal(key_arn) + + +@parameterized(PLAINTEXT_VECTORS) +@mock_kms +def test_encrypt(plaintext): + client = boto3.client("kms", region_name="us-west-2") + + key = client.create_key(Description="key") + key_id = key["KeyMetadata"]["KeyId"] + key_arn = key["KeyMetadata"]["Arn"] + + response = client.encrypt(KeyId=key_id, Plaintext=plaintext) + response["CiphertextBlob"].should_not.equal(plaintext) + + # CiphertextBlob must NOT be base64-encoded + with assert_raises(Exception): + base64.b64decode(response["CiphertextBlob"], validate=True) + + response["KeyId"].should.equal(key_arn) + + +@parameterized(PLAINTEXT_VECTORS) +@mock_kms +def test_decrypt(plaintext): + client = boto3.client("kms", region_name="us-west-2") + + key = client.create_key(Description="key") + key_id = key["KeyMetadata"]["KeyId"] + key_arn = key["KeyMetadata"]["Arn"] + + encrypt_response = client.encrypt(KeyId=key_id, Plaintext=plaintext) + + client.create_key(Description="key") + # CiphertextBlob must NOT be base64-encoded + with assert_raises(Exception): + base64.b64decode(encrypt_response["CiphertextBlob"], validate=True) + + decrypt_response = 
client.decrypt(CiphertextBlob=encrypt_response["CiphertextBlob"]) + + # Plaintext must NOT be base64-encoded + with assert_raises(Exception): + base64.b64decode(decrypt_response["Plaintext"], validate=True) + + decrypt_response["Plaintext"].should.equal(_get_encoded_value(plaintext)) + decrypt_response["KeyId"].should.equal(key_arn) + + +@parameterized( + ( + ("not-a-uuid",), + ("alias/DoesNotExist",), + ("arn:aws:kms:us-east-1:012345678912:alias/DoesNotExist",), + ("d25652e4-d2d2-49f7-929a-671ccda580c6",), + ( + "arn:aws:kms:us-east-1:012345678912:key/d25652e4-d2d2-49f7-929a-671ccda580c6", + ), + ) +) +@mock_kms +def test_invalid_key_ids(key_id): + client = boto3.client("kms", region_name="us-east-1") + + with assert_raises(client.exceptions.NotFoundException): + client.generate_data_key(KeyId=key_id, NumberOfBytes=5) + + +@parameterized(PLAINTEXT_VECTORS) +@mock_kms +def test_kms_encrypt(plaintext): + client = boto3.client("kms", region_name="us-east-1") + key = client.create_key(Description="key") + response = client.encrypt(KeyId=key["KeyMetadata"]["KeyId"], Plaintext=plaintext) + + response = client.decrypt(CiphertextBlob=response["CiphertextBlob"]) + response["Plaintext"].should.equal(_get_encoded_value(plaintext)) + + +@mock_kms +def test_disable_key(): + client = boto3.client("kms", region_name="us-east-1") + key = client.create_key(Description="disable-key") + client.disable_key(KeyId=key["KeyMetadata"]["KeyId"]) + + result = client.describe_key(KeyId=key["KeyMetadata"]["KeyId"]) + assert result["KeyMetadata"]["Enabled"] == False + assert result["KeyMetadata"]["KeyState"] == "Disabled" + + +@mock_kms +def test_enable_key(): + client = boto3.client("kms", region_name="us-east-1") + key = client.create_key(Description="enable-key") + client.disable_key(KeyId=key["KeyMetadata"]["KeyId"]) + client.enable_key(KeyId=key["KeyMetadata"]["KeyId"]) + + result = client.describe_key(KeyId=key["KeyMetadata"]["KeyId"]) + assert result["KeyMetadata"]["Enabled"] == True + 
assert result["KeyMetadata"]["KeyState"] == "Enabled" + + +@mock_kms +def test_schedule_key_deletion(): + client = boto3.client("kms", region_name="us-east-1") + key = client.create_key(Description="schedule-key-deletion") + if os.environ.get("TEST_SERVER_MODE", "false").lower() == "false": + with freeze_time("2015-01-01 12:00:00"): + response = client.schedule_key_deletion(KeyId=key["KeyMetadata"]["KeyId"]) + assert response["KeyId"] == key["KeyMetadata"]["KeyId"] + assert response["DeletionDate"] == datetime( + 2015, 1, 31, 12, 0, tzinfo=tzutc() + ) + else: + # Can't manipulate time in server mode + response = client.schedule_key_deletion(KeyId=key["KeyMetadata"]["KeyId"]) + assert response["KeyId"] == key["KeyMetadata"]["KeyId"] + + result = client.describe_key(KeyId=key["KeyMetadata"]["KeyId"]) + assert result["KeyMetadata"]["Enabled"] == False + assert result["KeyMetadata"]["KeyState"] == "PendingDeletion" + assert "DeletionDate" in result["KeyMetadata"] + + +@mock_kms +def test_schedule_key_deletion_custom(): + client = boto3.client("kms", region_name="us-east-1") + key = client.create_key(Description="schedule-key-deletion") + if os.environ.get("TEST_SERVER_MODE", "false").lower() == "false": + with freeze_time("2015-01-01 12:00:00"): + response = client.schedule_key_deletion( + KeyId=key["KeyMetadata"]["KeyId"], PendingWindowInDays=7 + ) + assert response["KeyId"] == key["KeyMetadata"]["KeyId"] + assert response["DeletionDate"] == datetime( + 2015, 1, 8, 12, 0, tzinfo=tzutc() + ) + else: + # Can't manipulate time in server mode + response = client.schedule_key_deletion( + KeyId=key["KeyMetadata"]["KeyId"], PendingWindowInDays=7 + ) + assert response["KeyId"] == key["KeyMetadata"]["KeyId"] + + result = client.describe_key(KeyId=key["KeyMetadata"]["KeyId"]) + assert result["KeyMetadata"]["Enabled"] == False + assert result["KeyMetadata"]["KeyState"] == "PendingDeletion" + assert "DeletionDate" in result["KeyMetadata"] + + +@mock_kms +def 
test_cancel_key_deletion(): + client = boto3.client("kms", region_name="us-east-1") + key = client.create_key(Description="cancel-key-deletion") + client.schedule_key_deletion(KeyId=key["KeyMetadata"]["KeyId"]) + response = client.cancel_key_deletion(KeyId=key["KeyMetadata"]["KeyId"]) + assert response["KeyId"] == key["KeyMetadata"]["KeyId"] + + result = client.describe_key(KeyId=key["KeyMetadata"]["KeyId"]) + assert result["KeyMetadata"]["Enabled"] == False + assert result["KeyMetadata"]["KeyState"] == "Disabled" + assert "DeletionDate" not in result["KeyMetadata"] + + +@mock_kms +def test_update_key_description(): + client = boto3.client("kms", region_name="us-east-1") + key = client.create_key(Description="old_description") + key_id = key["KeyMetadata"]["KeyId"] + + result = client.update_key_description(KeyId=key_id, Description="new_description") + assert "ResponseMetadata" in result + + +@mock_kms +def test_tag_resource(): + client = boto3.client("kms", region_name="us-east-1") + key = client.create_key(Description="cancel-key-deletion") + response = client.schedule_key_deletion(KeyId=key["KeyMetadata"]["KeyId"]) + + keyid = response["KeyId"] + response = client.tag_resource( + KeyId=keyid, Tags=[{"TagKey": "string", "TagValue": "string"}] + ) + + # Shouldn't have any data, just header + assert len(response.keys()) == 1 + + +@mock_kms +def test_list_resource_tags(): + client = boto3.client("kms", region_name="us-east-1") + key = client.create_key(Description="cancel-key-deletion") + response = client.schedule_key_deletion(KeyId=key["KeyMetadata"]["KeyId"]) + + keyid = response["KeyId"] + response = client.tag_resource( + KeyId=keyid, Tags=[{"TagKey": "string", "TagValue": "string"}] + ) + + response = client.list_resource_tags(KeyId=keyid) + assert response["Tags"][0]["TagKey"] == "string" + assert response["Tags"][0]["TagValue"] == "string" + + +@parameterized( + ( + (dict(KeySpec="AES_256"), 32), + (dict(KeySpec="AES_128"), 16), + (dict(NumberOfBytes=64), 
64), + (dict(NumberOfBytes=1), 1), + (dict(NumberOfBytes=1024), 1024), + ) +) +@mock_kms +def test_generate_data_key_sizes(kwargs, expected_key_length): + client = boto3.client("kms", region_name="us-east-1") + key = client.create_key(Description="generate-data-key-size") + + response = client.generate_data_key(KeyId=key["KeyMetadata"]["KeyId"], **kwargs) + + assert len(response["Plaintext"]) == expected_key_length + + +@mock_kms +def test_generate_data_key_decrypt(): + client = boto3.client("kms", region_name="us-east-1") + key = client.create_key(Description="generate-data-key-decrypt") + + resp1 = client.generate_data_key( + KeyId=key["KeyMetadata"]["KeyId"], KeySpec="AES_256" + ) + resp2 = client.decrypt(CiphertextBlob=resp1["CiphertextBlob"]) + + assert resp1["Plaintext"] == resp2["Plaintext"] + + +@parameterized( + ( + (dict(KeySpec="AES_257"),), + (dict(KeySpec="AES_128", NumberOfBytes=16),), + (dict(NumberOfBytes=2048),), + (dict(NumberOfBytes=0),), + (dict(),), + ) +) +@mock_kms +def test_generate_data_key_invalid_size_params(kwargs): + client = boto3.client("kms", region_name="us-east-1") + key = client.create_key(Description="generate-data-key-size") + + with assert_raises( + (botocore.exceptions.ClientError, botocore.exceptions.ParamValidationError) + ) as err: + client.generate_data_key(KeyId=key["KeyMetadata"]["KeyId"], **kwargs) + + +@parameterized( + ( + ("alias/DoesNotExist",), + ("arn:aws:kms:us-east-1:012345678912:alias/DoesNotExist",), + ("d25652e4-d2d2-49f7-929a-671ccda580c6",), + ( + "arn:aws:kms:us-east-1:012345678912:key/d25652e4-d2d2-49f7-929a-671ccda580c6", + ), + ) +) +@mock_kms +def test_generate_data_key_invalid_key(key_id): + client = boto3.client("kms", region_name="us-east-1") + + with assert_raises(client.exceptions.NotFoundException): + client.generate_data_key(KeyId=key_id, KeySpec="AES_256") + + +@parameterized( + ( + ("alias/DoesExist", False), + ("arn:aws:kms:us-east-1:012345678912:alias/DoesExist", False), + ("", True), + 
("arn:aws:kms:us-east-1:012345678912:key/", True), + ) +) +@mock_kms +def test_generate_data_key_all_valid_key_ids(prefix, append_key_id): + client = boto3.client("kms", region_name="us-east-1") + key = client.create_key() + key_id = key["KeyMetadata"]["KeyId"] + client.create_alias(AliasName="alias/DoesExist", TargetKeyId=key_id) + + target_id = prefix + if append_key_id: + target_id += key_id + + client.generate_data_key(KeyId=key_id, NumberOfBytes=32) + + +@mock_kms +def test_generate_data_key_without_plaintext_decrypt(): + client = boto3.client("kms", region_name="us-east-1") + key = client.create_key(Description="generate-data-key-decrypt") + + resp1 = client.generate_data_key_without_plaintext( + KeyId=key["KeyMetadata"]["KeyId"], KeySpec="AES_256" + ) + + assert "Plaintext" not in resp1 + + +@parameterized(PLAINTEXT_VECTORS) +@mock_kms +def test_re_encrypt_decrypt(plaintext): + client = boto3.client("kms", region_name="us-west-2") + + key_1 = client.create_key(Description="key 1") + key_1_id = key_1["KeyMetadata"]["KeyId"] + key_1_arn = key_1["KeyMetadata"]["Arn"] + key_2 = client.create_key(Description="key 2") + key_2_id = key_2["KeyMetadata"]["KeyId"] + key_2_arn = key_2["KeyMetadata"]["Arn"] + + encrypt_response = client.encrypt( + KeyId=key_1_id, Plaintext=plaintext, EncryptionContext={"encryption": "context"} + ) + + re_encrypt_response = client.re_encrypt( + CiphertextBlob=encrypt_response["CiphertextBlob"], + SourceEncryptionContext={"encryption": "context"}, + DestinationKeyId=key_2_id, + DestinationEncryptionContext={"another": "context"}, + ) + + # CiphertextBlob must NOT be base64-encoded + with assert_raises(Exception): + base64.b64decode(re_encrypt_response["CiphertextBlob"], validate=True) + + re_encrypt_response["SourceKeyId"].should.equal(key_1_arn) + re_encrypt_response["KeyId"].should.equal(key_2_arn) + + decrypt_response_1 = client.decrypt( + CiphertextBlob=encrypt_response["CiphertextBlob"], + EncryptionContext={"encryption": "context"}, 
+ ) + decrypt_response_1["Plaintext"].should.equal(_get_encoded_value(plaintext)) + decrypt_response_1["KeyId"].should.equal(key_1_arn) + + decrypt_response_2 = client.decrypt( + CiphertextBlob=re_encrypt_response["CiphertextBlob"], + EncryptionContext={"another": "context"}, + ) + decrypt_response_2["Plaintext"].should.equal(_get_encoded_value(plaintext)) + decrypt_response_2["KeyId"].should.equal(key_2_arn) + + decrypt_response_1["Plaintext"].should.equal(decrypt_response_2["Plaintext"]) + + +@mock_kms +def test_re_encrypt_to_invalid_destination(): + client = boto3.client("kms", region_name="us-west-2") + + key = client.create_key(Description="key 1") + key_id = key["KeyMetadata"]["KeyId"] + + encrypt_response = client.encrypt(KeyId=key_id, Plaintext=b"some plaintext") + + with assert_raises(client.exceptions.NotFoundException): + client.re_encrypt( + CiphertextBlob=encrypt_response["CiphertextBlob"], + DestinationKeyId="alias/DoesNotExist", + ) + + +@parameterized(((12,), (44,), (91,), (1,), (1024,))) +@mock_kms +def test_generate_random(number_of_bytes): + client = boto3.client("kms", region_name="us-west-2") + + response = client.generate_random(NumberOfBytes=number_of_bytes) + + response["Plaintext"].should.be.a(bytes) + len(response["Plaintext"]).should.equal(number_of_bytes) + + +@parameterized( + ( + (2048, botocore.exceptions.ClientError), + (1025, botocore.exceptions.ClientError), + (0, botocore.exceptions.ParamValidationError), + (-1, botocore.exceptions.ParamValidationError), + (-1024, botocore.exceptions.ParamValidationError), + ) +) +@mock_kms +def test_generate_random_invalid_number_of_bytes(number_of_bytes, error_type): + client = boto3.client("kms", region_name="us-west-2") + + with assert_raises(error_type): + client.generate_random(NumberOfBytes=number_of_bytes) + + +@mock_kms +def test_enable_key_rotation_key_not_found(): + client = boto3.client("kms", region_name="us-east-1") + + with assert_raises(client.exceptions.NotFoundException): + 
client.enable_key_rotation(KeyId="12366f9b-1230-123d-123e-123e6ae60c02") + + +@mock_kms +def test_disable_key_rotation_key_not_found(): + client = boto3.client("kms", region_name="us-east-1") + + with assert_raises(client.exceptions.NotFoundException): + client.disable_key_rotation(KeyId="12366f9b-1230-123d-123e-123e6ae60c02") + + +@mock_kms +def test_enable_key_key_not_found(): + client = boto3.client("kms", region_name="us-east-1") + + with assert_raises(client.exceptions.NotFoundException): + client.enable_key(KeyId="12366f9b-1230-123d-123e-123e6ae60c02") + + +@mock_kms +def test_disable_key_key_not_found(): + client = boto3.client("kms", region_name="us-east-1") + + with assert_raises(client.exceptions.NotFoundException): + client.disable_key(KeyId="12366f9b-1230-123d-123e-123e6ae60c02") + + +@mock_kms +def test_cancel_key_deletion_key_not_found(): + client = boto3.client("kms", region_name="us-east-1") + + with assert_raises(client.exceptions.NotFoundException): + client.cancel_key_deletion(KeyId="12366f9b-1230-123d-123e-123e6ae60c02") + + +@mock_kms +def test_schedule_key_deletion_key_not_found(): + client = boto3.client("kms", region_name="us-east-1") + + with assert_raises(client.exceptions.NotFoundException): + client.schedule_key_deletion(KeyId="12366f9b-1230-123d-123e-123e6ae60c02") + + +@mock_kms +def test_get_key_rotation_status_key_not_found(): + client = boto3.client("kms", region_name="us-east-1") + + with assert_raises(client.exceptions.NotFoundException): + client.get_key_rotation_status(KeyId="12366f9b-1230-123d-123e-123e6ae60c02") + + +@mock_kms +def test_get_key_policy_key_not_found(): + client = boto3.client("kms", region_name="us-east-1") + + with assert_raises(client.exceptions.NotFoundException): + client.get_key_policy( + KeyId="12366f9b-1230-123d-123e-123e6ae60c02", PolicyName="default" + ) + + +@mock_kms +def test_list_key_policies_key_not_found(): + client = boto3.client("kms", region_name="us-east-1") + + with 
assert_raises(client.exceptions.NotFoundException): + client.list_key_policies(KeyId="12366f9b-1230-123d-123e-123e6ae60c02") + + +@mock_kms +def test_put_key_policy_key_not_found(): + client = boto3.client("kms", region_name="us-east-1") + + with assert_raises(client.exceptions.NotFoundException): + client.put_key_policy( + KeyId="00000000-0000-0000-0000-000000000000", + PolicyName="default", + Policy="new policy", + ) diff --git a/tests/test_kms/test_utils.py b/tests/test_kms/test_utils.py index 29ea969b5..4c84ed127 100644 --- a/tests/test_kms/test_utils.py +++ b/tests/test_kms/test_utils.py @@ -102,7 +102,7 @@ def test_deserialize_ciphertext_blob(raw, serialized): @parameterized(((ec[0],) for ec in ENCRYPTION_CONTEXT_VECTORS)) def test_encrypt_decrypt_cycle(encryption_context): plaintext = b"some secret plaintext" - master_key = Key("nop", "nop", "nop", "nop") + master_key = Key("nop", "nop", "nop", "nop", [], "nop") master_key_map = {master_key.id: master_key} ciphertext_blob = encrypt( @@ -133,7 +133,7 @@ def test_encrypt_unknown_key_id(): def test_decrypt_invalid_ciphertext_format(): - master_key = Key("nop", "nop", "nop", "nop") + master_key = Key("nop", "nop", "nop", "nop", [], "nop") master_key_map = {master_key.id: master_key} with assert_raises(InvalidCiphertextException): @@ -153,7 +153,7 @@ def test_decrypt_unknwown_key_id(): def test_decrypt_invalid_ciphertext(): - master_key = Key("nop", "nop", "nop", "nop") + master_key = Key("nop", "nop", "nop", "nop", [], "nop") master_key_map = {master_key.id: master_key} ciphertext_blob = ( master_key.id.encode("utf-8") + b"123456789012" @@ -171,7 +171,7 @@ def test_decrypt_invalid_ciphertext(): def test_decrypt_invalid_encryption_context(): plaintext = b"some secret plaintext" - master_key = Key("nop", "nop", "nop", "nop") + master_key = Key("nop", "nop", "nop", "nop", [], "nop") master_key_map = {master_key.id: master_key} ciphertext_blob = encrypt( diff --git a/tests/test_opsworks/test_apps.py 
b/tests/test_opsworks/test_apps.py index 417140df2..1d3445c7d 100644 --- a/tests/test_opsworks/test_apps.py +++ b/tests/test_opsworks/test_apps.py @@ -1,76 +1,76 @@ -from __future__ import unicode_literals -import boto3 -from freezegun import freeze_time -import sure # noqa -import re - -from moto import mock_opsworks - - -@freeze_time("2015-01-01") -@mock_opsworks -def test_create_app_response(): - client = boto3.client("opsworks", region_name="us-east-1") - stack_id = client.create_stack( - Name="test_stack_1", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - - response = client.create_app(StackId=stack_id, Type="other", Name="TestApp") - - response.should.contain("AppId") - - second_stack_id = client.create_stack( - Name="test_stack_2", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - - response = client.create_app(StackId=second_stack_id, Type="other", Name="TestApp") - - response.should.contain("AppId") - - # ClientError - client.create_app.when.called_with( - StackId=stack_id, Type="other", Name="TestApp" - ).should.throw(Exception, re.compile(r'already an app named "TestApp"')) - - # ClientError - client.create_app.when.called_with( - StackId="nothere", Type="other", Name="TestApp" - ).should.throw(Exception, "nothere") - - -@freeze_time("2015-01-01") -@mock_opsworks -def test_describe_apps(): - client = boto3.client("opsworks", region_name="us-east-1") - stack_id = client.create_stack( - Name="test_stack_1", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - app_id = client.create_app(StackId=stack_id, Type="other", Name="TestApp")["AppId"] - - rv1 = client.describe_apps(StackId=stack_id) - rv2 = client.describe_apps(AppIds=[app_id]) - rv1["Apps"].should.equal(rv2["Apps"]) - - rv1["Apps"][0]["Name"].should.equal("TestApp") - - # ClientError - 
client.describe_apps.when.called_with( - StackId=stack_id, AppIds=[app_id] - ).should.throw(Exception, "Please provide one or more app IDs or a stack ID") - # ClientError - client.describe_apps.when.called_with(StackId="nothere").should.throw( - Exception, "Unable to find stack with ID nothere" - ) - # ClientError - client.describe_apps.when.called_with(AppIds=["nothere"]).should.throw( - Exception, "nothere" - ) +from __future__ import unicode_literals +import boto3 +from freezegun import freeze_time +import sure # noqa +import re + +from moto import mock_opsworks + + +@freeze_time("2015-01-01") +@mock_opsworks +def test_create_app_response(): + client = boto3.client("opsworks", region_name="us-east-1") + stack_id = client.create_stack( + Name="test_stack_1", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + + response = client.create_app(StackId=stack_id, Type="other", Name="TestApp") + + response.should.contain("AppId") + + second_stack_id = client.create_stack( + Name="test_stack_2", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + + response = client.create_app(StackId=second_stack_id, Type="other", Name="TestApp") + + response.should.contain("AppId") + + # ClientError + client.create_app.when.called_with( + StackId=stack_id, Type="other", Name="TestApp" + ).should.throw(Exception, re.compile(r'already an app named "TestApp"')) + + # ClientError + client.create_app.when.called_with( + StackId="nothere", Type="other", Name="TestApp" + ).should.throw(Exception, "nothere") + + +@freeze_time("2015-01-01") +@mock_opsworks +def test_describe_apps(): + client = boto3.client("opsworks", region_name="us-east-1") + stack_id = client.create_stack( + Name="test_stack_1", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + app_id = client.create_app(StackId=stack_id, Type="other", 
Name="TestApp")["AppId"] + + rv1 = client.describe_apps(StackId=stack_id) + rv2 = client.describe_apps(AppIds=[app_id]) + rv1["Apps"].should.equal(rv2["Apps"]) + + rv1["Apps"][0]["Name"].should.equal("TestApp") + + # ClientError + client.describe_apps.when.called_with( + StackId=stack_id, AppIds=[app_id] + ).should.throw(Exception, "Please provide one or more app IDs or a stack ID") + # ClientError + client.describe_apps.when.called_with(StackId="nothere").should.throw( + Exception, "Unable to find stack with ID nothere" + ) + # ClientError + client.describe_apps.when.called_with(AppIds=["nothere"]).should.throw( + Exception, "nothere" + ) diff --git a/tests/test_opsworks/test_instances.py b/tests/test_opsworks/test_instances.py index 55d23f08e..5f0dc2040 100644 --- a/tests/test_opsworks/test_instances.py +++ b/tests/test_opsworks/test_instances.py @@ -1,206 +1,206 @@ -from __future__ import unicode_literals -import boto3 -import sure # noqa - -from moto import mock_opsworks -from moto import mock_ec2 - - -@mock_opsworks -def test_create_instance(): - client = boto3.client("opsworks", region_name="us-east-1") - stack_id = client.create_stack( - Name="test_stack_1", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - - layer_id = client.create_layer( - StackId=stack_id, - Type="custom", - Name="TestLayer", - Shortname="TestLayerShortName", - )["LayerId"] - - second_stack_id = client.create_stack( - Name="test_stack_2", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - - second_layer_id = client.create_layer( - StackId=second_stack_id, - Type="custom", - Name="SecondTestLayer", - Shortname="SecondTestLayerShortName", - )["LayerId"] - - response = client.create_instance( - StackId=stack_id, LayerIds=[layer_id], InstanceType="t2.micro" - ) - - response.should.contain("InstanceId") - - client.create_instance.when.called_with( - StackId="nothere", 
LayerIds=[layer_id], InstanceType="t2.micro" - ).should.throw(Exception, "Unable to find stack with ID nothere") - - client.create_instance.when.called_with( - StackId=stack_id, LayerIds=["nothere"], InstanceType="t2.micro" - ).should.throw(Exception, "nothere") - # ClientError - client.create_instance.when.called_with( - StackId=stack_id, LayerIds=[second_layer_id], InstanceType="t2.micro" - ).should.throw(Exception, "Please only provide layer IDs from the same stack") - # ClientError - client.start_instance.when.called_with(InstanceId="nothere").should.throw( - Exception, "Unable to find instance with ID nothere" - ) - - -@mock_opsworks -def test_describe_instances(): - """ - create two stacks, with 1 layer and 2 layers (S1L1, S2L1, S2L2) - - populate S1L1 with 2 instances (S1L1_i1, S1L1_i2) - populate S2L1 with 1 instance (S2L1_i1) - populate S2L2 with 3 instances (S2L2_i1..2) - """ - - client = boto3.client("opsworks", region_name="us-east-1") - S1 = client.create_stack( - Name="S1", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - S1L1 = client.create_layer( - StackId=S1, Type="custom", Name="S1L1", Shortname="S1L1" - )["LayerId"] - S2 = client.create_stack( - Name="S2", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - S2L1 = client.create_layer( - StackId=S2, Type="custom", Name="S2L1", Shortname="S2L1" - )["LayerId"] - S2L2 = client.create_layer( - StackId=S2, Type="custom", Name="S2L2", Shortname="S2L2" - )["LayerId"] - - S1L1_i1 = client.create_instance( - StackId=S1, LayerIds=[S1L1], InstanceType="t2.micro" - )["InstanceId"] - S1L1_i2 = client.create_instance( - StackId=S1, LayerIds=[S1L1], InstanceType="t2.micro" - )["InstanceId"] - S2L1_i1 = client.create_instance( - StackId=S2, LayerIds=[S2L1], InstanceType="t2.micro" - )["InstanceId"] - S2L2_i1 = client.create_instance( - StackId=S2, LayerIds=[S2L2], InstanceType="t2.micro" 
- )["InstanceId"] - S2L2_i2 = client.create_instance( - StackId=S2, LayerIds=[S2L2], InstanceType="t2.micro" - )["InstanceId"] - - # instances in Stack 1 - response = client.describe_instances(StackId=S1)["Instances"] - response.should.have.length_of(2) - S1L1_i1.should.be.within([i["InstanceId"] for i in response]) - S1L1_i2.should.be.within([i["InstanceId"] for i in response]) - - response2 = client.describe_instances(InstanceIds=[S1L1_i1, S1L1_i2])["Instances"] - sorted(response2, key=lambda d: d["InstanceId"]).should.equal( - sorted(response, key=lambda d: d["InstanceId"]) - ) - - response3 = client.describe_instances(LayerId=S1L1)["Instances"] - sorted(response3, key=lambda d: d["InstanceId"]).should.equal( - sorted(response, key=lambda d: d["InstanceId"]) - ) - - response = client.describe_instances(StackId=S1)["Instances"] - response.should.have.length_of(2) - S1L1_i1.should.be.within([i["InstanceId"] for i in response]) - S1L1_i2.should.be.within([i["InstanceId"] for i in response]) - - # instances in Stack 2 - response = client.describe_instances(StackId=S2)["Instances"] - response.should.have.length_of(3) - S2L1_i1.should.be.within([i["InstanceId"] for i in response]) - S2L2_i1.should.be.within([i["InstanceId"] for i in response]) - S2L2_i2.should.be.within([i["InstanceId"] for i in response]) - - response = client.describe_instances(LayerId=S2L1)["Instances"] - response.should.have.length_of(1) - S2L1_i1.should.be.within([i["InstanceId"] for i in response]) - - response = client.describe_instances(LayerId=S2L2)["Instances"] - response.should.have.length_of(2) - S2L1_i1.should_not.be.within([i["InstanceId"] for i in response]) - - # ClientError - client.describe_instances.when.called_with(StackId=S1, LayerId=S1L1).should.throw( - Exception, "Please provide either one or more" - ) - # ClientError - client.describe_instances.when.called_with(StackId="nothere").should.throw( - Exception, "nothere" - ) - # ClientError - 
client.describe_instances.when.called_with(LayerId="nothere").should.throw( - Exception, "nothere" - ) - # ClientError - client.describe_instances.when.called_with(InstanceIds=["nothere"]).should.throw( - Exception, "nothere" - ) - - -@mock_opsworks -@mock_ec2 -def test_ec2_integration(): - """ - instances created via OpsWorks should be discoverable via ec2 - """ - - opsworks = boto3.client("opsworks", region_name="us-east-1") - stack_id = opsworks.create_stack( - Name="S1", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - - layer_id = opsworks.create_layer( - StackId=stack_id, Type="custom", Name="S1L1", Shortname="S1L1" - )["LayerId"] - - instance_id = opsworks.create_instance( - StackId=stack_id, - LayerIds=[layer_id], - InstanceType="t2.micro", - SshKeyName="testSSH", - )["InstanceId"] - - ec2 = boto3.client("ec2", region_name="us-east-1") - - # Before starting the instance, it shouldn't be discoverable via ec2 - reservations = ec2.describe_instances()["Reservations"] - assert reservations.should.be.empty - - # After starting the instance, it should be discoverable via ec2 - opsworks.start_instance(InstanceId=instance_id) - reservations = ec2.describe_instances()["Reservations"] - reservations[0]["Instances"].should.have.length_of(1) - instance = reservations[0]["Instances"][0] - opsworks_instance = opsworks.describe_instances(StackId=stack_id)["Instances"][0] - - instance["InstanceId"].should.equal(opsworks_instance["Ec2InstanceId"]) - instance["PrivateIpAddress"].should.equal(opsworks_instance["PrivateIp"]) +from __future__ import unicode_literals +import boto3 +import sure # noqa + +from moto import mock_opsworks +from moto import mock_ec2 + + +@mock_opsworks +def test_create_instance(): + client = boto3.client("opsworks", region_name="us-east-1") + stack_id = client.create_stack( + Name="test_stack_1", + Region="us-east-1", + ServiceRoleArn="service_arn", + 
DefaultInstanceProfileArn="profile_arn", + )["StackId"] + + layer_id = client.create_layer( + StackId=stack_id, + Type="custom", + Name="TestLayer", + Shortname="TestLayerShortName", + )["LayerId"] + + second_stack_id = client.create_stack( + Name="test_stack_2", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + + second_layer_id = client.create_layer( + StackId=second_stack_id, + Type="custom", + Name="SecondTestLayer", + Shortname="SecondTestLayerShortName", + )["LayerId"] + + response = client.create_instance( + StackId=stack_id, LayerIds=[layer_id], InstanceType="t2.micro" + ) + + response.should.contain("InstanceId") + + client.create_instance.when.called_with( + StackId="nothere", LayerIds=[layer_id], InstanceType="t2.micro" + ).should.throw(Exception, "Unable to find stack with ID nothere") + + client.create_instance.when.called_with( + StackId=stack_id, LayerIds=["nothere"], InstanceType="t2.micro" + ).should.throw(Exception, "nothere") + # ClientError + client.create_instance.when.called_with( + StackId=stack_id, LayerIds=[second_layer_id], InstanceType="t2.micro" + ).should.throw(Exception, "Please only provide layer IDs from the same stack") + # ClientError + client.start_instance.when.called_with(InstanceId="nothere").should.throw( + Exception, "Unable to find instance with ID nothere" + ) + + +@mock_opsworks +def test_describe_instances(): + """ + create two stacks, with 1 layer and 2 layers (S1L1, S2L1, S2L2) + + populate S1L1 with 2 instances (S1L1_i1, S1L1_i2) + populate S2L1 with 1 instance (S2L1_i1) + populate S2L2 with 3 instances (S2L2_i1..2) + """ + + client = boto3.client("opsworks", region_name="us-east-1") + S1 = client.create_stack( + Name="S1", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + S1L1 = client.create_layer( + StackId=S1, Type="custom", Name="S1L1", Shortname="S1L1" + )["LayerId"] + S2 = 
client.create_stack( + Name="S2", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + S2L1 = client.create_layer( + StackId=S2, Type="custom", Name="S2L1", Shortname="S2L1" + )["LayerId"] + S2L2 = client.create_layer( + StackId=S2, Type="custom", Name="S2L2", Shortname="S2L2" + )["LayerId"] + + S1L1_i1 = client.create_instance( + StackId=S1, LayerIds=[S1L1], InstanceType="t2.micro" + )["InstanceId"] + S1L1_i2 = client.create_instance( + StackId=S1, LayerIds=[S1L1], InstanceType="t2.micro" + )["InstanceId"] + S2L1_i1 = client.create_instance( + StackId=S2, LayerIds=[S2L1], InstanceType="t2.micro" + )["InstanceId"] + S2L2_i1 = client.create_instance( + StackId=S2, LayerIds=[S2L2], InstanceType="t2.micro" + )["InstanceId"] + S2L2_i2 = client.create_instance( + StackId=S2, LayerIds=[S2L2], InstanceType="t2.micro" + )["InstanceId"] + + # instances in Stack 1 + response = client.describe_instances(StackId=S1)["Instances"] + response.should.have.length_of(2) + S1L1_i1.should.be.within([i["InstanceId"] for i in response]) + S1L1_i2.should.be.within([i["InstanceId"] for i in response]) + + response2 = client.describe_instances(InstanceIds=[S1L1_i1, S1L1_i2])["Instances"] + sorted(response2, key=lambda d: d["InstanceId"]).should.equal( + sorted(response, key=lambda d: d["InstanceId"]) + ) + + response3 = client.describe_instances(LayerId=S1L1)["Instances"] + sorted(response3, key=lambda d: d["InstanceId"]).should.equal( + sorted(response, key=lambda d: d["InstanceId"]) + ) + + response = client.describe_instances(StackId=S1)["Instances"] + response.should.have.length_of(2) + S1L1_i1.should.be.within([i["InstanceId"] for i in response]) + S1L1_i2.should.be.within([i["InstanceId"] for i in response]) + + # instances in Stack 2 + response = client.describe_instances(StackId=S2)["Instances"] + response.should.have.length_of(3) + S2L1_i1.should.be.within([i["InstanceId"] for i in response]) + 
S2L2_i1.should.be.within([i["InstanceId"] for i in response]) + S2L2_i2.should.be.within([i["InstanceId"] for i in response]) + + response = client.describe_instances(LayerId=S2L1)["Instances"] + response.should.have.length_of(1) + S2L1_i1.should.be.within([i["InstanceId"] for i in response]) + + response = client.describe_instances(LayerId=S2L2)["Instances"] + response.should.have.length_of(2) + S2L1_i1.should_not.be.within([i["InstanceId"] for i in response]) + + # ClientError + client.describe_instances.when.called_with(StackId=S1, LayerId=S1L1).should.throw( + Exception, "Please provide either one or more" + ) + # ClientError + client.describe_instances.when.called_with(StackId="nothere").should.throw( + Exception, "nothere" + ) + # ClientError + client.describe_instances.when.called_with(LayerId="nothere").should.throw( + Exception, "nothere" + ) + # ClientError + client.describe_instances.when.called_with(InstanceIds=["nothere"]).should.throw( + Exception, "nothere" + ) + + +@mock_opsworks +@mock_ec2 +def test_ec2_integration(): + """ + instances created via OpsWorks should be discoverable via ec2 + """ + + opsworks = boto3.client("opsworks", region_name="us-east-1") + stack_id = opsworks.create_stack( + Name="S1", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + + layer_id = opsworks.create_layer( + StackId=stack_id, Type="custom", Name="S1L1", Shortname="S1L1" + )["LayerId"] + + instance_id = opsworks.create_instance( + StackId=stack_id, + LayerIds=[layer_id], + InstanceType="t2.micro", + SshKeyName="testSSH", + )["InstanceId"] + + ec2 = boto3.client("ec2", region_name="us-east-1") + + # Before starting the instance, it shouldn't be discoverable via ec2 + reservations = ec2.describe_instances()["Reservations"] + assert reservations.should.be.empty + + # After starting the instance, it should be discoverable via ec2 + opsworks.start_instance(InstanceId=instance_id) + reservations = 
ec2.describe_instances()["Reservations"] + reservations[0]["Instances"].should.have.length_of(1) + instance = reservations[0]["Instances"][0] + opsworks_instance = opsworks.describe_instances(StackId=stack_id)["Instances"][0] + + instance["InstanceId"].should.equal(opsworks_instance["Ec2InstanceId"]) + instance["PrivateIpAddress"].should.equal(opsworks_instance["PrivateIp"]) diff --git a/tests/test_opsworks/test_layers.py b/tests/test_opsworks/test_layers.py index 850666381..08d5a1ce4 100644 --- a/tests/test_opsworks/test_layers.py +++ b/tests/test_opsworks/test_layers.py @@ -1,96 +1,96 @@ -from __future__ import unicode_literals -import boto3 -from freezegun import freeze_time -import sure # noqa -import re - -from moto import mock_opsworks - - -@freeze_time("2015-01-01") -@mock_opsworks -def test_create_layer_response(): - client = boto3.client("opsworks", region_name="us-east-1") - stack_id = client.create_stack( - Name="test_stack_1", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - - response = client.create_layer( - StackId=stack_id, - Type="custom", - Name="TestLayer", - Shortname="TestLayerShortName", - ) - - response.should.contain("LayerId") - - second_stack_id = client.create_stack( - Name="test_stack_2", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - - response = client.create_layer( - StackId=second_stack_id, - Type="custom", - Name="TestLayer", - Shortname="TestLayerShortName", - ) - - response.should.contain("LayerId") - - # ClientError - client.create_layer.when.called_with( - StackId=stack_id, Type="custom", Name="TestLayer", Shortname="_" - ).should.throw(Exception, re.compile(r'already a layer named "TestLayer"')) - # ClientError - client.create_layer.when.called_with( - StackId=stack_id, Type="custom", Name="_", Shortname="TestLayerShortName" - ).should.throw( - Exception, re.compile(r'already a layer with shortname 
"TestLayerShortName"') - ) - # ClientError - client.create_layer.when.called_with( - StackId="nothere", Type="custom", Name="TestLayer", Shortname="_" - ).should.throw(Exception, "nothere") - - -@freeze_time("2015-01-01") -@mock_opsworks -def test_describe_layers(): - client = boto3.client("opsworks", region_name="us-east-1") - stack_id = client.create_stack( - Name="test_stack_1", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - layer_id = client.create_layer( - StackId=stack_id, - Type="custom", - Name="TestLayer", - Shortname="TestLayerShortName", - )["LayerId"] - - rv1 = client.describe_layers(StackId=stack_id) - rv2 = client.describe_layers(LayerIds=[layer_id]) - rv1["Layers"].should.equal(rv2["Layers"]) - - rv1["Layers"][0]["Name"].should.equal("TestLayer") - - # ClientError - client.describe_layers.when.called_with( - StackId=stack_id, LayerIds=[layer_id] - ).should.throw(Exception, "Please provide one or more layer IDs or a stack ID") - # ClientError - client.describe_layers.when.called_with(StackId="nothere").should.throw( - Exception, "Unable to find stack with ID nothere" - ) - # ClientError - client.describe_layers.when.called_with(LayerIds=["nothere"]).should.throw( - Exception, "nothere" - ) +from __future__ import unicode_literals +import boto3 +from freezegun import freeze_time +import sure # noqa +import re + +from moto import mock_opsworks + + +@freeze_time("2015-01-01") +@mock_opsworks +def test_create_layer_response(): + client = boto3.client("opsworks", region_name="us-east-1") + stack_id = client.create_stack( + Name="test_stack_1", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + + response = client.create_layer( + StackId=stack_id, + Type="custom", + Name="TestLayer", + Shortname="TestLayerShortName", + ) + + response.should.contain("LayerId") + + second_stack_id = client.create_stack( + Name="test_stack_2", + 
Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + + response = client.create_layer( + StackId=second_stack_id, + Type="custom", + Name="TestLayer", + Shortname="TestLayerShortName", + ) + + response.should.contain("LayerId") + + # ClientError + client.create_layer.when.called_with( + StackId=stack_id, Type="custom", Name="TestLayer", Shortname="_" + ).should.throw(Exception, re.compile(r'already a layer named "TestLayer"')) + # ClientError + client.create_layer.when.called_with( + StackId=stack_id, Type="custom", Name="_", Shortname="TestLayerShortName" + ).should.throw( + Exception, re.compile(r'already a layer with shortname "TestLayerShortName"') + ) + # ClientError + client.create_layer.when.called_with( + StackId="nothere", Type="custom", Name="TestLayer", Shortname="_" + ).should.throw(Exception, "nothere") + + +@freeze_time("2015-01-01") +@mock_opsworks +def test_describe_layers(): + client = boto3.client("opsworks", region_name="us-east-1") + stack_id = client.create_stack( + Name="test_stack_1", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + layer_id = client.create_layer( + StackId=stack_id, + Type="custom", + Name="TestLayer", + Shortname="TestLayerShortName", + )["LayerId"] + + rv1 = client.describe_layers(StackId=stack_id) + rv2 = client.describe_layers(LayerIds=[layer_id]) + rv1["Layers"].should.equal(rv2["Layers"]) + + rv1["Layers"][0]["Name"].should.equal("TestLayer") + + # ClientError + client.describe_layers.when.called_with( + StackId=stack_id, LayerIds=[layer_id] + ).should.throw(Exception, "Please provide one or more layer IDs or a stack ID") + # ClientError + client.describe_layers.when.called_with(StackId="nothere").should.throw( + Exception, "Unable to find stack with ID nothere" + ) + # ClientError + client.describe_layers.when.called_with(LayerIds=["nothere"]).should.throw( + Exception, "nothere" + ) diff --git 
a/tests/test_organizations/test_organizations_boto3.py b/tests/test_organizations/test_organizations_boto3.py index dd79ae787..876e83712 100644 --- a/tests/test_organizations/test_organizations_boto3.py +++ b/tests/test_organizations/test_organizations_boto3.py @@ -713,3 +713,41 @@ def test_untag_resource_errors(): ex.response["Error"]["Message"].should.equal( "You provided a value that does not match the required pattern." ) + + +@mock_organizations +def test_update_organizational_unit(): + client = boto3.client("organizations", region_name="us-east-1") + org = client.create_organization(FeatureSet="ALL")["Organization"] + root_id = client.list_roots()["Roots"][0]["Id"] + ou_name = "ou01" + response = client.create_organizational_unit(ParentId=root_id, Name=ou_name) + validate_organizational_unit(org, response) + response["OrganizationalUnit"]["Name"].should.equal(ou_name) + new_ou_name = "ou02" + response = client.update_organizational_unit( + OrganizationalUnitId=response["OrganizationalUnit"]["Id"], Name=new_ou_name + ) + validate_organizational_unit(org, response) + response["OrganizationalUnit"]["Name"].should.equal(new_ou_name) + + +@mock_organizations +def test_update_organizational_unit_duplicate_error(): + client = boto3.client("organizations", region_name="us-east-1") + org = client.create_organization(FeatureSet="ALL")["Organization"] + root_id = client.list_roots()["Roots"][0]["Id"] + ou_name = "ou01" + response = client.create_organizational_unit(ParentId=root_id, Name=ou_name) + validate_organizational_unit(org, response) + response["OrganizationalUnit"]["Name"].should.equal(ou_name) + with assert_raises(ClientError) as e: + client.update_organizational_unit( + OrganizationalUnitId=response["OrganizationalUnit"]["Id"], Name=ou_name + ) + exc = e.exception + exc.operation_name.should.equal("UpdateOrganizationalUnit") + exc.response["Error"]["Code"].should.contain("DuplicateOrganizationalUnitException") + exc.response["Error"]["Message"].should.equal( 
+ "An OU with the same name already exists." + ) diff --git a/tests/test_polly/test_polly.py b/tests/test_polly/test_polly.py index e172b98d0..5428cdeb7 100644 --- a/tests/test_polly/test_polly.py +++ b/tests/test_polly/test_polly.py @@ -1,263 +1,263 @@ -from __future__ import unicode_literals - -from botocore.exceptions import ClientError -import boto3 -import sure # noqa -from nose.tools import assert_raises -from moto import mock_polly - -# Polly only available in a few regions -DEFAULT_REGION = "eu-west-1" - -LEXICON_XML = """ - - - W3C - World Wide Web Consortium - -""" - - -@mock_polly -def test_describe_voices(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - - resp = client.describe_voices() - len(resp["Voices"]).should.be.greater_than(1) - - resp = client.describe_voices(LanguageCode="en-GB") - len(resp["Voices"]).should.equal(3) - - try: - client.describe_voices(LanguageCode="SOME_LANGUAGE") - except ClientError as err: - err.response["Error"]["Code"].should.equal("400") - else: - raise RuntimeError("Should of raised an exception") - - -@mock_polly -def test_put_list_lexicon(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - - # Return nothing - client.put_lexicon(Name="test", Content=LEXICON_XML) - - resp = client.list_lexicons() - len(resp["Lexicons"]).should.equal(1) - - -@mock_polly -def test_put_get_lexicon(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - - # Return nothing - client.put_lexicon(Name="test", Content=LEXICON_XML) - - resp = client.get_lexicon(Name="test") - resp.should.contain("Lexicon") - resp.should.contain("LexiconAttributes") - - -@mock_polly -def test_put_lexicon_bad_name(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - - try: - client.put_lexicon(Name="test-invalid", Content=LEXICON_XML) - except ClientError as err: - err.response["Error"]["Code"].should.equal("InvalidParameterValue") - else: - raise RuntimeError("Should of raised an exception") - - -@mock_polly 
-def test_synthesize_speech(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - - # Return nothing - client.put_lexicon(Name="test", Content=LEXICON_XML) - - tests = (("pcm", "audio/pcm"), ("mp3", "audio/mpeg"), ("ogg_vorbis", "audio/ogg")) - for output_format, content_type in tests: - resp = client.synthesize_speech( - LexiconNames=["test"], - OutputFormat=output_format, - SampleRate="16000", - Text="test1234", - TextType="text", - VoiceId="Astrid", - ) - resp["ContentType"].should.equal(content_type) - - -@mock_polly -def test_synthesize_speech_bad_lexicon(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - client.put_lexicon(Name="test", Content=LEXICON_XML) - - try: - client.synthesize_speech( - LexiconNames=["test2"], - OutputFormat="pcm", - SampleRate="16000", - Text="test1234", - TextType="text", - VoiceId="Astrid", - ) - except ClientError as err: - err.response["Error"]["Code"].should.equal("LexiconNotFoundException") - else: - raise RuntimeError("Should of raised LexiconNotFoundException") - - -@mock_polly -def test_synthesize_speech_bad_output_format(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - client.put_lexicon(Name="test", Content=LEXICON_XML) - - try: - client.synthesize_speech( - LexiconNames=["test"], - OutputFormat="invalid", - SampleRate="16000", - Text="test1234", - TextType="text", - VoiceId="Astrid", - ) - except ClientError as err: - err.response["Error"]["Code"].should.equal("InvalidParameterValue") - else: - raise RuntimeError("Should of raised ") - - -@mock_polly -def test_synthesize_speech_bad_sample_rate(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - client.put_lexicon(Name="test", Content=LEXICON_XML) - - try: - client.synthesize_speech( - LexiconNames=["test"], - OutputFormat="pcm", - SampleRate="18000", - Text="test1234", - TextType="text", - VoiceId="Astrid", - ) - except ClientError as err: - err.response["Error"]["Code"].should.equal("InvalidSampleRateException") - 
else: - raise RuntimeError("Should of raised ") - - -@mock_polly -def test_synthesize_speech_bad_text_type(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - client.put_lexicon(Name="test", Content=LEXICON_XML) - - try: - client.synthesize_speech( - LexiconNames=["test"], - OutputFormat="pcm", - SampleRate="16000", - Text="test1234", - TextType="invalid", - VoiceId="Astrid", - ) - except ClientError as err: - err.response["Error"]["Code"].should.equal("InvalidParameterValue") - else: - raise RuntimeError("Should of raised ") - - -@mock_polly -def test_synthesize_speech_bad_voice_id(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - client.put_lexicon(Name="test", Content=LEXICON_XML) - - try: - client.synthesize_speech( - LexiconNames=["test"], - OutputFormat="pcm", - SampleRate="16000", - Text="test1234", - TextType="text", - VoiceId="Luke", - ) - except ClientError as err: - err.response["Error"]["Code"].should.equal("InvalidParameterValue") - else: - raise RuntimeError("Should of raised ") - - -@mock_polly -def test_synthesize_speech_text_too_long(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - client.put_lexicon(Name="test", Content=LEXICON_XML) - - try: - client.synthesize_speech( - LexiconNames=["test"], - OutputFormat="pcm", - SampleRate="16000", - Text="test1234" * 376, # = 3008 characters - TextType="text", - VoiceId="Astrid", - ) - except ClientError as err: - err.response["Error"]["Code"].should.equal("TextLengthExceededException") - else: - raise RuntimeError("Should of raised ") - - -@mock_polly -def test_synthesize_speech_bad_speech_marks1(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - client.put_lexicon(Name="test", Content=LEXICON_XML) - - try: - client.synthesize_speech( - LexiconNames=["test"], - OutputFormat="pcm", - SampleRate="16000", - Text="test1234", - TextType="text", - SpeechMarkTypes=["word"], - VoiceId="Astrid", - ) - except ClientError as err: - 
err.response["Error"]["Code"].should.equal( - "MarksNotSupportedForFormatException" - ) - else: - raise RuntimeError("Should of raised ") - - -@mock_polly -def test_synthesize_speech_bad_speech_marks2(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - client.put_lexicon(Name="test", Content=LEXICON_XML) - - try: - client.synthesize_speech( - LexiconNames=["test"], - OutputFormat="pcm", - SampleRate="16000", - Text="test1234", - TextType="ssml", - SpeechMarkTypes=["word"], - VoiceId="Astrid", - ) - except ClientError as err: - err.response["Error"]["Code"].should.equal( - "MarksNotSupportedForFormatException" - ) - else: - raise RuntimeError("Should of raised ") +from __future__ import unicode_literals + +from botocore.exceptions import ClientError +import boto3 +import sure # noqa +from nose.tools import assert_raises +from moto import mock_polly + +# Polly only available in a few regions +DEFAULT_REGION = "eu-west-1" + +LEXICON_XML = """ + + + W3C + World Wide Web Consortium + +""" + + +@mock_polly +def test_describe_voices(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + + resp = client.describe_voices() + len(resp["Voices"]).should.be.greater_than(1) + + resp = client.describe_voices(LanguageCode="en-GB") + len(resp["Voices"]).should.equal(3) + + try: + client.describe_voices(LanguageCode="SOME_LANGUAGE") + except ClientError as err: + err.response["Error"]["Code"].should.equal("400") + else: + raise RuntimeError("Should of raised an exception") + + +@mock_polly +def test_put_list_lexicon(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + + # Return nothing + client.put_lexicon(Name="test", Content=LEXICON_XML) + + resp = client.list_lexicons() + len(resp["Lexicons"]).should.equal(1) + + +@mock_polly +def test_put_get_lexicon(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + + # Return nothing + client.put_lexicon(Name="test", Content=LEXICON_XML) + + resp = client.get_lexicon(Name="test") + 
resp.should.contain("Lexicon") + resp.should.contain("LexiconAttributes") + + +@mock_polly +def test_put_lexicon_bad_name(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + + try: + client.put_lexicon(Name="test-invalid", Content=LEXICON_XML) + except ClientError as err: + err.response["Error"]["Code"].should.equal("InvalidParameterValue") + else: + raise RuntimeError("Should of raised an exception") + + +@mock_polly +def test_synthesize_speech(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + + # Return nothing + client.put_lexicon(Name="test", Content=LEXICON_XML) + + tests = (("pcm", "audio/pcm"), ("mp3", "audio/mpeg"), ("ogg_vorbis", "audio/ogg")) + for output_format, content_type in tests: + resp = client.synthesize_speech( + LexiconNames=["test"], + OutputFormat=output_format, + SampleRate="16000", + Text="test1234", + TextType="text", + VoiceId="Astrid", + ) + resp["ContentType"].should.equal(content_type) + + +@mock_polly +def test_synthesize_speech_bad_lexicon(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + client.put_lexicon(Name="test", Content=LEXICON_XML) + + try: + client.synthesize_speech( + LexiconNames=["test2"], + OutputFormat="pcm", + SampleRate="16000", + Text="test1234", + TextType="text", + VoiceId="Astrid", + ) + except ClientError as err: + err.response["Error"]["Code"].should.equal("LexiconNotFoundException") + else: + raise RuntimeError("Should of raised LexiconNotFoundException") + + +@mock_polly +def test_synthesize_speech_bad_output_format(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + client.put_lexicon(Name="test", Content=LEXICON_XML) + + try: + client.synthesize_speech( + LexiconNames=["test"], + OutputFormat="invalid", + SampleRate="16000", + Text="test1234", + TextType="text", + VoiceId="Astrid", + ) + except ClientError as err: + err.response["Error"]["Code"].should.equal("InvalidParameterValue") + else: + raise RuntimeError("Should of raised ") + + +@mock_polly 
+def test_synthesize_speech_bad_sample_rate(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + client.put_lexicon(Name="test", Content=LEXICON_XML) + + try: + client.synthesize_speech( + LexiconNames=["test"], + OutputFormat="pcm", + SampleRate="18000", + Text="test1234", + TextType="text", + VoiceId="Astrid", + ) + except ClientError as err: + err.response["Error"]["Code"].should.equal("InvalidSampleRateException") + else: + raise RuntimeError("Should have raised ") + + +@mock_polly +def test_synthesize_speech_bad_text_type(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + client.put_lexicon(Name="test", Content=LEXICON_XML) + + try: + client.synthesize_speech( + LexiconNames=["test"], + OutputFormat="pcm", + SampleRate="16000", + Text="test1234", + TextType="invalid", + VoiceId="Astrid", + ) + except ClientError as err: + err.response["Error"]["Code"].should.equal("InvalidParameterValue") + else: + raise RuntimeError("Should have raised ") + + +@mock_polly +def test_synthesize_speech_bad_voice_id(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + client.put_lexicon(Name="test", Content=LEXICON_XML) + + try: + client.synthesize_speech( + LexiconNames=["test"], + OutputFormat="pcm", + SampleRate="16000", + Text="test1234", + TextType="text", + VoiceId="Luke", + ) + except ClientError as err: + err.response["Error"]["Code"].should.equal("InvalidParameterValue") + else: + raise RuntimeError("Should have raised ") + + +@mock_polly +def test_synthesize_speech_text_too_long(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + client.put_lexicon(Name="test", Content=LEXICON_XML) + + try: + client.synthesize_speech( + LexiconNames=["test"], + OutputFormat="pcm", + SampleRate="16000", + Text="test1234" * 376, # = 3008 characters + TextType="text", + VoiceId="Astrid", + ) + except ClientError as err: + err.response["Error"]["Code"].should.equal("TextLengthExceededException") + else: + raise RuntimeError("Should have raised 
") + + +@mock_polly +def test_synthesize_speech_bad_speech_marks1(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + client.put_lexicon(Name="test", Content=LEXICON_XML) + + try: + client.synthesize_speech( + LexiconNames=["test"], + OutputFormat="pcm", + SampleRate="16000", + Text="test1234", + TextType="text", + SpeechMarkTypes=["word"], + VoiceId="Astrid", + ) + except ClientError as err: + err.response["Error"]["Code"].should.equal( + "MarksNotSupportedForFormatException" + ) + else: + raise RuntimeError("Should have raised ") + + +@mock_polly +def test_synthesize_speech_bad_speech_marks2(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + client.put_lexicon(Name="test", Content=LEXICON_XML) + + try: + client.synthesize_speech( + LexiconNames=["test"], + OutputFormat="pcm", + SampleRate="16000", + Text="test1234", + TextType="ssml", + SpeechMarkTypes=["word"], + VoiceId="Astrid", + ) + except ClientError as err: + err.response["Error"]["Code"].should.equal( + "MarksNotSupportedForFormatException" + ) + else: + raise RuntimeError("Should have raised ") diff --git a/tests/test_rds/test_rds.py b/tests/test_rds/test_rds.py index 4ebea0cf3..a3e7dc9dd 100644 --- a/tests/test_rds/test_rds.py +++ b/tests/test_rds/test_rds.py @@ -68,7 +68,7 @@ def test_get_databases_paginated(): @mock_rds_deprecated -def test_describe_non_existant_database(): +def test_describe_non_existent_database(): conn = boto.rds.connect_to_region("us-west-2") conn.get_all_dbinstances.when.called_with("not-a-db").should.throw(BotoServerError) @@ -86,7 +86,7 @@ def test_delete_database(): @mock_rds_deprecated -def test_delete_non_existant_database(): +def test_delete_non_existent_database(): conn = boto.rds.connect_to_region("us-west-2") conn.delete_dbinstance.when.called_with("not-a-db").should.throw(BotoServerError) @@ -119,7 +119,7 @@ def test_get_security_groups(): @mock_rds_deprecated -def test_get_non_existant_security_group(): +def 
test_get_non_existent_security_group(): conn = boto.rds.connect_to_region("us-west-2") conn.get_all_dbsecurity_groups.when.called_with("not-a-sg").should.throw( BotoServerError @@ -138,7 +138,7 @@ def test_delete_database_security_group(): @mock_rds_deprecated -def test_delete_non_existant_security_group(): +def test_delete_non_existent_security_group(): conn = boto.rds.connect_to_region("us-west-2") conn.delete_dbsecurity_group.when.called_with("not-a-db").should.throw( BotoServerError diff --git a/tests/test_rds2/test_rds2.py b/tests/test_rds2/test_rds2.py index 47b45539d..e93ff43e9 100644 --- a/tests/test_rds2/test_rds2.py +++ b/tests/test_rds2/test_rds2.py @@ -312,7 +312,7 @@ def test_get_databases_paginated(): @mock_rds2 -def test_describe_non_existant_database(): +def test_describe_non_existent_database(): conn = boto3.client("rds", region_name="us-west-2") conn.describe_db_instances.when.called_with( DBInstanceIdentifier="not-a-db" @@ -378,7 +378,7 @@ def test_rename_db_instance(): @mock_rds2 -def test_modify_non_existant_database(): +def test_modify_non_existent_database(): conn = boto3.client("rds", region_name="us-west-2") conn.modify_db_instance.when.called_with( DBInstanceIdentifier="not-a-db", AllocatedStorage=20, ApplyImmediately=True @@ -403,7 +403,7 @@ def test_reboot_db_instance(): @mock_rds2 -def test_reboot_non_existant_database(): +def test_reboot_non_existent_database(): conn = boto3.client("rds", region_name="us-west-2") conn.reboot_db_instance.when.called_with( DBInstanceIdentifier="not-a-db" @@ -444,7 +444,7 @@ def test_delete_database(): @mock_rds2 -def test_delete_non_existant_database(): +def test_delete_non_existent_database(): conn = boto3.client("rds2", region_name="us-west-2") conn.delete_db_instance.when.called_with( DBInstanceIdentifier="not-a-db" @@ -663,7 +663,7 @@ def test_describe_option_group(): @mock_rds2 -def test_describe_non_existant_option_group(): +def test_describe_non_existent_option_group(): conn = boto3.client("rds", 
region_name="us-west-2") conn.describe_option_groups.when.called_with( OptionGroupName="not-a-option-group" @@ -688,10 +688,10 @@ def test_delete_option_group(): @mock_rds2 -def test_delete_non_existant_option_group(): +def test_delete_non_existent_option_group(): conn = boto3.client("rds", region_name="us-west-2") conn.delete_option_group.when.called_with( - OptionGroupName="non-existant" + OptionGroupName="non-existent" ).should.throw(ClientError) @@ -754,10 +754,10 @@ def test_modify_option_group_no_options(): @mock_rds2 -def test_modify_non_existant_option_group(): +def test_modify_non_existent_option_group(): conn = boto3.client("rds", region_name="us-west-2") conn.modify_option_group.when.called_with( - OptionGroupName="non-existant", + OptionGroupName="non-existent", OptionsToInclude=[ ( "OptionName", @@ -771,7 +771,7 @@ def test_modify_non_existant_option_group(): @mock_rds2 -def test_delete_non_existant_database(): +def test_delete_non_existent_database(): conn = boto3.client("rds", region_name="us-west-2") conn.delete_db_instance.when.called_with( DBInstanceIdentifier="not-a-db" @@ -1053,7 +1053,7 @@ def test_get_security_groups(): @mock_rds2 -def test_get_non_existant_security_group(): +def test_get_non_existent_security_group(): conn = boto3.client("rds", region_name="us-west-2") conn.describe_db_security_groups.when.called_with( DBSecurityGroupName="not-a-sg" @@ -1076,7 +1076,7 @@ def test_delete_database_security_group(): @mock_rds2 -def test_delete_non_existant_security_group(): +def test_delete_non_existent_security_group(): conn = boto3.client("rds", region_name="us-west-2") conn.delete_db_security_group.when.called_with( DBSecurityGroupName="not-a-db" @@ -1615,7 +1615,7 @@ def test_describe_db_parameter_group(): @mock_rds2 -def test_describe_non_existant_db_parameter_group(): +def test_describe_non_existent_db_parameter_group(): conn = boto3.client("rds", region_name="us-west-2") db_parameter_groups = 
conn.describe_db_parameter_groups(DBParameterGroupName="test") len(db_parameter_groups["DBParameterGroups"]).should.equal(0) @@ -1669,10 +1669,10 @@ def test_modify_db_parameter_group(): @mock_rds2 -def test_delete_non_existant_db_parameter_group(): +def test_delete_non_existent_db_parameter_group(): conn = boto3.client("rds", region_name="us-west-2") conn.delete_db_parameter_group.when.called_with( - DBParameterGroupName="non-existant" + DBParameterGroupName="non-existent" ).should.throw(ClientError) @@ -1689,3 +1689,36 @@ def test_create_parameter_group_with_tags(): ResourceName="arn:aws:rds:us-west-2:1234567890:pg:test" ) result["TagList"].should.equal([{"Value": "bar", "Key": "foo"}]) + + +@mock_rds2 +def test_create_db_with_iam_authentication(): + conn = boto3.client("rds", region_name="us-west-2") + + database = conn.create_db_instance( + DBInstanceIdentifier="rds", + DBInstanceClass="db.t1.micro", + Engine="postgres", + EnableIAMDatabaseAuthentication=True, + ) + + db_instance = database["DBInstance"] + db_instance["IAMDatabaseAuthenticationEnabled"].should.equal(True) + + +@mock_rds2 +def test_create_db_snapshot_with_iam_authentication(): + conn = boto3.client("rds", region_name="us-west-2") + + conn.create_db_instance( + DBInstanceIdentifier="rds", + DBInstanceClass="db.t1.micro", + Engine="postgres", + EnableIAMDatabaseAuthentication=True, + ) + + snapshot = conn.create_db_snapshot( + DBInstanceIdentifier="rds", DBSnapshotIdentifier="snapshot" + ).get("DBSnapshot") + + snapshot.get("IAMDatabaseAuthenticationEnabled").should.equal(True) diff --git a/tests/test_resourcegroupstaggingapi/test_resourcegroupstaggingapi.py b/tests/test_resourcegroupstaggingapi/test_resourcegroupstaggingapi.py index 84f7a8b86..3ee517ce8 100644 --- a/tests/test_resourcegroupstaggingapi/test_resourcegroupstaggingapi.py +++ b/tests/test_resourcegroupstaggingapi/test_resourcegroupstaggingapi.py @@ -21,7 +21,10 @@ def test_get_resources_s3(): # Create 4 buckets for i in range(1, 5): 
i_str = str(i) - s3_client.create_bucket(Bucket="test_bucket" + i_str) + s3_client.create_bucket( + Bucket="test_bucket" + i_str, + CreateBucketConfiguration={"LocationConstraint": "eu-central-1"}, + ) s3_client.put_bucket_tagging( Bucket="test_bucket" + i_str, Tagging={"TagSet": [{"Key": "key" + i_str, "Value": "value" + i_str}]}, diff --git a/tests/test_route53/test_route53.py b/tests/test_route53/test_route53.py index 0e9a1e2c0..746c78719 100644 --- a/tests/test_route53/test_route53.py +++ b/tests/test_route53/test_route53.py @@ -862,6 +862,8 @@ def test_list_resource_record_sets_name_type_filters(): StartRecordName=all_records[start_with][1], ) + response["IsTruncated"].should.equal(False) + returned_records = [ (record["Type"], record["Name"]) for record in response["ResourceRecordSets"] ] diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index 682213d13..2193f8b27 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -5,6 +5,7 @@ import datetime import os import sys +from boto3 import Session from six.moves.urllib.request import urlopen from six.moves.urllib.error import HTTPError from functools import wraps @@ -27,6 +28,7 @@ from parameterized import parameterized import six import requests import tests.backport_assert_raises # noqa +from moto.s3.responses import DEFAULT_REGION_NAME from nose import SkipTest from nose.tools import assert_raises @@ -68,7 +70,7 @@ class MyModel(object): self.value = value def save(self): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.put_object(Bucket="mybucket", Key=self.name, Body=self.value) @@ -119,7 +121,7 @@ def test_append_to_value__empty_key(): @mock_s3 def test_my_model_save(): # Create Bucket so that test can run - conn = boto3.resource("s3", region_name="us-east-1") + conn = boto3.resource("s3", region_name=DEFAULT_REGION_NAME) conn.create_bucket(Bucket="mybucket") #################################### @@ -133,7 +135,7 
@@ def test_my_model_save(): @mock_s3 def test_key_etag(): - conn = boto3.resource("s3", region_name="us-east-1") + conn = boto3.resource("s3", region_name=DEFAULT_REGION_NAME) conn.create_bucket(Bucket="mybucket") model_instance = MyModel("steve", "is awesome") @@ -519,9 +521,9 @@ def test_bucket_with_dash(): def test_create_existing_bucket(): "Trying to create a bucket that already exists should raise an Error" conn = boto.s3.connect_to_region("us-west-2") - conn.create_bucket("foobar") + conn.create_bucket("foobar", location="us-west-2") with assert_raises(S3CreateError): - conn.create_bucket("foobar") + conn.create_bucket("foobar", location="us-west-2") @mock_s3_deprecated @@ -535,7 +537,7 @@ def test_create_existing_bucket_in_us_east_1(): us-east-1. In us-east-1 region, you will get 200 OK, but it is no-op (if bucket exists it Amazon S3 will not do anything). """ - conn = boto.s3.connect_to_region("us-east-1") + conn = boto.s3.connect_to_region(DEFAULT_REGION_NAME) conn.create_bucket("foobar") bucket = conn.create_bucket("foobar") bucket.name.should.equal("foobar") @@ -544,7 +546,7 @@ def test_create_existing_bucket_in_us_east_1(): @mock_s3_deprecated def test_other_region(): conn = S3Connection("key", "secret", host="s3-website-ap-southeast-2.amazonaws.com") - conn.create_bucket("foobar") + conn.create_bucket("foobar", location="ap-southeast-2") list(conn.get_bucket("foobar").get_all_keys()).should.equal([]) @@ -566,7 +568,7 @@ def test_bucket_deletion(): # Get non-existing bucket conn.get_bucket.when.called_with("foobar").should.throw(S3ResponseError) - # Delete non-existant bucket + # Delete non-existent bucket conn.delete_bucket.when.called_with("foobar").should.throw(S3ResponseError) @@ -995,7 +997,9 @@ def test_bucket_acl_switching(): def test_s3_object_in_public_bucket(): s3 = boto3.resource("s3") bucket = s3.Bucket("test-bucket") - bucket.create(ACL="public-read") + bucket.create( + ACL="public-read", CreateBucketConfiguration={"LocationConstraint": 
"us-west-1"} + ) bucket.put_object(Body=b"ABCD", Key="file.txt") s3_anonymous = boto3.resource("s3") @@ -1026,7 +1030,9 @@ def test_s3_object_in_public_bucket(): def test_s3_object_in_private_bucket(): s3 = boto3.resource("s3") bucket = s3.Bucket("test-bucket") - bucket.create(ACL="private") + bucket.create( + ACL="private", CreateBucketConfiguration={"LocationConstraint": "us-west-1"} + ) bucket.put_object(ACL="private", Body=b"ABCD", Key="file.txt") s3_anonymous = boto3.resource("s3") @@ -1086,19 +1092,425 @@ def test_setting_content_encoding(): @mock_s3_deprecated def test_bucket_location(): conn = boto.s3.connect_to_region("us-west-2") - bucket = conn.create_bucket("mybucket") + bucket = conn.create_bucket("mybucket", location="us-west-2") bucket.get_location().should.equal("us-west-2") @mock_s3 -def test_bucket_location_us_east_1(): - cli = boto3.client("s3") +def test_bucket_location_default(): + cli = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" # No LocationConstraint ==> us-east-1 cli.create_bucket(Bucket=bucket_name) cli.get_bucket_location(Bucket=bucket_name)["LocationConstraint"].should.equal(None) +@mock_s3 +def test_bucket_location_nondefault(): + cli = boto3.client("s3", region_name="eu-central-1") + bucket_name = "mybucket" + # LocationConstraint set for non default regions + resp = cli.create_bucket( + Bucket=bucket_name, + CreateBucketConfiguration={"LocationConstraint": "eu-central-1"}, + ) + cli.get_bucket_location(Bucket=bucket_name)["LocationConstraint"].should.equal( + "eu-central-1" + ) + + +# Test uses current Region to determine whether to throw an error +# Region is retrieved based on current URL +# URL will always be localhost in Server Mode, so can't run it there +if not settings.TEST_SERVER_MODE: + + @mock_s3 + def test_s3_location_should_error_outside_useast1(): + s3 = boto3.client("s3", region_name="eu-west-1") + + bucket_name = "asdfasdfsdfdsfasda" + + with assert_raises(ClientError) as e: + 
s3.create_bucket(Bucket=bucket_name) + e.exception.response["Error"]["Message"].should.equal( + "The unspecified location constraint is incompatible for the region specific endpoint this request was sent to." + ) + + # All tests for s3-control cannot be run under the server without a modification of the + # hosts file on your system. This is due to the fact that the URL to the host is in the form of: + # ACCOUNT_ID.s3-control.amazonaws.com <-- That Account ID part is the problem. If you want to + # make use of the moto server, update your hosts file for `THE_ACCOUNT_ID_FOR_MOTO.localhost` + # and this will work fine. + + @mock_s3 + def test_get_public_access_block_for_account(): + from moto.s3.models import ACCOUNT_ID + + client = boto3.client("s3control", region_name="us-west-2") + + # With an invalid account ID: + with assert_raises(ClientError) as ce: + client.get_public_access_block(AccountId="111111111111") + assert ce.exception.response["Error"]["Code"] == "AccessDenied" + + # Without one defined: + with assert_raises(ClientError) as ce: + client.get_public_access_block(AccountId=ACCOUNT_ID) + assert ( + ce.exception.response["Error"]["Code"] + == "NoSuchPublicAccessBlockConfiguration" + ) + + # Put a with an invalid account ID: + with assert_raises(ClientError) as ce: + client.put_public_access_block( + AccountId="111111111111", + PublicAccessBlockConfiguration={"BlockPublicAcls": True}, + ) + assert ce.exception.response["Error"]["Code"] == "AccessDenied" + + # Put with an invalid PAB: + with assert_raises(ClientError) as ce: + client.put_public_access_block( + AccountId=ACCOUNT_ID, PublicAccessBlockConfiguration={} + ) + assert ce.exception.response["Error"]["Code"] == "InvalidRequest" + assert ( + "Must specify at least one configuration." 
+ in ce.exception.response["Error"]["Message"] + ) + + # Correct PAB: + client.put_public_access_block( + AccountId=ACCOUNT_ID, + PublicAccessBlockConfiguration={ + "BlockPublicAcls": True, + "IgnorePublicAcls": True, + "BlockPublicPolicy": True, + "RestrictPublicBuckets": True, + }, + ) + + # Get the correct PAB (for all regions): + for region in Session().get_available_regions("s3control"): + region_client = boto3.client("s3control", region_name=region) + assert region_client.get_public_access_block(AccountId=ACCOUNT_ID)[ + "PublicAccessBlockConfiguration" + ] == { + "BlockPublicAcls": True, + "IgnorePublicAcls": True, + "BlockPublicPolicy": True, + "RestrictPublicBuckets": True, + } + + # Delete with an invalid account ID: + with assert_raises(ClientError) as ce: + client.delete_public_access_block(AccountId="111111111111") + assert ce.exception.response["Error"]["Code"] == "AccessDenied" + + # Delete successfully: + client.delete_public_access_block(AccountId=ACCOUNT_ID) + + # Confirm that it's deleted: + with assert_raises(ClientError) as ce: + client.get_public_access_block(AccountId=ACCOUNT_ID) + assert ( + ce.exception.response["Error"]["Code"] + == "NoSuchPublicAccessBlockConfiguration" + ) + + @mock_s3 + @mock_config + def test_config_list_account_pab(): + from moto.s3.models import ACCOUNT_ID + + client = boto3.client("s3control", region_name="us-west-2") + config_client = boto3.client("config", region_name="us-west-2") + + # Create the aggregator: + account_aggregation_source = { + "AccountIds": [ACCOUNT_ID], + "AllAwsRegions": True, + } + config_client.put_configuration_aggregator( + ConfigurationAggregatorName="testing", + AccountAggregationSources=[account_aggregation_source], + ) + + # Without a PAB in place: + result = config_client.list_discovered_resources( + resourceType="AWS::S3::AccountPublicAccessBlock" + ) + assert not result["resourceIdentifiers"] + result = config_client.list_aggregate_discovered_resources( + 
ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + ) + assert not result["ResourceIdentifiers"] + + # Create a PAB: + client.put_public_access_block( + AccountId=ACCOUNT_ID, + PublicAccessBlockConfiguration={ + "BlockPublicAcls": True, + "IgnorePublicAcls": True, + "BlockPublicPolicy": True, + "RestrictPublicBuckets": True, + }, + ) + + # Test that successful queries work (non-aggregated): + result = config_client.list_discovered_resources( + resourceType="AWS::S3::AccountPublicAccessBlock" + ) + assert result["resourceIdentifiers"] == [ + { + "resourceType": "AWS::S3::AccountPublicAccessBlock", + "resourceId": ACCOUNT_ID, + } + ] + result = config_client.list_discovered_resources( + resourceType="AWS::S3::AccountPublicAccessBlock", + resourceIds=[ACCOUNT_ID, "nope"], + ) + assert result["resourceIdentifiers"] == [ + { + "resourceType": "AWS::S3::AccountPublicAccessBlock", + "resourceId": ACCOUNT_ID, + } + ] + result = config_client.list_discovered_resources( + resourceType="AWS::S3::AccountPublicAccessBlock", resourceName="" + ) + assert result["resourceIdentifiers"] == [ + { + "resourceType": "AWS::S3::AccountPublicAccessBlock", + "resourceId": ACCOUNT_ID, + } + ] + + # Test that successful queries work (aggregated): + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + ) + regions = {region for region in Session().get_available_regions("config")} + for r in result["ResourceIdentifiers"]: + regions.remove(r.pop("SourceRegion")) + assert r == { + "ResourceType": "AWS::S3::AccountPublicAccessBlock", + "SourceAccountId": ACCOUNT_ID, + "ResourceId": ACCOUNT_ID, + } + + # Just check that the len is the same -- this should be reasonable + regions = {region for region in Session().get_available_regions("config")} + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + 
ConfigurationAggregatorName="testing", + Filters={"ResourceName": ""}, + ) + assert len(regions) == len(result["ResourceIdentifiers"]) + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + Filters={"ResourceName": "", "ResourceId": ACCOUNT_ID}, + ) + assert len(regions) == len(result["ResourceIdentifiers"]) + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + Filters={ + "ResourceName": "", + "ResourceId": ACCOUNT_ID, + "Region": "us-west-2", + }, + ) + assert ( + result["ResourceIdentifiers"][0]["SourceRegion"] == "us-west-2" + and len(result["ResourceIdentifiers"]) == 1 + ) + + # Test aggregator pagination: + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + Limit=1, + ) + regions = sorted( + [region for region in Session().get_available_regions("config")] + ) + assert result["ResourceIdentifiers"][0] == { + "ResourceType": "AWS::S3::AccountPublicAccessBlock", + "SourceAccountId": ACCOUNT_ID, + "ResourceId": ACCOUNT_ID, + "SourceRegion": regions[0], + } + assert result["NextToken"] == regions[1] + + # Get the next region: + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + Limit=1, + NextToken=regions[1], + ) + assert result["ResourceIdentifiers"][0] == { + "ResourceType": "AWS::S3::AccountPublicAccessBlock", + "SourceAccountId": ACCOUNT_ID, + "ResourceId": ACCOUNT_ID, + "SourceRegion": regions[1], + } + + # Non-aggregated with incorrect info: + result = config_client.list_discovered_resources( + resourceType="AWS::S3::AccountPublicAccessBlock", resourceName="nope" + ) + assert not result["resourceIdentifiers"] + result = 
config_client.list_discovered_resources( + resourceType="AWS::S3::AccountPublicAccessBlock", resourceIds=["nope"] + ) + assert not result["resourceIdentifiers"] + + # Aggregated with incorrect info: + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + Filters={"ResourceName": "nope"}, + ) + assert not result["ResourceIdentifiers"] + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + Filters={"ResourceId": "nope"}, + ) + assert not result["ResourceIdentifiers"] + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + Filters={"Region": "Nope"}, + ) + assert not result["ResourceIdentifiers"] + + @mock_s3 + @mock_config + def test_config_get_account_pab(): + from moto.s3.models import ACCOUNT_ID + + client = boto3.client("s3control", region_name="us-west-2") + config_client = boto3.client("config", region_name="us-west-2") + + # Create the aggregator: + account_aggregation_source = { + "AccountIds": [ACCOUNT_ID], + "AllAwsRegions": True, + } + config_client.put_configuration_aggregator( + ConfigurationAggregatorName="testing", + AccountAggregationSources=[account_aggregation_source], + ) + + # Without a PAB in place: + with assert_raises(ClientError) as ce: + config_client.get_resource_config_history( + resourceType="AWS::S3::AccountPublicAccessBlock", resourceId=ACCOUNT_ID + ) + assert ( + ce.exception.response["Error"]["Code"] == "ResourceNotDiscoveredException" + ) + # aggregate + result = config_client.batch_get_resource_config( + resourceKeys=[ + { + "resourceType": "AWS::S3::AccountPublicAccessBlock", + "resourceId": "ACCOUNT_ID", + } + ] + ) + assert not result["baseConfigurationItems"] + result = config_client.batch_get_aggregate_resource_config( + 
ConfigurationAggregatorName="testing", + ResourceIdentifiers=[ + { + "SourceAccountId": ACCOUNT_ID, + "SourceRegion": "us-west-2", + "ResourceId": ACCOUNT_ID, + "ResourceType": "AWS::S3::AccountPublicAccessBlock", + "ResourceName": "", + } + ], + ) + assert not result["BaseConfigurationItems"] + + # Create a PAB: + client.put_public_access_block( + AccountId=ACCOUNT_ID, + PublicAccessBlockConfiguration={ + "BlockPublicAcls": True, + "IgnorePublicAcls": True, + "BlockPublicPolicy": True, + "RestrictPublicBuckets": True, + }, + ) + + # Get the proper config: + proper_config = { + "blockPublicAcls": True, + "ignorePublicAcls": True, + "blockPublicPolicy": True, + "restrictPublicBuckets": True, + } + result = config_client.get_resource_config_history( + resourceType="AWS::S3::AccountPublicAccessBlock", resourceId=ACCOUNT_ID + ) + assert ( + json.loads(result["configurationItems"][0]["configuration"]) + == proper_config + ) + assert ( + result["configurationItems"][0]["accountId"] + == result["configurationItems"][0]["resourceId"] + == ACCOUNT_ID + ) + result = config_client.batch_get_resource_config( + resourceKeys=[ + { + "resourceType": "AWS::S3::AccountPublicAccessBlock", + "resourceId": ACCOUNT_ID, + } + ] + ) + assert len(result["baseConfigurationItems"]) == 1 + assert ( + json.loads(result["baseConfigurationItems"][0]["configuration"]) + == proper_config + ) + assert ( + result["baseConfigurationItems"][0]["accountId"] + == result["baseConfigurationItems"][0]["resourceId"] + == ACCOUNT_ID + ) + + for region in Session().get_available_regions("s3control"): + result = config_client.batch_get_aggregate_resource_config( + ConfigurationAggregatorName="testing", + ResourceIdentifiers=[ + { + "SourceAccountId": ACCOUNT_ID, + "SourceRegion": region, + "ResourceId": ACCOUNT_ID, + "ResourceType": "AWS::S3::AccountPublicAccessBlock", + "ResourceName": "", + } + ], + ) + assert len(result["BaseConfigurationItems"]) == 1 + assert ( + 
json.loads(result["BaseConfigurationItems"][0]["configuration"]) + == proper_config + ) + + @mock_s3_deprecated def test_ranged_get(): conn = boto.connect_s3() @@ -1222,7 +1634,7 @@ def test_key_with_trailing_slash_in_ordinary_calling_format(): @mock_s3 def test_boto3_key_etag(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket="mybucket") s3.put_object(Bucket="mybucket", Key="steve", Body=b"is awesome") resp = s3.get_object(Bucket="mybucket", Key="steve") @@ -1231,7 +1643,7 @@ def test_boto3_key_etag(): @mock_s3 def test_website_redirect_location(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket="mybucket") s3.put_object(Bucket="mybucket", Key="steve", Body=b"is awesome") @@ -1248,7 +1660,7 @@ def test_website_redirect_location(): @mock_s3 def test_boto3_list_objects_truncated_response(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket="mybucket") s3.put_object(Bucket="mybucket", Key="one", Body=b"1") s3.put_object(Bucket="mybucket", Key="two", Body=b"22") @@ -1294,7 +1706,7 @@ def test_boto3_list_objects_truncated_response(): @mock_s3 def test_boto3_list_keys_xml_escaped(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket="mybucket") key_name = "Q&A.txt" s3.put_object(Bucket="mybucket", Key=key_name, Body=b"is awesome") @@ -1314,7 +1726,7 @@ def test_boto3_list_keys_xml_escaped(): @mock_s3 def test_boto3_list_objects_v2_common_prefix_pagination(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket="mybucket") max_keys = 1 @@ -1343,7 +1755,7 @@ def test_boto3_list_objects_v2_common_prefix_pagination(): @mock_s3 def 
test_boto3_list_objects_v2_truncated_response(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket="mybucket") s3.put_object(Bucket="mybucket", Key="one", Body=b"1") s3.put_object(Bucket="mybucket", Key="two", Body=b"22") @@ -1400,7 +1812,7 @@ def test_boto3_list_objects_v2_truncated_response(): @mock_s3 def test_boto3_list_objects_v2_truncated_response_start_after(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket="mybucket") s3.put_object(Bucket="mybucket", Key="one", Body=b"1") s3.put_object(Bucket="mybucket", Key="two", Body=b"22") @@ -1442,7 +1854,7 @@ def test_boto3_list_objects_v2_truncated_response_start_after(): @mock_s3 def test_boto3_list_objects_v2_fetch_owner(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket="mybucket") s3.put_object(Bucket="mybucket", Key="one", Body=b"11") @@ -1456,7 +1868,7 @@ def test_boto3_list_objects_v2_fetch_owner(): @mock_s3 def test_boto3_list_objects_v2_truncate_combined_keys_and_folders(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket="mybucket") s3.put_object(Bucket="mybucket", Key="1/2", Body="") s3.put_object(Bucket="mybucket", Key="2", Body="") @@ -1486,7 +1898,7 @@ def test_boto3_list_objects_v2_truncate_combined_keys_and_folders(): @mock_s3 def test_boto3_bucket_create(): - s3 = boto3.resource("s3", region_name="us-east-1") + s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket="blah") s3.Object("blah", "hello.txt").put(Body="some text") @@ -1511,10 +1923,11 @@ def test_bucket_create_duplicate(): @mock_s3 def test_bucket_create_force_us_east_1(): - s3 = boto3.resource("s3", region_name="us-east-1") + s3 = boto3.resource("s3", 
region_name=DEFAULT_REGION_NAME) with assert_raises(ClientError) as exc: s3.create_bucket( - Bucket="blah", CreateBucketConfiguration={"LocationConstraint": "us-east-1"} + Bucket="blah", + CreateBucketConfiguration={"LocationConstraint": DEFAULT_REGION_NAME}, ) exc.exception.response["Error"]["Code"].should.equal("InvalidLocationConstraint") @@ -1522,7 +1935,9 @@ def test_bucket_create_force_us_east_1(): @mock_s3 def test_boto3_bucket_create_eu_central(): s3 = boto3.resource("s3", region_name="eu-central-1") - s3.create_bucket(Bucket="blah") + s3.create_bucket( + Bucket="blah", CreateBucketConfiguration={"LocationConstraint": "eu-central-1"} + ) s3.Object("blah", "hello.txt").put(Body="some text") @@ -1533,7 +1948,7 @@ def test_boto3_bucket_create_eu_central(): @mock_s3 def test_boto3_head_object(): - s3 = boto3.resource("s3", region_name="us-east-1") + s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket="blah") s3.Object("blah", "hello.txt").put(Body="some text") @@ -1551,7 +1966,7 @@ def test_boto3_head_object(): @mock_s3 def test_boto3_bucket_deletion(): - cli = boto3.client("s3", region_name="us-east-1") + cli = boto3.client("s3", region_name=DEFAULT_REGION_NAME) cli.create_bucket(Bucket="foobar") cli.put_object(Bucket="foobar", Key="the-key", Body="some value") @@ -1582,7 +1997,7 @@ def test_boto3_bucket_deletion(): @mock_s3 def test_boto3_get_object(): - s3 = boto3.resource("s3", region_name="us-east-1") + s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket="blah") s3.Object("blah", "hello.txt").put(Body="some text") @@ -1599,7 +2014,7 @@ def test_boto3_get_object(): @mock_s3 def test_boto3_get_missing_object_with_part_number(): - s3 = boto3.resource("s3", region_name="us-east-1") + s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket="blah") with assert_raises(ClientError) as e: @@ -1612,7 +2027,7 @@ def test_boto3_get_missing_object_with_part_number(): @mock_s3 def 
test_boto3_head_object_with_versioning(): - s3 = boto3.resource("s3", region_name="us-east-1") + s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME) bucket = s3.create_bucket(Bucket="blah") bucket.Versioning().enable() @@ -1642,7 +2057,7 @@ def test_boto3_head_object_with_versioning(): @mock_s3 def test_boto3_copy_object_with_versioning(): - client = boto3.client("s3", region_name="us-east-1") + client = boto3.client("s3", region_name=DEFAULT_REGION_NAME) client.create_bucket( Bucket="blah", CreateBucketConfiguration={"LocationConstraint": "eu-west-1"} @@ -1706,7 +2121,7 @@ def test_boto3_copy_object_with_versioning(): @mock_s3 def test_boto3_copy_object_from_unversioned_to_versioned_bucket(): - client = boto3.client("s3", region_name="us-east-1") + client = boto3.client("s3", region_name=DEFAULT_REGION_NAME) client.create_bucket( Bucket="src", CreateBucketConfiguration={"LocationConstraint": "eu-west-1"} @@ -1728,9 +2143,37 @@ def test_boto3_copy_object_from_unversioned_to_versioned_bucket(): obj2_version_new.should_not.equal(None) +@mock_s3 +def test_boto3_copy_object_with_replacement_tagging(): + client = boto3.client("s3", region_name=DEFAULT_REGION_NAME) + client.create_bucket(Bucket="mybucket") + client.put_object( + Bucket="mybucket", Key="original", Body=b"test", Tagging="tag=old" + ) + + client.copy_object( + CopySource={"Bucket": "mybucket", "Key": "original"}, + Bucket="mybucket", + Key="copy1", + TaggingDirective="REPLACE", + Tagging="tag=new", + ) + client.copy_object( + CopySource={"Bucket": "mybucket", "Key": "original"}, + Bucket="mybucket", + Key="copy2", + TaggingDirective="COPY", + ) + + tags1 = client.get_object_tagging(Bucket="mybucket", Key="copy1")["TagSet"] + tags1.should.equal([{"Key": "tag", "Value": "new"}]) + tags2 = client.get_object_tagging(Bucket="mybucket", Key="copy2")["TagSet"] + tags2.should.equal([{"Key": "tag", "Value": "old"}]) + + @mock_s3 def test_boto3_deleted_versionings_list(): - client = boto3.client("s3", 
region_name="us-east-1") + client = boto3.client("s3", region_name=DEFAULT_REGION_NAME) client.create_bucket(Bucket="blah") client.put_bucket_versioning( @@ -1747,7 +2190,7 @@ def test_boto3_deleted_versionings_list(): @mock_s3 def test_boto3_delete_versioned_bucket(): - client = boto3.client("s3", region_name="us-east-1") + client = boto3.client("s3", region_name=DEFAULT_REGION_NAME) client.create_bucket(Bucket="blah") client.put_bucket_versioning( @@ -1762,7 +2205,7 @@ def test_boto3_delete_versioned_bucket(): @mock_s3 def test_boto3_get_object_if_modified_since(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "blah" s3.create_bucket(Bucket=bucket_name) @@ -1782,7 +2225,7 @@ def test_boto3_get_object_if_modified_since(): @mock_s3 def test_boto3_head_object_if_modified_since(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "blah" s3.create_bucket(Bucket=bucket_name) @@ -1804,7 +2247,7 @@ def test_boto3_head_object_if_modified_since(): @reduced_min_part_size def test_boto3_multipart_etag(): # Create Bucket so that test can run - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket="mybucket") upload_id = s3.create_multipart_upload(Bucket="mybucket", Key="the-key")["UploadId"] @@ -1848,7 +2291,7 @@ def test_boto3_multipart_etag(): @mock_s3 @reduced_min_part_size def test_boto3_multipart_part_size(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket="mybucket") mpu = s3.create_multipart_upload(Bucket="mybucket", Key="the-key") @@ -1883,7 +2326,7 @@ def test_boto3_multipart_part_size(): @mock_s3 def test_boto3_put_object_with_tagging(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) 
bucket_name = "mybucket" key = "key-with-tags" s3.create_bucket(Bucket=bucket_name) @@ -1897,7 +2340,7 @@ def test_boto3_put_object_with_tagging(): @mock_s3 def test_boto3_put_bucket_tagging(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" s3.create_bucket(Bucket=bucket_name) @@ -1944,7 +2387,7 @@ def test_boto3_put_bucket_tagging(): @mock_s3 def test_boto3_get_bucket_tagging(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" s3.create_bucket(Bucket=bucket_name) s3.put_bucket_tagging( @@ -1975,7 +2418,7 @@ def test_boto3_get_bucket_tagging(): @mock_s3 def test_boto3_delete_bucket_tagging(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" s3.create_bucket(Bucket=bucket_name) @@ -2002,7 +2445,7 @@ def test_boto3_delete_bucket_tagging(): @mock_s3 def test_boto3_put_bucket_cors(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" s3.create_bucket(Bucket=bucket_name) @@ -2062,7 +2505,7 @@ def test_boto3_put_bucket_cors(): @mock_s3 def test_boto3_get_bucket_cors(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" s3.create_bucket(Bucket=bucket_name) @@ -2103,7 +2546,7 @@ def test_boto3_get_bucket_cors(): @mock_s3 def test_boto3_delete_bucket_cors(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" s3.create_bucket(Bucket=bucket_name) s3.put_bucket_cors( @@ -2127,7 +2570,7 @@ def test_boto3_delete_bucket_cors(): @mock_s3 def test_put_bucket_acl_body(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) 
s3.create_bucket(Bucket="bucket") bucket_owner = s3.get_bucket_acl(Bucket="bucket")["Owner"] s3.put_bucket_acl( @@ -2225,7 +2668,7 @@ def test_put_bucket_acl_body(): @mock_s3 def test_put_bucket_notification(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket="bucket") # With no configuration: @@ -2421,7 +2864,7 @@ def test_put_bucket_notification(): @mock_s3 def test_put_bucket_notification_errors(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket="bucket") # With incorrect ARNs: @@ -2488,7 +2931,7 @@ def test_put_bucket_notification_errors(): @mock_s3 def test_boto3_put_bucket_logging(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" log_bucket = "logbucket" wrong_region_bucket = "wrongregionlogbucket" @@ -2667,7 +3110,7 @@ def test_boto3_put_bucket_logging(): @mock_s3 def test_boto3_put_object_tagging(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" key = "key-with-tags" s3.create_bucket(Bucket=bucket_name) @@ -2711,7 +3154,7 @@ def test_boto3_put_object_tagging(): @mock_s3 def test_boto3_put_object_tagging_on_earliest_version(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" key = "key-with-tags" s3.create_bucket(Bucket=bucket_name) @@ -2778,7 +3221,7 @@ def test_boto3_put_object_tagging_on_earliest_version(): @mock_s3 def test_boto3_put_object_tagging_on_both_version(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" key = "key-with-tags" s3.create_bucket(Bucket=bucket_name) @@ -2858,7 +3301,7 @@ def 
test_boto3_put_object_tagging_on_both_version(): @mock_s3 def test_boto3_put_object_tagging_with_single_tag(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" key = "key-with-tags" s3.create_bucket(Bucket=bucket_name) @@ -2876,7 +3319,7 @@ def test_boto3_put_object_tagging_with_single_tag(): @mock_s3 def test_boto3_get_object_tagging(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" key = "key-with-tags" s3.create_bucket(Bucket=bucket_name) @@ -2905,7 +3348,7 @@ def test_boto3_get_object_tagging(): @mock_s3 def test_boto3_list_object_versions(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" key = "key-with-versions" s3.create_bucket(Bucket=bucket_name) @@ -2927,7 +3370,7 @@ def test_boto3_list_object_versions(): @mock_s3 def test_boto3_list_object_versions_with_versioning_disabled(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" key = "key-with-versions" s3.create_bucket(Bucket=bucket_name) @@ -2950,7 +3393,7 @@ def test_boto3_list_object_versions_with_versioning_disabled(): @mock_s3 def test_boto3_list_object_versions_with_versioning_enabled_late(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" key = "key-with-versions" s3.create_bucket(Bucket=bucket_name) @@ -2978,7 +3421,7 @@ def test_boto3_list_object_versions_with_versioning_enabled_late(): @mock_s3 def test_boto3_bad_prefix_list_object_versions(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" key = "key-with-versions" bad_prefix = "key-that-does-not-exist" @@ -2997,7 +3440,7 @@ def 
test_boto3_bad_prefix_list_object_versions(): @mock_s3 def test_boto3_delete_markers(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" key = "key-with-versions-and-unicode-ó" s3.create_bucket(Bucket=bucket_name) @@ -3040,7 +3483,7 @@ def test_boto3_delete_markers(): @mock_s3 def test_boto3_multiple_delete_markers(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" key = "key-with-versions-and-unicode-ó" s3.create_bucket(Bucket=bucket_name) @@ -3091,7 +3534,7 @@ def test_boto3_multiple_delete_markers(): def test_get_stream_gzipped(): payload = b"this is some stuff here" - s3_client = boto3.client("s3", region_name="us-east-1") + s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3_client.create_bucket(Bucket="moto-tests") buffer_ = BytesIO() with GzipFile(fileobj=buffer_, mode="w") as f: @@ -3129,7 +3572,7 @@ TEST_XML = """\ @mock_s3 def test_boto3_bucket_name_too_long(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) with assert_raises(ClientError) as exc: s3.create_bucket(Bucket="x" * 64) exc.exception.response["Error"]["Code"].should.equal("InvalidBucketName") @@ -3137,7 +3580,7 @@ def test_boto3_bucket_name_too_long(): @mock_s3 def test_boto3_bucket_name_too_short(): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) with assert_raises(ClientError) as exc: s3.create_bucket(Bucket="x" * 2) exc.exception.response["Error"]["Code"].should.equal("InvalidBucketName") @@ -3146,7 +3589,7 @@ def test_boto3_bucket_name_too_short(): @mock_s3 def test_accelerated_none_when_unspecified(): bucket_name = "some_bucket" - s3 = boto3.client("s3") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket=bucket_name) resp = 
s3.get_bucket_accelerate_configuration(Bucket=bucket_name) resp.shouldnt.have.key("Status") @@ -3155,7 +3598,7 @@ def test_accelerated_none_when_unspecified(): @mock_s3 def test_can_enable_bucket_acceleration(): bucket_name = "some_bucket" - s3 = boto3.client("s3") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket=bucket_name) resp = s3.put_bucket_accelerate_configuration( Bucket=bucket_name, AccelerateConfiguration={"Status": "Enabled"} @@ -3171,7 +3614,7 @@ def test_can_enable_bucket_acceleration(): @mock_s3 def test_can_suspend_bucket_acceleration(): bucket_name = "some_bucket" - s3 = boto3.client("s3") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket=bucket_name) resp = s3.put_bucket_accelerate_configuration( Bucket=bucket_name, AccelerateConfiguration={"Status": "Enabled"} @@ -3191,7 +3634,10 @@ def test_can_suspend_bucket_acceleration(): def test_suspending_acceleration_on_not_configured_bucket_does_nothing(): bucket_name = "some_bucket" s3 = boto3.client("s3") - s3.create_bucket(Bucket=bucket_name) + s3.create_bucket( + Bucket=bucket_name, + CreateBucketConfiguration={"LocationConstraint": "us-west-1"}, + ) resp = s3.put_bucket_accelerate_configuration( Bucket=bucket_name, AccelerateConfiguration={"Status": "Suspended"} ) @@ -3205,7 +3651,7 @@ def test_suspending_acceleration_on_not_configured_bucket_does_nothing(): @mock_s3 def test_accelerate_configuration_status_validation(): bucket_name = "some_bucket" - s3 = boto3.client("s3") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) s3.create_bucket(Bucket=bucket_name) with assert_raises(ClientError) as exc: s3.put_bucket_accelerate_configuration( @@ -3217,7 +3663,7 @@ def test_accelerate_configuration_status_validation(): @mock_s3 def test_accelerate_configuration_is_not_supported_when_bucket_name_has_dots(): bucket_name = "some.bucket.with.dots" - s3 = boto3.client("s3") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) 
s3.create_bucket(Bucket=bucket_name) with assert_raises(ClientError) as exc: s3.put_bucket_accelerate_configuration( @@ -3227,7 +3673,7 @@ def test_accelerate_configuration_is_not_supported_when_bucket_name_has_dots(): def store_and_read_back_a_key(key): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" body = b"Some body" @@ -3255,7 +3701,7 @@ def test_root_dir_with_empty_name_works(): ) @mock_s3 def test_delete_objects_with_url_encoded_key(key): - s3 = boto3.client("s3", region_name="us-east-1") + s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME) bucket_name = "mybucket" body = b"Some body" @@ -3282,7 +3728,7 @@ def test_delete_objects_with_url_encoded_key(key): @mock_s3 @mock_config def test_public_access_block(): - client = boto3.client("s3") + client = boto3.client("s3", region_name=DEFAULT_REGION_NAME) client.create_bucket(Bucket="mybucket") # Try to get the public access block (should not exist by default) @@ -3349,7 +3795,7 @@ def test_public_access_block(): assert ce.exception.response["ResponseMetadata"]["HTTPStatusCode"] == 400 # Test that things work with AWS Config: - config_client = boto3.client("config", region_name="us-east-1") + config_client = boto3.client("config", region_name=DEFAULT_REGION_NAME) result = config_client.get_resource_config_history( resourceType="AWS::S3::Bucket", resourceId="mybucket" ) diff --git a/tests/test_s3/test_s3_lifecycle.py b/tests/test_s3/test_s3_lifecycle.py index 260b248f1..0a2e66b5c 100644 --- a/tests/test_s3/test_s3_lifecycle.py +++ b/tests/test_s3/test_s3_lifecycle.py @@ -16,7 +16,7 @@ from moto import mock_s3_deprecated, mock_s3 @mock_s3_deprecated def test_lifecycle_create(): conn = boto.s3.connect_to_region("us-west-1") - bucket = conn.create_bucket("foobar") + bucket = conn.create_bucket("foobar", location="us-west-1") lifecycle = Lifecycle() lifecycle.add_rule("myid", "", "Enabled", 30) @@ -33,7 +33,9 @@ def 
test_lifecycle_create(): @mock_s3 def test_lifecycle_with_filters(): client = boto3.client("s3") - client.create_bucket(Bucket="bucket") + client.create_bucket( + Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"} + ) # Create a lifecycle rule with a Filter (no tags): lfc = { @@ -245,7 +247,9 @@ def test_lifecycle_with_filters(): @mock_s3 def test_lifecycle_with_eodm(): client = boto3.client("s3") - client.create_bucket(Bucket="bucket") + client.create_bucket( + Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"} + ) lfc = { "Rules": [ @@ -293,7 +297,9 @@ def test_lifecycle_with_eodm(): @mock_s3 def test_lifecycle_with_nve(): client = boto3.client("s3") - client.create_bucket(Bucket="bucket") + client.create_bucket( + Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"} + ) lfc = { "Rules": [ @@ -327,7 +333,9 @@ def test_lifecycle_with_nve(): @mock_s3 def test_lifecycle_with_nvt(): client = boto3.client("s3") - client.create_bucket(Bucket="bucket") + client.create_bucket( + Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"} + ) lfc = { "Rules": [ @@ -393,7 +401,9 @@ def test_lifecycle_with_nvt(): @mock_s3 def test_lifecycle_with_aimu(): client = boto3.client("s3") - client.create_bucket(Bucket="bucket") + client.create_bucket( + Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"} + ) lfc = { "Rules": [ @@ -432,7 +442,7 @@ def test_lifecycle_with_aimu(): @mock_s3_deprecated def test_lifecycle_with_glacier_transition(): conn = boto.s3.connect_to_region("us-west-1") - bucket = conn.create_bucket("foobar") + bucket = conn.create_bucket("foobar", location="us-west-1") lifecycle = Lifecycle() transition = Transition(days=30, storage_class="GLACIER") @@ -451,7 +461,7 @@ def test_lifecycle_with_glacier_transition(): @mock_s3_deprecated def test_lifecycle_multi(): conn = boto.s3.connect_to_region("us-west-1") - bucket = 
conn.create_bucket("foobar") + bucket = conn.create_bucket("foobar", location="us-west-1") date = "2022-10-12T00:00:00.000Z" sc = "GLACIER" @@ -493,7 +503,7 @@ def test_lifecycle_multi(): @mock_s3_deprecated def test_lifecycle_delete(): conn = boto.s3.connect_to_region("us-west-1") - bucket = conn.create_bucket("foobar") + bucket = conn.create_bucket("foobar", location="us-west-1") lifecycle = Lifecycle() lifecycle.add_rule(expiration=30) diff --git a/tests/test_s3/test_s3_storageclass.py b/tests/test_s3/test_s3_storageclass.py index dbdc85c42..f1a0479b2 100644 --- a/tests/test_s3/test_s3_storageclass.py +++ b/tests/test_s3/test_s3_storageclass.py @@ -11,7 +11,7 @@ from moto import mock_s3 @mock_s3 def test_s3_storage_class_standard(): - s3 = boto3.client("s3") + s3 = boto3.client("s3", region_name="us-east-1") s3.create_bucket(Bucket="Bucket") # add an object to the bucket with standard storage @@ -26,7 +26,9 @@ def test_s3_storage_class_standard(): @mock_s3 def test_s3_storage_class_infrequent_access(): s3 = boto3.client("s3") - s3.create_bucket(Bucket="Bucket") + s3.create_bucket( + Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-2"} + ) # add an object to the bucket with standard storage @@ -46,7 +48,9 @@ def test_s3_storage_class_infrequent_access(): def test_s3_storage_class_intelligent_tiering(): s3 = boto3.client("s3") - s3.create_bucket(Bucket="Bucket") + s3.create_bucket( + Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-east-2"} + ) s3.put_object( Bucket="Bucket", Key="my_key_infrequent", @@ -61,7 +65,7 @@ def test_s3_storage_class_intelligent_tiering(): @mock_s3 def test_s3_storage_class_copy(): - s3 = boto3.client("s3") + s3 = boto3.client("s3", region_name="us-east-1") s3.create_bucket(Bucket="Bucket") s3.put_object( Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD" @@ -86,7 +90,7 @@ def test_s3_storage_class_copy(): @mock_s3 def test_s3_invalid_copied_storage_class(): - s3 = 
boto3.client("s3") + s3 = boto3.client("s3", region_name="us-east-1") s3.create_bucket(Bucket="Bucket") s3.put_object( Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD" @@ -119,7 +123,9 @@ def test_s3_invalid_copied_storage_class(): @mock_s3 def test_s3_invalid_storage_class(): s3 = boto3.client("s3") - s3.create_bucket(Bucket="Bucket") + s3.create_bucket( + Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"} + ) # Try to add an object with an invalid storage class with assert_raises(ClientError) as err: @@ -137,7 +143,9 @@ def test_s3_invalid_storage_class(): @mock_s3 def test_s3_default_storage_class(): s3 = boto3.client("s3") - s3.create_bucket(Bucket="Bucket") + s3.create_bucket( + Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"} + ) s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body") @@ -150,7 +158,9 @@ def test_s3_default_storage_class(): @mock_s3 def test_s3_copy_object_error_for_glacier_storage_class(): s3 = boto3.client("s3") - s3.create_bucket(Bucket="Bucket") + s3.create_bucket( + Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"} + ) s3.put_object( Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="GLACIER" @@ -169,7 +179,9 @@ def test_s3_copy_object_error_for_glacier_storage_class(): @mock_s3 def test_s3_copy_object_error_for_deep_archive_storage_class(): s3 = boto3.client("s3") - s3.create_bucket(Bucket="Bucket") + s3.create_bucket( + Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"} + ) s3.put_object( Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="DEEP_ARCHIVE" diff --git a/tests/test_s3bucket_path/test_s3bucket_path.py b/tests/test_s3bucket_path/test_s3bucket_path.py index e204d0527..a1bdc5b02 100644 --- a/tests/test_s3bucket_path/test_s3bucket_path.py +++ b/tests/test_s3bucket_path/test_s3bucket_path.py @@ -174,7 +174,7 @@ def test_bucket_deletion(): # Get non-existing 
bucket conn.get_bucket.when.called_with("foobar").should.throw(S3ResponseError) - # Delete non-existant bucket + # Delete non-existent bucket conn.delete_bucket.when.called_with("foobar").should.throw(S3ResponseError) diff --git a/tests/test_ses/test_ses_sns_boto3.py b/tests/test_ses/test_ses_sns_boto3.py index fc58d88aa..43d4000bf 100644 --- a/tests/test_ses/test_ses_sns_boto3.py +++ b/tests/test_ses/test_ses_sns_boto3.py @@ -40,6 +40,8 @@ def __setup_feedback_env__( ) # Verify SES domain ses_conn.verify_domain_identity(Domain=domain) + # Specify email address to allow for raw e-mails to be processed + ses_conn.verify_email_identity(EmailAddress="test@example.com") # Setup SES notification topic if expected_msg is not None: ses_conn.set_identity_notification_topic( @@ -47,7 +49,7 @@ def __setup_feedback_env__( ) -def __test_sns_feedback__(addr, expected_msg): +def __test_sns_feedback__(addr, expected_msg, raw_email=False): region_name = "us-east-1" ses_conn = boto3.client("ses", region_name=region_name) sns_conn = boto3.client("sns", region_name=region_name) @@ -73,7 +75,18 @@ def __test_sns_feedback__(addr, expected_msg): "Body": {"Text": {"Data": "test body"}}, }, ) - ses_conn.send_email(**kwargs) + if raw_email: + kwargs.pop("Message") + kwargs.pop("Destination") + kwargs.update( + { + "Destinations": [addr + "@" + domain], + "RawMessage": {"Data": bytearray("raw_email", "utf-8")}, + } + ) + ses_conn.send_raw_email(**kwargs) + else: + ses_conn.send_email(**kwargs) # Wait for messages in the queues queue = sqs_conn.get_queue_by_name(QueueName=queue) @@ -112,3 +125,12 @@ def test_sns_feedback_complaint(): @mock_ses def test_sns_feedback_delivery(): __test_sns_feedback__(SESFeedback.SUCCESS_ADDR, SESFeedback.DELIVERY) + + +@mock_sqs +@mock_sns +@mock_ses +def test_sns_feedback_delivery_raw_email(): + __test_sns_feedback__( + SESFeedback.SUCCESS_ADDR, SESFeedback.DELIVERY, raw_email=True + ) diff --git a/tests/test_sns/test_application.py 
b/tests/test_sns/test_application.py index e4fe93d53..1f5526219 100644 --- a/tests/test_sns/test_application.py +++ b/tests/test_sns/test_application.py @@ -88,8 +88,8 @@ def test_list_platform_applications(): conn.create_platform_application(name="application1", platform="APNS") conn.create_platform_application(name="application2", platform="APNS") - applications_repsonse = conn.list_platform_applications() - applications = applications_repsonse["ListPlatformApplicationsResponse"][ + applications_response = conn.list_platform_applications() + applications = applications_response["ListPlatformApplicationsResponse"][ "ListPlatformApplicationsResult" ]["PlatformApplications"] applications.should.have.length_of(2) @@ -101,8 +101,8 @@ def test_delete_platform_application(): conn.create_platform_application(name="application1", platform="APNS") conn.create_platform_application(name="application2", platform="APNS") - applications_repsonse = conn.list_platform_applications() - applications = applications_repsonse["ListPlatformApplicationsResponse"][ + applications_response = conn.list_platform_applications() + applications = applications_response["ListPlatformApplicationsResponse"][ "ListPlatformApplicationsResult" ]["PlatformApplications"] applications.should.have.length_of(2) @@ -110,8 +110,8 @@ def test_delete_platform_application(): application_arn = applications[0]["PlatformApplicationArn"] conn.delete_platform_application(application_arn) - applications_repsonse = conn.list_platform_applications() - applications = applications_repsonse["ListPlatformApplicationsResponse"][ + applications_response = conn.list_platform_applications() + applications = applications_response["ListPlatformApplicationsResponse"][ "ListPlatformApplicationsResult" ]["PlatformApplications"] applications.should.have.length_of(1) diff --git a/tests/test_sns/test_application_boto3.py b/tests/test_sns/test_application_boto3.py index 6f9be2926..f23b07543 100644 --- 
a/tests/test_sns/test_application_boto3.py +++ b/tests/test_sns/test_application_boto3.py @@ -88,8 +88,8 @@ def test_list_platform_applications(): Name="application2", Platform="APNS", Attributes={} ) - applications_repsonse = conn.list_platform_applications() - applications = applications_repsonse["PlatformApplications"] + applications_response = conn.list_platform_applications() + applications = applications_response["PlatformApplications"] applications.should.have.length_of(2) @@ -103,15 +103,15 @@ def test_delete_platform_application(): Name="application2", Platform="APNS", Attributes={} ) - applications_repsonse = conn.list_platform_applications() - applications = applications_repsonse["PlatformApplications"] + applications_response = conn.list_platform_applications() + applications = applications_response["PlatformApplications"] applications.should.have.length_of(2) application_arn = applications[0]["PlatformApplicationArn"] conn.delete_platform_application(PlatformApplicationArn=application_arn) - applications_repsonse = conn.list_platform_applications() - applications = applications_repsonse["PlatformApplications"] + applications_response = conn.list_platform_applications() + applications = applications_response["PlatformApplications"] applications.should.have.length_of(1) diff --git a/tests/test_sns/test_publishing_boto3.py b/tests/test_sns/test_publishing_boto3.py index d85c8fefe..51e0a9f57 100644 --- a/tests/test_sns/test_publishing_boto3.py +++ b/tests/test_sns/test_publishing_boto3.py @@ -806,7 +806,7 @@ def test_filtering_string_array_with_string_no_array_no_match(): topic.publish( Message="no_match", MessageAttributes={ - "price": {"DataType": "String.Array", "StringValue": "one hundread"} + "price": {"DataType": "String.Array", "StringValue": "one hundred"} }, ) diff --git a/tests/test_sqs/test_sqs.py b/tests/test_sqs/test_sqs.py index 639d6e51c..f2ab8c37c 100644 --- a/tests/test_sqs/test_sqs.py +++ b/tests/test_sqs/test_sqs.py @@ -132,6 +132,35 @@ 
def test_create_queue_with_tags(): ) +@mock_sqs +def test_create_queue_with_policy(): + client = boto3.client("sqs", region_name="us-east-1") + response = client.create_queue( + QueueName="test-queue", + Attributes={ + "Policy": json.dumps( + { + "Version": "2012-10-17", + "Id": "test", + "Statement": [{"Effect": "Allow", "Principal": "*", "Action": "*"}], + } + ) + }, + ) + queue_url = response["QueueUrl"] + + response = client.get_queue_attributes( + QueueUrl=queue_url, AttributeNames=["Policy"] + ) + json.loads(response["Attributes"]["Policy"]).should.equal( + { + "Version": "2012-10-17", + "Id": "test", + "Statement": [{"Effect": "Allow", "Principal": "*", "Action": "*"}], + } + ) + + @mock_sqs def test_get_queue_url(): client = boto3.client("sqs", region_name="us-east-1") @@ -331,7 +360,20 @@ def test_delete_queue(): @mock_sqs def test_get_queue_attributes(): client = boto3.client("sqs", region_name="us-east-1") - response = client.create_queue(QueueName="test-queue") + + dlq_resp = client.create_queue(QueueName="test-dlr-queue") + dlq_arn1 = client.get_queue_attributes(QueueUrl=dlq_resp["QueueUrl"])["Attributes"][ + "QueueArn" + ] + + response = client.create_queue( + QueueName="test-queue", + Attributes={ + "RedrivePolicy": json.dumps( + {"deadLetterTargetArn": dlq_arn1, "maxReceiveCount": 2} + ), + }, + ) queue_url = response["QueueUrl"] response = client.get_queue_attributes(QueueUrl=queue_url) @@ -356,6 +398,7 @@ def test_get_queue_attributes(): "ApproximateNumberOfMessages", "MaximumMessageSize", "QueueArn", + "RedrivePolicy", "VisibilityTimeout", ], ) @@ -366,6 +409,9 @@ def test_get_queue_attributes(): "MaximumMessageSize": "65536", "QueueArn": "arn:aws:sqs:us-east-1:{}:test-queue".format(ACCOUNT_ID), "VisibilityTimeout": "30", + "RedrivePolicy": json.dumps( + {"deadLetterTargetArn": dlq_arn1, "maxReceiveCount": 2} + ), } ) @@ -1169,18 +1215,169 @@ def test_permissions(): Actions=["SendMessage"], ) - with assert_raises(ClientError): - 
client.add_permission( - QueueUrl=queue_url, - Label="account2", - AWSAccountIds=["222211111111"], - Actions=["SomeRubbish"], - ) + response = client.get_queue_attributes( + QueueUrl=queue_url, AttributeNames=["Policy"] + ) + policy = json.loads(response["Attributes"]["Policy"]) + policy["Version"].should.equal("2012-10-17") + policy["Id"].should.equal( + "arn:aws:sqs:us-east-1:123456789012:test-dlr-queue.fifo/SQSDefaultPolicy" + ) + sorted(policy["Statement"], key=lambda x: x["Sid"]).should.equal( + [ + { + "Sid": "account1", + "Effect": "Allow", + "Principal": {"AWS": "arn:aws:iam::111111111111:root"}, + "Action": "SQS:*", + "Resource": "arn:aws:sqs:us-east-1:123456789012:test-dlr-queue.fifo", + }, + { + "Sid": "account2", + "Effect": "Allow", + "Principal": {"AWS": "arn:aws:iam::222211111111:root"}, + "Action": "SQS:SendMessage", + "Resource": "arn:aws:sqs:us-east-1:123456789012:test-dlr-queue.fifo", + }, + ] + ) client.remove_permission(QueueUrl=queue_url, Label="account2") - with assert_raises(ClientError): - client.remove_permission(QueueUrl=queue_url, Label="non_existant") + response = client.get_queue_attributes( + QueueUrl=queue_url, AttributeNames=["Policy"] + ) + json.loads(response["Attributes"]["Policy"]).should.equal( + { + "Version": "2012-10-17", + "Id": "arn:aws:sqs:us-east-1:123456789012:test-dlr-queue.fifo/SQSDefaultPolicy", + "Statement": [ + { + "Sid": "account1", + "Effect": "Allow", + "Principal": {"AWS": "arn:aws:iam::111111111111:root"}, + "Action": "SQS:*", + "Resource": "arn:aws:sqs:us-east-1:123456789012:test-dlr-queue.fifo", + }, + ], + } + ) + + +@mock_sqs +def test_add_permission_errors(): + client = boto3.client("sqs", region_name="us-east-1") + response = client.create_queue(QueueName="test-queue") + queue_url = response["QueueUrl"] + client.add_permission( + QueueUrl=queue_url, + Label="test", + AWSAccountIds=["111111111111"], + Actions=["ReceiveMessage"], + ) + + with assert_raises(ClientError) as e: + client.add_permission( + 
QueueUrl=queue_url, + Label="test", + AWSAccountIds=["111111111111"], + Actions=["ReceiveMessage", "SendMessage"], + ) + ex = e.exception + ex.operation_name.should.equal("AddPermission") + ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400) + ex.response["Error"]["Code"].should.contain("InvalidParameterValue") + ex.response["Error"]["Message"].should.equal( + "Value test for parameter Label is invalid. " "Reason: Already exists." + ) + + with assert_raises(ClientError) as e: + client.add_permission( + QueueUrl=queue_url, + Label="test-2", + AWSAccountIds=["111111111111"], + Actions=["RemovePermission"], + ) + ex = e.exception + ex.operation_name.should.equal("AddPermission") + ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400) + ex.response["Error"]["Code"].should.contain("InvalidParameterValue") + ex.response["Error"]["Message"].should.equal( + "Value SQS:RemovePermission for parameter ActionName is invalid. " + "Reason: Only the queue owner is allowed to invoke this action." + ) + + with assert_raises(ClientError) as e: + client.add_permission( + QueueUrl=queue_url, + Label="test-2", + AWSAccountIds=["111111111111"], + Actions=[], + ) + ex = e.exception + ex.operation_name.should.equal("AddPermission") + ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400) + ex.response["Error"]["Code"].should.contain("MissingParameter") + ex.response["Error"]["Message"].should.equal( + "The request must contain the parameter Actions." + ) + + with assert_raises(ClientError) as e: + client.add_permission( + QueueUrl=queue_url, + Label="test-2", + AWSAccountIds=[], + Actions=["ReceiveMessage"], + ) + ex = e.exception + ex.operation_name.should.equal("AddPermission") + ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400) + ex.response["Error"]["Code"].should.contain("InvalidParameterValue") + ex.response["Error"]["Message"].should.equal( + "Value [] for parameter PrincipalId is invalid. Reason: Unable to verify." 
+ ) + + with assert_raises(ClientError) as e: + client.add_permission( + QueueUrl=queue_url, + Label="test-2", + AWSAccountIds=["111111111111"], + Actions=[ + "ChangeMessageVisibility", + "DeleteMessage", + "GetQueueAttributes", + "GetQueueUrl", + "ListDeadLetterSourceQueues", + "PurgeQueue", + "ReceiveMessage", + "SendMessage", + ], + ) + ex = e.exception + ex.operation_name.should.equal("AddPermission") + ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403) + ex.response["Error"]["Code"].should.contain("OverLimit") + ex.response["Error"]["Message"].should.equal( + "8 Actions were found, maximum allowed is 7." + ) + + +@mock_sqs +def test_remove_permission_errors(): + client = boto3.client("sqs", region_name="us-east-1") + response = client.create_queue(QueueName="test-queue") + queue_url = response["QueueUrl"] + + with assert_raises(ClientError) as e: + client.remove_permission(QueueUrl=queue_url, Label="test") + ex = e.exception + ex.operation_name.should.equal("RemovePermission") + ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400) + ex.response["Error"]["Code"].should.contain("InvalidParameterValue") + ex.response["Error"]["Message"].should.equal( + "Value test for parameter Label is invalid. " + "Reason: can't find label on existing policy." 
+ ) @mock_sqs @@ -1562,3 +1759,23 @@ def test_receive_message_for_queue_with_receive_message_wait_time_seconds_set(): ) queue.receive_messages() + + +@mock_sqs +def test_list_queues_limits_to_1000_queues(): + client = boto3.client("sqs", region_name="us-east-1") + + for i in range(1001): + client.create_queue(QueueName="test-queue-{0}".format(i)) + + client.list_queues()["QueueUrls"].should.have.length_of(1000) + client.list_queues(QueueNamePrefix="test-queue")["QueueUrls"].should.have.length_of( + 1000 + ) + + resource = boto3.resource("sqs", region_name="us-east-1") + + list(resource.queues.all()).should.have.length_of(1000) + list(resource.queues.filter(QueueNamePrefix="test-queue")).should.have.length_of( + 1000 + ) diff --git a/tests/test_swf/models/test_domain.py b/tests/test_swf/models/test_domain.py index 389e516df..32940753f 100644 --- a/tests/test_swf/models/test_domain.py +++ b/tests/test_swf/models/test_domain.py @@ -1,107 +1,107 @@ -from collections import namedtuple -import sure # noqa - -from moto.swf.exceptions import SWFUnknownResourceFault -from moto.swf.models import Domain - -# Ensure 'assert_raises' context manager support for Python 2.6 -import tests.backport_assert_raises # noqa - -# Fake WorkflowExecution for tests purposes -WorkflowExecution = namedtuple( - "WorkflowExecution", ["workflow_id", "run_id", "execution_status", "open"] -) - - -def test_domain_short_dict_representation(): - domain = Domain("foo", "52") - domain.to_short_dict().should.equal({"name": "foo", "status": "REGISTERED"}) - - domain.description = "foo bar" - domain.to_short_dict()["description"].should.equal("foo bar") - - -def test_domain_full_dict_representation(): - domain = Domain("foo", "52") - - domain.to_full_dict()["domainInfo"].should.equal(domain.to_short_dict()) - _config = domain.to_full_dict()["configuration"] - _config["workflowExecutionRetentionPeriodInDays"].should.equal("52") - - -def test_domain_string_representation(): - domain = Domain("my-domain", 
"60") - str(domain).should.equal("Domain(name: my-domain, status: REGISTERED)") - - -def test_domain_add_to_activity_task_list(): - domain = Domain("my-domain", "60") - domain.add_to_activity_task_list("foo", "bar") - domain.activity_task_lists.should.equal({"foo": ["bar"]}) - - -def test_domain_activity_tasks(): - domain = Domain("my-domain", "60") - domain.add_to_activity_task_list("foo", "bar") - domain.add_to_activity_task_list("other", "baz") - sorted(domain.activity_tasks).should.equal(["bar", "baz"]) - - -def test_domain_add_to_decision_task_list(): - domain = Domain("my-domain", "60") - domain.add_to_decision_task_list("foo", "bar") - domain.decision_task_lists.should.equal({"foo": ["bar"]}) - - -def test_domain_decision_tasks(): - domain = Domain("my-domain", "60") - domain.add_to_decision_task_list("foo", "bar") - domain.add_to_decision_task_list("other", "baz") - sorted(domain.decision_tasks).should.equal(["bar", "baz"]) - - -def test_domain_get_workflow_execution(): - domain = Domain("my-domain", "60") - - wfe1 = WorkflowExecution( - workflow_id="wf-id-1", run_id="run-id-1", execution_status="OPEN", open=True - ) - wfe2 = WorkflowExecution( - workflow_id="wf-id-1", run_id="run-id-2", execution_status="CLOSED", open=False - ) - wfe3 = WorkflowExecution( - workflow_id="wf-id-2", run_id="run-id-3", execution_status="OPEN", open=True - ) - wfe4 = WorkflowExecution( - workflow_id="wf-id-3", run_id="run-id-4", execution_status="CLOSED", open=False - ) - domain.workflow_executions = [wfe1, wfe2, wfe3, wfe4] - - # get workflow execution through workflow_id and run_id - domain.get_workflow_execution("wf-id-1", run_id="run-id-1").should.equal(wfe1) - domain.get_workflow_execution("wf-id-1", run_id="run-id-2").should.equal(wfe2) - domain.get_workflow_execution("wf-id-3", run_id="run-id-4").should.equal(wfe4) - - domain.get_workflow_execution.when.called_with( - "wf-id-1", run_id="non-existent" - ).should.throw(SWFUnknownResourceFault) - - # get OPEN workflow 
execution by default if no run_id - domain.get_workflow_execution("wf-id-1").should.equal(wfe1) - domain.get_workflow_execution.when.called_with("wf-id-3").should.throw( - SWFUnknownResourceFault - ) - domain.get_workflow_execution.when.called_with("wf-id-non-existent").should.throw( - SWFUnknownResourceFault - ) - - # raise_if_closed attribute - domain.get_workflow_execution( - "wf-id-1", run_id="run-id-1", raise_if_closed=True - ).should.equal(wfe1) - domain.get_workflow_execution.when.called_with( - "wf-id-3", run_id="run-id-4", raise_if_closed=True - ).should.throw(SWFUnknownResourceFault) - - # raise_if_none attribute - domain.get_workflow_execution("foo", raise_if_none=False).should.be.none +from collections import namedtuple +import sure # noqa + +from moto.swf.exceptions import SWFUnknownResourceFault +from moto.swf.models import Domain + +# Ensure 'assert_raises' context manager support for Python 2.6 +import tests.backport_assert_raises # noqa + +# Fake WorkflowExecution for tests purposes +WorkflowExecution = namedtuple( + "WorkflowExecution", ["workflow_id", "run_id", "execution_status", "open"] +) + + +def test_domain_short_dict_representation(): + domain = Domain("foo", "52") + domain.to_short_dict().should.equal({"name": "foo", "status": "REGISTERED"}) + + domain.description = "foo bar" + domain.to_short_dict()["description"].should.equal("foo bar") + + +def test_domain_full_dict_representation(): + domain = Domain("foo", "52") + + domain.to_full_dict()["domainInfo"].should.equal(domain.to_short_dict()) + _config = domain.to_full_dict()["configuration"] + _config["workflowExecutionRetentionPeriodInDays"].should.equal("52") + + +def test_domain_string_representation(): + domain = Domain("my-domain", "60") + str(domain).should.equal("Domain(name: my-domain, status: REGISTERED)") + + +def test_domain_add_to_activity_task_list(): + domain = Domain("my-domain", "60") + domain.add_to_activity_task_list("foo", "bar") + 
domain.activity_task_lists.should.equal({"foo": ["bar"]}) + + +def test_domain_activity_tasks(): + domain = Domain("my-domain", "60") + domain.add_to_activity_task_list("foo", "bar") + domain.add_to_activity_task_list("other", "baz") + sorted(domain.activity_tasks).should.equal(["bar", "baz"]) + + +def test_domain_add_to_decision_task_list(): + domain = Domain("my-domain", "60") + domain.add_to_decision_task_list("foo", "bar") + domain.decision_task_lists.should.equal({"foo": ["bar"]}) + + +def test_domain_decision_tasks(): + domain = Domain("my-domain", "60") + domain.add_to_decision_task_list("foo", "bar") + domain.add_to_decision_task_list("other", "baz") + sorted(domain.decision_tasks).should.equal(["bar", "baz"]) + + +def test_domain_get_workflow_execution(): + domain = Domain("my-domain", "60") + + wfe1 = WorkflowExecution( + workflow_id="wf-id-1", run_id="run-id-1", execution_status="OPEN", open=True + ) + wfe2 = WorkflowExecution( + workflow_id="wf-id-1", run_id="run-id-2", execution_status="CLOSED", open=False + ) + wfe3 = WorkflowExecution( + workflow_id="wf-id-2", run_id="run-id-3", execution_status="OPEN", open=True + ) + wfe4 = WorkflowExecution( + workflow_id="wf-id-3", run_id="run-id-4", execution_status="CLOSED", open=False + ) + domain.workflow_executions = [wfe1, wfe2, wfe3, wfe4] + + # get workflow execution through workflow_id and run_id + domain.get_workflow_execution("wf-id-1", run_id="run-id-1").should.equal(wfe1) + domain.get_workflow_execution("wf-id-1", run_id="run-id-2").should.equal(wfe2) + domain.get_workflow_execution("wf-id-3", run_id="run-id-4").should.equal(wfe4) + + domain.get_workflow_execution.when.called_with( + "wf-id-1", run_id="non-existent" + ).should.throw(SWFUnknownResourceFault) + + # get OPEN workflow execution by default if no run_id + domain.get_workflow_execution("wf-id-1").should.equal(wfe1) + domain.get_workflow_execution.when.called_with("wf-id-3").should.throw( + SWFUnknownResourceFault + ) + 
domain.get_workflow_execution.when.called_with("wf-id-non-existent").should.throw( + SWFUnknownResourceFault + ) + + # raise_if_closed attribute + domain.get_workflow_execution( + "wf-id-1", run_id="run-id-1", raise_if_closed=True + ).should.equal(wfe1) + domain.get_workflow_execution.when.called_with( + "wf-id-3", run_id="run-id-4", raise_if_closed=True + ).should.throw(SWFUnknownResourceFault) + + # raise_if_none attribute + domain.get_workflow_execution("foo", raise_if_none=False).should.be.none diff --git a/tests/test_swf/models/test_timeout.py b/tests/test_swf/models/test_timeout.py index fb52652fd..0ee059065 100644 --- a/tests/test_swf/models/test_timeout.py +++ b/tests/test_swf/models/test_timeout.py @@ -1,19 +1,19 @@ -from freezegun import freeze_time -import sure # noqa - -from moto.swf.models import Timeout - -from ..utils import make_workflow_execution - - -def test_timeout_creation(): - wfe = make_workflow_execution() - - # epoch 1420113600 == "2015-01-01 13:00:00" - timeout = Timeout(wfe, 1420117200, "START_TO_CLOSE") - - with freeze_time("2015-01-01 12:00:00"): - timeout.reached.should.be.falsy - - with freeze_time("2015-01-01 13:00:00"): - timeout.reached.should.be.truthy +from freezegun import freeze_time +import sure # noqa + +from moto.swf.models import Timeout + +from ..utils import make_workflow_execution + + +def test_timeout_creation(): + wfe = make_workflow_execution() + + # epoch 1420113600 == "2015-01-01 13:00:00" + timeout = Timeout(wfe, 1420117200, "START_TO_CLOSE") + + with freeze_time("2015-01-01 12:00:00"): + timeout.reached.should.be.falsy + + with freeze_time("2015-01-01 13:00:00"): + timeout.reached.should.be.truthy diff --git a/tests/test_swf/models/test_workflow_execution.py b/tests/test_swf/models/test_workflow_execution.py index 6c73a9686..503198f46 100644 --- a/tests/test_swf/models/test_workflow_execution.py +++ b/tests/test_swf/models/test_workflow_execution.py @@ -148,6 +148,39 @@ def 
test_workflow_execution_full_dict_representation(): ) +def test_closed_workflow_execution_full_dict_representation(): + domain = get_basic_domain() + wf_type = WorkflowType( + "test-workflow", + "v1.0", + task_list="queue", + default_child_policy="ABANDON", + default_execution_start_to_close_timeout="300", + default_task_start_to_close_timeout="300", + ) + wfe = WorkflowExecution(domain, wf_type, "ab1234") + wfe.execution_status = "CLOSED" + wfe.close_status = "CANCELED" + wfe.close_timestamp = 1420066801.123 + + fd = wfe.to_full_dict() + medium_dict = wfe.to_medium_dict() + medium_dict["closeStatus"] = "CANCELED" + medium_dict["closeTimestamp"] = 1420066801.123 + fd["executionInfo"].should.equal(medium_dict) + fd["openCounts"]["openTimers"].should.equal(0) + fd["openCounts"]["openDecisionTasks"].should.equal(0) + fd["openCounts"]["openActivityTasks"].should.equal(0) + fd["executionConfiguration"].should.equal( + { + "childPolicy": "ABANDON", + "executionStartToCloseTimeout": "300", + "taskList": {"name": "queue"}, + "taskStartToCloseTimeout": "300", + } + ) + + def test_workflow_execution_list_dict_representation(): domain = get_basic_domain() wf_type = WorkflowType( diff --git a/tests/test_swf/responses/test_domains.py b/tests/test_swf/responses/test_domains.py index 638bd410e..199219d27 100644 --- a/tests/test_swf/responses/test_domains.py +++ b/tests/test_swf/responses/test_domains.py @@ -1,114 +1,114 @@ -import boto -from boto.swf.exceptions import SWFResponseError -import sure # noqa - -from moto import mock_swf_deprecated - - -# RegisterDomain endpoint -@mock_swf_deprecated -def test_register_domain(): - conn = boto.connect_swf("the_key", "the_secret") - conn.register_domain("test-domain", "60", description="A test domain") - - all_domains = conn.list_domains("REGISTERED") - domain = all_domains["domainInfos"][0] - - domain["name"].should.equal("test-domain") - domain["status"].should.equal("REGISTERED") - domain["description"].should.equal("A test domain") 
- - -@mock_swf_deprecated -def test_register_already_existing_domain(): - conn = boto.connect_swf("the_key", "the_secret") - conn.register_domain("test-domain", "60", description="A test domain") - - conn.register_domain.when.called_with( - "test-domain", "60", description="A test domain" - ).should.throw(SWFResponseError) - - -@mock_swf_deprecated -def test_register_with_wrong_parameter_type(): - conn = boto.connect_swf("the_key", "the_secret") - - conn.register_domain.when.called_with( - "test-domain", 60, description="A test domain" - ).should.throw(SWFResponseError) - - -# ListDomains endpoint -@mock_swf_deprecated -def test_list_domains_order(): - conn = boto.connect_swf("the_key", "the_secret") - conn.register_domain("b-test-domain", "60") - conn.register_domain("a-test-domain", "60") - conn.register_domain("c-test-domain", "60") - - all_domains = conn.list_domains("REGISTERED") - names = [domain["name"] for domain in all_domains["domainInfos"]] - names.should.equal(["a-test-domain", "b-test-domain", "c-test-domain"]) - - -@mock_swf_deprecated -def test_list_domains_reverse_order(): - conn = boto.connect_swf("the_key", "the_secret") - conn.register_domain("b-test-domain", "60") - conn.register_domain("a-test-domain", "60") - conn.register_domain("c-test-domain", "60") - - all_domains = conn.list_domains("REGISTERED", reverse_order=True) - names = [domain["name"] for domain in all_domains["domainInfos"]] - names.should.equal(["c-test-domain", "b-test-domain", "a-test-domain"]) - - -# DeprecateDomain endpoint -@mock_swf_deprecated -def test_deprecate_domain(): - conn = boto.connect_swf("the_key", "the_secret") - conn.register_domain("test-domain", "60", description="A test domain") - conn.deprecate_domain("test-domain") - - all_domains = conn.list_domains("DEPRECATED") - domain = all_domains["domainInfos"][0] - - domain["name"].should.equal("test-domain") - - -@mock_swf_deprecated -def test_deprecate_already_deprecated_domain(): - conn = 
boto.connect_swf("the_key", "the_secret") - conn.register_domain("test-domain", "60", description="A test domain") - conn.deprecate_domain("test-domain") - - conn.deprecate_domain.when.called_with("test-domain").should.throw(SWFResponseError) - - -@mock_swf_deprecated -def test_deprecate_non_existent_domain(): - conn = boto.connect_swf("the_key", "the_secret") - - conn.deprecate_domain.when.called_with("non-existent").should.throw( - SWFResponseError - ) - - -# DescribeDomain endpoint -@mock_swf_deprecated -def test_describe_domain(): - conn = boto.connect_swf("the_key", "the_secret") - conn.register_domain("test-domain", "60", description="A test domain") - - domain = conn.describe_domain("test-domain") - domain["configuration"]["workflowExecutionRetentionPeriodInDays"].should.equal("60") - domain["domainInfo"]["description"].should.equal("A test domain") - domain["domainInfo"]["name"].should.equal("test-domain") - domain["domainInfo"]["status"].should.equal("REGISTERED") - - -@mock_swf_deprecated -def test_describe_non_existent_domain(): - conn = boto.connect_swf("the_key", "the_secret") - - conn.describe_domain.when.called_with("non-existent").should.throw(SWFResponseError) +import boto +from boto.swf.exceptions import SWFResponseError +import sure # noqa + +from moto import mock_swf_deprecated + + +# RegisterDomain endpoint +@mock_swf_deprecated +def test_register_domain(): + conn = boto.connect_swf("the_key", "the_secret") + conn.register_domain("test-domain", "60", description="A test domain") + + all_domains = conn.list_domains("REGISTERED") + domain = all_domains["domainInfos"][0] + + domain["name"].should.equal("test-domain") + domain["status"].should.equal("REGISTERED") + domain["description"].should.equal("A test domain") + + +@mock_swf_deprecated +def test_register_already_existing_domain(): + conn = boto.connect_swf("the_key", "the_secret") + conn.register_domain("test-domain", "60", description="A test domain") + + 
conn.register_domain.when.called_with( + "test-domain", "60", description="A test domain" + ).should.throw(SWFResponseError) + + +@mock_swf_deprecated +def test_register_with_wrong_parameter_type(): + conn = boto.connect_swf("the_key", "the_secret") + + conn.register_domain.when.called_with( + "test-domain", 60, description="A test domain" + ).should.throw(SWFResponseError) + + +# ListDomains endpoint +@mock_swf_deprecated +def test_list_domains_order(): + conn = boto.connect_swf("the_key", "the_secret") + conn.register_domain("b-test-domain", "60") + conn.register_domain("a-test-domain", "60") + conn.register_domain("c-test-domain", "60") + + all_domains = conn.list_domains("REGISTERED") + names = [domain["name"] for domain in all_domains["domainInfos"]] + names.should.equal(["a-test-domain", "b-test-domain", "c-test-domain"]) + + +@mock_swf_deprecated +def test_list_domains_reverse_order(): + conn = boto.connect_swf("the_key", "the_secret") + conn.register_domain("b-test-domain", "60") + conn.register_domain("a-test-domain", "60") + conn.register_domain("c-test-domain", "60") + + all_domains = conn.list_domains("REGISTERED", reverse_order=True) + names = [domain["name"] for domain in all_domains["domainInfos"]] + names.should.equal(["c-test-domain", "b-test-domain", "a-test-domain"]) + + +# DeprecateDomain endpoint +@mock_swf_deprecated +def test_deprecate_domain(): + conn = boto.connect_swf("the_key", "the_secret") + conn.register_domain("test-domain", "60", description="A test domain") + conn.deprecate_domain("test-domain") + + all_domains = conn.list_domains("DEPRECATED") + domain = all_domains["domainInfos"][0] + + domain["name"].should.equal("test-domain") + + +@mock_swf_deprecated +def test_deprecate_already_deprecated_domain(): + conn = boto.connect_swf("the_key", "the_secret") + conn.register_domain("test-domain", "60", description="A test domain") + conn.deprecate_domain("test-domain") + + 
conn.deprecate_domain.when.called_with("test-domain").should.throw(SWFResponseError) + + +@mock_swf_deprecated +def test_deprecate_non_existent_domain(): + conn = boto.connect_swf("the_key", "the_secret") + + conn.deprecate_domain.when.called_with("non-existent").should.throw( + SWFResponseError + ) + + +# DescribeDomain endpoint +@mock_swf_deprecated +def test_describe_domain(): + conn = boto.connect_swf("the_key", "the_secret") + conn.register_domain("test-domain", "60", description="A test domain") + + domain = conn.describe_domain("test-domain") + domain["configuration"]["workflowExecutionRetentionPeriodInDays"].should.equal("60") + domain["domainInfo"]["description"].should.equal("A test domain") + domain["domainInfo"]["name"].should.equal("test-domain") + domain["domainInfo"]["status"].should.equal("REGISTERED") + + +@mock_swf_deprecated +def test_describe_non_existent_domain(): + conn = boto.connect_swf("the_key", "the_secret") + + conn.describe_domain.when.called_with("non-existent").should.throw(SWFResponseError) diff --git a/tests/test_swf/test_utils.py b/tests/test_swf/test_utils.py index 328342bbe..143804ca9 100644 --- a/tests/test_swf/test_utils.py +++ b/tests/test_swf/test_utils.py @@ -1,9 +1,9 @@ -import sure # noqa - -from moto.swf.utils import decapitalize - - -def test_decapitalize(): - cases = {"fooBar": "fooBar", "FooBar": "fooBar", "FOO BAR": "fOO BAR"} - for before, after in cases.items(): - decapitalize(before).should.equal(after) +import sure # noqa + +from moto.swf.utils import decapitalize + + +def test_decapitalize(): + cases = {"fooBar": "fooBar", "FooBar": "fooBar", "FOO BAR": "fOO BAR"} + for before, after in cases.items(): + decapitalize(before).should.equal(after) diff --git a/tests/test_swf/utils.py b/tests/test_swf/utils.py index 48c2cbd94..2b2a2d025 100644 --- a/tests/test_swf/utils.py +++ b/tests/test_swf/utils.py @@ -5,7 +5,7 @@ from moto.swf.models import ActivityType, Domain, WorkflowType, WorkflowExecutio # Some useful 
constants # Here are some activity timeouts we use in moto/swf tests ; they're extracted -# from semi-real world example, the goal is mostly to have predictible and +# from semi-real world example, the goal is mostly to have predictable and # intuitive behaviour in moto/swf own tests... ACTIVITY_TASK_TIMEOUTS = { "heartbeatTimeout": "300", # 5 mins diff --git a/tests/test_utilities/test_tagging_service.py b/tests/test_utilities/test_tagging_service.py index 94415cb2a..249e903fe 100644 --- a/tests/test_utilities/test_tagging_service.py +++ b/tests/test_utilities/test_tagging_service.py @@ -1,53 +1,79 @@ -import unittest +import sure from moto.utilities.tagging_service import TaggingService -class TestTaggingService(unittest.TestCase): - def test_list_empty(self): - svc = TaggingService() - result = svc.list_tags_for_resource('test') - self.assertEqual(result, {'Tags': []}) +def test_list_empty(): + svc = TaggingService() + result = svc.list_tags_for_resource("test") - def test_create_tag(self): - svc = TaggingService('TheTags', 'TagKey', 'TagValue') - tags = [{'TagKey': 'key_key', 'TagValue': 'value_value'}] - svc.tag_resource('arn', tags) - actual = svc.list_tags_for_resource('arn') - expected = {'TheTags': [{'TagKey': 'key_key', 'TagValue': 'value_value'}]} - self.assertDictEqual(expected, actual) - - def test_create_tag_without_value(self): - svc = TaggingService() - tags = [{'Key': 'key_key'}] - svc.tag_resource('arn', tags) - actual = svc.list_tags_for_resource('arn') - expected = {'Tags': [{'Key': 'key_key', 'Value': ''}]} - self.assertDictEqual(expected, actual) - - def test_delete_tag(self): - svc = TaggingService() - tags = [{'Key': 'key_key', 'Value': 'value_value'}] - svc.tag_resource('arn', tags) - svc.untag_resource('arn', ['key_key']) - result = svc.list_tags_for_resource('arn') - self.assertEqual( - result, {'Tags': []}) - - def test_list_empty_delete(self): - svc = TaggingService() - svc.untag_resource('arn', ['key_key']) - result = 
svc.list_tags_for_resource('arn') - self.assertEqual( - result, {'Tags': []}) - - def test_extract_tag_names(self): - svc = TaggingService() - tags = [{'Key': 'key1', 'Value': 'value1'}, {'Key': 'key2', 'Value': 'value2'}] - actual = svc.extract_tag_names(tags) - expected = ['key1', 'key2'] - self.assertEqual(expected, actual) + {"Tags": []}.should.be.equal(result) -if __name__ == '__main__': - unittest.main() +def test_create_tag(): + svc = TaggingService("TheTags", "TagKey", "TagValue") + tags = [{"TagKey": "key_key", "TagValue": "value_value"}] + svc.tag_resource("arn", tags) + actual = svc.list_tags_for_resource("arn") + expected = {"TheTags": [{"TagKey": "key_key", "TagValue": "value_value"}]} + + expected.should.be.equal(actual) + + +def test_create_tag_without_value(): + svc = TaggingService() + tags = [{"Key": "key_key"}] + svc.tag_resource("arn", tags) + actual = svc.list_tags_for_resource("arn") + expected = {"Tags": [{"Key": "key_key", "Value": None}]} + + expected.should.be.equal(actual) + + +def test_delete_tag_using_names(): + svc = TaggingService() + tags = [{"Key": "key_key", "Value": "value_value"}] + svc.tag_resource("arn", tags) + svc.untag_resource_using_names("arn", ["key_key"]) + result = svc.list_tags_for_resource("arn") + + {"Tags": []}.should.be.equal(result) + + +def test_delete_all_tags_for_resource(): + svc = TaggingService() + tags = [{"Key": "key_key", "Value": "value_value"}] + tags2 = [{"Key": "key_key2", "Value": "value_value2"}] + svc.tag_resource("arn", tags) + svc.tag_resource("arn", tags2) + svc.delete_all_tags_for_resource("arn") + result = svc.list_tags_for_resource("arn") + + {"Tags": []}.should.be.equal(result) + + +def test_list_empty_delete(): + svc = TaggingService() + svc.untag_resource_using_names("arn", ["key_key"]) + result = svc.list_tags_for_resource("arn") + + {"Tags": []}.should.be.equal(result) + + +def test_delete_tag_using_tags(): + svc = TaggingService() + tags = [{"Key": "key_key", "Value": "value_value"}] + 
svc.tag_resource("arn", tags) + svc.untag_resource_using_tags("arn", tags) + result = svc.list_tags_for_resource("arn") + + {"Tags": []}.should.be.equal(result) + + +def test_extract_tag_names(): + svc = TaggingService() + tags = [{"Key": "key1", "Value": "value1"}, {"Key": "key2", "Value": "value2"}] + actual = svc.extract_tag_names(tags) + expected = ["key1", "key2"] + + expected.should.be.equal(actual)