diff --git a/.travis.yml b/.travis.yml index ac9322211..8f218134b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -26,11 +26,12 @@ install: fi docker run --rm -t --name motoserver -e TEST_SERVER_MODE=true -e AWS_SECRET_ACCESS_KEY=server_secret -e AWS_ACCESS_KEY_ID=server_key -v `pwd`:/moto -p 5000:5000 -v /var/run/docker.sock:/var/run/docker.sock python:${PYTHON_DOCKER_TAG} /moto/travis_moto_server.sh & fi + travis_retry pip install -r requirements-dev.txt travis_retry pip install boto==2.45.0 travis_retry pip install boto3 travis_retry pip install dist/moto*.gz travis_retry pip install coveralls==1.1 - travis_retry pip install -r requirements-dev.txt + travis_retry pip install coverage==4.5.4 if [ "$TEST_SERVER_MODE" = "true" ]; then python wait_for.py diff --git a/README.md b/README.md index f5c45a6b6..6fb942aef 100644 --- a/README.md +++ b/README.md @@ -450,6 +450,16 @@ boto3.resource( ) ``` +### Caveats +The standalone server has some caveats with some services. The following services +require that you update your hosts file for your code to work properly: + +1. `s3-control` + +For the above services, this is required because the hostname is in the form of `AWS_ACCOUNT_ID.localhost`. +As a result, you need to add that entry to your hosts file for your tests to function properly. + + ## Install diff --git a/docs/conf.py b/docs/conf.py index 28a4b4e6b..a902d0ecf 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -56,9 +56,10 @@ author = 'Steve Pulec' # built documents. # # The short X.Y version. -version = '0.4.10' +import moto +version = moto.__version__ # The full version, including alpha/beta/rc tags. -release = '0.4.10' +release = moto.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/docs/docs/getting_started.rst b/docs/docs/getting_started.rst index d52e76235..ffe37f3a0 100644 --- a/docs/docs/getting_started.rst +++ b/docs/docs/getting_started.rst @@ -24,8 +24,7 @@ For example, we have the following code we want to test: .. sourcecode:: python - import boto - from boto.s3.key import Key + import boto3 class MyModel(object): def __init__(self, name, value): @@ -33,11 +32,8 @@ For example, we have the following code we want to test: self.value = value def save(self): - conn = boto.connect_s3() - bucket = conn.get_bucket('mybucket') - k = Key(bucket) - k.key = self.name - k.set_contents_from_string(self.value) + s3 = boto3.client('s3', region_name='us-east-1') + s3.put_object(Bucket='mybucket', Key=self.name, Body=self.value) There are several ways to do this, but you should keep in mind that Moto creates a full, blank environment. @@ -48,20 +44,23 @@ With a decorator wrapping, all the calls to S3 are automatically mocked out. .. sourcecode:: python - import boto + import boto3 from moto import mock_s3 from mymodule import MyModel @mock_s3 def test_my_model_save(): - conn = boto.connect_s3() + conn = boto3.resource('s3', region_name='us-east-1') # We need to create the bucket since this is all in Moto's 'virtual' AWS account - conn.create_bucket('mybucket') + conn.create_bucket(Bucket='mybucket') model_instance = MyModel('steve', 'is awesome') model_instance.save() - assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome' + body = conn.Object('mybucket', 'steve').get()[ + 'Body'].read().decode("utf-8") + + assert body == 'is awesome' Context manager ~~~~~~~~~~~~~~~ @@ -72,13 +71,16 @@ Same as the Decorator, every call inside the ``with`` statement is mocked out. 
def test_my_model_save(): with mock_s3(): - conn = boto.connect_s3() - conn.create_bucket('mybucket') + conn = boto3.resource('s3', region_name='us-east-1') + conn.create_bucket(Bucket='mybucket') model_instance = MyModel('steve', 'is awesome') model_instance.save() - assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome' + body = conn.Object('mybucket', 'steve').get()[ + 'Body'].read().decode("utf-8") + + assert body == 'is awesome' Raw ~~~ @@ -91,13 +93,16 @@ You can also start and stop the mocking manually. mock = mock_s3() mock.start() - conn = boto.connect_s3() - conn.create_bucket('mybucket') + conn = boto3.resource('s3', region_name='us-east-1') + conn.create_bucket(Bucket='mybucket') model_instance = MyModel('steve', 'is awesome') model_instance.save() - assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome' + body = conn.Object('mybucket', 'steve').get()[ + 'Body'].read().decode("utf-8") + + assert body == 'is awesome' mock.stop() diff --git a/moto/apigateway/exceptions.py b/moto/apigateway/exceptions.py index 2a306ab99..ccb870f52 100644 --- a/moto/apigateway/exceptions.py +++ b/moto/apigateway/exceptions.py @@ -85,6 +85,15 @@ class NoMethodDefined(BadRequestException): ) +class AuthorizerNotFoundException(RESTError): + code = 404 + + def __init__(self): + super(AuthorizerNotFoundException, self).__init__( + "NotFoundException", "Invalid Authorizer identifier specified" + ) + + class StageNotFoundException(RESTError): code = 404 diff --git a/moto/apigateway/models.py b/moto/apigateway/models.py index 937b9b08c..5b02e6204 100644 --- a/moto/apigateway/models.py +++ b/moto/apigateway/models.py @@ -28,6 +28,7 @@ from .exceptions import ( InvalidHttpEndpoint, InvalidResourcePathException, InvalidRequestInput, + AuthorizerNotFoundException, StageNotFoundException, RoleNotSpecified, NoIntegrationDefined, @@ -117,14 +118,15 @@ class Resource(BaseModel): self.api_id = api_id self.path_part 
= path_part self.parent_id = parent_id - self.resource_methods = {"GET": {}} + self.resource_methods = {} def to_dict(self): response = { "path": self.get_path(), "id": self.id, - "resourceMethods": self.resource_methods, } + if self.resource_methods: + response["resourceMethods"] = self.resource_methods if self.parent_id: response["parentId"] = self.parent_id response["pathPart"] = self.path_part @@ -186,6 +188,54 @@ class Resource(BaseModel): return self.resource_methods[method_type].pop("methodIntegration") +class Authorizer(BaseModel, dict): + def __init__(self, id, name, authorizer_type, **kwargs): + super(Authorizer, self).__init__() + self["id"] = id + self["name"] = name + self["type"] = authorizer_type + if kwargs.get("provider_arns"): + self["providerARNs"] = kwargs.get("provider_arns") + if kwargs.get("auth_type"): + self["authType"] = kwargs.get("auth_type") + if kwargs.get("authorizer_uri"): + self["authorizerUri"] = kwargs.get("authorizer_uri") + if kwargs.get("authorizer_credentials"): + self["authorizerCredentials"] = kwargs.get("authorizer_credentials") + if kwargs.get("identity_source"): + self["identitySource"] = kwargs.get("identity_source") + if kwargs.get("identity_validation_expression"): + self["identityValidationExpression"] = kwargs.get( + "identity_validation_expression" + ) + self["authorizerResultTtlInSeconds"] = kwargs.get("authorizer_result_ttl") + + def apply_operations(self, patch_operations): + for op in patch_operations: + if "/authorizerUri" in op["path"]: + self["authorizerUri"] = op["value"] + elif "/authorizerCredentials" in op["path"]: + self["authorizerCredentials"] = op["value"] + elif "/authorizerResultTtlInSeconds" in op["path"]: + self["authorizerResultTtlInSeconds"] = int(op["value"]) + elif "/authType" in op["path"]: + self["authType"] = op["value"] + elif "/identitySource" in op["path"]: + self["identitySource"] = op["value"] + elif "/identityValidationExpression" in op["path"]: + self["identityValidationExpression"] 
= op["value"] + elif "/name" in op["path"]: + self["name"] = op["value"] + elif "/providerARNs" in op["path"]: + # TODO: add and remove + raise Exception('Patch operation for "%s" not implemented' % op["path"]) + elif "/type" in op["path"]: + self["type"] = op["value"] + else: + raise Exception('Patch operation "%s" not implemented' % op["op"]) + return self + + class Stage(BaseModel, dict): def __init__( self, @@ -411,6 +461,7 @@ class RestAPI(BaseModel): self.tags = kwargs.get("tags") or {} self.deployments = {} + self.authorizers = {} self.stages = {} self.resources = {} @@ -478,6 +529,34 @@ class RestAPI(BaseModel): ), ) + def create_authorizer( + self, + id, + name, + authorizer_type, + provider_arns=None, + auth_type=None, + authorizer_uri=None, + authorizer_credentials=None, + identity_source=None, + identiy_validation_expression=None, + authorizer_result_ttl=None, + ): + authorizer = Authorizer( + id=id, + name=name, + authorizer_type=authorizer_type, + provider_arns=provider_arns, + auth_type=auth_type, + authorizer_uri=authorizer_uri, + authorizer_credentials=authorizer_credentials, + identity_source=identity_source, + identiy_validation_expression=identiy_validation_expression, + authorizer_result_ttl=authorizer_result_ttl, + ) + self.authorizers[id] = authorizer + return authorizer + def create_stage( self, name, @@ -517,6 +596,9 @@ class RestAPI(BaseModel): def get_deployment(self, deployment_id): return self.deployments[deployment_id] + def get_authorizers(self): + return list(self.authorizers.values()) + def get_stages(self): return list(self.stages.values()) @@ -612,6 +694,46 @@ class APIGatewayBackend(BaseBackend): ) return method + def get_authorizer(self, restapi_id, authorizer_id): + api = self.get_rest_api(restapi_id) + authorizer = api.authorizers.get(authorizer_id) + if authorizer is None: + raise AuthorizerNotFoundException() + else: + return authorizer + + def get_authorizers(self, restapi_id): + api = self.get_rest_api(restapi_id) + return 
api.get_authorizers() + + def create_authorizer(self, restapi_id, name, authorizer_type, **kwargs): + api = self.get_rest_api(restapi_id) + authorizer_id = create_id() + authorizer = api.create_authorizer( + authorizer_id, + name, + authorizer_type, + provider_arns=kwargs.get("provider_arns"), + auth_type=kwargs.get("auth_type"), + authorizer_uri=kwargs.get("authorizer_uri"), + authorizer_credentials=kwargs.get("authorizer_credentials"), + identity_source=kwargs.get("identity_source"), + identiy_validation_expression=kwargs.get("identiy_validation_expression"), + authorizer_result_ttl=kwargs.get("authorizer_result_ttl"), + ) + return api.authorizers.get(authorizer["id"]) + + def update_authorizer(self, restapi_id, authorizer_id, patch_operations): + authorizer = self.get_authorizer(restapi_id, authorizer_id) + if not authorizer: + api = self.get_rest_api(restapi_id) + authorizer = api.authorizers[authorizer_id] = Authorizer() + return authorizer.apply_operations(patch_operations) + + def delete_authorizer(self, restapi_id, authorizer_id): + api = self.get_rest_api(restapi_id) + del api.authorizers[authorizer_id] + def get_stage(self, function_id, stage_name): api = self.get_rest_api(function_id) stage = api.stages.get(stage_name) diff --git a/moto/apigateway/responses.py b/moto/apigateway/responses.py index 6a22a4708..f0ed6adc9 100644 --- a/moto/apigateway/responses.py +++ b/moto/apigateway/responses.py @@ -8,11 +8,13 @@ from .exceptions import ( ApiKeyNotFoundException, BadRequestException, CrossAccountNotAllowed, + AuthorizerNotFoundException, StageNotFoundException, ApiKeyAlreadyExists, ) API_KEY_SOURCES = ["AUTHORIZER", "HEADER"] +AUTHORIZER_TYPES = ["TOKEN", "REQUEST", "COGNITO_USER_POOLS"] ENDPOINT_CONFIGURATION_TYPES = ["PRIVATE", "EDGE", "REGIONAL"] @@ -177,6 +179,88 @@ class APIGatewayResponse(BaseResponse): ) return 200, {}, json.dumps(method_response) + def restapis_authorizers(self, request, full_url, headers): + self.setup_class(request, full_url, 
headers) + url_path_parts = self.path.split("/") + restapi_id = url_path_parts[2] + + if self.method == "POST": + name = self._get_param("name") + authorizer_type = self._get_param("type") + + provider_arns = self._get_param_with_default_value("providerARNs", None) + auth_type = self._get_param_with_default_value("authType", None) + authorizer_uri = self._get_param_with_default_value("authorizerUri", None) + authorizer_credentials = self._get_param_with_default_value( + "authorizerCredentials", None + ) + identity_source = self._get_param_with_default_value("identitySource", None) + identiy_validation_expression = self._get_param_with_default_value( + "identityValidationExpression", None + ) + authorizer_result_ttl = self._get_param_with_default_value( + "authorizerResultTtlInSeconds", 300 + ) + + # Param validation + if authorizer_type and authorizer_type not in AUTHORIZER_TYPES: + return self.error( + "ValidationException", + ( + "1 validation error detected: " + "Value '{authorizer_type}' at 'createAuthorizerInput.type' failed " + "to satisfy constraint: Member must satisfy enum value set: " + "[TOKEN, REQUEST, COGNITO_USER_POOLS]" + ).format(authorizer_type=authorizer_type), + ) + + authorizer_response = self.backend.create_authorizer( + restapi_id, + name, + authorizer_type, + provider_arns=provider_arns, + auth_type=auth_type, + authorizer_uri=authorizer_uri, + authorizer_credentials=authorizer_credentials, + identity_source=identity_source, + identiy_validation_expression=identiy_validation_expression, + authorizer_result_ttl=authorizer_result_ttl, + ) + elif self.method == "GET": + authorizers = self.backend.get_authorizers(restapi_id) + return 200, {}, json.dumps({"item": authorizers}) + + return 200, {}, json.dumps(authorizer_response) + + def authorizers(self, request, full_url, headers): + self.setup_class(request, full_url, headers) + url_path_parts = self.path.split("/") + restapi_id = url_path_parts[2] + authorizer_id = url_path_parts[4] + + if 
self.method == "GET": + try: + authorizer_response = self.backend.get_authorizer( + restapi_id, authorizer_id + ) + except AuthorizerNotFoundException as error: + return ( + error.code, + {}, + '{{"message":"{0}","code":"{1}"}}'.format( + error.message, error.error_type + ), + ) + elif self.method == "PATCH": + patch_operations = self._get_param("patchOperations") + authorizer_response = self.backend.update_authorizer( + restapi_id, authorizer_id, patch_operations + ) + elif self.method == "DELETE": + self.backend.delete_authorizer(restapi_id, authorizer_id) + return 202, {}, "{}" + return 200, {}, json.dumps(authorizer_response) + def restapis_stages(self, request, full_url, headers): self.setup_class(request, full_url, headers) url_path_parts = self.path.split("/") diff --git a/moto/apigateway/urls.py b/moto/apigateway/urls.py index bb2b2d216..4ef6ae72b 100644 --- a/moto/apigateway/urls.py +++ b/moto/apigateway/urls.py @@ -7,6 +7,8 @@ url_paths = { "{0}/restapis$": APIGatewayResponse().restapis, "{0}/restapis/(?P[^/]+)/?$": APIGatewayResponse().restapis_individual, "{0}/restapis/(?P[^/]+)/resources$": APIGatewayResponse().resources, + "{0}/restapis/(?P[^/]+)/authorizers$": APIGatewayResponse().restapis_authorizers, + "{0}/restapis/(?P[^/]+)/authorizers/(?P[^/]+)/?$": APIGatewayResponse().authorizers, "{0}/restapis/(?P[^/]+)/stages$": APIGatewayResponse().restapis_stages, "{0}/restapis/(?P[^/]+)/stages/(?P[^/]+)/?$": APIGatewayResponse().stages, "{0}/restapis/(?P[^/]+)/deployments$": APIGatewayResponse().deployments, diff --git a/moto/cognitoidp/models.py b/moto/cognitoidp/models.py index 96b23a404..93e297551 100644 --- a/moto/cognitoidp/models.py +++ b/moto/cognitoidp/models.py @@ -14,6 +14,7 @@ from jose import jws from moto.compat import OrderedDict from moto.core import BaseBackend, BaseModel +from moto.core import ACCOUNT_ID as DEFAULT_ACCOUNT_ID from .exceptions import ( GroupExistsException, NotAuthorizedError, @@ -69,6 +70,9 @@ class 
CognitoIdpUserPool(BaseModel): def __init__(self, region, name, extended_config): self.region = region self.id = "{}_{}".format(self.region, str(uuid.uuid4().hex)) + self.arn = "arn:aws:cognito-idp:{}:{}:userpool/{}".format( + self.region, DEFAULT_ACCOUNT_ID, self.id + ) self.name = name self.status = None self.extended_config = extended_config or {} @@ -91,6 +95,7 @@ class CognitoIdpUserPool(BaseModel): def _base_json(self): return { "Id": self.id, + "Arn": self.arn, "Name": self.name, "Status": self.status, "CreationDate": time.mktime(self.creation_date.timetuple()), @@ -564,12 +569,17 @@ class CognitoIdpBackend(BaseBackend): user.groups.discard(group) # User - def admin_create_user(self, user_pool_id, username, temporary_password, attributes): + def admin_create_user( + self, user_pool_id, username, message_action, temporary_password, attributes + ): user_pool = self.user_pools.get(user_pool_id) if not user_pool: raise ResourceNotFoundError(user_pool_id) - if username in user_pool.users: + if message_action and message_action == "RESEND": + if username not in user_pool.users: + raise UserNotFoundError(username) + elif username in user_pool.users: raise UsernameExistsException(username) user = CognitoIdpUser( diff --git a/moto/cognitoidp/responses.py b/moto/cognitoidp/responses.py index fa3b7b0b5..6c89c4806 100644 --- a/moto/cognitoidp/responses.py +++ b/moto/cognitoidp/responses.py @@ -259,10 +259,12 @@ class CognitoIdpResponse(BaseResponse): def admin_create_user(self): user_pool_id = self._get_param("UserPoolId") username = self._get_param("Username") + message_action = self._get_param("MessageAction") temporary_password = self._get_param("TemporaryPassword") user = cognitoidp_backends[self.region].admin_create_user( user_pool_id, username, + message_action, temporary_password, self._get_param("UserAttributes", []), ) diff --git a/moto/config/models.py b/moto/config/models.py index 45dccd1ba..a66576979 100644 --- a/moto/config/models.py +++ 
b/moto/config/models.py @@ -43,7 +43,7 @@ from moto.config.exceptions import ( ) from moto.core import BaseBackend, BaseModel -from moto.s3.config import s3_config_query +from moto.s3.config import s3_account_public_access_block_query, s3_config_query from moto.core import ACCOUNT_ID as DEFAULT_ACCOUNT_ID @@ -58,7 +58,10 @@ POP_STRINGS = [ DEFAULT_PAGE_SIZE = 100 # Map the Config resource type to a backend: -RESOURCE_MAP = {"AWS::S3::Bucket": s3_config_query} +RESOURCE_MAP = { + "AWS::S3::Bucket": s3_config_query, + "AWS::S3::AccountPublicAccessBlock": s3_account_public_access_block_query, +} def datetime2int(date): @@ -867,16 +870,17 @@ class ConfigBackend(BaseBackend): backend_region=backend_query_region, ) - result = { - "resourceIdentifiers": [ - { - "resourceType": identifier["type"], - "resourceId": identifier["id"], - "resourceName": identifier["name"], - } - for identifier in identifiers - ] - } + resource_identifiers = [] + for identifier in identifiers: + item = {"resourceType": identifier["type"], "resourceId": identifier["id"]} + + # Some resource types lack names: + if identifier.get("name"): + item["resourceName"] = identifier["name"] + + resource_identifiers.append(item) + + result = {"resourceIdentifiers": resource_identifiers} if new_token: result["nextToken"] = new_token @@ -927,18 +931,21 @@ class ConfigBackend(BaseBackend): resource_region=resource_region, ) - result = { - "ResourceIdentifiers": [ - { - "SourceAccountId": DEFAULT_ACCOUNT_ID, - "SourceRegion": identifier["region"], - "ResourceType": identifier["type"], - "ResourceId": identifier["id"], - "ResourceName": identifier["name"], - } - for identifier in identifiers - ] - } + resource_identifiers = [] + for identifier in identifiers: + item = { + "SourceAccountId": DEFAULT_ACCOUNT_ID, + "SourceRegion": identifier["region"], + "ResourceType": identifier["type"], + "ResourceId": identifier["id"], + } + + if identifier.get("name"): + item["ResourceName"] = identifier["name"] + + 
resource_identifiers.append(item) + + result = {"ResourceIdentifiers": resource_identifiers} if new_token: result["NextToken"] = new_token diff --git a/moto/core/models.py b/moto/core/models.py index 3be3bbd8e..ffb2ffd9f 100644 --- a/moto/core/models.py +++ b/moto/core/models.py @@ -606,12 +606,13 @@ class ConfigQueryModel(object): As such, the proper way to implement is to first obtain a full list of results from all the region backends, and then filter from there. It may be valuable to make this a concatenation of the region and resource name. - :param resource_region: - :param resource_ids: - :param resource_name: - :param limit: - :param next_token: + :param resource_ids: A list of resource IDs + :param resource_name: The individual name of a resource + :param limit: How many per page + :param next_token: The item that will page on :param backend_region: The region for the backend to pull results from. Set to `None` if this is an aggregated query. + :param resource_region: The region for where the resources reside to pull results from. Set to `None` if this is a + non-aggregated query. 
:return: This should return a list of Dicts that have the following fields: [ { diff --git a/moto/dynamodb2/models.py b/moto/dynamodb2/models.py index 82c3559ea..88f750775 100644 --- a/moto/dynamodb2/models.py +++ b/moto/dynamodb2/models.py @@ -1406,6 +1406,7 @@ class DynamoDBBackend(BaseBackend): range_value = None item = table.get_item(hash_value, range_value) + orig_item = copy.deepcopy(item) if not expected: expected = {} @@ -1439,6 +1440,8 @@ class DynamoDBBackend(BaseBackend): ) else: item.update_with_attribute_updates(attribute_updates) + if table.stream_shard is not None: + table.stream_shard.add(orig_item, item) return item def delete_item( diff --git a/moto/emr/models.py b/moto/emr/models.py index 713b15b9f..d9ec2fd69 100644 --- a/moto/emr/models.py +++ b/moto/emr/models.py @@ -86,6 +86,9 @@ class FakeStep(BaseModel): self.start_datetime = None self.state = state + def start(self): + self.start_datetime = datetime.now(pytz.utc) + class FakeCluster(BaseModel): def __init__( @@ -204,6 +207,8 @@ class FakeCluster(BaseModel): self.start_cluster() self.run_bootstrap_actions() + if self.steps: + self.steps[0].start() @property def instance_groups(self): diff --git a/moto/emr/responses.py b/moto/emr/responses.py index 94847ec8b..38b9774e1 100644 --- a/moto/emr/responses.py +++ b/moto/emr/responses.py @@ -835,7 +835,7 @@ LIST_STEPS_TEMPLATE = """ max_results else None + else: + token = int(token) + jobs = filtered_jobs[token : token + max_results] + next_token = ( + str(token + max_results) + if len(filtered_jobs) > token + max_results + else None + ) + + return jobs, next_token + + def describe_job_execution(self, job_id, thing_name, execution_number): + try: + job_execution = self.job_executions[(job_id, thing_name)] + except KeyError: + raise ResourceNotFoundException() + + if job_execution is None or ( + execution_number is not None + and job_execution.execution_number != execution_number + ): + raise ResourceNotFoundException() + + return job_execution + + 
def cancel_job_execution( + self, job_id, thing_name, force, expected_version, status_details + ): + job_execution = self.job_executions[(job_id, thing_name)] + + if job_execution is None: + raise ResourceNotFoundException() + + job_execution.force_canceled = ( + force if force is not None else job_execution.force_canceled + ) + # TODO: implement expected_version and status_details (at most 10 can be specified) + + if job_execution.status == "IN_PROGRESS" and force: + job_execution.status = "CANCELED" + self.job_executions[(job_id, thing_name)] = job_execution + elif job_execution.status != "IN_PROGRESS": + job_execution.status = "CANCELED" + self.job_executions[(job_id, thing_name)] = job_execution + else: + raise InvalidStateTransitionException() + + def delete_job_execution(self, job_id, thing_name, execution_number, force): + job_execution = self.job_executions[(job_id, thing_name)] + + if job_execution.execution_number != execution_number: + raise ResourceNotFoundException() + + if job_execution.status == "IN_PROGRESS" and force: + del self.job_executions[(job_id, thing_name)] + elif job_execution.status != "IN_PROGRESS": + del self.job_executions[(job_id, thing_name)] + else: + raise InvalidStateTransitionException() + + def list_job_executions_for_job(self, job_id, status, max_results, next_token): + job_executions = [ + self.job_executions[je].to_dict() + for je in self.job_executions + if je[0] == job_id + ] + + if status is not None: + job_executions = list( + filter( + lambda elem: status in elem["status"] and elem["status"] == status, + job_executions, + ) + ) + + token = next_token + if token is None: + job_executions = job_executions[0:max_results] + next_token = str(max_results) if len(job_executions) > max_results else None + else: + token = int(token) + job_executions = job_executions[token : token + max_results] + next_token = ( + str(token + max_results) + if len(job_executions) > token + max_results + else None + ) + + return job_executions, 
next_token + + def list_job_executions_for_thing( + self, thing_name, status, max_results, next_token + ): + job_executions = [ + self.job_executions[je].to_dict() + for je in self.job_executions + if je[1] == thing_name + ] + + if status is not None: + job_executions = list( + filter( + lambda elem: status in elem["status"] and elem["status"] == status, + job_executions, + ) + ) + + token = next_token + if token is None: + job_executions = job_executions[0:max_results] + next_token = str(max_results) if len(job_executions) > max_results else None + else: + token = int(token) + job_executions = job_executions[token : token + max_results] + next_token = ( + str(token + max_results) + if len(job_executions) > token + max_results + else None + ) + + return job_executions, next_token + iot_backends = {} for region in Session().get_available_regions("iot"): diff --git a/moto/iot/responses.py b/moto/iot/responses.py index 5981eaa37..c12d4b5c5 100644 --- a/moto/iot/responses.py +++ b/moto/iot/responses.py @@ -1,6 +1,7 @@ from __future__ import unicode_literals import json +from six.moves.urllib.parse import unquote from moto.core.responses import BaseResponse from .models import iot_backends @@ -141,6 +142,8 @@ class IoTResponse(BaseResponse): createdAt=job.created_at, description=job.description, documentParameters=job.document_parameters, + forceCanceled=job.force, + reasonCode=job.reason_code, jobArn=job.job_arn, jobExecutionsRolloutConfig=job.job_executions_rollout_config, jobId=job.job_id, @@ -154,6 +157,127 @@ class IoTResponse(BaseResponse): ) ) + def delete_job(self): + job_id = self._get_param("jobId") + force = self._get_bool_param("force") + + self.iot_backend.delete_job(job_id=job_id, force=force) + + return json.dumps(dict()) + + def cancel_job(self): + job_id = self._get_param("jobId") + reason_code = self._get_param("reasonCode") + comment = self._get_param("comment") + force = self._get_bool_param("force") + + job = self.iot_backend.cancel_job( + 
job_id=job_id, reason_code=reason_code, comment=comment, force=force + ) + + return json.dumps(job.to_dict()) + + def get_job_document(self): + job = self.iot_backend.get_job_document(job_id=self._get_param("jobId")) + + if job.document is not None: + return json.dumps({"document": job.document}) + else: + # job.document_source is not None: + # TODO: needs to be implemented to get document_source's content from S3 + return json.dumps({"document": ""}) + + def list_jobs(self): + status = (self._get_param("status"),) + target_selection = (self._get_param("targetSelection"),) + max_results = self._get_int_param( + "maxResults", 50 + ) # not the default, but makes testing easier + previous_next_token = self._get_param("nextToken") + thing_group_name = (self._get_param("thingGroupName"),) + thing_group_id = self._get_param("thingGroupId") + jobs, next_token = self.iot_backend.list_jobs( + status=status, + target_selection=target_selection, + max_results=max_results, + token=previous_next_token, + thing_group_name=thing_group_name, + thing_group_id=thing_group_id, + ) + + return json.dumps(dict(jobs=jobs, nextToken=next_token)) + + def describe_job_execution(self): + job_id = self._get_param("jobId") + thing_name = self._get_param("thingName") + execution_number = self._get_int_param("executionNumber") + job_execution = self.iot_backend.describe_job_execution( + job_id=job_id, thing_name=thing_name, execution_number=execution_number + ) + + return json.dumps(dict(execution=job_execution.to_get_dict())) + + def cancel_job_execution(self): + job_id = self._get_param("jobId") + thing_name = self._get_param("thingName") + force = self._get_bool_param("force") + expected_version = self._get_int_param("expectedVersion") + status_details = self._get_param("statusDetails") + + self.iot_backend.cancel_job_execution( + job_id=job_id, + thing_name=thing_name, + force=force, + expected_version=expected_version, + status_details=status_details, + ) + + return json.dumps(dict()) + + 
def delete_job_execution(self): + job_id = self._get_param("jobId") + thing_name = self._get_param("thingName") + execution_number = self._get_int_param("executionNumber") + force = self._get_bool_param("force") + + self.iot_backend.delete_job_execution( + job_id=job_id, + thing_name=thing_name, + execution_number=execution_number, + force=force, + ) + + return json.dumps(dict()) + + def list_job_executions_for_job(self): + job_id = self._get_param("jobId") + status = self._get_param("status") + max_results = self._get_int_param( + "maxResults", 50 + ) # not the default, but makes testing easier + next_token = self._get_param("nextToken") + job_executions, next_token = self.iot_backend.list_job_executions_for_job( + job_id=job_id, status=status, max_results=max_results, next_token=next_token + ) + + return json.dumps(dict(executionSummaries=job_executions, nextToken=next_token)) + + def list_job_executions_for_thing(self): + thing_name = self._get_param("thingName") + status = self._get_param("status") + max_results = self._get_int_param( + "maxResults", 50 + ) # not the default, but makes testing easier + next_token = self._get_param("nextToken") + job_executions, next_token = self.iot_backend.list_job_executions_for_thing( + thing_name=thing_name, + status=status, + max_results=max_results, + next_token=next_token, + ) + + return json.dumps(dict(executionSummaries=job_executions, nextToken=next_token)) + def create_keys_and_certificate(self): set_as_active = self._get_bool_param("setAsActive") cert, key_pair = self.iot_backend.create_keys_and_certificate( @@ -241,12 +365,61 @@ class IoTResponse(BaseResponse): self.iot_backend.delete_policy(policy_name=policy_name) return json.dumps(dict()) + def create_policy_version(self): + policy_name = self._get_param("policyName") + policy_document = self._get_param("policyDocument") + set_as_default = self._get_bool_param("setAsDefault") + policy_version = self.iot_backend.create_policy_version( + policy_name, 
policy_document, set_as_default + ) + + return json.dumps(dict(policy_version.to_dict_at_creation())) + + def set_default_policy_version(self): + policy_name = self._get_param("policyName") + version_id = self._get_param("policyVersionId") + self.iot_backend.set_default_policy_version(policy_name, version_id) + + return json.dumps(dict()) + + def get_policy_version(self): + policy_name = self._get_param("policyName") + version_id = self._get_param("policyVersionId") + policy_version = self.iot_backend.get_policy_version(policy_name, version_id) + return json.dumps(dict(policy_version.to_get_dict())) + + def list_policy_versions(self): + policy_name = self._get_param("policyName") + policiy_versions = self.iot_backend.list_policy_versions( + policy_name=policy_name + ) + + return json.dumps(dict(policyVersions=[_.to_dict() for _ in policiy_versions])) + + def delete_policy_version(self): + policy_name = self._get_param("policyName") + version_id = self._get_param("policyVersionId") + self.iot_backend.delete_policy_version(policy_name, version_id) + + return json.dumps(dict()) + def attach_policy(self): policy_name = self._get_param("policyName") target = self._get_param("target") self.iot_backend.attach_policy(policy_name=policy_name, target=target) return json.dumps(dict()) + def list_attached_policies(self): + principal = unquote(self._get_param("target")) + # marker = self._get_param("marker") + # page_size = self._get_int_param("pageSize") + policies = self.iot_backend.list_attached_policies(target=principal) + # TODO: implement pagination in the future + next_marker = None + return json.dumps( + dict(policies=[_.to_dict() for _ in policies], nextMarker=next_marker) + ) + def attach_principal_policy(self): policy_name = self._get_param("policyName") principal = self.headers.get("x-amzn-iot-principal") diff --git a/moto/kms/models.py b/moto/kms/models.py index ff5d0a356..36f72e6de 100644 --- a/moto/kms/models.py +++ b/moto/kms/models.py @@ -8,7 +8,8 @@ from boto3 
import Session from moto.core import BaseBackend, BaseModel from moto.core.utils import unix_time - +from moto.utilities.tagging_service import TaggingService +from moto.core.exceptions import JsonRESTError from moto.iam.models import ACCOUNT_ID from .utils import decrypt, encrypt, generate_key_id, generate_master_key @@ -16,7 +17,7 @@ from .utils import decrypt, encrypt, generate_key_id, generate_master_key class Key(BaseModel): def __init__( - self, policy, key_usage, customer_master_key_spec, description, tags, region + self, policy, key_usage, customer_master_key_spec, description, region ): self.id = generate_key_id() self.creation_date = unix_time() @@ -29,7 +30,6 @@ class Key(BaseModel): self.account_id = ACCOUNT_ID self.key_rotation_status = False self.deletion_date = None - self.tags = tags or {} self.key_material = generate_master_key() self.origin = "AWS_KMS" self.key_manager = "CUSTOMER" @@ -111,11 +111,12 @@ class Key(BaseModel): key_usage="ENCRYPT_DECRYPT", customer_master_key_spec="SYMMETRIC_DEFAULT", description=properties["Description"], - tags=properties.get("Tags"), + tags=properties.get("Tags", []), region=region_name, ) key.key_rotation_status = properties["EnableKeyRotation"] key.enabled = properties["Enabled"] + return key def get_cfn_attribute(self, attribute_name): @@ -130,32 +131,26 @@ class KmsBackend(BaseBackend): def __init__(self): self.keys = {} self.key_to_aliases = defaultdict(set) + self.tagger = TaggingService(keyName="TagKey", valueName="TagValue") def create_key( self, policy, key_usage, customer_master_key_spec, description, tags, region ): - key = Key( - policy, key_usage, customer_master_key_spec, description, tags, region - ) + key = Key(policy, key_usage, customer_master_key_spec, description, region) self.keys[key.id] = key + if tags is not None and len(tags) > 0: + self.tag_resource(key.id, tags) return key def update_key_description(self, key_id, description): key = self.keys[self.get_key_id(key_id)] key.description = 
description - def tag_resource(self, key_id, tags): - key = self.keys[self.get_key_id(key_id)] - key.tags = tags - - def list_resource_tags(self, key_id): - key = self.keys[self.get_key_id(key_id)] - return key.tags - def delete_key(self, key_id): if key_id in self.keys: if key_id in self.key_to_aliases: self.key_to_aliases.pop(key_id) + self.tagger.delete_all_tags_for_resource(key_id) return self.keys.pop(key_id) @@ -325,6 +320,32 @@ class KmsBackend(BaseBackend): return plaintext, ciphertext_blob, arn + def list_resource_tags(self, key_id): + if key_id in self.keys: + return self.tagger.list_tags_for_resource(key_id) + raise JsonRESTError( + "NotFoundException", + "The request was rejected because the specified entity or resource could not be found.", + ) + + def tag_resource(self, key_id, tags): + if key_id in self.keys: + self.tagger.tag_resource(key_id, tags) + return {} + raise JsonRESTError( + "NotFoundException", + "The request was rejected because the specified entity or resource could not be found.", + ) + + def untag_resource(self, key_id, tag_names): + if key_id in self.keys: + self.tagger.untag_resource_using_names(key_id, tag_names) + return {} + raise JsonRESTError( + "NotFoundException", + "The request was rejected because the specified entity or resource could not be found.", + ) + kms_backends = {} for region in Session().get_available_regions("kms"): diff --git a/moto/kms/responses.py b/moto/kms/responses.py index 15b990bbb..995c097e0 100644 --- a/moto/kms/responses.py +++ b/moto/kms/responses.py @@ -144,17 +144,27 @@ class KmsResponse(BaseResponse): self._validate_cmk_id(key_id) - self.kms_backend.tag_resource(key_id, tags) - return json.dumps({}) + result = self.kms_backend.tag_resource(key_id, tags) + return json.dumps(result) + + def untag_resource(self): + """https://docs.aws.amazon.com/kms/latest/APIReference/API_UntagResource.html""" + key_id = self.parameters.get("KeyId") + tag_names = self.parameters.get("TagKeys") + + 
self._validate_cmk_id(key_id) + + result = self.kms_backend.untag_resource(key_id, tag_names) + return json.dumps(result) def list_resource_tags(self): """https://docs.aws.amazon.com/kms/latest/APIReference/API_ListResourceTags.html""" key_id = self.parameters.get("KeyId") - self._validate_cmk_id(key_id) tags = self.kms_backend.list_resource_tags(key_id) - return json.dumps({"Tags": tags, "NextMarker": None, "Truncated": False}) + tags.update({"NextMarker": None, "Truncated": False}) + return json.dumps(tags) def describe_key(self): """https://docs.aws.amazon.com/kms/latest/APIReference/API_DescribeKey.html""" diff --git a/moto/resourcegroupstaggingapi/models.py b/moto/resourcegroupstaggingapi/models.py index 850ab5c04..d05a53f81 100644 --- a/moto/resourcegroupstaggingapi/models.py +++ b/moto/resourcegroupstaggingapi/models.py @@ -318,7 +318,7 @@ class ResourceGroupsTaggingAPIBackend(BaseBackend): # KMS def get_kms_tags(kms_key_id): result = [] - for tag in self.kms_backend.list_resource_tags(kms_key_id): + for tag in self.kms_backend.list_resource_tags(kms_key_id).get("Tags", []): result.append({"Key": tag["TagKey"], "Value": tag["TagValue"]}) return result diff --git a/moto/s3/config.py b/moto/s3/config.py index 8098addfc..04b4315f3 100644 --- a/moto/s3/config.py +++ b/moto/s3/config.py @@ -1,8 +1,13 @@ +import datetime import json +import time + +from boto3 import Session from moto.core.exceptions import InvalidNextTokenException from moto.core.models import ConfigQueryModel from moto.s3 import s3_backends +from moto.s3.models import get_moto_s3_account_id class S3ConfigQuery(ConfigQueryModel): @@ -118,4 +123,146 @@ class S3ConfigQuery(ConfigQueryModel): return config_data +class S3AccountPublicAccessBlockConfigQuery(ConfigQueryModel): + def list_config_service_resources( + self, + resource_ids, + resource_name, + limit, + next_token, + backend_region=None, + resource_region=None, + ): + # For the Account Public Access Block, they are the same for all regions. 
The resource ID is the AWS account ID + # There is no resource name -- it should be a blank string "" if provided. + + # The resource name can only ever be None or an empty string: + if resource_name is not None and resource_name != "": + return [], None + + pab = None + account_id = get_moto_s3_account_id() + regions = [region for region in Session().get_available_regions("config")] + + # If a resource ID was passed in, then filter accordingly: + if resource_ids: + for id in resource_ids: + if account_id == id: + pab = self.backends["global"].account_public_access_block + break + + # Otherwise, just grab the one from the backend: + if not resource_ids: + pab = self.backends["global"].account_public_access_block + + # If it's not present, then return nothing + if not pab: + return [], None + + # Filter on regions (and paginate on them as well): + if backend_region: + pab_list = [backend_region] + elif resource_region: + # Invalid region? + if resource_region not in regions: + return [], None + + pab_list = [resource_region] + + # Aggregated query where no regions were supplied so return them all: + else: + pab_list = regions + + # Pagination logic: + sorted_regions = sorted(pab_list) + new_token = None + + # Get the start: + if not next_token: + start = 0 + else: + # Tokens for this moto feature is just the region-name: + # For OTHER non-global resource types, it's the region concatenated with the resource ID. 
+ if next_token not in sorted_regions: + raise InvalidNextTokenException() + + start = sorted_regions.index(next_token) + + # Get the list of items to collect: + pab_list = sorted_regions[start : (start + limit)] + + if len(sorted_regions) > (start + limit): + new_token = sorted_regions[start + limit] + + return ( + [ + { + "type": "AWS::S3::AccountPublicAccessBlock", + "id": account_id, + "region": region, + } + for region in pab_list + ], + new_token, + ) + + def get_config_resource( + self, resource_id, resource_name=None, backend_region=None, resource_region=None + ): + # Do we even have this defined? + if not self.backends["global"].account_public_access_block: + return None + + # Resource name can only ever be "" if it's supplied: + if resource_name is not None and resource_name != "": + return None + + # Are we filtering based on region? + account_id = get_moto_s3_account_id() + regions = [region for region in Session().get_available_regions("config")] + + # Is the resource ID correct?: + if account_id == resource_id: + if backend_region: + pab_region = backend_region + + # Invalid region? 
+ elif resource_region not in regions: + return None + + else: + pab_region = resource_region + + else: + return None + + # Format the PAB to the AWS Config format: + creation_time = datetime.datetime.utcnow() + config_data = { + "version": "1.3", + "accountId": account_id, + "configurationItemCaptureTime": str(creation_time), + "configurationItemStatus": "OK", + "configurationStateId": str( + int(time.mktime(creation_time.timetuple())) + ), # PY2 and 3 compatible + "resourceType": "AWS::S3::AccountPublicAccessBlock", + "resourceId": account_id, + "awsRegion": pab_region, + "availabilityZone": "Not Applicable", + "configuration": self.backends[ + "global" + ].account_public_access_block.to_config_dict(), + "supplementaryConfiguration": {}, + } + + # The 'configuration' field is also a JSON string: + config_data["configuration"] = json.dumps(config_data["configuration"]) + + return config_data + + s3_config_query = S3ConfigQuery(s3_backends) +s3_account_public_access_block_query = S3AccountPublicAccessBlockConfigQuery( + s3_backends +) diff --git a/moto/s3/exceptions.py b/moto/s3/exceptions.py index bc339772e..e26f384d5 100644 --- a/moto/s3/exceptions.py +++ b/moto/s3/exceptions.py @@ -359,3 +359,12 @@ class InvalidPublicAccessBlockConfiguration(S3ClientError): *args, **kwargs ) + + +class WrongPublicAccessBlockAccountIdError(S3ClientError): + code = 403 + + def __init__(self): + super(WrongPublicAccessBlockAccountIdError, self).__init__( + "AccessDenied", "Access Denied" + ) diff --git a/moto/s3/models.py b/moto/s3/models.py index fe8e908ef..5a665e27e 100644 --- a/moto/s3/models.py +++ b/moto/s3/models.py @@ -19,7 +19,7 @@ import uuid import six from bisect import insort -from moto.core import BaseBackend, BaseModel +from moto.core import ACCOUNT_ID, BaseBackend, BaseModel from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime from .exceptions import ( BucketAlreadyExists, @@ -37,6 +37,7 @@ from .exceptions import ( 
CrossLocationLoggingProhibitted, NoSuchPublicAccessBlockConfiguration, InvalidPublicAccessBlockConfiguration, + WrongPublicAccessBlockAccountIdError, ) from .utils import clean_key_name, _VersionedKeyStore @@ -58,6 +59,13 @@ DEFAULT_TEXT_ENCODING = sys.getdefaultencoding() OWNER = "75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a" +def get_moto_s3_account_id(): + """This makes it easy for mocking AWS Account IDs when using AWS Config + -- Simply mock.patch the ACCOUNT_ID here, and Config gets it for free. + """ + return ACCOUNT_ID + + class FakeDeleteMarker(BaseModel): def __init__(self, key): self.key = key @@ -1163,6 +1171,7 @@ class FakeBucket(BaseModel): class S3Backend(BaseBackend): def __init__(self): self.buckets = {} + self.account_public_access_block = None def create_bucket(self, bucket_name, region_name): if bucket_name in self.buckets: @@ -1264,6 +1273,16 @@ class S3Backend(BaseBackend): return bucket.public_access_block + def get_account_public_access_block(self, account_id): + # The account ID should equal the account id that is set for Moto: + if account_id != ACCOUNT_ID: + raise WrongPublicAccessBlockAccountIdError() + + if not self.account_public_access_block: + raise NoSuchPublicAccessBlockConfiguration() + + return self.account_public_access_block + def set_key( self, bucket_name, key_name, value, storage=None, etag=None, multipart=None ): @@ -1356,6 +1375,13 @@ class S3Backend(BaseBackend): bucket = self.get_bucket(bucket_name) bucket.public_access_block = None + def delete_account_public_access_block(self, account_id): + # The account ID should equal the account id that is set for Moto: + if account_id != ACCOUNT_ID: + raise WrongPublicAccessBlockAccountIdError() + + self.account_public_access_block = None + def put_bucket_notification_configuration(self, bucket_name, notification_config): bucket = self.get_bucket(bucket_name) bucket.set_notification_configuration(notification_config) @@ -1384,6 +1410,21 @@ class 
S3Backend(BaseBackend): pub_block_config.get("RestrictPublicBuckets"), ) + def put_account_public_access_block(self, account_id, pub_block_config): + # The account ID should equal the account id that is set for Moto: + if account_id != ACCOUNT_ID: + raise WrongPublicAccessBlockAccountIdError() + + if not pub_block_config: + raise InvalidPublicAccessBlockConfiguration() + + self.account_public_access_block = PublicAccessBlock( + pub_block_config.get("BlockPublicAcls"), + pub_block_config.get("IgnorePublicAcls"), + pub_block_config.get("BlockPublicPolicy"), + pub_block_config.get("RestrictPublicBuckets"), + ) + def initiate_multipart(self, bucket_name, key_name, metadata): bucket = self.get_bucket(bucket_name) new_multipart = FakeMultipart(key_name, metadata) diff --git a/moto/s3/responses.py b/moto/s3/responses.py index 6041201bf..b74be9a63 100644 --- a/moto/s3/responses.py +++ b/moto/s3/responses.py @@ -4,6 +4,7 @@ import re import sys import six +from botocore.awsrequest import AWSPreparedRequest from moto.core.utils import str_to_rfc_1123_datetime, py2_strip_unicode_keys from six.moves.urllib.parse import parse_qs, urlparse, unquote @@ -123,6 +124,11 @@ ACTION_MAP = { "uploadId": "PutObject", }, }, + "CONTROL": { + "GET": {"publicAccessBlock": "GetPublicAccessBlock"}, + "PUT": {"publicAccessBlock": "PutPublicAccessBlock"}, + "DELETE": {"publicAccessBlock": "DeletePublicAccessBlock"}, + }, } @@ -168,7 +174,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): or host.startswith("localhost") or host.startswith("localstack") or re.match(r"^[^.]+$", host) - or re.match(r"^.*\.svc\.cluster\.local$", host) + or re.match(r"^.*\.svc\.cluster\.local:?\d*$", host) ): # Default to path-based buckets for (1) localhost, (2) localstack hosts (e.g. localstack.dev), # (3) local host names that do not contain a "." 
(e.g., Docker container host names), or @@ -220,7 +226,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): # Depending on which calling format the client is using, we don't know # if this is a bucket or key request so we have to check if self.subdomain_based_buckets(request): - return self.key_response(request, full_url, headers) + return self.key_or_control_response(request, full_url, headers) else: # Using path-based buckets return self.bucket_response(request, full_url, headers) @@ -287,7 +293,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): return self._bucket_response_post(request, body, bucket_name) else: raise NotImplementedError( - "Method {0} has not been impelemented in the S3 backend yet".format( + "Method {0} has not been implemented in the S3 backend yet".format( method ) ) @@ -595,6 +601,20 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): pass return False + def _parse_pab_config(self, body): + parsed_xml = xmltodict.parse(body) + parsed_xml["PublicAccessBlockConfiguration"].pop("@xmlns", None) + + # If Python 2, fix the unicode strings: + if sys.version_info[0] < 3: + parsed_xml = { + "PublicAccessBlockConfiguration": py2_strip_unicode_keys( + dict(parsed_xml["PublicAccessBlockConfiguration"]) + ) + } + + return parsed_xml + def _bucket_response_put( self, request, body, region_name, bucket_name, querystring ): @@ -673,19 +693,9 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): raise e elif "publicAccessBlock" in querystring: - parsed_xml = xmltodict.parse(body) - parsed_xml["PublicAccessBlockConfiguration"].pop("@xmlns", None) - - # If Python 2, fix the unicode strings: - if sys.version_info[0] < 3: - parsed_xml = { - "PublicAccessBlockConfiguration": py2_strip_unicode_keys( - dict(parsed_xml["PublicAccessBlockConfiguration"]) - ) - } - + pab_config = self._parse_pab_config(body) self.backend.put_bucket_public_access_block( - 
bucket_name, parsed_xml["PublicAccessBlockConfiguration"] + bucket_name, pab_config["PublicAccessBlockConfiguration"] ) return "" @@ -870,15 +880,21 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): ) return 206, response_headers, response_content[begin : end + 1] - def key_response(self, request, full_url, headers): + def key_or_control_response(self, request, full_url, headers): + # Key and Control are lumped in because splitting out the regex is too much of a pain :/ self.method = request.method self.path = self._get_path(request) self.headers = request.headers if "host" not in self.headers: self.headers["host"] = urlparse(full_url).netloc response_headers = {} + try: - response = self._key_response(request, full_url, headers) + # Is this an S3 control response? + if isinstance(request, AWSPreparedRequest) and "s3-control" in request.url: + response = self._control_response(request, full_url, headers) + else: + response = self._key_response(request, full_url, headers) except S3ClientError as s3error: response = s3error.code, {}, s3error.description @@ -894,6 +910,94 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): ) return status_code, response_headers, response_content + def _control_response(self, request, full_url, headers): + parsed_url = urlparse(full_url) + query = parse_qs(parsed_url.query, keep_blank_values=True) + method = request.method + + if hasattr(request, "body"): + # Boto + body = request.body + if hasattr(body, "read"): + body = body.read() + else: + # Flask server + body = request.data + if body is None: + body = b"" + + if method == "GET": + return self._control_response_get(request, query, headers) + elif method == "PUT": + return self._control_response_put(request, body, query, headers) + elif method == "DELETE": + return self._control_response_delete(request, query, headers) + else: + raise NotImplementedError( + "Method {0} has not been implemented in the S3 backend yet".format( + 
method + ) + ) + + def _control_response_get(self, request, query, headers): + action = self.path.split("?")[0].split("/")[ + -1 + ] # Gets the action out of the URL sans query params. + self._set_action("CONTROL", "GET", action) + self._authenticate_and_authorize_s3_action() + + response_headers = {} + if "publicAccessBlock" in action: + public_block_config = self.backend.get_account_public_access_block( + headers["x-amz-account-id"] + ) + template = self.response_template(S3_PUBLIC_ACCESS_BLOCK_CONFIGURATION) + return ( + 200, + response_headers, + template.render(public_block_config=public_block_config), + ) + + raise NotImplementedError( + "Method {0} has not been implemented in the S3 backend yet".format(action) + ) + + def _control_response_put(self, request, body, query, headers): + action = self.path.split("?")[0].split("/")[ + -1 + ] # Gets the action out of the URL sans query params. + self._set_action("CONTROL", "PUT", action) + self._authenticate_and_authorize_s3_action() + + response_headers = {} + if "publicAccessBlock" in action: + pab_config = self._parse_pab_config(body) + self.backend.put_account_public_access_block( + headers["x-amz-account-id"], + pab_config["PublicAccessBlockConfiguration"], + ) + return 200, response_headers, "" + + raise NotImplementedError( + "Method {0} has not been implemented in the S3 backend yet".format(action) + ) + + def _control_response_delete(self, request, query, headers): + action = self.path.split("?")[0].split("/")[ + -1 + ] # Gets the action out of the URL sans query params. 
+ self._set_action("CONTROL", "DELETE", action) + self._authenticate_and_authorize_s3_action() + + response_headers = {} + if "publicAccessBlock" in action: + self.backend.delete_account_public_access_block(headers["x-amz-account-id"]) + return 200, response_headers, "" + + raise NotImplementedError( + "Method {0} has not been implemented in the S3 backend yet".format(action) + ) + def _key_response(self, request, full_url, headers): parsed_url = urlparse(full_url) query = parse_qs(parsed_url.query, keep_blank_values=True) @@ -1098,6 +1202,10 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): if mdirective is not None and mdirective == "REPLACE": metadata = metadata_from_headers(request.headers) new_key.set_metadata(metadata, replace=True) + tdirective = request.headers.get("x-amz-tagging-directive") + if tdirective == "REPLACE": + tagging = self._tagging_from_headers(request.headers) + new_key.set_tagging(tagging) template = self.response_template(S3_OBJECT_COPY_RESPONSE) response_headers.update(new_key.response_dict) return 200, response_headers, template.render(key=new_key) diff --git a/moto/s3/urls.py b/moto/s3/urls.py index 7241dbef1..752762184 100644 --- a/moto/s3/urls.py +++ b/moto/s3/urls.py @@ -13,7 +13,7 @@ url_paths = { # subdomain key of path-based bucket "{0}/(?P[^/]+)/?$": S3ResponseInstance.ambiguous_response, # path-based bucket + key - "{0}/(?P[^/]+)/(?P.+)": S3ResponseInstance.key_response, + "{0}/(?P[^/]+)/(?P.+)": S3ResponseInstance.key_or_control_response, # subdomain bucket + key with empty first part of path - "{0}//(?P.*)$": S3ResponseInstance.key_response, + "{0}//(?P.*)$": S3ResponseInstance.key_or_control_response, } diff --git a/moto/swf/models/workflow_execution.py b/moto/swf/models/workflow_execution.py index 4d91b1f6f..17ce819fb 100644 --- a/moto/swf/models/workflow_execution.py +++ b/moto/swf/models/workflow_execution.py @@ -127,6 +127,10 @@ class WorkflowExecution(BaseModel): "executionInfo": 
self.to_medium_dict(), "executionConfiguration": {"taskList": {"name": self.task_list}}, } + # info + if self.execution_status == "CLOSED": + hsh["executionInfo"]["closeStatus"] = self.close_status + hsh["executionInfo"]["closeTimestamp"] = self.close_timestamp # configuration for key in self._configuration_keys: attr = camelcase_to_underscores(key) diff --git a/moto/swf/models/workflow_type.py b/moto/swf/models/workflow_type.py index ddb2475b2..137f0e221 100644 --- a/moto/swf/models/workflow_type.py +++ b/moto/swf/models/workflow_type.py @@ -8,6 +8,8 @@ class WorkflowType(GenericType): "defaultChildPolicy", "defaultExecutionStartToCloseTimeout", "defaultTaskStartToCloseTimeout", + "defaultTaskPriority", + "defaultLambdaRole", ] @property diff --git a/moto/swf/responses.py b/moto/swf/responses.py index 98b736cda..2b7794ffd 100644 --- a/moto/swf/responses.py +++ b/moto/swf/responses.py @@ -300,6 +300,8 @@ class SWFResponse(BaseResponse): default_execution_start_to_close_timeout = self._params.get( "defaultExecutionStartToCloseTimeout" ) + default_task_priority = self._params.get("defaultTaskPriority") + default_lambda_role = self._params.get("defaultLambdaRole") description = self._params.get("description") self._check_string(domain) @@ -309,10 +311,10 @@ class SWFResponse(BaseResponse): self._check_none_or_string(default_child_policy) self._check_none_or_string(default_task_start_to_close_timeout) self._check_none_or_string(default_execution_start_to_close_timeout) + self._check_none_or_string(default_task_priority) + self._check_none_or_string(default_lambda_role) self._check_none_or_string(description) - # TODO: add defaultTaskPriority when boto gets to support it - # TODO: add defaultLambdaRole when boto gets to support it self.swf_backend.register_type( "workflow", domain, @@ -322,6 +324,8 @@ class SWFResponse(BaseResponse): default_child_policy=default_child_policy, default_task_start_to_close_timeout=default_task_start_to_close_timeout, 
default_execution_start_to_close_timeout=default_execution_start_to_close_timeout, + default_task_priority=default_task_priority, + default_lambda_role=default_lambda_role, description=description, ) return "" diff --git a/moto/utilities/__init__.py b/moto/utilities/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/moto/utilities/tagging_service.py b/moto/utilities/tagging_service.py new file mode 100644 index 000000000..89b857277 --- /dev/null +++ b/moto/utilities/tagging_service.py @@ -0,0 +1,62 @@ +class TaggingService: + def __init__(self, tagName="Tags", keyName="Key", valueName="Value"): + self.tagName = tagName + self.keyName = keyName + self.valueName = valueName + self.tags = {} + + def list_tags_for_resource(self, arn): + result = [] + if arn in self.tags: + for k, v in self.tags[arn].items(): + result.append({self.keyName: k, self.valueName: v}) + return {self.tagName: result} + + def delete_all_tags_for_resource(self, arn): + del self.tags[arn] + + def has_tags(self, arn): + return arn in self.tags + + def tag_resource(self, arn, tags): + if arn not in self.tags: + self.tags[arn] = {} + for t in tags: + if self.valueName in t: + self.tags[arn][t[self.keyName]] = t[self.valueName] + else: + self.tags[arn][t[self.keyName]] = None + + def untag_resource_using_names(self, arn, tag_names): + for name in tag_names: + if name in self.tags.get(arn, {}): + del self.tags[arn][name] + + def untag_resource_using_tags(self, arn, tags): + m = self.tags.get(arn, {}) + for t in tags: + if self.keyName in t: + if t[self.keyName] in m: + if self.valueName in t: + if m[t[self.keyName]] != t[self.valueName]: + continue + # If both key and value are provided, match both before deletion + del m[t[self.keyName]] + + def extract_tag_names(self, tags): + results = [] + if len(tags) == 0: + return results + for tag in tags: + if self.keyName in tag: + results.append(tag[self.keyName]) + return results + + def flatten_tag_list(self, tags): + result = {} + 
for t in tags: + if self.valueName in t: + result[t[self.keyName]] = t[self.valueName] + else: + result[t[self.keyName]] = None + return result diff --git a/tests/test_apigateway/test_apigateway.py b/tests/test_apigateway/test_apigateway.py index c92fc08f4..0952f2674 100644 --- a/tests/test_apigateway/test_apigateway.py +++ b/tests/test_apigateway/test_apigateway.py @@ -8,7 +8,7 @@ import sure # noqa from botocore.exceptions import ClientError import responses -from moto import mock_apigateway, settings +from moto import mock_apigateway, mock_cognitoidp, settings from moto.core import ACCOUNT_ID from nose.tools import assert_raises @@ -204,12 +204,7 @@ def test_create_resource(): root_resource["ResponseMetadata"].pop("HTTPHeaders", None) root_resource["ResponseMetadata"].pop("RetryAttempts", None) root_resource.should.equal( - { - "path": "/", - "id": root_id, - "ResponseMetadata": {"HTTPStatusCode": 200}, - "resourceMethods": {"GET": {}}, - } + {"path": "/", "id": root_id, "ResponseMetadata": {"HTTPStatusCode": 200},} ) client.create_resource(restApiId=api_id, parentId=root_id, pathPart="users") @@ -257,7 +252,6 @@ def test_child_resource(): "parentId": users_id, "id": tags_id, "ResponseMetadata": {"HTTPStatusCode": 200}, - "resourceMethods": {"GET": {}}, } ) @@ -582,6 +576,254 @@ def test_integration_response(): response["methodIntegration"]["integrationResponses"].should.equal({}) +@mock_apigateway +@mock_cognitoidp +def test_update_authorizer_configuration(): + client = boto3.client("apigateway", region_name="us-west-2") + authorizer_name = "my_authorizer" + response = client.create_rest_api(name="my_api", description="this is my api") + api_id = response["id"] + + cognito_client = boto3.client("cognito-idp", region_name="us-west-2") + user_pool_arn = cognito_client.create_user_pool(PoolName="my_cognito_pool")[ + "UserPool" + ]["Arn"] + + response = client.create_authorizer( + restApiId=api_id, + name=authorizer_name, + type="COGNITO_USER_POOLS", + 
providerARNs=[user_pool_arn], + identitySource="method.request.header.Authorization", + ) + authorizer_id = response["id"] + + response = client.get_authorizer(restApiId=api_id, authorizerId=authorizer_id) + # createdDate is hard to match against, remove it + response.pop("createdDate", None) + # this is hard to match against, so remove it + response["ResponseMetadata"].pop("HTTPHeaders", None) + response["ResponseMetadata"].pop("RetryAttempts", None) + response.should.equal( + { + "id": authorizer_id, + "name": authorizer_name, + "type": "COGNITO_USER_POOLS", + "providerARNs": [user_pool_arn], + "identitySource": "method.request.header.Authorization", + "authorizerResultTtlInSeconds": 300, + "ResponseMetadata": {"HTTPStatusCode": 200}, + } + ) + + client.update_authorizer( + restApiId=api_id, + authorizerId=authorizer_id, + patchOperations=[{"op": "replace", "path": "/type", "value": "TOKEN"}], + ) + + authorizer = client.get_authorizer(restApiId=api_id, authorizerId=authorizer_id) + + authorizer.should.have.key("type").which.should.equal("TOKEN") + + client.update_authorizer( + restApiId=api_id, + authorizerId=authorizer_id, + patchOperations=[{"op": "replace", "path": "/type", "value": "REQUEST"}], + ) + + authorizer = client.get_authorizer(restApiId=api_id, authorizerId=authorizer_id) + + authorizer.should.have.key("type").which.should.equal("REQUEST") + + # TODO: implement mult-update tests + + try: + client.update_authorizer( + restApiId=api_id, + authorizerId=authorizer_id, + patchOperations=[ + {"op": "add", "path": "/notasetting", "value": "eu-west-1"} + ], + ) + assert False.should.be.ok # Fail, should not be here + except Exception: + assert True.should.be.ok + + +@mock_apigateway +def test_non_existent_authorizer(): + client = boto3.client("apigateway", region_name="us-west-2") + response = client.create_rest_api(name="my_api", description="this is my api") + api_id = response["id"] + + client.get_authorizer.when.called_with( + restApiId=api_id, 
authorizerId="xxx" + ).should.throw(ClientError) + + +@mock_apigateway +@mock_cognitoidp +def test_create_authorizer(): + client = boto3.client("apigateway", region_name="us-west-2") + authorizer_name = "my_authorizer" + response = client.create_rest_api(name="my_api", description="this is my api") + api_id = response["id"] + + cognito_client = boto3.client("cognito-idp", region_name="us-west-2") + user_pool_arn = cognito_client.create_user_pool(PoolName="my_cognito_pool")[ + "UserPool" + ]["Arn"] + + response = client.create_authorizer( + restApiId=api_id, + name=authorizer_name, + type="COGNITO_USER_POOLS", + providerARNs=[user_pool_arn], + identitySource="method.request.header.Authorization", + ) + authorizer_id = response["id"] + + response = client.get_authorizer(restApiId=api_id, authorizerId=authorizer_id) + # createdDate is hard to match against, remove it + response.pop("createdDate", None) + # this is hard to match against, so remove it + response["ResponseMetadata"].pop("HTTPHeaders", None) + response["ResponseMetadata"].pop("RetryAttempts", None) + response.should.equal( + { + "id": authorizer_id, + "name": authorizer_name, + "type": "COGNITO_USER_POOLS", + "providerARNs": [user_pool_arn], + "identitySource": "method.request.header.Authorization", + "authorizerResultTtlInSeconds": 300, + "ResponseMetadata": {"HTTPStatusCode": 200}, + } + ) + + authorizer_name2 = "my_authorizer2" + response = client.create_authorizer( + restApiId=api_id, + name=authorizer_name2, + type="COGNITO_USER_POOLS", + providerARNs=[user_pool_arn], + identitySource="method.request.header.Authorization", + ) + authorizer_id2 = response["id"] + + response = client.get_authorizers(restApiId=api_id) + + # this is hard to match against, so remove it + response["ResponseMetadata"].pop("HTTPHeaders", None) + response["ResponseMetadata"].pop("RetryAttempts", None) + + response["items"][0]["id"].should.match( + r"{0}|{1}".format(authorizer_id2, authorizer_id) + ) + 
response["items"][1]["id"].should.match( + r"{0}|{1}".format(authorizer_id2, authorizer_id) + ) + + new_authorizer_name_with_vars = "authorizer_with_vars" + response = client.create_authorizer( + restApiId=api_id, + name=new_authorizer_name_with_vars, + type="COGNITO_USER_POOLS", + providerARNs=[user_pool_arn], + identitySource="method.request.header.Authorization", + ) + authorizer_id3 = response["id"] + + # this is hard to match against, so remove it + response["ResponseMetadata"].pop("HTTPHeaders", None) + response["ResponseMetadata"].pop("RetryAttempts", None) + + response.should.equal( + { + "name": new_authorizer_name_with_vars, + "id": authorizer_id3, + "type": "COGNITO_USER_POOLS", + "providerARNs": [user_pool_arn], + "identitySource": "method.request.header.Authorization", + "authorizerResultTtlInSeconds": 300, + "ResponseMetadata": {"HTTPStatusCode": 200}, + } + ) + + stage = client.get_authorizer(restApiId=api_id, authorizerId=authorizer_id3) + stage["name"].should.equal(new_authorizer_name_with_vars) + stage["id"].should.equal(authorizer_id3) + stage["type"].should.equal("COGNITO_USER_POOLS") + stage["providerARNs"].should.equal([user_pool_arn]) + stage["identitySource"].should.equal("method.request.header.Authorization") + stage["authorizerResultTtlInSeconds"].should.equal(300) + + +@mock_apigateway +@mock_cognitoidp +def test_delete_authorizer(): + client = boto3.client("apigateway", region_name="us-west-2") + authorizer_name = "my_authorizer" + response = client.create_rest_api(name="my_api", description="this is my api") + api_id = response["id"] + + cognito_client = boto3.client("cognito-idp", region_name="us-west-2") + user_pool_arn = cognito_client.create_user_pool(PoolName="my_cognito_pool")[ + "UserPool" + ]["Arn"] + + response = client.create_authorizer( + restApiId=api_id, + name=authorizer_name, + type="COGNITO_USER_POOLS", + providerARNs=[user_pool_arn], + identitySource="method.request.header.Authorization", + ) + authorizer_id = 
response["id"] + + response = client.get_authorizer(restApiId=api_id, authorizerId=authorizer_id) + # createdDate is hard to match against, remove it + response.pop("createdDate", None) + # this is hard to match against, so remove it + response["ResponseMetadata"].pop("HTTPHeaders", None) + response["ResponseMetadata"].pop("RetryAttempts", None) + response.should.equal( + { + "id": authorizer_id, + "name": authorizer_name, + "type": "COGNITO_USER_POOLS", + "providerARNs": [user_pool_arn], + "identitySource": "method.request.header.Authorization", + "authorizerResultTtlInSeconds": 300, + "ResponseMetadata": {"HTTPStatusCode": 200}, + } + ) + + authorizer_name2 = "my_authorizer2" + response = client.create_authorizer( + restApiId=api_id, + name=authorizer_name2, + type="COGNITO_USER_POOLS", + providerARNs=[user_pool_arn], + identitySource="method.request.header.Authorization", + ) + authorizer_id2 = response["id"] + + authorizers = client.get_authorizers(restApiId=api_id)["items"] + sorted([authorizer["name"] for authorizer in authorizers]).should.equal( + sorted([authorizer_name2, authorizer_name]) + ) + # delete stage + response = client.delete_authorizer(restApiId=api_id, authorizerId=authorizer_id2) + response["ResponseMetadata"]["HTTPStatusCode"].should.equal(202) + # verify other stage still exists + authorizers = client.get_authorizers(restApiId=api_id)["items"] + sorted([authorizer["name"] for authorizer in authorizers]).should.equal( + sorted([authorizer_name]) + ) + + @mock_apigateway def test_update_stage_configuration(): client = boto3.client("apigateway", region_name="us-west-2") diff --git a/tests/test_awslambda/test_lambda.py b/tests/test_awslambda/test_lambda.py index d26d78fd4..2bd8f4bb3 100644 --- a/tests/test_awslambda/test_lambda.py +++ b/tests/test_awslambda/test_lambda.py @@ -150,7 +150,7 @@ def test_invoke_requestresponse_function_with_arn(): Payload=json.dumps(in_data), ) - success_result["StatusCode"].should.equal(202) + 
success_result["StatusCode"].should.equal(200) result_obj = json.loads( base64.b64decode(success_result["LogResult"]).decode("utf-8") ) @@ -1161,7 +1161,7 @@ def test_invoke_function_from_sqs(): @mock_logs @mock_lambda @mock_dynamodb2 -def test_invoke_function_from_dynamodb(): +def test_invoke_function_from_dynamodb_put(): logs_conn = boto3.client("logs", region_name="us-east-1") dynamodb = boto3.client("dynamodb", region_name="us-east-1") table_name = "table_with_stream" @@ -1218,6 +1218,72 @@ def test_invoke_function_from_dynamodb(): assert False, "Test Failed" +@mock_logs +@mock_lambda +@mock_dynamodb2 +def test_invoke_function_from_dynamodb_update(): + logs_conn = boto3.client("logs", region_name="us-east-1") + dynamodb = boto3.client("dynamodb", region_name="us-east-1") + table_name = "table_with_stream" + table = dynamodb.create_table( + TableName=table_name, + KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}], + AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}], + StreamSpecification={ + "StreamEnabled": True, + "StreamViewType": "NEW_AND_OLD_IMAGES", + }, + ) + dynamodb.put_item(TableName=table_name, Item={"id": {"S": "item 1"}}) + + conn = boto3.client("lambda", region_name="us-east-1") + func = conn.create_function( + FunctionName="testFunction", + Runtime="python2.7", + Role=get_role_name(), + Handler="lambda_function.lambda_handler", + Code={"ZipFile": get_test_zip_file3()}, + Description="test lambda function executed after a DynamoDB table is updated", + Timeout=3, + MemorySize=128, + Publish=True, + ) + + response = conn.create_event_source_mapping( + EventSourceArn=table["TableDescription"]["LatestStreamArn"], + FunctionName=func["FunctionArn"], + ) + + assert response["EventSourceArn"] == table["TableDescription"]["LatestStreamArn"] + assert response["State"] == "Enabled" + dynamodb.update_item( + TableName=table_name, + Key={"id": {"S": "item 1"}}, + UpdateExpression="set #attr = :val", + ExpressionAttributeNames={"#attr": 
"new_attr"}, + ExpressionAttributeValues={":val": {"S": "new_val"}}, + ) + start = time.time() + while (time.time() - start) < 30: + result = logs_conn.describe_log_streams(logGroupName="/aws/lambda/testFunction") + log_streams = result.get("logStreams") + if not log_streams: + time.sleep(1) + continue + + assert len(log_streams) == 1 + result = logs_conn.get_log_events( + logGroupName="/aws/lambda/testFunction", + logStreamName=log_streams[0]["logStreamName"], + ) + for event in result.get("events"): + if event["message"] == "get_test_zip_file3 success": + return + time.sleep(1) + + assert False, "Test Failed" + + @mock_logs @mock_lambda @mock_sqs diff --git a/tests/test_cloudformation/fixtures/vpc_eip.py b/tests/test_cloudformation/fixtures/vpc_eip.py index b5bd48c01..154d4c2d4 100644 --- a/tests/test_cloudformation/fixtures/vpc_eip.py +++ b/tests/test_cloudformation/fixtures/vpc_eip.py @@ -1,5 +1,5 @@ -from __future__ import unicode_literals - -template = { - "Resources": {"VPCEIP": {"Type": "AWS::EC2::EIP", "Properties": {"Domain": "vpc"}}} -} +from __future__ import unicode_literals + +template = { + "Resources": {"VPCEIP": {"Type": "AWS::EC2::EIP", "Properties": {"Domain": "vpc"}}} +} diff --git a/tests/test_cloudformation/fixtures/vpc_single_instance_in_subnet.py b/tests/test_cloudformation/fixtures/vpc_single_instance_in_subnet.py index ff7b75518..546f68cb4 100644 --- a/tests/test_cloudformation/fixtures/vpc_single_instance_in_subnet.py +++ b/tests/test_cloudformation/fixtures/vpc_single_instance_in_subnet.py @@ -1,276 +1,276 @@ -from __future__ import unicode_literals - -template = { - "Description": "AWS CloudFormation Sample Template vpc_single_instance_in_subnet.template: Sample template showing how to create a VPC and add an EC2 instance with an Elastic IP address and a security group. **WARNING** This template creates an Amazon EC2 instance. 
You will be billed for the AWS resources used if you create a stack from this template.", - "Parameters": { - "SSHLocation": { - "ConstraintDescription": "must be a valid IP CIDR range of the form x.x.x.x/x.", - "Description": " The IP address range that can be used to SSH to the EC2 instances", - "Default": "0.0.0.0/0", - "MinLength": "9", - "AllowedPattern": "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,2})", - "MaxLength": "18", - "Type": "String", - }, - "KeyName": { - "Type": "String", - "Description": "Name of an existing EC2 KeyPair to enable SSH access to the instance", - "MinLength": "1", - "AllowedPattern": "[\\x20-\\x7E]*", - "MaxLength": "255", - "ConstraintDescription": "can contain only ASCII characters.", - }, - "InstanceType": { - "Default": "m1.small", - "ConstraintDescription": "must be a valid EC2 instance type.", - "Type": "String", - "Description": "WebServer EC2 instance type", - "AllowedValues": [ - "t1.micro", - "m1.small", - "m1.medium", - "m1.large", - "m1.xlarge", - "m2.xlarge", - "m2.2xlarge", - "m2.4xlarge", - "m3.xlarge", - "m3.2xlarge", - "c1.medium", - "c1.xlarge", - "cc1.4xlarge", - "cc2.8xlarge", - "cg1.4xlarge", - ], - }, - }, - "AWSTemplateFormatVersion": "2010-09-09", - "Outputs": { - "URL": { - "Description": "Newly created application URL", - "Value": { - "Fn::Join": [ - "", - ["http://", {"Fn::GetAtt": ["WebServerInstance", "PublicIp"]}], - ] - }, - } - }, - "Resources": { - "Subnet": { - "Type": "AWS::EC2::Subnet", - "Properties": { - "VpcId": {"Ref": "VPC"}, - "CidrBlock": "10.0.0.0/24", - "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}], - }, - }, - "WebServerWaitHandle": {"Type": "AWS::CloudFormation::WaitConditionHandle"}, - "Route": { - "Type": "AWS::EC2::Route", - "Properties": { - "GatewayId": {"Ref": "InternetGateway"}, - "DestinationCidrBlock": "0.0.0.0/0", - "RouteTableId": {"Ref": "RouteTable"}, - }, - "DependsOn": "AttachGateway", - }, - "SubnetRouteTableAssociation": { - "Type": 
"AWS::EC2::SubnetRouteTableAssociation", - "Properties": { - "SubnetId": {"Ref": "Subnet"}, - "RouteTableId": {"Ref": "RouteTable"}, - }, - }, - "InternetGateway": { - "Type": "AWS::EC2::InternetGateway", - "Properties": { - "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}] - }, - }, - "RouteTable": { - "Type": "AWS::EC2::RouteTable", - "Properties": { - "VpcId": {"Ref": "VPC"}, - "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}], - }, - }, - "WebServerWaitCondition": { - "Type": "AWS::CloudFormation::WaitCondition", - "Properties": {"Handle": {"Ref": "WebServerWaitHandle"}, "Timeout": "300"}, - "DependsOn": "WebServerInstance", - }, - "VPC": { - "Type": "AWS::EC2::VPC", - "Properties": { - "CidrBlock": "10.0.0.0/16", - "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}], - }, - }, - "InstanceSecurityGroup": { - "Type": "AWS::EC2::SecurityGroup", - "Properties": { - "SecurityGroupIngress": [ - { - "ToPort": "22", - "IpProtocol": "tcp", - "CidrIp": {"Ref": "SSHLocation"}, - "FromPort": "22", - }, - { - "ToPort": "80", - "IpProtocol": "tcp", - "CidrIp": "0.0.0.0/0", - "FromPort": "80", - }, - ], - "VpcId": {"Ref": "VPC"}, - "GroupDescription": "Enable SSH access via port 22", - }, - }, - "WebServerInstance": { - "Type": "AWS::EC2::Instance", - "Properties": { - "UserData": { - "Fn::Base64": { - "Fn::Join": [ - "", - [ - "#!/bin/bash\n", - "yum update -y aws-cfn-bootstrap\n", - "# Helper function\n", - "function error_exit\n", - "{\n", - ' /opt/aws/bin/cfn-signal -e 1 -r "$1" \'', - {"Ref": "WebServerWaitHandle"}, - "'\n", - " exit 1\n", - "}\n", - "# Install the simple web page\n", - "/opt/aws/bin/cfn-init -s ", - {"Ref": "AWS::StackId"}, - " -r WebServerInstance ", - " --region ", - {"Ref": "AWS::Region"}, - " || error_exit 'Failed to run cfn-init'\n", - "# Start up the cfn-hup daemon to listen for changes to the Web Server metadata\n", - "/opt/aws/bin/cfn-hup || error_exit 'Failed to start cfn-hup'\n", - "# All done 
so signal success\n", - '/opt/aws/bin/cfn-signal -e 0 -r "WebServer setup complete" \'', - {"Ref": "WebServerWaitHandle"}, - "'\n", - ], - ] - } - }, - "Tags": [ - {"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}, - {"Value": "Bar", "Key": "Foo"}, - ], - "SecurityGroupIds": [{"Ref": "InstanceSecurityGroup"}], - "KeyName": {"Ref": "KeyName"}, - "SubnetId": {"Ref": "Subnet"}, - "ImageId": { - "Fn::FindInMap": ["RegionMap", {"Ref": "AWS::Region"}, "AMI"] - }, - "InstanceType": {"Ref": "InstanceType"}, - }, - "Metadata": { - "Comment": "Install a simple PHP application", - "AWS::CloudFormation::Init": { - "config": { - "files": { - "/etc/cfn/cfn-hup.conf": { - "content": { - "Fn::Join": [ - "", - [ - "[main]\n", - "stack=", - {"Ref": "AWS::StackId"}, - "\n", - "region=", - {"Ref": "AWS::Region"}, - "\n", - ], - ] - }, - "owner": "root", - "group": "root", - "mode": "000400", - }, - "/etc/cfn/hooks.d/cfn-auto-reloader.conf": { - "content": { - "Fn::Join": [ - "", - [ - "[cfn-auto-reloader-hook]\n", - "triggers=post.update\n", - "path=Resources.WebServerInstance.Metadata.AWS::CloudFormation::Init\n", - "action=/opt/aws/bin/cfn-init -s ", - {"Ref": "AWS::StackId"}, - " -r WebServerInstance ", - " --region ", - {"Ref": "AWS::Region"}, - "\n", - "runas=root\n", - ], - ] - } - }, - "/var/www/html/index.php": { - "content": { - "Fn::Join": [ - "", - [ - "AWS CloudFormation sample PHP application';\n", - "?>\n", - ], - ] - }, - "owner": "apache", - "group": "apache", - "mode": "000644", - }, - }, - "services": { - "sysvinit": { - "httpd": {"ensureRunning": "true", "enabled": "true"}, - "sendmail": { - "ensureRunning": "false", - "enabled": "false", - }, - } - }, - "packages": {"yum": {"httpd": [], "php": []}}, - } - }, - }, - }, - "IPAddress": { - "Type": "AWS::EC2::EIP", - "Properties": {"InstanceId": {"Ref": "WebServerInstance"}, "Domain": "vpc"}, - "DependsOn": "AttachGateway", - }, - "AttachGateway": { - "Type": "AWS::EC2::VPCGatewayAttachment", - "Properties": { - 
"VpcId": {"Ref": "VPC"}, - "InternetGatewayId": {"Ref": "InternetGateway"}, - }, - }, - }, - "Mappings": { - "RegionMap": { - "ap-southeast-1": {"AMI": "ami-74dda626"}, - "ap-southeast-2": {"AMI": "ami-b3990e89"}, - "us-west-2": {"AMI": "ami-16fd7026"}, - "us-east-1": {"AMI": "ami-7f418316"}, - "ap-northeast-1": {"AMI": "ami-dcfa4edd"}, - "us-west-1": {"AMI": "ami-951945d0"}, - "eu-west-1": {"AMI": "ami-24506250"}, - "sa-east-1": {"AMI": "ami-3e3be423"}, - } - }, -} +from __future__ import unicode_literals + +template = { + "Description": "AWS CloudFormation Sample Template vpc_single_instance_in_subnet.template: Sample template showing how to create a VPC and add an EC2 instance with an Elastic IP address and a security group. **WARNING** This template creates an Amazon EC2 instance. You will be billed for the AWS resources used if you create a stack from this template.", + "Parameters": { + "SSHLocation": { + "ConstraintDescription": "must be a valid IP CIDR range of the form x.x.x.x/x.", + "Description": " The IP address range that can be used to SSH to the EC2 instances", + "Default": "0.0.0.0/0", + "MinLength": "9", + "AllowedPattern": "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,2})", + "MaxLength": "18", + "Type": "String", + }, + "KeyName": { + "Type": "String", + "Description": "Name of an existing EC2 KeyPair to enable SSH access to the instance", + "MinLength": "1", + "AllowedPattern": "[\\x20-\\x7E]*", + "MaxLength": "255", + "ConstraintDescription": "can contain only ASCII characters.", + }, + "InstanceType": { + "Default": "m1.small", + "ConstraintDescription": "must be a valid EC2 instance type.", + "Type": "String", + "Description": "WebServer EC2 instance type", + "AllowedValues": [ + "t1.micro", + "m1.small", + "m1.medium", + "m1.large", + "m1.xlarge", + "m2.xlarge", + "m2.2xlarge", + "m2.4xlarge", + "m3.xlarge", + "m3.2xlarge", + "c1.medium", + "c1.xlarge", + "cc1.4xlarge", + "cc2.8xlarge", + "cg1.4xlarge", + ], + }, + }, + 
"AWSTemplateFormatVersion": "2010-09-09", + "Outputs": { + "URL": { + "Description": "Newly created application URL", + "Value": { + "Fn::Join": [ + "", + ["http://", {"Fn::GetAtt": ["WebServerInstance", "PublicIp"]}], + ] + }, + } + }, + "Resources": { + "Subnet": { + "Type": "AWS::EC2::Subnet", + "Properties": { + "VpcId": {"Ref": "VPC"}, + "CidrBlock": "10.0.0.0/24", + "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}], + }, + }, + "WebServerWaitHandle": {"Type": "AWS::CloudFormation::WaitConditionHandle"}, + "Route": { + "Type": "AWS::EC2::Route", + "Properties": { + "GatewayId": {"Ref": "InternetGateway"}, + "DestinationCidrBlock": "0.0.0.0/0", + "RouteTableId": {"Ref": "RouteTable"}, + }, + "DependsOn": "AttachGateway", + }, + "SubnetRouteTableAssociation": { + "Type": "AWS::EC2::SubnetRouteTableAssociation", + "Properties": { + "SubnetId": {"Ref": "Subnet"}, + "RouteTableId": {"Ref": "RouteTable"}, + }, + }, + "InternetGateway": { + "Type": "AWS::EC2::InternetGateway", + "Properties": { + "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}] + }, + }, + "RouteTable": { + "Type": "AWS::EC2::RouteTable", + "Properties": { + "VpcId": {"Ref": "VPC"}, + "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}], + }, + }, + "WebServerWaitCondition": { + "Type": "AWS::CloudFormation::WaitCondition", + "Properties": {"Handle": {"Ref": "WebServerWaitHandle"}, "Timeout": "300"}, + "DependsOn": "WebServerInstance", + }, + "VPC": { + "Type": "AWS::EC2::VPC", + "Properties": { + "CidrBlock": "10.0.0.0/16", + "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}], + }, + }, + "InstanceSecurityGroup": { + "Type": "AWS::EC2::SecurityGroup", + "Properties": { + "SecurityGroupIngress": [ + { + "ToPort": "22", + "IpProtocol": "tcp", + "CidrIp": {"Ref": "SSHLocation"}, + "FromPort": "22", + }, + { + "ToPort": "80", + "IpProtocol": "tcp", + "CidrIp": "0.0.0.0/0", + "FromPort": "80", + }, + ], + "VpcId": {"Ref": "VPC"}, + 
"GroupDescription": "Enable SSH access via port 22", + }, + }, + "WebServerInstance": { + "Type": "AWS::EC2::Instance", + "Properties": { + "UserData": { + "Fn::Base64": { + "Fn::Join": [ + "", + [ + "#!/bin/bash\n", + "yum update -y aws-cfn-bootstrap\n", + "# Helper function\n", + "function error_exit\n", + "{\n", + ' /opt/aws/bin/cfn-signal -e 1 -r "$1" \'', + {"Ref": "WebServerWaitHandle"}, + "'\n", + " exit 1\n", + "}\n", + "# Install the simple web page\n", + "/opt/aws/bin/cfn-init -s ", + {"Ref": "AWS::StackId"}, + " -r WebServerInstance ", + " --region ", + {"Ref": "AWS::Region"}, + " || error_exit 'Failed to run cfn-init'\n", + "# Start up the cfn-hup daemon to listen for changes to the Web Server metadata\n", + "/opt/aws/bin/cfn-hup || error_exit 'Failed to start cfn-hup'\n", + "# All done so signal success\n", + '/opt/aws/bin/cfn-signal -e 0 -r "WebServer setup complete" \'', + {"Ref": "WebServerWaitHandle"}, + "'\n", + ], + ] + } + }, + "Tags": [ + {"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}, + {"Value": "Bar", "Key": "Foo"}, + ], + "SecurityGroupIds": [{"Ref": "InstanceSecurityGroup"}], + "KeyName": {"Ref": "KeyName"}, + "SubnetId": {"Ref": "Subnet"}, + "ImageId": { + "Fn::FindInMap": ["RegionMap", {"Ref": "AWS::Region"}, "AMI"] + }, + "InstanceType": {"Ref": "InstanceType"}, + }, + "Metadata": { + "Comment": "Install a simple PHP application", + "AWS::CloudFormation::Init": { + "config": { + "files": { + "/etc/cfn/cfn-hup.conf": { + "content": { + "Fn::Join": [ + "", + [ + "[main]\n", + "stack=", + {"Ref": "AWS::StackId"}, + "\n", + "region=", + {"Ref": "AWS::Region"}, + "\n", + ], + ] + }, + "owner": "root", + "group": "root", + "mode": "000400", + }, + "/etc/cfn/hooks.d/cfn-auto-reloader.conf": { + "content": { + "Fn::Join": [ + "", + [ + "[cfn-auto-reloader-hook]\n", + "triggers=post.update\n", + "path=Resources.WebServerInstance.Metadata.AWS::CloudFormation::Init\n", + "action=/opt/aws/bin/cfn-init -s ", + {"Ref": "AWS::StackId"}, + " 
-r WebServerInstance ", + " --region ", + {"Ref": "AWS::Region"}, + "\n", + "runas=root\n", + ], + ] + } + }, + "/var/www/html/index.php": { + "content": { + "Fn::Join": [ + "", + [ + "AWS CloudFormation sample PHP application';\n", + "?>\n", + ], + ] + }, + "owner": "apache", + "group": "apache", + "mode": "000644", + }, + }, + "services": { + "sysvinit": { + "httpd": {"ensureRunning": "true", "enabled": "true"}, + "sendmail": { + "ensureRunning": "false", + "enabled": "false", + }, + } + }, + "packages": {"yum": {"httpd": [], "php": []}}, + } + }, + }, + }, + "IPAddress": { + "Type": "AWS::EC2::EIP", + "Properties": {"InstanceId": {"Ref": "WebServerInstance"}, "Domain": "vpc"}, + "DependsOn": "AttachGateway", + }, + "AttachGateway": { + "Type": "AWS::EC2::VPCGatewayAttachment", + "Properties": { + "VpcId": {"Ref": "VPC"}, + "InternetGatewayId": {"Ref": "InternetGateway"}, + }, + }, + }, + "Mappings": { + "RegionMap": { + "ap-southeast-1": {"AMI": "ami-74dda626"}, + "ap-southeast-2": {"AMI": "ami-b3990e89"}, + "us-west-2": {"AMI": "ami-16fd7026"}, + "us-east-1": {"AMI": "ami-7f418316"}, + "ap-northeast-1": {"AMI": "ami-dcfa4edd"}, + "us-west-1": {"AMI": "ami-951945d0"}, + "eu-west-1": {"AMI": "ami-24506250"}, + "sa-east-1": {"AMI": "ami-3e3be423"}, + } + }, +} diff --git a/tests/test_cloudwatch/test_cloudwatch.py b/tests/test_cloudwatch/test_cloudwatch.py index f1a2e3fd6..cc624e852 100644 --- a/tests/test_cloudwatch/test_cloudwatch.py +++ b/tests/test_cloudwatch/test_cloudwatch.py @@ -1,117 +1,117 @@ -import boto -from boto.ec2.cloudwatch.alarm import MetricAlarm -import sure # noqa - -from moto import mock_cloudwatch_deprecated - - -def alarm_fixture(name="tester", action=None): - action = action or ["arn:alarm"] - return MetricAlarm( - name=name, - namespace="{0}_namespace".format(name), - metric="{0}_metric".format(name), - comparison=">=", - threshold=2.0, - period=60, - evaluation_periods=5, - statistic="Average", - description="A test", - 
dimensions={"InstanceId": ["i-0123456,i-0123457"]}, - alarm_actions=action, - ok_actions=["arn:ok"], - insufficient_data_actions=["arn:insufficient"], - unit="Seconds", - ) - - -@mock_cloudwatch_deprecated -def test_create_alarm(): - conn = boto.connect_cloudwatch() - - alarm = alarm_fixture() - conn.create_alarm(alarm) - - alarms = conn.describe_alarms() - alarms.should.have.length_of(1) - alarm = alarms[0] - alarm.name.should.equal("tester") - alarm.namespace.should.equal("tester_namespace") - alarm.metric.should.equal("tester_metric") - alarm.comparison.should.equal(">=") - alarm.threshold.should.equal(2.0) - alarm.period.should.equal(60) - alarm.evaluation_periods.should.equal(5) - alarm.statistic.should.equal("Average") - alarm.description.should.equal("A test") - dict(alarm.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]}) - list(alarm.alarm_actions).should.equal(["arn:alarm"]) - list(alarm.ok_actions).should.equal(["arn:ok"]) - list(alarm.insufficient_data_actions).should.equal(["arn:insufficient"]) - alarm.unit.should.equal("Seconds") - - -@mock_cloudwatch_deprecated -def test_delete_alarm(): - conn = boto.connect_cloudwatch() - - alarms = conn.describe_alarms() - alarms.should.have.length_of(0) - - alarm = alarm_fixture() - conn.create_alarm(alarm) - - alarms = conn.describe_alarms() - alarms.should.have.length_of(1) - - alarms[0].delete() - - alarms = conn.describe_alarms() - alarms.should.have.length_of(0) - - -@mock_cloudwatch_deprecated -def test_put_metric_data(): - conn = boto.connect_cloudwatch() - - conn.put_metric_data( - namespace="tester", - name="metric", - value=1.5, - dimensions={"InstanceId": ["i-0123456,i-0123457"]}, - ) - - metrics = conn.list_metrics() - metrics.should.have.length_of(1) - metric = metrics[0] - metric.namespace.should.equal("tester") - metric.name.should.equal("metric") - dict(metric.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]}) - - -@mock_cloudwatch_deprecated -def 
test_describe_alarms(): - conn = boto.connect_cloudwatch() - - alarms = conn.describe_alarms() - alarms.should.have.length_of(0) - - conn.create_alarm(alarm_fixture(name="nfoobar", action="afoobar")) - conn.create_alarm(alarm_fixture(name="nfoobaz", action="afoobaz")) - conn.create_alarm(alarm_fixture(name="nbarfoo", action="abarfoo")) - conn.create_alarm(alarm_fixture(name="nbazfoo", action="abazfoo")) - - alarms = conn.describe_alarms() - alarms.should.have.length_of(4) - alarms = conn.describe_alarms(alarm_name_prefix="nfoo") - alarms.should.have.length_of(2) - alarms = conn.describe_alarms(alarm_names=["nfoobar", "nbarfoo", "nbazfoo"]) - alarms.should.have.length_of(3) - alarms = conn.describe_alarms(action_prefix="afoo") - alarms.should.have.length_of(2) - - for alarm in conn.describe_alarms(): - alarm.delete() - - alarms = conn.describe_alarms() - alarms.should.have.length_of(0) +import boto +from boto.ec2.cloudwatch.alarm import MetricAlarm +import sure # noqa + +from moto import mock_cloudwatch_deprecated + + +def alarm_fixture(name="tester", action=None): + action = action or ["arn:alarm"] + return MetricAlarm( + name=name, + namespace="{0}_namespace".format(name), + metric="{0}_metric".format(name), + comparison=">=", + threshold=2.0, + period=60, + evaluation_periods=5, + statistic="Average", + description="A test", + dimensions={"InstanceId": ["i-0123456,i-0123457"]}, + alarm_actions=action, + ok_actions=["arn:ok"], + insufficient_data_actions=["arn:insufficient"], + unit="Seconds", + ) + + +@mock_cloudwatch_deprecated +def test_create_alarm(): + conn = boto.connect_cloudwatch() + + alarm = alarm_fixture() + conn.create_alarm(alarm) + + alarms = conn.describe_alarms() + alarms.should.have.length_of(1) + alarm = alarms[0] + alarm.name.should.equal("tester") + alarm.namespace.should.equal("tester_namespace") + alarm.metric.should.equal("tester_metric") + alarm.comparison.should.equal(">=") + alarm.threshold.should.equal(2.0) + 
alarm.period.should.equal(60) + alarm.evaluation_periods.should.equal(5) + alarm.statistic.should.equal("Average") + alarm.description.should.equal("A test") + dict(alarm.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]}) + list(alarm.alarm_actions).should.equal(["arn:alarm"]) + list(alarm.ok_actions).should.equal(["arn:ok"]) + list(alarm.insufficient_data_actions).should.equal(["arn:insufficient"]) + alarm.unit.should.equal("Seconds") + + +@mock_cloudwatch_deprecated +def test_delete_alarm(): + conn = boto.connect_cloudwatch() + + alarms = conn.describe_alarms() + alarms.should.have.length_of(0) + + alarm = alarm_fixture() + conn.create_alarm(alarm) + + alarms = conn.describe_alarms() + alarms.should.have.length_of(1) + + alarms[0].delete() + + alarms = conn.describe_alarms() + alarms.should.have.length_of(0) + + +@mock_cloudwatch_deprecated +def test_put_metric_data(): + conn = boto.connect_cloudwatch() + + conn.put_metric_data( + namespace="tester", + name="metric", + value=1.5, + dimensions={"InstanceId": ["i-0123456,i-0123457"]}, + ) + + metrics = conn.list_metrics() + metrics.should.have.length_of(1) + metric = metrics[0] + metric.namespace.should.equal("tester") + metric.name.should.equal("metric") + dict(metric.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]}) + + +@mock_cloudwatch_deprecated +def test_describe_alarms(): + conn = boto.connect_cloudwatch() + + alarms = conn.describe_alarms() + alarms.should.have.length_of(0) + + conn.create_alarm(alarm_fixture(name="nfoobar", action="afoobar")) + conn.create_alarm(alarm_fixture(name="nfoobaz", action="afoobaz")) + conn.create_alarm(alarm_fixture(name="nbarfoo", action="abarfoo")) + conn.create_alarm(alarm_fixture(name="nbazfoo", action="abazfoo")) + + alarms = conn.describe_alarms() + alarms.should.have.length_of(4) + alarms = conn.describe_alarms(alarm_name_prefix="nfoo") + alarms.should.have.length_of(2) + alarms = conn.describe_alarms(alarm_names=["nfoobar", "nbarfoo", 
"nbazfoo"]) + alarms.should.have.length_of(3) + alarms = conn.describe_alarms(action_prefix="afoo") + alarms.should.have.length_of(2) + + for alarm in conn.describe_alarms(): + alarm.delete() + + alarms = conn.describe_alarms() + alarms.should.have.length_of(0) diff --git a/tests/test_cognitoidp/test_cognitoidp.py b/tests/test_cognitoidp/test_cognitoidp.py index 2f7ed11e5..37e1a56a3 100644 --- a/tests/test_cognitoidp/test_cognitoidp.py +++ b/tests/test_cognitoidp/test_cognitoidp.py @@ -27,6 +27,11 @@ def test_create_user_pool(): result["UserPool"]["Id"].should_not.be.none result["UserPool"]["Id"].should.match(r"[\w-]+_[0-9a-zA-Z]+") + result["UserPool"]["Arn"].should.equal( + "arn:aws:cognito-idp:us-west-2:{}:userpool/{}".format( + ACCOUNT_ID, result["UserPool"]["Id"] + ) + ) result["UserPool"]["Name"].should.equal(name) result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value) @@ -911,6 +916,55 @@ def test_admin_create_existing_user(): caught.should.be.true +@mock_cognitoidp +def test_admin_resend_invitation_existing_user(): + conn = boto3.client("cognito-idp", "us-west-2") + + username = str(uuid.uuid4()) + value = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + conn.admin_create_user( + UserPoolId=user_pool_id, + Username=username, + UserAttributes=[{"Name": "thing", "Value": value}], + ) + + caught = False + try: + conn.admin_create_user( + UserPoolId=user_pool_id, + Username=username, + UserAttributes=[{"Name": "thing", "Value": value}], + MessageAction="RESEND", + ) + except conn.exceptions.UsernameExistsException: + caught = True + + caught.should.be.false + + +@mock_cognitoidp +def test_admin_resend_invitation_missing_user(): + conn = boto3.client("cognito-idp", "us-west-2") + + username = str(uuid.uuid4()) + value = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + caught = False + try: + conn.admin_create_user( + 
UserPoolId=user_pool_id, + Username=username, + UserAttributes=[{"Name": "thing", "Value": value}], + MessageAction="RESEND", + ) + except conn.exceptions.UserNotFoundException: + caught = True + + caught.should.be.true + + @mock_cognitoidp def test_admin_get_user(): conn = boto3.client("cognito-idp", "us-west-2") diff --git a/tests/test_core/test_server.py b/tests/test_core/test_server.py index 5514223af..205a2ad0f 100644 --- a/tests/test_core/test_server.py +++ b/tests/test_core/test_server.py @@ -46,4 +46,4 @@ def test_domain_dispatched_with_service(): dispatcher = DomainDispatcherApplication(create_backend_app, service="s3") backend_app = dispatcher.get_application({"HTTP_HOST": "s3.us-east1.amazonaws.com"}) keys = set(backend_app.view_functions.keys()) - keys.should.contain("ResponseObject.key_response") + keys.should.contain("ResponseObject.key_or_control_response") diff --git a/tests/test_datapipeline/test_datapipeline.py b/tests/test_datapipeline/test_datapipeline.py index 42063b506..b540d120e 100644 --- a/tests/test_datapipeline/test_datapipeline.py +++ b/tests/test_datapipeline/test_datapipeline.py @@ -1,182 +1,182 @@ -from __future__ import unicode_literals - -import boto.datapipeline -import sure # noqa - -from moto import mock_datapipeline_deprecated -from moto.datapipeline.utils import remove_capitalization_of_dict_keys - - -def get_value_from_fields(key, fields): - for field in fields: - if field["key"] == key: - return field["stringValue"] - - -@mock_datapipeline_deprecated -def test_create_pipeline(): - conn = boto.datapipeline.connect_to_region("us-west-2") - - res = conn.create_pipeline("mypipeline", "some-unique-id") - - pipeline_id = res["pipelineId"] - pipeline_descriptions = conn.describe_pipelines([pipeline_id])[ - "pipelineDescriptionList" - ] - pipeline_descriptions.should.have.length_of(1) - - pipeline_description = pipeline_descriptions[0] - pipeline_description["name"].should.equal("mypipeline") - 
pipeline_description["pipelineId"].should.equal(pipeline_id) - fields = pipeline_description["fields"] - - get_value_from_fields("@pipelineState", fields).should.equal("PENDING") - get_value_from_fields("uniqueId", fields).should.equal("some-unique-id") - - -PIPELINE_OBJECTS = [ - { - "id": "Default", - "name": "Default", - "fields": [{"key": "workerGroup", "stringValue": "workerGroup"}], - }, - { - "id": "Schedule", - "name": "Schedule", - "fields": [ - {"key": "startDateTime", "stringValue": "2012-12-12T00:00:00"}, - {"key": "type", "stringValue": "Schedule"}, - {"key": "period", "stringValue": "1 hour"}, - {"key": "endDateTime", "stringValue": "2012-12-21T18:00:00"}, - ], - }, - { - "id": "SayHello", - "name": "SayHello", - "fields": [ - {"key": "type", "stringValue": "ShellCommandActivity"}, - {"key": "command", "stringValue": "echo hello"}, - {"key": "parent", "refValue": "Default"}, - {"key": "schedule", "refValue": "Schedule"}, - ], - }, -] - - -@mock_datapipeline_deprecated -def test_creating_pipeline_definition(): - conn = boto.datapipeline.connect_to_region("us-west-2") - res = conn.create_pipeline("mypipeline", "some-unique-id") - pipeline_id = res["pipelineId"] - - conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id) - - pipeline_definition = conn.get_pipeline_definition(pipeline_id) - pipeline_definition["pipelineObjects"].should.have.length_of(3) - default_object = pipeline_definition["pipelineObjects"][0] - default_object["name"].should.equal("Default") - default_object["id"].should.equal("Default") - default_object["fields"].should.equal( - [{"key": "workerGroup", "stringValue": "workerGroup"}] - ) - - -@mock_datapipeline_deprecated -def test_describing_pipeline_objects(): - conn = boto.datapipeline.connect_to_region("us-west-2") - res = conn.create_pipeline("mypipeline", "some-unique-id") - pipeline_id = res["pipelineId"] - - conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id) - - objects = conn.describe_objects(["Schedule", 
"Default"], pipeline_id)[ - "pipelineObjects" - ] - - objects.should.have.length_of(2) - default_object = [x for x in objects if x["id"] == "Default"][0] - default_object["name"].should.equal("Default") - default_object["fields"].should.equal( - [{"key": "workerGroup", "stringValue": "workerGroup"}] - ) - - -@mock_datapipeline_deprecated -def test_activate_pipeline(): - conn = boto.datapipeline.connect_to_region("us-west-2") - - res = conn.create_pipeline("mypipeline", "some-unique-id") - - pipeline_id = res["pipelineId"] - conn.activate_pipeline(pipeline_id) - - pipeline_descriptions = conn.describe_pipelines([pipeline_id])[ - "pipelineDescriptionList" - ] - pipeline_descriptions.should.have.length_of(1) - pipeline_description = pipeline_descriptions[0] - fields = pipeline_description["fields"] - - get_value_from_fields("@pipelineState", fields).should.equal("SCHEDULED") - - -@mock_datapipeline_deprecated -def test_delete_pipeline(): - conn = boto.datapipeline.connect_to_region("us-west-2") - res = conn.create_pipeline("mypipeline", "some-unique-id") - pipeline_id = res["pipelineId"] - - conn.delete_pipeline(pipeline_id) - - response = conn.list_pipelines() - - response["pipelineIdList"].should.have.length_of(0) - - -@mock_datapipeline_deprecated -def test_listing_pipelines(): - conn = boto.datapipeline.connect_to_region("us-west-2") - res1 = conn.create_pipeline("mypipeline1", "some-unique-id1") - res2 = conn.create_pipeline("mypipeline2", "some-unique-id2") - - response = conn.list_pipelines() - - response["hasMoreResults"].should.be(False) - response["marker"].should.be.none - response["pipelineIdList"].should.have.length_of(2) - response["pipelineIdList"].should.contain( - {"id": res1["pipelineId"], "name": "mypipeline1"} - ) - response["pipelineIdList"].should.contain( - {"id": res2["pipelineId"], "name": "mypipeline2"} - ) - - -@mock_datapipeline_deprecated -def test_listing_paginated_pipelines(): - conn = boto.datapipeline.connect_to_region("us-west-2") - 
for i in range(100): - conn.create_pipeline("mypipeline%d" % i, "some-unique-id%d" % i) - - response = conn.list_pipelines() - - response["hasMoreResults"].should.be(True) - response["marker"].should.equal(response["pipelineIdList"][-1]["id"]) - response["pipelineIdList"].should.have.length_of(50) - - -# testing a helper function -def test_remove_capitalization_of_dict_keys(): - result = remove_capitalization_of_dict_keys( - { - "Id": "IdValue", - "Fields": [{"Key": "KeyValue", "StringValue": "StringValueValue"}], - } - ) - - result.should.equal( - { - "id": "IdValue", - "fields": [{"key": "KeyValue", "stringValue": "StringValueValue"}], - } - ) +from __future__ import unicode_literals + +import boto.datapipeline +import sure # noqa + +from moto import mock_datapipeline_deprecated +from moto.datapipeline.utils import remove_capitalization_of_dict_keys + + +def get_value_from_fields(key, fields): + for field in fields: + if field["key"] == key: + return field["stringValue"] + + +@mock_datapipeline_deprecated +def test_create_pipeline(): + conn = boto.datapipeline.connect_to_region("us-west-2") + + res = conn.create_pipeline("mypipeline", "some-unique-id") + + pipeline_id = res["pipelineId"] + pipeline_descriptions = conn.describe_pipelines([pipeline_id])[ + "pipelineDescriptionList" + ] + pipeline_descriptions.should.have.length_of(1) + + pipeline_description = pipeline_descriptions[0] + pipeline_description["name"].should.equal("mypipeline") + pipeline_description["pipelineId"].should.equal(pipeline_id) + fields = pipeline_description["fields"] + + get_value_from_fields("@pipelineState", fields).should.equal("PENDING") + get_value_from_fields("uniqueId", fields).should.equal("some-unique-id") + + +PIPELINE_OBJECTS = [ + { + "id": "Default", + "name": "Default", + "fields": [{"key": "workerGroup", "stringValue": "workerGroup"}], + }, + { + "id": "Schedule", + "name": "Schedule", + "fields": [ + {"key": "startDateTime", "stringValue": "2012-12-12T00:00:00"}, + 
{"key": "type", "stringValue": "Schedule"}, + {"key": "period", "stringValue": "1 hour"}, + {"key": "endDateTime", "stringValue": "2012-12-21T18:00:00"}, + ], + }, + { + "id": "SayHello", + "name": "SayHello", + "fields": [ + {"key": "type", "stringValue": "ShellCommandActivity"}, + {"key": "command", "stringValue": "echo hello"}, + {"key": "parent", "refValue": "Default"}, + {"key": "schedule", "refValue": "Schedule"}, + ], + }, +] + + +@mock_datapipeline_deprecated +def test_creating_pipeline_definition(): + conn = boto.datapipeline.connect_to_region("us-west-2") + res = conn.create_pipeline("mypipeline", "some-unique-id") + pipeline_id = res["pipelineId"] + + conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id) + + pipeline_definition = conn.get_pipeline_definition(pipeline_id) + pipeline_definition["pipelineObjects"].should.have.length_of(3) + default_object = pipeline_definition["pipelineObjects"][0] + default_object["name"].should.equal("Default") + default_object["id"].should.equal("Default") + default_object["fields"].should.equal( + [{"key": "workerGroup", "stringValue": "workerGroup"}] + ) + + +@mock_datapipeline_deprecated +def test_describing_pipeline_objects(): + conn = boto.datapipeline.connect_to_region("us-west-2") + res = conn.create_pipeline("mypipeline", "some-unique-id") + pipeline_id = res["pipelineId"] + + conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id) + + objects = conn.describe_objects(["Schedule", "Default"], pipeline_id)[ + "pipelineObjects" + ] + + objects.should.have.length_of(2) + default_object = [x for x in objects if x["id"] == "Default"][0] + default_object["name"].should.equal("Default") + default_object["fields"].should.equal( + [{"key": "workerGroup", "stringValue": "workerGroup"}] + ) + + +@mock_datapipeline_deprecated +def test_activate_pipeline(): + conn = boto.datapipeline.connect_to_region("us-west-2") + + res = conn.create_pipeline("mypipeline", "some-unique-id") + + pipeline_id = res["pipelineId"] + 
conn.activate_pipeline(pipeline_id) + + pipeline_descriptions = conn.describe_pipelines([pipeline_id])[ + "pipelineDescriptionList" + ] + pipeline_descriptions.should.have.length_of(1) + pipeline_description = pipeline_descriptions[0] + fields = pipeline_description["fields"] + + get_value_from_fields("@pipelineState", fields).should.equal("SCHEDULED") + + +@mock_datapipeline_deprecated +def test_delete_pipeline(): + conn = boto.datapipeline.connect_to_region("us-west-2") + res = conn.create_pipeline("mypipeline", "some-unique-id") + pipeline_id = res["pipelineId"] + + conn.delete_pipeline(pipeline_id) + + response = conn.list_pipelines() + + response["pipelineIdList"].should.have.length_of(0) + + +@mock_datapipeline_deprecated +def test_listing_pipelines(): + conn = boto.datapipeline.connect_to_region("us-west-2") + res1 = conn.create_pipeline("mypipeline1", "some-unique-id1") + res2 = conn.create_pipeline("mypipeline2", "some-unique-id2") + + response = conn.list_pipelines() + + response["hasMoreResults"].should.be(False) + response["marker"].should.be.none + response["pipelineIdList"].should.have.length_of(2) + response["pipelineIdList"].should.contain( + {"id": res1["pipelineId"], "name": "mypipeline1"} + ) + response["pipelineIdList"].should.contain( + {"id": res2["pipelineId"], "name": "mypipeline2"} + ) + + +@mock_datapipeline_deprecated +def test_listing_paginated_pipelines(): + conn = boto.datapipeline.connect_to_region("us-west-2") + for i in range(100): + conn.create_pipeline("mypipeline%d" % i, "some-unique-id%d" % i) + + response = conn.list_pipelines() + + response["hasMoreResults"].should.be(True) + response["marker"].should.equal(response["pipelineIdList"][-1]["id"]) + response["pipelineIdList"].should.have.length_of(50) + + +# testing a helper function +def test_remove_capitalization_of_dict_keys(): + result = remove_capitalization_of_dict_keys( + { + "Id": "IdValue", + "Fields": [{"Key": "KeyValue", "StringValue": "StringValueValue"}], + } + ) + + 
result.should.equal( + { + "id": "IdValue", + "fields": [{"key": "KeyValue", "stringValue": "StringValueValue"}], + } + ) diff --git a/tests/test_dynamodb/test_dynamodb_table_with_range_key.py b/tests/test_dynamodb/test_dynamodb_table_with_range_key.py index 40301025f..6986ae9b3 100644 --- a/tests/test_dynamodb/test_dynamodb_table_with_range_key.py +++ b/tests/test_dynamodb/test_dynamodb_table_with_range_key.py @@ -1,470 +1,470 @@ -from __future__ import unicode_literals - -import boto -import sure # noqa -from freezegun import freeze_time - -from moto import mock_dynamodb_deprecated - -from boto.dynamodb import condition -from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError, DynamoDBValidationError -from boto.exception import DynamoDBResponseError - - -def create_table(conn): - message_table_schema = conn.create_schema( - hash_key_name="forum_name", - hash_key_proto_value=str, - range_key_name="subject", - range_key_proto_value=str, - ) - - table = conn.create_table( - name="messages", schema=message_table_schema, read_units=10, write_units=10 - ) - return table - - -@freeze_time("2012-01-14") -@mock_dynamodb_deprecated -def test_create_table(): - conn = boto.connect_dynamodb() - create_table(conn) - - expected = { - "Table": { - "CreationDateTime": 1326499200.0, - "ItemCount": 0, - "KeySchema": { - "HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"}, - "RangeKeyElement": {"AttributeName": "subject", "AttributeType": "S"}, - }, - "ProvisionedThroughput": { - "ReadCapacityUnits": 10, - "WriteCapacityUnits": 10, - }, - "TableName": "messages", - "TableSizeBytes": 0, - "TableStatus": "ACTIVE", - } - } - conn.describe_table("messages").should.equal(expected) - - -@mock_dynamodb_deprecated -def test_delete_table(): - conn = boto.connect_dynamodb() - create_table(conn) - conn.list_tables().should.have.length_of(1) - - conn.layer1.delete_table("messages") - conn.list_tables().should.have.length_of(0) - - 
conn.layer1.delete_table.when.called_with("messages").should.throw( - DynamoDBResponseError - ) - - -@mock_dynamodb_deprecated -def test_update_table_throughput(): - conn = boto.connect_dynamodb() - table = create_table(conn) - table.read_units.should.equal(10) - table.write_units.should.equal(10) - - table.update_throughput(5, 6) - table.refresh() - - table.read_units.should.equal(5) - table.write_units.should.equal(6) - - -@mock_dynamodb_deprecated -def test_item_add_and_describe_and_update(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item( - hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data - ) - item.put() - - table.has_item("LOLCat Forum", "Check this out!").should.equal(True) - - returned_item = table.get_item( - hash_key="LOLCat Forum", - range_key="Check this out!", - attributes_to_get=["Body", "SentBy"], - ) - dict(returned_item).should.equal( - { - "forum_name": "LOLCat Forum", - "subject": "Check this out!", - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - } - ) - - item["SentBy"] = "User B" - item.put() - - returned_item = table.get_item( - hash_key="LOLCat Forum", - range_key="Check this out!", - attributes_to_get=["Body", "SentBy"], - ) - dict(returned_item).should.equal( - { - "forum_name": "LOLCat Forum", - "subject": "Check this out!", - "Body": "http://url_to_lolcat.gif", - "SentBy": "User B", - } - ) - - -@mock_dynamodb_deprecated -def test_item_put_without_table(): - conn = boto.connect_dynamodb() - - conn.layer1.put_item.when.called_with( - table_name="undeclared-table", - item=dict(hash_key="LOLCat Forum", range_key="Check this out!"), - ).should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_get_missing_item(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - table.get_item.when.called_with(hash_key="tester", 
range_key="other").should.throw( - DynamoDBKeyNotFoundError - ) - table.has_item("foobar", "more").should.equal(False) - - -@mock_dynamodb_deprecated -def test_get_item_with_undeclared_table(): - conn = boto.connect_dynamodb() - - conn.layer1.get_item.when.called_with( - table_name="undeclared-table", - key={"HashKeyElement": {"S": "tester"}, "RangeKeyElement": {"S": "test-range"}}, - ).should.throw(DynamoDBKeyNotFoundError) - - -@mock_dynamodb_deprecated -def test_get_item_without_range_key(): - conn = boto.connect_dynamodb() - message_table_schema = conn.create_schema( - hash_key_name="test_hash", - hash_key_proto_value=int, - range_key_name="test_range", - range_key_proto_value=int, - ) - table = conn.create_table( - name="messages", schema=message_table_schema, read_units=10, write_units=10 - ) - - hash_key = 3241526475 - range_key = 1234567890987 - new_item = table.new_item(hash_key=hash_key, range_key=range_key) - new_item.put() - - table.get_item.when.called_with(hash_key=hash_key).should.throw( - DynamoDBValidationError - ) - - -@mock_dynamodb_deprecated -def test_delete_item(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item( - hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data - ) - item.put() - - table.refresh() - table.item_count.should.equal(1) - - response = item.delete() - response.should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5}) - table.refresh() - table.item_count.should.equal(0) - - item.delete.when.called_with().should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_delete_item_with_attribute_response(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item( - hash_key="LOLCat 
Forum", range_key="Check this out!", attrs=item_data - ) - item.put() - - table.refresh() - table.item_count.should.equal(1) - - response = item.delete(return_values="ALL_OLD") - response.should.equal( - { - "Attributes": { - "Body": "http://url_to_lolcat.gif", - "forum_name": "LOLCat Forum", - "ReceivedTime": "12/9/2011 11:36:03 PM", - "SentBy": "User A", - "subject": "Check this out!", - }, - "ConsumedCapacityUnits": 0.5, - } - ) - table.refresh() - table.item_count.should.equal(0) - - item.delete.when.called_with().should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_delete_item_with_undeclared_table(): - conn = boto.connect_dynamodb() - - conn.layer1.delete_item.when.called_with( - table_name="undeclared-table", - key={"HashKeyElement": {"S": "tester"}, "RangeKeyElement": {"S": "test-range"}}, - ).should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_query(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data) - item.put() - - item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data) - item.put() - - item = table.new_item(hash_key="the-key", range_key="789", attrs=item_data) - item.put() - - results = table.query(hash_key="the-key", range_key_condition=condition.GT("1")) - results.response["Items"].should.have.length_of(3) - - results = table.query(hash_key="the-key", range_key_condition=condition.GT("234")) - results.response["Items"].should.have.length_of(2) - - results = table.query(hash_key="the-key", range_key_condition=condition.GT("9999")) - results.response["Items"].should.have.length_of(0) - - results = table.query( - hash_key="the-key", range_key_condition=condition.CONTAINS("12") - ) - results.response["Items"].should.have.length_of(1) - - results = table.query( - 
hash_key="the-key", range_key_condition=condition.BEGINS_WITH("7") - ) - results.response["Items"].should.have.length_of(1) - - results = table.query( - hash_key="the-key", range_key_condition=condition.BETWEEN("567", "890") - ) - results.response["Items"].should.have.length_of(1) - - -@mock_dynamodb_deprecated -def test_query_with_undeclared_table(): - conn = boto.connect_dynamodb() - - conn.layer1.query.when.called_with( - table_name="undeclared-table", - hash_key_value={"S": "the-key"}, - range_key_conditions={ - "AttributeValueList": [{"S": "User B"}], - "ComparisonOperator": "EQ", - }, - ).should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_scan(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data) - item.put() - - item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data) - item.put() - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User B", - "ReceivedTime": "12/9/2011 11:36:03 PM", - "Ids": set([1, 2, 3]), - "PK": 7, - } - item = table.new_item(hash_key="the-key", range_key="789", attrs=item_data) - item.put() - - results = table.scan() - results.response["Items"].should.have.length_of(3) - - results = table.scan(scan_filter={"SentBy": condition.EQ("User B")}) - results.response["Items"].should.have.length_of(1) - - results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")}) - results.response["Items"].should.have.length_of(3) - - results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)}) - results.response["Items"].should.have.length_of(1) - - results = table.scan(scan_filter={"Ids": condition.NOT_NULL()}) - results.response["Items"].should.have.length_of(1) - - results = table.scan(scan_filter={"Ids": condition.NULL()}) - 
results.response["Items"].should.have.length_of(2) - - results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)}) - results.response["Items"].should.have.length_of(0) - - results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)}) - results.response["Items"].should.have.length_of(1) - - -@mock_dynamodb_deprecated -def test_scan_with_undeclared_table(): - conn = boto.connect_dynamodb() - - conn.layer1.scan.when.called_with( - table_name="undeclared-table", - scan_filter={ - "SentBy": { - "AttributeValueList": [{"S": "User B"}], - "ComparisonOperator": "EQ", - } - }, - ).should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_scan_after_has_item(): - conn = boto.connect_dynamodb() - table = create_table(conn) - list(table.scan()).should.equal([]) - - table.has_item(hash_key="the-key", range_key="123") - - list(table.scan()).should.equal([]) - - -@mock_dynamodb_deprecated -def test_write_batch(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - batch_list = conn.new_batch_write_list() - - items = [] - items.append( - table.new_item( - hash_key="the-key", - range_key="123", - attrs={ - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - }, - ) - ) - - items.append( - table.new_item( - hash_key="the-key", - range_key="789", - attrs={ - "Body": "http://url_to_lolcat.gif", - "SentBy": "User B", - "ReceivedTime": "12/9/2011 11:36:03 PM", - "Ids": set([1, 2, 3]), - "PK": 7, - }, - ) - ) - - batch_list.add_batch(table, puts=items) - conn.batch_write_item(batch_list) - - table.refresh() - table.item_count.should.equal(2) - - batch_list = conn.new_batch_write_list() - batch_list.add_batch(table, deletes=[("the-key", "789")]) - conn.batch_write_item(batch_list) - - table.refresh() - table.item_count.should.equal(1) - - -@mock_dynamodb_deprecated -def test_batch_read(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": 
"http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data) - item.put() - - item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data) - item.put() - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User B", - "ReceivedTime": "12/9/2011 11:36:03 PM", - "Ids": set([1, 2, 3]), - "PK": 7, - } - item = table.new_item(hash_key="another-key", range_key="789", attrs=item_data) - item.put() - - items = table.batch_get_item([("the-key", "123"), ("another-key", "789")]) - # Iterate through so that batch_item gets called - count = len([x for x in items]) - count.should.equal(2) +from __future__ import unicode_literals + +import boto +import sure # noqa +from freezegun import freeze_time + +from moto import mock_dynamodb_deprecated + +from boto.dynamodb import condition +from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError, DynamoDBValidationError +from boto.exception import DynamoDBResponseError + + +def create_table(conn): + message_table_schema = conn.create_schema( + hash_key_name="forum_name", + hash_key_proto_value=str, + range_key_name="subject", + range_key_proto_value=str, + ) + + table = conn.create_table( + name="messages", schema=message_table_schema, read_units=10, write_units=10 + ) + return table + + +@freeze_time("2012-01-14") +@mock_dynamodb_deprecated +def test_create_table(): + conn = boto.connect_dynamodb() + create_table(conn) + + expected = { + "Table": { + "CreationDateTime": 1326499200.0, + "ItemCount": 0, + "KeySchema": { + "HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"}, + "RangeKeyElement": {"AttributeName": "subject", "AttributeType": "S"}, + }, + "ProvisionedThroughput": { + "ReadCapacityUnits": 10, + "WriteCapacityUnits": 10, + }, + "TableName": "messages", + "TableSizeBytes": 0, + "TableStatus": "ACTIVE", + } + } + 
conn.describe_table("messages").should.equal(expected) + + +@mock_dynamodb_deprecated +def test_delete_table(): + conn = boto.connect_dynamodb() + create_table(conn) + conn.list_tables().should.have.length_of(1) + + conn.layer1.delete_table("messages") + conn.list_tables().should.have.length_of(0) + + conn.layer1.delete_table.when.called_with("messages").should.throw( + DynamoDBResponseError + ) + + +@mock_dynamodb_deprecated +def test_update_table_throughput(): + conn = boto.connect_dynamodb() + table = create_table(conn) + table.read_units.should.equal(10) + table.write_units.should.equal(10) + + table.update_throughput(5, 6) + table.refresh() + + table.read_units.should.equal(5) + table.write_units.should.equal(6) + + +@mock_dynamodb_deprecated +def test_item_add_and_describe_and_update(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item( + hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data + ) + item.put() + + table.has_item("LOLCat Forum", "Check this out!").should.equal(True) + + returned_item = table.get_item( + hash_key="LOLCat Forum", + range_key="Check this out!", + attributes_to_get=["Body", "SentBy"], + ) + dict(returned_item).should.equal( + { + "forum_name": "LOLCat Forum", + "subject": "Check this out!", + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + } + ) + + item["SentBy"] = "User B" + item.put() + + returned_item = table.get_item( + hash_key="LOLCat Forum", + range_key="Check this out!", + attributes_to_get=["Body", "SentBy"], + ) + dict(returned_item).should.equal( + { + "forum_name": "LOLCat Forum", + "subject": "Check this out!", + "Body": "http://url_to_lolcat.gif", + "SentBy": "User B", + } + ) + + +@mock_dynamodb_deprecated +def test_item_put_without_table(): + conn = boto.connect_dynamodb() + + conn.layer1.put_item.when.called_with( + 
table_name="undeclared-table", + item=dict(hash_key="LOLCat Forum", range_key="Check this out!"), + ).should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_get_missing_item(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + table.get_item.when.called_with(hash_key="tester", range_key="other").should.throw( + DynamoDBKeyNotFoundError + ) + table.has_item("foobar", "more").should.equal(False) + + +@mock_dynamodb_deprecated +def test_get_item_with_undeclared_table(): + conn = boto.connect_dynamodb() + + conn.layer1.get_item.when.called_with( + table_name="undeclared-table", + key={"HashKeyElement": {"S": "tester"}, "RangeKeyElement": {"S": "test-range"}}, + ).should.throw(DynamoDBKeyNotFoundError) + + +@mock_dynamodb_deprecated +def test_get_item_without_range_key(): + conn = boto.connect_dynamodb() + message_table_schema = conn.create_schema( + hash_key_name="test_hash", + hash_key_proto_value=int, + range_key_name="test_range", + range_key_proto_value=int, + ) + table = conn.create_table( + name="messages", schema=message_table_schema, read_units=10, write_units=10 + ) + + hash_key = 3241526475 + range_key = 1234567890987 + new_item = table.new_item(hash_key=hash_key, range_key=range_key) + new_item.put() + + table.get_item.when.called_with(hash_key=hash_key).should.throw( + DynamoDBValidationError + ) + + +@mock_dynamodb_deprecated +def test_delete_item(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item( + hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data + ) + item.put() + + table.refresh() + table.item_count.should.equal(1) + + response = item.delete() + response.should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5}) + table.refresh() + table.item_count.should.equal(0) + + 
item.delete.when.called_with().should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_delete_item_with_attribute_response(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item( + hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data + ) + item.put() + + table.refresh() + table.item_count.should.equal(1) + + response = item.delete(return_values="ALL_OLD") + response.should.equal( + { + "Attributes": { + "Body": "http://url_to_lolcat.gif", + "forum_name": "LOLCat Forum", + "ReceivedTime": "12/9/2011 11:36:03 PM", + "SentBy": "User A", + "subject": "Check this out!", + }, + "ConsumedCapacityUnits": 0.5, + } + ) + table.refresh() + table.item_count.should.equal(0) + + item.delete.when.called_with().should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_delete_item_with_undeclared_table(): + conn = boto.connect_dynamodb() + + conn.layer1.delete_item.when.called_with( + table_name="undeclared-table", + key={"HashKeyElement": {"S": "tester"}, "RangeKeyElement": {"S": "test-range"}}, + ).should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_query(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data) + item.put() + + item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data) + item.put() + + item = table.new_item(hash_key="the-key", range_key="789", attrs=item_data) + item.put() + + results = table.query(hash_key="the-key", range_key_condition=condition.GT("1")) + results.response["Items"].should.have.length_of(3) + + results = table.query(hash_key="the-key", range_key_condition=condition.GT("234")) + 
results.response["Items"].should.have.length_of(2) + + results = table.query(hash_key="the-key", range_key_condition=condition.GT("9999")) + results.response["Items"].should.have.length_of(0) + + results = table.query( + hash_key="the-key", range_key_condition=condition.CONTAINS("12") + ) + results.response["Items"].should.have.length_of(1) + + results = table.query( + hash_key="the-key", range_key_condition=condition.BEGINS_WITH("7") + ) + results.response["Items"].should.have.length_of(1) + + results = table.query( + hash_key="the-key", range_key_condition=condition.BETWEEN("567", "890") + ) + results.response["Items"].should.have.length_of(1) + + +@mock_dynamodb_deprecated +def test_query_with_undeclared_table(): + conn = boto.connect_dynamodb() + + conn.layer1.query.when.called_with( + table_name="undeclared-table", + hash_key_value={"S": "the-key"}, + range_key_conditions={ + "AttributeValueList": [{"S": "User B"}], + "ComparisonOperator": "EQ", + }, + ).should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_scan(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data) + item.put() + + item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data) + item.put() + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User B", + "ReceivedTime": "12/9/2011 11:36:03 PM", + "Ids": set([1, 2, 3]), + "PK": 7, + } + item = table.new_item(hash_key="the-key", range_key="789", attrs=item_data) + item.put() + + results = table.scan() + results.response["Items"].should.have.length_of(3) + + results = table.scan(scan_filter={"SentBy": condition.EQ("User B")}) + results.response["Items"].should.have.length_of(1) + + results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")}) + 
results.response["Items"].should.have.length_of(3) + + results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)}) + results.response["Items"].should.have.length_of(1) + + results = table.scan(scan_filter={"Ids": condition.NOT_NULL()}) + results.response["Items"].should.have.length_of(1) + + results = table.scan(scan_filter={"Ids": condition.NULL()}) + results.response["Items"].should.have.length_of(2) + + results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)}) + results.response["Items"].should.have.length_of(0) + + results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)}) + results.response["Items"].should.have.length_of(1) + + +@mock_dynamodb_deprecated +def test_scan_with_undeclared_table(): + conn = boto.connect_dynamodb() + + conn.layer1.scan.when.called_with( + table_name="undeclared-table", + scan_filter={ + "SentBy": { + "AttributeValueList": [{"S": "User B"}], + "ComparisonOperator": "EQ", + } + }, + ).should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_scan_after_has_item(): + conn = boto.connect_dynamodb() + table = create_table(conn) + list(table.scan()).should.equal([]) + + table.has_item(hash_key="the-key", range_key="123") + + list(table.scan()).should.equal([]) + + +@mock_dynamodb_deprecated +def test_write_batch(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + batch_list = conn.new_batch_write_list() + + items = [] + items.append( + table.new_item( + hash_key="the-key", + range_key="123", + attrs={ + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + }, + ) + ) + + items.append( + table.new_item( + hash_key="the-key", + range_key="789", + attrs={ + "Body": "http://url_to_lolcat.gif", + "SentBy": "User B", + "ReceivedTime": "12/9/2011 11:36:03 PM", + "Ids": set([1, 2, 3]), + "PK": 7, + }, + ) + ) + + batch_list.add_batch(table, puts=items) + conn.batch_write_item(batch_list) + + table.refresh() + table.item_count.should.equal(2) + 
+ batch_list = conn.new_batch_write_list() + batch_list.add_batch(table, deletes=[("the-key", "789")]) + conn.batch_write_item(batch_list) + + table.refresh() + table.item_count.should.equal(1) + + +@mock_dynamodb_deprecated +def test_batch_read(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data) + item.put() + + item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data) + item.put() + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User B", + "ReceivedTime": "12/9/2011 11:36:03 PM", + "Ids": set([1, 2, 3]), + "PK": 7, + } + item = table.new_item(hash_key="another-key", range_key="789", attrs=item_data) + item.put() + + items = table.batch_get_item([("the-key", "123"), ("another-key", "789")]) + # Iterate through so that batch_item gets called + count = len([x for x in items]) + count.should.equal(2) diff --git a/tests/test_dynamodb/test_dynamodb_table_without_range_key.py b/tests/test_dynamodb/test_dynamodb_table_without_range_key.py index e5a268c97..c5031b5d1 100644 --- a/tests/test_dynamodb/test_dynamodb_table_without_range_key.py +++ b/tests/test_dynamodb/test_dynamodb_table_without_range_key.py @@ -1,390 +1,390 @@ -from __future__ import unicode_literals - -import boto -import sure # noqa -from freezegun import freeze_time - -from moto import mock_dynamodb_deprecated - -from boto.dynamodb import condition -from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError -from boto.exception import DynamoDBResponseError - - -def create_table(conn): - message_table_schema = conn.create_schema( - hash_key_name="forum_name", hash_key_proto_value=str - ) - - table = conn.create_table( - name="messages", schema=message_table_schema, read_units=10, write_units=10 - ) - return table - - -@freeze_time("2012-01-14") 
-@mock_dynamodb_deprecated -def test_create_table(): - conn = boto.connect_dynamodb() - create_table(conn) - - expected = { - "Table": { - "CreationDateTime": 1326499200.0, - "ItemCount": 0, - "KeySchema": { - "HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"} - }, - "ProvisionedThroughput": { - "ReadCapacityUnits": 10, - "WriteCapacityUnits": 10, - }, - "TableName": "messages", - "TableSizeBytes": 0, - "TableStatus": "ACTIVE", - } - } - conn.describe_table("messages").should.equal(expected) - - -@mock_dynamodb_deprecated -def test_delete_table(): - conn = boto.connect_dynamodb() - create_table(conn) - conn.list_tables().should.have.length_of(1) - - conn.layer1.delete_table("messages") - conn.list_tables().should.have.length_of(0) - - conn.layer1.delete_table.when.called_with("messages").should.throw( - DynamoDBResponseError - ) - - -@mock_dynamodb_deprecated -def test_update_table_throughput(): - conn = boto.connect_dynamodb() - table = create_table(conn) - table.read_units.should.equal(10) - table.write_units.should.equal(10) - - table.update_throughput(5, 6) - table.refresh() - - table.read_units.should.equal(5) - table.write_units.should.equal(6) - - -@mock_dynamodb_deprecated -def test_item_add_and_describe_and_update(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="LOLCat Forum", attrs=item_data) - item.put() - - returned_item = table.get_item( - hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"] - ) - dict(returned_item).should.equal( - { - "forum_name": "LOLCat Forum", - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - } - ) - - item["SentBy"] = "User B" - item.put() - - returned_item = table.get_item( - hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"] - ) - dict(returned_item).should.equal( - { - "forum_name": "LOLCat Forum", - 
"Body": "http://url_to_lolcat.gif", - "SentBy": "User B", - } - ) - - -@mock_dynamodb_deprecated -def test_item_put_without_table(): - conn = boto.connect_dynamodb() - - conn.layer1.put_item.when.called_with( - table_name="undeclared-table", item=dict(hash_key="LOLCat Forum") - ).should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_get_missing_item(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - table.get_item.when.called_with(hash_key="tester").should.throw( - DynamoDBKeyNotFoundError - ) - - -@mock_dynamodb_deprecated -def test_get_item_with_undeclared_table(): - conn = boto.connect_dynamodb() - - conn.layer1.get_item.when.called_with( - table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}} - ).should.throw(DynamoDBKeyNotFoundError) - - -@mock_dynamodb_deprecated -def test_delete_item(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="LOLCat Forum", attrs=item_data) - item.put() - - table.refresh() - table.item_count.should.equal(1) - - response = item.delete() - response.should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5}) - table.refresh() - table.item_count.should.equal(0) - - item.delete.when.called_with().should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_delete_item_with_attribute_response(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="LOLCat Forum", attrs=item_data) - item.put() - - table.refresh() - table.item_count.should.equal(1) - - response = item.delete(return_values="ALL_OLD") - response.should.equal( - { - "Attributes": { - "Body": "http://url_to_lolcat.gif", - "forum_name": "LOLCat Forum", - "ReceivedTime": 
"12/9/2011 11:36:03 PM", - "SentBy": "User A", - }, - "ConsumedCapacityUnits": 0.5, - } - ) - table.refresh() - table.item_count.should.equal(0) - - item.delete.when.called_with().should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_delete_item_with_undeclared_table(): - conn = boto.connect_dynamodb() - - conn.layer1.delete_item.when.called_with( - table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}} - ).should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_query(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="the-key", attrs=item_data) - item.put() - - results = table.query(hash_key="the-key") - results.response["Items"].should.have.length_of(1) - - -@mock_dynamodb_deprecated -def test_query_with_undeclared_table(): - conn = boto.connect_dynamodb() - - conn.layer1.query.when.called_with( - table_name="undeclared-table", hash_key_value={"S": "the-key"} - ).should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_scan(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="the-key", attrs=item_data) - item.put() - - item = table.new_item(hash_key="the-key2", attrs=item_data) - item.put() - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User B", - "ReceivedTime": "12/9/2011 11:36:03 PM", - "Ids": set([1, 2, 3]), - "PK": 7, - } - item = table.new_item(hash_key="the-key3", attrs=item_data) - item.put() - - results = table.scan() - results.response["Items"].should.have.length_of(3) - - results = table.scan(scan_filter={"SentBy": condition.EQ("User B")}) - results.response["Items"].should.have.length_of(1) - - results = 
table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")}) - results.response["Items"].should.have.length_of(3) - - results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)}) - results.response["Items"].should.have.length_of(1) - - results = table.scan(scan_filter={"Ids": condition.NOT_NULL()}) - results.response["Items"].should.have.length_of(1) - - results = table.scan(scan_filter={"Ids": condition.NULL()}) - results.response["Items"].should.have.length_of(2) - - results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)}) - results.response["Items"].should.have.length_of(0) - - results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)}) - results.response["Items"].should.have.length_of(1) - - -@mock_dynamodb_deprecated -def test_scan_with_undeclared_table(): - conn = boto.connect_dynamodb() - - conn.layer1.scan.when.called_with( - table_name="undeclared-table", - scan_filter={ - "SentBy": { - "AttributeValueList": [{"S": "User B"}], - "ComparisonOperator": "EQ", - } - }, - ).should.throw(DynamoDBResponseError) - - -@mock_dynamodb_deprecated -def test_scan_after_has_item(): - conn = boto.connect_dynamodb() - table = create_table(conn) - list(table.scan()).should.equal([]) - - table.has_item("the-key") - - list(table.scan()).should.equal([]) - - -@mock_dynamodb_deprecated -def test_write_batch(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - batch_list = conn.new_batch_write_list() - - items = [] - items.append( - table.new_item( - hash_key="the-key", - attrs={ - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - }, - ) - ) - - items.append( - table.new_item( - hash_key="the-key2", - attrs={ - "Body": "http://url_to_lolcat.gif", - "SentBy": "User B", - "ReceivedTime": "12/9/2011 11:36:03 PM", - "Ids": set([1, 2, 3]), - "PK": 7, - }, - ) - ) - - batch_list.add_batch(table, puts=items) - conn.batch_write_item(batch_list) - - table.refresh() - 
table.item_count.should.equal(2) - - batch_list = conn.new_batch_write_list() - batch_list.add_batch(table, deletes=[("the-key")]) - conn.batch_write_item(batch_list) - - table.refresh() - table.item_count.should.equal(1) - - -@mock_dynamodb_deprecated -def test_batch_read(): - conn = boto.connect_dynamodb() - table = create_table(conn) - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User A", - "ReceivedTime": "12/9/2011 11:36:03 PM", - } - item = table.new_item(hash_key="the-key1", attrs=item_data) - item.put() - - item = table.new_item(hash_key="the-key2", attrs=item_data) - item.put() - - item_data = { - "Body": "http://url_to_lolcat.gif", - "SentBy": "User B", - "ReceivedTime": "12/9/2011 11:36:03 PM", - "Ids": set([1, 2, 3]), - "PK": 7, - } - item = table.new_item(hash_key="another-key", attrs=item_data) - item.put() - - items = table.batch_get_item([("the-key1"), ("another-key")]) - # Iterate through so that batch_item gets called - count = len([x for x in items]) - count.should.have.equal(2) +from __future__ import unicode_literals + +import boto +import sure # noqa +from freezegun import freeze_time + +from moto import mock_dynamodb_deprecated + +from boto.dynamodb import condition +from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError +from boto.exception import DynamoDBResponseError + + +def create_table(conn): + message_table_schema = conn.create_schema( + hash_key_name="forum_name", hash_key_proto_value=str + ) + + table = conn.create_table( + name="messages", schema=message_table_schema, read_units=10, write_units=10 + ) + return table + + +@freeze_time("2012-01-14") +@mock_dynamodb_deprecated +def test_create_table(): + conn = boto.connect_dynamodb() + create_table(conn) + + expected = { + "Table": { + "CreationDateTime": 1326499200.0, + "ItemCount": 0, + "KeySchema": { + "HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"} + }, + "ProvisionedThroughput": { + "ReadCapacityUnits": 10, + 
"WriteCapacityUnits": 10, + }, + "TableName": "messages", + "TableSizeBytes": 0, + "TableStatus": "ACTIVE", + } + } + conn.describe_table("messages").should.equal(expected) + + +@mock_dynamodb_deprecated +def test_delete_table(): + conn = boto.connect_dynamodb() + create_table(conn) + conn.list_tables().should.have.length_of(1) + + conn.layer1.delete_table("messages") + conn.list_tables().should.have.length_of(0) + + conn.layer1.delete_table.when.called_with("messages").should.throw( + DynamoDBResponseError + ) + + +@mock_dynamodb_deprecated +def test_update_table_throughput(): + conn = boto.connect_dynamodb() + table = create_table(conn) + table.read_units.should.equal(10) + table.write_units.should.equal(10) + + table.update_throughput(5, 6) + table.refresh() + + table.read_units.should.equal(5) + table.write_units.should.equal(6) + + +@mock_dynamodb_deprecated +def test_item_add_and_describe_and_update(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item(hash_key="LOLCat Forum", attrs=item_data) + item.put() + + returned_item = table.get_item( + hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"] + ) + dict(returned_item).should.equal( + { + "forum_name": "LOLCat Forum", + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + } + ) + + item["SentBy"] = "User B" + item.put() + + returned_item = table.get_item( + hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"] + ) + dict(returned_item).should.equal( + { + "forum_name": "LOLCat Forum", + "Body": "http://url_to_lolcat.gif", + "SentBy": "User B", + } + ) + + +@mock_dynamodb_deprecated +def test_item_put_without_table(): + conn = boto.connect_dynamodb() + + conn.layer1.put_item.when.called_with( + table_name="undeclared-table", item=dict(hash_key="LOLCat Forum") + ).should.throw(DynamoDBResponseError) + + 
+@mock_dynamodb_deprecated +def test_get_missing_item(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + table.get_item.when.called_with(hash_key="tester").should.throw( + DynamoDBKeyNotFoundError + ) + + +@mock_dynamodb_deprecated +def test_get_item_with_undeclared_table(): + conn = boto.connect_dynamodb() + + conn.layer1.get_item.when.called_with( + table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}} + ).should.throw(DynamoDBKeyNotFoundError) + + +@mock_dynamodb_deprecated +def test_delete_item(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item(hash_key="LOLCat Forum", attrs=item_data) + item.put() + + table.refresh() + table.item_count.should.equal(1) + + response = item.delete() + response.should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5}) + table.refresh() + table.item_count.should.equal(0) + + item.delete.when.called_with().should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_delete_item_with_attribute_response(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item(hash_key="LOLCat Forum", attrs=item_data) + item.put() + + table.refresh() + table.item_count.should.equal(1) + + response = item.delete(return_values="ALL_OLD") + response.should.equal( + { + "Attributes": { + "Body": "http://url_to_lolcat.gif", + "forum_name": "LOLCat Forum", + "ReceivedTime": "12/9/2011 11:36:03 PM", + "SentBy": "User A", + }, + "ConsumedCapacityUnits": 0.5, + } + ) + table.refresh() + table.item_count.should.equal(0) + + item.delete.when.called_with().should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_delete_item_with_undeclared_table(): + conn = 
boto.connect_dynamodb() + + conn.layer1.delete_item.when.called_with( + table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}} + ).should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_query(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item(hash_key="the-key", attrs=item_data) + item.put() + + results = table.query(hash_key="the-key") + results.response["Items"].should.have.length_of(1) + + +@mock_dynamodb_deprecated +def test_query_with_undeclared_table(): + conn = boto.connect_dynamodb() + + conn.layer1.query.when.called_with( + table_name="undeclared-table", hash_key_value={"S": "the-key"} + ).should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_scan(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item(hash_key="the-key", attrs=item_data) + item.put() + + item = table.new_item(hash_key="the-key2", attrs=item_data) + item.put() + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User B", + "ReceivedTime": "12/9/2011 11:36:03 PM", + "Ids": set([1, 2, 3]), + "PK": 7, + } + item = table.new_item(hash_key="the-key3", attrs=item_data) + item.put() + + results = table.scan() + results.response["Items"].should.have.length_of(3) + + results = table.scan(scan_filter={"SentBy": condition.EQ("User B")}) + results.response["Items"].should.have.length_of(1) + + results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")}) + results.response["Items"].should.have.length_of(3) + + results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)}) + results.response["Items"].should.have.length_of(1) + + results = table.scan(scan_filter={"Ids": condition.NOT_NULL()}) + 
results.response["Items"].should.have.length_of(1) + + results = table.scan(scan_filter={"Ids": condition.NULL()}) + results.response["Items"].should.have.length_of(2) + + results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)}) + results.response["Items"].should.have.length_of(0) + + results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)}) + results.response["Items"].should.have.length_of(1) + + +@mock_dynamodb_deprecated +def test_scan_with_undeclared_table(): + conn = boto.connect_dynamodb() + + conn.layer1.scan.when.called_with( + table_name="undeclared-table", + scan_filter={ + "SentBy": { + "AttributeValueList": [{"S": "User B"}], + "ComparisonOperator": "EQ", + } + }, + ).should.throw(DynamoDBResponseError) + + +@mock_dynamodb_deprecated +def test_scan_after_has_item(): + conn = boto.connect_dynamodb() + table = create_table(conn) + list(table.scan()).should.equal([]) + + table.has_item("the-key") + + list(table.scan()).should.equal([]) + + +@mock_dynamodb_deprecated +def test_write_batch(): + conn = boto.connect_dynamodb() + table = create_table(conn) + + batch_list = conn.new_batch_write_list() + + items = [] + items.append( + table.new_item( + hash_key="the-key", + attrs={ + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + }, + ) + ) + + items.append( + table.new_item( + hash_key="the-key2", + attrs={ + "Body": "http://url_to_lolcat.gif", + "SentBy": "User B", + "ReceivedTime": "12/9/2011 11:36:03 PM", + "Ids": set([1, 2, 3]), + "PK": 7, + }, + ) + ) + + batch_list.add_batch(table, puts=items) + conn.batch_write_item(batch_list) + + table.refresh() + table.item_count.should.equal(2) + + batch_list = conn.new_batch_write_list() + batch_list.add_batch(table, deletes=[("the-key")]) + conn.batch_write_item(batch_list) + + table.refresh() + table.item_count.should.equal(1) + + +@mock_dynamodb_deprecated +def test_batch_read(): + conn = boto.connect_dynamodb() + table = create_table(conn) + 
+ item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User A", + "ReceivedTime": "12/9/2011 11:36:03 PM", + } + item = table.new_item(hash_key="the-key1", attrs=item_data) + item.put() + + item = table.new_item(hash_key="the-key2", attrs=item_data) + item.put() + + item_data = { + "Body": "http://url_to_lolcat.gif", + "SentBy": "User B", + "ReceivedTime": "12/9/2011 11:36:03 PM", + "Ids": set([1, 2, 3]), + "PK": 7, + } + item = table.new_item(hash_key="another-key", attrs=item_data) + item.put() + + items = table.batch_get_item([("the-key1"), ("another-key")]) + # Iterate through so that batch_item gets called + count = len([x for x in items]) + count.should.have.equal(2) diff --git a/tests/test_ec2/test_account_attributes.py b/tests/test_ec2/test_account_attributes.py index a3135f22e..41c71def5 100644 --- a/tests/test_ec2/test_account_attributes.py +++ b/tests/test_ec2/test_account_attributes.py @@ -1,37 +1,37 @@ -from __future__ import unicode_literals -import boto3 -from moto import mock_ec2 -import sure # noqa - - -@mock_ec2 -def test_describe_account_attributes(): - conn = boto3.client("ec2", region_name="us-east-1") - response = conn.describe_account_attributes() - expected_attribute_values = [ - { - "AttributeValues": [{"AttributeValue": "5"}], - "AttributeName": "vpc-max-security-groups-per-interface", - }, - { - "AttributeValues": [{"AttributeValue": "20"}], - "AttributeName": "max-instances", - }, - { - "AttributeValues": [{"AttributeValue": "EC2"}, {"AttributeValue": "VPC"}], - "AttributeName": "supported-platforms", - }, - { - "AttributeValues": [{"AttributeValue": "none"}], - "AttributeName": "default-vpc", - }, - { - "AttributeValues": [{"AttributeValue": "5"}], - "AttributeName": "max-elastic-ips", - }, - { - "AttributeValues": [{"AttributeValue": "5"}], - "AttributeName": "vpc-max-elastic-ips", - }, - ] - response["AccountAttributes"].should.equal(expected_attribute_values) +from __future__ import unicode_literals +import boto3 +from 
moto import mock_ec2 +import sure # noqa + + +@mock_ec2 +def test_describe_account_attributes(): + conn = boto3.client("ec2", region_name="us-east-1") + response = conn.describe_account_attributes() + expected_attribute_values = [ + { + "AttributeValues": [{"AttributeValue": "5"}], + "AttributeName": "vpc-max-security-groups-per-interface", + }, + { + "AttributeValues": [{"AttributeValue": "20"}], + "AttributeName": "max-instances", + }, + { + "AttributeValues": [{"AttributeValue": "EC2"}, {"AttributeValue": "VPC"}], + "AttributeName": "supported-platforms", + }, + { + "AttributeValues": [{"AttributeValue": "none"}], + "AttributeName": "default-vpc", + }, + { + "AttributeValues": [{"AttributeValue": "5"}], + "AttributeName": "max-elastic-ips", + }, + { + "AttributeValues": [{"AttributeValue": "5"}], + "AttributeName": "vpc-max-elastic-ips", + }, + ] + response["AccountAttributes"].should.equal(expected_attribute_values) diff --git a/tests/test_ec2/test_amazon_dev_pay.py b/tests/test_ec2/test_amazon_dev_pay.py index 38e1eb751..1dd9cc74e 100644 --- a/tests/test_ec2/test_amazon_dev_pay.py +++ b/tests/test_ec2/test_amazon_dev_pay.py @@ -1,10 +1,10 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2 - - -@mock_ec2 -def test_amazon_dev_pay(): - pass +from __future__ import unicode_literals +import boto +import sure # noqa + +from moto import mock_ec2 + + +@mock_ec2 +def test_amazon_dev_pay(): + pass diff --git a/tests/test_ec2/test_ec2_core.py b/tests/test_ec2/test_ec2_core.py index baffc4882..78b780d97 100644 --- a/tests/test_ec2/test_ec2_core.py +++ b/tests/test_ec2/test_ec2_core.py @@ -1 +1 @@ -from __future__ import unicode_literals +from __future__ import unicode_literals diff --git a/tests/test_ec2/test_ip_addresses.py b/tests/test_ec2/test_ip_addresses.py index a8e927b00..60cf1cfc6 100644 --- a/tests/test_ec2/test_ip_addresses.py +++ b/tests/test_ec2/test_ip_addresses.py @@ -1,10 +1,10 @@ -from __future__ 
import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2 - - -@mock_ec2 -def test_ip_addresses(): - pass +from __future__ import unicode_literals +import boto +import sure # noqa + +from moto import mock_ec2 + + +@mock_ec2 +def test_ip_addresses(): + pass diff --git a/tests/test_ec2/test_monitoring.py b/tests/test_ec2/test_monitoring.py index 03be93adf..95bd36e6a 100644 --- a/tests/test_ec2/test_monitoring.py +++ b/tests/test_ec2/test_monitoring.py @@ -1,10 +1,10 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2 - - -@mock_ec2 -def test_monitoring(): - pass +from __future__ import unicode_literals +import boto +import sure # noqa + +from moto import mock_ec2 + + +@mock_ec2 +def test_monitoring(): + pass diff --git a/tests/test_ec2/test_placement_groups.py b/tests/test_ec2/test_placement_groups.py index c7494228a..bc389488b 100644 --- a/tests/test_ec2/test_placement_groups.py +++ b/tests/test_ec2/test_placement_groups.py @@ -1,10 +1,10 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2 - - -@mock_ec2 -def test_placement_groups(): - pass +from __future__ import unicode_literals +import boto +import sure # noqa + +from moto import mock_ec2 + + +@mock_ec2 +def test_placement_groups(): + pass diff --git a/tests/test_ec2/test_reserved_instances.py b/tests/test_ec2/test_reserved_instances.py index 437d3a95b..47456bc03 100644 --- a/tests/test_ec2/test_reserved_instances.py +++ b/tests/test_ec2/test_reserved_instances.py @@ -1,10 +1,10 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2 - - -@mock_ec2 -def test_reserved_instances(): - pass +from __future__ import unicode_literals +import boto +import sure # noqa + +from moto import mock_ec2 + + +@mock_ec2 +def test_reserved_instances(): + pass diff --git a/tests/test_ec2/test_virtual_private_gateways.py 
b/tests/test_ec2/test_virtual_private_gateways.py index f778ac3e5..bb944df0b 100644 --- a/tests/test_ec2/test_virtual_private_gateways.py +++ b/tests/test_ec2/test_virtual_private_gateways.py @@ -1,96 +1,96 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2_deprecated - - -@mock_ec2_deprecated -def test_virtual_private_gateways(): - conn = boto.connect_vpc("the_key", "the_secret") - - vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") - vpn_gateway.should_not.be.none - vpn_gateway.id.should.match(r"vgw-\w+") - vpn_gateway.type.should.equal("ipsec.1") - vpn_gateway.state.should.equal("available") - vpn_gateway.availability_zone.should.equal("us-east-1a") - - -@mock_ec2_deprecated -def test_describe_vpn_gateway(): - conn = boto.connect_vpc("the_key", "the_secret") - vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") - - vgws = conn.get_all_vpn_gateways() - vgws.should.have.length_of(1) - - gateway = vgws[0] - gateway.id.should.match(r"vgw-\w+") - gateway.id.should.equal(vpn_gateway.id) - vpn_gateway.type.should.equal("ipsec.1") - vpn_gateway.state.should.equal("available") - vpn_gateway.availability_zone.should.equal("us-east-1a") - - -@mock_ec2_deprecated -def test_vpn_gateway_vpc_attachment(): - conn = boto.connect_vpc("the_key", "the_secret") - vpc = conn.create_vpc("10.0.0.0/16") - vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") - - conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id) - - gateway = conn.get_all_vpn_gateways()[0] - attachments = gateway.attachments - attachments.should.have.length_of(1) - attachments[0].vpc_id.should.equal(vpc.id) - attachments[0].state.should.equal("attached") - - -@mock_ec2_deprecated -def test_delete_vpn_gateway(): - conn = boto.connect_vpc("the_key", "the_secret") - vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") - - conn.delete_vpn_gateway(vpn_gateway.id) - vgws = conn.get_all_vpn_gateways() - 
vgws.should.have.length_of(0) - - -@mock_ec2_deprecated -def test_vpn_gateway_tagging(): - conn = boto.connect_vpc("the_key", "the_secret") - vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") - vpn_gateway.add_tag("a key", "some value") - - tag = conn.get_all_tags()[0] - tag.name.should.equal("a key") - tag.value.should.equal("some value") - - # Refresh the subnet - vpn_gateway = conn.get_all_vpn_gateways()[0] - vpn_gateway.tags.should.have.length_of(1) - vpn_gateway.tags["a key"].should.equal("some value") - - -@mock_ec2_deprecated -def test_detach_vpn_gateway(): - - conn = boto.connect_vpc("the_key", "the_secret") - vpc = conn.create_vpc("10.0.0.0/16") - vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") - - conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id) - - gateway = conn.get_all_vpn_gateways()[0] - attachments = gateway.attachments - attachments.should.have.length_of(1) - attachments[0].vpc_id.should.equal(vpc.id) - attachments[0].state.should.equal("attached") - - conn.detach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id) - - gateway = conn.get_all_vpn_gateways()[0] - attachments = gateway.attachments - attachments.should.have.length_of(0) +from __future__ import unicode_literals +import boto +import sure # noqa + +from moto import mock_ec2_deprecated + + +@mock_ec2_deprecated +def test_virtual_private_gateways(): + conn = boto.connect_vpc("the_key", "the_secret") + + vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") + vpn_gateway.should_not.be.none + vpn_gateway.id.should.match(r"vgw-\w+") + vpn_gateway.type.should.equal("ipsec.1") + vpn_gateway.state.should.equal("available") + vpn_gateway.availability_zone.should.equal("us-east-1a") + + +@mock_ec2_deprecated +def test_describe_vpn_gateway(): + conn = boto.connect_vpc("the_key", "the_secret") + vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") + + vgws = conn.get_all_vpn_gateways() + vgws.should.have.length_of(1) + + gateway 
= vgws[0] + gateway.id.should.match(r"vgw-\w+") + gateway.id.should.equal(vpn_gateway.id) + vpn_gateway.type.should.equal("ipsec.1") + vpn_gateway.state.should.equal("available") + vpn_gateway.availability_zone.should.equal("us-east-1a") + + +@mock_ec2_deprecated +def test_vpn_gateway_vpc_attachment(): + conn = boto.connect_vpc("the_key", "the_secret") + vpc = conn.create_vpc("10.0.0.0/16") + vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") + + conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id) + + gateway = conn.get_all_vpn_gateways()[0] + attachments = gateway.attachments + attachments.should.have.length_of(1) + attachments[0].vpc_id.should.equal(vpc.id) + attachments[0].state.should.equal("attached") + + +@mock_ec2_deprecated +def test_delete_vpn_gateway(): + conn = boto.connect_vpc("the_key", "the_secret") + vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") + + conn.delete_vpn_gateway(vpn_gateway.id) + vgws = conn.get_all_vpn_gateways() + vgws.should.have.length_of(0) + + +@mock_ec2_deprecated +def test_vpn_gateway_tagging(): + conn = boto.connect_vpc("the_key", "the_secret") + vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") + vpn_gateway.add_tag("a key", "some value") + + tag = conn.get_all_tags()[0] + tag.name.should.equal("a key") + tag.value.should.equal("some value") + + # Refresh the subnet + vpn_gateway = conn.get_all_vpn_gateways()[0] + vpn_gateway.tags.should.have.length_of(1) + vpn_gateway.tags["a key"].should.equal("some value") + + +@mock_ec2_deprecated +def test_detach_vpn_gateway(): + + conn = boto.connect_vpc("the_key", "the_secret") + vpc = conn.create_vpc("10.0.0.0/16") + vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") + + conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id) + + gateway = conn.get_all_vpn_gateways()[0] + attachments = gateway.attachments + attachments.should.have.length_of(1) + attachments[0].vpc_id.should.equal(vpc.id) + 
attachments[0].state.should.equal("attached") + + conn.detach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id) + + gateway = conn.get_all_vpn_gateways()[0] + attachments = gateway.attachments + attachments.should.have.length_of(0) diff --git a/tests/test_ec2/test_vm_export.py b/tests/test_ec2/test_vm_export.py index f8b24f6d4..08215d067 100644 --- a/tests/test_ec2/test_vm_export.py +++ b/tests/test_ec2/test_vm_export.py @@ -1,10 +1,10 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2 - - -@mock_ec2 -def test_vm_export(): - pass +from __future__ import unicode_literals +import boto +import sure # noqa + +from moto import mock_ec2 + + +@mock_ec2 +def test_vm_export(): + pass diff --git a/tests/test_ec2/test_vm_import.py b/tests/test_ec2/test_vm_import.py index 66c7561a7..0ebfaaa0c 100644 --- a/tests/test_ec2/test_vm_import.py +++ b/tests/test_ec2/test_vm_import.py @@ -1,10 +1,10 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2 - - -@mock_ec2 -def test_vm_import(): - pass +from __future__ import unicode_literals +import boto +import sure # noqa + +from moto import mock_ec2 + + +@mock_ec2 +def test_vm_import(): + pass diff --git a/tests/test_ec2/test_windows.py b/tests/test_ec2/test_windows.py index 364ac2f8a..ae2f7b29a 100644 --- a/tests/test_ec2/test_windows.py +++ b/tests/test_ec2/test_windows.py @@ -1,10 +1,10 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2 - - -@mock_ec2 -def test_windows(): - pass +from __future__ import unicode_literals +import boto +import sure # noqa + +from moto import mock_ec2 + + +@mock_ec2 +def test_windows(): + pass diff --git a/tests/test_emr/test_emr_boto3.py b/tests/test_emr/test_emr_boto3.py index 212444abf..d849247bd 100644 --- a/tests/test_emr/test_emr_boto3.py +++ b/tests/test_emr/test_emr_boto3.py @@ -752,7 +752,9 @@ def test_steps(): # StateChangeReason 
x["Status"]["Timeline"]["CreationDateTime"].should.be.a("datetime.datetime") # x['Status']['Timeline']['EndDateTime'].should.be.a('datetime.datetime') - # x['Status']['Timeline']['StartDateTime'].should.be.a('datetime.datetime') + # Only the first step will have started - we don't know anything about when it finishes, so the second step never starts + if x["Name"] == "My wordcount example": + x["Status"]["Timeline"]["StartDateTime"].should.be.a("datetime.datetime") x = client.describe_step(ClusterId=cluster_id, StepId=x["Id"])["Step"] x["ActionOnFailure"].should.equal("TERMINATE_CLUSTER") diff --git a/tests/test_events/test_events.py b/tests/test_events/test_events.py index 14d872806..80fadb449 100644 --- a/tests/test_events/test_events.py +++ b/tests/test_events/test_events.py @@ -1,11 +1,14 @@ -import random -import boto3 -import json -import sure # noqa - +from moto.events.models import EventsBackend from moto.events import mock_events +import json +import random +import unittest + +import boto3 from botocore.exceptions import ClientError +from moto.core.exceptions import JsonRESTError from nose.tools import assert_raises + from moto.core import ACCOUNT_ID RULES = [ @@ -136,14 +139,6 @@ def test_list_rule_names_by_target(): assert rule in test_2_target["Rules"] -@mock_events -def test_list_rules(): - client = generate_environment() - - rules = client.list_rules() - assert len(rules["Rules"]) == len(RULES) - - @mock_events def test_delete_rule(): client = generate_environment() @@ -461,3 +456,50 @@ def test_delete_event_bus_errors(): client.delete_event_bus.when.called_with(Name="default").should.throw( ClientError, "Cannot delete event bus default." 
) + + +@mock_events +def test_rule_tagging_happy(): + client = generate_environment() + rule_name = get_random_rule()["Name"] + rule_arn = client.describe_rule(Name=rule_name).get("Arn") + + tags = [{"Key": "key1", "Value": "value1"}, {"Key": "key2", "Value": "value2"}] + client.tag_resource(ResourceARN=rule_arn, Tags=tags) + + actual = client.list_tags_for_resource(ResourceARN=rule_arn).get("Tags") + tc = unittest.TestCase("__init__") + expected = [{"Value": "value1", "Key": "key1"}, {"Value": "value2", "Key": "key2"}] + tc.assertTrue( + (expected[0] == actual[0] and expected[1] == actual[1]) + or (expected[1] == actual[0] and expected[0] == actual[1]) + ) + + client.untag_resource(ResourceARN=rule_arn, TagKeys=["key1"]) + + actual = client.list_tags_for_resource(ResourceARN=rule_arn).get("Tags") + expected = [{"Key": "key2", "Value": "value2"}] + assert expected == actual + + +@mock_events +def test_rule_tagging_sad(): + back_end = EventsBackend("us-west-2") + + try: + back_end.tag_resource("unknown", []) + raise "tag_resource should fail if ResourceARN is not known" + except JsonRESTError: + pass + + try: + back_end.untag_resource("unknown", []) + raise "untag_resource should fail if ResourceARN is not known" + except JsonRESTError: + pass + + try: + back_end.list_tags_for_resource("unknown") + raise "list_tags_for_resource should fail if ResourceARN is not known" + except JsonRESTError: + pass diff --git a/tests/test_glacier/test_glacier_archives.py b/tests/test_glacier/test_glacier_archives.py index e8fa6045e..ec43e613c 100644 --- a/tests/test_glacier/test_glacier_archives.py +++ b/tests/test_glacier/test_glacier_archives.py @@ -1,21 +1,21 @@ -from __future__ import unicode_literals - -from tempfile import NamedTemporaryFile -import boto.glacier -import sure # noqa - -from moto import mock_glacier_deprecated - - -@mock_glacier_deprecated -def test_create_and_delete_archive(): - the_file = NamedTemporaryFile(delete=False) - the_file.write(b"some stuff") - 
the_file.close() - - conn = boto.glacier.connect_to_region("us-west-2") - vault = conn.create_vault("my_vault") - - archive_id = vault.upload_archive(the_file.name) - - vault.delete_archive(archive_id) +from __future__ import unicode_literals + +from tempfile import NamedTemporaryFile +import boto.glacier +import sure # noqa + +from moto import mock_glacier_deprecated + + +@mock_glacier_deprecated +def test_create_and_delete_archive(): + the_file = NamedTemporaryFile(delete=False) + the_file.write(b"some stuff") + the_file.close() + + conn = boto.glacier.connect_to_region("us-west-2") + vault = conn.create_vault("my_vault") + + archive_id = vault.upload_archive(the_file.name) + + vault.delete_archive(archive_id) diff --git a/tests/test_glacier/test_glacier_vaults.py b/tests/test_glacier/test_glacier_vaults.py index e64f40a90..93c79423e 100644 --- a/tests/test_glacier/test_glacier_vaults.py +++ b/tests/test_glacier/test_glacier_vaults.py @@ -1,31 +1,31 @@ -from __future__ import unicode_literals - -import boto.glacier -import sure # noqa - -from moto import mock_glacier_deprecated - - -@mock_glacier_deprecated -def test_create_vault(): - conn = boto.glacier.connect_to_region("us-west-2") - - conn.create_vault("my_vault") - - vaults = conn.list_vaults() - vaults.should.have.length_of(1) - vaults[0].name.should.equal("my_vault") - - -@mock_glacier_deprecated -def test_delete_vault(): - conn = boto.glacier.connect_to_region("us-west-2") - - conn.create_vault("my_vault") - - vaults = conn.list_vaults() - vaults.should.have.length_of(1) - - conn.delete_vault("my_vault") - vaults = conn.list_vaults() - vaults.should.have.length_of(0) +from __future__ import unicode_literals + +import boto.glacier +import sure # noqa + +from moto import mock_glacier_deprecated + + +@mock_glacier_deprecated +def test_create_vault(): + conn = boto.glacier.connect_to_region("us-west-2") + + conn.create_vault("my_vault") + + vaults = conn.list_vaults() + vaults.should.have.length_of(1) + 
vaults[0].name.should.equal("my_vault") + + +@mock_glacier_deprecated +def test_delete_vault(): + conn = boto.glacier.connect_to_region("us-west-2") + + conn.create_vault("my_vault") + + vaults = conn.list_vaults() + vaults.should.have.length_of(1) + + conn.delete_vault("my_vault") + vaults = conn.list_vaults() + vaults.should.have.length_of(0) diff --git a/tests/test_glue/__init__.py b/tests/test_glue/__init__.py index baffc4882..78b780d97 100644 --- a/tests/test_glue/__init__.py +++ b/tests/test_glue/__init__.py @@ -1 +1 @@ -from __future__ import unicode_literals +from __future__ import unicode_literals diff --git a/tests/test_glue/fixtures/__init__.py b/tests/test_glue/fixtures/__init__.py index baffc4882..78b780d97 100644 --- a/tests/test_glue/fixtures/__init__.py +++ b/tests/test_glue/fixtures/__init__.py @@ -1 +1 @@ -from __future__ import unicode_literals +from __future__ import unicode_literals diff --git a/tests/test_glue/helpers.py b/tests/test_glue/helpers.py index 130a879bc..9003a1358 100644 --- a/tests/test_glue/helpers.py +++ b/tests/test_glue/helpers.py @@ -1,97 +1,97 @@ -from __future__ import unicode_literals - -import copy - -from .fixtures.datacatalog import TABLE_INPUT, PARTITION_INPUT - - -def create_database(client, database_name): - return client.create_database(DatabaseInput={"Name": database_name}) - - -def get_database(client, database_name): - return client.get_database(Name=database_name) - - -def create_table_input(database_name, table_name, columns=[], partition_keys=[]): - table_input = copy.deepcopy(TABLE_INPUT) - table_input["Name"] = table_name - table_input["PartitionKeys"] = partition_keys - table_input["StorageDescriptor"]["Columns"] = columns - table_input["StorageDescriptor"][ - "Location" - ] = "s3://my-bucket/{database_name}/{table_name}".format( - database_name=database_name, table_name=table_name - ) - return table_input - - -def create_table(client, database_name, table_name, table_input=None, **kwargs): - if table_input 
is None: - table_input = create_table_input(database_name, table_name, **kwargs) - - return client.create_table(DatabaseName=database_name, TableInput=table_input) - - -def update_table(client, database_name, table_name, table_input=None, **kwargs): - if table_input is None: - table_input = create_table_input(database_name, table_name, **kwargs) - - return client.update_table(DatabaseName=database_name, TableInput=table_input) - - -def get_table(client, database_name, table_name): - return client.get_table(DatabaseName=database_name, Name=table_name) - - -def get_tables(client, database_name): - return client.get_tables(DatabaseName=database_name) - - -def get_table_versions(client, database_name, table_name): - return client.get_table_versions(DatabaseName=database_name, TableName=table_name) - - -def get_table_version(client, database_name, table_name, version_id): - return client.get_table_version( - DatabaseName=database_name, TableName=table_name, VersionId=version_id - ) - - -def create_partition_input(database_name, table_name, values=[], columns=[]): - root_path = "s3://my-bucket/{database_name}/{table_name}".format( - database_name=database_name, table_name=table_name - ) - - part_input = copy.deepcopy(PARTITION_INPUT) - part_input["Values"] = values - part_input["StorageDescriptor"]["Columns"] = columns - part_input["StorageDescriptor"]["SerdeInfo"]["Parameters"]["path"] = root_path - return part_input - - -def create_partition(client, database_name, table_name, partiton_input=None, **kwargs): - if partiton_input is None: - partiton_input = create_partition_input(database_name, table_name, **kwargs) - return client.create_partition( - DatabaseName=database_name, TableName=table_name, PartitionInput=partiton_input - ) - - -def update_partition( - client, database_name, table_name, old_values=[], partiton_input=None, **kwargs -): - if partiton_input is None: - partiton_input = create_partition_input(database_name, table_name, **kwargs) - return 
client.update_partition( - DatabaseName=database_name, - TableName=table_name, - PartitionInput=partiton_input, - PartitionValueList=old_values, - ) - - -def get_partition(client, database_name, table_name, values): - return client.get_partition( - DatabaseName=database_name, TableName=table_name, PartitionValues=values - ) +from __future__ import unicode_literals + +import copy + +from .fixtures.datacatalog import TABLE_INPUT, PARTITION_INPUT + + +def create_database(client, database_name): + return client.create_database(DatabaseInput={"Name": database_name}) + + +def get_database(client, database_name): + return client.get_database(Name=database_name) + + +def create_table_input(database_name, table_name, columns=[], partition_keys=[]): + table_input = copy.deepcopy(TABLE_INPUT) + table_input["Name"] = table_name + table_input["PartitionKeys"] = partition_keys + table_input["StorageDescriptor"]["Columns"] = columns + table_input["StorageDescriptor"][ + "Location" + ] = "s3://my-bucket/{database_name}/{table_name}".format( + database_name=database_name, table_name=table_name + ) + return table_input + + +def create_table(client, database_name, table_name, table_input=None, **kwargs): + if table_input is None: + table_input = create_table_input(database_name, table_name, **kwargs) + + return client.create_table(DatabaseName=database_name, TableInput=table_input) + + +def update_table(client, database_name, table_name, table_input=None, **kwargs): + if table_input is None: + table_input = create_table_input(database_name, table_name, **kwargs) + + return client.update_table(DatabaseName=database_name, TableInput=table_input) + + +def get_table(client, database_name, table_name): + return client.get_table(DatabaseName=database_name, Name=table_name) + + +def get_tables(client, database_name): + return client.get_tables(DatabaseName=database_name) + + +def get_table_versions(client, database_name, table_name): + return 
client.get_table_versions(DatabaseName=database_name, TableName=table_name) + + +def get_table_version(client, database_name, table_name, version_id): + return client.get_table_version( + DatabaseName=database_name, TableName=table_name, VersionId=version_id + ) + + +def create_partition_input(database_name, table_name, values=[], columns=[]): + root_path = "s3://my-bucket/{database_name}/{table_name}".format( + database_name=database_name, table_name=table_name + ) + + part_input = copy.deepcopy(PARTITION_INPUT) + part_input["Values"] = values + part_input["StorageDescriptor"]["Columns"] = columns + part_input["StorageDescriptor"]["SerdeInfo"]["Parameters"]["path"] = root_path + return part_input + + +def create_partition(client, database_name, table_name, partiton_input=None, **kwargs): + if partiton_input is None: + partiton_input = create_partition_input(database_name, table_name, **kwargs) + return client.create_partition( + DatabaseName=database_name, TableName=table_name, PartitionInput=partiton_input + ) + + +def update_partition( + client, database_name, table_name, old_values=[], partiton_input=None, **kwargs +): + if partiton_input is None: + partiton_input = create_partition_input(database_name, table_name, **kwargs) + return client.update_partition( + DatabaseName=database_name, + TableName=table_name, + PartitionInput=partiton_input, + PartitionValueList=old_values, + ) + + +def get_partition(client, database_name, table_name, values): + return client.get_partition( + DatabaseName=database_name, TableName=table_name, PartitionValues=values + ) diff --git a/tests/test_iot/test_iot.py b/tests/test_iot/test_iot.py index 0c0623a6f..f8c4f579c 100644 --- a/tests/test_iot/test_iot.py +++ b/tests/test_iot/test_iot.py @@ -9,6 +9,173 @@ from botocore.exceptions import ClientError from nose.tools import assert_raises +@mock_iot +def test_attach_policy(): + client = boto3.client("iot", region_name="ap-northeast-1") + policy_name = "my-policy" + doc = "{}" + + 
cert = client.create_keys_and_certificate(setAsActive=True) + cert_arn = cert["certificateArn"] + client.create_policy(policyName=policy_name, policyDocument=doc) + client.attach_policy(policyName=policy_name, target=cert_arn) + + res = client.list_attached_policies(target=cert_arn) + res.should.have.key("policies").which.should.have.length_of(1) + res["policies"][0]["policyName"].should.equal("my-policy") + + +@mock_iot +def test_detach_policy(): + client = boto3.client("iot", region_name="ap-northeast-1") + policy_name = "my-policy" + doc = "{}" + + cert = client.create_keys_and_certificate(setAsActive=True) + cert_arn = cert["certificateArn"] + client.create_policy(policyName=policy_name, policyDocument=doc) + client.attach_policy(policyName=policy_name, target=cert_arn) + + res = client.list_attached_policies(target=cert_arn) + res.should.have.key("policies").which.should.have.length_of(1) + res["policies"][0]["policyName"].should.equal("my-policy") + + client.detach_policy(policyName=policy_name, target=cert_arn) + res = client.list_attached_policies(target=cert_arn) + res.should.have.key("policies").which.should.be.empty + + +@mock_iot +def test_list_attached_policies(): + client = boto3.client("iot", region_name="ap-northeast-1") + cert = client.create_keys_and_certificate(setAsActive=True) + policies = client.list_attached_policies(target=cert["certificateArn"]) + policies["policies"].should.be.empty + + +@mock_iot +def test_policy_versions(): + client = boto3.client("iot", region_name="ap-northeast-1") + policy_name = "my-policy" + doc = "{}" + + policy = client.create_policy(policyName=policy_name, policyDocument=doc) + policy.should.have.key("policyName").which.should.equal(policy_name) + policy.should.have.key("policyArn").which.should_not.be.none + policy.should.have.key("policyDocument").which.should.equal(json.dumps({})) + policy.should.have.key("policyVersionId").which.should.equal("1") + + policy = client.get_policy(policyName=policy_name) + 
policy.should.have.key("policyName").which.should.equal(policy_name) + policy.should.have.key("policyArn").which.should_not.be.none + policy.should.have.key("policyDocument").which.should.equal(json.dumps({})) + policy.should.have.key("defaultVersionId").which.should.equal( + policy["defaultVersionId"] + ) + + policy1 = client.create_policy_version( + policyName=policy_name, + policyDocument=json.dumps({"version": "version_1"}), + setAsDefault=True, + ) + policy1.should.have.key("policyArn").which.should_not.be.none + policy1.should.have.key("policyDocument").which.should.equal( + json.dumps({"version": "version_1"}) + ) + policy1.should.have.key("policyVersionId").which.should.equal("2") + policy1.should.have.key("isDefaultVersion").which.should.equal(True) + + policy2 = client.create_policy_version( + policyName=policy_name, + policyDocument=json.dumps({"version": "version_2"}), + setAsDefault=False, + ) + policy2.should.have.key("policyArn").which.should_not.be.none + policy2.should.have.key("policyDocument").which.should.equal( + json.dumps({"version": "version_2"}) + ) + policy2.should.have.key("policyVersionId").which.should.equal("3") + policy2.should.have.key("isDefaultVersion").which.should.equal(False) + + policy = client.get_policy(policyName=policy_name) + policy.should.have.key("policyName").which.should.equal(policy_name) + policy.should.have.key("policyArn").which.should_not.be.none + policy.should.have.key("policyDocument").which.should.equal( + json.dumps({"version": "version_1"}) + ) + policy.should.have.key("defaultVersionId").which.should.equal( + policy1["policyVersionId"] + ) + + policy_versions = client.list_policy_versions(policyName=policy_name) + policy_versions.should.have.key("policyVersions").which.should.have.length_of(3) + list( + map(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"]) + ).count(True).should.equal(1) + default_policy = list( + filter(lambda item: item["isDefaultVersion"], 
policy_versions["policyVersions"]) + ) + default_policy[0].should.have.key("versionId").should.equal( + policy1["policyVersionId"] + ) + + policy = client.get_policy(policyName=policy_name) + policy.should.have.key("policyName").which.should.equal(policy_name) + policy.should.have.key("policyArn").which.should_not.be.none + policy.should.have.key("policyDocument").which.should.equal( + json.dumps({"version": "version_1"}) + ) + policy.should.have.key("defaultVersionId").which.should.equal( + policy1["policyVersionId"] + ) + + client.set_default_policy_version( + policyName=policy_name, policyVersionId=policy2["policyVersionId"] + ) + policy_versions = client.list_policy_versions(policyName=policy_name) + policy_versions.should.have.key("policyVersions").which.should.have.length_of(3) + list( + map(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"]) + ).count(True).should.equal(1) + default_policy = list( + filter(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"]) + ) + default_policy[0].should.have.key("versionId").should.equal( + policy2["policyVersionId"] + ) + + policy = client.get_policy(policyName=policy_name) + policy.should.have.key("policyName").which.should.equal(policy_name) + policy.should.have.key("policyArn").which.should_not.be.none + policy.should.have.key("policyDocument").which.should.equal( + json.dumps({"version": "version_2"}) + ) + policy.should.have.key("defaultVersionId").which.should.equal( + policy2["policyVersionId"] + ) + + client.delete_policy_version(policyName=policy_name, policyVersionId="1") + policy_versions = client.list_policy_versions(policyName=policy_name) + policy_versions.should.have.key("policyVersions").which.should.have.length_of(2) + + client.delete_policy_version( + policyName=policy_name, policyVersionId=policy1["policyVersionId"] + ) + policy_versions = client.list_policy_versions(policyName=policy_name) + 
policy_versions.should.have.key("policyVersions").which.should.have.length_of(1) + + # should fail as it"s the default policy. Should use delete_policy instead + try: + client.delete_policy_version( + policyName=policy_name, policyVersionId=policy2["policyVersionId"] + ) + assert False, "Should have failed in previous call" + except Exception as exception: + exception.response["Error"]["Message"].should.equal( + "Cannot delete the default version of a policy" + ) + + @mock_iot def test_things(): client = boto3.client("iot", region_name="ap-northeast-1") @@ -994,7 +1161,10 @@ def test_create_job(): client = boto3.client("iot", region_name="eu-west-1") name = "my-thing" job_id = "TestJob" - # thing + # thing# job document + # job_document = { + # "field": "value" + # } thing = client.create_thing(thingName=name) thing.should.have.key("thingName").which.should.equal(name) thing.should.have.key("thingArn") @@ -1020,6 +1190,63 @@ def test_create_job(): job.should.have.key("description") +@mock_iot +def test_list_jobs(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing# job document + # job_document = { + # "field": "value" + # } + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + # job document + job_document = {"field": "value"} + + job1 = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + document=json.dumps(job_document), + description="Description", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job1.should.have.key("jobId").which.should.equal(job_id) + job1.should.have.key("jobArn") + job1.should.have.key("description") + + job2 = client.create_job( + jobId=job_id + "1", + targets=[thing["thingArn"]], + document=json.dumps(job_document), + 
description="Description", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job2.should.have.key("jobId").which.should.equal(job_id + "1") + job2.should.have.key("jobArn") + job2.should.have.key("description") + + jobs = client.list_jobs() + jobs.should.have.key("jobs") + jobs.should_not.have.key("nextToken") + jobs["jobs"][0].should.have.key("jobId").which.should.equal(job_id) + jobs["jobs"][1].should.have.key("jobId").which.should.equal(job_id + "1") + + @mock_iot def test_describe_job(): client = boto3.client("iot", region_name="eu-west-1") @@ -1124,3 +1351,387 @@ def test_describe_job_1(): job.should.have.key("job").which.should.have.key( "jobExecutionsRolloutConfig" ).which.should.have.key("maximumPerMinute").which.should.equal(10) + + +@mock_iot +def test_delete_job(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + + job = client.describe_job(jobId=job_id) + job.should.have.key("job") + job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id) + + client.delete_job(jobId=job_id) + + client.list_jobs()["jobs"].should.have.length_of(0) + + +@mock_iot +def test_cancel_job(): + client = boto3.client("iot", region_name="eu-west-1") 
+ name = "my-thing" + job_id = "TestJob" + # thing + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + + job = client.describe_job(jobId=job_id) + job.should.have.key("job") + job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id) + + job = client.cancel_job(jobId=job_id, reasonCode="Because", comment="You are") + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + + job = client.describe_job(jobId=job_id) + job.should.have.key("job") + job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("job").which.should.have.key("status").which.should.equal( + "CANCELED" + ) + job.should.have.key("job").which.should.have.key( + "forceCanceled" + ).which.should.equal(False) + job.should.have.key("job").which.should.have.key("reasonCode").which.should.equal( + "Because" + ) + job.should.have.key("job").which.should.have.key("comment").which.should.equal( + "You are" + ) + + +@mock_iot +def test_get_job_document_with_document_source(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + 
documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + + job_document = client.get_job_document(jobId=job_id) + job_document.should.have.key("document").which.should.equal("") + + +@mock_iot +def test_get_job_document_with_document(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + # job document + job_document = {"field": "value"} + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + document=json.dumps(job_document), + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + + job_document = client.get_job_document(jobId=job_id) + job_document.should.have.key("document").which.should.equal('{"field": "value"}') + + +@mock_iot +def test_describe_job_execution(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + # job document + job_document = {"field": "value"} + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + document=json.dumps(job_document), + description="Description", + presignedUrlConfig={ + "roleArn": 
"arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + job.should.have.key("description") + + job_execution = client.describe_job_execution(jobId=job_id, thingName=name) + job_execution.should.have.key("execution") + job_execution["execution"].should.have.key("jobId").which.should.equal(job_id) + job_execution["execution"].should.have.key("status").which.should.equal("QUEUED") + job_execution["execution"].should.have.key("forceCanceled").which.should.equal( + False + ) + job_execution["execution"].should.have.key("statusDetails").which.should.equal( + {"detailsMap": {}} + ) + job_execution["execution"].should.have.key("thingArn").which.should.equal( + thing["thingArn"] + ) + job_execution["execution"].should.have.key("queuedAt") + job_execution["execution"].should.have.key("startedAt") + job_execution["execution"].should.have.key("lastUpdatedAt") + job_execution["execution"].should.have.key("executionNumber").which.should.equal( + 123 + ) + job_execution["execution"].should.have.key("versionNumber").which.should.equal(123) + job_execution["execution"].should.have.key( + "approximateSecondsBeforeTimedOut" + ).which.should.equal(123) + + job_execution = client.describe_job_execution( + jobId=job_id, thingName=name, executionNumber=123 + ) + job_execution.should.have.key("execution") + job_execution["execution"].should.have.key("jobId").which.should.equal(job_id) + job_execution["execution"].should.have.key("status").which.should.equal("QUEUED") + job_execution["execution"].should.have.key("forceCanceled").which.should.equal( + False + ) + job_execution["execution"].should.have.key("statusDetails").which.should.equal( + {"detailsMap": {}} + ) + job_execution["execution"].should.have.key("thingArn").which.should.equal( + thing["thingArn"] + ) + 
job_execution["execution"].should.have.key("queuedAt") + job_execution["execution"].should.have.key("startedAt") + job_execution["execution"].should.have.key("lastUpdatedAt") + job_execution["execution"].should.have.key("executionNumber").which.should.equal( + 123 + ) + job_execution["execution"].should.have.key("versionNumber").which.should.equal(123) + job_execution["execution"].should.have.key( + "approximateSecondsBeforeTimedOut" + ).which.should.equal(123) + + try: + client.describe_job_execution(jobId=job_id, thingName=name, executionNumber=456) + except ClientError as exc: + error_code = exc.response["Error"]["Code"] + error_code.should.equal("ResourceNotFoundException") + else: + raise Exception("Should have raised error") + + +@mock_iot +def test_cancel_job_execution(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + # job document + job_document = {"field": "value"} + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + document=json.dumps(job_document), + description="Description", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + job.should.have.key("description") + + client.cancel_job_execution(jobId=job_id, thingName=name) + job_execution = client.describe_job_execution(jobId=job_id, thingName=name) + job_execution.should.have.key("execution") + job_execution["execution"].should.have.key("status").which.should.equal("CANCELED") + + +@mock_iot +def test_delete_job_execution(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing + 
thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + # job document + job_document = {"field": "value"} + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + document=json.dumps(job_document), + description="Description", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + job.should.have.key("description") + + client.delete_job_execution(jobId=job_id, thingName=name, executionNumber=123) + try: + client.describe_job_execution(jobId=job_id, thingName=name, executionNumber=123) + except ClientError as exc: + error_code = exc.response["Error"]["Code"] + error_code.should.equal("ResourceNotFoundException") + else: + raise Exception("Should have raised error") + + +@mock_iot +def test_list_job_executions_for_job(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + # job document + job_document = {"field": "value"} + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + document=json.dumps(job_document), + description="Description", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + job.should.have.key("description") + + job_execution = client.list_job_executions_for_job(jobId=job_id) + job_execution.should.have.key("executionSummaries") + 
job_execution["executionSummaries"][0].should.have.key( + "thingArn" + ).which.should.equal(thing["thingArn"]) + + +@mock_iot +def test_list_job_executions_for_thing(): + client = boto3.client("iot", region_name="eu-west-1") + name = "my-thing" + job_id = "TestJob" + # thing + thing = client.create_thing(thingName=name) + thing.should.have.key("thingName").which.should.equal(name) + thing.should.have.key("thingArn") + + # job document + job_document = {"field": "value"} + + job = client.create_job( + jobId=job_id, + targets=[thing["thingArn"]], + document=json.dumps(job_document), + description="Description", + presignedUrlConfig={ + "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role", + "expiresInSec": 123, + }, + targetSelection="CONTINUOUS", + jobExecutionsRolloutConfig={"maximumPerMinute": 10}, + ) + + job.should.have.key("jobId").which.should.equal(job_id) + job.should.have.key("jobArn") + job.should.have.key("description") + + job_execution = client.list_job_executions_for_thing(thingName=name) + job_execution.should.have.key("executionSummaries") + job_execution["executionSummaries"][0].should.have.key("jobId").which.should.equal( + job_id + ) diff --git a/tests/test_kms/test_kms.py b/tests/test_kms/test_kms.py index 9ce324373..a04a24a82 100644 --- a/tests/test_kms/test_kms.py +++ b/tests/test_kms/test_kms.py @@ -4,15 +4,17 @@ import base64 import re import boto.kms +import boto3 import six import sure # noqa from boto.exception import JSONResponseError from boto.kms.exceptions import AlreadyExistsException, NotFoundException from nose.tools import assert_raises from parameterized import parameterized - +from moto.core.exceptions import JsonRESTError +from moto.kms.models import KmsBackend from moto.kms.exceptions import NotFoundException as MotoNotFoundException -from moto import mock_kms_deprecated +from moto import mock_kms_deprecated, mock_kms PLAINTEXT_VECTORS = ( (b"some encodeable plaintext",), @@ -679,3 +681,77 @@ def 
test__assert_default_policy(): _assert_default_policy.when.called_with("default").should_not.throw( MotoNotFoundException ) + + +if six.PY2: + sort = sorted +else: + sort = lambda l: sorted(l, key=lambda d: d.keys()) + + +@mock_kms +def test_key_tag_on_create_key_happy(): + client = boto3.client("kms", region_name="us-east-1") + + tags = [ + {"TagKey": "key1", "TagValue": "value1"}, + {"TagKey": "key2", "TagValue": "value2"}, + ] + key = client.create_key(Description="test-key-tagging", Tags=tags) + key_id = key["KeyMetadata"]["KeyId"] + + result = client.list_resource_tags(KeyId=key_id) + actual = result.get("Tags", []) + assert sort(tags) == sort(actual) + + client.untag_resource(KeyId=key_id, TagKeys=["key1"]) + + actual = client.list_resource_tags(KeyId=key_id).get("Tags", []) + expected = [{"TagKey": "key2", "TagValue": "value2"}] + assert sort(expected) == sort(actual) + + +@mock_kms +def test_key_tag_added_happy(): + client = boto3.client("kms", region_name="us-east-1") + + key = client.create_key(Description="test-key-tagging") + key_id = key["KeyMetadata"]["KeyId"] + tags = [ + {"TagKey": "key1", "TagValue": "value1"}, + {"TagKey": "key2", "TagValue": "value2"}, + ] + client.tag_resource(KeyId=key_id, Tags=tags) + + result = client.list_resource_tags(KeyId=key_id) + actual = result.get("Tags", []) + assert sort(tags) == sort(actual) + + client.untag_resource(KeyId=key_id, TagKeys=["key1"]) + + actual = client.list_resource_tags(KeyId=key_id).get("Tags", []) + expected = [{"TagKey": "key2", "TagValue": "value2"}] + assert sort(expected) == sort(actual) + + +@mock_kms_deprecated +def test_key_tagging_sad(): + b = KmsBackend() + + try: + b.tag_resource("unknown", []) + raise "tag_resource should fail if KeyId is not known" + except JsonRESTError: + pass + + try: + b.untag_resource("unknown", []) + raise "untag_resource should fail if KeyId is not known" + except JsonRESTError: + pass + + try: + b.list_resource_tags("unknown") + raise "list_resource_tags 
should fail if KeyId is not known" + except JsonRESTError: + pass diff --git a/tests/test_kms/test_utils.py b/tests/test_kms/test_utils.py index 4c84ed127..4446635f3 100644 --- a/tests/test_kms/test_utils.py +++ b/tests/test_kms/test_utils.py @@ -102,7 +102,7 @@ def test_deserialize_ciphertext_blob(raw, serialized): @parameterized(((ec[0],) for ec in ENCRYPTION_CONTEXT_VECTORS)) def test_encrypt_decrypt_cycle(encryption_context): plaintext = b"some secret plaintext" - master_key = Key("nop", "nop", "nop", "nop", [], "nop") + master_key = Key("nop", "nop", "nop", "nop", "nop") master_key_map = {master_key.id: master_key} ciphertext_blob = encrypt( @@ -133,7 +133,7 @@ def test_encrypt_unknown_key_id(): def test_decrypt_invalid_ciphertext_format(): - master_key = Key("nop", "nop", "nop", "nop", [], "nop") + master_key = Key("nop", "nop", "nop", "nop", "nop") master_key_map = {master_key.id: master_key} with assert_raises(InvalidCiphertextException): @@ -153,7 +153,7 @@ def test_decrypt_unknwown_key_id(): def test_decrypt_invalid_ciphertext(): - master_key = Key("nop", "nop", "nop", "nop", [], "nop") + master_key = Key("nop", "nop", "nop", "nop", "nop") master_key_map = {master_key.id: master_key} ciphertext_blob = ( master_key.id.encode("utf-8") + b"123456789012" @@ -171,7 +171,7 @@ def test_decrypt_invalid_ciphertext(): def test_decrypt_invalid_encryption_context(): plaintext = b"some secret plaintext" - master_key = Key("nop", "nop", "nop", "nop", [], "nop") + master_key = Key("nop", "nop", "nop", "nop", "nop") master_key_map = {master_key.id: master_key} ciphertext_blob = encrypt( diff --git a/tests/test_opsworks/test_apps.py b/tests/test_opsworks/test_apps.py index 417140df2..1d3445c7d 100644 --- a/tests/test_opsworks/test_apps.py +++ b/tests/test_opsworks/test_apps.py @@ -1,76 +1,76 @@ -from __future__ import unicode_literals -import boto3 -from freezegun import freeze_time -import sure # noqa -import re - -from moto import mock_opsworks - - 
-@freeze_time("2015-01-01") -@mock_opsworks -def test_create_app_response(): - client = boto3.client("opsworks", region_name="us-east-1") - stack_id = client.create_stack( - Name="test_stack_1", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - - response = client.create_app(StackId=stack_id, Type="other", Name="TestApp") - - response.should.contain("AppId") - - second_stack_id = client.create_stack( - Name="test_stack_2", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - - response = client.create_app(StackId=second_stack_id, Type="other", Name="TestApp") - - response.should.contain("AppId") - - # ClientError - client.create_app.when.called_with( - StackId=stack_id, Type="other", Name="TestApp" - ).should.throw(Exception, re.compile(r'already an app named "TestApp"')) - - # ClientError - client.create_app.when.called_with( - StackId="nothere", Type="other", Name="TestApp" - ).should.throw(Exception, "nothere") - - -@freeze_time("2015-01-01") -@mock_opsworks -def test_describe_apps(): - client = boto3.client("opsworks", region_name="us-east-1") - stack_id = client.create_stack( - Name="test_stack_1", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - app_id = client.create_app(StackId=stack_id, Type="other", Name="TestApp")["AppId"] - - rv1 = client.describe_apps(StackId=stack_id) - rv2 = client.describe_apps(AppIds=[app_id]) - rv1["Apps"].should.equal(rv2["Apps"]) - - rv1["Apps"][0]["Name"].should.equal("TestApp") - - # ClientError - client.describe_apps.when.called_with( - StackId=stack_id, AppIds=[app_id] - ).should.throw(Exception, "Please provide one or more app IDs or a stack ID") - # ClientError - client.describe_apps.when.called_with(StackId="nothere").should.throw( - Exception, "Unable to find stack with ID nothere" - ) - # ClientError - 
client.describe_apps.when.called_with(AppIds=["nothere"]).should.throw( - Exception, "nothere" - ) +from __future__ import unicode_literals +import boto3 +from freezegun import freeze_time +import sure # noqa +import re + +from moto import mock_opsworks + + +@freeze_time("2015-01-01") +@mock_opsworks +def test_create_app_response(): + client = boto3.client("opsworks", region_name="us-east-1") + stack_id = client.create_stack( + Name="test_stack_1", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + + response = client.create_app(StackId=stack_id, Type="other", Name="TestApp") + + response.should.contain("AppId") + + second_stack_id = client.create_stack( + Name="test_stack_2", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + + response = client.create_app(StackId=second_stack_id, Type="other", Name="TestApp") + + response.should.contain("AppId") + + # ClientError + client.create_app.when.called_with( + StackId=stack_id, Type="other", Name="TestApp" + ).should.throw(Exception, re.compile(r'already an app named "TestApp"')) + + # ClientError + client.create_app.when.called_with( + StackId="nothere", Type="other", Name="TestApp" + ).should.throw(Exception, "nothere") + + +@freeze_time("2015-01-01") +@mock_opsworks +def test_describe_apps(): + client = boto3.client("opsworks", region_name="us-east-1") + stack_id = client.create_stack( + Name="test_stack_1", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + app_id = client.create_app(StackId=stack_id, Type="other", Name="TestApp")["AppId"] + + rv1 = client.describe_apps(StackId=stack_id) + rv2 = client.describe_apps(AppIds=[app_id]) + rv1["Apps"].should.equal(rv2["Apps"]) + + rv1["Apps"][0]["Name"].should.equal("TestApp") + + # ClientError + client.describe_apps.when.called_with( + StackId=stack_id, AppIds=[app_id] + 
).should.throw(Exception, "Please provide one or more app IDs or a stack ID") + # ClientError + client.describe_apps.when.called_with(StackId="nothere").should.throw( + Exception, "Unable to find stack with ID nothere" + ) + # ClientError + client.describe_apps.when.called_with(AppIds=["nothere"]).should.throw( + Exception, "nothere" + ) diff --git a/tests/test_opsworks/test_instances.py b/tests/test_opsworks/test_instances.py index 55d23f08e..5f0dc2040 100644 --- a/tests/test_opsworks/test_instances.py +++ b/tests/test_opsworks/test_instances.py @@ -1,206 +1,206 @@ -from __future__ import unicode_literals -import boto3 -import sure # noqa - -from moto import mock_opsworks -from moto import mock_ec2 - - -@mock_opsworks -def test_create_instance(): - client = boto3.client("opsworks", region_name="us-east-1") - stack_id = client.create_stack( - Name="test_stack_1", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - - layer_id = client.create_layer( - StackId=stack_id, - Type="custom", - Name="TestLayer", - Shortname="TestLayerShortName", - )["LayerId"] - - second_stack_id = client.create_stack( - Name="test_stack_2", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - - second_layer_id = client.create_layer( - StackId=second_stack_id, - Type="custom", - Name="SecondTestLayer", - Shortname="SecondTestLayerShortName", - )["LayerId"] - - response = client.create_instance( - StackId=stack_id, LayerIds=[layer_id], InstanceType="t2.micro" - ) - - response.should.contain("InstanceId") - - client.create_instance.when.called_with( - StackId="nothere", LayerIds=[layer_id], InstanceType="t2.micro" - ).should.throw(Exception, "Unable to find stack with ID nothere") - - client.create_instance.when.called_with( - StackId=stack_id, LayerIds=["nothere"], InstanceType="t2.micro" - ).should.throw(Exception, "nothere") - # ClientError - 
client.create_instance.when.called_with( - StackId=stack_id, LayerIds=[second_layer_id], InstanceType="t2.micro" - ).should.throw(Exception, "Please only provide layer IDs from the same stack") - # ClientError - client.start_instance.when.called_with(InstanceId="nothere").should.throw( - Exception, "Unable to find instance with ID nothere" - ) - - -@mock_opsworks -def test_describe_instances(): - """ - create two stacks, with 1 layer and 2 layers (S1L1, S2L1, S2L2) - - populate S1L1 with 2 instances (S1L1_i1, S1L1_i2) - populate S2L1 with 1 instance (S2L1_i1) - populate S2L2 with 3 instances (S2L2_i1..2) - """ - - client = boto3.client("opsworks", region_name="us-east-1") - S1 = client.create_stack( - Name="S1", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - S1L1 = client.create_layer( - StackId=S1, Type="custom", Name="S1L1", Shortname="S1L1" - )["LayerId"] - S2 = client.create_stack( - Name="S2", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - S2L1 = client.create_layer( - StackId=S2, Type="custom", Name="S2L1", Shortname="S2L1" - )["LayerId"] - S2L2 = client.create_layer( - StackId=S2, Type="custom", Name="S2L2", Shortname="S2L2" - )["LayerId"] - - S1L1_i1 = client.create_instance( - StackId=S1, LayerIds=[S1L1], InstanceType="t2.micro" - )["InstanceId"] - S1L1_i2 = client.create_instance( - StackId=S1, LayerIds=[S1L1], InstanceType="t2.micro" - )["InstanceId"] - S2L1_i1 = client.create_instance( - StackId=S2, LayerIds=[S2L1], InstanceType="t2.micro" - )["InstanceId"] - S2L2_i1 = client.create_instance( - StackId=S2, LayerIds=[S2L2], InstanceType="t2.micro" - )["InstanceId"] - S2L2_i2 = client.create_instance( - StackId=S2, LayerIds=[S2L2], InstanceType="t2.micro" - )["InstanceId"] - - # instances in Stack 1 - response = client.describe_instances(StackId=S1)["Instances"] - response.should.have.length_of(2) - 
S1L1_i1.should.be.within([i["InstanceId"] for i in response]) - S1L1_i2.should.be.within([i["InstanceId"] for i in response]) - - response2 = client.describe_instances(InstanceIds=[S1L1_i1, S1L1_i2])["Instances"] - sorted(response2, key=lambda d: d["InstanceId"]).should.equal( - sorted(response, key=lambda d: d["InstanceId"]) - ) - - response3 = client.describe_instances(LayerId=S1L1)["Instances"] - sorted(response3, key=lambda d: d["InstanceId"]).should.equal( - sorted(response, key=lambda d: d["InstanceId"]) - ) - - response = client.describe_instances(StackId=S1)["Instances"] - response.should.have.length_of(2) - S1L1_i1.should.be.within([i["InstanceId"] for i in response]) - S1L1_i2.should.be.within([i["InstanceId"] for i in response]) - - # instances in Stack 2 - response = client.describe_instances(StackId=S2)["Instances"] - response.should.have.length_of(3) - S2L1_i1.should.be.within([i["InstanceId"] for i in response]) - S2L2_i1.should.be.within([i["InstanceId"] for i in response]) - S2L2_i2.should.be.within([i["InstanceId"] for i in response]) - - response = client.describe_instances(LayerId=S2L1)["Instances"] - response.should.have.length_of(1) - S2L1_i1.should.be.within([i["InstanceId"] for i in response]) - - response = client.describe_instances(LayerId=S2L2)["Instances"] - response.should.have.length_of(2) - S2L1_i1.should_not.be.within([i["InstanceId"] for i in response]) - - # ClientError - client.describe_instances.when.called_with(StackId=S1, LayerId=S1L1).should.throw( - Exception, "Please provide either one or more" - ) - # ClientError - client.describe_instances.when.called_with(StackId="nothere").should.throw( - Exception, "nothere" - ) - # ClientError - client.describe_instances.when.called_with(LayerId="nothere").should.throw( - Exception, "nothere" - ) - # ClientError - client.describe_instances.when.called_with(InstanceIds=["nothere"]).should.throw( - Exception, "nothere" - ) - - -@mock_opsworks -@mock_ec2 -def test_ec2_integration(): - """ 
- instances created via OpsWorks should be discoverable via ec2 - """ - - opsworks = boto3.client("opsworks", region_name="us-east-1") - stack_id = opsworks.create_stack( - Name="S1", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - - layer_id = opsworks.create_layer( - StackId=stack_id, Type="custom", Name="S1L1", Shortname="S1L1" - )["LayerId"] - - instance_id = opsworks.create_instance( - StackId=stack_id, - LayerIds=[layer_id], - InstanceType="t2.micro", - SshKeyName="testSSH", - )["InstanceId"] - - ec2 = boto3.client("ec2", region_name="us-east-1") - - # Before starting the instance, it shouldn't be discoverable via ec2 - reservations = ec2.describe_instances()["Reservations"] - assert reservations.should.be.empty - - # After starting the instance, it should be discoverable via ec2 - opsworks.start_instance(InstanceId=instance_id) - reservations = ec2.describe_instances()["Reservations"] - reservations[0]["Instances"].should.have.length_of(1) - instance = reservations[0]["Instances"][0] - opsworks_instance = opsworks.describe_instances(StackId=stack_id)["Instances"][0] - - instance["InstanceId"].should.equal(opsworks_instance["Ec2InstanceId"]) - instance["PrivateIpAddress"].should.equal(opsworks_instance["PrivateIp"]) +from __future__ import unicode_literals +import boto3 +import sure # noqa + +from moto import mock_opsworks +from moto import mock_ec2 + + +@mock_opsworks +def test_create_instance(): + client = boto3.client("opsworks", region_name="us-east-1") + stack_id = client.create_stack( + Name="test_stack_1", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + + layer_id = client.create_layer( + StackId=stack_id, + Type="custom", + Name="TestLayer", + Shortname="TestLayerShortName", + )["LayerId"] + + second_stack_id = client.create_stack( + Name="test_stack_2", + Region="us-east-1", + ServiceRoleArn="service_arn", + 
DefaultInstanceProfileArn="profile_arn", + )["StackId"] + + second_layer_id = client.create_layer( + StackId=second_stack_id, + Type="custom", + Name="SecondTestLayer", + Shortname="SecondTestLayerShortName", + )["LayerId"] + + response = client.create_instance( + StackId=stack_id, LayerIds=[layer_id], InstanceType="t2.micro" + ) + + response.should.contain("InstanceId") + + client.create_instance.when.called_with( + StackId="nothere", LayerIds=[layer_id], InstanceType="t2.micro" + ).should.throw(Exception, "Unable to find stack with ID nothere") + + client.create_instance.when.called_with( + StackId=stack_id, LayerIds=["nothere"], InstanceType="t2.micro" + ).should.throw(Exception, "nothere") + # ClientError + client.create_instance.when.called_with( + StackId=stack_id, LayerIds=[second_layer_id], InstanceType="t2.micro" + ).should.throw(Exception, "Please only provide layer IDs from the same stack") + # ClientError + client.start_instance.when.called_with(InstanceId="nothere").should.throw( + Exception, "Unable to find instance with ID nothere" + ) + + +@mock_opsworks +def test_describe_instances(): + """ + create two stacks, with 1 layer and 2 layers (S1L1, S2L1, S2L2) + + populate S1L1 with 2 instances (S1L1_i1, S1L1_i2) + populate S2L1 with 1 instance (S2L1_i1) + populate S2L2 with 3 instances (S2L2_i1..2) + """ + + client = boto3.client("opsworks", region_name="us-east-1") + S1 = client.create_stack( + Name="S1", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + S1L1 = client.create_layer( + StackId=S1, Type="custom", Name="S1L1", Shortname="S1L1" + )["LayerId"] + S2 = client.create_stack( + Name="S2", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + S2L1 = client.create_layer( + StackId=S2, Type="custom", Name="S2L1", Shortname="S2L1" + )["LayerId"] + S2L2 = client.create_layer( + StackId=S2, Type="custom", Name="S2L2", 
Shortname="S2L2" + )["LayerId"] + + S1L1_i1 = client.create_instance( + StackId=S1, LayerIds=[S1L1], InstanceType="t2.micro" + )["InstanceId"] + S1L1_i2 = client.create_instance( + StackId=S1, LayerIds=[S1L1], InstanceType="t2.micro" + )["InstanceId"] + S2L1_i1 = client.create_instance( + StackId=S2, LayerIds=[S2L1], InstanceType="t2.micro" + )["InstanceId"] + S2L2_i1 = client.create_instance( + StackId=S2, LayerIds=[S2L2], InstanceType="t2.micro" + )["InstanceId"] + S2L2_i2 = client.create_instance( + StackId=S2, LayerIds=[S2L2], InstanceType="t2.micro" + )["InstanceId"] + + # instances in Stack 1 + response = client.describe_instances(StackId=S1)["Instances"] + response.should.have.length_of(2) + S1L1_i1.should.be.within([i["InstanceId"] for i in response]) + S1L1_i2.should.be.within([i["InstanceId"] for i in response]) + + response2 = client.describe_instances(InstanceIds=[S1L1_i1, S1L1_i2])["Instances"] + sorted(response2, key=lambda d: d["InstanceId"]).should.equal( + sorted(response, key=lambda d: d["InstanceId"]) + ) + + response3 = client.describe_instances(LayerId=S1L1)["Instances"] + sorted(response3, key=lambda d: d["InstanceId"]).should.equal( + sorted(response, key=lambda d: d["InstanceId"]) + ) + + response = client.describe_instances(StackId=S1)["Instances"] + response.should.have.length_of(2) + S1L1_i1.should.be.within([i["InstanceId"] for i in response]) + S1L1_i2.should.be.within([i["InstanceId"] for i in response]) + + # instances in Stack 2 + response = client.describe_instances(StackId=S2)["Instances"] + response.should.have.length_of(3) + S2L1_i1.should.be.within([i["InstanceId"] for i in response]) + S2L2_i1.should.be.within([i["InstanceId"] for i in response]) + S2L2_i2.should.be.within([i["InstanceId"] for i in response]) + + response = client.describe_instances(LayerId=S2L1)["Instances"] + response.should.have.length_of(1) + S2L1_i1.should.be.within([i["InstanceId"] for i in response]) + + response = 
client.describe_instances(LayerId=S2L2)["Instances"] + response.should.have.length_of(2) + S2L1_i1.should_not.be.within([i["InstanceId"] for i in response]) + + # ClientError + client.describe_instances.when.called_with(StackId=S1, LayerId=S1L1).should.throw( + Exception, "Please provide either one or more" + ) + # ClientError + client.describe_instances.when.called_with(StackId="nothere").should.throw( + Exception, "nothere" + ) + # ClientError + client.describe_instances.when.called_with(LayerId="nothere").should.throw( + Exception, "nothere" + ) + # ClientError + client.describe_instances.when.called_with(InstanceIds=["nothere"]).should.throw( + Exception, "nothere" + ) + + +@mock_opsworks +@mock_ec2 +def test_ec2_integration(): + """ + instances created via OpsWorks should be discoverable via ec2 + """ + + opsworks = boto3.client("opsworks", region_name="us-east-1") + stack_id = opsworks.create_stack( + Name="S1", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + + layer_id = opsworks.create_layer( + StackId=stack_id, Type="custom", Name="S1L1", Shortname="S1L1" + )["LayerId"] + + instance_id = opsworks.create_instance( + StackId=stack_id, + LayerIds=[layer_id], + InstanceType="t2.micro", + SshKeyName="testSSH", + )["InstanceId"] + + ec2 = boto3.client("ec2", region_name="us-east-1") + + # Before starting the instance, it shouldn't be discoverable via ec2 + reservations = ec2.describe_instances()["Reservations"] + assert reservations.should.be.empty + + # After starting the instance, it should be discoverable via ec2 + opsworks.start_instance(InstanceId=instance_id) + reservations = ec2.describe_instances()["Reservations"] + reservations[0]["Instances"].should.have.length_of(1) + instance = reservations[0]["Instances"][0] + opsworks_instance = opsworks.describe_instances(StackId=stack_id)["Instances"][0] + + instance["InstanceId"].should.equal(opsworks_instance["Ec2InstanceId"]) + 
instance["PrivateIpAddress"].should.equal(opsworks_instance["PrivateIp"]) diff --git a/tests/test_opsworks/test_layers.py b/tests/test_opsworks/test_layers.py index 850666381..08d5a1ce4 100644 --- a/tests/test_opsworks/test_layers.py +++ b/tests/test_opsworks/test_layers.py @@ -1,96 +1,96 @@ -from __future__ import unicode_literals -import boto3 -from freezegun import freeze_time -import sure # noqa -import re - -from moto import mock_opsworks - - -@freeze_time("2015-01-01") -@mock_opsworks -def test_create_layer_response(): - client = boto3.client("opsworks", region_name="us-east-1") - stack_id = client.create_stack( - Name="test_stack_1", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - - response = client.create_layer( - StackId=stack_id, - Type="custom", - Name="TestLayer", - Shortname="TestLayerShortName", - ) - - response.should.contain("LayerId") - - second_stack_id = client.create_stack( - Name="test_stack_2", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - - response = client.create_layer( - StackId=second_stack_id, - Type="custom", - Name="TestLayer", - Shortname="TestLayerShortName", - ) - - response.should.contain("LayerId") - - # ClientError - client.create_layer.when.called_with( - StackId=stack_id, Type="custom", Name="TestLayer", Shortname="_" - ).should.throw(Exception, re.compile(r'already a layer named "TestLayer"')) - # ClientError - client.create_layer.when.called_with( - StackId=stack_id, Type="custom", Name="_", Shortname="TestLayerShortName" - ).should.throw( - Exception, re.compile(r'already a layer with shortname "TestLayerShortName"') - ) - # ClientError - client.create_layer.when.called_with( - StackId="nothere", Type="custom", Name="TestLayer", Shortname="_" - ).should.throw(Exception, "nothere") - - -@freeze_time("2015-01-01") -@mock_opsworks -def test_describe_layers(): - client = boto3.client("opsworks", 
region_name="us-east-1") - stack_id = client.create_stack( - Name="test_stack_1", - Region="us-east-1", - ServiceRoleArn="service_arn", - DefaultInstanceProfileArn="profile_arn", - )["StackId"] - layer_id = client.create_layer( - StackId=stack_id, - Type="custom", - Name="TestLayer", - Shortname="TestLayerShortName", - )["LayerId"] - - rv1 = client.describe_layers(StackId=stack_id) - rv2 = client.describe_layers(LayerIds=[layer_id]) - rv1["Layers"].should.equal(rv2["Layers"]) - - rv1["Layers"][0]["Name"].should.equal("TestLayer") - - # ClientError - client.describe_layers.when.called_with( - StackId=stack_id, LayerIds=[layer_id] - ).should.throw(Exception, "Please provide one or more layer IDs or a stack ID") - # ClientError - client.describe_layers.when.called_with(StackId="nothere").should.throw( - Exception, "Unable to find stack with ID nothere" - ) - # ClientError - client.describe_layers.when.called_with(LayerIds=["nothere"]).should.throw( - Exception, "nothere" - ) +from __future__ import unicode_literals +import boto3 +from freezegun import freeze_time +import sure # noqa +import re + +from moto import mock_opsworks + + +@freeze_time("2015-01-01") +@mock_opsworks +def test_create_layer_response(): + client = boto3.client("opsworks", region_name="us-east-1") + stack_id = client.create_stack( + Name="test_stack_1", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + + response = client.create_layer( + StackId=stack_id, + Type="custom", + Name="TestLayer", + Shortname="TestLayerShortName", + ) + + response.should.contain("LayerId") + + second_stack_id = client.create_stack( + Name="test_stack_2", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + + response = client.create_layer( + StackId=second_stack_id, + Type="custom", + Name="TestLayer", + Shortname="TestLayerShortName", + ) + + response.should.contain("LayerId") + + # ClientError + 
client.create_layer.when.called_with( + StackId=stack_id, Type="custom", Name="TestLayer", Shortname="_" + ).should.throw(Exception, re.compile(r'already a layer named "TestLayer"')) + # ClientError + client.create_layer.when.called_with( + StackId=stack_id, Type="custom", Name="_", Shortname="TestLayerShortName" + ).should.throw( + Exception, re.compile(r'already a layer with shortname "TestLayerShortName"') + ) + # ClientError + client.create_layer.when.called_with( + StackId="nothere", Type="custom", Name="TestLayer", Shortname="_" + ).should.throw(Exception, "nothere") + + +@freeze_time("2015-01-01") +@mock_opsworks +def test_describe_layers(): + client = boto3.client("opsworks", region_name="us-east-1") + stack_id = client.create_stack( + Name="test_stack_1", + Region="us-east-1", + ServiceRoleArn="service_arn", + DefaultInstanceProfileArn="profile_arn", + )["StackId"] + layer_id = client.create_layer( + StackId=stack_id, + Type="custom", + Name="TestLayer", + Shortname="TestLayerShortName", + )["LayerId"] + + rv1 = client.describe_layers(StackId=stack_id) + rv2 = client.describe_layers(LayerIds=[layer_id]) + rv1["Layers"].should.equal(rv2["Layers"]) + + rv1["Layers"][0]["Name"].should.equal("TestLayer") + + # ClientError + client.describe_layers.when.called_with( + StackId=stack_id, LayerIds=[layer_id] + ).should.throw(Exception, "Please provide one or more layer IDs or a stack ID") + # ClientError + client.describe_layers.when.called_with(StackId="nothere").should.throw( + Exception, "Unable to find stack with ID nothere" + ) + # ClientError + client.describe_layers.when.called_with(LayerIds=["nothere"]).should.throw( + Exception, "nothere" + ) diff --git a/tests/test_polly/test_polly.py b/tests/test_polly/test_polly.py index e172b98d0..5428cdeb7 100644 --- a/tests/test_polly/test_polly.py +++ b/tests/test_polly/test_polly.py @@ -1,263 +1,263 @@ -from __future__ import unicode_literals - -from botocore.exceptions import ClientError -import boto3 -import sure 
# noqa -from nose.tools import assert_raises -from moto import mock_polly - -# Polly only available in a few regions -DEFAULT_REGION = "eu-west-1" - -LEXICON_XML = """ - - - W3C - World Wide Web Consortium - -""" - - -@mock_polly -def test_describe_voices(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - - resp = client.describe_voices() - len(resp["Voices"]).should.be.greater_than(1) - - resp = client.describe_voices(LanguageCode="en-GB") - len(resp["Voices"]).should.equal(3) - - try: - client.describe_voices(LanguageCode="SOME_LANGUAGE") - except ClientError as err: - err.response["Error"]["Code"].should.equal("400") - else: - raise RuntimeError("Should of raised an exception") - - -@mock_polly -def test_put_list_lexicon(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - - # Return nothing - client.put_lexicon(Name="test", Content=LEXICON_XML) - - resp = client.list_lexicons() - len(resp["Lexicons"]).should.equal(1) - - -@mock_polly -def test_put_get_lexicon(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - - # Return nothing - client.put_lexicon(Name="test", Content=LEXICON_XML) - - resp = client.get_lexicon(Name="test") - resp.should.contain("Lexicon") - resp.should.contain("LexiconAttributes") - - -@mock_polly -def test_put_lexicon_bad_name(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - - try: - client.put_lexicon(Name="test-invalid", Content=LEXICON_XML) - except ClientError as err: - err.response["Error"]["Code"].should.equal("InvalidParameterValue") - else: - raise RuntimeError("Should of raised an exception") - - -@mock_polly -def test_synthesize_speech(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - - # Return nothing - client.put_lexicon(Name="test", Content=LEXICON_XML) - - tests = (("pcm", "audio/pcm"), ("mp3", "audio/mpeg"), ("ogg_vorbis", "audio/ogg")) - for output_format, content_type in tests: - resp = client.synthesize_speech( - LexiconNames=["test"], - 
OutputFormat=output_format, - SampleRate="16000", - Text="test1234", - TextType="text", - VoiceId="Astrid", - ) - resp["ContentType"].should.equal(content_type) - - -@mock_polly -def test_synthesize_speech_bad_lexicon(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - client.put_lexicon(Name="test", Content=LEXICON_XML) - - try: - client.synthesize_speech( - LexiconNames=["test2"], - OutputFormat="pcm", - SampleRate="16000", - Text="test1234", - TextType="text", - VoiceId="Astrid", - ) - except ClientError as err: - err.response["Error"]["Code"].should.equal("LexiconNotFoundException") - else: - raise RuntimeError("Should of raised LexiconNotFoundException") - - -@mock_polly -def test_synthesize_speech_bad_output_format(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - client.put_lexicon(Name="test", Content=LEXICON_XML) - - try: - client.synthesize_speech( - LexiconNames=["test"], - OutputFormat="invalid", - SampleRate="16000", - Text="test1234", - TextType="text", - VoiceId="Astrid", - ) - except ClientError as err: - err.response["Error"]["Code"].should.equal("InvalidParameterValue") - else: - raise RuntimeError("Should of raised ") - - -@mock_polly -def test_synthesize_speech_bad_sample_rate(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - client.put_lexicon(Name="test", Content=LEXICON_XML) - - try: - client.synthesize_speech( - LexiconNames=["test"], - OutputFormat="pcm", - SampleRate="18000", - Text="test1234", - TextType="text", - VoiceId="Astrid", - ) - except ClientError as err: - err.response["Error"]["Code"].should.equal("InvalidSampleRateException") - else: - raise RuntimeError("Should of raised ") - - -@mock_polly -def test_synthesize_speech_bad_text_type(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - client.put_lexicon(Name="test", Content=LEXICON_XML) - - try: - client.synthesize_speech( - LexiconNames=["test"], - OutputFormat="pcm", - SampleRate="16000", - Text="test1234", - 
TextType="invalid", - VoiceId="Astrid", - ) - except ClientError as err: - err.response["Error"]["Code"].should.equal("InvalidParameterValue") - else: - raise RuntimeError("Should of raised ") - - -@mock_polly -def test_synthesize_speech_bad_voice_id(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - client.put_lexicon(Name="test", Content=LEXICON_XML) - - try: - client.synthesize_speech( - LexiconNames=["test"], - OutputFormat="pcm", - SampleRate="16000", - Text="test1234", - TextType="text", - VoiceId="Luke", - ) - except ClientError as err: - err.response["Error"]["Code"].should.equal("InvalidParameterValue") - else: - raise RuntimeError("Should of raised ") - - -@mock_polly -def test_synthesize_speech_text_too_long(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - client.put_lexicon(Name="test", Content=LEXICON_XML) - - try: - client.synthesize_speech( - LexiconNames=["test"], - OutputFormat="pcm", - SampleRate="16000", - Text="test1234" * 376, # = 3008 characters - TextType="text", - VoiceId="Astrid", - ) - except ClientError as err: - err.response["Error"]["Code"].should.equal("TextLengthExceededException") - else: - raise RuntimeError("Should of raised ") - - -@mock_polly -def test_synthesize_speech_bad_speech_marks1(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - client.put_lexicon(Name="test", Content=LEXICON_XML) - - try: - client.synthesize_speech( - LexiconNames=["test"], - OutputFormat="pcm", - SampleRate="16000", - Text="test1234", - TextType="text", - SpeechMarkTypes=["word"], - VoiceId="Astrid", - ) - except ClientError as err: - err.response["Error"]["Code"].should.equal( - "MarksNotSupportedForFormatException" - ) - else: - raise RuntimeError("Should of raised ") - - -@mock_polly -def test_synthesize_speech_bad_speech_marks2(): - client = boto3.client("polly", region_name=DEFAULT_REGION) - client.put_lexicon(Name="test", Content=LEXICON_XML) - - try: - client.synthesize_speech( - 
LexiconNames=["test"], - OutputFormat="pcm", - SampleRate="16000", - Text="test1234", - TextType="ssml", - SpeechMarkTypes=["word"], - VoiceId="Astrid", - ) - except ClientError as err: - err.response["Error"]["Code"].should.equal( - "MarksNotSupportedForFormatException" - ) - else: - raise RuntimeError("Should of raised ") +from __future__ import unicode_literals + +from botocore.exceptions import ClientError +import boto3 +import sure # noqa +from nose.tools import assert_raises +from moto import mock_polly + +# Polly only available in a few regions +DEFAULT_REGION = "eu-west-1" + +LEXICON_XML = """ + + + W3C + World Wide Web Consortium + +""" + + +@mock_polly +def test_describe_voices(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + + resp = client.describe_voices() + len(resp["Voices"]).should.be.greater_than(1) + + resp = client.describe_voices(LanguageCode="en-GB") + len(resp["Voices"]).should.equal(3) + + try: + client.describe_voices(LanguageCode="SOME_LANGUAGE") + except ClientError as err: + err.response["Error"]["Code"].should.equal("400") + else: + raise RuntimeError("Should of raised an exception") + + +@mock_polly +def test_put_list_lexicon(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + + # Return nothing + client.put_lexicon(Name="test", Content=LEXICON_XML) + + resp = client.list_lexicons() + len(resp["Lexicons"]).should.equal(1) + + +@mock_polly +def test_put_get_lexicon(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + + # Return nothing + client.put_lexicon(Name="test", Content=LEXICON_XML) + + resp = client.get_lexicon(Name="test") + resp.should.contain("Lexicon") + resp.should.contain("LexiconAttributes") + + +@mock_polly +def test_put_lexicon_bad_name(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + + try: + client.put_lexicon(Name="test-invalid", Content=LEXICON_XML) + except ClientError as err: + err.response["Error"]["Code"].should.equal("InvalidParameterValue") + else: + 
raise RuntimeError("Should of raised an exception") + + +@mock_polly +def test_synthesize_speech(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + + # Return nothing + client.put_lexicon(Name="test", Content=LEXICON_XML) + + tests = (("pcm", "audio/pcm"), ("mp3", "audio/mpeg"), ("ogg_vorbis", "audio/ogg")) + for output_format, content_type in tests: + resp = client.synthesize_speech( + LexiconNames=["test"], + OutputFormat=output_format, + SampleRate="16000", + Text="test1234", + TextType="text", + VoiceId="Astrid", + ) + resp["ContentType"].should.equal(content_type) + + +@mock_polly +def test_synthesize_speech_bad_lexicon(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + client.put_lexicon(Name="test", Content=LEXICON_XML) + + try: + client.synthesize_speech( + LexiconNames=["test2"], + OutputFormat="pcm", + SampleRate="16000", + Text="test1234", + TextType="text", + VoiceId="Astrid", + ) + except ClientError as err: + err.response["Error"]["Code"].should.equal("LexiconNotFoundException") + else: + raise RuntimeError("Should of raised LexiconNotFoundException") + + +@mock_polly +def test_synthesize_speech_bad_output_format(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + client.put_lexicon(Name="test", Content=LEXICON_XML) + + try: + client.synthesize_speech( + LexiconNames=["test"], + OutputFormat="invalid", + SampleRate="16000", + Text="test1234", + TextType="text", + VoiceId="Astrid", + ) + except ClientError as err: + err.response["Error"]["Code"].should.equal("InvalidParameterValue") + else: + raise RuntimeError("Should of raised ") + + +@mock_polly +def test_synthesize_speech_bad_sample_rate(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + client.put_lexicon(Name="test", Content=LEXICON_XML) + + try: + client.synthesize_speech( + LexiconNames=["test"], + OutputFormat="pcm", + SampleRate="18000", + Text="test1234", + TextType="text", + VoiceId="Astrid", + ) + except ClientError as err: + 
err.response["Error"]["Code"].should.equal("InvalidSampleRateException") + else: + raise RuntimeError("Should of raised ") + + +@mock_polly +def test_synthesize_speech_bad_text_type(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + client.put_lexicon(Name="test", Content=LEXICON_XML) + + try: + client.synthesize_speech( + LexiconNames=["test"], + OutputFormat="pcm", + SampleRate="16000", + Text="test1234", + TextType="invalid", + VoiceId="Astrid", + ) + except ClientError as err: + err.response["Error"]["Code"].should.equal("InvalidParameterValue") + else: + raise RuntimeError("Should of raised ") + + +@mock_polly +def test_synthesize_speech_bad_voice_id(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + client.put_lexicon(Name="test", Content=LEXICON_XML) + + try: + client.synthesize_speech( + LexiconNames=["test"], + OutputFormat="pcm", + SampleRate="16000", + Text="test1234", + TextType="text", + VoiceId="Luke", + ) + except ClientError as err: + err.response["Error"]["Code"].should.equal("InvalidParameterValue") + else: + raise RuntimeError("Should of raised ") + + +@mock_polly +def test_synthesize_speech_text_too_long(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + client.put_lexicon(Name="test", Content=LEXICON_XML) + + try: + client.synthesize_speech( + LexiconNames=["test"], + OutputFormat="pcm", + SampleRate="16000", + Text="test1234" * 376, # = 3008 characters + TextType="text", + VoiceId="Astrid", + ) + except ClientError as err: + err.response["Error"]["Code"].should.equal("TextLengthExceededException") + else: + raise RuntimeError("Should of raised ") + + +@mock_polly +def test_synthesize_speech_bad_speech_marks1(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + client.put_lexicon(Name="test", Content=LEXICON_XML) + + try: + client.synthesize_speech( + LexiconNames=["test"], + OutputFormat="pcm", + SampleRate="16000", + Text="test1234", + TextType="text", + SpeechMarkTypes=["word"], + 
VoiceId="Astrid", + ) + except ClientError as err: + err.response["Error"]["Code"].should.equal( + "MarksNotSupportedForFormatException" + ) + else: + raise RuntimeError("Should of raised ") + + +@mock_polly +def test_synthesize_speech_bad_speech_marks2(): + client = boto3.client("polly", region_name=DEFAULT_REGION) + client.put_lexicon(Name="test", Content=LEXICON_XML) + + try: + client.synthesize_speech( + LexiconNames=["test"], + OutputFormat="pcm", + SampleRate="16000", + Text="test1234", + TextType="ssml", + SpeechMarkTypes=["word"], + VoiceId="Astrid", + ) + except ClientError as err: + err.response["Error"]["Code"].should.equal( + "MarksNotSupportedForFormatException" + ) + else: + raise RuntimeError("Should of raised ") diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index 56cbe547b..2193f8b27 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -5,6 +5,7 @@ import datetime import os import sys +from boto3 import Session from six.moves.urllib.request import urlopen from six.moves.urllib.error import HTTPError from functools import wraps @@ -1135,6 +1136,380 @@ if not settings.TEST_SERVER_MODE: "The unspecified location constraint is incompatible for the region specific endpoint this request was sent to." ) + # All tests for s3-control cannot be run under the server without a modification of the + # hosts file on your system. This is due to the fact that the URL to the host is in the form of: + # ACCOUNT_ID.s3-control.amazonaws.com <-- That Account ID part is the problem. If you want to + # make use of the moto server, update your hosts file for `THE_ACCOUNT_ID_FOR_MOTO.localhost` + # and this will work fine. 
+ + @mock_s3 + def test_get_public_access_block_for_account(): + from moto.s3.models import ACCOUNT_ID + + client = boto3.client("s3control", region_name="us-west-2") + + # With an invalid account ID: + with assert_raises(ClientError) as ce: + client.get_public_access_block(AccountId="111111111111") + assert ce.exception.response["Error"]["Code"] == "AccessDenied" + + # Without one defined: + with assert_raises(ClientError) as ce: + client.get_public_access_block(AccountId=ACCOUNT_ID) + assert ( + ce.exception.response["Error"]["Code"] + == "NoSuchPublicAccessBlockConfiguration" + ) + + # Put a with an invalid account ID: + with assert_raises(ClientError) as ce: + client.put_public_access_block( + AccountId="111111111111", + PublicAccessBlockConfiguration={"BlockPublicAcls": True}, + ) + assert ce.exception.response["Error"]["Code"] == "AccessDenied" + + # Put with an invalid PAB: + with assert_raises(ClientError) as ce: + client.put_public_access_block( + AccountId=ACCOUNT_ID, PublicAccessBlockConfiguration={} + ) + assert ce.exception.response["Error"]["Code"] == "InvalidRequest" + assert ( + "Must specify at least one configuration." 
+ in ce.exception.response["Error"]["Message"] + ) + + # Correct PAB: + client.put_public_access_block( + AccountId=ACCOUNT_ID, + PublicAccessBlockConfiguration={ + "BlockPublicAcls": True, + "IgnorePublicAcls": True, + "BlockPublicPolicy": True, + "RestrictPublicBuckets": True, + }, + ) + + # Get the correct PAB (for all regions): + for region in Session().get_available_regions("s3control"): + region_client = boto3.client("s3control", region_name=region) + assert region_client.get_public_access_block(AccountId=ACCOUNT_ID)[ + "PublicAccessBlockConfiguration" + ] == { + "BlockPublicAcls": True, + "IgnorePublicAcls": True, + "BlockPublicPolicy": True, + "RestrictPublicBuckets": True, + } + + # Delete with an invalid account ID: + with assert_raises(ClientError) as ce: + client.delete_public_access_block(AccountId="111111111111") + assert ce.exception.response["Error"]["Code"] == "AccessDenied" + + # Delete successfully: + client.delete_public_access_block(AccountId=ACCOUNT_ID) + + # Confirm that it's deleted: + with assert_raises(ClientError) as ce: + client.get_public_access_block(AccountId=ACCOUNT_ID) + assert ( + ce.exception.response["Error"]["Code"] + == "NoSuchPublicAccessBlockConfiguration" + ) + + @mock_s3 + @mock_config + def test_config_list_account_pab(): + from moto.s3.models import ACCOUNT_ID + + client = boto3.client("s3control", region_name="us-west-2") + config_client = boto3.client("config", region_name="us-west-2") + + # Create the aggregator: + account_aggregation_source = { + "AccountIds": [ACCOUNT_ID], + "AllAwsRegions": True, + } + config_client.put_configuration_aggregator( + ConfigurationAggregatorName="testing", + AccountAggregationSources=[account_aggregation_source], + ) + + # Without a PAB in place: + result = config_client.list_discovered_resources( + resourceType="AWS::S3::AccountPublicAccessBlock" + ) + assert not result["resourceIdentifiers"] + result = config_client.list_aggregate_discovered_resources( + 
ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + ) + assert not result["ResourceIdentifiers"] + + # Create a PAB: + client.put_public_access_block( + AccountId=ACCOUNT_ID, + PublicAccessBlockConfiguration={ + "BlockPublicAcls": True, + "IgnorePublicAcls": True, + "BlockPublicPolicy": True, + "RestrictPublicBuckets": True, + }, + ) + + # Test that successful queries work (non-aggregated): + result = config_client.list_discovered_resources( + resourceType="AWS::S3::AccountPublicAccessBlock" + ) + assert result["resourceIdentifiers"] == [ + { + "resourceType": "AWS::S3::AccountPublicAccessBlock", + "resourceId": ACCOUNT_ID, + } + ] + result = config_client.list_discovered_resources( + resourceType="AWS::S3::AccountPublicAccessBlock", + resourceIds=[ACCOUNT_ID, "nope"], + ) + assert result["resourceIdentifiers"] == [ + { + "resourceType": "AWS::S3::AccountPublicAccessBlock", + "resourceId": ACCOUNT_ID, + } + ] + result = config_client.list_discovered_resources( + resourceType="AWS::S3::AccountPublicAccessBlock", resourceName="" + ) + assert result["resourceIdentifiers"] == [ + { + "resourceType": "AWS::S3::AccountPublicAccessBlock", + "resourceId": ACCOUNT_ID, + } + ] + + # Test that successful queries work (aggregated): + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + ) + regions = {region for region in Session().get_available_regions("config")} + for r in result["ResourceIdentifiers"]: + regions.remove(r.pop("SourceRegion")) + assert r == { + "ResourceType": "AWS::S3::AccountPublicAccessBlock", + "SourceAccountId": ACCOUNT_ID, + "ResourceId": ACCOUNT_ID, + } + + # Just check that the len is the same -- this should be reasonable + regions = {region for region in Session().get_available_regions("config")} + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + 
ConfigurationAggregatorName="testing", + Filters={"ResourceName": ""}, + ) + assert len(regions) == len(result["ResourceIdentifiers"]) + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + Filters={"ResourceName": "", "ResourceId": ACCOUNT_ID}, + ) + assert len(regions) == len(result["ResourceIdentifiers"]) + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + Filters={ + "ResourceName": "", + "ResourceId": ACCOUNT_ID, + "Region": "us-west-2", + }, + ) + assert ( + result["ResourceIdentifiers"][0]["SourceRegion"] == "us-west-2" + and len(result["ResourceIdentifiers"]) == 1 + ) + + # Test aggregator pagination: + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + Limit=1, + ) + regions = sorted( + [region for region in Session().get_available_regions("config")] + ) + assert result["ResourceIdentifiers"][0] == { + "ResourceType": "AWS::S3::AccountPublicAccessBlock", + "SourceAccountId": ACCOUNT_ID, + "ResourceId": ACCOUNT_ID, + "SourceRegion": regions[0], + } + assert result["NextToken"] == regions[1] + + # Get the next region: + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + Limit=1, + NextToken=regions[1], + ) + assert result["ResourceIdentifiers"][0] == { + "ResourceType": "AWS::S3::AccountPublicAccessBlock", + "SourceAccountId": ACCOUNT_ID, + "ResourceId": ACCOUNT_ID, + "SourceRegion": regions[1], + } + + # Non-aggregated with incorrect info: + result = config_client.list_discovered_resources( + resourceType="AWS::S3::AccountPublicAccessBlock", resourceName="nope" + ) + assert not result["resourceIdentifiers"] + result = 
config_client.list_discovered_resources( + resourceType="AWS::S3::AccountPublicAccessBlock", resourceIds=["nope"] + ) + assert not result["resourceIdentifiers"] + + # Aggregated with incorrect info: + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + Filters={"ResourceName": "nope"}, + ) + assert not result["ResourceIdentifiers"] + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + Filters={"ResourceId": "nope"}, + ) + assert not result["ResourceIdentifiers"] + result = config_client.list_aggregate_discovered_resources( + ResourceType="AWS::S3::AccountPublicAccessBlock", + ConfigurationAggregatorName="testing", + Filters={"Region": "Nope"}, + ) + assert not result["ResourceIdentifiers"] + + @mock_s3 + @mock_config + def test_config_get_account_pab(): + from moto.s3.models import ACCOUNT_ID + + client = boto3.client("s3control", region_name="us-west-2") + config_client = boto3.client("config", region_name="us-west-2") + + # Create the aggregator: + account_aggregation_source = { + "AccountIds": [ACCOUNT_ID], + "AllAwsRegions": True, + } + config_client.put_configuration_aggregator( + ConfigurationAggregatorName="testing", + AccountAggregationSources=[account_aggregation_source], + ) + + # Without a PAB in place: + with assert_raises(ClientError) as ce: + config_client.get_resource_config_history( + resourceType="AWS::S3::AccountPublicAccessBlock", resourceId=ACCOUNT_ID + ) + assert ( + ce.exception.response["Error"]["Code"] == "ResourceNotDiscoveredException" + ) + # aggregate + result = config_client.batch_get_resource_config( + resourceKeys=[ + { + "resourceType": "AWS::S3::AccountPublicAccessBlock", + "resourceId": "ACCOUNT_ID", + } + ] + ) + assert not result["baseConfigurationItems"] + result = config_client.batch_get_aggregate_resource_config( + 
ConfigurationAggregatorName="testing", + ResourceIdentifiers=[ + { + "SourceAccountId": ACCOUNT_ID, + "SourceRegion": "us-west-2", + "ResourceId": ACCOUNT_ID, + "ResourceType": "AWS::S3::AccountPublicAccessBlock", + "ResourceName": "", + } + ], + ) + assert not result["BaseConfigurationItems"] + + # Create a PAB: + client.put_public_access_block( + AccountId=ACCOUNT_ID, + PublicAccessBlockConfiguration={ + "BlockPublicAcls": True, + "IgnorePublicAcls": True, + "BlockPublicPolicy": True, + "RestrictPublicBuckets": True, + }, + ) + + # Get the proper config: + proper_config = { + "blockPublicAcls": True, + "ignorePublicAcls": True, + "blockPublicPolicy": True, + "restrictPublicBuckets": True, + } + result = config_client.get_resource_config_history( + resourceType="AWS::S3::AccountPublicAccessBlock", resourceId=ACCOUNT_ID + ) + assert ( + json.loads(result["configurationItems"][0]["configuration"]) + == proper_config + ) + assert ( + result["configurationItems"][0]["accountId"] + == result["configurationItems"][0]["resourceId"] + == ACCOUNT_ID + ) + result = config_client.batch_get_resource_config( + resourceKeys=[ + { + "resourceType": "AWS::S3::AccountPublicAccessBlock", + "resourceId": ACCOUNT_ID, + } + ] + ) + assert len(result["baseConfigurationItems"]) == 1 + assert ( + json.loads(result["baseConfigurationItems"][0]["configuration"]) + == proper_config + ) + assert ( + result["baseConfigurationItems"][0]["accountId"] + == result["baseConfigurationItems"][0]["resourceId"] + == ACCOUNT_ID + ) + + for region in Session().get_available_regions("s3control"): + result = config_client.batch_get_aggregate_resource_config( + ConfigurationAggregatorName="testing", + ResourceIdentifiers=[ + { + "SourceAccountId": ACCOUNT_ID, + "SourceRegion": region, + "ResourceId": ACCOUNT_ID, + "ResourceType": "AWS::S3::AccountPublicAccessBlock", + "ResourceName": "", + } + ], + ) + assert len(result["BaseConfigurationItems"]) == 1 + assert ( + 
json.loads(result["BaseConfigurationItems"][0]["configuration"]) + == proper_config + ) + @mock_s3_deprecated def test_ranged_get(): @@ -1768,6 +2143,34 @@ def test_boto3_copy_object_from_unversioned_to_versioned_bucket(): obj2_version_new.should_not.equal(None) +@mock_s3 +def test_boto3_copy_object_with_replacement_tagging(): + client = boto3.client("s3", region_name=DEFAULT_REGION_NAME) + client.create_bucket(Bucket="mybucket") + client.put_object( + Bucket="mybucket", Key="original", Body=b"test", Tagging="tag=old" + ) + + client.copy_object( + CopySource={"Bucket": "mybucket", "Key": "original"}, + Bucket="mybucket", + Key="copy1", + TaggingDirective="REPLACE", + Tagging="tag=new", + ) + client.copy_object( + CopySource={"Bucket": "mybucket", "Key": "original"}, + Bucket="mybucket", + Key="copy2", + TaggingDirective="COPY", + ) + + tags1 = client.get_object_tagging(Bucket="mybucket", Key="copy1")["TagSet"] + tags1.should.equal([{"Key": "tag", "Value": "new"}]) + tags2 = client.get_object_tagging(Bucket="mybucket", Key="copy2")["TagSet"] + tags2.should.equal([{"Key": "tag", "Value": "old"}]) + + @mock_s3 def test_boto3_deleted_versionings_list(): client = boto3.client("s3", region_name=DEFAULT_REGION_NAME) diff --git a/tests/test_swf/models/test_domain.py b/tests/test_swf/models/test_domain.py index 389e516df..32940753f 100644 --- a/tests/test_swf/models/test_domain.py +++ b/tests/test_swf/models/test_domain.py @@ -1,107 +1,107 @@ -from collections import namedtuple -import sure # noqa - -from moto.swf.exceptions import SWFUnknownResourceFault -from moto.swf.models import Domain - -# Ensure 'assert_raises' context manager support for Python 2.6 -import tests.backport_assert_raises # noqa - -# Fake WorkflowExecution for tests purposes -WorkflowExecution = namedtuple( - "WorkflowExecution", ["workflow_id", "run_id", "execution_status", "open"] -) - - -def test_domain_short_dict_representation(): - domain = Domain("foo", "52") - 
domain.to_short_dict().should.equal({"name": "foo", "status": "REGISTERED"}) - - domain.description = "foo bar" - domain.to_short_dict()["description"].should.equal("foo bar") - - -def test_domain_full_dict_representation(): - domain = Domain("foo", "52") - - domain.to_full_dict()["domainInfo"].should.equal(domain.to_short_dict()) - _config = domain.to_full_dict()["configuration"] - _config["workflowExecutionRetentionPeriodInDays"].should.equal("52") - - -def test_domain_string_representation(): - domain = Domain("my-domain", "60") - str(domain).should.equal("Domain(name: my-domain, status: REGISTERED)") - - -def test_domain_add_to_activity_task_list(): - domain = Domain("my-domain", "60") - domain.add_to_activity_task_list("foo", "bar") - domain.activity_task_lists.should.equal({"foo": ["bar"]}) - - -def test_domain_activity_tasks(): - domain = Domain("my-domain", "60") - domain.add_to_activity_task_list("foo", "bar") - domain.add_to_activity_task_list("other", "baz") - sorted(domain.activity_tasks).should.equal(["bar", "baz"]) - - -def test_domain_add_to_decision_task_list(): - domain = Domain("my-domain", "60") - domain.add_to_decision_task_list("foo", "bar") - domain.decision_task_lists.should.equal({"foo": ["bar"]}) - - -def test_domain_decision_tasks(): - domain = Domain("my-domain", "60") - domain.add_to_decision_task_list("foo", "bar") - domain.add_to_decision_task_list("other", "baz") - sorted(domain.decision_tasks).should.equal(["bar", "baz"]) - - -def test_domain_get_workflow_execution(): - domain = Domain("my-domain", "60") - - wfe1 = WorkflowExecution( - workflow_id="wf-id-1", run_id="run-id-1", execution_status="OPEN", open=True - ) - wfe2 = WorkflowExecution( - workflow_id="wf-id-1", run_id="run-id-2", execution_status="CLOSED", open=False - ) - wfe3 = WorkflowExecution( - workflow_id="wf-id-2", run_id="run-id-3", execution_status="OPEN", open=True - ) - wfe4 = WorkflowExecution( - workflow_id="wf-id-3", run_id="run-id-4", execution_status="CLOSED", 
open=False - ) - domain.workflow_executions = [wfe1, wfe2, wfe3, wfe4] - - # get workflow execution through workflow_id and run_id - domain.get_workflow_execution("wf-id-1", run_id="run-id-1").should.equal(wfe1) - domain.get_workflow_execution("wf-id-1", run_id="run-id-2").should.equal(wfe2) - domain.get_workflow_execution("wf-id-3", run_id="run-id-4").should.equal(wfe4) - - domain.get_workflow_execution.when.called_with( - "wf-id-1", run_id="non-existent" - ).should.throw(SWFUnknownResourceFault) - - # get OPEN workflow execution by default if no run_id - domain.get_workflow_execution("wf-id-1").should.equal(wfe1) - domain.get_workflow_execution.when.called_with("wf-id-3").should.throw( - SWFUnknownResourceFault - ) - domain.get_workflow_execution.when.called_with("wf-id-non-existent").should.throw( - SWFUnknownResourceFault - ) - - # raise_if_closed attribute - domain.get_workflow_execution( - "wf-id-1", run_id="run-id-1", raise_if_closed=True - ).should.equal(wfe1) - domain.get_workflow_execution.when.called_with( - "wf-id-3", run_id="run-id-4", raise_if_closed=True - ).should.throw(SWFUnknownResourceFault) - - # raise_if_none attribute - domain.get_workflow_execution("foo", raise_if_none=False).should.be.none +from collections import namedtuple +import sure # noqa + +from moto.swf.exceptions import SWFUnknownResourceFault +from moto.swf.models import Domain + +# Ensure 'assert_raises' context manager support for Python 2.6 +import tests.backport_assert_raises # noqa + +# Fake WorkflowExecution for tests purposes +WorkflowExecution = namedtuple( + "WorkflowExecution", ["workflow_id", "run_id", "execution_status", "open"] +) + + +def test_domain_short_dict_representation(): + domain = Domain("foo", "52") + domain.to_short_dict().should.equal({"name": "foo", "status": "REGISTERED"}) + + domain.description = "foo bar" + domain.to_short_dict()["description"].should.equal("foo bar") + + +def test_domain_full_dict_representation(): + domain = Domain("foo", "52") + + 
domain.to_full_dict()["domainInfo"].should.equal(domain.to_short_dict()) + _config = domain.to_full_dict()["configuration"] + _config["workflowExecutionRetentionPeriodInDays"].should.equal("52") + + +def test_domain_string_representation(): + domain = Domain("my-domain", "60") + str(domain).should.equal("Domain(name: my-domain, status: REGISTERED)") + + +def test_domain_add_to_activity_task_list(): + domain = Domain("my-domain", "60") + domain.add_to_activity_task_list("foo", "bar") + domain.activity_task_lists.should.equal({"foo": ["bar"]}) + + +def test_domain_activity_tasks(): + domain = Domain("my-domain", "60") + domain.add_to_activity_task_list("foo", "bar") + domain.add_to_activity_task_list("other", "baz") + sorted(domain.activity_tasks).should.equal(["bar", "baz"]) + + +def test_domain_add_to_decision_task_list(): + domain = Domain("my-domain", "60") + domain.add_to_decision_task_list("foo", "bar") + domain.decision_task_lists.should.equal({"foo": ["bar"]}) + + +def test_domain_decision_tasks(): + domain = Domain("my-domain", "60") + domain.add_to_decision_task_list("foo", "bar") + domain.add_to_decision_task_list("other", "baz") + sorted(domain.decision_tasks).should.equal(["bar", "baz"]) + + +def test_domain_get_workflow_execution(): + domain = Domain("my-domain", "60") + + wfe1 = WorkflowExecution( + workflow_id="wf-id-1", run_id="run-id-1", execution_status="OPEN", open=True + ) + wfe2 = WorkflowExecution( + workflow_id="wf-id-1", run_id="run-id-2", execution_status="CLOSED", open=False + ) + wfe3 = WorkflowExecution( + workflow_id="wf-id-2", run_id="run-id-3", execution_status="OPEN", open=True + ) + wfe4 = WorkflowExecution( + workflow_id="wf-id-3", run_id="run-id-4", execution_status="CLOSED", open=False + ) + domain.workflow_executions = [wfe1, wfe2, wfe3, wfe4] + + # get workflow execution through workflow_id and run_id + domain.get_workflow_execution("wf-id-1", run_id="run-id-1").should.equal(wfe1) + domain.get_workflow_execution("wf-id-1", 
run_id="run-id-2").should.equal(wfe2) + domain.get_workflow_execution("wf-id-3", run_id="run-id-4").should.equal(wfe4) + + domain.get_workflow_execution.when.called_with( + "wf-id-1", run_id="non-existent" + ).should.throw(SWFUnknownResourceFault) + + # get OPEN workflow execution by default if no run_id + domain.get_workflow_execution("wf-id-1").should.equal(wfe1) + domain.get_workflow_execution.when.called_with("wf-id-3").should.throw( + SWFUnknownResourceFault + ) + domain.get_workflow_execution.when.called_with("wf-id-non-existent").should.throw( + SWFUnknownResourceFault + ) + + # raise_if_closed attribute + domain.get_workflow_execution( + "wf-id-1", run_id="run-id-1", raise_if_closed=True + ).should.equal(wfe1) + domain.get_workflow_execution.when.called_with( + "wf-id-3", run_id="run-id-4", raise_if_closed=True + ).should.throw(SWFUnknownResourceFault) + + # raise_if_none attribute + domain.get_workflow_execution("foo", raise_if_none=False).should.be.none diff --git a/tests/test_swf/models/test_timeout.py b/tests/test_swf/models/test_timeout.py index fb52652fd..0ee059065 100644 --- a/tests/test_swf/models/test_timeout.py +++ b/tests/test_swf/models/test_timeout.py @@ -1,19 +1,19 @@ -from freezegun import freeze_time -import sure # noqa - -from moto.swf.models import Timeout - -from ..utils import make_workflow_execution - - -def test_timeout_creation(): - wfe = make_workflow_execution() - - # epoch 1420113600 == "2015-01-01 13:00:00" - timeout = Timeout(wfe, 1420117200, "START_TO_CLOSE") - - with freeze_time("2015-01-01 12:00:00"): - timeout.reached.should.be.falsy - - with freeze_time("2015-01-01 13:00:00"): - timeout.reached.should.be.truthy +from freezegun import freeze_time +import sure # noqa + +from moto.swf.models import Timeout + +from ..utils import make_workflow_execution + + +def test_timeout_creation(): + wfe = make_workflow_execution() + + # epoch 1420113600 == "2015-01-01 13:00:00" + timeout = Timeout(wfe, 1420117200, "START_TO_CLOSE") + + 
with freeze_time("2015-01-01 12:00:00"): + timeout.reached.should.be.falsy + + with freeze_time("2015-01-01 13:00:00"): + timeout.reached.should.be.truthy diff --git a/tests/test_swf/models/test_workflow_execution.py b/tests/test_swf/models/test_workflow_execution.py index 6c73a9686..503198f46 100644 --- a/tests/test_swf/models/test_workflow_execution.py +++ b/tests/test_swf/models/test_workflow_execution.py @@ -148,6 +148,39 @@ def test_workflow_execution_full_dict_representation(): ) +def test_closed_workflow_execution_full_dict_representation(): + domain = get_basic_domain() + wf_type = WorkflowType( + "test-workflow", + "v1.0", + task_list="queue", + default_child_policy="ABANDON", + default_execution_start_to_close_timeout="300", + default_task_start_to_close_timeout="300", + ) + wfe = WorkflowExecution(domain, wf_type, "ab1234") + wfe.execution_status = "CLOSED" + wfe.close_status = "CANCELED" + wfe.close_timestamp = 1420066801.123 + + fd = wfe.to_full_dict() + medium_dict = wfe.to_medium_dict() + medium_dict["closeStatus"] = "CANCELED" + medium_dict["closeTimestamp"] = 1420066801.123 + fd["executionInfo"].should.equal(medium_dict) + fd["openCounts"]["openTimers"].should.equal(0) + fd["openCounts"]["openDecisionTasks"].should.equal(0) + fd["openCounts"]["openActivityTasks"].should.equal(0) + fd["executionConfiguration"].should.equal( + { + "childPolicy": "ABANDON", + "executionStartToCloseTimeout": "300", + "taskList": {"name": "queue"}, + "taskStartToCloseTimeout": "300", + } + ) + + def test_workflow_execution_list_dict_representation(): domain = get_basic_domain() wf_type = WorkflowType( diff --git a/tests/test_swf/responses/test_domains.py b/tests/test_swf/responses/test_domains.py index 638bd410e..199219d27 100644 --- a/tests/test_swf/responses/test_domains.py +++ b/tests/test_swf/responses/test_domains.py @@ -1,114 +1,114 @@ -import boto -from boto.swf.exceptions import SWFResponseError -import sure # noqa - -from moto import mock_swf_deprecated - - -# 
RegisterDomain endpoint -@mock_swf_deprecated -def test_register_domain(): - conn = boto.connect_swf("the_key", "the_secret") - conn.register_domain("test-domain", "60", description="A test domain") - - all_domains = conn.list_domains("REGISTERED") - domain = all_domains["domainInfos"][0] - - domain["name"].should.equal("test-domain") - domain["status"].should.equal("REGISTERED") - domain["description"].should.equal("A test domain") - - -@mock_swf_deprecated -def test_register_already_existing_domain(): - conn = boto.connect_swf("the_key", "the_secret") - conn.register_domain("test-domain", "60", description="A test domain") - - conn.register_domain.when.called_with( - "test-domain", "60", description="A test domain" - ).should.throw(SWFResponseError) - - -@mock_swf_deprecated -def test_register_with_wrong_parameter_type(): - conn = boto.connect_swf("the_key", "the_secret") - - conn.register_domain.when.called_with( - "test-domain", 60, description="A test domain" - ).should.throw(SWFResponseError) - - -# ListDomains endpoint -@mock_swf_deprecated -def test_list_domains_order(): - conn = boto.connect_swf("the_key", "the_secret") - conn.register_domain("b-test-domain", "60") - conn.register_domain("a-test-domain", "60") - conn.register_domain("c-test-domain", "60") - - all_domains = conn.list_domains("REGISTERED") - names = [domain["name"] for domain in all_domains["domainInfos"]] - names.should.equal(["a-test-domain", "b-test-domain", "c-test-domain"]) - - -@mock_swf_deprecated -def test_list_domains_reverse_order(): - conn = boto.connect_swf("the_key", "the_secret") - conn.register_domain("b-test-domain", "60") - conn.register_domain("a-test-domain", "60") - conn.register_domain("c-test-domain", "60") - - all_domains = conn.list_domains("REGISTERED", reverse_order=True) - names = [domain["name"] for domain in all_domains["domainInfos"]] - names.should.equal(["c-test-domain", "b-test-domain", "a-test-domain"]) - - -# DeprecateDomain endpoint -@mock_swf_deprecated 
-def test_deprecate_domain(): - conn = boto.connect_swf("the_key", "the_secret") - conn.register_domain("test-domain", "60", description="A test domain") - conn.deprecate_domain("test-domain") - - all_domains = conn.list_domains("DEPRECATED") - domain = all_domains["domainInfos"][0] - - domain["name"].should.equal("test-domain") - - -@mock_swf_deprecated -def test_deprecate_already_deprecated_domain(): - conn = boto.connect_swf("the_key", "the_secret") - conn.register_domain("test-domain", "60", description="A test domain") - conn.deprecate_domain("test-domain") - - conn.deprecate_domain.when.called_with("test-domain").should.throw(SWFResponseError) - - -@mock_swf_deprecated -def test_deprecate_non_existent_domain(): - conn = boto.connect_swf("the_key", "the_secret") - - conn.deprecate_domain.when.called_with("non-existent").should.throw( - SWFResponseError - ) - - -# DescribeDomain endpoint -@mock_swf_deprecated -def test_describe_domain(): - conn = boto.connect_swf("the_key", "the_secret") - conn.register_domain("test-domain", "60", description="A test domain") - - domain = conn.describe_domain("test-domain") - domain["configuration"]["workflowExecutionRetentionPeriodInDays"].should.equal("60") - domain["domainInfo"]["description"].should.equal("A test domain") - domain["domainInfo"]["name"].should.equal("test-domain") - domain["domainInfo"]["status"].should.equal("REGISTERED") - - -@mock_swf_deprecated -def test_describe_non_existent_domain(): - conn = boto.connect_swf("the_key", "the_secret") - - conn.describe_domain.when.called_with("non-existent").should.throw(SWFResponseError) +import boto +from boto.swf.exceptions import SWFResponseError +import sure # noqa + +from moto import mock_swf_deprecated + + +# RegisterDomain endpoint +@mock_swf_deprecated +def test_register_domain(): + conn = boto.connect_swf("the_key", "the_secret") + conn.register_domain("test-domain", "60", description="A test domain") + + all_domains = conn.list_domains("REGISTERED") + domain 
= all_domains["domainInfos"][0] + + domain["name"].should.equal("test-domain") + domain["status"].should.equal("REGISTERED") + domain["description"].should.equal("A test domain") + + +@mock_swf_deprecated +def test_register_already_existing_domain(): + conn = boto.connect_swf("the_key", "the_secret") + conn.register_domain("test-domain", "60", description="A test domain") + + conn.register_domain.when.called_with( + "test-domain", "60", description="A test domain" + ).should.throw(SWFResponseError) + + +@mock_swf_deprecated +def test_register_with_wrong_parameter_type(): + conn = boto.connect_swf("the_key", "the_secret") + + conn.register_domain.when.called_with( + "test-domain", 60, description="A test domain" + ).should.throw(SWFResponseError) + + +# ListDomains endpoint +@mock_swf_deprecated +def test_list_domains_order(): + conn = boto.connect_swf("the_key", "the_secret") + conn.register_domain("b-test-domain", "60") + conn.register_domain("a-test-domain", "60") + conn.register_domain("c-test-domain", "60") + + all_domains = conn.list_domains("REGISTERED") + names = [domain["name"] for domain in all_domains["domainInfos"]] + names.should.equal(["a-test-domain", "b-test-domain", "c-test-domain"]) + + +@mock_swf_deprecated +def test_list_domains_reverse_order(): + conn = boto.connect_swf("the_key", "the_secret") + conn.register_domain("b-test-domain", "60") + conn.register_domain("a-test-domain", "60") + conn.register_domain("c-test-domain", "60") + + all_domains = conn.list_domains("REGISTERED", reverse_order=True) + names = [domain["name"] for domain in all_domains["domainInfos"]] + names.should.equal(["c-test-domain", "b-test-domain", "a-test-domain"]) + + +# DeprecateDomain endpoint +@mock_swf_deprecated +def test_deprecate_domain(): + conn = boto.connect_swf("the_key", "the_secret") + conn.register_domain("test-domain", "60", description="A test domain") + conn.deprecate_domain("test-domain") + + all_domains = conn.list_domains("DEPRECATED") + domain = 
all_domains["domainInfos"][0] + + domain["name"].should.equal("test-domain") + + +@mock_swf_deprecated +def test_deprecate_already_deprecated_domain(): + conn = boto.connect_swf("the_key", "the_secret") + conn.register_domain("test-domain", "60", description="A test domain") + conn.deprecate_domain("test-domain") + + conn.deprecate_domain.when.called_with("test-domain").should.throw(SWFResponseError) + + +@mock_swf_deprecated +def test_deprecate_non_existent_domain(): + conn = boto.connect_swf("the_key", "the_secret") + + conn.deprecate_domain.when.called_with("non-existent").should.throw( + SWFResponseError + ) + + +# DescribeDomain endpoint +@mock_swf_deprecated +def test_describe_domain(): + conn = boto.connect_swf("the_key", "the_secret") + conn.register_domain("test-domain", "60", description="A test domain") + + domain = conn.describe_domain("test-domain") + domain["configuration"]["workflowExecutionRetentionPeriodInDays"].should.equal("60") + domain["domainInfo"]["description"].should.equal("A test domain") + domain["domainInfo"]["name"].should.equal("test-domain") + domain["domainInfo"]["status"].should.equal("REGISTERED") + + +@mock_swf_deprecated +def test_describe_non_existent_domain(): + conn = boto.connect_swf("the_key", "the_secret") + + conn.describe_domain.when.called_with("non-existent").should.throw(SWFResponseError) diff --git a/tests/test_swf/responses/test_workflow_types.py b/tests/test_swf/responses/test_workflow_types.py index 4c92d7762..72aa814d2 100644 --- a/tests/test_swf/responses/test_workflow_types.py +++ b/tests/test_swf/responses/test_workflow_types.py @@ -1,7 +1,9 @@ import sure import boto +import boto3 from moto import mock_swf_deprecated +from moto import mock_swf from boto.swf.exceptions import SWFResponseError @@ -133,6 +135,41 @@ def test_describe_workflow_type(): infos["status"].should.equal("REGISTERED") +@mock_swf +def test_describe_workflow_type_full_boto3(): + # boto3 required as boto doesn't support all of the arguments + 
client = boto3.client("swf", region_name="us-east-1") + client.register_domain( + name="test-domain", workflowExecutionRetentionPeriodInDays="2" + ) + client.register_workflow_type( + domain="test-domain", + name="test-workflow", + version="v1.0", + description="Test workflow.", + defaultTaskStartToCloseTimeout="20", + defaultExecutionStartToCloseTimeout="60", + defaultTaskList={"name": "foo"}, + defaultTaskPriority="-2", + defaultChildPolicy="ABANDON", + defaultLambdaRole="arn:bar", + ) + + resp = client.describe_workflow_type( + domain="test-domain", workflowType={"name": "test-workflow", "version": "v1.0"} + ) + resp["typeInfo"]["workflowType"]["name"].should.equal("test-workflow") + resp["typeInfo"]["workflowType"]["version"].should.equal("v1.0") + resp["typeInfo"]["status"].should.equal("REGISTERED") + resp["typeInfo"]["description"].should.equal("Test workflow.") + resp["configuration"]["defaultTaskStartToCloseTimeout"].should.equal("20") + resp["configuration"]["defaultExecutionStartToCloseTimeout"].should.equal("60") + resp["configuration"]["defaultTaskList"]["name"].should.equal("foo") + resp["configuration"]["defaultTaskPriority"].should.equal("-2") + resp["configuration"]["defaultChildPolicy"].should.equal("ABANDON") + resp["configuration"]["defaultLambdaRole"].should.equal("arn:bar") + + @mock_swf_deprecated def test_describe_non_existent_workflow_type(): conn = boto.connect_swf("the_key", "the_secret") diff --git a/tests/test_swf/test_utils.py b/tests/test_swf/test_utils.py index 328342bbe..143804ca9 100644 --- a/tests/test_swf/test_utils.py +++ b/tests/test_swf/test_utils.py @@ -1,9 +1,9 @@ -import sure # noqa - -from moto.swf.utils import decapitalize - - -def test_decapitalize(): - cases = {"fooBar": "fooBar", "FooBar": "fooBar", "FOO BAR": "fOO BAR"} - for before, after in cases.items(): - decapitalize(before).should.equal(after) +import sure # noqa + +from moto.swf.utils import decapitalize + + +def test_decapitalize(): + cases = {"fooBar": 
"fooBar", "FooBar": "fooBar", "FOO BAR": "fOO BAR"} + for before, after in cases.items(): + decapitalize(before).should.equal(after) diff --git a/tests/test_utilities/test_tagging_service.py b/tests/test_utilities/test_tagging_service.py new file mode 100644 index 000000000..249e903fe --- /dev/null +++ b/tests/test_utilities/test_tagging_service.py @@ -0,0 +1,79 @@ +import sure + +from moto.utilities.tagging_service import TaggingService + + +def test_list_empty(): + svc = TaggingService() + result = svc.list_tags_for_resource("test") + + {"Tags": []}.should.be.equal(result) + + +def test_create_tag(): + svc = TaggingService("TheTags", "TagKey", "TagValue") + tags = [{"TagKey": "key_key", "TagValue": "value_value"}] + svc.tag_resource("arn", tags) + actual = svc.list_tags_for_resource("arn") + expected = {"TheTags": [{"TagKey": "key_key", "TagValue": "value_value"}]} + + expected.should.be.equal(actual) + + +def test_create_tag_without_value(): + svc = TaggingService() + tags = [{"Key": "key_key"}] + svc.tag_resource("arn", tags) + actual = svc.list_tags_for_resource("arn") + expected = {"Tags": [{"Key": "key_key", "Value": None}]} + + expected.should.be.equal(actual) + + +def test_delete_tag_using_names(): + svc = TaggingService() + tags = [{"Key": "key_key", "Value": "value_value"}] + svc.tag_resource("arn", tags) + svc.untag_resource_using_names("arn", ["key_key"]) + result = svc.list_tags_for_resource("arn") + + {"Tags": []}.should.be.equal(result) + + +def test_delete_all_tags_for_resource(): + svc = TaggingService() + tags = [{"Key": "key_key", "Value": "value_value"}] + tags2 = [{"Key": "key_key2", "Value": "value_value2"}] + svc.tag_resource("arn", tags) + svc.tag_resource("arn", tags2) + svc.delete_all_tags_for_resource("arn") + result = svc.list_tags_for_resource("arn") + + {"Tags": []}.should.be.equal(result) + + +def test_list_empty_delete(): + svc = TaggingService() + svc.untag_resource_using_names("arn", ["key_key"]) + result = 
svc.list_tags_for_resource("arn") + + {"Tags": []}.should.be.equal(result) + + +def test_delete_tag_using_tags(): + svc = TaggingService() + tags = [{"Key": "key_key", "Value": "value_value"}] + svc.tag_resource("arn", tags) + svc.untag_resource_using_tags("arn", tags) + result = svc.list_tags_for_resource("arn") + + {"Tags": []}.should.be.equal(result) + + +def test_extract_tag_names(): + svc = TaggingService() + tags = [{"Key": "key1", "Value": "value1"}, {"Key": "key2", "Value": "value2"}] + actual = svc.extract_tag_names(tags) + expected = ["key1", "key2"] + + expected.should.be.equal(actual)