diff --git a/.gitignore b/.gitignore index 7f57e98e9..efb489651 100644 --- a/.gitignore +++ b/.gitignore @@ -15,4 +15,6 @@ python_env .ropeproject/ .pytest_cache/ venv/ - +.python-version +.vscode/ +tests/file.tmp diff --git a/.travis.yml b/.travis.yml index 3a5de0fa2..5bc9779f3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,3 +1,4 @@ +dist: xenial language: python sudo: false services: @@ -5,26 +6,12 @@ services: python: - 2.7 - 3.6 + - 3.7 env: - TEST_SERVER_MODE=false - TEST_SERVER_MODE=true -# Due to incomplete Python 3.7 support on Travis CI ( -# https://github.com/travis-ci/travis-ci/issues/9815), -# using a matrix is necessary -matrix: - include: - - python: 3.7 - env: TEST_SERVER_MODE=false - dist: xenial - sudo: true - - python: 3.7 - env: TEST_SERVER_MODE=true - dist: xenial - sudo: true before_install: - export BOTO_CONFIG=/dev/null - - export AWS_SECRET_ACCESS_KEY=foobar_secret - - export AWS_ACCESS_KEY_ID=foobar_key install: # We build moto first so the docker container doesn't try to compile it as well, also note we don't use # -d for docker run so the logs show up in travis diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index 8ac5f8e5e..ba91eddbd 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -1,4 +1,3 @@ - ## acm - 41% implemented - [X] add_tags_to_certificate - [X] delete_certificate diff --git a/Makefile b/Makefile index f224d7091..de08c6f74 100644 --- a/Makefile +++ b/Makefile @@ -19,6 +19,7 @@ test: lint rm -f .coverage rm -rf cover @nosetests -sv --with-coverage --cover-html ./tests/ $(TEST_EXCLUDE) + test_server: @TEST_SERVER_MODE=true nosetests -sv --with-coverage --cover-html ./tests/ diff --git a/README.md b/README.md index 791226d6b..56f73e28e 100644 --- a/README.md +++ b/README.md @@ -2,8 +2,8 @@ [![Join the chat at 
https://gitter.im/awsmoto/Lobby](https://badges.gitter.im/awsmoto/Lobby.svg)](https://gitter.im/awsmoto/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -[![Build Status](https://travis-ci.org/spulec/moto.png?branch=master)](https://travis-ci.org/spulec/moto) -[![Coverage Status](https://coveralls.io/repos/spulec/moto/badge.png?branch=master)](https://coveralls.io/r/spulec/moto) +[![Build Status](https://travis-ci.org/spulec/moto.svg?branch=master)](https://travis-ci.org/spulec/moto) +[![Coverage Status](https://coveralls.io/repos/spulec/moto/badge.svg?branch=master)](https://coveralls.io/r/spulec/moto) [![Docs](https://readthedocs.org/projects/pip/badge/?version=stable)](http://docs.getmoto.org) # In a nutshell @@ -70,10 +70,12 @@ It gets even better! Moto isn't just for Python code and it isn't just for S3. L |------------------------------------------------------------------------------| | CloudwatchEvents | @mock_events | all endpoints done | |------------------------------------------------------------------------------| -| Cognito Identity | @mock_cognitoidentity| basic endpoints done | +| Cognito Identity | @mock_cognitoidentity| basic endpoints done | |------------------------------------------------------------------------------| | Cognito Identity Provider | @mock_cognitoidp| basic endpoints done | |------------------------------------------------------------------------------| +| Config | @mock_config | basic endpoints done | +|------------------------------------------------------------------------------| | Data Pipeline | @mock_datapipeline| basic endpoints done | |------------------------------------------------------------------------------| | DynamoDB | @mock_dynamodb | core endpoints done | @@ -259,7 +261,7 @@ It uses flask, which isn't a default dependency. 
You can install the server 'extra' package with: ```python -pip install moto[server] +pip install "moto[server]" ``` You can then start it running a service: diff --git a/moto/__init__.py b/moto/__init__.py index dd3593d5d..5eeac8471 100644 --- a/moto/__init__.py +++ b/moto/__init__.py @@ -3,7 +3,7 @@ import logging # logging.getLogger('boto').setLevel(logging.CRITICAL) __title__ = 'moto' -__version__ = '1.3.7' +__version__ = '1.3.8' from .acm import mock_acm # flake8: noqa from .apigateway import mock_apigateway, mock_apigateway_deprecated # flake8: noqa @@ -13,9 +13,11 @@ from .cloudformation import mock_cloudformation, mock_cloudformation_deprecated from .cloudwatch import mock_cloudwatch, mock_cloudwatch_deprecated # flake8: noqa from .cognitoidentity import mock_cognitoidentity, mock_cognitoidentity_deprecated # flake8: noqa from .cognitoidp import mock_cognitoidp, mock_cognitoidp_deprecated # flake8: noqa +from .config import mock_config # flake8: noqa from .datapipeline import mock_datapipeline, mock_datapipeline_deprecated # flake8: noqa from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated # flake8: noqa from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated # flake8: noqa +from .dynamodbstreams import mock_dynamodbstreams # flake8: noqa from .ec2 import mock_ec2, mock_ec2_deprecated # flake8: noqa from .ecr import mock_ecr, mock_ecr_deprecated # flake8: noqa from .ecs import mock_ecs, mock_ecs_deprecated # flake8: noqa diff --git a/moto/acm/models.py b/moto/acm/models.py index 39be8945d..15a1bd44d 100644 --- a/moto/acm/models.py +++ b/moto/acm/models.py @@ -243,7 +243,7 @@ class CertBundle(BaseModel): 'KeyAlgorithm': key_algo, 'NotAfter': datetime_to_epoch(self._cert.not_valid_after), 'NotBefore': datetime_to_epoch(self._cert.not_valid_before), - 'Serial': self._cert.serial, + 'Serial': self._cert.serial_number, 'SignatureAlgorithm': self._cert.signature_algorithm_oid._name.upper().replace('ENCRYPTION', ''), 'Status': self.status, # One 
of PENDING_VALIDATION, ISSUED, INACTIVE, EXPIRED, VALIDATION_TIMED_OUT, REVOKED, FAILED. 'Subject': 'CN={0}'.format(self.common_name), diff --git a/moto/autoscaling/models.py b/moto/autoscaling/models.py index 0ebc4c465..27e81a87c 100644 --- a/moto/autoscaling/models.py +++ b/moto/autoscaling/models.py @@ -17,10 +17,12 @@ ASG_NAME_TAG = "aws:autoscaling:groupName" class InstanceState(object): - def __init__(self, instance, lifecycle_state="InService", health_status="Healthy"): + def __init__(self, instance, lifecycle_state="InService", + health_status="Healthy", protected_from_scale_in=False): self.instance = instance self.lifecycle_state = lifecycle_state self.health_status = health_status + self.protected_from_scale_in = protected_from_scale_in class FakeScalingPolicy(BaseModel): @@ -152,7 +154,8 @@ class FakeAutoScalingGroup(BaseModel): min_size, launch_config_name, vpc_zone_identifier, default_cooldown, health_check_period, health_check_type, load_balancers, target_group_arns, placement_group, termination_policies, - autoscaling_backend, tags): + autoscaling_backend, tags, + new_instances_protected_from_scale_in=False): self.autoscaling_backend = autoscaling_backend self.name = name @@ -178,6 +181,7 @@ class FakeAutoScalingGroup(BaseModel): self.target_group_arns = target_group_arns self.placement_group = placement_group self.termination_policies = termination_policies + self.new_instances_protected_from_scale_in = new_instances_protected_from_scale_in self.suspended_processes = [] self.instance_states = [] @@ -210,6 +214,8 @@ class FakeAutoScalingGroup(BaseModel): placement_group=None, termination_policies=properties.get("TerminationPolicies", []), tags=properties.get("Tags", []), + new_instances_protected_from_scale_in=properties.get( + "NewInstancesProtectedFromScaleIn", False) ) return group @@ -238,7 +244,8 @@ class FakeAutoScalingGroup(BaseModel): def update(self, availability_zones, desired_capacity, max_size, min_size, launch_config_name, 
vpc_zone_identifier, default_cooldown, health_check_period, health_check_type, - placement_group, termination_policies): + placement_group, termination_policies, + new_instances_protected_from_scale_in=None): if availability_zones: self.availability_zones = availability_zones if max_size is not None: @@ -256,6 +263,8 @@ class FakeAutoScalingGroup(BaseModel): self.health_check_period = health_check_period if health_check_type is not None: self.health_check_type = health_check_type + if new_instances_protected_from_scale_in is not None: + self.new_instances_protected_from_scale_in = new_instances_protected_from_scale_in if desired_capacity is not None: self.set_desired_capacity(desired_capacity) @@ -280,12 +289,16 @@ class FakeAutoScalingGroup(BaseModel): else: # Need to remove some instances count_to_remove = curr_instance_count - self.desired_capacity - instances_to_remove = self.instance_states[:count_to_remove] - instance_ids_to_remove = [ - instance.instance.id for instance in instances_to_remove] - self.autoscaling_backend.ec2_backend.terminate_instances( - instance_ids_to_remove) - self.instance_states = self.instance_states[count_to_remove:] + instances_to_remove = [ # only remove unprotected + state for state in self.instance_states + if not state.protected_from_scale_in + ][:count_to_remove] + if instances_to_remove: # just in case not instances to remove + instance_ids_to_remove = [ + instance.instance.id for instance in instances_to_remove] + self.autoscaling_backend.ec2_backend.terminate_instances( + instance_ids_to_remove) + self.instance_states = list(set(self.instance_states) - set(instances_to_remove)) def get_propagated_tags(self): propagated_tags = {} @@ -310,7 +323,10 @@ class FakeAutoScalingGroup(BaseModel): ) for instance in reservation.instances: instance.autoscaling_group = self - self.instance_states.append(InstanceState(instance)) + self.instance_states.append(InstanceState( + instance, + 
protected_from_scale_in=self.new_instances_protected_from_scale_in, + )) def append_target_groups(self, target_group_arns): append = [x for x in target_group_arns if x not in self.target_group_arns] @@ -372,7 +388,8 @@ class AutoScalingBackend(BaseBackend): default_cooldown, health_check_period, health_check_type, load_balancers, target_group_arns, placement_group, - termination_policies, tags): + termination_policies, tags, + new_instances_protected_from_scale_in=False): def make_int(value): return int(value) if value is not None else value @@ -403,6 +420,7 @@ class AutoScalingBackend(BaseBackend): termination_policies=termination_policies, autoscaling_backend=self, tags=tags, + new_instances_protected_from_scale_in=new_instances_protected_from_scale_in, ) self.autoscaling_groups[name] = group @@ -415,12 +433,14 @@ class AutoScalingBackend(BaseBackend): launch_config_name, vpc_zone_identifier, default_cooldown, health_check_period, health_check_type, placement_group, - termination_policies): + termination_policies, + new_instances_protected_from_scale_in=None): group = self.autoscaling_groups[name] group.update(availability_zones, desired_capacity, max_size, min_size, launch_config_name, vpc_zone_identifier, default_cooldown, health_check_period, health_check_type, - placement_group, termination_policies) + placement_group, termination_policies, + new_instances_protected_from_scale_in=new_instances_protected_from_scale_in) return group def describe_auto_scaling_groups(self, names): @@ -448,7 +468,13 @@ class AutoScalingBackend(BaseBackend): raise ResourceContentionError else: group.desired_capacity = original_size + len(instance_ids) - new_instances = [InstanceState(self.ec2_backend.get_instance(x)) for x in instance_ids] + new_instances = [ + InstanceState( + self.ec2_backend.get_instance(x), + protected_from_scale_in=group.new_instances_protected_from_scale_in, + ) + for x in instance_ids + ] for instance in new_instances: 
self.ec2_backend.create_tags([instance.instance.id], {ASG_NAME_TAG: group.name}) group.instance_states.extend(new_instances) @@ -626,6 +652,13 @@ class AutoScalingBackend(BaseBackend): group = self.autoscaling_groups[group_name] group.suspended_processes = scaling_processes or [] + def set_instance_protection(self, group_name, instance_ids, protected_from_scale_in): + group = self.autoscaling_groups[group_name] + protected_instances = [ + x for x in group.instance_states if x.instance.id in instance_ids] + for instance in protected_instances: + instance.protected_from_scale_in = protected_from_scale_in + autoscaling_backends = {} for region, ec2_backend in ec2_backends.items(): diff --git a/moto/autoscaling/responses.py b/moto/autoscaling/responses.py index 5586c51dd..6a7913021 100644 --- a/moto/autoscaling/responses.py +++ b/moto/autoscaling/responses.py @@ -85,6 +85,8 @@ class AutoScalingResponse(BaseResponse): termination_policies=self._get_multi_param( 'TerminationPolicies.member'), tags=self._get_list_prefix('Tags.member'), + new_instances_protected_from_scale_in=self._get_bool_param( + 'NewInstancesProtectedFromScaleIn', False) ) template = self.response_template(CREATE_AUTOSCALING_GROUP_TEMPLATE) return template.render() @@ -192,6 +194,8 @@ class AutoScalingResponse(BaseResponse): placement_group=self._get_param('PlacementGroup'), termination_policies=self._get_multi_param( 'TerminationPolicies.member'), + new_instances_protected_from_scale_in=self._get_bool_param( + 'NewInstancesProtectedFromScaleIn', None) ) template = self.response_template(UPDATE_AUTOSCALING_GROUP_TEMPLATE) return template.render() @@ -290,6 +294,15 @@ class AutoScalingResponse(BaseResponse): template = self.response_template(SUSPEND_PROCESSES_TEMPLATE) return template.render() + def set_instance_protection(self): + group_name = self._get_param('AutoScalingGroupName') + instance_ids = self._get_multi_param('InstanceIds.member') + protected_from_scale_in = 
self._get_bool_param('ProtectedFromScaleIn') + self.autoscaling_backend.set_instance_protection( + group_name, instance_ids, protected_from_scale_in) + template = self.response_template(SET_INSTANCE_PROTECTION_TEMPLATE) + return template.render() + CREATE_LAUNCH_CONFIGURATION_TEMPLATE = """ @@ -490,6 +503,7 @@ DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE = """{{ instance_state.instance.id }} {{ group.launch_config_name }} {{ instance_state.lifecycle_state }} + {{ instance_state.protected_from_scale_in|string|lower }} {% endfor %} @@ -508,6 +522,15 @@ DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE = """ {% endif %} + {% if group.target_group_arns %} + + {% for target_group_arn in group.target_group_arns %} + {{ target_group_arn }} + {% endfor %} + + {% else %} + + {% endif %} {{ group.min_size }} {% if group.vpc_zone_identifier %} {{ group.vpc_zone_identifier }} @@ -530,6 +553,7 @@ DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE = """{{ group.placement_group }} {% endif %} + {{ group.new_instances_protected_from_scale_in|string|lower }} {% endfor %} @@ -565,6 +589,7 @@ DESCRIBE_AUTOSCALING_INSTANCES_TEMPLATE = """{{ instance_state.instance.id }} {{ instance_state.instance.autoscaling_group.launch_config_name }} {{ instance_state.lifecycle_state }} + {{ instance_state.protected_from_scale_in|string|lower }} {% endfor %} @@ -668,3 +693,10 @@ SET_INSTANCE_HEALTH_TEMPLATE = """ + + +{{ requestid }} + +""" diff --git a/moto/awslambda/models.py b/moto/awslambda/models.py index b11bde042..9fc41c11e 100644 --- a/moto/awslambda/models.py +++ b/moto/awslambda/models.py @@ -386,7 +386,7 @@ class LambdaFunction(BaseModel): 'Role': properties['Role'], 'Runtime': properties['Runtime'], } - optional_properties = 'Description MemorySize Publish Timeout VpcConfig'.split() + optional_properties = 'Description MemorySize Publish Timeout VpcConfig Environment'.split() # NOTE: Not doing `properties.get(k, DEFAULT)` to avoid duplicating the # default logic for prop in optional_properties: @@ -500,6 +500,11 @@ class 
LambdaStorage(object): except ValueError: return self._functions[name]['latest'] + def list_versions_by_function(self, name): + if name not in self._functions: + return None + return [self._functions[name]['latest']] + def get_arn(self, arn): return self._arns.get(arn, None) @@ -607,6 +612,9 @@ class LambdaBackend(BaseBackend): def get_function(self, function_name, qualifier=None): return self._lambdas.get_function(function_name, qualifier) + def list_versions_by_function(self, function_name): + return self._lambdas.list_versions_by_function(function_name) + def get_function_by_arn(self, function_arn): return self._lambdas.get_arn(function_arn) diff --git a/moto/awslambda/responses.py b/moto/awslambda/responses.py index 1a9a4df83..1c43ef84b 100644 --- a/moto/awslambda/responses.py +++ b/moto/awslambda/responses.py @@ -52,7 +52,11 @@ class LambdaResponse(BaseResponse): self.setup_class(request, full_url, headers) if request.method == 'GET': # This is ListVersionByFunction - raise ValueError("Cannot handle request") + + path = request.path if hasattr(request, 'path') else path_url(request.url) + function_name = path.split('/')[-2] + return self._list_versions_by_function(function_name) + elif request.method == 'POST': return self._publish_function(request, full_url, headers) else: @@ -151,6 +155,19 @@ class LambdaResponse(BaseResponse): return 200, {}, json.dumps(result) + def _list_versions_by_function(self, function_name): + result = { + 'Versions': [] + } + + functions = self.lambda_backend.list_versions_by_function(function_name) + if functions: + for fn in functions: + json_data = fn.get_configuration() + result['Versions'].append(json_data) + + return 200, {}, json.dumps(result) + def _create_function(self, request, full_url, headers): try: fn = self.lambda_backend.create_function(self.json_body) @@ -166,7 +183,7 @@ class LambdaResponse(BaseResponse): fn = self.lambda_backend.publish_function(function_name) if fn: config = fn.get_configuration() - return 200, 
{}, json.dumps(config) + return 201, {}, json.dumps(config) else: return 404, {}, "{}" diff --git a/moto/backends.py b/moto/backends.py index d95424385..90cc803a7 100644 --- a/moto/backends.py +++ b/moto/backends.py @@ -12,6 +12,7 @@ from moto.core import moto_api_backends from moto.datapipeline import datapipeline_backends from moto.dynamodb import dynamodb_backends from moto.dynamodb2 import dynamodb_backends2 +from moto.dynamodbstreams import dynamodbstreams_backends from moto.ec2 import ec2_backends from moto.ecr import ecr_backends from moto.ecs import ecs_backends @@ -45,7 +46,7 @@ from moto.iot import iot_backends from moto.iotdata import iotdata_backends from moto.batch import batch_backends from moto.resourcegroupstaggingapi import resourcegroupstaggingapi_backends - +from moto.config import config_backends BACKENDS = { 'acm': acm_backends, @@ -56,9 +57,11 @@ BACKENDS = { 'cloudwatch': cloudwatch_backends, 'cognito-identity': cognitoidentity_backends, 'cognito-idp': cognitoidp_backends, + 'config': config_backends, 'datapipeline': datapipeline_backends, 'dynamodb': dynamodb_backends, 'dynamodb2': dynamodb_backends2, + 'dynamodbstreams': dynamodbstreams_backends, 'ec2': ec2_backends, 'ecr': ecr_backends, 'ecs': ecs_backends, diff --git a/moto/batch/responses.py b/moto/batch/responses.py index e626b7d4c..7fb606184 100644 --- a/moto/batch/responses.py +++ b/moto/batch/responses.py @@ -27,7 +27,7 @@ class BatchResponse(BaseResponse): elif not hasattr(self, '_json'): try: self._json = json.loads(self.body) - except json.JSONDecodeError: + except ValueError: print() return self._json diff --git a/moto/cloudformation/models.py b/moto/cloudformation/models.py index e5ab7255d..01e3113dd 100644 --- a/moto/cloudformation/models.py +++ b/moto/cloudformation/models.py @@ -1,5 +1,5 @@ from __future__ import unicode_literals -from datetime import datetime +from datetime import datetime, timedelta import json import yaml import uuid @@ -12,11 +12,156 @@ from .parsing 
import ResourceMap, OutputMap from .utils import ( generate_changeset_id, generate_stack_id, + generate_stackset_arn, + generate_stackset_id, yaml_tag_constructor, + validate_template_cfn_lint, ) from .exceptions import ValidationError +class FakeStackSet(BaseModel): + + def __init__(self, stackset_id, name, template, region='us-east-1', + status='ACTIVE', description=None, parameters=None, tags=None, + admin_role='AWSCloudFormationStackSetAdministrationRole', + execution_role='AWSCloudFormationStackSetExecutionRole'): + self.id = stackset_id + self.arn = generate_stackset_arn(stackset_id, region) + self.name = name + self.template = template + self.description = description + self.parameters = parameters + self.tags = tags + self.admin_role = admin_role + self.execution_role = execution_role + self.status = status + self.instances = FakeStackInstances(parameters, self.id, self.name) + self.stack_instances = self.instances.stack_instances + self.operations = [] + + def _create_operation(self, operation_id, action, status, accounts=[], regions=[]): + operation = { + 'OperationId': str(operation_id), + 'Action': action, + 'Status': status, + 'CreationTimestamp': datetime.now(), + 'EndTimestamp': datetime.now() + timedelta(minutes=2), + 'Instances': [{account: region} for account in accounts for region in regions], + } + + self.operations += [operation] + return operation + + def get_operation(self, operation_id): + for operation in self.operations: + if operation_id == operation['OperationId']: + return operation + raise ValidationError(operation_id) + + def update_operation(self, operation_id, status): + operation = self.get_operation(operation_id) + operation['Status'] = status + return operation_id + + def delete(self): + self.status = 'DELETED' + + def update(self, template, description, parameters, tags, admin_role, + execution_role, accounts, regions, operation_id=None): + if not operation_id: + operation_id = uuid.uuid4() + + self.template = template if 
template else self.template + self.description = description if description is not None else self.description + self.parameters = parameters if parameters else self.parameters + self.tags = tags if tags else self.tags + self.admin_role = admin_role if admin_role else self.admin_role + self.execution_role = execution_role if execution_role else self.execution_role + + if accounts and regions: + self.update_instances(accounts, regions, self.parameters) + + operation = self._create_operation(operation_id=operation_id, + action='UPDATE', status='SUCCEEDED', accounts=accounts, + regions=regions) + return operation + + def create_stack_instances(self, accounts, regions, parameters, operation_id=None): + if not operation_id: + operation_id = uuid.uuid4() + if not parameters: + parameters = self.parameters + + self.instances.create_instances(accounts, regions, parameters, operation_id) + self._create_operation(operation_id=operation_id, action='CREATE', + status='SUCCEEDED', accounts=accounts, regions=regions) + + def delete_stack_instances(self, accounts, regions, operation_id=None): + if not operation_id: + operation_id = uuid.uuid4() + + self.instances.delete(accounts, regions) + + operation = self._create_operation(operation_id=operation_id, action='DELETE', + status='SUCCEEDED', accounts=accounts, regions=regions) + return operation + + def update_instances(self, accounts, regions, parameters, operation_id=None): + if not operation_id: + operation_id = uuid.uuid4() + + self.instances.update(accounts, regions, parameters) + operation = self._create_operation(operation_id=operation_id, + action='UPDATE', status='SUCCEEDED', accounts=accounts, + regions=regions) + return operation + + +class FakeStackInstances(BaseModel): + def __init__(self, parameters, stackset_id, stackset_name): + self.parameters = parameters if parameters else {} + self.stackset_id = stackset_id + self.stack_name = "StackSet-{}".format(stackset_id) + self.stackset_name = stackset_name + 
self.stack_instances = [] + + def create_instances(self, accounts, regions, parameters, operation_id): + new_instances = [] + for region in regions: + for account in accounts: + instance = { + 'StackId': generate_stack_id(self.stack_name, region, account), + 'StackSetId': self.stackset_id, + 'Region': region, + 'Account': account, + 'Status': "CURRENT", + 'ParameterOverrides': parameters if parameters else [], + } + new_instances.append(instance) + self.stack_instances += new_instances + return new_instances + + def update(self, accounts, regions, parameters): + for account in accounts: + for region in regions: + instance = self.get_instance(account, region) + if parameters: + instance['ParameterOverrides'] = parameters + else: + instance['ParameterOverrides'] = [] + + def delete(self, accounts, regions): + self.stack_instances = [ + instance for instance in self.stack_instances + if not (instance['Region'] in regions and instance['Account'] in accounts)] + + def get_instance(self, account, region): + for i, instance in enumerate(self.stack_instances): + if instance['Region'] == region and instance['Account'] == account: + return self.stack_instances[i] + + class FakeStack(BaseModel): def __init__(self, stack_id, name, template, parameters, region_name, notification_arns=None, tags=None, role_arn=None, cross_stack_resources=None, create_change_set=False): @@ -84,9 +229,9 @@ class FakeStack(BaseModel): def _parse_template(self): yaml.add_multi_constructor('', yaml_tag_constructor) try: - self.template_dict = yaml.load(self.template) + self.template_dict = yaml.load(self.template, Loader=yaml.Loader) except yaml.parser.ParserError: - self.template_dict = json.loads(self.template) + self.template_dict = json.loads(self.template) @property def stack_parameters(self): @@ -126,6 +271,49 @@ class FakeStack(BaseModel): self.status = "DELETE_COMPLETE" +class FakeChange(BaseModel): + + def __init__(self, action, logical_resource_id, resource_type): + 
self.action = action + self.logical_resource_id = logical_resource_id + self.resource_type = resource_type + + +class FakeChangeSet(FakeStack): + + def __init__(self, stack_id, stack_name, stack_template, change_set_id, change_set_name, template, parameters, region_name, notification_arns=None, tags=None, role_arn=None, cross_stack_resources=None): + super(FakeChangeSet, self).__init__( + stack_id, + stack_name, + stack_template, + parameters, + region_name, + notification_arns=notification_arns, + tags=tags, + role_arn=role_arn, + cross_stack_resources=cross_stack_resources, + create_change_set=True, + ) + self.stack_name = stack_name + self.change_set_id = change_set_id + self.change_set_name = change_set_name + self.changes = self.diff(template=template, parameters=parameters) + + def diff(self, template, parameters=None): + self.template = template + self._parse_template() + changes = [] + resources_by_action = self.resource_map.diff(self.template_dict, parameters) + for action, resources in resources_by_action.items(): + for resource_name, resource in resources.items(): + changes.append(FakeChange( + action=action, + logical_resource_id=resource_name, + resource_type=resource['ResourceType'], + )) + return changes + + class FakeEvent(BaseModel): def __init__(self, stack_id, stack_name, logical_resource_id, physical_resource_id, resource_type, resource_status, resource_status_reason=None, resource_properties=None): @@ -145,10 +333,72 @@ class CloudFormationBackend(BaseBackend): def __init__(self): self.stacks = OrderedDict() + self.stacksets = OrderedDict() self.deleted_stacks = {} self.exports = OrderedDict() self.change_sets = OrderedDict() + def create_stack_set(self, name, template, parameters, tags=None, description=None, region='us-east-1', admin_role=None, execution_role=None): + stackset_id = generate_stackset_id(name) + new_stackset = FakeStackSet( + stackset_id=stackset_id, + name=name, + template=template, + parameters=parameters, + 
description=description, + tags=tags, + admin_role=admin_role, + execution_role=execution_role, + ) + self.stacksets[stackset_id] = new_stackset + return new_stackset + + def get_stack_set(self, name): + stacksets = self.stacksets.keys() + for stackset in stacksets: + if self.stacksets[stackset].name == name: + return self.stacksets[stackset] + raise ValidationError(name) + + def delete_stack_set(self, name): + stacksets = self.stacksets.keys() + for stackset in stacksets: + if self.stacksets[stackset].name == name: + self.stacksets[stackset].delete() + + def create_stack_instances(self, stackset_name, accounts, regions, parameters, operation_id=None): + stackset = self.get_stack_set(stackset_name) + + stackset.create_stack_instances( + accounts=accounts, + regions=regions, + parameters=parameters, + operation_id=operation_id, + ) + return stackset + + def update_stack_set(self, stackset_name, template=None, description=None, + parameters=None, tags=None, admin_role=None, execution_role=None, + accounts=None, regions=None, operation_id=None): + stackset = self.get_stack_set(stackset_name) + update = stackset.update( + template=template, + description=description, + parameters=parameters, + tags=tags, + admin_role=admin_role, + execution_role=execution_role, + accounts=accounts, + regions=regions, + operation_id=operation_id + ) + return update + + def delete_stack_instances(self, stackset_name, accounts, regions, operation_id=None): + stackset = self.get_stack_set(stackset_name) + stackset.delete_stack_instances(accounts, regions, operation_id) + return stackset + def create_stack(self, name, template, parameters, region_name, notification_arns=None, tags=None, role_arn=None, create_change_set=False): stack_id = generate_stack_id(name) new_stack = FakeStack( @@ -170,24 +420,62 @@ class CloudFormationBackend(BaseBackend): return new_stack def create_change_set(self, stack_name, change_set_name, template, parameters, region_name, change_set_type, 
notification_arns=None, tags=None, role_arn=None): + stack_id = None + stack_template = None if change_set_type == 'UPDATE': stacks = self.stacks.values() stack = None for s in stacks: if s.name == stack_name: stack = s + stack_id = stack.stack_id + stack_template = stack.template if stack is None: raise ValidationError(stack_name) - else: - stack = self.create_stack(stack_name, template, parameters, - region_name, notification_arns, tags, - role_arn, create_change_set=True) + stack_id = generate_stack_id(stack_name) + stack_template = template + change_set_id = generate_changeset_id(change_set_name, region_name) - self.stacks[change_set_name] = {'Id': change_set_id, - 'StackId': stack.stack_id} - self.change_sets[change_set_id] = stack - return change_set_id, stack.stack_id + new_change_set = FakeChangeSet( + stack_id=stack_id, + stack_name=stack_name, + stack_template=stack_template, + change_set_id=change_set_id, + change_set_name=change_set_name, + template=template, + parameters=parameters, + region_name=region_name, + notification_arns=notification_arns, + tags=tags, + role_arn=role_arn, + cross_stack_resources=self.exports + ) + self.change_sets[change_set_id] = new_change_set + self.stacks[stack_id] = new_change_set + return change_set_id, stack_id + + def delete_change_set(self, change_set_name, stack_name=None): + if change_set_name in self.change_sets: + # This means arn was passed in + del self.change_sets[change_set_name] + else: + for cs in self.change_sets: + if self.change_sets[cs].change_set_name == change_set_name: + del self.change_sets[cs] + + def describe_change_set(self, change_set_name, stack_name=None): + change_set = None + if change_set_name in self.change_sets: + # This means arn was passed in + change_set = self.change_sets[change_set_name] + else: + for cs in self.change_sets: + if self.change_sets[cs].change_set_name == change_set_name: + change_set = self.change_sets[cs] + if change_set is None: + raise 
ValidationError(change_set_name) + return change_set def execute_change_set(self, change_set_name, stack_name=None): stack = None @@ -196,7 +484,7 @@ class CloudFormationBackend(BaseBackend): stack = self.change_sets[change_set_name] else: for cs in self.change_sets: - if self.change_sets[cs].name == change_set_name: + if self.change_sets[cs].change_set_name == change_set_name: stack = self.change_sets[cs] if stack is None: raise ValidationError(stack_name) @@ -222,8 +510,15 @@ class CloudFormationBackend(BaseBackend): else: return list(stacks) + def list_change_sets(self): + return self.change_sets.values() + def list_stacks(self): - return self.stacks.values() + return [ + v for v in self.stacks.values() + ] + [ + v for v in self.deleted_stacks.values() + ] def get_stack(self, name_or_stack_id): all_stacks = dict(self.deleted_stacks, **self.stacks) @@ -270,6 +565,9 @@ class CloudFormationBackend(BaseBackend): next_token = str(token + 100) if len(all_exports) > token + 100 else None return exports, next_token + def validate_template(self, template): + return validate_template_cfn_lint(template) + def _validate_export_uniqueness(self, stack): new_stack_export_names = [x.name for x in stack.exports] export_names = self.exports.keys() diff --git a/moto/cloudformation/parsing.py b/moto/cloudformation/parsing.py index 35b05d101..0be68944b 100644 --- a/moto/cloudformation/parsing.py +++ b/moto/cloudformation/parsing.py @@ -465,36 +465,70 @@ class ResourceMap(collections.Mapping): ec2_models.ec2_backends[self._region_name].create_tags( [self[resource].physical_resource_id], self.tags) - def update(self, template, parameters=None): + def diff(self, template, parameters=None): if parameters: self.input_parameters = parameters self.load_mapping() self.load_parameters() self.load_conditions() + old_template = self._resource_json_map + new_template = template['Resources'] + + resource_names_by_action = { + 'Add': set(new_template) - set(old_template), + 'Modify': set(name for 
name in new_template if name in old_template and new_template[ + name] != old_template[name]), + 'Remove': set(old_template) - set(new_template) + } + resources_by_action = { + 'Add': {}, + 'Modify': {}, + 'Remove': {}, + } + + for resource_name in resource_names_by_action['Add']: + resources_by_action['Add'][resource_name] = { + 'LogicalResourceId': resource_name, + 'ResourceType': new_template[resource_name]['Type'] + } + + for resource_name in resource_names_by_action['Modify']: + resources_by_action['Modify'][resource_name] = { + 'LogicalResourceId': resource_name, + 'ResourceType': new_template[resource_name]['Type'] + } + + for resource_name in resource_names_by_action['Remove']: + resources_by_action['Remove'][resource_name] = { + 'LogicalResourceId': resource_name, + 'ResourceType': old_template[resource_name]['Type'] + } + + return resources_by_action + + def update(self, template, parameters=None): + resources_by_action = self.diff(template, parameters) + old_template = self._resource_json_map new_template = template['Resources'] self._resource_json_map = new_template - new_resource_names = set(new_template) - set(old_template) - for resource_name in new_resource_names: + for resource_name, resource in resources_by_action['Add'].items(): resource_json = new_template[resource_name] new_resource = parse_and_create_resource( resource_name, resource_json, self, self._region_name) self._parsed_resources[resource_name] = new_resource - removed_resource_names = set(old_template) - set(new_template) - for resource_name in removed_resource_names: + for resource_name, resource in resources_by_action['Remove'].items(): resource_json = old_template[resource_name] parse_and_delete_resource( resource_name, resource_json, self, self._region_name) self._parsed_resources.pop(resource_name) - resources_to_update = set(name for name in new_template if name in old_template and new_template[ - name] != old_template[name]) tries = 1 - while resources_to_update and tries < 5: - 
for resource_name in resources_to_update.copy(): + while resources_by_action['Modify'] and tries < 5: + for resource_name, resource in resources_by_action['Modify'].copy().items(): resource_json = new_template[resource_name] try: changed_resource = parse_and_update_resource( @@ -505,7 +539,7 @@ class ResourceMap(collections.Mapping): last_exception = e else: self._parsed_resources[resource_name] = changed_resource - resources_to_update.remove(resource_name) + del resources_by_action['Modify'][resource_name] tries += 1 if tries == 5: raise last_exception diff --git a/moto/cloudformation/responses.py b/moto/cloudformation/responses.py index a1295a20d..d1ef5ba8a 100644 --- a/moto/cloudformation/responses.py +++ b/moto/cloudformation/responses.py @@ -1,6 +1,7 @@ from __future__ import unicode_literals import json +import yaml from six.moves.urllib.parse import urlparse from moto.core.responses import BaseResponse @@ -87,7 +88,8 @@ class CloudFormationResponse(BaseResponse): role_arn = self._get_param('RoleARN') update_or_create = self._get_param('ChangeSetType', 'CREATE') parameters_list = self._get_list_prefix("Parameters.member") - tags = {tag[0]: tag[1] for tag in self._get_list_prefix("Tags.member")} + tags = dict((item['key'], item['value']) + for item in self._get_list_prefix("Tags.member")) parameters = {param['parameter_key']: param['parameter_value'] for param in parameters_list} if template_url: @@ -118,6 +120,31 @@ class CloudFormationResponse(BaseResponse): template = self.response_template(CREATE_CHANGE_SET_RESPONSE_TEMPLATE) return template.render(stack_id=stack_id, change_set_id=change_set_id) + def delete_change_set(self): + stack_name = self._get_param('StackName') + change_set_name = self._get_param('ChangeSetName') + + self.cloudformation_backend.delete_change_set(change_set_name=change_set_name, stack_name=stack_name) + if self.request_json: + return json.dumps({ + 'DeleteChangeSetResponse': { + 'DeleteChangeSetResult': {}, + } + }) + else: + 
template = self.response_template(DELETE_CHANGE_SET_RESPONSE_TEMPLATE) + return template.render() + + def describe_change_set(self): + stack_name = self._get_param('StackName') + change_set_name = self._get_param('ChangeSetName') + change_set = self.cloudformation_backend.describe_change_set( + change_set_name=change_set_name, + stack_name=stack_name, + ) + template = self.response_template(DESCRIBE_CHANGE_SET_RESPONSE_TEMPLATE) + return template.render(change_set=change_set) + @amzn_request_id def execute_change_set(self): stack_name = self._get_param('StackName') @@ -185,6 +212,11 @@ class CloudFormationResponse(BaseResponse): template = self.response_template(DESCRIBE_STACK_EVENTS_RESPONSE) return template.render(stack=stack) + def list_change_sets(self): + change_sets = self.cloudformation_backend.list_change_sets() + template = self.response_template(LIST_CHANGE_SETS_RESPONSE) + return template.render(change_sets=change_sets) + def list_stacks(self): stacks = self.cloudformation_backend.list_stacks() template = self.response_template(LIST_STACKS_RESPONSE) @@ -294,6 +326,201 @@ class CloudFormationResponse(BaseResponse): template = self.response_template(LIST_EXPORTS_RESPONSE) return template.render(exports=exports, next_token=next_token) + def validate_template(self): + cfn_lint = self.cloudformation_backend.validate_template(self._get_param('TemplateBody')) + if cfn_lint: + raise ValidationError(cfn_lint[0].message) + description = "" + try: + description = json.loads(self._get_param('TemplateBody'))['Description'] + except (ValueError, KeyError): + pass + try: + description = yaml.load(self._get_param('TemplateBody'))['Description'] + except (yaml.ParserError, KeyError): + pass + template = self.response_template(VALIDATE_STACK_RESPONSE_TEMPLATE) + return template.render(description=description) + + def create_stack_set(self): + stackset_name = self._get_param('StackSetName') + stack_body = self._get_param('TemplateBody') + template_url = 
self._get_param('TemplateURL') + # role_arn = self._get_param('RoleARN') + parameters_list = self._get_list_prefix("Parameters.member") + tags = dict((item['key'], item['value']) + for item in self._get_list_prefix("Tags.member")) + + # Copy-Pasta - Hack dict-comprehension + parameters = dict([ + (parameter['parameter_key'], parameter['parameter_value']) + for parameter + in parameters_list + ]) + if template_url: + stack_body = self._get_stack_from_s3_url(template_url) + + stackset = self.cloudformation_backend.create_stack_set( + name=stackset_name, + template=stack_body, + parameters=parameters, + tags=tags, + # role_arn=role_arn, + ) + if self.request_json: + return json.dumps({ + 'CreateStackSetResponse': { + 'CreateStackSetResult': { + 'StackSetId': stackset.stackset_id, + } + } + }) + else: + template = self.response_template(CREATE_STACK_SET_RESPONSE_TEMPLATE) + return template.render(stackset=stackset) + + def create_stack_instances(self): + stackset_name = self._get_param('StackSetName') + accounts = self._get_multi_param('Accounts.member') + regions = self._get_multi_param('Regions.member') + parameters = self._get_multi_param('ParameterOverrides.member') + self.cloudformation_backend.create_stack_instances(stackset_name, accounts, regions, parameters) + template = self.response_template(CREATE_STACK_INSTANCES_TEMPLATE) + return template.render() + + def delete_stack_set(self): + stackset_name = self._get_param('StackSetName') + self.cloudformation_backend.delete_stack_set(stackset_name) + template = self.response_template(DELETE_STACK_SET_RESPONSE_TEMPLATE) + return template.render() + + def delete_stack_instances(self): + stackset_name = self._get_param('StackSetName') + accounts = self._get_multi_param('Accounts.member') + regions = self._get_multi_param('Regions.member') + operation = self.cloudformation_backend.delete_stack_instances(stackset_name, accounts, regions) + + template = self.response_template(DELETE_STACK_INSTANCES_TEMPLATE) + return 
template.render(operation=operation) + + def describe_stack_set(self): + stackset_name = self._get_param('StackSetName') + stackset = self.cloudformation_backend.get_stack_set(stackset_name) + + if not stackset.admin_role: + stackset.admin_role = 'arn:aws:iam::123456789012:role/AWSCloudFormationStackSetAdministrationRole' + if not stackset.execution_role: + stackset.execution_role = 'AWSCloudFormationStackSetExecutionRole' + + template = self.response_template(DESCRIBE_STACK_SET_RESPONSE_TEMPLATE) + return template.render(stackset=stackset) + + def describe_stack_instance(self): + stackset_name = self._get_param('StackSetName') + account = self._get_param('StackInstanceAccount') + region = self._get_param('StackInstanceRegion') + + instance = self.cloudformation_backend.get_stack_set(stackset_name).instances.get_instance(account, region) + template = self.response_template(DESCRIBE_STACK_INSTANCE_TEMPLATE) + rendered = template.render(instance=instance) + return rendered + + def list_stack_sets(self): + stacksets = self.cloudformation_backend.stacksets + template = self.response_template(LIST_STACK_SETS_TEMPLATE) + return template.render(stacksets=stacksets) + + def list_stack_instances(self): + stackset_name = self._get_param('StackSetName') + stackset = self.cloudformation_backend.get_stack_set(stackset_name) + template = self.response_template(LIST_STACK_INSTANCES_TEMPLATE) + return template.render(stackset=stackset) + + def list_stack_set_operations(self): + stackset_name = self._get_param('StackSetName') + stackset = self.cloudformation_backend.get_stack_set(stackset_name) + template = self.response_template(LIST_STACK_SET_OPERATIONS_RESPONSE_TEMPLATE) + return template.render(stackset=stackset) + + def stop_stack_set_operation(self): + stackset_name = self._get_param('StackSetName') + operation_id = self._get_param('OperationId') + stackset = self.cloudformation_backend.get_stack_set(stackset_name) + stackset.update_operation(operation_id, 'STOPPED') + 
template = self.response_template(STOP_STACK_SET_OPERATION_RESPONSE_TEMPLATE) + return template.render() + + def describe_stack_set_operation(self): + stackset_name = self._get_param('StackSetName') + operation_id = self._get_param('OperationId') + stackset = self.cloudformation_backend.get_stack_set(stackset_name) + operation = stackset.get_operation(operation_id) + template = self.response_template(DESCRIBE_STACKSET_OPERATION_RESPONSE_TEMPLATE) + return template.render(stackset=stackset, operation=operation) + + def list_stack_set_operation_results(self): + stackset_name = self._get_param('StackSetName') + operation_id = self._get_param('OperationId') + stackset = self.cloudformation_backend.get_stack_set(stackset_name) + operation = stackset.get_operation(operation_id) + template = self.response_template(LIST_STACK_SET_OPERATION_RESULTS_RESPONSE_TEMPLATE) + return template.render(operation=operation) + + def update_stack_set(self): + stackset_name = self._get_param('StackSetName') + operation_id = self._get_param('OperationId') + description = self._get_param('Description') + execution_role = self._get_param('ExecutionRoleName') + admin_role = self._get_param('AdministrationRoleARN') + accounts = self._get_multi_param('Accounts.member') + regions = self._get_multi_param('Regions.member') + template_body = self._get_param('TemplateBody') + template_url = self._get_param('TemplateURL') + if template_url: + template_body = self._get_stack_from_s3_url(template_url) + tags = dict((item['key'], item['value']) + for item in self._get_list_prefix("Tags.member")) + parameters_list = self._get_list_prefix("Parameters.member") + parameters = dict([ + (parameter['parameter_key'], parameter['parameter_value']) + for parameter + in parameters_list + ]) + operation = self.cloudformation_backend.update_stack_set( + stackset_name=stackset_name, + template=template_body, + description=description, + parameters=parameters, + tags=tags, + admin_role=admin_role, + 
execution_role=execution_role, + accounts=accounts, + regions=regions, + operation_id=operation_id + ) + + template = self.response_template(UPDATE_STACK_SET_RESPONSE_TEMPLATE) + return template.render(operation=operation) + + def update_stack_instances(self): + stackset_name = self._get_param('StackSetName') + accounts = self._get_multi_param('Accounts.member') + regions = self._get_multi_param('Regions.member') + parameters = self._get_multi_param('ParameterOverrides.member') + operation = self.cloudformation_backend.get_stack_set(stackset_name).update_instances(accounts, regions, parameters) + template = self.response_template(UPDATE_STACK_INSTANCES_RESPONSE_TEMPLATE) + return template.render(operation=operation) + + +VALIDATE_STACK_RESPONSE_TEMPLATE = """ + + + + +{{ description }} + + +""" CREATE_STACK_RESPONSE_TEMPLATE = """ @@ -326,6 +553,66 @@ CREATE_CHANGE_SET_RESPONSE_TEMPLATE = """ """ +DELETE_CHANGE_SET_RESPONSE_TEMPLATE = """ + + + + 3d3200a1-810e-3023-6cc3-example + + +""" + +DESCRIBE_CHANGE_SET_RESPONSE_TEMPLATE = """ + + {{ change_set.change_set_id }} + {{ change_set.change_set_name }} + {{ change_set.stack_id }} + {{ change_set.stack_name }} + {{ change_set.description }} + + {% for param_name, param_value in change_set.stack_parameters.items() %} + + {{ param_name }} + {{ param_value }} + + {% endfor %} + + 2011-05-23T15:47:44Z + {{ change_set.execution_status }} + {{ change_set.status }} + {{ change_set.status_reason }} + {% if change_set.notification_arns %} + + {% for notification_arn in change_set.notification_arns %} + {{ notification_arn }} + {% endfor %} + + {% else %} + + {% endif %} + {% if change_set.role_arn %} + {{ change_set.role_arn }} + {% endif %} + {% if change_set.changes %} + + {% for change in change_set.changes %} + + Resource + + {{ change.action }} + {{ change.logical_resource_id }} + {{ change.resource_type }} + + + {% endfor %} + + {% endif %} + {% if next_token %} + {{ next_token }} + {% endif %} + +""" + 
EXECUTE_CHANGE_SET_RESPONSE_TEMPLATE = """ @@ -451,6 +738,27 @@ DESCRIBE_STACK_EVENTS_RESPONSE = """ + + + {% for change_set in change_sets %} + + {{ change_set.stack_id }} + {{ change_set.stack_name }} + {{ change_set.change_set_id }} + {{ change_set.change_set_name }} + {{ change_set.execution_status }} + {{ change_set.status }} + {{ change_set.status_reason }} + 2011-05-23T15:47:44Z + {{ change_set.description }} + + {% endfor %} + + +""" + + LIST_STACKS_RESPONSE = """ @@ -525,3 +833,236 @@ LIST_EXPORTS_RESPONSE = """ + + {{ stackset.stackset_id }} + + + f457258c-391d-41d1-861f-example + + +""" + +DESCRIBE_STACK_SET_RESPONSE_TEMPLATE = """ + + + + {{ stackset.arn }} + {{ stackset.execution_role }} + {{ stackset.admin_role }} + {{ stackset.id }} + {{ stackset.template }} + {{ stackset.name }} + + {% for param_name, param_value in stackset.parameters.items() %} + + {{ param_name }} + {{ param_value }} + + {% endfor %} + + + {% for tag_key, tag_value in stackset.tags.items() %} + + {{ tag_key }} + {{ tag_value }} + + {% endfor %} + + {{ stackset.status }} + + + + d8b64e11-5332-46e1-9603-example + +""" + +DELETE_STACK_SET_RESPONSE_TEMPLATE = """ + + + c35ec2d0-d69f-4c4d-9bd7-example + +""" + +CREATE_STACK_INSTANCES_TEMPLATE = """ + + 1459ad6d-63cc-4c96-a73e-example + + + 6b29f7e3-69be-4d32-b374-example + + +""" + +LIST_STACK_INSTANCES_TEMPLATE = """ + + + {% for instance in stackset.stack_instances %} + + {{ instance.StackId }} + {{ instance.StackSetId }} + {{ instance.Region }} + {{ instance.Account }} + {{ instance.Status }} + + {% endfor %} + + + + 83c27e73-b498-410f-993c-example + + +""" + +DELETE_STACK_INSTANCES_TEMPLATE = """ + + {{ operation.OperationId }} + + + e5325090-66f6-4ecd-a531-example + + +""" + +DESCRIBE_STACK_INSTANCE_TEMPLATE = """ + + + {{ instance.StackId }} + {{ instance.StackSetId }} + {% if instance.ParameterOverrides %} + + {% for override in instance.ParameterOverrides %} + {% if override['ParameterKey'] or override['ParameterValue'] %} + + 
{{ override.ParameterKey }} + false + {{ override.ParameterValue }} + + {% endif %} + {% endfor %} + + {% else %} + + {% endif %} + {{ instance.Region }} + {{ instance.Account }} + {{ instance.Status }} + + + + c6c7be10-0343-4319-8a25-example + + +""" + +LIST_STACK_SETS_TEMPLATE = """ + + + {% for key, value in stacksets.items() %} + + {{ value.name }} + {{ value.id }} + {{ value.status }} + + {% endfor %} + + + + 4dcacb73-841e-4ed8-b335-example + + +""" + +UPDATE_STACK_INSTANCES_RESPONSE_TEMPLATE = """ + + {{ operation }} + + + bdbf8e94-19b6-4ce4-af85-example + + +""" + +UPDATE_STACK_SET_RESPONSE_TEMPLATE = """ + + {{ operation.OperationId }} + + + adac907b-17e3-43e6-a254-example + + +""" + +LIST_STACK_SET_OPERATIONS_RESPONSE_TEMPLATE = """ + + + {% for operation in stackset.operations %} + + {{ operation.CreationTimestamp }} + {{ operation.OperationId }} + {{ operation.Action }} + {{ operation.EndTimestamp }} + {{ operation.Status }} + + {% endfor %} + + + + 65b9d9be-08bb-4a43-9a21-example + + +""" + +STOP_STACK_SET_OPERATION_RESPONSE_TEMPLATE = """ + + + 2188554a-07c6-4396-b2c5-example + +""" + +DESCRIBE_STACKSET_OPERATION_RESPONSE_TEMPLATE = """ + + + {{ stackset.execution_role }} + arn:aws:iam::123456789012:role/{{ stackset.admin_role }} + {{ stackset.id }} + {{ operation.CreationTimestamp }} + {{ operation.OperationId }} + {{ operation.Action }} + + + + {{ operation.EndTimestamp }} + {{ operation.Status }} + + + + 2edc27b6-9ce2-486a-a192-example + + +""" + +LIST_STACK_SET_OPERATION_RESULTS_RESPONSE_TEMPLATE = """ + + + {% for instance in operation.Instances %} + {% for account, region in instance.items() %} + + + Function not found: arn:aws:lambda:us-west-2:123456789012:function:AWSCloudFormationStackSetAccountGate + SKIPPED + + {{ region }} + {{ account }} + {{ operation.Status }} + + {% endfor %} + {% endfor %} + + + + ac05a9ce-5f98-4197-a29b-example + + +""" diff --git a/moto/cloudformation/utils.py b/moto/cloudformation/utils.py index f3b8874ed..de75d2c15 
100644 --- a/moto/cloudformation/utils.py +++ b/moto/cloudformation/utils.py @@ -3,11 +3,14 @@ import uuid import six import random import yaml +import os + +from cfnlint import decode, core -def generate_stack_id(stack_name): +def generate_stack_id(stack_name, region="us-east-1", account="123456789"): random_id = uuid.uuid4() - return "arn:aws:cloudformation:us-east-1:123456789:stack/{0}/{1}".format(stack_name, random_id) + return "arn:aws:cloudformation:{}:{}:stack/{}/{}".format(region, account, stack_name, random_id) def generate_changeset_id(changeset_name, region_name): @@ -15,6 +18,15 @@ def generate_changeset_id(changeset_name, region_name): return 'arn:aws:cloudformation:{0}:123456789:changeSet/{1}/{2}'.format(region_name, changeset_name, random_id) +def generate_stackset_id(stackset_name): + random_id = uuid.uuid4() + return '{}:{}'.format(stackset_name, random_id) + + +def generate_stackset_arn(stackset_id, region_name): + return 'arn:aws:cloudformation:{}:123456789012:stackset/{}'.format(region_name, stackset_id) + + def random_suffix(): size = 12 chars = list(range(10)) + ['A-Z'] @@ -38,3 +50,33 @@ def yaml_tag_constructor(loader, tag, node): key = 'Fn::{}'.format(tag[1:]) return {key: _f(loader, tag, node)} + + +def validate_template_cfn_lint(template): + + # Save the template to a temporary file -- cfn-lint requires a file + filename = "file.tmp" + with open(filename, "w") as file: + file.write(template) + abs_filename = os.path.abspath(filename) + + # decode handles both yaml and json + template, matches = decode.decode(abs_filename, False) + + # Set cfn-lint to info + core.configure_logging(None) + + # Initialize the ruleset to be applied (no overrules, no excludes) + rules = core.get_rules([], [], []) + + # Use us-east-1 region (spec file) for validation + regions = ['us-east-1'] + + # Process all the rules and gather the errors + matches = core.run_checks( + abs_filename, + template, + rules, + regions) + + return matches diff --git 
a/moto/cognitoidp/exceptions.py b/moto/cognitoidp/exceptions.py index 1f1ec2309..452670213 100644 --- a/moto/cognitoidp/exceptions.py +++ b/moto/cognitoidp/exceptions.py @@ -24,6 +24,16 @@ class UserNotFoundError(BadRequest): }) +class GroupExistsException(BadRequest): + + def __init__(self, message): + super(GroupExistsException, self).__init__() + self.description = json.dumps({ + "message": message, + '__type': 'GroupExistsException', + }) + + class NotAuthorizedError(BadRequest): def __init__(self, message): diff --git a/moto/cognitoidp/models.py b/moto/cognitoidp/models.py index 476d470b9..bdd279ba6 100644 --- a/moto/cognitoidp/models.py +++ b/moto/cognitoidp/models.py @@ -1,6 +1,8 @@ from __future__ import unicode_literals import datetime +import functools +import itertools import json import os import time @@ -11,8 +13,7 @@ from jose import jws from moto.compat import OrderedDict from moto.core import BaseBackend, BaseModel -from .exceptions import NotAuthorizedError, ResourceNotFoundError, UserNotFoundError - +from .exceptions import GroupExistsException, NotAuthorizedError, ResourceNotFoundError, UserNotFoundError UserStatus = { "FORCE_CHANGE_PASSWORD": "FORCE_CHANGE_PASSWORD", @@ -20,6 +21,39 @@ UserStatus = { } +def paginate(limit, start_arg="next_token", limit_arg="max_results"): + """Returns a limited result list, and an offset into list of remaining items + + Takes the next_token, and max_results kwargs given to a function and handles + the slicing of the results. The kwarg `next_token` is the offset into the + list to begin slicing from. 
`max_results` is the size of the result required + + If the max_results is not supplied then the `limit` parameter is used as a + default + + :param limit_arg: the name of argument in the decorated function that + controls amount of items returned + :param start_arg: the name of the argument in the decorated that provides + the starting offset + :param limit: A default maximum items to return + :return: a tuple containing a list of items, and the offset into the list + """ + default_start = 0 + + def outer_wrapper(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + start = int(default_start if kwargs.get(start_arg) is None else kwargs[start_arg]) + lim = int(limit if kwargs.get(limit_arg) is None else kwargs[limit_arg]) + stop = start + lim + result = func(*args, **kwargs) + limited_results = list(itertools.islice(result, start, stop)) + next_token = stop if stop < len(result) else None + return limited_results, next_token + return wrapper + return outer_wrapper + + class CognitoIdpUserPool(BaseModel): def __init__(self, region, name, extended_config): @@ -33,6 +67,7 @@ class CognitoIdpUserPool(BaseModel): self.clients = OrderedDict() self.identity_providers = OrderedDict() + self.groups = OrderedDict() self.users = OrderedDict() self.refresh_tokens = {} self.access_tokens = {} @@ -185,6 +220,33 @@ class CognitoIdpIdentityProvider(BaseModel): return identity_provider_json +class CognitoIdpGroup(BaseModel): + + def __init__(self, user_pool_id, group_name, description, role_arn, precedence): + self.user_pool_id = user_pool_id + self.group_name = group_name + self.description = description or "" + self.role_arn = role_arn + self.precedence = precedence + self.last_modified_date = datetime.datetime.now() + self.creation_date = self.last_modified_date + + # Users who are members of this group. + # Note that these links are bidirectional. 
+ self.users = set() + + def to_json(self): + return { + "GroupName": self.group_name, + "UserPoolId": self.user_pool_id, + "Description": self.description, + "RoleArn": self.role_arn, + "Precedence": self.precedence, + "LastModifiedDate": time.mktime(self.last_modified_date.timetuple()), + "CreationDate": time.mktime(self.creation_date.timetuple()), + } + + class CognitoIdpUser(BaseModel): def __init__(self, user_pool_id, username, password, status, attributes): @@ -198,6 +260,10 @@ class CognitoIdpUser(BaseModel): self.create_date = datetime.datetime.utcnow() self.last_modified_date = datetime.datetime.utcnow() + # Groups this user is a member of. + # Note that these links are bidirectional. + self.groups = set() + def _base_json(self): return { "UserPoolId": self.user_pool_id, @@ -242,7 +308,8 @@ class CognitoIdpBackend(BaseBackend): self.user_pools[user_pool.id] = user_pool return user_pool - def list_user_pools(self): + @paginate(60) + def list_user_pools(self, max_results=None, next_token=None): return self.user_pools.values() def describe_user_pool(self, user_pool_id): @@ -289,7 +356,8 @@ class CognitoIdpBackend(BaseBackend): user_pool.clients[user_pool_client.id] = user_pool_client return user_pool_client - def list_user_pool_clients(self, user_pool_id): + @paginate(60) + def list_user_pool_clients(self, user_pool_id, max_results=None, next_token=None): user_pool = self.user_pools.get(user_pool_id) if not user_pool: raise ResourceNotFoundError(user_pool_id) @@ -339,7 +407,8 @@ class CognitoIdpBackend(BaseBackend): user_pool.identity_providers[name] = identity_provider return identity_provider - def list_identity_providers(self, user_pool_id): + @paginate(60) + def list_identity_providers(self, user_pool_id, max_results=None, next_token=None): user_pool = self.user_pools.get(user_pool_id) if not user_pool: raise ResourceNotFoundError(user_pool_id) @@ -357,6 +426,19 @@ class CognitoIdpBackend(BaseBackend): return identity_provider + def 
update_identity_provider(self, user_pool_id, name, extended_config): + user_pool = self.user_pools.get(user_pool_id) + if not user_pool: + raise ResourceNotFoundError(user_pool_id) + + identity_provider = user_pool.identity_providers.get(name) + if not identity_provider: + raise ResourceNotFoundError(name) + + identity_provider.extended_config.update(extended_config) + + return identity_provider + def delete_identity_provider(self, user_pool_id, name): user_pool = self.user_pools.get(user_pool_id) if not user_pool: @@ -367,6 +449,72 @@ class CognitoIdpBackend(BaseBackend): del user_pool.identity_providers[name] + # Group + def create_group(self, user_pool_id, group_name, description, role_arn, precedence): + user_pool = self.user_pools.get(user_pool_id) + if not user_pool: + raise ResourceNotFoundError(user_pool_id) + + group = CognitoIdpGroup(user_pool_id, group_name, description, role_arn, precedence) + if group.group_name in user_pool.groups: + raise GroupExistsException("A group with the name already exists") + user_pool.groups[group.group_name] = group + + return group + + def get_group(self, user_pool_id, group_name): + user_pool = self.user_pools.get(user_pool_id) + if not user_pool: + raise ResourceNotFoundError(user_pool_id) + + if group_name not in user_pool.groups: + raise ResourceNotFoundError(group_name) + + return user_pool.groups[group_name] + + def list_groups(self, user_pool_id): + user_pool = self.user_pools.get(user_pool_id) + if not user_pool: + raise ResourceNotFoundError(user_pool_id) + + return user_pool.groups.values() + + def delete_group(self, user_pool_id, group_name): + user_pool = self.user_pools.get(user_pool_id) + if not user_pool: + raise ResourceNotFoundError(user_pool_id) + + if group_name not in user_pool.groups: + raise ResourceNotFoundError(group_name) + + group = user_pool.groups[group_name] + for user in group.users: + user.groups.remove(group) + + del user_pool.groups[group_name] + + def admin_add_user_to_group(self, 
user_pool_id, group_name, username): + group = self.get_group(user_pool_id, group_name) + user = self.admin_get_user(user_pool_id, username) + + group.users.add(user) + user.groups.add(group) + + def list_users_in_group(self, user_pool_id, group_name): + group = self.get_group(user_pool_id, group_name) + return list(group.users) + + def admin_list_groups_for_user(self, user_pool_id, username): + user = self.admin_get_user(user_pool_id, username) + return list(user.groups) + + def admin_remove_user_from_group(self, user_pool_id, group_name, username): + group = self.get_group(user_pool_id, group_name) + user = self.admin_get_user(user_pool_id, username) + + group.users.discard(user) + user.groups.discard(group) + # User def admin_create_user(self, user_pool_id, username, temporary_password, attributes): user_pool = self.user_pools.get(user_pool_id) @@ -387,7 +535,8 @@ class CognitoIdpBackend(BaseBackend): return user_pool.users[username] - def list_users(self, user_pool_id): + @paginate(60, "pagination_token", "limit") + def list_users(self, user_pool_id, pagination_token=None, limit=None): user_pool = self.user_pools.get(user_pool_id) if not user_pool: raise ResourceNotFoundError(user_pool_id) @@ -410,6 +559,10 @@ class CognitoIdpBackend(BaseBackend): if username not in user_pool.users: raise UserNotFoundError(username) + user = user_pool.users[username] + for group in user.groups: + group.users.remove(user) + del user_pool.users[username] def _log_user_in(self, user_pool, client, username): diff --git a/moto/cognitoidp/responses.py b/moto/cognitoidp/responses.py index 50939786b..264910739 100644 --- a/moto/cognitoidp/responses.py +++ b/moto/cognitoidp/responses.py @@ -22,10 +22,17 @@ class CognitoIdpResponse(BaseResponse): }) def list_user_pools(self): - user_pools = cognitoidp_backends[self.region].list_user_pools() - return json.dumps({ - "UserPools": [user_pool.to_json() for user_pool in user_pools] - }) + max_results = self._get_param("MaxResults") + 
next_token = self._get_param("NextToken", "0") + user_pools, next_token = cognitoidp_backends[self.region].list_user_pools( + max_results=max_results, next_token=next_token + ) + response = { + "UserPools": [user_pool.to_json() for user_pool in user_pools], + } + if next_token: + response["NextToken"] = str(next_token) + return json.dumps(response) def describe_user_pool(self): user_pool_id = self._get_param("UserPoolId") @@ -72,10 +79,16 @@ class CognitoIdpResponse(BaseResponse): def list_user_pool_clients(self): user_pool_id = self._get_param("UserPoolId") - user_pool_clients = cognitoidp_backends[self.region].list_user_pool_clients(user_pool_id) - return json.dumps({ + max_results = self._get_param("MaxResults") + next_token = self._get_param("NextToken", "0") + user_pool_clients, next_token = cognitoidp_backends[self.region].list_user_pool_clients(user_pool_id, + max_results=max_results, next_token=next_token) + response = { "UserPoolClients": [user_pool_client.to_json() for user_pool_client in user_pool_clients] - }) + } + if next_token: + response["NextToken"] = str(next_token) + return json.dumps(response) def describe_user_pool_client(self): user_pool_id = self._get_param("UserPoolId") @@ -110,10 +123,17 @@ class CognitoIdpResponse(BaseResponse): def list_identity_providers(self): user_pool_id = self._get_param("UserPoolId") - identity_providers = cognitoidp_backends[self.region].list_identity_providers(user_pool_id) - return json.dumps({ + max_results = self._get_param("MaxResults") + next_token = self._get_param("NextToken", "0") + identity_providers, next_token = cognitoidp_backends[self.region].list_identity_providers( + user_pool_id, max_results=max_results, next_token=next_token + ) + response = { "Providers": [identity_provider.to_json() for identity_provider in identity_providers] - }) + } + if next_token: + response["NextToken"] = str(next_token) + return json.dumps(response) def describe_identity_provider(self): user_pool_id = 
self._get_param("UserPoolId") @@ -123,12 +143,103 @@ class CognitoIdpResponse(BaseResponse): "IdentityProvider": identity_provider.to_json(extended=True) }) + def update_identity_provider(self): + user_pool_id = self._get_param("UserPoolId") + name = self._get_param("ProviderName") + identity_provider = cognitoidp_backends[self.region].update_identity_provider(user_pool_id, name, self.parameters) + return json.dumps({ + "IdentityProvider": identity_provider.to_json(extended=True) + }) + def delete_identity_provider(self): user_pool_id = self._get_param("UserPoolId") name = self._get_param("ProviderName") cognitoidp_backends[self.region].delete_identity_provider(user_pool_id, name) return "" + # Group + def create_group(self): + group_name = self._get_param("GroupName") + user_pool_id = self._get_param("UserPoolId") + description = self._get_param("Description") + role_arn = self._get_param("RoleArn") + precedence = self._get_param("Precedence") + + group = cognitoidp_backends[self.region].create_group( + user_pool_id, + group_name, + description, + role_arn, + precedence, + ) + + return json.dumps({ + "Group": group.to_json(), + }) + + def get_group(self): + group_name = self._get_param("GroupName") + user_pool_id = self._get_param("UserPoolId") + group = cognitoidp_backends[self.region].get_group(user_pool_id, group_name) + return json.dumps({ + "Group": group.to_json(), + }) + + def list_groups(self): + user_pool_id = self._get_param("UserPoolId") + groups = cognitoidp_backends[self.region].list_groups(user_pool_id) + return json.dumps({ + "Groups": [group.to_json() for group in groups], + }) + + def delete_group(self): + group_name = self._get_param("GroupName") + user_pool_id = self._get_param("UserPoolId") + cognitoidp_backends[self.region].delete_group(user_pool_id, group_name) + return "" + + def admin_add_user_to_group(self): + user_pool_id = self._get_param("UserPoolId") + username = self._get_param("Username") + group_name = self._get_param("GroupName") + 
+ cognitoidp_backends[self.region].admin_add_user_to_group( + user_pool_id, + group_name, + username, + ) + + return "" + + def list_users_in_group(self): + user_pool_id = self._get_param("UserPoolId") + group_name = self._get_param("GroupName") + users = cognitoidp_backends[self.region].list_users_in_group(user_pool_id, group_name) + return json.dumps({ + "Users": [user.to_json(extended=True) for user in users], + }) + + def admin_list_groups_for_user(self): + username = self._get_param("Username") + user_pool_id = self._get_param("UserPoolId") + groups = cognitoidp_backends[self.region].admin_list_groups_for_user(user_pool_id, username) + return json.dumps({ + "Groups": [group.to_json() for group in groups], + }) + + def admin_remove_user_from_group(self): + user_pool_id = self._get_param("UserPoolId") + username = self._get_param("Username") + group_name = self._get_param("GroupName") + + cognitoidp_backends[self.region].admin_remove_user_from_group( + user_pool_id, + group_name, + username, + ) + + return "" + # User def admin_create_user(self): user_pool_id = self._get_param("UserPoolId") @@ -155,10 +266,15 @@ class CognitoIdpResponse(BaseResponse): def list_users(self): user_pool_id = self._get_param("UserPoolId") - users = cognitoidp_backends[self.region].list_users(user_pool_id) - return json.dumps({ - "Users": [user.to_json(extended=True) for user in users] - }) + limit = self._get_param("Limit") + token = self._get_param("PaginationToken") + users, token = cognitoidp_backends[self.region].list_users(user_pool_id, + limit=limit, + pagination_token=token) + response = {"Users": [user.to_json(extended=True) for user in users]} + if token: + response["PaginationToken"] = str(token) + return json.dumps(response) def admin_disable_user(self): user_pool_id = self._get_param("UserPoolId") diff --git a/moto/config/__init__.py b/moto/config/__init__.py new file mode 100644 index 000000000..9ca6a5917 --- /dev/null +++ b/moto/config/__init__.py @@ -0,0 +1,4 @@ +from 
.models import config_backends +from ..core.models import base_decorator + +mock_config = base_decorator(config_backends) diff --git a/moto/config/exceptions.py b/moto/config/exceptions.py new file mode 100644 index 000000000..b2b01d6a0 --- /dev/null +++ b/moto/config/exceptions.py @@ -0,0 +1,149 @@ +from __future__ import unicode_literals +from moto.core.exceptions import JsonRESTError + + +class NameTooLongException(JsonRESTError): + code = 400 + + def __init__(self, name, location): + message = '1 validation error detected: Value \'{name}\' at \'{location}\' failed to satisfy' \ + ' constraint: Member must have length less than or equal to 256'.format(name=name, location=location) + super(NameTooLongException, self).__init__("ValidationException", message) + + +class InvalidConfigurationRecorderNameException(JsonRESTError): + code = 400 + + def __init__(self, name): + message = 'The configuration recorder name \'{name}\' is not valid, blank string.'.format(name=name) + super(InvalidConfigurationRecorderNameException, self).__init__("InvalidConfigurationRecorderNameException", + message) + + +class MaxNumberOfConfigurationRecordersExceededException(JsonRESTError): + code = 400 + + def __init__(self, name): + message = 'Failed to put configuration recorder \'{name}\' because the maximum number of ' \ + 'configuration recorders: 1 is reached.'.format(name=name) + super(MaxNumberOfConfigurationRecordersExceededException, self).__init__( + "MaxNumberOfConfigurationRecordersExceededException", message) + + +class InvalidRecordingGroupException(JsonRESTError): + code = 400 + + def __init__(self): + message = 'The recording group provided is not valid' + super(InvalidRecordingGroupException, self).__init__("InvalidRecordingGroupException", message) + + +class InvalidResourceTypeException(JsonRESTError): + code = 400 + + def __init__(self, bad_list, good_list): + message = '{num} validation error detected: Value \'{bad_list}\' at ' \ + 
'\'configurationRecorder.recordingGroup.resourceTypes\' failed to satisfy constraint: ' \ + 'Member must satisfy constraint: [Member must satisfy enum value set: {good_list}]'.format( + num=len(bad_list), bad_list=bad_list, good_list=good_list) + # For PY2: + message = str(message) + + super(InvalidResourceTypeException, self).__init__("ValidationException", message) + + +class NoSuchConfigurationRecorderException(JsonRESTError): + code = 400 + + def __init__(self, name): + message = 'Cannot find configuration recorder with the specified name \'{name}\'.'.format(name=name) + super(NoSuchConfigurationRecorderException, self).__init__("NoSuchConfigurationRecorderException", message) + + +class InvalidDeliveryChannelNameException(JsonRESTError): + code = 400 + + def __init__(self, name): + message = 'The delivery channel name \'{name}\' is not valid, blank string.'.format(name=name) + super(InvalidDeliveryChannelNameException, self).__init__("InvalidDeliveryChannelNameException", + message) + + +class NoSuchBucketException(JsonRESTError): + """We are *only* validating that there is value that is not '' here.""" + code = 400 + + def __init__(self): + message = 'Cannot find a S3 bucket with an empty bucket name.' + super(NoSuchBucketException, self).__init__("NoSuchBucketException", message) + + +class InvalidS3KeyPrefixException(JsonRESTError): + code = 400 + + def __init__(self): + message = 'The s3 key prefix \'\' is not valid, empty s3 key prefix.' + super(InvalidS3KeyPrefixException, self).__init__("InvalidS3KeyPrefixException", message) + + +class InvalidSNSTopicARNException(JsonRESTError): + """We are *only* validating that there is value that is not '' here.""" + code = 400 + + def __init__(self): + message = 'The sns topic arn \'\' is not valid.' 
+ super(InvalidSNSTopicARNException, self).__init__("InvalidSNSTopicARNException", message) + + +class InvalidDeliveryFrequency(JsonRESTError): + code = 400 + + def __init__(self, value, good_list): + message = '1 validation error detected: Value \'{value}\' at ' \ + '\'deliveryChannel.configSnapshotDeliveryProperties.deliveryFrequency\' failed to satisfy ' \ + 'constraint: Member must satisfy enum value set: {good_list}'.format(value=value, good_list=good_list) + super(InvalidDeliveryFrequency, self).__init__("InvalidDeliveryFrequency", message) + + +class MaxNumberOfDeliveryChannelsExceededException(JsonRESTError): + code = 400 + + def __init__(self, name): + message = 'Failed to put delivery channel \'{name}\' because the maximum number of ' \ + 'delivery channels: 1 is reached.'.format(name=name) + super(MaxNumberOfDeliveryChannelsExceededException, self).__init__( + "MaxNumberOfDeliveryChannelsExceededException", message) + + +class NoSuchDeliveryChannelException(JsonRESTError): + code = 400 + + def __init__(self, name): + message = 'Cannot find delivery channel with specified name \'{name}\'.'.format(name=name) + super(NoSuchDeliveryChannelException, self).__init__("NoSuchDeliveryChannelException", message) + + +class NoAvailableConfigurationRecorderException(JsonRESTError): + code = 400 + + def __init__(self): + message = 'Configuration recorder is not available to put delivery channel.' + super(NoAvailableConfigurationRecorderException, self).__init__("NoAvailableConfigurationRecorderException", + message) + + +class NoAvailableDeliveryChannelException(JsonRESTError): + code = 400 + + def __init__(self): + message = 'Delivery channel is not available to start configuration recorder.' 
+ super(NoAvailableDeliveryChannelException, self).__init__("NoAvailableDeliveryChannelException", message) + + +class LastDeliveryChannelDeleteFailedException(JsonRESTError): + code = 400 + + def __init__(self, name): + message = 'Failed to delete last specified delivery channel with name \'{name}\', because there, ' \ + 'because there is a running configuration recorder.'.format(name=name) + super(LastDeliveryChannelDeleteFailedException, self).__init__("LastDeliveryChannelDeleteFailedException", message) diff --git a/moto/config/models.py b/moto/config/models.py new file mode 100644 index 000000000..cd6e07afa --- /dev/null +++ b/moto/config/models.py @@ -0,0 +1,335 @@ +import json +import time +import pkg_resources + +from datetime import datetime + +from boto3 import Session + +from moto.config.exceptions import InvalidResourceTypeException, InvalidDeliveryFrequency, \ + InvalidConfigurationRecorderNameException, NameTooLongException, \ + MaxNumberOfConfigurationRecordersExceededException, InvalidRecordingGroupException, \ + NoSuchConfigurationRecorderException, NoAvailableConfigurationRecorderException, \ + InvalidDeliveryChannelNameException, NoSuchBucketException, InvalidS3KeyPrefixException, \ + InvalidSNSTopicARNException, MaxNumberOfDeliveryChannelsExceededException, NoAvailableDeliveryChannelException, \ + NoSuchDeliveryChannelException, LastDeliveryChannelDeleteFailedException + +from moto.core import BaseBackend, BaseModel + +DEFAULT_ACCOUNT_ID = 123456789012 + + +def datetime2int(date): + return int(time.mktime(date.timetuple())) + + +def snake_to_camels(original): + parts = original.split('_') + + camel_cased = parts[0].lower() + ''.join(p.title() for p in parts[1:]) + camel_cased = camel_cased.replace('Arn', 'ARN') # Config uses 'ARN' instead of 'Arn' + + return camel_cased + + +class ConfigEmptyDictable(BaseModel): + """Base class to make serialization easy. 
This assumes that the sub-class will NOT return 'None's in the JSON.""" + + def to_dict(self): + data = {} + for item, value in self.__dict__.items(): + if value is not None: + if isinstance(value, ConfigEmptyDictable): + data[snake_to_camels(item)] = value.to_dict() + else: + data[snake_to_camels(item)] = value + + return data + + +class ConfigRecorderStatus(ConfigEmptyDictable): + + def __init__(self, name): + self.name = name + + self.recording = False + self.last_start_time = None + self.last_stop_time = None + self.last_status = None + self.last_error_code = None + self.last_error_message = None + self.last_status_change_time = None + + def start(self): + self.recording = True + self.last_status = 'PENDING' + self.last_start_time = datetime2int(datetime.utcnow()) + self.last_status_change_time = datetime2int(datetime.utcnow()) + + def stop(self): + self.recording = False + self.last_stop_time = datetime2int(datetime.utcnow()) + self.last_status_change_time = datetime2int(datetime.utcnow()) + + +class ConfigDeliverySnapshotProperties(ConfigEmptyDictable): + + def __init__(self, delivery_frequency): + self.delivery_frequency = delivery_frequency + + +class ConfigDeliveryChannel(ConfigEmptyDictable): + + def __init__(self, name, s3_bucket_name, prefix=None, sns_arn=None, snapshot_properties=None): + self.name = name + self.s3_bucket_name = s3_bucket_name + self.s3_key_prefix = prefix + self.sns_topic_arn = sns_arn + self.config_snapshot_delivery_properties = snapshot_properties + + +class RecordingGroup(ConfigEmptyDictable): + + def __init__(self, all_supported=True, include_global_resource_types=False, resource_types=None): + self.all_supported = all_supported + self.include_global_resource_types = include_global_resource_types + self.resource_types = resource_types + + +class ConfigRecorder(ConfigEmptyDictable): + + def __init__(self, role_arn, recording_group, name='default', status=None): + self.name = name + self.role_arn = role_arn + self.recording_group = 
recording_group + + if not status: + self.status = ConfigRecorderStatus(name) + else: + self.status = status + + +class ConfigBackend(BaseBackend): + + def __init__(self): + self.recorders = {} + self.delivery_channels = {} + + @staticmethod + def _validate_resource_types(resource_list): + # Load the service file: + resource_package = 'botocore' + resource_path = '/'.join(('data', 'config', '2014-11-12', 'service-2.json')) + conifg_schema = json.loads(pkg_resources.resource_string(resource_package, resource_path)) + + # Verify that each entry exists in the supported list: + bad_list = [] + for resource in resource_list: + # For PY2: + r_str = str(resource) + + if r_str not in conifg_schema['shapes']['ResourceType']['enum']: + bad_list.append(r_str) + + if bad_list: + raise InvalidResourceTypeException(bad_list, conifg_schema['shapes']['ResourceType']['enum']) + + @staticmethod + def _validate_delivery_snapshot_properties(properties): + # Load the service file: + resource_package = 'botocore' + resource_path = '/'.join(('data', 'config', '2014-11-12', 'service-2.json')) + conifg_schema = json.loads(pkg_resources.resource_string(resource_package, resource_path)) + + # Verify that the deliveryFrequency is set to an acceptable value: + if properties.get('deliveryFrequency', None) not in \ + conifg_schema['shapes']['MaximumExecutionFrequency']['enum']: + raise InvalidDeliveryFrequency(properties.get('deliveryFrequency', None), + conifg_schema['shapes']['MaximumExecutionFrequency']['enum']) + + def put_configuration_recorder(self, config_recorder): + # Validate the name: + if not config_recorder.get('name'): + raise InvalidConfigurationRecorderNameException(config_recorder.get('name')) + if len(config_recorder.get('name')) > 256: + raise NameTooLongException(config_recorder.get('name'), 'configurationRecorder.name') + + # We're going to assume that the passed in Role ARN is correct. 
+ + # Config currently only allows 1 configuration recorder for an account: + if len(self.recorders) == 1 and not self.recorders.get(config_recorder['name']): + raise MaxNumberOfConfigurationRecordersExceededException(config_recorder['name']) + + # Is this updating an existing one? + recorder_status = None + if self.recorders.get(config_recorder['name']): + recorder_status = self.recorders[config_recorder['name']].status + + # Validate the Recording Group: + if config_recorder.get('recordingGroup') is None: + recording_group = RecordingGroup() + else: + rg = config_recorder['recordingGroup'] + + # If an empty dict is passed in, then bad: + if not rg: + raise InvalidRecordingGroupException() + + # Can't have both the resource types specified and the other flags as True. + if rg.get('resourceTypes') and ( + rg.get('allSupported', False) or + rg.get('includeGlobalResourceTypes', False)): + raise InvalidRecordingGroupException() + + # Must supply resourceTypes if 'allSupported' is not supplied: + if not rg.get('allSupported') and not rg.get('resourceTypes'): + raise InvalidRecordingGroupException() + + # Validate that the list provided is correct: + self._validate_resource_types(rg.get('resourceTypes', [])) + + recording_group = RecordingGroup( + all_supported=rg.get('allSupported', True), + include_global_resource_types=rg.get('includeGlobalResourceTypes', False), + resource_types=rg.get('resourceTypes', []) + ) + + self.recorders[config_recorder['name']] = \ + ConfigRecorder(config_recorder['roleARN'], recording_group, name=config_recorder['name'], + status=recorder_status) + + def describe_configuration_recorders(self, recorder_names): + recorders = [] + + if recorder_names: + for rn in recorder_names: + if not self.recorders.get(rn): + raise NoSuchConfigurationRecorderException(rn) + + # Format the recorder: + recorders.append(self.recorders[rn].to_dict()) + + else: + for recorder in self.recorders.values(): + recorders.append(recorder.to_dict()) + + return 
recorders + + def describe_configuration_recorder_status(self, recorder_names): + recorders = [] + + if recorder_names: + for rn in recorder_names: + if not self.recorders.get(rn): + raise NoSuchConfigurationRecorderException(rn) + + # Format the recorder: + recorders.append(self.recorders[rn].status.to_dict()) + + else: + for recorder in self.recorders.values(): + recorders.append(recorder.status.to_dict()) + + return recorders + + def put_delivery_channel(self, delivery_channel): + # Must have a configuration recorder: + if not self.recorders: + raise NoAvailableConfigurationRecorderException() + + # Validate the name: + if not delivery_channel.get('name'): + raise InvalidDeliveryChannelNameException(delivery_channel.get('name')) + if len(delivery_channel.get('name')) > 256: + raise NameTooLongException(delivery_channel.get('name'), 'deliveryChannel.name') + + # We are going to assume that the bucket exists -- but will verify if the bucket provided is blank: + if not delivery_channel.get('s3BucketName'): + raise NoSuchBucketException() + + # We are going to assume that the bucket has the correct policy attached to it. 
We are only going to verify + # if the prefix provided is not an empty string: + if delivery_channel.get('s3KeyPrefix', None) == '': + raise InvalidS3KeyPrefixException() + + # Ditto for SNS -- Only going to assume that the ARN provided is not an empty string: + if delivery_channel.get('snsTopicARN', None) == '': + raise InvalidSNSTopicARNException() + + # Config currently only allows 1 delivery channel for an account: + if len(self.delivery_channels) == 1 and not self.delivery_channels.get(delivery_channel['name']): + raise MaxNumberOfDeliveryChannelsExceededException(delivery_channel['name']) + + if not delivery_channel.get('configSnapshotDeliveryProperties'): + dp = None + + else: + # Validate the config snapshot delivery properties: + self._validate_delivery_snapshot_properties(delivery_channel['configSnapshotDeliveryProperties']) + + dp = ConfigDeliverySnapshotProperties( + delivery_channel['configSnapshotDeliveryProperties']['deliveryFrequency']) + + self.delivery_channels[delivery_channel['name']] = \ + ConfigDeliveryChannel(delivery_channel['name'], delivery_channel['s3BucketName'], + prefix=delivery_channel.get('s3KeyPrefix', None), + sns_arn=delivery_channel.get('snsTopicARN', None), + snapshot_properties=dp) + + def describe_delivery_channels(self, channel_names): + channels = [] + + if channel_names: + for cn in channel_names: + if not self.delivery_channels.get(cn): + raise NoSuchDeliveryChannelException(cn) + + # Format the delivery channel: + channels.append(self.delivery_channels[cn].to_dict()) + + else: + for channel in self.delivery_channels.values(): + channels.append(channel.to_dict()) + + return channels + + def start_configuration_recorder(self, recorder_name): + if not self.recorders.get(recorder_name): + raise NoSuchConfigurationRecorderException(recorder_name) + + # Must have a delivery channel available as well: + if not self.delivery_channels: + raise NoAvailableDeliveryChannelException() + + # Start recording: + 
self.recorders[recorder_name].status.start() + + def stop_configuration_recorder(self, recorder_name): + if not self.recorders.get(recorder_name): + raise NoSuchConfigurationRecorderException(recorder_name) + + # Stop recording: + self.recorders[recorder_name].status.stop() + + def delete_configuration_recorder(self, recorder_name): + if not self.recorders.get(recorder_name): + raise NoSuchConfigurationRecorderException(recorder_name) + + del self.recorders[recorder_name] + + def delete_delivery_channel(self, channel_name): + if not self.delivery_channels.get(channel_name): + raise NoSuchDeliveryChannelException(channel_name) + + # Check if a channel is recording -- if so, bad -- (there can only be 1 recorder): + for recorder in self.recorders.values(): + if recorder.status.recording: + raise LastDeliveryChannelDeleteFailedException(channel_name) + + del self.delivery_channels[channel_name] + + +config_backends = {} +boto3_session = Session() +for region in boto3_session.get_available_regions('config'): + config_backends[region] = ConfigBackend() diff --git a/moto/config/responses.py b/moto/config/responses.py new file mode 100644 index 000000000..286b2349f --- /dev/null +++ b/moto/config/responses.py @@ -0,0 +1,53 @@ +import json +from moto.core.responses import BaseResponse +from .models import config_backends + + +class ConfigResponse(BaseResponse): + + @property + def config_backend(self): + return config_backends[self.region] + + def put_configuration_recorder(self): + self.config_backend.put_configuration_recorder(self._get_param('ConfigurationRecorder')) + return "" + + def describe_configuration_recorders(self): + recorders = self.config_backend.describe_configuration_recorders(self._get_param('ConfigurationRecorderNames')) + schema = {'ConfigurationRecorders': recorders} + return json.dumps(schema) + + def describe_configuration_recorder_status(self): + recorder_statuses = self.config_backend.describe_configuration_recorder_status( + 
self._get_param('ConfigurationRecorderNames')) + schema = {'ConfigurationRecordersStatus': recorder_statuses} + return json.dumps(schema) + + def put_delivery_channel(self): + self.config_backend.put_delivery_channel(self._get_param('DeliveryChannel')) + return "" + + def describe_delivery_channels(self): + delivery_channels = self.config_backend.describe_delivery_channels(self._get_param('DeliveryChannelNames')) + schema = {'DeliveryChannels': delivery_channels} + return json.dumps(schema) + + def describe_delivery_channel_status(self): + raise NotImplementedError() + + def delete_delivery_channel(self): + self.config_backend.delete_delivery_channel(self._get_param('DeliveryChannelName')) + return "" + + def delete_configuration_recorder(self): + self.config_backend.delete_configuration_recorder(self._get_param('ConfigurationRecorderName')) + return "" + + def start_configuration_recorder(self): + self.config_backend.start_configuration_recorder(self._get_param('ConfigurationRecorderName')) + return "" + + def stop_configuration_recorder(self): + self.config_backend.stop_configuration_recorder(self._get_param('ConfigurationRecorderName')) + return "" diff --git a/moto/config/urls.py b/moto/config/urls.py new file mode 100644 index 000000000..fd7b6969f --- /dev/null +++ b/moto/config/urls.py @@ -0,0 +1,10 @@ +from __future__ import unicode_literals +from .responses import ConfigResponse + +url_bases = [ + "https?://config.(.+).amazonaws.com", +] + +url_paths = { + '{0}/$': ConfigResponse.dispatch, +} diff --git a/moto/core/models.py b/moto/core/models.py index 19267ca08..9fe1e96bd 100644 --- a/moto/core/models.py +++ b/moto/core/models.py @@ -4,6 +4,7 @@ from __future__ import absolute_import import functools import inspect +import os import re import six from io import BytesIO @@ -21,6 +22,11 @@ from .utils import ( ) +# "Mock" the AWS credentials as they can't be mocked in Botocore currently +os.environ.setdefault("AWS_ACCESS_KEY_ID", "foobar_key") 
+os.environ.setdefault("AWS_SECRET_ACCESS_KEY", "foobar_secret") + + class BaseMockAWS(object): nested_count = 0 diff --git a/moto/core/responses.py b/moto/core/responses.py index 0f133e72c..8fb247f75 100644 --- a/moto/core/responses.py +++ b/moto/core/responses.py @@ -718,6 +718,8 @@ def to_str(value, spec): return str(value) elif vtype == 'float': return str(value) + elif vtype == 'double': + return str(value) elif vtype == 'timestamp': return datetime.datetime.utcfromtimestamp( value).replace(tzinfo=pytz.utc).isoformat() @@ -737,6 +739,8 @@ def from_str(value, spec): return int(value) elif vtype == 'float': return float(value) + elif vtype == 'double': + return float(value) elif vtype == 'timestamp': return value elif vtype == 'string': diff --git a/moto/dynamodb2/comparisons.py b/moto/dynamodb2/comparisons.py index 53226c557..6d37345fe 100644 --- a/moto/dynamodb2/comparisons.py +++ b/moto/dynamodb2/comparisons.py @@ -383,7 +383,7 @@ class OpNotEqual(Op): def expr(self, item): lhs = self._lhs(item) rhs = self._rhs(item) - return lhs == rhs + return lhs != rhs class OpLessThanOrEqual(Op): diff --git a/moto/dynamodb2/models.py b/moto/dynamodb2/models.py index a54c4f7d0..0f4594aa4 100644 --- a/moto/dynamodb2/models.py +++ b/moto/dynamodb2/models.py @@ -5,6 +5,7 @@ import datetime import decimal import json import re +import uuid import boto3 from moto.compat import OrderedDict @@ -65,6 +66,8 @@ class DynamoType(object): return int(self.value) except ValueError: return float(self.value) + elif self.is_set(): + return set(self.value) else: return self.value @@ -292,9 +295,82 @@ class Item(BaseModel): 'ADD not supported for %s' % ', '.join(update_action['Value'].keys())) +class StreamRecord(BaseModel): + def __init__(self, table, stream_type, event_name, old, new, seq): + old_a = old.to_json()['Attributes'] if old is not None else {} + new_a = new.to_json()['Attributes'] if new is not None else {} + + rec = old if old is not None else new + keys = 
{table.hash_key_attr: rec.hash_key.to_json()} + if table.range_key_attr is not None: + keys[table.range_key_attr] = rec.range_key.to_json() + + self.record = { + 'eventID': uuid.uuid4().hex, + 'eventName': event_name, + 'eventSource': 'aws:dynamodb', + 'eventVersion': '1.0', + 'awsRegion': 'us-east-1', + 'dynamodb': { + 'StreamViewType': stream_type, + 'ApproximateCreationDateTime': datetime.datetime.utcnow().isoformat(), + 'SequenceNumber': seq, + 'SizeBytes': 1, + 'Keys': keys + } + } + + if stream_type in ('NEW_IMAGE', 'NEW_AND_OLD_IMAGES'): + self.record['dynamodb']['NewImage'] = new_a + if stream_type in ('OLD_IMAGE', 'NEW_AND_OLD_IMAGES'): + self.record['dynamodb']['OldImage'] = old_a + + # This is a substantial overestimate but it's the easiest to do now + self.record['dynamodb']['SizeBytes'] = len( + json.dumps(self.record['dynamodb'])) + + def to_json(self): + return self.record + + +class StreamShard(BaseModel): + def __init__(self, table): + self.table = table + self.id = 'shardId-00000001541626099285-f35f62ef' + self.starting_sequence_number = 1100000000017454423009 + self.items = [] + self.created_on = datetime.datetime.utcnow() + + def to_json(self): + return { + 'ShardId': self.id, + 'SequenceNumberRange': { + 'StartingSequenceNumber': str(self.starting_sequence_number) + } + } + + def add(self, old, new): + t = self.table.stream_specification['StreamViewType'] + if old is None: + event_name = 'INSERT' + elif new is None: + event_name = 'DELETE' + else: + event_name = 'MODIFY' + seq = len(self.items) + self.starting_sequence_number + self.items.append( + StreamRecord(self.table, t, event_name, old, new, seq)) + + def get(self, start, quantity): + start -= self.starting_sequence_number + assert start >= 0 + end = start + quantity + return [i.to_json() for i in self.items[start:end]] + + class Table(BaseModel): - def __init__(self, table_name, schema=None, attr=None, throughput=None, indexes=None, global_indexes=None): + def __init__(self, table_name, 
schema=None, attr=None, throughput=None, indexes=None, global_indexes=None, streams=None): self.name = table_name self.attr = attr self.schema = schema @@ -325,10 +401,22 @@ class Table(BaseModel): 'TimeToLiveStatus': 'DISABLED' # One of 'ENABLING'|'DISABLING'|'ENABLED'|'DISABLED', # 'AttributeName': 'string' # Can contain this } + self.set_stream_specification(streams) def _generate_arn(self, name): return 'arn:aws:dynamodb:us-east-1:123456789011:table/' + name + def set_stream_specification(self, streams): + self.stream_specification = streams + if streams and (streams.get('StreamEnabled') or streams.get('StreamViewType')): + self.stream_specification['StreamEnabled'] = True + self.latest_stream_label = datetime.datetime.utcnow().isoformat() + self.stream_shard = StreamShard(self) + else: + self.stream_specification = {'StreamEnabled': False} + self.latest_stream_label = None + self.stream_shard = None + def describe(self, base_key='TableDescription'): results = { base_key: { @@ -345,6 +433,11 @@ class Table(BaseModel): 'LocalSecondaryIndexes': [index for index in self.indexes], } } + if self.stream_specification and self.stream_specification['StreamEnabled']: + results[base_key]['StreamSpecification'] = self.stream_specification + if self.latest_stream_label: + results[base_key]['LatestStreamLabel'] = self.latest_stream_label + results[base_key]['LatestStreamArn'] = self.table_arn + '/stream/' + self.latest_stream_label return results def __len__(self): @@ -385,23 +478,22 @@ class Table(BaseModel): else: range_value = None + if expected is None: + expected = {} + lookup_range_value = range_value + else: + expected_range_value = expected.get( + self.range_key_attr, {}).get("Value") + if(expected_range_value is None): + lookup_range_value = range_value + else: + lookup_range_value = DynamoType(expected_range_value) + current = self.get_item(hash_value, lookup_range_value) + item = Item(hash_value, self.hash_key_type, range_value, self.range_key_type, item_attrs) 
if not overwrite: - if expected is None: - expected = {} - lookup_range_value = range_value - else: - expected_range_value = expected.get( - self.range_key_attr, {}).get("Value") - if(expected_range_value is None): - lookup_range_value = range_value - else: - lookup_range_value = DynamoType(expected_range_value) - - current = self.get_item(hash_value, lookup_range_value) - if current is None: current_attr = {} elif hasattr(current, 'attrs'): @@ -419,19 +511,20 @@ class Table(BaseModel): elif 'Value' in val and DynamoType(val['Value']).value != current_attr[key].value: raise ValueError("The conditional request failed") elif 'ComparisonOperator' in val: - comparison_func = get_comparison_func( - val['ComparisonOperator']) dynamo_types = [ DynamoType(ele) for ele in val.get("AttributeValueList", []) ] - for t in dynamo_types: - if not comparison_func(current_attr[key].value, t.value): - raise ValueError('The conditional request failed') + if not current_attr[key].compare(val['ComparisonOperator'], dynamo_types): + raise ValueError('The conditional request failed') if range_value: self.items[hash_value][range_value] = item else: self.items[hash_value] = item + + if self.stream_shard is not None: + self.stream_shard.add(current, item) + return item def __nonzero__(self): @@ -462,9 +555,14 @@ class Table(BaseModel): def delete_item(self, hash_key, range_key): try: if range_key: - return self.items[hash_key].pop(range_key) + item = self.items[hash_key].pop(range_key) else: - return self.items.pop(hash_key) + item = self.items.pop(hash_key) + + if self.stream_shard is not None: + self.stream_shard.add(item, None) + + return item except KeyError: return None @@ -472,6 +570,7 @@ class Table(BaseModel): exclusive_start_key, scan_index_forward, projection_expression, index_name=None, filter_expression=None, **filter_kwargs): results = [] + if index_name: all_indexes = (self.global_indexes or []) + (self.indexes or []) indexes_by_name = dict((i['IndexName'], i) for i in 
all_indexes) @@ -488,24 +587,28 @@ class Table(BaseModel): raise ValueError('Missing Hash Key. KeySchema: %s' % index['KeySchema']) - possible_results = [] - for item in self.all_items(): - if not isinstance(item, Item): - continue - item_hash_key = item.attrs.get(index_hash_key['AttributeName']) - if item_hash_key and item_hash_key == hash_key: - possible_results.append(item) - else: - possible_results = [item for item in list(self.all_items()) if isinstance( - item, Item) and item.hash_key == hash_key] - - if index_name: try: index_range_key = [key for key in index[ 'KeySchema'] if key['KeyType'] == 'RANGE'][0] except IndexError: index_range_key = None + possible_results = [] + for item in self.all_items(): + if not isinstance(item, Item): + continue + item_hash_key = item.attrs.get(index_hash_key['AttributeName']) + if index_range_key is None: + if item_hash_key and item_hash_key == hash_key: + possible_results.append(item) + else: + item_range_key = item.attrs.get(index_range_key['AttributeName']) + if item_hash_key and item_hash_key == hash_key and item_range_key: + possible_results.append(item) + else: + possible_results = [item for item in list(self.all_items()) if isinstance( + item, Item) and item.hash_key == hash_key] + if range_comparison: if index_name and not index_range_key: raise ValueError( @@ -680,6 +783,13 @@ class DynamoDBBackend(BaseBackend): table.throughput = throughput return table + def update_table_streams(self, name, stream_specification): + table = self.tables[name] + if (stream_specification.get('StreamEnabled') or stream_specification.get('StreamViewType')) and table.latest_stream_label: + raise ValueError('Table already has stream enabled') + table.set_stream_specification(stream_specification) + return table + def update_table_global_indexes(self, name, global_index_updates): table = self.tables[name] gsis_by_name = dict((i['IndexName'], i) for i in table.global_indexes) @@ -840,15 +950,12 @@ class DynamoDBBackend(BaseBackend): elif 
'Value' in val and DynamoType(val['Value']).value != item_attr[key].value: raise ValueError("The conditional request failed") elif 'ComparisonOperator' in val: - comparison_func = get_comparison_func( - val['ComparisonOperator']) dynamo_types = [ DynamoType(ele) for ele in val.get("AttributeValueList", []) ] - for t in dynamo_types: - if not comparison_func(item_attr[key].value, t.value): - raise ValueError('The conditional request failed') + if not item_attr[key].compare(val['ComparisonOperator'], dynamo_types): + raise ValueError('The conditional request failed') # Update does not fail on new items, so create one if item is None: diff --git a/moto/dynamodb2/responses.py b/moto/dynamodb2/responses.py index e2f1ef1cc..49095f09c 100644 --- a/moto/dynamodb2/responses.py +++ b/moto/dynamodb2/responses.py @@ -31,6 +31,67 @@ def get_empty_str_error(): )) +def condition_expression_to_expected(condition_expression, expression_attribute_names, expression_attribute_values): + """ + Limited condition expression syntax parsing. + Supports Global Negation ex: NOT(inner expressions). + Supports simple AND conditions ex: cond_a AND cond_b and cond_c. + Atomic expressions supported are attribute_exists(key), attribute_not_exists(key) and #key = :value. 
+ """ + expected = {} + if condition_expression and 'OR' not in condition_expression: + reverse_re = re.compile('^NOT\s*\((.*)\)$') + reverse_m = reverse_re.match(condition_expression.strip()) + + reverse = False + if reverse_m: + reverse = True + condition_expression = reverse_m.group(1) + + cond_items = [c.strip() for c in condition_expression.split('AND')] + if cond_items: + exists_re = re.compile('^attribute_exists\s*\((.*)\)$') + not_exists_re = re.compile( + '^attribute_not_exists\s*\((.*)\)$') + equals_re = re.compile('^(#?\w+)\s*=\s*(\:?\w+)') + + for cond in cond_items: + exists_m = exists_re.match(cond) + not_exists_m = not_exists_re.match(cond) + equals_m = equals_re.match(cond) + + if exists_m: + attribute_name = expression_attribute_names_lookup(exists_m.group(1), expression_attribute_names) + expected[attribute_name] = {'Exists': True if not reverse else False} + elif not_exists_m: + attribute_name = expression_attribute_names_lookup(not_exists_m.group(1), expression_attribute_names) + expected[attribute_name] = {'Exists': False if not reverse else True} + elif equals_m: + attribute_name = expression_attribute_names_lookup(equals_m.group(1), expression_attribute_names) + attribute_value = expression_attribute_values_lookup(equals_m.group(2), expression_attribute_values) + expected[attribute_name] = { + 'AttributeValueList': [attribute_value], + 'ComparisonOperator': 'EQ' if not reverse else 'NEQ'} + + return expected + + +def expression_attribute_names_lookup(attribute_name, expression_attribute_names): + if attribute_name.startswith('#') and attribute_name in expression_attribute_names: + return expression_attribute_names[attribute_name] + else: + return attribute_name + + +def expression_attribute_values_lookup(attribute_value, expression_attribute_values): + if isinstance(attribute_value, six.string_types) and \ + attribute_value.startswith(':') and\ + attribute_value in expression_attribute_values: + return 
expression_attribute_values[attribute_value] + else: + return attribute_value + + class DynamoHandler(BaseResponse): def get_endpoint_name(self, headers): @@ -104,13 +165,16 @@ class DynamoHandler(BaseResponse): # getting the indexes global_indexes = body.get("GlobalSecondaryIndexes", []) local_secondary_indexes = body.get("LocalSecondaryIndexes", []) + # get the stream specification + streams = body.get("StreamSpecification") table = self.dynamodb_backend.create_table(table_name, schema=key_schema, throughput=throughput, attr=attr, global_indexes=global_indexes, - indexes=local_secondary_indexes) + indexes=local_secondary_indexes, + streams=streams) if table is not None: return dynamo_json_dump(table.describe()) else: @@ -163,12 +227,20 @@ class DynamoHandler(BaseResponse): def update_table(self): name = self.body['TableName'] + table = self.dynamodb_backend.get_table(name) if 'GlobalSecondaryIndexUpdates' in self.body: table = self.dynamodb_backend.update_table_global_indexes( name, self.body['GlobalSecondaryIndexUpdates']) if 'ProvisionedThroughput' in self.body: throughput = self.body["ProvisionedThroughput"] table = self.dynamodb_backend.update_table_throughput(name, throughput) + if 'StreamSpecification' in self.body: + try: + table = self.dynamodb_backend.update_table_streams(name, self.body['StreamSpecification']) + except ValueError: + er = 'com.amazonaws.dynamodb.v20111205#ResourceInUseException' + return self.error(er, 'Cannot enable stream') + return dynamo_json_dump(table.describe()) def describe_table(self): @@ -183,6 +255,11 @@ class DynamoHandler(BaseResponse): def put_item(self): name = self.body['TableName'] item = self.body['Item'] + return_values = self.body.get('ReturnValues', 'NONE') + + if return_values not in ('ALL_OLD', 'NONE'): + er = 'com.amazonaws.dynamodb.v20111205#ValidationException' + return self.error(er, 'Return values set to invalid value') if has_empty_keys_or_values(item): return get_empty_str_error() @@ -193,28 +270,24 @@ class 
DynamoHandler(BaseResponse): else: expected = None + if return_values == 'ALL_OLD': + existing_item = self.dynamodb_backend.get_item(name, item) + if existing_item: + existing_attributes = existing_item.to_json()['Attributes'] + else: + existing_attributes = {} + # Attempt to parse simple ConditionExpressions into an Expected # expression if not expected: condition_expression = self.body.get('ConditionExpression') - if condition_expression and 'OR' not in condition_expression: - cond_items = [c.strip() - for c in condition_expression.split('AND')] - - if cond_items: - expected = {} - overwrite = False - exists_re = re.compile('^attribute_exists\s*\((.*)\)$') - not_exists_re = re.compile( - '^attribute_not_exists\s*\((.*)\)$') - - for cond in cond_items: - exists_m = exists_re.match(cond) - not_exists_m = not_exists_re.match(cond) - if exists_m: - expected[exists_m.group(1)] = {'Exists': True} - elif not_exists_m: - expected[not_exists_m.group(1)] = {'Exists': False} + expression_attribute_names = self.body.get('ExpressionAttributeNames', {}) + expression_attribute_values = self.body.get('ExpressionAttributeValues', {}) + expected = condition_expression_to_expected(condition_expression, + expression_attribute_names, + expression_attribute_values) + if expected: + overwrite = False try: result = self.dynamodb_backend.put_item(name, item, expected, overwrite) @@ -228,6 +301,10 @@ class DynamoHandler(BaseResponse): 'TableName': name, 'CapacityUnits': 1 } + if return_values == 'ALL_OLD': + item_dict['Attributes'] = existing_attributes + else: + item_dict.pop('Attributes', None) return dynamo_json_dump(item_dict) else: er = 'com.amazonaws.dynamodb.v20111205#ResourceNotFoundException' @@ -385,7 +462,7 @@ class DynamoHandler(BaseResponse): range_values = [value_alias_map[ range_key_expression_components[2]]] else: - hash_key_expression = key_condition_expression + hash_key_expression = key_condition_expression.strip('()') range_comparison = None range_values = [] @@ -512,7 
+589,11 @@ class DynamoHandler(BaseResponse): def delete_item(self): name = self.body['TableName'] keys = self.body['Key'] - return_values = self.body.get('ReturnValues', '') + return_values = self.body.get('ReturnValues', 'NONE') + if return_values not in ('ALL_OLD', 'NONE'): + er = 'com.amazonaws.dynamodb.v20111205#ValidationException' + return self.error(er, 'Return values set to invalid value') + table = self.dynamodb_backend.get_table(name) if not table: er = 'com.amazonaws.dynamodb.v20120810#ConditionalCheckFailedException' @@ -527,9 +608,9 @@ class DynamoHandler(BaseResponse): return dynamo_json_dump(item_dict) def update_item(self): - name = self.body['TableName'] key = self.body['Key'] + return_values = self.body.get('ReturnValues', 'NONE') update_expression = self.body.get('UpdateExpression') attribute_updates = self.body.get('AttributeUpdates') expression_attribute_names = self.body.get( @@ -537,6 +618,15 @@ class DynamoHandler(BaseResponse): expression_attribute_values = self.body.get( 'ExpressionAttributeValues', {}) existing_item = self.dynamodb_backend.get_item(name, key) + if existing_item: + existing_attributes = existing_item.to_json()['Attributes'] + else: + existing_attributes = {} + + if return_values not in ('NONE', 'ALL_OLD', 'ALL_NEW', 'UPDATED_OLD', + 'UPDATED_NEW'): + er = 'com.amazonaws.dynamodb.v20111205#ValidationException' + return self.error(er, 'Return values set to invalid value') if has_empty_keys_or_values(expression_attribute_values): return get_empty_str_error() @@ -550,23 +640,11 @@ class DynamoHandler(BaseResponse): # expression if not expected: condition_expression = self.body.get('ConditionExpression') - if condition_expression and 'OR' not in condition_expression: - cond_items = [c.strip() - for c in condition_expression.split('AND')] - - if cond_items: - expected = {} - exists_re = re.compile('^attribute_exists\s*\((.*)\)$') - not_exists_re = re.compile( - '^attribute_not_exists\s*\((.*)\)$') - - for cond in cond_items: - 
exists_m = exists_re.match(cond) - not_exists_m = not_exists_re.match(cond) - if exists_m: - expected[exists_m.group(1)] = {'Exists': True} - elif not_exists_m: - expected[not_exists_m.group(1)] = {'Exists': False} + expression_attribute_names = self.body.get('ExpressionAttributeNames', {}) + expression_attribute_values = self.body.get('ExpressionAttributeValues', {}) + expected = condition_expression_to_expected(condition_expression, + expression_attribute_names, + expression_attribute_values) # Support spaces between operators in an update expression # E.g. `a = b + c` -> `a=b+c` @@ -591,8 +669,26 @@ class DynamoHandler(BaseResponse): 'TableName': name, 'CapacityUnits': 0.5 } - if not existing_item: + unchanged_attributes = { + k for k in existing_attributes.keys() + if existing_attributes[k] == item_dict['Attributes'].get(k) + } + changed_attributes = set(existing_attributes.keys()).union(item_dict['Attributes'].keys()).difference(unchanged_attributes) + + if return_values == 'NONE': item_dict['Attributes'] = {} + elif return_values == 'ALL_OLD': + item_dict['Attributes'] = existing_attributes + elif return_values == 'UPDATED_OLD': + item_dict['Attributes'] = { + k: v for k, v in existing_attributes.items() + if k in changed_attributes + } + elif return_values == 'UPDATED_NEW': + item_dict['Attributes'] = { + k: v for k, v in item_dict['Attributes'].items() + if k in changed_attributes + } return dynamo_json_dump(item_dict) diff --git a/moto/dynamodbstreams/__init__.py b/moto/dynamodbstreams/__init__.py new file mode 100644 index 000000000..b35879eba --- /dev/null +++ b/moto/dynamodbstreams/__init__.py @@ -0,0 +1,6 @@ +from __future__ import unicode_literals +from .models import dynamodbstreams_backends +from ..core.models import base_decorator + +dynamodbstreams_backend = dynamodbstreams_backends['us-east-1'] +mock_dynamodbstreams = base_decorator(dynamodbstreams_backends) diff --git a/moto/dynamodbstreams/models.py b/moto/dynamodbstreams/models.py new file 
mode 100644 index 000000000..41cc6e280 --- /dev/null +++ b/moto/dynamodbstreams/models.py @@ -0,0 +1,129 @@ +from __future__ import unicode_literals + +import os +import json +import boto3 +import base64 + +from moto.core import BaseBackend, BaseModel +from moto.dynamodb2.models import dynamodb_backends + + +class ShardIterator(BaseModel): + def __init__(self, streams_backend, stream_shard, shard_iterator_type, sequence_number=None): + self.id = base64.b64encode(os.urandom(472)).decode('utf-8') + self.streams_backend = streams_backend + self.stream_shard = stream_shard + self.shard_iterator_type = shard_iterator_type + if shard_iterator_type == 'TRIM_HORIZON': + self.sequence_number = stream_shard.starting_sequence_number + elif shard_iterator_type == 'LATEST': + self.sequence_number = stream_shard.starting_sequence_number + len(stream_shard.items) + elif shard_iterator_type == 'AT_SEQUENCE_NUMBER': + self.sequence_number = sequence_number + elif shard_iterator_type == 'AFTER_SEQUENCE_NUMBER': + self.sequence_number = sequence_number + 1 + + @property + def arn(self): + return '{}/stream/{}|1|{}'.format( + self.stream_shard.table.table_arn, + self.stream_shard.table.latest_stream_label, + self.id) + + def to_json(self): + return { + 'ShardIterator': self.arn + } + + def get(self, limit=1000): + items = self.stream_shard.get(self.sequence_number, limit) + try: + last_sequence_number = max(i['dynamodb']['SequenceNumber'] for i in items) + new_shard_iterator = ShardIterator(self.streams_backend, + self.stream_shard, + 'AFTER_SEQUENCE_NUMBER', + last_sequence_number) + except ValueError: + new_shard_iterator = ShardIterator(self.streams_backend, + self.stream_shard, + 'AT_SEQUENCE_NUMBER', + self.sequence_number) + + self.streams_backend.shard_iterators[new_shard_iterator.arn] = new_shard_iterator + return { + 'NextShardIterator': new_shard_iterator.arn, + 'Records': items + } + + +class DynamoDBStreamsBackend(BaseBackend): + def __init__(self, region): + self.region = 
region + self.shard_iterators = {} + + def reset(self): + region = self.region + self.__dict__ = {} + self.__init__(region) + + @property + def dynamodb(self): + return dynamodb_backends[self.region] + + def _get_table_from_arn(self, arn): + table_name = arn.split(':', 6)[5].split('/')[1] + return self.dynamodb.get_table(table_name) + + def describe_stream(self, arn): + table = self._get_table_from_arn(arn) + resp = {'StreamDescription': { + 'StreamArn': arn, + 'StreamLabel': table.latest_stream_label, + 'StreamStatus': ('ENABLED' if table.latest_stream_label + else 'DISABLED'), + 'StreamViewType': table.stream_specification['StreamViewType'], + 'CreationRequestDateTime': table.stream_shard.created_on.isoformat(), + 'TableName': table.name, + 'KeySchema': table.schema, + 'Shards': ([table.stream_shard.to_json()] if table.stream_shard + else []) + }} + + return json.dumps(resp) + + def list_streams(self, table_name=None): + streams = [] + for table in self.dynamodb.tables.values(): + if table_name is not None and table.name != table_name: + continue + if table.latest_stream_label: + d = table.describe(base_key='Table') + streams.append({ + 'StreamArn': d['Table']['LatestStreamArn'], + 'TableName': d['Table']['TableName'], + 'StreamLabel': d['Table']['LatestStreamLabel'] + }) + + return json.dumps({'Streams': streams}) + + def get_shard_iterator(self, arn, shard_id, shard_iterator_type, sequence_number=None): + table = self._get_table_from_arn(arn) + assert table.stream_shard.id == shard_id + + shard_iterator = ShardIterator(self, table.stream_shard, + shard_iterator_type, + sequence_number) + self.shard_iterators[shard_iterator.arn] = shard_iterator + + return json.dumps(shard_iterator.to_json()) + + def get_records(self, iterator_arn, limit): + shard_iterator = self.shard_iterators[iterator_arn] + return json.dumps(shard_iterator.get(limit)) + + +available_regions = boto3.session.Session().get_available_regions( + 'dynamodbstreams') +dynamodbstreams_backends = 
{region: DynamoDBStreamsBackend(region=region) + for region in available_regions} diff --git a/moto/dynamodbstreams/responses.py b/moto/dynamodbstreams/responses.py new file mode 100644 index 000000000..c9c113615 --- /dev/null +++ b/moto/dynamodbstreams/responses.py @@ -0,0 +1,34 @@ +from __future__ import unicode_literals + +from moto.core.responses import BaseResponse + +from .models import dynamodbstreams_backends + + +class DynamoDBStreamsHandler(BaseResponse): + + @property + def backend(self): + return dynamodbstreams_backends[self.region] + + def describe_stream(self): + arn = self._get_param('StreamArn') + return self.backend.describe_stream(arn) + + def list_streams(self): + table_name = self._get_param('TableName') + return self.backend.list_streams(table_name) + + def get_shard_iterator(self): + arn = self._get_param('StreamArn') + shard_id = self._get_param('ShardId') + shard_iterator_type = self._get_param('ShardIteratorType') + return self.backend.get_shard_iterator(arn, shard_id, + shard_iterator_type) + + def get_records(self): + arn = self._get_param('ShardIterator') + limit = self._get_param('Limit') + if limit is None: + limit = 1000 + return self.backend.get_records(arn, limit) diff --git a/moto/dynamodbstreams/urls.py b/moto/dynamodbstreams/urls.py new file mode 100644 index 000000000..1d0f94c35 --- /dev/null +++ b/moto/dynamodbstreams/urls.py @@ -0,0 +1,10 @@ +from __future__ import unicode_literals +from .responses import DynamoDBStreamsHandler + +url_bases = [ + "https?://streams.dynamodb.(.+).amazonaws.com" +] + +url_paths = { + "{0}/$": DynamoDBStreamsHandler.dispatch, +} diff --git a/moto/ec2/models.py b/moto/ec2/models.py old mode 100755 new mode 100644 index b94cac479..0936d2be9 --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -134,6 +134,8 @@ def utc_date_and_time(): def validate_resource_ids(resource_ids): + if not resource_ids: + raise MissingParameterError(parameter='resourceIdSet') for resource_id in resource_ids: if not 
is_valid_resource_id(resource_id): raise InvalidID(resource_id=resource_id) @@ -189,7 +191,7 @@ class NetworkInterface(TaggedEC2Resource): self.ec2_backend = ec2_backend self.id = random_eni_id() self.device_index = device_index - self.private_ip_address = private_ip_address + self.private_ip_address = private_ip_address or random_private_ip() self.subnet = subnet self.instance = None self.attachment_id = None @@ -388,6 +390,7 @@ class Instance(TaggedEC2Resource, BotoInstance): self.ebs_optimized = kwargs.get("ebs_optimized", False) self.source_dest_check = "true" self.launch_time = utc_date_and_time() + self.ami_launch_index = kwargs.get("ami_launch_index", 0) self.disable_api_termination = kwargs.get("disable_api_termination", False) self._spot_fleet_id = kwargs.get("spot_fleet_id", None) associate_public_ip = kwargs.get("associate_public_ip", False) @@ -719,6 +722,7 @@ class InstanceBackend(object): instance_tags = tags.get('instance', {}) for index in range(count): + kwargs["ami_launch_index"] = index new_instance = Instance( self, image_id, @@ -1115,7 +1119,7 @@ class Ami(TaggedEC2Resource): elif filter_name == 'image-id': return self.id elif filter_name == 'is-public': - return str(self.is_public) + return self.is_public_string elif filter_name == 'state': return self.state elif filter_name == 'name': @@ -2230,6 +2234,10 @@ class VPCPeeringConnectionStatus(object): self.code = code self.message = message + def deleted(self): + self.code = 'deleted' + self.message = 'Deleted by {deleter ID}' + def initiating(self): self.code = 'initiating-request' self.message = 'Initiating Request to {accepter ID}' @@ -2292,9 +2300,8 @@ class VPCPeeringConnectionBackend(object): return self.vpc_pcxs.get(vpc_pcx_id) def delete_vpc_peering_connection(self, vpc_pcx_id): - deleted = self.vpc_pcxs.pop(vpc_pcx_id, None) - if not deleted: - raise InvalidVPCPeeringConnectionIdError(vpc_pcx_id) + deleted = self.get_vpc_peering_connection(vpc_pcx_id) + deleted._status.deleted() return 
deleted def accept_vpc_peering_connection(self, vpc_pcx_id): @@ -2461,7 +2468,7 @@ class SubnetBackend(object): default_for_az, map_public_ip_on_launch) # AWS associates a new subnet with the default Network ACL - self.associate_default_network_acl_with_subnet(subnet_id) + self.associate_default_network_acl_with_subnet(subnet_id, vpc_id) self.subnets[availability_zone][subnet_id] = subnet return subnet @@ -2876,7 +2883,7 @@ class SpotInstanceRequest(BotoSpotRequest, TaggedEC2Resource): def __init__(self, ec2_backend, spot_request_id, price, image_id, type, valid_from, valid_until, launch_group, availability_zone_group, key_name, security_groups, user_data, instance_type, placement, - kernel_id, ramdisk_id, monitoring_enabled, subnet_id, spot_fleet_id, + kernel_id, ramdisk_id, monitoring_enabled, subnet_id, tags, spot_fleet_id, **kwargs): super(SpotInstanceRequest, self).__init__(**kwargs) ls = LaunchSpecification() @@ -2900,6 +2907,7 @@ class SpotInstanceRequest(BotoSpotRequest, TaggedEC2Resource): ls.monitored = monitoring_enabled ls.subnet_id = subnet_id self.spot_fleet_id = spot_fleet_id + self.tags = tags if security_groups: for group_name in security_groups: @@ -2933,6 +2941,7 @@ class SpotInstanceRequest(BotoSpotRequest, TaggedEC2Resource): security_group_names=[], security_group_ids=self.launch_specification.groups, spot_fleet_id=self.spot_fleet_id, + tags=self.tags, ) instance = reservation.instances[0] return instance @@ -2948,15 +2957,16 @@ class SpotRequestBackend(object): valid_until, launch_group, availability_zone_group, key_name, security_groups, user_data, instance_type, placement, kernel_id, ramdisk_id, - monitoring_enabled, subnet_id, spot_fleet_id=None): + monitoring_enabled, subnet_id, tags=None, spot_fleet_id=None): requests = [] + tags = tags or {} for _ in range(count): spot_request_id = random_spot_request_id() request = SpotInstanceRequest(self, spot_request_id, price, image_id, type, valid_from, valid_until, launch_group, 
availability_zone_group, key_name, security_groups, user_data, instance_type, placement, kernel_id, ramdisk_id, - monitoring_enabled, subnet_id, spot_fleet_id) + monitoring_enabled, subnet_id, tags, spot_fleet_id) self.spot_instance_requests[spot_request_id] = request requests.append(request) return requests @@ -2976,8 +2986,8 @@ class SpotRequestBackend(object): class SpotFleetLaunchSpec(object): def __init__(self, ebs_optimized, group_set, iam_instance_profile, image_id, - instance_type, key_name, monitoring, spot_price, subnet_id, user_data, - weighted_capacity): + instance_type, key_name, monitoring, spot_price, subnet_id, tag_specifications, + user_data, weighted_capacity): self.ebs_optimized = ebs_optimized self.group_set = group_set self.iam_instance_profile = iam_instance_profile @@ -2987,6 +2997,7 @@ class SpotFleetLaunchSpec(object): self.monitoring = monitoring self.spot_price = spot_price self.subnet_id = subnet_id + self.tag_specifications = tag_specifications self.user_data = user_data self.weighted_capacity = float(weighted_capacity) @@ -3017,6 +3028,7 @@ class SpotFleetRequest(TaggedEC2Resource): monitoring=spec.get('monitoring._enabled'), spot_price=spec.get('spot_price', self.spot_price), subnet_id=spec['subnet_id'], + tag_specifications=self._parse_tag_specifications(spec), user_data=spec.get('user_data'), weighted_capacity=spec['weighted_capacity'], ) @@ -3099,6 +3111,7 @@ class SpotFleetRequest(TaggedEC2Resource): monitoring_enabled=launch_spec.monitoring, subnet_id=launch_spec.subnet_id, spot_fleet_id=self.id, + tags=launch_spec.tag_specifications, ) self.spot_requests.extend(requests) self.fulfilled_capacity += added_weight @@ -3121,6 +3134,25 @@ class SpotFleetRequest(TaggedEC2Resource): self.spot_requests = [req for req in self.spot_requests if req.instance.id not in instance_ids] self.ec2_backend.terminate_instances(instance_ids) + def _parse_tag_specifications(self, spec): + try: + tag_spec_num = max([int(key.split('.')[1]) for key in 
spec if key.startswith("tag_specification_set")]) + except ValueError: # no tag specifications + return {} + + tag_specifications = {} + for si in range(1, tag_spec_num + 1): + resource_type = spec["tag_specification_set.{si}._resource_type".format(si=si)] + + tags = [key for key in spec if key.startswith("tag_specification_set.{si}._tag".format(si=si))] + tag_num = max([int(key.split('.')[3]) for key in tags]) + tag_specifications[resource_type] = dict(( + spec["tag_specification_set.{si}._tag.{ti}._key".format(si=si, ti=ti)], + spec["tag_specification_set.{si}._tag.{ti}._value".format(si=si, ti=ti)], + ) for ti in range(1, tag_num + 1)) + + return tag_specifications + class SpotFleetBackend(object): def __init__(self): @@ -3557,8 +3589,22 @@ class NetworkAclBackend(object): self.get_vpc(vpc_id) network_acl = NetworkAcl(self, network_acl_id, vpc_id, default) self.network_acls[network_acl_id] = network_acl + if default: + self.add_default_entries(network_acl_id) return network_acl + def add_default_entries(self, network_acl_id): + default_acl_entries = [ + {'rule_number': 100, 'rule_action': 'allow', 'egress': 'true'}, + {'rule_number': 32767, 'rule_action': 'deny', 'egress': 'true'}, + {'rule_number': 100, 'rule_action': 'allow', 'egress': 'false'}, + {'rule_number': 32767, 'rule_action': 'deny', 'egress': 'false'} + ] + for entry in default_acl_entries: + self.create_network_acl_entry(network_acl_id=network_acl_id, rule_number=entry['rule_number'], protocol='-1', + rule_action=entry['rule_action'], egress=entry['egress'], cidr_block='0.0.0.0/0', + icmp_code=None, icmp_type=None, port_range_from=None, port_range_to=None) + def get_all_network_acls(self, network_acl_ids=None, filters=None): network_acls = self.network_acls.values() @@ -3633,9 +3679,9 @@ class NetworkAclBackend(object): new_acl.associations[new_assoc_id] = association return association - def associate_default_network_acl_with_subnet(self, subnet_id): + def 
associate_default_network_acl_with_subnet(self, subnet_id, vpc_id): association_id = random_network_acl_subnet_association_id() - acl = next(acl for acl in self.network_acls.values() if acl.default) + acl = next(acl for acl in self.network_acls.values() if acl.default and acl.vpc_id == vpc_id) acl.associations[association_id] = NetworkAclAssociation(self, association_id, subnet_id, acl.id) diff --git a/moto/ec2/responses/elastic_block_store.py b/moto/ec2/responses/elastic_block_store.py index aa0d7f73b..acd37b283 100644 --- a/moto/ec2/responses/elastic_block_store.py +++ b/moto/ec2/responses/elastic_block_store.py @@ -150,16 +150,18 @@ CREATE_VOLUME_RESPONSE = """ 128: + raise TagKeyTooBig(tag_key, param=exception_param) + + # Validate that the tag key fits the proper Regex: + # [\w\s_.:/=+\-@]+ SHOULD be the same as the Java regex on the AWS documentation: [\p{L}\p{Z}\p{N}_.:/=+\-@]+ + match = re.findall(r'[\w\s_.:/=+\-@]+', tag_key) + # Kudos if you can come up with a better way of doing a global search :) + if not len(match) or len(match[0]) < len(tag_key): + raise InvalidTagCharacters(tag_key, param=exception_param) + + def _check_tag_duplicate(self, all_tags, tag_key): + """Validates that a tag key is not a duplicate + + :param all_tags: Dict to check if there is a duplicate tag. + :param tag_key: The tag key to check against. 
+ :return: + """ + if tag_key in all_tags: + raise DuplicateTags() + + def list_role_tags(self, role_name, marker, max_items=100): + role = self.get_role(role_name) + + max_items = int(max_items) + tag_index = sorted(role.tags) + start_idx = int(marker) if marker else 0 + + tag_index = tag_index[start_idx:start_idx + max_items] + + if len(role.tags) <= (start_idx + max_items): + marker = None + else: + marker = str(start_idx + max_items) + + # Make the tag list of dict's: + tags = [role.tags[tag] for tag in tag_index] + + return tags, marker + + def tag_role(self, role_name, tags): + if len(tags) > 50: + raise TooManyTags(tags) + + role = self.get_role(role_name) + + tag_keys = {} + for tag in tags: + # Need to index by the lowercase tag key since the keys are case insensitive, but their case is retained. + ref_key = tag['Key'].lower() + self._check_tag_duplicate(tag_keys, ref_key) + self._validate_tag_key(tag['Key']) + if len(tag['Value']) > 256: + raise TagValueTooBig(tag['Value']) + + tag_keys[ref_key] = tag + + role.tags.update(tag_keys) + + def untag_role(self, role_name, tag_keys): + if len(tag_keys) > 50: + raise TooManyTags(tag_keys, param='tagKeys') + + role = self.get_role(role_name) + + for key in tag_keys: + ref_key = key.lower() + self._validate_tag_key(key, exception_param='tagKeys') + + role.tags.pop(ref_key, None) + def create_policy_version(self, policy_arn, policy_document, set_as_default): policy = self.get_policy(policy_arn) if not policy: raise IAMNotFoundException("Policy not found") version = PolicyVersion(policy_arn, policy_document, set_as_default) policy.versions.append(version) - version.version_id = 'v{0}'.format(len(policy.versions)) + version.version_id = 'v{0}'.format(policy.next_version_num) + policy.next_version_num += 1 if set_as_default: policy.default_version_id = version.version_id return version @@ -765,6 +899,70 @@ class IAMBackend(BaseBackend): return users + def update_user(self, user_name, new_path=None, 
new_user_name=None): + try: + user = self.users[user_name] + except KeyError: + raise IAMNotFoundException("User {0} not found".format(user_name)) + + if new_path: + user.path = new_path + if new_user_name: + user.name = new_user_name + self.users[new_user_name] = self.users.pop(user_name) + + def list_roles(self, path_prefix, marker, max_items): + roles = None + try: + roles = self.roles.values() + except KeyError: + raise IAMNotFoundException( + "Users {0}, {1}, {2} not found".format(path_prefix, marker, max_items)) + + return roles + + def upload_signing_certificate(self, user_name, body): + user = self.get_user(user_name) + cert_id = random_resource_id(size=32) + + # Validate the signing cert: + try: + if sys.version_info < (3, 0): + data = bytes(body) + else: + data = bytes(body, 'utf8') + + x509.load_pem_x509_certificate(data, default_backend()) + + except Exception: + raise MalformedCertificate(body) + + user.signing_certificates[cert_id] = SigningCertificate(cert_id, user_name, body) + + return user.signing_certificates[cert_id] + + def delete_signing_certificate(self, user_name, cert_id): + user = self.get_user(user_name) + + try: + del user.signing_certificates[cert_id] + except KeyError: + raise IAMNotFoundException("The Certificate with id {id} cannot be found.".format(id=cert_id)) + + def list_signing_certificates(self, user_name): + user = self.get_user(user_name) + + return list(user.signing_certificates.values()) + + def update_signing_certificate(self, user_name, cert_id, status): + user = self.get_user(user_name) + + try: + user.signing_certificates[cert_id].status = status + + except KeyError: + raise IAMNotFoundException("The Certificate with id {id} cannot be found.".format(id=cert_id)) + def create_login_profile(self, user_name, password): # This does not currently deal with PasswordPolicyViolation. 
user = self.get_user(user_name) @@ -838,6 +1036,24 @@ class IAMBackend(BaseBackend): user = self.get_user(user_name) user.update_access_key(access_key_id, status) + def get_access_key_last_used(self, access_key_id): + access_keys_list = self.get_all_access_keys_for_all_users() + for key in access_keys_list: + if key.access_key_id == access_key_id: + return { + 'user_name': key.user_name, + 'last_used': key.last_used + } + else: + raise IAMNotFoundException( + "The Access Key with id {0} cannot be found".format(access_key_id)) + + def get_all_access_keys_for_all_users(self): + access_keys_list = [] + for user_name in self.users: + access_keys_list += self.get_all_access_keys(user_name) + return access_keys_list + def get_all_access_keys(self, user_name, marker=None, max_items=None): user = self.get_user(user_name) keys = user.get_all_access_keys() @@ -937,5 +1153,33 @@ class IAMBackend(BaseBackend): 'managed_policies': returned_policies } + def create_saml_provider(self, name, saml_metadata_document): + saml_provider = SAMLProvider(name, saml_metadata_document) + self.saml_providers[name] = saml_provider + return saml_provider + + def update_saml_provider(self, saml_provider_arn, saml_metadata_document): + saml_provider = self.get_saml_provider(saml_provider_arn) + saml_provider.saml_metadata_document = saml_metadata_document + return saml_provider + + def delete_saml_provider(self, saml_provider_arn): + try: + for saml_provider in list(self.list_saml_providers()): + if saml_provider.arn == saml_provider_arn: + del self.saml_providers[saml_provider.name] + except KeyError: + raise IAMNotFoundException( + "SAMLProvider {0} not found".format(saml_provider_arn)) + + def list_saml_providers(self): + return self.saml_providers.values() + + def get_saml_provider(self, saml_provider_arn): + for saml_provider in self.list_saml_providers(): + if saml_provider.arn == saml_provider_arn: + return saml_provider + raise IAMNotFoundException("SamlProvider {0} not 
found".format(saml_provider_arn)) + iam_backend = IAMBackend() diff --git a/moto/iam/responses.py b/moto/iam/responses.py index 22558f3f6..e5b4c9070 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -107,6 +107,69 @@ class IamResponse(BaseResponse): template = self.response_template(LIST_POLICIES_TEMPLATE) return template.render(policies=policies, marker=marker) + def list_entities_for_policy(self): + policy_arn = self._get_param('PolicyArn') + + # Options 'User'|'Role'|'Group'|'LocalManagedPolicy'|'AWSManagedPolicy + entity = self._get_param('EntityFilter') + path_prefix = self._get_param('PathPrefix') + # policy_usage_filter = self._get_param('PolicyUsageFilter') + marker = self._get_param('Marker') + max_items = self._get_param('MaxItems') + + entity_roles = [] + entity_groups = [] + entity_users = [] + + if entity == 'User': + users = iam_backend.list_users(path_prefix, marker, max_items) + if users: + for user in users: + for p in user.managed_policies: + if p == policy_arn: + entity_users.append(user.name) + + elif entity == 'Role': + roles = iam_backend.list_roles(path_prefix, marker, max_items) + if roles: + for role in roles: + for p in role.managed_policies: + if p == policy_arn: + entity_roles.append(role.name) + + elif entity == 'Group': + groups = iam_backend.list_groups() + if groups: + for group in groups: + for p in group.managed_policies: + if p == policy_arn: + entity_groups.append(group.name) + + elif entity == 'LocalManagedPolicy' or entity == 'AWSManagedPolicy': + users = iam_backend.list_users(path_prefix, marker, max_items) + if users: + for user in users: + for p in user.managed_policies: + if p == policy_arn: + entity_users.append(user.name) + + roles = iam_backend.list_roles(path_prefix, marker, max_items) + if roles: + for role in roles: + for p in role.managed_policies: + if p == policy_arn: + entity_roles.append(role.name) + + groups = iam_backend.list_groups() + if groups: + for group in groups: + for p in 
group.managed_policies: + if p == policy_arn: + entity_groups.append(group.name) + + template = self.response_template(LIST_ENTITIES_FOR_POLICY_TEMPLATE) + return template.render(roles=entity_roles, users=entity_users, groups=entity_groups) + def create_role(self): role_name = self._get_param('RoleName') path = self._get_param('Path') @@ -169,6 +232,20 @@ class IamResponse(BaseResponse): template = self.response_template(GENERIC_EMPTY_TEMPLATE) return template.render(name="UpdateAssumeRolePolicyResponse") + def update_role_description(self): + role_name = self._get_param('RoleName') + description = self._get_param('Description') + role = iam_backend.update_role_description(role_name, description) + template = self.response_template(UPDATE_ROLE_DESCRIPTION_TEMPLATE) + return template.render(role=role) + + def update_role(self): + role_name = self._get_param('RoleName') + description = self._get_param('Description') + role = iam_backend.update_role(role_name, description) + template = self.response_template(UPDATE_ROLE_TEMPLATE) + return template.render(role=role) + def create_policy_version(self): policy_arn = self._get_param('PolicyArn') policy_document = self._get_param('PolicyDocument') @@ -201,7 +278,7 @@ class IamResponse(BaseResponse): def create_instance_profile(self): profile_name = self._get_param('InstanceProfileName') - path = self._get_param('Path') + path = self._get_param('Path', '/') profile = iam_backend.create_instance_profile( profile_name, path, role_ids=[]) @@ -363,6 +440,18 @@ class IamResponse(BaseResponse): template = self.response_template(LIST_USERS_TEMPLATE) return template.render(action='List', users=users) + def update_user(self): + user_name = self._get_param('UserName') + new_path = self._get_param('NewPath') + new_user_name = self._get_param('NewUserName') + iam_backend.update_user(user_name, new_path, new_user_name) + if new_user_name: + user = iam_backend.get_user(new_user_name) + else: + user = iam_backend.get_user(user_name) + 
template = self.response_template(USER_TEMPLATE) + return template.render(action='Update', user=user) + def create_login_profile(self): user_name = self._get_param('UserName') password = self._get_param('Password') @@ -454,9 +543,14 @@ class IamResponse(BaseResponse): template = self.response_template(GENERIC_EMPTY_TEMPLATE) return template.render(name='UpdateAccessKey') + def get_access_key_last_used(self): + access_key_id = self._get_param('AccessKeyId') + last_used_response = iam_backend.get_access_key_last_used(access_key_id) + template = self.response_template(GET_ACCESS_KEY_LAST_USED_TEMPLATE) + return template.render(user_name=last_used_response["user_name"], last_used=last_used_response["last_used"]) + def list_access_keys(self): user_name = self._get_param('UserName') - keys = iam_backend.get_all_access_keys(user_name) template = self.response_template(LIST_ACCESS_KEYS_TEMPLATE) return template.render(user_name=user_name, keys=keys) @@ -549,9 +643,137 @@ class IamResponse(BaseResponse): policies=account_details['managed_policies'], users=account_details['users'], groups=account_details['groups'], - roles=account_details['roles'] + roles=account_details['roles'], + get_groups_for_user=iam_backend.get_groups_for_user ) + def create_saml_provider(self): + saml_provider_name = self._get_param('Name') + saml_metadata_document = self._get_param('SAMLMetadataDocument') + saml_provider = iam_backend.create_saml_provider(saml_provider_name, saml_metadata_document) + + template = self.response_template(CREATE_SAML_PROVIDER_TEMPLATE) + return template.render(saml_provider=saml_provider) + + def update_saml_provider(self): + saml_provider_arn = self._get_param('SAMLProviderArn') + saml_metadata_document = self._get_param('SAMLMetadataDocument') + saml_provider = iam_backend.update_saml_provider(saml_provider_arn, saml_metadata_document) + + template = self.response_template(UPDATE_SAML_PROVIDER_TEMPLATE) + return template.render(saml_provider=saml_provider) + + def 
delete_saml_provider(self): + saml_provider_arn = self._get_param('SAMLProviderArn') + iam_backend.delete_saml_provider(saml_provider_arn) + + template = self.response_template(DELETE_SAML_PROVIDER_TEMPLATE) + return template.render() + + def list_saml_providers(self): + saml_providers = iam_backend.list_saml_providers() + + template = self.response_template(LIST_SAML_PROVIDERS_TEMPLATE) + return template.render(saml_providers=saml_providers) + + def get_saml_provider(self): + saml_provider_arn = self._get_param('SAMLProviderArn') + saml_provider = iam_backend.get_saml_provider(saml_provider_arn) + + template = self.response_template(GET_SAML_PROVIDER_TEMPLATE) + return template.render(saml_provider=saml_provider) + + def upload_signing_certificate(self): + user_name = self._get_param('UserName') + cert_body = self._get_param('CertificateBody') + + cert = iam_backend.upload_signing_certificate(user_name, cert_body) + template = self.response_template(UPLOAD_SIGNING_CERTIFICATE_TEMPLATE) + return template.render(cert=cert) + + def update_signing_certificate(self): + user_name = self._get_param('UserName') + cert_id = self._get_param('CertificateId') + status = self._get_param('Status') + + iam_backend.update_signing_certificate(user_name, cert_id, status) + template = self.response_template(UPDATE_SIGNING_CERTIFICATE_TEMPLATE) + return template.render() + + def delete_signing_certificate(self): + user_name = self._get_param('UserName') + cert_id = self._get_param('CertificateId') + + iam_backend.delete_signing_certificate(user_name, cert_id) + template = self.response_template(DELETE_SIGNING_CERTIFICATE_TEMPLATE) + return template.render() + + def list_signing_certificates(self): + user_name = self._get_param('UserName') + + certs = iam_backend.list_signing_certificates(user_name) + template = self.response_template(LIST_SIGNING_CERTIFICATES_TEMPLATE) + return template.render(user_name=user_name, certificates=certs) + + def list_role_tags(self): + role_name = 
self._get_param('RoleName') + marker = self._get_param('Marker') + max_items = self._get_param('MaxItems', 100) + + tags, marker = iam_backend.list_role_tags(role_name, marker, max_items) + + template = self.response_template(LIST_ROLE_TAG_TEMPLATE) + return template.render(tags=tags, marker=marker) + + def tag_role(self): + role_name = self._get_param('RoleName') + tags = self._get_multi_param('Tags.member') + + iam_backend.tag_role(role_name, tags) + + template = self.response_template(TAG_ROLE_TEMPLATE) + return template.render() + + def untag_role(self): + role_name = self._get_param('RoleName') + tag_keys = self._get_multi_param('TagKeys.member') + + iam_backend.untag_role(role_name, tag_keys) + + template = self.response_template(UNTAG_ROLE_TEMPLATE) + return template.render() + + +LIST_ENTITIES_FOR_POLICY_TEMPLATE = """ + + + {% for role in roles %} + + {{ role }} + + {% endfor %} + + + {% for group in groups %} + + {{ group }} + + {% endfor %} + + false + + {% for user in users %} + + {{ user }} + + {% endfor %} + + + + eb358e22-9d1f-11e4-93eb-190ecEXAMPLE + +""" + ATTACH_ROLE_POLICY_TEMPLATE = """ @@ -734,7 +956,7 @@ CREATE_INSTANCE_PROFILE_TEMPLATE = """ + + + + df37e965-9967-11e1-a4c3-270EXAMPLE04 + +""" + +UPDATE_ROLE_DESCRIPTION_TEMPLATE = """ + + + {{ role.path }} + {{ role.arn }} + {{ role.name }} + {{ role.assume_role_policy_document }} + {{ role.create_date.isoformat() }} + {{ role.id }} + {% if role.tags %} + + {% for tag in role.get_tags() %} + + {{ tag['Key'] }} + {{ tag['Value'] }} + + {% endfor %} + + {% endif %} + + + + df37e965-9967-11e1-a4c3-270EXAMPLE04 + +""" + GET_ROLE_TEMPLATE = """ @@ -803,8 +1059,18 @@ GET_ROLE_TEMPLATE = """ {{ policy }} {% endfor %} + false - false 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE @@ -1240,11 +1506,23 @@ LIST_ACCESS_KEYS_TEMPLATE = """ """ + +GET_ACCESS_KEY_LAST_USED_TEMPLATE = """ + + + {{ user_name }} + + {{ last_used }} + + + +""" + CREDENTIAL_REPORT_GENERATING = """ - STARTED - No report exists. 
Starting a new report generation task + STARTED + No report exists. Starting a new report generation task fa788a82-aa8a-11e4-a278-1786c418872b" @@ -1253,7 +1531,7 @@ CREDENTIAL_REPORT_GENERATING = """ CREDENTIAL_REPORT_GENERATED = """ - COMPLETE + COMPLETE fa788a82-aa8a-11e4-a278-1786c418872b" @@ -1262,7 +1540,7 @@ CREDENTIAL_REPORT_GENERATED = """ CREDENTIAL_REPORT = """ - {{ report }} + {{ report }} 2015-02-02T20:02:02Z text/csv @@ -1277,23 +1555,23 @@ LIST_INSTANCE_PROFILES_FOR_ROLE_TEMPLATE = """ {% for profile in instance_profiles %} - {{ profile.id }} - - {% for role in profile.roles %} - - {{ role.path }} - {{ role.arn }} - {{ role.name }} - {{ role.assume_policy_document }} - 2012-05-09T15:45:35Z - {{ role.id }} - - {% endfor %} - - {{ profile.name }} - {{ profile.path }} - {{ profile.arn }} - 2012-05-09T16:27:11Z + {{ profile.id }} + + {% for role in profile.roles %} + + {{ role.path }} + {{ role.arn }} + {{ role.name }} + {{ role.assume_policy_document }} + {{ role.create_date }} + {{ role.id }} + + {% endfor %} + + {{ profile.name }} + {{ profile.path }} + {{ profile.arn }} + {{ profile.create_date }} {% endfor %} @@ -1376,13 +1654,24 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """ {% for user in users %} - - + + {% for group in get_groups_for_user(user.name) %} + {{ group.name }} + {% endfor %} + + + {% for policy in user.managed_policies %} + + {{ user.managed_policies[policy].name }} + {{ policy }} + + {% endfor %} + {{ user.id }} {{ user.path }} {{ user.name }} {{ user.arn }} - 2012-05-09T15:45:35Z + {{ user.created_iso_8601 }} {% endfor %} @@ -1391,53 +1680,75 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """ {{ group.id }} - {% for policy in group.managed_policies %} - - {{ policy.name }} - {{ policy.arn }} - + {% for policy_arn in group.managed_policies %} + + {{ group.managed_policies[policy_arn].name }} + {{ policy_arn }} + {% endfor %} {{ group.name }} {{ group.path }} {{ group.arn }} - 2012-05-09T16:27:11Z - + {{ group.create_date }} + + 
{% for policy in group.policies %} + + {{ policy }} + {{ group.get_policy(policy) }} + + {% endfor %} + {% endfor %} {% for role in roles %} - - - {% for policy in role.managed_policies %} + + {% for inline_policy in role.policies %} - {{ policy.name }} - {{ policy.arn }} + {{ inline_policy }} + {{ role.policies[inline_policy] }} + + {% endfor %} + + + {% for policy_arn in role.managed_policies %} + + {{ role.managed_policies[policy_arn].name }} + {{ policy_arn }} {% endfor %} + + {% for tag in role.get_tags() %} + + {{ tag['Key'] }} + {{ tag['Value'] }} + + {% endfor %} + {% for profile in instance_profiles %} - {{ profile.id }} - - {% for role in profile.roles %} - - {{ role.path }} - {{ role.arn }} - {{ role.name }} - {{ role.assume_role_policy_document }} - 2012-05-09T15:45:35Z - {{ role.id }} - - {% endfor %} - - {{ profile.name }} - {{ profile.path }} - {{ profile.arn }} - 2012-05-09T16:27:11Z + {{ profile.id }} + + {% for role in profile.roles %} + + {{ role.path }} + {{ role.arn }} + {{ role.name }} + {{ role.assume_role_policy_document }} + {{ role.create_date }} + {{ role.id }} + + {% endfor %} + + {{ profile.name }} + {{ profile.path }} + {{ profile.arn }} + {{ profile.create_date }} {% endfor %} @@ -1445,7 +1756,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """{{ role.arn }} {{ role.name }} {{ role.assume_role_policy_document }} - 2014-07-30T17:09:20Z + {{ role.create_date }} {{ role.id }} {% endfor %} @@ -1458,25 +1769,20 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """{{ policy.id }} {{ policy.path }} + {% for policy_version in policy.versions %} - - {"Version":"2012-10-17","Statement":{"Effect":"Allow", - "Action":["iam:CreatePolicy","iam:CreatePolicyVersion", - "iam:DeletePolicy","iam:DeletePolicyVersion","iam:GetPolicy", - "iam:GetPolicyVersion","iam:ListPolicies", - "iam:ListPolicyVersions","iam:SetDefaultPolicyVersion"], - "Resource":"*"}} - - true - v1 - 2012-05-09T16:27:11Z + {{ policy_version.document }} + {{ policy_version.is_default 
}} + {{ policy_version.version_id }} + {{ policy_version.create_datetime }} + {% endfor %} {{ policy.arn }} 1 - 2012-05-09T16:27:11Z + {{ policy.create_datetime }} true - 2012-05-09T16:27:11Z + {{ policy.update_datetime }} {% endfor %} @@ -1485,3 +1791,139 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """92e79ae7-7399-11e4-8c85-4b53eEXAMPLE """ + +CREATE_SAML_PROVIDER_TEMPLATE = """ + + {{ saml_provider.arn }} + + + 29f47818-99f5-11e1-a4c3-27EXAMPLE804 + +""" + +LIST_SAML_PROVIDERS_TEMPLATE = """ + + + {% for saml_provider in saml_providers %} + + {{ saml_provider.arn }} + 2032-05-09T16:27:11Z + 2012-05-09T16:27:03Z + + {% endfor %} + + + + fd74fa8d-99f3-11e1-a4c3-27EXAMPLE804 + +""" + +GET_SAML_PROVIDER_TEMPLATE = """ + + 2012-05-09T16:27:11Z + 2015-12-31T21:59:59Z + {{ saml_provider.saml_metadata_document }} + + + 29f47818-99f5-11e1-a4c3-27EXAMPLE804 + +""" + +DELETE_SAML_PROVIDER_TEMPLATE = """ + + c749ee7f-99ef-11e1-a4c3-27EXAMPLE804 + +""" + +UPDATE_SAML_PROVIDER_TEMPLATE = """ + + {{ saml_provider.arn }} + + + 29f47818-99f5-11e1-a4c3-27EXAMPLE804 + +""" + +UPLOAD_SIGNING_CERTIFICATE_TEMPLATE = """ + + + {{ cert.user_name }} + {{ cert.id }} + {{ cert.body }} + {{ cert.status }} + + + + 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE + +""" + + +UPDATE_SIGNING_CERTIFICATE_TEMPLATE = """ + + EXAMPLE8-90ab-cdef-fedc-ba987EXAMPLE + +""" + + +DELETE_SIGNING_CERTIFICATE_TEMPLATE = """ + + 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE + +""" + + +LIST_SIGNING_CERTIFICATES_TEMPLATE = """ + + {{ user_name }} + + {% for cert in certificates %} + + {{ user_name }} + {{ cert.id }} + {{ cert.body }} + {{ cert.status }} + + {% endfor %} + + false + + + 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE + +""" + + +TAG_ROLE_TEMPLATE = """ + + EXAMPLE8-90ab-cdef-fedc-ba987EXAMPLE + +""" + + +LIST_ROLE_TAG_TEMPLATE = """ + + {{ 'true' if marker else 'false' }} + {% if marker %} + {{ marker }} + {% endif %} + + {% for tag in tags %} + + {{ tag['Key'] }} + {{ tag['Value'] }} + + {% endfor %} + + + + 
EXAMPLE8-90ab-cdef-fedc-ba987EXAMPLE + +""" + + +UNTAG_ROLE_TEMPLATE = """ + + EXAMPLE8-90ab-cdef-fedc-ba987EXAMPLE + +""" diff --git a/moto/iam/utils.py b/moto/iam/utils.py index 1fae85a6c..f59bdfffe 100644 --- a/moto/iam/utils.py +++ b/moto/iam/utils.py @@ -12,8 +12,7 @@ def random_alphanumeric(length): ) -def random_resource_id(): - size = 20 +def random_resource_id(size=20): chars = list(range(10)) + list(string.ascii_lowercase) return ''.join(six.text_type(random.choice(chars)) for x in range(size)) diff --git a/moto/iot/exceptions.py b/moto/iot/exceptions.py index 72cf735b2..b8b3f1e84 100644 --- a/moto/iot/exceptions.py +++ b/moto/iot/exceptions.py @@ -1,42 +1,50 @@ -from __future__ import unicode_literals -from moto.core.exceptions import JsonRESTError - - -class IoTClientError(JsonRESTError): - code = 400 - - -class ResourceNotFoundException(IoTClientError): - def __init__(self): - self.code = 404 - super(ResourceNotFoundException, self).__init__( - "ResourceNotFoundException", - "The specified resource does not exist" - ) - - -class InvalidRequestException(IoTClientError): - def __init__(self, msg=None): - self.code = 400 - super(InvalidRequestException, self).__init__( - "InvalidRequestException", - msg or "The request is not valid." - ) - - -class InvalidStateTransitionException(IoTClientError): - def __init__(self, msg=None): - self.code = 409 - super(InvalidStateTransitionException, self).__init__( - "InvalidStateTransitionException", - msg or "An attempt was made to change to an invalid state." - ) - - -class VersionConflictException(IoTClientError): - def __init__(self, name): - self.code = 409 - super(VersionConflictException, self).__init__( - 'VersionConflictException', - 'The version for thing %s does not match the expected version.' 
% name - ) +from __future__ import unicode_literals +from moto.core.exceptions import JsonRESTError + + +class IoTClientError(JsonRESTError): + code = 400 + + +class ResourceNotFoundException(IoTClientError): + def __init__(self): + self.code = 404 + super(ResourceNotFoundException, self).__init__( + "ResourceNotFoundException", + "The specified resource does not exist" + ) + + +class InvalidRequestException(IoTClientError): + def __init__(self, msg=None): + self.code = 400 + super(InvalidRequestException, self).__init__( + "InvalidRequestException", + msg or "The request is not valid." + ) + + +class VersionConflictException(IoTClientError): + def __init__(self, name): + self.code = 409 + super(VersionConflictException, self).__init__( + 'VersionConflictException', + 'The version for thing %s does not match the expected version.' % name + ) + + +class CertificateStateException(IoTClientError): + def __init__(self, msg, cert_id): + self.code = 406 + super(CertificateStateException, self).__init__( + 'CertificateStateException', + '%s Id: %s' % (msg, cert_id) + ) + + +class DeleteConflictException(IoTClientError): + def __init__(self, msg): + self.code = 409 + super(DeleteConflictException, self).__init__( + 'DeleteConflictException', msg + ) \ No newline at end of file diff --git a/moto/kms/exceptions.py b/moto/kms/exceptions.py new file mode 100644 index 000000000..70edd3dcd --- /dev/null +++ b/moto/kms/exceptions.py @@ -0,0 +1,36 @@ +from __future__ import unicode_literals +from moto.core.exceptions import JsonRESTError + + +class NotFoundException(JsonRESTError): + code = 400 + + def __init__(self, message): + super(NotFoundException, self).__init__( + "NotFoundException", message) + + +class ValidationException(JsonRESTError): + code = 400 + + def __init__(self, message): + super(ValidationException, self).__init__( + "ValidationException", message) + + +class AlreadyExistsException(JsonRESTError): + code = 400 + + def __init__(self, message): + 
super(AlreadyExistsException, self).__init__( + "AlreadyExistsException", message) + + +class NotAuthorizedException(JsonRESTError): + code = 400 + + def __init__(self): + super(NotAuthorizedException, self).__init__( + "NotAuthorizedException", None) + + self.description = '{"__type":"NotAuthorizedException"}' diff --git a/moto/kms/models.py b/moto/kms/models.py index bb39d1b24..b49e9dd09 100644 --- a/moto/kms/models.py +++ b/moto/kms/models.py @@ -1,5 +1,6 @@ from __future__ import unicode_literals +import os import boto.kms from moto.core import BaseBackend, BaseModel from moto.core.utils import iso_8601_datetime_without_milliseconds @@ -21,6 +22,7 @@ class Key(BaseModel): self.account_id = "0123456789012" self.key_rotation_status = False self.deletion_date = None + self.tags = {} @property def physical_resource_id(self): @@ -35,7 +37,7 @@ class Key(BaseModel): "KeyMetadata": { "AWSAccountId": self.account_id, "Arn": self.arn, - "CreationDate": "2015-01-01 00:00:00", + "CreationDate": datetime.strftime(datetime.utcnow(), "%s"), "Description": self.description, "Enabled": self.enabled, "KeyId": self.id, @@ -63,7 +65,6 @@ class Key(BaseModel): ) key.key_rotation_status = properties['EnableKeyRotation'] key.enabled = properties['Enabled'] - return key def get_cfn_attribute(self, attribute_name): @@ -84,6 +85,18 @@ class KmsBackend(BaseBackend): self.keys[key.id] = key return key + def update_key_description(self, key_id, description): + key = self.keys[self.get_key_id(key_id)] + key.description = description + + def tag_resource(self, key_id, tags): + key = self.keys[self.get_key_id(key_id)] + key.tags = tags + + def list_resource_tags(self, key_id): + key = self.keys[self.get_key_id(key_id)] + return key.tags + def delete_key(self, key_id): if key_id in self.keys: if key_id in self.key_to_aliases: @@ -147,27 +160,38 @@ class KmsBackend(BaseBackend): return self.keys[self.get_key_id(key_id)].policy def disable_key(self, key_id): - if key_id in self.keys: - 
self.keys[key_id].enabled = False - self.keys[key_id].key_state = 'Disabled' + self.keys[key_id].enabled = False + self.keys[key_id].key_state = 'Disabled' def enable_key(self, key_id): - if key_id in self.keys: - self.keys[key_id].enabled = True - self.keys[key_id].key_state = 'Enabled' + self.keys[key_id].enabled = True + self.keys[key_id].key_state = 'Enabled' def cancel_key_deletion(self, key_id): - if key_id in self.keys: - self.keys[key_id].key_state = 'Disabled' - self.keys[key_id].deletion_date = None + self.keys[key_id].key_state = 'Disabled' + self.keys[key_id].deletion_date = None def schedule_key_deletion(self, key_id, pending_window_in_days): - if key_id in self.keys: - if 7 <= pending_window_in_days <= 30: - self.keys[key_id].enabled = False - self.keys[key_id].key_state = 'PendingDeletion' - self.keys[key_id].deletion_date = datetime.now() + timedelta(days=pending_window_in_days) - return iso_8601_datetime_without_milliseconds(self.keys[key_id].deletion_date) + if 7 <= pending_window_in_days <= 30: + self.keys[key_id].enabled = False + self.keys[key_id].key_state = 'PendingDeletion' + self.keys[key_id].deletion_date = datetime.now() + timedelta(days=pending_window_in_days) + return iso_8601_datetime_without_milliseconds(self.keys[key_id].deletion_date) + + def generate_data_key(self, key_id, encryption_context, number_of_bytes, key_spec, grant_tokens): + key = self.keys[self.get_key_id(key_id)] + + if key_spec: + if key_spec == 'AES_128': + bytes = 16 + else: + bytes = 32 + else: + bytes = number_of_bytes + + plaintext = os.urandom(bytes) + + return plaintext, key.arn kms_backends = {} diff --git a/moto/kms/responses.py b/moto/kms/responses.py index 5883f51ec..92195ed6b 100644 --- a/moto/kms/responses.py +++ b/moto/kms/responses.py @@ -5,11 +5,9 @@ import json import re import six -from boto.exception import JSONResponseError -from boto.kms.exceptions import AlreadyExistsException, NotFoundException - from moto.core.responses import BaseResponse from 
.models import kms_backends +from .exceptions import NotFoundException, ValidationException, AlreadyExistsException, NotAuthorizedException reserved_aliases = [ 'alias/aws/ebs', @@ -38,6 +36,28 @@ class KmsResponse(BaseResponse): policy, key_usage, description, self.region) return json.dumps(key.to_dict()) + def update_key_description(self): + key_id = self.parameters.get('KeyId') + description = self.parameters.get('Description') + + self.kms_backend.update_key_description(key_id, description) + return json.dumps(None) + + def tag_resource(self): + key_id = self.parameters.get('KeyId') + tags = self.parameters.get('Tags') + self.kms_backend.tag_resource(key_id, tags) + return json.dumps({}) + + def list_resource_tags(self): + key_id = self.parameters.get('KeyId') + tags = self.kms_backend.list_resource_tags(key_id) + return json.dumps({ + "Tags": tags, + "NextMarker": None, + "Truncated": False, + }) + def describe_key(self): key_id = self.parameters.get('KeyId') try: @@ -66,36 +86,28 @@ class KmsResponse(BaseResponse): def create_alias(self): alias_name = self.parameters['AliasName'] target_key_id = self.parameters['TargetKeyId'] - region = self.region if not alias_name.startswith('alias/'): - raise JSONResponseError(400, 'Bad Request', - body={'message': 'Invalid identifier', '__type': 'ValidationException'}) + raise ValidationException('Invalid identifier') if alias_name in reserved_aliases: - raise JSONResponseError(400, 'Bad Request', body={ - '__type': 'NotAuthorizedException'}) + raise NotAuthorizedException() if ':' in alias_name: - raise JSONResponseError(400, 'Bad Request', body={ - 'message': '{alias_name} contains invalid characters for an alias'.format(**locals()), - '__type': 'ValidationException'}) + raise ValidationException('{alias_name} contains invalid characters for an alias'.format(alias_name=alias_name)) if not re.match(r'^[a-zA-Z0-9:/_-]+$', alias_name): - raise JSONResponseError(400, 'Bad Request', body={ - 'message': "1 validation error 
detected: Value '{alias_name}' at 'aliasName' failed to satisfy constraint: Member must satisfy regular expression pattern: ^[a-zA-Z0-9:/_-]+$" - .format(**locals()), - '__type': 'ValidationException'}) + raise ValidationException("1 validation error detected: Value '{alias_name}' at 'aliasName' " + "failed to satisfy constraint: Member must satisfy regular " + "expression pattern: ^[a-zA-Z0-9:/_-]+$" + .format(alias_name=alias_name)) if self.kms_backend.alias_exists(target_key_id): - raise JSONResponseError(400, 'Bad Request', body={ - 'message': 'Aliases must refer to keys. Not aliases', - '__type': 'ValidationException'}) + raise ValidationException('Aliases must refer to keys. Not aliases') if self.kms_backend.alias_exists(alias_name): - raise AlreadyExistsException(400, 'Bad Request', body={ - 'message': 'An alias with the name arn:aws:kms:{region}:012345678912:{alias_name} already exists' - .format(**locals()), '__type': 'AlreadyExistsException'}) + raise AlreadyExistsException('An alias with the name arn:aws:kms:{region}:012345678912:{alias_name} ' + 'already exists'.format(region=self.region, alias_name=alias_name)) self.kms_backend.add_alias(target_key_id, alias_name) @@ -103,16 +115,13 @@ class KmsResponse(BaseResponse): def delete_alias(self): alias_name = self.parameters['AliasName'] - region = self.region if not alias_name.startswith('alias/'): - raise JSONResponseError(400, 'Bad Request', - body={'message': 'Invalid identifier', '__type': 'ValidationException'}) + raise ValidationException('Invalid identifier') if not self.kms_backend.alias_exists(alias_name): - raise NotFoundException(400, 'Bad Request', body={ - 'message': 'Alias arn:aws:kms:{region}:012345678912:{alias_name} is not found.'.format(**locals()), - '__type': 'NotFoundException'}) + raise NotFoundException('Alias arn:aws:kms:{region}:012345678912:' + '{alias_name} is not found.'.format(region=self.region, alias_name=alias_name)) self.kms_backend.delete_alias(alias_name) @@ -150,9 +159,8 
@@ class KmsResponse(BaseResponse): try: self.kms_backend.enable_key_rotation(key_id) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) return json.dumps(None) @@ -162,9 +170,8 @@ class KmsResponse(BaseResponse): try: self.kms_backend.disable_key_rotation(key_id) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) return json.dumps(None) def get_key_rotation_status(self): @@ -173,9 +180,8 @@ class KmsResponse(BaseResponse): try: rotation_enabled = self.kms_backend.get_key_rotation_status(key_id) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) return json.dumps({'KeyRotationEnabled': rotation_enabled}) def put_key_policy(self): @@ -188,9 +194,8 @@ class KmsResponse(BaseResponse): try: self.kms_backend.put_key_policy(key_id, policy) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 
'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) return json.dumps(None) @@ -203,9 +208,8 @@ class KmsResponse(BaseResponse): try: return json.dumps({'Policy': self.kms_backend.get_key_policy(key_id)}) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) def list_key_policies(self): key_id = self.parameters.get('KeyId') @@ -213,9 +217,8 @@ class KmsResponse(BaseResponse): try: self.kms_backend.describe_key(key_id) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) return json.dumps({'Truncated': False, 'PolicyNames': ['default']}) @@ -227,11 +230,17 @@ class KmsResponse(BaseResponse): value = self.parameters.get("Plaintext") if isinstance(value, six.text_type): value = value.encode('utf-8') - return json.dumps({"CiphertextBlob": base64.b64encode(value).decode("utf-8")}) + return json.dumps({"CiphertextBlob": base64.b64encode(value).decode("utf-8"), 'KeyId': 'key_id'}) def decrypt(self): + # TODO refuse decode if EncryptionContext is not the same as when it was encrypted / generated + value = self.parameters.get("CiphertextBlob") - return json.dumps({"Plaintext": base64.b64decode(value).decode("utf-8")}) + try: + return json.dumps({"Plaintext": base64.b64decode(value).decode("utf-8")}) + except UnicodeDecodeError: + # Generate data key will produce random bytes which when 
decrypted is still returned as base64 + return json.dumps({"Plaintext": value}) def disable_key(self): key_id = self.parameters.get('KeyId') @@ -239,9 +248,8 @@ class KmsResponse(BaseResponse): try: self.kms_backend.disable_key(key_id) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) return json.dumps(None) def enable_key(self): @@ -250,9 +258,8 @@ class KmsResponse(BaseResponse): try: self.kms_backend.enable_key(key_id) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) return json.dumps(None) def cancel_key_deletion(self): @@ -261,9 +268,8 @@ class KmsResponse(BaseResponse): try: self.kms_backend.cancel_key_deletion(key_id) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) return json.dumps({'KeyId': key_id}) def schedule_key_deletion(self): @@ -279,19 +285,62 @@ class KmsResponse(BaseResponse): 'DeletionDate': self.kms_backend.schedule_key_deletion(key_id, pending_window_in_days) }) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' 
does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) + + def generate_data_key(self): + key_id = self.parameters.get('KeyId') + encryption_context = self.parameters.get('EncryptionContext') + number_of_bytes = self.parameters.get('NumberOfBytes') + key_spec = self.parameters.get('KeySpec') + grant_tokens = self.parameters.get('GrantTokens') + + # Param validation + if key_id.startswith('alias'): + if self.kms_backend.get_key_id_from_alias(key_id) is None: + raise NotFoundException('Alias arn:aws:kms:{region}:012345678912:{alias_name} is not found.'.format( + region=self.region, alias_name=key_id)) + else: + if self.kms_backend.get_key_id(key_id) not in self.kms_backend.keys: + raise NotFoundException('Invalid keyId') + + if number_of_bytes and (number_of_bytes > 1024 or number_of_bytes < 0): + raise ValidationException("1 validation error detected: Value '2048' at 'numberOfBytes' failed " + "to satisfy constraint: Member must have value less than or " + "equal to 1024") + + if key_spec and key_spec not in ('AES_256', 'AES_128'): + raise ValidationException("1 validation error detected: Value 'AES_257' at 'keySpec' failed " + "to satisfy constraint: Member must satisfy enum value set: " + "[AES_256, AES_128]") + if not key_spec and not number_of_bytes: + raise ValidationException("Please specify either number of bytes or key spec.") + if key_spec and number_of_bytes: + raise ValidationException("Please specify either number of bytes or key spec.") + + plaintext, key_arn = self.kms_backend.generate_data_key(key_id, encryption_context, + number_of_bytes, key_spec, grant_tokens) + + plaintext = base64.b64encode(plaintext).decode() + + return json.dumps({ + 'CiphertextBlob': plaintext, + 'Plaintext': plaintext, + 'KeyId': key_arn # not alias + }) + + def 
generate_data_key_without_plaintext(self): + result = json.loads(self.generate_data_key()) + del result['Plaintext'] + + return json.dumps(result) def _assert_valid_key_id(key_id): if not re.match(r'^[A-F0-9]{8}-[A-F0-9]{4}-[A-F0-9]{4}-[A-F0-9]{4}-[A-F0-9]{12}$', key_id, re.IGNORECASE): - raise JSONResponseError(404, 'Not Found', body={ - 'message': ' Invalid keyId', '__type': 'NotFoundException'}) + raise NotFoundException('Invalid keyId') def _assert_default_policy(policy_name): if policy_name != 'default': - raise JSONResponseError(404, 'Not Found', body={ - 'message': "No such policy exists", - '__type': 'NotFoundException'}) + raise NotFoundException("No such policy exists") diff --git a/moto/logs/models.py b/moto/logs/models.py index ca1fdc4ad..e105d4d14 100644 --- a/moto/logs/models.py +++ b/moto/logs/models.py @@ -242,7 +242,8 @@ class LogsBackend(BaseBackend): if next_token is None: next_token = 0 - groups = sorted(group.to_describe_dict() for name, group in self.groups.items() if name.startswith(log_group_name_prefix)) + groups = [group.to_describe_dict() for name, group in self.groups.items() if name.startswith(log_group_name_prefix)] + groups = sorted(groups, key=lambda x: x['creationTime'], reverse=True) groups_page = groups[next_token:next_token + limit] next_token += limit diff --git a/moto/packages/httpretty/core.py b/moto/packages/httpretty/core.py index 8ad9168a5..4eb92108f 100644 --- a/moto/packages/httpretty/core.py +++ b/moto/packages/httpretty/core.py @@ -1113,4 +1113,4 @@ def httprettified(test): if isinstance(test, ClassTypes): return decorate_class(test) - return decorate_callable(test) + return decorate_callable(test) \ No newline at end of file diff --git a/moto/packages/httpretty/http.py b/moto/packages/httpretty/http.py index 7e9a56885..ee1625905 100644 --- a/moto/packages/httpretty/http.py +++ b/moto/packages/httpretty/http.py @@ -29,7 +29,6 @@ import re from .compat import BaseClass from .utils import decode_utf8 - STATUSES = { 100: 
"Continue", 101: "Switching Protocols", diff --git a/moto/route53/models.py b/moto/route53/models.py index d483d22e2..3760d3817 100644 --- a/moto/route53/models.py +++ b/moto/route53/models.py @@ -24,7 +24,7 @@ class HealthCheck(BaseModel): self.id = health_check_id self.ip_address = health_check_args.get("ip_address") self.port = health_check_args.get("port", 80) - self._type = health_check_args.get("type") + self.type_ = health_check_args.get("type") self.resource_path = health_check_args.get("resource_path") self.fqdn = health_check_args.get("fqdn") self.search_string = health_check_args.get("search_string") @@ -58,7 +58,7 @@ class HealthCheck(BaseModel): {{ health_check.ip_address }} {{ health_check.port }} - {{ health_check._type }} + {{ health_check.type_ }} {{ health_check.resource_path }} {{ health_check.fqdn }} {{ health_check.request_interval }} @@ -76,7 +76,7 @@ class RecordSet(BaseModel): def __init__(self, kwargs): self.name = kwargs.get('Name') - self._type = kwargs.get('Type') + self.type_ = kwargs.get('Type') self.ttl = kwargs.get('TTL') self.records = kwargs.get('ResourceRecords', []) self.set_identifier = kwargs.get('SetIdentifier') @@ -130,7 +130,7 @@ class RecordSet(BaseModel): def to_xml(self): template = Template(""" {{ record_set.name }} - {{ record_set._type }} + {{ record_set.type_ }} {% if record_set.set_identifier %} {{ record_set.set_identifier }} {% endif %} @@ -183,7 +183,7 @@ class FakeZone(BaseModel): def upsert_rrset(self, record_set): new_rrset = RecordSet(record_set) for i, rrset in enumerate(self.rrsets): - if rrset.name == new_rrset.name: + if rrset.name == new_rrset.name and rrset.type_ == new_rrset.type_: self.rrsets[i] = new_rrset break else: @@ -202,7 +202,7 @@ class FakeZone(BaseModel): record_sets = list(self.rrsets) # Copy the list if start_type: record_sets = [ - record_set for record_set in record_sets if record_set._type >= start_type] + record_set for record_set in record_sets if record_set.type_ >= start_type] if 
start_name: record_sets = [ record_set for record_set in record_sets if record_set.name >= start_name] diff --git a/moto/route53/responses.py b/moto/route53/responses.py index 6679e7945..98ffa4c47 100644 --- a/moto/route53/responses.py +++ b/moto/route53/responses.py @@ -123,6 +123,9 @@ class Route53(BaseResponse): """ % (record_set['Name'], the_zone.name) return 400, headers, error_msg + if not record_set['Name'].endswith('.'): + record_set['Name'] += '.' + if action in ('CREATE', 'UPSERT'): if 'ResourceRecords' in record_set: resource_records = list( diff --git a/moto/s3/exceptions.py b/moto/s3/exceptions.py index 26515dfd2..27c842111 100644 --- a/moto/s3/exceptions.py +++ b/moto/s3/exceptions.py @@ -178,3 +178,24 @@ class InvalidStorageClass(S3ClientError): "InvalidStorageClass", "The storage class you specified is not valid", *args, **kwargs) + + +class InvalidBucketName(S3ClientError): + code = 400 + + def __init__(self, *args, **kwargs): + super(InvalidBucketName, self).__init__( + "InvalidBucketName", + "The specified bucket is not valid.", + *args, **kwargs + ) + + +class DuplicateTagKeys(S3ClientError): + code = 400 + + def __init__(self, *args, **kwargs): + super(DuplicateTagKeys, self).__init__( + "InvalidTag", + "Cannot provide multiple Tags with the same key", + *args, **kwargs) diff --git a/moto/s3/models.py b/moto/s3/models.py index bb4d7848c..9e4a6a766 100644 --- a/moto/s3/models.py +++ b/moto/s3/models.py @@ -8,19 +8,26 @@ import itertools import codecs import random import string +import tempfile +import sys +import uuid import six from bisect import insort from moto.core import BaseBackend, BaseModel from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime -from .exceptions import BucketAlreadyExists, MissingBucket, InvalidPart, EntityTooSmall, MissingKey, \ - InvalidNotificationDestination, MalformedXML, InvalidStorageClass +from .exceptions import BucketAlreadyExists, MissingBucket, InvalidBucketName, InvalidPart, \ + 
EntityTooSmall, MissingKey, InvalidNotificationDestination, MalformedXML, InvalidStorageClass, DuplicateTagKeys from .utils import clean_key_name, _VersionedKeyStore +MAX_BUCKET_NAME_LENGTH = 63 +MIN_BUCKET_NAME_LENGTH = 3 UPLOAD_ID_BYTES = 43 UPLOAD_PART_MIN_SIZE = 5242880 STORAGE_CLASS = ["STANDARD", "REDUCED_REDUNDANCY", "STANDARD_IA", "ONEZONE_IA"] +DEFAULT_KEY_BUFFER_SIZE = 16 * 1024 * 1024 +DEFAULT_TEXT_ENCODING = sys.getdefaultencoding() class FakeDeleteMarker(BaseModel): @@ -29,7 +36,7 @@ class FakeDeleteMarker(BaseModel): self.key = key self.name = key.name self.last_modified = datetime.datetime.utcnow() - self._version_id = key.version_id + 1 + self._version_id = str(uuid.uuid4()) @property def last_modified_ISO8601(self): @@ -42,9 +49,9 @@ class FakeDeleteMarker(BaseModel): class FakeKey(BaseModel): - def __init__(self, name, value, storage="STANDARD", etag=None, is_versioned=False, version_id=0): + def __init__(self, name, value, storage="STANDARD", etag=None, is_versioned=False, version_id=0, + max_buffer_size=DEFAULT_KEY_BUFFER_SIZE): self.name = name - self.value = value self.last_modified = datetime.datetime.utcnow() self.acl = get_canned_acl('private') self.website_redirect_location = None @@ -56,14 +63,37 @@ class FakeKey(BaseModel): self._is_versioned = is_versioned self._tagging = FakeTagging() + self._value_buffer = tempfile.SpooledTemporaryFile(max_size=max_buffer_size) + self._max_buffer_size = max_buffer_size + self.value = value + @property def version_id(self): return self._version_id - def copy(self, new_name=None): + @property + def value(self): + self._value_buffer.seek(0) + return self._value_buffer.read() + + @value.setter + def value(self, new_value): + self._value_buffer.seek(0) + self._value_buffer.truncate() + + # Hack for working around moto's own unit tests; this probably won't + # actually get hit in normal use. 
+ if isinstance(new_value, six.text_type): + new_value = new_value.encode(DEFAULT_TEXT_ENCODING) + self._value_buffer.write(new_value) + + def copy(self, new_name=None, new_is_versioned=None): r = copy.deepcopy(self) if new_name is not None: r.name = new_name + if new_is_versioned is not None: + r._is_versioned = new_is_versioned + r.refresh_version() return r def set_metadata(self, metadata, replace=False): @@ -83,29 +113,34 @@ class FakeKey(BaseModel): self.acl = acl def append_to_value(self, value): - self.value += value + self._value_buffer.seek(0, os.SEEK_END) + self._value_buffer.write(value) + self.last_modified = datetime.datetime.utcnow() self._etag = None # must recalculate etag if self._is_versioned: - self._version_id += 1 + self._version_id = str(uuid.uuid4()) else: - self._is_versioned = 0 + self._version_id = None def restore(self, days): self._expiry = datetime.datetime.utcnow() + datetime.timedelta(days) - def increment_version(self): - self._version_id += 1 + def refresh_version(self): + self._version_id = str(uuid.uuid4()) + self.last_modified = datetime.datetime.utcnow() @property def etag(self): if self._etag is None: value_md5 = hashlib.md5() - if isinstance(self.value, six.text_type): - value = self.value.encode("utf-8") - else: - value = self.value - value_md5.update(value) + self._value_buffer.seek(0) + while True: + block = self._value_buffer.read(DEFAULT_KEY_BUFFER_SIZE) + if not block: + break + value_md5.update(block) + self._etag = value_md5.hexdigest() return '"{0}"'.format(self._etag) @@ -132,7 +167,7 @@ class FakeKey(BaseModel): res = { 'ETag': self.etag, 'last-modified': self.last_modified_RFC1123, - 'content-length': str(len(self.value)), + 'content-length': str(self.size), } if self._storage_class != 'STANDARD': res['x-amz-storage-class'] = self._storage_class @@ -150,7 +185,8 @@ class FakeKey(BaseModel): @property def size(self): - return len(self.value) + self._value_buffer.seek(0, os.SEEK_END) + return 
self._value_buffer.tell() @property def storage_class(self): @@ -161,6 +197,26 @@ class FakeKey(BaseModel): if self._expiry is not None: return self._expiry.strftime("%a, %d %b %Y %H:%M:%S GMT") + # Keys need to be pickleable due to some implementation details of boto3. + # Since file objects aren't pickleable, we need to override the default + # behavior. The following is adapted from the Python docs: + # https://docs.python.org/3/library/pickle.html#handling-stateful-objects + def __getstate__(self): + state = self.__dict__.copy() + state['value'] = self.value + del state['_value_buffer'] + return state + + def __setstate__(self, state): + self.__dict__.update({ + k: v for k, v in six.iteritems(state) + if k != 'value' + }) + + self._value_buffer = \ + tempfile.SpooledTemporaryFile(max_size=self._max_buffer_size) + self.value = state['value'] + class FakeMultipart(BaseModel): @@ -634,6 +690,8 @@ class S3Backend(BaseBackend): def create_bucket(self, bucket_name, region_name): if bucket_name in self.buckets: raise BucketAlreadyExists(bucket=bucket_name) + if not MIN_BUCKET_NAME_LENGTH <= len(bucket_name) <= MAX_BUCKET_NAME_LENGTH: + raise InvalidBucketName() new_bucket = FakeBucket(name=bucket_name, region_name=region_name) self.buckets[bucket_name] = new_bucket return new_bucket @@ -663,17 +721,18 @@ class S3Backend(BaseBackend): def get_bucket_latest_versions(self, bucket_name): versions = self.get_bucket_versions(bucket_name) - maximum_version_per_key = {} + latest_modified_per_key = {} latest_versions = {} for version in versions: name = version.name + last_modified = version.last_modified version_id = version.version_id - maximum_version_per_key[name] = max( - version_id, - maximum_version_per_key.get(name, -1) + latest_modified_per_key[name] = max( + last_modified, + latest_modified_per_key.get(name, datetime.datetime.min) ) - if version_id == maximum_version_per_key[name]: + if last_modified == latest_modified_per_key[name]: latest_versions[name] = 
version_id return latest_versions @@ -721,20 +780,19 @@ class S3Backend(BaseBackend): bucket = self.get_bucket(bucket_name) - old_key = bucket.keys.get(key_name, None) - if old_key is not None and bucket.is_versioned: - new_version_id = old_key._version_id + 1 - else: - new_version_id = 0 - new_key = FakeKey( name=key_name, value=value, storage=storage, etag=etag, is_versioned=bucket.is_versioned, - version_id=new_version_id) - bucket.keys[key_name] = new_key + version_id=str(uuid.uuid4()) if bucket.is_versioned else None) + + keys = [ + key for key in bucket.keys.getlist(key_name, []) + if key.version_id != new_key.version_id + ] + [new_key] + bucket.keys.setlist(key_name, keys) return new_key @@ -773,6 +831,9 @@ class S3Backend(BaseBackend): return key def put_bucket_tagging(self, bucket_name, tagging): + tag_keys = [tag.key for tag in tagging.tag_set.tags] + if len(tag_keys) != len(set(tag_keys)): + raise DuplicateTagKeys() bucket = self.get_bucket(bucket_name) bucket.set_tags(tagging) @@ -915,17 +976,15 @@ class S3Backend(BaseBackend): dest_bucket = self.get_bucket(dest_bucket_name) key = self.get_key(src_bucket_name, src_key_name, version_id=src_version_id) - if dest_key_name != src_key_name: - key = key.copy(dest_key_name) - dest_bucket.keys[dest_key_name] = key - # By this point, the destination key must exist, or KeyError - if dest_bucket.is_versioned: - dest_bucket.keys[dest_key_name].increment_version() + new_key = key.copy(dest_key_name, dest_bucket.is_versioned) + if storage is not None: - key.set_storage_class(storage) + new_key.set_storage_class(storage) if acl is not None: - key.set_acl(acl) + new_key.set_acl(acl) + + dest_bucket.keys[dest_key_name] = new_key def set_bucket_acl(self, bucket_name, acl): bucket = self.get_bucket(bucket_name) diff --git a/moto/s3/responses.py b/moto/s3/responses.py index 13e5f87d9..856178941 100755 --- a/moto/s3/responses.py +++ b/moto/s3/responses.py @@ -19,7 +19,7 @@ from .exceptions import BucketAlreadyExists, 
S3ClientError, MissingBucket, Missi MalformedACLError, InvalidNotificationARN, InvalidNotificationEvent from .models import s3_backend, get_canned_acl, FakeGrantee, FakeGrant, FakeAcl, FakeKey, FakeTagging, FakeTagSet, \ FakeTag -from .utils import bucket_name_from_url, metadata_from_headers, parse_region_from_url +from .utils import bucket_name_from_url, clean_key_name, metadata_from_headers, parse_region_from_url from xml.dom import minidom @@ -193,7 +193,13 @@ class ResponseObject(_TemplateEnvironmentMixin): elif 'location' in querystring: bucket = self.backend.get_bucket(bucket_name) template = self.response_template(S3_BUCKET_LOCATION) - return template.render(location=bucket.location) + + location = bucket.location + # us-east-1 is different - returns a None location + if location == DEFAULT_REGION_NAME: + location = None + + return template.render(location=location) elif 'lifecycle' in querystring: bucket = self.backend.get_bucket(bucket_name) if not bucket.rules: @@ -338,9 +344,15 @@ class ResponseObject(_TemplateEnvironmentMixin): if continuation_token or start_after: limit = continuation_token or start_after - result_keys = self._get_results_from_token(result_keys, limit) + if not delimiter: + result_keys = self._get_results_from_token(result_keys, limit) + else: + result_folders = self._get_results_from_token(result_folders, limit) - result_keys, is_truncated, next_continuation_token = self._truncate_result(result_keys, max_keys) + if not delimiter: + result_keys, is_truncated, next_continuation_token = self._truncate_result(result_keys, max_keys) + else: + result_folders, is_truncated, next_continuation_token = self._truncate_result(result_folders, max_keys) return template.render( bucket=bucket, @@ -358,7 +370,7 @@ class ResponseObject(_TemplateEnvironmentMixin): def _get_results_from_token(self, result_keys, token): continuation_index = 0 for key in result_keys: - if key.name > token: + if (key.name if isinstance(key, FakeKey) else key) > token: break 
continuation_index += 1 return result_keys[continuation_index:] @@ -367,7 +379,8 @@ class ResponseObject(_TemplateEnvironmentMixin): if len(result_keys) > max_keys: is_truncated = 'true' result_keys = result_keys[:max_keys] - next_continuation_token = result_keys[-1].name + item = result_keys[-1] + next_continuation_token = (item.name if isinstance(item, FakeKey) else item) else: is_truncated = 'false' next_continuation_token = None @@ -432,8 +445,19 @@ class ResponseObject(_TemplateEnvironmentMixin): else: if body: + # us-east-1, the default AWS region behaves a bit differently + # - you should not use it as a location constraint --> it fails + # - querying the location constraint returns None try: - region_name = xmltodict.parse(body)['CreateBucketConfiguration']['LocationConstraint'] + forced_region = xmltodict.parse(body)['CreateBucketConfiguration']['LocationConstraint'] + + if forced_region == DEFAULT_REGION_NAME: + raise S3ClientError( + 'InvalidLocationConstraint', + 'The specified location-constraint is not valid' + ) + else: + region_name = forced_region except KeyError: pass @@ -709,7 +733,7 @@ class ResponseObject(_TemplateEnvironmentMixin): # Copy key # you can have a quoted ?version=abc with a version Id, so work on # we need to parse the unquoted string first - src_key = request.headers.get("x-amz-copy-source") + src_key = clean_key_name(request.headers.get("x-amz-copy-source")) if isinstance(src_key, six.binary_type): src_key = src_key.decode('utf-8') src_key_parsed = urlparse(src_key) @@ -1176,7 +1200,7 @@ S3_DELETE_BUCKET_WITH_ITEMS_ERROR = """ """ S3_BUCKET_LOCATION = """ -{{ location }}""" +{% if location != None %}{{ location }}{% endif %}""" S3_BUCKET_LIFECYCLE_CONFIGURATION = """ @@ -1279,7 +1303,7 @@ S3_BUCKET_GET_VERSIONS = """ {% for key in key_list %} {{ key.name }} - {{ key.version_id }} + {% if key.version_id is none %}null{% else %}{{ key.version_id }}{% endif %} {% if latest_versions[key.name] == key.version_id %}true{% else %}false{% 
endif %} {{ key.last_modified_ISO8601 }} {{ key.etag }} diff --git a/moto/secretsmanager/exceptions.py b/moto/secretsmanager/exceptions.py index a72a32645..06010c411 100644 --- a/moto/secretsmanager/exceptions.py +++ b/moto/secretsmanager/exceptions.py @@ -27,3 +27,10 @@ class InvalidParameterException(SecretsManagerClientError): super(InvalidParameterException, self).__init__( 'InvalidParameterException', message) + + +class InvalidRequestException(SecretsManagerClientError): + def __init__(self, message): + super(InvalidRequestException, self).__init__( + 'InvalidRequestException', + message) diff --git a/moto/secretsmanager/models.py b/moto/secretsmanager/models.py index 1404a0ec8..44ac1ef47 100644 --- a/moto/secretsmanager/models.py +++ b/moto/secretsmanager/models.py @@ -2,6 +2,8 @@ from __future__ import unicode_literals import time import json +import uuid +import datetime import boto3 @@ -9,6 +11,7 @@ from moto.core import BaseBackend, BaseModel from .exceptions import ( ResourceNotFoundException, InvalidParameterException, + InvalidRequestException, ClientError ) from .utils import random_password, secret_arn @@ -18,10 +21,6 @@ class SecretsManager(BaseModel): def __init__(self, region_name, **kwargs): self.region = region_name - self.secret_id = kwargs.get('secret_id', '') - self.version_id = kwargs.get('version_id', '') - self.version_stage = kwargs.get('version_stage', '') - self.secret_string = '' class SecretsManagerBackend(BaseBackend): @@ -29,14 +28,7 @@ class SecretsManagerBackend(BaseBackend): def __init__(self, region_name=None, **kwargs): super(SecretsManagerBackend, self).__init__() self.region = region_name - self.secret_id = kwargs.get('secret_id', '') - self.name = kwargs.get('name', '') - self.createdate = int(time.time()) - self.secret_string = '' - self.rotation_enabled = False - self.rotation_lambda_arn = '' - self.auto_rotate_after_days = 0 - self.version_id = '' + self.secrets = {} def reset(self): region_name = self.region @@ -44,36 
+36,60 @@ class SecretsManagerBackend(BaseBackend): self.__init__(region_name) def _is_valid_identifier(self, identifier): - return identifier in (self.name, self.secret_id) + return identifier in self.secrets + + def _unix_time_secs(self, dt): + epoch = datetime.datetime.utcfromtimestamp(0) + return (dt - epoch).total_seconds() def get_secret_value(self, secret_id, version_id, version_stage): if not self._is_valid_identifier(secret_id): raise ResourceNotFoundException() + if 'deleted_date' in self.secrets[secret_id]: + raise InvalidRequestException( + "An error occurred (InvalidRequestException) when calling the GetSecretValue operation: You tried to \ + perform the operation on a secret that's currently marked deleted." + ) + + secret = self.secrets[secret_id] + response = json.dumps({ - "ARN": secret_arn(self.region, self.secret_id), - "Name": self.name, - "VersionId": "A435958A-D821-4193-B719-B7769357AER4", - "SecretString": self.secret_string, + "ARN": secret_arn(self.region, secret['secret_id']), + "Name": secret['name'], + "VersionId": secret['version_id'], + "SecretString": secret['secret_string'], "VersionStages": [ "AWSCURRENT", ], - "CreatedDate": "2018-05-23 13:16:57.198000" + "CreatedDate": secret['createdate'] }) return response - def create_secret(self, name, secret_string, **kwargs): + def create_secret(self, name, secret_string, tags, **kwargs): - self.secret_string = secret_string - self.secret_id = name - self.name = name + generated_version_id = str(uuid.uuid4()) + + secret = { + 'secret_string': secret_string, + 'secret_id': name, + 'name': name, + 'createdate': int(time.time()), + 'rotation_enabled': False, + 'rotation_lambda_arn': '', + 'auto_rotate_after_days': 0, + 'version_id': generated_version_id, + 'tags': tags + } + + self.secrets[name] = secret response = json.dumps({ "ARN": secret_arn(self.region, name), - "Name": self.name, - "VersionId": "A435958A-D821-4193-B719-B7769357AER4", + "Name": name, + "VersionId": generated_version_id, }) 
return response @@ -82,26 +98,23 @@ class SecretsManagerBackend(BaseBackend): if not self._is_valid_identifier(secret_id): raise ResourceNotFoundException + secret = self.secrets[secret_id] + response = json.dumps({ - "ARN": secret_arn(self.region, self.secret_id), - "Name": self.name, + "ARN": secret_arn(self.region, secret['secret_id']), + "Name": secret['name'], "Description": "", "KmsKeyId": "", - "RotationEnabled": self.rotation_enabled, - "RotationLambdaARN": self.rotation_lambda_arn, + "RotationEnabled": secret['rotation_enabled'], + "RotationLambdaARN": secret['rotation_lambda_arn'], "RotationRules": { - "AutomaticallyAfterDays": self.auto_rotate_after_days + "AutomaticallyAfterDays": secret['auto_rotate_after_days'] }, "LastRotatedDate": None, "LastChangedDate": None, "LastAccessedDate": None, - "DeletedDate": None, - "Tags": [ - { - "Key": "", - "Value": "" - }, - ] + "DeletedDate": secret.get('deleted_date', None), + "Tags": secret['tags'] }) return response @@ -114,6 +127,12 @@ class SecretsManagerBackend(BaseBackend): if not self._is_valid_identifier(secret_id): raise ResourceNotFoundException + if 'deleted_date' in self.secrets[secret_id]: + raise InvalidRequestException( + "An error occurred (InvalidRequestException) when calling the RotateSecret operation: You tried to \ + perform the operation on a secret that's currently marked deleted." 
+ ) + if client_request_token: token_length = len(client_request_token) if token_length < 32 or token_length > 64: @@ -141,17 +160,19 @@ class SecretsManagerBackend(BaseBackend): ) raise InvalidParameterException(msg) - self.version_id = client_request_token or '' - self.rotation_lambda_arn = rotation_lambda_arn or '' + secret = self.secrets[secret_id] + + secret['version_id'] = client_request_token or '' + secret['rotation_lambda_arn'] = rotation_lambda_arn or '' if rotation_rules: - self.auto_rotate_after_days = rotation_rules.get(rotation_days, 0) - if self.auto_rotate_after_days > 0: - self.rotation_enabled = True + secret['auto_rotate_after_days'] = rotation_rules.get(rotation_days, 0) + if secret['auto_rotate_after_days'] > 0: + secret['rotation_enabled'] = True response = json.dumps({ - "ARN": secret_arn(self.region, self.secret_id), - "Name": self.name, - "VersionId": self.version_id + "ARN": secret_arn(self.region, secret['secret_id']), + "Name": secret['name'], + "VersionId": secret['version_id'] }) return response @@ -185,6 +206,85 @@ class SecretsManagerBackend(BaseBackend): return response + def list_secrets(self, max_results, next_token): + # TODO implement pagination and limits + + secret_list = [{ + "ARN": secret_arn(self.region, secret['secret_id']), + "DeletedDate": secret.get('deleted_date', None), + "Description": "", + "KmsKeyId": "", + "LastAccessedDate": None, + "LastChangedDate": None, + "LastRotatedDate": None, + "Name": secret['name'], + "RotationEnabled": secret['rotation_enabled'], + "RotationLambdaARN": secret['rotation_lambda_arn'], + "RotationRules": { + "AutomaticallyAfterDays": secret['auto_rotate_after_days'] + }, + "SecretVersionsToStages": { + secret['version_id']: ["AWSCURRENT"] + }, + "Tags": secret['tags'] + } for secret in self.secrets.values()] + + return secret_list, None + + def delete_secret(self, secret_id, recovery_window_in_days, force_delete_without_recovery): + + if not self._is_valid_identifier(secret_id): + raise 
ResourceNotFoundException + + if 'deleted_date' in self.secrets[secret_id]: + raise InvalidRequestException( + "An error occurred (InvalidRequestException) when calling the DeleteSecret operation: You tried to \ + perform the operation on a secret that's currently marked deleted." + ) + + if recovery_window_in_days and force_delete_without_recovery: + raise InvalidParameterException( + "An error occurred (InvalidParameterException) when calling the DeleteSecret operation: You can't \ + use ForceDeleteWithoutRecovery in conjunction with RecoveryWindowInDays." + ) + + if recovery_window_in_days and (recovery_window_in_days < 7 or recovery_window_in_days > 30): + raise InvalidParameterException( + "An error occurred (InvalidParameterException) when calling the DeleteSecret operation: The \ + RecoveryWindowInDays value must be between 7 and 30 days (inclusive)." + ) + + deletion_date = datetime.datetime.utcnow() + + if force_delete_without_recovery: + secret = self.secrets.pop(secret_id, None) + else: + deletion_date += datetime.timedelta(days=recovery_window_in_days or 30) + self.secrets[secret_id]['deleted_date'] = self._unix_time_secs(deletion_date) + secret = self.secrets.get(secret_id, None) + + if not secret: + raise ResourceNotFoundException + + arn = secret_arn(self.region, secret['secret_id']) + name = secret['name'] + + return arn, name, self._unix_time_secs(deletion_date) + + def restore_secret(self, secret_id): + + if not self._is_valid_identifier(secret_id): + raise ResourceNotFoundException + + self.secrets[secret_id].pop('deleted_date', None) + + secret = self.secrets[secret_id] + + arn = secret_arn(self.region, secret['secret_id']) + name = secret['name'] + + return arn, name + available_regions = ( boto3.session.Session().get_available_regions("secretsmanager") diff --git a/moto/secretsmanager/responses.py b/moto/secretsmanager/responses.py index b8b6872a8..0eb02e39b 100644 --- a/moto/secretsmanager/responses.py +++ b/moto/secretsmanager/responses.py 
@@ -4,6 +4,8 @@ from moto.core.responses import BaseResponse from .models import secretsmanager_backends +import json + class SecretsManagerResponse(BaseResponse): @@ -19,9 +21,11 @@ class SecretsManagerResponse(BaseResponse): def create_secret(self): name = self._get_param('Name') secret_string = self._get_param('SecretString') + tags = self._get_param('Tags', if_none=[]) return secretsmanager_backends[self.region].create_secret( name=name, - secret_string=secret_string + secret_string=secret_string, + tags=tags ) def get_random_password(self): @@ -62,3 +66,30 @@ class SecretsManagerResponse(BaseResponse): rotation_lambda_arn=rotation_lambda_arn, rotation_rules=rotation_rules ) + + def list_secrets(self): + max_results = self._get_int_param("MaxResults") + next_token = self._get_param("NextToken") + secret_list, next_token = secretsmanager_backends[self.region].list_secrets( + max_results=max_results, + next_token=next_token, + ) + return json.dumps(dict(SecretList=secret_list, NextToken=next_token)) + + def delete_secret(self): + secret_id = self._get_param("SecretId") + recovery_window_in_days = self._get_param("RecoveryWindowInDays") + force_delete_without_recovery = self._get_param("ForceDeleteWithoutRecovery") + arn, name, deletion_date = secretsmanager_backends[self.region].delete_secret( + secret_id=secret_id, + recovery_window_in_days=recovery_window_in_days, + force_delete_without_recovery=force_delete_without_recovery, + ) + return json.dumps(dict(ARN=arn, Name=name, DeletionDate=deletion_date)) + + def restore_secret(self): + secret_id = self._get_param("SecretId") + arn, name = secretsmanager_backends[self.region].restore_secret( + secret_id=secret_id, + ) + return json.dumps(dict(ARN=arn, Name=name)) diff --git a/moto/secretsmanager/utils.py b/moto/secretsmanager/utils.py index 2cb92020a..231fea296 100644 --- a/moto/secretsmanager/utils.py +++ b/moto/secretsmanager/utils.py @@ -52,8 +52,9 @@ def random_password(password_length, exclude_characters, 
exclude_numbers, def secret_arn(region, secret_id): - return "arn:aws:secretsmanager:{0}:1234567890:secret:{1}-rIjad".format( - region, secret_id) + id_string = ''.join(random.choice(string.ascii_letters) for _ in range(5)) + return "arn:aws:secretsmanager:{0}:1234567890:secret:{1}-{2}".format( + region, secret_id, id_string) def _exclude_characters(password, exclude_characters): diff --git a/moto/server.py b/moto/server.py index ba2470478..5ad02d383 100644 --- a/moto/server.py +++ b/moto/server.py @@ -80,10 +80,13 @@ class DomainDispatcherApplication(object): region = 'us-east-1' if service == 'dynamodb': - dynamo_api_version = environ['HTTP_X_AMZ_TARGET'].split("_")[1].split(".")[0] - # If Newer API version, use dynamodb2 - if dynamo_api_version > "20111205": - host = "dynamodb2" + if environ['HTTP_X_AMZ_TARGET'].startswith('DynamoDBStreams'): + host = 'dynamodbstreams' + else: + dynamo_api_version = environ['HTTP_X_AMZ_TARGET'].split("_")[1].split(".")[0] + # If Newer API version, use dynamodb2 + if dynamo_api_version > "20111205": + host = "dynamodb2" else: host = "{service}.{region}.amazonaws.com".format( service=service, region=region) diff --git a/moto/sqs/models.py b/moto/sqs/models.py index f3262a988..1404ded75 100644 --- a/moto/sqs/models.py +++ b/moto/sqs/models.py @@ -534,7 +534,7 @@ class SQSBackend(BaseBackend): break import time - time.sleep(0.001) + time.sleep(0.01) continue previous_result_count = len(result) diff --git a/moto/ssm/models.py b/moto/ssm/models.py index f16a7d981..2f316a3ac 100644 --- a/moto/ssm/models.py +++ b/moto/ssm/models.py @@ -14,10 +14,12 @@ import itertools class Parameter(BaseModel): - def __init__(self, name, value, type, description, keyid, last_modified_date, version): + def __init__(self, name, value, type, description, allowed_pattern, keyid, + last_modified_date, version): self.name = name self.type = type self.description = description + self.allowed_pattern = allowed_pattern self.keyid = keyid self.last_modified_date 
= last_modified_date self.version = version @@ -58,6 +60,10 @@ class Parameter(BaseModel): if self.keyid: r['KeyId'] = self.keyid + + if self.allowed_pattern: + r['AllowedPattern'] = self.allowed_pattern + return r @@ -291,7 +297,8 @@ class SimpleSystemManagerBackend(BaseBackend): return self._parameters[name] return None - def put_parameter(self, name, description, value, type, keyid, overwrite): + def put_parameter(self, name, description, value, type, allowed_pattern, + keyid, overwrite): previous_parameter = self._parameters.get(name) version = 1 @@ -302,8 +309,8 @@ class SimpleSystemManagerBackend(BaseBackend): return last_modified_date = time.time() - self._parameters[name] = Parameter( - name, value, type, description, keyid, last_modified_date, version) + self._parameters[name] = Parameter(name, value, type, description, + allowed_pattern, keyid, last_modified_date, version) return version def add_tags_to_resource(self, resource_type, resource_id, tags): diff --git a/moto/ssm/responses.py b/moto/ssm/responses.py index eb05e51b6..c47d4127a 100644 --- a/moto/ssm/responses.py +++ b/moto/ssm/responses.py @@ -160,11 +160,12 @@ class SimpleSystemManagerResponse(BaseResponse): description = self._get_param('Description') value = self._get_param('Value') type_ = self._get_param('Type') + allowed_pattern = self._get_param('AllowedPattern') keyid = self._get_param('KeyId') overwrite = self._get_param('Overwrite', False) result = self.ssm_backend.put_parameter( - name, description, value, type_, keyid, overwrite) + name, description, value, type_, allowed_pattern, keyid, overwrite) if result is None: error = { diff --git a/moto/ssm/urls.py b/moto/ssm/urls.py index d22866486..9ac327325 100644 --- a/moto/ssm/urls.py +++ b/moto/ssm/urls.py @@ -3,6 +3,7 @@ from .responses import SimpleSystemManagerResponse url_bases = [ "https?://ssm.(.+).amazonaws.com", + "https?://ssm.(.+).amazonaws.com.cn", ] url_paths = { diff --git a/setup.py b/setup.py index 1ba502beb..2ef3227d4 
100755 --- a/setup.py +++ b/setup.py @@ -1,29 +1,44 @@ #!/usr/bin/env python from __future__ import unicode_literals +import codecs +import os +import re import setuptools from setuptools import setup, find_packages import sys +# Borrowed from pip at https://github.com/pypa/pip/blob/62c27dee45625e1b63d1e023b0656310f276e050/setup.py#L11-L15 +here = os.path.abspath(os.path.dirname(__file__)) + +def read(*parts): + # intentionally *not* adding an encoding option to open, See: + # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690 + with codecs.open(os.path.join(here, *parts), 'r') as fp: + return fp.read() + + install_requires = [ - "Jinja2>=2.7.3", + "Jinja2>=2.10.1", "boto>=2.36.0", - "boto3>=1.6.16", - "botocore>=1.12.13", + "boto3>=1.9.86", + "botocore>=1.12.86", "cryptography>=2.3.0", "requests>=2.5", "xmltodict", "six>1.9", "werkzeug", - "pyaml", + "PyYAML", "pytz", "python-dateutil<3.0.0,>=2.1", - "python-jose<3.0.0", + "python-jose<4.0.0", "mock", "docker>=2.5.1", - "jsondiff==1.1.1", + "jsondiff==1.1.2", "aws-xray-sdk!=0.96,>=0.93", "responses>=0.9.0", + "idna<2.9,>=2.5", + "cfn-lint", ] extras_require = { @@ -40,9 +55,11 @@ else: setup( name='moto', - version='1.3.7', + version='1.3.8', description='A library that allows your python tests to easily' ' mock out the boto library', + long_description=read('README.md'), + long_description_content_type='text/markdown', author='Steve Pulec', author_email='spulec@gmail.com', url='https://github.com/spulec/moto', diff --git a/tests/test_autoscaling/test_autoscaling.py b/tests/test_autoscaling/test_autoscaling.py index f86ca2b81..b1a65fb7e 100644 --- a/tests/test_autoscaling/test_autoscaling.py +++ b/tests/test_autoscaling/test_autoscaling.py @@ -710,6 +710,7 @@ def test_create_autoscaling_group_boto3(): 'PropagateAtLaunch': False }], VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=False, ) response['ResponseMetadata']['HTTPStatusCode'].should.equal(200) @@ -728,13 
+729,48 @@ def test_describe_autoscaling_groups_boto3(): MaxSize=20, DesiredCapacity=5, VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=True, ) + response = client.describe_auto_scaling_groups( AutoScalingGroupNames=["test_asg"] ) response['ResponseMetadata']['HTTPStatusCode'].should.equal(200) - response['AutoScalingGroups'][0][ - 'AutoScalingGroupName'].should.equal('test_asg') + group = response['AutoScalingGroups'][0] + group['AutoScalingGroupName'].should.equal('test_asg') + group['NewInstancesProtectedFromScaleIn'].should.equal(True) + group['Instances'][0]['ProtectedFromScaleIn'].should.equal(True) + + +@mock_autoscaling +def test_describe_autoscaling_instances_boto3(): + mocked_networking = setup_networking() + client = boto3.client('autoscaling', region_name='us-east-1') + _ = client.create_launch_configuration( + LaunchConfigurationName='test_launch_configuration' + ) + _ = client.create_auto_scaling_group( + AutoScalingGroupName='test_asg', + LaunchConfigurationName='test_launch_configuration', + MinSize=0, + MaxSize=20, + DesiredCapacity=5, + VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=True, + ) + + response = client.describe_auto_scaling_groups( + AutoScalingGroupNames=["test_asg"] + ) + instance_ids = [ + instance['InstanceId'] + for instance in response['AutoScalingGroups'][0]['Instances'] + ] + + response = client.describe_auto_scaling_instances(InstanceIds=instance_ids) + for instance in response['AutoScalingInstances']: + instance['AutoScalingGroupName'].should.equal('test_asg') + instance['ProtectedFromScaleIn'].should.equal(True) @mock_autoscaling @@ -751,17 +787,21 @@ def test_update_autoscaling_group_boto3(): MaxSize=20, DesiredCapacity=5, VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=True, ) - response = client.update_auto_scaling_group( + _ = client.update_auto_scaling_group( AutoScalingGroupName='test_asg', MinSize=1, + 
NewInstancesProtectedFromScaleIn=False, ) response = client.describe_auto_scaling_groups( AutoScalingGroupNames=["test_asg"] ) - response['AutoScalingGroups'][0]['MinSize'].should.equal(1) + group = response['AutoScalingGroups'][0] + group['MinSize'].should.equal(1) + group['NewInstancesProtectedFromScaleIn'].should.equal(False) @mock_autoscaling @@ -992,9 +1032,7 @@ def test_attach_one_instance(): 'PropagateAtLaunch': True }], VPCZoneIdentifier=mocked_networking['subnet1'], - ) - response = client.describe_auto_scaling_groups( - AutoScalingGroupNames=['test_asg'] + NewInstancesProtectedFromScaleIn=True, ) ec2 = boto3.resource('ec2', 'us-east-1') @@ -1009,7 +1047,11 @@ def test_attach_one_instance(): response = client.describe_auto_scaling_groups( AutoScalingGroupNames=['test_asg'] ) - response['AutoScalingGroups'][0]['Instances'].should.have.length_of(3) + instances = response['AutoScalingGroups'][0]['Instances'] + instances.should.have.length_of(3) + for instance in instances: + instance['ProtectedFromScaleIn'].should.equal(True) + @mock_autoscaling @mock_ec2 @@ -1100,3 +1142,111 @@ def test_suspend_processes(): launch_suspended = True assert launch_suspended is True + +@mock_autoscaling +def test_set_instance_protection(): + mocked_networking = setup_networking() + client = boto3.client('autoscaling', region_name='us-east-1') + _ = client.create_launch_configuration( + LaunchConfigurationName='test_launch_configuration' + ) + _ = client.create_auto_scaling_group( + AutoScalingGroupName='test_asg', + LaunchConfigurationName='test_launch_configuration', + MinSize=0, + MaxSize=20, + DesiredCapacity=5, + VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=False, + ) + + response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg']) + instance_ids = [ + instance['InstanceId'] + for instance in response['AutoScalingGroups'][0]['Instances'] + ] + protected = instance_ids[:3] + + _ = client.set_instance_protection( + 
AutoScalingGroupName='test_asg', + InstanceIds=protected, + ProtectedFromScaleIn=True, + ) + + response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg']) + for instance in response['AutoScalingGroups'][0]['Instances']: + instance['ProtectedFromScaleIn'].should.equal( + instance['InstanceId'] in protected + ) + + +@mock_autoscaling +def test_set_desired_capacity_up_boto3(): + mocked_networking = setup_networking() + client = boto3.client('autoscaling', region_name='us-east-1') + _ = client.create_launch_configuration( + LaunchConfigurationName='test_launch_configuration' + ) + _ = client.create_auto_scaling_group( + AutoScalingGroupName='test_asg', + LaunchConfigurationName='test_launch_configuration', + MinSize=0, + MaxSize=20, + DesiredCapacity=5, + VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=True, + ) + + _ = client.set_desired_capacity( + AutoScalingGroupName='test_asg', + DesiredCapacity=10, + ) + + response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg']) + instances = response['AutoScalingGroups'][0]['Instances'] + instances.should.have.length_of(10) + for instance in instances: + instance['ProtectedFromScaleIn'].should.equal(True) + + +@mock_autoscaling +def test_set_desired_capacity_down_boto3(): + mocked_networking = setup_networking() + client = boto3.client('autoscaling', region_name='us-east-1') + _ = client.create_launch_configuration( + LaunchConfigurationName='test_launch_configuration' + ) + _ = client.create_auto_scaling_group( + AutoScalingGroupName='test_asg', + LaunchConfigurationName='test_launch_configuration', + MinSize=0, + MaxSize=20, + DesiredCapacity=5, + VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=True, + ) + + response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg']) + instance_ids = [ + instance['InstanceId'] + for instance in response['AutoScalingGroups'][0]['Instances'] + ] + 
unprotected, protected = instance_ids[:2], instance_ids[2:] + + _ = client.set_instance_protection( + AutoScalingGroupName='test_asg', + InstanceIds=unprotected, + ProtectedFromScaleIn=False, + ) + + _ = client.set_desired_capacity( + AutoScalingGroupName='test_asg', + DesiredCapacity=1, + ) + + response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg']) + group = response['AutoScalingGroups'][0] + group['DesiredCapacity'].should.equal(1) + instance_ids = {instance['InstanceId'] for instance in group['Instances']} + set(protected).should.equal(instance_ids) + set(unprotected).should_not.be.within(instance_ids) # only unprotected killed diff --git a/tests/test_awslambda/test_lambda.py b/tests/test_awslambda/test_lambda.py index 8ea9cc6fd..479aaaa8a 100644 --- a/tests/test_awslambda/test_lambda.py +++ b/tests/test_awslambda/test_lambda.py @@ -12,6 +12,8 @@ import sure # noqa from freezegun import freeze_time from moto import mock_lambda, mock_s3, mock_ec2, mock_sns, mock_logs, settings +from nose.tools import assert_raises +from botocore.exceptions import ClientError _lambda_region = 'us-west-2' @@ -397,6 +399,11 @@ def test_get_function(): result = conn.get_function(FunctionName='testFunction', Qualifier='$LATEST') result['Configuration']['Version'].should.equal('$LATEST') + # Test get function when can't find function name + with assert_raises(ClientError): + conn.get_function(FunctionName='junk', Qualifier='$LATEST') + + @mock_lambda @mock_s3 @@ -464,7 +471,8 @@ def test_publish(): function_list['Functions'].should.have.length_of(1) latest_arn = function_list['Functions'][0]['FunctionArn'] - conn.publish_version(FunctionName='testFunction') + res = conn.publish_version(FunctionName='testFunction') + assert res['ResponseMetadata']['HTTPStatusCode'] == 201 function_list = conn.list_functions() function_list['Functions'].should.have.length_of(2) @@ -819,3 +827,87 @@ def get_function_policy(): assert isinstance(response['Policy'], str) res = 
json.loads(response['Policy']) assert res['Statement'][0]['Action'] == 'lambda:InvokeFunction' + + +@mock_lambda +@mock_s3 +def test_list_versions_by_function(): + s3_conn = boto3.client('s3', 'us-west-2') + s3_conn.create_bucket(Bucket='test-bucket') + + zip_content = get_test_zip_file2() + s3_conn.put_object(Bucket='test-bucket', Key='test.zip', Body=zip_content) + conn = boto3.client('lambda', 'us-west-2') + + conn.create_function( + FunctionName='testFunction', + Runtime='python2.7', + Role='test-iam-role', + Handler='lambda_function.lambda_handler', + Code={ + 'S3Bucket': 'test-bucket', + 'S3Key': 'test.zip', + }, + Description='test lambda function', + Timeout=3, + MemorySize=128, + Publish=True, + ) + + res = conn.publish_version(FunctionName='testFunction') + assert res['ResponseMetadata']['HTTPStatusCode'] == 201 + versions = conn.list_versions_by_function(FunctionName='testFunction') + + assert versions['Versions'][0]['FunctionArn'] == 'arn:aws:lambda:us-west-2:123456789012:function:testFunction:$LATEST' + + +@mock_lambda +@mock_s3 +def test_create_function_with_already_exists(): + s3_conn = boto3.client('s3', 'us-west-2') + s3_conn.create_bucket(Bucket='test-bucket') + + zip_content = get_test_zip_file2() + s3_conn.put_object(Bucket='test-bucket', Key='test.zip', Body=zip_content) + conn = boto3.client('lambda', 'us-west-2') + + conn.create_function( + FunctionName='testFunction', + Runtime='python2.7', + Role='test-iam-role', + Handler='lambda_function.lambda_handler', + Code={ + 'S3Bucket': 'test-bucket', + 'S3Key': 'test.zip', + }, + Description='test lambda function', + Timeout=3, + MemorySize=128, + Publish=True, + ) + + response = conn.create_function( + FunctionName='testFunction', + Runtime='python2.7', + Role='test-iam-role', + Handler='lambda_function.lambda_handler', + Code={ + 'S3Bucket': 'test-bucket', + 'S3Key': 'test.zip', + }, + Description='test lambda function', + Timeout=3, + MemorySize=128, + Publish=True, + ) + + assert 
response['FunctionName'] == 'testFunction' + + +@mock_lambda +@mock_s3 +def test_list_versions_by_function_for_nonexistent_function(): + conn = boto3.client('lambda', 'us-west-2') + versions = conn.list_versions_by_function(FunctionName='testFunction') + + assert len(versions['Versions']) == 0 diff --git a/tests/test_batch/test_batch.py b/tests/test_batch/test_batch.py index ec24cd911..310ac0b48 100644 --- a/tests/test_batch/test_batch.py +++ b/tests/test_batch/test_batch.py @@ -323,6 +323,54 @@ def test_create_job_queue(): resp.should.contain('jobQueues') len(resp['jobQueues']).should.equal(0) + # Create job queue which already exists + try: + resp = batch_client.create_job_queue( + jobQueueName='test_job_queue', + state='ENABLED', + priority=123, + computeEnvironmentOrder=[ + { + 'order': 123, + 'computeEnvironment': arn + }, + ] + ) + + except ClientError as err: + err.response['Error']['Code'].should.equal('ClientException') + + + # Create job queue with incorrect state + try: + resp = batch_client.create_job_queue( + jobQueueName='test_job_queue2', + state='JUNK', + priority=123, + computeEnvironmentOrder=[ + { + 'order': 123, + 'computeEnvironment': arn + }, + ] + ) + + except ClientError as err: + err.response['Error']['Code'].should.equal('ClientException') + + # Create job queue with no compute env + try: + resp = batch_client.create_job_queue( + jobQueueName='test_job_queue3', + state='JUNK', + priority=123, + computeEnvironmentOrder=[ + + ] + ) + + except ClientError as err: + err.response['Error']['Code'].should.equal('ClientException') @mock_ec2 @mock_ecs @@ -397,6 +445,17 @@ def test_update_job_queue(): len(resp['jobQueues']).should.equal(1) resp['jobQueues'][0]['priority'].should.equal(5) + batch_client.update_job_queue( + jobQueue='test_job_queue', + priority=5 + ) + + resp = batch_client.describe_job_queues() + resp.should.contain('jobQueues') + len(resp['jobQueues']).should.equal(1) + resp['jobQueues'][0]['priority'].should.equal(5) + + @mock_ec2 
@mock_ecs diff --git a/tests/test_cloudformation/fixtures/vpc_eni.py b/tests/test_cloudformation/fixtures/vpc_eni.py index bc13e691f..3f8eb2d03 100644 --- a/tests/test_cloudformation/fixtures/vpc_eni.py +++ b/tests/test_cloudformation/fixtures/vpc_eni.py @@ -1,34 +1,38 @@ -from __future__ import unicode_literals - -template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Description": "VPC ENI Test CloudFormation", - "Resources": { - "ENI": { - "Type": "AWS::EC2::NetworkInterface", - "Properties": { - "SubnetId": {"Ref": "Subnet"} - } - }, - "Subnet": { - "Type": "AWS::EC2::Subnet", - "Properties": { - "AvailabilityZone": "us-east-1a", - "VpcId": {"Ref": "VPC"}, - "CidrBlock": "10.0.0.0/24" - } - }, - "VPC": { - "Type": "AWS::EC2::VPC", - "Properties": { - "CidrBlock": "10.0.0.0/16" - } - } - }, - "Outputs": { - "NinjaENI": { - "Description": "Elastic IP mapping to Auto-Scaling Group", - "Value": {"Ref": "ENI"} - } - } -} +from __future__ import unicode_literals + +template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "VPC ENI Test CloudFormation", + "Resources": { + "ENI": { + "Type": "AWS::EC2::NetworkInterface", + "Properties": { + "SubnetId": {"Ref": "Subnet"} + } + }, + "Subnet": { + "Type": "AWS::EC2::Subnet", + "Properties": { + "AvailabilityZone": "us-east-1a", + "VpcId": {"Ref": "VPC"}, + "CidrBlock": "10.0.0.0/24" + } + }, + "VPC": { + "Type": "AWS::EC2::VPC", + "Properties": { + "CidrBlock": "10.0.0.0/16" + } + } + }, + "Outputs": { + "NinjaENI": { + "Description": "Elastic IP mapping to Auto-Scaling Group", + "Value": {"Ref": "ENI"} + }, + "ENIIpAddress": { + "Description": "ENI's Private IP address", + "Value": {"Fn::GetAtt": ["ENI", "PrimaryPrivateIpAddress"]} + } + } +} diff --git a/tests/test_cloudformation/test_cloudformation_stack_crud.py b/tests/test_cloudformation/test_cloudformation_stack_crud.py index a61aa157a..b7906632b 100644 --- a/tests/test_cloudformation/test_cloudformation_stack_crud.py +++ 
b/tests/test_cloudformation/test_cloudformation_stack_crud.py @@ -1,672 +1,672 @@ -from __future__ import unicode_literals - -import os -import json - -import boto -import boto.s3 -import boto.s3.key -import boto.cloudformation -from boto.exception import BotoServerError -import sure # noqa -# Ensure 'assert_raises' context manager support for Python 2.6 -import tests.backport_assert_raises # noqa -from nose.tools import assert_raises - -from moto import mock_cloudformation_deprecated, mock_s3_deprecated, mock_route53_deprecated -from moto.cloudformation import cloudformation_backends - -dummy_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Description": "Stack 1", - "Resources": {}, -} - -dummy_template2 = { - "AWSTemplateFormatVersion": "2010-09-09", - "Description": "Stack 2", - "Resources": {}, -} - -# template with resource which has no delete attribute defined -dummy_template3 = { - "AWSTemplateFormatVersion": "2010-09-09", - "Description": "Stack 3", - "Resources": { - "VPC": { - "Properties": { - "CidrBlock": "192.168.0.0/16", - }, - "Type": "AWS::EC2::VPC" - } - }, -} - -dummy_template_json = json.dumps(dummy_template) -dummy_template_json2 = json.dumps(dummy_template2) -dummy_template_json3 = json.dumps(dummy_template3) - - -@mock_cloudformation_deprecated -def test_create_stack(): - conn = boto.connect_cloudformation() - conn.create_stack( - "test_stack", - template_body=dummy_template_json, - ) - - stack = conn.describe_stacks()[0] - stack.stack_name.should.equal('test_stack') - stack.get_template().should.equal({ - 'GetTemplateResponse': { - 'GetTemplateResult': { - 'TemplateBody': dummy_template_json, - 'ResponseMetadata': { - 'RequestId': '2d06e36c-ac1d-11e0-a958-f9382b6eb86bEXAMPLE' - } - } - } - - }) - - -@mock_cloudformation_deprecated -@mock_route53_deprecated -def test_create_stack_hosted_zone_by_id(): - conn = boto.connect_cloudformation() - dummy_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Description": "Stack 1", - 
"Parameters": { - }, - "Resources": { - "Bar": { - "Type" : "AWS::Route53::HostedZone", - "Properties" : { - "Name" : "foo.bar.baz", - } - }, - }, - } - dummy_template2 = { - "AWSTemplateFormatVersion": "2010-09-09", - "Description": "Stack 2", - "Parameters": { - "ZoneId": { "Type": "String" } - }, - "Resources": { - "Foo": { - "Properties": { - "HostedZoneId": {"Ref": "ZoneId"}, - "RecordSets": [] - }, - "Type": "AWS::Route53::RecordSetGroup" - } - }, - } - conn.create_stack( - "test_stack", - template_body=json.dumps(dummy_template), - parameters={}.items() - ) - r53_conn = boto.connect_route53() - zone_id = r53_conn.get_zones()[0].id - conn.create_stack( - "test_stack", - template_body=json.dumps(dummy_template2), - parameters={"ZoneId": zone_id}.items() - ) - - stack = conn.describe_stacks()[0] - assert stack.list_resources() - - -@mock_cloudformation_deprecated -def test_creating_stacks_across_regions(): - west1_conn = boto.cloudformation.connect_to_region("us-west-1") - west1_conn.create_stack("test_stack", template_body=dummy_template_json) - - west2_conn = boto.cloudformation.connect_to_region("us-west-2") - west2_conn.create_stack("test_stack", template_body=dummy_template_json) - - list(west1_conn.describe_stacks()).should.have.length_of(1) - list(west2_conn.describe_stacks()).should.have.length_of(1) - - -@mock_cloudformation_deprecated -def test_create_stack_with_notification_arn(): - conn = boto.connect_cloudformation() - conn.create_stack( - "test_stack_with_notifications", - template_body=dummy_template_json, - notification_arns='arn:aws:sns:us-east-1:123456789012:fake-queue' - ) - - stack = conn.describe_stacks()[0] - [n.value for n in stack.notification_arns].should.contain( - 'arn:aws:sns:us-east-1:123456789012:fake-queue') - - -@mock_cloudformation_deprecated -@mock_s3_deprecated -def test_create_stack_from_s3_url(): - s3_conn = boto.s3.connect_to_region('us-west-1') - bucket = s3_conn.create_bucket("foobar") - key = boto.s3.key.Key(bucket) - 
key.key = "template-key" - key.set_contents_from_string(dummy_template_json) - key_url = key.generate_url(expires_in=0, query_auth=False) - - conn = boto.cloudformation.connect_to_region('us-west-1') - conn.create_stack('new-stack', template_url=key_url) - - stack = conn.describe_stacks()[0] - stack.stack_name.should.equal('new-stack') - stack.get_template().should.equal( - { - 'GetTemplateResponse': { - 'GetTemplateResult': { - 'TemplateBody': dummy_template_json, - 'ResponseMetadata': { - 'RequestId': '2d06e36c-ac1d-11e0-a958-f9382b6eb86bEXAMPLE' - } - } - } - - }) - - -@mock_cloudformation_deprecated -def test_describe_stack_by_name(): - conn = boto.connect_cloudformation() - conn.create_stack( - "test_stack", - template_body=dummy_template_json, - ) - - stack = conn.describe_stacks("test_stack")[0] - stack.stack_name.should.equal('test_stack') - - -@mock_cloudformation_deprecated -def test_describe_stack_by_stack_id(): - conn = boto.connect_cloudformation() - conn.create_stack( - "test_stack", - template_body=dummy_template_json, - ) - - stack = conn.describe_stacks("test_stack")[0] - stack_by_id = conn.describe_stacks(stack.stack_id)[0] - stack_by_id.stack_id.should.equal(stack.stack_id) - stack_by_id.stack_name.should.equal("test_stack") - - -@mock_cloudformation_deprecated -def test_describe_deleted_stack(): - conn = boto.connect_cloudformation() - conn.create_stack( - "test_stack", - template_body=dummy_template_json, - ) - - stack = conn.describe_stacks("test_stack")[0] - stack_id = stack.stack_id - conn.delete_stack(stack.stack_id) - stack_by_id = conn.describe_stacks(stack_id)[0] - stack_by_id.stack_id.should.equal(stack.stack_id) - stack_by_id.stack_name.should.equal("test_stack") - stack_by_id.stack_status.should.equal("DELETE_COMPLETE") - - -@mock_cloudformation_deprecated -def test_get_template_by_name(): - conn = boto.connect_cloudformation() - conn.create_stack( - "test_stack", - template_body=dummy_template_json, - ) - - template = 
conn.get_template("test_stack") - template.should.equal({ - 'GetTemplateResponse': { - 'GetTemplateResult': { - 'TemplateBody': dummy_template_json, - 'ResponseMetadata': { - 'RequestId': '2d06e36c-ac1d-11e0-a958-f9382b6eb86bEXAMPLE' - } - } - } - - }) - - -@mock_cloudformation_deprecated -def test_list_stacks(): - conn = boto.connect_cloudformation() - conn.create_stack( - "test_stack", - template_body=dummy_template_json, - ) - conn.create_stack( - "test_stack2", - template_body=dummy_template_json, - ) - - stacks = conn.list_stacks() - stacks.should.have.length_of(2) - stacks[0].template_description.should.equal("Stack 1") - - -@mock_cloudformation_deprecated -def test_delete_stack_by_name(): - conn = boto.connect_cloudformation() - conn.create_stack( - "test_stack", - template_body=dummy_template_json, - ) - - conn.list_stacks().should.have.length_of(1) - conn.delete_stack("test_stack") - conn.list_stacks().should.have.length_of(0) - - -@mock_cloudformation_deprecated -def test_delete_stack_by_id(): - conn = boto.connect_cloudformation() - stack_id = conn.create_stack( - "test_stack", - template_body=dummy_template_json, - ) - - conn.list_stacks().should.have.length_of(1) - conn.delete_stack(stack_id) - conn.list_stacks().should.have.length_of(0) - with assert_raises(BotoServerError): - conn.describe_stacks("test_stack") - - conn.describe_stacks(stack_id).should.have.length_of(1) - - -@mock_cloudformation_deprecated -def test_delete_stack_with_resource_missing_delete_attr(): - conn = boto.connect_cloudformation() - conn.create_stack( - "test_stack", - template_body=dummy_template_json3, - ) - - conn.list_stacks().should.have.length_of(1) - conn.delete_stack("test_stack") - conn.list_stacks().should.have.length_of(0) - - -@mock_cloudformation_deprecated -def test_bad_describe_stack(): - conn = boto.connect_cloudformation() - with assert_raises(BotoServerError): - conn.describe_stacks("bad_stack") - - -@mock_cloudformation_deprecated() -def 
test_cloudformation_params(): - dummy_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Description": "Stack 1", - "Resources": {}, - "Parameters": { - "APPNAME": { - "Default": "app-name", - "Description": "The name of the app", - "Type": "String" - } - } - } - dummy_template_json = json.dumps(dummy_template) - cfn = boto.connect_cloudformation() - cfn.create_stack('test_stack1', template_body=dummy_template_json, parameters=[ - ('APPNAME', 'testing123')]) - stack = cfn.describe_stacks('test_stack1')[0] - stack.parameters.should.have.length_of(1) - param = stack.parameters[0] - param.key.should.equal('APPNAME') - param.value.should.equal('testing123') - - -@mock_cloudformation_deprecated -def test_cloudformation_params_conditions_and_resources_are_distinct(): - dummy_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Description": "Stack 1", - "Conditions": { - "FooEnabled": { - "Fn::Equals": [ - { - "Ref": "FooEnabled" - }, - "true" - ] - }, - "FooDisabled": { - "Fn::Not": [ - { - "Fn::Equals": [ - { - "Ref": "FooEnabled" - }, - "true" - ] - } - ] - } - }, - "Parameters": { - "FooEnabled": { - "Type": "String", - "AllowedValues": [ - "true", - "false" - ] - } - }, - "Resources": { - "Bar": { - "Properties": { - "CidrBlock": "192.168.0.0/16", - }, - "Condition": "FooDisabled", - "Type": "AWS::EC2::VPC" - } - } - } - dummy_template_json = json.dumps(dummy_template) - cfn = boto.connect_cloudformation() - cfn.create_stack('test_stack1', template_body=dummy_template_json, parameters=[('FooEnabled', 'true')]) - stack = cfn.describe_stacks('test_stack1')[0] - resources = stack.list_resources() - assert not [resource for resource in resources if resource.logical_resource_id == 'Bar'] - - -@mock_cloudformation_deprecated -def test_stack_tags(): - conn = boto.connect_cloudformation() - conn.create_stack( - "test_stack", - template_body=dummy_template_json, - tags={"foo": "bar", "baz": "bleh"}, - ) - - stack = conn.describe_stacks()[0] - 
dict(stack.tags).should.equal({"foo": "bar", "baz": "bleh"}) - - -@mock_cloudformation_deprecated -def test_update_stack(): - conn = boto.connect_cloudformation() - conn.create_stack( - "test_stack", - template_body=dummy_template_json, - ) - - conn.update_stack("test_stack", dummy_template_json2) - - stack = conn.describe_stacks()[0] - stack.stack_status.should.equal("UPDATE_COMPLETE") - stack.get_template().should.equal({ - 'GetTemplateResponse': { - 'GetTemplateResult': { - 'TemplateBody': dummy_template_json2, - 'ResponseMetadata': { - 'RequestId': '2d06e36c-ac1d-11e0-a958-f9382b6eb86bEXAMPLE' - } - } - } - }) - - -@mock_cloudformation_deprecated -def test_update_stack_with_previous_template(): - conn = boto.connect_cloudformation() - conn.create_stack( - "test_stack", - template_body=dummy_template_json, - ) - conn.update_stack("test_stack", use_previous_template=True) - - stack = conn.describe_stacks()[0] - stack.stack_status.should.equal("UPDATE_COMPLETE") - stack.get_template().should.equal({ - 'GetTemplateResponse': { - 'GetTemplateResult': { - 'TemplateBody': dummy_template_json, - 'ResponseMetadata': { - 'RequestId': '2d06e36c-ac1d-11e0-a958-f9382b6eb86bEXAMPLE' - } - } - } - }) - - -@mock_cloudformation_deprecated -def test_update_stack_with_parameters(): - dummy_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Description": "Stack", - "Resources": { - "VPC": { - "Properties": { - "CidrBlock": {"Ref": "Bar"} - }, - "Type": "AWS::EC2::VPC" - } - }, - "Parameters": { - "Bar": { - "Type": "String" - } - } - } - dummy_template_json = json.dumps(dummy_template) - conn = boto.connect_cloudformation() - conn.create_stack( - "test_stack", - template_body=dummy_template_json, - parameters=[("Bar", "192.168.0.0/16")] - ) - conn.update_stack( - "test_stack", - template_body=dummy_template_json, - parameters=[("Bar", "192.168.0.1/16")] - ) - - stack = conn.describe_stacks()[0] - assert stack.parameters[0].value == "192.168.0.1/16" - - 
-@mock_cloudformation_deprecated -def test_update_stack_replace_tags(): - conn = boto.connect_cloudformation() - conn.create_stack( - "test_stack", - template_body=dummy_template_json, - tags={"foo": "bar"}, - ) - conn.update_stack( - "test_stack", - template_body=dummy_template_json, - tags={"foo": "baz"}, - ) - - stack = conn.describe_stacks()[0] - stack.stack_status.should.equal("UPDATE_COMPLETE") - # since there is one tag it doesn't come out as a list - dict(stack.tags).should.equal({"foo": "baz"}) - - -@mock_cloudformation_deprecated -def test_update_stack_when_rolled_back(): - conn = boto.connect_cloudformation() - stack_id = conn.create_stack( - "test_stack", template_body=dummy_template_json) - - cloudformation_backends[conn.region.name].stacks[ - stack_id].status = 'ROLLBACK_COMPLETE' - - with assert_raises(BotoServerError) as err: - conn.update_stack("test_stack", dummy_template_json) - - ex = err.exception - ex.body.should.match( - r'is in ROLLBACK_COMPLETE state and can not be updated') - ex.error_code.should.equal('ValidationError') - ex.reason.should.equal('Bad Request') - ex.status.should.equal(400) - - -@mock_cloudformation_deprecated -def test_describe_stack_events_shows_create_update_and_delete(): - conn = boto.connect_cloudformation() - stack_id = conn.create_stack( - "test_stack", template_body=dummy_template_json) - conn.update_stack(stack_id, template_body=dummy_template_json2) - conn.delete_stack(stack_id) - - # assert begins and ends with stack events - events = conn.describe_stack_events(stack_id) - events[0].resource_type.should.equal("AWS::CloudFormation::Stack") - events[-1].resource_type.should.equal("AWS::CloudFormation::Stack") - - # testing ordering of stack events without assuming resource events will not exist - # the AWS API returns events in reverse chronological order - stack_events_to_look_for = iter([ - ("DELETE_COMPLETE", None), - ("DELETE_IN_PROGRESS", "User Initiated"), - ("UPDATE_COMPLETE", None), - ("UPDATE_IN_PROGRESS", 
"User Initiated"), - ("CREATE_COMPLETE", None), - ("CREATE_IN_PROGRESS", "User Initiated"), - ]) - try: - for event in events: - event.stack_id.should.equal(stack_id) - event.stack_name.should.equal("test_stack") - event.event_id.should.match(r"[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}") - - if event.resource_type == "AWS::CloudFormation::Stack": - event.logical_resource_id.should.equal("test_stack") - event.physical_resource_id.should.equal(stack_id) - - status_to_look_for, reason_to_look_for = next( - stack_events_to_look_for) - event.resource_status.should.equal(status_to_look_for) - if reason_to_look_for is not None: - event.resource_status_reason.should.equal( - reason_to_look_for) - except StopIteration: - assert False, "Too many stack events" - - list(stack_events_to_look_for).should.be.empty - - -@mock_cloudformation_deprecated -def test_create_stack_lambda_and_dynamodb(): - conn = boto.connect_cloudformation() - dummy_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Description": "Stack Lambda Test 1", - "Parameters": { - }, - "Resources": { - "func1": { - "Type" : "AWS::Lambda::Function", - "Properties" : { - "Code": { - "S3Bucket": "bucket_123", - "S3Key": "key_123" - }, - "FunctionName": "func1", - "Handler": "handler.handler", - "Role": "role1", - "Runtime": "python2.7", - "Description": "descr", - "MemorySize": 12345, - } - }, - "func1version": { - "Type": "AWS::Lambda::LambdaVersion", - "Properties" : { - "Version": "v1.2.3" - } - }, - "tab1": { - "Type" : "AWS::DynamoDB::Table", - "Properties" : { - "TableName": "tab1", - "KeySchema": [{ - "AttributeName": "attr1", - "KeyType": "HASH" - }], - "AttributeDefinitions": [{ - "AttributeName": "attr1", - "AttributeType": "string" - }], - "ProvisionedThroughput": { - "ReadCapacityUnits": 10, - "WriteCapacityUnits": 10 - } - } - }, - "func1mapping": { - "Type": "AWS::Lambda::EventSourceMapping", - "Properties" : { - "FunctionName": "v1.2.3", - "EventSourceArn": 
"arn:aws:dynamodb:region:XXXXXX:table/tab1/stream/2000T00:00:00.000", - "StartingPosition": "0", - "BatchSize": 100, - "Enabled": True - } - } - }, - } - validate_s3_before = os.environ.get('VALIDATE_LAMBDA_S3', '') - try: - os.environ['VALIDATE_LAMBDA_S3'] = 'false' - conn.create_stack( - "test_stack_lambda_1", - template_body=json.dumps(dummy_template), - parameters={}.items() - ) - finally: - os.environ['VALIDATE_LAMBDA_S3'] = validate_s3_before - - stack = conn.describe_stacks()[0] - resources = stack.list_resources() - assert len(resources) == 4 - - -@mock_cloudformation_deprecated -def test_create_stack_kinesis(): - conn = boto.connect_cloudformation() - dummy_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Description": "Stack Kinesis Test 1", - "Parameters": {}, - "Resources": { - "stream1": { - "Type" : "AWS::Kinesis::Stream", - "Properties" : { - "Name": "stream1", - "ShardCount": 2 - } - } - } - } - conn.create_stack( - "test_stack_kinesis_1", - template_body=json.dumps(dummy_template), - parameters={}.items() - ) - - stack = conn.describe_stacks()[0] - resources = stack.list_resources() - assert len(resources) == 1 +from __future__ import unicode_literals + +import os +import json + +import boto +import boto.s3 +import boto.s3.key +import boto.cloudformation +from boto.exception import BotoServerError +import sure # noqa +# Ensure 'assert_raises' context manager support for Python 2.6 +import tests.backport_assert_raises # noqa +from nose.tools import assert_raises + +from moto import mock_cloudformation_deprecated, mock_s3_deprecated, mock_route53_deprecated +from moto.cloudformation import cloudformation_backends + +dummy_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack 1", + "Resources": {}, +} + +dummy_template2 = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack 2", + "Resources": {}, +} + +# template with resource which has no delete attribute defined +dummy_template3 = { + 
"AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack 3", + "Resources": { + "VPC": { + "Properties": { + "CidrBlock": "192.168.0.0/16", + }, + "Type": "AWS::EC2::VPC" + } + }, +} + +dummy_template_json = json.dumps(dummy_template) +dummy_template_json2 = json.dumps(dummy_template2) +dummy_template_json3 = json.dumps(dummy_template3) + + +@mock_cloudformation_deprecated +def test_create_stack(): + conn = boto.connect_cloudformation() + conn.create_stack( + "test_stack", + template_body=dummy_template_json, + ) + + stack = conn.describe_stacks()[0] + stack.stack_name.should.equal('test_stack') + stack.get_template().should.equal({ + 'GetTemplateResponse': { + 'GetTemplateResult': { + 'TemplateBody': dummy_template_json, + 'ResponseMetadata': { + 'RequestId': '2d06e36c-ac1d-11e0-a958-f9382b6eb86bEXAMPLE' + } + } + } + + }) + + +@mock_cloudformation_deprecated +@mock_route53_deprecated +def test_create_stack_hosted_zone_by_id(): + conn = boto.connect_cloudformation() + dummy_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack 1", + "Parameters": { + }, + "Resources": { + "Bar": { + "Type" : "AWS::Route53::HostedZone", + "Properties" : { + "Name" : "foo.bar.baz", + } + }, + }, + } + dummy_template2 = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack 2", + "Parameters": { + "ZoneId": { "Type": "String" } + }, + "Resources": { + "Foo": { + "Properties": { + "HostedZoneId": {"Ref": "ZoneId"}, + "RecordSets": [] + }, + "Type": "AWS::Route53::RecordSetGroup" + } + }, + } + conn.create_stack( + "test_stack", + template_body=json.dumps(dummy_template), + parameters={}.items() + ) + r53_conn = boto.connect_route53() + zone_id = r53_conn.get_zones()[0].id + conn.create_stack( + "test_stack", + template_body=json.dumps(dummy_template2), + parameters={"ZoneId": zone_id}.items() + ) + + stack = conn.describe_stacks()[0] + assert stack.list_resources() + + +@mock_cloudformation_deprecated +def 
test_creating_stacks_across_regions(): + west1_conn = boto.cloudformation.connect_to_region("us-west-1") + west1_conn.create_stack("test_stack", template_body=dummy_template_json) + + west2_conn = boto.cloudformation.connect_to_region("us-west-2") + west2_conn.create_stack("test_stack", template_body=dummy_template_json) + + list(west1_conn.describe_stacks()).should.have.length_of(1) + list(west2_conn.describe_stacks()).should.have.length_of(1) + + +@mock_cloudformation_deprecated +def test_create_stack_with_notification_arn(): + conn = boto.connect_cloudformation() + conn.create_stack( + "test_stack_with_notifications", + template_body=dummy_template_json, + notification_arns='arn:aws:sns:us-east-1:123456789012:fake-queue' + ) + + stack = conn.describe_stacks()[0] + [n.value for n in stack.notification_arns].should.contain( + 'arn:aws:sns:us-east-1:123456789012:fake-queue') + + +@mock_cloudformation_deprecated +@mock_s3_deprecated +def test_create_stack_from_s3_url(): + s3_conn = boto.s3.connect_to_region('us-west-1') + bucket = s3_conn.create_bucket("foobar") + key = boto.s3.key.Key(bucket) + key.key = "template-key" + key.set_contents_from_string(dummy_template_json) + key_url = key.generate_url(expires_in=0, query_auth=False) + + conn = boto.cloudformation.connect_to_region('us-west-1') + conn.create_stack('new-stack', template_url=key_url) + + stack = conn.describe_stacks()[0] + stack.stack_name.should.equal('new-stack') + stack.get_template().should.equal( + { + 'GetTemplateResponse': { + 'GetTemplateResult': { + 'TemplateBody': dummy_template_json, + 'ResponseMetadata': { + 'RequestId': '2d06e36c-ac1d-11e0-a958-f9382b6eb86bEXAMPLE' + } + } + } + + }) + + +@mock_cloudformation_deprecated +def test_describe_stack_by_name(): + conn = boto.connect_cloudformation() + conn.create_stack( + "test_stack", + template_body=dummy_template_json, + ) + + stack = conn.describe_stacks("test_stack")[0] + stack.stack_name.should.equal('test_stack') + + 
+@mock_cloudformation_deprecated +def test_describe_stack_by_stack_id(): + conn = boto.connect_cloudformation() + conn.create_stack( + "test_stack", + template_body=dummy_template_json, + ) + + stack = conn.describe_stacks("test_stack")[0] + stack_by_id = conn.describe_stacks(stack.stack_id)[0] + stack_by_id.stack_id.should.equal(stack.stack_id) + stack_by_id.stack_name.should.equal("test_stack") + + +@mock_cloudformation_deprecated +def test_describe_deleted_stack(): + conn = boto.connect_cloudformation() + conn.create_stack( + "test_stack", + template_body=dummy_template_json, + ) + + stack = conn.describe_stacks("test_stack")[0] + stack_id = stack.stack_id + conn.delete_stack(stack.stack_id) + stack_by_id = conn.describe_stacks(stack_id)[0] + stack_by_id.stack_id.should.equal(stack.stack_id) + stack_by_id.stack_name.should.equal("test_stack") + stack_by_id.stack_status.should.equal("DELETE_COMPLETE") + + +@mock_cloudformation_deprecated +def test_get_template_by_name(): + conn = boto.connect_cloudformation() + conn.create_stack( + "test_stack", + template_body=dummy_template_json, + ) + + template = conn.get_template("test_stack") + template.should.equal({ + 'GetTemplateResponse': { + 'GetTemplateResult': { + 'TemplateBody': dummy_template_json, + 'ResponseMetadata': { + 'RequestId': '2d06e36c-ac1d-11e0-a958-f9382b6eb86bEXAMPLE' + } + } + } + + }) + + +@mock_cloudformation_deprecated +def test_list_stacks(): + conn = boto.connect_cloudformation() + conn.create_stack( + "test_stack", + template_body=dummy_template_json, + ) + conn.create_stack( + "test_stack2", + template_body=dummy_template_json, + ) + + stacks = conn.list_stacks() + stacks.should.have.length_of(2) + stacks[0].template_description.should.equal("Stack 1") + + +@mock_cloudformation_deprecated +def test_delete_stack_by_name(): + conn = boto.connect_cloudformation() + conn.create_stack( + "test_stack", + template_body=dummy_template_json, + ) + + conn.describe_stacks().should.have.length_of(1) + 
conn.delete_stack("test_stack") + conn.describe_stacks().should.have.length_of(0) + + +@mock_cloudformation_deprecated +def test_delete_stack_by_id(): + conn = boto.connect_cloudformation() + stack_id = conn.create_stack( + "test_stack", + template_body=dummy_template_json, + ) + + conn.describe_stacks().should.have.length_of(1) + conn.delete_stack(stack_id) + conn.describe_stacks().should.have.length_of(0) + with assert_raises(BotoServerError): + conn.describe_stacks("test_stack") + + conn.describe_stacks(stack_id).should.have.length_of(1) + + +@mock_cloudformation_deprecated +def test_delete_stack_with_resource_missing_delete_attr(): + conn = boto.connect_cloudformation() + conn.create_stack( + "test_stack", + template_body=dummy_template_json3, + ) + + conn.describe_stacks().should.have.length_of(1) + conn.delete_stack("test_stack") + conn.describe_stacks().should.have.length_of(0) + + +@mock_cloudformation_deprecated +def test_bad_describe_stack(): + conn = boto.connect_cloudformation() + with assert_raises(BotoServerError): + conn.describe_stacks("bad_stack") + + +@mock_cloudformation_deprecated() +def test_cloudformation_params(): + dummy_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack 1", + "Resources": {}, + "Parameters": { + "APPNAME": { + "Default": "app-name", + "Description": "The name of the app", + "Type": "String" + } + } + } + dummy_template_json = json.dumps(dummy_template) + cfn = boto.connect_cloudformation() + cfn.create_stack('test_stack1', template_body=dummy_template_json, parameters=[ + ('APPNAME', 'testing123')]) + stack = cfn.describe_stacks('test_stack1')[0] + stack.parameters.should.have.length_of(1) + param = stack.parameters[0] + param.key.should.equal('APPNAME') + param.value.should.equal('testing123') + + +@mock_cloudformation_deprecated +def test_cloudformation_params_conditions_and_resources_are_distinct(): + dummy_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack 1", 
+ "Conditions": { + "FooEnabled": { + "Fn::Equals": [ + { + "Ref": "FooEnabled" + }, + "true" + ] + }, + "FooDisabled": { + "Fn::Not": [ + { + "Fn::Equals": [ + { + "Ref": "FooEnabled" + }, + "true" + ] + } + ] + } + }, + "Parameters": { + "FooEnabled": { + "Type": "String", + "AllowedValues": [ + "true", + "false" + ] + } + }, + "Resources": { + "Bar": { + "Properties": { + "CidrBlock": "192.168.0.0/16", + }, + "Condition": "FooDisabled", + "Type": "AWS::EC2::VPC" + } + } + } + dummy_template_json = json.dumps(dummy_template) + cfn = boto.connect_cloudformation() + cfn.create_stack('test_stack1', template_body=dummy_template_json, parameters=[('FooEnabled', 'true')]) + stack = cfn.describe_stacks('test_stack1')[0] + resources = stack.list_resources() + assert not [resource for resource in resources if resource.logical_resource_id == 'Bar'] + + +@mock_cloudformation_deprecated +def test_stack_tags(): + conn = boto.connect_cloudformation() + conn.create_stack( + "test_stack", + template_body=dummy_template_json, + tags={"foo": "bar", "baz": "bleh"}, + ) + + stack = conn.describe_stacks()[0] + dict(stack.tags).should.equal({"foo": "bar", "baz": "bleh"}) + + +@mock_cloudformation_deprecated +def test_update_stack(): + conn = boto.connect_cloudformation() + conn.create_stack( + "test_stack", + template_body=dummy_template_json, + ) + + conn.update_stack("test_stack", dummy_template_json2) + + stack = conn.describe_stacks()[0] + stack.stack_status.should.equal("UPDATE_COMPLETE") + stack.get_template().should.equal({ + 'GetTemplateResponse': { + 'GetTemplateResult': { + 'TemplateBody': dummy_template_json2, + 'ResponseMetadata': { + 'RequestId': '2d06e36c-ac1d-11e0-a958-f9382b6eb86bEXAMPLE' + } + } + } + }) + + +@mock_cloudformation_deprecated +def test_update_stack_with_previous_template(): + conn = boto.connect_cloudformation() + conn.create_stack( + "test_stack", + template_body=dummy_template_json, + ) + conn.update_stack("test_stack", use_previous_template=True) + + 
stack = conn.describe_stacks()[0] + stack.stack_status.should.equal("UPDATE_COMPLETE") + stack.get_template().should.equal({ + 'GetTemplateResponse': { + 'GetTemplateResult': { + 'TemplateBody': dummy_template_json, + 'ResponseMetadata': { + 'RequestId': '2d06e36c-ac1d-11e0-a958-f9382b6eb86bEXAMPLE' + } + } + } + }) + + +@mock_cloudformation_deprecated +def test_update_stack_with_parameters(): + dummy_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack", + "Resources": { + "VPC": { + "Properties": { + "CidrBlock": {"Ref": "Bar"} + }, + "Type": "AWS::EC2::VPC" + } + }, + "Parameters": { + "Bar": { + "Type": "String" + } + } + } + dummy_template_json = json.dumps(dummy_template) + conn = boto.connect_cloudformation() + conn.create_stack( + "test_stack", + template_body=dummy_template_json, + parameters=[("Bar", "192.168.0.0/16")] + ) + conn.update_stack( + "test_stack", + template_body=dummy_template_json, + parameters=[("Bar", "192.168.0.1/16")] + ) + + stack = conn.describe_stacks()[0] + assert stack.parameters[0].value == "192.168.0.1/16" + + +@mock_cloudformation_deprecated +def test_update_stack_replace_tags(): + conn = boto.connect_cloudformation() + conn.create_stack( + "test_stack", + template_body=dummy_template_json, + tags={"foo": "bar"}, + ) + conn.update_stack( + "test_stack", + template_body=dummy_template_json, + tags={"foo": "baz"}, + ) + + stack = conn.describe_stacks()[0] + stack.stack_status.should.equal("UPDATE_COMPLETE") + # since there is one tag it doesn't come out as a list + dict(stack.tags).should.equal({"foo": "baz"}) + + +@mock_cloudformation_deprecated +def test_update_stack_when_rolled_back(): + conn = boto.connect_cloudformation() + stack_id = conn.create_stack( + "test_stack", template_body=dummy_template_json) + + cloudformation_backends[conn.region.name].stacks[ + stack_id].status = 'ROLLBACK_COMPLETE' + + with assert_raises(BotoServerError) as err: + conn.update_stack("test_stack", dummy_template_json) + 
+ ex = err.exception + ex.body.should.match( + r'is in ROLLBACK_COMPLETE state and can not be updated') + ex.error_code.should.equal('ValidationError') + ex.reason.should.equal('Bad Request') + ex.status.should.equal(400) + + +@mock_cloudformation_deprecated +def test_describe_stack_events_shows_create_update_and_delete(): + conn = boto.connect_cloudformation() + stack_id = conn.create_stack( + "test_stack", template_body=dummy_template_json) + conn.update_stack(stack_id, template_body=dummy_template_json2) + conn.delete_stack(stack_id) + + # assert begins and ends with stack events + events = conn.describe_stack_events(stack_id) + events[0].resource_type.should.equal("AWS::CloudFormation::Stack") + events[-1].resource_type.should.equal("AWS::CloudFormation::Stack") + + # testing ordering of stack events without assuming resource events will not exist + # the AWS API returns events in reverse chronological order + stack_events_to_look_for = iter([ + ("DELETE_COMPLETE", None), + ("DELETE_IN_PROGRESS", "User Initiated"), + ("UPDATE_COMPLETE", None), + ("UPDATE_IN_PROGRESS", "User Initiated"), + ("CREATE_COMPLETE", None), + ("CREATE_IN_PROGRESS", "User Initiated"), + ]) + try: + for event in events: + event.stack_id.should.equal(stack_id) + event.stack_name.should.equal("test_stack") + event.event_id.should.match(r"[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}") + + if event.resource_type == "AWS::CloudFormation::Stack": + event.logical_resource_id.should.equal("test_stack") + event.physical_resource_id.should.equal(stack_id) + + status_to_look_for, reason_to_look_for = next( + stack_events_to_look_for) + event.resource_status.should.equal(status_to_look_for) + if reason_to_look_for is not None: + event.resource_status_reason.should.equal( + reason_to_look_for) + except StopIteration: + assert False, "Too many stack events" + + list(stack_events_to_look_for).should.be.empty + + +@mock_cloudformation_deprecated +def test_create_stack_lambda_and_dynamodb(): + conn = 
boto.connect_cloudformation() + dummy_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack Lambda Test 1", + "Parameters": { + }, + "Resources": { + "func1": { + "Type" : "AWS::Lambda::Function", + "Properties" : { + "Code": { + "S3Bucket": "bucket_123", + "S3Key": "key_123" + }, + "FunctionName": "func1", + "Handler": "handler.handler", + "Role": "role1", + "Runtime": "python2.7", + "Description": "descr", + "MemorySize": 12345, + } + }, + "func1version": { + "Type": "AWS::Lambda::LambdaVersion", + "Properties" : { + "Version": "v1.2.3" + } + }, + "tab1": { + "Type" : "AWS::DynamoDB::Table", + "Properties" : { + "TableName": "tab1", + "KeySchema": [{ + "AttributeName": "attr1", + "KeyType": "HASH" + }], + "AttributeDefinitions": [{ + "AttributeName": "attr1", + "AttributeType": "string" + }], + "ProvisionedThroughput": { + "ReadCapacityUnits": 10, + "WriteCapacityUnits": 10 + } + } + }, + "func1mapping": { + "Type": "AWS::Lambda::EventSourceMapping", + "Properties" : { + "FunctionName": "v1.2.3", + "EventSourceArn": "arn:aws:dynamodb:region:XXXXXX:table/tab1/stream/2000T00:00:00.000", + "StartingPosition": "0", + "BatchSize": 100, + "Enabled": True + } + } + }, + } + validate_s3_before = os.environ.get('VALIDATE_LAMBDA_S3', '') + try: + os.environ['VALIDATE_LAMBDA_S3'] = 'false' + conn.create_stack( + "test_stack_lambda_1", + template_body=json.dumps(dummy_template), + parameters={}.items() + ) + finally: + os.environ['VALIDATE_LAMBDA_S3'] = validate_s3_before + + stack = conn.describe_stacks()[0] + resources = stack.list_resources() + assert len(resources) == 4 + + +@mock_cloudformation_deprecated +def test_create_stack_kinesis(): + conn = boto.connect_cloudformation() + dummy_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack Kinesis Test 1", + "Parameters": {}, + "Resources": { + "stream1": { + "Type" : "AWS::Kinesis::Stream", + "Properties" : { + "Name": "stream1", + "ShardCount": 2 + } + } + } + } + 
conn.create_stack( + "test_stack_kinesis_1", + template_body=json.dumps(dummy_template), + parameters={}.items() + ) + + stack = conn.describe_stacks()[0] + resources = stack.list_resources() + assert len(resources) == 1 diff --git a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py index 152b359e3..d05bc1b53 100644 --- a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py +++ b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py @@ -1,795 +1,1286 @@ -from __future__ import unicode_literals - -import json -from collections import OrderedDict - -import boto3 -from botocore.exceptions import ClientError -import sure # noqa -# Ensure 'assert_raises' context manager support for Python 2.6 -from nose.tools import assert_raises - -from moto import mock_cloudformation, mock_s3, mock_sqs, mock_ec2 - -dummy_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Description": "Stack 1", - "Resources": { - "EC2Instance1": { - "Type": "AWS::EC2::Instance", - "Properties": { - "ImageId": "ami-d3adb33f", - "KeyName": "dummy", - "InstanceType": "t2.micro", - "Tags": [ - { - "Key": "Description", - "Value": "Test tag" - }, - { - "Key": "Name", - "Value": "Name tag for tests" - } - ] - } - } - } -} - -dummy_template_yaml = """--- -AWSTemplateFormatVersion: 2010-09-09 -Description: Stack1 with yaml template -Resources: - EC2Instance1: - Type: AWS::EC2::Instance - Properties: - ImageId: ami-d3adb33f - KeyName: dummy - InstanceType: t2.micro - Tags: - - Key: Description - Value: Test tag - - Key: Name - Value: Name tag for tests -""" - -dummy_template_yaml_with_short_form_func = """--- -AWSTemplateFormatVersion: 2010-09-09 -Description: Stack1 with yaml template -Resources: - EC2Instance1: - Type: AWS::EC2::Instance - Properties: - ImageId: ami-d3adb33f - KeyName: !Join [ ":", [ du, m, my ] ] - InstanceType: t2.micro - Tags: - - Key: Description - Value: Test tag - - 
Key: Name - Value: Name tag for tests -""" - -dummy_template_yaml_with_ref = """--- -AWSTemplateFormatVersion: 2010-09-09 -Description: Stack1 with yaml template -Parameters: - TagDescription: - Type: String - TagName: - Type: String - -Resources: - EC2Instance1: - Type: AWS::EC2::Instance - Properties: - ImageId: ami-d3adb33f - KeyName: dummy - InstanceType: t2.micro - Tags: - - Key: Description - Value: - Ref: TagDescription - - Key: Name - Value: !Ref TagName -""" - -dummy_update_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Parameters": { - "KeyName": { - "Description": "Name of an existing EC2 KeyPair", - "Type": "AWS::EC2::KeyPair::KeyName", - "ConstraintDescription": "must be the name of an existing EC2 KeyPair." - } - }, - "Resources": { - "Instance": { - "Type": "AWS::EC2::Instance", - "Properties": { - "ImageId": "ami-08111162" - } - } - } -} - -dummy_output_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Description": "Stack 1", - "Resources": { - "Instance": { - "Type": "AWS::EC2::Instance", - "Properties": { - "ImageId": "ami-08111162" - } - } - }, - "Outputs": { - "StackVPC": { - "Description": "The ID of the VPC", - "Value": "VPCID", - "Export": { - "Name": "My VPC ID" - } - } - } -} - -dummy_import_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "Queue": { - "Type": "AWS::SQS::Queue", - "Properties": { - "QueueName": {"Fn::ImportValue": 'My VPC ID'}, - "VisibilityTimeout": 60, - } - } - } -} - -dummy_redrive_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "MainQueue": { - "Type": "AWS::SQS::Queue", - "Properties": { - "QueueName": "mainqueue.fifo", - "FifoQueue": True, - "ContentBasedDeduplication": False, - "RedrivePolicy": { - "deadLetterTargetArn": { - "Fn::GetAtt": [ - "DeadLetterQueue", - "Arn" - ] - }, - "maxReceiveCount": 5 - } - } - }, - "DeadLetterQueue": { - "Type": "AWS::SQS::Queue", - "Properties": { - "FifoQueue": True - } - }, - } -} - 
-dummy_template_json = json.dumps(dummy_template) -dummy_update_template_json = json.dumps(dummy_update_template) -dummy_output_template_json = json.dumps(dummy_output_template) -dummy_import_template_json = json.dumps(dummy_import_template) -dummy_redrive_template_json = json.dumps(dummy_redrive_template) - - - -@mock_cloudformation -def test_boto3_create_stack(): - cf_conn = boto3.client('cloudformation', region_name='us-east-1') - cf_conn.create_stack( - StackName="test_stack", - TemplateBody=dummy_template_json, - ) - - cf_conn.get_template(StackName="test_stack")['TemplateBody'].should.equal( - json.loads(dummy_template_json, object_pairs_hook=OrderedDict)) - - -@mock_cloudformation -def test_boto3_create_stack_with_yaml(): - cf_conn = boto3.client('cloudformation', region_name='us-east-1') - cf_conn.create_stack( - StackName="test_stack", - TemplateBody=dummy_template_yaml, - ) - - cf_conn.get_template(StackName="test_stack")['TemplateBody'].should.equal( - dummy_template_yaml) - - -@mock_cloudformation -def test_boto3_create_stack_with_short_form_func_yaml(): - cf_conn = boto3.client('cloudformation', region_name='us-east-1') - cf_conn.create_stack( - StackName="test_stack", - TemplateBody=dummy_template_yaml_with_short_form_func, - ) - - cf_conn.get_template(StackName="test_stack")['TemplateBody'].should.equal( - dummy_template_yaml_with_short_form_func) - - -@mock_cloudformation -def test_boto3_create_stack_with_ref_yaml(): - cf_conn = boto3.client('cloudformation', region_name='us-east-1') - params = [ - {'ParameterKey': 'TagDescription', 'ParameterValue': 'desc_ref'}, - {'ParameterKey': 'TagName', 'ParameterValue': 'name_ref'}, - ] - cf_conn.create_stack( - StackName="test_stack", - TemplateBody=dummy_template_yaml_with_ref, - Parameters=params - ) - - cf_conn.get_template(StackName="test_stack")['TemplateBody'].should.equal( - dummy_template_yaml_with_ref) - - -@mock_cloudformation -def test_creating_stacks_across_regions(): - west1_cf = 
boto3.resource('cloudformation', region_name='us-west-1') - west2_cf = boto3.resource('cloudformation', region_name='us-west-2') - west1_cf.create_stack( - StackName="test_stack", - TemplateBody=dummy_template_json, - ) - west2_cf.create_stack( - StackName="test_stack", - TemplateBody=dummy_template_json, - ) - - list(west1_cf.stacks.all()).should.have.length_of(1) - list(west2_cf.stacks.all()).should.have.length_of(1) - - -@mock_cloudformation -def test_create_stack_with_notification_arn(): - cf = boto3.resource('cloudformation', region_name='us-east-1') - cf.create_stack( - StackName="test_stack_with_notifications", - TemplateBody=dummy_template_json, - NotificationARNs=['arn:aws:sns:us-east-1:123456789012:fake-queue'], - ) - - stack = list(cf.stacks.all())[0] - stack.notification_arns.should.contain( - 'arn:aws:sns:us-east-1:123456789012:fake-queue') - - -@mock_cloudformation -def test_create_stack_with_role_arn(): - cf = boto3.resource('cloudformation', region_name='us-east-1') - cf.create_stack( - StackName="test_stack_with_notifications", - TemplateBody=dummy_template_json, - RoleARN='arn:aws:iam::123456789012:role/moto', - ) - stack = list(cf.stacks.all())[0] - stack.role_arn.should.equal('arn:aws:iam::123456789012:role/moto') - - -@mock_cloudformation -@mock_s3 -def test_create_stack_from_s3_url(): - s3 = boto3.client('s3') - s3_conn = boto3.resource('s3') - bucket = s3_conn.create_bucket(Bucket="foobar") - - key = s3_conn.Object( - 'foobar', 'template-key').put(Body=dummy_template_json) - key_url = s3.generate_presigned_url( - ClientMethod='get_object', - Params={ - 'Bucket': 'foobar', - 'Key': 'template-key' - } - ) - - cf_conn = boto3.client('cloudformation', region_name='us-west-1') - cf_conn.create_stack( - StackName='stack_from_url', - TemplateURL=key_url, - ) - cf_conn.get_template(StackName="stack_from_url")['TemplateBody'].should.equal( - json.loads(dummy_template_json, object_pairs_hook=OrderedDict)) - - -@mock_cloudformation -def 
test_update_stack_with_previous_value(): - name = 'update_stack_with_previous_value' - cf_conn = boto3.client('cloudformation', region_name='us-east-1') - cf_conn.create_stack( - StackName=name, TemplateBody=dummy_template_yaml_with_ref, - Parameters=[ - {'ParameterKey': 'TagName', 'ParameterValue': 'foo'}, - {'ParameterKey': 'TagDescription', 'ParameterValue': 'bar'}, - ] - ) - cf_conn.update_stack( - StackName=name, UsePreviousTemplate=True, - Parameters=[ - {'ParameterKey': 'TagName', 'UsePreviousValue': True}, - {'ParameterKey': 'TagDescription', 'ParameterValue': 'not bar'}, - ] - ) - stack = cf_conn.describe_stacks(StackName=name)['Stacks'][0] - tag_name = [x['ParameterValue'] for x in stack['Parameters'] - if x['ParameterKey'] == 'TagName'][0] - tag_desc = [x['ParameterValue'] for x in stack['Parameters'] - if x['ParameterKey'] == 'TagDescription'][0] - assert tag_name == 'foo' - assert tag_desc == 'not bar' - - -@mock_cloudformation -@mock_s3 -@mock_ec2 -def test_update_stack_from_s3_url(): - s3 = boto3.client('s3') - s3_conn = boto3.resource('s3') - - cf_conn = boto3.client('cloudformation', region_name='us-east-1') - cf_conn.create_stack( - StackName="update_stack_from_url", - TemplateBody=dummy_template_json, - Tags=[{'Key': 'foo', 'Value': 'bar'}], - ) - - s3_conn.create_bucket(Bucket="foobar") - - s3_conn.Object( - 'foobar', 'template-key').put(Body=dummy_update_template_json) - key_url = s3.generate_presigned_url( - ClientMethod='get_object', - Params={ - 'Bucket': 'foobar', - 'Key': 'template-key' - } - ) - - cf_conn.update_stack( - StackName="update_stack_from_url", - TemplateURL=key_url, - ) - - cf_conn.get_template(StackName="update_stack_from_url")[ 'TemplateBody'].should.equal( - json.loads(dummy_update_template_json, object_pairs_hook=OrderedDict)) - - -@mock_cloudformation -@mock_s3 -def test_create_change_set_from_s3_url(): - s3 = boto3.client('s3') - s3_conn = boto3.resource('s3') - bucket = s3_conn.create_bucket(Bucket="foobar") - - key = 
s3_conn.Object( - 'foobar', 'template-key').put(Body=dummy_template_json) - key_url = s3.generate_presigned_url( - ClientMethod='get_object', - Params={ - 'Bucket': 'foobar', - 'Key': 'template-key' - } - ) - cf_conn = boto3.client('cloudformation', region_name='us-west-1') - response = cf_conn.create_change_set( - StackName='NewStack', - TemplateURL=key_url, - ChangeSetName='NewChangeSet', - ChangeSetType='CREATE', - ) - assert 'arn:aws:cloudformation:us-west-1:123456789:changeSet/NewChangeSet/' in response['Id'] - assert 'arn:aws:cloudformation:us-east-1:123456789:stack/NewStack' in response['StackId'] - - -@mock_cloudformation -def test_execute_change_set_w_arn(): - cf_conn = boto3.client('cloudformation', region_name='us-east-1') - change_set = cf_conn.create_change_set( - StackName='NewStack', - TemplateBody=dummy_template_json, - ChangeSetName='NewChangeSet', - ChangeSetType='CREATE', - ) - cf_conn.execute_change_set(ChangeSetName=change_set['Id']) - - -@mock_cloudformation -def test_execute_change_set_w_name(): - cf_conn = boto3.client('cloudformation', region_name='us-east-1') - change_set = cf_conn.create_change_set( - StackName='NewStack', - TemplateBody=dummy_template_json, - ChangeSetName='NewChangeSet', - ChangeSetType='CREATE', - ) - cf_conn.execute_change_set(ChangeSetName='NewStack', StackName='NewStack') - - -@mock_cloudformation -def test_describe_stack_pagination(): - conn = boto3.client('cloudformation', region_name='us-east-1') - for i in range(100): - conn.create_stack( - StackName="test_stack", - TemplateBody=dummy_template_json, - ) - - resp = conn.describe_stacks() - stacks = resp['Stacks'] - stacks.should.have.length_of(50) - next_token = resp['NextToken'] - next_token.should_not.be.none - resp2 = conn.describe_stacks(NextToken=next_token) - stacks.extend(resp2['Stacks']) - stacks.should.have.length_of(100) - assert 'NextToken' not in resp2.keys() - - -@mock_cloudformation -def test_describe_stack_resources(): - cf_conn = 
boto3.client('cloudformation', region_name='us-east-1') - cf_conn.create_stack( - StackName="test_stack", - TemplateBody=dummy_template_json, - ) - - stack = cf_conn.describe_stacks(StackName="test_stack")['Stacks'][0] - - response = cf_conn.describe_stack_resources(StackName=stack['StackName']) - resource = response['StackResources'][0] - resource['LogicalResourceId'].should.equal('EC2Instance1') - resource['ResourceStatus'].should.equal('CREATE_COMPLETE') - resource['ResourceType'].should.equal('AWS::EC2::Instance') - resource['StackId'].should.equal(stack['StackId']) - - -@mock_cloudformation -def test_describe_stack_by_name(): - cf_conn = boto3.client('cloudformation', region_name='us-east-1') - cf_conn.create_stack( - StackName="test_stack", - TemplateBody=dummy_template_json, - ) - - stack = cf_conn.describe_stacks(StackName="test_stack")['Stacks'][0] - stack['StackName'].should.equal('test_stack') - - -@mock_cloudformation -def test_describe_stack_by_stack_id(): - cf_conn = boto3.client('cloudformation', region_name='us-east-1') - cf_conn.create_stack( - StackName="test_stack", - TemplateBody=dummy_template_json, - ) - - stack = cf_conn.describe_stacks(StackName="test_stack")['Stacks'][0] - stack_by_id = cf_conn.describe_stacks(StackName=stack['StackId'])['Stacks'][ - 0] - - stack_by_id['StackId'].should.equal(stack['StackId']) - stack_by_id['StackName'].should.equal("test_stack") - - -@mock_cloudformation -def test_list_stacks(): - cf = boto3.resource('cloudformation', region_name='us-east-1') - cf.create_stack( - StackName="test_stack", - TemplateBody=dummy_template_json, - ) - cf.create_stack( - StackName="test_stack2", - TemplateBody=dummy_template_json, - ) - - stacks = list(cf.stacks.all()) - stacks.should.have.length_of(2) - stack_names = [stack.stack_name for stack in stacks] - stack_names.should.contain("test_stack") - stack_names.should.contain("test_stack2") - - -@mock_cloudformation -def test_delete_stack_from_resource(): - cf = 
boto3.resource('cloudformation', region_name='us-east-1') - stack = cf.create_stack( - StackName="test_stack", - TemplateBody=dummy_template_json, - ) - - list(cf.stacks.all()).should.have.length_of(1) - stack.delete() - list(cf.stacks.all()).should.have.length_of(0) - - -@mock_cloudformation -@mock_ec2 -def test_delete_stack_by_name(): - cf_conn = boto3.client('cloudformation', region_name='us-east-1') - cf_conn.create_stack( - StackName="test_stack", - TemplateBody=dummy_template_json, - ) - - cf_conn.describe_stacks()['Stacks'].should.have.length_of(1) - cf_conn.delete_stack(StackName="test_stack") - cf_conn.describe_stacks()['Stacks'].should.have.length_of(0) - - -@mock_cloudformation -def test_describe_deleted_stack(): - cf_conn = boto3.client('cloudformation', region_name='us-east-1') - cf_conn.create_stack( - StackName="test_stack", - TemplateBody=dummy_template_json, - ) - - stack = cf_conn.describe_stacks(StackName="test_stack")['Stacks'][0] - stack_id = stack['StackId'] - cf_conn.delete_stack(StackName=stack['StackId']) - stack_by_id = cf_conn.describe_stacks(StackName=stack_id)['Stacks'][0] - stack_by_id['StackId'].should.equal(stack['StackId']) - stack_by_id['StackName'].should.equal("test_stack") - stack_by_id['StackStatus'].should.equal("DELETE_COMPLETE") - - -@mock_cloudformation -@mock_ec2 -def test_describe_updated_stack(): - cf_conn = boto3.client('cloudformation', region_name='us-east-1') - cf_conn.create_stack( - StackName="test_stack", - TemplateBody=dummy_template_json, - Tags=[{'Key': 'foo', 'Value': 'bar'}], - ) - - cf_conn.update_stack( - StackName="test_stack", - RoleARN='arn:aws:iam::123456789012:role/moto', - TemplateBody=dummy_update_template_json, - Tags=[{'Key': 'foo', 'Value': 'baz'}], - ) - - stack = cf_conn.describe_stacks(StackName="test_stack")['Stacks'][0] - stack_id = stack['StackId'] - stack_by_id = cf_conn.describe_stacks(StackName=stack_id)['Stacks'][0] - stack_by_id['StackId'].should.equal(stack['StackId']) - 
stack_by_id['StackName'].should.equal("test_stack") - stack_by_id['StackStatus'].should.equal("UPDATE_COMPLETE") - stack_by_id['RoleARN'].should.equal('arn:aws:iam::123456789012:role/moto') - stack_by_id['Tags'].should.equal([{'Key': 'foo', 'Value': 'baz'}]) - - -@mock_cloudformation -def test_bad_describe_stack(): - cf_conn = boto3.client('cloudformation', region_name='us-east-1') - with assert_raises(ClientError): - cf_conn.describe_stacks(StackName="non_existent_stack") - - -@mock_cloudformation() -def test_cloudformation_params(): - dummy_template_with_params = { - "AWSTemplateFormatVersion": "2010-09-09", - "Description": "Stack 1", - "Resources": {}, - "Parameters": { - "APPNAME": { - "Default": "app-name", - "Description": "The name of the app", - "Type": "String" - } - } - } - dummy_template_with_params_json = json.dumps(dummy_template_with_params) - - cf = boto3.resource('cloudformation', region_name='us-east-1') - stack = cf.create_stack( - StackName='test_stack', - TemplateBody=dummy_template_with_params_json, - Parameters=[{ - "ParameterKey": "APPNAME", - "ParameterValue": "testing123", - }], - ) - - stack.parameters.should.have.length_of(1) - param = stack.parameters[0] - param['ParameterKey'].should.equal('APPNAME') - param['ParameterValue'].should.equal('testing123') - - -@mock_cloudformation -def test_stack_tags(): - tags = [ - { - "Key": "foo", - "Value": "bar" - }, - { - "Key": "baz", - "Value": "bleh" - } - ] - cf = boto3.resource('cloudformation', region_name='us-east-1') - stack = cf.create_stack( - StackName="test_stack", - TemplateBody=dummy_template_json, - Tags=tags, - ) - observed_tag_items = set( - item for items in [tag.items() for tag in stack.tags] for item in items) - expected_tag_items = set( - item for items in [tag.items() for tag in tags] for item in items) - observed_tag_items.should.equal(expected_tag_items) - - -@mock_cloudformation -@mock_ec2 -def test_stack_events(): - cf = boto3.resource('cloudformation', 
region_name='us-east-1') - stack = cf.create_stack( - StackName="test_stack", - TemplateBody=dummy_template_json, - ) - stack.update(TemplateBody=dummy_update_template_json) - stack = cf.Stack(stack.stack_id) - stack.delete() - - # assert begins and ends with stack events - events = list(stack.events.all()) - events[0].resource_type.should.equal("AWS::CloudFormation::Stack") - events[-1].resource_type.should.equal("AWS::CloudFormation::Stack") - - # testing ordering of stack events without assuming resource events will not exist - # the AWS API returns events in reverse chronological order - stack_events_to_look_for = iter([ - ("DELETE_COMPLETE", None), - ("DELETE_IN_PROGRESS", "User Initiated"), - ("UPDATE_COMPLETE", None), - ("UPDATE_IN_PROGRESS", "User Initiated"), - ("CREATE_COMPLETE", None), - ("CREATE_IN_PROGRESS", "User Initiated"), - ]) - try: - for event in events: - event.stack_id.should.equal(stack.stack_id) - event.stack_name.should.equal("test_stack") - event.event_id.should.match(r"[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}") - - if event.resource_type == "AWS::CloudFormation::Stack": - event.logical_resource_id.should.equal("test_stack") - event.physical_resource_id.should.equal(stack.stack_id) - - status_to_look_for, reason_to_look_for = next( - stack_events_to_look_for) - event.resource_status.should.equal(status_to_look_for) - if reason_to_look_for is not None: - event.resource_status_reason.should.equal( - reason_to_look_for) - except StopIteration: - assert False, "Too many stack events" - - list(stack_events_to_look_for).should.be.empty - - -@mock_cloudformation -def test_list_exports(): - cf_client = boto3.client('cloudformation', region_name='us-east-1') - cf_resource = boto3.resource('cloudformation', region_name='us-east-1') - stack = cf_resource.create_stack( - StackName="test_stack", - TemplateBody=dummy_output_template_json, - ) - output_value = 'VPCID' - exports = cf_client.list_exports()['Exports'] - - 
stack.outputs.should.have.length_of(1) - stack.outputs[0]['OutputValue'].should.equal(output_value) - - exports.should.have.length_of(1) - exports[0]['ExportingStackId'].should.equal(stack.stack_id) - exports[0]['Name'].should.equal('My VPC ID') - exports[0]['Value'].should.equal(output_value) - - -@mock_cloudformation -def test_list_exports_with_token(): - cf = boto3.client('cloudformation', region_name='us-east-1') - for i in range(101): - # Add index to ensure name is unique - dummy_output_template['Outputs']['StackVPC']['Export']['Name'] += str(i) - cf.create_stack( - StackName="test_stack", - TemplateBody=json.dumps(dummy_output_template), - ) - exports = cf.list_exports() - exports['Exports'].should.have.length_of(100) - exports.get('NextToken').should_not.be.none - - more_exports = cf.list_exports(NextToken=exports['NextToken']) - more_exports['Exports'].should.have.length_of(1) - more_exports.get('NextToken').should.be.none - - -@mock_cloudformation -def test_delete_stack_with_export(): - cf = boto3.client('cloudformation', region_name='us-east-1') - stack = cf.create_stack( - StackName="test_stack", - TemplateBody=dummy_output_template_json, - ) - - stack_id = stack['StackId'] - exports = cf.list_exports()['Exports'] - exports.should.have.length_of(1) - - cf.delete_stack(StackName=stack_id) - cf.list_exports()['Exports'].should.have.length_of(0) - - -@mock_cloudformation -def test_export_names_must_be_unique(): - cf = boto3.resource('cloudformation', region_name='us-east-1') - first_stack = cf.create_stack( - StackName="test_stack", - TemplateBody=dummy_output_template_json, - ) - with assert_raises(ClientError): - cf.create_stack( - StackName="test_stack", - TemplateBody=dummy_output_template_json, - ) - - -@mock_sqs -@mock_cloudformation -def test_stack_with_imports(): - cf = boto3.resource('cloudformation', region_name='us-east-1') - ec2_resource = boto3.resource('sqs', region_name='us-east-1') - - output_stack = cf.create_stack( - 
StackName="test_stack1", - TemplateBody=dummy_output_template_json, - ) - import_stack = cf.create_stack( - StackName="test_stack2", - TemplateBody=dummy_import_template_json - ) - - output_stack.outputs.should.have.length_of(1) - output = output_stack.outputs[0]['OutputValue'] - queue = ec2_resource.get_queue_by_name(QueueName=output) - queue.should_not.be.none - - -@mock_sqs -@mock_cloudformation -def test_non_json_redrive_policy(): - cf = boto3.resource('cloudformation', region_name='us-east-1') - - stack = cf.create_stack( - StackName="test_stack1", - TemplateBody=dummy_redrive_template_json - ) - - stack.Resource('MainQueue').resource_status\ - .should.equal("CREATE_COMPLETE") - stack.Resource('DeadLetterQueue').resource_status\ - .should.equal("CREATE_COMPLETE") +from __future__ import unicode_literals + +import json +from collections import OrderedDict + +import boto3 +from botocore.exceptions import ClientError +import sure # noqa +# Ensure 'assert_raises' context manager support for Python 2.6 +from nose.tools import assert_raises + +from moto import mock_cloudformation, mock_s3, mock_sqs, mock_ec2 + +dummy_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack 1", + "Resources": { + "EC2Instance1": { + "Type": "AWS::EC2::Instance", + "Properties": { + "ImageId": "ami-d3adb33f", + "KeyName": "dummy", + "InstanceType": "t2.micro", + "Tags": [ + { + "Key": "Description", + "Value": "Test tag" + }, + { + "Key": "Name", + "Value": "Name tag for tests" + } + ] + } + } + } +} + +dummy_template_yaml = """--- +AWSTemplateFormatVersion: 2010-09-09 +Description: Stack1 with yaml template +Resources: + EC2Instance1: + Type: AWS::EC2::Instance + Properties: + ImageId: ami-d3adb33f + KeyName: dummy + InstanceType: t2.micro + Tags: + - Key: Description + Value: Test tag + - Key: Name + Value: Name tag for tests +""" + +dummy_template_yaml_with_short_form_func = """--- +AWSTemplateFormatVersion: 2010-09-09 +Description: Stack1 with yaml template 
+Resources: + EC2Instance1: + Type: AWS::EC2::Instance + Properties: + ImageId: ami-d3adb33f + KeyName: !Join [ ":", [ du, m, my ] ] + InstanceType: t2.micro + Tags: + - Key: Description + Value: Test tag + - Key: Name + Value: Name tag for tests +""" + +dummy_template_yaml_with_ref = """--- +AWSTemplateFormatVersion: 2010-09-09 +Description: Stack1 with yaml template +Parameters: + TagDescription: + Type: String + TagName: + Type: String + +Resources: + EC2Instance1: + Type: AWS::EC2::Instance + Properties: + ImageId: ami-d3adb33f + KeyName: dummy + InstanceType: t2.micro + Tags: + - Key: Description + Value: + Ref: TagDescription + - Key: Name + Value: !Ref TagName +""" + +dummy_update_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Parameters": { + "KeyName": { + "Description": "Name of an existing EC2 KeyPair", + "Type": "AWS::EC2::KeyPair::KeyName", + "ConstraintDescription": "must be the name of an existing EC2 KeyPair." + } + }, + "Resources": { + "Instance": { + "Type": "AWS::EC2::Instance", + "Properties": { + "ImageId": "ami-08111162" + } + } + } +} + +dummy_output_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack 1", + "Resources": { + "Instance": { + "Type": "AWS::EC2::Instance", + "Properties": { + "ImageId": "ami-08111162" + } + } + }, + "Outputs": { + "StackVPC": { + "Description": "The ID of the VPC", + "Value": "VPCID", + "Export": { + "Name": "My VPC ID" + } + } + } +} + +dummy_import_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "Queue": { + "Type": "AWS::SQS::Queue", + "Properties": { + "QueueName": {"Fn::ImportValue": 'My VPC ID'}, + "VisibilityTimeout": 60, + } + } + } +} + +dummy_redrive_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "MainQueue": { + "Type": "AWS::SQS::Queue", + "Properties": { + "QueueName": "mainqueue.fifo", + "FifoQueue": True, + "ContentBasedDeduplication": False, + "RedrivePolicy": { + "deadLetterTargetArn": { + 
"Fn::GetAtt": [ + "DeadLetterQueue", + "Arn" + ] + }, + "maxReceiveCount": 5 + } + } + }, + "DeadLetterQueue": { + "Type": "AWS::SQS::Queue", + "Properties": { + "FifoQueue": True + } + }, + } +} + +dummy_template_json = json.dumps(dummy_template) +dummy_update_template_json = json.dumps(dummy_update_template) +dummy_output_template_json = json.dumps(dummy_output_template) +dummy_import_template_json = json.dumps(dummy_import_template) +dummy_redrive_template_json = json.dumps(dummy_redrive_template) + + +@mock_cloudformation +def test_boto3_describe_stack_instances(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-2'], + ) + usw2_instance = cf_conn.describe_stack_instance( + StackSetName="test_stack_set", + StackInstanceAccount='123456789012', + StackInstanceRegion='us-west-2', + ) + use1_instance = cf_conn.describe_stack_instance( + StackSetName="test_stack_set", + StackInstanceAccount='123456789012', + StackInstanceRegion='us-east-1', + ) + + usw2_instance['StackInstance'].should.have.key('Region').which.should.equal('us-west-2') + usw2_instance['StackInstance'].should.have.key('Account').which.should.equal('123456789012') + use1_instance['StackInstance'].should.have.key('Region').which.should.equal('us-east-1') + use1_instance['StackInstance'].should.have.key('Account').which.should.equal('123456789012') + + +@mock_cloudformation +def test_boto3_list_stacksets_length(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.create_stack_set( + StackSetName="test_stack_set2", + TemplateBody=dummy_template_yaml, + ) + stacksets = cf_conn.list_stack_sets() + stacksets.should.have.length_of(2) + + 
+@mock_cloudformation +def test_boto3_list_stacksets_contents(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + stacksets = cf_conn.list_stack_sets() + stacksets['Summaries'][0].should.have.key('StackSetName').which.should.equal('test_stack_set') + stacksets['Summaries'][0].should.have.key('Status').which.should.equal('ACTIVE') + + +@mock_cloudformation +def test_boto3_stop_stack_set_operation(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-1', 'us-west-2'], + ) + operation_id = cf_conn.list_stack_set_operations( + StackSetName="test_stack_set")['Summaries'][-1]['OperationId'] + cf_conn.stop_stack_set_operation( + StackSetName="test_stack_set", + OperationId=operation_id + ) + list_operation = cf_conn.list_stack_set_operations( + StackSetName="test_stack_set" + ) + list_operation['Summaries'][-1]['Status'].should.equal('STOPPED') + + +@mock_cloudformation +def test_boto3_describe_stack_set_operation(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-1', 'us-west-2'], + ) + operation_id = cf_conn.list_stack_set_operations( + StackSetName="test_stack_set")['Summaries'][-1]['OperationId'] + cf_conn.stop_stack_set_operation( + StackSetName="test_stack_set", + OperationId=operation_id + ) + response = cf_conn.describe_stack_set_operation( + StackSetName="test_stack_set", + OperationId=operation_id, + ) + + 
response['StackSetOperation']['Status'].should.equal('STOPPED') + response['StackSetOperation']['Action'].should.equal('CREATE') + + +@mock_cloudformation +def test_boto3_list_stack_set_operation_results(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-1', 'us-west-2'], + ) + operation_id = cf_conn.list_stack_set_operations( + StackSetName="test_stack_set")['Summaries'][-1]['OperationId'] + + cf_conn.stop_stack_set_operation( + StackSetName="test_stack_set", + OperationId=operation_id + ) + response = cf_conn.list_stack_set_operation_results( + StackSetName="test_stack_set", + OperationId=operation_id, + ) + + response['Summaries'].should.have.length_of(3) + response['Summaries'][0].should.have.key('Account').which.should.equal('123456789012') + response['Summaries'][1].should.have.key('Status').which.should.equal('STOPPED') + + +@mock_cloudformation +def test_boto3_update_stack_instances(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + param = [ + {'ParameterKey': 'SomeParam', 'ParameterValue': 'StackSetValue'}, + {'ParameterKey': 'AnotherParam', 'ParameterValue': 'StackSetValue2'}, + ] + param_overrides = [ + {'ParameterKey': 'SomeParam', 'ParameterValue': 'OverrideValue'}, + {'ParameterKey': 'AnotherParam', 'ParameterValue': 'OverrideValue2'} + ] + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_yaml_with_ref, + Parameters=param, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-1', 'us-west-2'], + ) + cf_conn.update_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-west-1', 'us-west-2'], + ParameterOverrides=param_overrides, 
+ ) + usw2_instance = cf_conn.describe_stack_instance( + StackSetName="test_stack_set", + StackInstanceAccount='123456789012', + StackInstanceRegion='us-west-2', + ) + usw1_instance = cf_conn.describe_stack_instance( + StackSetName="test_stack_set", + StackInstanceAccount='123456789012', + StackInstanceRegion='us-west-1', + ) + use1_instance = cf_conn.describe_stack_instance( + StackSetName="test_stack_set", + StackInstanceAccount='123456789012', + StackInstanceRegion='us-east-1', + ) + + usw2_instance['StackInstance']['ParameterOverrides'][0]['ParameterKey'].should.equal(param_overrides[0]['ParameterKey']) + usw2_instance['StackInstance']['ParameterOverrides'][0]['ParameterValue'].should.equal(param_overrides[0]['ParameterValue']) + usw2_instance['StackInstance']['ParameterOverrides'][1]['ParameterKey'].should.equal(param_overrides[1]['ParameterKey']) + usw2_instance['StackInstance']['ParameterOverrides'][1]['ParameterValue'].should.equal(param_overrides[1]['ParameterValue']) + + usw1_instance['StackInstance']['ParameterOverrides'][0]['ParameterKey'].should.equal(param_overrides[0]['ParameterKey']) + usw1_instance['StackInstance']['ParameterOverrides'][0]['ParameterValue'].should.equal(param_overrides[0]['ParameterValue']) + usw1_instance['StackInstance']['ParameterOverrides'][1]['ParameterKey'].should.equal(param_overrides[1]['ParameterKey']) + usw1_instance['StackInstance']['ParameterOverrides'][1]['ParameterValue'].should.equal(param_overrides[1]['ParameterValue']) + + use1_instance['StackInstance']['ParameterOverrides'].should.be.empty + + +@mock_cloudformation +def test_boto3_delete_stack_instances(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-2'], + ) + + cf_conn.delete_stack_instances( + 
StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1'], + RetainStacks=False, + ) + + cf_conn.list_stack_instances(StackSetName="test_stack_set")['Summaries'].should.have.length_of(1) + cf_conn.list_stack_instances(StackSetName="test_stack_set")['Summaries'][0]['Region'].should.equal( + 'us-west-2') + + +@mock_cloudformation +def test_boto3_create_stack_instances(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-2'], + ) + + cf_conn.list_stack_instances(StackSetName="test_stack_set")['Summaries'].should.have.length_of(2) + cf_conn.list_stack_instances(StackSetName="test_stack_set")['Summaries'][0]['Account'].should.equal( + '123456789012') + + +@mock_cloudformation +def test_boto3_create_stack_instances_with_param_overrides(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + param = [ + {'ParameterKey': 'TagDescription', 'ParameterValue': 'StackSetValue'}, + {'ParameterKey': 'TagName', 'ParameterValue': 'StackSetValue2'}, + ] + param_overrides = [ + {'ParameterKey': 'TagDescription', 'ParameterValue': 'OverrideValue'}, + {'ParameterKey': 'TagName', 'ParameterValue': 'OverrideValue2'} + ] + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_yaml_with_ref, + Parameters=param, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-2'], + ParameterOverrides=param_overrides, + ) + usw2_instance = cf_conn.describe_stack_instance( + StackSetName="test_stack_set", + StackInstanceAccount='123456789012', + StackInstanceRegion='us-west-2', + ) + + usw2_instance['StackInstance']['ParameterOverrides'][0]['ParameterKey'].should.equal(param_overrides[0]['ParameterKey']) 
+ usw2_instance['StackInstance']['ParameterOverrides'][1]['ParameterKey'].should.equal(param_overrides[1]['ParameterKey']) + usw2_instance['StackInstance']['ParameterOverrides'][0]['ParameterValue'].should.equal(param_overrides[0]['ParameterValue']) + usw2_instance['StackInstance']['ParameterOverrides'][1]['ParameterValue'].should.equal(param_overrides[1]['ParameterValue']) + + +@mock_cloudformation +def test_update_stack_set(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + param = [ + {'ParameterKey': 'TagDescription', 'ParameterValue': 'StackSetValue'}, + {'ParameterKey': 'TagName', 'ParameterValue': 'StackSetValue2'}, + ] + param_overrides = [ + {'ParameterKey': 'TagDescription', 'ParameterValue': 'OverrideValue'}, + {'ParameterKey': 'TagName', 'ParameterValue': 'OverrideValue2'} + ] + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_yaml_with_ref, + Parameters=param, + ) + cf_conn.update_stack_set( + StackSetName='test_stack_set', + TemplateBody=dummy_template_yaml_with_ref, + Parameters=param_overrides, + ) + stackset = cf_conn.describe_stack_set(StackSetName='test_stack_set') + + stackset['StackSet']['Parameters'][0]['ParameterValue'].should.equal(param_overrides[0]['ParameterValue']) + stackset['StackSet']['Parameters'][1]['ParameterValue'].should.equal(param_overrides[1]['ParameterValue']) + stackset['StackSet']['Parameters'][0]['ParameterKey'].should.equal(param_overrides[0]['ParameterKey']) + stackset['StackSet']['Parameters'][1]['ParameterKey'].should.equal(param_overrides[1]['ParameterKey']) + + +@mock_cloudformation +def test_boto3_list_stack_set_operations(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-2'], + ) + 
cf_conn.update_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-2'], + ) + + list_operation = cf_conn.list_stack_set_operations(StackSetName="test_stack_set") + list_operation['Summaries'].should.have.length_of(2) + list_operation['Summaries'][-1]['Action'].should.equal('UPDATE') + + +@mock_cloudformation +def test_boto3_delete_stack_set(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.delete_stack_set(StackSetName='test_stack_set') + + cf_conn.describe_stack_set(StackSetName="test_stack_set")['StackSet']['Status'].should.equal( + 'DELETED') + + +@mock_cloudformation +def test_boto3_create_stack_set(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + + cf_conn.describe_stack_set(StackSetName="test_stack_set")['StackSet']['TemplateBody'].should.equal( + dummy_template_json) + + +@mock_cloudformation +def test_boto3_create_stack_set_with_yaml(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_yaml, + ) + + cf_conn.describe_stack_set(StackSetName="test_stack_set")['StackSet']['TemplateBody'].should.equal( + dummy_template_yaml) + + +@mock_cloudformation +@mock_s3 +def test_create_stack_set_from_s3_url(): + s3 = boto3.client('s3') + s3_conn = boto3.resource('s3') + bucket = s3_conn.create_bucket(Bucket="foobar") + + key = s3_conn.Object( + 'foobar', 'template-key').put(Body=dummy_template_json) + key_url = s3.generate_presigned_url( + ClientMethod='get_object', + Params={ + 'Bucket': 'foobar', + 'Key': 'template-key' + } + ) + + cf_conn = boto3.client('cloudformation', region_name='us-west-1') + cf_conn.create_stack_set( + 
StackSetName='stack_from_url', + TemplateURL=key_url, + ) + cf_conn.describe_stack_set(StackSetName="stack_from_url")['StackSet']['TemplateBody'].should.equal( + dummy_template_json) + + +@mock_cloudformation +def test_boto3_create_stack_set_with_ref_yaml(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + params = [ + {'ParameterKey': 'TagDescription', 'ParameterValue': 'desc_ref'}, + {'ParameterKey': 'TagName', 'ParameterValue': 'name_ref'}, + ] + cf_conn.create_stack_set( + StackSetName="test_stack", + TemplateBody=dummy_template_yaml_with_ref, + Parameters=params + ) + + cf_conn.describe_stack_set(StackSetName="test_stack")['StackSet']['TemplateBody'].should.equal( + dummy_template_yaml_with_ref) + + +@mock_cloudformation +def test_boto3_describe_stack_set_params(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + params = [ + {'ParameterKey': 'TagDescription', 'ParameterValue': 'desc_ref'}, + {'ParameterKey': 'TagName', 'ParameterValue': 'name_ref'}, + ] + cf_conn.create_stack_set( + StackSetName="test_stack", + TemplateBody=dummy_template_yaml_with_ref, + Parameters=params + ) + + cf_conn.describe_stack_set(StackSetName="test_stack")['StackSet']['Parameters'].should.equal( + params) + + +@mock_cloudformation +def test_boto3_create_stack(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_json, + ) + + cf_conn.get_template(StackName="test_stack")['TemplateBody'].should.equal( + json.loads(dummy_template_json, object_pairs_hook=OrderedDict)) + + +@mock_cloudformation +def test_boto3_create_stack_with_yaml(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_yaml, + ) + + cf_conn.get_template(StackName="test_stack")['TemplateBody'].should.equal( + dummy_template_yaml) + + +@mock_cloudformation +def 
test_boto3_create_stack_with_short_form_func_yaml(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_yaml_with_short_form_func, + ) + + cf_conn.get_template(StackName="test_stack")['TemplateBody'].should.equal( + dummy_template_yaml_with_short_form_func) + + +@mock_cloudformation +def test_boto3_create_stack_with_ref_yaml(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + params = [ + {'ParameterKey': 'TagDescription', 'ParameterValue': 'desc_ref'}, + {'ParameterKey': 'TagName', 'ParameterValue': 'name_ref'}, + ] + cf_conn.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_yaml_with_ref, + Parameters=params + ) + + cf_conn.get_template(StackName="test_stack")['TemplateBody'].should.equal( + dummy_template_yaml_with_ref) + + +@mock_cloudformation +def test_creating_stacks_across_regions(): + west1_cf = boto3.resource('cloudformation', region_name='us-west-1') + west2_cf = boto3.resource('cloudformation', region_name='us-west-2') + west1_cf.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_json, + ) + west2_cf.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_json, + ) + + list(west1_cf.stacks.all()).should.have.length_of(1) + list(west2_cf.stacks.all()).should.have.length_of(1) + + +@mock_cloudformation +def test_create_stack_with_notification_arn(): + cf = boto3.resource('cloudformation', region_name='us-east-1') + cf.create_stack( + StackName="test_stack_with_notifications", + TemplateBody=dummy_template_json, + NotificationARNs=['arn:aws:sns:us-east-1:123456789012:fake-queue'], + ) + + stack = list(cf.stacks.all())[0] + stack.notification_arns.should.contain( + 'arn:aws:sns:us-east-1:123456789012:fake-queue') + + +@mock_cloudformation +def test_create_stack_with_role_arn(): + cf = boto3.resource('cloudformation', region_name='us-east-1') + cf.create_stack( + 
StackName="test_stack_with_notifications", + TemplateBody=dummy_template_json, + RoleARN='arn:aws:iam::123456789012:role/moto', + ) + stack = list(cf.stacks.all())[0] + stack.role_arn.should.equal('arn:aws:iam::123456789012:role/moto') + + +@mock_cloudformation +@mock_s3 +def test_create_stack_from_s3_url(): + s3 = boto3.client('s3') + s3_conn = boto3.resource('s3') + bucket = s3_conn.create_bucket(Bucket="foobar") + + key = s3_conn.Object( + 'foobar', 'template-key').put(Body=dummy_template_json) + key_url = s3.generate_presigned_url( + ClientMethod='get_object', + Params={ + 'Bucket': 'foobar', + 'Key': 'template-key' + } + ) + + cf_conn = boto3.client('cloudformation', region_name='us-west-1') + cf_conn.create_stack( + StackName='stack_from_url', + TemplateURL=key_url, + ) + cf_conn.get_template(StackName="stack_from_url")['TemplateBody'].should.equal( + json.loads(dummy_template_json, object_pairs_hook=OrderedDict)) + + +@mock_cloudformation +def test_update_stack_with_previous_value(): + name = 'update_stack_with_previous_value' + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack( + StackName=name, TemplateBody=dummy_template_yaml_with_ref, + Parameters=[ + {'ParameterKey': 'TagName', 'ParameterValue': 'foo'}, + {'ParameterKey': 'TagDescription', 'ParameterValue': 'bar'}, + ] + ) + cf_conn.update_stack( + StackName=name, UsePreviousTemplate=True, + Parameters=[ + {'ParameterKey': 'TagName', 'UsePreviousValue': True}, + {'ParameterKey': 'TagDescription', 'ParameterValue': 'not bar'}, + ] + ) + stack = cf_conn.describe_stacks(StackName=name)['Stacks'][0] + tag_name = [x['ParameterValue'] for x in stack['Parameters'] + if x['ParameterKey'] == 'TagName'][0] + tag_desc = [x['ParameterValue'] for x in stack['Parameters'] + if x['ParameterKey'] == 'TagDescription'][0] + assert tag_name == 'foo' + assert tag_desc == 'not bar' + + +@mock_cloudformation +@mock_s3 +@mock_ec2 +def test_update_stack_from_s3_url(): + s3 = 
boto3.client('s3') + s3_conn = boto3.resource('s3') + + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack( + StackName="update_stack_from_url", + TemplateBody=dummy_template_json, + Tags=[{'Key': 'foo', 'Value': 'bar'}], + ) + + s3_conn.create_bucket(Bucket="foobar") + + s3_conn.Object( + 'foobar', 'template-key').put(Body=dummy_update_template_json) + key_url = s3.generate_presigned_url( + ClientMethod='get_object', + Params={ + 'Bucket': 'foobar', + 'Key': 'template-key' + } + ) + + cf_conn.update_stack( + StackName="update_stack_from_url", + TemplateURL=key_url, + ) + + cf_conn.get_template(StackName="update_stack_from_url")[ 'TemplateBody'].should.equal( + json.loads(dummy_update_template_json, object_pairs_hook=OrderedDict)) + + +@mock_cloudformation +@mock_s3 +def test_create_change_set_from_s3_url(): + s3 = boto3.client('s3') + s3_conn = boto3.resource('s3') + bucket = s3_conn.create_bucket(Bucket="foobar") + + key = s3_conn.Object( + 'foobar', 'template-key').put(Body=dummy_template_json) + key_url = s3.generate_presigned_url( + ClientMethod='get_object', + Params={ + 'Bucket': 'foobar', + 'Key': 'template-key' + } + ) + cf_conn = boto3.client('cloudformation', region_name='us-west-1') + response = cf_conn.create_change_set( + StackName='NewStack', + TemplateURL=key_url, + ChangeSetName='NewChangeSet', + ChangeSetType='CREATE', + Tags=[ + {'Key': 'tag-key', 'Value': 'tag-value'} + ], + ) + assert 'arn:aws:cloudformation:us-west-1:123456789:changeSet/NewChangeSet/' in response['Id'] + assert 'arn:aws:cloudformation:us-east-1:123456789:stack/NewStack' in response['StackId'] + + +@mock_cloudformation +def test_describe_change_set(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_change_set( + StackName='NewStack', + TemplateBody=dummy_template_json, + ChangeSetName='NewChangeSet', + ChangeSetType='CREATE', + ) + + stack = cf_conn.describe_change_set(ChangeSetName="NewChangeSet") + 
stack['ChangeSetName'].should.equal('NewChangeSet') + stack['StackName'].should.equal('NewStack') + + cf_conn.create_change_set( + StackName='NewStack', + TemplateBody=dummy_update_template_json, + ChangeSetName='NewChangeSet2', + ChangeSetType='UPDATE', + ) + stack = cf_conn.describe_change_set(ChangeSetName="NewChangeSet2") + stack['ChangeSetName'].should.equal('NewChangeSet2') + stack['StackName'].should.equal('NewStack') + stack['Changes'].should.have.length_of(2) + + +@mock_cloudformation +def test_execute_change_set_w_arn(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + change_set = cf_conn.create_change_set( + StackName='NewStack', + TemplateBody=dummy_template_json, + ChangeSetName='NewChangeSet', + ChangeSetType='CREATE', + ) + cf_conn.execute_change_set(ChangeSetName=change_set['Id']) + + +@mock_cloudformation +def test_execute_change_set_w_name(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + change_set = cf_conn.create_change_set( + StackName='NewStack', + TemplateBody=dummy_template_json, + ChangeSetName='NewChangeSet', + ChangeSetType='CREATE', + ) + cf_conn.execute_change_set(ChangeSetName='NewChangeSet', StackName='NewStack') + + +@mock_cloudformation +def test_describe_stack_pagination(): + conn = boto3.client('cloudformation', region_name='us-east-1') + for i in range(100): + conn.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_json, + ) + + resp = conn.describe_stacks() + stacks = resp['Stacks'] + stacks.should.have.length_of(50) + next_token = resp['NextToken'] + next_token.should_not.be.none + resp2 = conn.describe_stacks(NextToken=next_token) + stacks.extend(resp2['Stacks']) + stacks.should.have.length_of(100) + assert 'NextToken' not in resp2.keys() + + +@mock_cloudformation +def test_describe_stack_resources(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_json, + ) + + 
stack = cf_conn.describe_stacks(StackName="test_stack")['Stacks'][0] + + response = cf_conn.describe_stack_resources(StackName=stack['StackName']) + resource = response['StackResources'][0] + resource['LogicalResourceId'].should.equal('EC2Instance1') + resource['ResourceStatus'].should.equal('CREATE_COMPLETE') + resource['ResourceType'].should.equal('AWS::EC2::Instance') + resource['StackId'].should.equal(stack['StackId']) + + +@mock_cloudformation +def test_describe_stack_by_name(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_json, + ) + + stack = cf_conn.describe_stacks(StackName="test_stack")['Stacks'][0] + stack['StackName'].should.equal('test_stack') + + +@mock_cloudformation +def test_describe_stack_by_stack_id(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_json, + ) + + stack = cf_conn.describe_stacks(StackName="test_stack")['Stacks'][0] + stack_by_id = cf_conn.describe_stacks(StackName=stack['StackId'])['Stacks'][ + 0] + + stack_by_id['StackId'].should.equal(stack['StackId']) + stack_by_id['StackName'].should.equal("test_stack") + + +@mock_cloudformation +def test_list_change_sets(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_change_set( + StackName='NewStack2', + TemplateBody=dummy_template_json, + ChangeSetName='NewChangeSet2', + ChangeSetType='CREATE', + ) + change_set = cf_conn.list_change_sets(StackName='NewStack2')['Summaries'][0] + change_set['StackName'].should.equal('NewStack2') + change_set['ChangeSetName'].should.equal('NewChangeSet2') + + +@mock_cloudformation +def test_list_stacks(): + cf = boto3.resource('cloudformation', region_name='us-east-1') + cf.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_json, + ) + cf.create_stack( + StackName="test_stack2", + 
TemplateBody=dummy_template_json, + ) + + stacks = list(cf.stacks.all()) + stacks.should.have.length_of(2) + stack_names = [stack.stack_name for stack in stacks] + stack_names.should.contain("test_stack") + stack_names.should.contain("test_stack2") + + +@mock_cloudformation +def test_delete_stack_from_resource(): + cf = boto3.resource('cloudformation', region_name='us-east-1') + stack = cf.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_json, + ) + + list(cf.stacks.all()).should.have.length_of(1) + stack.delete() + list(cf.stacks.all()).should.have.length_of(0) + + +@mock_cloudformation +@mock_ec2 +def test_delete_change_set(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_change_set( + StackName='NewStack', + TemplateBody=dummy_template_json, + ChangeSetName='NewChangeSet', + ChangeSetType='CREATE', + ) + + cf_conn.list_change_sets(StackName='NewStack')['Summaries'].should.have.length_of(1) + cf_conn.delete_change_set(ChangeSetName='NewChangeSet', StackName='NewStack') + cf_conn.list_change_sets(StackName='NewStack')['Summaries'].should.have.length_of(0) + + +@mock_cloudformation +@mock_ec2 +def test_delete_stack_by_name(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_json, + ) + + cf_conn.describe_stacks()['Stacks'].should.have.length_of(1) + cf_conn.delete_stack(StackName="test_stack") + cf_conn.describe_stacks()['Stacks'].should.have.length_of(0) + + +@mock_cloudformation +def test_delete_stack(): + cf = boto3.client('cloudformation', region_name='us-east-1') + cf.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_json, + ) + + cf.delete_stack( + StackName="test_stack", + ) + stacks = cf.list_stacks() + assert stacks['StackSummaries'][0]['StackStatus'] == 'DELETE_COMPLETE' + + +@mock_cloudformation +def test_describe_deleted_stack(): + cf_conn = boto3.client('cloudformation', 
region_name='us-east-1') + cf_conn.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_json, + ) + + stack = cf_conn.describe_stacks(StackName="test_stack")['Stacks'][0] + stack_id = stack['StackId'] + cf_conn.delete_stack(StackName=stack['StackId']) + stack_by_id = cf_conn.describe_stacks(StackName=stack_id)['Stacks'][0] + stack_by_id['StackId'].should.equal(stack['StackId']) + stack_by_id['StackName'].should.equal("test_stack") + stack_by_id['StackStatus'].should.equal("DELETE_COMPLETE") + + +@mock_cloudformation +@mock_ec2 +def test_describe_updated_stack(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_json, + Tags=[{'Key': 'foo', 'Value': 'bar'}], + ) + + cf_conn.update_stack( + StackName="test_stack", + RoleARN='arn:aws:iam::123456789012:role/moto', + TemplateBody=dummy_update_template_json, + Tags=[{'Key': 'foo', 'Value': 'baz'}], + ) + + stack = cf_conn.describe_stacks(StackName="test_stack")['Stacks'][0] + stack_id = stack['StackId'] + stack_by_id = cf_conn.describe_stacks(StackName=stack_id)['Stacks'][0] + stack_by_id['StackId'].should.equal(stack['StackId']) + stack_by_id['StackName'].should.equal("test_stack") + stack_by_id['StackStatus'].should.equal("UPDATE_COMPLETE") + stack_by_id['RoleARN'].should.equal('arn:aws:iam::123456789012:role/moto') + stack_by_id['Tags'].should.equal([{'Key': 'foo', 'Value': 'baz'}]) + + +@mock_cloudformation +def test_bad_describe_stack(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + with assert_raises(ClientError): + cf_conn.describe_stacks(StackName="non_existent_stack") + + +@mock_cloudformation() +def test_cloudformation_params(): + dummy_template_with_params = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack 1", + "Resources": {}, + "Parameters": { + "APPNAME": { + "Default": "app-name", + "Description": "The name of the app", + "Type": "String" + } + } + 
} + dummy_template_with_params_json = json.dumps(dummy_template_with_params) + + cf = boto3.resource('cloudformation', region_name='us-east-1') + stack = cf.create_stack( + StackName='test_stack', + TemplateBody=dummy_template_with_params_json, + Parameters=[{ + "ParameterKey": "APPNAME", + "ParameterValue": "testing123", + }], + ) + + stack.parameters.should.have.length_of(1) + param = stack.parameters[0] + param['ParameterKey'].should.equal('APPNAME') + param['ParameterValue'].should.equal('testing123') + + +@mock_cloudformation +def test_stack_tags(): + tags = [ + { + "Key": "foo", + "Value": "bar" + }, + { + "Key": "baz", + "Value": "bleh" + } + ] + cf = boto3.resource('cloudformation', region_name='us-east-1') + stack = cf.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_json, + Tags=tags, + ) + observed_tag_items = set( + item for items in [tag.items() for tag in stack.tags] for item in items) + expected_tag_items = set( + item for items in [tag.items() for tag in tags] for item in items) + observed_tag_items.should.equal(expected_tag_items) + + +@mock_cloudformation +@mock_ec2 +def test_stack_events(): + cf = boto3.resource('cloudformation', region_name='us-east-1') + stack = cf.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_json, + ) + stack.update(TemplateBody=dummy_update_template_json) + stack = cf.Stack(stack.stack_id) + stack.delete() + + # assert begins and ends with stack events + events = list(stack.events.all()) + events[0].resource_type.should.equal("AWS::CloudFormation::Stack") + events[-1].resource_type.should.equal("AWS::CloudFormation::Stack") + + # testing ordering of stack events without assuming resource events will not exist + # the AWS API returns events in reverse chronological order + stack_events_to_look_for = iter([ + ("DELETE_COMPLETE", None), + ("DELETE_IN_PROGRESS", "User Initiated"), + ("UPDATE_COMPLETE", None), + ("UPDATE_IN_PROGRESS", "User Initiated"), + ("CREATE_COMPLETE", None), + 
("CREATE_IN_PROGRESS", "User Initiated"), + ]) + try: + for event in events: + event.stack_id.should.equal(stack.stack_id) + event.stack_name.should.equal("test_stack") + event.event_id.should.match(r"[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}") + + if event.resource_type == "AWS::CloudFormation::Stack": + event.logical_resource_id.should.equal("test_stack") + event.physical_resource_id.should.equal(stack.stack_id) + + status_to_look_for, reason_to_look_for = next( + stack_events_to_look_for) + event.resource_status.should.equal(status_to_look_for) + if reason_to_look_for is not None: + event.resource_status_reason.should.equal( + reason_to_look_for) + except StopIteration: + assert False, "Too many stack events" + + list(stack_events_to_look_for).should.be.empty + + +@mock_cloudformation +def test_list_exports(): + cf_client = boto3.client('cloudformation', region_name='us-east-1') + cf_resource = boto3.resource('cloudformation', region_name='us-east-1') + stack = cf_resource.create_stack( + StackName="test_stack", + TemplateBody=dummy_output_template_json, + ) + output_value = 'VPCID' + exports = cf_client.list_exports()['Exports'] + + stack.outputs.should.have.length_of(1) + stack.outputs[0]['OutputValue'].should.equal(output_value) + + exports.should.have.length_of(1) + exports[0]['ExportingStackId'].should.equal(stack.stack_id) + exports[0]['Name'].should.equal('My VPC ID') + exports[0]['Value'].should.equal(output_value) + + +@mock_cloudformation +def test_list_exports_with_token(): + cf = boto3.client('cloudformation', region_name='us-east-1') + for i in range(101): + # Add index to ensure name is unique + dummy_output_template['Outputs']['StackVPC']['Export']['Name'] += str(i) + cf.create_stack( + StackName="test_stack", + TemplateBody=json.dumps(dummy_output_template), + ) + exports = cf.list_exports() + exports['Exports'].should.have.length_of(100) + exports.get('NextToken').should_not.be.none + + more_exports = 
cf.list_exports(NextToken=exports['NextToken']) + more_exports['Exports'].should.have.length_of(1) + more_exports.get('NextToken').should.be.none + + +@mock_cloudformation +def test_delete_stack_with_export(): + cf = boto3.client('cloudformation', region_name='us-east-1') + stack = cf.create_stack( + StackName="test_stack", + TemplateBody=dummy_output_template_json, + ) + + stack_id = stack['StackId'] + exports = cf.list_exports()['Exports'] + exports.should.have.length_of(1) + + cf.delete_stack(StackName=stack_id) + cf.list_exports()['Exports'].should.have.length_of(0) + + +@mock_cloudformation +def test_export_names_must_be_unique(): + cf = boto3.resource('cloudformation', region_name='us-east-1') + first_stack = cf.create_stack( + StackName="test_stack", + TemplateBody=dummy_output_template_json, + ) + with assert_raises(ClientError): + cf.create_stack( + StackName="test_stack", + TemplateBody=dummy_output_template_json, + ) + + +@mock_sqs +@mock_cloudformation +def test_stack_with_imports(): + cf = boto3.resource('cloudformation', region_name='us-east-1') + ec2_resource = boto3.resource('sqs', region_name='us-east-1') + + output_stack = cf.create_stack( + StackName="test_stack1", + TemplateBody=dummy_output_template_json, + ) + import_stack = cf.create_stack( + StackName="test_stack2", + TemplateBody=dummy_import_template_json + ) + + output_stack.outputs.should.have.length_of(1) + output = output_stack.outputs[0]['OutputValue'] + queue = ec2_resource.get_queue_by_name(QueueName=output) + queue.should_not.be.none + + +@mock_sqs +@mock_cloudformation +def test_non_json_redrive_policy(): + cf = boto3.resource('cloudformation', region_name='us-east-1') + + stack = cf.create_stack( + StackName="test_stack1", + TemplateBody=dummy_redrive_template_json + ) + + stack.Resource('MainQueue').resource_status\ + .should.equal("CREATE_COMPLETE") + stack.Resource('DeadLetterQueue').resource_status\ + .should.equal("CREATE_COMPLETE") diff --git 
a/tests/test_cloudformation/test_cloudformation_stack_integration.py b/tests/test_cloudformation/test_cloudformation_stack_integration.py index 936f7c2a1..449fde4ce 100644 --- a/tests/test_cloudformation/test_cloudformation_stack_integration.py +++ b/tests/test_cloudformation/test_cloudformation_stack_integration.py @@ -1,2427 +1,2435 @@ -from __future__ import unicode_literals -import json - -import base64 -import boto -import boto.cloudformation -import boto.datapipeline -import boto.ec2 -import boto.ec2.autoscale -import boto.ec2.elb -from boto.exception import BotoServerError -import boto.iam -import boto.redshift -import boto.sns -import boto.sqs -import boto.vpc -import boto3 -import sure # noqa - -from moto import ( - mock_autoscaling_deprecated, - mock_cloudformation, - mock_cloudformation_deprecated, - mock_datapipeline_deprecated, - mock_ec2, - mock_ec2_deprecated, - mock_elb, - mock_elb_deprecated, - mock_iam_deprecated, - mock_kms, - mock_lambda, - mock_rds_deprecated, - mock_rds2, - mock_rds2_deprecated, - mock_redshift, - mock_redshift_deprecated, - mock_route53_deprecated, - mock_sns_deprecated, - mock_sqs, - mock_sqs_deprecated, - mock_elbv2) - -from .fixtures import ( - ec2_classic_eip, - fn_join, - rds_mysql_with_db_parameter_group, - rds_mysql_with_read_replica, - redshift, - route53_ec2_instance_with_public_ip, - route53_health_check, - route53_roundrobin, - single_instance_with_ebs_volume, - vpc_eip, - vpc_single_instance_in_subnet, -) - - -@mock_cloudformation_deprecated() -def test_stack_sqs_integration(): - sqs_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "QueueGroup": { - - "Type": "AWS::SQS::Queue", - "Properties": { - "QueueName": "my-queue", - "VisibilityTimeout": 60, - } - }, - }, - } - sqs_template_json = json.dumps(sqs_template) - - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "test_stack", - template_body=sqs_template_json, - ) - - stack = conn.describe_stacks()[0] - 
queue = stack.describe_resources()[0] - queue.resource_type.should.equal('AWS::SQS::Queue') - queue.logical_resource_id.should.equal("QueueGroup") - queue.physical_resource_id.should.equal("my-queue") - - -@mock_cloudformation_deprecated() -def test_stack_list_resources(): - sqs_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "QueueGroup": { - - "Type": "AWS::SQS::Queue", - "Properties": { - "QueueName": "my-queue", - "VisibilityTimeout": 60, - } - }, - }, - } - sqs_template_json = json.dumps(sqs_template) - - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "test_stack", - template_body=sqs_template_json, - ) - - resources = conn.list_stack_resources("test_stack") - assert len(resources) == 1 - queue = resources[0] - queue.resource_type.should.equal('AWS::SQS::Queue') - queue.logical_resource_id.should.equal("QueueGroup") - queue.physical_resource_id.should.equal("my-queue") - - -@mock_cloudformation_deprecated() -@mock_sqs_deprecated() -def test_update_stack(): - sqs_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "QueueGroup": { - - "Type": "AWS::SQS::Queue", - "Properties": { - "QueueName": "my-queue", - "VisibilityTimeout": 60, - } - }, - }, - } - sqs_template_json = json.dumps(sqs_template) - - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "test_stack", - template_body=sqs_template_json, - ) - - sqs_conn = boto.sqs.connect_to_region("us-west-1") - queues = sqs_conn.get_all_queues() - queues.should.have.length_of(1) - queues[0].get_attributes('VisibilityTimeout')[ - 'VisibilityTimeout'].should.equal('60') - - sqs_template['Resources']['QueueGroup'][ - 'Properties']['VisibilityTimeout'] = 100 - sqs_template_json = json.dumps(sqs_template) - conn.update_stack("test_stack", sqs_template_json) - - queues = sqs_conn.get_all_queues() - queues.should.have.length_of(1) - queues[0].get_attributes('VisibilityTimeout')[ - 
'VisibilityTimeout'].should.equal('100') - - -@mock_cloudformation_deprecated() -@mock_sqs_deprecated() -def test_update_stack_and_remove_resource(): - sqs_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "QueueGroup": { - - "Type": "AWS::SQS::Queue", - "Properties": { - "QueueName": "my-queue", - "VisibilityTimeout": 60, - } - }, - }, - } - sqs_template_json = json.dumps(sqs_template) - - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "test_stack", - template_body=sqs_template_json, - ) - - sqs_conn = boto.sqs.connect_to_region("us-west-1") - queues = sqs_conn.get_all_queues() - queues.should.have.length_of(1) - - sqs_template['Resources'].pop('QueueGroup') - sqs_template_json = json.dumps(sqs_template) - conn.update_stack("test_stack", sqs_template_json) - - queues = sqs_conn.get_all_queues() - queues.should.have.length_of(0) - - -@mock_cloudformation_deprecated() -@mock_sqs_deprecated() -def test_update_stack_and_add_resource(): - sqs_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": {}, - } - sqs_template_json = json.dumps(sqs_template) - - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "test_stack", - template_body=sqs_template_json, - ) - - sqs_conn = boto.sqs.connect_to_region("us-west-1") - queues = sqs_conn.get_all_queues() - queues.should.have.length_of(0) - - sqs_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "QueueGroup": { - - "Type": "AWS::SQS::Queue", - "Properties": { - "QueueName": "my-queue", - "VisibilityTimeout": 60, - } - }, - }, - } - sqs_template_json = json.dumps(sqs_template) - conn.update_stack("test_stack", sqs_template_json) - - queues = sqs_conn.get_all_queues() - queues.should.have.length_of(1) - - -@mock_ec2_deprecated() -@mock_cloudformation_deprecated() -def test_stack_ec2_integration(): - ec2_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "WebServerGroup": { - 
"Type": "AWS::EC2::Instance", - "Properties": { - "ImageId": "ami-1234abcd", - "UserData": "some user data", - } - }, - }, - } - ec2_template_json = json.dumps(ec2_template) - - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "ec2_stack", - template_body=ec2_template_json, - ) - - ec2_conn = boto.ec2.connect_to_region("us-west-1") - reservation = ec2_conn.get_all_instances()[0] - ec2_instance = reservation.instances[0] - - stack = conn.describe_stacks()[0] - instance = stack.describe_resources()[0] - instance.resource_type.should.equal('AWS::EC2::Instance') - instance.logical_resource_id.should.contain("WebServerGroup") - instance.physical_resource_id.should.equal(ec2_instance.id) - - -@mock_ec2_deprecated() -@mock_elb_deprecated() -@mock_cloudformation_deprecated() -def test_stack_elb_integration_with_attached_ec2_instances(): - elb_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "MyELB": { - "Type": "AWS::ElasticLoadBalancing::LoadBalancer", - "Properties": { - "Instances": [{"Ref": "Ec2Instance1"}], - "LoadBalancerName": "test-elb", - "AvailabilityZones": ['us-east-1'], - "Listeners": [ - { - "InstancePort": "80", - "LoadBalancerPort": "80", - "Protocol": "HTTP", - } - ], - } - }, - "Ec2Instance1": { - "Type": "AWS::EC2::Instance", - "Properties": { - "ImageId": "ami-1234abcd", - "UserData": "some user data", - } - }, - }, - } - elb_template_json = json.dumps(elb_template) - - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "elb_stack", - template_body=elb_template_json, - ) - - elb_conn = boto.ec2.elb.connect_to_region("us-west-1") - load_balancer = elb_conn.get_all_load_balancers()[0] - - ec2_conn = boto.ec2.connect_to_region("us-west-1") - reservation = ec2_conn.get_all_instances()[0] - ec2_instance = reservation.instances[0] - - load_balancer.instances[0].id.should.equal(ec2_instance.id) - list(load_balancer.availability_zones).should.equal(['us-east-1']) - - 
-@mock_elb_deprecated() -@mock_cloudformation_deprecated() -def test_stack_elb_integration_with_health_check(): - elb_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "MyELB": { - "Type": "AWS::ElasticLoadBalancing::LoadBalancer", - "Properties": { - "LoadBalancerName": "test-elb", - "AvailabilityZones": ['us-west-1'], - "HealthCheck": { - "HealthyThreshold": "3", - "Interval": "5", - "Target": "HTTP:80/healthcheck", - "Timeout": "4", - "UnhealthyThreshold": "2", - }, - "Listeners": [ - { - "InstancePort": "80", - "LoadBalancerPort": "80", - "Protocol": "HTTP", - } - ], - } - }, - }, - } - elb_template_json = json.dumps(elb_template) - - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "elb_stack", - template_body=elb_template_json, - ) - - elb_conn = boto.ec2.elb.connect_to_region("us-west-1") - load_balancer = elb_conn.get_all_load_balancers()[0] - health_check = load_balancer.health_check - - health_check.healthy_threshold.should.equal(3) - health_check.interval.should.equal(5) - health_check.target.should.equal("HTTP:80/healthcheck") - health_check.timeout.should.equal(4) - health_check.unhealthy_threshold.should.equal(2) - - -@mock_elb_deprecated() -@mock_cloudformation_deprecated() -def test_stack_elb_integration_with_update(): - elb_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "MyELB": { - "Type": "AWS::ElasticLoadBalancing::LoadBalancer", - "Properties": { - "LoadBalancerName": "test-elb", - "AvailabilityZones": ['us-west-1a'], - "Listeners": [ - { - "InstancePort": "80", - "LoadBalancerPort": "80", - "Protocol": "HTTP", - } - ], - "Policies": {"Ref": "AWS::NoValue"}, - } - }, - }, - } - elb_template_json = json.dumps(elb_template) - - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "elb_stack", - template_body=elb_template_json, - ) - - elb_conn = boto.ec2.elb.connect_to_region("us-west-1") - load_balancer = 
elb_conn.get_all_load_balancers()[0] - load_balancer.availability_zones[0].should.equal('us-west-1a') - - elb_template['Resources']['MyELB']['Properties'][ - 'AvailabilityZones'] = ['us-west-1b'] - elb_template_json = json.dumps(elb_template) - conn.update_stack( - "elb_stack", - template_body=elb_template_json, - ) - load_balancer = elb_conn.get_all_load_balancers()[0] - load_balancer.availability_zones[0].should.equal('us-west-1b') - - -@mock_ec2_deprecated() -@mock_redshift_deprecated() -@mock_cloudformation_deprecated() -def test_redshift_stack(): - redshift_template_json = json.dumps(redshift.template) - - vpc_conn = boto.vpc.connect_to_region("us-west-2") - conn = boto.cloudformation.connect_to_region("us-west-2") - conn.create_stack( - "redshift_stack", - template_body=redshift_template_json, - parameters=[ - ("DatabaseName", "mydb"), - ("ClusterType", "multi-node"), - ("NumberOfNodes", 2), - ("NodeType", "dw1.xlarge"), - ("MasterUsername", "myuser"), - ("MasterUserPassword", "mypass"), - ("InboundTraffic", "10.0.0.1/16"), - ("PortNumber", 5439), - ] - ) - - redshift_conn = boto.redshift.connect_to_region("us-west-2") - - cluster_res = redshift_conn.describe_clusters() - clusters = cluster_res['DescribeClustersResponse'][ - 'DescribeClustersResult']['Clusters'] - clusters.should.have.length_of(1) - cluster = clusters[0] - cluster['DBName'].should.equal("mydb") - cluster['NumberOfNodes'].should.equal(2) - cluster['NodeType'].should.equal("dw1.xlarge") - cluster['MasterUsername'].should.equal("myuser") - cluster['Port'].should.equal(5439) - cluster['VpcSecurityGroups'].should.have.length_of(1) - security_group_id = cluster['VpcSecurityGroups'][0]['VpcSecurityGroupId'] - - groups = vpc_conn.get_all_security_groups(group_ids=[security_group_id]) - groups.should.have.length_of(1) - group = groups[0] - group.rules.should.have.length_of(1) - group.rules[0].grants[0].cidr_ip.should.equal("10.0.0.1/16") - - -@mock_ec2_deprecated() -@mock_cloudformation_deprecated() 
-def test_stack_security_groups(): - security_group_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "my-security-group": { - "Type": "AWS::EC2::SecurityGroup", - "Properties": { - "GroupDescription": "My other group", - }, - }, - "Ec2Instance2": { - "Type": "AWS::EC2::Instance", - "Properties": { - "SecurityGroups": [{"Ref": "InstanceSecurityGroup"}], - "ImageId": "ami-1234abcd", - } - }, - "InstanceSecurityGroup": { - "Type": "AWS::EC2::SecurityGroup", - "Properties": { - "GroupDescription": "My security group", - "Tags": [ - { - "Key": "bar", - "Value": "baz" - } - ], - "SecurityGroupIngress": [{ - "IpProtocol": "tcp", - "FromPort": "22", - "ToPort": "22", - "CidrIp": "123.123.123.123/32", - }, { - "IpProtocol": "tcp", - "FromPort": "80", - "ToPort": "8000", - "SourceSecurityGroupId": {"Ref": "my-security-group"}, - }] - } - } - }, - } - security_group_template_json = json.dumps(security_group_template) - - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "security_group_stack", - template_body=security_group_template_json, - tags={"foo": "bar"} - ) - - ec2_conn = boto.ec2.connect_to_region("us-west-1") - instance_group = ec2_conn.get_all_security_groups( - filters={'description': ['My security group']})[0] - other_group = ec2_conn.get_all_security_groups( - filters={'description': ['My other group']})[0] - - reservation = ec2_conn.get_all_instances()[0] - ec2_instance = reservation.instances[0] - - ec2_instance.groups[0].id.should.equal(instance_group.id) - instance_group.description.should.equal("My security group") - instance_group.tags.should.have.key('foo').which.should.equal('bar') - instance_group.tags.should.have.key('bar').which.should.equal('baz') - rule1, rule2 = instance_group.rules - int(rule1.to_port).should.equal(22) - int(rule1.from_port).should.equal(22) - rule1.grants[0].cidr_ip.should.equal("123.123.123.123/32") - rule1.ip_protocol.should.equal('tcp') - - 
int(rule2.to_port).should.equal(8000) - int(rule2.from_port).should.equal(80) - rule2.ip_protocol.should.equal('tcp') - rule2.grants[0].group_id.should.equal(other_group.id) - - -@mock_autoscaling_deprecated() -@mock_elb_deprecated() -@mock_cloudformation_deprecated() -@mock_ec2_deprecated() -def test_autoscaling_group_with_elb(): - web_setup_template = { - "AWSTemplateFormatVersion": "2010-09-09", - - "Resources": { - "my-as-group": { - "Type": "AWS::AutoScaling::AutoScalingGroup", - "Properties": { - "AvailabilityZones": ['us-east1'], - "LaunchConfigurationName": {"Ref": "my-launch-config"}, - "MinSize": "2", - "MaxSize": "2", - "DesiredCapacity": "2", - "LoadBalancerNames": [{"Ref": "my-elb"}], - "Tags": [ - { - "Key": "propagated-test-tag", "Value": "propagated-test-tag-value", - "PropagateAtLaunch": True}, - { - "Key": "not-propagated-test-tag", - "Value": "not-propagated-test-tag-value", - "PropagateAtLaunch": False - } - ] - }, - }, - - "my-launch-config": { - "Type": "AWS::AutoScaling::LaunchConfiguration", - "Properties": { - "ImageId": "ami-1234abcd", - "UserData": "some user data", - } - }, - - "my-elb": { - "Type": "AWS::ElasticLoadBalancing::LoadBalancer", - "Properties": { - "AvailabilityZones": ['us-east1'], - "Listeners": [{ - "LoadBalancerPort": "80", - "InstancePort": "80", - "Protocol": "HTTP", - }], - "LoadBalancerName": "my-elb", - "HealthCheck": { - "Target": "HTTP:80", - "HealthyThreshold": "3", - "UnhealthyThreshold": "5", - "Interval": "30", - "Timeout": "5", - }, - }, - }, - } - } - - web_setup_template_json = json.dumps(web_setup_template) - - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "web_stack", - template_body=web_setup_template_json, - ) - - autoscale_conn = boto.ec2.autoscale.connect_to_region("us-west-1") - autoscale_group = autoscale_conn.get_all_groups()[0] - autoscale_group.launch_config_name.should.contain("my-launch-config") - autoscale_group.load_balancers[0].should.equal('my-elb') - - # 
Confirm the Launch config was actually created - autoscale_conn.get_all_launch_configurations().should.have.length_of(1) - - # Confirm the ELB was actually created - elb_conn = boto.ec2.elb.connect_to_region("us-west-1") - elb_conn.get_all_load_balancers().should.have.length_of(1) - - stack = conn.describe_stacks()[0] - resources = stack.describe_resources() - as_group_resource = [resource for resource in resources if resource.resource_type == - 'AWS::AutoScaling::AutoScalingGroup'][0] - as_group_resource.physical_resource_id.should.contain("my-as-group") - - launch_config_resource = [ - resource for resource in resources if - resource.resource_type == 'AWS::AutoScaling::LaunchConfiguration'][0] - launch_config_resource.physical_resource_id.should.contain( - "my-launch-config") - - elb_resource = [resource for resource in resources if resource.resource_type == - 'AWS::ElasticLoadBalancing::LoadBalancer'][0] - elb_resource.physical_resource_id.should.contain("my-elb") - - # confirm the instances were created with the right tags - ec2_conn = boto.ec2.connect_to_region('us-west-1') - reservations = ec2_conn.get_all_reservations() - len(reservations).should.equal(1) - reservation = reservations[0] - len(reservation.instances).should.equal(2) - for instance in reservation.instances: - instance.tags['propagated-test-tag'].should.equal('propagated-test-tag-value') - instance.tags.keys().should_not.contain('not-propagated-test-tag') - - -@mock_autoscaling_deprecated() -@mock_cloudformation_deprecated() -@mock_ec2_deprecated() -def test_autoscaling_group_update(): - asg_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "my-as-group": { - "Type": "AWS::AutoScaling::AutoScalingGroup", - "Properties": { - "AvailabilityZones": ['us-west-1'], - "LaunchConfigurationName": {"Ref": "my-launch-config"}, - "MinSize": "2", - "MaxSize": "2", - "DesiredCapacity": "2" - }, - }, - - "my-launch-config": { - "Type": "AWS::AutoScaling::LaunchConfiguration", - 
"Properties": { - "ImageId": "ami-1234abcd", - "UserData": "some user data", - } - }, - }, - } - asg_template_json = json.dumps(asg_template) - - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "asg_stack", - template_body=asg_template_json, - ) - - autoscale_conn = boto.ec2.autoscale.connect_to_region("us-west-1") - asg = autoscale_conn.get_all_groups()[0] - asg.min_size.should.equal(2) - asg.max_size.should.equal(2) - asg.desired_capacity.should.equal(2) - - asg_template['Resources']['my-as-group']['Properties']['MaxSize'] = 3 - asg_template['Resources']['my-as-group']['Properties']['Tags'] = [ - { - "Key": "propagated-test-tag", "Value": "propagated-test-tag-value", - "PropagateAtLaunch": True}, - { - "Key": "not-propagated-test-tag", - "Value": "not-propagated-test-tag-value", - "PropagateAtLaunch": False - } - ] - asg_template_json = json.dumps(asg_template) - conn.update_stack( - "asg_stack", - template_body=asg_template_json, - ) - asg = autoscale_conn.get_all_groups()[0] - asg.min_size.should.equal(2) - asg.max_size.should.equal(3) - asg.desired_capacity.should.equal(2) - - # confirm the instances were created with the right tags - ec2_conn = boto.ec2.connect_to_region('us-west-1') - reservations = ec2_conn.get_all_reservations() - running_instance_count = 0 - for res in reservations: - for instance in res.instances: - if instance.state == 'running': - running_instance_count += 1 - instance.tags['propagated-test-tag'].should.equal('propagated-test-tag-value') - instance.tags.keys().should_not.contain('not-propagated-test-tag') - running_instance_count.should.equal(2) - - -@mock_ec2_deprecated() -@mock_cloudformation_deprecated() -def test_vpc_single_instance_in_subnet(): - template_json = json.dumps(vpc_single_instance_in_subnet.template) - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "test_stack", - template_body=template_json, - parameters=[("KeyName", "my_key")], - ) - - vpc_conn = 
boto.vpc.connect_to_region("us-west-1") - - vpc = vpc_conn.get_all_vpcs(filters={'cidrBlock': '10.0.0.0/16'})[0] - vpc.cidr_block.should.equal("10.0.0.0/16") - - # Add this once we implement the endpoint - # vpc_conn.get_all_internet_gateways().should.have.length_of(1) - - subnet = vpc_conn.get_all_subnets(filters={'vpcId': vpc.id})[0] - subnet.vpc_id.should.equal(vpc.id) - - ec2_conn = boto.ec2.connect_to_region("us-west-1") - reservation = ec2_conn.get_all_instances()[0] - instance = reservation.instances[0] - instance.tags["Foo"].should.equal("Bar") - # Check that the EIP is attached the the EC2 instance - eip = ec2_conn.get_all_addresses()[0] - eip.domain.should.equal('vpc') - eip.instance_id.should.equal(instance.id) - - security_group = ec2_conn.get_all_security_groups( - filters={'vpc_id': [vpc.id]})[0] - security_group.vpc_id.should.equal(vpc.id) - - stack = conn.describe_stacks()[0] - - vpc.tags.should.have.key('Application').which.should.equal(stack.stack_id) - - resources = stack.describe_resources() - vpc_resource = [ - resource for resource in resources if resource.resource_type == 'AWS::EC2::VPC'][0] - vpc_resource.physical_resource_id.should.equal(vpc.id) - - subnet_resource = [ - resource for resource in resources if resource.resource_type == 'AWS::EC2::Subnet'][0] - subnet_resource.physical_resource_id.should.equal(subnet.id) - - eip_resource = [ - resource for resource in resources if resource.resource_type == 'AWS::EC2::EIP'][0] - eip_resource.physical_resource_id.should.equal(eip.public_ip) - - -@mock_cloudformation() -@mock_ec2() -@mock_rds2() -def test_rds_db_parameter_groups(): - ec2_conn = boto3.client("ec2", region_name="us-west-1") - ec2_conn.create_security_group( - GroupName='application', Description='Our Application Group') - - template_json = json.dumps(rds_mysql_with_db_parameter_group.template) - cf_conn = boto3.client('cloudformation', 'us-west-1') - cf_conn.create_stack( - StackName="test_stack", - TemplateBody=template_json, - 
Parameters=[{'ParameterKey': key, 'ParameterValue': value} for - key, value in [ - ("DBInstanceIdentifier", "master_db"), - ("DBName", "my_db"), - ("DBUser", "my_user"), - ("DBPassword", "my_password"), - ("DBAllocatedStorage", "20"), - ("DBInstanceClass", "db.m1.medium"), - ("EC2SecurityGroup", "application"), - ("MultiAZ", "true"), - ] - ], - ) - - rds_conn = boto3.client('rds', region_name="us-west-1") - - db_parameter_groups = rds_conn.describe_db_parameter_groups() - len(db_parameter_groups['DBParameterGroups']).should.equal(1) - db_parameter_group_name = db_parameter_groups[ - 'DBParameterGroups'][0]['DBParameterGroupName'] - - found_cloudformation_set_parameter = False - for db_parameter in rds_conn.describe_db_parameters(DBParameterGroupName=db_parameter_group_name)[ - 'Parameters']: - if db_parameter['ParameterName'] == 'BACKLOG_QUEUE_LIMIT' and db_parameter[ - 'ParameterValue'] == '2048': - found_cloudformation_set_parameter = True - - found_cloudformation_set_parameter.should.equal(True) - - -@mock_cloudformation_deprecated() -@mock_ec2_deprecated() -@mock_rds_deprecated() -def test_rds_mysql_with_read_replica(): - ec2_conn = boto.ec2.connect_to_region("us-west-1") - ec2_conn.create_security_group('application', 'Our Application Group') - - template_json = json.dumps(rds_mysql_with_read_replica.template) - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "test_stack", - template_body=template_json, - parameters=[ - ("DBInstanceIdentifier", "master_db"), - ("DBName", "my_db"), - ("DBUser", "my_user"), - ("DBPassword", "my_password"), - ("DBAllocatedStorage", "20"), - ("DBInstanceClass", "db.m1.medium"), - ("EC2SecurityGroup", "application"), - ("MultiAZ", "true"), - ], - ) - - rds_conn = boto.rds.connect_to_region("us-west-1") - - primary = rds_conn.get_all_dbinstances("master_db")[0] - primary.master_username.should.equal("my_user") - primary.allocated_storage.should.equal(20) - 
primary.instance_class.should.equal("db.m1.medium") - primary.multi_az.should.equal(True) - list(primary.read_replica_dbinstance_identifiers).should.have.length_of(1) - replica_id = primary.read_replica_dbinstance_identifiers[0] - - replica = rds_conn.get_all_dbinstances(replica_id)[0] - replica.instance_class.should.equal("db.m1.medium") - - security_group_name = primary.security_groups[0].name - security_group = rds_conn.get_all_dbsecurity_groups(security_group_name)[0] - security_group.ec2_groups[0].name.should.equal("application") - - -@mock_cloudformation_deprecated() -@mock_ec2_deprecated() -@mock_rds_deprecated() -def test_rds_mysql_with_read_replica_in_vpc(): - template_json = json.dumps(rds_mysql_with_read_replica.template) - conn = boto.cloudformation.connect_to_region("eu-central-1") - conn.create_stack( - "test_stack", - template_body=template_json, - parameters=[ - ("DBInstanceIdentifier", "master_db"), - ("DBName", "my_db"), - ("DBUser", "my_user"), - ("DBPassword", "my_password"), - ("DBAllocatedStorage", "20"), - ("DBInstanceClass", "db.m1.medium"), - ("MultiAZ", "true"), - ], - ) - - rds_conn = boto.rds.connect_to_region("eu-central-1") - primary = rds_conn.get_all_dbinstances("master_db")[0] - - subnet_group_name = primary.subnet_group.name - subnet_group = rds_conn.get_all_db_subnet_groups(subnet_group_name)[0] - subnet_group.description.should.equal("my db subnet group") - - -@mock_autoscaling_deprecated() -@mock_iam_deprecated() -@mock_cloudformation_deprecated() -def test_iam_roles(): - iam_template = { - "AWSTemplateFormatVersion": "2010-09-09", - - "Resources": { - - "my-launch-config": { - "Properties": { - "IamInstanceProfile": {"Ref": "my-instance-profile-with-path"}, - "ImageId": "ami-1234abcd", - }, - "Type": "AWS::AutoScaling::LaunchConfiguration" - }, - "my-instance-profile-with-path": { - "Properties": { - "Path": "my-path", - "Roles": [{"Ref": "my-role-with-path"}], - }, - "Type": "AWS::IAM::InstanceProfile" - }, - 
"my-instance-profile-no-path": { - "Properties": { - "Roles": [{"Ref": "my-role-no-path"}], - }, - "Type": "AWS::IAM::InstanceProfile" - }, - "my-role-with-path": { - "Properties": { - "AssumeRolePolicyDocument": { - "Statement": [ - { - "Action": [ - "sts:AssumeRole" - ], - "Effect": "Allow", - "Principal": { - "Service": [ - "ec2.amazonaws.com" - ] - } - } - ] - }, - "Path": "my-path", - "Policies": [ - { - "PolicyDocument": { - "Statement": [ - { - "Action": [ - "ec2:CreateTags", - "ec2:DescribeInstances", - "ec2:DescribeTags" - ], - "Effect": "Allow", - "Resource": [ - "*" - ] - } - ], - "Version": "2012-10-17" - }, - "PolicyName": "EC2_Tags" - }, - { - "PolicyDocument": { - "Statement": [ - { - "Action": [ - "sqs:*" - ], - "Effect": "Allow", - "Resource": [ - "*" - ] - } - ], - "Version": "2012-10-17" - }, - "PolicyName": "SQS" - }, - ] - }, - "Type": "AWS::IAM::Role" - }, - "my-role-no-path": { - "Properties": { - "AssumeRolePolicyDocument": { - "Statement": [ - { - "Action": [ - "sts:AssumeRole" - ], - "Effect": "Allow", - "Principal": { - "Service": [ - "ec2.amazonaws.com" - ] - } - } - ] - }, - }, - "Type": "AWS::IAM::Role" - } - } - } - - iam_template_json = json.dumps(iam_template) - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "test_stack", - template_body=iam_template_json, - ) - - iam_conn = boto.iam.connect_to_region("us-west-1") - - role_results = iam_conn.list_roles()['list_roles_response'][ - 'list_roles_result']['roles'] - role_name_to_id = {} - for role_result in role_results: - role = iam_conn.get_role(role_result.role_name) - role.role_name.should.contain("my-role") - if 'with-path' in role.role_name: - role_name_to_id['with-path'] = role.role_id - role.path.should.equal("my-path") - else: - role_name_to_id['no-path'] = role.role_id - role.role_name.should.contain('no-path') - role.path.should.equal('/') - - instance_profile_responses = iam_conn.list_instance_profiles()[ - 
'list_instance_profiles_response']['list_instance_profiles_result']['instance_profiles'] - instance_profile_responses.should.have.length_of(2) - instance_profile_names = [] - - for instance_profile_response in instance_profile_responses: - instance_profile = iam_conn.get_instance_profile(instance_profile_response.instance_profile_name) - instance_profile_names.append(instance_profile.instance_profile_name) - instance_profile.instance_profile_name.should.contain( - "my-instance-profile") - if "with-path" in instance_profile.instance_profile_name: - instance_profile.path.should.equal("my-path") - instance_profile.role_id.should.equal(role_name_to_id['with-path']) - else: - instance_profile.instance_profile_name.should.contain('no-path') - instance_profile.role_id.should.equal(role_name_to_id['no-path']) - instance_profile.path.should.equal('/') - - autoscale_conn = boto.ec2.autoscale.connect_to_region("us-west-1") - launch_config = autoscale_conn.get_all_launch_configurations()[0] - launch_config.instance_profile_name.should.contain("my-instance-profile-with-path") - - stack = conn.describe_stacks()[0] - resources = stack.describe_resources() - instance_profile_resources = [ - resource for resource in resources if resource.resource_type == 'AWS::IAM::InstanceProfile'] - {ip.physical_resource_id for ip in instance_profile_resources}.should.equal(set(instance_profile_names)) - - role_resources = [ - resource for resource in resources if resource.resource_type == 'AWS::IAM::Role'] - {r.physical_resource_id for r in role_resources}.should.equal(set(role_name_to_id.values())) - - -@mock_ec2_deprecated() -@mock_cloudformation_deprecated() -def test_single_instance_with_ebs_volume(): - template_json = json.dumps(single_instance_with_ebs_volume.template) - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "test_stack", - template_body=template_json, - parameters=[("KeyName", "key_name")] - ) - - ec2_conn = 
boto.ec2.connect_to_region("us-west-1") - reservation = ec2_conn.get_all_instances()[0] - ec2_instance = reservation.instances[0] - - volumes = ec2_conn.get_all_volumes() - # Grab the mounted drive - volume = [ - volume for volume in volumes if volume.attach_data.device == '/dev/sdh'][0] - volume.volume_state().should.equal('in-use') - volume.attach_data.instance_id.should.equal(ec2_instance.id) - - stack = conn.describe_stacks()[0] - resources = stack.describe_resources() - ebs_volumes = [ - resource for resource in resources if resource.resource_type == 'AWS::EC2::Volume'] - ebs_volumes[0].physical_resource_id.should.equal(volume.id) - - -@mock_cloudformation_deprecated() -def test_create_template_without_required_param(): - template_json = json.dumps(single_instance_with_ebs_volume.template) - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack.when.called_with( - "test_stack", - template_body=template_json, - ).should.throw(BotoServerError) - - -@mock_ec2_deprecated() -@mock_cloudformation_deprecated() -def test_classic_eip(): - template_json = json.dumps(ec2_classic_eip.template) - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack("test_stack", template_body=template_json) - ec2_conn = boto.ec2.connect_to_region("us-west-1") - eip = ec2_conn.get_all_addresses()[0] - - stack = conn.describe_stacks()[0] - resources = stack.describe_resources() - cfn_eip = [ - resource for resource in resources if resource.resource_type == 'AWS::EC2::EIP'][0] - cfn_eip.physical_resource_id.should.equal(eip.public_ip) - - -@mock_ec2_deprecated() -@mock_cloudformation_deprecated() -def test_vpc_eip(): - template_json = json.dumps(vpc_eip.template) - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack("test_stack", template_body=template_json) - ec2_conn = boto.ec2.connect_to_region("us-west-1") - eip = ec2_conn.get_all_addresses()[0] - - stack = conn.describe_stacks()[0] - resources = 
stack.describe_resources() - cfn_eip = [ - resource for resource in resources if resource.resource_type == 'AWS::EC2::EIP'][0] - cfn_eip.physical_resource_id.should.equal(eip.public_ip) - - -@mock_ec2_deprecated() -@mock_cloudformation_deprecated() -def test_fn_join(): - template_json = json.dumps(fn_join.template) - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack("test_stack", template_body=template_json) - ec2_conn = boto.ec2.connect_to_region("us-west-1") - eip = ec2_conn.get_all_addresses()[0] - - stack = conn.describe_stacks()[0] - fn_join_output = stack.outputs[0] - fn_join_output.value.should.equal('test eip:{0}'.format(eip.public_ip)) - - -@mock_cloudformation_deprecated() -@mock_sqs_deprecated() -def test_conditional_resources(): - sqs_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Parameters": { - "EnvType": { - "Description": "Environment type.", - "Type": "String", - } - }, - "Conditions": { - "CreateQueue": {"Fn::Equals": [{"Ref": "EnvType"}, "prod"]} - }, - "Resources": { - "QueueGroup": { - "Condition": "CreateQueue", - "Type": "AWS::SQS::Queue", - "Properties": { - "QueueName": "my-queue", - "VisibilityTimeout": 60, - } - }, - }, - } - sqs_template_json = json.dumps(sqs_template) - - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "test_stack_without_queue", - template_body=sqs_template_json, - parameters=[("EnvType", "staging")], - ) - sqs_conn = boto.sqs.connect_to_region("us-west-1") - list(sqs_conn.get_all_queues()).should.have.length_of(0) - - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "test_stack_with_queue", - template_body=sqs_template_json, - parameters=[("EnvType", "prod")], - ) - sqs_conn = boto.sqs.connect_to_region("us-west-1") - list(sqs_conn.get_all_queues()).should.have.length_of(1) - - -@mock_cloudformation_deprecated() -@mock_ec2_deprecated() -def test_conditional_if_handling(): - dummy_template = { - 
"AWSTemplateFormatVersion": "2010-09-09", - "Conditions": { - "EnvEqualsPrd": { - "Fn::Equals": [ - { - "Ref": "ENV" - }, - "prd" - ] - } - }, - "Parameters": { - "ENV": { - "Default": "dev", - "Description": "Deployment environment for the stack (dev/prd)", - "Type": "String" - }, - }, - "Description": "Stack 1", - "Resources": { - "App1": { - "Properties": { - "ImageId": { - "Fn::If": [ - "EnvEqualsPrd", - "ami-00000000", - "ami-ffffffff" - ] - }, - }, - "Type": "AWS::EC2::Instance" - }, - } - } - dummy_template_json = json.dumps(dummy_template) - - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack('test_stack1', template_body=dummy_template_json) - ec2_conn = boto.ec2.connect_to_region("us-west-1") - reservation = ec2_conn.get_all_instances()[0] - ec2_instance = reservation.instances[0] - ec2_instance.image_id.should.equal("ami-ffffffff") - ec2_instance.terminate() - - conn = boto.cloudformation.connect_to_region("us-west-2") - conn.create_stack( - 'test_stack1', template_body=dummy_template_json, parameters=[("ENV", "prd")]) - ec2_conn = boto.ec2.connect_to_region("us-west-2") - reservation = ec2_conn.get_all_instances()[0] - ec2_instance = reservation.instances[0] - ec2_instance.image_id.should.equal("ami-00000000") - - -@mock_cloudformation_deprecated() -@mock_ec2_deprecated() -def test_cloudformation_mapping(): - dummy_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Mappings": { - "RegionMap": { - "us-east-1": {"32": "ami-6411e20d", "64": "ami-7a11e213"}, - "us-west-1": {"32": "ami-c9c7978c", "64": "ami-cfc7978a"}, - "eu-west-1": {"32": "ami-37c2f643", "64": "ami-31c2f645"}, - "ap-southeast-1": {"32": "ami-66f28c34", "64": "ami-60f28c32"}, - "ap-northeast-1": {"32": "ami-9c03a89d", "64": "ami-a003a8a1"} - } - }, - "Resources": { - "WebServer": { - "Type": "AWS::EC2::Instance", - "Properties": { - "ImageId": { - "Fn::FindInMap": ["RegionMap", {"Ref": "AWS::Region"}, "32"] - }, - "InstanceType": "m1.small" - }, - "Type": 
"AWS::EC2::Instance", - }, - }, - } - - dummy_template_json = json.dumps(dummy_template) - - conn = boto.cloudformation.connect_to_region("us-east-1") - conn.create_stack('test_stack1', template_body=dummy_template_json) - ec2_conn = boto.ec2.connect_to_region("us-east-1") - reservation = ec2_conn.get_all_instances()[0] - ec2_instance = reservation.instances[0] - ec2_instance.image_id.should.equal("ami-6411e20d") - - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack('test_stack1', template_body=dummy_template_json) - ec2_conn = boto.ec2.connect_to_region("us-west-1") - reservation = ec2_conn.get_all_instances()[0] - ec2_instance = reservation.instances[0] - ec2_instance.image_id.should.equal("ami-c9c7978c") - - -@mock_cloudformation_deprecated() -@mock_route53_deprecated() -def test_route53_roundrobin(): - route53_conn = boto.connect_route53() - - template_json = json.dumps(route53_roundrobin.template) - conn = boto.cloudformation.connect_to_region("us-west-1") - stack = conn.create_stack( - "test_stack", - template_body=template_json, - ) - - zones = route53_conn.get_all_hosted_zones()['ListHostedZonesResponse'][ - 'HostedZones'] - list(zones).should.have.length_of(1) - zone_id = zones[0]['Id'] - zone_id = zone_id.split('/') - zone_id = zone_id[2] - - rrsets = route53_conn.get_all_rrsets(zone_id) - rrsets.hosted_zone_id.should.equal(zone_id) - rrsets.should.have.length_of(2) - record_set1 = rrsets[0] - record_set1.name.should.equal('test_stack.us-west-1.my_zone.') - record_set1.identifier.should.equal("test_stack AWS") - record_set1.type.should.equal('CNAME') - record_set1.ttl.should.equal('900') - record_set1.weight.should.equal('3') - record_set1.resource_records[0].should.equal("aws.amazon.com") - - record_set2 = rrsets[1] - record_set2.name.should.equal('test_stack.us-west-1.my_zone.') - record_set2.identifier.should.equal("test_stack Amazon") - record_set2.type.should.equal('CNAME') - record_set2.ttl.should.equal('900') - 
record_set2.weight.should.equal('1') - record_set2.resource_records[0].should.equal("www.amazon.com") - - stack = conn.describe_stacks()[0] - output = stack.outputs[0] - output.key.should.equal('DomainName') - output.value.should.equal( - 'arn:aws:route53:::hostedzone/{0}'.format(zone_id)) - - -@mock_cloudformation_deprecated() -@mock_ec2_deprecated() -@mock_route53_deprecated() -def test_route53_ec2_instance_with_public_ip(): - route53_conn = boto.connect_route53() - ec2_conn = boto.ec2.connect_to_region("us-west-1") - - template_json = json.dumps(route53_ec2_instance_with_public_ip.template) - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "test_stack", - template_body=template_json, - ) - - instance_id = ec2_conn.get_all_reservations()[0].instances[0].id - - zones = route53_conn.get_all_hosted_zones()['ListHostedZonesResponse'][ - 'HostedZones'] - list(zones).should.have.length_of(1) - zone_id = zones[0]['Id'] - zone_id = zone_id.split('/') - zone_id = zone_id[2] - - rrsets = route53_conn.get_all_rrsets(zone_id) - rrsets.should.have.length_of(1) - - record_set1 = rrsets[0] - record_set1.name.should.equal('{0}.us-west-1.my_zone.'.format(instance_id)) - record_set1.identifier.should.equal(None) - record_set1.type.should.equal('A') - record_set1.ttl.should.equal('900') - record_set1.weight.should.equal(None) - record_set1.resource_records[0].should.equal("10.0.0.25") - - -@mock_cloudformation_deprecated() -@mock_route53_deprecated() -def test_route53_associate_health_check(): - route53_conn = boto.connect_route53() - - template_json = json.dumps(route53_health_check.template) - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "test_stack", - template_body=template_json, - ) - - checks = route53_conn.get_list_health_checks()['ListHealthChecksResponse'][ - 'HealthChecks'] - list(checks).should.have.length_of(1) - check = checks[0] - health_check_id = check['Id'] - config = check['HealthCheckConfig'] - 
config["FailureThreshold"].should.equal("3") - config["IPAddress"].should.equal("10.0.0.4") - config["Port"].should.equal("80") - config["RequestInterval"].should.equal("10") - config["ResourcePath"].should.equal("/") - config["Type"].should.equal("HTTP") - - zones = route53_conn.get_all_hosted_zones()['ListHostedZonesResponse'][ - 'HostedZones'] - list(zones).should.have.length_of(1) - zone_id = zones[0]['Id'] - zone_id = zone_id.split('/') - zone_id = zone_id[2] - - rrsets = route53_conn.get_all_rrsets(zone_id) - rrsets.should.have.length_of(1) - - record_set = rrsets[0] - record_set.health_check.should.equal(health_check_id) - - -@mock_cloudformation_deprecated() -@mock_route53_deprecated() -def test_route53_with_update(): - route53_conn = boto.connect_route53() - - template_json = json.dumps(route53_health_check.template) - cf_conn = boto.cloudformation.connect_to_region("us-west-1") - cf_conn.create_stack( - "test_stack", - template_body=template_json, - ) - - zones = route53_conn.get_all_hosted_zones()['ListHostedZonesResponse'][ - 'HostedZones'] - list(zones).should.have.length_of(1) - zone_id = zones[0]['Id'] - zone_id = zone_id.split('/') - zone_id = zone_id[2] - - rrsets = route53_conn.get_all_rrsets(zone_id) - rrsets.should.have.length_of(1) - - record_set = rrsets[0] - record_set.resource_records.should.equal(["my.example.com"]) - - route53_health_check.template['Resources']['myDNSRecord'][ - 'Properties']['ResourceRecords'] = ["my_other.example.com"] - template_json = json.dumps(route53_health_check.template) - cf_conn.update_stack( - "test_stack", - template_body=template_json, - ) - - zones = route53_conn.get_all_hosted_zones()['ListHostedZonesResponse'][ - 'HostedZones'] - list(zones).should.have.length_of(1) - zone_id = zones[0]['Id'] - zone_id = zone_id.split('/') - zone_id = zone_id[2] - - rrsets = route53_conn.get_all_rrsets(zone_id) - rrsets.should.have.length_of(1) - - record_set = rrsets[0] - 
record_set.resource_records.should.equal(["my_other.example.com"]) - - -@mock_cloudformation_deprecated() -@mock_sns_deprecated() -def test_sns_topic(): - dummy_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "MySNSTopic": { - "Type": "AWS::SNS::Topic", - "Properties": { - "Subscription": [ - {"Endpoint": "https://example.com", "Protocol": "https"}, - ], - "TopicName": "my_topics", - } - } - }, - "Outputs": { - "topic_name": { - "Value": {"Fn::GetAtt": ["MySNSTopic", "TopicName"]} - }, - "topic_arn": { - "Value": {"Ref": "MySNSTopic"} - }, - } - } - template_json = json.dumps(dummy_template) - conn = boto.cloudformation.connect_to_region("us-west-1") - stack = conn.create_stack( - "test_stack", - template_body=template_json, - ) - - sns_conn = boto.sns.connect_to_region("us-west-1") - topics = sns_conn.get_all_topics()["ListTopicsResponse"][ - "ListTopicsResult"]["Topics"] - topics.should.have.length_of(1) - topic_arn = topics[0]['TopicArn'] - topic_arn.should.contain("my_topics") - - subscriptions = sns_conn.get_all_subscriptions()["ListSubscriptionsResponse"][ - "ListSubscriptionsResult"]["Subscriptions"] - subscriptions.should.have.length_of(1) - subscription = subscriptions[0] - subscription["TopicArn"].should.equal(topic_arn) - subscription["Protocol"].should.equal("https") - subscription["SubscriptionArn"].should.contain(topic_arn) - subscription["Endpoint"].should.equal("https://example.com") - - stack = conn.describe_stacks()[0] - topic_name_output = [x for x in stack.outputs if x.key == 'topic_name'][0] - topic_name_output.value.should.equal("my_topics") - topic_arn_output = [x for x in stack.outputs if x.key == 'topic_arn'][0] - topic_arn_output.value.should.equal(topic_arn) - - -@mock_cloudformation_deprecated -@mock_ec2_deprecated -def test_vpc_gateway_attachment_creation_should_attach_itself_to_vpc(): - template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "internetgateway": { - "Type": 
"AWS::EC2::InternetGateway" - }, - "testvpc": { - "Type": "AWS::EC2::VPC", - "Properties": { - "CidrBlock": "10.0.0.0/16", - "EnableDnsHostnames": "true", - "EnableDnsSupport": "true", - "InstanceTenancy": "default" - }, - }, - "vpcgatewayattachment": { - "Type": "AWS::EC2::VPCGatewayAttachment", - "Properties": { - "InternetGatewayId": { - "Ref": "internetgateway" - }, - "VpcId": { - "Ref": "testvpc" - } - }, - }, - } - } - - template_json = json.dumps(template) - cf_conn = boto.cloudformation.connect_to_region("us-west-1") - cf_conn.create_stack( - "test_stack", - template_body=template_json, - ) - - vpc_conn = boto.vpc.connect_to_region("us-west-1") - vpc = vpc_conn.get_all_vpcs(filters={'cidrBlock': '10.0.0.0/16'})[0] - igws = vpc_conn.get_all_internet_gateways( - filters={'attachment.vpc-id': vpc.id} - ) - - igws.should.have.length_of(1) - - -@mock_cloudformation_deprecated -@mock_ec2_deprecated -def test_vpc_peering_creation(): - vpc_conn = boto.vpc.connect_to_region("us-west-1") - vpc_source = vpc_conn.create_vpc("10.0.0.0/16") - peer_vpc = vpc_conn.create_vpc("10.1.0.0/16") - template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "vpcpeeringconnection": { - "Type": "AWS::EC2::VPCPeeringConnection", - "Properties": { - "PeerVpcId": peer_vpc.id, - "VpcId": vpc_source.id, - } - }, - } - } - - template_json = json.dumps(template) - cf_conn = boto.cloudformation.connect_to_region("us-west-1") - cf_conn.create_stack( - "test_stack", - template_body=template_json, - ) - - peering_connections = vpc_conn.get_all_vpc_peering_connections() - peering_connections.should.have.length_of(1) - - -@mock_cloudformation_deprecated -@mock_ec2_deprecated -def test_multiple_security_group_ingress_separate_from_security_group_by_id(): - template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "test-security-group1": { - "Type": "AWS::EC2::SecurityGroup", - "Properties": { - "GroupDescription": "test security group", - "Tags": [ - { - "Key": 
"sg-name", - "Value": "sg1" - } - ] - }, - }, - "test-security-group2": { - "Type": "AWS::EC2::SecurityGroup", - "Properties": { - "GroupDescription": "test security group", - "Tags": [ - { - "Key": "sg-name", - "Value": "sg2" - } - ] - }, - }, - "test-sg-ingress": { - "Type": "AWS::EC2::SecurityGroupIngress", - "Properties": { - "GroupId": {"Ref": "test-security-group1"}, - "IpProtocol": "tcp", - "FromPort": "80", - "ToPort": "8080", - "SourceSecurityGroupId": {"Ref": "test-security-group2"}, - } - } - } - } - - template_json = json.dumps(template) - cf_conn = boto.cloudformation.connect_to_region("us-west-1") - cf_conn.create_stack( - "test_stack", - template_body=template_json, - ) - ec2_conn = boto.ec2.connect_to_region("us-west-1") - - security_group1 = ec2_conn.get_all_security_groups( - filters={"tag:sg-name": "sg1"})[0] - security_group2 = ec2_conn.get_all_security_groups( - filters={"tag:sg-name": "sg2"})[0] - - security_group1.rules.should.have.length_of(1) - security_group1.rules[0].grants.should.have.length_of(1) - security_group1.rules[0].grants[ - 0].group_id.should.equal(security_group2.id) - security_group1.rules[0].ip_protocol.should.equal('tcp') - security_group1.rules[0].from_port.should.equal('80') - security_group1.rules[0].to_port.should.equal('8080') - - -@mock_cloudformation_deprecated -@mock_ec2_deprecated -def test_security_group_ingress_separate_from_security_group_by_id(): - ec2_conn = boto.ec2.connect_to_region("us-west-1") - ec2_conn.create_security_group( - "test-security-group1", "test security group") - - template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "test-security-group2": { - "Type": "AWS::EC2::SecurityGroup", - "Properties": { - "GroupDescription": "test security group", - "Tags": [ - { - "Key": "sg-name", - "Value": "sg2" - } - ] - }, - }, - "test-sg-ingress": { - "Type": "AWS::EC2::SecurityGroupIngress", - "Properties": { - "GroupName": "test-security-group1", - "IpProtocol": "tcp", - "FromPort": 
"80", - "ToPort": "8080", - "SourceSecurityGroupId": {"Ref": "test-security-group2"}, - } - } - } - } - - template_json = json.dumps(template) - cf_conn = boto.cloudformation.connect_to_region("us-west-1") - cf_conn.create_stack( - "test_stack", - template_body=template_json, - ) - security_group1 = ec2_conn.get_all_security_groups( - groupnames=["test-security-group1"])[0] - security_group2 = ec2_conn.get_all_security_groups( - filters={"tag:sg-name": "sg2"})[0] - - security_group1.rules.should.have.length_of(1) - security_group1.rules[0].grants.should.have.length_of(1) - security_group1.rules[0].grants[ - 0].group_id.should.equal(security_group2.id) - security_group1.rules[0].ip_protocol.should.equal('tcp') - security_group1.rules[0].from_port.should.equal('80') - security_group1.rules[0].to_port.should.equal('8080') - - -@mock_cloudformation_deprecated -@mock_ec2_deprecated -def test_security_group_ingress_separate_from_security_group_by_id_using_vpc(): - vpc_conn = boto.vpc.connect_to_region("us-west-1") - vpc = vpc_conn.create_vpc("10.0.0.0/16") - - template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "test-security-group1": { - "Type": "AWS::EC2::SecurityGroup", - "Properties": { - "GroupDescription": "test security group", - "VpcId": vpc.id, - "Tags": [ - { - "Key": "sg-name", - "Value": "sg1" - } - ] - }, - }, - "test-security-group2": { - "Type": "AWS::EC2::SecurityGroup", - "Properties": { - "GroupDescription": "test security group", - "VpcId": vpc.id, - "Tags": [ - { - "Key": "sg-name", - "Value": "sg2" - } - ] - }, - }, - "test-sg-ingress": { - "Type": "AWS::EC2::SecurityGroupIngress", - "Properties": { - "GroupId": {"Ref": "test-security-group1"}, - "VpcId": vpc.id, - "IpProtocol": "tcp", - "FromPort": "80", - "ToPort": "8080", - "SourceSecurityGroupId": {"Ref": "test-security-group2"}, - } - } - } - } - - template_json = json.dumps(template) - cf_conn = boto.cloudformation.connect_to_region("us-west-1") - cf_conn.create_stack( - 
"test_stack", - template_body=template_json, - ) - security_group1 = vpc_conn.get_all_security_groups( - filters={"tag:sg-name": "sg1"})[0] - security_group2 = vpc_conn.get_all_security_groups( - filters={"tag:sg-name": "sg2"})[0] - - security_group1.rules.should.have.length_of(1) - security_group1.rules[0].grants.should.have.length_of(1) - security_group1.rules[0].grants[ - 0].group_id.should.equal(security_group2.id) - security_group1.rules[0].ip_protocol.should.equal('tcp') - security_group1.rules[0].from_port.should.equal('80') - security_group1.rules[0].to_port.should.equal('8080') - - -@mock_cloudformation_deprecated -@mock_ec2_deprecated -def test_security_group_with_update(): - vpc_conn = boto.vpc.connect_to_region("us-west-1") - vpc1 = vpc_conn.create_vpc("10.0.0.0/16") - - template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "test-security-group": { - "Type": "AWS::EC2::SecurityGroup", - "Properties": { - "GroupDescription": "test security group", - "VpcId": vpc1.id, - "Tags": [ - { - "Key": "sg-name", - "Value": "sg" - } - ] - }, - }, - } - } - - template_json = json.dumps(template) - cf_conn = boto.cloudformation.connect_to_region("us-west-1") - cf_conn.create_stack( - "test_stack", - template_body=template_json, - ) - security_group = vpc_conn.get_all_security_groups( - filters={"tag:sg-name": "sg"})[0] - security_group.vpc_id.should.equal(vpc1.id) - - vpc2 = vpc_conn.create_vpc("10.1.0.0/16") - template['Resources'][ - 'test-security-group']['Properties']['VpcId'] = vpc2.id - template_json = json.dumps(template) - cf_conn.update_stack( - "test_stack", - template_body=template_json, - ) - security_group = vpc_conn.get_all_security_groups( - filters={"tag:sg-name": "sg"})[0] - security_group.vpc_id.should.equal(vpc2.id) - - -@mock_cloudformation_deprecated -@mock_ec2_deprecated -def test_subnets_should_be_created_with_availability_zone(): - vpc_conn = boto.vpc.connect_to_region('us-west-1') - vpc = 
vpc_conn.create_vpc("10.0.0.0/16") - - subnet_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "testSubnet": { - "Type": "AWS::EC2::Subnet", - "Properties": { - "VpcId": vpc.id, - "CidrBlock": "10.0.0.0/24", - "AvailabilityZone": "us-west-1b", - } - } - } - } - cf_conn = boto.cloudformation.connect_to_region("us-west-1") - template_json = json.dumps(subnet_template) - cf_conn.create_stack( - "test_stack", - template_body=template_json, - ) - subnet = vpc_conn.get_all_subnets(filters={'cidrBlock': '10.0.0.0/24'})[0] - subnet.availability_zone.should.equal('us-west-1b') - - -@mock_cloudformation_deprecated -@mock_datapipeline_deprecated -def test_datapipeline(): - dp_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "dataPipeline": { - "Properties": { - "Activate": "true", - "Name": "testDataPipeline", - "PipelineObjects": [ - { - "Fields": [ - { - "Key": "failureAndRerunMode", - "StringValue": "CASCADE" - }, - { - "Key": "scheduleType", - "StringValue": "cron" - }, - { - "Key": "schedule", - "RefValue": "DefaultSchedule" - }, - { - "Key": "pipelineLogUri", - "StringValue": "s3://bucket/logs" - }, - { - "Key": "type", - "StringValue": "Default" - }, - ], - "Id": "Default", - "Name": "Default" - }, - { - "Fields": [ - { - "Key": "startDateTime", - "StringValue": "1970-01-01T01:00:00" - }, - { - "Key": "period", - "StringValue": "1 Day" - }, - { - "Key": "type", - "StringValue": "Schedule" - } - ], - "Id": "DefaultSchedule", - "Name": "RunOnce" - } - ], - "PipelineTags": [] - }, - "Type": "AWS::DataPipeline::Pipeline" - } - } - } - cf_conn = boto.cloudformation.connect_to_region("us-east-1") - template_json = json.dumps(dp_template) - stack_id = cf_conn.create_stack( - "test_stack", - template_body=template_json, - ) - - dp_conn = boto.datapipeline.connect_to_region('us-east-1') - data_pipelines = dp_conn.list_pipelines() - - data_pipelines['pipelineIdList'].should.have.length_of(1) - 
data_pipelines['pipelineIdList'][0][ - 'name'].should.equal('testDataPipeline') - - stack_resources = cf_conn.list_stack_resources(stack_id) - stack_resources.should.have.length_of(1) - stack_resources[0].physical_resource_id.should.equal( - data_pipelines['pipelineIdList'][0]['id']) - - -@mock_cloudformation -@mock_lambda -def test_lambda_function(): - # switch this to python as backend lambda only supports python execution. - lambda_code = """ -def lambda_handler(event, context): - return (event, context) -""" - template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "lambdaTest": { - "Type": "AWS::Lambda::Function", - "Properties": { - "Code": { - # CloudFormation expects a string as ZipFile, not a ZIP file base64-encoded - "ZipFile": {"Fn::Join": ["\n", lambda_code.splitlines()]} - }, - "Handler": "lambda_function.handler", - "Description": "Test function", - "MemorySize": 128, - "Role": "test-role", - "Runtime": "python2.7" - } - } - } - } - - template_json = json.dumps(template) - cf_conn = boto3.client('cloudformation', 'us-east-1') - cf_conn.create_stack( - StackName="test_stack", - TemplateBody=template_json, - ) - - conn = boto3.client('lambda', 'us-east-1') - result = conn.list_functions() - result['Functions'].should.have.length_of(1) - result['Functions'][0]['Description'].should.equal('Test function') - result['Functions'][0]['Handler'].should.equal('lambda_function.handler') - result['Functions'][0]['MemorySize'].should.equal(128) - result['Functions'][0]['Role'].should.equal('test-role') - result['Functions'][0]['Runtime'].should.equal('python2.7') - - -@mock_cloudformation -@mock_ec2 -def test_nat_gateway(): - ec2_conn = boto3.client('ec2', 'us-east-1') - vpc_id = ec2_conn.create_vpc(CidrBlock="10.0.0.0/16")['Vpc']['VpcId'] - subnet_id = ec2_conn.create_subnet( - CidrBlock='10.0.1.0/24', VpcId=vpc_id)['Subnet']['SubnetId'] - route_table_id = ec2_conn.create_route_table( - VpcId=vpc_id)['RouteTable']['RouteTableId'] - - template 
= { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "NAT": { - "DependsOn": "vpcgatewayattachment", - "Type": "AWS::EC2::NatGateway", - "Properties": { - "AllocationId": {"Fn::GetAtt": ["EIP", "AllocationId"]}, - "SubnetId": subnet_id - } - }, - "EIP": { - "Type": "AWS::EC2::EIP", - "Properties": { - "Domain": "vpc" - } - }, - "Route": { - "Type": "AWS::EC2::Route", - "Properties": { - "RouteTableId": route_table_id, - "DestinationCidrBlock": "0.0.0.0/0", - "NatGatewayId": {"Ref": "NAT"} - } - }, - "internetgateway": { - "Type": "AWS::EC2::InternetGateway" - }, - "vpcgatewayattachment": { - "Type": "AWS::EC2::VPCGatewayAttachment", - "Properties": { - "InternetGatewayId": { - "Ref": "internetgateway" - }, - "VpcId": vpc_id, - }, - } - } - } - - cf_conn = boto3.client('cloudformation', 'us-east-1') - cf_conn.create_stack( - StackName="test_stack", - TemplateBody=json.dumps(template), - ) - - result = ec2_conn.describe_nat_gateways() - - result['NatGateways'].should.have.length_of(1) - result['NatGateways'][0]['VpcId'].should.equal(vpc_id) - result['NatGateways'][0]['SubnetId'].should.equal(subnet_id) - result['NatGateways'][0]['State'].should.equal('available') - - -@mock_cloudformation() -@mock_kms() -def test_stack_kms(): - kms_key_template = { - 'Resources': { - 'kmskey': { - 'Properties': { - 'Description': 'A kms key', - 'EnableKeyRotation': True, - 'Enabled': True, - 'KeyPolicy': 'a policy', - }, - 'Type': 'AWS::KMS::Key' - } - } - } - kms_key_template_json = json.dumps(kms_key_template) - - cf_conn = boto3.client('cloudformation', 'us-east-1') - cf_conn.create_stack( - StackName='test_stack', - TemplateBody=kms_key_template_json, - ) - - kms_conn = boto3.client('kms', 'us-east-1') - keys = kms_conn.list_keys()['Keys'] - len(keys).should.equal(1) - result = kms_conn.describe_key(KeyId=keys[0]['KeyId']) - - result['KeyMetadata']['Enabled'].should.equal(True) - result['KeyMetadata']['KeyUsage'].should.equal('ENCRYPT_DECRYPT') - - 
-@mock_cloudformation() -@mock_ec2() -def test_stack_spot_fleet(): - conn = boto3.client('ec2', 'us-east-1') - - vpc = conn.create_vpc(CidrBlock="10.0.0.0/8")['Vpc'] - subnet = conn.create_subnet( - VpcId=vpc['VpcId'], CidrBlock='10.0.0.0/16', AvailabilityZone='us-east-1a')['Subnet'] - subnet_id = subnet['SubnetId'] - - spot_fleet_template = { - 'Resources': { - "SpotFleet": { - "Type": "AWS::EC2::SpotFleet", - "Properties": { - "SpotFleetRequestConfigData": { - "IamFleetRole": "arn:aws:iam::123456789012:role/fleet", - "SpotPrice": "0.12", - "TargetCapacity": 6, - "AllocationStrategy": "diversified", - "LaunchSpecifications": [ - { - "EbsOptimized": "false", - "InstanceType": 't2.small', - "ImageId": "ami-1234", - "SubnetId": subnet_id, - "WeightedCapacity": "2", - "SpotPrice": "0.13", - }, - { - "EbsOptimized": "true", - "InstanceType": 't2.large', - "ImageId": "ami-1234", - "Monitoring": {"Enabled": "true"}, - "SecurityGroups": [{"GroupId": "sg-123"}], - "SubnetId": subnet_id, - "IamInstanceProfile": {"Arn": "arn:aws:iam::123456789012:role/fleet"}, - "WeightedCapacity": "4", - "SpotPrice": "10.00", - } - ] - } - } - } - } - } - spot_fleet_template_json = json.dumps(spot_fleet_template) - - cf_conn = boto3.client('cloudformation', 'us-east-1') - stack_id = cf_conn.create_stack( - StackName='test_stack', - TemplateBody=spot_fleet_template_json, - )['StackId'] - - stack_resources = cf_conn.list_stack_resources(StackName=stack_id) - stack_resources['StackResourceSummaries'].should.have.length_of(1) - spot_fleet_id = stack_resources[ - 'StackResourceSummaries'][0]['PhysicalResourceId'] - - spot_fleet_requests = conn.describe_spot_fleet_requests( - SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'] - len(spot_fleet_requests).should.equal(1) - spot_fleet_request = spot_fleet_requests[0] - spot_fleet_request['SpotFleetRequestState'].should.equal("active") - spot_fleet_config = spot_fleet_request['SpotFleetRequestConfig'] - - 
spot_fleet_config['SpotPrice'].should.equal('0.12') - spot_fleet_config['TargetCapacity'].should.equal(6) - spot_fleet_config['IamFleetRole'].should.equal( - 'arn:aws:iam::123456789012:role/fleet') - spot_fleet_config['AllocationStrategy'].should.equal('diversified') - spot_fleet_config['FulfilledCapacity'].should.equal(6.0) - - len(spot_fleet_config['LaunchSpecifications']).should.equal(2) - launch_spec = spot_fleet_config['LaunchSpecifications'][0] - - launch_spec['EbsOptimized'].should.equal(False) - launch_spec['ImageId'].should.equal("ami-1234") - launch_spec['InstanceType'].should.equal("t2.small") - launch_spec['SubnetId'].should.equal(subnet_id) - launch_spec['SpotPrice'].should.equal("0.13") - launch_spec['WeightedCapacity'].should.equal(2.0) - - -@mock_cloudformation() -@mock_ec2() -def test_stack_spot_fleet_should_figure_out_default_price(): - conn = boto3.client('ec2', 'us-east-1') - - vpc = conn.create_vpc(CidrBlock="10.0.0.0/8")['Vpc'] - subnet = conn.create_subnet( - VpcId=vpc['VpcId'], CidrBlock='10.0.0.0/16', AvailabilityZone='us-east-1a')['Subnet'] - subnet_id = subnet['SubnetId'] - - spot_fleet_template = { - 'Resources': { - "SpotFleet1": { - "Type": "AWS::EC2::SpotFleet", - "Properties": { - "SpotFleetRequestConfigData": { - "IamFleetRole": "arn:aws:iam::123456789012:role/fleet", - "TargetCapacity": 6, - "AllocationStrategy": "diversified", - "LaunchSpecifications": [ - { - "EbsOptimized": "false", - "InstanceType": 't2.small', - "ImageId": "ami-1234", - "SubnetId": subnet_id, - "WeightedCapacity": "2", - }, - { - "EbsOptimized": "true", - "InstanceType": 't2.large', - "ImageId": "ami-1234", - "Monitoring": {"Enabled": "true"}, - "SecurityGroups": [{"GroupId": "sg-123"}], - "SubnetId": subnet_id, - "IamInstanceProfile": {"Arn": "arn:aws:iam::123456789012:role/fleet"}, - "WeightedCapacity": "4", - } - ] - } - } - } - } - } - spot_fleet_template_json = json.dumps(spot_fleet_template) - - cf_conn = boto3.client('cloudformation', 'us-east-1') - 
stack_id = cf_conn.create_stack( - StackName='test_stack', - TemplateBody=spot_fleet_template_json, - )['StackId'] - - stack_resources = cf_conn.list_stack_resources(StackName=stack_id) - stack_resources['StackResourceSummaries'].should.have.length_of(1) - spot_fleet_id = stack_resources[ - 'StackResourceSummaries'][0]['PhysicalResourceId'] - - spot_fleet_requests = conn.describe_spot_fleet_requests( - SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'] - len(spot_fleet_requests).should.equal(1) - spot_fleet_request = spot_fleet_requests[0] - spot_fleet_request['SpotFleetRequestState'].should.equal("active") - spot_fleet_config = spot_fleet_request['SpotFleetRequestConfig'] - - assert 'SpotPrice' not in spot_fleet_config - len(spot_fleet_config['LaunchSpecifications']).should.equal(2) - launch_spec1 = spot_fleet_config['LaunchSpecifications'][0] - launch_spec2 = spot_fleet_config['LaunchSpecifications'][1] - - assert 'SpotPrice' not in launch_spec1 - assert 'SpotPrice' not in launch_spec2 - - -@mock_ec2 -@mock_elbv2 -@mock_cloudformation -def test_stack_elbv2_resources_integration(): - alb_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Outputs": { - "albdns": { - "Description": "Load balanacer DNS", - "Value": {"Fn::GetAtt": ["alb", "DNSName"]}, - }, - "albname": { - "Description": "Load balancer name", - "Value": {"Fn::GetAtt": ["alb", "LoadBalancerName"]}, - }, - "canonicalhostedzoneid": { - "Description": "Load balancer canonical hosted zone ID", - "Value": {"Fn::GetAtt": ["alb", "CanonicalHostedZoneID"]}, - }, - }, - "Resources": { - "alb": { - "Type": "AWS::ElasticLoadBalancingV2::LoadBalancer", - "Properties": { - "Name": "myelbv2", - "Scheme": "internet-facing", - "Subnets": [{ - "Ref": "mysubnet", - }], - "SecurityGroups": [{ - "Ref": "mysg", - }], - "Type": "application", - "IpAddressType": "ipv4", - } - }, - "mytargetgroup1": { - "Type": "AWS::ElasticLoadBalancingV2::TargetGroup", - "Properties": { - "HealthCheckIntervalSeconds": 
30, - "HealthCheckPath": "/status", - "HealthCheckPort": 80, - "HealthCheckProtocol": "HTTP", - "HealthCheckTimeoutSeconds": 5, - "HealthyThresholdCount": 30, - "UnhealthyThresholdCount": 5, - "Matcher": { - "HttpCode": "200,201" - }, - "Name": "mytargetgroup1", - "Port": 80, - "Protocol": "HTTP", - "TargetType": "instance", - "Targets": [{ - "Id": { - "Ref": "ec2instance", - "Port": 80, - }, - }], - "VpcId": { - "Ref": "myvpc", - } - } - }, - "mytargetgroup2": { - "Type": "AWS::ElasticLoadBalancingV2::TargetGroup", - "Properties": { - "HealthCheckIntervalSeconds": 30, - "HealthCheckPath": "/status", - "HealthCheckPort": 8080, - "HealthCheckProtocol": "HTTP", - "HealthCheckTimeoutSeconds": 5, - "HealthyThresholdCount": 30, - "UnhealthyThresholdCount": 5, - "Name": "mytargetgroup2", - "Port": 8080, - "Protocol": "HTTP", - "TargetType": "instance", - "Targets": [{ - "Id": { - "Ref": "ec2instance", - "Port": 8080, - }, - }], - "VpcId": { - "Ref": "myvpc", - } - } - }, - "listener": { - "Type": "AWS::ElasticLoadBalancingV2::Listener", - "Properties": { - "DefaultActions": [{ - "Type": "forward", - "TargetGroupArn": {"Ref": "mytargetgroup1"} - }], - "LoadBalancerArn": {"Ref": "alb"}, - "Port": "80", - "Protocol": "HTTP" - } - }, - "myvpc": { - "Type": "AWS::EC2::VPC", - "Properties": { - "CidrBlock": "10.0.0.0/16", - } - }, - "mysubnet": { - "Type": "AWS::EC2::Subnet", - "Properties": { - "CidrBlock": "10.0.0.0/27", - "VpcId": {"Ref": "myvpc"}, - } - }, - "mysg": { - "Type": "AWS::EC2::SecurityGroup", - "Properties": { - "GroupName": "mysg", - "GroupDescription": "test security group", - "VpcId": {"Ref": "myvpc"} - } - }, - "ec2instance": { - "Type": "AWS::EC2::Instance", - "Properties": { - "ImageId": "ami-1234abcd", - "UserData": "some user data", - } - }, - }, - } - alb_template_json = json.dumps(alb_template) - - cfn_conn = boto3.client("cloudformation", "us-west-1") - cfn_conn.create_stack( - StackName="elb_stack", - TemplateBody=alb_template_json, - ) - - 
elbv2_conn = boto3.client("elbv2", "us-west-1") - - load_balancers = elbv2_conn.describe_load_balancers()['LoadBalancers'] - len(load_balancers).should.equal(1) - load_balancers[0]['LoadBalancerName'].should.equal('myelbv2') - load_balancers[0]['Scheme'].should.equal('internet-facing') - load_balancers[0]['Type'].should.equal('application') - load_balancers[0]['IpAddressType'].should.equal('ipv4') - - target_groups = sorted( - elbv2_conn.describe_target_groups()['TargetGroups'], - key=lambda tg: tg['TargetGroupName']) # sort to do comparison with indexes - len(target_groups).should.equal(2) - target_groups[0]['HealthCheckIntervalSeconds'].should.equal(30) - target_groups[0]['HealthCheckPath'].should.equal('/status') - target_groups[0]['HealthCheckPort'].should.equal('80') - target_groups[0]['HealthCheckProtocol'].should.equal('HTTP') - target_groups[0]['HealthCheckTimeoutSeconds'].should.equal(5) - target_groups[0]['HealthyThresholdCount'].should.equal(30) - target_groups[0]['UnhealthyThresholdCount'].should.equal(5) - target_groups[0]['Matcher'].should.equal({'HttpCode': '200,201'}) - target_groups[0]['TargetGroupName'].should.equal('mytargetgroup1') - target_groups[0]['Port'].should.equal(80) - target_groups[0]['Protocol'].should.equal('HTTP') - target_groups[0]['TargetType'].should.equal('instance') - - target_groups[1]['HealthCheckIntervalSeconds'].should.equal(30) - target_groups[1]['HealthCheckPath'].should.equal('/status') - target_groups[1]['HealthCheckPort'].should.equal('8080') - target_groups[1]['HealthCheckProtocol'].should.equal('HTTP') - target_groups[1]['HealthCheckTimeoutSeconds'].should.equal(5) - target_groups[1]['HealthyThresholdCount'].should.equal(30) - target_groups[1]['UnhealthyThresholdCount'].should.equal(5) - target_groups[1]['Matcher'].should.equal({'HttpCode': '200'}) - target_groups[1]['TargetGroupName'].should.equal('mytargetgroup2') - target_groups[1]['Port'].should.equal(8080) - target_groups[1]['Protocol'].should.equal('HTTP') - 
target_groups[1]['TargetType'].should.equal('instance') - - listeners = elbv2_conn.describe_listeners(LoadBalancerArn=load_balancers[0]['LoadBalancerArn'])['Listeners'] - len(listeners).should.equal(1) - listeners[0]['LoadBalancerArn'].should.equal(load_balancers[0]['LoadBalancerArn']) - listeners[0]['Port'].should.equal(80) - listeners[0]['Protocol'].should.equal('HTTP') - listeners[0]['DefaultActions'].should.equal([{ - "Type": "forward", - "TargetGroupArn": target_groups[0]['TargetGroupArn'] - }]) - - # test outputs - stacks = cfn_conn.describe_stacks(StackName='elb_stack')['Stacks'] - len(stacks).should.equal(1) - - dns = list(filter(lambda item: item['OutputKey'] == 'albdns', stacks[0]['Outputs']))[0] - name = list(filter(lambda item: item['OutputKey'] == 'albname', stacks[0]['Outputs']))[0] - - dns['OutputValue'].should.equal(load_balancers[0]['DNSName']) - name['OutputValue'].should.equal(load_balancers[0]['LoadBalancerName']) +from __future__ import unicode_literals +import json + +import base64 +import boto +import boto.cloudformation +import boto.datapipeline +import boto.ec2 +import boto.ec2.autoscale +import boto.ec2.elb +from boto.exception import BotoServerError +import boto.iam +import boto.redshift +import boto.sns +import boto.sqs +import boto.vpc +import boto3 +import sure # noqa + +from moto import ( + mock_autoscaling_deprecated, + mock_cloudformation, + mock_cloudformation_deprecated, + mock_datapipeline_deprecated, + mock_ec2, + mock_ec2_deprecated, + mock_elb, + mock_elb_deprecated, + mock_iam_deprecated, + mock_kms, + mock_lambda, + mock_rds_deprecated, + mock_rds2, + mock_rds2_deprecated, + mock_redshift, + mock_redshift_deprecated, + mock_route53_deprecated, + mock_sns_deprecated, + mock_sqs, + mock_sqs_deprecated, + mock_elbv2) + +from .fixtures import ( + ec2_classic_eip, + fn_join, + rds_mysql_with_db_parameter_group, + rds_mysql_with_read_replica, + redshift, + route53_ec2_instance_with_public_ip, + route53_health_check, + 
route53_roundrobin, + single_instance_with_ebs_volume, + vpc_eip, + vpc_single_instance_in_subnet, +) + + +@mock_cloudformation_deprecated() +def test_stack_sqs_integration(): + sqs_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "QueueGroup": { + + "Type": "AWS::SQS::Queue", + "Properties": { + "QueueName": "my-queue", + "VisibilityTimeout": 60, + } + }, + }, + } + sqs_template_json = json.dumps(sqs_template) + + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "test_stack", + template_body=sqs_template_json, + ) + + stack = conn.describe_stacks()[0] + queue = stack.describe_resources()[0] + queue.resource_type.should.equal('AWS::SQS::Queue') + queue.logical_resource_id.should.equal("QueueGroup") + queue.physical_resource_id.should.equal("my-queue") + + +@mock_cloudformation_deprecated() +def test_stack_list_resources(): + sqs_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "QueueGroup": { + + "Type": "AWS::SQS::Queue", + "Properties": { + "QueueName": "my-queue", + "VisibilityTimeout": 60, + } + }, + }, + } + sqs_template_json = json.dumps(sqs_template) + + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "test_stack", + template_body=sqs_template_json, + ) + + resources = conn.list_stack_resources("test_stack") + assert len(resources) == 1 + queue = resources[0] + queue.resource_type.should.equal('AWS::SQS::Queue') + queue.logical_resource_id.should.equal("QueueGroup") + queue.physical_resource_id.should.equal("my-queue") + + +@mock_cloudformation_deprecated() +@mock_sqs_deprecated() +def test_update_stack(): + sqs_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "QueueGroup": { + + "Type": "AWS::SQS::Queue", + "Properties": { + "QueueName": "my-queue", + "VisibilityTimeout": 60, + } + }, + }, + } + sqs_template_json = json.dumps(sqs_template) + + conn = boto.cloudformation.connect_to_region("us-west-1") + 
conn.create_stack( + "test_stack", + template_body=sqs_template_json, + ) + + sqs_conn = boto.sqs.connect_to_region("us-west-1") + queues = sqs_conn.get_all_queues() + queues.should.have.length_of(1) + queues[0].get_attributes('VisibilityTimeout')[ + 'VisibilityTimeout'].should.equal('60') + + sqs_template['Resources']['QueueGroup'][ + 'Properties']['VisibilityTimeout'] = 100 + sqs_template_json = json.dumps(sqs_template) + conn.update_stack("test_stack", sqs_template_json) + + queues = sqs_conn.get_all_queues() + queues.should.have.length_of(1) + queues[0].get_attributes('VisibilityTimeout')[ + 'VisibilityTimeout'].should.equal('100') + + +@mock_cloudformation_deprecated() +@mock_sqs_deprecated() +def test_update_stack_and_remove_resource(): + sqs_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "QueueGroup": { + + "Type": "AWS::SQS::Queue", + "Properties": { + "QueueName": "my-queue", + "VisibilityTimeout": 60, + } + }, + }, + } + sqs_template_json = json.dumps(sqs_template) + + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "test_stack", + template_body=sqs_template_json, + ) + + sqs_conn = boto.sqs.connect_to_region("us-west-1") + queues = sqs_conn.get_all_queues() + queues.should.have.length_of(1) + + sqs_template['Resources'].pop('QueueGroup') + sqs_template_json = json.dumps(sqs_template) + conn.update_stack("test_stack", sqs_template_json) + + queues = sqs_conn.get_all_queues() + queues.should.have.length_of(0) + + +@mock_cloudformation_deprecated() +@mock_sqs_deprecated() +def test_update_stack_and_add_resource(): + sqs_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": {}, + } + sqs_template_json = json.dumps(sqs_template) + + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "test_stack", + template_body=sqs_template_json, + ) + + sqs_conn = boto.sqs.connect_to_region("us-west-1") + queues = sqs_conn.get_all_queues() + 
queues.should.have.length_of(0) + + sqs_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "QueueGroup": { + + "Type": "AWS::SQS::Queue", + "Properties": { + "QueueName": "my-queue", + "VisibilityTimeout": 60, + } + }, + }, + } + sqs_template_json = json.dumps(sqs_template) + conn.update_stack("test_stack", sqs_template_json) + + queues = sqs_conn.get_all_queues() + queues.should.have.length_of(1) + + +@mock_ec2_deprecated() +@mock_cloudformation_deprecated() +def test_stack_ec2_integration(): + ec2_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "WebServerGroup": { + "Type": "AWS::EC2::Instance", + "Properties": { + "ImageId": "ami-1234abcd", + "UserData": "some user data", + } + }, + }, + } + ec2_template_json = json.dumps(ec2_template) + + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "ec2_stack", + template_body=ec2_template_json, + ) + + ec2_conn = boto.ec2.connect_to_region("us-west-1") + reservation = ec2_conn.get_all_instances()[0] + ec2_instance = reservation.instances[0] + + stack = conn.describe_stacks()[0] + instance = stack.describe_resources()[0] + instance.resource_type.should.equal('AWS::EC2::Instance') + instance.logical_resource_id.should.contain("WebServerGroup") + instance.physical_resource_id.should.equal(ec2_instance.id) + + +@mock_ec2_deprecated() +@mock_elb_deprecated() +@mock_cloudformation_deprecated() +def test_stack_elb_integration_with_attached_ec2_instances(): + elb_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "MyELB": { + "Type": "AWS::ElasticLoadBalancing::LoadBalancer", + "Properties": { + "Instances": [{"Ref": "Ec2Instance1"}], + "LoadBalancerName": "test-elb", + "AvailabilityZones": ['us-east-1'], + "Listeners": [ + { + "InstancePort": "80", + "LoadBalancerPort": "80", + "Protocol": "HTTP", + } + ], + } + }, + "Ec2Instance1": { + "Type": "AWS::EC2::Instance", + "Properties": { + "ImageId": "ami-1234abcd", + 
"UserData": "some user data", + } + }, + }, + } + elb_template_json = json.dumps(elb_template) + + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "elb_stack", + template_body=elb_template_json, + ) + + elb_conn = boto.ec2.elb.connect_to_region("us-west-1") + load_balancer = elb_conn.get_all_load_balancers()[0] + + ec2_conn = boto.ec2.connect_to_region("us-west-1") + reservation = ec2_conn.get_all_instances()[0] + ec2_instance = reservation.instances[0] + + load_balancer.instances[0].id.should.equal(ec2_instance.id) + list(load_balancer.availability_zones).should.equal(['us-east-1']) + + +@mock_elb_deprecated() +@mock_cloudformation_deprecated() +def test_stack_elb_integration_with_health_check(): + elb_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "MyELB": { + "Type": "AWS::ElasticLoadBalancing::LoadBalancer", + "Properties": { + "LoadBalancerName": "test-elb", + "AvailabilityZones": ['us-west-1'], + "HealthCheck": { + "HealthyThreshold": "3", + "Interval": "5", + "Target": "HTTP:80/healthcheck", + "Timeout": "4", + "UnhealthyThreshold": "2", + }, + "Listeners": [ + { + "InstancePort": "80", + "LoadBalancerPort": "80", + "Protocol": "HTTP", + } + ], + } + }, + }, + } + elb_template_json = json.dumps(elb_template) + + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "elb_stack", + template_body=elb_template_json, + ) + + elb_conn = boto.ec2.elb.connect_to_region("us-west-1") + load_balancer = elb_conn.get_all_load_balancers()[0] + health_check = load_balancer.health_check + + health_check.healthy_threshold.should.equal(3) + health_check.interval.should.equal(5) + health_check.target.should.equal("HTTP:80/healthcheck") + health_check.timeout.should.equal(4) + health_check.unhealthy_threshold.should.equal(2) + + +@mock_elb_deprecated() +@mock_cloudformation_deprecated() +def test_stack_elb_integration_with_update(): + elb_template = { + "AWSTemplateFormatVersion": 
"2010-09-09", + "Resources": { + "MyELB": { + "Type": "AWS::ElasticLoadBalancing::LoadBalancer", + "Properties": { + "LoadBalancerName": "test-elb", + "AvailabilityZones": ['us-west-1a'], + "Listeners": [ + { + "InstancePort": "80", + "LoadBalancerPort": "80", + "Protocol": "HTTP", + } + ], + "Policies": {"Ref": "AWS::NoValue"}, + } + }, + }, + } + elb_template_json = json.dumps(elb_template) + + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "elb_stack", + template_body=elb_template_json, + ) + + elb_conn = boto.ec2.elb.connect_to_region("us-west-1") + load_balancer = elb_conn.get_all_load_balancers()[0] + load_balancer.availability_zones[0].should.equal('us-west-1a') + + elb_template['Resources']['MyELB']['Properties'][ + 'AvailabilityZones'] = ['us-west-1b'] + elb_template_json = json.dumps(elb_template) + conn.update_stack( + "elb_stack", + template_body=elb_template_json, + ) + load_balancer = elb_conn.get_all_load_balancers()[0] + load_balancer.availability_zones[0].should.equal('us-west-1b') + + +@mock_ec2_deprecated() +@mock_redshift_deprecated() +@mock_cloudformation_deprecated() +def test_redshift_stack(): + redshift_template_json = json.dumps(redshift.template) + + vpc_conn = boto.vpc.connect_to_region("us-west-2") + conn = boto.cloudformation.connect_to_region("us-west-2") + conn.create_stack( + "redshift_stack", + template_body=redshift_template_json, + parameters=[ + ("DatabaseName", "mydb"), + ("ClusterType", "multi-node"), + ("NumberOfNodes", 2), + ("NodeType", "dw1.xlarge"), + ("MasterUsername", "myuser"), + ("MasterUserPassword", "mypass"), + ("InboundTraffic", "10.0.0.1/16"), + ("PortNumber", 5439), + ] + ) + + redshift_conn = boto.redshift.connect_to_region("us-west-2") + + cluster_res = redshift_conn.describe_clusters() + clusters = cluster_res['DescribeClustersResponse'][ + 'DescribeClustersResult']['Clusters'] + clusters.should.have.length_of(1) + cluster = clusters[0] + cluster['DBName'].should.equal("mydb") 
+ cluster['NumberOfNodes'].should.equal(2) + cluster['NodeType'].should.equal("dw1.xlarge") + cluster['MasterUsername'].should.equal("myuser") + cluster['Port'].should.equal(5439) + cluster['VpcSecurityGroups'].should.have.length_of(1) + security_group_id = cluster['VpcSecurityGroups'][0]['VpcSecurityGroupId'] + + groups = vpc_conn.get_all_security_groups(group_ids=[security_group_id]) + groups.should.have.length_of(1) + group = groups[0] + group.rules.should.have.length_of(1) + group.rules[0].grants[0].cidr_ip.should.equal("10.0.0.1/16") + + +@mock_ec2_deprecated() +@mock_cloudformation_deprecated() +def test_stack_security_groups(): + security_group_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "my-security-group": { + "Type": "AWS::EC2::SecurityGroup", + "Properties": { + "GroupDescription": "My other group", + }, + }, + "Ec2Instance2": { + "Type": "AWS::EC2::Instance", + "Properties": { + "SecurityGroups": [{"Ref": "InstanceSecurityGroup"}], + "ImageId": "ami-1234abcd", + } + }, + "InstanceSecurityGroup": { + "Type": "AWS::EC2::SecurityGroup", + "Properties": { + "GroupDescription": "My security group", + "Tags": [ + { + "Key": "bar", + "Value": "baz" + } + ], + "SecurityGroupIngress": [{ + "IpProtocol": "tcp", + "FromPort": "22", + "ToPort": "22", + "CidrIp": "123.123.123.123/32", + }, { + "IpProtocol": "tcp", + "FromPort": "80", + "ToPort": "8000", + "SourceSecurityGroupId": {"Ref": "my-security-group"}, + }] + } + } + }, + } + security_group_template_json = json.dumps(security_group_template) + + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "security_group_stack", + template_body=security_group_template_json, + tags={"foo": "bar"} + ) + + ec2_conn = boto.ec2.connect_to_region("us-west-1") + instance_group = ec2_conn.get_all_security_groups( + filters={'description': ['My security group']})[0] + other_group = ec2_conn.get_all_security_groups( + filters={'description': ['My other group']})[0] + 
+ reservation = ec2_conn.get_all_instances()[0] + ec2_instance = reservation.instances[0] + + ec2_instance.groups[0].id.should.equal(instance_group.id) + instance_group.description.should.equal("My security group") + instance_group.tags.should.have.key('foo').which.should.equal('bar') + instance_group.tags.should.have.key('bar').which.should.equal('baz') + rule1, rule2 = instance_group.rules + int(rule1.to_port).should.equal(22) + int(rule1.from_port).should.equal(22) + rule1.grants[0].cidr_ip.should.equal("123.123.123.123/32") + rule1.ip_protocol.should.equal('tcp') + + int(rule2.to_port).should.equal(8000) + int(rule2.from_port).should.equal(80) + rule2.ip_protocol.should.equal('tcp') + rule2.grants[0].group_id.should.equal(other_group.id) + + +@mock_autoscaling_deprecated() +@mock_elb_deprecated() +@mock_cloudformation_deprecated() +@mock_ec2_deprecated() +def test_autoscaling_group_with_elb(): + web_setup_template = { + "AWSTemplateFormatVersion": "2010-09-09", + + "Resources": { + "my-as-group": { + "Type": "AWS::AutoScaling::AutoScalingGroup", + "Properties": { + "AvailabilityZones": ['us-east1'], + "LaunchConfigurationName": {"Ref": "my-launch-config"}, + "MinSize": "2", + "MaxSize": "2", + "DesiredCapacity": "2", + "LoadBalancerNames": [{"Ref": "my-elb"}], + "Tags": [ + { + "Key": "propagated-test-tag", "Value": "propagated-test-tag-value", + "PropagateAtLaunch": True}, + { + "Key": "not-propagated-test-tag", + "Value": "not-propagated-test-tag-value", + "PropagateAtLaunch": False + } + ] + }, + }, + + "my-launch-config": { + "Type": "AWS::AutoScaling::LaunchConfiguration", + "Properties": { + "ImageId": "ami-1234abcd", + "UserData": "some user data", + } + }, + + "my-elb": { + "Type": "AWS::ElasticLoadBalancing::LoadBalancer", + "Properties": { + "AvailabilityZones": ['us-east1'], + "Listeners": [{ + "LoadBalancerPort": "80", + "InstancePort": "80", + "Protocol": "HTTP", + }], + "LoadBalancerName": "my-elb", + "HealthCheck": { + "Target": "HTTP:80", + 
"HealthyThreshold": "3", + "UnhealthyThreshold": "5", + "Interval": "30", + "Timeout": "5", + }, + }, + }, + } + } + + web_setup_template_json = json.dumps(web_setup_template) + + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "web_stack", + template_body=web_setup_template_json, + ) + + autoscale_conn = boto.ec2.autoscale.connect_to_region("us-west-1") + autoscale_group = autoscale_conn.get_all_groups()[0] + autoscale_group.launch_config_name.should.contain("my-launch-config") + autoscale_group.load_balancers[0].should.equal('my-elb') + + # Confirm the Launch config was actually created + autoscale_conn.get_all_launch_configurations().should.have.length_of(1) + + # Confirm the ELB was actually created + elb_conn = boto.ec2.elb.connect_to_region("us-west-1") + elb_conn.get_all_load_balancers().should.have.length_of(1) + + stack = conn.describe_stacks()[0] + resources = stack.describe_resources() + as_group_resource = [resource for resource in resources if resource.resource_type == + 'AWS::AutoScaling::AutoScalingGroup'][0] + as_group_resource.physical_resource_id.should.contain("my-as-group") + + launch_config_resource = [ + resource for resource in resources if + resource.resource_type == 'AWS::AutoScaling::LaunchConfiguration'][0] + launch_config_resource.physical_resource_id.should.contain( + "my-launch-config") + + elb_resource = [resource for resource in resources if resource.resource_type == + 'AWS::ElasticLoadBalancing::LoadBalancer'][0] + elb_resource.physical_resource_id.should.contain("my-elb") + + # confirm the instances were created with the right tags + ec2_conn = boto.ec2.connect_to_region('us-west-1') + reservations = ec2_conn.get_all_reservations() + len(reservations).should.equal(1) + reservation = reservations[0] + len(reservation.instances).should.equal(2) + for instance in reservation.instances: + instance.tags['propagated-test-tag'].should.equal('propagated-test-tag-value') + 
instance.tags.keys().should_not.contain('not-propagated-test-tag') + + +@mock_autoscaling_deprecated() +@mock_cloudformation_deprecated() +@mock_ec2_deprecated() +def test_autoscaling_group_update(): + asg_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "my-as-group": { + "Type": "AWS::AutoScaling::AutoScalingGroup", + "Properties": { + "AvailabilityZones": ['us-west-1'], + "LaunchConfigurationName": {"Ref": "my-launch-config"}, + "MinSize": "2", + "MaxSize": "2", + "DesiredCapacity": "2" + }, + }, + + "my-launch-config": { + "Type": "AWS::AutoScaling::LaunchConfiguration", + "Properties": { + "ImageId": "ami-1234abcd", + "UserData": "some user data", + } + }, + }, + } + asg_template_json = json.dumps(asg_template) + + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "asg_stack", + template_body=asg_template_json, + ) + + autoscale_conn = boto.ec2.autoscale.connect_to_region("us-west-1") + asg = autoscale_conn.get_all_groups()[0] + asg.min_size.should.equal(2) + asg.max_size.should.equal(2) + asg.desired_capacity.should.equal(2) + + asg_template['Resources']['my-as-group']['Properties']['MaxSize'] = 3 + asg_template['Resources']['my-as-group']['Properties']['Tags'] = [ + { + "Key": "propagated-test-tag", "Value": "propagated-test-tag-value", + "PropagateAtLaunch": True}, + { + "Key": "not-propagated-test-tag", + "Value": "not-propagated-test-tag-value", + "PropagateAtLaunch": False + } + ] + asg_template_json = json.dumps(asg_template) + conn.update_stack( + "asg_stack", + template_body=asg_template_json, + ) + asg = autoscale_conn.get_all_groups()[0] + asg.min_size.should.equal(2) + asg.max_size.should.equal(3) + asg.desired_capacity.should.equal(2) + + # confirm the instances were created with the right tags + ec2_conn = boto.ec2.connect_to_region('us-west-1') + reservations = ec2_conn.get_all_reservations() + running_instance_count = 0 + for res in reservations: + for instance in res.instances: + if 
instance.state == 'running': + running_instance_count += 1 + instance.tags['propagated-test-tag'].should.equal('propagated-test-tag-value') + instance.tags.keys().should_not.contain('not-propagated-test-tag') + running_instance_count.should.equal(2) + + +@mock_ec2_deprecated() +@mock_cloudformation_deprecated() +def test_vpc_single_instance_in_subnet(): + template_json = json.dumps(vpc_single_instance_in_subnet.template) + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "test_stack", + template_body=template_json, + parameters=[("KeyName", "my_key")], + ) + + vpc_conn = boto.vpc.connect_to_region("us-west-1") + + vpc = vpc_conn.get_all_vpcs(filters={'cidrBlock': '10.0.0.0/16'})[0] + vpc.cidr_block.should.equal("10.0.0.0/16") + + # Add this once we implement the endpoint + # vpc_conn.get_all_internet_gateways().should.have.length_of(1) + + subnet = vpc_conn.get_all_subnets(filters={'vpcId': vpc.id})[0] + subnet.vpc_id.should.equal(vpc.id) + + ec2_conn = boto.ec2.connect_to_region("us-west-1") + reservation = ec2_conn.get_all_instances()[0] + instance = reservation.instances[0] + instance.tags["Foo"].should.equal("Bar") + # Check that the EIP is attached to the EC2 instance + eip = ec2_conn.get_all_addresses()[0] + eip.domain.should.equal('vpc') + eip.instance_id.should.equal(instance.id) + + security_group = ec2_conn.get_all_security_groups( + filters={'vpc_id': [vpc.id]})[0] + security_group.vpc_id.should.equal(vpc.id) + + stack = conn.describe_stacks()[0] + + vpc.tags.should.have.key('Application').which.should.equal(stack.stack_id) + + resources = stack.describe_resources() + vpc_resource = [ + resource for resource in resources if resource.resource_type == 'AWS::EC2::VPC'][0] + vpc_resource.physical_resource_id.should.equal(vpc.id) + + subnet_resource = [ + resource for resource in resources if resource.resource_type == 'AWS::EC2::Subnet'][0] + subnet_resource.physical_resource_id.should.equal(subnet.id) + + eip_resource = [ + 
resource for resource in resources if resource.resource_type == 'AWS::EC2::EIP'][0] + eip_resource.physical_resource_id.should.equal(eip.public_ip) + + +@mock_cloudformation() +@mock_ec2() +@mock_rds2() +def test_rds_db_parameter_groups(): + ec2_conn = boto3.client("ec2", region_name="us-west-1") + ec2_conn.create_security_group( + GroupName='application', Description='Our Application Group') + + template_json = json.dumps(rds_mysql_with_db_parameter_group.template) + cf_conn = boto3.client('cloudformation', 'us-west-1') + cf_conn.create_stack( + StackName="test_stack", + TemplateBody=template_json, + Parameters=[{'ParameterKey': key, 'ParameterValue': value} for + key, value in [ + ("DBInstanceIdentifier", "master_db"), + ("DBName", "my_db"), + ("DBUser", "my_user"), + ("DBPassword", "my_password"), + ("DBAllocatedStorage", "20"), + ("DBInstanceClass", "db.m1.medium"), + ("EC2SecurityGroup", "application"), + ("MultiAZ", "true"), + ] + ], + ) + + rds_conn = boto3.client('rds', region_name="us-west-1") + + db_parameter_groups = rds_conn.describe_db_parameter_groups() + len(db_parameter_groups['DBParameterGroups']).should.equal(1) + db_parameter_group_name = db_parameter_groups[ + 'DBParameterGroups'][0]['DBParameterGroupName'] + + found_cloudformation_set_parameter = False + for db_parameter in rds_conn.describe_db_parameters(DBParameterGroupName=db_parameter_group_name)[ + 'Parameters']: + if db_parameter['ParameterName'] == 'BACKLOG_QUEUE_LIMIT' and db_parameter[ + 'ParameterValue'] == '2048': + found_cloudformation_set_parameter = True + + found_cloudformation_set_parameter.should.equal(True) + + +@mock_cloudformation_deprecated() +@mock_ec2_deprecated() +@mock_rds_deprecated() +def test_rds_mysql_with_read_replica(): + ec2_conn = boto.ec2.connect_to_region("us-west-1") + ec2_conn.create_security_group('application', 'Our Application Group') + + template_json = json.dumps(rds_mysql_with_read_replica.template) + conn = 
boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "test_stack", + template_body=template_json, + parameters=[ + ("DBInstanceIdentifier", "master_db"), + ("DBName", "my_db"), + ("DBUser", "my_user"), + ("DBPassword", "my_password"), + ("DBAllocatedStorage", "20"), + ("DBInstanceClass", "db.m1.medium"), + ("EC2SecurityGroup", "application"), + ("MultiAZ", "true"), + ], + ) + + rds_conn = boto.rds.connect_to_region("us-west-1") + + primary = rds_conn.get_all_dbinstances("master_db")[0] + primary.master_username.should.equal("my_user") + primary.allocated_storage.should.equal(20) + primary.instance_class.should.equal("db.m1.medium") + primary.multi_az.should.equal(True) + list(primary.read_replica_dbinstance_identifiers).should.have.length_of(1) + replica_id = primary.read_replica_dbinstance_identifiers[0] + + replica = rds_conn.get_all_dbinstances(replica_id)[0] + replica.instance_class.should.equal("db.m1.medium") + + security_group_name = primary.security_groups[0].name + security_group = rds_conn.get_all_dbsecurity_groups(security_group_name)[0] + security_group.ec2_groups[0].name.should.equal("application") + + +@mock_cloudformation_deprecated() +@mock_ec2_deprecated() +@mock_rds_deprecated() +def test_rds_mysql_with_read_replica_in_vpc(): + template_json = json.dumps(rds_mysql_with_read_replica.template) + conn = boto.cloudformation.connect_to_region("eu-central-1") + conn.create_stack( + "test_stack", + template_body=template_json, + parameters=[ + ("DBInstanceIdentifier", "master_db"), + ("DBName", "my_db"), + ("DBUser", "my_user"), + ("DBPassword", "my_password"), + ("DBAllocatedStorage", "20"), + ("DBInstanceClass", "db.m1.medium"), + ("MultiAZ", "true"), + ], + ) + + rds_conn = boto.rds.connect_to_region("eu-central-1") + primary = rds_conn.get_all_dbinstances("master_db")[0] + + subnet_group_name = primary.subnet_group.name + subnet_group = rds_conn.get_all_db_subnet_groups(subnet_group_name)[0] + 
subnet_group.description.should.equal("my db subnet group") + + +@mock_autoscaling_deprecated() +@mock_iam_deprecated() +@mock_cloudformation_deprecated() +def test_iam_roles(): + iam_template = { + "AWSTemplateFormatVersion": "2010-09-09", + + "Resources": { + + "my-launch-config": { + "Properties": { + "IamInstanceProfile": {"Ref": "my-instance-profile-with-path"}, + "ImageId": "ami-1234abcd", + }, + "Type": "AWS::AutoScaling::LaunchConfiguration" + }, + "my-instance-profile-with-path": { + "Properties": { + "Path": "my-path", + "Roles": [{"Ref": "my-role-with-path"}], + }, + "Type": "AWS::IAM::InstanceProfile" + }, + "my-instance-profile-no-path": { + "Properties": { + "Roles": [{"Ref": "my-role-no-path"}], + }, + "Type": "AWS::IAM::InstanceProfile" + }, + "my-role-with-path": { + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": [ + "sts:AssumeRole" + ], + "Effect": "Allow", + "Principal": { + "Service": [ + "ec2.amazonaws.com" + ] + } + } + ] + }, + "Path": "my-path", + "Policies": [ + { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "ec2:CreateTags", + "ec2:DescribeInstances", + "ec2:DescribeTags" + ], + "Effect": "Allow", + "Resource": [ + "*" + ] + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "EC2_Tags" + }, + { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "sqs:*" + ], + "Effect": "Allow", + "Resource": [ + "*" + ] + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "SQS" + }, + ] + }, + "Type": "AWS::IAM::Role" + }, + "my-role-no-path": { + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": [ + "sts:AssumeRole" + ], + "Effect": "Allow", + "Principal": { + "Service": [ + "ec2.amazonaws.com" + ] + } + } + ] + }, + }, + "Type": "AWS::IAM::Role" + } + } + } + + iam_template_json = json.dumps(iam_template) + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "test_stack", + template_body=iam_template_json, + ) + + iam_conn = 
boto.iam.connect_to_region("us-west-1") + + role_results = iam_conn.list_roles()['list_roles_response'][ + 'list_roles_result']['roles'] + role_name_to_id = {} + for role_result in role_results: + role = iam_conn.get_role(role_result.role_name) + role.role_name.should.contain("my-role") + if 'with-path' in role.role_name: + role_name_to_id['with-path'] = role.role_id + role.path.should.equal("my-path") + else: + role_name_to_id['no-path'] = role.role_id + role.role_name.should.contain('no-path') + role.path.should.equal('/') + + instance_profile_responses = iam_conn.list_instance_profiles()[ + 'list_instance_profiles_response']['list_instance_profiles_result']['instance_profiles'] + instance_profile_responses.should.have.length_of(2) + instance_profile_names = [] + + for instance_profile_response in instance_profile_responses: + instance_profile = iam_conn.get_instance_profile(instance_profile_response.instance_profile_name) + instance_profile_names.append(instance_profile.instance_profile_name) + instance_profile.instance_profile_name.should.contain( + "my-instance-profile") + if "with-path" in instance_profile.instance_profile_name: + instance_profile.path.should.equal("my-path") + instance_profile.role_id.should.equal(role_name_to_id['with-path']) + else: + instance_profile.instance_profile_name.should.contain('no-path') + instance_profile.role_id.should.equal(role_name_to_id['no-path']) + instance_profile.path.should.equal('/') + + autoscale_conn = boto.ec2.autoscale.connect_to_region("us-west-1") + launch_config = autoscale_conn.get_all_launch_configurations()[0] + launch_config.instance_profile_name.should.contain("my-instance-profile-with-path") + + stack = conn.describe_stacks()[0] + resources = stack.describe_resources() + instance_profile_resources = [ + resource for resource in resources if resource.resource_type == 'AWS::IAM::InstanceProfile'] + {ip.physical_resource_id for ip in instance_profile_resources}.should.equal(set(instance_profile_names)) + + 
role_resources = [ + resource for resource in resources if resource.resource_type == 'AWS::IAM::Role'] + {r.physical_resource_id for r in role_resources}.should.equal(set(role_name_to_id.values())) + + +@mock_ec2_deprecated() +@mock_cloudformation_deprecated() +def test_single_instance_with_ebs_volume(): + template_json = json.dumps(single_instance_with_ebs_volume.template) + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "test_stack", + template_body=template_json, + parameters=[("KeyName", "key_name")] + ) + + ec2_conn = boto.ec2.connect_to_region("us-west-1") + reservation = ec2_conn.get_all_instances()[0] + ec2_instance = reservation.instances[0] + + volumes = ec2_conn.get_all_volumes() + # Grab the mounted drive + volume = [ + volume for volume in volumes if volume.attach_data.device == '/dev/sdh'][0] + volume.volume_state().should.equal('in-use') + volume.attach_data.instance_id.should.equal(ec2_instance.id) + + stack = conn.describe_stacks()[0] + resources = stack.describe_resources() + ebs_volumes = [ + resource for resource in resources if resource.resource_type == 'AWS::EC2::Volume'] + ebs_volumes[0].physical_resource_id.should.equal(volume.id) + + +@mock_cloudformation_deprecated() +def test_create_template_without_required_param(): + template_json = json.dumps(single_instance_with_ebs_volume.template) + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack.when.called_with( + "test_stack", + template_body=template_json, + ).should.throw(BotoServerError) + + +@mock_ec2_deprecated() +@mock_cloudformation_deprecated() +def test_classic_eip(): + template_json = json.dumps(ec2_classic_eip.template) + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack("test_stack", template_body=template_json) + ec2_conn = boto.ec2.connect_to_region("us-west-1") + eip = ec2_conn.get_all_addresses()[0] + + stack = conn.describe_stacks()[0] + resources = stack.describe_resources() + cfn_eip = [ 
+ resource for resource in resources if resource.resource_type == 'AWS::EC2::EIP'][0] + cfn_eip.physical_resource_id.should.equal(eip.public_ip) + + +@mock_ec2_deprecated() +@mock_cloudformation_deprecated() +def test_vpc_eip(): + template_json = json.dumps(vpc_eip.template) + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack("test_stack", template_body=template_json) + ec2_conn = boto.ec2.connect_to_region("us-west-1") + eip = ec2_conn.get_all_addresses()[0] + + stack = conn.describe_stacks()[0] + resources = stack.describe_resources() + cfn_eip = [ + resource for resource in resources if resource.resource_type == 'AWS::EC2::EIP'][0] + cfn_eip.physical_resource_id.should.equal(eip.public_ip) + + +@mock_ec2_deprecated() +@mock_cloudformation_deprecated() +def test_fn_join(): + template_json = json.dumps(fn_join.template) + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack("test_stack", template_body=template_json) + ec2_conn = boto.ec2.connect_to_region("us-west-1") + eip = ec2_conn.get_all_addresses()[0] + + stack = conn.describe_stacks()[0] + fn_join_output = stack.outputs[0] + fn_join_output.value.should.equal('test eip:{0}'.format(eip.public_ip)) + + +@mock_cloudformation_deprecated() +@mock_sqs_deprecated() +def test_conditional_resources(): + sqs_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Parameters": { + "EnvType": { + "Description": "Environment type.", + "Type": "String", + } + }, + "Conditions": { + "CreateQueue": {"Fn::Equals": [{"Ref": "EnvType"}, "prod"]} + }, + "Resources": { + "QueueGroup": { + "Condition": "CreateQueue", + "Type": "AWS::SQS::Queue", + "Properties": { + "QueueName": "my-queue", + "VisibilityTimeout": 60, + } + }, + }, + } + sqs_template_json = json.dumps(sqs_template) + + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "test_stack_without_queue", + template_body=sqs_template_json, + parameters=[("EnvType", "staging")], + ) + 
sqs_conn = boto.sqs.connect_to_region("us-west-1") + list(sqs_conn.get_all_queues()).should.have.length_of(0) + + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "test_stack_with_queue", + template_body=sqs_template_json, + parameters=[("EnvType", "prod")], + ) + sqs_conn = boto.sqs.connect_to_region("us-west-1") + list(sqs_conn.get_all_queues()).should.have.length_of(1) + + +@mock_cloudformation_deprecated() +@mock_ec2_deprecated() +def test_conditional_if_handling(): + dummy_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Conditions": { + "EnvEqualsPrd": { + "Fn::Equals": [ + { + "Ref": "ENV" + }, + "prd" + ] + } + }, + "Parameters": { + "ENV": { + "Default": "dev", + "Description": "Deployment environment for the stack (dev/prd)", + "Type": "String" + }, + }, + "Description": "Stack 1", + "Resources": { + "App1": { + "Properties": { + "ImageId": { + "Fn::If": [ + "EnvEqualsPrd", + "ami-00000000", + "ami-ffffffff" + ] + }, + }, + "Type": "AWS::EC2::Instance" + }, + } + } + dummy_template_json = json.dumps(dummy_template) + + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack('test_stack1', template_body=dummy_template_json) + ec2_conn = boto.ec2.connect_to_region("us-west-1") + reservation = ec2_conn.get_all_instances()[0] + ec2_instance = reservation.instances[0] + ec2_instance.image_id.should.equal("ami-ffffffff") + ec2_instance.terminate() + + conn = boto.cloudformation.connect_to_region("us-west-2") + conn.create_stack( + 'test_stack1', template_body=dummy_template_json, parameters=[("ENV", "prd")]) + ec2_conn = boto.ec2.connect_to_region("us-west-2") + reservation = ec2_conn.get_all_instances()[0] + ec2_instance = reservation.instances[0] + ec2_instance.image_id.should.equal("ami-00000000") + + +@mock_cloudformation_deprecated() +@mock_ec2_deprecated() +def test_cloudformation_mapping(): + dummy_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Mappings": { + "RegionMap": { + 
"us-east-1": {"32": "ami-6411e20d", "64": "ami-7a11e213"}, + "us-west-1": {"32": "ami-c9c7978c", "64": "ami-cfc7978a"}, + "eu-west-1": {"32": "ami-37c2f643", "64": "ami-31c2f645"}, + "ap-southeast-1": {"32": "ami-66f28c34", "64": "ami-60f28c32"}, + "ap-northeast-1": {"32": "ami-9c03a89d", "64": "ami-a003a8a1"} + } + }, + "Resources": { + "WebServer": { + "Type": "AWS::EC2::Instance", + "Properties": { + "ImageId": { + "Fn::FindInMap": ["RegionMap", {"Ref": "AWS::Region"}, "32"] + }, + "InstanceType": "m1.small" + }, + "Type": "AWS::EC2::Instance", + }, + }, + } + + dummy_template_json = json.dumps(dummy_template) + + conn = boto.cloudformation.connect_to_region("us-east-1") + conn.create_stack('test_stack1', template_body=dummy_template_json) + ec2_conn = boto.ec2.connect_to_region("us-east-1") + reservation = ec2_conn.get_all_instances()[0] + ec2_instance = reservation.instances[0] + ec2_instance.image_id.should.equal("ami-6411e20d") + + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack('test_stack1', template_body=dummy_template_json) + ec2_conn = boto.ec2.connect_to_region("us-west-1") + reservation = ec2_conn.get_all_instances()[0] + ec2_instance = reservation.instances[0] + ec2_instance.image_id.should.equal("ami-c9c7978c") + + +@mock_cloudformation_deprecated() +@mock_route53_deprecated() +def test_route53_roundrobin(): + route53_conn = boto.connect_route53() + + template_json = json.dumps(route53_roundrobin.template) + conn = boto.cloudformation.connect_to_region("us-west-1") + stack = conn.create_stack( + "test_stack", + template_body=template_json, + ) + + zones = route53_conn.get_all_hosted_zones()['ListHostedZonesResponse'][ + 'HostedZones'] + list(zones).should.have.length_of(1) + zone_id = zones[0]['Id'] + zone_id = zone_id.split('/') + zone_id = zone_id[2] + + rrsets = route53_conn.get_all_rrsets(zone_id) + rrsets.hosted_zone_id.should.equal(zone_id) + rrsets.should.have.length_of(2) + record_set1 = rrsets[0] + 
record_set1.name.should.equal('test_stack.us-west-1.my_zone.') + record_set1.identifier.should.equal("test_stack AWS") + record_set1.type.should.equal('CNAME') + record_set1.ttl.should.equal('900') + record_set1.weight.should.equal('3') + record_set1.resource_records[0].should.equal("aws.amazon.com") + + record_set2 = rrsets[1] + record_set2.name.should.equal('test_stack.us-west-1.my_zone.') + record_set2.identifier.should.equal("test_stack Amazon") + record_set2.type.should.equal('CNAME') + record_set2.ttl.should.equal('900') + record_set2.weight.should.equal('1') + record_set2.resource_records[0].should.equal("www.amazon.com") + + stack = conn.describe_stacks()[0] + output = stack.outputs[0] + output.key.should.equal('DomainName') + output.value.should.equal( + 'arn:aws:route53:::hostedzone/{0}'.format(zone_id)) + + +@mock_cloudformation_deprecated() +@mock_ec2_deprecated() +@mock_route53_deprecated() +def test_route53_ec2_instance_with_public_ip(): + route53_conn = boto.connect_route53() + ec2_conn = boto.ec2.connect_to_region("us-west-1") + + template_json = json.dumps(route53_ec2_instance_with_public_ip.template) + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "test_stack", + template_body=template_json, + ) + + instance_id = ec2_conn.get_all_reservations()[0].instances[0].id + + zones = route53_conn.get_all_hosted_zones()['ListHostedZonesResponse'][ + 'HostedZones'] + list(zones).should.have.length_of(1) + zone_id = zones[0]['Id'] + zone_id = zone_id.split('/') + zone_id = zone_id[2] + + rrsets = route53_conn.get_all_rrsets(zone_id) + rrsets.should.have.length_of(1) + + record_set1 = rrsets[0] + record_set1.name.should.equal('{0}.us-west-1.my_zone.'.format(instance_id)) + record_set1.identifier.should.equal(None) + record_set1.type.should.equal('A') + record_set1.ttl.should.equal('900') + record_set1.weight.should.equal(None) + record_set1.resource_records[0].should.equal("10.0.0.25") + + +@mock_cloudformation_deprecated() 
+@mock_route53_deprecated() +def test_route53_associate_health_check(): + route53_conn = boto.connect_route53() + + template_json = json.dumps(route53_health_check.template) + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "test_stack", + template_body=template_json, + ) + + checks = route53_conn.get_list_health_checks()['ListHealthChecksResponse'][ + 'HealthChecks'] + list(checks).should.have.length_of(1) + check = checks[0] + health_check_id = check['Id'] + config = check['HealthCheckConfig'] + config["FailureThreshold"].should.equal("3") + config["IPAddress"].should.equal("10.0.0.4") + config["Port"].should.equal("80") + config["RequestInterval"].should.equal("10") + config["ResourcePath"].should.equal("/") + config["Type"].should.equal("HTTP") + + zones = route53_conn.get_all_hosted_zones()['ListHostedZonesResponse'][ + 'HostedZones'] + list(zones).should.have.length_of(1) + zone_id = zones[0]['Id'] + zone_id = zone_id.split('/') + zone_id = zone_id[2] + + rrsets = route53_conn.get_all_rrsets(zone_id) + rrsets.should.have.length_of(1) + + record_set = rrsets[0] + record_set.health_check.should.equal(health_check_id) + + +@mock_cloudformation_deprecated() +@mock_route53_deprecated() +def test_route53_with_update(): + route53_conn = boto.connect_route53() + + template_json = json.dumps(route53_health_check.template) + cf_conn = boto.cloudformation.connect_to_region("us-west-1") + cf_conn.create_stack( + "test_stack", + template_body=template_json, + ) + + zones = route53_conn.get_all_hosted_zones()['ListHostedZonesResponse'][ + 'HostedZones'] + list(zones).should.have.length_of(1) + zone_id = zones[0]['Id'] + zone_id = zone_id.split('/') + zone_id = zone_id[2] + + rrsets = route53_conn.get_all_rrsets(zone_id) + rrsets.should.have.length_of(1) + + record_set = rrsets[0] + record_set.resource_records.should.equal(["my.example.com"]) + + route53_health_check.template['Resources']['myDNSRecord'][ + 'Properties']['ResourceRecords'] = 
["my_other.example.com"] + template_json = json.dumps(route53_health_check.template) + cf_conn.update_stack( + "test_stack", + template_body=template_json, + ) + + zones = route53_conn.get_all_hosted_zones()['ListHostedZonesResponse'][ + 'HostedZones'] + list(zones).should.have.length_of(1) + zone_id = zones[0]['Id'] + zone_id = zone_id.split('/') + zone_id = zone_id[2] + + rrsets = route53_conn.get_all_rrsets(zone_id) + rrsets.should.have.length_of(1) + + record_set = rrsets[0] + record_set.resource_records.should.equal(["my_other.example.com"]) + + +@mock_cloudformation_deprecated() +@mock_sns_deprecated() +def test_sns_topic(): + dummy_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "MySNSTopic": { + "Type": "AWS::SNS::Topic", + "Properties": { + "Subscription": [ + {"Endpoint": "https://example.com", "Protocol": "https"}, + ], + "TopicName": "my_topics", + } + } + }, + "Outputs": { + "topic_name": { + "Value": {"Fn::GetAtt": ["MySNSTopic", "TopicName"]} + }, + "topic_arn": { + "Value": {"Ref": "MySNSTopic"} + }, + } + } + template_json = json.dumps(dummy_template) + conn = boto.cloudformation.connect_to_region("us-west-1") + stack = conn.create_stack( + "test_stack", + template_body=template_json, + ) + + sns_conn = boto.sns.connect_to_region("us-west-1") + topics = sns_conn.get_all_topics()["ListTopicsResponse"][ + "ListTopicsResult"]["Topics"] + topics.should.have.length_of(1) + topic_arn = topics[0]['TopicArn'] + topic_arn.should.contain("my_topics") + + subscriptions = sns_conn.get_all_subscriptions()["ListSubscriptionsResponse"][ + "ListSubscriptionsResult"]["Subscriptions"] + subscriptions.should.have.length_of(1) + subscription = subscriptions[0] + subscription["TopicArn"].should.equal(topic_arn) + subscription["Protocol"].should.equal("https") + subscription["SubscriptionArn"].should.contain(topic_arn) + subscription["Endpoint"].should.equal("https://example.com") + + stack = conn.describe_stacks()[0] + topic_name_output = [x 
for x in stack.outputs if x.key == 'topic_name'][0] + topic_name_output.value.should.equal("my_topics") + topic_arn_output = [x for x in stack.outputs if x.key == 'topic_arn'][0] + topic_arn_output.value.should.equal(topic_arn) + + +@mock_cloudformation_deprecated +@mock_ec2_deprecated +def test_vpc_gateway_attachment_creation_should_attach_itself_to_vpc(): + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "internetgateway": { + "Type": "AWS::EC2::InternetGateway" + }, + "testvpc": { + "Type": "AWS::EC2::VPC", + "Properties": { + "CidrBlock": "10.0.0.0/16", + "EnableDnsHostnames": "true", + "EnableDnsSupport": "true", + "InstanceTenancy": "default" + }, + }, + "vpcgatewayattachment": { + "Type": "AWS::EC2::VPCGatewayAttachment", + "Properties": { + "InternetGatewayId": { + "Ref": "internetgateway" + }, + "VpcId": { + "Ref": "testvpc" + } + }, + }, + } + } + + template_json = json.dumps(template) + cf_conn = boto.cloudformation.connect_to_region("us-west-1") + cf_conn.create_stack( + "test_stack", + template_body=template_json, + ) + + vpc_conn = boto.vpc.connect_to_region("us-west-1") + vpc = vpc_conn.get_all_vpcs(filters={'cidrBlock': '10.0.0.0/16'})[0] + igws = vpc_conn.get_all_internet_gateways( + filters={'attachment.vpc-id': vpc.id} + ) + + igws.should.have.length_of(1) + + +@mock_cloudformation_deprecated +@mock_ec2_deprecated +def test_vpc_peering_creation(): + vpc_conn = boto.vpc.connect_to_region("us-west-1") + vpc_source = vpc_conn.create_vpc("10.0.0.0/16") + peer_vpc = vpc_conn.create_vpc("10.1.0.0/16") + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "vpcpeeringconnection": { + "Type": "AWS::EC2::VPCPeeringConnection", + "Properties": { + "PeerVpcId": peer_vpc.id, + "VpcId": vpc_source.id, + } + }, + } + } + + template_json = json.dumps(template) + cf_conn = boto.cloudformation.connect_to_region("us-west-1") + cf_conn.create_stack( + "test_stack", + template_body=template_json, + ) + + 
peering_connections = vpc_conn.get_all_vpc_peering_connections() + peering_connections.should.have.length_of(1) + + +@mock_cloudformation_deprecated +@mock_ec2_deprecated +def test_multiple_security_group_ingress_separate_from_security_group_by_id(): + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "test-security-group1": { + "Type": "AWS::EC2::SecurityGroup", + "Properties": { + "GroupDescription": "test security group", + "Tags": [ + { + "Key": "sg-name", + "Value": "sg1" + } + ] + }, + }, + "test-security-group2": { + "Type": "AWS::EC2::SecurityGroup", + "Properties": { + "GroupDescription": "test security group", + "Tags": [ + { + "Key": "sg-name", + "Value": "sg2" + } + ] + }, + }, + "test-sg-ingress": { + "Type": "AWS::EC2::SecurityGroupIngress", + "Properties": { + "GroupId": {"Ref": "test-security-group1"}, + "IpProtocol": "tcp", + "FromPort": "80", + "ToPort": "8080", + "SourceSecurityGroupId": {"Ref": "test-security-group2"}, + } + } + } + } + + template_json = json.dumps(template) + cf_conn = boto.cloudformation.connect_to_region("us-west-1") + cf_conn.create_stack( + "test_stack", + template_body=template_json, + ) + ec2_conn = boto.ec2.connect_to_region("us-west-1") + + security_group1 = ec2_conn.get_all_security_groups( + filters={"tag:sg-name": "sg1"})[0] + security_group2 = ec2_conn.get_all_security_groups( + filters={"tag:sg-name": "sg2"})[0] + + security_group1.rules.should.have.length_of(1) + security_group1.rules[0].grants.should.have.length_of(1) + security_group1.rules[0].grants[ + 0].group_id.should.equal(security_group2.id) + security_group1.rules[0].ip_protocol.should.equal('tcp') + security_group1.rules[0].from_port.should.equal('80') + security_group1.rules[0].to_port.should.equal('8080') + + +@mock_cloudformation_deprecated +@mock_ec2_deprecated +def test_security_group_ingress_separate_from_security_group_by_id(): + ec2_conn = boto.ec2.connect_to_region("us-west-1") + ec2_conn.create_security_group( + 
"test-security-group1", "test security group") + + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "test-security-group2": { + "Type": "AWS::EC2::SecurityGroup", + "Properties": { + "GroupDescription": "test security group", + "Tags": [ + { + "Key": "sg-name", + "Value": "sg2" + } + ] + }, + }, + "test-sg-ingress": { + "Type": "AWS::EC2::SecurityGroupIngress", + "Properties": { + "GroupName": "test-security-group1", + "IpProtocol": "tcp", + "FromPort": "80", + "ToPort": "8080", + "SourceSecurityGroupId": {"Ref": "test-security-group2"}, + } + } + } + } + + template_json = json.dumps(template) + cf_conn = boto.cloudformation.connect_to_region("us-west-1") + cf_conn.create_stack( + "test_stack", + template_body=template_json, + ) + security_group1 = ec2_conn.get_all_security_groups( + groupnames=["test-security-group1"])[0] + security_group2 = ec2_conn.get_all_security_groups( + filters={"tag:sg-name": "sg2"})[0] + + security_group1.rules.should.have.length_of(1) + security_group1.rules[0].grants.should.have.length_of(1) + security_group1.rules[0].grants[ + 0].group_id.should.equal(security_group2.id) + security_group1.rules[0].ip_protocol.should.equal('tcp') + security_group1.rules[0].from_port.should.equal('80') + security_group1.rules[0].to_port.should.equal('8080') + + +@mock_cloudformation_deprecated +@mock_ec2_deprecated +def test_security_group_ingress_separate_from_security_group_by_id_using_vpc(): + vpc_conn = boto.vpc.connect_to_region("us-west-1") + vpc = vpc_conn.create_vpc("10.0.0.0/16") + + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "test-security-group1": { + "Type": "AWS::EC2::SecurityGroup", + "Properties": { + "GroupDescription": "test security group", + "VpcId": vpc.id, + "Tags": [ + { + "Key": "sg-name", + "Value": "sg1" + } + ] + }, + }, + "test-security-group2": { + "Type": "AWS::EC2::SecurityGroup", + "Properties": { + "GroupDescription": "test security group", + "VpcId": vpc.id, + 
"Tags": [ + { + "Key": "sg-name", + "Value": "sg2" + } + ] + }, + }, + "test-sg-ingress": { + "Type": "AWS::EC2::SecurityGroupIngress", + "Properties": { + "GroupId": {"Ref": "test-security-group1"}, + "VpcId": vpc.id, + "IpProtocol": "tcp", + "FromPort": "80", + "ToPort": "8080", + "SourceSecurityGroupId": {"Ref": "test-security-group2"}, + } + } + } + } + + template_json = json.dumps(template) + cf_conn = boto.cloudformation.connect_to_region("us-west-1") + cf_conn.create_stack( + "test_stack", + template_body=template_json, + ) + security_group1 = vpc_conn.get_all_security_groups( + filters={"tag:sg-name": "sg1"})[0] + security_group2 = vpc_conn.get_all_security_groups( + filters={"tag:sg-name": "sg2"})[0] + + security_group1.rules.should.have.length_of(1) + security_group1.rules[0].grants.should.have.length_of(1) + security_group1.rules[0].grants[ + 0].group_id.should.equal(security_group2.id) + security_group1.rules[0].ip_protocol.should.equal('tcp') + security_group1.rules[0].from_port.should.equal('80') + security_group1.rules[0].to_port.should.equal('8080') + + +@mock_cloudformation_deprecated +@mock_ec2_deprecated +def test_security_group_with_update(): + vpc_conn = boto.vpc.connect_to_region("us-west-1") + vpc1 = vpc_conn.create_vpc("10.0.0.0/16") + + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "test-security-group": { + "Type": "AWS::EC2::SecurityGroup", + "Properties": { + "GroupDescription": "test security group", + "VpcId": vpc1.id, + "Tags": [ + { + "Key": "sg-name", + "Value": "sg" + } + ] + }, + }, + } + } + + template_json = json.dumps(template) + cf_conn = boto.cloudformation.connect_to_region("us-west-1") + cf_conn.create_stack( + "test_stack", + template_body=template_json, + ) + security_group = vpc_conn.get_all_security_groups( + filters={"tag:sg-name": "sg"})[0] + security_group.vpc_id.should.equal(vpc1.id) + + vpc2 = vpc_conn.create_vpc("10.1.0.0/16") + template['Resources'][ + 
'test-security-group']['Properties']['VpcId'] = vpc2.id + template_json = json.dumps(template) + cf_conn.update_stack( + "test_stack", + template_body=template_json, + ) + security_group = vpc_conn.get_all_security_groups( + filters={"tag:sg-name": "sg"})[0] + security_group.vpc_id.should.equal(vpc2.id) + + +@mock_cloudformation_deprecated +@mock_ec2_deprecated +def test_subnets_should_be_created_with_availability_zone(): + vpc_conn = boto.vpc.connect_to_region('us-west-1') + vpc = vpc_conn.create_vpc("10.0.0.0/16") + + subnet_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "testSubnet": { + "Type": "AWS::EC2::Subnet", + "Properties": { + "VpcId": vpc.id, + "CidrBlock": "10.0.0.0/24", + "AvailabilityZone": "us-west-1b", + } + } + } + } + cf_conn = boto.cloudformation.connect_to_region("us-west-1") + template_json = json.dumps(subnet_template) + cf_conn.create_stack( + "test_stack", + template_body=template_json, + ) + subnet = vpc_conn.get_all_subnets(filters={'cidrBlock': '10.0.0.0/24'})[0] + subnet.availability_zone.should.equal('us-west-1b') + + +@mock_cloudformation_deprecated +@mock_datapipeline_deprecated +def test_datapipeline(): + dp_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "dataPipeline": { + "Properties": { + "Activate": "true", + "Name": "testDataPipeline", + "PipelineObjects": [ + { + "Fields": [ + { + "Key": "failureAndRerunMode", + "StringValue": "CASCADE" + }, + { + "Key": "scheduleType", + "StringValue": "cron" + }, + { + "Key": "schedule", + "RefValue": "DefaultSchedule" + }, + { + "Key": "pipelineLogUri", + "StringValue": "s3://bucket/logs" + }, + { + "Key": "type", + "StringValue": "Default" + }, + ], + "Id": "Default", + "Name": "Default" + }, + { + "Fields": [ + { + "Key": "startDateTime", + "StringValue": "1970-01-01T01:00:00" + }, + { + "Key": "period", + "StringValue": "1 Day" + }, + { + "Key": "type", + "StringValue": "Schedule" + } + ], + "Id": "DefaultSchedule", + "Name": 
"RunOnce" + } + ], + "PipelineTags": [] + }, + "Type": "AWS::DataPipeline::Pipeline" + } + } + } + cf_conn = boto.cloudformation.connect_to_region("us-east-1") + template_json = json.dumps(dp_template) + stack_id = cf_conn.create_stack( + "test_stack", + template_body=template_json, + ) + + dp_conn = boto.datapipeline.connect_to_region('us-east-1') + data_pipelines = dp_conn.list_pipelines() + + data_pipelines['pipelineIdList'].should.have.length_of(1) + data_pipelines['pipelineIdList'][0][ + 'name'].should.equal('testDataPipeline') + + stack_resources = cf_conn.list_stack_resources(stack_id) + stack_resources.should.have.length_of(1) + stack_resources[0].physical_resource_id.should.equal( + data_pipelines['pipelineIdList'][0]['id']) + + +@mock_cloudformation +@mock_lambda +def test_lambda_function(): + # switch this to python as backend lambda only supports python execution. + lambda_code = """ +def lambda_handler(event, context): + return (event, context) +""" + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "lambdaTest": { + "Type": "AWS::Lambda::Function", + "Properties": { + "Code": { + # CloudFormation expects a string as ZipFile, not a ZIP file base64-encoded + "ZipFile": {"Fn::Join": ["\n", lambda_code.splitlines()]} + }, + "Handler": "lambda_function.handler", + "Description": "Test function", + "MemorySize": 128, + "Role": "test-role", + "Runtime": "python2.7", + "Environment": { + "Variables": { + "TEST_ENV_KEY": "test-env-val", + } + }, + } + } + } + } + + template_json = json.dumps(template) + cf_conn = boto3.client('cloudformation', 'us-east-1') + cf_conn.create_stack( + StackName="test_stack", + TemplateBody=template_json, + ) + + conn = boto3.client('lambda', 'us-east-1') + result = conn.list_functions() + result['Functions'].should.have.length_of(1) + result['Functions'][0]['Description'].should.equal('Test function') + result['Functions'][0]['Handler'].should.equal('lambda_function.handler') + 
result['Functions'][0]['MemorySize'].should.equal(128) + result['Functions'][0]['Role'].should.equal('test-role') + result['Functions'][0]['Runtime'].should.equal('python2.7') + result['Functions'][0]['Environment'].should.equal({ + "Variables": {"TEST_ENV_KEY": "test-env-val"} + }) + + +@mock_cloudformation +@mock_ec2 +def test_nat_gateway(): + ec2_conn = boto3.client('ec2', 'us-east-1') + vpc_id = ec2_conn.create_vpc(CidrBlock="10.0.0.0/16")['Vpc']['VpcId'] + subnet_id = ec2_conn.create_subnet( + CidrBlock='10.0.1.0/24', VpcId=vpc_id)['Subnet']['SubnetId'] + route_table_id = ec2_conn.create_route_table( + VpcId=vpc_id)['RouteTable']['RouteTableId'] + + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "NAT": { + "DependsOn": "vpcgatewayattachment", + "Type": "AWS::EC2::NatGateway", + "Properties": { + "AllocationId": {"Fn::GetAtt": ["EIP", "AllocationId"]}, + "SubnetId": subnet_id + } + }, + "EIP": { + "Type": "AWS::EC2::EIP", + "Properties": { + "Domain": "vpc" + } + }, + "Route": { + "Type": "AWS::EC2::Route", + "Properties": { + "RouteTableId": route_table_id, + "DestinationCidrBlock": "0.0.0.0/0", + "NatGatewayId": {"Ref": "NAT"} + } + }, + "internetgateway": { + "Type": "AWS::EC2::InternetGateway" + }, + "vpcgatewayattachment": { + "Type": "AWS::EC2::VPCGatewayAttachment", + "Properties": { + "InternetGatewayId": { + "Ref": "internetgateway" + }, + "VpcId": vpc_id, + }, + } + } + } + + cf_conn = boto3.client('cloudformation', 'us-east-1') + cf_conn.create_stack( + StackName="test_stack", + TemplateBody=json.dumps(template), + ) + + result = ec2_conn.describe_nat_gateways() + + result['NatGateways'].should.have.length_of(1) + result['NatGateways'][0]['VpcId'].should.equal(vpc_id) + result['NatGateways'][0]['SubnetId'].should.equal(subnet_id) + result['NatGateways'][0]['State'].should.equal('available') + + +@mock_cloudformation() +@mock_kms() +def test_stack_kms(): + kms_key_template = { + 'Resources': { + 'kmskey': { + 'Properties': 
{ + 'Description': 'A kms key', + 'EnableKeyRotation': True, + 'Enabled': True, + 'KeyPolicy': 'a policy', + }, + 'Type': 'AWS::KMS::Key' + } + } + } + kms_key_template_json = json.dumps(kms_key_template) + + cf_conn = boto3.client('cloudformation', 'us-east-1') + cf_conn.create_stack( + StackName='test_stack', + TemplateBody=kms_key_template_json, + ) + + kms_conn = boto3.client('kms', 'us-east-1') + keys = kms_conn.list_keys()['Keys'] + len(keys).should.equal(1) + result = kms_conn.describe_key(KeyId=keys[0]['KeyId']) + + result['KeyMetadata']['Enabled'].should.equal(True) + result['KeyMetadata']['KeyUsage'].should.equal('ENCRYPT_DECRYPT') + + +@mock_cloudformation() +@mock_ec2() +def test_stack_spot_fleet(): + conn = boto3.client('ec2', 'us-east-1') + + vpc = conn.create_vpc(CidrBlock="10.0.0.0/8")['Vpc'] + subnet = conn.create_subnet( + VpcId=vpc['VpcId'], CidrBlock='10.0.0.0/16', AvailabilityZone='us-east-1a')['Subnet'] + subnet_id = subnet['SubnetId'] + + spot_fleet_template = { + 'Resources': { + "SpotFleet": { + "Type": "AWS::EC2::SpotFleet", + "Properties": { + "SpotFleetRequestConfigData": { + "IamFleetRole": "arn:aws:iam::123456789012:role/fleet", + "SpotPrice": "0.12", + "TargetCapacity": 6, + "AllocationStrategy": "diversified", + "LaunchSpecifications": [ + { + "EbsOptimized": "false", + "InstanceType": 't2.small', + "ImageId": "ami-1234", + "SubnetId": subnet_id, + "WeightedCapacity": "2", + "SpotPrice": "0.13", + }, + { + "EbsOptimized": "true", + "InstanceType": 't2.large', + "ImageId": "ami-1234", + "Monitoring": {"Enabled": "true"}, + "SecurityGroups": [{"GroupId": "sg-123"}], + "SubnetId": subnet_id, + "IamInstanceProfile": {"Arn": "arn:aws:iam::123456789012:role/fleet"}, + "WeightedCapacity": "4", + "SpotPrice": "10.00", + } + ] + } + } + } + } + } + spot_fleet_template_json = json.dumps(spot_fleet_template) + + cf_conn = boto3.client('cloudformation', 'us-east-1') + stack_id = cf_conn.create_stack( + StackName='test_stack', + 
TemplateBody=spot_fleet_template_json, + )['StackId'] + + stack_resources = cf_conn.list_stack_resources(StackName=stack_id) + stack_resources['StackResourceSummaries'].should.have.length_of(1) + spot_fleet_id = stack_resources[ + 'StackResourceSummaries'][0]['PhysicalResourceId'] + + spot_fleet_requests = conn.describe_spot_fleet_requests( + SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'] + len(spot_fleet_requests).should.equal(1) + spot_fleet_request = spot_fleet_requests[0] + spot_fleet_request['SpotFleetRequestState'].should.equal("active") + spot_fleet_config = spot_fleet_request['SpotFleetRequestConfig'] + + spot_fleet_config['SpotPrice'].should.equal('0.12') + spot_fleet_config['TargetCapacity'].should.equal(6) + spot_fleet_config['IamFleetRole'].should.equal( + 'arn:aws:iam::123456789012:role/fleet') + spot_fleet_config['AllocationStrategy'].should.equal('diversified') + spot_fleet_config['FulfilledCapacity'].should.equal(6.0) + + len(spot_fleet_config['LaunchSpecifications']).should.equal(2) + launch_spec = spot_fleet_config['LaunchSpecifications'][0] + + launch_spec['EbsOptimized'].should.equal(False) + launch_spec['ImageId'].should.equal("ami-1234") + launch_spec['InstanceType'].should.equal("t2.small") + launch_spec['SubnetId'].should.equal(subnet_id) + launch_spec['SpotPrice'].should.equal("0.13") + launch_spec['WeightedCapacity'].should.equal(2.0) + + +@mock_cloudformation() +@mock_ec2() +def test_stack_spot_fleet_should_figure_out_default_price(): + conn = boto3.client('ec2', 'us-east-1') + + vpc = conn.create_vpc(CidrBlock="10.0.0.0/8")['Vpc'] + subnet = conn.create_subnet( + VpcId=vpc['VpcId'], CidrBlock='10.0.0.0/16', AvailabilityZone='us-east-1a')['Subnet'] + subnet_id = subnet['SubnetId'] + + spot_fleet_template = { + 'Resources': { + "SpotFleet1": { + "Type": "AWS::EC2::SpotFleet", + "Properties": { + "SpotFleetRequestConfigData": { + "IamFleetRole": "arn:aws:iam::123456789012:role/fleet", + "TargetCapacity": 6, + 
"AllocationStrategy": "diversified", + "LaunchSpecifications": [ + { + "EbsOptimized": "false", + "InstanceType": 't2.small', + "ImageId": "ami-1234", + "SubnetId": subnet_id, + "WeightedCapacity": "2", + }, + { + "EbsOptimized": "true", + "InstanceType": 't2.large', + "ImageId": "ami-1234", + "Monitoring": {"Enabled": "true"}, + "SecurityGroups": [{"GroupId": "sg-123"}], + "SubnetId": subnet_id, + "IamInstanceProfile": {"Arn": "arn:aws:iam::123456789012:role/fleet"}, + "WeightedCapacity": "4", + } + ] + } + } + } + } + } + spot_fleet_template_json = json.dumps(spot_fleet_template) + + cf_conn = boto3.client('cloudformation', 'us-east-1') + stack_id = cf_conn.create_stack( + StackName='test_stack', + TemplateBody=spot_fleet_template_json, + )['StackId'] + + stack_resources = cf_conn.list_stack_resources(StackName=stack_id) + stack_resources['StackResourceSummaries'].should.have.length_of(1) + spot_fleet_id = stack_resources[ + 'StackResourceSummaries'][0]['PhysicalResourceId'] + + spot_fleet_requests = conn.describe_spot_fleet_requests( + SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'] + len(spot_fleet_requests).should.equal(1) + spot_fleet_request = spot_fleet_requests[0] + spot_fleet_request['SpotFleetRequestState'].should.equal("active") + spot_fleet_config = spot_fleet_request['SpotFleetRequestConfig'] + + assert 'SpotPrice' not in spot_fleet_config + len(spot_fleet_config['LaunchSpecifications']).should.equal(2) + launch_spec1 = spot_fleet_config['LaunchSpecifications'][0] + launch_spec2 = spot_fleet_config['LaunchSpecifications'][1] + + assert 'SpotPrice' not in launch_spec1 + assert 'SpotPrice' not in launch_spec2 + + +@mock_ec2 +@mock_elbv2 +@mock_cloudformation +def test_stack_elbv2_resources_integration(): + alb_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Outputs": { + "albdns": { + "Description": "Load balanacer DNS", + "Value": {"Fn::GetAtt": ["alb", "DNSName"]}, + }, + "albname": { + "Description": "Load balancer name", 
+ "Value": {"Fn::GetAtt": ["alb", "LoadBalancerName"]}, + }, + "canonicalhostedzoneid": { + "Description": "Load balancer canonical hosted zone ID", + "Value": {"Fn::GetAtt": ["alb", "CanonicalHostedZoneID"]}, + }, + }, + "Resources": { + "alb": { + "Type": "AWS::ElasticLoadBalancingV2::LoadBalancer", + "Properties": { + "Name": "myelbv2", + "Scheme": "internet-facing", + "Subnets": [{ + "Ref": "mysubnet", + }], + "SecurityGroups": [{ + "Ref": "mysg", + }], + "Type": "application", + "IpAddressType": "ipv4", + } + }, + "mytargetgroup1": { + "Type": "AWS::ElasticLoadBalancingV2::TargetGroup", + "Properties": { + "HealthCheckIntervalSeconds": 30, + "HealthCheckPath": "/status", + "HealthCheckPort": 80, + "HealthCheckProtocol": "HTTP", + "HealthCheckTimeoutSeconds": 5, + "HealthyThresholdCount": 30, + "UnhealthyThresholdCount": 5, + "Matcher": { + "HttpCode": "200,201" + }, + "Name": "mytargetgroup1", + "Port": 80, + "Protocol": "HTTP", + "TargetType": "instance", + "Targets": [{ + "Id": { + "Ref": "ec2instance", + "Port": 80, + }, + }], + "VpcId": { + "Ref": "myvpc", + } + } + }, + "mytargetgroup2": { + "Type": "AWS::ElasticLoadBalancingV2::TargetGroup", + "Properties": { + "HealthCheckIntervalSeconds": 30, + "HealthCheckPath": "/status", + "HealthCheckPort": 8080, + "HealthCheckProtocol": "HTTP", + "HealthCheckTimeoutSeconds": 5, + "HealthyThresholdCount": 30, + "UnhealthyThresholdCount": 5, + "Name": "mytargetgroup2", + "Port": 8080, + "Protocol": "HTTP", + "TargetType": "instance", + "Targets": [{ + "Id": { + "Ref": "ec2instance", + "Port": 8080, + }, + }], + "VpcId": { + "Ref": "myvpc", + } + } + }, + "listener": { + "Type": "AWS::ElasticLoadBalancingV2::Listener", + "Properties": { + "DefaultActions": [{ + "Type": "forward", + "TargetGroupArn": {"Ref": "mytargetgroup1"} + }], + "LoadBalancerArn": {"Ref": "alb"}, + "Port": "80", + "Protocol": "HTTP" + } + }, + "myvpc": { + "Type": "AWS::EC2::VPC", + "Properties": { + "CidrBlock": "10.0.0.0/16", + } + }, + 
"mysubnet": { + "Type": "AWS::EC2::Subnet", + "Properties": { + "CidrBlock": "10.0.0.0/27", + "VpcId": {"Ref": "myvpc"}, + } + }, + "mysg": { + "Type": "AWS::EC2::SecurityGroup", + "Properties": { + "GroupName": "mysg", + "GroupDescription": "test security group", + "VpcId": {"Ref": "myvpc"} + } + }, + "ec2instance": { + "Type": "AWS::EC2::Instance", + "Properties": { + "ImageId": "ami-1234abcd", + "UserData": "some user data", + } + }, + }, + } + alb_template_json = json.dumps(alb_template) + + cfn_conn = boto3.client("cloudformation", "us-west-1") + cfn_conn.create_stack( + StackName="elb_stack", + TemplateBody=alb_template_json, + ) + + elbv2_conn = boto3.client("elbv2", "us-west-1") + + load_balancers = elbv2_conn.describe_load_balancers()['LoadBalancers'] + len(load_balancers).should.equal(1) + load_balancers[0]['LoadBalancerName'].should.equal('myelbv2') + load_balancers[0]['Scheme'].should.equal('internet-facing') + load_balancers[0]['Type'].should.equal('application') + load_balancers[0]['IpAddressType'].should.equal('ipv4') + + target_groups = sorted( + elbv2_conn.describe_target_groups()['TargetGroups'], + key=lambda tg: tg['TargetGroupName']) # sort to do comparison with indexes + len(target_groups).should.equal(2) + target_groups[0]['HealthCheckIntervalSeconds'].should.equal(30) + target_groups[0]['HealthCheckPath'].should.equal('/status') + target_groups[0]['HealthCheckPort'].should.equal('80') + target_groups[0]['HealthCheckProtocol'].should.equal('HTTP') + target_groups[0]['HealthCheckTimeoutSeconds'].should.equal(5) + target_groups[0]['HealthyThresholdCount'].should.equal(30) + target_groups[0]['UnhealthyThresholdCount'].should.equal(5) + target_groups[0]['Matcher'].should.equal({'HttpCode': '200,201'}) + target_groups[0]['TargetGroupName'].should.equal('mytargetgroup1') + target_groups[0]['Port'].should.equal(80) + target_groups[0]['Protocol'].should.equal('HTTP') + target_groups[0]['TargetType'].should.equal('instance') + + 
target_groups[1]['HealthCheckIntervalSeconds'].should.equal(30) + target_groups[1]['HealthCheckPath'].should.equal('/status') + target_groups[1]['HealthCheckPort'].should.equal('8080') + target_groups[1]['HealthCheckProtocol'].should.equal('HTTP') + target_groups[1]['HealthCheckTimeoutSeconds'].should.equal(5) + target_groups[1]['HealthyThresholdCount'].should.equal(30) + target_groups[1]['UnhealthyThresholdCount'].should.equal(5) + target_groups[1]['Matcher'].should.equal({'HttpCode': '200'}) + target_groups[1]['TargetGroupName'].should.equal('mytargetgroup2') + target_groups[1]['Port'].should.equal(8080) + target_groups[1]['Protocol'].should.equal('HTTP') + target_groups[1]['TargetType'].should.equal('instance') + + listeners = elbv2_conn.describe_listeners(LoadBalancerArn=load_balancers[0]['LoadBalancerArn'])['Listeners'] + len(listeners).should.equal(1) + listeners[0]['LoadBalancerArn'].should.equal(load_balancers[0]['LoadBalancerArn']) + listeners[0]['Port'].should.equal(80) + listeners[0]['Protocol'].should.equal('HTTP') + listeners[0]['DefaultActions'].should.equal([{ + "Type": "forward", + "TargetGroupArn": target_groups[0]['TargetGroupArn'] + }]) + + # test outputs + stacks = cfn_conn.describe_stacks(StackName='elb_stack')['Stacks'] + len(stacks).should.equal(1) + + dns = list(filter(lambda item: item['OutputKey'] == 'albdns', stacks[0]['Outputs']))[0] + name = list(filter(lambda item: item['OutputKey'] == 'albname', stacks[0]['Outputs']))[0] + + dns['OutputValue'].should.equal(load_balancers[0]['DNSName']) + name['OutputValue'].should.equal(load_balancers[0]['LoadBalancerName']) diff --git a/tests/test_cloudformation/test_stack_parsing.py b/tests/test_cloudformation/test_stack_parsing.py index 9aea55f54..d21db2d48 100644 --- a/tests/test_cloudformation/test_stack_parsing.py +++ b/tests/test_cloudformation/test_stack_parsing.py @@ -1,471 +1,471 @@ -from __future__ import unicode_literals -import json -import yaml - -from mock import patch -import sure # 
noqa - -from moto.cloudformation.exceptions import ValidationError -from moto.cloudformation.models import FakeStack -from moto.cloudformation.parsing import resource_class_from_type, parse_condition, Export -from moto.sqs.models import Queue -from moto.s3.models import FakeBucket -from moto.cloudformation.utils import yaml_tag_constructor -from boto.cloudformation.stack import Output - - - -dummy_template = { - "AWSTemplateFormatVersion": "2010-09-09", - - "Description": "Create a multi-az, load balanced, Auto Scaled sample web site. The Auto Scaling trigger is based on the CPU utilization of the web servers. The AMI is chosen based on the region in which the stack is run. This example creates a web service running across all availability zones in a region. The instances are load balanced with a simple health check. The web site is available on port 80, however, the instances can be configured to listen on any port (8888 by default). **WARNING** This template creates one or more Amazon EC2 instances. You will be billed for the AWS resources used if you create a stack from this template.", - - "Resources": { - "Queue": { - "Type": "AWS::SQS::Queue", - "Properties": { - "QueueName": "my-queue", - "VisibilityTimeout": 60, - } - }, - "S3Bucket": { - "Type": "AWS::S3::Bucket", - "DeletionPolicy": "Retain" - }, - }, -} - -name_type_template = { - "AWSTemplateFormatVersion": "2010-09-09", - - "Description": "Create a multi-az, load balanced, Auto Scaled sample web site. The Auto Scaling trigger is based on the CPU utilization of the web servers. The AMI is chosen based on the region in which the stack is run. This example creates a web service running across all availability zones in a region. The instances are load balanced with a simple health check. The web site is available on port 80, however, the instances can be configured to listen on any port (8888 by default). **WARNING** This template creates one or more Amazon EC2 instances. 
You will be billed for the AWS resources used if you create a stack from this template.", - - "Resources": { - "Queue": { - "Type": "AWS::SQS::Queue", - "Properties": { - "VisibilityTimeout": 60, - } - }, - }, -} - -output_dict = { - "Outputs": { - "Output1": { - "Value": {"Ref": "Queue"}, - "Description": "This is a description." - } - } -} - -bad_output = { - "Outputs": { - "Output1": { - "Value": {"Fn::GetAtt": ["Queue", "InvalidAttribute"]} - } - } -} - -get_attribute_output = { - "Outputs": { - "Output1": { - "Value": {"Fn::GetAtt": ["Queue", "QueueName"]} - } - } -} - -get_availability_zones_output = { - "Outputs": { - "Output1": { - "Value": {"Fn::GetAZs": ""} - } - } -} - -split_select_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "Queue": { - "Type": "AWS::SQS::Queue", - "Properties": { - "QueueName": {"Fn::Select": [ "1", {"Fn::Split": [ "-", "123-myqueue" ] } ] }, - "VisibilityTimeout": 60, - } - } - } -} - -sub_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "Queue1": { - "Type": "AWS::SQS::Queue", - "Properties": { - "QueueName": {"Fn::Sub": '${AWS::StackName}-queue-${!Literal}'}, - "VisibilityTimeout": 60, - } - }, - "Queue2": { - "Type": "AWS::SQS::Queue", - "Properties": { - "QueueName": {"Fn::Sub": '${Queue1.QueueName}'}, - "VisibilityTimeout": 60, - } - }, - } -} - -export_value_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "Queue": { - "Type": "AWS::SQS::Queue", - "Properties": { - "QueueName": {"Fn::Sub": '${AWS::StackName}-queue'}, - "VisibilityTimeout": 60, - } - } - }, - "Outputs": { - "Output1": { - "Value": "value", - "Export": {"Name": 'queue-us-west-1'} - } - } -} - -import_value_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - "Queue": { - "Type": "AWS::SQS::Queue", - "Properties": { - "QueueName": {"Fn::ImportValue": 'queue-us-west-1'}, - "VisibilityTimeout": 60, - } - } - } -} - -outputs_template = 
dict(list(dummy_template.items()) + - list(output_dict.items())) -bad_outputs_template = dict( - list(dummy_template.items()) + list(bad_output.items())) -get_attribute_outputs_template = dict( - list(dummy_template.items()) + list(get_attribute_output.items())) -get_availability_zones_template = dict( - list(dummy_template.items()) + list(get_availability_zones_output.items())) - -dummy_template_json = json.dumps(dummy_template) -name_type_template_json = json.dumps(name_type_template) -output_type_template_json = json.dumps(outputs_template) -bad_output_template_json = json.dumps(bad_outputs_template) -get_attribute_outputs_template_json = json.dumps( - get_attribute_outputs_template) -get_availability_zones_template_json = json.dumps( - get_availability_zones_template) -split_select_template_json = json.dumps(split_select_template) -sub_template_json = json.dumps(sub_template) -export_value_template_json = json.dumps(export_value_template) -import_value_template_json = json.dumps(import_value_template) - - -def test_parse_stack_resources(): - stack = FakeStack( - stack_id="test_id", - name="test_stack", - template=dummy_template_json, - parameters={}, - region_name='us-west-1') - - stack.resource_map.should.have.length_of(2) - - queue = stack.resource_map['Queue'] - queue.should.be.a(Queue) - queue.name.should.equal("my-queue") - - bucket = stack.resource_map['S3Bucket'] - bucket.should.be.a(FakeBucket) - bucket.physical_resource_id.should.equal(bucket.name) - - -@patch("moto.cloudformation.parsing.logger") -def test_missing_resource_logs(logger): - resource_class_from_type("foobar") - logger.warning.assert_called_with( - 'No Moto CloudFormation support for %s', 'foobar') - - -def test_parse_stack_with_name_type_resource(): - stack = FakeStack( - stack_id="test_id", - name="test_stack", - template=name_type_template_json, - parameters={}, - region_name='us-west-1') - - stack.resource_map.should.have.length_of(1) - 
list(stack.resource_map.keys())[0].should.equal('Queue') - queue = list(stack.resource_map.values())[0] - queue.should.be.a(Queue) - - -def test_parse_stack_with_yaml_template(): - stack = FakeStack( - stack_id="test_id", - name="test_stack", - template=yaml.dump(name_type_template), - parameters={}, - region_name='us-west-1') - - stack.resource_map.should.have.length_of(1) - list(stack.resource_map.keys())[0].should.equal('Queue') - queue = list(stack.resource_map.values())[0] - queue.should.be.a(Queue) - - -def test_parse_stack_with_outputs(): - stack = FakeStack( - stack_id="test_id", - name="test_stack", - template=output_type_template_json, - parameters={}, - region_name='us-west-1') - - stack.output_map.should.have.length_of(1) - list(stack.output_map.keys())[0].should.equal('Output1') - output = list(stack.output_map.values())[0] - output.should.be.a(Output) - output.description.should.equal("This is a description.") - - -def test_parse_stack_with_get_attribute_outputs(): - stack = FakeStack( - stack_id="test_id", - name="test_stack", - template=get_attribute_outputs_template_json, - parameters={}, - region_name='us-west-1') - - stack.output_map.should.have.length_of(1) - list(stack.output_map.keys())[0].should.equal('Output1') - output = list(stack.output_map.values())[0] - output.should.be.a(Output) - output.value.should.equal("my-queue") - -def test_parse_stack_with_get_attribute_kms(): - from .fixtures.kms_key import template - - template_json = json.dumps(template) - stack = FakeStack( - stack_id="test_id", - name="test_stack", - template=template_json, - parameters={}, - region_name='us-west-1') - - stack.output_map.should.have.length_of(1) - list(stack.output_map.keys())[0].should.equal('KeyArn') - output = list(stack.output_map.values())[0] - output.should.be.a(Output) - -def test_parse_stack_with_get_availability_zones(): - stack = FakeStack( - stack_id="test_id", - name="test_stack", - template=get_availability_zones_template_json, - parameters={}, 
- region_name='us-east-1') - - stack.output_map.should.have.length_of(1) - list(stack.output_map.keys())[0].should.equal('Output1') - output = list(stack.output_map.values())[0] - output.should.be.a(Output) - output.value.should.equal([ "us-east-1a", "us-east-1b", "us-east-1c", "us-east-1d" ]) - - -def test_parse_stack_with_bad_get_attribute_outputs(): - FakeStack.when.called_with( - "test_id", "test_stack", bad_output_template_json, {}, "us-west-1").should.throw(ValidationError) - - -def test_parse_equals_condition(): - parse_condition( - condition={"Fn::Equals": [{"Ref": "EnvType"}, "prod"]}, - resources_map={"EnvType": "prod"}, - condition_map={}, - ).should.equal(True) - - parse_condition( - condition={"Fn::Equals": [{"Ref": "EnvType"}, "prod"]}, - resources_map={"EnvType": "staging"}, - condition_map={}, - ).should.equal(False) - - -def test_parse_not_condition(): - parse_condition( - condition={ - "Fn::Not": [{ - "Fn::Equals": [{"Ref": "EnvType"}, "prod"] - }] - }, - resources_map={"EnvType": "prod"}, - condition_map={}, - ).should.equal(False) - - parse_condition( - condition={ - "Fn::Not": [{ - "Fn::Equals": [{"Ref": "EnvType"}, "prod"] - }] - }, - resources_map={"EnvType": "staging"}, - condition_map={}, - ).should.equal(True) - - -def test_parse_and_condition(): - parse_condition( - condition={ - "Fn::And": [ - {"Fn::Equals": [{"Ref": "EnvType"}, "prod"]}, - {"Fn::Equals": [{"Ref": "EnvType"}, "staging"]}, - ] - }, - resources_map={"EnvType": "prod"}, - condition_map={}, - ).should.equal(False) - - parse_condition( - condition={ - "Fn::And": [ - {"Fn::Equals": [{"Ref": "EnvType"}, "prod"]}, - {"Fn::Equals": [{"Ref": "EnvType"}, "prod"]}, - ] - }, - resources_map={"EnvType": "prod"}, - condition_map={}, - ).should.equal(True) - - -def test_parse_or_condition(): - parse_condition( - condition={ - "Fn::Or": [ - {"Fn::Equals": [{"Ref": "EnvType"}, "prod"]}, - {"Fn::Equals": [{"Ref": "EnvType"}, "staging"]}, - ] - }, - resources_map={"EnvType": "prod"}, - 
condition_map={}, - ).should.equal(True) - - parse_condition( - condition={ - "Fn::Or": [ - {"Fn::Equals": [{"Ref": "EnvType"}, "staging"]}, - {"Fn::Equals": [{"Ref": "EnvType"}, "staging"]}, - ] - }, - resources_map={"EnvType": "prod"}, - condition_map={}, - ).should.equal(False) - - -def test_reference_other_conditions(): - parse_condition( - condition={"Fn::Not": [{"Condition": "OtherCondition"}]}, - resources_map={}, - condition_map={"OtherCondition": True}, - ).should.equal(False) - - -def test_parse_split_and_select(): - stack = FakeStack( - stack_id="test_id", - name="test_stack", - template=split_select_template_json, - parameters={}, - region_name='us-west-1') - - stack.resource_map.should.have.length_of(1) - queue = stack.resource_map['Queue'] - queue.name.should.equal("myqueue") - - -def test_sub(): - stack = FakeStack( - stack_id="test_id", - name="test_stack", - template=sub_template_json, - parameters={}, - region_name='us-west-1') - - queue1 = stack.resource_map['Queue1'] - queue2 = stack.resource_map['Queue2'] - queue2.name.should.equal(queue1.name) - - -def test_import(): - export_stack = FakeStack( - stack_id="test_id", - name="test_stack", - template=export_value_template_json, - parameters={}, - region_name='us-west-1') - import_stack = FakeStack( - stack_id="test_id", - name="test_stack", - template=import_value_template_json, - parameters={}, - region_name='us-west-1', - cross_stack_resources={export_stack.exports[0].value: export_stack.exports[0]}) - - queue = import_stack.resource_map['Queue'] - queue.name.should.equal("value") - - - -def test_short_form_func_in_yaml_teamplate(): - template = """--- - KeyB64: !Base64 valueToEncode - KeyRef: !Ref foo - KeyAnd: !And - - A - - B - KeyEquals: !Equals [A, B] - KeyIf: !If [A, B, C] - KeyNot: !Not [A] - KeyOr: !Or [A, B] - KeyFindInMap: !FindInMap [A, B, C] - KeyGetAtt: !GetAtt A.B - KeyGetAZs: !GetAZs A - KeyImportValue: !ImportValue A - KeyJoin: !Join [ ":", [A, B, C] ] - KeySelect: !Select [A, 
B] - KeySplit: !Split [A, B] - KeySub: !Sub A - """ - yaml.add_multi_constructor('', yaml_tag_constructor) - template_dict = yaml.load(template) - key_and_expects = [ - ['KeyRef', {'Ref': 'foo'}], - ['KeyB64', {'Fn::Base64': 'valueToEncode'}], - ['KeyAnd', {'Fn::And': ['A', 'B']}], - ['KeyEquals', {'Fn::Equals': ['A', 'B']}], - ['KeyIf', {'Fn::If': ['A', 'B', 'C']}], - ['KeyNot', {'Fn::Not': ['A']}], - ['KeyOr', {'Fn::Or': ['A', 'B']}], - ['KeyFindInMap', {'Fn::FindInMap': ['A', 'B', 'C']}], - ['KeyGetAtt', {'Fn::GetAtt': ['A', 'B']}], - ['KeyGetAZs', {'Fn::GetAZs': 'A'}], - ['KeyImportValue', {'Fn::ImportValue': 'A'}], - ['KeyJoin', {'Fn::Join': [ ":", [ 'A', 'B', 'C' ] ]}], - ['KeySelect', {'Fn::Select': ['A', 'B']}], - ['KeySplit', {'Fn::Split': ['A', 'B']}], - ['KeySub', {'Fn::Sub': 'A'}], - ] - for k, v in key_and_expects: - template_dict.should.have.key(k).which.should.be.equal(v) +from __future__ import unicode_literals +import json +import yaml + +from mock import patch +import sure # noqa + +from moto.cloudformation.exceptions import ValidationError +from moto.cloudformation.models import FakeStack +from moto.cloudformation.parsing import resource_class_from_type, parse_condition, Export +from moto.sqs.models import Queue +from moto.s3.models import FakeBucket +from moto.cloudformation.utils import yaml_tag_constructor +from boto.cloudformation.stack import Output + + + +dummy_template = { + "AWSTemplateFormatVersion": "2010-09-09", + + "Description": "Create a multi-az, load balanced, Auto Scaled sample web site. The Auto Scaling trigger is based on the CPU utilization of the web servers. The AMI is chosen based on the region in which the stack is run. This example creates a web service running across all availability zones in a region. The instances are load balanced with a simple health check. The web site is available on port 80, however, the instances can be configured to listen on any port (8888 by default). 
**WARNING** This template creates one or more Amazon EC2 instances. You will be billed for the AWS resources used if you create a stack from this template.", + + "Resources": { + "Queue": { + "Type": "AWS::SQS::Queue", + "Properties": { + "QueueName": "my-queue", + "VisibilityTimeout": 60, + } + }, + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "DeletionPolicy": "Retain" + }, + }, +} + +name_type_template = { + "AWSTemplateFormatVersion": "2010-09-09", + + "Description": "Create a multi-az, load balanced, Auto Scaled sample web site. The Auto Scaling trigger is based on the CPU utilization of the web servers. The AMI is chosen based on the region in which the stack is run. This example creates a web service running across all availability zones in a region. The instances are load balanced with a simple health check. The web site is available on port 80, however, the instances can be configured to listen on any port (8888 by default). **WARNING** This template creates one or more Amazon EC2 instances. You will be billed for the AWS resources used if you create a stack from this template.", + + "Resources": { + "Queue": { + "Type": "AWS::SQS::Queue", + "Properties": { + "VisibilityTimeout": 60, + } + }, + }, +} + +output_dict = { + "Outputs": { + "Output1": { + "Value": {"Ref": "Queue"}, + "Description": "This is a description." 
+ } + } +} + +bad_output = { + "Outputs": { + "Output1": { + "Value": {"Fn::GetAtt": ["Queue", "InvalidAttribute"]} + } + } +} + +get_attribute_output = { + "Outputs": { + "Output1": { + "Value": {"Fn::GetAtt": ["Queue", "QueueName"]} + } + } +} + +get_availability_zones_output = { + "Outputs": { + "Output1": { + "Value": {"Fn::GetAZs": ""} + } + } +} + +split_select_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "Queue": { + "Type": "AWS::SQS::Queue", + "Properties": { + "QueueName": {"Fn::Select": [ "1", {"Fn::Split": [ "-", "123-myqueue" ] } ] }, + "VisibilityTimeout": 60, + } + } + } +} + +sub_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "Queue1": { + "Type": "AWS::SQS::Queue", + "Properties": { + "QueueName": {"Fn::Sub": '${AWS::StackName}-queue-${!Literal}'}, + "VisibilityTimeout": 60, + } + }, + "Queue2": { + "Type": "AWS::SQS::Queue", + "Properties": { + "QueueName": {"Fn::Sub": '${Queue1.QueueName}'}, + "VisibilityTimeout": 60, + } + }, + } +} + +export_value_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "Queue": { + "Type": "AWS::SQS::Queue", + "Properties": { + "QueueName": {"Fn::Sub": '${AWS::StackName}-queue'}, + "VisibilityTimeout": 60, + } + } + }, + "Outputs": { + "Output1": { + "Value": "value", + "Export": {"Name": 'queue-us-west-1'} + } + } +} + +import_value_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "Queue": { + "Type": "AWS::SQS::Queue", + "Properties": { + "QueueName": {"Fn::ImportValue": 'queue-us-west-1'}, + "VisibilityTimeout": 60, + } + } + } +} + +outputs_template = dict(list(dummy_template.items()) + + list(output_dict.items())) +bad_outputs_template = dict( + list(dummy_template.items()) + list(bad_output.items())) +get_attribute_outputs_template = dict( + list(dummy_template.items()) + list(get_attribute_output.items())) +get_availability_zones_template = dict( + list(dummy_template.items()) + 
list(get_availability_zones_output.items())) + +dummy_template_json = json.dumps(dummy_template) +name_type_template_json = json.dumps(name_type_template) +output_type_template_json = json.dumps(outputs_template) +bad_output_template_json = json.dumps(bad_outputs_template) +get_attribute_outputs_template_json = json.dumps( + get_attribute_outputs_template) +get_availability_zones_template_json = json.dumps( + get_availability_zones_template) +split_select_template_json = json.dumps(split_select_template) +sub_template_json = json.dumps(sub_template) +export_value_template_json = json.dumps(export_value_template) +import_value_template_json = json.dumps(import_value_template) + + +def test_parse_stack_resources(): + stack = FakeStack( + stack_id="test_id", + name="test_stack", + template=dummy_template_json, + parameters={}, + region_name='us-west-1') + + stack.resource_map.should.have.length_of(2) + + queue = stack.resource_map['Queue'] + queue.should.be.a(Queue) + queue.name.should.equal("my-queue") + + bucket = stack.resource_map['S3Bucket'] + bucket.should.be.a(FakeBucket) + bucket.physical_resource_id.should.equal(bucket.name) + + +@patch("moto.cloudformation.parsing.logger") +def test_missing_resource_logs(logger): + resource_class_from_type("foobar") + logger.warning.assert_called_with( + 'No Moto CloudFormation support for %s', 'foobar') + + +def test_parse_stack_with_name_type_resource(): + stack = FakeStack( + stack_id="test_id", + name="test_stack", + template=name_type_template_json, + parameters={}, + region_name='us-west-1') + + stack.resource_map.should.have.length_of(1) + list(stack.resource_map.keys())[0].should.equal('Queue') + queue = list(stack.resource_map.values())[0] + queue.should.be.a(Queue) + + +def test_parse_stack_with_yaml_template(): + stack = FakeStack( + stack_id="test_id", + name="test_stack", + template=yaml.dump(name_type_template), + parameters={}, + region_name='us-west-1') + + stack.resource_map.should.have.length_of(1) + 
list(stack.resource_map.keys())[0].should.equal('Queue') + queue = list(stack.resource_map.values())[0] + queue.should.be.a(Queue) + + +def test_parse_stack_with_outputs(): + stack = FakeStack( + stack_id="test_id", + name="test_stack", + template=output_type_template_json, + parameters={}, + region_name='us-west-1') + + stack.output_map.should.have.length_of(1) + list(stack.output_map.keys())[0].should.equal('Output1') + output = list(stack.output_map.values())[0] + output.should.be.a(Output) + output.description.should.equal("This is a description.") + + +def test_parse_stack_with_get_attribute_outputs(): + stack = FakeStack( + stack_id="test_id", + name="test_stack", + template=get_attribute_outputs_template_json, + parameters={}, + region_name='us-west-1') + + stack.output_map.should.have.length_of(1) + list(stack.output_map.keys())[0].should.equal('Output1') + output = list(stack.output_map.values())[0] + output.should.be.a(Output) + output.value.should.equal("my-queue") + +def test_parse_stack_with_get_attribute_kms(): + from .fixtures.kms_key import template + + template_json = json.dumps(template) + stack = FakeStack( + stack_id="test_id", + name="test_stack", + template=template_json, + parameters={}, + region_name='us-west-1') + + stack.output_map.should.have.length_of(1) + list(stack.output_map.keys())[0].should.equal('KeyArn') + output = list(stack.output_map.values())[0] + output.should.be.a(Output) + +def test_parse_stack_with_get_availability_zones(): + stack = FakeStack( + stack_id="test_id", + name="test_stack", + template=get_availability_zones_template_json, + parameters={}, + region_name='us-east-1') + + stack.output_map.should.have.length_of(1) + list(stack.output_map.keys())[0].should.equal('Output1') + output = list(stack.output_map.values())[0] + output.should.be.a(Output) + output.value.should.equal([ "us-east-1a", "us-east-1b", "us-east-1c", "us-east-1d" ]) + + +def test_parse_stack_with_bad_get_attribute_outputs(): + 
FakeStack.when.called_with( + "test_id", "test_stack", bad_output_template_json, {}, "us-west-1").should.throw(ValidationError) + + +def test_parse_equals_condition(): + parse_condition( + condition={"Fn::Equals": [{"Ref": "EnvType"}, "prod"]}, + resources_map={"EnvType": "prod"}, + condition_map={}, + ).should.equal(True) + + parse_condition( + condition={"Fn::Equals": [{"Ref": "EnvType"}, "prod"]}, + resources_map={"EnvType": "staging"}, + condition_map={}, + ).should.equal(False) + + +def test_parse_not_condition(): + parse_condition( + condition={ + "Fn::Not": [{ + "Fn::Equals": [{"Ref": "EnvType"}, "prod"] + }] + }, + resources_map={"EnvType": "prod"}, + condition_map={}, + ).should.equal(False) + + parse_condition( + condition={ + "Fn::Not": [{ + "Fn::Equals": [{"Ref": "EnvType"}, "prod"] + }] + }, + resources_map={"EnvType": "staging"}, + condition_map={}, + ).should.equal(True) + + +def test_parse_and_condition(): + parse_condition( + condition={ + "Fn::And": [ + {"Fn::Equals": [{"Ref": "EnvType"}, "prod"]}, + {"Fn::Equals": [{"Ref": "EnvType"}, "staging"]}, + ] + }, + resources_map={"EnvType": "prod"}, + condition_map={}, + ).should.equal(False) + + parse_condition( + condition={ + "Fn::And": [ + {"Fn::Equals": [{"Ref": "EnvType"}, "prod"]}, + {"Fn::Equals": [{"Ref": "EnvType"}, "prod"]}, + ] + }, + resources_map={"EnvType": "prod"}, + condition_map={}, + ).should.equal(True) + + +def test_parse_or_condition(): + parse_condition( + condition={ + "Fn::Or": [ + {"Fn::Equals": [{"Ref": "EnvType"}, "prod"]}, + {"Fn::Equals": [{"Ref": "EnvType"}, "staging"]}, + ] + }, + resources_map={"EnvType": "prod"}, + condition_map={}, + ).should.equal(True) + + parse_condition( + condition={ + "Fn::Or": [ + {"Fn::Equals": [{"Ref": "EnvType"}, "staging"]}, + {"Fn::Equals": [{"Ref": "EnvType"}, "staging"]}, + ] + }, + resources_map={"EnvType": "prod"}, + condition_map={}, + ).should.equal(False) + + +def test_reference_other_conditions(): + parse_condition( + 
condition={"Fn::Not": [{"Condition": "OtherCondition"}]}, + resources_map={}, + condition_map={"OtherCondition": True}, + ).should.equal(False) + + +def test_parse_split_and_select(): + stack = FakeStack( + stack_id="test_id", + name="test_stack", + template=split_select_template_json, + parameters={}, + region_name='us-west-1') + + stack.resource_map.should.have.length_of(1) + queue = stack.resource_map['Queue'] + queue.name.should.equal("myqueue") + + +def test_sub(): + stack = FakeStack( + stack_id="test_id", + name="test_stack", + template=sub_template_json, + parameters={}, + region_name='us-west-1') + + queue1 = stack.resource_map['Queue1'] + queue2 = stack.resource_map['Queue2'] + queue2.name.should.equal(queue1.name) + + +def test_import(): + export_stack = FakeStack( + stack_id="test_id", + name="test_stack", + template=export_value_template_json, + parameters={}, + region_name='us-west-1') + import_stack = FakeStack( + stack_id="test_id", + name="test_stack", + template=import_value_template_json, + parameters={}, + region_name='us-west-1', + cross_stack_resources={export_stack.exports[0].value: export_stack.exports[0]}) + + queue = import_stack.resource_map['Queue'] + queue.name.should.equal("value") + + + +def test_short_form_func_in_yaml_teamplate(): + template = """--- + KeyB64: !Base64 valueToEncode + KeyRef: !Ref foo + KeyAnd: !And + - A + - B + KeyEquals: !Equals [A, B] + KeyIf: !If [A, B, C] + KeyNot: !Not [A] + KeyOr: !Or [A, B] + KeyFindInMap: !FindInMap [A, B, C] + KeyGetAtt: !GetAtt A.B + KeyGetAZs: !GetAZs A + KeyImportValue: !ImportValue A + KeyJoin: !Join [ ":", [A, B, C] ] + KeySelect: !Select [A, B] + KeySplit: !Split [A, B] + KeySub: !Sub A + """ + yaml.add_multi_constructor('', yaml_tag_constructor, Loader=yaml.Loader) + template_dict = yaml.load(template, Loader=yaml.Loader) + key_and_expects = [ + ['KeyRef', {'Ref': 'foo'}], + ['KeyB64', {'Fn::Base64': 'valueToEncode'}], + ['KeyAnd', {'Fn::And': ['A', 'B']}], + ['KeyEquals', 
{'Fn::Equals': ['A', 'B']}], + ['KeyIf', {'Fn::If': ['A', 'B', 'C']}], + ['KeyNot', {'Fn::Not': ['A']}], + ['KeyOr', {'Fn::Or': ['A', 'B']}], + ['KeyFindInMap', {'Fn::FindInMap': ['A', 'B', 'C']}], + ['KeyGetAtt', {'Fn::GetAtt': ['A', 'B']}], + ['KeyGetAZs', {'Fn::GetAZs': 'A'}], + ['KeyImportValue', {'Fn::ImportValue': 'A'}], + ['KeyJoin', {'Fn::Join': [ ":", [ 'A', 'B', 'C' ] ]}], + ['KeySelect', {'Fn::Select': ['A', 'B']}], + ['KeySplit', {'Fn::Split': ['A', 'B']}], + ['KeySub', {'Fn::Sub': 'A'}], + ] + for k, v in key_and_expects: + template_dict.should.have.key(k).which.should.be.equal(v) diff --git a/tests/test_cloudformation/test_validate.py b/tests/test_cloudformation/test_validate.py new file mode 100644 index 000000000..e2c3af05d --- /dev/null +++ b/tests/test_cloudformation/test_validate.py @@ -0,0 +1,115 @@ +from collections import OrderedDict +import json +import yaml +import os +import boto3 +from nose.tools import raises +import botocore + + +from moto.cloudformation.exceptions import ValidationError +from moto.cloudformation.models import FakeStack +from moto.cloudformation.parsing import resource_class_from_type, parse_condition, Export +from moto.sqs.models import Queue +from moto.s3.models import FakeBucket +from moto.cloudformation.utils import yaml_tag_constructor +from boto.cloudformation.stack import Output +from moto import mock_cloudformation, mock_s3, mock_sqs, mock_ec2 + +json_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack 1", + "Resources": { + "EC2Instance1": { + "Type": "AWS::EC2::Instance", + "Properties": { + "ImageId": "ami-d3adb33f", + "KeyName": "dummy", + "InstanceType": "t2.micro", + "Tags": [ + { + "Key": "Description", + "Value": "Test tag" + }, + { + "Key": "Name", + "Value": "Name tag for tests" + } + ] + } + } + } +} + +# One resource is required +json_bad_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack 1" +} + +dummy_template_json = 
json.dumps(json_template) +dummy_bad_template_json = json.dumps(json_bad_template) + + +@mock_cloudformation +def test_boto3_json_validate_successful(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + response = cf_conn.validate_template( + TemplateBody=dummy_template_json, + ) + assert response['Description'] == "Stack 1" + assert response['Parameters'] == [] + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 + +@mock_cloudformation +def test_boto3_json_invalid_missing_resource(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + try: + cf_conn.validate_template( + TemplateBody=dummy_bad_template_json, + ) + assert False + except botocore.exceptions.ClientError as e: + assert str(e) == 'An error occurred (ValidationError) when calling the ValidateTemplate operation: Stack' \ + ' with id Missing top level item Resources to file module does not exist' + assert True + + +yaml_template = """ + AWSTemplateFormatVersion: '2010-09-09' + Description: Simple CloudFormation Test Template + Resources: + S3Bucket: + Type: AWS::S3::Bucket + Properties: + AccessControl: PublicRead + BucketName: cf-test-bucket-1 +""" + +yaml_bad_template = """ + AWSTemplateFormatVersion: '2010-09-09' + Description: Simple CloudFormation Test Template +""" + +@mock_cloudformation +def test_boto3_yaml_validate_successful(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + response = cf_conn.validate_template( + TemplateBody=yaml_template, + ) + assert response['Description'] == "Simple CloudFormation Test Template" + assert response['Parameters'] == [] + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 + +@mock_cloudformation +def test_boto3_yaml_invalid_missing_resource(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + try: + cf_conn.validate_template( + TemplateBody=yaml_bad_template, + ) + assert False + except botocore.exceptions.ClientError as e: + assert str(e) == 'An error occurred 
(ValidationError) when calling the ValidateTemplate operation: Stack' \ + ' with id Missing top level item Resources to file module does not exist' + assert True diff --git a/tests/test_cognitoidp/test_cognitoidp.py b/tests/test_cognitoidp/test_cognitoidp.py index 6c0ad131b..e4e38e821 100644 --- a/tests/test_cognitoidp/test_cognitoidp.py +++ b/tests/test_cognitoidp/test_cognitoidp.py @@ -1,601 +1,1164 @@ -from __future__ import unicode_literals - -import boto3 -import json -import os -import uuid - -from jose import jws - -from moto import mock_cognitoidp -import sure # noqa - - -@mock_cognitoidp -def test_create_user_pool(): - conn = boto3.client("cognito-idp", "us-west-2") - - name = str(uuid.uuid4()) - value = str(uuid.uuid4()) - result = conn.create_user_pool( - PoolName=name, - LambdaConfig={ - "PreSignUp": value - } - ) - - result["UserPool"]["Id"].should_not.be.none - result["UserPool"]["Id"].should.match(r'[\w-]+_[0-9a-zA-Z]+') - result["UserPool"]["Name"].should.equal(name) - result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value) - - -@mock_cognitoidp -def test_list_user_pools(): - conn = boto3.client("cognito-idp", "us-west-2") - - name = str(uuid.uuid4()) - conn.create_user_pool(PoolName=name) - result = conn.list_user_pools(MaxResults=10) - result["UserPools"].should.have.length_of(1) - result["UserPools"][0]["Name"].should.equal(name) - - -@mock_cognitoidp -def test_describe_user_pool(): - conn = boto3.client("cognito-idp", "us-west-2") - - name = str(uuid.uuid4()) - value = str(uuid.uuid4()) - user_pool_details = conn.create_user_pool( - PoolName=name, - LambdaConfig={ - "PreSignUp": value - } - ) - - result = conn.describe_user_pool(UserPoolId=user_pool_details["UserPool"]["Id"]) - result["UserPool"]["Name"].should.equal(name) - result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value) - - -@mock_cognitoidp -def test_delete_user_pool(): - conn = boto3.client("cognito-idp", "us-west-2") - - user_pool_id = 
conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - conn.list_user_pools(MaxResults=10)["UserPools"].should.have.length_of(1) - conn.delete_user_pool(UserPoolId=user_pool_id) - conn.list_user_pools(MaxResults=10)["UserPools"].should.have.length_of(0) - - -@mock_cognitoidp -def test_create_user_pool_domain(): - conn = boto3.client("cognito-idp", "us-west-2") - - domain = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - result = conn.create_user_pool_domain(UserPoolId=user_pool_id, Domain=domain) - result["ResponseMetadata"]["HTTPStatusCode"].should.equal(200) - - -@mock_cognitoidp -def test_describe_user_pool_domain(): - conn = boto3.client("cognito-idp", "us-west-2") - - domain = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - conn.create_user_pool_domain(UserPoolId=user_pool_id, Domain=domain) - result = conn.describe_user_pool_domain(Domain=domain) - result["DomainDescription"]["Domain"].should.equal(domain) - result["DomainDescription"]["UserPoolId"].should.equal(user_pool_id) - result["DomainDescription"]["AWSAccountId"].should_not.be.none - - -@mock_cognitoidp -def test_delete_user_pool_domain(): - conn = boto3.client("cognito-idp", "us-west-2") - - domain = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - conn.create_user_pool_domain(UserPoolId=user_pool_id, Domain=domain) - result = conn.delete_user_pool_domain(UserPoolId=user_pool_id, Domain=domain) - result["ResponseMetadata"]["HTTPStatusCode"].should.equal(200) - result = conn.describe_user_pool_domain(Domain=domain) - # This is a surprising behavior of the real service: describing a missing domain comes - # back with status 200 and a DomainDescription of {} - result["ResponseMetadata"]["HTTPStatusCode"].should.equal(200) - result["DomainDescription"].keys().should.have.length_of(0) - - -@mock_cognitoidp -def 
test_create_user_pool_client(): - conn = boto3.client("cognito-idp", "us-west-2") - - client_name = str(uuid.uuid4()) - value = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - result = conn.create_user_pool_client( - UserPoolId=user_pool_id, - ClientName=client_name, - CallbackURLs=[value], - ) - - result["UserPoolClient"]["UserPoolId"].should.equal(user_pool_id) - result["UserPoolClient"]["ClientId"].should_not.be.none - result["UserPoolClient"]["ClientName"].should.equal(client_name) - result["UserPoolClient"]["CallbackURLs"].should.have.length_of(1) - result["UserPoolClient"]["CallbackURLs"][0].should.equal(value) - - -@mock_cognitoidp -def test_list_user_pool_clients(): - conn = boto3.client("cognito-idp", "us-west-2") - - client_name = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - conn.create_user_pool_client(UserPoolId=user_pool_id, ClientName=client_name) - result = conn.list_user_pool_clients(UserPoolId=user_pool_id, MaxResults=10) - result["UserPoolClients"].should.have.length_of(1) - result["UserPoolClients"][0]["ClientName"].should.equal(client_name) - - -@mock_cognitoidp -def test_describe_user_pool_client(): - conn = boto3.client("cognito-idp", "us-west-2") - - client_name = str(uuid.uuid4()) - value = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - client_details = conn.create_user_pool_client( - UserPoolId=user_pool_id, - ClientName=client_name, - CallbackURLs=[value], - ) - - result = conn.describe_user_pool_client( - UserPoolId=user_pool_id, - ClientId=client_details["UserPoolClient"]["ClientId"], - ) - - result["UserPoolClient"]["ClientName"].should.equal(client_name) - result["UserPoolClient"]["CallbackURLs"].should.have.length_of(1) - result["UserPoolClient"]["CallbackURLs"][0].should.equal(value) - - -@mock_cognitoidp -def test_update_user_pool_client(): - conn = 
boto3.client("cognito-idp", "us-west-2") - - old_client_name = str(uuid.uuid4()) - new_client_name = str(uuid.uuid4()) - old_value = str(uuid.uuid4()) - new_value = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - client_details = conn.create_user_pool_client( - UserPoolId=user_pool_id, - ClientName=old_client_name, - CallbackURLs=[old_value], - ) - - result = conn.update_user_pool_client( - UserPoolId=user_pool_id, - ClientId=client_details["UserPoolClient"]["ClientId"], - ClientName=new_client_name, - CallbackURLs=[new_value], - ) - - result["UserPoolClient"]["ClientName"].should.equal(new_client_name) - result["UserPoolClient"]["CallbackURLs"].should.have.length_of(1) - result["UserPoolClient"]["CallbackURLs"][0].should.equal(new_value) - - -@mock_cognitoidp -def test_delete_user_pool_client(): - conn = boto3.client("cognito-idp", "us-west-2") - - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - client_details = conn.create_user_pool_client( - UserPoolId=user_pool_id, - ClientName=str(uuid.uuid4()), - ) - - conn.delete_user_pool_client( - UserPoolId=user_pool_id, - ClientId=client_details["UserPoolClient"]["ClientId"], - ) - - caught = False - try: - conn.describe_user_pool_client( - UserPoolId=user_pool_id, - ClientId=client_details["UserPoolClient"]["ClientId"], - ) - except conn.exceptions.ResourceNotFoundException: - caught = True - - caught.should.be.true - - -@mock_cognitoidp -def test_create_identity_provider(): - conn = boto3.client("cognito-idp", "us-west-2") - - provider_name = str(uuid.uuid4()) - provider_type = "Facebook" - value = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - result = conn.create_identity_provider( - UserPoolId=user_pool_id, - ProviderName=provider_name, - ProviderType=provider_type, - ProviderDetails={ - "thing": value - }, - ) - - 
result["IdentityProvider"]["UserPoolId"].should.equal(user_pool_id) - result["IdentityProvider"]["ProviderName"].should.equal(provider_name) - result["IdentityProvider"]["ProviderType"].should.equal(provider_type) - result["IdentityProvider"]["ProviderDetails"]["thing"].should.equal(value) - - -@mock_cognitoidp -def test_list_identity_providers(): - conn = boto3.client("cognito-idp", "us-west-2") - - provider_name = str(uuid.uuid4()) - provider_type = "Facebook" - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - conn.create_identity_provider( - UserPoolId=user_pool_id, - ProviderName=provider_name, - ProviderType=provider_type, - ProviderDetails={}, - ) - - result = conn.list_identity_providers( - UserPoolId=user_pool_id, - MaxResults=10, - ) - - result["Providers"].should.have.length_of(1) - result["Providers"][0]["ProviderName"].should.equal(provider_name) - result["Providers"][0]["ProviderType"].should.equal(provider_type) - - -@mock_cognitoidp -def test_describe_identity_providers(): - conn = boto3.client("cognito-idp", "us-west-2") - - provider_name = str(uuid.uuid4()) - provider_type = "Facebook" - value = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - conn.create_identity_provider( - UserPoolId=user_pool_id, - ProviderName=provider_name, - ProviderType=provider_type, - ProviderDetails={ - "thing": value - }, - ) - - result = conn.describe_identity_provider( - UserPoolId=user_pool_id, - ProviderName=provider_name, - ) - - result["IdentityProvider"]["UserPoolId"].should.equal(user_pool_id) - result["IdentityProvider"]["ProviderName"].should.equal(provider_name) - result["IdentityProvider"]["ProviderType"].should.equal(provider_type) - result["IdentityProvider"]["ProviderDetails"]["thing"].should.equal(value) - - -@mock_cognitoidp -def test_delete_identity_providers(): - conn = boto3.client("cognito-idp", "us-west-2") - - provider_name = str(uuid.uuid4()) - 
provider_type = "Facebook" - value = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - conn.create_identity_provider( - UserPoolId=user_pool_id, - ProviderName=provider_name, - ProviderType=provider_type, - ProviderDetails={ - "thing": value - }, - ) - - conn.delete_identity_provider(UserPoolId=user_pool_id, ProviderName=provider_name) - - caught = False - try: - conn.describe_identity_provider( - UserPoolId=user_pool_id, - ProviderName=provider_name, - ) - except conn.exceptions.ResourceNotFoundException: - caught = True - - caught.should.be.true - - -@mock_cognitoidp -def test_admin_create_user(): - conn = boto3.client("cognito-idp", "us-west-2") - - username = str(uuid.uuid4()) - value = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - result = conn.admin_create_user( - UserPoolId=user_pool_id, - Username=username, - UserAttributes=[ - {"Name": "thing", "Value": value} - ], - ) - - result["User"]["Username"].should.equal(username) - result["User"]["UserStatus"].should.equal("FORCE_CHANGE_PASSWORD") - result["User"]["Attributes"].should.have.length_of(1) - result["User"]["Attributes"][0]["Name"].should.equal("thing") - result["User"]["Attributes"][0]["Value"].should.equal(value) - result["User"]["Enabled"].should.equal(True) - - -@mock_cognitoidp -def test_admin_get_user(): - conn = boto3.client("cognito-idp", "us-west-2") - - username = str(uuid.uuid4()) - value = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - conn.admin_create_user( - UserPoolId=user_pool_id, - Username=username, - UserAttributes=[ - {"Name": "thing", "Value": value} - ], - ) - - result = conn.admin_get_user(UserPoolId=user_pool_id, Username=username) - result["Username"].should.equal(username) - result["UserAttributes"].should.have.length_of(1) - result["UserAttributes"][0]["Name"].should.equal("thing") - 
result["UserAttributes"][0]["Value"].should.equal(value) - - -@mock_cognitoidp -def test_admin_get_missing_user(): - conn = boto3.client("cognito-idp", "us-west-2") - - username = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - - caught = False - try: - conn.admin_get_user(UserPoolId=user_pool_id, Username=username) - except conn.exceptions.UserNotFoundException: - caught = True - - caught.should.be.true - - -@mock_cognitoidp -def test_list_users(): - conn = boto3.client("cognito-idp", "us-west-2") - - username = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - conn.admin_create_user(UserPoolId=user_pool_id, Username=username) - result = conn.list_users(UserPoolId=user_pool_id) - result["Users"].should.have.length_of(1) - result["Users"][0]["Username"].should.equal(username) - - -@mock_cognitoidp -def test_admin_disable_user(): - conn = boto3.client("cognito-idp", "us-west-2") - - username = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - conn.admin_create_user(UserPoolId=user_pool_id, Username=username) - - result = conn.admin_disable_user(UserPoolId=user_pool_id, Username=username) - list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected - - conn.admin_get_user(UserPoolId=user_pool_id, Username=username) \ - ["Enabled"].should.equal(False) - - -@mock_cognitoidp -def test_admin_enable_user(): - conn = boto3.client("cognito-idp", "us-west-2") - - username = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - conn.admin_create_user(UserPoolId=user_pool_id, Username=username) - conn.admin_disable_user(UserPoolId=user_pool_id, Username=username) - - result = conn.admin_enable_user(UserPoolId=user_pool_id, Username=username) - list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected - - 
conn.admin_get_user(UserPoolId=user_pool_id, Username=username) \ - ["Enabled"].should.equal(True) - - -@mock_cognitoidp -def test_admin_delete_user(): - conn = boto3.client("cognito-idp", "us-west-2") - - username = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - conn.admin_create_user(UserPoolId=user_pool_id, Username=username) - conn.admin_delete_user(UserPoolId=user_pool_id, Username=username) - - caught = False - try: - conn.admin_get_user(UserPoolId=user_pool_id, Username=username) - except conn.exceptions.UserNotFoundException: - caught = True - - caught.should.be.true - - -def authentication_flow(conn): - username = str(uuid.uuid4()) - temporary_password = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - user_attribute_name = str(uuid.uuid4()) - user_attribute_value = str(uuid.uuid4()) - client_id = conn.create_user_pool_client( - UserPoolId=user_pool_id, - ClientName=str(uuid.uuid4()), - ReadAttributes=[user_attribute_name] - )["UserPoolClient"]["ClientId"] - - conn.admin_create_user( - UserPoolId=user_pool_id, - Username=username, - TemporaryPassword=temporary_password, - UserAttributes=[{ - 'Name': user_attribute_name, - 'Value': user_attribute_value - }] - ) - - result = conn.admin_initiate_auth( - UserPoolId=user_pool_id, - ClientId=client_id, - AuthFlow="ADMIN_NO_SRP_AUTH", - AuthParameters={ - "USERNAME": username, - "PASSWORD": temporary_password - }, - ) - - # A newly created user is forced to set a new password - result["ChallengeName"].should.equal("NEW_PASSWORD_REQUIRED") - result["Session"].should_not.be.none - - # This sets a new password and logs the user in (creates tokens) - new_password = str(uuid.uuid4()) - result = conn.respond_to_auth_challenge( - Session=result["Session"], - ClientId=client_id, - ChallengeName="NEW_PASSWORD_REQUIRED", - ChallengeResponses={ - "USERNAME": username, - "NEW_PASSWORD": new_password - } - ) - - 
result["AuthenticationResult"]["IdToken"].should_not.be.none - result["AuthenticationResult"]["AccessToken"].should_not.be.none - - return { - "user_pool_id": user_pool_id, - "client_id": client_id, - "id_token": result["AuthenticationResult"]["IdToken"], - "access_token": result["AuthenticationResult"]["AccessToken"], - "username": username, - "password": new_password, - "additional_fields": { - user_attribute_name: user_attribute_value - } - } - - -@mock_cognitoidp -def test_authentication_flow(): - conn = boto3.client("cognito-idp", "us-west-2") - - authentication_flow(conn) - - -@mock_cognitoidp -def test_token_legitimacy(): - conn = boto3.client("cognito-idp", "us-west-2") - - path = "../../moto/cognitoidp/resources/jwks-public.json" - with open(os.path.join(os.path.dirname(__file__), path)) as f: - json_web_key = json.loads(f.read())["keys"][0] - - outputs = authentication_flow(conn) - id_token = outputs["id_token"] - access_token = outputs["access_token"] - client_id = outputs["client_id"] - issuer = "https://cognito-idp.us-west-2.amazonaws.com/{}".format(outputs["user_pool_id"]) - id_claims = json.loads(jws.verify(id_token, json_web_key, "RS256")) - id_claims["iss"].should.equal(issuer) - id_claims["aud"].should.equal(client_id) - access_claims = json.loads(jws.verify(access_token, json_web_key, "RS256")) - access_claims["iss"].should.equal(issuer) - access_claims["aud"].should.equal(client_id) - for k, v in outputs["additional_fields"].items(): - access_claims[k].should.equal(v) - - -@mock_cognitoidp -def test_change_password(): - conn = boto3.client("cognito-idp", "us-west-2") - - outputs = authentication_flow(conn) - - # Take this opportunity to test change_password, which requires an access token. 
- newer_password = str(uuid.uuid4()) - conn.change_password( - AccessToken=outputs["access_token"], - PreviousPassword=outputs["password"], - ProposedPassword=newer_password, - ) - - # Log in again, which should succeed without a challenge because the user is no - # longer in the force-new-password state. - result = conn.admin_initiate_auth( - UserPoolId=outputs["user_pool_id"], - ClientId=outputs["client_id"], - AuthFlow="ADMIN_NO_SRP_AUTH", - AuthParameters={ - "USERNAME": outputs["username"], - "PASSWORD": newer_password, - }, - ) - - result["AuthenticationResult"].should_not.be.none - - -@mock_cognitoidp -def test_forgot_password(): - conn = boto3.client("cognito-idp", "us-west-2") - - result = conn.forgot_password(ClientId=str(uuid.uuid4()), Username=str(uuid.uuid4())) - result["CodeDeliveryDetails"].should_not.be.none - - -@mock_cognitoidp -def test_confirm_forgot_password(): - conn = boto3.client("cognito-idp", "us-west-2") - - username = str(uuid.uuid4()) - user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] - client_id = conn.create_user_pool_client( - UserPoolId=user_pool_id, - ClientName=str(uuid.uuid4()), - )["UserPoolClient"]["ClientId"] - - conn.admin_create_user( - UserPoolId=user_pool_id, - Username=username, - TemporaryPassword=str(uuid.uuid4()), - ) - - conn.confirm_forgot_password( - ClientId=client_id, - Username=username, - ConfirmationCode=str(uuid.uuid4()), - Password=str(uuid.uuid4()), - ) +from __future__ import unicode_literals + +import json +import os +import random +import uuid + +import boto3 +# noinspection PyUnresolvedReferences +import sure # noqa +from botocore.exceptions import ClientError +from jose import jws +from nose.tools import assert_raises + +from moto import mock_cognitoidp + + +@mock_cognitoidp +def test_create_user_pool(): + conn = boto3.client("cognito-idp", "us-west-2") + + name = str(uuid.uuid4()) + value = str(uuid.uuid4()) + result = conn.create_user_pool( + PoolName=name, + 
LambdaConfig={ + "PreSignUp": value + } + ) + + result["UserPool"]["Id"].should_not.be.none + result["UserPool"]["Id"].should.match(r'[\w-]+_[0-9a-zA-Z]+') + result["UserPool"]["Name"].should.equal(name) + result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value) + + +@mock_cognitoidp +def test_list_user_pools(): + conn = boto3.client("cognito-idp", "us-west-2") + + name = str(uuid.uuid4()) + conn.create_user_pool(PoolName=name) + result = conn.list_user_pools(MaxResults=10) + result["UserPools"].should.have.length_of(1) + result["UserPools"][0]["Name"].should.equal(name) + + +@mock_cognitoidp +def test_list_user_pools_returns_max_items(): + conn = boto3.client("cognito-idp", "us-west-2") + + # Given 10 user pools + pool_count = 10 + for i in range(pool_count): + conn.create_user_pool(PoolName=str(uuid.uuid4())) + + max_results = 5 + result = conn.list_user_pools(MaxResults=max_results) + result["UserPools"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + +@mock_cognitoidp +def test_list_user_pools_returns_next_tokens(): + conn = boto3.client("cognito-idp", "us-west-2") + + # Given 10 user pool clients + pool_count = 10 + for i in range(pool_count): + conn.create_user_pool(PoolName=str(uuid.uuid4())) + + max_results = 5 + result = conn.list_user_pools(MaxResults=max_results) + result["UserPools"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + next_token = result["NextToken"] + result_2 = conn.list_user_pools(MaxResults=max_results, NextToken=next_token) + result_2["UserPools"].should.have.length_of(max_results) + result_2.shouldnt.have.key("NextToken") + + +@mock_cognitoidp +def test_list_user_pools_when_max_items_more_than_total_items(): + conn = boto3.client("cognito-idp", "us-west-2") + + # Given 10 user pool clients + pool_count = 10 + for i in range(pool_count): + conn.create_user_pool(PoolName=str(uuid.uuid4())) + + max_results = pool_count + 5 + result = 
conn.list_user_pools(MaxResults=max_results) + result["UserPools"].should.have.length_of(pool_count) + result.shouldnt.have.key("NextToken") + + +@mock_cognitoidp +def test_describe_user_pool(): + conn = boto3.client("cognito-idp", "us-west-2") + + name = str(uuid.uuid4()) + value = str(uuid.uuid4()) + user_pool_details = conn.create_user_pool( + PoolName=name, + LambdaConfig={ + "PreSignUp": value + } + ) + + result = conn.describe_user_pool(UserPoolId=user_pool_details["UserPool"]["Id"]) + result["UserPool"]["Name"].should.equal(name) + result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value) + + +@mock_cognitoidp +def test_delete_user_pool(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + conn.list_user_pools(MaxResults=10)["UserPools"].should.have.length_of(1) + conn.delete_user_pool(UserPoolId=user_pool_id) + conn.list_user_pools(MaxResults=10)["UserPools"].should.have.length_of(0) + + +@mock_cognitoidp +def test_create_user_pool_domain(): + conn = boto3.client("cognito-idp", "us-west-2") + + domain = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + result = conn.create_user_pool_domain(UserPoolId=user_pool_id, Domain=domain) + result["ResponseMetadata"]["HTTPStatusCode"].should.equal(200) + + +@mock_cognitoidp +def test_describe_user_pool_domain(): + conn = boto3.client("cognito-idp", "us-west-2") + + domain = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + conn.create_user_pool_domain(UserPoolId=user_pool_id, Domain=domain) + result = conn.describe_user_pool_domain(Domain=domain) + result["DomainDescription"]["Domain"].should.equal(domain) + result["DomainDescription"]["UserPoolId"].should.equal(user_pool_id) + result["DomainDescription"]["AWSAccountId"].should_not.be.none + + +@mock_cognitoidp +def test_delete_user_pool_domain(): + conn = 
boto3.client("cognito-idp", "us-west-2") + + domain = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + conn.create_user_pool_domain(UserPoolId=user_pool_id, Domain=domain) + result = conn.delete_user_pool_domain(UserPoolId=user_pool_id, Domain=domain) + result["ResponseMetadata"]["HTTPStatusCode"].should.equal(200) + result = conn.describe_user_pool_domain(Domain=domain) + # This is a surprising behavior of the real service: describing a missing domain comes + # back with status 200 and a DomainDescription of {} + result["ResponseMetadata"]["HTTPStatusCode"].should.equal(200) + result["DomainDescription"].keys().should.have.length_of(0) + + +@mock_cognitoidp +def test_create_user_pool_client(): + conn = boto3.client("cognito-idp", "us-west-2") + + client_name = str(uuid.uuid4()) + value = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + result = conn.create_user_pool_client( + UserPoolId=user_pool_id, + ClientName=client_name, + CallbackURLs=[value], + ) + + result["UserPoolClient"]["UserPoolId"].should.equal(user_pool_id) + result["UserPoolClient"]["ClientId"].should_not.be.none + result["UserPoolClient"]["ClientName"].should.equal(client_name) + result["UserPoolClient"]["CallbackURLs"].should.have.length_of(1) + result["UserPoolClient"]["CallbackURLs"][0].should.equal(value) + + +@mock_cognitoidp +def test_list_user_pool_clients(): + conn = boto3.client("cognito-idp", "us-west-2") + + client_name = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + conn.create_user_pool_client(UserPoolId=user_pool_id, ClientName=client_name) + result = conn.list_user_pool_clients(UserPoolId=user_pool_id, MaxResults=10) + result["UserPoolClients"].should.have.length_of(1) + result["UserPoolClients"][0]["ClientName"].should.equal(client_name) + + +@mock_cognitoidp +def test_list_user_pool_clients_returns_max_items(): + 
conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 user pool clients + client_count = 10 + for i in range(client_count): + client_name = str(uuid.uuid4()) + conn.create_user_pool_client(UserPoolId=user_pool_id, + ClientName=client_name) + max_results = 5 + result = conn.list_user_pool_clients(UserPoolId=user_pool_id, + MaxResults=max_results) + result["UserPoolClients"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + +@mock_cognitoidp +def test_list_user_pool_clients_returns_next_tokens(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 user pool clients + client_count = 10 + for i in range(client_count): + client_name = str(uuid.uuid4()) + conn.create_user_pool_client(UserPoolId=user_pool_id, + ClientName=client_name) + max_results = 5 + result = conn.list_user_pool_clients(UserPoolId=user_pool_id, + MaxResults=max_results) + result["UserPoolClients"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + next_token = result["NextToken"] + result_2 = conn.list_user_pool_clients(UserPoolId=user_pool_id, + MaxResults=max_results, + NextToken=next_token) + result_2["UserPoolClients"].should.have.length_of(max_results) + result_2.shouldnt.have.key("NextToken") + + +@mock_cognitoidp +def test_list_user_pool_clients_when_max_items_more_than_total_items(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 user pool clients + client_count = 10 + for i in range(client_count): + client_name = str(uuid.uuid4()) + conn.create_user_pool_client(UserPoolId=user_pool_id, + ClientName=client_name) + max_results = client_count + 5 + result = conn.list_user_pool_clients(UserPoolId=user_pool_id, + MaxResults=max_results) + 
result["UserPoolClients"].should.have.length_of(client_count) + result.shouldnt.have.key("NextToken") + + +@mock_cognitoidp +def test_describe_user_pool_client(): + conn = boto3.client("cognito-idp", "us-west-2") + + client_name = str(uuid.uuid4()) + value = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + client_details = conn.create_user_pool_client( + UserPoolId=user_pool_id, + ClientName=client_name, + CallbackURLs=[value], + ) + + result = conn.describe_user_pool_client( + UserPoolId=user_pool_id, + ClientId=client_details["UserPoolClient"]["ClientId"], + ) + + result["UserPoolClient"]["ClientName"].should.equal(client_name) + result["UserPoolClient"]["CallbackURLs"].should.have.length_of(1) + result["UserPoolClient"]["CallbackURLs"][0].should.equal(value) + + +@mock_cognitoidp +def test_update_user_pool_client(): + conn = boto3.client("cognito-idp", "us-west-2") + + old_client_name = str(uuid.uuid4()) + new_client_name = str(uuid.uuid4()) + old_value = str(uuid.uuid4()) + new_value = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + client_details = conn.create_user_pool_client( + UserPoolId=user_pool_id, + ClientName=old_client_name, + CallbackURLs=[old_value], + ) + + result = conn.update_user_pool_client( + UserPoolId=user_pool_id, + ClientId=client_details["UserPoolClient"]["ClientId"], + ClientName=new_client_name, + CallbackURLs=[new_value], + ) + + result["UserPoolClient"]["ClientName"].should.equal(new_client_name) + result["UserPoolClient"]["CallbackURLs"].should.have.length_of(1) + result["UserPoolClient"]["CallbackURLs"][0].should.equal(new_value) + + +@mock_cognitoidp +def test_delete_user_pool_client(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + client_details = conn.create_user_pool_client( + UserPoolId=user_pool_id, + 
ClientName=str(uuid.uuid4()), + ) + + conn.delete_user_pool_client( + UserPoolId=user_pool_id, + ClientId=client_details["UserPoolClient"]["ClientId"], + ) + + caught = False + try: + conn.describe_user_pool_client( + UserPoolId=user_pool_id, + ClientId=client_details["UserPoolClient"]["ClientId"], + ) + except conn.exceptions.ResourceNotFoundException: + caught = True + + caught.should.be.true + + +@mock_cognitoidp +def test_create_identity_provider(): + conn = boto3.client("cognito-idp", "us-west-2") + + provider_name = str(uuid.uuid4()) + provider_type = "Facebook" + value = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + result = conn.create_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ProviderType=provider_type, + ProviderDetails={ + "thing": value + }, + ) + + result["IdentityProvider"]["UserPoolId"].should.equal(user_pool_id) + result["IdentityProvider"]["ProviderName"].should.equal(provider_name) + result["IdentityProvider"]["ProviderType"].should.equal(provider_type) + result["IdentityProvider"]["ProviderDetails"]["thing"].should.equal(value) + + +@mock_cognitoidp +def test_list_identity_providers(): + conn = boto3.client("cognito-idp", "us-west-2") + + provider_name = str(uuid.uuid4()) + provider_type = "Facebook" + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + conn.create_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ProviderType=provider_type, + ProviderDetails={}, + ) + + result = conn.list_identity_providers( + UserPoolId=user_pool_id, + MaxResults=10, + ) + + result["Providers"].should.have.length_of(1) + result["Providers"][0]["ProviderName"].should.equal(provider_name) + result["Providers"][0]["ProviderType"].should.equal(provider_type) + + +@mock_cognitoidp +def test_list_identity_providers_returns_max_items(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = 
conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 identity providers linked to a user pool + identity_provider_count = 10 + for i in range(identity_provider_count): + provider_name = str(uuid.uuid4()) + provider_type = "Facebook" + conn.create_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ProviderType=provider_type, + ProviderDetails={}, + ) + + max_results = 5 + result = conn.list_identity_providers(UserPoolId=user_pool_id, + MaxResults=max_results) + result["Providers"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + +@mock_cognitoidp +def test_list_identity_providers_returns_next_tokens(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 identity providers linked to a user pool + identity_provider_count = 10 + for i in range(identity_provider_count): + provider_name = str(uuid.uuid4()) + provider_type = "Facebook" + conn.create_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ProviderType=provider_type, + ProviderDetails={}, + ) + + max_results = 5 + result = conn.list_identity_providers(UserPoolId=user_pool_id, MaxResults=max_results) + result["Providers"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + next_token = result["NextToken"] + result_2 = conn.list_identity_providers(UserPoolId=user_pool_id, + MaxResults=max_results, + NextToken=next_token) + result_2["Providers"].should.have.length_of(max_results) + result_2.shouldnt.have.key("NextToken") + + +@mock_cognitoidp +def test_list_identity_providers_when_max_items_more_than_total_items(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 identity providers linked to a user pool + identity_provider_count = 10 + for i in range(identity_provider_count): + 
provider_name = str(uuid.uuid4()) + provider_type = "Facebook" + conn.create_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ProviderType=provider_type, + ProviderDetails={}, + ) + + max_results = identity_provider_count + 5 + result = conn.list_identity_providers(UserPoolId=user_pool_id, MaxResults=max_results) + result["Providers"].should.have.length_of(identity_provider_count) + result.shouldnt.have.key("NextToken") + + +@mock_cognitoidp +def test_describe_identity_providers(): + conn = boto3.client("cognito-idp", "us-west-2") + + provider_name = str(uuid.uuid4()) + provider_type = "Facebook" + value = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + conn.create_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ProviderType=provider_type, + ProviderDetails={ + "thing": value + }, + ) + + result = conn.describe_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ) + + result["IdentityProvider"]["UserPoolId"].should.equal(user_pool_id) + result["IdentityProvider"]["ProviderName"].should.equal(provider_name) + result["IdentityProvider"]["ProviderType"].should.equal(provider_type) + result["IdentityProvider"]["ProviderDetails"]["thing"].should.equal(value) + + +@mock_cognitoidp +def test_update_identity_provider(): + conn = boto3.client("cognito-idp", "us-west-2") + + provider_name = str(uuid.uuid4()) + provider_type = "Facebook" + value = str(uuid.uuid4()) + new_value = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + conn.create_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ProviderType=provider_type, + ProviderDetails={ + "thing": value + }, + ) + + result = conn.update_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ProviderDetails={ + "thing": new_value + }, + ) + + 
result["IdentityProvider"]["UserPoolId"].should.equal(user_pool_id) + result["IdentityProvider"]["ProviderName"].should.equal(provider_name) + result["IdentityProvider"]["ProviderType"].should.equal(provider_type) + result["IdentityProvider"]["ProviderDetails"]["thing"].should.equal(new_value) + + +@mock_cognitoidp +def test_update_identity_provider_no_user_pool(): + conn = boto3.client("cognito-idp", "us-west-2") + + new_value = str(uuid.uuid4()) + + with assert_raises(conn.exceptions.ResourceNotFoundException) as cm: + conn.update_identity_provider( + UserPoolId="foo", + ProviderName="bar", + ProviderDetails={ + "thing": new_value + }, + ) + + cm.exception.operation_name.should.equal('UpdateIdentityProvider') + cm.exception.response['Error']['Code'].should.equal('ResourceNotFoundException') + cm.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) + + +@mock_cognitoidp +def test_update_identity_provider_no_identity_provider(): + conn = boto3.client("cognito-idp", "us-west-2") + + provider_name = str(uuid.uuid4()) + provider_type = "Facebook" + value = str(uuid.uuid4()) + new_value = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + with assert_raises(conn.exceptions.ResourceNotFoundException) as cm: + conn.update_identity_provider( + UserPoolId=user_pool_id, + ProviderName="foo", + ProviderDetails={ + "thing": new_value + }, + ) + + cm.exception.operation_name.should.equal('UpdateIdentityProvider') + cm.exception.response['Error']['Code'].should.equal('ResourceNotFoundException') + cm.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) + + +@mock_cognitoidp +def test_delete_identity_providers(): + conn = boto3.client("cognito-idp", "us-west-2") + + provider_name = str(uuid.uuid4()) + provider_type = "Facebook" + value = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + conn.create_identity_provider( + 
UserPoolId=user_pool_id, + ProviderName=provider_name, + ProviderType=provider_type, + ProviderDetails={ + "thing": value + }, + ) + + conn.delete_identity_provider(UserPoolId=user_pool_id, ProviderName=provider_name) + + caught = False + try: + conn.describe_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ) + except conn.exceptions.ResourceNotFoundException: + caught = True + + caught.should.be.true + + +@mock_cognitoidp +def test_create_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + description = str(uuid.uuid4()) + role_arn = "arn:aws:iam:::role/my-iam-role" + precedence = random.randint(0, 100000) + + result = conn.create_group( + GroupName=group_name, + UserPoolId=user_pool_id, + Description=description, + RoleArn=role_arn, + Precedence=precedence, + ) + + result["Group"]["GroupName"].should.equal(group_name) + result["Group"]["UserPoolId"].should.equal(user_pool_id) + result["Group"]["Description"].should.equal(description) + result["Group"]["RoleArn"].should.equal(role_arn) + result["Group"]["Precedence"].should.equal(precedence) + result["Group"]["LastModifiedDate"].should.be.a("datetime.datetime") + result["Group"]["CreationDate"].should.be.a("datetime.datetime") + + +@mock_cognitoidp +def test_create_group_with_duplicate_name_raises_error(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + with assert_raises(ClientError) as cm: + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + cm.exception.operation_name.should.equal('CreateGroup') + cm.exception.response['Error']['Code'].should.equal('GroupExistsException') + 
cm.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) + + +@mock_cognitoidp +def test_get_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + result = conn.get_group(GroupName=group_name, UserPoolId=user_pool_id) + + result["Group"]["GroupName"].should.equal(group_name) + result["Group"]["UserPoolId"].should.equal(user_pool_id) + result["Group"]["LastModifiedDate"].should.be.a("datetime.datetime") + result["Group"]["CreationDate"].should.be.a("datetime.datetime") + + +@mock_cognitoidp +def test_list_groups(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + result = conn.list_groups(UserPoolId=user_pool_id) + + result["Groups"].should.have.length_of(1) + result["Groups"][0]["GroupName"].should.equal(group_name) + + +@mock_cognitoidp +def test_delete_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + result = conn.delete_group(GroupName=group_name, UserPoolId=user_pool_id) + list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected + + with assert_raises(ClientError) as cm: + conn.get_group(GroupName=group_name, UserPoolId=user_pool_id) + cm.exception.response['Error']['Code'].should.equal('ResourceNotFoundException') + + +@mock_cognitoidp +def test_admin_add_user_to_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + 
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + result = conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected + + +@mock_cognitoidp +def test_admin_add_user_to_group_again_is_noop(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + + +@mock_cognitoidp +def test_list_users_in_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + + result = conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name) + + result["Users"].should.have.length_of(1) + result["Users"][0]["Username"].should.equal(username) + + +@mock_cognitoidp +def test_list_users_in_group_ignores_deleted_user(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + 
conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + username2 = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username2) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username2, GroupName=group_name) + conn.admin_delete_user(UserPoolId=user_pool_id, Username=username) + + result = conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name) + + result["Users"].should.have.length_of(1) + result["Users"][0]["Username"].should.equal(username2) + + +@mock_cognitoidp +def test_admin_list_groups_for_user(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + + result = conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id) + + result["Groups"].should.have.length_of(1) + result["Groups"][0]["GroupName"].should.equal(group_name) + + +@mock_cognitoidp +def test_admin_list_groups_for_user_ignores_deleted_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + group_name2 = str(uuid.uuid4()) + conn.create_group(GroupName=group_name2, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + conn.admin_add_user_to_group(UserPoolId=user_pool_id, 
Username=username, GroupName=group_name2) + conn.delete_group(GroupName=group_name, UserPoolId=user_pool_id) + + result = conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id) + + result["Groups"].should.have.length_of(1) + result["Groups"][0]["GroupName"].should.equal(group_name2) + + +@mock_cognitoidp +def test_admin_remove_user_from_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + + result = conn.admin_remove_user_from_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected + conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name) \ + ["Users"].should.have.length_of(0) + conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id) \ + ["Groups"].should.have.length_of(0) + + +@mock_cognitoidp +def test_admin_remove_user_from_group_again_is_noop(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + + +@mock_cognitoidp +def test_admin_create_user(): + conn = boto3.client("cognito-idp", "us-west-2") + + username = str(uuid.uuid4()) + value = 
str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + result = conn.admin_create_user( + UserPoolId=user_pool_id, + Username=username, + UserAttributes=[ + {"Name": "thing", "Value": value} + ], + ) + + result["User"]["Username"].should.equal(username) + result["User"]["UserStatus"].should.equal("FORCE_CHANGE_PASSWORD") + result["User"]["Attributes"].should.have.length_of(1) + result["User"]["Attributes"][0]["Name"].should.equal("thing") + result["User"]["Attributes"][0]["Value"].should.equal(value) + result["User"]["Enabled"].should.equal(True) + + +@mock_cognitoidp +def test_admin_get_user(): + conn = boto3.client("cognito-idp", "us-west-2") + + username = str(uuid.uuid4()) + value = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + conn.admin_create_user( + UserPoolId=user_pool_id, + Username=username, + UserAttributes=[ + {"Name": "thing", "Value": value} + ], + ) + + result = conn.admin_get_user(UserPoolId=user_pool_id, Username=username) + result["Username"].should.equal(username) + result["UserAttributes"].should.have.length_of(1) + result["UserAttributes"][0]["Name"].should.equal("thing") + result["UserAttributes"][0]["Value"].should.equal(value) + + +@mock_cognitoidp +def test_admin_get_missing_user(): + conn = boto3.client("cognito-idp", "us-west-2") + + username = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + caught = False + try: + conn.admin_get_user(UserPoolId=user_pool_id, Username=username) + except conn.exceptions.UserNotFoundException: + caught = True + + caught.should.be.true + + +@mock_cognitoidp +def test_list_users(): + conn = boto3.client("cognito-idp", "us-west-2") + + username = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + result = 
conn.list_users(UserPoolId=user_pool_id) + result["Users"].should.have.length_of(1) + result["Users"][0]["Username"].should.equal(username) + + +@mock_cognitoidp +def test_list_users_returns_limit_items(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 users + user_count = 10 + for i in range(user_count): + conn.admin_create_user(UserPoolId=user_pool_id, + Username=str(uuid.uuid4())) + max_results = 5 + result = conn.list_users(UserPoolId=user_pool_id, Limit=max_results) + result["Users"].should.have.length_of(max_results) + result.should.have.key("PaginationToken") + + +@mock_cognitoidp +def test_list_users_returns_pagination_tokens(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 users + user_count = 10 + for i in range(user_count): + conn.admin_create_user(UserPoolId=user_pool_id, + Username=str(uuid.uuid4())) + + max_results = 5 + result = conn.list_users(UserPoolId=user_pool_id, Limit=max_results) + result["Users"].should.have.length_of(max_results) + result.should.have.key("PaginationToken") + + next_token = result["PaginationToken"] + result_2 = conn.list_users(UserPoolId=user_pool_id, + Limit=max_results, PaginationToken=next_token) + result_2["Users"].should.have.length_of(max_results) + result_2.shouldnt.have.key("PaginationToken") + + +@mock_cognitoidp +def test_list_users_when_limit_more_than_total_items(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 users + user_count = 10 + for i in range(user_count): + conn.admin_create_user(UserPoolId=user_pool_id, + Username=str(uuid.uuid4())) + + max_results = user_count + 5 + result = conn.list_users(UserPoolId=user_pool_id, Limit=max_results) + result["Users"].should.have.length_of(user_count) + 
result.shouldnt.have.key("PaginationToken") + + +@mock_cognitoidp +def test_admin_disable_user(): + conn = boto3.client("cognito-idp", "us-west-2") + + username = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + result = conn.admin_disable_user(UserPoolId=user_pool_id, Username=username) + list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected + + conn.admin_get_user(UserPoolId=user_pool_id, Username=username) \ + ["Enabled"].should.equal(False) + + +@mock_cognitoidp +def test_admin_enable_user(): + conn = boto3.client("cognito-idp", "us-west-2") + + username = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + conn.admin_disable_user(UserPoolId=user_pool_id, Username=username) + + result = conn.admin_enable_user(UserPoolId=user_pool_id, Username=username) + list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected + + conn.admin_get_user(UserPoolId=user_pool_id, Username=username) \ + ["Enabled"].should.equal(True) + + +@mock_cognitoidp +def test_admin_delete_user(): + conn = boto3.client("cognito-idp", "us-west-2") + + username = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + conn.admin_delete_user(UserPoolId=user_pool_id, Username=username) + + caught = False + try: + conn.admin_get_user(UserPoolId=user_pool_id, Username=username) + except conn.exceptions.UserNotFoundException: + caught = True + + caught.should.be.true + + +def authentication_flow(conn): + username = str(uuid.uuid4()) + temporary_password = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + user_attribute_name = str(uuid.uuid4()) 
+ user_attribute_value = str(uuid.uuid4()) + client_id = conn.create_user_pool_client( + UserPoolId=user_pool_id, + ClientName=str(uuid.uuid4()), + ReadAttributes=[user_attribute_name] + )["UserPoolClient"]["ClientId"] + + conn.admin_create_user( + UserPoolId=user_pool_id, + Username=username, + TemporaryPassword=temporary_password, + UserAttributes=[{ + 'Name': user_attribute_name, + 'Value': user_attribute_value + }] + ) + + result = conn.admin_initiate_auth( + UserPoolId=user_pool_id, + ClientId=client_id, + AuthFlow="ADMIN_NO_SRP_AUTH", + AuthParameters={ + "USERNAME": username, + "PASSWORD": temporary_password + }, + ) + + # A newly created user is forced to set a new password + result["ChallengeName"].should.equal("NEW_PASSWORD_REQUIRED") + result["Session"].should_not.be.none + + # This sets a new password and logs the user in (creates tokens) + new_password = str(uuid.uuid4()) + result = conn.respond_to_auth_challenge( + Session=result["Session"], + ClientId=client_id, + ChallengeName="NEW_PASSWORD_REQUIRED", + ChallengeResponses={ + "USERNAME": username, + "NEW_PASSWORD": new_password + } + ) + + result["AuthenticationResult"]["IdToken"].should_not.be.none + result["AuthenticationResult"]["AccessToken"].should_not.be.none + + return { + "user_pool_id": user_pool_id, + "client_id": client_id, + "id_token": result["AuthenticationResult"]["IdToken"], + "access_token": result["AuthenticationResult"]["AccessToken"], + "username": username, + "password": new_password, + "additional_fields": { + user_attribute_name: user_attribute_value + } + } + + +@mock_cognitoidp +def test_authentication_flow(): + conn = boto3.client("cognito-idp", "us-west-2") + + authentication_flow(conn) + + +@mock_cognitoidp +def test_token_legitimacy(): + conn = boto3.client("cognito-idp", "us-west-2") + + path = "../../moto/cognitoidp/resources/jwks-public.json" + with open(os.path.join(os.path.dirname(__file__), path)) as f: + json_web_key = json.loads(f.read())["keys"][0] + + outputs = 
authentication_flow(conn) + id_token = outputs["id_token"] + access_token = outputs["access_token"] + client_id = outputs["client_id"] + issuer = "https://cognito-idp.us-west-2.amazonaws.com/{}".format(outputs["user_pool_id"]) + id_claims = json.loads(jws.verify(id_token, json_web_key, "RS256")) + id_claims["iss"].should.equal(issuer) + id_claims["aud"].should.equal(client_id) + access_claims = json.loads(jws.verify(access_token, json_web_key, "RS256")) + access_claims["iss"].should.equal(issuer) + access_claims["aud"].should.equal(client_id) + for k, v in outputs["additional_fields"].items(): + access_claims[k].should.equal(v) + + +@mock_cognitoidp +def test_change_password(): + conn = boto3.client("cognito-idp", "us-west-2") + + outputs = authentication_flow(conn) + + # Take this opportunity to test change_password, which requires an access token. + newer_password = str(uuid.uuid4()) + conn.change_password( + AccessToken=outputs["access_token"], + PreviousPassword=outputs["password"], + ProposedPassword=newer_password, + ) + + # Log in again, which should succeed without a challenge because the user is no + # longer in the force-new-password state. 
+ result = conn.admin_initiate_auth( + UserPoolId=outputs["user_pool_id"], + ClientId=outputs["client_id"], + AuthFlow="ADMIN_NO_SRP_AUTH", + AuthParameters={ + "USERNAME": outputs["username"], + "PASSWORD": newer_password, + }, + ) + + result["AuthenticationResult"].should_not.be.none + + +@mock_cognitoidp +def test_forgot_password(): + conn = boto3.client("cognito-idp", "us-west-2") + + result = conn.forgot_password(ClientId=str(uuid.uuid4()), Username=str(uuid.uuid4())) + result["CodeDeliveryDetails"].should_not.be.none + + +@mock_cognitoidp +def test_confirm_forgot_password(): + conn = boto3.client("cognito-idp", "us-west-2") + + username = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + client_id = conn.create_user_pool_client( + UserPoolId=user_pool_id, + ClientName=str(uuid.uuid4()), + )["UserPoolClient"]["ClientId"] + + conn.admin_create_user( + UserPoolId=user_pool_id, + Username=username, + TemporaryPassword=str(uuid.uuid4()), + ) + + conn.confirm_forgot_password( + ClientId=client_id, + Username=username, + ConfirmationCode=str(uuid.uuid4()), + Password=str(uuid.uuid4()), + ) diff --git a/tests/test_config/test_config.py b/tests/test_config/test_config.py new file mode 100644 index 000000000..96c62455c --- /dev/null +++ b/tests/test_config/test_config.py @@ -0,0 +1,491 @@ +from datetime import datetime, timedelta + +import boto3 +from botocore.exceptions import ClientError +from nose.tools import assert_raises + +from moto.config import mock_config + + +@mock_config +def test_put_configuration_recorder(): + client = boto3.client('config', region_name='us-west-2') + + # Try without a name supplied: + with assert_raises(ClientError) as ce: + client.put_configuration_recorder(ConfigurationRecorder={'roleARN': 'somearn'}) + assert ce.exception.response['Error']['Code'] == 'InvalidConfigurationRecorderNameException' + assert 'is not valid, blank string.' 
in ce.exception.response['Error']['Message'] + + # Try with a really long name: + with assert_raises(ClientError) as ce: + client.put_configuration_recorder(ConfigurationRecorder={'name': 'a' * 257, 'roleARN': 'somearn'}) + assert ce.exception.response['Error']['Code'] == 'ValidationException' + assert 'Member must have length less than or equal to 256' in ce.exception.response['Error']['Message'] + + # With resource types and flags set to True: + bad_groups = [ + {'allSupported': True, 'includeGlobalResourceTypes': True, 'resourceTypes': ['item']}, + {'allSupported': False, 'includeGlobalResourceTypes': True, 'resourceTypes': ['item']}, + {'allSupported': True, 'includeGlobalResourceTypes': False, 'resourceTypes': ['item']}, + {'allSupported': False, 'includeGlobalResourceTypes': False, 'resourceTypes': []}, + {'includeGlobalResourceTypes': False, 'resourceTypes': []}, + {'includeGlobalResourceTypes': True}, + {'resourceTypes': []}, + {} + ] + + for bg in bad_groups: + with assert_raises(ClientError) as ce: + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'default', + 'roleARN': 'somearn', + 'recordingGroup': bg + }) + assert ce.exception.response['Error']['Code'] == 'InvalidRecordingGroupException' + assert ce.exception.response['Error']['Message'] == 'The recording group provided is not valid' + + # With an invalid Resource Type: + with assert_raises(ClientError) as ce: + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'default', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + # 2 good, and 2 bad: + 'resourceTypes': ['AWS::EC2::Volume', 'LOLNO', 'AWS::EC2::VPC', 'LOLSTILLNO'] + } + }) + assert ce.exception.response['Error']['Code'] == 'ValidationException' + assert "2 validation error detected: Value '['LOLNO', 'LOLSTILLNO']" in str(ce.exception.response['Error']['Message']) + assert 'AWS::EC2::Instance' in ce.exception.response['Error']['Message'] + + # 
Create a proper one: + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + + result = client.describe_configuration_recorders()['ConfigurationRecorders'] + assert len(result) == 1 + assert result[0]['name'] == 'testrecorder' + assert result[0]['roleARN'] == 'somearn' + assert not result[0]['recordingGroup']['allSupported'] + assert not result[0]['recordingGroup']['includeGlobalResourceTypes'] + assert len(result[0]['recordingGroup']['resourceTypes']) == 2 + assert 'AWS::EC2::Volume' in result[0]['recordingGroup']['resourceTypes'] \ + and 'AWS::EC2::VPC' in result[0]['recordingGroup']['resourceTypes'] + + # Now update the configuration recorder: + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': True, + 'includeGlobalResourceTypes': True + } + }) + result = client.describe_configuration_recorders()['ConfigurationRecorders'] + assert len(result) == 1 + assert result[0]['name'] == 'testrecorder' + assert result[0]['roleARN'] == 'somearn' + assert result[0]['recordingGroup']['allSupported'] + assert result[0]['recordingGroup']['includeGlobalResourceTypes'] + assert len(result[0]['recordingGroup']['resourceTypes']) == 0 + + # With a default recording group (i.e. 
lacking one) + client.put_configuration_recorder(ConfigurationRecorder={'name': 'testrecorder', 'roleARN': 'somearn'}) + result = client.describe_configuration_recorders()['ConfigurationRecorders'] + assert len(result) == 1 + assert result[0]['name'] == 'testrecorder' + assert result[0]['roleARN'] == 'somearn' + assert result[0]['recordingGroup']['allSupported'] + assert not result[0]['recordingGroup']['includeGlobalResourceTypes'] + assert not result[0]['recordingGroup'].get('resourceTypes') + + # Can currently only have exactly 1 Config Recorder in an account/region: + with assert_raises(ClientError) as ce: + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'someotherrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + } + }) + assert ce.exception.response['Error']['Code'] == 'MaxNumberOfConfigurationRecordersExceededException' + assert "maximum number of configuration recorders: 1 is reached." in ce.exception.response['Error']['Message'] + + +@mock_config +def test_describe_configurations(): + client = boto3.client('config', region_name='us-west-2') + + # Without any configurations: + result = client.describe_configuration_recorders() + assert not result['ConfigurationRecorders'] + + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + + result = client.describe_configuration_recorders()['ConfigurationRecorders'] + assert len(result) == 1 + assert result[0]['name'] == 'testrecorder' + assert result[0]['roleARN'] == 'somearn' + assert not result[0]['recordingGroup']['allSupported'] + assert not result[0]['recordingGroup']['includeGlobalResourceTypes'] + assert len(result[0]['recordingGroup']['resourceTypes']) == 2 + assert 'AWS::EC2::Volume' in 
result[0]['recordingGroup']['resourceTypes'] \ + and 'AWS::EC2::VPC' in result[0]['recordingGroup']['resourceTypes'] + + # Specify an incorrect name: + with assert_raises(ClientError) as ce: + client.describe_configuration_recorders(ConfigurationRecorderNames=['wrong']) + assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException' + assert 'wrong' in ce.exception.response['Error']['Message'] + + # And with both a good and wrong name: + with assert_raises(ClientError) as ce: + client.describe_configuration_recorders(ConfigurationRecorderNames=['testrecorder', 'wrong']) + assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException' + assert 'wrong' in ce.exception.response['Error']['Message'] + + +@mock_config +def test_delivery_channels(): + client = boto3.client('config', region_name='us-west-2') + + # Try without a config recorder: + with assert_raises(ClientError) as ce: + client.put_delivery_channel(DeliveryChannel={}) + assert ce.exception.response['Error']['Code'] == 'NoAvailableConfigurationRecorderException' + assert ce.exception.response['Error']['Message'] == 'Configuration recorder is not available to ' \ + 'put delivery channel.' + + # Create a config recorder to continue testing: + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + + # Try without a name supplied: + with assert_raises(ClientError) as ce: + client.put_delivery_channel(DeliveryChannel={}) + assert ce.exception.response['Error']['Code'] == 'InvalidDeliveryChannelNameException' + assert 'is not valid, blank string.' 
in ce.exception.response['Error']['Message'] + + # Try with a really long name: + with assert_raises(ClientError) as ce: + client.put_delivery_channel(DeliveryChannel={'name': 'a' * 257}) + assert ce.exception.response['Error']['Code'] == 'ValidationException' + assert 'Member must have length less than or equal to 256' in ce.exception.response['Error']['Message'] + + # Without specifying a bucket name: + with assert_raises(ClientError) as ce: + client.put_delivery_channel(DeliveryChannel={'name': 'testchannel'}) + assert ce.exception.response['Error']['Code'] == 'NoSuchBucketException' + assert ce.exception.response['Error']['Message'] == 'Cannot find a S3 bucket with an empty bucket name.' + + with assert_raises(ClientError) as ce: + client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': ''}) + assert ce.exception.response['Error']['Code'] == 'NoSuchBucketException' + assert ce.exception.response['Error']['Message'] == 'Cannot find a S3 bucket with an empty bucket name.' + + # With an empty string for the S3 key prefix: + with assert_raises(ClientError) as ce: + client.put_delivery_channel(DeliveryChannel={ + 'name': 'testchannel', 's3BucketName': 'somebucket', 's3KeyPrefix': ''}) + assert ce.exception.response['Error']['Code'] == 'InvalidS3KeyPrefixException' + assert 'empty s3 key prefix.' 
in ce.exception.response['Error']['Message'] + + # With an empty string for the SNS ARN: + with assert_raises(ClientError) as ce: + client.put_delivery_channel(DeliveryChannel={ + 'name': 'testchannel', 's3BucketName': 'somebucket', 'snsTopicARN': ''}) + assert ce.exception.response['Error']['Code'] == 'InvalidSNSTopicARNException' + assert 'The sns topic arn' in ce.exception.response['Error']['Message'] + + # With an invalid delivery frequency: + with assert_raises(ClientError) as ce: + client.put_delivery_channel(DeliveryChannel={ + 'name': 'testchannel', + 's3BucketName': 'somebucket', + 'configSnapshotDeliveryProperties': {'deliveryFrequency': 'WRONG'} + }) + assert ce.exception.response['Error']['Code'] == 'InvalidDeliveryFrequency' + assert 'WRONG' in ce.exception.response['Error']['Message'] + assert 'TwentyFour_Hours' in ce.exception.response['Error']['Message'] + + # Create a proper one: + client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': 'somebucket'}) + result = client.describe_delivery_channels()['DeliveryChannels'] + assert len(result) == 1 + assert len(result[0].keys()) == 2 + assert result[0]['name'] == 'testchannel' + assert result[0]['s3BucketName'] == 'somebucket' + + # Overwrite it with another proper configuration: + client.put_delivery_channel(DeliveryChannel={ + 'name': 'testchannel', + 's3BucketName': 'somebucket', + 'snsTopicARN': 'sometopicarn', + 'configSnapshotDeliveryProperties': {'deliveryFrequency': 'TwentyFour_Hours'} + }) + result = client.describe_delivery_channels()['DeliveryChannels'] + assert len(result) == 1 + assert len(result[0].keys()) == 4 + assert result[0]['name'] == 'testchannel' + assert result[0]['s3BucketName'] == 'somebucket' + assert result[0]['snsTopicARN'] == 'sometopicarn' + assert result[0]['configSnapshotDeliveryProperties']['deliveryFrequency'] == 'TwentyFour_Hours' + + # Can only have 1: + with assert_raises(ClientError) as ce: + 
client.put_delivery_channel(DeliveryChannel={'name': 'testchannel2', 's3BucketName': 'somebucket'}) + assert ce.exception.response['Error']['Code'] == 'MaxNumberOfDeliveryChannelsExceededException' + assert 'because the maximum number of delivery channels: 1 is reached.' in ce.exception.response['Error']['Message'] + + +@mock_config +def test_describe_delivery_channels(): + client = boto3.client('config', region_name='us-west-2') + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + + # Without any channels: + result = client.describe_delivery_channels() + assert not result['DeliveryChannels'] + + client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': 'somebucket'}) + result = client.describe_delivery_channels()['DeliveryChannels'] + assert len(result) == 1 + assert len(result[0].keys()) == 2 + assert result[0]['name'] == 'testchannel' + assert result[0]['s3BucketName'] == 'somebucket' + + # Overwrite it with another proper configuration: + client.put_delivery_channel(DeliveryChannel={ + 'name': 'testchannel', + 's3BucketName': 'somebucket', + 'snsTopicARN': 'sometopicarn', + 'configSnapshotDeliveryProperties': {'deliveryFrequency': 'TwentyFour_Hours'} + }) + result = client.describe_delivery_channels()['DeliveryChannels'] + assert len(result) == 1 + assert len(result[0].keys()) == 4 + assert result[0]['name'] == 'testchannel' + assert result[0]['s3BucketName'] == 'somebucket' + assert result[0]['snsTopicARN'] == 'sometopicarn' + assert result[0]['configSnapshotDeliveryProperties']['deliveryFrequency'] == 'TwentyFour_Hours' + + # Specify an incorrect name: + with assert_raises(ClientError) as ce: + client.describe_delivery_channels(DeliveryChannelNames=['wrong']) + assert ce.exception.response['Error']['Code'] == 
'NoSuchDeliveryChannelException' + assert 'wrong' in ce.exception.response['Error']['Message'] + + # And with both a good and wrong name: + with assert_raises(ClientError) as ce: + client.describe_delivery_channels(DeliveryChannelNames=['testchannel', 'wrong']) + assert ce.exception.response['Error']['Code'] == 'NoSuchDeliveryChannelException' + assert 'wrong' in ce.exception.response['Error']['Message'] + + +@mock_config +def test_start_configuration_recorder(): + client = boto3.client('config', region_name='us-west-2') + + # Without a config recorder: + with assert_raises(ClientError) as ce: + client.start_configuration_recorder(ConfigurationRecorderName='testrecorder') + assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException' + + # Make the config recorder; + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + + # Without a delivery channel: + with assert_raises(ClientError) as ce: + client.start_configuration_recorder(ConfigurationRecorderName='testrecorder') + assert ce.exception.response['Error']['Code'] == 'NoAvailableDeliveryChannelException' + + # Make the delivery channel: + client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': 'somebucket'}) + + # Start it: + client.start_configuration_recorder(ConfigurationRecorderName='testrecorder') + + # Verify it's enabled: + result = client.describe_configuration_recorder_status()['ConfigurationRecordersStatus'] + lower_bound = (datetime.utcnow() - timedelta(minutes=5)) + assert result[0]['recording'] + assert result[0]['lastStatus'] == 'PENDING' + assert lower_bound < result[0]['lastStartTime'].replace(tzinfo=None) <= datetime.utcnow() + assert lower_bound < result[0]['lastStatusChangeTime'].replace(tzinfo=None) <= datetime.utcnow() + + 
+@mock_config +def test_stop_configuration_recorder(): + client = boto3.client('config', region_name='us-west-2') + + # Without a config recorder: + with assert_raises(ClientError) as ce: + client.stop_configuration_recorder(ConfigurationRecorderName='testrecorder') + assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException' + + # Make the config recorder; + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + + # Make the delivery channel for creation: + client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': 'somebucket'}) + + # Start it: + client.start_configuration_recorder(ConfigurationRecorderName='testrecorder') + client.stop_configuration_recorder(ConfigurationRecorderName='testrecorder') + + # Verify it's disabled: + result = client.describe_configuration_recorder_status()['ConfigurationRecordersStatus'] + lower_bound = (datetime.utcnow() - timedelta(minutes=5)) + assert not result[0]['recording'] + assert result[0]['lastStatus'] == 'PENDING' + assert lower_bound < result[0]['lastStartTime'].replace(tzinfo=None) <= datetime.utcnow() + assert lower_bound < result[0]['lastStopTime'].replace(tzinfo=None) <= datetime.utcnow() + assert lower_bound < result[0]['lastStatusChangeTime'].replace(tzinfo=None) <= datetime.utcnow() + + +@mock_config +def test_describe_configuration_recorder_status(): + client = boto3.client('config', region_name='us-west-2') + + # Without any: + result = client.describe_configuration_recorder_status() + assert not result['ConfigurationRecordersStatus'] + + # Make the config recorder; + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': 
False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + + # Without specifying a config recorder: + result = client.describe_configuration_recorder_status()['ConfigurationRecordersStatus'] + assert len(result) == 1 + assert result[0]['name'] == 'testrecorder' + assert not result[0]['recording'] + + # With a proper name: + result = client.describe_configuration_recorder_status( + ConfigurationRecorderNames=['testrecorder'])['ConfigurationRecordersStatus'] + assert len(result) == 1 + assert result[0]['name'] == 'testrecorder' + assert not result[0]['recording'] + + # Invalid name: + with assert_raises(ClientError) as ce: + client.describe_configuration_recorder_status(ConfigurationRecorderNames=['testrecorder', 'wrong']) + assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException' + assert 'wrong' in ce.exception.response['Error']['Message'] + + +@mock_config +def test_delete_configuration_recorder(): + client = boto3.client('config', region_name='us-west-2') + + # Make the config recorder; + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + + # Delete it: + client.delete_configuration_recorder(ConfigurationRecorderName='testrecorder') + + # Try again -- it should be deleted: + with assert_raises(ClientError) as ce: + client.delete_configuration_recorder(ConfigurationRecorderName='testrecorder') + assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException' + + +@mock_config +def test_delete_delivery_channel(): + client = boto3.client('config', region_name='us-west-2') + + # Need a recorder to test the constraint on recording being enabled: + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 
'allSupported': False, + 'includeGlobalResourceTypes': False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': 'somebucket'}) + client.start_configuration_recorder(ConfigurationRecorderName='testrecorder') + + # With the recorder enabled: + with assert_raises(ClientError) as ce: + client.delete_delivery_channel(DeliveryChannelName='testchannel') + assert ce.exception.response['Error']['Code'] == 'LastDeliveryChannelDeleteFailedException' + assert 'because there is a running configuration recorder.' in ce.exception.response['Error']['Message'] + + # Stop recording: + client.stop_configuration_recorder(ConfigurationRecorderName='testrecorder') + + # Try again: + client.delete_delivery_channel(DeliveryChannelName='testchannel') + + # Verify: + with assert_raises(ClientError) as ce: + client.delete_delivery_channel(DeliveryChannelName='testchannel') + assert ce.exception.response['Error']['Code'] == 'NoSuchDeliveryChannelException' diff --git a/tests/test_dynamodb2/test_dynamodb.py b/tests/test_dynamodb2/test_dynamodb.py index 8cef24cda..32fd61d16 100644 --- a/tests/test_dynamodb2/test_dynamodb.py +++ b/tests/test_dynamodb2/test_dynamodb.py @@ -1,1338 +1,1681 @@ -from __future__ import unicode_literals, print_function - -from decimal import Decimal - -import six -import boto -import boto3 -from boto3.dynamodb.conditions import Attr, Key -import sure # noqa -import requests -from moto import mock_dynamodb2, mock_dynamodb2_deprecated -from moto.dynamodb2 import dynamodb_backend2 -from boto.exception import JSONResponseError -from botocore.exceptions import ClientError -from tests.helpers import requires_boto_gte -import tests.backport_assert_raises - -import moto.dynamodb2.comparisons -import moto.dynamodb2.models - -from nose.tools import assert_raises -try: - import boto.dynamodb2 -except ImportError: - print("This boto version is not supported") - - 
-@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_list_tables(): - name = 'TestTable' - # Should make tables properly with boto - dynamodb_backend2.create_table(name, schema=[ - {u'KeyType': u'HASH', u'AttributeName': u'forum_name'}, - {u'KeyType': u'RANGE', u'AttributeName': u'subject'} - ]) - conn = boto.dynamodb2.connect_to_region( - 'us-east-1', - aws_access_key_id="ak", - aws_secret_access_key="sk") - assert conn.list_tables()["TableNames"] == [name] - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_list_tables_layer_1(): - # Should make tables properly with boto - dynamodb_backend2.create_table("test_1", schema=[ - {u'KeyType': u'HASH', u'AttributeName': u'name'} - ]) - dynamodb_backend2.create_table("test_2", schema=[ - {u'KeyType': u'HASH', u'AttributeName': u'name'} - ]) - conn = boto.dynamodb2.connect_to_region( - 'us-east-1', - aws_access_key_id="ak", - aws_secret_access_key="sk") - - res = conn.list_tables(limit=1) - expected = {"TableNames": ["test_1"], "LastEvaluatedTableName": "test_1"} - res.should.equal(expected) - - res = conn.list_tables(limit=1, exclusive_start_table_name="test_1") - expected = {"TableNames": ["test_2"]} - res.should.equal(expected) - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_describe_missing_table(): - conn = boto.dynamodb2.connect_to_region( - 'us-west-2', - aws_access_key_id="ak", - aws_secret_access_key="sk") - with assert_raises(JSONResponseError): - conn.describe_table('messages') - - -@requires_boto_gte("2.9") -@mock_dynamodb2 -def test_list_table_tags(): - name = 'TestTable' - conn = boto3.client('dynamodb', - region_name='us-west-2', - aws_access_key_id="ak", - aws_secret_access_key="sk") - conn.create_table(TableName=name, - KeySchema=[{'AttributeName':'id','KeyType':'HASH'}], - AttributeDefinitions=[{'AttributeName':'id','AttributeType':'S'}], - ProvisionedThroughput={'ReadCapacityUnits':5,'WriteCapacityUnits':5}) - table_description = 
conn.describe_table(TableName=name) - arn = table_description['Table']['TableArn'] - - # Tag table - tags = [{'Key': 'TestTag', 'Value': 'TestValue'}, {'Key': 'TestTag2', 'Value': 'TestValue2'}] - conn.tag_resource(ResourceArn=arn, Tags=tags) - - # Check tags - resp = conn.list_tags_of_resource(ResourceArn=arn) - assert resp["Tags"] == tags - - # Remove 1 tag - conn.untag_resource(ResourceArn=arn, TagKeys=['TestTag']) - - # Check tags - resp = conn.list_tags_of_resource(ResourceArn=arn) - assert resp["Tags"] == [{'Key': 'TestTag2', 'Value': 'TestValue2'}] - - -@requires_boto_gte("2.9") -@mock_dynamodb2 -def test_list_table_tags_empty(): - name = 'TestTable' - conn = boto3.client('dynamodb', - region_name='us-west-2', - aws_access_key_id="ak", - aws_secret_access_key="sk") - conn.create_table(TableName=name, - KeySchema=[{'AttributeName':'id','KeyType':'HASH'}], - AttributeDefinitions=[{'AttributeName':'id','AttributeType':'S'}], - ProvisionedThroughput={'ReadCapacityUnits':5,'WriteCapacityUnits':5}) - table_description = conn.describe_table(TableName=name) - arn = table_description['Table']['TableArn'] - tags = [{'Key':'TestTag', 'Value': 'TestValue'}] - # conn.tag_resource(ResourceArn=arn, - # Tags=tags) - resp = conn.list_tags_of_resource(ResourceArn=arn) - assert resp["Tags"] == [] - - -@requires_boto_gte("2.9") -@mock_dynamodb2 -def test_list_table_tags_paginated(): - name = 'TestTable' - conn = boto3.client('dynamodb', - region_name='us-west-2', - aws_access_key_id="ak", - aws_secret_access_key="sk") - conn.create_table(TableName=name, - KeySchema=[{'AttributeName':'id','KeyType':'HASH'}], - AttributeDefinitions=[{'AttributeName':'id','AttributeType':'S'}], - ProvisionedThroughput={'ReadCapacityUnits':5,'WriteCapacityUnits':5}) - table_description = conn.describe_table(TableName=name) - arn = table_description['Table']['TableArn'] - for i in range(11): - tags = [{'Key':'TestTag%d' % i, 'Value': 'TestValue'}] - conn.tag_resource(ResourceArn=arn, - Tags=tags) - 
resp = conn.list_tags_of_resource(ResourceArn=arn) - assert len(resp["Tags"]) == 10 - assert 'NextToken' in resp.keys() - resp2 = conn.list_tags_of_resource(ResourceArn=arn, - NextToken=resp['NextToken']) - assert len(resp2["Tags"]) == 1 - assert 'NextToken' not in resp2.keys() - - -@requires_boto_gte("2.9") -@mock_dynamodb2 -def test_list_not_found_table_tags(): - conn = boto3.client('dynamodb', - region_name='us-west-2', - aws_access_key_id="ak", - aws_secret_access_key="sk") - arn = 'DymmyArn' - try: - conn.list_tags_of_resource(ResourceArn=arn) - except ClientError as exception: - assert exception.response['Error']['Code'] == "ResourceNotFoundException" - - -@requires_boto_gte("2.9") -@mock_dynamodb2 -def test_item_add_empty_string_exception(): - name = 'TestTable' - conn = boto3.client('dynamodb', - region_name='us-west-2', - aws_access_key_id="ak", - aws_secret_access_key="sk") - conn.create_table(TableName=name, - KeySchema=[{'AttributeName':'forum_name','KeyType':'HASH'}], - AttributeDefinitions=[{'AttributeName':'forum_name','AttributeType':'S'}], - ProvisionedThroughput={'ReadCapacityUnits':5,'WriteCapacityUnits':5}) - - with assert_raises(ClientError) as ex: - conn.put_item( - TableName=name, - Item={ - 'forum_name': { 'S': 'LOLCat Forum' }, - 'subject': { 'S': 'Check this out!' 
}, - 'Body': { 'S': 'http://url_to_lolcat.gif'}, - 'SentBy': { 'S': "" }, - 'ReceivedTime': { 'S': '12/9/2011 11:36:03 PM'}, - } - ) - - ex.exception.response['Error']['Code'].should.equal('ValidationException') - ex.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) - ex.exception.response['Error']['Message'].should.equal( - 'One or more parameter values were invalid: An AttributeValue may not contain an empty string' - ) - - -@requires_boto_gte("2.9") -@mock_dynamodb2 -def test_update_item_with_empty_string_exception(): - name = 'TestTable' - conn = boto3.client('dynamodb', - region_name='us-west-2', - aws_access_key_id="ak", - aws_secret_access_key="sk") - conn.create_table(TableName=name, - KeySchema=[{'AttributeName':'forum_name','KeyType':'HASH'}], - AttributeDefinitions=[{'AttributeName':'forum_name','AttributeType':'S'}], - ProvisionedThroughput={'ReadCapacityUnits':5,'WriteCapacityUnits':5}) - - conn.put_item( - TableName=name, - Item={ - 'forum_name': { 'S': 'LOLCat Forum' }, - 'subject': { 'S': 'Check this out!' 
}, - 'Body': { 'S': 'http://url_to_lolcat.gif'}, - 'SentBy': { 'S': "test" }, - 'ReceivedTime': { 'S': '12/9/2011 11:36:03 PM'}, - } - ) - - with assert_raises(ClientError) as ex: - conn.update_item( - TableName=name, - Key={ - 'forum_name': { 'S': 'LOLCat Forum'}, - }, - UpdateExpression='set Body=:Body', - ExpressionAttributeValues={ - ':Body': {'S': ''} - }) - - ex.exception.response['Error']['Code'].should.equal('ValidationException') - ex.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) - ex.exception.response['Error']['Message'].should.equal( - 'One or more parameter values were invalid: An AttributeValue may not contain an empty string' - ) - - -@requires_boto_gte("2.9") -@mock_dynamodb2 -def test_query_invalid_table(): - conn = boto3.client('dynamodb', - region_name='us-west-2', - aws_access_key_id="ak", - aws_secret_access_key="sk") - try: - conn.query(TableName='invalid_table', KeyConditionExpression='index1 = :partitionkeyval', ExpressionAttributeValues={':partitionkeyval': {'S':'test'}}) - except ClientError as exception: - assert exception.response['Error']['Code'] == "ResourceNotFoundException" - - -@requires_boto_gte("2.9") -@mock_dynamodb2 -def test_scan_returns_consumed_capacity(): - name = 'TestTable' - conn = boto3.client('dynamodb', - region_name='us-west-2', - aws_access_key_id="ak", - aws_secret_access_key="sk") - - conn.create_table(TableName=name, - KeySchema=[{'AttributeName':'forum_name','KeyType':'HASH'}], - AttributeDefinitions=[{'AttributeName':'forum_name','AttributeType':'S'}], - ProvisionedThroughput={'ReadCapacityUnits':5,'WriteCapacityUnits':5}) - - conn.put_item( - TableName=name, - Item={ - 'forum_name': { 'S': 'LOLCat Forum' }, - 'subject': { 'S': 'Check this out!' 
}, - 'Body': { 'S': 'http://url_to_lolcat.gif'}, - 'SentBy': { 'S': "test" }, - 'ReceivedTime': { 'S': '12/9/2011 11:36:03 PM'}, - } - ) - - response = conn.scan( - TableName=name, - ) - - assert 'ConsumedCapacity' in response - assert 'CapacityUnits' in response['ConsumedCapacity'] - assert response['ConsumedCapacity']['TableName'] == name - - -@requires_boto_gte("2.9") -@mock_dynamodb2 -def test_put_item_with_special_chars(): - name = 'TestTable' - conn = boto3.client('dynamodb', - region_name='us-west-2', - aws_access_key_id="ak", - aws_secret_access_key="sk") - - conn.create_table(TableName=name, - KeySchema=[{'AttributeName':'forum_name','KeyType':'HASH'}], - AttributeDefinitions=[{'AttributeName':'forum_name','AttributeType':'S'}], - ProvisionedThroughput={'ReadCapacityUnits':5,'WriteCapacityUnits':5}) - - conn.put_item( - TableName=name, - Item={ - 'forum_name': { 'S': 'LOLCat Forum' }, - 'subject': { 'S': 'Check this out!' }, - 'Body': { 'S': 'http://url_to_lolcat.gif'}, - 'SentBy': { 'S': "test" }, - 'ReceivedTime': { 'S': '12/9/2011 11:36:03 PM'}, - '"': {"S": "foo"}, - } - ) - - -@requires_boto_gte("2.9") -@mock_dynamodb2 -def test_query_returns_consumed_capacity(): - dynamodb = boto3.resource('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. 
- table = dynamodb.create_table( - TableName='users', - KeySchema=[ - { - 'AttributeName': 'forum_name', - 'KeyType': 'HASH' - }, - { - 'AttributeName': 'subject', - 'KeyType': 'RANGE' - }, - ], - AttributeDefinitions=[ - { - 'AttributeName': 'forum_name', - 'AttributeType': 'S' - }, - { - 'AttributeName': 'subject', - 'AttributeType': 'S' - }, - ], - ProvisionedThroughput={ - 'ReadCapacityUnits': 5, - 'WriteCapacityUnits': 5 - } - ) - table = dynamodb.Table('users') - - table.put_item(Item={ - 'forum_name': 'the-key', - 'subject': '123', - 'body': 'some test message' - }) - - results = table.query( - KeyConditionExpression=Key('forum_name').eq( - 'the-key') - ) - - assert 'ConsumedCapacity' in results - assert 'CapacityUnits' in results['ConsumedCapacity'] - assert results['ConsumedCapacity']['CapacityUnits'] == 1 - - -@mock_dynamodb2 -def test_basic_projection_expressions(): - dynamodb = boto3.resource('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. - table = dynamodb.create_table( - TableName='users', - KeySchema=[ - { - 'AttributeName': 'forum_name', - 'KeyType': 'HASH' - }, - { - 'AttributeName': 'subject', - 'KeyType': 'RANGE' - }, - ], - AttributeDefinitions=[ - { - 'AttributeName': 'forum_name', - 'AttributeType': 'S' - }, - { - 'AttributeName': 'subject', - 'AttributeType': 'S' - }, - ], - ProvisionedThroughput={ - 'ReadCapacityUnits': 5, - 'WriteCapacityUnits': 5 - } - ) - table = dynamodb.Table('users') - - table.put_item(Item={ - 'forum_name': 'the-key', - 'subject': '123', - 'body': 'some test message' - }) - - table.put_item(Item={ - 'forum_name': 'not-the-key', - 'subject': '123', - 'body': 'some other test message' - }) - # Test a query returning all items - results = table.query( - KeyConditionExpression=Key('forum_name').eq( - 'the-key'), - ProjectionExpression='body, subject' - ) - - assert 'body' in results['Items'][0] - assert results['Items'][0]['body'] == 'some test message' - assert 'subject' in results['Items'][0] - - 
table.put_item(Item={ - 'forum_name': 'the-key', - 'subject': '1234', - 'body': 'yet another test message' - }) - - results = table.query( - KeyConditionExpression=Key('forum_name').eq( - 'the-key'), - ProjectionExpression='body' - ) - - assert 'body' in results['Items'][0] - assert 'subject' not in results['Items'][0] - assert results['Items'][0]['body'] == 'some test message' - assert 'body' in results['Items'][1] - assert 'subject' not in results['Items'][1] - assert results['Items'][1]['body'] == 'yet another test message' - - # The projection expression should not remove data from storage - results = table.query( - KeyConditionExpression=Key('forum_name').eq( - 'the-key'), - ) - assert 'subject' in results['Items'][0] - assert 'body' in results['Items'][1] - assert 'forum_name' in results['Items'][1] - - -@mock_dynamodb2 -def test_basic_projection_expressions_with_attr_expression_names(): - dynamodb = boto3.resource('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. 
- table = dynamodb.create_table( - TableName='users', - KeySchema=[ - { - 'AttributeName': 'forum_name', - 'KeyType': 'HASH' - }, - { - 'AttributeName': 'subject', - 'KeyType': 'RANGE' - }, - ], - AttributeDefinitions=[ - { - 'AttributeName': 'forum_name', - 'AttributeType': 'S' - }, - { - 'AttributeName': 'subject', - 'AttributeType': 'S' - }, - ], - ProvisionedThroughput={ - 'ReadCapacityUnits': 5, - 'WriteCapacityUnits': 5 - } - ) - table = dynamodb.Table('users') - - table.put_item(Item={ - 'forum_name': 'the-key', - 'subject': '123', - 'body': 'some test message', - 'attachment': 'something' - }) - - table.put_item(Item={ - 'forum_name': 'not-the-key', - 'subject': '123', - 'body': 'some other test message', - 'attachment': 'something' - }) - # Test a query returning all items - - results = table.query( - KeyConditionExpression=Key('forum_name').eq( - 'the-key'), - ProjectionExpression='#rl, #rt, subject', - ExpressionAttributeNames={ - '#rl': 'body', - '#rt': 'attachment' - }, - ) - - assert 'body' in results['Items'][0] - assert results['Items'][0]['body'] == 'some test message' - assert 'subject' in results['Items'][0] - assert results['Items'][0]['subject'] == '123' - assert 'attachment' in results['Items'][0] - assert results['Items'][0]['attachment'] == 'something' - - -@mock_dynamodb2 -def test_put_item_returns_consumed_capacity(): - dynamodb = boto3.resource('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. 
- table = dynamodb.create_table( - TableName='users', - KeySchema=[ - { - 'AttributeName': 'forum_name', - 'KeyType': 'HASH' - }, - { - 'AttributeName': 'subject', - 'KeyType': 'RANGE' - }, - ], - AttributeDefinitions=[ - { - 'AttributeName': 'forum_name', - 'AttributeType': 'S' - }, - { - 'AttributeName': 'subject', - 'AttributeType': 'S' - }, - ], - ProvisionedThroughput={ - 'ReadCapacityUnits': 5, - 'WriteCapacityUnits': 5 - } - ) - table = dynamodb.Table('users') - - response = table.put_item(Item={ - 'forum_name': 'the-key', - 'subject': '123', - 'body': 'some test message', - }) - - assert 'ConsumedCapacity' in response - - -@mock_dynamodb2 -def test_update_item_returns_consumed_capacity(): - dynamodb = boto3.resource('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. - table = dynamodb.create_table( - TableName='users', - KeySchema=[ - { - 'AttributeName': 'forum_name', - 'KeyType': 'HASH' - }, - { - 'AttributeName': 'subject', - 'KeyType': 'RANGE' - }, - ], - AttributeDefinitions=[ - { - 'AttributeName': 'forum_name', - 'AttributeType': 'S' - }, - { - 'AttributeName': 'subject', - 'AttributeType': 'S' - }, - ], - ProvisionedThroughput={ - 'ReadCapacityUnits': 5, - 'WriteCapacityUnits': 5 - } - ) - table = dynamodb.Table('users') - - table.put_item(Item={ - 'forum_name': 'the-key', - 'subject': '123', - 'body': 'some test message', - }) - - response = table.update_item(Key={ - 'forum_name': 'the-key', - 'subject': '123' - }, - UpdateExpression='set body=:tb', - ExpressionAttributeValues={ - ':tb': 'a new message' - }) - - assert 'ConsumedCapacity' in response - assert 'CapacityUnits' in response['ConsumedCapacity'] - assert 'TableName' in response['ConsumedCapacity'] - - -@mock_dynamodb2 -def test_get_item_returns_consumed_capacity(): - dynamodb = boto3.resource('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. 
- table = dynamodb.create_table( - TableName='users', - KeySchema=[ - { - 'AttributeName': 'forum_name', - 'KeyType': 'HASH' - }, - { - 'AttributeName': 'subject', - 'KeyType': 'RANGE' - }, - ], - AttributeDefinitions=[ - { - 'AttributeName': 'forum_name', - 'AttributeType': 'S' - }, - { - 'AttributeName': 'subject', - 'AttributeType': 'S' - }, - ], - ProvisionedThroughput={ - 'ReadCapacityUnits': 5, - 'WriteCapacityUnits': 5 - } - ) - table = dynamodb.Table('users') - - table.put_item(Item={ - 'forum_name': 'the-key', - 'subject': '123', - 'body': 'some test message', - }) - - response = table.get_item(Key={ - 'forum_name': 'the-key', - 'subject': '123' - }) - - assert 'ConsumedCapacity' in response - assert 'CapacityUnits' in response['ConsumedCapacity'] - assert 'TableName' in response['ConsumedCapacity'] - - -def test_filter_expression(): - row1 = moto.dynamodb2.models.Item(None, None, None, None, {'Id': {'N': '8'}, 'Subs': {'N': '5'}, 'Desc': {'S': 'Some description'}, 'KV': {'SS': ['test1', 'test2']}}) - row2 = moto.dynamodb2.models.Item(None, None, None, None, {'Id': {'N': '8'}, 'Subs': {'N': '10'}, 'Desc': {'S': 'A description'}, 'KV': {'SS': ['test3', 'test4']}}) - - # NOT test 1 - filter_expr = moto.dynamodb2.comparisons.get_filter_expression('NOT attribute_not_exists(Id)', {}, {}) - filter_expr.expr(row1).should.be(True) - - # NOT test 2 - filter_expr = moto.dynamodb2.comparisons.get_filter_expression('NOT (Id = :v0)', {}, {':v0': {'N': 8}}) - filter_expr.expr(row1).should.be(False) # Id = 8 so should be false - - # AND test - filter_expr = moto.dynamodb2.comparisons.get_filter_expression('Id > :v0 AND Subs < :v1', {}, {':v0': {'N': 5}, ':v1': {'N': 7}}) - filter_expr.expr(row1).should.be(True) - filter_expr.expr(row2).should.be(False) - - # OR test - filter_expr = moto.dynamodb2.comparisons.get_filter_expression('Id = :v0 OR Id=:v1', {}, {':v0': {'N': 5}, ':v1': {'N': 8}}) - filter_expr.expr(row1).should.be(True) - - # BETWEEN test - filter_expr = 
moto.dynamodb2.comparisons.get_filter_expression('Id BETWEEN :v0 AND :v1', {}, {':v0': {'N': 5}, ':v1': {'N': 10}}) - filter_expr.expr(row1).should.be(True) - - # PAREN test - filter_expr = moto.dynamodb2.comparisons.get_filter_expression('Id = :v0 AND (Subs = :v0 OR Subs = :v1)', {}, {':v0': {'N': 8}, ':v1': {'N': 5}}) - filter_expr.expr(row1).should.be(True) - - # IN test - filter_expr = moto.dynamodb2.comparisons.get_filter_expression('Id IN :v0', {}, {':v0': {'NS': [7, 8, 9]}}) - filter_expr.expr(row1).should.be(True) - - # attribute function tests (with extra spaces) - filter_expr = moto.dynamodb2.comparisons.get_filter_expression('attribute_exists(Id) AND attribute_not_exists (User)', {}, {}) - filter_expr.expr(row1).should.be(True) - - filter_expr = moto.dynamodb2.comparisons.get_filter_expression('attribute_type(Id, N)', {}, {}) - filter_expr.expr(row1).should.be(True) - - # beginswith function test - filter_expr = moto.dynamodb2.comparisons.get_filter_expression('begins_with(Desc, Some)', {}, {}) - filter_expr.expr(row1).should.be(True) - filter_expr.expr(row2).should.be(False) - - # contains function test - filter_expr = moto.dynamodb2.comparisons.get_filter_expression('contains(KV, test1)', {}, {}) - filter_expr.expr(row1).should.be(True) - filter_expr.expr(row2).should.be(False) - - # size function test - filter_expr = moto.dynamodb2.comparisons.get_filter_expression('size(Desc) > size(KV)', {}, {}) - filter_expr.expr(row1).should.be(True) - - # Expression from @batkuip - filter_expr = moto.dynamodb2.comparisons.get_filter_expression( - '(#n0 < :v0 AND attribute_not_exists(#n1))', - {'#n0': 'Subs', '#n1': 'fanout_ts'}, - {':v0': {'N': '7'}} - ) - filter_expr.expr(row1).should.be(True) - # Expression from to check contains on string value - filter_expr = moto.dynamodb2.comparisons.get_filter_expression( - 'contains(#n0, :v0)', - {'#n0': 'Desc'}, - {':v0': {'S': 'Some'}} - ) - filter_expr.expr(row1).should.be(True) - 
filter_expr.expr(row2).should.be(False) - - -@mock_dynamodb2 -def test_query_filter(): - client = boto3.client('dynamodb', region_name='us-east-1') - dynamodb = boto3.resource('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. - client.create_table( - TableName='test1', - AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], - KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], - ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} - ) - client.put_item( - TableName='test1', - Item={ - 'client': {'S': 'client1'}, - 'app': {'S': 'app1'} - } - ) - client.put_item( - TableName='test1', - Item={ - 'client': {'S': 'client1'}, - 'app': {'S': 'app2'} - } - ) - - table = dynamodb.Table('test1') - response = table.query( - KeyConditionExpression=Key('client').eq('client1') - ) - assert response['Count'] == 2 - - response = table.query( - KeyConditionExpression=Key('client').eq('client1'), - FilterExpression=Attr('app').eq('app2') - ) - assert response['Count'] == 1 - assert response['Items'][0]['app'] == 'app2' - response = table.query( - KeyConditionExpression=Key('client').eq('client1'), - FilterExpression=Attr('app').contains('app') - ) - assert response['Count'] == 2 - - -@mock_dynamodb2 -def test_scan_filter(): - client = boto3.client('dynamodb', region_name='us-east-1') - dynamodb = boto3.resource('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. 
- client.create_table( - TableName='test1', - AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], - KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], - ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} - ) - client.put_item( - TableName='test1', - Item={ - 'client': {'S': 'client1'}, - 'app': {'S': 'app1'} - } - ) - - table = dynamodb.Table('test1') - response = table.scan( - FilterExpression=Attr('app').eq('app2') - ) - assert response['Count'] == 0 - - response = table.scan( - FilterExpression=Attr('app').eq('app1') - ) - assert response['Count'] == 1 - - -@mock_dynamodb2 -def test_scan_filter2(): - client = boto3.client('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. - client.create_table( - TableName='test1', - AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'N'}], - KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], - ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} - ) - client.put_item( - TableName='test1', - Item={ - 'client': {'S': 'client1'}, - 'app': {'N': '1'} - } - ) - - response = client.scan( - TableName='test1', - Select='ALL_ATTRIBUTES', - FilterExpression='#tb >= :dt', - ExpressionAttributeNames={"#tb": "app"}, - ExpressionAttributeValues={":dt": {"N": str(1)}} - ) - assert response['Count'] == 1 - - -@mock_dynamodb2 -def test_scan_filter3(): - client = boto3.client('dynamodb', region_name='us-east-1') - dynamodb = boto3.resource('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. 
- client.create_table( - TableName='test1', - AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'N'}], - KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], - ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} - ) - client.put_item( - TableName='test1', - Item={ - 'client': {'S': 'client1'}, - 'app': {'N': '1'}, - 'active': {'BOOL': True} - } - ) - - table = dynamodb.Table('test1') - response = table.scan( - FilterExpression=Attr('active').eq(True) - ) - assert response['Count'] == 1 - - -@mock_dynamodb2 -def test_scan_filter4(): - client = boto3.client('dynamodb', region_name='us-east-1') - dynamodb = boto3.resource('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. - client.create_table( - TableName='test1', - AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'N'}], - KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], - ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} - ) - - table = dynamodb.Table('test1') - response = table.scan( - FilterExpression=Attr('epoch_ts').lt(7) & Attr('fanout_ts').not_exists() - ) - # Just testing - assert response['Count'] == 0 - - -@mock_dynamodb2 -def test_bad_scan_filter(): - client = boto3.client('dynamodb', region_name='us-east-1') - dynamodb = boto3.resource('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. 
- client.create_table( - TableName='test1', - AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], - KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], - ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} - ) - table = dynamodb.Table('test1') - - # Bad expression - try: - table.scan( - FilterExpression='client test' - ) - except ClientError as err: - err.response['Error']['Code'].should.equal('ValidationError') - else: - raise RuntimeError('Should of raised ResourceInUseException') - - -@mock_dynamodb2 -def test_duplicate_create(): - client = boto3.client('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. - client.create_table( - TableName='test1', - AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], - KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], - ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} - ) - - try: - client.create_table( - TableName='test1', - AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], - KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], - ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} - ) - except ClientError as err: - err.response['Error']['Code'].should.equal('ResourceInUseException') - else: - raise RuntimeError('Should of raised ResourceInUseException') - - -@mock_dynamodb2 -def test_delete_table(): - client = boto3.client('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. 
- client.create_table( - TableName='test1', - AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], - KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], - ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} - ) - - client.delete_table(TableName='test1') - - resp = client.list_tables() - len(resp['TableNames']).should.equal(0) - - try: - client.delete_table(TableName='test1') - except ClientError as err: - err.response['Error']['Code'].should.equal('ResourceNotFoundException') - else: - raise RuntimeError('Should of raised ResourceNotFoundException') - - -@mock_dynamodb2 -def test_delete_item(): - client = boto3.client('dynamodb', region_name='us-east-1') - dynamodb = boto3.resource('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. - client.create_table( - TableName='test1', - AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], - KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], - ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} - ) - client.put_item( - TableName='test1', - Item={ - 'client': {'S': 'client1'}, - 'app': {'S': 'app1'} - } - ) - client.put_item( - TableName='test1', - Item={ - 'client': {'S': 'client1'}, - 'app': {'S': 'app2'} - } - ) - - table = dynamodb.Table('test1') - response = table.scan() - assert response['Count'] == 2 - - # Test deletion and returning old value - response = table.delete_item(Key={'client': 'client1', 'app': 'app1'}, ReturnValues='ALL_OLD') - response['Attributes'].should.contain('client') - response['Attributes'].should.contain('app') - - response = table.scan() - assert response['Count'] == 1 - - # Test deletion returning nothing - response = table.delete_item(Key={'client': 'client1', 'app': 'app2'}) - 
len(response['Attributes']).should.equal(0) - - response = table.scan() - assert response['Count'] == 0 - - -@mock_dynamodb2 -def test_describe_limits(): - client = boto3.client('dynamodb', region_name='eu-central-1') - resp = client.describe_limits() - - resp['AccountMaxReadCapacityUnits'].should.equal(20000) - resp['AccountMaxWriteCapacityUnits'].should.equal(20000) - resp['TableMaxWriteCapacityUnits'].should.equal(10000) - resp['TableMaxReadCapacityUnits'].should.equal(10000) - - -@mock_dynamodb2 -def test_set_ttl(): - client = boto3.client('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. - client.create_table( - TableName='test1', - AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], - KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], - ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} - ) - - client.update_time_to_live( - TableName='test1', - TimeToLiveSpecification={ - 'Enabled': True, - 'AttributeName': 'expire' - } - ) - - resp = client.describe_time_to_live(TableName='test1') - resp['TimeToLiveDescription']['TimeToLiveStatus'].should.equal('ENABLED') - resp['TimeToLiveDescription']['AttributeName'].should.equal('expire') - - client.update_time_to_live( - TableName='test1', - TimeToLiveSpecification={ - 'Enabled': False, - 'AttributeName': 'expire' - } - ) - - resp = client.describe_time_to_live(TableName='test1') - resp['TimeToLiveDescription']['TimeToLiveStatus'].should.equal('DISABLED') - - -# https://github.com/spulec/moto/issues/1043 -@mock_dynamodb2 -def test_query_missing_expr_names(): - client = boto3.client('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. 
- client.create_table( - TableName='test1', - AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], - KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], - ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} - ) - client.put_item(TableName='test1', Item={'client': {'S': 'test1'}, 'app': {'S': 'test1'}}) - client.put_item(TableName='test1', Item={'client': {'S': 'test2'}, 'app': {'S': 'test2'}}) - - resp = client.query(TableName='test1', KeyConditionExpression='client=:client', - ExpressionAttributeValues={':client': {'S': 'test1'}}) - - resp['Count'].should.equal(1) - resp['Items'][0]['client']['S'].should.equal('test1') - - resp = client.query(TableName='test1', KeyConditionExpression=':name=test2', - ExpressionAttributeNames={':name': 'client'}) - - resp['Count'].should.equal(1) - resp['Items'][0]['client']['S'].should.equal('test2') - - -# https://github.com/spulec/moto/issues/1342 -@mock_dynamodb2 -def test_update_item_on_map(): - dynamodb = boto3.resource('dynamodb', region_name='us-east-1') - client = boto3.client('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. 
- dynamodb.create_table( - TableName='users', - KeySchema=[ - { - 'AttributeName': 'forum_name', - 'KeyType': 'HASH' - }, - { - 'AttributeName': 'subject', - 'KeyType': 'RANGE' - }, - ], - AttributeDefinitions=[ - { - 'AttributeName': 'forum_name', - 'AttributeType': 'S' - }, - { - 'AttributeName': 'subject', - 'AttributeType': 'S' - }, - ], - ProvisionedThroughput={ - 'ReadCapacityUnits': 5, - 'WriteCapacityUnits': 5 - } - ) - table = dynamodb.Table('users') - - table.put_item(Item={ - 'forum_name': 'the-key', - 'subject': '123', - 'body': {'nested': {'data': 'test'}}, - }) - - resp = table.scan() - resp['Items'][0]['body'].should.equal({'nested': {'data': 'test'}}) - - # Nonexistent nested attributes are supported for existing top-level attributes. - table.update_item(Key={ - 'forum_name': 'the-key', - 'subject': '123' - }, - UpdateExpression='SET body.#nested.#data = :tb, body.nested.#nonexistentnested.#data = :tb2', - ExpressionAttributeNames={ - '#nested': 'nested', - '#nonexistentnested': 'nonexistentnested', - '#data': 'data' - }, - ExpressionAttributeValues={ - ':tb': 'new_value', - ':tb2': 'other_value' - }) - - resp = table.scan() - resp['Items'][0]['body'].should.equal({ - 'nested': { - 'data': 'new_value', - 'nonexistentnested': {'data': 'other_value'} - } - }) - - # Test nested value for a nonexistent attribute. - with assert_raises(client.exceptions.ConditionalCheckFailedException): - table.update_item(Key={ - 'forum_name': 'the-key', - 'subject': '123' - }, - UpdateExpression='SET nonexistent.#nested = :tb', - ExpressionAttributeNames={ - '#nested': 'nested' - }, - ExpressionAttributeValues={ - ':tb': 'new_value' - }) - - - -# https://github.com/spulec/moto/issues/1358 -@mock_dynamodb2 -def test_update_if_not_exists(): - dynamodb = boto3.resource('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. 
- dynamodb.create_table( - TableName='users', - KeySchema=[ - { - 'AttributeName': 'forum_name', - 'KeyType': 'HASH' - }, - { - 'AttributeName': 'subject', - 'KeyType': 'RANGE' - }, - ], - AttributeDefinitions=[ - { - 'AttributeName': 'forum_name', - 'AttributeType': 'S' - }, - { - 'AttributeName': 'subject', - 'AttributeType': 'S' - }, - ], - ProvisionedThroughput={ - 'ReadCapacityUnits': 5, - 'WriteCapacityUnits': 5 - } - ) - table = dynamodb.Table('users') - - table.put_item(Item={ - 'forum_name': 'the-key', - 'subject': '123' - }) - - table.update_item(Key={ - 'forum_name': 'the-key', - 'subject': '123' - }, - # if_not_exists without space - UpdateExpression='SET created_at=if_not_exists(created_at,:created_at)', - ExpressionAttributeValues={ - ':created_at': 123 - } - ) - - resp = table.scan() - assert resp['Items'][0]['created_at'] == 123 - - table.update_item(Key={ - 'forum_name': 'the-key', - 'subject': '123' - }, - # if_not_exists with space - UpdateExpression='SET created_at = if_not_exists (created_at, :created_at)', - ExpressionAttributeValues={ - ':created_at': 456 - } - ) - - resp = table.scan() - # Still the original value - assert resp['Items'][0]['created_at'] == 123 - - -@mock_dynamodb2 -def test_query_global_secondary_index_when_created_via_update_table_resource(): - dynamodb = boto3.resource('dynamodb', region_name='us-east-1') - - # Create the DynamoDB table. 
- dynamodb.create_table( - TableName='users', - KeySchema=[ - { - 'AttributeName': 'user_id', - 'KeyType': 'HASH' - }, - ], - AttributeDefinitions=[ - { - 'AttributeName': 'user_id', - 'AttributeType': 'N', - }, - { - 'AttributeName': 'forum_name', - 'AttributeType': 'S' - }, - { - 'AttributeName': 'subject', - 'AttributeType': 'S' - }, - ], - ProvisionedThroughput={ - 'ReadCapacityUnits': 5, - 'WriteCapacityUnits': 5 - }, - ) - table = dynamodb.Table('users') - table.update( - AttributeDefinitions=[ - { - 'AttributeName': 'forum_name', - 'AttributeType': 'S' - }, - ], - GlobalSecondaryIndexUpdates=[ - {'Create': - { - 'IndexName': 'forum_name_index', - 'KeySchema': [ - { - 'AttributeName': 'forum_name', - 'KeyType': 'HASH', - }, - ], - 'Projection': { - 'ProjectionType': 'ALL', - }, - 'ProvisionedThroughput': { - 'ReadCapacityUnits': 5, - 'WriteCapacityUnits': 5 - }, - } - } - ] - ) - - next_user_id = 1 - for my_forum_name in ['cats', 'dogs']: - for my_subject in ['my pet is the cutest', 'wow look at what my pet did', "don't you love my pet?"]: - table.put_item(Item={'user_id': next_user_id, 'forum_name': my_forum_name, 'subject': my_subject}) - next_user_id += 1 - - # get all the cat users - forum_only_query_response = table.query( - IndexName='forum_name_index', - Select='ALL_ATTRIBUTES', - KeyConditionExpression=Key('forum_name').eq('cats'), - ) - forum_only_items = forum_only_query_response['Items'] - assert len(forum_only_items) == 3 - for item in forum_only_items: - assert item['forum_name'] == 'cats' - - # query all cat users with a particular subject - forum_and_subject_query_results = table.query( - IndexName='forum_name_index', - Select='ALL_ATTRIBUTES', - KeyConditionExpression=Key('forum_name').eq('cats'), - FilterExpression=Attr('subject').eq('my pet is the cutest'), - ) - forum_and_subject_items = forum_and_subject_query_results['Items'] - assert len(forum_and_subject_items) == 1 - assert forum_and_subject_items[0] == {'user_id': Decimal('1'), 
'forum_name': 'cats', - 'subject': 'my pet is the cutest'} +from __future__ import unicode_literals, print_function + +from decimal import Decimal + +import six +import boto +import boto3 +from boto3.dynamodb.conditions import Attr, Key +import sure # noqa +import requests +from moto import mock_dynamodb2, mock_dynamodb2_deprecated +from moto.dynamodb2 import dynamodb_backend2 +from boto.exception import JSONResponseError +from botocore.exceptions import ClientError +from tests.helpers import requires_boto_gte +import tests.backport_assert_raises + +import moto.dynamodb2.comparisons +import moto.dynamodb2.models + +from nose.tools import assert_raises +try: + import boto.dynamodb2 +except ImportError: + print("This boto version is not supported") + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_list_tables(): + name = 'TestTable' + # Should make tables properly with boto + dynamodb_backend2.create_table(name, schema=[ + {u'KeyType': u'HASH', u'AttributeName': u'forum_name'}, + {u'KeyType': u'RANGE', u'AttributeName': u'subject'} + ]) + conn = boto.dynamodb2.connect_to_region( + 'us-east-1', + aws_access_key_id="ak", + aws_secret_access_key="sk") + assert conn.list_tables()["TableNames"] == [name] + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_list_tables_layer_1(): + # Should make tables properly with boto + dynamodb_backend2.create_table("test_1", schema=[ + {u'KeyType': u'HASH', u'AttributeName': u'name'} + ]) + dynamodb_backend2.create_table("test_2", schema=[ + {u'KeyType': u'HASH', u'AttributeName': u'name'} + ]) + conn = boto.dynamodb2.connect_to_region( + 'us-east-1', + aws_access_key_id="ak", + aws_secret_access_key="sk") + + res = conn.list_tables(limit=1) + expected = {"TableNames": ["test_1"], "LastEvaluatedTableName": "test_1"} + res.should.equal(expected) + + res = conn.list_tables(limit=1, exclusive_start_table_name="test_1") + expected = {"TableNames": ["test_2"]} + res.should.equal(expected) + + 
+@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_describe_missing_table(): + conn = boto.dynamodb2.connect_to_region( + 'us-west-2', + aws_access_key_id="ak", + aws_secret_access_key="sk") + with assert_raises(JSONResponseError): + conn.describe_table('messages') + + +@requires_boto_gte("2.9") +@mock_dynamodb2 +def test_list_table_tags(): + name = 'TestTable' + conn = boto3.client('dynamodb', + region_name='us-west-2', + aws_access_key_id="ak", + aws_secret_access_key="sk") + conn.create_table(TableName=name, + KeySchema=[{'AttributeName':'id','KeyType':'HASH'}], + AttributeDefinitions=[{'AttributeName':'id','AttributeType':'S'}], + ProvisionedThroughput={'ReadCapacityUnits':5,'WriteCapacityUnits':5}) + table_description = conn.describe_table(TableName=name) + arn = table_description['Table']['TableArn'] + + # Tag table + tags = [{'Key': 'TestTag', 'Value': 'TestValue'}, {'Key': 'TestTag2', 'Value': 'TestValue2'}] + conn.tag_resource(ResourceArn=arn, Tags=tags) + + # Check tags + resp = conn.list_tags_of_resource(ResourceArn=arn) + assert resp["Tags"] == tags + + # Remove 1 tag + conn.untag_resource(ResourceArn=arn, TagKeys=['TestTag']) + + # Check tags + resp = conn.list_tags_of_resource(ResourceArn=arn) + assert resp["Tags"] == [{'Key': 'TestTag2', 'Value': 'TestValue2'}] + + +@requires_boto_gte("2.9") +@mock_dynamodb2 +def test_list_table_tags_empty(): + name = 'TestTable' + conn = boto3.client('dynamodb', + region_name='us-west-2', + aws_access_key_id="ak", + aws_secret_access_key="sk") + conn.create_table(TableName=name, + KeySchema=[{'AttributeName':'id','KeyType':'HASH'}], + AttributeDefinitions=[{'AttributeName':'id','AttributeType':'S'}], + ProvisionedThroughput={'ReadCapacityUnits':5,'WriteCapacityUnits':5}) + table_description = conn.describe_table(TableName=name) + arn = table_description['Table']['TableArn'] + tags = [{'Key':'TestTag', 'Value': 'TestValue'}] + # conn.tag_resource(ResourceArn=arn, + # Tags=tags) + resp = 
conn.list_tags_of_resource(ResourceArn=arn) + assert resp["Tags"] == [] + + +@requires_boto_gte("2.9") +@mock_dynamodb2 +def test_list_table_tags_paginated(): + name = 'TestTable' + conn = boto3.client('dynamodb', + region_name='us-west-2', + aws_access_key_id="ak", + aws_secret_access_key="sk") + conn.create_table(TableName=name, + KeySchema=[{'AttributeName':'id','KeyType':'HASH'}], + AttributeDefinitions=[{'AttributeName':'id','AttributeType':'S'}], + ProvisionedThroughput={'ReadCapacityUnits':5,'WriteCapacityUnits':5}) + table_description = conn.describe_table(TableName=name) + arn = table_description['Table']['TableArn'] + for i in range(11): + tags = [{'Key':'TestTag%d' % i, 'Value': 'TestValue'}] + conn.tag_resource(ResourceArn=arn, + Tags=tags) + resp = conn.list_tags_of_resource(ResourceArn=arn) + assert len(resp["Tags"]) == 10 + assert 'NextToken' in resp.keys() + resp2 = conn.list_tags_of_resource(ResourceArn=arn, + NextToken=resp['NextToken']) + assert len(resp2["Tags"]) == 1 + assert 'NextToken' not in resp2.keys() + + +@requires_boto_gte("2.9") +@mock_dynamodb2 +def test_list_not_found_table_tags(): + conn = boto3.client('dynamodb', + region_name='us-west-2', + aws_access_key_id="ak", + aws_secret_access_key="sk") + arn = 'DymmyArn' + try: + conn.list_tags_of_resource(ResourceArn=arn) + except ClientError as exception: + assert exception.response['Error']['Code'] == "ResourceNotFoundException" + + +@requires_boto_gte("2.9") +@mock_dynamodb2 +def test_item_add_empty_string_exception(): + name = 'TestTable' + conn = boto3.client('dynamodb', + region_name='us-west-2', + aws_access_key_id="ak", + aws_secret_access_key="sk") + conn.create_table(TableName=name, + KeySchema=[{'AttributeName':'forum_name','KeyType':'HASH'}], + AttributeDefinitions=[{'AttributeName':'forum_name','AttributeType':'S'}], + ProvisionedThroughput={'ReadCapacityUnits':5,'WriteCapacityUnits':5}) + + with assert_raises(ClientError) as ex: + conn.put_item( + TableName=name, + Item={ + 
'forum_name': { 'S': 'LOLCat Forum' }, + 'subject': { 'S': 'Check this out!' }, + 'Body': { 'S': 'http://url_to_lolcat.gif'}, + 'SentBy': { 'S': "" }, + 'ReceivedTime': { 'S': '12/9/2011 11:36:03 PM'}, + } + ) + + ex.exception.response['Error']['Code'].should.equal('ValidationException') + ex.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) + ex.exception.response['Error']['Message'].should.equal( + 'One or more parameter values were invalid: An AttributeValue may not contain an empty string' + ) + + +@requires_boto_gte("2.9") +@mock_dynamodb2 +def test_update_item_with_empty_string_exception(): + name = 'TestTable' + conn = boto3.client('dynamodb', + region_name='us-west-2', + aws_access_key_id="ak", + aws_secret_access_key="sk") + conn.create_table(TableName=name, + KeySchema=[{'AttributeName':'forum_name','KeyType':'HASH'}], + AttributeDefinitions=[{'AttributeName':'forum_name','AttributeType':'S'}], + ProvisionedThroughput={'ReadCapacityUnits':5,'WriteCapacityUnits':5}) + + conn.put_item( + TableName=name, + Item={ + 'forum_name': { 'S': 'LOLCat Forum' }, + 'subject': { 'S': 'Check this out!' 
}, + 'Body': { 'S': 'http://url_to_lolcat.gif'}, + 'SentBy': { 'S': "test" }, + 'ReceivedTime': { 'S': '12/9/2011 11:36:03 PM'}, + } + ) + + with assert_raises(ClientError) as ex: + conn.update_item( + TableName=name, + Key={ + 'forum_name': { 'S': 'LOLCat Forum'}, + }, + UpdateExpression='set Body=:Body', + ExpressionAttributeValues={ + ':Body': {'S': ''} + }) + + ex.exception.response['Error']['Code'].should.equal('ValidationException') + ex.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) + ex.exception.response['Error']['Message'].should.equal( + 'One or more parameter values were invalid: An AttributeValue may not contain an empty string' + ) + + +@requires_boto_gte("2.9") +@mock_dynamodb2 +def test_query_invalid_table(): + conn = boto3.client('dynamodb', + region_name='us-west-2', + aws_access_key_id="ak", + aws_secret_access_key="sk") + try: + conn.query(TableName='invalid_table', KeyConditionExpression='index1 = :partitionkeyval', ExpressionAttributeValues={':partitionkeyval': {'S':'test'}}) + except ClientError as exception: + assert exception.response['Error']['Code'] == "ResourceNotFoundException" + + +@requires_boto_gte("2.9") +@mock_dynamodb2 +def test_scan_returns_consumed_capacity(): + name = 'TestTable' + conn = boto3.client('dynamodb', + region_name='us-west-2', + aws_access_key_id="ak", + aws_secret_access_key="sk") + + conn.create_table(TableName=name, + KeySchema=[{'AttributeName':'forum_name','KeyType':'HASH'}], + AttributeDefinitions=[{'AttributeName':'forum_name','AttributeType':'S'}], + ProvisionedThroughput={'ReadCapacityUnits':5,'WriteCapacityUnits':5}) + + conn.put_item( + TableName=name, + Item={ + 'forum_name': { 'S': 'LOLCat Forum' }, + 'subject': { 'S': 'Check this out!' 
}, + 'Body': { 'S': 'http://url_to_lolcat.gif'}, + 'SentBy': { 'S': "test" }, + 'ReceivedTime': { 'S': '12/9/2011 11:36:03 PM'}, + } + ) + + response = conn.scan( + TableName=name, + ) + + assert 'ConsumedCapacity' in response + assert 'CapacityUnits' in response['ConsumedCapacity'] + assert response['ConsumedCapacity']['TableName'] == name + + +@requires_boto_gte("2.9") +@mock_dynamodb2 +def test_put_item_with_special_chars(): + name = 'TestTable' + conn = boto3.client('dynamodb', + region_name='us-west-2', + aws_access_key_id="ak", + aws_secret_access_key="sk") + + conn.create_table(TableName=name, + KeySchema=[{'AttributeName':'forum_name','KeyType':'HASH'}], + AttributeDefinitions=[{'AttributeName':'forum_name','AttributeType':'S'}], + ProvisionedThroughput={'ReadCapacityUnits':5,'WriteCapacityUnits':5}) + + conn.put_item( + TableName=name, + Item={ + 'forum_name': { 'S': 'LOLCat Forum' }, + 'subject': { 'S': 'Check this out!' }, + 'Body': { 'S': 'http://url_to_lolcat.gif'}, + 'SentBy': { 'S': "test" }, + 'ReceivedTime': { 'S': '12/9/2011 11:36:03 PM'}, + '"': {"S": "foo"}, + } + ) + + +@requires_boto_gte("2.9") +@mock_dynamodb2 +def test_query_returns_consumed_capacity(): + dynamodb = boto3.resource('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. 
+ table = dynamodb.create_table( + TableName='users', + KeySchema=[ + { + 'AttributeName': 'forum_name', + 'KeyType': 'HASH' + }, + { + 'AttributeName': 'subject', + 'KeyType': 'RANGE' + }, + ], + AttributeDefinitions=[ + { + 'AttributeName': 'forum_name', + 'AttributeType': 'S' + }, + { + 'AttributeName': 'subject', + 'AttributeType': 'S' + }, + ], + ProvisionedThroughput={ + 'ReadCapacityUnits': 5, + 'WriteCapacityUnits': 5 + } + ) + table = dynamodb.Table('users') + + table.put_item(Item={ + 'forum_name': 'the-key', + 'subject': '123', + 'body': 'some test message' + }) + + results = table.query( + KeyConditionExpression=Key('forum_name').eq( + 'the-key') + ) + + assert 'ConsumedCapacity' in results + assert 'CapacityUnits' in results['ConsumedCapacity'] + assert results['ConsumedCapacity']['CapacityUnits'] == 1 + + +@mock_dynamodb2 +def test_basic_projection_expressions(): + dynamodb = boto3.resource('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. + table = dynamodb.create_table( + TableName='users', + KeySchema=[ + { + 'AttributeName': 'forum_name', + 'KeyType': 'HASH' + }, + { + 'AttributeName': 'subject', + 'KeyType': 'RANGE' + }, + ], + AttributeDefinitions=[ + { + 'AttributeName': 'forum_name', + 'AttributeType': 'S' + }, + { + 'AttributeName': 'subject', + 'AttributeType': 'S' + }, + ], + ProvisionedThroughput={ + 'ReadCapacityUnits': 5, + 'WriteCapacityUnits': 5 + } + ) + table = dynamodb.Table('users') + + table.put_item(Item={ + 'forum_name': 'the-key', + 'subject': '123', + 'body': 'some test message' + }) + + table.put_item(Item={ + 'forum_name': 'not-the-key', + 'subject': '123', + 'body': 'some other test message' + }) + # Test a query returning all items + results = table.query( + KeyConditionExpression=Key('forum_name').eq( + 'the-key'), + ProjectionExpression='body, subject' + ) + + assert 'body' in results['Items'][0] + assert results['Items'][0]['body'] == 'some test message' + assert 'subject' in results['Items'][0] + + 
table.put_item(Item={ + 'forum_name': 'the-key', + 'subject': '1234', + 'body': 'yet another test message' + }) + + results = table.query( + KeyConditionExpression=Key('forum_name').eq( + 'the-key'), + ProjectionExpression='body' + ) + + assert 'body' in results['Items'][0] + assert 'subject' not in results['Items'][0] + assert results['Items'][0]['body'] == 'some test message' + assert 'body' in results['Items'][1] + assert 'subject' not in results['Items'][1] + assert results['Items'][1]['body'] == 'yet another test message' + + # The projection expression should not remove data from storage + results = table.query( + KeyConditionExpression=Key('forum_name').eq( + 'the-key'), + ) + assert 'subject' in results['Items'][0] + assert 'body' in results['Items'][1] + assert 'forum_name' in results['Items'][1] + + +@mock_dynamodb2 +def test_basic_projection_expressions_with_attr_expression_names(): + dynamodb = boto3.resource('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. 
+ table = dynamodb.create_table( + TableName='users', + KeySchema=[ + { + 'AttributeName': 'forum_name', + 'KeyType': 'HASH' + }, + { + 'AttributeName': 'subject', + 'KeyType': 'RANGE' + }, + ], + AttributeDefinitions=[ + { + 'AttributeName': 'forum_name', + 'AttributeType': 'S' + }, + { + 'AttributeName': 'subject', + 'AttributeType': 'S' + }, + ], + ProvisionedThroughput={ + 'ReadCapacityUnits': 5, + 'WriteCapacityUnits': 5 + } + ) + table = dynamodb.Table('users') + + table.put_item(Item={ + 'forum_name': 'the-key', + 'subject': '123', + 'body': 'some test message', + 'attachment': 'something' + }) + + table.put_item(Item={ + 'forum_name': 'not-the-key', + 'subject': '123', + 'body': 'some other test message', + 'attachment': 'something' + }) + # Test a query returning all items + + results = table.query( + KeyConditionExpression=Key('forum_name').eq( + 'the-key'), + ProjectionExpression='#rl, #rt, subject', + ExpressionAttributeNames={ + '#rl': 'body', + '#rt': 'attachment' + }, + ) + + assert 'body' in results['Items'][0] + assert results['Items'][0]['body'] == 'some test message' + assert 'subject' in results['Items'][0] + assert results['Items'][0]['subject'] == '123' + assert 'attachment' in results['Items'][0] + assert results['Items'][0]['attachment'] == 'something' + + +@mock_dynamodb2 +def test_put_item_returns_consumed_capacity(): + dynamodb = boto3.resource('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. 
+ table = dynamodb.create_table( + TableName='users', + KeySchema=[ + { + 'AttributeName': 'forum_name', + 'KeyType': 'HASH' + }, + { + 'AttributeName': 'subject', + 'KeyType': 'RANGE' + }, + ], + AttributeDefinitions=[ + { + 'AttributeName': 'forum_name', + 'AttributeType': 'S' + }, + { + 'AttributeName': 'subject', + 'AttributeType': 'S' + }, + ], + ProvisionedThroughput={ + 'ReadCapacityUnits': 5, + 'WriteCapacityUnits': 5 + } + ) + table = dynamodb.Table('users') + + response = table.put_item(Item={ + 'forum_name': 'the-key', + 'subject': '123', + 'body': 'some test message', + }) + + assert 'ConsumedCapacity' in response + + +@mock_dynamodb2 +def test_update_item_returns_consumed_capacity(): + dynamodb = boto3.resource('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. + table = dynamodb.create_table( + TableName='users', + KeySchema=[ + { + 'AttributeName': 'forum_name', + 'KeyType': 'HASH' + }, + { + 'AttributeName': 'subject', + 'KeyType': 'RANGE' + }, + ], + AttributeDefinitions=[ + { + 'AttributeName': 'forum_name', + 'AttributeType': 'S' + }, + { + 'AttributeName': 'subject', + 'AttributeType': 'S' + }, + ], + ProvisionedThroughput={ + 'ReadCapacityUnits': 5, + 'WriteCapacityUnits': 5 + } + ) + table = dynamodb.Table('users') + + table.put_item(Item={ + 'forum_name': 'the-key', + 'subject': '123', + 'body': 'some test message', + }) + + response = table.update_item(Key={ + 'forum_name': 'the-key', + 'subject': '123' + }, + UpdateExpression='set body=:tb', + ExpressionAttributeValues={ + ':tb': 'a new message' + }) + + assert 'ConsumedCapacity' in response + assert 'CapacityUnits' in response['ConsumedCapacity'] + assert 'TableName' in response['ConsumedCapacity'] + + +@mock_dynamodb2 +def test_get_item_returns_consumed_capacity(): + dynamodb = boto3.resource('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. 
+ table = dynamodb.create_table( + TableName='users', + KeySchema=[ + { + 'AttributeName': 'forum_name', + 'KeyType': 'HASH' + }, + { + 'AttributeName': 'subject', + 'KeyType': 'RANGE' + }, + ], + AttributeDefinitions=[ + { + 'AttributeName': 'forum_name', + 'AttributeType': 'S' + }, + { + 'AttributeName': 'subject', + 'AttributeType': 'S' + }, + ], + ProvisionedThroughput={ + 'ReadCapacityUnits': 5, + 'WriteCapacityUnits': 5 + } + ) + table = dynamodb.Table('users') + + table.put_item(Item={ + 'forum_name': 'the-key', + 'subject': '123', + 'body': 'some test message', + }) + + response = table.get_item(Key={ + 'forum_name': 'the-key', + 'subject': '123' + }) + + assert 'ConsumedCapacity' in response + assert 'CapacityUnits' in response['ConsumedCapacity'] + assert 'TableName' in response['ConsumedCapacity'] + + +def test_filter_expression(): + row1 = moto.dynamodb2.models.Item(None, None, None, None, {'Id': {'N': '8'}, 'Subs': {'N': '5'}, 'Desc': {'S': 'Some description'}, 'KV': {'SS': ['test1', 'test2']}}) + row2 = moto.dynamodb2.models.Item(None, None, None, None, {'Id': {'N': '8'}, 'Subs': {'N': '10'}, 'Desc': {'S': 'A description'}, 'KV': {'SS': ['test3', 'test4']}}) + + # NOT test 1 + filter_expr = moto.dynamodb2.comparisons.get_filter_expression('NOT attribute_not_exists(Id)', {}, {}) + filter_expr.expr(row1).should.be(True) + + # NOT test 2 + filter_expr = moto.dynamodb2.comparisons.get_filter_expression('NOT (Id = :v0)', {}, {':v0': {'N': 8}}) + filter_expr.expr(row1).should.be(False) # Id = 8 so should be false + + # AND test + filter_expr = moto.dynamodb2.comparisons.get_filter_expression('Id > :v0 AND Subs < :v1', {}, {':v0': {'N': 5}, ':v1': {'N': 7}}) + filter_expr.expr(row1).should.be(True) + filter_expr.expr(row2).should.be(False) + + # OR test + filter_expr = moto.dynamodb2.comparisons.get_filter_expression('Id = :v0 OR Id=:v1', {}, {':v0': {'N': 5}, ':v1': {'N': 8}}) + filter_expr.expr(row1).should.be(True) + + # BETWEEN test + filter_expr = 
moto.dynamodb2.comparisons.get_filter_expression('Id BETWEEN :v0 AND :v1', {}, {':v0': {'N': 5}, ':v1': {'N': 10}}) + filter_expr.expr(row1).should.be(True) + + # PAREN test + filter_expr = moto.dynamodb2.comparisons.get_filter_expression('Id = :v0 AND (Subs = :v0 OR Subs = :v1)', {}, {':v0': {'N': 8}, ':v1': {'N': 5}}) + filter_expr.expr(row1).should.be(True) + + # IN test + filter_expr = moto.dynamodb2.comparisons.get_filter_expression('Id IN :v0', {}, {':v0': {'NS': [7, 8, 9]}}) + filter_expr.expr(row1).should.be(True) + + # attribute function tests (with extra spaces) + filter_expr = moto.dynamodb2.comparisons.get_filter_expression('attribute_exists(Id) AND attribute_not_exists (User)', {}, {}) + filter_expr.expr(row1).should.be(True) + + filter_expr = moto.dynamodb2.comparisons.get_filter_expression('attribute_type(Id, N)', {}, {}) + filter_expr.expr(row1).should.be(True) + + # beginswith function test + filter_expr = moto.dynamodb2.comparisons.get_filter_expression('begins_with(Desc, Some)', {}, {}) + filter_expr.expr(row1).should.be(True) + filter_expr.expr(row2).should.be(False) + + # contains function test + filter_expr = moto.dynamodb2.comparisons.get_filter_expression('contains(KV, test1)', {}, {}) + filter_expr.expr(row1).should.be(True) + filter_expr.expr(row2).should.be(False) + + # size function test + filter_expr = moto.dynamodb2.comparisons.get_filter_expression('size(Desc) > size(KV)', {}, {}) + filter_expr.expr(row1).should.be(True) + + # Expression from @batkuip + filter_expr = moto.dynamodb2.comparisons.get_filter_expression( + '(#n0 < :v0 AND attribute_not_exists(#n1))', + {'#n0': 'Subs', '#n1': 'fanout_ts'}, + {':v0': {'N': '7'}} + ) + filter_expr.expr(row1).should.be(True) + # Expression from to check contains on string value + filter_expr = moto.dynamodb2.comparisons.get_filter_expression( + 'contains(#n0, :v0)', + {'#n0': 'Desc'}, + {':v0': {'S': 'Some'}} + ) + filter_expr.expr(row1).should.be(True) + 
filter_expr.expr(row2).should.be(False) + + +@mock_dynamodb2 +def test_query_filter(): + client = boto3.client('dynamodb', region_name='us-east-1') + dynamodb = boto3.resource('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. + client.create_table( + TableName='test1', + AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], + KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], + ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} + ) + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 'app': {'S': 'app1'} + } + ) + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 'app': {'S': 'app2'} + } + ) + + table = dynamodb.Table('test1') + response = table.query( + KeyConditionExpression=Key('client').eq('client1') + ) + assert response['Count'] == 2 + + response = table.query( + KeyConditionExpression=Key('client').eq('client1'), + FilterExpression=Attr('app').eq('app2') + ) + assert response['Count'] == 1 + assert response['Items'][0]['app'] == 'app2' + response = table.query( + KeyConditionExpression=Key('client').eq('client1'), + FilterExpression=Attr('app').contains('app') + ) + assert response['Count'] == 2 + + +@mock_dynamodb2 +def test_scan_filter(): + client = boto3.client('dynamodb', region_name='us-east-1') + dynamodb = boto3.resource('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. 
+ client.create_table( + TableName='test1', + AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], + KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], + ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} + ) + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 'app': {'S': 'app1'} + } + ) + + table = dynamodb.Table('test1') + response = table.scan( + FilterExpression=Attr('app').eq('app2') + ) + assert response['Count'] == 0 + + response = table.scan( + FilterExpression=Attr('app').eq('app1') + ) + assert response['Count'] == 1 + + response = table.scan( + FilterExpression=Attr('app').ne('app2') + ) + assert response['Count'] == 1 + + response = table.scan( + FilterExpression=Attr('app').ne('app1') + ) + assert response['Count'] == 0 + + +@mock_dynamodb2 +def test_scan_filter2(): + client = boto3.client('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. + client.create_table( + TableName='test1', + AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'N'}], + KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], + ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} + ) + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 'app': {'N': '1'} + } + ) + + response = client.scan( + TableName='test1', + Select='ALL_ATTRIBUTES', + FilterExpression='#tb >= :dt', + ExpressionAttributeNames={"#tb": "app"}, + ExpressionAttributeValues={":dt": {"N": str(1)}} + ) + assert response['Count'] == 1 + + +@mock_dynamodb2 +def test_scan_filter3(): + client = boto3.client('dynamodb', region_name='us-east-1') + dynamodb = boto3.resource('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. 
+ client.create_table( + TableName='test1', + AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'N'}], + KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], + ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} + ) + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 'app': {'N': '1'}, + 'active': {'BOOL': True} + } + ) + + table = dynamodb.Table('test1') + response = table.scan( + FilterExpression=Attr('active').eq(True) + ) + assert response['Count'] == 1 + + response = table.scan( + FilterExpression=Attr('active').ne(True) + ) + assert response['Count'] == 0 + + response = table.scan( + FilterExpression=Attr('active').ne(False) + ) + assert response['Count'] == 1 + + response = table.scan( + FilterExpression=Attr('app').ne(1) + ) + assert response['Count'] == 0 + + response = table.scan( + FilterExpression=Attr('app').ne(2) + ) + assert response['Count'] == 1 + + +@mock_dynamodb2 +def test_scan_filter4(): + client = boto3.client('dynamodb', region_name='us-east-1') + dynamodb = boto3.resource('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. 
+    client.create_table(
+        TableName='test1',
+        AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'N'}],
+        KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}],
+        ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123}
+    )
+
+    table = dynamodb.Table('test1')
+    response = table.scan(
+        FilterExpression=Attr('epoch_ts').lt(7) & Attr('fanout_ts').not_exists()
+    )
+    # Just testing
+    assert response['Count'] == 0
+
+
+@mock_dynamodb2
+def test_bad_scan_filter():
+    client = boto3.client('dynamodb', region_name='us-east-1')
+    dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
+
+    # Create the DynamoDB table.
+    client.create_table(
+        TableName='test1',
+        AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}],
+        KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}],
+        ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123}
+    )
+    table = dynamodb.Table('test1')
+
+    # Bad expression
+    try:
+        table.scan(
+            FilterExpression='client test'
+        )
+    except ClientError as err:
+        err.response['Error']['Code'].should.equal('ValidationError')
+    else:
+        raise RuntimeError('Should have raised ValidationError')
+
+
+@mock_dynamodb2
+def test_duplicate_create():
+    client = boto3.client('dynamodb', region_name='us-east-1')
+
+    # Create the DynamoDB table.
+    client.create_table(
+        TableName='test1',
+        AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}],
+        KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}],
+        ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123}
+    )
+
+    try:
+        client.create_table(
+            TableName='test1',
+            AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}],
+            KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}],
+            ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123}
+        )
+    except ClientError as err:
+        err.response['Error']['Code'].should.equal('ResourceInUseException')
+    else:
+        raise RuntimeError('Should have raised ResourceInUseException')
+
+
+@mock_dynamodb2
+def test_delete_table():
+    client = boto3.client('dynamodb', region_name='us-east-1')
+
+    # Create the DynamoDB table.
+    client.create_table(
+        TableName='test1',
+        AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}],
+        KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}],
+        ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123}
+    )
+
+    client.delete_table(TableName='test1')
+
+    resp = client.list_tables()
+    len(resp['TableNames']).should.equal(0)
+
+    try:
+        client.delete_table(TableName='test1')
+    except ClientError as err:
+        err.response['Error']['Code'].should.equal('ResourceNotFoundException')
+    else:
+        raise RuntimeError('Should have raised ResourceNotFoundException')
+
+
+@mock_dynamodb2
+def test_delete_item():
+    client = boto3.client('dynamodb', region_name='us-east-1')
+    dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
+
+    # Create the DynamoDB table.
+ client.create_table( + TableName='test1', + AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], + KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], + ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} + ) + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 'app': {'S': 'app1'} + } + ) + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 'app': {'S': 'app2'} + } + ) + + table = dynamodb.Table('test1') + response = table.scan() + assert response['Count'] == 2 + + # Test ReturnValues validation + with assert_raises(ClientError) as ex: + table.delete_item(Key={'client': 'client1', 'app': 'app1'}, + ReturnValues='ALL_NEW') + + # Test deletion and returning old value + response = table.delete_item(Key={'client': 'client1', 'app': 'app1'}, ReturnValues='ALL_OLD') + response['Attributes'].should.contain('client') + response['Attributes'].should.contain('app') + + response = table.scan() + assert response['Count'] == 1 + + # Test deletion returning nothing + response = table.delete_item(Key={'client': 'client1', 'app': 'app2'}) + len(response['Attributes']).should.equal(0) + + response = table.scan() + assert response['Count'] == 0 + + +@mock_dynamodb2 +def test_describe_limits(): + client = boto3.client('dynamodb', region_name='eu-central-1') + resp = client.describe_limits() + + resp['AccountMaxReadCapacityUnits'].should.equal(20000) + resp['AccountMaxWriteCapacityUnits'].should.equal(20000) + resp['TableMaxWriteCapacityUnits'].should.equal(10000) + resp['TableMaxReadCapacityUnits'].should.equal(10000) + + +@mock_dynamodb2 +def test_set_ttl(): + client = boto3.client('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. 
+ client.create_table( + TableName='test1', + AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], + KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], + ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} + ) + + client.update_time_to_live( + TableName='test1', + TimeToLiveSpecification={ + 'Enabled': True, + 'AttributeName': 'expire' + } + ) + + resp = client.describe_time_to_live(TableName='test1') + resp['TimeToLiveDescription']['TimeToLiveStatus'].should.equal('ENABLED') + resp['TimeToLiveDescription']['AttributeName'].should.equal('expire') + + client.update_time_to_live( + TableName='test1', + TimeToLiveSpecification={ + 'Enabled': False, + 'AttributeName': 'expire' + } + ) + + resp = client.describe_time_to_live(TableName='test1') + resp['TimeToLiveDescription']['TimeToLiveStatus'].should.equal('DISABLED') + + +# https://github.com/spulec/moto/issues/1043 +@mock_dynamodb2 +def test_query_missing_expr_names(): + client = boto3.client('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. 
+ client.create_table( + TableName='test1', + AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], + KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], + ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} + ) + client.put_item(TableName='test1', Item={'client': {'S': 'test1'}, 'app': {'S': 'test1'}}) + client.put_item(TableName='test1', Item={'client': {'S': 'test2'}, 'app': {'S': 'test2'}}) + + resp = client.query(TableName='test1', KeyConditionExpression='client=:client', + ExpressionAttributeValues={':client': {'S': 'test1'}}) + + resp['Count'].should.equal(1) + resp['Items'][0]['client']['S'].should.equal('test1') + + resp = client.query(TableName='test1', KeyConditionExpression=':name=test2', + ExpressionAttributeNames={':name': 'client'}) + + resp['Count'].should.equal(1) + resp['Items'][0]['client']['S'].should.equal('test2') + + +# https://github.com/spulec/moto/issues/1342 +@mock_dynamodb2 +def test_update_item_on_map(): + dynamodb = boto3.resource('dynamodb', region_name='us-east-1') + client = boto3.client('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. 
+ dynamodb.create_table( + TableName='users', + KeySchema=[ + { + 'AttributeName': 'forum_name', + 'KeyType': 'HASH' + }, + { + 'AttributeName': 'subject', + 'KeyType': 'RANGE' + }, + ], + AttributeDefinitions=[ + { + 'AttributeName': 'forum_name', + 'AttributeType': 'S' + }, + { + 'AttributeName': 'subject', + 'AttributeType': 'S' + }, + ], + ProvisionedThroughput={ + 'ReadCapacityUnits': 5, + 'WriteCapacityUnits': 5 + } + ) + table = dynamodb.Table('users') + + table.put_item(Item={ + 'forum_name': 'the-key', + 'subject': '123', + 'body': {'nested': {'data': 'test'}}, + }) + + resp = table.scan() + resp['Items'][0]['body'].should.equal({'nested': {'data': 'test'}}) + + # Nonexistent nested attributes are supported for existing top-level attributes. + table.update_item(Key={ + 'forum_name': 'the-key', + 'subject': '123' + }, + UpdateExpression='SET body.#nested.#data = :tb, body.nested.#nonexistentnested.#data = :tb2', + ExpressionAttributeNames={ + '#nested': 'nested', + '#nonexistentnested': 'nonexistentnested', + '#data': 'data' + }, + ExpressionAttributeValues={ + ':tb': 'new_value', + ':tb2': 'other_value' + }) + + resp = table.scan() + resp['Items'][0]['body'].should.equal({ + 'nested': { + 'data': 'new_value', + 'nonexistentnested': {'data': 'other_value'} + } + }) + + # Test nested value for a nonexistent attribute. + with assert_raises(client.exceptions.ConditionalCheckFailedException): + table.update_item(Key={ + 'forum_name': 'the-key', + 'subject': '123' + }, + UpdateExpression='SET nonexistent.#nested = :tb', + ExpressionAttributeNames={ + '#nested': 'nested' + }, + ExpressionAttributeValues={ + ':tb': 'new_value' + }) + + + +# https://github.com/spulec/moto/issues/1358 +@mock_dynamodb2 +def test_update_if_not_exists(): + dynamodb = boto3.resource('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. 
+ dynamodb.create_table( + TableName='users', + KeySchema=[ + { + 'AttributeName': 'forum_name', + 'KeyType': 'HASH' + }, + { + 'AttributeName': 'subject', + 'KeyType': 'RANGE' + }, + ], + AttributeDefinitions=[ + { + 'AttributeName': 'forum_name', + 'AttributeType': 'S' + }, + { + 'AttributeName': 'subject', + 'AttributeType': 'S' + }, + ], + ProvisionedThroughput={ + 'ReadCapacityUnits': 5, + 'WriteCapacityUnits': 5 + } + ) + table = dynamodb.Table('users') + + table.put_item(Item={ + 'forum_name': 'the-key', + 'subject': '123' + }) + + table.update_item(Key={ + 'forum_name': 'the-key', + 'subject': '123' + }, + # if_not_exists without space + UpdateExpression='SET created_at=if_not_exists(created_at,:created_at)', + ExpressionAttributeValues={ + ':created_at': 123 + } + ) + + resp = table.scan() + assert resp['Items'][0]['created_at'] == 123 + + table.update_item(Key={ + 'forum_name': 'the-key', + 'subject': '123' + }, + # if_not_exists with space + UpdateExpression='SET created_at = if_not_exists (created_at, :created_at)', + ExpressionAttributeValues={ + ':created_at': 456 + } + ) + + resp = table.scan() + # Still the original value + assert resp['Items'][0]['created_at'] == 123 + + +# https://github.com/spulec/moto/issues/1937 +@mock_dynamodb2 +def test_update_return_attributes(): + dynamodb = boto3.client('dynamodb', region_name='us-east-1') + + dynamodb.create_table( + TableName='moto-test', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1} + ) + + def update(col, to, rv): + return dynamodb.update_item( + TableName='moto-test', + Key={'id': {'S': 'foo'}}, + AttributeUpdates={col: {'Value': {'S': to}, 'Action': 'PUT'}}, + ReturnValues=rv + ) + + r = update('col1', 'val1', 'ALL_NEW') + assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}} + + r = update('col1', 'val2', 'ALL_OLD') + assert 
r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}} + + r = update('col2', 'val3', 'UPDATED_NEW') + assert r['Attributes'] == {'col2': {'S': 'val3'}} + + r = update('col2', 'val4', 'UPDATED_OLD') + assert r['Attributes'] == {'col2': {'S': 'val3'}} + + r = update('col1', 'val5', 'NONE') + assert r['Attributes'] == {} + + with assert_raises(ClientError) as ex: + r = update('col1', 'val6', 'WRONG') + + +@mock_dynamodb2 +def test_put_return_attributes(): + dynamodb = boto3.client('dynamodb', region_name='us-east-1') + + dynamodb.create_table( + TableName='moto-test', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1} + ) + + r = dynamodb.put_item( + TableName='moto-test', + Item={'id': {'S': 'foo'}, 'col1': {'S': 'val1'}}, + ReturnValues='NONE' + ) + assert 'Attributes' not in r + + r = dynamodb.put_item( + TableName='moto-test', + Item={'id': {'S': 'foo'}, 'col1': {'S': 'val2'}}, + ReturnValues='ALL_OLD' + ) + assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}} + + with assert_raises(ClientError) as ex: + dynamodb.put_item( + TableName='moto-test', + Item={'id': {'S': 'foo'}, 'col1': {'S': 'val3'}}, + ReturnValues='ALL_NEW' + ) + ex.exception.response['Error']['Code'].should.equal('ValidationException') + ex.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) + ex.exception.response['Error']['Message'].should.equal('Return values set to invalid value') + + +@mock_dynamodb2 +def test_query_global_secondary_index_when_created_via_update_table_resource(): + dynamodb = boto3.resource('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. 
+ dynamodb.create_table( + TableName='users', + KeySchema=[ + { + 'AttributeName': 'user_id', + 'KeyType': 'HASH' + }, + ], + AttributeDefinitions=[ + { + 'AttributeName': 'user_id', + 'AttributeType': 'N', + }, + { + 'AttributeName': 'forum_name', + 'AttributeType': 'S' + }, + { + 'AttributeName': 'subject', + 'AttributeType': 'S' + }, + ], + ProvisionedThroughput={ + 'ReadCapacityUnits': 5, + 'WriteCapacityUnits': 5 + }, + ) + table = dynamodb.Table('users') + table.update( + AttributeDefinitions=[ + { + 'AttributeName': 'forum_name', + 'AttributeType': 'S' + }, + ], + GlobalSecondaryIndexUpdates=[ + {'Create': + { + 'IndexName': 'forum_name_index', + 'KeySchema': [ + { + 'AttributeName': 'forum_name', + 'KeyType': 'HASH', + }, + ], + 'Projection': { + 'ProjectionType': 'ALL', + }, + 'ProvisionedThroughput': { + 'ReadCapacityUnits': 5, + 'WriteCapacityUnits': 5 + }, + } + } + ] + ) + + next_user_id = 1 + for my_forum_name in ['cats', 'dogs']: + for my_subject in ['my pet is the cutest', 'wow look at what my pet did', "don't you love my pet?"]: + table.put_item(Item={'user_id': next_user_id, 'forum_name': my_forum_name, 'subject': my_subject}) + next_user_id += 1 + + # get all the cat users + forum_only_query_response = table.query( + IndexName='forum_name_index', + Select='ALL_ATTRIBUTES', + KeyConditionExpression=Key('forum_name').eq('cats'), + ) + forum_only_items = forum_only_query_response['Items'] + assert len(forum_only_items) == 3 + for item in forum_only_items: + assert item['forum_name'] == 'cats' + + # query all cat users with a particular subject + forum_and_subject_query_results = table.query( + IndexName='forum_name_index', + Select='ALL_ATTRIBUTES', + KeyConditionExpression=Key('forum_name').eq('cats'), + FilterExpression=Attr('subject').eq('my pet is the cutest'), + ) + forum_and_subject_items = forum_and_subject_query_results['Items'] + assert len(forum_and_subject_items) == 1 + assert forum_and_subject_items[0] == {'user_id': Decimal('1'), 
'forum_name': 'cats', + 'subject': 'my pet is the cutest'} + + +@mock_dynamodb2 +def test_dynamodb_streams_1(): + conn = boto3.client('dynamodb', region_name='us-east-1') + + resp = conn.create_table( + TableName='test-streams', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}, + StreamSpecification={ + 'StreamEnabled': True, + 'StreamViewType': 'NEW_AND_OLD_IMAGES' + } + ) + + assert 'StreamSpecification' in resp['TableDescription'] + assert resp['TableDescription']['StreamSpecification'] == { + 'StreamEnabled': True, + 'StreamViewType': 'NEW_AND_OLD_IMAGES' + } + assert 'LatestStreamLabel' in resp['TableDescription'] + assert 'LatestStreamArn' in resp['TableDescription'] + + resp = conn.delete_table(TableName='test-streams') + + assert 'StreamSpecification' in resp['TableDescription'] + + +@mock_dynamodb2 +def test_dynamodb_streams_2(): + conn = boto3.client('dynamodb', region_name='us-east-1') + + resp = conn.create_table( + TableName='test-stream-update', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}, + ) + + assert 'StreamSpecification' not in resp['TableDescription'] + + resp = conn.update_table( + TableName='test-stream-update', + StreamSpecification={ + 'StreamEnabled': True, + 'StreamViewType': 'NEW_IMAGE' + } + ) + + assert 'StreamSpecification' in resp['TableDescription'] + assert resp['TableDescription']['StreamSpecification'] == { + 'StreamEnabled': True, + 'StreamViewType': 'NEW_IMAGE' + } + assert 'LatestStreamLabel' in resp['TableDescription'] + assert 'LatestStreamArn' in resp['TableDescription'] + +@mock_dynamodb2 +def test_condition_expressions(): + client = boto3.client('dynamodb', region_name='us-east-1') + dynamodb = 
boto3.resource('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. + client.create_table( + TableName='test1', + AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], + KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], + ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} + ) + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 'app': {'S': 'app1'}, + 'match': {'S': 'match'}, + 'existing': {'S': 'existing'}, + } + ) + + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 'app': {'S': 'app1'}, + 'match': {'S': 'match'}, + 'existing': {'S': 'existing'}, + }, + ConditionExpression='attribute_exists(#existing) AND attribute_not_exists(#nonexistent) AND #match = :match', + ExpressionAttributeNames={ + '#existing': 'existing', + '#nonexistent': 'nope', + '#match': 'match', + }, + ExpressionAttributeValues={ + ':match': {'S': 'match'} + } + ) + + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 'app': {'S': 'app1'}, + 'match': {'S': 'match'}, + 'existing': {'S': 'existing'}, + }, + ConditionExpression='NOT(attribute_exists(#nonexistent1) AND attribute_exists(#nonexistent2))', + ExpressionAttributeNames={ + '#nonexistent1': 'nope', + '#nonexistent2': 'nope2' + } + ) + + with assert_raises(client.exceptions.ConditionalCheckFailedException): + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 'app': {'S': 'app1'}, + 'match': {'S': 'match'}, + 'existing': {'S': 'existing'}, + }, + ConditionExpression='attribute_exists(#nonexistent1) AND attribute_exists(#nonexistent2)', + ExpressionAttributeNames={ + '#nonexistent1': 'nope', + '#nonexistent2': 'nope2' + } + ) + + with assert_raises(client.exceptions.ConditionalCheckFailedException): + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 
'client1'}, + 'app': {'S': 'app1'}, + 'match': {'S': 'match'}, + 'existing': {'S': 'existing'}, + }, + ConditionExpression='NOT(attribute_not_exists(#nonexistent1) AND attribute_not_exists(#nonexistent2))', + ExpressionAttributeNames={ + '#nonexistent1': 'nope', + '#nonexistent2': 'nope2' + } + ) + + with assert_raises(client.exceptions.ConditionalCheckFailedException): + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 'app': {'S': 'app1'}, + 'match': {'S': 'match'}, + 'existing': {'S': 'existing'}, + }, + ConditionExpression='attribute_exists(#existing) AND attribute_not_exists(#nonexistent) AND #match = :match', + ExpressionAttributeNames={ + '#existing': 'existing', + '#nonexistent': 'nope', + '#match': 'match', + }, + ExpressionAttributeValues={ + ':match': {'S': 'match2'} + } + ) + + +@mock_dynamodb2 +def test_query_gsi_with_range_key(): + dynamodb = boto3.client('dynamodb', region_name='us-east-1') + dynamodb.create_table( + TableName='test', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[ + {'AttributeName': 'id', 'AttributeType': 'S'}, + {'AttributeName': 'gsi_hash_key', 'AttributeType': 'S'}, + {'AttributeName': 'gsi_range_key', 'AttributeType': 'S'} + ], + ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}, + GlobalSecondaryIndexes=[ + { + 'IndexName': 'test_gsi', + 'KeySchema': [ + { + 'AttributeName': 'gsi_hash_key', + 'KeyType': 'HASH' + }, + { + 'AttributeName': 'gsi_range_key', + 'KeyType': 'RANGE' + }, + ], + 'Projection': { + 'ProjectionType': 'ALL', + }, + 'ProvisionedThroughput': { + 'ReadCapacityUnits': 1, + 'WriteCapacityUnits': 1 + } + }, + ] + ) + + dynamodb.put_item( + TableName='test', + Item={ + 'id': {'S': 'test1'}, + 'gsi_hash_key': {'S': 'key1'}, + 'gsi_range_key': {'S': 'range1'}, + } + ) + dynamodb.put_item( + TableName='test', + Item={ + 'id': {'S': 'test2'}, + 'gsi_hash_key': {'S': 'key1'}, + } + ) + + res = dynamodb.query(TableName='test', 
IndexName='test_gsi', + KeyConditionExpression='gsi_hash_key = :gsi_hash_key AND gsi_range_key = :gsi_range_key', + ExpressionAttributeValues={ + ':gsi_hash_key': {'S': 'key1'}, + ':gsi_range_key': {'S': 'range1'} + }) + res.should.have.key("Count").equal(1) + res.should.have.key("Items") + res['Items'][0].should.equal({ + 'id': {'S': 'test1'}, + 'gsi_hash_key': {'S': 'key1'}, + 'gsi_range_key': {'S': 'range1'}, + }) diff --git a/tests/test_dynamodb2/test_dynamodb_table_without_range_key.py b/tests/test_dynamodb2/test_dynamodb_table_without_range_key.py index faa826fb0..874804db0 100644 --- a/tests/test_dynamodb2/test_dynamodb_table_without_range_key.py +++ b/tests/test_dynamodb2/test_dynamodb_table_without_range_key.py @@ -1,790 +1,831 @@ -from __future__ import unicode_literals - -import boto -import boto3 -from boto3.dynamodb.conditions import Key -import sure # noqa -from freezegun import freeze_time -from boto.exception import JSONResponseError -from moto import mock_dynamodb2, mock_dynamodb2_deprecated -from tests.helpers import requires_boto_gte -import botocore -try: - from boto.dynamodb2.fields import HashKey - from boto.dynamodb2.table import Table - from boto.dynamodb2.table import Item - from boto.dynamodb2.exceptions import ConditionalCheckFailedException, ItemNotFound -except ImportError: - pass - - -def create_table(): - table = Table.create('messages', schema=[ - HashKey('forum_name') - ], throughput={ - 'read': 10, - 'write': 10, - }) - return table - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -@freeze_time("2012-01-14") -def test_create_table(): - create_table() - expected = { - 'Table': { - 'AttributeDefinitions': [ - {'AttributeName': 'forum_name', 'AttributeType': 'S'} - ], - 'ProvisionedThroughput': { - 'NumberOfDecreasesToday': 0, 'WriteCapacityUnits': 10, 'ReadCapacityUnits': 10 - }, - 'TableSizeBytes': 0, - 'TableName': 'messages', - 'TableStatus': 'ACTIVE', - 'TableArn': 
'arn:aws:dynamodb:us-east-1:123456789011:table/messages', - 'KeySchema': [ - {'KeyType': 'HASH', 'AttributeName': 'forum_name'} - ], - 'ItemCount': 0, 'CreationDateTime': 1326499200.0, - 'GlobalSecondaryIndexes': [], - 'LocalSecondaryIndexes': [] - } - } - conn = boto.dynamodb2.connect_to_region( - 'us-east-1', - aws_access_key_id="ak", - aws_secret_access_key="sk" - ) - - conn.describe_table('messages').should.equal(expected) - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_delete_table(): - create_table() - conn = boto.dynamodb2.layer1.DynamoDBConnection() - conn.list_tables()["TableNames"].should.have.length_of(1) - - conn.delete_table('messages') - conn.list_tables()["TableNames"].should.have.length_of(0) - - conn.delete_table.when.called_with( - 'messages').should.throw(JSONResponseError) - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_update_table_throughput(): - table = create_table() - table.throughput["read"].should.equal(10) - table.throughput["write"].should.equal(10) - - table.update(throughput={ - 'read': 5, - 'write': 6, - }) - - table.throughput["read"].should.equal(5) - table.throughput["write"].should.equal(6) - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_item_add_and_describe_and_update(): - table = create_table() - - data = { - 'forum_name': 'LOLCat Forum', - 'Body': 'http://url_to_lolcat.gif', - 'SentBy': 'User A', - } - - table.put_item(data=data) - returned_item = table.get_item(forum_name="LOLCat Forum") - returned_item.should_not.be.none - - dict(returned_item).should.equal({ - 'forum_name': 'LOLCat Forum', - 'Body': 'http://url_to_lolcat.gif', - 'SentBy': 'User A', - }) - - returned_item['SentBy'] = 'User B' - returned_item.save(overwrite=True) - - returned_item = table.get_item( - forum_name='LOLCat Forum' - ) - dict(returned_item).should.equal({ - 'forum_name': 'LOLCat Forum', - 'Body': 'http://url_to_lolcat.gif', - 'SentBy': 'User B', - }) - - -@requires_boto_gte("2.9") 
-@mock_dynamodb2_deprecated -def test_item_partial_save(): - table = create_table() - - data = { - 'forum_name': 'LOLCat Forum', - 'Body': 'http://url_to_lolcat.gif', - 'SentBy': 'User A', - } - - table.put_item(data=data) - returned_item = table.get_item(forum_name="LOLCat Forum") - - returned_item['SentBy'] = 'User B' - returned_item.partial_save() - - returned_item = table.get_item( - forum_name='LOLCat Forum' - ) - dict(returned_item).should.equal({ - 'forum_name': 'LOLCat Forum', - 'Body': 'http://url_to_lolcat.gif', - 'SentBy': 'User B', - }) - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_item_put_without_table(): - conn = boto.dynamodb2.layer1.DynamoDBConnection() - - conn.put_item.when.called_with( - table_name='undeclared-table', - item={ - 'forum_name': 'LOLCat Forum', - 'Body': 'http://url_to_lolcat.gif', - 'SentBy': 'User A', - } - ).should.throw(JSONResponseError) - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_get_item_with_undeclared_table(): - conn = boto.dynamodb2.layer1.DynamoDBConnection() - - conn.get_item.when.called_with( - table_name='undeclared-table', - key={"forum_name": {"S": "LOLCat Forum"}}, - ).should.throw(JSONResponseError) - - -@requires_boto_gte("2.30.0") -@mock_dynamodb2_deprecated -def test_delete_item(): - table = create_table() - - item_data = { - 'forum_name': 'LOLCat Forum', - 'Body': 'http://url_to_lolcat.gif', - 'SentBy': 'User A', - 'ReceivedTime': '12/9/2011 11:36:03 PM', - } - item = Item(table, item_data) - item.save() - table.count().should.equal(1) - - response = item.delete() - - response.should.equal(True) - - table.count().should.equal(0) - - # Deletes are idempotent and 'False' here would imply an error condition - item.delete().should.equal(True) - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_delete_item_with_undeclared_table(): - conn = boto.dynamodb2.layer1.DynamoDBConnection() - - conn.delete_item.when.called_with( - 
table_name='undeclared-table', - key={"forum_name": {"S": "LOLCat Forum"}}, - ).should.throw(JSONResponseError) - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_query(): - table = create_table() - - item_data = { - 'forum_name': 'the-key', - 'Body': 'http://url_to_lolcat.gif', - 'SentBy': 'User A', - 'ReceivedTime': '12/9/2011 11:36:03 PM', - } - item = Item(table, item_data) - item.save(overwrite=True) - table.count().should.equal(1) - table = Table("messages") - - results = table.query(forum_name__eq='the-key') - sum(1 for _ in results).should.equal(1) - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_query_with_undeclared_table(): - conn = boto.dynamodb2.layer1.DynamoDBConnection() - - conn.query.when.called_with( - table_name='undeclared-table', - key_conditions={"forum_name": { - "ComparisonOperator": "EQ", "AttributeValueList": [{"S": "the-key"}]}} - ).should.throw(JSONResponseError) - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_scan(): - table = create_table() - - item_data = { - 'Body': 'http://url_to_lolcat.gif', - 'SentBy': 'User A', - 'ReceivedTime': '12/9/2011 11:36:03 PM', - } - item_data['forum_name'] = 'the-key' - - item = Item(table, item_data) - item.save() - - item['forum_name'] = 'the-key2' - item.save(overwrite=True) - - item_data = { - 'Body': 'http://url_to_lolcat.gif', - 'SentBy': 'User B', - 'ReceivedTime': '12/9/2011 11:36:03 PM', - 'Ids': set([1, 2, 3]), - 'PK': 7, - } - item_data['forum_name'] = 'the-key3' - item = Item(table, item_data) - item.save() - - results = table.scan() - sum(1 for _ in results).should.equal(3) - - results = table.scan(SentBy__eq='User B') - sum(1 for _ in results).should.equal(1) - - results = table.scan(Body__beginswith='http') - sum(1 for _ in results).should.equal(3) - - results = table.scan(Ids__null=False) - sum(1 for _ in results).should.equal(1) - - results = table.scan(Ids__null=True) - sum(1 for _ in results).should.equal(2) - - results = 
table.scan(PK__between=[8, 9]) - sum(1 for _ in results).should.equal(0) - - results = table.scan(PK__between=[5, 8]) - sum(1 for _ in results).should.equal(1) - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_scan_with_undeclared_table(): - conn = boto.dynamodb2.layer1.DynamoDBConnection() - - conn.scan.when.called_with( - table_name='undeclared-table', - scan_filter={ - "SentBy": { - "AttributeValueList": [{ - "S": "User B"} - ], - "ComparisonOperator": "EQ" - } - }, - ).should.throw(JSONResponseError) - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_write_batch(): - table = create_table() - - with table.batch_write() as batch: - batch.put_item(data={ - 'forum_name': 'the-key', - 'subject': '123', - 'Body': 'http://url_to_lolcat.gif', - 'SentBy': 'User A', - 'ReceivedTime': '12/9/2011 11:36:03 PM', - }) - batch.put_item(data={ - 'forum_name': 'the-key2', - 'subject': '789', - 'Body': 'http://url_to_lolcat.gif', - 'SentBy': 'User B', - 'ReceivedTime': '12/9/2011 11:36:03 PM', - }) - - table.count().should.equal(2) - with table.batch_write() as batch: - batch.delete_item( - forum_name='the-key', - subject='789' - ) - - table.count().should.equal(1) - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_batch_read(): - table = create_table() - - item_data = { - 'Body': 'http://url_to_lolcat.gif', - 'SentBy': 'User A', - 'ReceivedTime': '12/9/2011 11:36:03 PM', - } - item_data['forum_name'] = 'the-key1' - item = Item(table, item_data) - item.save() - - item = Item(table, item_data) - item_data['forum_name'] = 'the-key2' - item.save(overwrite=True) - - item_data = { - 'Body': 'http://url_to_lolcat.gif', - 'SentBy': 'User B', - 'ReceivedTime': '12/9/2011 11:36:03 PM', - 'Ids': set([1, 2, 3]), - 'PK': 7, - } - item = Item(table, item_data) - item_data['forum_name'] = 'another-key' - item.save(overwrite=True) - - results = table.batch_get( - keys=[ - {'forum_name': 'the-key1'}, - {'forum_name': 'another-key'}, - ] - ) 
- - # Iterate through so that batch_item gets called - count = len([x for x in results]) - count.should.equal(2) - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_get_key_fields(): - table = create_table() - kf = table.get_key_fields() - kf[0].should.equal('forum_name') - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_get_missing_item(): - table = create_table() - table.get_item.when.called_with( - forum_name='missing').should.throw(ItemNotFound) - - -@requires_boto_gte("2.9") -@mock_dynamodb2_deprecated -def test_get_special_item(): - table = Table.create('messages', schema=[ - HashKey('date-joined') - ], throughput={ - 'read': 10, - 'write': 10, - }) - - data = { - 'date-joined': 127549192, - 'SentBy': 'User A', - } - table.put_item(data=data) - returned_item = table.get_item(**{'date-joined': 127549192}) - dict(returned_item).should.equal(data) - - -@mock_dynamodb2_deprecated -def test_update_item_remove(): - conn = boto.dynamodb2.connect_to_region("us-east-1") - table = Table.create('messages', schema=[ - HashKey('username') - ]) - - data = { - 'username': "steve", - 'SentBy': 'User A', - 'SentTo': 'User B', - } - table.put_item(data=data) - key_map = { - 'username': {"S": "steve"} - } - - # Then remove the SentBy field - conn.update_item("messages", key_map, - update_expression="REMOVE SentBy, SentTo") - - returned_item = table.get_item(username="steve") - dict(returned_item).should.equal({ - 'username': "steve", - }) - - -@mock_dynamodb2_deprecated -def test_update_item_set(): - conn = boto.dynamodb2.connect_to_region("us-east-1") - table = Table.create('messages', schema=[ - HashKey('username') - ]) - - data = { - 'username': "steve", - 'SentBy': 'User A', - } - table.put_item(data=data) - key_map = { - 'username': {"S": "steve"} - } - - conn.update_item("messages", key_map, - update_expression="SET foo=bar, blah=baz REMOVE SentBy") - - returned_item = table.get_item(username="steve") - 
dict(returned_item).should.equal({ - 'username': "steve", - 'foo': 'bar', - 'blah': 'baz', - }) - - -@mock_dynamodb2_deprecated -def test_failed_overwrite(): - table = Table.create('messages', schema=[ - HashKey('id'), - ], throughput={ - 'read': 7, - 'write': 3, - }) - - data1 = {'id': '123', 'data': '678'} - table.put_item(data=data1) - - data2 = {'id': '123', 'data': '345'} - table.put_item(data=data2, overwrite=True) - - data3 = {'id': '123', 'data': '812'} - table.put_item.when.called_with(data=data3).should.throw( - ConditionalCheckFailedException) - - returned_item = table.lookup('123') - dict(returned_item).should.equal(data2) - - data4 = {'id': '124', 'data': 812} - table.put_item(data=data4) - - returned_item = table.lookup('124') - dict(returned_item).should.equal(data4) - - -@mock_dynamodb2_deprecated -def test_conflicting_writes(): - table = Table.create('messages', schema=[ - HashKey('id'), - ]) - - item_data = {'id': '123', 'data': '678'} - item1 = Item(table, item_data) - item2 = Item(table, item_data) - item1.save() - - item1['data'] = '579' - item2['data'] = '912' - - item1.save() - item2.save.when.called_with().should.throw(ConditionalCheckFailedException) - - -""" -boto3 -""" - - -@mock_dynamodb2 -def test_boto3_create_table(): - dynamodb = boto3.resource('dynamodb', region_name='us-east-1') - - table = dynamodb.create_table( - TableName='users', - KeySchema=[ - { - 'AttributeName': 'username', - 'KeyType': 'HASH' - }, - ], - AttributeDefinitions=[ - { - 'AttributeName': 'username', - 'AttributeType': 'S' - }, - ], - ProvisionedThroughput={ - 'ReadCapacityUnits': 5, - 'WriteCapacityUnits': 5 - } - ) - table.name.should.equal('users') - - -def _create_user_table(): - dynamodb = boto3.resource('dynamodb', region_name='us-east-1') - - table = dynamodb.create_table( - TableName='users', - KeySchema=[ - { - 'AttributeName': 'username', - 'KeyType': 'HASH' - }, - ], - AttributeDefinitions=[ - { - 'AttributeName': 'username', - 'AttributeType': 'S' - 
}, - ], - ProvisionedThroughput={ - 'ReadCapacityUnits': 5, - 'WriteCapacityUnits': 5 - } - ) - return dynamodb.Table('users') - - -@mock_dynamodb2 -def test_boto3_conditions(): - table = _create_user_table() - - table.put_item(Item={'username': 'johndoe'}) - table.put_item(Item={'username': 'janedoe'}) - - response = table.query( - KeyConditionExpression=Key('username').eq('johndoe') - ) - response['Count'].should.equal(1) - response['Items'].should.have.length_of(1) - response['Items'][0].should.equal({"username": "johndoe"}) - - -@mock_dynamodb2 -def test_boto3_put_item_conditions_pass(): - table = _create_user_table() - table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) - table.put_item( - Item={'username': 'johndoe', 'foo': 'baz'}, - Expected={ - 'foo': { - 'ComparisonOperator': 'EQ', - 'AttributeValueList': ['bar'] - } - }) - final_item = table.get_item(Key={'username': 'johndoe'}) - assert dict(final_item)['Item']['foo'].should.equal("baz") - -@mock_dynamodb2 -def test_boto3_put_item_conditions_pass_because_expect_not_exists_by_compare_to_null(): - table = _create_user_table() - table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) - table.put_item( - Item={'username': 'johndoe', 'foo': 'baz'}, - Expected={ - 'whatever': { - 'ComparisonOperator': 'NULL', - } - }) - final_item = table.get_item(Key={'username': 'johndoe'}) - assert dict(final_item)['Item']['foo'].should.equal("baz") - -@mock_dynamodb2 -def test_boto3_put_item_conditions_pass_because_expect_exists_by_compare_to_not_null(): - table = _create_user_table() - table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) - table.put_item( - Item={'username': 'johndoe', 'foo': 'baz'}, - Expected={ - 'foo': { - 'ComparisonOperator': 'NOT_NULL', - } - }) - final_item = table.get_item(Key={'username': 'johndoe'}) - assert dict(final_item)['Item']['foo'].should.equal("baz") - -@mock_dynamodb2 -def test_boto3_put_item_conditions_fail(): - table = _create_user_table() - 
table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) - table.put_item.when.called_with( - Item={'username': 'johndoe', 'foo': 'baz'}, - Expected={ - 'foo': { - 'ComparisonOperator': 'NE', - 'AttributeValueList': ['bar'] - } - }).should.throw(botocore.client.ClientError) - -@mock_dynamodb2 -def test_boto3_update_item_conditions_fail(): - table = _create_user_table() - table.put_item(Item={'username': 'johndoe', 'foo': 'baz'}) - table.update_item.when.called_with( - Key={'username': 'johndoe'}, - UpdateExpression='SET foo=bar', - Expected={ - 'foo': { - 'Value': 'bar', - } - }).should.throw(botocore.client.ClientError) - -@mock_dynamodb2 -def test_boto3_update_item_conditions_fail_because_expect_not_exists(): - table = _create_user_table() - table.put_item(Item={'username': 'johndoe', 'foo': 'baz'}) - table.update_item.when.called_with( - Key={'username': 'johndoe'}, - UpdateExpression='SET foo=bar', - Expected={ - 'foo': { - 'Exists': False - } - }).should.throw(botocore.client.ClientError) - -@mock_dynamodb2 -def test_boto3_update_item_conditions_fail_because_expect_not_exists_by_compare_to_null(): - table = _create_user_table() - table.put_item(Item={'username': 'johndoe', 'foo': 'baz'}) - table.update_item.when.called_with( - Key={'username': 'johndoe'}, - UpdateExpression='SET foo=bar', - Expected={ - 'foo': { - 'ComparisonOperator': 'NULL', - } - }).should.throw(botocore.client.ClientError) - -@mock_dynamodb2 -def test_boto3_update_item_conditions_pass(): - table = _create_user_table() - table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) - table.update_item( - Key={'username': 'johndoe'}, - UpdateExpression='SET foo=baz', - Expected={ - 'foo': { - 'Value': 'bar', - } - }) - returned_item = table.get_item(Key={'username': 'johndoe'}) - assert dict(returned_item)['Item']['foo'].should.equal("baz") - -@mock_dynamodb2 -def test_boto3_update_item_conditions_pass_because_expect_not_exists(): - table = _create_user_table() - 
table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) - table.update_item( - Key={'username': 'johndoe'}, - UpdateExpression='SET foo=baz', - Expected={ - 'whatever': { - 'Exists': False, - } - }) - returned_item = table.get_item(Key={'username': 'johndoe'}) - assert dict(returned_item)['Item']['foo'].should.equal("baz") - -@mock_dynamodb2 -def test_boto3_update_item_conditions_pass_because_expect_not_exists_by_compare_to_null(): - table = _create_user_table() - table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) - table.update_item( - Key={'username': 'johndoe'}, - UpdateExpression='SET foo=baz', - Expected={ - 'whatever': { - 'ComparisonOperator': 'NULL', - } - }) - returned_item = table.get_item(Key={'username': 'johndoe'}) - assert dict(returned_item)['Item']['foo'].should.equal("baz") - -@mock_dynamodb2 -def test_boto3_update_item_conditions_pass_because_expect_exists_by_compare_to_not_null(): - table = _create_user_table() - table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) - table.update_item( - Key={'username': 'johndoe'}, - UpdateExpression='SET foo=baz', - Expected={ - 'foo': { - 'ComparisonOperator': 'NOT_NULL', - } - }) - returned_item = table.get_item(Key={'username': 'johndoe'}) - assert dict(returned_item)['Item']['foo'].should.equal("baz") - -@mock_dynamodb2 -def test_boto3_put_item_conditions_pass(): - table = _create_user_table() - table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) - table.put_item( - Item={'username': 'johndoe', 'foo': 'baz'}, - Expected={ - 'foo': { - 'ComparisonOperator': 'EQ', - 'AttributeValueList': ['bar'] - } - }) - returned_item = table.get_item(Key={'username': 'johndoe'}) - assert dict(returned_item)['Item']['foo'].should.equal("baz") - - -@mock_dynamodb2 -def test_scan_pagination(): - table = _create_user_table() - - expected_usernames = ['user{0}'.format(i) for i in range(10)] - for u in expected_usernames: - table.put_item(Item={'username': u}) - - page1 = table.scan(Limit=6) - 
page1['Count'].should.equal(6) - page1['Items'].should.have.length_of(6) - page1.should.have.key('LastEvaluatedKey') - - page2 = table.scan(Limit=6, - ExclusiveStartKey=page1['LastEvaluatedKey']) - page2['Count'].should.equal(4) - page2['Items'].should.have.length_of(4) - page2.should_not.have.key('LastEvaluatedKey') - - results = page1['Items'] + page2['Items'] - usernames = set([r['username'] for r in results]) - usernames.should.equal(set(expected_usernames)) +from __future__ import unicode_literals + +import boto +import boto3 +from boto3.dynamodb.conditions import Key +import sure # noqa +from freezegun import freeze_time +from boto.exception import JSONResponseError +from moto import mock_dynamodb2, mock_dynamodb2_deprecated +from tests.helpers import requires_boto_gte +import botocore +try: + from boto.dynamodb2.fields import HashKey + from boto.dynamodb2.table import Table + from boto.dynamodb2.table import Item + from boto.dynamodb2.exceptions import ConditionalCheckFailedException, ItemNotFound +except ImportError: + pass + + +def create_table(): + table = Table.create('messages', schema=[ + HashKey('forum_name') + ], throughput={ + 'read': 10, + 'write': 10, + }) + return table + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +@freeze_time("2012-01-14") +def test_create_table(): + create_table() + expected = { + 'Table': { + 'AttributeDefinitions': [ + {'AttributeName': 'forum_name', 'AttributeType': 'S'} + ], + 'ProvisionedThroughput': { + 'NumberOfDecreasesToday': 0, 'WriteCapacityUnits': 10, 'ReadCapacityUnits': 10 + }, + 'TableSizeBytes': 0, + 'TableName': 'messages', + 'TableStatus': 'ACTIVE', + 'TableArn': 'arn:aws:dynamodb:us-east-1:123456789011:table/messages', + 'KeySchema': [ + {'KeyType': 'HASH', 'AttributeName': 'forum_name'} + ], + 'ItemCount': 0, 'CreationDateTime': 1326499200.0, + 'GlobalSecondaryIndexes': [], + 'LocalSecondaryIndexes': [] + } + } + conn = boto.dynamodb2.connect_to_region( + 'us-east-1', + aws_access_key_id="ak", 
+ aws_secret_access_key="sk" + ) + + conn.describe_table('messages').should.equal(expected) + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_delete_table(): + create_table() + conn = boto.dynamodb2.layer1.DynamoDBConnection() + conn.list_tables()["TableNames"].should.have.length_of(1) + + conn.delete_table('messages') + conn.list_tables()["TableNames"].should.have.length_of(0) + + conn.delete_table.when.called_with( + 'messages').should.throw(JSONResponseError) + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_update_table_throughput(): + table = create_table() + table.throughput["read"].should.equal(10) + table.throughput["write"].should.equal(10) + + table.update(throughput={ + 'read': 5, + 'write': 6, + }) + + table.throughput["read"].should.equal(5) + table.throughput["write"].should.equal(6) + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_item_add_and_describe_and_update(): + table = create_table() + + data = { + 'forum_name': 'LOLCat Forum', + 'Body': 'http://url_to_lolcat.gif', + 'SentBy': 'User A', + } + + table.put_item(data=data) + returned_item = table.get_item(forum_name="LOLCat Forum") + returned_item.should_not.be.none + + dict(returned_item).should.equal({ + 'forum_name': 'LOLCat Forum', + 'Body': 'http://url_to_lolcat.gif', + 'SentBy': 'User A', + }) + + returned_item['SentBy'] = 'User B' + returned_item.save(overwrite=True) + + returned_item = table.get_item( + forum_name='LOLCat Forum' + ) + dict(returned_item).should.equal({ + 'forum_name': 'LOLCat Forum', + 'Body': 'http://url_to_lolcat.gif', + 'SentBy': 'User B', + }) + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_item_partial_save(): + table = create_table() + + data = { + 'forum_name': 'LOLCat Forum', + 'Body': 'http://url_to_lolcat.gif', + 'SentBy': 'User A', + } + + table.put_item(data=data) + returned_item = table.get_item(forum_name="LOLCat Forum") + + returned_item['SentBy'] = 'User B' + 
returned_item.partial_save() + + returned_item = table.get_item( + forum_name='LOLCat Forum' + ) + dict(returned_item).should.equal({ + 'forum_name': 'LOLCat Forum', + 'Body': 'http://url_to_lolcat.gif', + 'SentBy': 'User B', + }) + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_item_put_without_table(): + conn = boto.dynamodb2.layer1.DynamoDBConnection() + + conn.put_item.when.called_with( + table_name='undeclared-table', + item={ + 'forum_name': 'LOLCat Forum', + 'Body': 'http://url_to_lolcat.gif', + 'SentBy': 'User A', + } + ).should.throw(JSONResponseError) + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_get_item_with_undeclared_table(): + conn = boto.dynamodb2.layer1.DynamoDBConnection() + + conn.get_item.when.called_with( + table_name='undeclared-table', + key={"forum_name": {"S": "LOLCat Forum"}}, + ).should.throw(JSONResponseError) + + +@requires_boto_gte("2.30.0") +@mock_dynamodb2_deprecated +def test_delete_item(): + table = create_table() + + item_data = { + 'forum_name': 'LOLCat Forum', + 'Body': 'http://url_to_lolcat.gif', + 'SentBy': 'User A', + 'ReceivedTime': '12/9/2011 11:36:03 PM', + } + item = Item(table, item_data) + item.save() + table.count().should.equal(1) + + response = item.delete() + + response.should.equal(True) + + table.count().should.equal(0) + + # Deletes are idempotent and 'False' here would imply an error condition + item.delete().should.equal(True) + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_delete_item_with_undeclared_table(): + conn = boto.dynamodb2.layer1.DynamoDBConnection() + + conn.delete_item.when.called_with( + table_name='undeclared-table', + key={"forum_name": {"S": "LOLCat Forum"}}, + ).should.throw(JSONResponseError) + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_query(): + table = create_table() + + item_data = { + 'forum_name': 'the-key', + 'Body': 'http://url_to_lolcat.gif', + 'SentBy': 'User A', + 'ReceivedTime': '12/9/2011 
11:36:03 PM', + } + item = Item(table, item_data) + item.save(overwrite=True) + table.count().should.equal(1) + table = Table("messages") + + results = table.query(forum_name__eq='the-key') + sum(1 for _ in results).should.equal(1) + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_query_with_undeclared_table(): + conn = boto.dynamodb2.layer1.DynamoDBConnection() + + conn.query.when.called_with( + table_name='undeclared-table', + key_conditions={"forum_name": { + "ComparisonOperator": "EQ", "AttributeValueList": [{"S": "the-key"}]}} + ).should.throw(JSONResponseError) + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_scan(): + table = create_table() + + item_data = { + 'Body': 'http://url_to_lolcat.gif', + 'SentBy': 'User A', + 'ReceivedTime': '12/9/2011 11:36:03 PM', + } + item_data['forum_name'] = 'the-key' + + item = Item(table, item_data) + item.save() + + item['forum_name'] = 'the-key2' + item.save(overwrite=True) + + item_data = { + 'Body': 'http://url_to_lolcat.gif', + 'SentBy': 'User B', + 'ReceivedTime': '12/9/2011 11:36:03 PM', + 'Ids': set([1, 2, 3]), + 'PK': 7, + } + item_data['forum_name'] = 'the-key3' + item = Item(table, item_data) + item.save() + + results = table.scan() + sum(1 for _ in results).should.equal(3) + + results = table.scan(SentBy__eq='User B') + sum(1 for _ in results).should.equal(1) + + results = table.scan(Body__beginswith='http') + sum(1 for _ in results).should.equal(3) + + results = table.scan(Ids__null=False) + sum(1 for _ in results).should.equal(1) + + results = table.scan(Ids__null=True) + sum(1 for _ in results).should.equal(2) + + results = table.scan(PK__between=[8, 9]) + sum(1 for _ in results).should.equal(0) + + results = table.scan(PK__between=[5, 8]) + sum(1 for _ in results).should.equal(1) + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_scan_with_undeclared_table(): + conn = boto.dynamodb2.layer1.DynamoDBConnection() + + conn.scan.when.called_with( + 
table_name='undeclared-table', + scan_filter={ + "SentBy": { + "AttributeValueList": [{ + "S": "User B"} + ], + "ComparisonOperator": "EQ" + } + }, + ).should.throw(JSONResponseError) + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_write_batch(): + table = create_table() + + with table.batch_write() as batch: + batch.put_item(data={ + 'forum_name': 'the-key', + 'subject': '123', + 'Body': 'http://url_to_lolcat.gif', + 'SentBy': 'User A', + 'ReceivedTime': '12/9/2011 11:36:03 PM', + }) + batch.put_item(data={ + 'forum_name': 'the-key2', + 'subject': '789', + 'Body': 'http://url_to_lolcat.gif', + 'SentBy': 'User B', + 'ReceivedTime': '12/9/2011 11:36:03 PM', + }) + + table.count().should.equal(2) + with table.batch_write() as batch: + batch.delete_item( + forum_name='the-key', + subject='789' + ) + + table.count().should.equal(1) + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_batch_read(): + table = create_table() + + item_data = { + 'Body': 'http://url_to_lolcat.gif', + 'SentBy': 'User A', + 'ReceivedTime': '12/9/2011 11:36:03 PM', + } + item_data['forum_name'] = 'the-key1' + item = Item(table, item_data) + item.save() + + item = Item(table, item_data) + item_data['forum_name'] = 'the-key2' + item.save(overwrite=True) + + item_data = { + 'Body': 'http://url_to_lolcat.gif', + 'SentBy': 'User B', + 'ReceivedTime': '12/9/2011 11:36:03 PM', + 'Ids': set([1, 2, 3]), + 'PK': 7, + } + item = Item(table, item_data) + item_data['forum_name'] = 'another-key' + item.save(overwrite=True) + + results = table.batch_get( + keys=[ + {'forum_name': 'the-key1'}, + {'forum_name': 'another-key'}, + ] + ) + + # Iterate through so that batch_item gets called + count = len([x for x in results]) + count.should.equal(2) + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_get_key_fields(): + table = create_table() + kf = table.get_key_fields() + kf[0].should.equal('forum_name') + + +@requires_boto_gte("2.9") 
+@mock_dynamodb2_deprecated +def test_get_missing_item(): + table = create_table() + table.get_item.when.called_with( + forum_name='missing').should.throw(ItemNotFound) + + +@requires_boto_gte("2.9") +@mock_dynamodb2_deprecated +def test_get_special_item(): + table = Table.create('messages', schema=[ + HashKey('date-joined') + ], throughput={ + 'read': 10, + 'write': 10, + }) + + data = { + 'date-joined': 127549192, + 'SentBy': 'User A', + } + table.put_item(data=data) + returned_item = table.get_item(**{'date-joined': 127549192}) + dict(returned_item).should.equal(data) + + +@mock_dynamodb2_deprecated +def test_update_item_remove(): + conn = boto.dynamodb2.connect_to_region("us-east-1") + table = Table.create('messages', schema=[ + HashKey('username') + ]) + + data = { + 'username': "steve", + 'SentBy': 'User A', + 'SentTo': 'User B', + } + table.put_item(data=data) + key_map = { + 'username': {"S": "steve"} + } + + # Then remove the SentBy field + conn.update_item("messages", key_map, + update_expression="REMOVE SentBy, SentTo") + + returned_item = table.get_item(username="steve") + dict(returned_item).should.equal({ + 'username': "steve", + }) + + +@mock_dynamodb2_deprecated +def test_update_item_set(): + conn = boto.dynamodb2.connect_to_region("us-east-1") + table = Table.create('messages', schema=[ + HashKey('username') + ]) + + data = { + 'username': "steve", + 'SentBy': 'User A', + } + table.put_item(data=data) + key_map = { + 'username': {"S": "steve"} + } + + conn.update_item("messages", key_map, + update_expression="SET foo=bar, blah=baz REMOVE SentBy") + + returned_item = table.get_item(username="steve") + dict(returned_item).should.equal({ + 'username': "steve", + 'foo': 'bar', + 'blah': 'baz', + }) + + +@mock_dynamodb2_deprecated +def test_failed_overwrite(): + table = Table.create('messages', schema=[ + HashKey('id'), + ], throughput={ + 'read': 7, + 'write': 3, + }) + + data1 = {'id': '123', 'data': '678'} + table.put_item(data=data1) + + data2 = 
{'id': '123', 'data': '345'} + table.put_item(data=data2, overwrite=True) + + data3 = {'id': '123', 'data': '812'} + table.put_item.when.called_with(data=data3).should.throw( + ConditionalCheckFailedException) + + returned_item = table.lookup('123') + dict(returned_item).should.equal(data2) + + data4 = {'id': '124', 'data': 812} + table.put_item(data=data4) + + returned_item = table.lookup('124') + dict(returned_item).should.equal(data4) + + +@mock_dynamodb2_deprecated +def test_conflicting_writes(): + table = Table.create('messages', schema=[ + HashKey('id'), + ]) + + item_data = {'id': '123', 'data': '678'} + item1 = Item(table, item_data) + item2 = Item(table, item_data) + item1.save() + + item1['data'] = '579' + item2['data'] = '912' + + item1.save() + item2.save.when.called_with().should.throw(ConditionalCheckFailedException) + + +""" +boto3 +""" + + +@mock_dynamodb2 +def test_boto3_create_table(): + dynamodb = boto3.resource('dynamodb', region_name='us-east-1') + + table = dynamodb.create_table( + TableName='users', + KeySchema=[ + { + 'AttributeName': 'username', + 'KeyType': 'HASH' + }, + ], + AttributeDefinitions=[ + { + 'AttributeName': 'username', + 'AttributeType': 'S' + }, + ], + ProvisionedThroughput={ + 'ReadCapacityUnits': 5, + 'WriteCapacityUnits': 5 + } + ) + table.name.should.equal('users') + + +def _create_user_table(): + dynamodb = boto3.resource('dynamodb', region_name='us-east-1') + + table = dynamodb.create_table( + TableName='users', + KeySchema=[ + { + 'AttributeName': 'username', + 'KeyType': 'HASH' + }, + ], + AttributeDefinitions=[ + { + 'AttributeName': 'username', + 'AttributeType': 'S' + }, + ], + ProvisionedThroughput={ + 'ReadCapacityUnits': 5, + 'WriteCapacityUnits': 5 + } + ) + return dynamodb.Table('users') + + +@mock_dynamodb2 +def test_boto3_conditions(): + table = _create_user_table() + + table.put_item(Item={'username': 'johndoe'}) + table.put_item(Item={'username': 'janedoe'}) + + response = table.query( + 
KeyConditionExpression=Key('username').eq('johndoe') + ) + response['Count'].should.equal(1) + response['Items'].should.have.length_of(1) + response['Items'][0].should.equal({"username": "johndoe"}) + + +@mock_dynamodb2 +def test_boto3_put_item_conditions_pass(): + table = _create_user_table() + table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) + table.put_item( + Item={'username': 'johndoe', 'foo': 'baz'}, + Expected={ + 'foo': { + 'ComparisonOperator': 'EQ', + 'AttributeValueList': ['bar'] + } + }) + final_item = table.get_item(Key={'username': 'johndoe'}) + assert dict(final_item)['Item']['foo'].should.equal("baz") + +@mock_dynamodb2 +def test_boto3_put_item_conditions_pass_because_expect_not_exists_by_compare_to_null(): + table = _create_user_table() + table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) + table.put_item( + Item={'username': 'johndoe', 'foo': 'baz'}, + Expected={ + 'whatever': { + 'ComparisonOperator': 'NULL', + } + }) + final_item = table.get_item(Key={'username': 'johndoe'}) + assert dict(final_item)['Item']['foo'].should.equal("baz") + +@mock_dynamodb2 +def test_boto3_put_item_conditions_pass_because_expect_exists_by_compare_to_not_null(): + table = _create_user_table() + table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) + table.put_item( + Item={'username': 'johndoe', 'foo': 'baz'}, + Expected={ + 'foo': { + 'ComparisonOperator': 'NOT_NULL', + } + }) + final_item = table.get_item(Key={'username': 'johndoe'}) + assert dict(final_item)['Item']['foo'].should.equal("baz") + +@mock_dynamodb2 +def test_boto3_put_item_conditions_fail(): + table = _create_user_table() + table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) + table.put_item.when.called_with( + Item={'username': 'johndoe', 'foo': 'baz'}, + Expected={ + 'foo': { + 'ComparisonOperator': 'NE', + 'AttributeValueList': ['bar'] + } + }).should.throw(botocore.client.ClientError) + +@mock_dynamodb2 +def test_boto3_update_item_conditions_fail(): + table = 
_create_user_table() + table.put_item(Item={'username': 'johndoe', 'foo': 'baz'}) + table.update_item.when.called_with( + Key={'username': 'johndoe'}, + UpdateExpression='SET foo=bar', + Expected={ + 'foo': { + 'Value': 'bar', + } + }).should.throw(botocore.client.ClientError) + +@mock_dynamodb2 +def test_boto3_update_item_conditions_fail_because_expect_not_exists(): + table = _create_user_table() + table.put_item(Item={'username': 'johndoe', 'foo': 'baz'}) + table.update_item.when.called_with( + Key={'username': 'johndoe'}, + UpdateExpression='SET foo=bar', + Expected={ + 'foo': { + 'Exists': False + } + }).should.throw(botocore.client.ClientError) + +@mock_dynamodb2 +def test_boto3_update_item_conditions_fail_because_expect_not_exists_by_compare_to_null(): + table = _create_user_table() + table.put_item(Item={'username': 'johndoe', 'foo': 'baz'}) + table.update_item.when.called_with( + Key={'username': 'johndoe'}, + UpdateExpression='SET foo=bar', + Expected={ + 'foo': { + 'ComparisonOperator': 'NULL', + } + }).should.throw(botocore.client.ClientError) + +@mock_dynamodb2 +def test_boto3_update_item_conditions_pass(): + table = _create_user_table() + table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) + table.update_item( + Key={'username': 'johndoe'}, + UpdateExpression='SET foo=baz', + Expected={ + 'foo': { + 'Value': 'bar', + } + }) + returned_item = table.get_item(Key={'username': 'johndoe'}) + assert dict(returned_item)['Item']['foo'].should.equal("baz") + +@mock_dynamodb2 +def test_boto3_update_item_conditions_pass_because_expect_not_exists(): + table = _create_user_table() + table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) + table.update_item( + Key={'username': 'johndoe'}, + UpdateExpression='SET foo=baz', + Expected={ + 'whatever': { + 'Exists': False, + } + }) + returned_item = table.get_item(Key={'username': 'johndoe'}) + assert dict(returned_item)['Item']['foo'].should.equal("baz") + +@mock_dynamodb2 +def 
test_boto3_update_item_conditions_pass_because_expect_not_exists_by_compare_to_null(): + table = _create_user_table() + table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) + table.update_item( + Key={'username': 'johndoe'}, + UpdateExpression='SET foo=baz', + Expected={ + 'whatever': { + 'ComparisonOperator': 'NULL', + } + }) + returned_item = table.get_item(Key={'username': 'johndoe'}) + assert dict(returned_item)['Item']['foo'].should.equal("baz") + +@mock_dynamodb2 +def test_boto3_update_item_conditions_pass_because_expect_exists_by_compare_to_not_null(): + table = _create_user_table() + table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) + table.update_item( + Key={'username': 'johndoe'}, + UpdateExpression='SET foo=baz', + Expected={ + 'foo': { + 'ComparisonOperator': 'NOT_NULL', + } + }) + returned_item = table.get_item(Key={'username': 'johndoe'}) + assert dict(returned_item)['Item']['foo'].should.equal("baz") + + +@mock_dynamodb2 +def test_boto3_update_settype_item_with_conditions(): + class OrderedSet(set): + """A set with predictable iteration order""" + def __init__(self, values): + super(OrderedSet, self).__init__(values) + self.__ordered_values = values + + def __iter__(self): + return iter(self.__ordered_values) + + table = _create_user_table() + table.put_item(Item={'username': 'johndoe'}) + table.update_item( + Key={'username': 'johndoe'}, + UpdateExpression='SET foo=:new_value', + ExpressionAttributeValues={ + ':new_value': OrderedSet(['hello', 'world']), + }, + ) + + table.update_item( + Key={'username': 'johndoe'}, + UpdateExpression='SET foo=:new_value', + ExpressionAttributeValues={ + ':new_value': set(['baz']), + }, + Expected={ + 'foo': { + 'ComparisonOperator': 'EQ', + 'AttributeValueList': [ + OrderedSet(['world', 'hello']), # Opposite order to original + ], + } + }, + ) + returned_item = table.get_item(Key={'username': 'johndoe'}) + assert dict(returned_item)['Item']['foo'].should.equal(set(['baz'])) + + +@mock_dynamodb2 +def 
test_boto3_put_item_conditions_pass_with_returned_item(): + table = _create_user_table() + table.put_item(Item={'username': 'johndoe', 'foo': 'bar'}) + table.put_item( + Item={'username': 'johndoe', 'foo': 'baz'}, + Expected={ + 'foo': { + 'ComparisonOperator': 'EQ', + 'AttributeValueList': ['bar'] + } + }) + returned_item = table.get_item(Key={'username': 'johndoe'}) + assert dict(returned_item)['Item']['foo'].should.equal("baz") + + +@mock_dynamodb2 +def test_scan_pagination(): + table = _create_user_table() + + expected_usernames = ['user{0}'.format(i) for i in range(10)] + for u in expected_usernames: + table.put_item(Item={'username': u}) + + page1 = table.scan(Limit=6) + page1['Count'].should.equal(6) + page1['Items'].should.have.length_of(6) + page1.should.have.key('LastEvaluatedKey') + + page2 = table.scan(Limit=6, + ExclusiveStartKey=page1['LastEvaluatedKey']) + page2['Count'].should.equal(4) + page2['Items'].should.have.length_of(4) + page2.should_not.have.key('LastEvaluatedKey') + + results = page1['Items'] + page2['Items'] + usernames = set([r['username'] for r in results]) + usernames.should.equal(set(expected_usernames)) diff --git a/tests/test_dynamodbstreams/test_dynamodbstreams.py b/tests/test_dynamodbstreams/test_dynamodbstreams.py new file mode 100644 index 000000000..b60c21053 --- /dev/null +++ b/tests/test_dynamodbstreams/test_dynamodbstreams.py @@ -0,0 +1,234 @@ +from __future__ import unicode_literals, print_function + +from nose.tools import assert_raises + +import boto3 +from moto import mock_dynamodb2, mock_dynamodbstreams + + +class TestCore(): + stream_arn = None + mocks = [] + + def setup(self): + self.mocks = [mock_dynamodb2(), mock_dynamodbstreams()] + for m in self.mocks: + m.start() + + # create a table with a stream + conn = boto3.client('dynamodb', region_name='us-east-1') + + resp = conn.create_table( + TableName='test-streams', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', + 
'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, + 'WriteCapacityUnits': 1}, + StreamSpecification={ + 'StreamEnabled': True, + 'StreamViewType': 'NEW_AND_OLD_IMAGES' + } + ) + self.stream_arn = resp['TableDescription']['LatestStreamArn'] + + def teardown(self): + conn = boto3.client('dynamodb', region_name='us-east-1') + conn.delete_table(TableName='test-streams') + self.stream_arn = None + + for m in self.mocks: + m.stop() + + + def test_verify_stream(self): + conn = boto3.client('dynamodb', region_name='us-east-1') + resp = conn.describe_table(TableName='test-streams') + assert 'LatestStreamArn' in resp['Table'] + + def test_describe_stream(self): + conn = boto3.client('dynamodbstreams', region_name='us-east-1') + + resp = conn.describe_stream(StreamArn=self.stream_arn) + assert 'StreamDescription' in resp + desc = resp['StreamDescription'] + assert desc['StreamArn'] == self.stream_arn + assert desc['TableName'] == 'test-streams' + + def test_list_streams(self): + conn = boto3.client('dynamodbstreams', region_name='us-east-1') + + resp = conn.list_streams() + assert resp['Streams'][0]['StreamArn'] == self.stream_arn + + resp = conn.list_streams(TableName='no-stream') + assert not resp['Streams'] + + def test_get_shard_iterator(self): + conn = boto3.client('dynamodbstreams', region_name='us-east-1') + + resp = conn.describe_stream(StreamArn=self.stream_arn) + shard_id = resp['StreamDescription']['Shards'][0]['ShardId'] + + resp = conn.get_shard_iterator( + StreamArn=self.stream_arn, + ShardId=shard_id, + ShardIteratorType='TRIM_HORIZON' + ) + assert 'ShardIterator' in resp + + def test_get_records_empty(self): + conn = boto3.client('dynamodbstreams', region_name='us-east-1') + + resp = conn.describe_stream(StreamArn=self.stream_arn) + shard_id = resp['StreamDescription']['Shards'][0]['ShardId'] + + resp = conn.get_shard_iterator( + StreamArn=self.stream_arn, + ShardId=shard_id, + ShardIteratorType='LATEST' + ) + iterator_id = 
resp['ShardIterator'] + + resp = conn.get_records(ShardIterator=iterator_id) + assert 'Records' in resp + assert len(resp['Records']) == 0 + + def test_get_records_seq(self): + conn = boto3.client('dynamodb', region_name='us-east-1') + + conn.put_item( + TableName='test-streams', + Item={ + 'id': {'S': 'entry1'}, + 'first_col': {'S': 'foo'} + } + ) + conn.put_item( + TableName='test-streams', + Item={ + 'id': {'S': 'entry1'}, + 'first_col': {'S': 'bar'}, + 'second_col': {'S': 'baz'} + } + ) + conn.delete_item( + TableName='test-streams', + Key={'id': {'S': 'entry1'}} + ) + + conn = boto3.client('dynamodbstreams', region_name='us-east-1') + + resp = conn.describe_stream(StreamArn=self.stream_arn) + shard_id = resp['StreamDescription']['Shards'][0]['ShardId'] + + resp = conn.get_shard_iterator( + StreamArn=self.stream_arn, + ShardId=shard_id, + ShardIteratorType='TRIM_HORIZON' + ) + iterator_id = resp['ShardIterator'] + + resp = conn.get_records(ShardIterator=iterator_id) + assert len(resp['Records']) == 3 + assert resp['Records'][0]['eventName'] == 'INSERT' + assert resp['Records'][1]['eventName'] == 'MODIFY' + assert resp['Records'][2]['eventName'] == 'DELETE' + + # now try fetching from the next shard iterator, it should be + # empty + resp = conn.get_records(ShardIterator=resp['NextShardIterator']) + assert len(resp['Records']) == 0 + + +class TestEdges(): + mocks = [] + + def setup(self): + self.mocks = [mock_dynamodb2(), mock_dynamodbstreams()] + for m in self.mocks: + m.start() + + def teardown(self): + for m in self.mocks: + m.stop() + + + def test_enable_stream_on_table(self): + conn = boto3.client('dynamodb', region_name='us-east-1') + resp = conn.create_table( + TableName='test-streams', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', + 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, + 'WriteCapacityUnits': 1} + ) + assert 'StreamSpecification' not in 
resp['TableDescription'] + + resp = conn.update_table( + TableName='test-streams', + StreamSpecification={ + 'StreamViewType': 'KEYS_ONLY' + } + ) + assert 'StreamSpecification' in resp['TableDescription'] + assert resp['TableDescription']['StreamSpecification'] == { + 'StreamEnabled': True, + 'StreamViewType': 'KEYS_ONLY' + } + assert 'LatestStreamLabel' in resp['TableDescription'] + + # now try to enable it again + with assert_raises(conn.exceptions.ResourceInUseException): + resp = conn.update_table( + TableName='test-streams', + StreamSpecification={ + 'StreamViewType': 'OLD_IMAGES' + } + ) + + def test_stream_with_range_key(self): + dyn = boto3.client('dynamodb', region_name='us-east-1') + + resp = dyn.create_table( + TableName='test-streams', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}, + {'AttributeName': 'color', 'KeyType': 'RANGE'}], + AttributeDefinitions=[{'AttributeName': 'id', + 'AttributeType': 'S'}, + {'AttributeName': 'color', + 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, + 'WriteCapacityUnits': 1}, + StreamSpecification={ + 'StreamViewType': 'NEW_IMAGES' + } + ) + stream_arn = resp['TableDescription']['LatestStreamArn'] + + streams = boto3.client('dynamodbstreams', region_name='us-east-1') + resp = streams.describe_stream(StreamArn=stream_arn) + shard_id = resp['StreamDescription']['Shards'][0]['ShardId'] + + resp = streams.get_shard_iterator( + StreamArn=stream_arn, + ShardId=shard_id, + ShardIteratorType='LATEST' + ) + iterator_id = resp['ShardIterator'] + + dyn.put_item( + TableName='test-streams', + Item={'id': {'S': 'row1'}, 'color': {'S': 'blue'}} + ) + dyn.put_item( + TableName='test-streams', + Item={'id': {'S': 'row2'}, 'color': {'S': 'green'}} + ) + + resp = streams.get_records(ShardIterator=iterator_id) + assert len(resp['Records']) == 2 + assert resp['Records'][0]['eventName'] == 'INSERT' + assert resp['Records'][1]['eventName'] == 'INSERT' + diff --git a/tests/test_ec2/test_amis.py 
b/tests/test_ec2/test_amis.py index bb5fb3fac..fd7234511 100644 --- a/tests/test_ec2/test_amis.py +++ b/tests/test_ec2/test_amis.py @@ -1,776 +1,776 @@ -from __future__ import unicode_literals - -import boto -import boto.ec2 -import boto3 -from boto.exception import EC2ResponseError -from botocore.exceptions import ClientError -# Ensure 'assert_raises' context manager support for Python 2.6 -from nose.tools import assert_raises -import sure # noqa - -from moto import mock_ec2_deprecated, mock_ec2 -from moto.ec2.models import AMIS -from tests.helpers import requires_boto_gte - - -@mock_ec2_deprecated -def test_ami_create_and_delete(): - conn = boto.connect_ec2('the_key', 'the_secret') - - initial_ami_count = len(AMIS) - conn.get_all_volumes().should.have.length_of(0) - conn.get_all_snapshots().should.have.length_of(initial_ami_count) - - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - - with assert_raises(EC2ResponseError) as ex: - image_id = conn.create_image( - instance.id, "test-ami", "this is a test ami", dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the CreateImage operation: Request would have succeeded, but DryRun flag is set') - - image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") - - all_images = conn.get_all_images() - set([i.id for i in all_images]).should.contain(image_id) - - retrieved_image = [i for i in all_images if i.id == image_id][0] - - retrieved_image.id.should.equal(image_id) - retrieved_image.virtualization_type.should.equal(instance.virtualization_type) - retrieved_image.architecture.should.equal(instance.architecture) - retrieved_image.kernel_id.should.equal(instance.kernel) - retrieved_image.platform.should.equal(instance.platform) - retrieved_image.creationDate.should_not.be.none - instance.terminate() - - # Ensure we're 
no longer creating a volume - volumes = conn.get_all_volumes() - volumes.should.have.length_of(0) - - # Validate auto-created snapshot - snapshots = conn.get_all_snapshots() - snapshots.should.have.length_of(initial_ami_count + 1) - - retrieved_image_snapshot_id = retrieved_image.block_device_mapping.current_value.snapshot_id - [s.id for s in snapshots].should.contain(retrieved_image_snapshot_id) - snapshot = [s for s in snapshots if s.id == retrieved_image_snapshot_id][0] - snapshot.description.should.equal( - "Auto-created snapshot for AMI {0}".format(retrieved_image.id)) - - # root device should be in AMI's block device mappings - root_mapping = retrieved_image.block_device_mapping.get(retrieved_image.root_device_name) - root_mapping.should_not.be.none - - # Deregister - with assert_raises(EC2ResponseError) as ex: - success = conn.deregister_image(image_id, dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the DeregisterImage operation: Request would have succeeded, but DryRun flag is set') - - success = conn.deregister_image(image_id) - success.should.be.true - - with assert_raises(EC2ResponseError) as cm: - conn.deregister_image(image_id) - cm.exception.code.should.equal('InvalidAMIID.NotFound') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - -@requires_boto_gte("2.14.0") -@mock_ec2_deprecated -def test_ami_copy(): - conn = boto.ec2.connect_to_region("us-west-1") - - initial_ami_count = len(AMIS) - conn.get_all_volumes().should.have.length_of(0) - conn.get_all_snapshots().should.have.length_of(initial_ami_count) - - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - - source_image_id = conn.create_image( - instance.id, "test-ami", "this is a test ami") - instance.terminate() - source_image = 
conn.get_all_images(image_ids=[source_image_id])[0] - - # Boto returns a 'CopyImage' object with an image_id attribute here. Use - # the image_id to fetch the full info. - with assert_raises(EC2ResponseError) as ex: - copy_image_ref = conn.copy_image( - source_image.region.name, source_image.id, "test-copy-ami", "this is a test copy ami", - dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the CopyImage operation: Request would have succeeded, but DryRun flag is set') - - copy_image_ref = conn.copy_image( - source_image.region.name, source_image.id, "test-copy-ami", "this is a test copy ami") - copy_image_id = copy_image_ref.image_id - copy_image = conn.get_all_images(image_ids=[copy_image_id])[0] - - copy_image.id.should.equal(copy_image_id) - copy_image.virtualization_type.should.equal( - source_image.virtualization_type) - copy_image.architecture.should.equal(source_image.architecture) - copy_image.kernel_id.should.equal(source_image.kernel_id) - copy_image.platform.should.equal(source_image.platform) - - # Ensure we're no longer creating a volume - conn.get_all_volumes().should.have.length_of(0) - - # Validate auto-created snapshot - conn.get_all_snapshots().should.have.length_of(initial_ami_count + 2) - - copy_image.block_device_mapping.current_value.snapshot_id.should_not.equal( - source_image.block_device_mapping.current_value.snapshot_id) - - # Copy from non-existent source ID. - with assert_raises(EC2ResponseError) as cm: - conn.copy_image(source_image.region.name, 'ami-abcd1234', - "test-copy-ami", "this is a test copy ami") - cm.exception.code.should.equal('InvalidAMIID.NotFound') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - # Copy from non-existent source region. 
- with assert_raises(EC2ResponseError) as cm: - invalid_region = 'us-east-1' if (source_image.region.name != - 'us-east-1') else 'us-west-1' - conn.copy_image(invalid_region, source_image.id, - "test-copy-ami", "this is a test copy ami") - cm.exception.code.should.equal('InvalidAMIID.NotFound') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - -@mock_ec2_deprecated -def test_ami_tagging(): - conn = boto.connect_vpc('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - conn.create_image(instance.id, "test-ami", "this is a test ami") - image = conn.get_all_images()[0] - - with assert_raises(EC2ResponseError) as ex: - image.add_tag("a key", "some value", dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set') - - image.add_tag("a key", "some value") - - tag = conn.get_all_tags()[0] - tag.name.should.equal("a key") - tag.value.should.equal("some value") - - # Refresh the DHCP options - image = conn.get_all_images()[0] - image.tags.should.have.length_of(1) - image.tags["a key"].should.equal("some value") - - -@mock_ec2_deprecated -def test_ami_create_from_missing_instance(): - conn = boto.connect_ec2('the_key', 'the_secret') - args = ["i-abcdefg", "test-ami", "this is a test ami"] - - with assert_raises(EC2ResponseError) as cm: - conn.create_image(*args) - cm.exception.code.should.equal('InvalidInstanceID.NotFound') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - -@mock_ec2_deprecated -def test_ami_pulls_attributes_from_instance(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - instance.modify_attribute("kernel", 
"test-kernel") - - image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") - image = conn.get_image(image_id) - image.kernel_id.should.equal('test-kernel') - - -@mock_ec2_deprecated -def test_ami_filters(): - conn = boto.connect_ec2('the_key', 'the_secret') - - reservationA = conn.run_instances('ami-1234abcd') - instanceA = reservationA.instances[0] - instanceA.modify_attribute("architecture", "i386") - instanceA.modify_attribute("kernel", "k-1234abcd") - instanceA.modify_attribute("platform", "windows") - instanceA.modify_attribute("virtualization_type", "hvm") - imageA_id = conn.create_image( - instanceA.id, "test-ami-A", "this is a test ami") - imageA = conn.get_image(imageA_id) - - reservationB = conn.run_instances('ami-abcd1234') - instanceB = reservationB.instances[0] - instanceB.modify_attribute("architecture", "x86_64") - instanceB.modify_attribute("kernel", "k-abcd1234") - instanceB.modify_attribute("platform", "linux") - instanceB.modify_attribute("virtualization_type", "paravirtual") - imageB_id = conn.create_image( - instanceB.id, "test-ami-B", "this is a test ami") - imageB = conn.get_image(imageB_id) - imageB.set_launch_permissions(group_names=("all")) - - amis_by_architecture = conn.get_all_images( - filters={'architecture': 'x86_64'}) - set([ami.id for ami in amis_by_architecture]).should.contain(imageB.id) - len(amis_by_architecture).should.equal(35) - - amis_by_kernel = conn.get_all_images(filters={'kernel-id': 'k-abcd1234'}) - set([ami.id for ami in amis_by_kernel]).should.equal(set([imageB.id])) - - amis_by_virtualization = conn.get_all_images( - filters={'virtualization-type': 'paravirtual'}) - set([ami.id for ami in amis_by_virtualization] - ).should.contain(imageB.id) - len(amis_by_virtualization).should.equal(3) - - amis_by_platform = conn.get_all_images(filters={'platform': 'windows'}) - set([ami.id for ami in amis_by_platform]).should.contain(imageA.id) - len(amis_by_platform).should.equal(24) - - amis_by_id = 
conn.get_all_images(filters={'image-id': imageA.id}) - set([ami.id for ami in amis_by_id]).should.equal(set([imageA.id])) - - amis_by_state = conn.get_all_images(filters={'state': 'available'}) - ami_ids_by_state = [ami.id for ami in amis_by_state] - ami_ids_by_state.should.contain(imageA.id) - ami_ids_by_state.should.contain(imageB.id) - len(amis_by_state).should.equal(36) - - amis_by_name = conn.get_all_images(filters={'name': imageA.name}) - set([ami.id for ami in amis_by_name]).should.equal(set([imageA.id])) - - amis_by_public = conn.get_all_images(filters={'is-public': True}) - set([ami.id for ami in amis_by_public]).should.contain(imageB.id) - len(amis_by_public).should.equal(35) - - amis_by_nonpublic = conn.get_all_images(filters={'is-public': False}) - set([ami.id for ami in amis_by_nonpublic]).should.contain(imageA.id) - len(amis_by_nonpublic).should.equal(1) - - -@mock_ec2_deprecated -def test_ami_filtering_via_tag(): - conn = boto.connect_vpc('the_key', 'the_secret') - - reservationA = conn.run_instances('ami-1234abcd') - instanceA = reservationA.instances[0] - imageA_id = conn.create_image( - instanceA.id, "test-ami-A", "this is a test ami") - imageA = conn.get_image(imageA_id) - imageA.add_tag("a key", "some value") - - reservationB = conn.run_instances('ami-abcd1234') - instanceB = reservationB.instances[0] - imageB_id = conn.create_image( - instanceB.id, "test-ami-B", "this is a test ami") - imageB = conn.get_image(imageB_id) - imageB.add_tag("another key", "some other value") - - amis_by_tagA = conn.get_all_images(filters={'tag:a key': 'some value'}) - set([ami.id for ami in amis_by_tagA]).should.equal(set([imageA.id])) - - amis_by_tagB = conn.get_all_images( - filters={'tag:another key': 'some other value'}) - set([ami.id for ami in amis_by_tagB]).should.equal(set([imageB.id])) - - -@mock_ec2_deprecated -def test_getting_missing_ami(): - conn = boto.connect_ec2('the_key', 'the_secret') - - with assert_raises(EC2ResponseError) as cm: - 
conn.get_image('ami-missing') - cm.exception.code.should.equal('InvalidAMIID.NotFound') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - -@mock_ec2_deprecated -def test_getting_malformed_ami(): - conn = boto.connect_ec2('the_key', 'the_secret') - - with assert_raises(EC2ResponseError) as cm: - conn.get_image('foo-missing') - cm.exception.code.should.equal('InvalidAMIID.Malformed') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - -@mock_ec2_deprecated -def test_ami_attribute_group_permissions(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") - image = conn.get_image(image_id) - - # Baseline - attributes = conn.get_image_attribute( - image.id, attribute='launchPermission') - attributes.name.should.equal('launch_permission') - attributes.attrs.should.have.length_of(0) - - ADD_GROUP_ARGS = {'image_id': image.id, - 'attribute': 'launchPermission', - 'operation': 'add', - 'groups': 'all'} - - REMOVE_GROUP_ARGS = {'image_id': image.id, - 'attribute': 'launchPermission', - 'operation': 'remove', - 'groups': 'all'} - - # Add 'all' group and confirm - with assert_raises(EC2ResponseError) as ex: - conn.modify_image_attribute( - **dict(ADD_GROUP_ARGS, **{'dry_run': True})) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the ModifyImageAttribute operation: Request would have succeeded, but DryRun flag is set') - - conn.modify_image_attribute(**ADD_GROUP_ARGS) - - attributes = conn.get_image_attribute( - image.id, attribute='launchPermission') - attributes.attrs['groups'].should.have.length_of(1) - attributes.attrs['groups'].should.equal(['all']) - image = conn.get_image(image_id) - 
image.is_public.should.equal(True) - - # Add is idempotent - conn.modify_image_attribute.when.called_with( - **ADD_GROUP_ARGS).should_not.throw(EC2ResponseError) - - # Remove 'all' group and confirm - conn.modify_image_attribute(**REMOVE_GROUP_ARGS) - - attributes = conn.get_image_attribute( - image.id, attribute='launchPermission') - attributes.attrs.should.have.length_of(0) - image = conn.get_image(image_id) - image.is_public.should.equal(False) - - # Remove is idempotent - conn.modify_image_attribute.when.called_with( - **REMOVE_GROUP_ARGS).should_not.throw(EC2ResponseError) - - -@mock_ec2_deprecated -def test_ami_attribute_user_permissions(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") - image = conn.get_image(image_id) - - # Baseline - attributes = conn.get_image_attribute( - image.id, attribute='launchPermission') - attributes.name.should.equal('launch_permission') - attributes.attrs.should.have.length_of(0) - - # Both str and int values should work. 
- USER1 = '123456789011' - USER2 = 123456789022 - - ADD_USERS_ARGS = {'image_id': image.id, - 'attribute': 'launchPermission', - 'operation': 'add', - 'user_ids': [USER1, USER2]} - - REMOVE_USERS_ARGS = {'image_id': image.id, - 'attribute': 'launchPermission', - 'operation': 'remove', - 'user_ids': [USER1, USER2]} - - REMOVE_SINGLE_USER_ARGS = {'image_id': image.id, - 'attribute': 'launchPermission', - 'operation': 'remove', - 'user_ids': [USER1]} - - # Add multiple users and confirm - conn.modify_image_attribute(**ADD_USERS_ARGS) - - attributes = conn.get_image_attribute( - image.id, attribute='launchPermission') - attributes.attrs['user_ids'].should.have.length_of(2) - set(attributes.attrs['user_ids']).should.equal( - set([str(USER1), str(USER2)])) - image = conn.get_image(image_id) - image.is_public.should.equal(False) - - # Add is idempotent - conn.modify_image_attribute.when.called_with( - **ADD_USERS_ARGS).should_not.throw(EC2ResponseError) - - # Remove single user and confirm - conn.modify_image_attribute(**REMOVE_SINGLE_USER_ARGS) - - attributes = conn.get_image_attribute( - image.id, attribute='launchPermission') - attributes.attrs['user_ids'].should.have.length_of(1) - set(attributes.attrs['user_ids']).should.equal(set([str(USER2)])) - image = conn.get_image(image_id) - image.is_public.should.equal(False) - - # Remove multiple users and confirm - conn.modify_image_attribute(**REMOVE_USERS_ARGS) - - attributes = conn.get_image_attribute( - image.id, attribute='launchPermission') - attributes.attrs.should.have.length_of(0) - image = conn.get_image(image_id) - image.is_public.should.equal(False) - - # Remove is idempotent - conn.modify_image_attribute.when.called_with( - **REMOVE_USERS_ARGS).should_not.throw(EC2ResponseError) - - -@mock_ec2 -def test_ami_describe_executable_users(): - conn = boto3.client('ec2', region_name='us-east-1') - ec2 = boto3.resource('ec2', 'us-east-1') - ec2.create_instances(ImageId='', - MinCount=1, - MaxCount=1) - response = 
conn.describe_instances(Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) - instance_id = response['Reservations'][0]['Instances'][0]['InstanceId'] - image_id = conn.create_image(InstanceId=instance_id, - Name='TestImage', )['ImageId'] - - USER1 = '123456789011' - - ADD_USER_ARGS = {'ImageId': image_id, - 'Attribute': 'launchPermission', - 'OperationType': 'add', - 'UserIds': [USER1]} - - # Add users and get no images - conn.modify_image_attribute(**ADD_USER_ARGS) - - attributes = conn.describe_image_attribute(ImageId=image_id, - Attribute='LaunchPermissions', - DryRun=False) - attributes['LaunchPermissions'].should.have.length_of(1) - attributes['LaunchPermissions'][0]['UserId'].should.equal(USER1) - images = conn.describe_images(ExecutableUsers=[USER1])['Images'] - images.should.have.length_of(1) - images[0]['ImageId'].should.equal(image_id) - - -@mock_ec2 -def test_ami_describe_executable_users_negative(): - conn = boto3.client('ec2', region_name='us-east-1') - ec2 = boto3.resource('ec2', 'us-east-1') - ec2.create_instances(ImageId='', - MinCount=1, - MaxCount=1) - response = conn.describe_instances(Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) - instance_id = response['Reservations'][0]['Instances'][0]['InstanceId'] - image_id = conn.create_image(InstanceId=instance_id, - Name='TestImage')['ImageId'] - - USER1 = '123456789011' - USER2 = '113355789012' - - ADD_USER_ARGS = {'ImageId': image_id, - 'Attribute': 'launchPermission', - 'OperationType': 'add', - 'UserIds': [USER1]} - - # Add users and get no images - # Add users and get no images - conn.modify_image_attribute(**ADD_USER_ARGS) - - attributes = conn.describe_image_attribute(ImageId=image_id, - Attribute='LaunchPermissions', - DryRun=False) - attributes['LaunchPermissions'].should.have.length_of(1) - attributes['LaunchPermissions'][0]['UserId'].should.equal(USER1) - images = conn.describe_images(ExecutableUsers=[USER2])['Images'] - images.should.have.length_of(0) - - 
-@mock_ec2 -def test_ami_describe_executable_users_and_filter(): - conn = boto3.client('ec2', region_name='us-east-1') - ec2 = boto3.resource('ec2', 'us-east-1') - ec2.create_instances(ImageId='', - MinCount=1, - MaxCount=1) - response = conn.describe_instances(Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) - instance_id = response['Reservations'][0]['Instances'][0]['InstanceId'] - image_id = conn.create_image(InstanceId=instance_id, - Name='ImageToDelete', )['ImageId'] - - USER1 = '123456789011' - - ADD_USER_ARGS = {'ImageId': image_id, - 'Attribute': 'launchPermission', - 'OperationType': 'add', - 'UserIds': [USER1]} - - # Add users and get no images - conn.modify_image_attribute(**ADD_USER_ARGS) - - attributes = conn.describe_image_attribute(ImageId=image_id, - Attribute='LaunchPermissions', - DryRun=False) - attributes['LaunchPermissions'].should.have.length_of(1) - attributes['LaunchPermissions'][0]['UserId'].should.equal(USER1) - images = conn.describe_images(ExecutableUsers=[USER1], - Filters=[{'Name': 'state', 'Values': ['available']}])['Images'] - images.should.have.length_of(1) - images[0]['ImageId'].should.equal(image_id) - - -@mock_ec2_deprecated -def test_ami_attribute_user_and_group_permissions(): - """ - Boto supports adding/removing both users and groups at the same time. - Just spot-check this -- input variations, idempotency, etc are validated - via user-specific and group-specific tests above. 
- """ - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") - image = conn.get_image(image_id) - - # Baseline - attributes = conn.get_image_attribute( - image.id, attribute='launchPermission') - attributes.name.should.equal('launch_permission') - attributes.attrs.should.have.length_of(0) - - USER1 = '123456789011' - USER2 = '123456789022' - - ADD_ARGS = {'image_id': image.id, - 'attribute': 'launchPermission', - 'operation': 'add', - 'groups': ['all'], - 'user_ids': [USER1, USER2]} - - REMOVE_ARGS = {'image_id': image.id, - 'attribute': 'launchPermission', - 'operation': 'remove', - 'groups': ['all'], - 'user_ids': [USER1, USER2]} - - # Add and confirm - conn.modify_image_attribute(**ADD_ARGS) - - attributes = conn.get_image_attribute( - image.id, attribute='launchPermission') - attributes.attrs['user_ids'].should.have.length_of(2) - set(attributes.attrs['user_ids']).should.equal(set([USER1, USER2])) - set(attributes.attrs['groups']).should.equal(set(['all'])) - image = conn.get_image(image_id) - image.is_public.should.equal(True) - - # Remove and confirm - conn.modify_image_attribute(**REMOVE_ARGS) - - attributes = conn.get_image_attribute( - image.id, attribute='launchPermission') - attributes.attrs.should.have.length_of(0) - image = conn.get_image(image_id) - image.is_public.should.equal(False) - - -@mock_ec2_deprecated -def test_ami_attribute_error_cases(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") - image = conn.get_image(image_id) - - # Error: Add with group != 'all' - with assert_raises(EC2ResponseError) as cm: - conn.modify_image_attribute(image.id, - attribute='launchPermission', - operation='add', - groups='everyone') - 
cm.exception.code.should.equal('InvalidAMIAttributeItemValue') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - # Error: Add with user ID that isn't an integer. - with assert_raises(EC2ResponseError) as cm: - conn.modify_image_attribute(image.id, - attribute='launchPermission', - operation='add', - user_ids='12345678901A') - cm.exception.code.should.equal('InvalidAMIAttributeItemValue') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - # Error: Add with user ID that is > length 12. - with assert_raises(EC2ResponseError) as cm: - conn.modify_image_attribute(image.id, - attribute='launchPermission', - operation='add', - user_ids='1234567890123') - cm.exception.code.should.equal('InvalidAMIAttributeItemValue') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - # Error: Add with user ID that is < length 12. - with assert_raises(EC2ResponseError) as cm: - conn.modify_image_attribute(image.id, - attribute='launchPermission', - operation='add', - user_ids='12345678901') - cm.exception.code.should.equal('InvalidAMIAttributeItemValue') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - # Error: Add with one invalid user ID among other valid IDs, ensure no - # partial changes. 
- with assert_raises(EC2ResponseError) as cm: - conn.modify_image_attribute(image.id, - attribute='launchPermission', - operation='add', - user_ids=['123456789011', 'foo', '123456789022']) - cm.exception.code.should.equal('InvalidAMIAttributeItemValue') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - attributes = conn.get_image_attribute( - image.id, attribute='launchPermission') - attributes.attrs.should.have.length_of(0) - - # Error: Add with invalid image ID - with assert_raises(EC2ResponseError) as cm: - conn.modify_image_attribute("ami-abcd1234", - attribute='launchPermission', - operation='add', - groups='all') - cm.exception.code.should.equal('InvalidAMIID.NotFound') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - # Error: Remove with invalid image ID - with assert_raises(EC2ResponseError) as cm: - conn.modify_image_attribute("ami-abcd1234", - attribute='launchPermission', - operation='remove', - groups='all') - cm.exception.code.should.equal('InvalidAMIID.NotFound') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - -@mock_ec2 -def test_ami_describe_non_existent(): - ec2 = boto3.resource('ec2', region_name='us-west-1') - # Valid pattern but non-existent id - img = ec2.Image('ami-abcd1234') - with assert_raises(ClientError): - img.load() - # Invalid ami pattern - img = ec2.Image('not_an_ami_id') - with assert_raises(ClientError): - img.load() - - -@mock_ec2 -def test_ami_filter_wildcard(): - ec2_resource = boto3.resource('ec2', region_name='us-west-1') - ec2_client = boto3.client('ec2', region_name='us-west-1') - - instance = ec2_resource.create_instances(ImageId='ami-1234abcd', MinCount=1, MaxCount=1)[0] - instance.create_image(Name='test-image') - - # create an image with the same owner but will not match the filter - instance.create_image(Name='not-matching-image') - - my_images = ec2_client.describe_images( - Owners=['111122223333'], - 
Filters=[{'Name': 'name', 'Values': ['test*']}] - )['Images'] - my_images.should.have.length_of(1) - - -@mock_ec2 -def test_ami_filter_by_owner_id(): - client = boto3.client('ec2', region_name='us-east-1') - - ubuntu_id = '099720109477' - - ubuntu_images = client.describe_images(Owners=[ubuntu_id]) - all_images = client.describe_images() - - ubuntu_ids = [ami['OwnerId'] for ami in ubuntu_images['Images']] - all_ids = [ami['OwnerId'] for ami in all_images['Images']] - - # Assert all ubuntu_ids are the same and one equals ubuntu_id - assert all(ubuntu_ids) and ubuntu_ids[0] == ubuntu_id - # Check we actually have a subset of images - assert len(ubuntu_ids) < len(all_ids) - - -@mock_ec2 -def test_ami_filter_by_self(): - ec2_resource = boto3.resource('ec2', region_name='us-west-1') - ec2_client = boto3.client('ec2', region_name='us-west-1') - - my_images = ec2_client.describe_images(Owners=['self'])['Images'] - my_images.should.have.length_of(0) - - # Create a new image - instance = ec2_resource.create_instances(ImageId='ami-1234abcd', MinCount=1, MaxCount=1)[0] - instance.create_image(Name='test-image') - - my_images = ec2_client.describe_images(Owners=['self'])['Images'] - my_images.should.have.length_of(1) - - -@mock_ec2 -def test_ami_snapshots_have_correct_owner(): - ec2_client = boto3.client('ec2', region_name='us-west-1') - - images_response = ec2_client.describe_images() - - owner_id_to_snapshot_ids = {} - for image in images_response['Images']: - owner_id = image['OwnerId'] - snapshot_ids = [ - block_device_mapping['Ebs']['SnapshotId'] - for block_device_mapping in image['BlockDeviceMappings'] - ] - existing_snapshot_ids = owner_id_to_snapshot_ids.get(owner_id, []) - owner_id_to_snapshot_ids[owner_id] = ( - existing_snapshot_ids + snapshot_ids - ) - - for owner_id in owner_id_to_snapshot_ids: - snapshots_rseponse = ec2_client.describe_snapshots( - SnapshotIds=owner_id_to_snapshot_ids[owner_id] - ) - - for snapshot in snapshots_rseponse['Snapshots']: - assert 
owner_id == snapshot['OwnerId'] +from __future__ import unicode_literals + +import boto +import boto.ec2 +import boto3 +from boto.exception import EC2ResponseError +from botocore.exceptions import ClientError +# Ensure 'assert_raises' context manager support for Python 2.6 +from nose.tools import assert_raises +import sure # noqa + +from moto import mock_ec2_deprecated, mock_ec2 +from moto.ec2.models import AMIS +from tests.helpers import requires_boto_gte + + +@mock_ec2_deprecated +def test_ami_create_and_delete(): + conn = boto.connect_ec2('the_key', 'the_secret') + + initial_ami_count = len(AMIS) + conn.get_all_volumes().should.have.length_of(0) + conn.get_all_snapshots().should.have.length_of(initial_ami_count) + + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + + with assert_raises(EC2ResponseError) as ex: + image_id = conn.create_image( + instance.id, "test-ami", "this is a test ami", dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the CreateImage operation: Request would have succeeded, but DryRun flag is set') + + image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") + + all_images = conn.get_all_images() + set([i.id for i in all_images]).should.contain(image_id) + + retrieved_image = [i for i in all_images if i.id == image_id][0] + + retrieved_image.id.should.equal(image_id) + retrieved_image.virtualization_type.should.equal(instance.virtualization_type) + retrieved_image.architecture.should.equal(instance.architecture) + retrieved_image.kernel_id.should.equal(instance.kernel) + retrieved_image.platform.should.equal(instance.platform) + retrieved_image.creationDate.should_not.be.none + instance.terminate() + + # Ensure we're no longer creating a volume + volumes = conn.get_all_volumes() + volumes.should.have.length_of(0) + + # Validate 
auto-created snapshot + snapshots = conn.get_all_snapshots() + snapshots.should.have.length_of(initial_ami_count + 1) + + retrieved_image_snapshot_id = retrieved_image.block_device_mapping.current_value.snapshot_id + [s.id for s in snapshots].should.contain(retrieved_image_snapshot_id) + snapshot = [s for s in snapshots if s.id == retrieved_image_snapshot_id][0] + snapshot.description.should.equal( + "Auto-created snapshot for AMI {0}".format(retrieved_image.id)) + + # root device should be in AMI's block device mappings + root_mapping = retrieved_image.block_device_mapping.get(retrieved_image.root_device_name) + root_mapping.should_not.be.none + + # Deregister + with assert_raises(EC2ResponseError) as ex: + success = conn.deregister_image(image_id, dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the DeregisterImage operation: Request would have succeeded, but DryRun flag is set') + + success = conn.deregister_image(image_id) + success.should.be.true + + with assert_raises(EC2ResponseError) as cm: + conn.deregister_image(image_id) + cm.exception.code.should.equal('InvalidAMIID.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@requires_boto_gte("2.14.0") +@mock_ec2_deprecated +def test_ami_copy(): + conn = boto.ec2.connect_to_region("us-west-1") + + initial_ami_count = len(AMIS) + conn.get_all_volumes().should.have.length_of(0) + conn.get_all_snapshots().should.have.length_of(initial_ami_count) + + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + + source_image_id = conn.create_image( + instance.id, "test-ami", "this is a test ami") + instance.terminate() + source_image = conn.get_all_images(image_ids=[source_image_id])[0] + + # Boto returns a 'CopyImage' object with an image_id attribute here. 
Use + # the image_id to fetch the full info. + with assert_raises(EC2ResponseError) as ex: + copy_image_ref = conn.copy_image( + source_image.region.name, source_image.id, "test-copy-ami", "this is a test copy ami", + dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the CopyImage operation: Request would have succeeded, but DryRun flag is set') + + copy_image_ref = conn.copy_image( + source_image.region.name, source_image.id, "test-copy-ami", "this is a test copy ami") + copy_image_id = copy_image_ref.image_id + copy_image = conn.get_all_images(image_ids=[copy_image_id])[0] + + copy_image.id.should.equal(copy_image_id) + copy_image.virtualization_type.should.equal( + source_image.virtualization_type) + copy_image.architecture.should.equal(source_image.architecture) + copy_image.kernel_id.should.equal(source_image.kernel_id) + copy_image.platform.should.equal(source_image.platform) + + # Ensure we're no longer creating a volume + conn.get_all_volumes().should.have.length_of(0) + + # Validate auto-created snapshot + conn.get_all_snapshots().should.have.length_of(initial_ami_count + 2) + + copy_image.block_device_mapping.current_value.snapshot_id.should_not.equal( + source_image.block_device_mapping.current_value.snapshot_id) + + # Copy from non-existent source ID. + with assert_raises(EC2ResponseError) as cm: + conn.copy_image(source_image.region.name, 'ami-abcd1234', + "test-copy-ami", "this is a test copy ami") + cm.exception.code.should.equal('InvalidAMIID.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + # Copy from non-existent source region. 
+ with assert_raises(EC2ResponseError) as cm: + invalid_region = 'us-east-1' if (source_image.region.name != + 'us-east-1') else 'us-west-1' + conn.copy_image(invalid_region, source_image.id, + "test-copy-ami", "this is a test copy ami") + cm.exception.code.should.equal('InvalidAMIID.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@mock_ec2_deprecated +def test_ami_tagging(): + conn = boto.connect_vpc('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + conn.create_image(instance.id, "test-ami", "this is a test ami") + image = conn.get_all_images()[0] + + with assert_raises(EC2ResponseError) as ex: + image.add_tag("a key", "some value", dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set') + + image.add_tag("a key", "some value") + + tag = conn.get_all_tags()[0] + tag.name.should.equal("a key") + tag.value.should.equal("some value") + + # Refresh the DHCP options + image = conn.get_all_images()[0] + image.tags.should.have.length_of(1) + image.tags["a key"].should.equal("some value") + + +@mock_ec2_deprecated +def test_ami_create_from_missing_instance(): + conn = boto.connect_ec2('the_key', 'the_secret') + args = ["i-abcdefg", "test-ami", "this is a test ami"] + + with assert_raises(EC2ResponseError) as cm: + conn.create_image(*args) + cm.exception.code.should.equal('InvalidInstanceID.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@mock_ec2_deprecated +def test_ami_pulls_attributes_from_instance(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + instance.modify_attribute("kernel", 
"test-kernel") + + image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") + image = conn.get_image(image_id) + image.kernel_id.should.equal('test-kernel') + + +@mock_ec2_deprecated +def test_ami_filters(): + conn = boto.connect_ec2('the_key', 'the_secret') + + reservationA = conn.run_instances('ami-1234abcd') + instanceA = reservationA.instances[0] + instanceA.modify_attribute("architecture", "i386") + instanceA.modify_attribute("kernel", "k-1234abcd") + instanceA.modify_attribute("platform", "windows") + instanceA.modify_attribute("virtualization_type", "hvm") + imageA_id = conn.create_image( + instanceA.id, "test-ami-A", "this is a test ami") + imageA = conn.get_image(imageA_id) + + reservationB = conn.run_instances('ami-abcd1234') + instanceB = reservationB.instances[0] + instanceB.modify_attribute("architecture", "x86_64") + instanceB.modify_attribute("kernel", "k-abcd1234") + instanceB.modify_attribute("platform", "linux") + instanceB.modify_attribute("virtualization_type", "paravirtual") + imageB_id = conn.create_image( + instanceB.id, "test-ami-B", "this is a test ami") + imageB = conn.get_image(imageB_id) + imageB.set_launch_permissions(group_names=("all")) + + amis_by_architecture = conn.get_all_images( + filters={'architecture': 'x86_64'}) + set([ami.id for ami in amis_by_architecture]).should.contain(imageB.id) + len(amis_by_architecture).should.equal(35) + + amis_by_kernel = conn.get_all_images(filters={'kernel-id': 'k-abcd1234'}) + set([ami.id for ami in amis_by_kernel]).should.equal(set([imageB.id])) + + amis_by_virtualization = conn.get_all_images( + filters={'virtualization-type': 'paravirtual'}) + set([ami.id for ami in amis_by_virtualization] + ).should.contain(imageB.id) + len(amis_by_virtualization).should.equal(3) + + amis_by_platform = conn.get_all_images(filters={'platform': 'windows'}) + set([ami.id for ami in amis_by_platform]).should.contain(imageA.id) + len(amis_by_platform).should.equal(24) + + amis_by_id = 
conn.get_all_images(filters={'image-id': imageA.id}) + set([ami.id for ami in amis_by_id]).should.equal(set([imageA.id])) + + amis_by_state = conn.get_all_images(filters={'state': 'available'}) + ami_ids_by_state = [ami.id for ami in amis_by_state] + ami_ids_by_state.should.contain(imageA.id) + ami_ids_by_state.should.contain(imageB.id) + len(amis_by_state).should.equal(36) + + amis_by_name = conn.get_all_images(filters={'name': imageA.name}) + set([ami.id for ami in amis_by_name]).should.equal(set([imageA.id])) + + amis_by_public = conn.get_all_images(filters={'is-public': 'true'}) + set([ami.id for ami in amis_by_public]).should.contain(imageB.id) + len(amis_by_public).should.equal(35) + + amis_by_nonpublic = conn.get_all_images(filters={'is-public': 'false'}) + set([ami.id for ami in amis_by_nonpublic]).should.contain(imageA.id) + len(amis_by_nonpublic).should.equal(1) + + +@mock_ec2_deprecated +def test_ami_filtering_via_tag(): + conn = boto.connect_vpc('the_key', 'the_secret') + + reservationA = conn.run_instances('ami-1234abcd') + instanceA = reservationA.instances[0] + imageA_id = conn.create_image( + instanceA.id, "test-ami-A", "this is a test ami") + imageA = conn.get_image(imageA_id) + imageA.add_tag("a key", "some value") + + reservationB = conn.run_instances('ami-abcd1234') + instanceB = reservationB.instances[0] + imageB_id = conn.create_image( + instanceB.id, "test-ami-B", "this is a test ami") + imageB = conn.get_image(imageB_id) + imageB.add_tag("another key", "some other value") + + amis_by_tagA = conn.get_all_images(filters={'tag:a key': 'some value'}) + set([ami.id for ami in amis_by_tagA]).should.equal(set([imageA.id])) + + amis_by_tagB = conn.get_all_images( + filters={'tag:another key': 'some other value'}) + set([ami.id for ami in amis_by_tagB]).should.equal(set([imageB.id])) + + +@mock_ec2_deprecated +def test_getting_missing_ami(): + conn = boto.connect_ec2('the_key', 'the_secret') + + with assert_raises(EC2ResponseError) as cm: + 
conn.get_image('ami-missing') + cm.exception.code.should.equal('InvalidAMIID.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@mock_ec2_deprecated +def test_getting_malformed_ami(): + conn = boto.connect_ec2('the_key', 'the_secret') + + with assert_raises(EC2ResponseError) as cm: + conn.get_image('foo-missing') + cm.exception.code.should.equal('InvalidAMIID.Malformed') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@mock_ec2_deprecated +def test_ami_attribute_group_permissions(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") + image = conn.get_image(image_id) + + # Baseline + attributes = conn.get_image_attribute( + image.id, attribute='launchPermission') + attributes.name.should.equal('launch_permission') + attributes.attrs.should.have.length_of(0) + + ADD_GROUP_ARGS = {'image_id': image.id, + 'attribute': 'launchPermission', + 'operation': 'add', + 'groups': 'all'} + + REMOVE_GROUP_ARGS = {'image_id': image.id, + 'attribute': 'launchPermission', + 'operation': 'remove', + 'groups': 'all'} + + # Add 'all' group and confirm + with assert_raises(EC2ResponseError) as ex: + conn.modify_image_attribute( + **dict(ADD_GROUP_ARGS, **{'dry_run': True})) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the ModifyImageAttribute operation: Request would have succeeded, but DryRun flag is set') + + conn.modify_image_attribute(**ADD_GROUP_ARGS) + + attributes = conn.get_image_attribute( + image.id, attribute='launchPermission') + attributes.attrs['groups'].should.have.length_of(1) + attributes.attrs['groups'].should.equal(['all']) + image = conn.get_image(image_id) + 
image.is_public.should.equal(True) + + # Add is idempotent + conn.modify_image_attribute.when.called_with( + **ADD_GROUP_ARGS).should_not.throw(EC2ResponseError) + + # Remove 'all' group and confirm + conn.modify_image_attribute(**REMOVE_GROUP_ARGS) + + attributes = conn.get_image_attribute( + image.id, attribute='launchPermission') + attributes.attrs.should.have.length_of(0) + image = conn.get_image(image_id) + image.is_public.should.equal(False) + + # Remove is idempotent + conn.modify_image_attribute.when.called_with( + **REMOVE_GROUP_ARGS).should_not.throw(EC2ResponseError) + + +@mock_ec2_deprecated +def test_ami_attribute_user_permissions(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") + image = conn.get_image(image_id) + + # Baseline + attributes = conn.get_image_attribute( + image.id, attribute='launchPermission') + attributes.name.should.equal('launch_permission') + attributes.attrs.should.have.length_of(0) + + # Both str and int values should work. 
+ USER1 = '123456789011' + USER2 = 123456789022 + + ADD_USERS_ARGS = {'image_id': image.id, + 'attribute': 'launchPermission', + 'operation': 'add', + 'user_ids': [USER1, USER2]} + + REMOVE_USERS_ARGS = {'image_id': image.id, + 'attribute': 'launchPermission', + 'operation': 'remove', + 'user_ids': [USER1, USER2]} + + REMOVE_SINGLE_USER_ARGS = {'image_id': image.id, + 'attribute': 'launchPermission', + 'operation': 'remove', + 'user_ids': [USER1]} + + # Add multiple users and confirm + conn.modify_image_attribute(**ADD_USERS_ARGS) + + attributes = conn.get_image_attribute( + image.id, attribute='launchPermission') + attributes.attrs['user_ids'].should.have.length_of(2) + set(attributes.attrs['user_ids']).should.equal( + set([str(USER1), str(USER2)])) + image = conn.get_image(image_id) + image.is_public.should.equal(False) + + # Add is idempotent + conn.modify_image_attribute.when.called_with( + **ADD_USERS_ARGS).should_not.throw(EC2ResponseError) + + # Remove single user and confirm + conn.modify_image_attribute(**REMOVE_SINGLE_USER_ARGS) + + attributes = conn.get_image_attribute( + image.id, attribute='launchPermission') + attributes.attrs['user_ids'].should.have.length_of(1) + set(attributes.attrs['user_ids']).should.equal(set([str(USER2)])) + image = conn.get_image(image_id) + image.is_public.should.equal(False) + + # Remove multiple users and confirm + conn.modify_image_attribute(**REMOVE_USERS_ARGS) + + attributes = conn.get_image_attribute( + image.id, attribute='launchPermission') + attributes.attrs.should.have.length_of(0) + image = conn.get_image(image_id) + image.is_public.should.equal(False) + + # Remove is idempotent + conn.modify_image_attribute.when.called_with( + **REMOVE_USERS_ARGS).should_not.throw(EC2ResponseError) + + +@mock_ec2 +def test_ami_describe_executable_users(): + conn = boto3.client('ec2', region_name='us-east-1') + ec2 = boto3.resource('ec2', 'us-east-1') + ec2.create_instances(ImageId='', + MinCount=1, + MaxCount=1) + response = 
conn.describe_instances(Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) + instance_id = response['Reservations'][0]['Instances'][0]['InstanceId'] + image_id = conn.create_image(InstanceId=instance_id, + Name='TestImage', )['ImageId'] + + USER1 = '123456789011' + + ADD_USER_ARGS = {'ImageId': image_id, + 'Attribute': 'launchPermission', + 'OperationType': 'add', + 'UserIds': [USER1]} + + # Add users and get no images + conn.modify_image_attribute(**ADD_USER_ARGS) + + attributes = conn.describe_image_attribute(ImageId=image_id, + Attribute='LaunchPermissions', + DryRun=False) + attributes['LaunchPermissions'].should.have.length_of(1) + attributes['LaunchPermissions'][0]['UserId'].should.equal(USER1) + images = conn.describe_images(ExecutableUsers=[USER1])['Images'] + images.should.have.length_of(1) + images[0]['ImageId'].should.equal(image_id) + + +@mock_ec2 +def test_ami_describe_executable_users_negative(): + conn = boto3.client('ec2', region_name='us-east-1') + ec2 = boto3.resource('ec2', 'us-east-1') + ec2.create_instances(ImageId='', + MinCount=1, + MaxCount=1) + response = conn.describe_instances(Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) + instance_id = response['Reservations'][0]['Instances'][0]['InstanceId'] + image_id = conn.create_image(InstanceId=instance_id, + Name='TestImage')['ImageId'] + + USER1 = '123456789011' + USER2 = '113355789012' + + ADD_USER_ARGS = {'ImageId': image_id, + 'Attribute': 'launchPermission', + 'OperationType': 'add', + 'UserIds': [USER1]} + + # Add users and get no images + # Add users and get no images + conn.modify_image_attribute(**ADD_USER_ARGS) + + attributes = conn.describe_image_attribute(ImageId=image_id, + Attribute='LaunchPermissions', + DryRun=False) + attributes['LaunchPermissions'].should.have.length_of(1) + attributes['LaunchPermissions'][0]['UserId'].should.equal(USER1) + images = conn.describe_images(ExecutableUsers=[USER2])['Images'] + images.should.have.length_of(0) + + 
+@mock_ec2 +def test_ami_describe_executable_users_and_filter(): + conn = boto3.client('ec2', region_name='us-east-1') + ec2 = boto3.resource('ec2', 'us-east-1') + ec2.create_instances(ImageId='', + MinCount=1, + MaxCount=1) + response = conn.describe_instances(Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) + instance_id = response['Reservations'][0]['Instances'][0]['InstanceId'] + image_id = conn.create_image(InstanceId=instance_id, + Name='ImageToDelete', )['ImageId'] + + USER1 = '123456789011' + + ADD_USER_ARGS = {'ImageId': image_id, + 'Attribute': 'launchPermission', + 'OperationType': 'add', + 'UserIds': [USER1]} + + # Add users and get no images + conn.modify_image_attribute(**ADD_USER_ARGS) + + attributes = conn.describe_image_attribute(ImageId=image_id, + Attribute='LaunchPermissions', + DryRun=False) + attributes['LaunchPermissions'].should.have.length_of(1) + attributes['LaunchPermissions'][0]['UserId'].should.equal(USER1) + images = conn.describe_images(ExecutableUsers=[USER1], + Filters=[{'Name': 'state', 'Values': ['available']}])['Images'] + images.should.have.length_of(1) + images[0]['ImageId'].should.equal(image_id) + + +@mock_ec2_deprecated +def test_ami_attribute_user_and_group_permissions(): + """ + Boto supports adding/removing both users and groups at the same time. + Just spot-check this -- input variations, idempotency, etc are validated + via user-specific and group-specific tests above. 
+ """ + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") + image = conn.get_image(image_id) + + # Baseline + attributes = conn.get_image_attribute( + image.id, attribute='launchPermission') + attributes.name.should.equal('launch_permission') + attributes.attrs.should.have.length_of(0) + + USER1 = '123456789011' + USER2 = '123456789022' + + ADD_ARGS = {'image_id': image.id, + 'attribute': 'launchPermission', + 'operation': 'add', + 'groups': ['all'], + 'user_ids': [USER1, USER2]} + + REMOVE_ARGS = {'image_id': image.id, + 'attribute': 'launchPermission', + 'operation': 'remove', + 'groups': ['all'], + 'user_ids': [USER1, USER2]} + + # Add and confirm + conn.modify_image_attribute(**ADD_ARGS) + + attributes = conn.get_image_attribute( + image.id, attribute='launchPermission') + attributes.attrs['user_ids'].should.have.length_of(2) + set(attributes.attrs['user_ids']).should.equal(set([USER1, USER2])) + set(attributes.attrs['groups']).should.equal(set(['all'])) + image = conn.get_image(image_id) + image.is_public.should.equal(True) + + # Remove and confirm + conn.modify_image_attribute(**REMOVE_ARGS) + + attributes = conn.get_image_attribute( + image.id, attribute='launchPermission') + attributes.attrs.should.have.length_of(0) + image = conn.get_image(image_id) + image.is_public.should.equal(False) + + +@mock_ec2_deprecated +def test_ami_attribute_error_cases(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") + image = conn.get_image(image_id) + + # Error: Add with group != 'all' + with assert_raises(EC2ResponseError) as cm: + conn.modify_image_attribute(image.id, + attribute='launchPermission', + operation='add', + groups='everyone') + 
cm.exception.code.should.equal('InvalidAMIAttributeItemValue') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + # Error: Add with user ID that isn't an integer. + with assert_raises(EC2ResponseError) as cm: + conn.modify_image_attribute(image.id, + attribute='launchPermission', + operation='add', + user_ids='12345678901A') + cm.exception.code.should.equal('InvalidAMIAttributeItemValue') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + # Error: Add with user ID that is > length 12. + with assert_raises(EC2ResponseError) as cm: + conn.modify_image_attribute(image.id, + attribute='launchPermission', + operation='add', + user_ids='1234567890123') + cm.exception.code.should.equal('InvalidAMIAttributeItemValue') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + # Error: Add with user ID that is < length 12. + with assert_raises(EC2ResponseError) as cm: + conn.modify_image_attribute(image.id, + attribute='launchPermission', + operation='add', + user_ids='12345678901') + cm.exception.code.should.equal('InvalidAMIAttributeItemValue') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + # Error: Add with one invalid user ID among other valid IDs, ensure no + # partial changes. 
+ with assert_raises(EC2ResponseError) as cm: + conn.modify_image_attribute(image.id, + attribute='launchPermission', + operation='add', + user_ids=['123456789011', 'foo', '123456789022']) + cm.exception.code.should.equal('InvalidAMIAttributeItemValue') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + attributes = conn.get_image_attribute( + image.id, attribute='launchPermission') + attributes.attrs.should.have.length_of(0) + + # Error: Add with invalid image ID + with assert_raises(EC2ResponseError) as cm: + conn.modify_image_attribute("ami-abcd1234", + attribute='launchPermission', + operation='add', + groups='all') + cm.exception.code.should.equal('InvalidAMIID.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + # Error: Remove with invalid image ID + with assert_raises(EC2ResponseError) as cm: + conn.modify_image_attribute("ami-abcd1234", + attribute='launchPermission', + operation='remove', + groups='all') + cm.exception.code.should.equal('InvalidAMIID.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@mock_ec2 +def test_ami_describe_non_existent(): + ec2 = boto3.resource('ec2', region_name='us-west-1') + # Valid pattern but non-existent id + img = ec2.Image('ami-abcd1234') + with assert_raises(ClientError): + img.load() + # Invalid ami pattern + img = ec2.Image('not_an_ami_id') + with assert_raises(ClientError): + img.load() + + +@mock_ec2 +def test_ami_filter_wildcard(): + ec2_resource = boto3.resource('ec2', region_name='us-west-1') + ec2_client = boto3.client('ec2', region_name='us-west-1') + + instance = ec2_resource.create_instances(ImageId='ami-1234abcd', MinCount=1, MaxCount=1)[0] + instance.create_image(Name='test-image') + + # create an image with the same owner but will not match the filter + instance.create_image(Name='not-matching-image') + + my_images = ec2_client.describe_images( + Owners=['111122223333'], + 
Filters=[{'Name': 'name', 'Values': ['test*']}] + )['Images'] + my_images.should.have.length_of(1) + + +@mock_ec2 +def test_ami_filter_by_owner_id(): + client = boto3.client('ec2', region_name='us-east-1') + + ubuntu_id = '099720109477' + + ubuntu_images = client.describe_images(Owners=[ubuntu_id]) + all_images = client.describe_images() + + ubuntu_ids = [ami['OwnerId'] for ami in ubuntu_images['Images']] + all_ids = [ami['OwnerId'] for ami in all_images['Images']] + + # Assert all ubuntu_ids are the same and one equals ubuntu_id + assert all(ubuntu_ids) and ubuntu_ids[0] == ubuntu_id + # Check we actually have a subset of images + assert len(ubuntu_ids) < len(all_ids) + + +@mock_ec2 +def test_ami_filter_by_self(): + ec2_resource = boto3.resource('ec2', region_name='us-west-1') + ec2_client = boto3.client('ec2', region_name='us-west-1') + + my_images = ec2_client.describe_images(Owners=['self'])['Images'] + my_images.should.have.length_of(0) + + # Create a new image + instance = ec2_resource.create_instances(ImageId='ami-1234abcd', MinCount=1, MaxCount=1)[0] + instance.create_image(Name='test-image') + + my_images = ec2_client.describe_images(Owners=['self'])['Images'] + my_images.should.have.length_of(1) + + +@mock_ec2 +def test_ami_snapshots_have_correct_owner(): + ec2_client = boto3.client('ec2', region_name='us-west-1') + + images_response = ec2_client.describe_images() + + owner_id_to_snapshot_ids = {} + for image in images_response['Images']: + owner_id = image['OwnerId'] + snapshot_ids = [ + block_device_mapping['Ebs']['SnapshotId'] + for block_device_mapping in image['BlockDeviceMappings'] + ] + existing_snapshot_ids = owner_id_to_snapshot_ids.get(owner_id, []) + owner_id_to_snapshot_ids[owner_id] = ( + existing_snapshot_ids + snapshot_ids + ) + + for owner_id in owner_id_to_snapshot_ids: + snapshots_rseponse = ec2_client.describe_snapshots( + SnapshotIds=owner_id_to_snapshot_ids[owner_id] + ) + + for snapshot in snapshots_rseponse['Snapshots']: + assert 
owner_id == snapshot['OwnerId'] diff --git a/tests/test_ec2/test_elastic_block_store.py b/tests/test_ec2/test_elastic_block_store.py index a5583f44b..8f4a00b13 100644 --- a/tests/test_ec2/test_elastic_block_store.py +++ b/tests/test_ec2/test_elastic_block_store.py @@ -1,665 +1,677 @@ -from __future__ import unicode_literals -# Ensure 'assert_raises' context manager support for Python 2.6 -import tests.backport_assert_raises -from nose.tools import assert_raises - -from moto.ec2 import ec2_backends -import boto -import boto3 -from botocore.exceptions import ClientError -from boto.exception import EC2ResponseError -from freezegun import freeze_time -import sure # noqa - -from moto import mock_ec2_deprecated, mock_ec2 - - -@mock_ec2_deprecated -def test_create_and_delete_volume(): - conn = boto.connect_ec2('the_key', 'the_secret') - volume = conn.create_volume(80, "us-east-1a") - - all_volumes = conn.get_all_volumes() - - current_volume = [item for item in all_volumes if item.id == volume.id] - current_volume.should.have.length_of(1) - current_volume[0].size.should.equal(80) - current_volume[0].zone.should.equal("us-east-1a") - current_volume[0].encrypted.should.be(False) - - volume = current_volume[0] - - with assert_raises(EC2ResponseError) as ex: - volume.delete(dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the DeleteVolume operation: Request would have succeeded, but DryRun flag is set') - - volume.delete() - - all_volumes = conn.get_all_volumes() - my_volume = [item for item in all_volumes if item.id == volume.id] - my_volume.should.have.length_of(0) - - # Deleting something that was already deleted should throw an error - with assert_raises(EC2ResponseError) as cm: - volume.delete() - cm.exception.code.should.equal('InvalidVolume.NotFound') - cm.exception.status.should.equal(400) - 
cm.exception.request_id.should_not.be.none - - -@mock_ec2_deprecated -def test_create_encrypted_volume_dryrun(): - conn = boto.connect_ec2('the_key', 'the_secret') - with assert_raises(EC2ResponseError) as ex: - conn.create_volume(80, "us-east-1a", encrypted=True, dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the CreateVolume operation: Request would have succeeded, but DryRun flag is set') - - -@mock_ec2_deprecated -def test_create_encrypted_volume(): - conn = boto.connect_ec2('the_key', 'the_secret') - volume = conn.create_volume(80, "us-east-1a", encrypted=True) - - with assert_raises(EC2ResponseError) as ex: - conn.create_volume(80, "us-east-1a", encrypted=True, dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the CreateVolume operation: Request would have succeeded, but DryRun flag is set') - - all_volumes = [vol for vol in conn.get_all_volumes() if vol.id == volume.id] - all_volumes[0].encrypted.should.be(True) - - -@mock_ec2_deprecated -def test_filter_volume_by_id(): - conn = boto.connect_ec2('the_key', 'the_secret') - volume1 = conn.create_volume(80, "us-east-1a") - volume2 = conn.create_volume(36, "us-east-1b") - volume3 = conn.create_volume(20, "us-east-1c") - vol1 = conn.get_all_volumes(volume_ids=volume3.id) - vol1.should.have.length_of(1) - vol1[0].size.should.equal(20) - vol1[0].zone.should.equal('us-east-1c') - vol2 = conn.get_all_volumes(volume_ids=[volume1.id, volume2.id]) - vol2.should.have.length_of(2) - - with assert_raises(EC2ResponseError) as cm: - conn.get_all_volumes(volume_ids=['vol-does_not_exist']) - cm.exception.code.should.equal('InvalidVolume.NotFound') - cm.exception.status.should.equal(400) - 
cm.exception.request_id.should_not.be.none - - -@mock_ec2_deprecated -def test_volume_filters(): - conn = boto.connect_ec2('the_key', 'the_secret') - - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - - instance.update() - - volume1 = conn.create_volume(80, "us-east-1a", encrypted=True) - volume2 = conn.create_volume(36, "us-east-1b", encrypted=False) - volume3 = conn.create_volume(20, "us-east-1c", encrypted=True) - - snapshot = volume3.create_snapshot(description='testsnap') - volume4 = conn.create_volume(25, "us-east-1a", snapshot=snapshot) - - conn.create_tags([volume1.id], {'testkey1': 'testvalue1'}) - conn.create_tags([volume2.id], {'testkey2': 'testvalue2'}) - - volume1.update() - volume2.update() - volume3.update() - volume4.update() - - block_mapping = instance.block_device_mapping['/dev/sda1'] - - volume_ids = (volume1.id, volume2.id, volume3.id, volume4.id, block_mapping.volume_id) - - volumes_by_attach_time = conn.get_all_volumes( - filters={'attachment.attach-time': block_mapping.attach_time}) - set([vol.id for vol in volumes_by_attach_time] - ).should.equal({block_mapping.volume_id}) - - volumes_by_attach_device = conn.get_all_volumes( - filters={'attachment.device': '/dev/sda1'}) - set([vol.id for vol in volumes_by_attach_device] - ).should.equal({block_mapping.volume_id}) - - volumes_by_attach_instance_id = conn.get_all_volumes( - filters={'attachment.instance-id': instance.id}) - set([vol.id for vol in volumes_by_attach_instance_id] - ).should.equal({block_mapping.volume_id}) - - volumes_by_attach_status = conn.get_all_volumes( - filters={'attachment.status': 'attached'}) - set([vol.id for vol in volumes_by_attach_status] - ).should.equal({block_mapping.volume_id}) - - volumes_by_create_time = conn.get_all_volumes( - filters={'create-time': volume4.create_time}) - set([vol.create_time for vol in volumes_by_create_time] - ).should.equal({volume4.create_time}) - - volumes_by_size = 
conn.get_all_volumes(filters={'size': volume2.size}) - set([vol.id for vol in volumes_by_size]).should.equal({volume2.id}) - - volumes_by_snapshot_id = conn.get_all_volumes( - filters={'snapshot-id': snapshot.id}) - set([vol.id for vol in volumes_by_snapshot_id] - ).should.equal({volume4.id}) - - volumes_by_status = conn.get_all_volumes(filters={'status': 'in-use'}) - set([vol.id for vol in volumes_by_status]).should.equal( - {block_mapping.volume_id}) - - volumes_by_id = conn.get_all_volumes(filters={'volume-id': volume1.id}) - set([vol.id for vol in volumes_by_id]).should.equal({volume1.id}) - - volumes_by_tag_key = conn.get_all_volumes(filters={'tag-key': 'testkey1'}) - set([vol.id for vol in volumes_by_tag_key]).should.equal({volume1.id}) - - volumes_by_tag_value = conn.get_all_volumes( - filters={'tag-value': 'testvalue1'}) - set([vol.id for vol in volumes_by_tag_value] - ).should.equal({volume1.id}) - - volumes_by_tag = conn.get_all_volumes( - filters={'tag:testkey1': 'testvalue1'}) - set([vol.id for vol in volumes_by_tag]).should.equal({volume1.id}) - - volumes_by_unencrypted = conn.get_all_volumes( - filters={'encrypted': 'false'}) - set([vol.id for vol in volumes_by_unencrypted if vol.id in volume_ids]).should.equal( - {block_mapping.volume_id, volume2.id} - ) - - volumes_by_encrypted = conn.get_all_volumes(filters={'encrypted': 'true'}) - set([vol.id for vol in volumes_by_encrypted if vol.id in volume_ids]).should.equal( - {volume1.id, volume3.id, volume4.id} - ) - - volumes_by_availability_zone = conn.get_all_volumes(filters={'availability-zone': 'us-east-1b'}) - set([vol.id for vol in volumes_by_availability_zone if vol.id in volume_ids]).should.equal( - {volume2.id} - ) - - -@mock_ec2_deprecated -def test_volume_attach_and_detach(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - volume = conn.create_volume(80, "us-east-1a") - - volume.update() - 
volume.volume_state().should.equal('available') - - with assert_raises(EC2ResponseError) as ex: - volume.attach(instance.id, "/dev/sdh", dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the AttachVolume operation: Request would have succeeded, but DryRun flag is set') - - volume.attach(instance.id, "/dev/sdh") - - volume.update() - volume.volume_state().should.equal('in-use') - volume.attachment_state().should.equal('attached') - - volume.attach_data.instance_id.should.equal(instance.id) - - with assert_raises(EC2ResponseError) as ex: - volume.detach(dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the DetachVolume operation: Request would have succeeded, but DryRun flag is set') - - volume.detach() - - volume.update() - volume.volume_state().should.equal('available') - - with assert_raises(EC2ResponseError) as cm1: - volume.attach('i-1234abcd', "/dev/sdh") - cm1.exception.code.should.equal('InvalidInstanceID.NotFound') - cm1.exception.status.should.equal(400) - cm1.exception.request_id.should_not.be.none - - with assert_raises(EC2ResponseError) as cm2: - conn.detach_volume(volume.id, instance.id, "/dev/sdh") - cm2.exception.code.should.equal('InvalidAttachment.NotFound') - cm2.exception.status.should.equal(400) - cm2.exception.request_id.should_not.be.none - - with assert_raises(EC2ResponseError) as cm3: - conn.detach_volume(volume.id, 'i-1234abcd', "/dev/sdh") - cm3.exception.code.should.equal('InvalidInstanceID.NotFound') - cm3.exception.status.should.equal(400) - cm3.exception.request_id.should_not.be.none - - -@mock_ec2_deprecated -def test_create_snapshot(): - conn = boto.connect_ec2('the_key', 'the_secret') - volume = conn.create_volume(80, "us-east-1a") - - 
with assert_raises(EC2ResponseError) as ex: - snapshot = volume.create_snapshot('a dryrun snapshot', dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the CreateSnapshot operation: Request would have succeeded, but DryRun flag is set') - - snapshot = volume.create_snapshot('a test snapshot') - snapshot.update() - snapshot.status.should.equal('completed') - - snapshots = [snap for snap in conn.get_all_snapshots() if snap.id == snapshot.id] - snapshots.should.have.length_of(1) - snapshots[0].description.should.equal('a test snapshot') - snapshots[0].start_time.should_not.be.none - snapshots[0].encrypted.should.be(False) - - # Create snapshot without description - num_snapshots = len(conn.get_all_snapshots()) - - snapshot = volume.create_snapshot() - conn.get_all_snapshots().should.have.length_of(num_snapshots + 1) - - snapshot.delete() - conn.get_all_snapshots().should.have.length_of(num_snapshots) - - # Deleting something that was already deleted should throw an error - with assert_raises(EC2ResponseError) as cm: - snapshot.delete() - cm.exception.code.should.equal('InvalidSnapshot.NotFound') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - -@mock_ec2_deprecated -def test_create_encrypted_snapshot(): - conn = boto.connect_ec2('the_key', 'the_secret') - volume = conn.create_volume(80, "us-east-1a", encrypted=True) - snapshot = volume.create_snapshot('a test snapshot') - snapshot.update() - snapshot.status.should.equal('completed') - - snapshots = [snap for snap in conn.get_all_snapshots() if snap.id == snapshot.id] - snapshots.should.have.length_of(1) - snapshots[0].description.should.equal('a test snapshot') - snapshots[0].start_time.should_not.be.none - snapshots[0].encrypted.should.be(True) - - -@mock_ec2_deprecated -def test_filter_snapshot_by_id(): - conn = 
boto.connect_ec2('the_key', 'the_secret') - volume1 = conn.create_volume(36, "us-east-1a") - snap1 = volume1.create_snapshot('a test snapshot 1') - volume2 = conn.create_volume(42, 'us-east-1a') - snap2 = volume2.create_snapshot('a test snapshot 2') - volume3 = conn.create_volume(84, 'us-east-1a') - snap3 = volume3.create_snapshot('a test snapshot 3') - snapshots1 = conn.get_all_snapshots(snapshot_ids=snap2.id) - snapshots1.should.have.length_of(1) - snapshots1[0].volume_id.should.equal(volume2.id) - snapshots1[0].region.name.should.equal(conn.region.name) - snapshots2 = conn.get_all_snapshots(snapshot_ids=[snap2.id, snap3.id]) - snapshots2.should.have.length_of(2) - for s in snapshots2: - s.start_time.should_not.be.none - s.volume_id.should.be.within([volume2.id, volume3.id]) - s.region.name.should.equal(conn.region.name) - - with assert_raises(EC2ResponseError) as cm: - conn.get_all_snapshots(snapshot_ids=['snap-does_not_exist']) - cm.exception.code.should.equal('InvalidSnapshot.NotFound') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - -@mock_ec2_deprecated -def test_snapshot_filters(): - conn = boto.connect_ec2('the_key', 'the_secret') - volume1 = conn.create_volume(20, "us-east-1a", encrypted=False) - volume2 = conn.create_volume(25, "us-east-1a", encrypted=True) - - snapshot1 = volume1.create_snapshot(description='testsnapshot1') - snapshot2 = volume1.create_snapshot(description='testsnapshot2') - snapshot3 = volume2.create_snapshot(description='testsnapshot3') - - conn.create_tags([snapshot1.id], {'testkey1': 'testvalue1'}) - conn.create_tags([snapshot2.id], {'testkey2': 'testvalue2'}) - - snapshots_by_description = conn.get_all_snapshots( - filters={'description': 'testsnapshot1'}) - set([snap.id for snap in snapshots_by_description] - ).should.equal({snapshot1.id}) - - snapshots_by_id = conn.get_all_snapshots( - filters={'snapshot-id': snapshot1.id}) - set([snap.id for snap in snapshots_by_id] - 
).should.equal({snapshot1.id}) - - snapshots_by_start_time = conn.get_all_snapshots( - filters={'start-time': snapshot1.start_time}) - set([snap.start_time for snap in snapshots_by_start_time] - ).should.equal({snapshot1.start_time}) - - snapshots_by_volume_id = conn.get_all_snapshots( - filters={'volume-id': volume1.id}) - set([snap.id for snap in snapshots_by_volume_id] - ).should.equal({snapshot1.id, snapshot2.id}) - - snapshots_by_status = conn.get_all_snapshots( - filters={'status': 'completed'}) - ({snapshot1.id, snapshot2.id, snapshot3.id} - - {snap.id for snap in snapshots_by_status}).should.have.length_of(0) - - snapshots_by_volume_size = conn.get_all_snapshots( - filters={'volume-size': volume1.size}) - set([snap.id for snap in snapshots_by_volume_size] - ).should.equal({snapshot1.id, snapshot2.id}) - - snapshots_by_tag_key = conn.get_all_snapshots( - filters={'tag-key': 'testkey1'}) - set([snap.id for snap in snapshots_by_tag_key] - ).should.equal({snapshot1.id}) - - snapshots_by_tag_value = conn.get_all_snapshots( - filters={'tag-value': 'testvalue1'}) - set([snap.id for snap in snapshots_by_tag_value] - ).should.equal({snapshot1.id}) - - snapshots_by_tag = conn.get_all_snapshots( - filters={'tag:testkey1': 'testvalue1'}) - set([snap.id for snap in snapshots_by_tag] - ).should.equal({snapshot1.id}) - - snapshots_by_encrypted = conn.get_all_snapshots( - filters={'encrypted': 'true'}) - set([snap.id for snap in snapshots_by_encrypted] - ).should.equal({snapshot3.id}) - - -@mock_ec2_deprecated -def test_snapshot_attribute(): - import copy - - conn = boto.connect_ec2('the_key', 'the_secret') - volume = conn.create_volume(80, "us-east-1a") - snapshot = volume.create_snapshot() - - # Baseline - attributes = conn.get_snapshot_attribute( - snapshot.id, attribute='createVolumePermission') - attributes.name.should.equal('create_volume_permission') - attributes.attrs.should.have.length_of(0) - - ADD_GROUP_ARGS = {'snapshot_id': snapshot.id, - 'attribute': 
'createVolumePermission', - 'operation': 'add', - 'groups': 'all'} - - REMOVE_GROUP_ARGS = {'snapshot_id': snapshot.id, - 'attribute': 'createVolumePermission', - 'operation': 'remove', - 'groups': 'all'} - - # Add 'all' group and confirm - - with assert_raises(EC2ResponseError) as ex: - conn.modify_snapshot_attribute( - **dict(ADD_GROUP_ARGS, **{'dry_run': True})) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the ModifySnapshotAttribute operation: Request would have succeeded, but DryRun flag is set') - - conn.modify_snapshot_attribute(**ADD_GROUP_ARGS) - - attributes = conn.get_snapshot_attribute( - snapshot.id, attribute='createVolumePermission') - attributes.attrs['groups'].should.have.length_of(1) - attributes.attrs['groups'].should.equal(['all']) - - # Add is idempotent - conn.modify_snapshot_attribute.when.called_with( - **ADD_GROUP_ARGS).should_not.throw(EC2ResponseError) - - # Remove 'all' group and confirm - with assert_raises(EC2ResponseError) as ex: - conn.modify_snapshot_attribute( - **dict(REMOVE_GROUP_ARGS, **{'dry_run': True})) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the ModifySnapshotAttribute operation: Request would have succeeded, but DryRun flag is set') - - conn.modify_snapshot_attribute(**REMOVE_GROUP_ARGS) - - attributes = conn.get_snapshot_attribute( - snapshot.id, attribute='createVolumePermission') - attributes.attrs.should.have.length_of(0) - - # Remove is idempotent - conn.modify_snapshot_attribute.when.called_with( - **REMOVE_GROUP_ARGS).should_not.throw(EC2ResponseError) - - # Error: Add with group != 'all' - with assert_raises(EC2ResponseError) as cm: - conn.modify_snapshot_attribute(snapshot.id, - attribute='createVolumePermission', - 
operation='add', - groups='everyone') - cm.exception.code.should.equal('InvalidAMIAttributeItemValue') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - # Error: Add with invalid snapshot ID - with assert_raises(EC2ResponseError) as cm: - conn.modify_snapshot_attribute("snapshot-abcd1234", - attribute='createVolumePermission', - operation='add', - groups='all') - cm.exception.code.should.equal('InvalidSnapshot.NotFound') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - # Error: Remove with invalid snapshot ID - with assert_raises(EC2ResponseError) as cm: - conn.modify_snapshot_attribute("snapshot-abcd1234", - attribute='createVolumePermission', - operation='remove', - groups='all') - cm.exception.code.should.equal('InvalidSnapshot.NotFound') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - # Error: Add or remove with user ID instead of group - conn.modify_snapshot_attribute.when.called_with(snapshot.id, - attribute='createVolumePermission', - operation='add', - user_ids=['user']).should.throw(NotImplementedError) - conn.modify_snapshot_attribute.when.called_with(snapshot.id, - attribute='createVolumePermission', - operation='remove', - user_ids=['user']).should.throw(NotImplementedError) - - -@mock_ec2_deprecated -def test_create_volume_from_snapshot(): - conn = boto.connect_ec2('the_key', 'the_secret') - volume = conn.create_volume(80, "us-east-1a") - snapshot = volume.create_snapshot('a test snapshot') - - with assert_raises(EC2ResponseError) as ex: - snapshot = volume.create_snapshot('a test snapshot', dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the CreateSnapshot operation: Request would have succeeded, but DryRun flag is set') - - snapshot = volume.create_snapshot('a test snapshot') - 
snapshot.update() - snapshot.status.should.equal('completed') - - new_volume = snapshot.create_volume('us-east-1a') - new_volume.size.should.equal(80) - new_volume.snapshot_id.should.equal(snapshot.id) - - -@mock_ec2_deprecated -def test_create_volume_from_encrypted_snapshot(): - conn = boto.connect_ec2('the_key', 'the_secret') - volume = conn.create_volume(80, "us-east-1a", encrypted=True) - - snapshot = volume.create_snapshot('a test snapshot') - snapshot.update() - snapshot.status.should.equal('completed') - - new_volume = snapshot.create_volume('us-east-1a') - new_volume.size.should.equal(80) - new_volume.snapshot_id.should.equal(snapshot.id) - new_volume.encrypted.should.be(True) - - -@mock_ec2_deprecated -def test_modify_attribute_blockDeviceMapping(): - """ - Reproduces the missing feature explained at [0], where we want to mock a - call to modify an instance attribute of type: blockDeviceMapping. - - [0] https://github.com/spulec/moto/issues/160 - """ - conn = boto.ec2.connect_to_region("us-east-1") - - reservation = conn.run_instances('ami-1234abcd') - - instance = reservation.instances[0] - - with assert_raises(EC2ResponseError) as ex: - instance.modify_attribute('blockDeviceMapping', { - '/dev/sda1': True}, dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the ModifyInstanceAttribute operation: Request would have succeeded, but DryRun flag is set') - - instance.modify_attribute('blockDeviceMapping', {'/dev/sda1': True}) - - instance = ec2_backends[conn.region.name].get_instance(instance.id) - instance.block_device_mapping.should.have.key('/dev/sda1') - instance.block_device_mapping[ - '/dev/sda1'].delete_on_termination.should.be(True) - - -@mock_ec2_deprecated -def test_volume_tag_escaping(): - conn = boto.connect_ec2('the_key', 'the_secret') - vol = conn.create_volume(10, 'us-east-1a') - snapshot = 
conn.create_snapshot(vol.id, 'Desc') - - with assert_raises(EC2ResponseError) as ex: - snapshot.add_tags({'key': ''}, dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set') - snaps = [snap for snap in conn.get_all_snapshots() if snap.id == snapshot.id] - dict(snaps[0].tags).should_not.be.equal( - {'key': ''}) - - snapshot.add_tags({'key': ''}) - - snaps = [snap for snap in conn.get_all_snapshots() if snap.id == snapshot.id] - dict(snaps[0].tags).should.equal({'key': ''}) - - -@freeze_time -@mock_ec2 -def test_copy_snapshot(): - ec2_client = boto3.client('ec2', region_name='eu-west-1') - dest_ec2_client = boto3.client('ec2', region_name='eu-west-2') - - volume_response = ec2_client.create_volume( - AvailabilityZone='eu-west-1a', Size=10 - ) - - create_snapshot_response = ec2_client.create_snapshot( - VolumeId=volume_response['VolumeId'] - ) - - copy_snapshot_response = dest_ec2_client.copy_snapshot( - SourceSnapshotId=create_snapshot_response['SnapshotId'], - SourceRegion="eu-west-1" - ) - - ec2 = boto3.resource('ec2', region_name='eu-west-1') - dest_ec2 = boto3.resource('ec2', region_name='eu-west-2') - - source = ec2.Snapshot(create_snapshot_response['SnapshotId']) - dest = dest_ec2.Snapshot(copy_snapshot_response['SnapshotId']) - - attribs = ['data_encryption_key_id', 'encrypted', - 'kms_key_id', 'owner_alias', 'owner_id', - 'progress', 'state', 'state_message', - 'tags', 'volume_id', 'volume_size'] - - for attrib in attribs: - getattr(source, attrib).should.equal(getattr(dest, attrib)) - - # Copy from non-existent source ID. 
- with assert_raises(ClientError) as cm: - create_snapshot_error = ec2_client.create_snapshot( - VolumeId='vol-abcd1234' - ) - cm.exception.response['Error']['Code'].should.equal('InvalidVolume.NotFound') - cm.exception.response['Error']['Message'].should.equal("The volume 'vol-abcd1234' does not exist.") - cm.exception.response['ResponseMetadata']['RequestId'].should_not.be.none - cm.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) - - # Copy from non-existent source region. - with assert_raises(ClientError) as cm: - copy_snapshot_response = dest_ec2_client.copy_snapshot( - SourceSnapshotId=create_snapshot_response['SnapshotId'], - SourceRegion="eu-west-2" - ) - cm.exception.response['Error']['Code'].should.equal('InvalidSnapshot.NotFound') - cm.exception.response['Error']['Message'].should.be.none - cm.exception.response['ResponseMetadata']['RequestId'].should_not.be.none - cm.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) - -@mock_ec2 -def test_search_for_many_snapshots(): - ec2_client = boto3.client('ec2', region_name='eu-west-1') - - volume_response = ec2_client.create_volume( - AvailabilityZone='eu-west-1a', Size=10 - ) - - snapshot_ids = [] - for i in range(1, 20): - create_snapshot_response = ec2_client.create_snapshot( - VolumeId=volume_response['VolumeId'] - ) - snapshot_ids.append(create_snapshot_response['SnapshotId']) - - snapshots_response = ec2_client.describe_snapshots( - SnapshotIds=snapshot_ids - ) - - assert len(snapshots_response['Snapshots']) == len(snapshot_ids) +from __future__ import unicode_literals +# Ensure 'assert_raises' context manager support for Python 2.6 +import tests.backport_assert_raises +from nose.tools import assert_raises + +from moto.ec2 import ec2_backends +import boto +import boto3 +from botocore.exceptions import ClientError +from boto.exception import EC2ResponseError +from freezegun import freeze_time +import sure # noqa + +from moto import mock_ec2_deprecated, 
mock_ec2 + + +@mock_ec2_deprecated +def test_create_and_delete_volume(): + conn = boto.connect_ec2('the_key', 'the_secret') + volume = conn.create_volume(80, "us-east-1a") + + all_volumes = conn.get_all_volumes() + + current_volume = [item for item in all_volumes if item.id == volume.id] + current_volume.should.have.length_of(1) + current_volume[0].size.should.equal(80) + current_volume[0].zone.should.equal("us-east-1a") + current_volume[0].encrypted.should.be(False) + + volume = current_volume[0] + + with assert_raises(EC2ResponseError) as ex: + volume.delete(dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the DeleteVolume operation: Request would have succeeded, but DryRun flag is set') + + volume.delete() + + all_volumes = conn.get_all_volumes() + my_volume = [item for item in all_volumes if item.id == volume.id] + my_volume.should.have.length_of(0) + + # Deleting something that was already deleted should throw an error + with assert_raises(EC2ResponseError) as cm: + volume.delete() + cm.exception.code.should.equal('InvalidVolume.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@mock_ec2_deprecated +def test_create_encrypted_volume_dryrun(): + conn = boto.connect_ec2('the_key', 'the_secret') + with assert_raises(EC2ResponseError) as ex: + conn.create_volume(80, "us-east-1a", encrypted=True, dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the CreateVolume operation: Request would have succeeded, but DryRun flag is set') + + +@mock_ec2_deprecated +def test_create_encrypted_volume(): + conn = boto.connect_ec2('the_key', 'the_secret') + volume = conn.create_volume(80, "us-east-1a", encrypted=True) + + with 
assert_raises(EC2ResponseError) as ex: + conn.create_volume(80, "us-east-1a", encrypted=True, dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the CreateVolume operation: Request would have succeeded, but DryRun flag is set') + + all_volumes = [vol for vol in conn.get_all_volumes() if vol.id == volume.id] + all_volumes[0].encrypted.should.be(True) + + +@mock_ec2_deprecated +def test_filter_volume_by_id(): + conn = boto.connect_ec2('the_key', 'the_secret') + volume1 = conn.create_volume(80, "us-east-1a") + volume2 = conn.create_volume(36, "us-east-1b") + volume3 = conn.create_volume(20, "us-east-1c") + vol1 = conn.get_all_volumes(volume_ids=volume3.id) + vol1.should.have.length_of(1) + vol1[0].size.should.equal(20) + vol1[0].zone.should.equal('us-east-1c') + vol2 = conn.get_all_volumes(volume_ids=[volume1.id, volume2.id]) + vol2.should.have.length_of(2) + + with assert_raises(EC2ResponseError) as cm: + conn.get_all_volumes(volume_ids=['vol-does_not_exist']) + cm.exception.code.should.equal('InvalidVolume.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@mock_ec2_deprecated +def test_volume_filters(): + conn = boto.connect_ec2('the_key', 'the_secret') + + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + + instance.update() + + volume1 = conn.create_volume(80, "us-east-1a", encrypted=True) + volume2 = conn.create_volume(36, "us-east-1b", encrypted=False) + volume3 = conn.create_volume(20, "us-east-1c", encrypted=True) + + snapshot = volume3.create_snapshot(description='testsnap') + volume4 = conn.create_volume(25, "us-east-1a", snapshot=snapshot) + + conn.create_tags([volume1.id], {'testkey1': 'testvalue1'}) + conn.create_tags([volume2.id], {'testkey2': 'testvalue2'}) + + volume1.update() + volume2.update() + volume3.update() + 
volume4.update() + + block_mapping = instance.block_device_mapping['/dev/sda1'] + + volume_ids = (volume1.id, volume2.id, volume3.id, volume4.id, block_mapping.volume_id) + + volumes_by_attach_time = conn.get_all_volumes( + filters={'attachment.attach-time': block_mapping.attach_time}) + set([vol.id for vol in volumes_by_attach_time] + ).should.equal({block_mapping.volume_id}) + + volumes_by_attach_device = conn.get_all_volumes( + filters={'attachment.device': '/dev/sda1'}) + set([vol.id for vol in volumes_by_attach_device] + ).should.equal({block_mapping.volume_id}) + + volumes_by_attach_instance_id = conn.get_all_volumes( + filters={'attachment.instance-id': instance.id}) + set([vol.id for vol in volumes_by_attach_instance_id] + ).should.equal({block_mapping.volume_id}) + + volumes_by_attach_status = conn.get_all_volumes( + filters={'attachment.status': 'attached'}) + set([vol.id for vol in volumes_by_attach_status] + ).should.equal({block_mapping.volume_id}) + + volumes_by_create_time = conn.get_all_volumes( + filters={'create-time': volume4.create_time}) + set([vol.create_time for vol in volumes_by_create_time] + ).should.equal({volume4.create_time}) + + volumes_by_size = conn.get_all_volumes(filters={'size': volume2.size}) + set([vol.id for vol in volumes_by_size]).should.equal({volume2.id}) + + volumes_by_snapshot_id = conn.get_all_volumes( + filters={'snapshot-id': snapshot.id}) + set([vol.id for vol in volumes_by_snapshot_id] + ).should.equal({volume4.id}) + + volumes_by_status = conn.get_all_volumes(filters={'status': 'in-use'}) + set([vol.id for vol in volumes_by_status]).should.equal( + {block_mapping.volume_id}) + + volumes_by_id = conn.get_all_volumes(filters={'volume-id': volume1.id}) + set([vol.id for vol in volumes_by_id]).should.equal({volume1.id}) + + volumes_by_tag_key = conn.get_all_volumes(filters={'tag-key': 'testkey1'}) + set([vol.id for vol in volumes_by_tag_key]).should.equal({volume1.id}) + + volumes_by_tag_value = conn.get_all_volumes( + 
filters={'tag-value': 'testvalue1'}) + set([vol.id for vol in volumes_by_tag_value] + ).should.equal({volume1.id}) + + volumes_by_tag = conn.get_all_volumes( + filters={'tag:testkey1': 'testvalue1'}) + set([vol.id for vol in volumes_by_tag]).should.equal({volume1.id}) + + volumes_by_unencrypted = conn.get_all_volumes( + filters={'encrypted': 'false'}) + set([vol.id for vol in volumes_by_unencrypted if vol.id in volume_ids]).should.equal( + {block_mapping.volume_id, volume2.id} + ) + + volumes_by_encrypted = conn.get_all_volumes(filters={'encrypted': 'true'}) + set([vol.id for vol in volumes_by_encrypted if vol.id in volume_ids]).should.equal( + {volume1.id, volume3.id, volume4.id} + ) + + volumes_by_availability_zone = conn.get_all_volumes(filters={'availability-zone': 'us-east-1b'}) + set([vol.id for vol in volumes_by_availability_zone if vol.id in volume_ids]).should.equal( + {volume2.id} + ) + + +@mock_ec2_deprecated +def test_volume_attach_and_detach(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + volume = conn.create_volume(80, "us-east-1a") + + volume.update() + volume.volume_state().should.equal('available') + + with assert_raises(EC2ResponseError) as ex: + volume.attach(instance.id, "/dev/sdh", dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the AttachVolume operation: Request would have succeeded, but DryRun flag is set') + + volume.attach(instance.id, "/dev/sdh") + + volume.update() + volume.volume_state().should.equal('in-use') + volume.attachment_state().should.equal('attached') + + volume.attach_data.instance_id.should.equal(instance.id) + + with assert_raises(EC2ResponseError) as ex: + volume.detach(dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + 
ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the DetachVolume operation: Request would have succeeded, but DryRun flag is set') + + volume.detach() + + volume.update() + volume.volume_state().should.equal('available') + + with assert_raises(EC2ResponseError) as cm1: + volume.attach('i-1234abcd', "/dev/sdh") + cm1.exception.code.should.equal('InvalidInstanceID.NotFound') + cm1.exception.status.should.equal(400) + cm1.exception.request_id.should_not.be.none + + with assert_raises(EC2ResponseError) as cm2: + conn.detach_volume(volume.id, instance.id, "/dev/sdh") + cm2.exception.code.should.equal('InvalidAttachment.NotFound') + cm2.exception.status.should.equal(400) + cm2.exception.request_id.should_not.be.none + + with assert_raises(EC2ResponseError) as cm3: + conn.detach_volume(volume.id, 'i-1234abcd', "/dev/sdh") + cm3.exception.code.should.equal('InvalidInstanceID.NotFound') + cm3.exception.status.should.equal(400) + cm3.exception.request_id.should_not.be.none + + +@mock_ec2_deprecated +def test_create_snapshot(): + conn = boto.connect_ec2('the_key', 'the_secret') + volume = conn.create_volume(80, "us-east-1a") + + with assert_raises(EC2ResponseError) as ex: + snapshot = volume.create_snapshot('a dryrun snapshot', dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the CreateSnapshot operation: Request would have succeeded, but DryRun flag is set') + + snapshot = volume.create_snapshot('a test snapshot') + snapshot.update() + snapshot.status.should.equal('completed') + + snapshots = [snap for snap in conn.get_all_snapshots() if snap.id == snapshot.id] + snapshots.should.have.length_of(1) + snapshots[0].description.should.equal('a test snapshot') + snapshots[0].start_time.should_not.be.none + snapshots[0].encrypted.should.be(False) + + # 
Create snapshot without description + num_snapshots = len(conn.get_all_snapshots()) + + snapshot = volume.create_snapshot() + conn.get_all_snapshots().should.have.length_of(num_snapshots + 1) + + snapshot.delete() + conn.get_all_snapshots().should.have.length_of(num_snapshots) + + # Deleting something that was already deleted should throw an error + with assert_raises(EC2ResponseError) as cm: + snapshot.delete() + cm.exception.code.should.equal('InvalidSnapshot.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@mock_ec2_deprecated +def test_create_encrypted_snapshot(): + conn = boto.connect_ec2('the_key', 'the_secret') + volume = conn.create_volume(80, "us-east-1a", encrypted=True) + snapshot = volume.create_snapshot('a test snapshot') + snapshot.update() + snapshot.status.should.equal('completed') + + snapshots = [snap for snap in conn.get_all_snapshots() if snap.id == snapshot.id] + snapshots.should.have.length_of(1) + snapshots[0].description.should.equal('a test snapshot') + snapshots[0].start_time.should_not.be.none + snapshots[0].encrypted.should.be(True) + + +@mock_ec2_deprecated +def test_filter_snapshot_by_id(): + conn = boto.connect_ec2('the_key', 'the_secret') + volume1 = conn.create_volume(36, "us-east-1a") + snap1 = volume1.create_snapshot('a test snapshot 1') + volume2 = conn.create_volume(42, 'us-east-1a') + snap2 = volume2.create_snapshot('a test snapshot 2') + volume3 = conn.create_volume(84, 'us-east-1a') + snap3 = volume3.create_snapshot('a test snapshot 3') + snapshots1 = conn.get_all_snapshots(snapshot_ids=snap2.id) + snapshots1.should.have.length_of(1) + snapshots1[0].volume_id.should.equal(volume2.id) + snapshots1[0].region.name.should.equal(conn.region.name) + snapshots2 = conn.get_all_snapshots(snapshot_ids=[snap2.id, snap3.id]) + snapshots2.should.have.length_of(2) + for s in snapshots2: + s.start_time.should_not.be.none + s.volume_id.should.be.within([volume2.id, volume3.id]) + 
s.region.name.should.equal(conn.region.name) + + with assert_raises(EC2ResponseError) as cm: + conn.get_all_snapshots(snapshot_ids=['snap-does_not_exist']) + cm.exception.code.should.equal('InvalidSnapshot.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@mock_ec2_deprecated +def test_snapshot_filters(): + conn = boto.connect_ec2('the_key', 'the_secret') + volume1 = conn.create_volume(20, "us-east-1a", encrypted=False) + volume2 = conn.create_volume(25, "us-east-1a", encrypted=True) + + snapshot1 = volume1.create_snapshot(description='testsnapshot1') + snapshot2 = volume1.create_snapshot(description='testsnapshot2') + snapshot3 = volume2.create_snapshot(description='testsnapshot3') + + conn.create_tags([snapshot1.id], {'testkey1': 'testvalue1'}) + conn.create_tags([snapshot2.id], {'testkey2': 'testvalue2'}) + + snapshots_by_description = conn.get_all_snapshots( + filters={'description': 'testsnapshot1'}) + set([snap.id for snap in snapshots_by_description] + ).should.equal({snapshot1.id}) + + snapshots_by_id = conn.get_all_snapshots( + filters={'snapshot-id': snapshot1.id}) + set([snap.id for snap in snapshots_by_id] + ).should.equal({snapshot1.id}) + + snapshots_by_start_time = conn.get_all_snapshots( + filters={'start-time': snapshot1.start_time}) + set([snap.start_time for snap in snapshots_by_start_time] + ).should.equal({snapshot1.start_time}) + + snapshots_by_volume_id = conn.get_all_snapshots( + filters={'volume-id': volume1.id}) + set([snap.id for snap in snapshots_by_volume_id] + ).should.equal({snapshot1.id, snapshot2.id}) + + snapshots_by_status = conn.get_all_snapshots( + filters={'status': 'completed'}) + ({snapshot1.id, snapshot2.id, snapshot3.id} - + {snap.id for snap in snapshots_by_status}).should.have.length_of(0) + + snapshots_by_volume_size = conn.get_all_snapshots( + filters={'volume-size': volume1.size}) + set([snap.id for snap in snapshots_by_volume_size] + ).should.equal({snapshot1.id, 
snapshot2.id}) + + snapshots_by_tag_key = conn.get_all_snapshots( + filters={'tag-key': 'testkey1'}) + set([snap.id for snap in snapshots_by_tag_key] + ).should.equal({snapshot1.id}) + + snapshots_by_tag_value = conn.get_all_snapshots( + filters={'tag-value': 'testvalue1'}) + set([snap.id for snap in snapshots_by_tag_value] + ).should.equal({snapshot1.id}) + + snapshots_by_tag = conn.get_all_snapshots( + filters={'tag:testkey1': 'testvalue1'}) + set([snap.id for snap in snapshots_by_tag] + ).should.equal({snapshot1.id}) + + snapshots_by_encrypted = conn.get_all_snapshots( + filters={'encrypted': 'true'}) + set([snap.id for snap in snapshots_by_encrypted] + ).should.equal({snapshot3.id}) + + +@mock_ec2_deprecated +def test_snapshot_attribute(): + import copy + + conn = boto.connect_ec2('the_key', 'the_secret') + volume = conn.create_volume(80, "us-east-1a") + snapshot = volume.create_snapshot() + + # Baseline + attributes = conn.get_snapshot_attribute( + snapshot.id, attribute='createVolumePermission') + attributes.name.should.equal('create_volume_permission') + attributes.attrs.should.have.length_of(0) + + ADD_GROUP_ARGS = {'snapshot_id': snapshot.id, + 'attribute': 'createVolumePermission', + 'operation': 'add', + 'groups': 'all'} + + REMOVE_GROUP_ARGS = {'snapshot_id': snapshot.id, + 'attribute': 'createVolumePermission', + 'operation': 'remove', + 'groups': 'all'} + + # Add 'all' group and confirm + + with assert_raises(EC2ResponseError) as ex: + conn.modify_snapshot_attribute( + **dict(ADD_GROUP_ARGS, **{'dry_run': True})) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the ModifySnapshotAttribute operation: Request would have succeeded, but DryRun flag is set') + + conn.modify_snapshot_attribute(**ADD_GROUP_ARGS) + + attributes = conn.get_snapshot_attribute( + snapshot.id, attribute='createVolumePermission') + 
attributes.attrs['groups'].should.have.length_of(1) + attributes.attrs['groups'].should.equal(['all']) + + # Add is idempotent + conn.modify_snapshot_attribute.when.called_with( + **ADD_GROUP_ARGS).should_not.throw(EC2ResponseError) + + # Remove 'all' group and confirm + with assert_raises(EC2ResponseError) as ex: + conn.modify_snapshot_attribute( + **dict(REMOVE_GROUP_ARGS, **{'dry_run': True})) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the ModifySnapshotAttribute operation: Request would have succeeded, but DryRun flag is set') + + conn.modify_snapshot_attribute(**REMOVE_GROUP_ARGS) + + attributes = conn.get_snapshot_attribute( + snapshot.id, attribute='createVolumePermission') + attributes.attrs.should.have.length_of(0) + + # Remove is idempotent + conn.modify_snapshot_attribute.when.called_with( + **REMOVE_GROUP_ARGS).should_not.throw(EC2ResponseError) + + # Error: Add with group != 'all' + with assert_raises(EC2ResponseError) as cm: + conn.modify_snapshot_attribute(snapshot.id, + attribute='createVolumePermission', + operation='add', + groups='everyone') + cm.exception.code.should.equal('InvalidAMIAttributeItemValue') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + # Error: Add with invalid snapshot ID + with assert_raises(EC2ResponseError) as cm: + conn.modify_snapshot_attribute("snapshot-abcd1234", + attribute='createVolumePermission', + operation='add', + groups='all') + cm.exception.code.should.equal('InvalidSnapshot.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + # Error: Remove with invalid snapshot ID + with assert_raises(EC2ResponseError) as cm: + conn.modify_snapshot_attribute("snapshot-abcd1234", + attribute='createVolumePermission', + operation='remove', + groups='all') + 
cm.exception.code.should.equal('InvalidSnapshot.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + # Error: Add or remove with user ID instead of group + conn.modify_snapshot_attribute.when.called_with(snapshot.id, + attribute='createVolumePermission', + operation='add', + user_ids=['user']).should.throw(NotImplementedError) + conn.modify_snapshot_attribute.when.called_with(snapshot.id, + attribute='createVolumePermission', + operation='remove', + user_ids=['user']).should.throw(NotImplementedError) + + +@mock_ec2_deprecated +def test_create_volume_from_snapshot(): + conn = boto.connect_ec2('the_key', 'the_secret') + volume = conn.create_volume(80, "us-east-1a") + snapshot = volume.create_snapshot('a test snapshot') + + with assert_raises(EC2ResponseError) as ex: + snapshot = volume.create_snapshot('a test snapshot', dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the CreateSnapshot operation: Request would have succeeded, but DryRun flag is set') + + snapshot = volume.create_snapshot('a test snapshot') + snapshot.update() + snapshot.status.should.equal('completed') + + new_volume = snapshot.create_volume('us-east-1a') + new_volume.size.should.equal(80) + new_volume.snapshot_id.should.equal(snapshot.id) + + +@mock_ec2_deprecated +def test_create_volume_from_encrypted_snapshot(): + conn = boto.connect_ec2('the_key', 'the_secret') + volume = conn.create_volume(80, "us-east-1a", encrypted=True) + + snapshot = volume.create_snapshot('a test snapshot') + snapshot.update() + snapshot.status.should.equal('completed') + + new_volume = snapshot.create_volume('us-east-1a') + new_volume.size.should.equal(80) + new_volume.snapshot_id.should.equal(snapshot.id) + new_volume.encrypted.should.be(True) + + +@mock_ec2_deprecated +def test_modify_attribute_blockDeviceMapping(): + """ + 
Reproduces the missing feature explained at [0], where we want to mock a + call to modify an instance attribute of type: blockDeviceMapping. + + [0] https://github.com/spulec/moto/issues/160 + """ + conn = boto.ec2.connect_to_region("us-east-1") + + reservation = conn.run_instances('ami-1234abcd') + + instance = reservation.instances[0] + + with assert_raises(EC2ResponseError) as ex: + instance.modify_attribute('blockDeviceMapping', { + '/dev/sda1': True}, dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the ModifyInstanceAttribute operation: Request would have succeeded, but DryRun flag is set') + + instance.modify_attribute('blockDeviceMapping', {'/dev/sda1': True}) + + instance = ec2_backends[conn.region.name].get_instance(instance.id) + instance.block_device_mapping.should.have.key('/dev/sda1') + instance.block_device_mapping[ + '/dev/sda1'].delete_on_termination.should.be(True) + + +@mock_ec2_deprecated +def test_volume_tag_escaping(): + conn = boto.connect_ec2('the_key', 'the_secret') + vol = conn.create_volume(10, 'us-east-1a') + snapshot = conn.create_snapshot(vol.id, 'Desc') + + with assert_raises(EC2ResponseError) as ex: + snapshot.add_tags({'key': ''}, dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set') + snaps = [snap for snap in conn.get_all_snapshots() if snap.id == snapshot.id] + dict(snaps[0].tags).should_not.be.equal( + {'key': ''}) + + snapshot.add_tags({'key': ''}) + + snaps = [snap for snap in conn.get_all_snapshots() if snap.id == snapshot.id] + dict(snaps[0].tags).should.equal({'key': ''}) + + +@mock_ec2 +def test_volume_property_hidden_when_no_tags_exist(): + 
ec2_client = boto3.client('ec2', region_name='us-east-1') + + volume_response = ec2_client.create_volume( + Size=10, + AvailabilityZone='us-east-1a' + ) + + volume_response.get('Tags').should.equal(None) + + +@freeze_time +@mock_ec2 +def test_copy_snapshot(): + ec2_client = boto3.client('ec2', region_name='eu-west-1') + dest_ec2_client = boto3.client('ec2', region_name='eu-west-2') + + volume_response = ec2_client.create_volume( + AvailabilityZone='eu-west-1a', Size=10 + ) + + create_snapshot_response = ec2_client.create_snapshot( + VolumeId=volume_response['VolumeId'] + ) + + copy_snapshot_response = dest_ec2_client.copy_snapshot( + SourceSnapshotId=create_snapshot_response['SnapshotId'], + SourceRegion="eu-west-1" + ) + + ec2 = boto3.resource('ec2', region_name='eu-west-1') + dest_ec2 = boto3.resource('ec2', region_name='eu-west-2') + + source = ec2.Snapshot(create_snapshot_response['SnapshotId']) + dest = dest_ec2.Snapshot(copy_snapshot_response['SnapshotId']) + + attribs = ['data_encryption_key_id', 'encrypted', + 'kms_key_id', 'owner_alias', 'owner_id', + 'progress', 'state', 'state_message', + 'tags', 'volume_id', 'volume_size'] + + for attrib in attribs: + getattr(source, attrib).should.equal(getattr(dest, attrib)) + + # Copy from non-existent source ID. + with assert_raises(ClientError) as cm: + create_snapshot_error = ec2_client.create_snapshot( + VolumeId='vol-abcd1234' + ) + cm.exception.response['Error']['Code'].should.equal('InvalidVolume.NotFound') + cm.exception.response['Error']['Message'].should.equal("The volume 'vol-abcd1234' does not exist.") + cm.exception.response['ResponseMetadata']['RequestId'].should_not.be.none + cm.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) + + # Copy from non-existent source region. 
+ with assert_raises(ClientError) as cm: + copy_snapshot_response = dest_ec2_client.copy_snapshot( + SourceSnapshotId=create_snapshot_response['SnapshotId'], + SourceRegion="eu-west-2" + ) + cm.exception.response['Error']['Code'].should.equal('InvalidSnapshot.NotFound') + cm.exception.response['Error']['Message'].should.be.none + cm.exception.response['ResponseMetadata']['RequestId'].should_not.be.none + cm.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) + +@mock_ec2 +def test_search_for_many_snapshots(): + ec2_client = boto3.client('ec2', region_name='eu-west-1') + + volume_response = ec2_client.create_volume( + AvailabilityZone='eu-west-1a', Size=10 + ) + + snapshot_ids = [] + for i in range(1, 20): + create_snapshot_response = ec2_client.create_snapshot( + VolumeId=volume_response['VolumeId'] + ) + snapshot_ids.append(create_snapshot_response['SnapshotId']) + + snapshots_response = ec2_client.describe_snapshots( + SnapshotIds=snapshot_ids + ) + + assert len(snapshots_response['Snapshots']) == len(snapshot_ids) diff --git a/tests/test_ec2/test_elastic_network_interfaces.py b/tests/test_ec2/test_elastic_network_interfaces.py index 56959e484..70e78ae12 100644 --- a/tests/test_ec2/test_elastic_network_interfaces.py +++ b/tests/test_ec2/test_elastic_network_interfaces.py @@ -1,362 +1,367 @@ -from __future__ import unicode_literals -# Ensure 'assert_raises' context manager support for Python 2.6 -import tests.backport_assert_raises -from nose.tools import assert_raises - -import boto3 -from botocore.exceptions import ClientError -import boto -import boto.cloudformation -import boto.ec2 -from boto.exception import EC2ResponseError -import sure # noqa - -from moto import mock_ec2, mock_cloudformation_deprecated, mock_ec2_deprecated -from tests.helpers import requires_boto_gte -from tests.test_cloudformation.fixtures import vpc_eni -import json - - -@mock_ec2_deprecated -def test_elastic_network_interfaces(): - conn = 
boto.connect_vpc('the_key', 'the_secret') - vpc = conn.create_vpc("10.0.0.0/16") - subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") - - with assert_raises(EC2ResponseError) as ex: - eni = conn.create_network_interface(subnet.id, dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the CreateNetworkInterface operation: Request would have succeeded, but DryRun flag is set') - - eni = conn.create_network_interface(subnet.id) - - all_enis = conn.get_all_network_interfaces() - all_enis.should.have.length_of(1) - eni = all_enis[0] - eni.groups.should.have.length_of(0) - eni.private_ip_addresses.should.have.length_of(0) - - with assert_raises(EC2ResponseError) as ex: - conn.delete_network_interface(eni.id, dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the DeleteNetworkInterface operation: Request would have succeeded, but DryRun flag is set') - - conn.delete_network_interface(eni.id) - - all_enis = conn.get_all_network_interfaces() - all_enis.should.have.length_of(0) - - with assert_raises(EC2ResponseError) as cm: - conn.delete_network_interface(eni.id) - cm.exception.error_code.should.equal('InvalidNetworkInterfaceID.NotFound') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - -@mock_ec2_deprecated -def test_elastic_network_interfaces_subnet_validation(): - conn = boto.connect_vpc('the_key', 'the_secret') - - with assert_raises(EC2ResponseError) as cm: - conn.create_network_interface("subnet-abcd1234") - cm.exception.error_code.should.equal('InvalidSubnetID.NotFound') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - -@mock_ec2_deprecated -def test_elastic_network_interfaces_with_private_ip(): - 
conn = boto.connect_vpc('the_key', 'the_secret') - vpc = conn.create_vpc("10.0.0.0/16") - subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") - private_ip = "54.0.0.1" - eni = conn.create_network_interface(subnet.id, private_ip) - - all_enis = conn.get_all_network_interfaces() - all_enis.should.have.length_of(1) - - eni = all_enis[0] - eni.groups.should.have.length_of(0) - - eni.private_ip_addresses.should.have.length_of(1) - eni.private_ip_addresses[0].private_ip_address.should.equal(private_ip) - - -@mock_ec2_deprecated -def test_elastic_network_interfaces_with_groups(): - conn = boto.connect_vpc('the_key', 'the_secret') - vpc = conn.create_vpc("10.0.0.0/16") - subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") - security_group1 = conn.create_security_group( - 'test security group #1', 'this is a test security group') - security_group2 = conn.create_security_group( - 'test security group #2', 'this is a test security group') - conn.create_network_interface( - subnet.id, groups=[security_group1.id, security_group2.id]) - - all_enis = conn.get_all_network_interfaces() - all_enis.should.have.length_of(1) - - eni = all_enis[0] - eni.groups.should.have.length_of(2) - set([group.id for group in eni.groups]).should.equal( - set([security_group1.id, security_group2.id])) - - -@requires_boto_gte("2.12.0") -@mock_ec2_deprecated -def test_elastic_network_interfaces_modify_attribute(): - conn = boto.connect_vpc('the_key', 'the_secret') - vpc = conn.create_vpc("10.0.0.0/16") - subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") - security_group1 = conn.create_security_group( - 'test security group #1', 'this is a test security group') - security_group2 = conn.create_security_group( - 'test security group #2', 'this is a test security group') - conn.create_network_interface(subnet.id, groups=[security_group1.id]) - - all_enis = conn.get_all_network_interfaces() - all_enis.should.have.length_of(1) - - eni = all_enis[0] - eni.groups.should.have.length_of(1) - 
eni.groups[0].id.should.equal(security_group1.id) - - with assert_raises(EC2ResponseError) as ex: - conn.modify_network_interface_attribute( - eni.id, 'groupset', [security_group2.id], dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the ModifyNetworkInterface operation: Request would have succeeded, but DryRun flag is set') - - conn.modify_network_interface_attribute( - eni.id, 'groupset', [security_group2.id]) - - all_enis = conn.get_all_network_interfaces() - all_enis.should.have.length_of(1) - - eni = all_enis[0] - eni.groups.should.have.length_of(1) - eni.groups[0].id.should.equal(security_group2.id) - - -@mock_ec2_deprecated -def test_elastic_network_interfaces_filtering(): - conn = boto.connect_vpc('the_key', 'the_secret') - vpc = conn.create_vpc("10.0.0.0/16") - subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") - - security_group1 = conn.create_security_group( - 'test security group #1', 'this is a test security group') - security_group2 = conn.create_security_group( - 'test security group #2', 'this is a test security group') - - eni1 = conn.create_network_interface( - subnet.id, groups=[security_group1.id, security_group2.id]) - eni2 = conn.create_network_interface( - subnet.id, groups=[security_group1.id]) - eni3 = conn.create_network_interface(subnet.id) - - all_enis = conn.get_all_network_interfaces() - all_enis.should.have.length_of(3) - - # Filter by NetworkInterfaceId - enis_by_id = conn.get_all_network_interfaces([eni1.id]) - enis_by_id.should.have.length_of(1) - set([eni.id for eni in enis_by_id]).should.equal(set([eni1.id])) - - # Filter by ENI ID - enis_by_id = conn.get_all_network_interfaces( - filters={'network-interface-id': eni1.id}) - enis_by_id.should.have.length_of(1) - set([eni.id for eni in enis_by_id]).should.equal(set([eni1.id])) - - # Filter by Security Group - enis_by_group = 
conn.get_all_network_interfaces( - filters={'group-id': security_group1.id}) - enis_by_group.should.have.length_of(2) - set([eni.id for eni in enis_by_group]).should.equal(set([eni1.id, eni2.id])) - - # Filter by ENI ID and Security Group - enis_by_group = conn.get_all_network_interfaces( - filters={'network-interface-id': eni1.id, 'group-id': security_group1.id}) - enis_by_group.should.have.length_of(1) - set([eni.id for eni in enis_by_group]).should.equal(set([eni1.id])) - - # Unsupported filter - conn.get_all_network_interfaces.when.called_with( - filters={'not-implemented-filter': 'foobar'}).should.throw(NotImplementedError) - - -@mock_ec2 -def test_elastic_network_interfaces_get_by_tag_name(): - ec2 = boto3.resource('ec2', region_name='us-west-2') - ec2_client = boto3.client('ec2', region_name='us-west-2') - - vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16') - subnet = ec2.create_subnet( - VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a') - - eni1 = ec2.create_network_interface( - SubnetId=subnet.id, PrivateIpAddress='10.0.10.5') - - with assert_raises(ClientError) as ex: - eni1.create_tags(Tags=[{'Key': 'Name', 'Value': 'eni1'}], DryRun=True) - ex.exception.response['Error']['Code'].should.equal('DryRunOperation') - ex.exception.response['ResponseMetadata'][ - 'HTTPStatusCode'].should.equal(400) - ex.exception.response['Error']['Message'].should.equal( - 'An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set') - - eni1.create_tags(Tags=[{'Key': 'Name', 'Value': 'eni1'}]) - - # The status of the new interface should be 'available' - waiter = ec2_client.get_waiter('network_interface_available') - waiter.wait(NetworkInterfaceIds=[eni1.id]) - - filters = [{'Name': 'tag:Name', 'Values': ['eni1']}] - enis = list(ec2.network_interfaces.filter(Filters=filters)) - enis.should.have.length_of(1) - - filters = [{'Name': 'tag:Name', 'Values': ['wrong-name']}] - enis = 
list(ec2.network_interfaces.filter(Filters=filters)) - enis.should.have.length_of(0) - - -@mock_ec2 -def test_elastic_network_interfaces_get_by_availability_zone(): - ec2 = boto3.resource('ec2', region_name='us-west-2') - ec2_client = boto3.client('ec2', region_name='us-west-2') - - vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16') - subnet1 = ec2.create_subnet( - VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a') - - subnet2 = ec2.create_subnet( - VpcId=vpc.id, CidrBlock='10.0.1.0/24', AvailabilityZone='us-west-2b') - - eni1 = ec2.create_network_interface( - SubnetId=subnet1.id, PrivateIpAddress='10.0.0.15') - - eni2 = ec2.create_network_interface( - SubnetId=subnet2.id, PrivateIpAddress='10.0.1.15') - - # The status of the new interface should be 'available' - waiter = ec2_client.get_waiter('network_interface_available') - waiter.wait(NetworkInterfaceIds=[eni1.id, eni2.id]) - - filters = [{'Name': 'availability-zone', 'Values': ['us-west-2a']}] - enis = list(ec2.network_interfaces.filter(Filters=filters)) - enis.should.have.length_of(1) - - filters = [{'Name': 'availability-zone', 'Values': ['us-west-2c']}] - enis = list(ec2.network_interfaces.filter(Filters=filters)) - enis.should.have.length_of(0) - - -@mock_ec2 -def test_elastic_network_interfaces_get_by_private_ip(): - ec2 = boto3.resource('ec2', region_name='us-west-2') - ec2_client = boto3.client('ec2', region_name='us-west-2') - - vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16') - subnet = ec2.create_subnet( - VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a') - - eni1 = ec2.create_network_interface( - SubnetId=subnet.id, PrivateIpAddress='10.0.10.5') - - # The status of the new interface should be 'available' - waiter = ec2_client.get_waiter('network_interface_available') - waiter.wait(NetworkInterfaceIds=[eni1.id]) - - filters = [{'Name': 'private-ip-address', 'Values': ['10.0.10.5']}] - enis = list(ec2.network_interfaces.filter(Filters=filters)) - 
enis.should.have.length_of(1) - - filters = [{'Name': 'private-ip-address', 'Values': ['10.0.10.10']}] - enis = list(ec2.network_interfaces.filter(Filters=filters)) - enis.should.have.length_of(0) - - filters = [{'Name': 'addresses.private-ip-address', 'Values': ['10.0.10.5']}] - enis = list(ec2.network_interfaces.filter(Filters=filters)) - enis.should.have.length_of(1) - - filters = [{'Name': 'addresses.private-ip-address', 'Values': ['10.0.10.10']}] - enis = list(ec2.network_interfaces.filter(Filters=filters)) - enis.should.have.length_of(0) - - -@mock_ec2 -def test_elastic_network_interfaces_get_by_vpc_id(): - ec2 = boto3.resource('ec2', region_name='us-west-2') - ec2_client = boto3.client('ec2', region_name='us-west-2') - - vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16') - subnet = ec2.create_subnet( - VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a') - - eni1 = ec2.create_network_interface( - SubnetId=subnet.id, PrivateIpAddress='10.0.10.5') - - # The status of the new interface should be 'available' - waiter = ec2_client.get_waiter('network_interface_available') - waiter.wait(NetworkInterfaceIds=[eni1.id]) - - filters = [{'Name': 'vpc-id', 'Values': [subnet.vpc_id]}] - enis = list(ec2.network_interfaces.filter(Filters=filters)) - enis.should.have.length_of(1) - - filters = [{'Name': 'vpc-id', 'Values': ['vpc-aaaa1111']}] - enis = list(ec2.network_interfaces.filter(Filters=filters)) - enis.should.have.length_of(0) - - -@mock_ec2 -def test_elastic_network_interfaces_get_by_subnet_id(): - ec2 = boto3.resource('ec2', region_name='us-west-2') - ec2_client = boto3.client('ec2', region_name='us-west-2') - - vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16') - subnet = ec2.create_subnet( - VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a') - - eni1 = ec2.create_network_interface( - SubnetId=subnet.id, PrivateIpAddress='10.0.10.5') - - # The status of the new interface should be 'available' - waiter = 
ec2_client.get_waiter('network_interface_available') - waiter.wait(NetworkInterfaceIds=[eni1.id]) - - filters = [{'Name': 'subnet-id', 'Values': [subnet.id]}] - enis = list(ec2.network_interfaces.filter(Filters=filters)) - enis.should.have.length_of(1) - - filters = [{'Name': 'subnet-id', 'Values': ['subnet-aaaa1111']}] - enis = list(ec2.network_interfaces.filter(Filters=filters)) - enis.should.have.length_of(0) - - -@mock_ec2_deprecated -@mock_cloudformation_deprecated -def test_elastic_network_interfaces_cloudformation(): - template = vpc_eni.template - template_json = json.dumps(template) - conn = boto.cloudformation.connect_to_region("us-west-1") - conn.create_stack( - "test_stack", - template_body=template_json, - ) - ec2_conn = boto.ec2.connect_to_region("us-west-1") - eni = ec2_conn.get_all_network_interfaces()[0] - - stack = conn.describe_stacks()[0] - resources = stack.describe_resources() - cfn_eni = [resource for resource in resources if resource.resource_type == - 'AWS::EC2::NetworkInterface'][0] - cfn_eni.physical_resource_id.should.equal(eni.id) +from __future__ import unicode_literals +# Ensure 'assert_raises' context manager support for Python 2.6 +import tests.backport_assert_raises +from nose.tools import assert_raises + +import boto3 +from botocore.exceptions import ClientError +import boto +import boto.cloudformation +import boto.ec2 +from boto.exception import EC2ResponseError +import sure # noqa + +from moto import mock_ec2, mock_cloudformation_deprecated, mock_ec2_deprecated +from tests.helpers import requires_boto_gte +from tests.test_cloudformation.fixtures import vpc_eni +import json + + +@mock_ec2_deprecated +def test_elastic_network_interfaces(): + conn = boto.connect_vpc('the_key', 'the_secret') + vpc = conn.create_vpc("10.0.0.0/16") + subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") + + with assert_raises(EC2ResponseError) as ex: + eni = conn.create_network_interface(subnet.id, dry_run=True) + 
ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the CreateNetworkInterface operation: Request would have succeeded, but DryRun flag is set') + + eni = conn.create_network_interface(subnet.id) + + all_enis = conn.get_all_network_interfaces() + all_enis.should.have.length_of(1) + eni = all_enis[0] + eni.groups.should.have.length_of(0) + eni.private_ip_addresses.should.have.length_of(1) + eni.private_ip_addresses[0].private_ip_address.startswith('10.').should.be.true + + with assert_raises(EC2ResponseError) as ex: + conn.delete_network_interface(eni.id, dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the DeleteNetworkInterface operation: Request would have succeeded, but DryRun flag is set') + + conn.delete_network_interface(eni.id) + + all_enis = conn.get_all_network_interfaces() + all_enis.should.have.length_of(0) + + with assert_raises(EC2ResponseError) as cm: + conn.delete_network_interface(eni.id) + cm.exception.error_code.should.equal('InvalidNetworkInterfaceID.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@mock_ec2_deprecated +def test_elastic_network_interfaces_subnet_validation(): + conn = boto.connect_vpc('the_key', 'the_secret') + + with assert_raises(EC2ResponseError) as cm: + conn.create_network_interface("subnet-abcd1234") + cm.exception.error_code.should.equal('InvalidSubnetID.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@mock_ec2_deprecated +def test_elastic_network_interfaces_with_private_ip(): + conn = boto.connect_vpc('the_key', 'the_secret') + vpc = conn.create_vpc("10.0.0.0/16") + subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") + private_ip = 
"54.0.0.1" + eni = conn.create_network_interface(subnet.id, private_ip) + + all_enis = conn.get_all_network_interfaces() + all_enis.should.have.length_of(1) + + eni = all_enis[0] + eni.groups.should.have.length_of(0) + + eni.private_ip_addresses.should.have.length_of(1) + eni.private_ip_addresses[0].private_ip_address.should.equal(private_ip) + + +@mock_ec2_deprecated +def test_elastic_network_interfaces_with_groups(): + conn = boto.connect_vpc('the_key', 'the_secret') + vpc = conn.create_vpc("10.0.0.0/16") + subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") + security_group1 = conn.create_security_group( + 'test security group #1', 'this is a test security group') + security_group2 = conn.create_security_group( + 'test security group #2', 'this is a test security group') + conn.create_network_interface( + subnet.id, groups=[security_group1.id, security_group2.id]) + + all_enis = conn.get_all_network_interfaces() + all_enis.should.have.length_of(1) + + eni = all_enis[0] + eni.groups.should.have.length_of(2) + set([group.id for group in eni.groups]).should.equal( + set([security_group1.id, security_group2.id])) + + +@requires_boto_gte("2.12.0") +@mock_ec2_deprecated +def test_elastic_network_interfaces_modify_attribute(): + conn = boto.connect_vpc('the_key', 'the_secret') + vpc = conn.create_vpc("10.0.0.0/16") + subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") + security_group1 = conn.create_security_group( + 'test security group #1', 'this is a test security group') + security_group2 = conn.create_security_group( + 'test security group #2', 'this is a test security group') + conn.create_network_interface(subnet.id, groups=[security_group1.id]) + + all_enis = conn.get_all_network_interfaces() + all_enis.should.have.length_of(1) + + eni = all_enis[0] + eni.groups.should.have.length_of(1) + eni.groups[0].id.should.equal(security_group1.id) + + with assert_raises(EC2ResponseError) as ex: + conn.modify_network_interface_attribute( + eni.id, 'groupset', 
[security_group2.id], dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the ModifyNetworkInterface operation: Request would have succeeded, but DryRun flag is set') + + conn.modify_network_interface_attribute( + eni.id, 'groupset', [security_group2.id]) + + all_enis = conn.get_all_network_interfaces() + all_enis.should.have.length_of(1) + + eni = all_enis[0] + eni.groups.should.have.length_of(1) + eni.groups[0].id.should.equal(security_group2.id) + + +@mock_ec2_deprecated +def test_elastic_network_interfaces_filtering(): + conn = boto.connect_vpc('the_key', 'the_secret') + vpc = conn.create_vpc("10.0.0.0/16") + subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") + + security_group1 = conn.create_security_group( + 'test security group #1', 'this is a test security group') + security_group2 = conn.create_security_group( + 'test security group #2', 'this is a test security group') + + eni1 = conn.create_network_interface( + subnet.id, groups=[security_group1.id, security_group2.id]) + eni2 = conn.create_network_interface( + subnet.id, groups=[security_group1.id]) + eni3 = conn.create_network_interface(subnet.id) + + all_enis = conn.get_all_network_interfaces() + all_enis.should.have.length_of(3) + + # Filter by NetworkInterfaceId + enis_by_id = conn.get_all_network_interfaces([eni1.id]) + enis_by_id.should.have.length_of(1) + set([eni.id for eni in enis_by_id]).should.equal(set([eni1.id])) + + # Filter by ENI ID + enis_by_id = conn.get_all_network_interfaces( + filters={'network-interface-id': eni1.id}) + enis_by_id.should.have.length_of(1) + set([eni.id for eni in enis_by_id]).should.equal(set([eni1.id])) + + # Filter by Security Group + enis_by_group = conn.get_all_network_interfaces( + filters={'group-id': security_group1.id}) + enis_by_group.should.have.length_of(2) + set([eni.id for eni in 
enis_by_group]).should.equal(set([eni1.id, eni2.id])) + + # Filter by ENI ID and Security Group + enis_by_group = conn.get_all_network_interfaces( + filters={'network-interface-id': eni1.id, 'group-id': security_group1.id}) + enis_by_group.should.have.length_of(1) + set([eni.id for eni in enis_by_group]).should.equal(set([eni1.id])) + + # Unsupported filter + conn.get_all_network_interfaces.when.called_with( + filters={'not-implemented-filter': 'foobar'}).should.throw(NotImplementedError) + + +@mock_ec2 +def test_elastic_network_interfaces_get_by_tag_name(): + ec2 = boto3.resource('ec2', region_name='us-west-2') + ec2_client = boto3.client('ec2', region_name='us-west-2') + + vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16') + subnet = ec2.create_subnet( + VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a') + + eni1 = ec2.create_network_interface( + SubnetId=subnet.id, PrivateIpAddress='10.0.10.5') + + with assert_raises(ClientError) as ex: + eni1.create_tags(Tags=[{'Key': 'Name', 'Value': 'eni1'}], DryRun=True) + ex.exception.response['Error']['Code'].should.equal('DryRunOperation') + ex.exception.response['ResponseMetadata'][ + 'HTTPStatusCode'].should.equal(400) + ex.exception.response['Error']['Message'].should.equal( + 'An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set') + + eni1.create_tags(Tags=[{'Key': 'Name', 'Value': 'eni1'}]) + + # The status of the new interface should be 'available' + waiter = ec2_client.get_waiter('network_interface_available') + waiter.wait(NetworkInterfaceIds=[eni1.id]) + + filters = [{'Name': 'tag:Name', 'Values': ['eni1']}] + enis = list(ec2.network_interfaces.filter(Filters=filters)) + enis.should.have.length_of(1) + + filters = [{'Name': 'tag:Name', 'Values': ['wrong-name']}] + enis = list(ec2.network_interfaces.filter(Filters=filters)) + enis.should.have.length_of(0) + + +@mock_ec2 +def 
test_elastic_network_interfaces_get_by_availability_zone(): + ec2 = boto3.resource('ec2', region_name='us-west-2') + ec2_client = boto3.client('ec2', region_name='us-west-2') + + vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16') + subnet1 = ec2.create_subnet( + VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a') + + subnet2 = ec2.create_subnet( + VpcId=vpc.id, CidrBlock='10.0.1.0/24', AvailabilityZone='us-west-2b') + + eni1 = ec2.create_network_interface( + SubnetId=subnet1.id, PrivateIpAddress='10.0.0.15') + + eni2 = ec2.create_network_interface( + SubnetId=subnet2.id, PrivateIpAddress='10.0.1.15') + + # The status of the new interface should be 'available' + waiter = ec2_client.get_waiter('network_interface_available') + waiter.wait(NetworkInterfaceIds=[eni1.id, eni2.id]) + + filters = [{'Name': 'availability-zone', 'Values': ['us-west-2a']}] + enis = list(ec2.network_interfaces.filter(Filters=filters)) + enis.should.have.length_of(1) + + filters = [{'Name': 'availability-zone', 'Values': ['us-west-2c']}] + enis = list(ec2.network_interfaces.filter(Filters=filters)) + enis.should.have.length_of(0) + + +@mock_ec2 +def test_elastic_network_interfaces_get_by_private_ip(): + ec2 = boto3.resource('ec2', region_name='us-west-2') + ec2_client = boto3.client('ec2', region_name='us-west-2') + + vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16') + subnet = ec2.create_subnet( + VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a') + + eni1 = ec2.create_network_interface( + SubnetId=subnet.id, PrivateIpAddress='10.0.10.5') + + # The status of the new interface should be 'available' + waiter = ec2_client.get_waiter('network_interface_available') + waiter.wait(NetworkInterfaceIds=[eni1.id]) + + filters = [{'Name': 'private-ip-address', 'Values': ['10.0.10.5']}] + enis = list(ec2.network_interfaces.filter(Filters=filters)) + enis.should.have.length_of(1) + + filters = [{'Name': 'private-ip-address', 'Values': ['10.0.10.10']}] + enis = 
list(ec2.network_interfaces.filter(Filters=filters)) + enis.should.have.length_of(0) + + filters = [{'Name': 'addresses.private-ip-address', 'Values': ['10.0.10.5']}] + enis = list(ec2.network_interfaces.filter(Filters=filters)) + enis.should.have.length_of(1) + + filters = [{'Name': 'addresses.private-ip-address', 'Values': ['10.0.10.10']}] + enis = list(ec2.network_interfaces.filter(Filters=filters)) + enis.should.have.length_of(0) + + +@mock_ec2 +def test_elastic_network_interfaces_get_by_vpc_id(): + ec2 = boto3.resource('ec2', region_name='us-west-2') + ec2_client = boto3.client('ec2', region_name='us-west-2') + + vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16') + subnet = ec2.create_subnet( + VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a') + + eni1 = ec2.create_network_interface( + SubnetId=subnet.id, PrivateIpAddress='10.0.10.5') + + # The status of the new interface should be 'available' + waiter = ec2_client.get_waiter('network_interface_available') + waiter.wait(NetworkInterfaceIds=[eni1.id]) + + filters = [{'Name': 'vpc-id', 'Values': [subnet.vpc_id]}] + enis = list(ec2.network_interfaces.filter(Filters=filters)) + enis.should.have.length_of(1) + + filters = [{'Name': 'vpc-id', 'Values': ['vpc-aaaa1111']}] + enis = list(ec2.network_interfaces.filter(Filters=filters)) + enis.should.have.length_of(0) + + +@mock_ec2 +def test_elastic_network_interfaces_get_by_subnet_id(): + ec2 = boto3.resource('ec2', region_name='us-west-2') + ec2_client = boto3.client('ec2', region_name='us-west-2') + + vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16') + subnet = ec2.create_subnet( + VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a') + + eni1 = ec2.create_network_interface( + SubnetId=subnet.id, PrivateIpAddress='10.0.10.5') + + # The status of the new interface should be 'available' + waiter = ec2_client.get_waiter('network_interface_available') + waiter.wait(NetworkInterfaceIds=[eni1.id]) + + filters = [{'Name': 'subnet-id', 'Values': 
[subnet.id]}] + enis = list(ec2.network_interfaces.filter(Filters=filters)) + enis.should.have.length_of(1) + + filters = [{'Name': 'subnet-id', 'Values': ['subnet-aaaa1111']}] + enis = list(ec2.network_interfaces.filter(Filters=filters)) + enis.should.have.length_of(0) + + +@mock_ec2_deprecated +@mock_cloudformation_deprecated +def test_elastic_network_interfaces_cloudformation(): + template = vpc_eni.template + template_json = json.dumps(template) + conn = boto.cloudformation.connect_to_region("us-west-1") + conn.create_stack( + "test_stack", + template_body=template_json, + ) + ec2_conn = boto.ec2.connect_to_region("us-west-1") + eni = ec2_conn.get_all_network_interfaces()[0] + eni.private_ip_addresses.should.have.length_of(1) + + stack = conn.describe_stacks()[0] + resources = stack.describe_resources() + cfn_eni = [resource for resource in resources if resource.resource_type == + 'AWS::EC2::NetworkInterface'][0] + cfn_eni.physical_resource_id.should.equal(eni.id) + + outputs = {output.key: output.value for output in stack.outputs} + outputs['ENIIpAddress'].should.equal(eni.private_ip_addresses[0].private_ip_address) diff --git a/tests/test_ec2/test_instances.py b/tests/test_ec2/test_instances.py index 109017b3c..c0f0eea4d 100644 --- a/tests/test_ec2/test_instances.py +++ b/tests/test_ec2/test_instances.py @@ -1,1256 +1,1271 @@ -from __future__ import unicode_literals -# Ensure 'assert_raises' context manager support for Python 2.6 -import tests.backport_assert_raises -from nose.tools import assert_raises - -import base64 -import datetime -import ipaddress - -import six -import boto -import boto3 -from boto.ec2.instance import Reservation, InstanceAttribute -from boto.exception import EC2ResponseError, EC2ResponseError -from freezegun import freeze_time -import sure # noqa - -from moto import mock_ec2_deprecated, mock_ec2 -from tests.helpers import requires_boto_gte - - -################ Test Readme ############### -def add_servers(ami_id, count): - conn = 
boto.connect_ec2() - for index in range(count): - conn.run_instances(ami_id) - - -@mock_ec2_deprecated -def test_add_servers(): - add_servers('ami-1234abcd', 2) - - conn = boto.connect_ec2() - reservations = conn.get_all_instances() - assert len(reservations) == 2 - instance1 = reservations[0].instances[0] - assert instance1.image_id == 'ami-1234abcd' - -############################################ - - -@freeze_time("2014-01-01 05:00:00") -@mock_ec2_deprecated -def test_instance_launch_and_terminate(): - conn = boto.connect_ec2('the_key', 'the_secret') - - with assert_raises(EC2ResponseError) as ex: - reservation = conn.run_instances('ami-1234abcd', dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the RunInstance operation: Request would have succeeded, but DryRun flag is set') - - reservation = conn.run_instances('ami-1234abcd') - reservation.should.be.a(Reservation) - reservation.instances.should.have.length_of(1) - instance = reservation.instances[0] - instance.state.should.equal('pending') - - reservations = conn.get_all_instances() - reservations.should.have.length_of(1) - reservations[0].id.should.equal(reservation.id) - instances = reservations[0].instances - instances.should.have.length_of(1) - instance = instances[0] - instance.id.should.equal(instance.id) - instance.state.should.equal('running') - instance.launch_time.should.equal("2014-01-01T05:00:00.000Z") - instance.vpc_id.should.equal(None) - instance.placement.should.equal('us-east-1a') - - root_device_name = instance.root_device_name - instance.block_device_mapping[ - root_device_name].status.should.equal('in-use') - volume_id = instance.block_device_mapping[root_device_name].volume_id - volume_id.should.match(r'vol-\w+') - - volume = conn.get_all_volumes(volume_ids=[volume_id])[0] - volume.attach_data.instance_id.should.equal(instance.id) - 
volume.status.should.equal('in-use') - - with assert_raises(EC2ResponseError) as ex: - conn.terminate_instances([instance.id], dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the TerminateInstance operation: Request would have succeeded, but DryRun flag is set') - - conn.terminate_instances([instance.id]) - - reservations = conn.get_all_instances() - instance = reservations[0].instances[0] - instance.state.should.equal('terminated') - - -@mock_ec2_deprecated -def test_terminate_empty_instances(): - conn = boto.connect_ec2('the_key', 'the_secret') - conn.terminate_instances.when.called_with( - []).should.throw(EC2ResponseError) - - -@freeze_time("2014-01-01 05:00:00") -@mock_ec2_deprecated -def test_instance_attach_volume(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - - vol1 = conn.create_volume(size=36, zone=conn.region.name) - vol1.attach(instance.id, "/dev/sda1") - vol1.update() - vol2 = conn.create_volume(size=65, zone=conn.region.name) - vol2.attach(instance.id, "/dev/sdb1") - vol2.update() - vol3 = conn.create_volume(size=130, zone=conn.region.name) - vol3.attach(instance.id, "/dev/sdc1") - vol3.update() - - reservations = conn.get_all_instances() - instance = reservations[0].instances[0] - - instance.block_device_mapping.should.have.length_of(3) - - for v in conn.get_all_volumes(volume_ids=[instance.block_device_mapping['/dev/sdc1'].volume_id]): - v.attach_data.instance_id.should.equal(instance.id) - # can do due to freeze_time decorator. - v.attach_data.attach_time.should.equal(instance.launch_time) - # can do due to freeze_time decorator. 
- v.create_time.should.equal(instance.launch_time) - v.region.name.should.equal(instance.region.name) - v.status.should.equal('in-use') - - -@mock_ec2_deprecated -def test_get_instances_by_id(): - conn = boto.connect_ec2() - reservation = conn.run_instances('ami-1234abcd', min_count=2) - instance1, instance2 = reservation.instances - - reservations = conn.get_all_instances(instance_ids=[instance1.id]) - reservations.should.have.length_of(1) - reservation = reservations[0] - reservation.instances.should.have.length_of(1) - reservation.instances[0].id.should.equal(instance1.id) - - reservations = conn.get_all_instances( - instance_ids=[instance1.id, instance2.id]) - reservations.should.have.length_of(1) - reservation = reservations[0] - reservation.instances.should.have.length_of(2) - instance_ids = [instance.id for instance in reservation.instances] - instance_ids.should.equal([instance1.id, instance2.id]) - - # Call get_all_instances with a bad id should raise an error - with assert_raises(EC2ResponseError) as cm: - conn.get_all_instances(instance_ids=[instance1.id, "i-1234abcd"]) - cm.exception.code.should.equal('InvalidInstanceID.NotFound') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - -@mock_ec2 -def test_get_paginated_instances(): - image_id = 'ami-1234abcd' - client = boto3.client('ec2', region_name='us-east-1') - conn = boto3.resource('ec2', 'us-east-1') - for i in range(100): - conn.create_instances(ImageId=image_id, - MinCount=1, - MaxCount=1) - resp = client.describe_instances(MaxResults=50) - reservations = resp['Reservations'] - reservations.should.have.length_of(50) - next_token = resp['NextToken'] - next_token.should_not.be.none - resp2 = client.describe_instances(NextToken=next_token) - reservations.extend(resp2['Reservations']) - reservations.should.have.length_of(100) - assert 'NextToken' not in resp2.keys() - - -@mock_ec2 -def test_create_with_tags(): - ec2 = boto3.client('ec2', region_name='us-west-2') - 
instances = ec2.run_instances( - ImageId='ami-123', - MinCount=1, - MaxCount=1, - InstanceType='t2.micro', - TagSpecifications=[ - { - 'ResourceType': 'instance', - 'Tags': [ - { - 'Key': 'MY_TAG1', - 'Value': 'MY_VALUE1', - }, - { - 'Key': 'MY_TAG2', - 'Value': 'MY_VALUE2', - }, - ], - }, - { - 'ResourceType': 'instance', - 'Tags': [ - { - 'Key': 'MY_TAG3', - 'Value': 'MY_VALUE3', - }, - ] - }, - ], - ) - assert 'Tags' in instances['Instances'][0] - len(instances['Instances'][0]['Tags']).should.equal(3) - - -@mock_ec2_deprecated -def test_get_instances_filtering_by_state(): - conn = boto.connect_ec2() - reservation = conn.run_instances('ami-1234abcd', min_count=3) - instance1, instance2, instance3 = reservation.instances - - conn.terminate_instances([instance1.id]) - - reservations = conn.get_all_instances( - filters={'instance-state-name': 'running'}) - reservations.should.have.length_of(1) - # Since we terminated instance1, only instance2 and instance3 should be - # returned - instance_ids = [instance.id for instance in reservations[0].instances] - set(instance_ids).should.equal(set([instance2.id, instance3.id])) - - reservations = conn.get_all_instances( - [instance2.id], filters={'instance-state-name': 'running'}) - reservations.should.have.length_of(1) - instance_ids = [instance.id for instance in reservations[0].instances] - instance_ids.should.equal([instance2.id]) - - reservations = conn.get_all_instances( - [instance2.id], filters={'instance-state-name': 'terminated'}) - list(reservations).should.equal([]) - - # get_all_instances should still return all 3 - reservations = conn.get_all_instances() - reservations[0].instances.should.have.length_of(3) - - conn.get_all_instances.when.called_with( - filters={'not-implemented-filter': 'foobar'}).should.throw(NotImplementedError) - - -@mock_ec2_deprecated -def test_get_instances_filtering_by_instance_id(): - conn = boto.connect_ec2() - reservation = conn.run_instances('ami-1234abcd', min_count=3) - instance1, 
instance2, instance3 = reservation.instances - - reservations = conn.get_all_instances( - filters={'instance-id': instance1.id}) - # get_all_instances should return just instance1 - reservations[0].instances.should.have.length_of(1) - reservations[0].instances[0].id.should.equal(instance1.id) - - reservations = conn.get_all_instances( - filters={'instance-id': [instance1.id, instance2.id]}) - # get_all_instances should return two - reservations[0].instances.should.have.length_of(2) - - reservations = conn.get_all_instances( - filters={'instance-id': 'non-existing-id'}) - reservations.should.have.length_of(0) - - -@mock_ec2_deprecated -def test_get_instances_filtering_by_instance_type(): - conn = boto.connect_ec2() - reservation1 = conn.run_instances('ami-1234abcd', instance_type='m1.small') - instance1 = reservation1.instances[0] - reservation2 = conn.run_instances('ami-1234abcd', instance_type='m1.small') - instance2 = reservation2.instances[0] - reservation3 = conn.run_instances('ami-1234abcd', instance_type='t1.micro') - instance3 = reservation3.instances[0] - - reservations = conn.get_all_instances( - filters={'instance-type': 'm1.small'}) - # get_all_instances should return instance1,2 - reservations.should.have.length_of(2) - reservations[0].instances.should.have.length_of(1) - reservations[1].instances.should.have.length_of(1) - instance_ids = [reservations[0].instances[0].id, - reservations[1].instances[0].id] - set(instance_ids).should.equal(set([instance1.id, instance2.id])) - - reservations = conn.get_all_instances( - filters={'instance-type': 't1.micro'}) - # get_all_instances should return one - reservations.should.have.length_of(1) - reservations[0].instances.should.have.length_of(1) - reservations[0].instances[0].id.should.equal(instance3.id) - - reservations = conn.get_all_instances( - filters={'instance-type': ['t1.micro', 'm1.small']}) - reservations.should.have.length_of(3) - reservations[0].instances.should.have.length_of(1) - 
reservations[1].instances.should.have.length_of(1) - reservations[2].instances.should.have.length_of(1) - instance_ids = [ - reservations[0].instances[0].id, - reservations[1].instances[0].id, - reservations[2].instances[0].id, - ] - set(instance_ids).should.equal( - set([instance1.id, instance2.id, instance3.id])) - - reservations = conn.get_all_instances(filters={'instance-type': 'bogus'}) - # bogus instance-type should return none - reservations.should.have.length_of(0) - - -@mock_ec2_deprecated -def test_get_instances_filtering_by_reason_code(): - conn = boto.connect_ec2() - reservation = conn.run_instances('ami-1234abcd', min_count=3) - instance1, instance2, instance3 = reservation.instances - instance1.stop() - instance2.terminate() - - reservations = conn.get_all_instances( - filters={'state-reason-code': 'Client.UserInitiatedShutdown'}) - # get_all_instances should return instance1 and instance2 - reservations[0].instances.should.have.length_of(2) - set([instance1.id, instance2.id]).should.equal( - set([i.id for i in reservations[0].instances])) - - reservations = conn.get_all_instances(filters={'state-reason-code': ''}) - # get_all_instances should return instance 3 - reservations[0].instances.should.have.length_of(1) - reservations[0].instances[0].id.should.equal(instance3.id) - - -@mock_ec2_deprecated -def test_get_instances_filtering_by_source_dest_check(): - conn = boto.connect_ec2() - reservation = conn.run_instances('ami-1234abcd', min_count=2) - instance1, instance2 = reservation.instances - conn.modify_instance_attribute( - instance1.id, attribute='sourceDestCheck', value=False) - - source_dest_check_false = conn.get_all_instances( - filters={'source-dest-check': 'false'}) - source_dest_check_true = conn.get_all_instances( - filters={'source-dest-check': 'true'}) - - source_dest_check_false[0].instances.should.have.length_of(1) - source_dest_check_false[0].instances[0].id.should.equal(instance1.id) - - 
source_dest_check_true[0].instances.should.have.length_of(1) - source_dest_check_true[0].instances[0].id.should.equal(instance2.id) - - -@mock_ec2_deprecated -def test_get_instances_filtering_by_vpc_id(): - conn = boto.connect_vpc('the_key', 'the_secret') - vpc1 = conn.create_vpc("10.0.0.0/16") - subnet1 = conn.create_subnet(vpc1.id, "10.0.0.0/27") - reservation1 = conn.run_instances( - 'ami-1234abcd', min_count=1, subnet_id=subnet1.id) - instance1 = reservation1.instances[0] - - vpc2 = conn.create_vpc("10.1.0.0/16") - subnet2 = conn.create_subnet(vpc2.id, "10.1.0.0/27") - reservation2 = conn.run_instances( - 'ami-1234abcd', min_count=1, subnet_id=subnet2.id) - instance2 = reservation2.instances[0] - - reservations1 = conn.get_all_instances(filters={'vpc-id': vpc1.id}) - reservations1.should.have.length_of(1) - reservations1[0].instances.should.have.length_of(1) - reservations1[0].instances[0].id.should.equal(instance1.id) - reservations1[0].instances[0].vpc_id.should.equal(vpc1.id) - reservations1[0].instances[0].subnet_id.should.equal(subnet1.id) - - reservations2 = conn.get_all_instances(filters={'vpc-id': vpc2.id}) - reservations2.should.have.length_of(1) - reservations2[0].instances.should.have.length_of(1) - reservations2[0].instances[0].id.should.equal(instance2.id) - reservations2[0].instances[0].vpc_id.should.equal(vpc2.id) - reservations2[0].instances[0].subnet_id.should.equal(subnet2.id) - - -@mock_ec2_deprecated -def test_get_instances_filtering_by_architecture(): - conn = boto.connect_ec2() - reservation = conn.run_instances('ami-1234abcd', min_count=1) - instance = reservation.instances - - reservations = conn.get_all_instances(filters={'architecture': 'x86_64'}) - # get_all_instances should return the instance - reservations[0].instances.should.have.length_of(1) - - -@mock_ec2 -def test_get_instances_filtering_by_image_id(): - image_id = 'ami-1234abcd' - client = boto3.client('ec2', region_name='us-east-1') - conn = boto3.resource('ec2', 'us-east-1') 
- conn.create_instances(ImageId=image_id, - MinCount=1, - MaxCount=1) - - reservations = client.describe_instances(Filters=[{'Name': 'image-id', - 'Values': [image_id]}])['Reservations'] - reservations[0]['Instances'].should.have.length_of(1) - - -@mock_ec2 -def test_get_instances_filtering_by_private_dns(): - image_id = 'ami-1234abcd' - client = boto3.client('ec2', region_name='us-east-1') - conn = boto3.resource('ec2', 'us-east-1') - conn.create_instances(ImageId=image_id, - MinCount=1, - MaxCount=1, - PrivateIpAddress='10.0.0.1') - reservations = client.describe_instances(Filters=[ - {'Name': 'private-dns-name', 'Values': ['ip-10-0-0-1.ec2.internal']} - ])['Reservations'] - reservations[0]['Instances'].should.have.length_of(1) - - -@mock_ec2 -def test_get_instances_filtering_by_ni_private_dns(): - image_id = 'ami-1234abcd' - client = boto3.client('ec2', region_name='us-west-2') - conn = boto3.resource('ec2', 'us-west-2') - conn.create_instances(ImageId=image_id, - MinCount=1, - MaxCount=1, - PrivateIpAddress='10.0.0.1') - reservations = client.describe_instances(Filters=[ - {'Name': 'network-interface.private-dns-name', 'Values': ['ip-10-0-0-1.us-west-2.compute.internal']} - ])['Reservations'] - reservations[0]['Instances'].should.have.length_of(1) - - -@mock_ec2 -def test_get_instances_filtering_by_instance_group_name(): - image_id = 'ami-1234abcd' - client = boto3.client('ec2', region_name='us-east-1') - client.create_security_group( - Description='test', - GroupName='test_sg' - ) - client.run_instances(ImageId=image_id, - MinCount=1, - MaxCount=1, - SecurityGroups=['test_sg']) - reservations = client.describe_instances(Filters=[ - {'Name': 'instance.group-name', 'Values': ['test_sg']} - ])['Reservations'] - reservations[0]['Instances'].should.have.length_of(1) - - -@mock_ec2 -def test_get_instances_filtering_by_instance_group_id(): - image_id = 'ami-1234abcd' - client = boto3.client('ec2', region_name='us-east-1') - create_sg = client.create_security_group( - 
Description='test', - GroupName='test_sg' - ) - group_id = create_sg['GroupId'] - client.run_instances(ImageId=image_id, - MinCount=1, - MaxCount=1, - SecurityGroups=['test_sg']) - reservations = client.describe_instances(Filters=[ - {'Name': 'instance.group-id', 'Values': [group_id]} - ])['Reservations'] - reservations[0]['Instances'].should.have.length_of(1) - - -@mock_ec2_deprecated -def test_get_instances_filtering_by_tag(): - conn = boto.connect_ec2() - reservation = conn.run_instances('ami-1234abcd', min_count=3) - instance1, instance2, instance3 = reservation.instances - instance1.add_tag('tag1', 'value1') - instance1.add_tag('tag2', 'value2') - instance2.add_tag('tag1', 'value1') - instance2.add_tag('tag2', 'wrong value') - instance3.add_tag('tag2', 'value2') - - reservations = conn.get_all_instances(filters={'tag:tag0': 'value0'}) - # get_all_instances should return no instances - reservations.should.have.length_of(0) - - reservations = conn.get_all_instances(filters={'tag:tag1': 'value1'}) - # get_all_instances should return both instances with this tag value - reservations.should.have.length_of(1) - reservations[0].instances.should.have.length_of(2) - reservations[0].instances[0].id.should.equal(instance1.id) - reservations[0].instances[1].id.should.equal(instance2.id) - - reservations = conn.get_all_instances( - filters={'tag:tag1': 'value1', 'tag:tag2': 'value2'}) - # get_all_instances should return the instance with both tag values - reservations.should.have.length_of(1) - reservations[0].instances.should.have.length_of(1) - reservations[0].instances[0].id.should.equal(instance1.id) - - reservations = conn.get_all_instances( - filters={'tag:tag1': 'value1', 'tag:tag2': 'value2'}) - # get_all_instances should return the instance with both tag values - reservations.should.have.length_of(1) - reservations[0].instances.should.have.length_of(1) - reservations[0].instances[0].id.should.equal(instance1.id) - - reservations = conn.get_all_instances( - 
filters={'tag:tag2': ['value2', 'bogus']}) - # get_all_instances should return both instances with one of the - # acceptable tag values - reservations.should.have.length_of(1) - reservations[0].instances.should.have.length_of(2) - reservations[0].instances[0].id.should.equal(instance1.id) - reservations[0].instances[1].id.should.equal(instance3.id) - - -@mock_ec2_deprecated -def test_get_instances_filtering_by_tag_value(): - conn = boto.connect_ec2() - reservation = conn.run_instances('ami-1234abcd', min_count=3) - instance1, instance2, instance3 = reservation.instances - instance1.add_tag('tag1', 'value1') - instance1.add_tag('tag2', 'value2') - instance2.add_tag('tag1', 'value1') - instance2.add_tag('tag2', 'wrong value') - instance3.add_tag('tag2', 'value2') - - reservations = conn.get_all_instances(filters={'tag-value': 'value0'}) - # get_all_instances should return no instances - reservations.should.have.length_of(0) - - reservations = conn.get_all_instances(filters={'tag-value': 'value1'}) - # get_all_instances should return both instances with this tag value - reservations.should.have.length_of(1) - reservations[0].instances.should.have.length_of(2) - reservations[0].instances[0].id.should.equal(instance1.id) - reservations[0].instances[1].id.should.equal(instance2.id) - - reservations = conn.get_all_instances( - filters={'tag-value': ['value2', 'value1']}) - # get_all_instances should return both instances with one of the - # acceptable tag values - reservations.should.have.length_of(1) - reservations[0].instances.should.have.length_of(3) - reservations[0].instances[0].id.should.equal(instance1.id) - reservations[0].instances[1].id.should.equal(instance2.id) - reservations[0].instances[2].id.should.equal(instance3.id) - - reservations = conn.get_all_instances( - filters={'tag-value': ['value2', 'bogus']}) - # get_all_instances should return both instances with one of the - # acceptable tag values - reservations.should.have.length_of(1) - 
reservations[0].instances.should.have.length_of(2) - reservations[0].instances[0].id.should.equal(instance1.id) - reservations[0].instances[1].id.should.equal(instance3.id) - - -@mock_ec2_deprecated -def test_get_instances_filtering_by_tag_name(): - conn = boto.connect_ec2() - reservation = conn.run_instances('ami-1234abcd', min_count=3) - instance1, instance2, instance3 = reservation.instances - instance1.add_tag('tag1') - instance1.add_tag('tag2') - instance2.add_tag('tag1') - instance2.add_tag('tag2X') - instance3.add_tag('tag3') - - reservations = conn.get_all_instances(filters={'tag-key': 'tagX'}) - # get_all_instances should return no instances - reservations.should.have.length_of(0) - - reservations = conn.get_all_instances(filters={'tag-key': 'tag1'}) - # get_all_instances should return both instances with this tag value - reservations.should.have.length_of(1) - reservations[0].instances.should.have.length_of(2) - reservations[0].instances[0].id.should.equal(instance1.id) - reservations[0].instances[1].id.should.equal(instance2.id) - - reservations = conn.get_all_instances( - filters={'tag-key': ['tag1', 'tag3']}) - # get_all_instances should return both instances with one of the - # acceptable tag values - reservations.should.have.length_of(1) - reservations[0].instances.should.have.length_of(3) - reservations[0].instances[0].id.should.equal(instance1.id) - reservations[0].instances[1].id.should.equal(instance2.id) - reservations[0].instances[2].id.should.equal(instance3.id) - - -@mock_ec2_deprecated -def test_instance_start_and_stop(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd', min_count=2) - instances = reservation.instances - instances.should.have.length_of(2) - - instance_ids = [instance.id for instance in instances] - - with assert_raises(EC2ResponseError) as ex: - stopped_instances = conn.stop_instances(instance_ids, dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - 
ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the StopInstance operation: Request would have succeeded, but DryRun flag is set') - - stopped_instances = conn.stop_instances(instance_ids) - - for instance in stopped_instances: - instance.state.should.equal('stopping') - - with assert_raises(EC2ResponseError) as ex: - started_instances = conn.start_instances( - [instances[0].id], dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the StartInstance operation: Request would have succeeded, but DryRun flag is set') - - started_instances = conn.start_instances([instances[0].id]) - started_instances[0].state.should.equal('pending') - - -@mock_ec2_deprecated -def test_instance_reboot(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - - with assert_raises(EC2ResponseError) as ex: - instance.reboot(dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the RebootInstance operation: Request would have succeeded, but DryRun flag is set') - - instance.reboot() - instance.state.should.equal('pending') - - -@mock_ec2_deprecated -def test_instance_attribute_instance_type(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - - with assert_raises(EC2ResponseError) as ex: - instance.modify_attribute("instanceType", "m1.small", dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the 
ModifyInstanceType operation: Request would have succeeded, but DryRun flag is set') - - instance.modify_attribute("instanceType", "m1.small") - - instance_attribute = instance.get_attribute("instanceType") - instance_attribute.should.be.a(InstanceAttribute) - instance_attribute.get('instanceType').should.equal("m1.small") - - -@mock_ec2_deprecated -def test_modify_instance_attribute_security_groups(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - - sg_id = 'sg-1234abcd' - sg_id2 = 'sg-abcd4321' - - with assert_raises(EC2ResponseError) as ex: - instance.modify_attribute("groupSet", [sg_id, sg_id2], dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the ModifyInstanceSecurityGroups operation: Request would have succeeded, but DryRun flag is set') - - instance.modify_attribute("groupSet", [sg_id, sg_id2]) - - instance_attribute = instance.get_attribute("groupSet") - instance_attribute.should.be.a(InstanceAttribute) - group_list = instance_attribute.get('groupSet') - any(g.id == sg_id for g in group_list).should.be.ok - any(g.id == sg_id2 for g in group_list).should.be.ok - - -@mock_ec2_deprecated -def test_instance_attribute_user_data(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - - with assert_raises(EC2ResponseError) as ex: - instance.modify_attribute( - "userData", "this is my user data", dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the ModifyUserData operation: Request would have succeeded, but DryRun flag is set') - - instance.modify_attribute("userData", "this is my user 
data") - - instance_attribute = instance.get_attribute("userData") - instance_attribute.should.be.a(InstanceAttribute) - instance_attribute.get("userData").should.equal("this is my user data") - - -@mock_ec2_deprecated -def test_instance_attribute_source_dest_check(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - - # Default value is true - instance.sourceDestCheck.should.equal('true') - - instance_attribute = instance.get_attribute("sourceDestCheck") - instance_attribute.should.be.a(InstanceAttribute) - instance_attribute.get("sourceDestCheck").should.equal(True) - - # Set to false (note: Boto converts bool to string, eg 'false') - - with assert_raises(EC2ResponseError) as ex: - instance.modify_attribute("sourceDestCheck", False, dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the ModifySourceDestCheck operation: Request would have succeeded, but DryRun flag is set') - - instance.modify_attribute("sourceDestCheck", False) - - instance.update() - instance.sourceDestCheck.should.equal('false') - - instance_attribute = instance.get_attribute("sourceDestCheck") - instance_attribute.should.be.a(InstanceAttribute) - instance_attribute.get("sourceDestCheck").should.equal(False) - - # Set back to true - instance.modify_attribute("sourceDestCheck", True) - - instance.update() - instance.sourceDestCheck.should.equal('true') - - instance_attribute = instance.get_attribute("sourceDestCheck") - instance_attribute.should.be.a(InstanceAttribute) - instance_attribute.get("sourceDestCheck").should.equal(True) - - -@mock_ec2_deprecated -def test_user_data_with_run_instance(): - user_data = b"some user data" - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd', user_data=user_data) - 
instance = reservation.instances[0] - - instance_attribute = instance.get_attribute("userData") - instance_attribute.should.be.a(InstanceAttribute) - retrieved_user_data = instance_attribute.get("userData").encode('utf-8') - decoded_user_data = base64.decodestring(retrieved_user_data) - decoded_user_data.should.equal(b"some user data") - - -@mock_ec2_deprecated -def test_run_instance_with_security_group_name(): - conn = boto.connect_ec2('the_key', 'the_secret') - - with assert_raises(EC2ResponseError) as ex: - group = conn.create_security_group( - 'group1', "some description", dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the CreateSecurityGroup operation: Request would have succeeded, but DryRun flag is set') - - group = conn.create_security_group('group1', "some description") - - reservation = conn.run_instances('ami-1234abcd', - security_groups=['group1']) - instance = reservation.instances[0] - - instance.groups[0].id.should.equal(group.id) - instance.groups[0].name.should.equal("group1") - - -@mock_ec2_deprecated -def test_run_instance_with_security_group_id(): - conn = boto.connect_ec2('the_key', 'the_secret') - group = conn.create_security_group('group1', "some description") - reservation = conn.run_instances('ami-1234abcd', - security_group_ids=[group.id]) - instance = reservation.instances[0] - - instance.groups[0].id.should.equal(group.id) - instance.groups[0].name.should.equal("group1") - - -@mock_ec2_deprecated -def test_run_instance_with_instance_type(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd', instance_type="t1.micro") - instance = reservation.instances[0] - - instance.instance_type.should.equal("t1.micro") - - -@mock_ec2_deprecated -def test_run_instance_with_default_placement(): - conn = boto.connect_ec2('the_key', 'the_secret') - 
reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - - instance.placement.should.equal("us-east-1a") - - -@mock_ec2_deprecated -def test_run_instance_with_placement(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd', placement="us-east-1b") - instance = reservation.instances[0] - - instance.placement.should.equal("us-east-1b") - - -@mock_ec2 -def test_run_instance_with_subnet_boto3(): - client = boto3.client('ec2', region_name='eu-central-1') - - ip_networks = [ - (ipaddress.ip_network('10.0.0.0/16'), ipaddress.ip_network('10.0.99.0/24')), - (ipaddress.ip_network('192.168.42.0/24'), ipaddress.ip_network('192.168.42.0/25')) - ] - - # Tests instances are created with the correct IPs - for vpc_cidr, subnet_cidr in ip_networks: - resp = client.create_vpc( - CidrBlock=str(vpc_cidr), - AmazonProvidedIpv6CidrBlock=False, - DryRun=False, - InstanceTenancy='default' - ) - vpc_id = resp['Vpc']['VpcId'] - - resp = client.create_subnet( - CidrBlock=str(subnet_cidr), - VpcId=vpc_id - ) - subnet_id = resp['Subnet']['SubnetId'] - - resp = client.run_instances( - ImageId='ami-1234abcd', - MaxCount=1, - MinCount=1, - SubnetId=subnet_id - ) - instance = resp['Instances'][0] - instance['SubnetId'].should.equal(subnet_id) - - priv_ipv4 = ipaddress.ip_address(six.text_type(instance['PrivateIpAddress'])) - subnet_cidr.should.contain(priv_ipv4) - - -@mock_ec2 -def test_run_instance_with_specified_private_ipv4(): - client = boto3.client('ec2', region_name='eu-central-1') - - vpc_cidr = ipaddress.ip_network('192.168.42.0/24') - subnet_cidr = ipaddress.ip_network('192.168.42.0/25') - - resp = client.create_vpc( - CidrBlock=str(vpc_cidr), - AmazonProvidedIpv6CidrBlock=False, - DryRun=False, - InstanceTenancy='default' - ) - vpc_id = resp['Vpc']['VpcId'] - - resp = client.create_subnet( - CidrBlock=str(subnet_cidr), - VpcId=vpc_id - ) - subnet_id = resp['Subnet']['SubnetId'] - - resp = 
client.run_instances( - ImageId='ami-1234abcd', - MaxCount=1, - MinCount=1, - SubnetId=subnet_id, - PrivateIpAddress='192.168.42.5' - ) - instance = resp['Instances'][0] - instance['SubnetId'].should.equal(subnet_id) - instance['PrivateIpAddress'].should.equal('192.168.42.5') - - -@mock_ec2 -def test_run_instance_mapped_public_ipv4(): - client = boto3.client('ec2', region_name='eu-central-1') - - vpc_cidr = ipaddress.ip_network('192.168.42.0/24') - subnet_cidr = ipaddress.ip_network('192.168.42.0/25') - - resp = client.create_vpc( - CidrBlock=str(vpc_cidr), - AmazonProvidedIpv6CidrBlock=False, - DryRun=False, - InstanceTenancy='default' - ) - vpc_id = resp['Vpc']['VpcId'] - - resp = client.create_subnet( - CidrBlock=str(subnet_cidr), - VpcId=vpc_id - ) - subnet_id = resp['Subnet']['SubnetId'] - client.modify_subnet_attribute( - SubnetId=subnet_id, - MapPublicIpOnLaunch={'Value': True} - ) - - resp = client.run_instances( - ImageId='ami-1234abcd', - MaxCount=1, - MinCount=1, - SubnetId=subnet_id - ) - instance = resp['Instances'][0] - instance.should.contain('PublicDnsName') - instance.should.contain('PublicIpAddress') - len(instance['PublicDnsName']).should.be.greater_than(0) - len(instance['PublicIpAddress']).should.be.greater_than(0) - - -@mock_ec2_deprecated -def test_run_instance_with_nic_autocreated(): - conn = boto.connect_vpc('the_key', 'the_secret') - vpc = conn.create_vpc("10.0.0.0/16") - subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") - security_group1 = conn.create_security_group( - 'test security group #1', 'this is a test security group') - security_group2 = conn.create_security_group( - 'test security group #2', 'this is a test security group') - private_ip = "10.0.0.1" - - reservation = conn.run_instances('ami-1234abcd', subnet_id=subnet.id, - security_groups=[security_group1.name], - security_group_ids=[security_group2.id], - private_ip_address=private_ip) - instance = reservation.instances[0] - - all_enis = conn.get_all_network_interfaces() - 
all_enis.should.have.length_of(1) - eni = all_enis[0] - - instance.interfaces.should.have.length_of(1) - instance.interfaces[0].id.should.equal(eni.id) - - instance.subnet_id.should.equal(subnet.id) - instance.groups.should.have.length_of(2) - set([group.id for group in instance.groups]).should.equal( - set([security_group1.id, security_group2.id])) - - eni.subnet_id.should.equal(subnet.id) - eni.groups.should.have.length_of(2) - set([group.id for group in eni.groups]).should.equal( - set([security_group1.id, security_group2.id])) - eni.private_ip_addresses.should.have.length_of(1) - eni.private_ip_addresses[0].private_ip_address.should.equal(private_ip) - - -@mock_ec2_deprecated -def test_run_instance_with_nic_preexisting(): - conn = boto.connect_vpc('the_key', 'the_secret') - vpc = conn.create_vpc("10.0.0.0/16") - subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") - security_group1 = conn.create_security_group( - 'test security group #1', 'this is a test security group') - security_group2 = conn.create_security_group( - 'test security group #2', 'this is a test security group') - private_ip = "54.0.0.1" - eni = conn.create_network_interface( - subnet.id, private_ip, groups=[security_group1.id]) - - # Boto requires NetworkInterfaceCollection of NetworkInterfaceSpecifications... - # annoying, but generates the desired querystring. 
- from boto.ec2.networkinterface import NetworkInterfaceSpecification, NetworkInterfaceCollection - interface = NetworkInterfaceSpecification( - network_interface_id=eni.id, device_index=0) - interfaces = NetworkInterfaceCollection(interface) - # end Boto objects - - reservation = conn.run_instances('ami-1234abcd', network_interfaces=interfaces, - security_group_ids=[security_group2.id]) - instance = reservation.instances[0] - - instance.subnet_id.should.equal(subnet.id) - - all_enis = conn.get_all_network_interfaces() - all_enis.should.have.length_of(1) - - instance.interfaces.should.have.length_of(1) - instance_eni = instance.interfaces[0] - instance_eni.id.should.equal(eni.id) - - instance_eni.subnet_id.should.equal(subnet.id) - instance_eni.groups.should.have.length_of(2) - set([group.id for group in instance_eni.groups]).should.equal( - set([security_group1.id, security_group2.id])) - instance_eni.private_ip_addresses.should.have.length_of(1) - instance_eni.private_ip_addresses[ - 0].private_ip_address.should.equal(private_ip) - - -@requires_boto_gte("2.32.0") -@mock_ec2_deprecated -def test_instance_with_nic_attach_detach(): - conn = boto.connect_vpc('the_key', 'the_secret') - vpc = conn.create_vpc("10.0.0.0/16") - subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") - - security_group1 = conn.create_security_group( - 'test security group #1', 'this is a test security group') - security_group2 = conn.create_security_group( - 'test security group #2', 'this is a test security group') - - reservation = conn.run_instances( - 'ami-1234abcd', security_group_ids=[security_group1.id]) - instance = reservation.instances[0] - - eni = conn.create_network_interface(subnet.id, groups=[security_group2.id]) - - # Check initial instance and ENI data - instance.interfaces.should.have.length_of(1) - - eni.groups.should.have.length_of(1) - set([group.id for group in eni.groups]).should.equal( - set([security_group2.id])) - - # Attach - with assert_raises(EC2ResponseError) as ex: 
- conn.attach_network_interface( - eni.id, instance.id, device_index=1, dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the AttachNetworkInterface operation: Request would have succeeded, but DryRun flag is set') - - conn.attach_network_interface(eni.id, instance.id, device_index=1) - - # Check attached instance and ENI data - instance.update() - instance.interfaces.should.have.length_of(2) - instance_eni = instance.interfaces[1] - instance_eni.id.should.equal(eni.id) - instance_eni.groups.should.have.length_of(2) - set([group.id for group in instance_eni.groups]).should.equal( - set([security_group1.id, security_group2.id])) - - eni = conn.get_all_network_interfaces( - filters={'network-interface-id': eni.id})[0] - eni.groups.should.have.length_of(2) - set([group.id for group in eni.groups]).should.equal( - set([security_group1.id, security_group2.id])) - - # Detach - with assert_raises(EC2ResponseError) as ex: - conn.detach_network_interface(instance_eni.attachment.id, dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the DetachNetworkInterface operation: Request would have succeeded, but DryRun flag is set') - - conn.detach_network_interface(instance_eni.attachment.id) - - # Check detached instance and ENI data - instance.update() - instance.interfaces.should.have.length_of(1) - - eni = conn.get_all_network_interfaces( - filters={'network-interface-id': eni.id})[0] - eni.groups.should.have.length_of(1) - set([group.id for group in eni.groups]).should.equal( - set([security_group2.id])) - - # Detach with invalid attachment ID - with assert_raises(EC2ResponseError) as cm: - conn.detach_network_interface('eni-attach-1234abcd') - 
cm.exception.code.should.equal('InvalidAttachmentID.NotFound') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - -@mock_ec2_deprecated -def test_ec2_classic_has_public_ip_address(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd', key_name="keypair_name") - instance = reservation.instances[0] - instance.ip_address.should_not.equal(None) - instance.public_dns_name.should.contain(instance.ip_address.replace('.', '-')) - instance.private_ip_address.should_not.equal(None) - instance.private_dns_name.should.contain(instance.private_ip_address.replace('.', '-')) - - -@mock_ec2_deprecated -def test_run_instance_with_keypair(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd', key_name="keypair_name") - instance = reservation.instances[0] - - instance.key_name.should.equal("keypair_name") - - -@mock_ec2_deprecated -def test_describe_instance_status_no_instances(): - conn = boto.connect_ec2('the_key', 'the_secret') - all_status = conn.get_all_instance_status() - len(all_status).should.equal(0) - - -@mock_ec2_deprecated -def test_describe_instance_status_with_instances(): - conn = boto.connect_ec2('the_key', 'the_secret') - conn.run_instances('ami-1234abcd', key_name="keypair_name") - - all_status = conn.get_all_instance_status() - len(all_status).should.equal(1) - all_status[0].instance_status.status.should.equal('ok') - all_status[0].system_status.status.should.equal('ok') - - -@mock_ec2_deprecated -def test_describe_instance_status_with_instance_filter(): - conn = boto.connect_ec2('the_key', 'the_secret') - - # We want to filter based on this one - reservation = conn.run_instances('ami-1234abcd', key_name="keypair_name") - instance = reservation.instances[0] - - # This is just to setup the test - conn.run_instances('ami-1234abcd', key_name="keypair_name") - - all_status = conn.get_all_instance_status(instance_ids=[instance.id]) - 
len(all_status).should.equal(1) - all_status[0].id.should.equal(instance.id) - - # Call get_all_instance_status with a bad id should raise an error - with assert_raises(EC2ResponseError) as cm: - conn.get_all_instance_status(instance_ids=[instance.id, "i-1234abcd"]) - cm.exception.code.should.equal('InvalidInstanceID.NotFound') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - -@requires_boto_gte("2.32.0") -@mock_ec2_deprecated -def test_describe_instance_status_with_non_running_instances(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd', min_count=3) - instance1, instance2, instance3 = reservation.instances - instance1.stop() - instance2.terminate() - - all_running_status = conn.get_all_instance_status() - all_running_status.should.have.length_of(1) - all_running_status[0].id.should.equal(instance3.id) - all_running_status[0].state_name.should.equal('running') - - all_status = conn.get_all_instance_status(include_all_instances=True) - all_status.should.have.length_of(3) - - status1 = next((s for s in all_status if s.id == instance1.id), None) - status1.state_name.should.equal('stopped') - - status2 = next((s for s in all_status if s.id == instance2.id), None) - status2.state_name.should.equal('terminated') - - status3 = next((s for s in all_status if s.id == instance3.id), None) - status3.state_name.should.equal('running') - - -@mock_ec2_deprecated -def test_get_instance_by_security_group(): - conn = boto.connect_ec2('the_key', 'the_secret') - - conn.run_instances('ami-1234abcd') - instance = conn.get_only_instances()[0] - - security_group = conn.create_security_group('test', 'test') - - with assert_raises(EC2ResponseError) as ex: - conn.modify_instance_attribute(instance.id, "groupSet", [ - security_group.id], dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error 
occurred (DryRunOperation) when calling the ModifyInstanceSecurityGroups operation: Request would have succeeded, but DryRun flag is set') - - conn.modify_instance_attribute( - instance.id, "groupSet", [security_group.id]) - - security_group_instances = security_group.instances() - - assert len(security_group_instances) == 1 - assert security_group_instances[0].id == instance.id - - -@mock_ec2 -def test_modify_delete_on_termination(): - ec2_client = boto3.resource('ec2', region_name='us-west-1') - result = ec2_client.create_instances(ImageId='ami-12345678', MinCount=1, MaxCount=1) - instance = result[0] - instance.load() - instance.block_device_mappings[0]['Ebs']['DeleteOnTermination'].should.be(False) - instance.modify_attribute( - BlockDeviceMappings=[{ - 'DeviceName': '/dev/sda1', - 'Ebs': {'DeleteOnTermination': True} - }] - ) - instance.load() - instance.block_device_mappings[0]['Ebs']['DeleteOnTermination'].should.be(True) - -@mock_ec2 -def test_create_instance_ebs_optimized(): - ec2_resource = boto3.resource('ec2', region_name='eu-west-1') - - instance = ec2_resource.create_instances( - ImageId = 'ami-12345678', - MaxCount = 1, - MinCount = 1, - EbsOptimized = True, - )[0] - instance.load() - instance.ebs_optimized.should.be(True) - - instance.modify_attribute( - EbsOptimized={ - 'Value': False - } - ) - instance.load() - instance.ebs_optimized.should.be(False) +from __future__ import unicode_literals +# Ensure 'assert_raises' context manager support for Python 2.6 +import tests.backport_assert_raises +from nose.tools import assert_raises + +import base64 +import datetime +import ipaddress + +import six +import boto +import boto3 +from boto.ec2.instance import Reservation, InstanceAttribute +from boto.exception import EC2ResponseError, EC2ResponseError +from freezegun import freeze_time +import sure # noqa + +from moto import mock_ec2_deprecated, mock_ec2 +from tests.helpers import requires_boto_gte + + +################ Test Readme ############### +def 
add_servers(ami_id, count): + conn = boto.connect_ec2() + for index in range(count): + conn.run_instances(ami_id) + + +@mock_ec2_deprecated +def test_add_servers(): + add_servers('ami-1234abcd', 2) + + conn = boto.connect_ec2() + reservations = conn.get_all_instances() + assert len(reservations) == 2 + instance1 = reservations[0].instances[0] + assert instance1.image_id == 'ami-1234abcd' + +############################################ + + +@freeze_time("2014-01-01 05:00:00") +@mock_ec2_deprecated +def test_instance_launch_and_terminate(): + conn = boto.connect_ec2('the_key', 'the_secret') + + with assert_raises(EC2ResponseError) as ex: + reservation = conn.run_instances('ami-1234abcd', dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the RunInstance operation: Request would have succeeded, but DryRun flag is set') + + reservation = conn.run_instances('ami-1234abcd') + reservation.should.be.a(Reservation) + reservation.instances.should.have.length_of(1) + instance = reservation.instances[0] + instance.state.should.equal('pending') + + reservations = conn.get_all_instances() + reservations.should.have.length_of(1) + reservations[0].id.should.equal(reservation.id) + instances = reservations[0].instances + instances.should.have.length_of(1) + instance = instances[0] + instance.id.should.equal(instance.id) + instance.state.should.equal('running') + instance.launch_time.should.equal("2014-01-01T05:00:00.000Z") + instance.vpc_id.should.equal(None) + instance.placement.should.equal('us-east-1a') + + root_device_name = instance.root_device_name + instance.block_device_mapping[ + root_device_name].status.should.equal('in-use') + volume_id = instance.block_device_mapping[root_device_name].volume_id + volume_id.should.match(r'vol-\w+') + + volume = conn.get_all_volumes(volume_ids=[volume_id])[0] + 
volume.attach_data.instance_id.should.equal(instance.id) + volume.status.should.equal('in-use') + + with assert_raises(EC2ResponseError) as ex: + conn.terminate_instances([instance.id], dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the TerminateInstance operation: Request would have succeeded, but DryRun flag is set') + + conn.terminate_instances([instance.id]) + + reservations = conn.get_all_instances() + instance = reservations[0].instances[0] + instance.state.should.equal('terminated') + + +@mock_ec2_deprecated +def test_terminate_empty_instances(): + conn = boto.connect_ec2('the_key', 'the_secret') + conn.terminate_instances.when.called_with( + []).should.throw(EC2ResponseError) + + +@freeze_time("2014-01-01 05:00:00") +@mock_ec2_deprecated +def test_instance_attach_volume(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + + vol1 = conn.create_volume(size=36, zone=conn.region.name) + vol1.attach(instance.id, "/dev/sda1") + vol1.update() + vol2 = conn.create_volume(size=65, zone=conn.region.name) + vol2.attach(instance.id, "/dev/sdb1") + vol2.update() + vol3 = conn.create_volume(size=130, zone=conn.region.name) + vol3.attach(instance.id, "/dev/sdc1") + vol3.update() + + reservations = conn.get_all_instances() + instance = reservations[0].instances[0] + + instance.block_device_mapping.should.have.length_of(3) + + for v in conn.get_all_volumes(volume_ids=[instance.block_device_mapping['/dev/sdc1'].volume_id]): + v.attach_data.instance_id.should.equal(instance.id) + # can do due to freeze_time decorator. + v.attach_data.attach_time.should.equal(instance.launch_time) + # can do due to freeze_time decorator. 
+ v.create_time.should.equal(instance.launch_time) + v.region.name.should.equal(instance.region.name) + v.status.should.equal('in-use') + + +@mock_ec2_deprecated +def test_get_instances_by_id(): + conn = boto.connect_ec2() + reservation = conn.run_instances('ami-1234abcd', min_count=2) + instance1, instance2 = reservation.instances + + reservations = conn.get_all_instances(instance_ids=[instance1.id]) + reservations.should.have.length_of(1) + reservation = reservations[0] + reservation.instances.should.have.length_of(1) + reservation.instances[0].id.should.equal(instance1.id) + + reservations = conn.get_all_instances( + instance_ids=[instance1.id, instance2.id]) + reservations.should.have.length_of(1) + reservation = reservations[0] + reservation.instances.should.have.length_of(2) + instance_ids = [instance.id for instance in reservation.instances] + instance_ids.should.equal([instance1.id, instance2.id]) + + # Call get_all_instances with a bad id should raise an error + with assert_raises(EC2ResponseError) as cm: + conn.get_all_instances(instance_ids=[instance1.id, "i-1234abcd"]) + cm.exception.code.should.equal('InvalidInstanceID.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@mock_ec2 +def test_get_paginated_instances(): + image_id = 'ami-1234abcd' + client = boto3.client('ec2', region_name='us-east-1') + conn = boto3.resource('ec2', 'us-east-1') + for i in range(100): + conn.create_instances(ImageId=image_id, + MinCount=1, + MaxCount=1) + resp = client.describe_instances(MaxResults=50) + reservations = resp['Reservations'] + reservations.should.have.length_of(50) + next_token = resp['NextToken'] + next_token.should_not.be.none + resp2 = client.describe_instances(NextToken=next_token) + reservations.extend(resp2['Reservations']) + reservations.should.have.length_of(100) + assert 'NextToken' not in resp2.keys() + + +@mock_ec2 +def test_create_with_tags(): + ec2 = boto3.client('ec2', region_name='us-west-2') + 
instances = ec2.run_instances( + ImageId='ami-123', + MinCount=1, + MaxCount=1, + InstanceType='t2.micro', + TagSpecifications=[ + { + 'ResourceType': 'instance', + 'Tags': [ + { + 'Key': 'MY_TAG1', + 'Value': 'MY_VALUE1', + }, + { + 'Key': 'MY_TAG2', + 'Value': 'MY_VALUE2', + }, + ], + }, + { + 'ResourceType': 'instance', + 'Tags': [ + { + 'Key': 'MY_TAG3', + 'Value': 'MY_VALUE3', + }, + ] + }, + ], + ) + assert 'Tags' in instances['Instances'][0] + len(instances['Instances'][0]['Tags']).should.equal(3) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_state(): + conn = boto.connect_ec2() + reservation = conn.run_instances('ami-1234abcd', min_count=3) + instance1, instance2, instance3 = reservation.instances + + conn.terminate_instances([instance1.id]) + + reservations = conn.get_all_instances( + filters={'instance-state-name': 'running'}) + reservations.should.have.length_of(1) + # Since we terminated instance1, only instance2 and instance3 should be + # returned + instance_ids = [instance.id for instance in reservations[0].instances] + set(instance_ids).should.equal(set([instance2.id, instance3.id])) + + reservations = conn.get_all_instances( + [instance2.id], filters={'instance-state-name': 'running'}) + reservations.should.have.length_of(1) + instance_ids = [instance.id for instance in reservations[0].instances] + instance_ids.should.equal([instance2.id]) + + reservations = conn.get_all_instances( + [instance2.id], filters={'instance-state-name': 'terminated'}) + list(reservations).should.equal([]) + + # get_all_instances should still return all 3 + reservations = conn.get_all_instances() + reservations[0].instances.should.have.length_of(3) + + conn.get_all_instances.when.called_with( + filters={'not-implemented-filter': 'foobar'}).should.throw(NotImplementedError) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_instance_id(): + conn = boto.connect_ec2() + reservation = conn.run_instances('ami-1234abcd', min_count=3) + instance1, 
instance2, instance3 = reservation.instances + + reservations = conn.get_all_instances( + filters={'instance-id': instance1.id}) + # get_all_instances should return just instance1 + reservations[0].instances.should.have.length_of(1) + reservations[0].instances[0].id.should.equal(instance1.id) + + reservations = conn.get_all_instances( + filters={'instance-id': [instance1.id, instance2.id]}) + # get_all_instances should return two + reservations[0].instances.should.have.length_of(2) + + reservations = conn.get_all_instances( + filters={'instance-id': 'non-existing-id'}) + reservations.should.have.length_of(0) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_instance_type(): + conn = boto.connect_ec2() + reservation1 = conn.run_instances('ami-1234abcd', instance_type='m1.small') + instance1 = reservation1.instances[0] + reservation2 = conn.run_instances('ami-1234abcd', instance_type='m1.small') + instance2 = reservation2.instances[0] + reservation3 = conn.run_instances('ami-1234abcd', instance_type='t1.micro') + instance3 = reservation3.instances[0] + + reservations = conn.get_all_instances( + filters={'instance-type': 'm1.small'}) + # get_all_instances should return instance1,2 + reservations.should.have.length_of(2) + reservations[0].instances.should.have.length_of(1) + reservations[1].instances.should.have.length_of(1) + instance_ids = [reservations[0].instances[0].id, + reservations[1].instances[0].id] + set(instance_ids).should.equal(set([instance1.id, instance2.id])) + + reservations = conn.get_all_instances( + filters={'instance-type': 't1.micro'}) + # get_all_instances should return one + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(1) + reservations[0].instances[0].id.should.equal(instance3.id) + + reservations = conn.get_all_instances( + filters={'instance-type': ['t1.micro', 'm1.small']}) + reservations.should.have.length_of(3) + reservations[0].instances.should.have.length_of(1) + 
reservations[1].instances.should.have.length_of(1) + reservations[2].instances.should.have.length_of(1) + instance_ids = [ + reservations[0].instances[0].id, + reservations[1].instances[0].id, + reservations[2].instances[0].id, + ] + set(instance_ids).should.equal( + set([instance1.id, instance2.id, instance3.id])) + + reservations = conn.get_all_instances(filters={'instance-type': 'bogus'}) + # bogus instance-type should return none + reservations.should.have.length_of(0) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_reason_code(): + conn = boto.connect_ec2() + reservation = conn.run_instances('ami-1234abcd', min_count=3) + instance1, instance2, instance3 = reservation.instances + instance1.stop() + instance2.terminate() + + reservations = conn.get_all_instances( + filters={'state-reason-code': 'Client.UserInitiatedShutdown'}) + # get_all_instances should return instance1 and instance2 + reservations[0].instances.should.have.length_of(2) + set([instance1.id, instance2.id]).should.equal( + set([i.id for i in reservations[0].instances])) + + reservations = conn.get_all_instances(filters={'state-reason-code': ''}) + # get_all_instances should return instance 3 + reservations[0].instances.should.have.length_of(1) + reservations[0].instances[0].id.should.equal(instance3.id) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_source_dest_check(): + conn = boto.connect_ec2() + reservation = conn.run_instances('ami-1234abcd', min_count=2) + instance1, instance2 = reservation.instances + conn.modify_instance_attribute( + instance1.id, attribute='sourceDestCheck', value=False) + + source_dest_check_false = conn.get_all_instances( + filters={'source-dest-check': 'false'}) + source_dest_check_true = conn.get_all_instances( + filters={'source-dest-check': 'true'}) + + source_dest_check_false[0].instances.should.have.length_of(1) + source_dest_check_false[0].instances[0].id.should.equal(instance1.id) + + 
source_dest_check_true[0].instances.should.have.length_of(1) + source_dest_check_true[0].instances[0].id.should.equal(instance2.id) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_vpc_id(): + conn = boto.connect_vpc('the_key', 'the_secret') + vpc1 = conn.create_vpc("10.0.0.0/16") + subnet1 = conn.create_subnet(vpc1.id, "10.0.0.0/27") + reservation1 = conn.run_instances( + 'ami-1234abcd', min_count=1, subnet_id=subnet1.id) + instance1 = reservation1.instances[0] + + vpc2 = conn.create_vpc("10.1.0.0/16") + subnet2 = conn.create_subnet(vpc2.id, "10.1.0.0/27") + reservation2 = conn.run_instances( + 'ami-1234abcd', min_count=1, subnet_id=subnet2.id) + instance2 = reservation2.instances[0] + + reservations1 = conn.get_all_instances(filters={'vpc-id': vpc1.id}) + reservations1.should.have.length_of(1) + reservations1[0].instances.should.have.length_of(1) + reservations1[0].instances[0].id.should.equal(instance1.id) + reservations1[0].instances[0].vpc_id.should.equal(vpc1.id) + reservations1[0].instances[0].subnet_id.should.equal(subnet1.id) + + reservations2 = conn.get_all_instances(filters={'vpc-id': vpc2.id}) + reservations2.should.have.length_of(1) + reservations2[0].instances.should.have.length_of(1) + reservations2[0].instances[0].id.should.equal(instance2.id) + reservations2[0].instances[0].vpc_id.should.equal(vpc2.id) + reservations2[0].instances[0].subnet_id.should.equal(subnet2.id) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_architecture(): + conn = boto.connect_ec2() + reservation = conn.run_instances('ami-1234abcd', min_count=1) + instance = reservation.instances + + reservations = conn.get_all_instances(filters={'architecture': 'x86_64'}) + # get_all_instances should return the instance + reservations[0].instances.should.have.length_of(1) + + +@mock_ec2 +def test_get_instances_filtering_by_image_id(): + image_id = 'ami-1234abcd' + client = boto3.client('ec2', region_name='us-east-1') + conn = boto3.resource('ec2', 'us-east-1') 
+ conn.create_instances(ImageId=image_id, + MinCount=1, + MaxCount=1) + + reservations = client.describe_instances(Filters=[{'Name': 'image-id', + 'Values': [image_id]}])['Reservations'] + reservations[0]['Instances'].should.have.length_of(1) + + +@mock_ec2 +def test_get_instances_filtering_by_private_dns(): + image_id = 'ami-1234abcd' + client = boto3.client('ec2', region_name='us-east-1') + conn = boto3.resource('ec2', 'us-east-1') + conn.create_instances(ImageId=image_id, + MinCount=1, + MaxCount=1, + PrivateIpAddress='10.0.0.1') + reservations = client.describe_instances(Filters=[ + {'Name': 'private-dns-name', 'Values': ['ip-10-0-0-1.ec2.internal']} + ])['Reservations'] + reservations[0]['Instances'].should.have.length_of(1) + + +@mock_ec2 +def test_get_instances_filtering_by_ni_private_dns(): + image_id = 'ami-1234abcd' + client = boto3.client('ec2', region_name='us-west-2') + conn = boto3.resource('ec2', 'us-west-2') + conn.create_instances(ImageId=image_id, + MinCount=1, + MaxCount=1, + PrivateIpAddress='10.0.0.1') + reservations = client.describe_instances(Filters=[ + {'Name': 'network-interface.private-dns-name', 'Values': ['ip-10-0-0-1.us-west-2.compute.internal']} + ])['Reservations'] + reservations[0]['Instances'].should.have.length_of(1) + + +@mock_ec2 +def test_get_instances_filtering_by_instance_group_name(): + image_id = 'ami-1234abcd' + client = boto3.client('ec2', region_name='us-east-1') + client.create_security_group( + Description='test', + GroupName='test_sg' + ) + client.run_instances(ImageId=image_id, + MinCount=1, + MaxCount=1, + SecurityGroups=['test_sg']) + reservations = client.describe_instances(Filters=[ + {'Name': 'instance.group-name', 'Values': ['test_sg']} + ])['Reservations'] + reservations[0]['Instances'].should.have.length_of(1) + + +@mock_ec2 +def test_get_instances_filtering_by_instance_group_id(): + image_id = 'ami-1234abcd' + client = boto3.client('ec2', region_name='us-east-1') + create_sg = client.create_security_group( + 
Description='test', + GroupName='test_sg' + ) + group_id = create_sg['GroupId'] + client.run_instances(ImageId=image_id, + MinCount=1, + MaxCount=1, + SecurityGroups=['test_sg']) + reservations = client.describe_instances(Filters=[ + {'Name': 'instance.group-id', 'Values': [group_id]} + ])['Reservations'] + reservations[0]['Instances'].should.have.length_of(1) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_tag(): + conn = boto.connect_ec2() + reservation = conn.run_instances('ami-1234abcd', min_count=3) + instance1, instance2, instance3 = reservation.instances + instance1.add_tag('tag1', 'value1') + instance1.add_tag('tag2', 'value2') + instance2.add_tag('tag1', 'value1') + instance2.add_tag('tag2', 'wrong value') + instance3.add_tag('tag2', 'value2') + + reservations = conn.get_all_instances(filters={'tag:tag0': 'value0'}) + # get_all_instances should return no instances + reservations.should.have.length_of(0) + + reservations = conn.get_all_instances(filters={'tag:tag1': 'value1'}) + # get_all_instances should return both instances with this tag value + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(2) + reservations[0].instances[0].id.should.equal(instance1.id) + reservations[0].instances[1].id.should.equal(instance2.id) + + reservations = conn.get_all_instances( + filters={'tag:tag1': 'value1', 'tag:tag2': 'value2'}) + # get_all_instances should return the instance with both tag values + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(1) + reservations[0].instances[0].id.should.equal(instance1.id) + + reservations = conn.get_all_instances( + filters={'tag:tag1': 'value1', 'tag:tag2': 'value2'}) + # get_all_instances should return the instance with both tag values + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(1) + reservations[0].instances[0].id.should.equal(instance1.id) + + reservations = conn.get_all_instances( + 
filters={'tag:tag2': ['value2', 'bogus']}) + # get_all_instances should return both instances with one of the + # acceptable tag values + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(2) + reservations[0].instances[0].id.should.equal(instance1.id) + reservations[0].instances[1].id.should.equal(instance3.id) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_tag_value(): + conn = boto.connect_ec2() + reservation = conn.run_instances('ami-1234abcd', min_count=3) + instance1, instance2, instance3 = reservation.instances + instance1.add_tag('tag1', 'value1') + instance1.add_tag('tag2', 'value2') + instance2.add_tag('tag1', 'value1') + instance2.add_tag('tag2', 'wrong value') + instance3.add_tag('tag2', 'value2') + + reservations = conn.get_all_instances(filters={'tag-value': 'value0'}) + # get_all_instances should return no instances + reservations.should.have.length_of(0) + + reservations = conn.get_all_instances(filters={'tag-value': 'value1'}) + # get_all_instances should return both instances with this tag value + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(2) + reservations[0].instances[0].id.should.equal(instance1.id) + reservations[0].instances[1].id.should.equal(instance2.id) + + reservations = conn.get_all_instances( + filters={'tag-value': ['value2', 'value1']}) + # get_all_instances should return both instances with one of the + # acceptable tag values + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(3) + reservations[0].instances[0].id.should.equal(instance1.id) + reservations[0].instances[1].id.should.equal(instance2.id) + reservations[0].instances[2].id.should.equal(instance3.id) + + reservations = conn.get_all_instances( + filters={'tag-value': ['value2', 'bogus']}) + # get_all_instances should return both instances with one of the + # acceptable tag values + reservations.should.have.length_of(1) + 
reservations[0].instances.should.have.length_of(2) + reservations[0].instances[0].id.should.equal(instance1.id) + reservations[0].instances[1].id.should.equal(instance3.id) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_tag_name(): + conn = boto.connect_ec2() + reservation = conn.run_instances('ami-1234abcd', min_count=3) + instance1, instance2, instance3 = reservation.instances + instance1.add_tag('tag1') + instance1.add_tag('tag2') + instance2.add_tag('tag1') + instance2.add_tag('tag2X') + instance3.add_tag('tag3') + + reservations = conn.get_all_instances(filters={'tag-key': 'tagX'}) + # get_all_instances should return no instances + reservations.should.have.length_of(0) + + reservations = conn.get_all_instances(filters={'tag-key': 'tag1'}) + # get_all_instances should return both instances with this tag value + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(2) + reservations[0].instances[0].id.should.equal(instance1.id) + reservations[0].instances[1].id.should.equal(instance2.id) + + reservations = conn.get_all_instances( + filters={'tag-key': ['tag1', 'tag3']}) + # get_all_instances should return both instances with one of the + # acceptable tag values + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(3) + reservations[0].instances[0].id.should.equal(instance1.id) + reservations[0].instances[1].id.should.equal(instance2.id) + reservations[0].instances[2].id.should.equal(instance3.id) + + +@mock_ec2_deprecated +def test_instance_start_and_stop(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd', min_count=2) + instances = reservation.instances + instances.should.have.length_of(2) + + instance_ids = [instance.id for instance in instances] + + with assert_raises(EC2ResponseError) as ex: + stopped_instances = conn.stop_instances(instance_ids, dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + 
ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the StopInstance operation: Request would have succeeded, but DryRun flag is set') + + stopped_instances = conn.stop_instances(instance_ids) + + for instance in stopped_instances: + instance.state.should.equal('stopping') + + with assert_raises(EC2ResponseError) as ex: + started_instances = conn.start_instances( + [instances[0].id], dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the StartInstance operation: Request would have succeeded, but DryRun flag is set') + + started_instances = conn.start_instances([instances[0].id]) + started_instances[0].state.should.equal('pending') + + +@mock_ec2_deprecated +def test_instance_reboot(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + + with assert_raises(EC2ResponseError) as ex: + instance.reboot(dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the RebootInstance operation: Request would have succeeded, but DryRun flag is set') + + instance.reboot() + instance.state.should.equal('pending') + + +@mock_ec2_deprecated +def test_instance_attribute_instance_type(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + + with assert_raises(EC2ResponseError) as ex: + instance.modify_attribute("instanceType", "m1.small", dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the 
ModifyInstanceType operation: Request would have succeeded, but DryRun flag is set') + + instance.modify_attribute("instanceType", "m1.small") + + instance_attribute = instance.get_attribute("instanceType") + instance_attribute.should.be.a(InstanceAttribute) + instance_attribute.get('instanceType').should.equal("m1.small") + + +@mock_ec2_deprecated +def test_modify_instance_attribute_security_groups(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + + sg_id = 'sg-1234abcd' + sg_id2 = 'sg-abcd4321' + + with assert_raises(EC2ResponseError) as ex: + instance.modify_attribute("groupSet", [sg_id, sg_id2], dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the ModifyInstanceSecurityGroups operation: Request would have succeeded, but DryRun flag is set') + + instance.modify_attribute("groupSet", [sg_id, sg_id2]) + + instance_attribute = instance.get_attribute("groupSet") + instance_attribute.should.be.a(InstanceAttribute) + group_list = instance_attribute.get('groupSet') + any(g.id == sg_id for g in group_list).should.be.ok + any(g.id == sg_id2 for g in group_list).should.be.ok + + +@mock_ec2_deprecated +def test_instance_attribute_user_data(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + + with assert_raises(EC2ResponseError) as ex: + instance.modify_attribute( + "userData", "this is my user data", dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the ModifyUserData operation: Request would have succeeded, but DryRun flag is set') + + instance.modify_attribute("userData", "this is my user 
data") + + instance_attribute = instance.get_attribute("userData") + instance_attribute.should.be.a(InstanceAttribute) + instance_attribute.get("userData").should.equal("this is my user data") + + +@mock_ec2_deprecated +def test_instance_attribute_source_dest_check(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + + # Default value is true + instance.sourceDestCheck.should.equal('true') + + instance_attribute = instance.get_attribute("sourceDestCheck") + instance_attribute.should.be.a(InstanceAttribute) + instance_attribute.get("sourceDestCheck").should.equal(True) + + # Set to false (note: Boto converts bool to string, eg 'false') + + with assert_raises(EC2ResponseError) as ex: + instance.modify_attribute("sourceDestCheck", False, dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the ModifySourceDestCheck operation: Request would have succeeded, but DryRun flag is set') + + instance.modify_attribute("sourceDestCheck", False) + + instance.update() + instance.sourceDestCheck.should.equal('false') + + instance_attribute = instance.get_attribute("sourceDestCheck") + instance_attribute.should.be.a(InstanceAttribute) + instance_attribute.get("sourceDestCheck").should.equal(False) + + # Set back to true + instance.modify_attribute("sourceDestCheck", True) + + instance.update() + instance.sourceDestCheck.should.equal('true') + + instance_attribute = instance.get_attribute("sourceDestCheck") + instance_attribute.should.be.a(InstanceAttribute) + instance_attribute.get("sourceDestCheck").should.equal(True) + + +@mock_ec2_deprecated +def test_user_data_with_run_instance(): + user_data = b"some user data" + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd', user_data=user_data) + 
instance = reservation.instances[0] + + instance_attribute = instance.get_attribute("userData") + instance_attribute.should.be.a(InstanceAttribute) + retrieved_user_data = instance_attribute.get("userData").encode('utf-8') + decoded_user_data = base64.decodestring(retrieved_user_data) + decoded_user_data.should.equal(b"some user data") + + +@mock_ec2_deprecated +def test_run_instance_with_security_group_name(): + conn = boto.connect_ec2('the_key', 'the_secret') + + with assert_raises(EC2ResponseError) as ex: + group = conn.create_security_group( + 'group1', "some description", dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the CreateSecurityGroup operation: Request would have succeeded, but DryRun flag is set') + + group = conn.create_security_group('group1', "some description") + + reservation = conn.run_instances('ami-1234abcd', + security_groups=['group1']) + instance = reservation.instances[0] + + instance.groups[0].id.should.equal(group.id) + instance.groups[0].name.should.equal("group1") + + +@mock_ec2_deprecated +def test_run_instance_with_security_group_id(): + conn = boto.connect_ec2('the_key', 'the_secret') + group = conn.create_security_group('group1', "some description") + reservation = conn.run_instances('ami-1234abcd', + security_group_ids=[group.id]) + instance = reservation.instances[0] + + instance.groups[0].id.should.equal(group.id) + instance.groups[0].name.should.equal("group1") + + +@mock_ec2_deprecated +def test_run_instance_with_instance_type(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd', instance_type="t1.micro") + instance = reservation.instances[0] + + instance.instance_type.should.equal("t1.micro") + + +@mock_ec2_deprecated +def test_run_instance_with_default_placement(): + conn = boto.connect_ec2('the_key', 'the_secret') + 
reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + + instance.placement.should.equal("us-east-1a") + + +@mock_ec2_deprecated +def test_run_instance_with_placement(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd', placement="us-east-1b") + instance = reservation.instances[0] + + instance.placement.should.equal("us-east-1b") + + +@mock_ec2 +def test_run_instance_with_subnet_boto3(): + client = boto3.client('ec2', region_name='eu-central-1') + + ip_networks = [ + (ipaddress.ip_network('10.0.0.0/16'), ipaddress.ip_network('10.0.99.0/24')), + (ipaddress.ip_network('192.168.42.0/24'), ipaddress.ip_network('192.168.42.0/25')) + ] + + # Tests instances are created with the correct IPs + for vpc_cidr, subnet_cidr in ip_networks: + resp = client.create_vpc( + CidrBlock=str(vpc_cidr), + AmazonProvidedIpv6CidrBlock=False, + DryRun=False, + InstanceTenancy='default' + ) + vpc_id = resp['Vpc']['VpcId'] + + resp = client.create_subnet( + CidrBlock=str(subnet_cidr), + VpcId=vpc_id + ) + subnet_id = resp['Subnet']['SubnetId'] + + resp = client.run_instances( + ImageId='ami-1234abcd', + MaxCount=1, + MinCount=1, + SubnetId=subnet_id + ) + instance = resp['Instances'][0] + instance['SubnetId'].should.equal(subnet_id) + + priv_ipv4 = ipaddress.ip_address(six.text_type(instance['PrivateIpAddress'])) + subnet_cidr.should.contain(priv_ipv4) + + +@mock_ec2 +def test_run_instance_with_specified_private_ipv4(): + client = boto3.client('ec2', region_name='eu-central-1') + + vpc_cidr = ipaddress.ip_network('192.168.42.0/24') + subnet_cidr = ipaddress.ip_network('192.168.42.0/25') + + resp = client.create_vpc( + CidrBlock=str(vpc_cidr), + AmazonProvidedIpv6CidrBlock=False, + DryRun=False, + InstanceTenancy='default' + ) + vpc_id = resp['Vpc']['VpcId'] + + resp = client.create_subnet( + CidrBlock=str(subnet_cidr), + VpcId=vpc_id + ) + subnet_id = resp['Subnet']['SubnetId'] + + resp = 
client.run_instances( + ImageId='ami-1234abcd', + MaxCount=1, + MinCount=1, + SubnetId=subnet_id, + PrivateIpAddress='192.168.42.5' + ) + instance = resp['Instances'][0] + instance['SubnetId'].should.equal(subnet_id) + instance['PrivateIpAddress'].should.equal('192.168.42.5') + + +@mock_ec2 +def test_run_instance_mapped_public_ipv4(): + client = boto3.client('ec2', region_name='eu-central-1') + + vpc_cidr = ipaddress.ip_network('192.168.42.0/24') + subnet_cidr = ipaddress.ip_network('192.168.42.0/25') + + resp = client.create_vpc( + CidrBlock=str(vpc_cidr), + AmazonProvidedIpv6CidrBlock=False, + DryRun=False, + InstanceTenancy='default' + ) + vpc_id = resp['Vpc']['VpcId'] + + resp = client.create_subnet( + CidrBlock=str(subnet_cidr), + VpcId=vpc_id + ) + subnet_id = resp['Subnet']['SubnetId'] + client.modify_subnet_attribute( + SubnetId=subnet_id, + MapPublicIpOnLaunch={'Value': True} + ) + + resp = client.run_instances( + ImageId='ami-1234abcd', + MaxCount=1, + MinCount=1, + SubnetId=subnet_id + ) + instance = resp['Instances'][0] + instance.should.contain('PublicDnsName') + instance.should.contain('PublicIpAddress') + len(instance['PublicDnsName']).should.be.greater_than(0) + len(instance['PublicIpAddress']).should.be.greater_than(0) + + +@mock_ec2_deprecated +def test_run_instance_with_nic_autocreated(): + conn = boto.connect_vpc('the_key', 'the_secret') + vpc = conn.create_vpc("10.0.0.0/16") + subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") + security_group1 = conn.create_security_group( + 'test security group #1', 'this is a test security group') + security_group2 = conn.create_security_group( + 'test security group #2', 'this is a test security group') + private_ip = "10.0.0.1" + + reservation = conn.run_instances('ami-1234abcd', subnet_id=subnet.id, + security_groups=[security_group1.name], + security_group_ids=[security_group2.id], + private_ip_address=private_ip) + instance = reservation.instances[0] + + all_enis = conn.get_all_network_interfaces() + 
all_enis.should.have.length_of(1) + eni = all_enis[0] + + instance.interfaces.should.have.length_of(1) + instance.interfaces[0].id.should.equal(eni.id) + + instance.subnet_id.should.equal(subnet.id) + instance.groups.should.have.length_of(2) + set([group.id for group in instance.groups]).should.equal( + set([security_group1.id, security_group2.id])) + + eni.subnet_id.should.equal(subnet.id) + eni.groups.should.have.length_of(2) + set([group.id for group in eni.groups]).should.equal( + set([security_group1.id, security_group2.id])) + eni.private_ip_addresses.should.have.length_of(1) + eni.private_ip_addresses[0].private_ip_address.should.equal(private_ip) + + +@mock_ec2_deprecated +def test_run_instance_with_nic_preexisting(): + conn = boto.connect_vpc('the_key', 'the_secret') + vpc = conn.create_vpc("10.0.0.0/16") + subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") + security_group1 = conn.create_security_group( + 'test security group #1', 'this is a test security group') + security_group2 = conn.create_security_group( + 'test security group #2', 'this is a test security group') + private_ip = "54.0.0.1" + eni = conn.create_network_interface( + subnet.id, private_ip, groups=[security_group1.id]) + + # Boto requires NetworkInterfaceCollection of NetworkInterfaceSpecifications... + # annoying, but generates the desired querystring. 
+ from boto.ec2.networkinterface import NetworkInterfaceSpecification, NetworkInterfaceCollection + interface = NetworkInterfaceSpecification( + network_interface_id=eni.id, device_index=0) + interfaces = NetworkInterfaceCollection(interface) + # end Boto objects + + reservation = conn.run_instances('ami-1234abcd', network_interfaces=interfaces, + security_group_ids=[security_group2.id]) + instance = reservation.instances[0] + + instance.subnet_id.should.equal(subnet.id) + + all_enis = conn.get_all_network_interfaces() + all_enis.should.have.length_of(1) + + instance.interfaces.should.have.length_of(1) + instance_eni = instance.interfaces[0] + instance_eni.id.should.equal(eni.id) + + instance_eni.subnet_id.should.equal(subnet.id) + instance_eni.groups.should.have.length_of(2) + set([group.id for group in instance_eni.groups]).should.equal( + set([security_group1.id, security_group2.id])) + instance_eni.private_ip_addresses.should.have.length_of(1) + instance_eni.private_ip_addresses[ + 0].private_ip_address.should.equal(private_ip) + + +@requires_boto_gte("2.32.0") +@mock_ec2_deprecated +def test_instance_with_nic_attach_detach(): + conn = boto.connect_vpc('the_key', 'the_secret') + vpc = conn.create_vpc("10.0.0.0/16") + subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") + + security_group1 = conn.create_security_group( + 'test security group #1', 'this is a test security group') + security_group2 = conn.create_security_group( + 'test security group #2', 'this is a test security group') + + reservation = conn.run_instances( + 'ami-1234abcd', security_group_ids=[security_group1.id]) + instance = reservation.instances[0] + + eni = conn.create_network_interface(subnet.id, groups=[security_group2.id]) + + # Check initial instance and ENI data + instance.interfaces.should.have.length_of(1) + + eni.groups.should.have.length_of(1) + set([group.id for group in eni.groups]).should.equal( + set([security_group2.id])) + + # Attach + with assert_raises(EC2ResponseError) as ex: 
+ conn.attach_network_interface( + eni.id, instance.id, device_index=1, dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the AttachNetworkInterface operation: Request would have succeeded, but DryRun flag is set') + + conn.attach_network_interface(eni.id, instance.id, device_index=1) + + # Check attached instance and ENI data + instance.update() + instance.interfaces.should.have.length_of(2) + instance_eni = instance.interfaces[1] + instance_eni.id.should.equal(eni.id) + instance_eni.groups.should.have.length_of(2) + set([group.id for group in instance_eni.groups]).should.equal( + set([security_group1.id, security_group2.id])) + + eni = conn.get_all_network_interfaces( + filters={'network-interface-id': eni.id})[0] + eni.groups.should.have.length_of(2) + set([group.id for group in eni.groups]).should.equal( + set([security_group1.id, security_group2.id])) + + # Detach + with assert_raises(EC2ResponseError) as ex: + conn.detach_network_interface(instance_eni.attachment.id, dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the DetachNetworkInterface operation: Request would have succeeded, but DryRun flag is set') + + conn.detach_network_interface(instance_eni.attachment.id) + + # Check detached instance and ENI data + instance.update() + instance.interfaces.should.have.length_of(1) + + eni = conn.get_all_network_interfaces( + filters={'network-interface-id': eni.id})[0] + eni.groups.should.have.length_of(1) + set([group.id for group in eni.groups]).should.equal( + set([security_group2.id])) + + # Detach with invalid attachment ID + with assert_raises(EC2ResponseError) as cm: + conn.detach_network_interface('eni-attach-1234abcd') + 
cm.exception.code.should.equal('InvalidAttachmentID.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@mock_ec2_deprecated +def test_ec2_classic_has_public_ip_address(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd', key_name="keypair_name") + instance = reservation.instances[0] + instance.ip_address.should_not.equal(None) + instance.public_dns_name.should.contain(instance.ip_address.replace('.', '-')) + instance.private_ip_address.should_not.equal(None) + instance.private_dns_name.should.contain(instance.private_ip_address.replace('.', '-')) + + +@mock_ec2_deprecated +def test_run_instance_with_keypair(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd', key_name="keypair_name") + instance = reservation.instances[0] + + instance.key_name.should.equal("keypair_name") + + +@mock_ec2_deprecated +def test_describe_instance_status_no_instances(): + conn = boto.connect_ec2('the_key', 'the_secret') + all_status = conn.get_all_instance_status() + len(all_status).should.equal(0) + + +@mock_ec2_deprecated +def test_describe_instance_status_with_instances(): + conn = boto.connect_ec2('the_key', 'the_secret') + conn.run_instances('ami-1234abcd', key_name="keypair_name") + + all_status = conn.get_all_instance_status() + len(all_status).should.equal(1) + all_status[0].instance_status.status.should.equal('ok') + all_status[0].system_status.status.should.equal('ok') + + +@mock_ec2_deprecated +def test_describe_instance_status_with_instance_filter(): + conn = boto.connect_ec2('the_key', 'the_secret') + + # We want to filter based on this one + reservation = conn.run_instances('ami-1234abcd', key_name="keypair_name") + instance = reservation.instances[0] + + # This is just to setup the test + conn.run_instances('ami-1234abcd', key_name="keypair_name") + + all_status = conn.get_all_instance_status(instance_ids=[instance.id]) + 
len(all_status).should.equal(1) + all_status[0].id.should.equal(instance.id) + + # Call get_all_instance_status with a bad id should raise an error + with assert_raises(EC2ResponseError) as cm: + conn.get_all_instance_status(instance_ids=[instance.id, "i-1234abcd"]) + cm.exception.code.should.equal('InvalidInstanceID.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@requires_boto_gte("2.32.0") +@mock_ec2_deprecated +def test_describe_instance_status_with_non_running_instances(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd', min_count=3) + instance1, instance2, instance3 = reservation.instances + instance1.stop() + instance2.terminate() + + all_running_status = conn.get_all_instance_status() + all_running_status.should.have.length_of(1) + all_running_status[0].id.should.equal(instance3.id) + all_running_status[0].state_name.should.equal('running') + + all_status = conn.get_all_instance_status(include_all_instances=True) + all_status.should.have.length_of(3) + + status1 = next((s for s in all_status if s.id == instance1.id), None) + status1.state_name.should.equal('stopped') + + status2 = next((s for s in all_status if s.id == instance2.id), None) + status2.state_name.should.equal('terminated') + + status3 = next((s for s in all_status if s.id == instance3.id), None) + status3.state_name.should.equal('running') + + +@mock_ec2_deprecated +def test_get_instance_by_security_group(): + conn = boto.connect_ec2('the_key', 'the_secret') + + conn.run_instances('ami-1234abcd') + instance = conn.get_only_instances()[0] + + security_group = conn.create_security_group('test', 'test') + + with assert_raises(EC2ResponseError) as ex: + conn.modify_instance_attribute(instance.id, "groupSet", [ + security_group.id], dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error 
occurred (DryRunOperation) when calling the ModifyInstanceSecurityGroups operation: Request would have succeeded, but DryRun flag is set') + + conn.modify_instance_attribute( + instance.id, "groupSet", [security_group.id]) + + security_group_instances = security_group.instances() + + assert len(security_group_instances) == 1 + assert security_group_instances[0].id == instance.id + + +@mock_ec2 +def test_modify_delete_on_termination(): + ec2_client = boto3.resource('ec2', region_name='us-west-1') + result = ec2_client.create_instances(ImageId='ami-12345678', MinCount=1, MaxCount=1) + instance = result[0] + instance.load() + instance.block_device_mappings[0]['Ebs']['DeleteOnTermination'].should.be(False) + instance.modify_attribute( + BlockDeviceMappings=[{ + 'DeviceName': '/dev/sda1', + 'Ebs': {'DeleteOnTermination': True} + }] + ) + instance.load() + instance.block_device_mappings[0]['Ebs']['DeleteOnTermination'].should.be(True) + +@mock_ec2 +def test_create_instance_ebs_optimized(): + ec2_resource = boto3.resource('ec2', region_name='eu-west-1') + + instance = ec2_resource.create_instances( + ImageId = 'ami-12345678', + MaxCount = 1, + MinCount = 1, + EbsOptimized = True, + )[0] + instance.load() + instance.ebs_optimized.should.be(True) + + instance.modify_attribute( + EbsOptimized={ + 'Value': False + } + ) + instance.load() + instance.ebs_optimized.should.be(False) + +@mock_ec2 +def test_run_multiple_instances_in_same_command(): + instance_count = 4 + client = boto3.client('ec2', region_name='us-east-1') + client.run_instances(ImageId='ami-1234abcd', + MinCount=instance_count, + MaxCount=instance_count) + reservations = client.describe_instances()['Reservations'] + + reservations[0]['Instances'].should.have.length_of(instance_count) + + instances = reservations[0]['Instances'] + for i in range(0, instance_count): + instances[i]['AmiLaunchIndex'].should.be(i) diff --git a/tests/test_ec2/test_network_acls.py b/tests/test_ec2/test_network_acls.py index 
ad3222b8a..9c92c949e 100644 --- a/tests/test_ec2/test_network_acls.py +++ b/tests/test_ec2/test_network_acls.py @@ -1,175 +1,216 @@ -from __future__ import unicode_literals -import boto -import sure # noqa - -from moto import mock_ec2_deprecated - - -@mock_ec2_deprecated -def test_default_network_acl_created_with_vpc(): - conn = boto.connect_vpc('the_key', 'the secret') - vpc = conn.create_vpc("10.0.0.0/16") - all_network_acls = conn.get_all_network_acls() - all_network_acls.should.have.length_of(2) - - -@mock_ec2_deprecated -def test_network_acls(): - conn = boto.connect_vpc('the_key', 'the secret') - vpc = conn.create_vpc("10.0.0.0/16") - network_acl = conn.create_network_acl(vpc.id) - all_network_acls = conn.get_all_network_acls() - all_network_acls.should.have.length_of(3) - - -@mock_ec2_deprecated -def test_new_subnet_associates_with_default_network_acl(): - conn = boto.connect_vpc('the_key', 'the secret') - vpc = conn.get_all_vpcs()[0] - - subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") - all_network_acls = conn.get_all_network_acls() - all_network_acls.should.have.length_of(1) - - acl = all_network_acls[0] - acl.associations.should.have.length_of(4) - [a.subnet_id for a in acl.associations].should.contain(subnet.id) - - -@mock_ec2_deprecated -def test_network_acl_entries(): - conn = boto.connect_vpc('the_key', 'the secret') - vpc = conn.create_vpc("10.0.0.0/16") - - network_acl = conn.create_network_acl(vpc.id) - - network_acl_entry = conn.create_network_acl_entry( - network_acl.id, 110, 6, - 'ALLOW', '0.0.0.0/0', False, - port_range_from='443', - port_range_to='443' - ) - - all_network_acls = conn.get_all_network_acls() - all_network_acls.should.have.length_of(3) - - test_network_acl = next(na for na in all_network_acls - if na.id == network_acl.id) - entries = test_network_acl.network_acl_entries - entries.should.have.length_of(1) - entries[0].rule_number.should.equal('110') - entries[0].protocol.should.equal('6') - 
entries[0].rule_action.should.equal('ALLOW') - - -@mock_ec2_deprecated -def test_delete_network_acl_entry(): - conn = boto.connect_vpc('the_key', 'the secret') - vpc = conn.create_vpc("10.0.0.0/16") - - network_acl = conn.create_network_acl(vpc.id) - - conn.create_network_acl_entry( - network_acl.id, 110, 6, - 'ALLOW', '0.0.0.0/0', False, - port_range_from='443', - port_range_to='443' - ) - conn.delete_network_acl_entry( - network_acl.id, 110, False - ) - - all_network_acls = conn.get_all_network_acls() - - test_network_acl = next(na for na in all_network_acls - if na.id == network_acl.id) - entries = test_network_acl.network_acl_entries - entries.should.have.length_of(0) - - -@mock_ec2_deprecated -def test_replace_network_acl_entry(): - conn = boto.connect_vpc('the_key', 'the secret') - vpc = conn.create_vpc("10.0.0.0/16") - - network_acl = conn.create_network_acl(vpc.id) - - conn.create_network_acl_entry( - network_acl.id, 110, 6, - 'ALLOW', '0.0.0.0/0', False, - port_range_from='443', - port_range_to='443' - ) - conn.replace_network_acl_entry( - network_acl.id, 110, -1, - 'DENY', '0.0.0.0/0', False, - port_range_from='22', - port_range_to='22' - ) - - all_network_acls = conn.get_all_network_acls() - - test_network_acl = next(na for na in all_network_acls - if na.id == network_acl.id) - entries = test_network_acl.network_acl_entries - entries.should.have.length_of(1) - entries[0].rule_number.should.equal('110') - entries[0].protocol.should.equal('-1') - entries[0].rule_action.should.equal('DENY') - -@mock_ec2_deprecated -def test_associate_new_network_acl_with_subnet(): - conn = boto.connect_vpc('the_key', 'the secret') - vpc = conn.create_vpc("10.0.0.0/16") - subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") - network_acl = conn.create_network_acl(vpc.id) - - conn.associate_network_acl(network_acl.id, subnet.id) - - all_network_acls = conn.get_all_network_acls() - all_network_acls.should.have.length_of(3) - - test_network_acl = next(na for na in 
all_network_acls - if na.id == network_acl.id) - - test_network_acl.associations.should.have.length_of(1) - test_network_acl.associations[0].subnet_id.should.equal(subnet.id) - - -@mock_ec2_deprecated -def test_delete_network_acl(): - conn = boto.connect_vpc('the_key', 'the secret') - vpc = conn.create_vpc("10.0.0.0/16") - subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") - network_acl = conn.create_network_acl(vpc.id) - - all_network_acls = conn.get_all_network_acls() - all_network_acls.should.have.length_of(3) - - any(acl.id == network_acl.id for acl in all_network_acls).should.be.ok - - conn.delete_network_acl(network_acl.id) - - updated_network_acls = conn.get_all_network_acls() - updated_network_acls.should.have.length_of(2) - - any(acl.id == network_acl.id for acl in updated_network_acls).shouldnt.be.ok - - -@mock_ec2_deprecated -def test_network_acl_tagging(): - conn = boto.connect_vpc('the_key', 'the secret') - vpc = conn.create_vpc("10.0.0.0/16") - network_acl = conn.create_network_acl(vpc.id) - - network_acl.add_tag("a key", "some value") - - tag = conn.get_all_tags()[0] - tag.name.should.equal("a key") - tag.value.should.equal("some value") - - all_network_acls = conn.get_all_network_acls() - test_network_acl = next(na for na in all_network_acls - if na.id == network_acl.id) - test_network_acl.tags.should.have.length_of(1) - test_network_acl.tags["a key"].should.equal("some value") +from __future__ import unicode_literals +import boto +import boto3 +import sure # noqa + +from moto import mock_ec2_deprecated, mock_ec2 + + +@mock_ec2_deprecated +def test_default_network_acl_created_with_vpc(): + conn = boto.connect_vpc('the_key', 'the secret') + vpc = conn.create_vpc("10.0.0.0/16") + all_network_acls = conn.get_all_network_acls() + all_network_acls.should.have.length_of(2) + + +@mock_ec2_deprecated +def test_network_acls(): + conn = boto.connect_vpc('the_key', 'the secret') + vpc = conn.create_vpc("10.0.0.0/16") + network_acl = 
conn.create_network_acl(vpc.id) + all_network_acls = conn.get_all_network_acls() + all_network_acls.should.have.length_of(3) + + +@mock_ec2_deprecated +def test_new_subnet_associates_with_default_network_acl(): + conn = boto.connect_vpc('the_key', 'the secret') + vpc = conn.get_all_vpcs()[0] + + subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") + all_network_acls = conn.get_all_network_acls() + all_network_acls.should.have.length_of(1) + + acl = all_network_acls[0] + acl.associations.should.have.length_of(4) + [a.subnet_id for a in acl.associations].should.contain(subnet.id) + + +@mock_ec2_deprecated +def test_network_acl_entries(): + conn = boto.connect_vpc('the_key', 'the secret') + vpc = conn.create_vpc("10.0.0.0/16") + + network_acl = conn.create_network_acl(vpc.id) + + network_acl_entry = conn.create_network_acl_entry( + network_acl.id, 110, 6, + 'ALLOW', '0.0.0.0/0', False, + port_range_from='443', + port_range_to='443' + ) + + all_network_acls = conn.get_all_network_acls() + all_network_acls.should.have.length_of(3) + + test_network_acl = next(na for na in all_network_acls + if na.id == network_acl.id) + entries = test_network_acl.network_acl_entries + entries.should.have.length_of(1) + entries[0].rule_number.should.equal('110') + entries[0].protocol.should.equal('6') + entries[0].rule_action.should.equal('ALLOW') + + +@mock_ec2_deprecated +def test_delete_network_acl_entry(): + conn = boto.connect_vpc('the_key', 'the secret') + vpc = conn.create_vpc("10.0.0.0/16") + + network_acl = conn.create_network_acl(vpc.id) + + conn.create_network_acl_entry( + network_acl.id, 110, 6, + 'ALLOW', '0.0.0.0/0', False, + port_range_from='443', + port_range_to='443' + ) + conn.delete_network_acl_entry( + network_acl.id, 110, False + ) + + all_network_acls = conn.get_all_network_acls() + + test_network_acl = next(na for na in all_network_acls + if na.id == network_acl.id) + entries = test_network_acl.network_acl_entries + entries.should.have.length_of(0) + + 
+@mock_ec2_deprecated +def test_replace_network_acl_entry(): + conn = boto.connect_vpc('the_key', 'the secret') + vpc = conn.create_vpc("10.0.0.0/16") + + network_acl = conn.create_network_acl(vpc.id) + + conn.create_network_acl_entry( + network_acl.id, 110, 6, + 'ALLOW', '0.0.0.0/0', False, + port_range_from='443', + port_range_to='443' + ) + conn.replace_network_acl_entry( + network_acl.id, 110, -1, + 'DENY', '0.0.0.0/0', False, + port_range_from='22', + port_range_to='22' + ) + + all_network_acls = conn.get_all_network_acls() + + test_network_acl = next(na for na in all_network_acls + if na.id == network_acl.id) + entries = test_network_acl.network_acl_entries + entries.should.have.length_of(1) + entries[0].rule_number.should.equal('110') + entries[0].protocol.should.equal('-1') + entries[0].rule_action.should.equal('DENY') + +@mock_ec2_deprecated +def test_associate_new_network_acl_with_subnet(): + conn = boto.connect_vpc('the_key', 'the secret') + vpc = conn.create_vpc("10.0.0.0/16") + subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") + network_acl = conn.create_network_acl(vpc.id) + + conn.associate_network_acl(network_acl.id, subnet.id) + + all_network_acls = conn.get_all_network_acls() + all_network_acls.should.have.length_of(3) + + test_network_acl = next(na for na in all_network_acls + if na.id == network_acl.id) + + test_network_acl.associations.should.have.length_of(1) + test_network_acl.associations[0].subnet_id.should.equal(subnet.id) + + +@mock_ec2_deprecated +def test_delete_network_acl(): + conn = boto.connect_vpc('the_key', 'the secret') + vpc = conn.create_vpc("10.0.0.0/16") + subnet = conn.create_subnet(vpc.id, "10.0.0.0/18") + network_acl = conn.create_network_acl(vpc.id) + + all_network_acls = conn.get_all_network_acls() + all_network_acls.should.have.length_of(3) + + any(acl.id == network_acl.id for acl in all_network_acls).should.be.ok + + conn.delete_network_acl(network_acl.id) + + updated_network_acls = conn.get_all_network_acls() + 
updated_network_acls.should.have.length_of(2) + + any(acl.id == network_acl.id for acl in updated_network_acls).shouldnt.be.ok + + +@mock_ec2_deprecated +def test_network_acl_tagging(): + conn = boto.connect_vpc('the_key', 'the secret') + vpc = conn.create_vpc("10.0.0.0/16") + network_acl = conn.create_network_acl(vpc.id) + + network_acl.add_tag("a key", "some value") + + tag = conn.get_all_tags()[0] + tag.name.should.equal("a key") + tag.value.should.equal("some value") + + all_network_acls = conn.get_all_network_acls() + test_network_acl = next(na for na in all_network_acls + if na.id == network_acl.id) + test_network_acl.tags.should.have.length_of(1) + test_network_acl.tags["a key"].should.equal("some value") + + +@mock_ec2 +def test_new_subnet_in_new_vpc_associates_with_default_network_acl(): + ec2 = boto3.resource('ec2', region_name='us-west-1') + new_vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16') + new_vpc.reload() + + subnet = ec2.create_subnet(VpcId=new_vpc.id, CidrBlock='10.0.0.0/24') + subnet.reload() + + new_vpcs_default_network_acl = next(iter(new_vpc.network_acls.all()), None) + new_vpcs_default_network_acl.reload() + new_vpcs_default_network_acl.vpc_id.should.equal(new_vpc.id) + new_vpcs_default_network_acl.associations.should.have.length_of(1) + new_vpcs_default_network_acl.associations[0]['SubnetId'].should.equal(subnet.id) + + +@mock_ec2 +def test_default_network_acl_default_entries(): + ec2 = boto3.resource('ec2', region_name='us-west-1') + default_network_acl = next(iter(ec2.network_acls.all()), None) + default_network_acl.is_default.should.be.ok + + default_network_acl.entries.should.have.length_of(4) + unique_entries = [] + for entry in default_network_acl.entries: + entry['CidrBlock'].should.equal('0.0.0.0/0') + entry['Protocol'].should.equal('-1') + entry['RuleNumber'].should.be.within([100, 32767]) + entry['RuleAction'].should.be.within(['allow', 'deny']) + assert type(entry['Egress']) is bool + if entry['RuleAction'] == 'allow': + 
entry['RuleNumber'].should.be.equal(100) + else: + entry['RuleNumber'].should.be.equal(32767) + if entry not in unique_entries: + unique_entries.append(entry) + + unique_entries.should.have.length_of(4) diff --git a/tests/test_ec2/test_spot_fleet.py b/tests/test_ec2/test_spot_fleet.py index 01b05566a..190f3b1f1 100644 --- a/tests/test_ec2/test_spot_fleet.py +++ b/tests/test_ec2/test_spot_fleet.py @@ -1,345 +1,387 @@ -from __future__ import unicode_literals - -import boto3 -import sure # noqa - -from moto import mock_ec2 - - -def get_subnet_id(conn): - vpc = conn.create_vpc(CidrBlock="10.0.0.0/8")['Vpc'] - subnet = conn.create_subnet( - VpcId=vpc['VpcId'], CidrBlock='10.0.0.0/16', AvailabilityZone='us-east-1a')['Subnet'] - subnet_id = subnet['SubnetId'] - return subnet_id - - -def spot_config(subnet_id, allocation_strategy="lowestPrice"): - return { - 'ClientToken': 'string', - 'SpotPrice': '0.12', - 'TargetCapacity': 6, - 'IamFleetRole': 'arn:aws:iam::123456789012:role/fleet', - 'LaunchSpecifications': [{ - 'ImageId': 'ami-123', - 'KeyName': 'my-key', - 'SecurityGroups': [ - { - 'GroupId': 'sg-123' - }, - ], - 'UserData': 'some user data', - 'InstanceType': 't2.small', - 'BlockDeviceMappings': [ - { - 'VirtualName': 'string', - 'DeviceName': 'string', - 'Ebs': { - 'SnapshotId': 'string', - 'VolumeSize': 123, - 'DeleteOnTermination': True | False, - 'VolumeType': 'standard', - 'Iops': 123, - 'Encrypted': True | False - }, - 'NoDevice': 'string' - }, - ], - 'Monitoring': { - 'Enabled': True - }, - 'SubnetId': subnet_id, - 'IamInstanceProfile': { - 'Arn': 'arn:aws:iam::123456789012:role/fleet' - }, - 'EbsOptimized': False, - 'WeightedCapacity': 2.0, - 'SpotPrice': '0.13' - }, { - 'ImageId': 'ami-123', - 'KeyName': 'my-key', - 'SecurityGroups': [ - { - 'GroupId': 'sg-123' - }, - ], - 'UserData': 'some user data', - 'InstanceType': 't2.large', - 'Monitoring': { - 'Enabled': True - }, - 'SubnetId': subnet_id, - 'IamInstanceProfile': { - 'Arn': 
'arn:aws:iam::123456789012:role/fleet' - }, - 'EbsOptimized': False, - 'WeightedCapacity': 4.0, - 'SpotPrice': '10.00', - }], - 'AllocationStrategy': allocation_strategy, - 'FulfilledCapacity': 6, - } - - -@mock_ec2 -def test_create_spot_fleet_with_lowest_price(): - conn = boto3.client("ec2", region_name='us-west-2') - subnet_id = get_subnet_id(conn) - - spot_fleet_res = conn.request_spot_fleet( - SpotFleetRequestConfig=spot_config(subnet_id) - ) - spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] - - spot_fleet_requests = conn.describe_spot_fleet_requests( - SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'] - len(spot_fleet_requests).should.equal(1) - spot_fleet_request = spot_fleet_requests[0] - spot_fleet_request['SpotFleetRequestState'].should.equal("active") - spot_fleet_config = spot_fleet_request['SpotFleetRequestConfig'] - - spot_fleet_config['SpotPrice'].should.equal('0.12') - spot_fleet_config['TargetCapacity'].should.equal(6) - spot_fleet_config['IamFleetRole'].should.equal( - 'arn:aws:iam::123456789012:role/fleet') - spot_fleet_config['AllocationStrategy'].should.equal('lowestPrice') - spot_fleet_config['FulfilledCapacity'].should.equal(6.0) - - len(spot_fleet_config['LaunchSpecifications']).should.equal(2) - launch_spec = spot_fleet_config['LaunchSpecifications'][0] - - launch_spec['EbsOptimized'].should.equal(False) - launch_spec['SecurityGroups'].should.equal([{"GroupId": "sg-123"}]) - launch_spec['IamInstanceProfile'].should.equal( - {"Arn": "arn:aws:iam::123456789012:role/fleet"}) - launch_spec['ImageId'].should.equal("ami-123") - launch_spec['InstanceType'].should.equal("t2.small") - launch_spec['KeyName'].should.equal("my-key") - launch_spec['Monitoring'].should.equal({"Enabled": True}) - launch_spec['SpotPrice'].should.equal("0.13") - launch_spec['SubnetId'].should.equal(subnet_id) - launch_spec['UserData'].should.equal("some user data") - launch_spec['WeightedCapacity'].should.equal(2.0) - - instance_res = 
conn.describe_spot_fleet_instances( - SpotFleetRequestId=spot_fleet_id) - instances = instance_res['ActiveInstances'] - len(instances).should.equal(3) - - -@mock_ec2 -def test_create_diversified_spot_fleet(): - conn = boto3.client("ec2", region_name='us-west-2') - subnet_id = get_subnet_id(conn) - diversified_config = spot_config( - subnet_id, allocation_strategy='diversified') - - spot_fleet_res = conn.request_spot_fleet( - SpotFleetRequestConfig=diversified_config - ) - spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] - - instance_res = conn.describe_spot_fleet_instances( - SpotFleetRequestId=spot_fleet_id) - instances = instance_res['ActiveInstances'] - len(instances).should.equal(2) - instance_types = set([instance['InstanceType'] for instance in instances]) - instance_types.should.equal(set(["t2.small", "t2.large"])) - instances[0]['InstanceId'].should.contain("i-") - - -@mock_ec2 -def test_cancel_spot_fleet_request(): - conn = boto3.client("ec2", region_name='us-west-2') - subnet_id = get_subnet_id(conn) - - spot_fleet_res = conn.request_spot_fleet( - SpotFleetRequestConfig=spot_config(subnet_id), - ) - spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] - - conn.cancel_spot_fleet_requests( - SpotFleetRequestIds=[spot_fleet_id], TerminateInstances=True) - - spot_fleet_requests = conn.describe_spot_fleet_requests( - SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'] - len(spot_fleet_requests).should.equal(0) - - -@mock_ec2 -def test_modify_spot_fleet_request_up(): - conn = boto3.client("ec2", region_name='us-west-2') - subnet_id = get_subnet_id(conn) - - spot_fleet_res = conn.request_spot_fleet( - SpotFleetRequestConfig=spot_config(subnet_id), - ) - spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] - - conn.modify_spot_fleet_request( - SpotFleetRequestId=spot_fleet_id, TargetCapacity=20) - - instance_res = conn.describe_spot_fleet_instances( - SpotFleetRequestId=spot_fleet_id) - instances = instance_res['ActiveInstances'] - 
len(instances).should.equal(10) - - spot_fleet_config = conn.describe_spot_fleet_requests( - SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig'] - spot_fleet_config['TargetCapacity'].should.equal(20) - spot_fleet_config['FulfilledCapacity'].should.equal(20.0) - - -@mock_ec2 -def test_modify_spot_fleet_request_up_diversified(): - conn = boto3.client("ec2", region_name='us-west-2') - subnet_id = get_subnet_id(conn) - - spot_fleet_res = conn.request_spot_fleet( - SpotFleetRequestConfig=spot_config( - subnet_id, allocation_strategy='diversified'), - ) - spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] - - conn.modify_spot_fleet_request( - SpotFleetRequestId=spot_fleet_id, TargetCapacity=19) - - instance_res = conn.describe_spot_fleet_instances( - SpotFleetRequestId=spot_fleet_id) - instances = instance_res['ActiveInstances'] - len(instances).should.equal(7) - - spot_fleet_config = conn.describe_spot_fleet_requests( - SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig'] - spot_fleet_config['TargetCapacity'].should.equal(19) - spot_fleet_config['FulfilledCapacity'].should.equal(20.0) - - -@mock_ec2 -def test_modify_spot_fleet_request_down_no_terminate(): - conn = boto3.client("ec2", region_name='us-west-2') - subnet_id = get_subnet_id(conn) - - spot_fleet_res = conn.request_spot_fleet( - SpotFleetRequestConfig=spot_config(subnet_id), - ) - spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] - - conn.modify_spot_fleet_request( - SpotFleetRequestId=spot_fleet_id, TargetCapacity=1, ExcessCapacityTerminationPolicy="noTermination") - - instance_res = conn.describe_spot_fleet_instances( - SpotFleetRequestId=spot_fleet_id) - instances = instance_res['ActiveInstances'] - len(instances).should.equal(3) - - spot_fleet_config = conn.describe_spot_fleet_requests( - SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig'] - 
spot_fleet_config['TargetCapacity'].should.equal(1) - spot_fleet_config['FulfilledCapacity'].should.equal(6.0) - - -@mock_ec2 -def test_modify_spot_fleet_request_down_odd(): - conn = boto3.client("ec2", region_name='us-west-2') - subnet_id = get_subnet_id(conn) - - spot_fleet_res = conn.request_spot_fleet( - SpotFleetRequestConfig=spot_config(subnet_id), - ) - spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] - - conn.modify_spot_fleet_request( - SpotFleetRequestId=spot_fleet_id, TargetCapacity=7) - conn.modify_spot_fleet_request( - SpotFleetRequestId=spot_fleet_id, TargetCapacity=5) - - instance_res = conn.describe_spot_fleet_instances( - SpotFleetRequestId=spot_fleet_id) - instances = instance_res['ActiveInstances'] - len(instances).should.equal(3) - - spot_fleet_config = conn.describe_spot_fleet_requests( - SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig'] - spot_fleet_config['TargetCapacity'].should.equal(5) - spot_fleet_config['FulfilledCapacity'].should.equal(6.0) - - -@mock_ec2 -def test_modify_spot_fleet_request_down(): - conn = boto3.client("ec2", region_name='us-west-2') - subnet_id = get_subnet_id(conn) - - spot_fleet_res = conn.request_spot_fleet( - SpotFleetRequestConfig=spot_config(subnet_id), - ) - spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] - - conn.modify_spot_fleet_request( - SpotFleetRequestId=spot_fleet_id, TargetCapacity=1) - - instance_res = conn.describe_spot_fleet_instances( - SpotFleetRequestId=spot_fleet_id) - instances = instance_res['ActiveInstances'] - len(instances).should.equal(1) - - spot_fleet_config = conn.describe_spot_fleet_requests( - SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig'] - spot_fleet_config['TargetCapacity'].should.equal(1) - spot_fleet_config['FulfilledCapacity'].should.equal(2.0) - - -@mock_ec2 -def test_modify_spot_fleet_request_down_no_terminate_after_custom_terminate(): - conn = boto3.client("ec2", 
region_name='us-west-2') - subnet_id = get_subnet_id(conn) - - spot_fleet_res = conn.request_spot_fleet( - SpotFleetRequestConfig=spot_config(subnet_id), - ) - spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] - - instance_res = conn.describe_spot_fleet_instances( - SpotFleetRequestId=spot_fleet_id) - instances = instance_res['ActiveInstances'] - conn.terminate_instances(InstanceIds=[i['InstanceId'] for i in instances[1:]]) - - conn.modify_spot_fleet_request( - SpotFleetRequestId=spot_fleet_id, TargetCapacity=1, ExcessCapacityTerminationPolicy="noTermination") - - instance_res = conn.describe_spot_fleet_instances( - SpotFleetRequestId=spot_fleet_id) - instances = instance_res['ActiveInstances'] - len(instances).should.equal(1) - - spot_fleet_config = conn.describe_spot_fleet_requests( - SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig'] - spot_fleet_config['TargetCapacity'].should.equal(1) - spot_fleet_config['FulfilledCapacity'].should.equal(2.0) - - -@mock_ec2 -def test_create_spot_fleet_without_spot_price(): - conn = boto3.client("ec2", region_name='us-west-2') - subnet_id = get_subnet_id(conn) - - # remove prices to force a fallback to ondemand price - spot_config_without_price = spot_config(subnet_id) - del spot_config_without_price['SpotPrice'] - for spec in spot_config_without_price['LaunchSpecifications']: - del spec['SpotPrice'] - - spot_fleet_id = conn.request_spot_fleet(SpotFleetRequestConfig=spot_config_without_price)['SpotFleetRequestId'] - spot_fleet_requests = conn.describe_spot_fleet_requests( - SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'] - len(spot_fleet_requests).should.equal(1) - spot_fleet_request = spot_fleet_requests[0] - spot_fleet_config = spot_fleet_request['SpotFleetRequestConfig'] - - len(spot_fleet_config['LaunchSpecifications']).should.equal(2) - launch_spec1 = spot_fleet_config['LaunchSpecifications'][0] - launch_spec2 = spot_fleet_config['LaunchSpecifications'][1] - - # 
AWS will figure out the price - assert 'SpotPrice' not in launch_spec1 - assert 'SpotPrice' not in launch_spec2 +from __future__ import unicode_literals + +import boto3 +import sure # noqa + +from moto import mock_ec2 + + +def get_subnet_id(conn): + vpc = conn.create_vpc(CidrBlock="10.0.0.0/8")['Vpc'] + subnet = conn.create_subnet( + VpcId=vpc['VpcId'], CidrBlock='10.0.0.0/16', AvailabilityZone='us-east-1a')['Subnet'] + subnet_id = subnet['SubnetId'] + return subnet_id + + +def spot_config(subnet_id, allocation_strategy="lowestPrice"): + return { + 'ClientToken': 'string', + 'SpotPrice': '0.12', + 'TargetCapacity': 6, + 'IamFleetRole': 'arn:aws:iam::123456789012:role/fleet', + 'LaunchSpecifications': [{ + 'ImageId': 'ami-123', + 'KeyName': 'my-key', + 'SecurityGroups': [ + { + 'GroupId': 'sg-123' + }, + ], + 'UserData': 'some user data', + 'InstanceType': 't2.small', + 'BlockDeviceMappings': [ + { + 'VirtualName': 'string', + 'DeviceName': 'string', + 'Ebs': { + 'SnapshotId': 'string', + 'VolumeSize': 123, + 'DeleteOnTermination': True | False, + 'VolumeType': 'standard', + 'Iops': 123, + 'Encrypted': True | False + }, + 'NoDevice': 'string' + }, + ], + 'Monitoring': { + 'Enabled': True + }, + 'SubnetId': subnet_id, + 'IamInstanceProfile': { + 'Arn': 'arn:aws:iam::123456789012:role/fleet' + }, + 'EbsOptimized': False, + 'WeightedCapacity': 2.0, + 'SpotPrice': '0.13', + }, { + 'ImageId': 'ami-123', + 'KeyName': 'my-key', + 'SecurityGroups': [ + { + 'GroupId': 'sg-123' + }, + ], + 'UserData': 'some user data', + 'InstanceType': 't2.large', + 'Monitoring': { + 'Enabled': True + }, + 'SubnetId': subnet_id, + 'IamInstanceProfile': { + 'Arn': 'arn:aws:iam::123456789012:role/fleet' + }, + 'EbsOptimized': False, + 'WeightedCapacity': 4.0, + 'SpotPrice': '10.00', + }], + 'AllocationStrategy': allocation_strategy, + 'FulfilledCapacity': 6, + } + + +@mock_ec2 +def test_create_spot_fleet_with_lowest_price(): + conn = boto3.client("ec2", region_name='us-west-2') + subnet_id = 
get_subnet_id(conn) + + spot_fleet_res = conn.request_spot_fleet( + SpotFleetRequestConfig=spot_config(subnet_id) + ) + spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] + + spot_fleet_requests = conn.describe_spot_fleet_requests( + SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'] + len(spot_fleet_requests).should.equal(1) + spot_fleet_request = spot_fleet_requests[0] + spot_fleet_request['SpotFleetRequestState'].should.equal("active") + spot_fleet_config = spot_fleet_request['SpotFleetRequestConfig'] + + spot_fleet_config['SpotPrice'].should.equal('0.12') + spot_fleet_config['TargetCapacity'].should.equal(6) + spot_fleet_config['IamFleetRole'].should.equal( + 'arn:aws:iam::123456789012:role/fleet') + spot_fleet_config['AllocationStrategy'].should.equal('lowestPrice') + spot_fleet_config['FulfilledCapacity'].should.equal(6.0) + + len(spot_fleet_config['LaunchSpecifications']).should.equal(2) + launch_spec = spot_fleet_config['LaunchSpecifications'][0] + + launch_spec['EbsOptimized'].should.equal(False) + launch_spec['SecurityGroups'].should.equal([{"GroupId": "sg-123"}]) + launch_spec['IamInstanceProfile'].should.equal( + {"Arn": "arn:aws:iam::123456789012:role/fleet"}) + launch_spec['ImageId'].should.equal("ami-123") + launch_spec['InstanceType'].should.equal("t2.small") + launch_spec['KeyName'].should.equal("my-key") + launch_spec['Monitoring'].should.equal({"Enabled": True}) + launch_spec['SpotPrice'].should.equal("0.13") + launch_spec['SubnetId'].should.equal(subnet_id) + launch_spec['UserData'].should.equal("some user data") + launch_spec['WeightedCapacity'].should.equal(2.0) + + instance_res = conn.describe_spot_fleet_instances( + SpotFleetRequestId=spot_fleet_id) + instances = instance_res['ActiveInstances'] + len(instances).should.equal(3) + + +@mock_ec2 +def test_create_diversified_spot_fleet(): + conn = boto3.client("ec2", region_name='us-west-2') + subnet_id = get_subnet_id(conn) + diversified_config = spot_config( + subnet_id, 
allocation_strategy='diversified') + + spot_fleet_res = conn.request_spot_fleet( + SpotFleetRequestConfig=diversified_config + ) + spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] + + instance_res = conn.describe_spot_fleet_instances( + SpotFleetRequestId=spot_fleet_id) + instances = instance_res['ActiveInstances'] + len(instances).should.equal(2) + instance_types = set([instance['InstanceType'] for instance in instances]) + instance_types.should.equal(set(["t2.small", "t2.large"])) + instances[0]['InstanceId'].should.contain("i-") + + +@mock_ec2 +def test_create_spot_fleet_request_with_tag_spec(): + conn = boto3.client("ec2", region_name='us-west-2') + subnet_id = get_subnet_id(conn) + + tag_spec = [ + { + 'ResourceType': 'instance', + 'Tags': [ + { + 'Key': 'tag-1', + 'Value': 'foo', + }, + { + 'Key': 'tag-2', + 'Value': 'bar', + }, + ] + }, + ] + config = spot_config(subnet_id) + config['LaunchSpecifications'][0]['TagSpecifications'] = tag_spec + spot_fleet_res = conn.request_spot_fleet( + SpotFleetRequestConfig=config + ) + spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] + spot_fleet_requests = conn.describe_spot_fleet_requests( + SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'] + spot_fleet_config = spot_fleet_requests[0]['SpotFleetRequestConfig'] + spot_fleet_config['LaunchSpecifications'][0]['TagSpecifications'][0][ + 'ResourceType'].should.equal('instance') + for tag in tag_spec[0]['Tags']: + spot_fleet_config['LaunchSpecifications'][0]['TagSpecifications'][0]['Tags'].should.contain(tag) + + instance_res = conn.describe_spot_fleet_instances( + SpotFleetRequestId=spot_fleet_id) + instances = conn.describe_instances(InstanceIds=[i['InstanceId'] for i in instance_res['ActiveInstances']]) + for instance in instances['Reservations'][0]['Instances']: + for tag in tag_spec[0]['Tags']: + instance['Tags'].should.contain(tag) + + +@mock_ec2 +def test_cancel_spot_fleet_request(): + conn = boto3.client("ec2", region_name='us-west-2') + subnet_id 
= get_subnet_id(conn) + + spot_fleet_res = conn.request_spot_fleet( + SpotFleetRequestConfig=spot_config(subnet_id), + ) + spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] + + conn.cancel_spot_fleet_requests( + SpotFleetRequestIds=[spot_fleet_id], TerminateInstances=True) + + spot_fleet_requests = conn.describe_spot_fleet_requests( + SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'] + len(spot_fleet_requests).should.equal(0) + + +@mock_ec2 +def test_modify_spot_fleet_request_up(): + conn = boto3.client("ec2", region_name='us-west-2') + subnet_id = get_subnet_id(conn) + + spot_fleet_res = conn.request_spot_fleet( + SpotFleetRequestConfig=spot_config(subnet_id), + ) + spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] + + conn.modify_spot_fleet_request( + SpotFleetRequestId=spot_fleet_id, TargetCapacity=20) + + instance_res = conn.describe_spot_fleet_instances( + SpotFleetRequestId=spot_fleet_id) + instances = instance_res['ActiveInstances'] + len(instances).should.equal(10) + + spot_fleet_config = conn.describe_spot_fleet_requests( + SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig'] + spot_fleet_config['TargetCapacity'].should.equal(20) + spot_fleet_config['FulfilledCapacity'].should.equal(20.0) + + +@mock_ec2 +def test_modify_spot_fleet_request_up_diversified(): + conn = boto3.client("ec2", region_name='us-west-2') + subnet_id = get_subnet_id(conn) + + spot_fleet_res = conn.request_spot_fleet( + SpotFleetRequestConfig=spot_config( + subnet_id, allocation_strategy='diversified'), + ) + spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] + + conn.modify_spot_fleet_request( + SpotFleetRequestId=spot_fleet_id, TargetCapacity=19) + + instance_res = conn.describe_spot_fleet_instances( + SpotFleetRequestId=spot_fleet_id) + instances = instance_res['ActiveInstances'] + len(instances).should.equal(7) + + spot_fleet_config = conn.describe_spot_fleet_requests( + 
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig'] + spot_fleet_config['TargetCapacity'].should.equal(19) + spot_fleet_config['FulfilledCapacity'].should.equal(20.0) + + +@mock_ec2 +def test_modify_spot_fleet_request_down_no_terminate(): + conn = boto3.client("ec2", region_name='us-west-2') + subnet_id = get_subnet_id(conn) + + spot_fleet_res = conn.request_spot_fleet( + SpotFleetRequestConfig=spot_config(subnet_id), + ) + spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] + + conn.modify_spot_fleet_request( + SpotFleetRequestId=spot_fleet_id, TargetCapacity=1, ExcessCapacityTerminationPolicy="noTermination") + + instance_res = conn.describe_spot_fleet_instances( + SpotFleetRequestId=spot_fleet_id) + instances = instance_res['ActiveInstances'] + len(instances).should.equal(3) + + spot_fleet_config = conn.describe_spot_fleet_requests( + SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig'] + spot_fleet_config['TargetCapacity'].should.equal(1) + spot_fleet_config['FulfilledCapacity'].should.equal(6.0) + + +@mock_ec2 +def test_modify_spot_fleet_request_down_odd(): + conn = boto3.client("ec2", region_name='us-west-2') + subnet_id = get_subnet_id(conn) + + spot_fleet_res = conn.request_spot_fleet( + SpotFleetRequestConfig=spot_config(subnet_id), + ) + spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] + + conn.modify_spot_fleet_request( + SpotFleetRequestId=spot_fleet_id, TargetCapacity=7) + conn.modify_spot_fleet_request( + SpotFleetRequestId=spot_fleet_id, TargetCapacity=5) + + instance_res = conn.describe_spot_fleet_instances( + SpotFleetRequestId=spot_fleet_id) + instances = instance_res['ActiveInstances'] + len(instances).should.equal(3) + + spot_fleet_config = conn.describe_spot_fleet_requests( + SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig'] + spot_fleet_config['TargetCapacity'].should.equal(5) + 
spot_fleet_config['FulfilledCapacity'].should.equal(6.0) + + +@mock_ec2 +def test_modify_spot_fleet_request_down(): + conn = boto3.client("ec2", region_name='us-west-2') + subnet_id = get_subnet_id(conn) + + spot_fleet_res = conn.request_spot_fleet( + SpotFleetRequestConfig=spot_config(subnet_id), + ) + spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] + + conn.modify_spot_fleet_request( + SpotFleetRequestId=spot_fleet_id, TargetCapacity=1) + + instance_res = conn.describe_spot_fleet_instances( + SpotFleetRequestId=spot_fleet_id) + instances = instance_res['ActiveInstances'] + len(instances).should.equal(1) + + spot_fleet_config = conn.describe_spot_fleet_requests( + SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig'] + spot_fleet_config['TargetCapacity'].should.equal(1) + spot_fleet_config['FulfilledCapacity'].should.equal(2.0) + + +@mock_ec2 +def test_modify_spot_fleet_request_down_no_terminate_after_custom_terminate(): + conn = boto3.client("ec2", region_name='us-west-2') + subnet_id = get_subnet_id(conn) + + spot_fleet_res = conn.request_spot_fleet( + SpotFleetRequestConfig=spot_config(subnet_id), + ) + spot_fleet_id = spot_fleet_res['SpotFleetRequestId'] + + instance_res = conn.describe_spot_fleet_instances( + SpotFleetRequestId=spot_fleet_id) + instances = instance_res['ActiveInstances'] + conn.terminate_instances(InstanceIds=[i['InstanceId'] for i in instances[1:]]) + + conn.modify_spot_fleet_request( + SpotFleetRequestId=spot_fleet_id, TargetCapacity=1, ExcessCapacityTerminationPolicy="noTermination") + + instance_res = conn.describe_spot_fleet_instances( + SpotFleetRequestId=spot_fleet_id) + instances = instance_res['ActiveInstances'] + len(instances).should.equal(1) + + spot_fleet_config = conn.describe_spot_fleet_requests( + SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig'] + spot_fleet_config['TargetCapacity'].should.equal(1) + 
spot_fleet_config['FulfilledCapacity'].should.equal(2.0) + + +@mock_ec2 +def test_create_spot_fleet_without_spot_price(): + conn = boto3.client("ec2", region_name='us-west-2') + subnet_id = get_subnet_id(conn) + + # remove prices to force a fallback to ondemand price + spot_config_without_price = spot_config(subnet_id) + del spot_config_without_price['SpotPrice'] + for spec in spot_config_without_price['LaunchSpecifications']: + del spec['SpotPrice'] + + spot_fleet_id = conn.request_spot_fleet(SpotFleetRequestConfig=spot_config_without_price)['SpotFleetRequestId'] + spot_fleet_requests = conn.describe_spot_fleet_requests( + SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'] + len(spot_fleet_requests).should.equal(1) + spot_fleet_request = spot_fleet_requests[0] + spot_fleet_config = spot_fleet_request['SpotFleetRequestConfig'] + + len(spot_fleet_config['LaunchSpecifications']).should.equal(2) + launch_spec1 = spot_fleet_config['LaunchSpecifications'][0] + launch_spec2 = spot_fleet_config['LaunchSpecifications'][1] + + # AWS will figure out the price + assert 'SpotPrice' not in launch_spec1 + assert 'SpotPrice' not in launch_spec2 diff --git a/tests/test_ec2/test_tags.py b/tests/test_ec2/test_tags.py index ac213857a..2294979ba 100644 --- a/tests/test_ec2/test_tags.py +++ b/tests/test_ec2/test_tags.py @@ -1,453 +1,482 @@ -from __future__ import unicode_literals -from nose.tools import assert_raises - -import itertools - -import boto -import boto3 -from boto.exception import EC2ResponseError -from boto.ec2.instance import Reservation -import sure # noqa - -from moto import mock_ec2_deprecated, mock_ec2 -from nose.tools import assert_raises - - -@mock_ec2_deprecated -def test_add_tag(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - - with assert_raises(EC2ResponseError) as ex: - instance.add_tag("a key", "some value", dry_run=True) - 
ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set') - - instance.add_tag("a key", "some value") - chain = itertools.chain.from_iterable - existing_instances = list( - chain([res.instances for res in conn.get_all_instances()])) - existing_instances.should.have.length_of(1) - existing_instance = existing_instances[0] - existing_instance.tags["a key"].should.equal("some value") - - -@mock_ec2_deprecated -def test_remove_tag(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - - instance.add_tag("a key", "some value") - - tags = conn.get_all_tags() - tag = tags[0] - tag.name.should.equal("a key") - tag.value.should.equal("some value") - - with assert_raises(EC2ResponseError) as ex: - instance.remove_tag("a key", dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the DeleteTags operation: Request would have succeeded, but DryRun flag is set') - - instance.remove_tag("a key") - conn.get_all_tags().should.have.length_of(0) - - instance.add_tag("a key", "some value") - conn.get_all_tags().should.have.length_of(1) - instance.remove_tag("a key", "some value") - - -@mock_ec2_deprecated -def test_get_all_tags(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - - instance.add_tag("a key", "some value") - - tags = conn.get_all_tags() - tag = tags[0] - tag.name.should.equal("a key") - tag.value.should.equal("some value") - - -@mock_ec2_deprecated -def test_get_all_tags_with_special_characters(): - conn = boto.connect_ec2('the_key', 
'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - - instance.add_tag("a key", "some<> value") - - tags = conn.get_all_tags() - tag = tags[0] - tag.name.should.equal("a key") - tag.value.should.equal("some<> value") - - -@mock_ec2_deprecated -def test_create_tags(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - tag_dict = {'a key': 'some value', - 'another key': 'some other value', - 'blank key': ''} - - with assert_raises(EC2ResponseError) as ex: - conn.create_tags(instance.id, tag_dict, dry_run=True) - ex.exception.error_code.should.equal('DryRunOperation') - ex.exception.status.should.equal(400) - ex.exception.message.should.equal( - 'An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set') - - conn.create_tags(instance.id, tag_dict) - tags = conn.get_all_tags() - set([key for key in tag_dict]).should.equal( - set([tag.name for tag in tags])) - set([tag_dict[key] for key in tag_dict]).should.equal( - set([tag.value for tag in tags])) - - -@mock_ec2_deprecated -def test_tag_limit_exceeded(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - tag_dict = {} - for i in range(51): - tag_dict['{0:02d}'.format(i + 1)] = '' - - with assert_raises(EC2ResponseError) as cm: - conn.create_tags(instance.id, tag_dict) - cm.exception.code.should.equal('TagLimitExceeded') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - instance.add_tag("a key", "a value") - with assert_raises(EC2ResponseError) as cm: - conn.create_tags(instance.id, tag_dict) - cm.exception.code.should.equal('TagLimitExceeded') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - tags = conn.get_all_tags() - tag = tags[0] - 
tags.should.have.length_of(1) - tag.name.should.equal("a key") - tag.value.should.equal("a value") - - -@mock_ec2_deprecated -def test_invalid_parameter_tag_null(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - - with assert_raises(EC2ResponseError) as cm: - instance.add_tag("a key", None) - cm.exception.code.should.equal('InvalidParameterValue') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - -@mock_ec2_deprecated -def test_invalid_id(): - conn = boto.connect_ec2('the_key', 'the_secret') - with assert_raises(EC2ResponseError) as cm: - conn.create_tags('ami-blah', {'key': 'tag'}) - cm.exception.code.should.equal('InvalidID') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - with assert_raises(EC2ResponseError) as cm: - conn.create_tags('blah-blah', {'key': 'tag'}) - cm.exception.code.should.equal('InvalidID') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - -@mock_ec2_deprecated -def test_get_all_tags_resource_id_filter(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - instance.add_tag("an instance key", "some value") - image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") - image = conn.get_image(image_id) - image.add_tag("an image key", "some value") - - tags = conn.get_all_tags(filters={'resource-id': instance.id}) - tag = tags[0] - tags.should.have.length_of(1) - tag.res_id.should.equal(instance.id) - tag.res_type.should.equal('instance') - tag.name.should.equal("an instance key") - tag.value.should.equal("some value") - - tags = conn.get_all_tags(filters={'resource-id': image_id}) - tag = tags[0] - tags.should.have.length_of(1) - tag.res_id.should.equal(image_id) - tag.res_type.should.equal('image') - tag.name.should.equal("an image 
key") - tag.value.should.equal("some value") - - -@mock_ec2_deprecated -def test_get_all_tags_resource_type_filter(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - instance.add_tag("an instance key", "some value") - image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") - image = conn.get_image(image_id) - image.add_tag("an image key", "some value") - - tags = conn.get_all_tags(filters={'resource-type': 'instance'}) - tag = tags[0] - tags.should.have.length_of(1) - tag.res_id.should.equal(instance.id) - tag.res_type.should.equal('instance') - tag.name.should.equal("an instance key") - tag.value.should.equal("some value") - - tags = conn.get_all_tags(filters={'resource-type': 'image'}) - tag = tags[0] - tags.should.have.length_of(1) - tag.res_id.should.equal(image_id) - tag.res_type.should.equal('image') - tag.name.should.equal("an image key") - tag.value.should.equal("some value") - - -@mock_ec2_deprecated -def test_get_all_tags_key_filter(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - instance.add_tag("an instance key", "some value") - image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") - image = conn.get_image(image_id) - image.add_tag("an image key", "some value") - - tags = conn.get_all_tags(filters={'key': 'an instance key'}) - tag = tags[0] - tags.should.have.length_of(1) - tag.res_id.should.equal(instance.id) - tag.res_type.should.equal('instance') - tag.name.should.equal("an instance key") - tag.value.should.equal("some value") - - -@mock_ec2_deprecated -def test_get_all_tags_value_filter(): - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - instance = reservation.instances[0] - instance.add_tag("an instance key", "some value") - reservation_b = 
conn.run_instances('ami-1234abcd') - instance_b = reservation_b.instances[0] - instance_b.add_tag("an instance key", "some other value") - reservation_c = conn.run_instances('ami-1234abcd') - instance_c = reservation_c.instances[0] - instance_c.add_tag("an instance key", "other value*") - reservation_d = conn.run_instances('ami-1234abcd') - instance_d = reservation_d.instances[0] - instance_d.add_tag("an instance key", "other value**") - reservation_e = conn.run_instances('ami-1234abcd') - instance_e = reservation_e.instances[0] - instance_e.add_tag("an instance key", "other value*?") - image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") - image = conn.get_image(image_id) - image.add_tag("an image key", "some value") - - tags = conn.get_all_tags(filters={'value': 'some value'}) - tags.should.have.length_of(2) - - tags = conn.get_all_tags(filters={'value': 'some*value'}) - tags.should.have.length_of(3) - - tags = conn.get_all_tags(filters={'value': '*some*value'}) - tags.should.have.length_of(3) - - tags = conn.get_all_tags(filters={'value': '*some*value*'}) - tags.should.have.length_of(3) - - tags = conn.get_all_tags(filters={'value': '*value\*'}) - tags.should.have.length_of(1) - - tags = conn.get_all_tags(filters={'value': '*value\*\*'}) - tags.should.have.length_of(1) - - tags = conn.get_all_tags(filters={'value': '*value\*\?'}) - tags.should.have.length_of(1) - - -@mock_ec2_deprecated -def test_retrieved_instances_must_contain_their_tags(): - tag_key = 'Tag name' - tag_value = 'Tag value' - tags_to_be_set = {tag_key: tag_value} - - conn = boto.connect_ec2('the_key', 'the_secret') - reservation = conn.run_instances('ami-1234abcd') - reservation.should.be.a(Reservation) - reservation.instances.should.have.length_of(1) - instance = reservation.instances[0] - - reservations = conn.get_all_instances() - reservations.should.have.length_of(1) - reservations[0].id.should.equal(reservation.id) - instances = reservations[0].instances - 
instances.should.have.length_of(1) - instances[0].id.should.equal(instance.id) - - conn.create_tags([instance.id], tags_to_be_set) - reservations = conn.get_all_instances() - instance = reservations[0].instances[0] - retrieved_tags = instance.tags - - # Cleanup of instance - conn.terminate_instances([instances[0].id]) - - # Check whether tag is present with correct value - retrieved_tags[tag_key].should.equal(tag_value) - - -@mock_ec2_deprecated -def test_retrieved_volumes_must_contain_their_tags(): - tag_key = 'Tag name' - tag_value = 'Tag value' - tags_to_be_set = {tag_key: tag_value} - conn = boto.connect_ec2('the_key', 'the_secret') - volume = conn.create_volume(80, "us-east-1a") - - all_volumes = conn.get_all_volumes() - volume = all_volumes[0] - conn.create_tags([volume.id], tags_to_be_set) - - # Fetch the volume again - all_volumes = conn.get_all_volumes() - volume = all_volumes[0] - retrieved_tags = volume.tags - - volume.delete() - - # Check whether tag is present with correct value - retrieved_tags[tag_key].should.equal(tag_value) - - -@mock_ec2_deprecated -def test_retrieved_snapshots_must_contain_their_tags(): - tag_key = 'Tag name' - tag_value = 'Tag value' - tags_to_be_set = {tag_key: tag_value} - conn = boto.connect_ec2(aws_access_key_id='the_key', - aws_secret_access_key='the_secret') - volume = conn.create_volume(80, "eu-west-1a") - snapshot = conn.create_snapshot(volume.id) - conn.create_tags([snapshot.id], tags_to_be_set) - - # Fetch the snapshot again - all_snapshots = conn.get_all_snapshots() - snapshot = [item for item in all_snapshots if item.id == snapshot.id][0] - retrieved_tags = snapshot.tags - - conn.delete_snapshot(snapshot.id) - volume.delete() - - # Check whether tag is present with correct value - retrieved_tags[tag_key].should.equal(tag_value) - - -@mock_ec2_deprecated -def test_filter_instances_by_wildcard_tags(): - conn = boto.connect_ec2(aws_access_key_id='the_key', - aws_secret_access_key='the_secret') - reservation = 
conn.run_instances('ami-1234abcd') - instance_a = reservation.instances[0] - instance_a.add_tag("Key1", "Value1") - reservation_b = conn.run_instances('ami-1234abcd') - instance_b = reservation_b.instances[0] - instance_b.add_tag("Key1", "Value2") - - reservations = conn.get_all_instances(filters={'tag:Key1': 'Value*'}) - reservations.should.have.length_of(2) - - reservations = conn.get_all_instances(filters={'tag-key': 'Key*'}) - reservations.should.have.length_of(2) - - reservations = conn.get_all_instances(filters={'tag-value': 'Value*'}) - reservations.should.have.length_of(2) - - -@mock_ec2 -def test_create_volume_with_tags(): - client = boto3.client('ec2', 'us-west-2') - response = client.create_volume( - AvailabilityZone='us-west-2', - Encrypted=False, - Size=40, - TagSpecifications=[ - { - 'ResourceType': 'volume', - 'Tags': [ - { - 'Key': 'TEST_TAG', - 'Value': 'TEST_VALUE' - } - ], - } - ] - ) - - assert response['Tags'][0]['Key'] == 'TEST_TAG' - - -@mock_ec2 -def test_create_snapshot_with_tags(): - client = boto3.client('ec2', 'us-west-2') - volume_id = client.create_volume( - AvailabilityZone='us-west-2', - Encrypted=False, - Size=40, - TagSpecifications=[ - { - 'ResourceType': 'volume', - 'Tags': [ - { - 'Key': 'TEST_TAG', - 'Value': 'TEST_VALUE' - } - ], - } - ] - )['VolumeId'] - snapshot = client.create_snapshot( - VolumeId=volume_id, - TagSpecifications=[ - { - 'ResourceType': 'snapshot', - 'Tags': [ - { - 'Key': 'TEST_SNAPSHOT_TAG', - 'Value': 'TEST_SNAPSHOT_VALUE' - } - ], - } - ] - ) - - expected_tags = [{ - 'Key': 'TEST_SNAPSHOT_TAG', - 'Value': 'TEST_SNAPSHOT_VALUE' - }] - - assert snapshot['Tags'] == expected_tags +from __future__ import unicode_literals +from nose.tools import assert_raises + +import itertools + +import boto +import boto3 +from botocore.exceptions import ClientError +from boto.exception import EC2ResponseError +from boto.ec2.instance import Reservation +import sure # noqa + +from moto import mock_ec2_deprecated, mock_ec2 
+from nose.tools import assert_raises + + +@mock_ec2_deprecated +def test_add_tag(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + + with assert_raises(EC2ResponseError) as ex: + instance.add_tag("a key", "some value", dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set') + + instance.add_tag("a key", "some value") + chain = itertools.chain.from_iterable + existing_instances = list( + chain([res.instances for res in conn.get_all_instances()])) + existing_instances.should.have.length_of(1) + existing_instance = existing_instances[0] + existing_instance.tags["a key"].should.equal("some value") + + +@mock_ec2_deprecated +def test_remove_tag(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + + instance.add_tag("a key", "some value") + + tags = conn.get_all_tags() + tag = tags[0] + tag.name.should.equal("a key") + tag.value.should.equal("some value") + + with assert_raises(EC2ResponseError) as ex: + instance.remove_tag("a key", dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the DeleteTags operation: Request would have succeeded, but DryRun flag is set') + + instance.remove_tag("a key") + conn.get_all_tags().should.have.length_of(0) + + instance.add_tag("a key", "some value") + conn.get_all_tags().should.have.length_of(1) + instance.remove_tag("a key", "some value") + + +@mock_ec2_deprecated +def test_get_all_tags(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = 
conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + + instance.add_tag("a key", "some value") + + tags = conn.get_all_tags() + tag = tags[0] + tag.name.should.equal("a key") + tag.value.should.equal("some value") + + +@mock_ec2_deprecated +def test_get_all_tags_with_special_characters(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + + instance.add_tag("a key", "some<> value") + + tags = conn.get_all_tags() + tag = tags[0] + tag.name.should.equal("a key") + tag.value.should.equal("some<> value") + + +@mock_ec2_deprecated +def test_create_tags(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + tag_dict = {'a key': 'some value', + 'another key': 'some other value', + 'blank key': ''} + + with assert_raises(EC2ResponseError) as ex: + conn.create_tags(instance.id, tag_dict, dry_run=True) + ex.exception.error_code.should.equal('DryRunOperation') + ex.exception.status.should.equal(400) + ex.exception.message.should.equal( + 'An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set') + + conn.create_tags(instance.id, tag_dict) + tags = conn.get_all_tags() + set([key for key in tag_dict]).should.equal( + set([tag.name for tag in tags])) + set([tag_dict[key] for key in tag_dict]).should.equal( + set([tag.value for tag in tags])) + + +@mock_ec2_deprecated +def test_tag_limit_exceeded(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + tag_dict = {} + for i in range(51): + tag_dict['{0:02d}'.format(i + 1)] = '' + + with assert_raises(EC2ResponseError) as cm: + conn.create_tags(instance.id, tag_dict) + cm.exception.code.should.equal('TagLimitExceeded') + cm.exception.status.should.equal(400) + 
cm.exception.request_id.should_not.be.none + + instance.add_tag("a key", "a value") + with assert_raises(EC2ResponseError) as cm: + conn.create_tags(instance.id, tag_dict) + cm.exception.code.should.equal('TagLimitExceeded') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + tags = conn.get_all_tags() + tag = tags[0] + tags.should.have.length_of(1) + tag.name.should.equal("a key") + tag.value.should.equal("a value") + + +@mock_ec2_deprecated +def test_invalid_parameter_tag_null(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + + with assert_raises(EC2ResponseError) as cm: + instance.add_tag("a key", None) + cm.exception.code.should.equal('InvalidParameterValue') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@mock_ec2_deprecated +def test_invalid_id(): + conn = boto.connect_ec2('the_key', 'the_secret') + with assert_raises(EC2ResponseError) as cm: + conn.create_tags('ami-blah', {'key': 'tag'}) + cm.exception.code.should.equal('InvalidID') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + with assert_raises(EC2ResponseError) as cm: + conn.create_tags('blah-blah', {'key': 'tag'}) + cm.exception.code.should.equal('InvalidID') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@mock_ec2_deprecated +def test_get_all_tags_resource_id_filter(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + instance.add_tag("an instance key", "some value") + image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") + image = conn.get_image(image_id) + image.add_tag("an image key", "some value") + + tags = conn.get_all_tags(filters={'resource-id': instance.id}) + tag = tags[0] + tags.should.have.length_of(1) + 
tag.res_id.should.equal(instance.id) + tag.res_type.should.equal('instance') + tag.name.should.equal("an instance key") + tag.value.should.equal("some value") + + tags = conn.get_all_tags(filters={'resource-id': image_id}) + tag = tags[0] + tags.should.have.length_of(1) + tag.res_id.should.equal(image_id) + tag.res_type.should.equal('image') + tag.name.should.equal("an image key") + tag.value.should.equal("some value") + + +@mock_ec2_deprecated +def test_get_all_tags_resource_type_filter(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + instance.add_tag("an instance key", "some value") + image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") + image = conn.get_image(image_id) + image.add_tag("an image key", "some value") + + tags = conn.get_all_tags(filters={'resource-type': 'instance'}) + tag = tags[0] + tags.should.have.length_of(1) + tag.res_id.should.equal(instance.id) + tag.res_type.should.equal('instance') + tag.name.should.equal("an instance key") + tag.value.should.equal("some value") + + tags = conn.get_all_tags(filters={'resource-type': 'image'}) + tag = tags[0] + tags.should.have.length_of(1) + tag.res_id.should.equal(image_id) + tag.res_type.should.equal('image') + tag.name.should.equal("an image key") + tag.value.should.equal("some value") + + +@mock_ec2_deprecated +def test_get_all_tags_key_filter(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + instance.add_tag("an instance key", "some value") + image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") + image = conn.get_image(image_id) + image.add_tag("an image key", "some value") + + tags = conn.get_all_tags(filters={'key': 'an instance key'}) + tag = tags[0] + tags.should.have.length_of(1) + tag.res_id.should.equal(instance.id) + tag.res_type.should.equal('instance') + 
tag.name.should.equal("an instance key") + tag.value.should.equal("some value") + + +@mock_ec2_deprecated +def test_get_all_tags_value_filter(): + conn = boto.connect_ec2('the_key', 'the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance = reservation.instances[0] + instance.add_tag("an instance key", "some value") + reservation_b = conn.run_instances('ami-1234abcd') + instance_b = reservation_b.instances[0] + instance_b.add_tag("an instance key", "some other value") + reservation_c = conn.run_instances('ami-1234abcd') + instance_c = reservation_c.instances[0] + instance_c.add_tag("an instance key", "other value*") + reservation_d = conn.run_instances('ami-1234abcd') + instance_d = reservation_d.instances[0] + instance_d.add_tag("an instance key", "other value**") + reservation_e = conn.run_instances('ami-1234abcd') + instance_e = reservation_e.instances[0] + instance_e.add_tag("an instance key", "other value*?") + image_id = conn.create_image(instance.id, "test-ami", "this is a test ami") + image = conn.get_image(image_id) + image.add_tag("an image key", "some value") + + tags = conn.get_all_tags(filters={'value': 'some value'}) + tags.should.have.length_of(2) + + tags = conn.get_all_tags(filters={'value': 'some*value'}) + tags.should.have.length_of(3) + + tags = conn.get_all_tags(filters={'value': '*some*value'}) + tags.should.have.length_of(3) + + tags = conn.get_all_tags(filters={'value': '*some*value*'}) + tags.should.have.length_of(3) + + tags = conn.get_all_tags(filters={'value': '*value\*'}) + tags.should.have.length_of(1) + + tags = conn.get_all_tags(filters={'value': '*value\*\*'}) + tags.should.have.length_of(1) + + tags = conn.get_all_tags(filters={'value': '*value\*\?'}) + tags.should.have.length_of(1) + + +@mock_ec2_deprecated +def test_retrieved_instances_must_contain_their_tags(): + tag_key = 'Tag name' + tag_value = 'Tag value' + tags_to_be_set = {tag_key: tag_value} + + conn = boto.connect_ec2('the_key', 'the_secret') + 
reservation = conn.run_instances('ami-1234abcd') + reservation.should.be.a(Reservation) + reservation.instances.should.have.length_of(1) + instance = reservation.instances[0] + + reservations = conn.get_all_instances() + reservations.should.have.length_of(1) + reservations[0].id.should.equal(reservation.id) + instances = reservations[0].instances + instances.should.have.length_of(1) + instances[0].id.should.equal(instance.id) + + conn.create_tags([instance.id], tags_to_be_set) + reservations = conn.get_all_instances() + instance = reservations[0].instances[0] + retrieved_tags = instance.tags + + # Cleanup of instance + conn.terminate_instances([instances[0].id]) + + # Check whether tag is present with correct value + retrieved_tags[tag_key].should.equal(tag_value) + + +@mock_ec2_deprecated +def test_retrieved_volumes_must_contain_their_tags(): + tag_key = 'Tag name' + tag_value = 'Tag value' + tags_to_be_set = {tag_key: tag_value} + conn = boto.connect_ec2('the_key', 'the_secret') + volume = conn.create_volume(80, "us-east-1a") + + all_volumes = conn.get_all_volumes() + volume = all_volumes[0] + conn.create_tags([volume.id], tags_to_be_set) + + # Fetch the volume again + all_volumes = conn.get_all_volumes() + volume = all_volumes[0] + retrieved_tags = volume.tags + + volume.delete() + + # Check whether tag is present with correct value + retrieved_tags[tag_key].should.equal(tag_value) + + +@mock_ec2_deprecated +def test_retrieved_snapshots_must_contain_their_tags(): + tag_key = 'Tag name' + tag_value = 'Tag value' + tags_to_be_set = {tag_key: tag_value} + conn = boto.connect_ec2(aws_access_key_id='the_key', + aws_secret_access_key='the_secret') + volume = conn.create_volume(80, "eu-west-1a") + snapshot = conn.create_snapshot(volume.id) + conn.create_tags([snapshot.id], tags_to_be_set) + + # Fetch the snapshot again + all_snapshots = conn.get_all_snapshots() + snapshot = [item for item in all_snapshots if item.id == snapshot.id][0] + retrieved_tags = snapshot.tags + 
+ conn.delete_snapshot(snapshot.id) + volume.delete() + + # Check whether tag is present with correct value + retrieved_tags[tag_key].should.equal(tag_value) + + +@mock_ec2_deprecated +def test_filter_instances_by_wildcard_tags(): + conn = boto.connect_ec2(aws_access_key_id='the_key', + aws_secret_access_key='the_secret') + reservation = conn.run_instances('ami-1234abcd') + instance_a = reservation.instances[0] + instance_a.add_tag("Key1", "Value1") + reservation_b = conn.run_instances('ami-1234abcd') + instance_b = reservation_b.instances[0] + instance_b.add_tag("Key1", "Value2") + + reservations = conn.get_all_instances(filters={'tag:Key1': 'Value*'}) + reservations.should.have.length_of(2) + + reservations = conn.get_all_instances(filters={'tag-key': 'Key*'}) + reservations.should.have.length_of(2) + + reservations = conn.get_all_instances(filters={'tag-value': 'Value*'}) + reservations.should.have.length_of(2) + + +@mock_ec2 +def test_create_volume_with_tags(): + client = boto3.client('ec2', 'us-west-2') + response = client.create_volume( + AvailabilityZone='us-west-2', + Encrypted=False, + Size=40, + TagSpecifications=[ + { + 'ResourceType': 'volume', + 'Tags': [ + { + 'Key': 'TEST_TAG', + 'Value': 'TEST_VALUE' + } + ], + } + ] + ) + + assert response['Tags'][0]['Key'] == 'TEST_TAG' + + +@mock_ec2 +def test_create_snapshot_with_tags(): + client = boto3.client('ec2', 'us-west-2') + volume_id = client.create_volume( + AvailabilityZone='us-west-2', + Encrypted=False, + Size=40, + TagSpecifications=[ + { + 'ResourceType': 'volume', + 'Tags': [ + { + 'Key': 'TEST_TAG', + 'Value': 'TEST_VALUE' + } + ], + } + ] + )['VolumeId'] + snapshot = client.create_snapshot( + VolumeId=volume_id, + TagSpecifications=[ + { + 'ResourceType': 'snapshot', + 'Tags': [ + { + 'Key': 'TEST_SNAPSHOT_TAG', + 'Value': 'TEST_SNAPSHOT_VALUE' + } + ], + } + ] + ) + + expected_tags = [{ + 'Key': 'TEST_SNAPSHOT_TAG', + 'Value': 'TEST_SNAPSHOT_VALUE' + }] + + assert snapshot['Tags'] == 
expected_tags + + +@mock_ec2 +def test_create_tag_empty_resource(): + # create ec2 client in us-west-1 + client = boto3.client('ec2', region_name='us-west-1') + # create tag with empty resource + with assert_raises(ClientError) as ex: + client.create_tags( + Resources=[], + Tags=[{'Key': 'Value'}] + ) + ex.exception.response['Error']['Code'].should.equal('MissingParameter') + ex.exception.response['Error']['Message'].should.equal('The request must contain the parameter resourceIdSet') + + +@mock_ec2 +def test_delete_tag_empty_resource(): + # create ec2 client in us-west-1 + client = boto3.client('ec2', region_name='us-west-1') + # delete tag with empty resource + with assert_raises(ClientError) as ex: + client.delete_tags( + Resources=[], + Tags=[{'Key': 'Value'}] + ) + ex.exception.response['Error']['Code'].should.equal('MissingParameter') + ex.exception.response['Error']['Message'].should.equal('The request must contain the parameter resourceIdSet') diff --git a/tests/test_ec2/test_vpc_peering.py b/tests/test_ec2/test_vpc_peering.py index 4aab5f041..082499a72 100644 --- a/tests/test_ec2/test_vpc_peering.py +++ b/tests/test_ec2/test_vpc_peering.py @@ -1,132 +1,133 @@ -from __future__ import unicode_literals -# Ensure 'assert_raises' context manager support for Python 2.6 -import tests.backport_assert_raises -from nose.tools import assert_raises -from moto.ec2.exceptions import EC2ClientError -from botocore.exceptions import ClientError - -import boto3 -import boto -from boto.exception import EC2ResponseError -import sure # noqa - -from moto import mock_ec2, mock_ec2_deprecated -from tests.helpers import requires_boto_gte - - -@requires_boto_gte("2.32.0") -@mock_ec2_deprecated -def test_vpc_peering_connections(): - conn = boto.connect_vpc('the_key', 'the_secret') - vpc = conn.create_vpc("10.0.0.0/16") - peer_vpc = conn.create_vpc("11.0.0.0/16") - - vpc_pcx = conn.create_vpc_peering_connection(vpc.id, peer_vpc.id) - 
vpc_pcx._status.code.should.equal('initiating-request') - - return vpc_pcx - - -@requires_boto_gte("2.32.0") -@mock_ec2_deprecated -def test_vpc_peering_connections_get_all(): - conn = boto.connect_vpc('the_key', 'the_secret') - vpc_pcx = test_vpc_peering_connections() - vpc_pcx._status.code.should.equal('initiating-request') - - all_vpc_pcxs = conn.get_all_vpc_peering_connections() - all_vpc_pcxs.should.have.length_of(1) - all_vpc_pcxs[0]._status.code.should.equal('pending-acceptance') - - -@requires_boto_gte("2.32.0") -@mock_ec2_deprecated -def test_vpc_peering_connections_accept(): - conn = boto.connect_vpc('the_key', 'the_secret') - vpc_pcx = test_vpc_peering_connections() - - vpc_pcx = conn.accept_vpc_peering_connection(vpc_pcx.id) - vpc_pcx._status.code.should.equal('active') - - with assert_raises(EC2ResponseError) as cm: - conn.reject_vpc_peering_connection(vpc_pcx.id) - cm.exception.code.should.equal('InvalidStateTransition') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - all_vpc_pcxs = conn.get_all_vpc_peering_connections() - all_vpc_pcxs.should.have.length_of(1) - all_vpc_pcxs[0]._status.code.should.equal('active') - - -@requires_boto_gte("2.32.0") -@mock_ec2_deprecated -def test_vpc_peering_connections_reject(): - conn = boto.connect_vpc('the_key', 'the_secret') - vpc_pcx = test_vpc_peering_connections() - - verdict = conn.reject_vpc_peering_connection(vpc_pcx.id) - verdict.should.equal(True) - - with assert_raises(EC2ResponseError) as cm: - conn.accept_vpc_peering_connection(vpc_pcx.id) - cm.exception.code.should.equal('InvalidStateTransition') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - all_vpc_pcxs = conn.get_all_vpc_peering_connections() - all_vpc_pcxs.should.have.length_of(1) - all_vpc_pcxs[0]._status.code.should.equal('rejected') - - -@requires_boto_gte("2.32.1") -@mock_ec2_deprecated -def test_vpc_peering_connections_delete(): - conn = 
boto.connect_vpc('the_key', 'the_secret') - vpc_pcx = test_vpc_peering_connections() - - verdict = vpc_pcx.delete() - verdict.should.equal(True) - - all_vpc_pcxs = conn.get_all_vpc_peering_connections() - all_vpc_pcxs.should.have.length_of(0) - - with assert_raises(EC2ResponseError) as cm: - conn.delete_vpc_peering_connection("pcx-1234abcd") - cm.exception.code.should.equal('InvalidVpcPeeringConnectionId.NotFound') - cm.exception.status.should.equal(400) - cm.exception.request_id.should_not.be.none - - -@mock_ec2 -def test_vpc_peering_connections_cross_region(): - # create vpc in us-west-1 and ap-northeast-1 - ec2_usw1 = boto3.resource('ec2', region_name='us-west-1') - vpc_usw1 = ec2_usw1.create_vpc(CidrBlock='10.90.0.0/16') - ec2_apn1 = boto3.resource('ec2', region_name='ap-northeast-1') - vpc_apn1 = ec2_apn1.create_vpc(CidrBlock='10.20.0.0/16') - # create peering - vpc_pcx = ec2_usw1.create_vpc_peering_connection( - VpcId=vpc_usw1.id, - PeerVpcId=vpc_apn1.id, - PeerRegion='ap-northeast-1', - ) - vpc_pcx.status['Code'].should.equal('initiating-request') - vpc_pcx.requester_vpc.id.should.equal(vpc_usw1.id) - vpc_pcx.accepter_vpc.id.should.equal(vpc_apn1.id) - - -@mock_ec2 -def test_vpc_peering_connections_cross_region_fail(): - # create vpc in us-west-1 and ap-northeast-1 - ec2_usw1 = boto3.resource('ec2', region_name='us-west-1') - vpc_usw1 = ec2_usw1.create_vpc(CidrBlock='10.90.0.0/16') - ec2_apn1 = boto3.resource('ec2', region_name='ap-northeast-1') - vpc_apn1 = ec2_apn1.create_vpc(CidrBlock='10.20.0.0/16') - # create peering wrong region with no vpc - with assert_raises(ClientError) as cm: - ec2_usw1.create_vpc_peering_connection( - VpcId=vpc_usw1.id, - PeerVpcId=vpc_apn1.id, - PeerRegion='ap-northeast-2') - cm.exception.response['Error']['Code'].should.equal('InvalidVpcID.NotFound') +from __future__ import unicode_literals +# Ensure 'assert_raises' context manager support for Python 2.6 +import tests.backport_assert_raises +from nose.tools import assert_raises 
+from moto.ec2.exceptions import EC2ClientError +from botocore.exceptions import ClientError + +import boto3 +import boto +from boto.exception import EC2ResponseError +import sure # noqa + +from moto import mock_ec2, mock_ec2_deprecated +from tests.helpers import requires_boto_gte + + +@requires_boto_gte("2.32.0") +@mock_ec2_deprecated +def test_vpc_peering_connections(): + conn = boto.connect_vpc('the_key', 'the_secret') + vpc = conn.create_vpc("10.0.0.0/16") + peer_vpc = conn.create_vpc("11.0.0.0/16") + + vpc_pcx = conn.create_vpc_peering_connection(vpc.id, peer_vpc.id) + vpc_pcx._status.code.should.equal('initiating-request') + + return vpc_pcx + + +@requires_boto_gte("2.32.0") +@mock_ec2_deprecated +def test_vpc_peering_connections_get_all(): + conn = boto.connect_vpc('the_key', 'the_secret') + vpc_pcx = test_vpc_peering_connections() + vpc_pcx._status.code.should.equal('initiating-request') + + all_vpc_pcxs = conn.get_all_vpc_peering_connections() + all_vpc_pcxs.should.have.length_of(1) + all_vpc_pcxs[0]._status.code.should.equal('pending-acceptance') + + +@requires_boto_gte("2.32.0") +@mock_ec2_deprecated +def test_vpc_peering_connections_accept(): + conn = boto.connect_vpc('the_key', 'the_secret') + vpc_pcx = test_vpc_peering_connections() + + vpc_pcx = conn.accept_vpc_peering_connection(vpc_pcx.id) + vpc_pcx._status.code.should.equal('active') + + with assert_raises(EC2ResponseError) as cm: + conn.reject_vpc_peering_connection(vpc_pcx.id) + cm.exception.code.should.equal('InvalidStateTransition') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + all_vpc_pcxs = conn.get_all_vpc_peering_connections() + all_vpc_pcxs.should.have.length_of(1) + all_vpc_pcxs[0]._status.code.should.equal('active') + + +@requires_boto_gte("2.32.0") +@mock_ec2_deprecated +def test_vpc_peering_connections_reject(): + conn = boto.connect_vpc('the_key', 'the_secret') + vpc_pcx = test_vpc_peering_connections() + + verdict = 
conn.reject_vpc_peering_connection(vpc_pcx.id) + verdict.should.equal(True) + + with assert_raises(EC2ResponseError) as cm: + conn.accept_vpc_peering_connection(vpc_pcx.id) + cm.exception.code.should.equal('InvalidStateTransition') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + all_vpc_pcxs = conn.get_all_vpc_peering_connections() + all_vpc_pcxs.should.have.length_of(1) + all_vpc_pcxs[0]._status.code.should.equal('rejected') + + +@requires_boto_gte("2.32.1") +@mock_ec2_deprecated +def test_vpc_peering_connections_delete(): + conn = boto.connect_vpc('the_key', 'the_secret') + vpc_pcx = test_vpc_peering_connections() + + verdict = vpc_pcx.delete() + verdict.should.equal(True) + + all_vpc_pcxs = conn.get_all_vpc_peering_connections() + all_vpc_pcxs.should.have.length_of(1) + all_vpc_pcxs[0]._status.code.should.equal('deleted') + + with assert_raises(EC2ResponseError) as cm: + conn.delete_vpc_peering_connection("pcx-1234abcd") + cm.exception.code.should.equal('InvalidVpcPeeringConnectionId.NotFound') + cm.exception.status.should.equal(400) + cm.exception.request_id.should_not.be.none + + +@mock_ec2 +def test_vpc_peering_connections_cross_region(): + # create vpc in us-west-1 and ap-northeast-1 + ec2_usw1 = boto3.resource('ec2', region_name='us-west-1') + vpc_usw1 = ec2_usw1.create_vpc(CidrBlock='10.90.0.0/16') + ec2_apn1 = boto3.resource('ec2', region_name='ap-northeast-1') + vpc_apn1 = ec2_apn1.create_vpc(CidrBlock='10.20.0.0/16') + # create peering + vpc_pcx = ec2_usw1.create_vpc_peering_connection( + VpcId=vpc_usw1.id, + PeerVpcId=vpc_apn1.id, + PeerRegion='ap-northeast-1', + ) + vpc_pcx.status['Code'].should.equal('initiating-request') + vpc_pcx.requester_vpc.id.should.equal(vpc_usw1.id) + vpc_pcx.accepter_vpc.id.should.equal(vpc_apn1.id) + + +@mock_ec2 +def test_vpc_peering_connections_cross_region_fail(): + # create vpc in us-west-1 and ap-northeast-1 + ec2_usw1 = boto3.resource('ec2', region_name='us-west-1') + vpc_usw1 
= ec2_usw1.create_vpc(CidrBlock='10.90.0.0/16') + ec2_apn1 = boto3.resource('ec2', region_name='ap-northeast-1') + vpc_apn1 = ec2_apn1.create_vpc(CidrBlock='10.20.0.0/16') + # create peering wrong region with no vpc + with assert_raises(ClientError) as cm: + ec2_usw1.create_vpc_peering_connection( + VpcId=vpc_usw1.id, + PeerVpcId=vpc_apn1.id, + PeerRegion='ap-northeast-2') + cm.exception.response['Error']['Code'].should.equal('InvalidVpcID.NotFound') diff --git a/tests/test_ecs/test_ecs_boto3.py b/tests/test_ecs/test_ecs_boto3.py index a0e8318da..3bf25b8fc 100644 --- a/tests/test_ecs/test_ecs_boto3.py +++ b/tests/test_ecs/test_ecs_boto3.py @@ -47,6 +47,15 @@ def test_list_clusters(): 'arn:aws:ecs:us-east-1:012345678910:cluster/test_cluster1') +@mock_ecs +def test_describe_clusters(): + client = boto3.client('ecs', region_name='us-east-1') + response = client.describe_clusters(clusters=["some-cluster"]) + response['failures'].should.contain({ + 'arn': 'arn:aws:ecs:us-east-1:012345678910:cluster/some-cluster', + 'reason': 'MISSING' + }) + @mock_ecs def test_delete_cluster(): client = boto3.client('ecs', region_name='us-east-1') @@ -925,6 +934,65 @@ def test_update_container_instances_state(): status='test_status').should.throw(Exception) +@mock_ec2 +@mock_ecs +def test_update_container_instances_state_by_arn(): + ecs_client = boto3.client('ecs', region_name='us-east-1') + ec2 = boto3.resource('ec2', region_name='us-east-1') + + test_cluster_name = 'test_ecs_cluster' + _ = ecs_client.create_cluster( + clusterName=test_cluster_name + ) + + instance_to_create = 3 + test_instance_arns = [] + for i in range(0, instance_to_create): + test_instance = ec2.create_instances( + ImageId="ami-1234abcd", + MinCount=1, + MaxCount=1, + )[0] + + instance_id_document = json.dumps( + ec2_utils.generate_instance_identity_document(test_instance) + ) + + response = ecs_client.register_container_instance( + cluster=test_cluster_name, + instanceIdentityDocument=instance_id_document) + + 
test_instance_arns.append(response['containerInstance']['containerInstanceArn']) + + response = ecs_client.update_container_instances_state(cluster=test_cluster_name, + containerInstances=test_instance_arns, + status='DRAINING') + len(response['failures']).should.equal(0) + len(response['containerInstances']).should.equal(instance_to_create) + response_statuses = [ci['status'] for ci in response['containerInstances']] + for status in response_statuses: + status.should.equal('DRAINING') + response = ecs_client.update_container_instances_state(cluster=test_cluster_name, + containerInstances=test_instance_arns, + status='DRAINING') + len(response['failures']).should.equal(0) + len(response['containerInstances']).should.equal(instance_to_create) + response_statuses = [ci['status'] for ci in response['containerInstances']] + for status in response_statuses: + status.should.equal('DRAINING') + response = ecs_client.update_container_instances_state(cluster=test_cluster_name, + containerInstances=test_instance_arns, + status='ACTIVE') + len(response['failures']).should.equal(0) + len(response['containerInstances']).should.equal(instance_to_create) + response_statuses = [ci['status'] for ci in response['containerInstances']] + for status in response_statuses: + status.should.equal('ACTIVE') + ecs_client.update_container_instances_state.when.called_with(cluster=test_cluster_name, + containerInstances=test_instance_arns, + status='test_status').should.throw(Exception) + + @mock_ec2 @mock_ecs def test_run_task(): diff --git a/tests/test_events/test_events.py b/tests/test_events/test_events.py index d459af533..a9d90ec32 100644 --- a/tests/test_events/test_events.py +++ b/tests/test_events/test_events.py @@ -1,211 +1,216 @@ -import random - -import boto3 -import json - -from moto.events import mock_events -from botocore.exceptions import ClientError -from nose.tools import assert_raises - - -RULES = [ - {'Name': 'test1', 'ScheduleExpression': 'rate(5 minutes)'}, - {'Name': 
'test2', 'ScheduleExpression': 'rate(1 minute)'}, - {'Name': 'test3', 'EventPattern': '{"source": ["test-source"]}'} -] - -TARGETS = { - 'test-target-1': { - 'Id': 'test-target-1', - 'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-1', - 'Rules': ['test1', 'test2'] - }, - 'test-target-2': { - 'Id': 'test-target-2', - 'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-2', - 'Rules': ['test1', 'test3'] - }, - 'test-target-3': { - 'Id': 'test-target-3', - 'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-3', - 'Rules': ['test1', 'test2'] - }, - 'test-target-4': { - 'Id': 'test-target-4', - 'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-4', - 'Rules': ['test1', 'test3'] - }, - 'test-target-5': { - 'Id': 'test-target-5', - 'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-5', - 'Rules': ['test1', 'test2'] - }, - 'test-target-6': { - 'Id': 'test-target-6', - 'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-6', - 'Rules': ['test1', 'test3'] - } -} - - -def get_random_rule(): - return RULES[random.randint(0, len(RULES) - 1)] - - -def generate_environment(): - client = boto3.client('events', 'us-west-2') - - for rule in RULES: - client.put_rule( - Name=rule['Name'], - ScheduleExpression=rule.get('ScheduleExpression', ''), - EventPattern=rule.get('EventPattern', '') - ) - - targets = [] - for target in TARGETS: - if rule['Name'] in TARGETS[target].get('Rules'): - targets.append({'Id': target, 'Arn': TARGETS[target]['Arn']}) - - client.put_targets(Rule=rule['Name'], Targets=targets) - - return client - - -@mock_events -def test_list_rules(): - client = generate_environment() - response = client.list_rules() - - assert(response is not None) - assert(len(response['Rules']) > 0) - - -@mock_events -def test_describe_rule(): - rule_name = get_random_rule()['Name'] - client = generate_environment() - response = client.describe_rule(Name=rule_name) - - 
assert(response is not None) - assert(response.get('Name') == rule_name) - assert(response.get('Arn') is not None) - - -@mock_events -def test_enable_disable_rule(): - rule_name = get_random_rule()['Name'] - client = generate_environment() - - # Rules should start out enabled in these tests. - rule = client.describe_rule(Name=rule_name) - assert(rule['State'] == 'ENABLED') - - client.disable_rule(Name=rule_name) - rule = client.describe_rule(Name=rule_name) - assert(rule['State'] == 'DISABLED') - - client.enable_rule(Name=rule_name) - rule = client.describe_rule(Name=rule_name) - assert(rule['State'] == 'ENABLED') - - -@mock_events -def test_list_rule_names_by_target(): - test_1_target = TARGETS['test-target-1'] - test_2_target = TARGETS['test-target-2'] - client = generate_environment() - - rules = client.list_rule_names_by_target(TargetArn=test_1_target['Arn']) - assert(len(rules['RuleNames']) == len(test_1_target['Rules'])) - for rule in rules['RuleNames']: - assert(rule in test_1_target['Rules']) - - rules = client.list_rule_names_by_target(TargetArn=test_2_target['Arn']) - assert(len(rules['RuleNames']) == len(test_2_target['Rules'])) - for rule in rules['RuleNames']: - assert(rule in test_2_target['Rules']) - - -@mock_events -def test_list_rules(): - client = generate_environment() - - rules = client.list_rules() - assert(len(rules['Rules']) == len(RULES)) - - -@mock_events -def test_delete_rule(): - client = generate_environment() - - client.delete_rule(Name=RULES[0]['Name']) - rules = client.list_rules() - assert(len(rules['Rules']) == len(RULES) - 1) - - -@mock_events -def test_list_targets_by_rule(): - rule_name = get_random_rule()['Name'] - client = generate_environment() - targets = client.list_targets_by_rule(Rule=rule_name) - - expected_targets = [] - for target in TARGETS: - if rule_name in TARGETS[target].get('Rules'): - expected_targets.append(target) - - assert(len(targets['Targets']) == len(expected_targets)) - - -@mock_events -def 
test_remove_targets(): - rule_name = get_random_rule()['Name'] - client = generate_environment() - - targets = client.list_targets_by_rule(Rule=rule_name)['Targets'] - targets_before = len(targets) - assert(targets_before > 0) - - client.remove_targets(Rule=rule_name, Ids=[targets[0]['Id']]) - - targets = client.list_targets_by_rule(Rule=rule_name)['Targets'] - targets_after = len(targets) - assert(targets_before - 1 == targets_after) - - -@mock_events -def test_permissions(): - client = boto3.client('events', 'eu-central-1') - - client.put_permission(Action='events:PutEvents', Principal='111111111111', StatementId='Account1') - client.put_permission(Action='events:PutEvents', Principal='222222222222', StatementId='Account2') - - resp = client.describe_event_bus() - resp_policy = json.loads(resp['Policy']) - assert len(resp_policy['Statement']) == 2 - - client.remove_permission(StatementId='Account2') - - resp = client.describe_event_bus() - resp_policy = json.loads(resp['Policy']) - assert len(resp_policy['Statement']) == 1 - assert resp_policy['Statement'][0]['Sid'] == 'Account1' - - -@mock_events -def test_put_events(): - client = boto3.client('events', 'eu-central-1') - - event = { - "Source": "com.mycompany.myapp", - "Detail": '{"key1": "value3", "key2": "value4"}', - "Resources": ["resource1", "resource2"], - "DetailType": "myDetailType" - } - - client.put_events(Entries=[event]) - # Boto3 would error if it didn't return 200 OK - - with assert_raises(ClientError): - client.put_events(Entries=[event]*20) +import random +import boto3 +import json + +from moto.events import mock_events +from botocore.exceptions import ClientError +from nose.tools import assert_raises + +RULES = [ + {'Name': 'test1', 'ScheduleExpression': 'rate(5 minutes)'}, + {'Name': 'test2', 'ScheduleExpression': 'rate(1 minute)'}, + {'Name': 'test3', 'EventPattern': '{"source": ["test-source"]}'} +] + +TARGETS = { + 'test-target-1': { + 'Id': 'test-target-1', + 'Arn': 
'arn:aws:lambda:us-west-2:111111111111:function:test-function-1', + 'Rules': ['test1', 'test2'] + }, + 'test-target-2': { + 'Id': 'test-target-2', + 'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-2', + 'Rules': ['test1', 'test3'] + }, + 'test-target-3': { + 'Id': 'test-target-3', + 'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-3', + 'Rules': ['test1', 'test2'] + }, + 'test-target-4': { + 'Id': 'test-target-4', + 'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-4', + 'Rules': ['test1', 'test3'] + }, + 'test-target-5': { + 'Id': 'test-target-5', + 'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-5', + 'Rules': ['test1', 'test2'] + }, + 'test-target-6': { + 'Id': 'test-target-6', + 'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-6', + 'Rules': ['test1', 'test3'] + } +} + + +def get_random_rule(): + return RULES[random.randint(0, len(RULES) - 1)] + + +def generate_environment(): + client = boto3.client('events', 'us-west-2') + + for rule in RULES: + client.put_rule( + Name=rule['Name'], + ScheduleExpression=rule.get('ScheduleExpression', ''), + EventPattern=rule.get('EventPattern', '') + ) + + targets = [] + for target in TARGETS: + if rule['Name'] in TARGETS[target].get('Rules'): + targets.append({'Id': target, 'Arn': TARGETS[target]['Arn']}) + + client.put_targets(Rule=rule['Name'], Targets=targets) + + return client + + +@mock_events +def test_list_rules(): + client = generate_environment() + response = client.list_rules() + + assert(response is not None) + assert(len(response['Rules']) > 0) + + +@mock_events +def test_describe_rule(): + rule_name = get_random_rule()['Name'] + client = generate_environment() + response = client.describe_rule(Name=rule_name) + + assert(response is not None) + assert(response.get('Name') == rule_name) + assert(response.get('Arn') is not None) + + +@mock_events +def test_enable_disable_rule(): + rule_name = 
get_random_rule()['Name'] + client = generate_environment() + + # Rules should start out enabled in these tests. + rule = client.describe_rule(Name=rule_name) + assert(rule['State'] == 'ENABLED') + + client.disable_rule(Name=rule_name) + rule = client.describe_rule(Name=rule_name) + assert(rule['State'] == 'DISABLED') + + client.enable_rule(Name=rule_name) + rule = client.describe_rule(Name=rule_name) + assert(rule['State'] == 'ENABLED') + + # Test invalid name + try: + client.enable_rule(Name='junk') + + except ClientError as ce: + assert ce.response['Error']['Code'] == 'ResourceNotFoundException' + + +@mock_events +def test_list_rule_names_by_target(): + test_1_target = TARGETS['test-target-1'] + test_2_target = TARGETS['test-target-2'] + client = generate_environment() + + rules = client.list_rule_names_by_target(TargetArn=test_1_target['Arn']) + assert(len(rules['RuleNames']) == len(test_1_target['Rules'])) + for rule in rules['RuleNames']: + assert(rule in test_1_target['Rules']) + + rules = client.list_rule_names_by_target(TargetArn=test_2_target['Arn']) + assert(len(rules['RuleNames']) == len(test_2_target['Rules'])) + for rule in rules['RuleNames']: + assert(rule in test_2_target['Rules']) + + +@mock_events +def test_list_rules(): + client = generate_environment() + + rules = client.list_rules() + assert(len(rules['Rules']) == len(RULES)) + + +@mock_events +def test_delete_rule(): + client = generate_environment() + + client.delete_rule(Name=RULES[0]['Name']) + rules = client.list_rules() + assert(len(rules['Rules']) == len(RULES) - 1) + + +@mock_events +def test_list_targets_by_rule(): + rule_name = get_random_rule()['Name'] + client = generate_environment() + targets = client.list_targets_by_rule(Rule=rule_name) + + expected_targets = [] + for target in TARGETS: + if rule_name in TARGETS[target].get('Rules'): + expected_targets.append(target) + + assert(len(targets['Targets']) == len(expected_targets)) + + +@mock_events +def test_remove_targets(): + 
rule_name = get_random_rule()['Name'] + client = generate_environment() + + targets = client.list_targets_by_rule(Rule=rule_name)['Targets'] + targets_before = len(targets) + assert(targets_before > 0) + + client.remove_targets(Rule=rule_name, Ids=[targets[0]['Id']]) + + targets = client.list_targets_by_rule(Rule=rule_name)['Targets'] + targets_after = len(targets) + assert(targets_before - 1 == targets_after) + + +@mock_events +def test_permissions(): + client = boto3.client('events', 'eu-central-1') + + client.put_permission(Action='events:PutEvents', Principal='111111111111', StatementId='Account1') + client.put_permission(Action='events:PutEvents', Principal='222222222222', StatementId='Account2') + + resp = client.describe_event_bus() + resp_policy = json.loads(resp['Policy']) + assert len(resp_policy['Statement']) == 2 + + client.remove_permission(StatementId='Account2') + + resp = client.describe_event_bus() + resp_policy = json.loads(resp['Policy']) + assert len(resp_policy['Statement']) == 1 + assert resp_policy['Statement'][0]['Sid'] == 'Account1' + + +@mock_events +def test_put_events(): + client = boto3.client('events', 'eu-central-1') + + event = { + "Source": "com.mycompany.myapp", + "Detail": '{"key1": "value3", "key2": "value4"}', + "Resources": ["resource1", "resource2"], + "DetailType": "myDetailType" + } + + client.put_events(Entries=[event]) + # Boto3 would error if it didn't return 200 OK + + with assert_raises(ClientError): + client.put_events(Entries=[event]*20) diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index 1db4dae1e..1cd6f9e62 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -1,760 +1,1295 @@ -from __future__ import unicode_literals -import base64 - -import boto -import boto3 -import sure # noqa -from boto.exception import BotoServerError -from botocore.exceptions import ClientError -from moto import mock_iam, mock_iam_deprecated -from moto.iam.models import aws_managed_policies -from 
nose.tools import assert_raises, assert_equals -from nose.tools import raises - -from tests.helpers import requires_boto_gte - - -@mock_iam_deprecated() -def test_get_all_server_certs(): - conn = boto.connect_iam() - - conn.upload_server_cert("certname", "certbody", "privatekey") - certs = conn.get_all_server_certs()['list_server_certificates_response'][ - 'list_server_certificates_result']['server_certificate_metadata_list'] - certs.should.have.length_of(1) - cert1 = certs[0] - cert1.server_certificate_name.should.equal("certname") - cert1.arn.should.equal( - "arn:aws:iam::123456789012:server-certificate/certname") - - -@mock_iam_deprecated() -def test_get_server_cert_doesnt_exist(): - conn = boto.connect_iam() - - with assert_raises(BotoServerError): - conn.get_server_certificate("NonExistant") - - -@mock_iam_deprecated() -def test_get_server_cert(): - conn = boto.connect_iam() - - conn.upload_server_cert("certname", "certbody", "privatekey") - cert = conn.get_server_certificate("certname") - cert.server_certificate_name.should.equal("certname") - cert.arn.should.equal( - "arn:aws:iam::123456789012:server-certificate/certname") - - -@mock_iam_deprecated() -def test_upload_server_cert(): - conn = boto.connect_iam() - - conn.upload_server_cert("certname", "certbody", "privatekey") - cert = conn.get_server_certificate("certname") - cert.server_certificate_name.should.equal("certname") - cert.arn.should.equal( - "arn:aws:iam::123456789012:server-certificate/certname") - - -@mock_iam_deprecated() -def test_delete_server_cert(): - conn = boto.connect_iam() - - conn.upload_server_cert("certname", "certbody", "privatekey") - conn.get_server_certificate("certname") - conn.delete_server_cert("certname") - with assert_raises(BotoServerError): - conn.get_server_certificate("certname") - with assert_raises(BotoServerError): - conn.delete_server_cert("certname") - - -@mock_iam_deprecated() -@raises(BotoServerError) -def test_get_role__should_throw__when_role_does_not_exist(): 
- conn = boto.connect_iam() - - conn.get_role('unexisting_role') - - -@mock_iam_deprecated() -@raises(BotoServerError) -def test_get_instance_profile__should_throw__when_instance_profile_does_not_exist(): - conn = boto.connect_iam() - - conn.get_instance_profile('unexisting_instance_profile') - - -@mock_iam_deprecated() -def test_create_role_and_instance_profile(): - conn = boto.connect_iam() - conn.create_instance_profile("my-profile", path="my-path") - conn.create_role( - "my-role", assume_role_policy_document="some policy", path="my-path") - - conn.add_role_to_instance_profile("my-profile", "my-role") - - role = conn.get_role("my-role") - role.path.should.equal("my-path") - role.assume_role_policy_document.should.equal("some policy") - - profile = conn.get_instance_profile("my-profile") - profile.path.should.equal("my-path") - role_from_profile = list(profile.roles.values())[0] - role_from_profile['role_id'].should.equal(role.role_id) - role_from_profile['role_name'].should.equal("my-role") - - conn.list_roles().roles[0].role_name.should.equal('my-role') - - -@mock_iam_deprecated() -def test_remove_role_from_instance_profile(): - conn = boto.connect_iam() - conn.create_instance_profile("my-profile", path="my-path") - conn.create_role( - "my-role", assume_role_policy_document="some policy", path="my-path") - conn.add_role_to_instance_profile("my-profile", "my-role") - - profile = conn.get_instance_profile("my-profile") - role_from_profile = list(profile.roles.values())[0] - role_from_profile['role_name'].should.equal("my-role") - - conn.remove_role_from_instance_profile("my-profile", "my-role") - - profile = conn.get_instance_profile("my-profile") - dict(profile.roles).should.be.empty - - -@mock_iam() -def test_get_login_profile(): - conn = boto3.client('iam', region_name='us-east-1') - conn.create_user(UserName='my-user') - conn.create_login_profile(UserName='my-user', Password='my-pass') - - response = conn.get_login_profile(UserName='my-user') - 
response['LoginProfile']['UserName'].should.equal('my-user') - - -@mock_iam() -def test_update_login_profile(): - conn = boto3.client('iam', region_name='us-east-1') - conn.create_user(UserName='my-user') - conn.create_login_profile(UserName='my-user', Password='my-pass') - response = conn.get_login_profile(UserName='my-user') - response['LoginProfile'].get('PasswordResetRequired').should.equal(None) - - conn.update_login_profile(UserName='my-user', Password='new-pass', PasswordResetRequired=True) - response = conn.get_login_profile(UserName='my-user') - response['LoginProfile'].get('PasswordResetRequired').should.equal(True) - - -@mock_iam() -def test_delete_role(): - conn = boto3.client('iam', region_name='us-east-1') - - with assert_raises(ClientError): - conn.delete_role(RoleName="my-role") - - conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") - role = conn.get_role(RoleName="my-role") - role.get('Role').get('Arn').should.equal('arn:aws:iam::123456789012:role/my-path/my-role') - - conn.delete_role(RoleName="my-role") - - with assert_raises(ClientError): - conn.get_role(RoleName="my-role") - - -@mock_iam_deprecated() -def test_list_instance_profiles(): - conn = boto.connect_iam() - conn.create_instance_profile("my-profile", path="my-path") - conn.create_role("my-role", path="my-path") - - conn.add_role_to_instance_profile("my-profile", "my-role") - - profiles = conn.list_instance_profiles().instance_profiles - - len(profiles).should.equal(1) - profiles[0].instance_profile_name.should.equal("my-profile") - profiles[0].roles.role_name.should.equal("my-role") - - -@mock_iam_deprecated() -def test_list_instance_profiles_for_role(): - conn = boto.connect_iam() - - conn.create_role(role_name="my-role", - assume_role_policy_document="some policy", path="my-path") - conn.create_role(role_name="my-role2", - assume_role_policy_document="some policy2", path="my-path2") - - profile_name_list = ['my-profile', 'my-profile2'] - 
profile_path_list = ['my-path', 'my-path2'] - for profile_count in range(0, 2): - conn.create_instance_profile( - profile_name_list[profile_count], path=profile_path_list[profile_count]) - - for profile_count in range(0, 2): - conn.add_role_to_instance_profile( - profile_name_list[profile_count], "my-role") - - profile_dump = conn.list_instance_profiles_for_role(role_name="my-role") - profile_list = profile_dump['list_instance_profiles_for_role_response'][ - 'list_instance_profiles_for_role_result']['instance_profiles'] - for profile_count in range(0, len(profile_list)): - profile_name_list.remove(profile_list[profile_count][ - "instance_profile_name"]) - profile_path_list.remove(profile_list[profile_count]["path"]) - profile_list[profile_count]["roles"]["member"][ - "role_name"].should.equal("my-role") - - len(profile_name_list).should.equal(0) - len(profile_path_list).should.equal(0) - - profile_dump2 = conn.list_instance_profiles_for_role(role_name="my-role2") - profile_list = profile_dump2['list_instance_profiles_for_role_response'][ - 'list_instance_profiles_for_role_result']['instance_profiles'] - len(profile_list).should.equal(0) - - -@mock_iam_deprecated() -def test_list_role_policies(): - conn = boto.connect_iam() - conn.create_role("my-role") - conn.put_role_policy("my-role", "test policy", "my policy") - role = conn.list_role_policies("my-role") - role.policy_names.should.have.length_of(1) - role.policy_names[0].should.equal("test policy") - - conn.put_role_policy("my-role", "test policy 2", "another policy") - role = conn.list_role_policies("my-role") - role.policy_names.should.have.length_of(2) - - conn.delete_role_policy("my-role", "test policy") - role = conn.list_role_policies("my-role") - role.policy_names.should.have.length_of(1) - role.policy_names[0].should.equal("test policy 2") - - with assert_raises(BotoServerError): - conn.delete_role_policy("my-role", "test policy") - - -@mock_iam_deprecated() -def test_put_role_policy(): - conn = 
boto.connect_iam() - conn.create_role( - "my-role", assume_role_policy_document="some policy", path="my-path") - conn.put_role_policy("my-role", "test policy", "my policy") - policy = conn.get_role_policy( - "my-role", "test policy")['get_role_policy_response']['get_role_policy_result']['policy_name'] - policy.should.equal("test policy") - - -@mock_iam_deprecated() -def test_update_assume_role_policy(): - conn = boto.connect_iam() - role = conn.create_role("my-role") - conn.update_assume_role_policy(role.role_name, "my-policy") - role = conn.get_role("my-role") - role.assume_role_policy_document.should.equal("my-policy") - - -@mock_iam -def test_create_policy(): - conn = boto3.client('iam', region_name='us-east-1') - response = conn.create_policy( - PolicyName="TestCreatePolicy", - PolicyDocument='{"some":"policy"}') - response['Policy']['Arn'].should.equal("arn:aws:iam::123456789012:policy/TestCreatePolicy") - - -@mock_iam -def test_create_policy_versions(): - conn = boto3.client('iam', region_name='us-east-1') - with assert_raises(ClientError): - conn.create_policy_version( - PolicyArn="arn:aws:iam::123456789012:policy/TestCreatePolicyVersion", - PolicyDocument='{"some":"policy"}') - conn.create_policy( - PolicyName="TestCreatePolicyVersion", - PolicyDocument='{"some":"policy"}') - version = conn.create_policy_version( - PolicyArn="arn:aws:iam::123456789012:policy/TestCreatePolicyVersion", - PolicyDocument='{"some":"policy"}') - version.get('PolicyVersion').get('Document').should.equal({'some': 'policy'}) - -@mock_iam -def test_get_policy(): - conn = boto3.client('iam', region_name='us-east-1') - response = conn.create_policy( - PolicyName="TestGetPolicy", - PolicyDocument='{"some":"policy"}') - policy = conn.get_policy( - PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicy") - response['Policy']['Arn'].should.equal("arn:aws:iam::123456789012:policy/TestGetPolicy") - - -@mock_iam -def test_get_policy_version(): - conn = boto3.client('iam', 
region_name='us-east-1') - conn.create_policy( - PolicyName="TestGetPolicyVersion", - PolicyDocument='{"some":"policy"}') - version = conn.create_policy_version( - PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicyVersion", - PolicyDocument='{"some":"policy"}') - with assert_raises(ClientError): - conn.get_policy_version( - PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicyVersion", - VersionId='v2-does-not-exist') - retrieved = conn.get_policy_version( - PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicyVersion", - VersionId=version.get('PolicyVersion').get('VersionId')) - retrieved.get('PolicyVersion').get('Document').should.equal({'some': 'policy'}) - - -@mock_iam -def test_list_policy_versions(): - conn = boto3.client('iam', region_name='us-east-1') - with assert_raises(ClientError): - versions = conn.list_policy_versions( - PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions") - conn.create_policy( - PolicyName="TestListPolicyVersions", - PolicyDocument='{"first":"policy"}') - versions = conn.list_policy_versions( - PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions") - versions.get('Versions')[0].get('VersionId').should.equal('v1') - - conn.create_policy_version( - PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions", - PolicyDocument='{"second":"policy"}') - conn.create_policy_version( - PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions", - PolicyDocument='{"third":"policy"}') - versions = conn.list_policy_versions( - PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions") - print(versions.get('Versions')) - versions.get('Versions')[1].get('Document').should.equal({'second': 'policy'}) - versions.get('Versions')[2].get('Document').should.equal({'third': 'policy'}) - - -@mock_iam -def test_delete_policy_version(): - conn = boto3.client('iam', region_name='us-east-1') - conn.create_policy( - PolicyName="TestDeletePolicyVersion", - 
PolicyDocument='{"first":"policy"}') - conn.create_policy_version( - PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion", - PolicyDocument='{"second":"policy"}') - with assert_raises(ClientError): - conn.delete_policy_version( - PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion", - VersionId='v2-nope-this-does-not-exist') - conn.delete_policy_version( - PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion", - VersionId='v2') - versions = conn.list_policy_versions( - PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion") - len(versions.get('Versions')).should.equal(1) - - -@mock_iam_deprecated() -def test_create_user(): - conn = boto.connect_iam() - conn.create_user('my-user') - with assert_raises(BotoServerError): - conn.create_user('my-user') - - -@mock_iam_deprecated() -def test_get_user(): - conn = boto.connect_iam() - with assert_raises(BotoServerError): - conn.get_user('my-user') - conn.create_user('my-user') - conn.get_user('my-user') - - -@mock_iam_deprecated() -def test_get_current_user(): - """If no user is specific, IAM returns the current user""" - conn = boto.connect_iam() - user = conn.get_user()['get_user_response']['get_user_result']['user'] - user['user_name'].should.equal('default_user') - - -@mock_iam() -def test_list_users(): - path_prefix = '/' - max_items = 10 - conn = boto3.client('iam', region_name='us-east-1') - conn.create_user(UserName='my-user') - response = conn.list_users(PathPrefix=path_prefix, MaxItems=max_items) - user = response['Users'][0] - user['UserName'].should.equal('my-user') - user['Path'].should.equal('/') - user['Arn'].should.equal('arn:aws:iam::123456789012:user/my-user') - - -@mock_iam() -def test_user_policies(): - policy_name = 'UserManagedPolicy' - policy_document = "{'mypolicy': 'test'}" - user_name = 'my-user' - conn = boto3.client('iam', region_name='us-east-1') - conn.create_user(UserName=user_name) - conn.put_user_policy( - UserName=user_name, - 
PolicyName=policy_name, - PolicyDocument=policy_document - ) - - policy_doc = conn.get_user_policy( - UserName=user_name, - PolicyName=policy_name - ) - test = policy_document in policy_doc['PolicyDocument'] - test.should.equal(True) - - policies = conn.list_user_policies(UserName=user_name) - len(policies['PolicyNames']).should.equal(1) - policies['PolicyNames'][0].should.equal(policy_name) - - conn.delete_user_policy( - UserName=user_name, - PolicyName=policy_name - ) - - policies = conn.list_user_policies(UserName=user_name) - len(policies['PolicyNames']).should.equal(0) - - -@mock_iam_deprecated() -def test_create_login_profile(): - conn = boto.connect_iam() - with assert_raises(BotoServerError): - conn.create_login_profile('my-user', 'my-pass') - conn.create_user('my-user') - conn.create_login_profile('my-user', 'my-pass') - with assert_raises(BotoServerError): - conn.create_login_profile('my-user', 'my-pass') - - -@mock_iam_deprecated() -def test_delete_login_profile(): - conn = boto.connect_iam() - conn.create_user('my-user') - with assert_raises(BotoServerError): - conn.delete_login_profile('my-user') - conn.create_login_profile('my-user', 'my-pass') - conn.delete_login_profile('my-user') - - -@mock_iam_deprecated() -def test_create_access_key(): - conn = boto.connect_iam() - with assert_raises(BotoServerError): - conn.create_access_key('my-user') - conn.create_user('my-user') - conn.create_access_key('my-user') - - -@mock_iam_deprecated() -def test_get_all_access_keys(): - """If no access keys exist there should be none in the response, - if an access key is present it should have the correct fields present""" - conn = boto.connect_iam() - conn.create_user('my-user') - response = conn.get_all_access_keys('my-user') - assert_equals( - response['list_access_keys_response'][ - 'list_access_keys_result']['access_key_metadata'], - [] - ) - conn.create_access_key('my-user') - response = conn.get_all_access_keys('my-user') - assert_equals( - 
sorted(response['list_access_keys_response'][ - 'list_access_keys_result']['access_key_metadata'][0].keys()), - sorted(['status', 'create_date', 'user_name', 'access_key_id']) - ) - - -@mock_iam_deprecated() -def test_delete_access_key(): - conn = boto.connect_iam() - conn.create_user('my-user') - access_key_id = conn.create_access_key('my-user')['create_access_key_response'][ - 'create_access_key_result']['access_key']['access_key_id'] - conn.delete_access_key(access_key_id, 'my-user') - - -@mock_iam() -def test_mfa_devices(): - # Test enable device - conn = boto3.client('iam', region_name='us-east-1') - conn.create_user(UserName='my-user') - conn.enable_mfa_device( - UserName='my-user', - SerialNumber='123456789', - AuthenticationCode1='234567', - AuthenticationCode2='987654' - ) - - # Test list mfa devices - response = conn.list_mfa_devices(UserName='my-user') - device = response['MFADevices'][0] - device['SerialNumber'].should.equal('123456789') - - # Test deactivate mfa device - conn.deactivate_mfa_device(UserName='my-user', SerialNumber='123456789') - response = conn.list_mfa_devices(UserName='my-user') - len(response['MFADevices']).should.equal(0) - - -@mock_iam_deprecated() -def test_delete_user(): - conn = boto.connect_iam() - with assert_raises(BotoServerError): - conn.delete_user('my-user') - conn.create_user('my-user') - conn.delete_user('my-user') - - -@mock_iam_deprecated() -def test_generate_credential_report(): - conn = boto.connect_iam() - result = conn.generate_credential_report() - result['generate_credential_report_response'][ - 'generate_credential_report_result']['state'].should.equal('STARTED') - result = conn.generate_credential_report() - result['generate_credential_report_response'][ - 'generate_credential_report_result']['state'].should.equal('COMPLETE') - - -@mock_iam_deprecated() -def test_get_credential_report(): - conn = boto.connect_iam() - conn.create_user('my-user') - with assert_raises(BotoServerError): - 
conn.get_credential_report() - result = conn.generate_credential_report() - while result['generate_credential_report_response']['generate_credential_report_result']['state'] != 'COMPLETE': - result = conn.generate_credential_report() - result = conn.get_credential_report() - report = base64.b64decode(result['get_credential_report_response'][ - 'get_credential_report_result']['content'].encode('ascii')).decode('ascii') - report.should.match(r'.*my-user.*') - - -@requires_boto_gte('2.39') -@mock_iam_deprecated() -def test_managed_policy(): - conn = boto.connect_iam() - - conn.create_policy(policy_name='UserManagedPolicy', - policy_document={'mypolicy': 'test'}, - path='/mypolicy/', - description='my user managed policy') - - marker = 0 - aws_policies = [] - while marker is not None: - response = conn.list_policies(scope='AWS', marker=marker)[ - 'list_policies_response']['list_policies_result'] - for policy in response['policies']: - aws_policies.append(policy) - marker = response.get('marker') - set(p.name for p in aws_managed_policies).should.equal( - set(p['policy_name'] for p in aws_policies)) - - user_policies = conn.list_policies(scope='Local')['list_policies_response'][ - 'list_policies_result']['policies'] - set(['UserManagedPolicy']).should.equal( - set(p['policy_name'] for p in user_policies)) - - marker = 0 - all_policies = [] - while marker is not None: - response = conn.list_policies(marker=marker)[ - 'list_policies_response']['list_policies_result'] - for policy in response['policies']: - all_policies.append(policy) - marker = response.get('marker') - set(p['policy_name'] for p in aws_policies + - user_policies).should.equal(set(p['policy_name'] for p in all_policies)) - - role_name = 'my-role' - conn.create_role(role_name, assume_role_policy_document={ - 'policy': 'test'}, path="my-path") - for policy_name in ['AmazonElasticMapReduceRole', - 'AmazonElasticMapReduceforEC2Role']: - policy_arn = 'arn:aws:iam::aws:policy/service-role/' + policy_name - 
conn.attach_role_policy(policy_arn, role_name) - - rows = conn.list_policies(only_attached=True)['list_policies_response'][ - 'list_policies_result']['policies'] - rows.should.have.length_of(2) - for x in rows: - int(x['attachment_count']).should.be.greater_than(0) - - # boto has not implemented this end point but accessible this way - resp = conn.get_response('ListAttachedRolePolicies', - {'RoleName': role_name}, - list_marker='AttachedPolicies') - resp['list_attached_role_policies_response']['list_attached_role_policies_result'][ - 'attached_policies'].should.have.length_of(2) - - conn.detach_role_policy( - "arn:aws:iam::aws:policy/service-role/AmazonElasticMapReduceRole", - role_name) - rows = conn.list_policies(only_attached=True)['list_policies_response'][ - 'list_policies_result']['policies'] - rows.should.have.length_of(1) - for x in rows: - int(x['attachment_count']).should.be.greater_than(0) - - # boto has not implemented this end point but accessible this way - resp = conn.get_response('ListAttachedRolePolicies', - {'RoleName': role_name}, - list_marker='AttachedPolicies') - resp['list_attached_role_policies_response']['list_attached_role_policies_result'][ - 'attached_policies'].should.have.length_of(1) - - with assert_raises(BotoServerError): - conn.detach_role_policy( - "arn:aws:iam::aws:policy/service-role/AmazonElasticMapReduceRole", - role_name) - - with assert_raises(BotoServerError): - conn.detach_role_policy( - "arn:aws:iam::aws:policy/Nonexistent", role_name) - - -@mock_iam -def test_boto3_create_login_profile(): - conn = boto3.client('iam', region_name='us-east-1') - - with assert_raises(ClientError): - conn.create_login_profile(UserName='my-user', Password='Password') - - conn.create_user(UserName='my-user') - conn.create_login_profile(UserName='my-user', Password='Password') - - with assert_raises(ClientError): - conn.create_login_profile(UserName='my-user', Password='Password') - - -@mock_iam() -def test_attach_detach_user_policy(): - iam = 
boto3.resource('iam', region_name='us-east-1') - client = boto3.client('iam', region_name='us-east-1') - - user = iam.create_user(UserName='test-user') - - policy_name = 'UserAttachedPolicy' - policy = iam.create_policy(PolicyName=policy_name, - PolicyDocument='{"mypolicy": "test"}', - Path='/mypolicy/', - Description='my user attached policy') - - client.attach_user_policy(UserName=user.name, PolicyArn=policy.arn) - - resp = client.list_attached_user_policies(UserName=user.name) - resp['AttachedPolicies'].should.have.length_of(1) - attached_policy = resp['AttachedPolicies'][0] - attached_policy['PolicyArn'].should.equal(policy.arn) - attached_policy['PolicyName'].should.equal(policy_name) - - client.detach_user_policy(UserName=user.name, PolicyArn=policy.arn) - - resp = client.list_attached_user_policies(UserName=user.name) - resp['AttachedPolicies'].should.have.length_of(0) - - -@mock_iam -def test_update_access_key(): - iam = boto3.resource('iam', region_name='us-east-1') - client = iam.meta.client - username = 'test-user' - iam.create_user(UserName=username) - with assert_raises(ClientError): - client.update_access_key(UserName=username, - AccessKeyId='non-existent-key', - Status='Inactive') - key = client.create_access_key(UserName=username)['AccessKey'] - client.update_access_key(UserName=username, - AccessKeyId=key['AccessKeyId'], - Status='Inactive') - resp = client.list_access_keys(UserName=username) - resp['AccessKeyMetadata'][0]['Status'].should.equal('Inactive') - - -@mock_iam -def test_get_account_authorization_details(): - import json - conn = boto3.client('iam', region_name='us-east-1') - conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") - conn.create_user(Path='/', UserName='testCloudAuxUser') - conn.create_group(Path='/', GroupName='testCloudAuxGroup') - conn.create_policy( - PolicyName='testCloudAuxPolicy', - Path='/', - PolicyDocument=json.dumps({ - "Version": "2012-10-17", - "Statement": [ - { - 
"Action": "s3:ListBucket", - "Resource": "*", - "Effect": "Allow", - } - ] - }), - Description='Test CloudAux Policy' - ) - - result = conn.get_account_authorization_details(Filter=['Role']) - len(result['RoleDetailList']) == 1 - len(result['UserDetailList']) == 0 - len(result['GroupDetailList']) == 0 - len(result['Policies']) == 0 - - result = conn.get_account_authorization_details(Filter=['User']) - len(result['RoleDetailList']) == 0 - len(result['UserDetailList']) == 1 - len(result['GroupDetailList']) == 0 - len(result['Policies']) == 0 - - result = conn.get_account_authorization_details(Filter=['Group']) - len(result['RoleDetailList']) == 0 - len(result['UserDetailList']) == 0 - len(result['GroupDetailList']) == 1 - len(result['Policies']) == 0 - - result = conn.get_account_authorization_details(Filter=['LocalManagedPolicy']) - len(result['RoleDetailList']) == 0 - len(result['UserDetailList']) == 0 - len(result['GroupDetailList']) == 0 - len(result['Policies']) == 1 - - # Check for greater than 1 since this should always be greater than one but might change. 
- # See iam/aws_managed_policies.py - result = conn.get_account_authorization_details(Filter=['AWSManagedPolicy']) - len(result['RoleDetailList']) == 0 - len(result['UserDetailList']) == 0 - len(result['GroupDetailList']) == 0 - len(result['Policies']) > 1 - - result = conn.get_account_authorization_details() - len(result['RoleDetailList']) == 1 - len(result['UserDetailList']) == 1 - len(result['GroupDetailList']) == 1 - len(result['Policies']) > 1 - - - +from __future__ import unicode_literals +import base64 + +import boto +import boto3 +import os +import sure # noqa +import sys +from boto.exception import BotoServerError +from botocore.exceptions import ClientError +from moto import mock_iam, mock_iam_deprecated +from moto.iam.models import aws_managed_policies +from nose.tools import assert_raises, assert_equals +from nose.tools import raises + +from datetime import datetime +from tests.helpers import requires_boto_gte + + +MOCK_CERT = """-----BEGIN CERTIFICATE----- +MIIBpzCCARACCQCY5yOdxCTrGjANBgkqhkiG9w0BAQsFADAXMRUwEwYDVQQKDAxt +b3RvIHRlc3RpbmcwIBcNMTgxMTA1MTkwNTIwWhgPMjI5MjA4MTkxOTA1MjBaMBcx +FTATBgNVBAoMDG1vdG8gdGVzdGluZzCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkC +gYEA1Jn3g2h7LD3FLqdpcYNbFXCS4V4eDpuTCje9vKFcC3pi/01147X3zdfPy8Mt +ZhKxcREOwm4NXykh23P9KW7fBovpNwnbYsbPqj8Hf1ZaClrgku1arTVhEnKjx8zO +vaR/bVLCss4uE0E0VM1tJn/QGQsfthFsjuHtwx8uIWz35tUCAwEAATANBgkqhkiG +9w0BAQsFAAOBgQBWdOQ7bDc2nWkUhFjZoNIZrqjyNdjlMUndpwREVD7FQ/DuxJMj +FyDHrtlrS80dPUQWNYHw++oACDpWO01LGLPPrGmuO/7cOdojPEd852q5gd+7W9xt +8vUH+pBa6IBLbvBp+szli51V3TLSWcoyy4ceJNQU2vCkTLoFdS0RLd/7tQ== +-----END CERTIFICATE-----""" + + +@mock_iam_deprecated() +def test_get_all_server_certs(): + conn = boto.connect_iam() + + conn.upload_server_cert("certname", "certbody", "privatekey") + certs = conn.get_all_server_certs()['list_server_certificates_response'][ + 'list_server_certificates_result']['server_certificate_metadata_list'] + certs.should.have.length_of(1) + cert1 = certs[0] + 
cert1.server_certificate_name.should.equal("certname") + cert1.arn.should.equal( + "arn:aws:iam::123456789012:server-certificate/certname") + + +@mock_iam_deprecated() +def test_get_server_cert_doesnt_exist(): + conn = boto.connect_iam() + + with assert_raises(BotoServerError): + conn.get_server_certificate("NonExistant") + + +@mock_iam_deprecated() +def test_get_server_cert(): + conn = boto.connect_iam() + + conn.upload_server_cert("certname", "certbody", "privatekey") + cert = conn.get_server_certificate("certname") + cert.server_certificate_name.should.equal("certname") + cert.arn.should.equal( + "arn:aws:iam::123456789012:server-certificate/certname") + + +@mock_iam_deprecated() +def test_upload_server_cert(): + conn = boto.connect_iam() + + conn.upload_server_cert("certname", "certbody", "privatekey") + cert = conn.get_server_certificate("certname") + cert.server_certificate_name.should.equal("certname") + cert.arn.should.equal( + "arn:aws:iam::123456789012:server-certificate/certname") + + +@mock_iam_deprecated() +def test_delete_server_cert(): + conn = boto.connect_iam() + + conn.upload_server_cert("certname", "certbody", "privatekey") + conn.get_server_certificate("certname") + conn.delete_server_cert("certname") + with assert_raises(BotoServerError): + conn.get_server_certificate("certname") + with assert_raises(BotoServerError): + conn.delete_server_cert("certname") + + +@mock_iam_deprecated() +@raises(BotoServerError) +def test_get_role__should_throw__when_role_does_not_exist(): + conn = boto.connect_iam() + + conn.get_role('unexisting_role') + + +@mock_iam_deprecated() +@raises(BotoServerError) +def test_get_instance_profile__should_throw__when_instance_profile_does_not_exist(): + conn = boto.connect_iam() + + conn.get_instance_profile('unexisting_instance_profile') + + +@mock_iam_deprecated() +def test_create_role_and_instance_profile(): + conn = boto.connect_iam() + conn.create_instance_profile("my-profile", path="my-path") + conn.create_role( + 
"my-role", assume_role_policy_document="some policy", path="my-path") + + conn.add_role_to_instance_profile("my-profile", "my-role") + + role = conn.get_role("my-role") + role.path.should.equal("my-path") + role.assume_role_policy_document.should.equal("some policy") + + profile = conn.get_instance_profile("my-profile") + profile.path.should.equal("my-path") + role_from_profile = list(profile.roles.values())[0] + role_from_profile['role_id'].should.equal(role.role_id) + role_from_profile['role_name'].should.equal("my-role") + + conn.list_roles().roles[0].role_name.should.equal('my-role') + + # Test with an empty path: + profile = conn.create_instance_profile('my-other-profile') + profile.path.should.equal('/') + + +@mock_iam_deprecated() +def test_remove_role_from_instance_profile(): + conn = boto.connect_iam() + conn.create_instance_profile("my-profile", path="my-path") + conn.create_role( + "my-role", assume_role_policy_document="some policy", path="my-path") + conn.add_role_to_instance_profile("my-profile", "my-role") + + profile = conn.get_instance_profile("my-profile") + role_from_profile = list(profile.roles.values())[0] + role_from_profile['role_name'].should.equal("my-role") + + conn.remove_role_from_instance_profile("my-profile", "my-role") + + profile = conn.get_instance_profile("my-profile") + dict(profile.roles).should.be.empty + + +@mock_iam() +def test_get_login_profile(): + conn = boto3.client('iam', region_name='us-east-1') + conn.create_user(UserName='my-user') + conn.create_login_profile(UserName='my-user', Password='my-pass') + + response = conn.get_login_profile(UserName='my-user') + response['LoginProfile']['UserName'].should.equal('my-user') + + +@mock_iam() +def test_update_login_profile(): + conn = boto3.client('iam', region_name='us-east-1') + conn.create_user(UserName='my-user') + conn.create_login_profile(UserName='my-user', Password='my-pass') + response = conn.get_login_profile(UserName='my-user') + 
response['LoginProfile'].get('PasswordResetRequired').should.equal(None) + + conn.update_login_profile(UserName='my-user', Password='new-pass', PasswordResetRequired=True) + response = conn.get_login_profile(UserName='my-user') + response['LoginProfile'].get('PasswordResetRequired').should.equal(True) + + +@mock_iam() +def test_delete_role(): + conn = boto3.client('iam', region_name='us-east-1') + + with assert_raises(ClientError): + conn.delete_role(RoleName="my-role") + + conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") + role = conn.get_role(RoleName="my-role") + role.get('Role').get('Arn').should.equal('arn:aws:iam::123456789012:role/my-path/my-role') + + conn.delete_role(RoleName="my-role") + + with assert_raises(ClientError): + conn.get_role(RoleName="my-role") + + +@mock_iam_deprecated() +def test_list_instance_profiles(): + conn = boto.connect_iam() + conn.create_instance_profile("my-profile", path="my-path") + conn.create_role("my-role", path="my-path") + + conn.add_role_to_instance_profile("my-profile", "my-role") + + profiles = conn.list_instance_profiles().instance_profiles + + len(profiles).should.equal(1) + profiles[0].instance_profile_name.should.equal("my-profile") + profiles[0].roles.role_name.should.equal("my-role") + + +@mock_iam_deprecated() +def test_list_instance_profiles_for_role(): + conn = boto.connect_iam() + + conn.create_role(role_name="my-role", + assume_role_policy_document="some policy", path="my-path") + conn.create_role(role_name="my-role2", + assume_role_policy_document="some policy2", path="my-path2") + + profile_name_list = ['my-profile', 'my-profile2'] + profile_path_list = ['my-path', 'my-path2'] + for profile_count in range(0, 2): + conn.create_instance_profile( + profile_name_list[profile_count], path=profile_path_list[profile_count]) + + for profile_count in range(0, 2): + conn.add_role_to_instance_profile( + profile_name_list[profile_count], "my-role") + + profile_dump = 
conn.list_instance_profiles_for_role(role_name="my-role") + profile_list = profile_dump['list_instance_profiles_for_role_response'][ + 'list_instance_profiles_for_role_result']['instance_profiles'] + for profile_count in range(0, len(profile_list)): + profile_name_list.remove(profile_list[profile_count][ + "instance_profile_name"]) + profile_path_list.remove(profile_list[profile_count]["path"]) + profile_list[profile_count]["roles"]["member"][ + "role_name"].should.equal("my-role") + + len(profile_name_list).should.equal(0) + len(profile_path_list).should.equal(0) + + profile_dump2 = conn.list_instance_profiles_for_role(role_name="my-role2") + profile_list = profile_dump2['list_instance_profiles_for_role_response'][ + 'list_instance_profiles_for_role_result']['instance_profiles'] + len(profile_list).should.equal(0) + + +@mock_iam_deprecated() +def test_list_role_policies(): + conn = boto.connect_iam() + conn.create_role("my-role") + conn.put_role_policy("my-role", "test policy", "my policy") + role = conn.list_role_policies("my-role") + role.policy_names.should.have.length_of(1) + role.policy_names[0].should.equal("test policy") + + conn.put_role_policy("my-role", "test policy 2", "another policy") + role = conn.list_role_policies("my-role") + role.policy_names.should.have.length_of(2) + + conn.delete_role_policy("my-role", "test policy") + role = conn.list_role_policies("my-role") + role.policy_names.should.have.length_of(1) + role.policy_names[0].should.equal("test policy 2") + + with assert_raises(BotoServerError): + conn.delete_role_policy("my-role", "test policy") + + +@mock_iam_deprecated() +def test_put_role_policy(): + conn = boto.connect_iam() + conn.create_role( + "my-role", assume_role_policy_document="some policy", path="my-path") + conn.put_role_policy("my-role", "test policy", "my policy") + policy = conn.get_role_policy( + "my-role", "test policy")['get_role_policy_response']['get_role_policy_result']['policy_name'] + policy.should.equal("test 
policy") + + +@mock_iam_deprecated() +def test_update_assume_role_policy(): + conn = boto.connect_iam() + role = conn.create_role("my-role") + conn.update_assume_role_policy(role.role_name, "my-policy") + role = conn.get_role("my-role") + role.assume_role_policy_document.should.equal("my-policy") + + +@mock_iam +def test_create_policy(): + conn = boto3.client('iam', region_name='us-east-1') + response = conn.create_policy( + PolicyName="TestCreatePolicy", + PolicyDocument='{"some":"policy"}') + response['Policy']['Arn'].should.equal("arn:aws:iam::123456789012:policy/TestCreatePolicy") + + +@mock_iam +def test_create_policy_versions(): + conn = boto3.client('iam', region_name='us-east-1') + with assert_raises(ClientError): + conn.create_policy_version( + PolicyArn="arn:aws:iam::123456789012:policy/TestCreatePolicyVersion", + PolicyDocument='{"some":"policy"}') + conn.create_policy( + PolicyName="TestCreatePolicyVersion", + PolicyDocument='{"some":"policy"}') + version = conn.create_policy_version( + PolicyArn="arn:aws:iam::123456789012:policy/TestCreatePolicyVersion", + PolicyDocument='{"some":"policy"}', + SetAsDefault=True) + version.get('PolicyVersion').get('Document').should.equal({'some': 'policy'}) + version.get('PolicyVersion').get('VersionId').should.equal("v2") + conn.delete_policy_version( + PolicyArn="arn:aws:iam::123456789012:policy/TestCreatePolicyVersion", + VersionId="v1") + version = conn.create_policy_version( + PolicyArn="arn:aws:iam::123456789012:policy/TestCreatePolicyVersion", + PolicyDocument='{"some":"policy"}') + version.get('PolicyVersion').get('VersionId').should.equal("v3") + + +@mock_iam +def test_get_policy(): + conn = boto3.client('iam', region_name='us-east-1') + response = conn.create_policy( + PolicyName="TestGetPolicy", + PolicyDocument='{"some":"policy"}') + policy = conn.get_policy( + PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicy") + 
response['Policy']['Arn'].should.equal("arn:aws:iam::123456789012:policy/TestGetPolicy") + + +@mock_iam +def test_get_policy_version(): + conn = boto3.client('iam', region_name='us-east-1') + conn.create_policy( + PolicyName="TestGetPolicyVersion", + PolicyDocument='{"some":"policy"}') + version = conn.create_policy_version( + PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicyVersion", + PolicyDocument='{"some":"policy"}') + with assert_raises(ClientError): + conn.get_policy_version( + PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicyVersion", + VersionId='v2-does-not-exist') + retrieved = conn.get_policy_version( + PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicyVersion", + VersionId=version.get('PolicyVersion').get('VersionId')) + retrieved.get('PolicyVersion').get('Document').should.equal({'some': 'policy'}) + + +@mock_iam +def test_list_policy_versions(): + conn = boto3.client('iam', region_name='us-east-1') + with assert_raises(ClientError): + versions = conn.list_policy_versions( + PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions") + conn.create_policy( + PolicyName="TestListPolicyVersions", + PolicyDocument='{"first":"policy"}') + versions = conn.list_policy_versions( + PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions") + versions.get('Versions')[0].get('VersionId').should.equal('v1') + + conn.create_policy_version( + PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions", + PolicyDocument='{"second":"policy"}') + conn.create_policy_version( + PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions", + PolicyDocument='{"third":"policy"}') + versions = conn.list_policy_versions( + PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions") + print(versions.get('Versions')) + versions.get('Versions')[1].get('Document').should.equal({'second': 'policy'}) + versions.get('Versions')[2].get('Document').should.equal({'third': 'policy'}) + + +@mock_iam +def 
test_delete_policy_version(): + conn = boto3.client('iam', region_name='us-east-1') + conn.create_policy( + PolicyName="TestDeletePolicyVersion", + PolicyDocument='{"first":"policy"}') + conn.create_policy_version( + PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion", + PolicyDocument='{"second":"policy"}') + with assert_raises(ClientError): + conn.delete_policy_version( + PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion", + VersionId='v2-nope-this-does-not-exist') + conn.delete_policy_version( + PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion", + VersionId='v2') + versions = conn.list_policy_versions( + PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion") + len(versions.get('Versions')).should.equal(1) + + +@mock_iam_deprecated() +def test_create_user(): + conn = boto.connect_iam() + conn.create_user('my-user') + with assert_raises(BotoServerError): + conn.create_user('my-user') + + +@mock_iam_deprecated() +def test_get_user(): + conn = boto.connect_iam() + with assert_raises(BotoServerError): + conn.get_user('my-user') + conn.create_user('my-user') + conn.get_user('my-user') + + +@mock_iam() +def test_update_user(): + conn = boto3.client('iam', region_name='us-east-1') + with assert_raises(conn.exceptions.NoSuchEntityException): + conn.update_user(UserName='my-user') + conn.create_user(UserName='my-user') + conn.update_user(UserName='my-user', NewPath='/new-path/', NewUserName='new-user') + response = conn.get_user(UserName='new-user') + response['User'].get('Path').should.equal('/new-path/') + with assert_raises(conn.exceptions.NoSuchEntityException): + conn.get_user(UserName='my-user') + + +@mock_iam_deprecated() +def test_get_current_user(): + """If no user is specified, IAM returns the current user""" + conn = boto.connect_iam() + user = conn.get_user()['get_user_response']['get_user_result']['user'] + user['user_name'].should.equal('default_user') + + +@mock_iam() +def 
test_list_users(): + path_prefix = '/' + max_items = 10 + conn = boto3.client('iam', region_name='us-east-1') + conn.create_user(UserName='my-user') + response = conn.list_users(PathPrefix=path_prefix, MaxItems=max_items) + user = response['Users'][0] + user['UserName'].should.equal('my-user') + user['Path'].should.equal('/') + user['Arn'].should.equal('arn:aws:iam::123456789012:user/my-user') + + +@mock_iam() +def test_user_policies(): + policy_name = 'UserManagedPolicy' + policy_document = "{'mypolicy': 'test'}" + user_name = 'my-user' + conn = boto3.client('iam', region_name='us-east-1') + conn.create_user(UserName=user_name) + conn.put_user_policy( + UserName=user_name, + PolicyName=policy_name, + PolicyDocument=policy_document + ) + + policy_doc = conn.get_user_policy( + UserName=user_name, + PolicyName=policy_name + ) + test = policy_document in policy_doc['PolicyDocument'] + test.should.equal(True) + + policies = conn.list_user_policies(UserName=user_name) + len(policies['PolicyNames']).should.equal(1) + policies['PolicyNames'][0].should.equal(policy_name) + + conn.delete_user_policy( + UserName=user_name, + PolicyName=policy_name + ) + + policies = conn.list_user_policies(UserName=user_name) + len(policies['PolicyNames']).should.equal(0) + + +@mock_iam_deprecated() +def test_create_login_profile(): + conn = boto.connect_iam() + with assert_raises(BotoServerError): + conn.create_login_profile('my-user', 'my-pass') + conn.create_user('my-user') + conn.create_login_profile('my-user', 'my-pass') + with assert_raises(BotoServerError): + conn.create_login_profile('my-user', 'my-pass') + + +@mock_iam_deprecated() +def test_delete_login_profile(): + conn = boto.connect_iam() + conn.create_user('my-user') + with assert_raises(BotoServerError): + conn.delete_login_profile('my-user') + conn.create_login_profile('my-user', 'my-pass') + conn.delete_login_profile('my-user') + + +@mock_iam_deprecated() +def test_create_access_key(): + conn = boto.connect_iam() + with 
assert_raises(BotoServerError): + conn.create_access_key('my-user') + conn.create_user('my-user') + conn.create_access_key('my-user') + + +@mock_iam_deprecated() +def test_get_all_access_keys(): + """If no access keys exist there should be none in the response, + if an access key is present it should have the correct fields present""" + conn = boto.connect_iam() + conn.create_user('my-user') + response = conn.get_all_access_keys('my-user') + assert_equals( + response['list_access_keys_response'][ + 'list_access_keys_result']['access_key_metadata'], + [] + ) + conn.create_access_key('my-user') + response = conn.get_all_access_keys('my-user') + assert_equals( + sorted(response['list_access_keys_response'][ + 'list_access_keys_result']['access_key_metadata'][0].keys()), + sorted(['status', 'create_date', 'user_name', 'access_key_id']) + ) + + +@mock_iam_deprecated() +def test_delete_access_key(): + conn = boto.connect_iam() + conn.create_user('my-user') + access_key_id = conn.create_access_key('my-user')['create_access_key_response'][ + 'create_access_key_result']['access_key']['access_key_id'] + conn.delete_access_key(access_key_id, 'my-user') + + +@mock_iam() +def test_mfa_devices(): + # Test enable device + conn = boto3.client('iam', region_name='us-east-1') + conn.create_user(UserName='my-user') + conn.enable_mfa_device( + UserName='my-user', + SerialNumber='123456789', + AuthenticationCode1='234567', + AuthenticationCode2='987654' + ) + + # Test list mfa devices + response = conn.list_mfa_devices(UserName='my-user') + device = response['MFADevices'][0] + device['SerialNumber'].should.equal('123456789') + + # Test deactivate mfa device + conn.deactivate_mfa_device(UserName='my-user', SerialNumber='123456789') + response = conn.list_mfa_devices(UserName='my-user') + len(response['MFADevices']).should.equal(0) + + +@mock_iam_deprecated() +def test_delete_user(): + conn = boto.connect_iam() + with assert_raises(BotoServerError): + conn.delete_user('my-user') + 
conn.create_user('my-user') + conn.delete_user('my-user') + + +@mock_iam_deprecated() +def test_generate_credential_report(): + conn = boto.connect_iam() + result = conn.generate_credential_report() + result['generate_credential_report_response'][ + 'generate_credential_report_result']['state'].should.equal('STARTED') + result = conn.generate_credential_report() + result['generate_credential_report_response'][ + 'generate_credential_report_result']['state'].should.equal('COMPLETE') + +@mock_iam +def test_boto3_generate_credential_report(): + conn = boto3.client('iam', region_name='us-east-1') + result = conn.generate_credential_report() + result['State'].should.equal('STARTED') + result = conn.generate_credential_report() + result['State'].should.equal('COMPLETE') + + +@mock_iam_deprecated() +def test_get_credential_report(): + conn = boto.connect_iam() + conn.create_user('my-user') + with assert_raises(BotoServerError): + conn.get_credential_report() + result = conn.generate_credential_report() + while result['generate_credential_report_response']['generate_credential_report_result']['state'] != 'COMPLETE': + result = conn.generate_credential_report() + result = conn.get_credential_report() + report = base64.b64decode(result['get_credential_report_response'][ + 'get_credential_report_result']['content'].encode('ascii')).decode('ascii') + report.should.match(r'.*my-user.*') + + +@mock_iam +def test_boto3_get_credential_report(): + conn = boto3.client('iam', region_name='us-east-1') + conn.create_user(UserName='my-user') + with assert_raises(ClientError): + conn.get_credential_report() + result = conn.generate_credential_report() + while result['State'] != 'COMPLETE': + result = conn.generate_credential_report() + result = conn.get_credential_report() + report = result['Content'].decode('utf-8') + report.should.match(r'.*my-user.*') + + +@requires_boto_gte('2.39') +@mock_iam_deprecated() +def test_managed_policy(): + conn = boto.connect_iam() + + 
conn.create_policy(policy_name='UserManagedPolicy', + policy_document={'mypolicy': 'test'}, + path='/mypolicy/', + description='my user managed policy') + + marker = 0 + aws_policies = [] + while marker is not None: + response = conn.list_policies(scope='AWS', marker=marker)[ + 'list_policies_response']['list_policies_result'] + for policy in response['policies']: + aws_policies.append(policy) + marker = response.get('marker') + set(p.name for p in aws_managed_policies).should.equal( + set(p['policy_name'] for p in aws_policies)) + + user_policies = conn.list_policies(scope='Local')['list_policies_response'][ + 'list_policies_result']['policies'] + set(['UserManagedPolicy']).should.equal( + set(p['policy_name'] for p in user_policies)) + + marker = 0 + all_policies = [] + while marker is not None: + response = conn.list_policies(marker=marker)[ + 'list_policies_response']['list_policies_result'] + for policy in response['policies']: + all_policies.append(policy) + marker = response.get('marker') + set(p['policy_name'] for p in aws_policies + + user_policies).should.equal(set(p['policy_name'] for p in all_policies)) + + role_name = 'my-role' + conn.create_role(role_name, assume_role_policy_document={ + 'policy': 'test'}, path="my-path") + for policy_name in ['AmazonElasticMapReduceRole', + 'AmazonElasticMapReduceforEC2Role']: + policy_arn = 'arn:aws:iam::aws:policy/service-role/' + policy_name + conn.attach_role_policy(policy_arn, role_name) + + rows = conn.list_policies(only_attached=True)['list_policies_response'][ + 'list_policies_result']['policies'] + rows.should.have.length_of(2) + for x in rows: + int(x['attachment_count']).should.be.greater_than(0) + + # boto has not implemented this end point but accessible this way + resp = conn.get_response('ListAttachedRolePolicies', + {'RoleName': role_name}, + list_marker='AttachedPolicies') + resp['list_attached_role_policies_response']['list_attached_role_policies_result'][ + 
'attached_policies'].should.have.length_of(2) + + conn.detach_role_policy( + "arn:aws:iam::aws:policy/service-role/AmazonElasticMapReduceRole", + role_name) + rows = conn.list_policies(only_attached=True)['list_policies_response'][ + 'list_policies_result']['policies'] + rows.should.have.length_of(1) + for x in rows: + int(x['attachment_count']).should.be.greater_than(0) + + # boto has not implemented this end point but accessible this way + resp = conn.get_response('ListAttachedRolePolicies', + {'RoleName': role_name}, + list_marker='AttachedPolicies') + resp['list_attached_role_policies_response']['list_attached_role_policies_result'][ + 'attached_policies'].should.have.length_of(1) + + with assert_raises(BotoServerError): + conn.detach_role_policy( + "arn:aws:iam::aws:policy/service-role/AmazonElasticMapReduceRole", + role_name) + + with assert_raises(BotoServerError): + conn.detach_role_policy( + "arn:aws:iam::aws:policy/Nonexistent", role_name) + + +@mock_iam +def test_boto3_create_login_profile(): + conn = boto3.client('iam', region_name='us-east-1') + + with assert_raises(ClientError): + conn.create_login_profile(UserName='my-user', Password='Password') + + conn.create_user(UserName='my-user') + conn.create_login_profile(UserName='my-user', Password='Password') + + with assert_raises(ClientError): + conn.create_login_profile(UserName='my-user', Password='Password') + + +@mock_iam() +def test_attach_detach_user_policy(): + iam = boto3.resource('iam', region_name='us-east-1') + client = boto3.client('iam', region_name='us-east-1') + + user = iam.create_user(UserName='test-user') + + policy_name = 'UserAttachedPolicy' + policy = iam.create_policy(PolicyName=policy_name, + PolicyDocument='{"mypolicy": "test"}', + Path='/mypolicy/', + Description='my user attached policy') + + client.attach_user_policy(UserName=user.name, PolicyArn=policy.arn) + + resp = client.list_attached_user_policies(UserName=user.name) + resp['AttachedPolicies'].should.have.length_of(1) + 
attached_policy = resp['AttachedPolicies'][0] + attached_policy['PolicyArn'].should.equal(policy.arn) + attached_policy['PolicyName'].should.equal(policy_name) + + client.detach_user_policy(UserName=user.name, PolicyArn=policy.arn) + + resp = client.list_attached_user_policies(UserName=user.name) + resp['AttachedPolicies'].should.have.length_of(0) + + +@mock_iam +def test_update_access_key(): + iam = boto3.resource('iam', region_name='us-east-1') + client = iam.meta.client + username = 'test-user' + iam.create_user(UserName=username) + with assert_raises(ClientError): + client.update_access_key(UserName=username, + AccessKeyId='non-existent-key', + Status='Inactive') + key = client.create_access_key(UserName=username)['AccessKey'] + client.update_access_key(UserName=username, + AccessKeyId=key['AccessKeyId'], + Status='Inactive') + resp = client.list_access_keys(UserName=username) + resp['AccessKeyMetadata'][0]['Status'].should.equal('Inactive') + + +@mock_iam +def test_get_access_key_last_used(): + iam = boto3.resource('iam', region_name='us-east-1') + client = iam.meta.client + username = 'test-user' + iam.create_user(UserName=username) + with assert_raises(ClientError): + client.get_access_key_last_used(AccessKeyId='non-existent-key-id') + create_key_response = client.create_access_key(UserName=username)['AccessKey'] + resp = client.get_access_key_last_used(AccessKeyId=create_key_response['AccessKeyId']) + + datetime.strftime(resp["AccessKeyLastUsed"]["LastUsedDate"], "%Y-%m-%d").should.equal(datetime.strftime( + datetime.utcnow(), + "%Y-%m-%d" + )) + resp["UserName"].should.equal(create_key_response["UserName"]) + + +@mock_iam +def test_get_account_authorization_details(): + import json + test_policy = json.dumps({ + "Version": "2012-10-17", + "Statement": [ + { + "Action": "s3:ListBucket", + "Resource": "*", + "Effect": "Allow", + } + ] + }) + + conn = boto3.client('iam', region_name='us-east-1') + conn.create_role(RoleName="my-role", 
AssumeRolePolicyDocument="some policy", Path="/my-path/") + conn.create_user(Path='/', UserName='testUser') + conn.create_group(Path='/', GroupName='testGroup') + conn.create_policy( + PolicyName='testPolicy', + Path='/', + PolicyDocument=test_policy, + Description='Test Policy' + ) + + # Attach things to the user and group: + conn.put_user_policy(UserName='testUser', PolicyName='testPolicy', PolicyDocument=test_policy) + conn.put_group_policy(GroupName='testGroup', PolicyName='testPolicy', PolicyDocument=test_policy) + + conn.attach_user_policy(UserName='testUser', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy') + conn.attach_group_policy(GroupName='testGroup', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy') + + conn.add_user_to_group(UserName='testUser', GroupName='testGroup') + + # Add things to the role: + conn.create_instance_profile(InstanceProfileName='ipn') + conn.add_role_to_instance_profile(InstanceProfileName='ipn', RoleName='my-role') + conn.tag_role(RoleName='my-role', Tags=[ + { + 'Key': 'somekey', + 'Value': 'somevalue' + }, + { + 'Key': 'someotherkey', + 'Value': 'someothervalue' + } + ]) + conn.put_role_policy(RoleName='my-role', PolicyName='test-policy', PolicyDocument=test_policy) + conn.attach_role_policy(RoleName='my-role', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy') + + result = conn.get_account_authorization_details(Filter=['Role']) + assert len(result['RoleDetailList']) == 1 + assert len(result['UserDetailList']) == 0 + assert len(result['GroupDetailList']) == 0 + assert len(result['Policies']) == 0 + assert len(result['RoleDetailList'][0]['InstanceProfileList']) == 1 + assert len(result['RoleDetailList'][0]['Tags']) == 2 + assert len(result['RoleDetailList'][0]['RolePolicyList']) == 1 + assert len(result['RoleDetailList'][0]['AttachedManagedPolicies']) == 1 + assert result['RoleDetailList'][0]['AttachedManagedPolicies'][0]['PolicyName'] == 'testPolicy' + assert 
result['RoleDetailList'][0]['AttachedManagedPolicies'][0]['PolicyArn'] == \ + 'arn:aws:iam::123456789012:policy/testPolicy' + + result = conn.get_account_authorization_details(Filter=['User']) + assert len(result['RoleDetailList']) == 0 + assert len(result['UserDetailList']) == 1 + assert len(result['UserDetailList'][0]['GroupList']) == 1 + assert len(result['UserDetailList'][0]['AttachedManagedPolicies']) == 1 + assert len(result['GroupDetailList']) == 0 + assert len(result['Policies']) == 0 + assert result['UserDetailList'][0]['AttachedManagedPolicies'][0]['PolicyName'] == 'testPolicy' + assert result['UserDetailList'][0]['AttachedManagedPolicies'][0]['PolicyArn'] == \ + 'arn:aws:iam::123456789012:policy/testPolicy' + + result = conn.get_account_authorization_details(Filter=['Group']) + assert len(result['RoleDetailList']) == 0 + assert len(result['UserDetailList']) == 0 + assert len(result['GroupDetailList']) == 1 + assert len(result['GroupDetailList'][0]['GroupPolicyList']) == 1 + assert len(result['GroupDetailList'][0]['AttachedManagedPolicies']) == 1 + assert len(result['Policies']) == 0 + assert result['GroupDetailList'][0]['AttachedManagedPolicies'][0]['PolicyName'] == 'testPolicy' + assert result['GroupDetailList'][0]['AttachedManagedPolicies'][0]['PolicyArn'] == \ + 'arn:aws:iam::123456789012:policy/testPolicy' + + result = conn.get_account_authorization_details(Filter=['LocalManagedPolicy']) + assert len(result['RoleDetailList']) == 0 + assert len(result['UserDetailList']) == 0 + assert len(result['GroupDetailList']) == 0 + assert len(result['Policies']) == 1 + assert len(result['Policies'][0]['PolicyVersionList']) == 1 + + # Check for greater than 1 since this should always be greater than one but might change. 
+ # See iam/aws_managed_policies.py + result = conn.get_account_authorization_details(Filter=['AWSManagedPolicy']) + assert len(result['RoleDetailList']) == 0 + assert len(result['UserDetailList']) == 0 + assert len(result['GroupDetailList']) == 0 + assert len(result['Policies']) > 1 + + result = conn.get_account_authorization_details() + assert len(result['RoleDetailList']) == 1 + assert len(result['UserDetailList']) == 1 + assert len(result['GroupDetailList']) == 1 + assert len(result['Policies']) > 1 + + +@mock_iam +def test_signing_certs(): + client = boto3.client('iam', region_name='us-east-1') + + # Create the IAM user first: + client.create_user(UserName='testing') + + # Upload the cert: + resp = client.upload_signing_certificate(UserName='testing', CertificateBody=MOCK_CERT)['Certificate'] + cert_id = resp['CertificateId'] + + assert resp['UserName'] == 'testing' + assert resp['Status'] == 'Active' + assert resp['CertificateBody'] == MOCK_CERT + assert resp['CertificateId'] + + # Upload the cert with an invalid body: + with assert_raises(ClientError) as ce: + client.upload_signing_certificate(UserName='testing', CertificateBody='notacert') + assert ce.exception.response['Error']['Code'] == 'MalformedCertificate' + + # Upload with an invalid user: + with assert_raises(ClientError): + client.upload_signing_certificate(UserName='notauser', CertificateBody=MOCK_CERT) + + # Update: + client.update_signing_certificate(UserName='testing', CertificateId=cert_id, Status='Inactive') + + with assert_raises(ClientError): + client.update_signing_certificate(UserName='notauser', CertificateId=cert_id, Status='Inactive') + + with assert_raises(ClientError) as ce: + client.update_signing_certificate(UserName='testing', CertificateId='x' * 32, Status='Inactive') + + assert ce.exception.response['Error']['Message'] == 'The Certificate with id {id} cannot be found.'.format( + id='x' * 32) + + # List the certs: + resp = 
client.list_signing_certificates(UserName='testing')['Certificates'] + assert len(resp) == 1 + assert resp[0]['CertificateBody'] == MOCK_CERT + assert resp[0]['Status'] == 'Inactive' # Changed with the update call above. + + with assert_raises(ClientError): + client.list_signing_certificates(UserName='notauser') + + # Delete: + client.delete_signing_certificate(UserName='testing', CertificateId=cert_id) + + with assert_raises(ClientError): + client.delete_signing_certificate(UserName='notauser', CertificateId=cert_id) + + +@mock_iam() +def test_create_saml_provider(): + conn = boto3.client('iam', region_name='us-east-1') + response = conn.create_saml_provider( + Name="TestSAMLProvider", + SAMLMetadataDocument='a' * 1024 + ) + response['SAMLProviderArn'].should.equal("arn:aws:iam::123456789012:saml-provider/TestSAMLProvider") + + +@mock_iam() +def test_get_saml_provider(): + conn = boto3.client('iam', region_name='us-east-1') + saml_provider_create = conn.create_saml_provider( + Name="TestSAMLProvider", + SAMLMetadataDocument='a' * 1024 + ) + response = conn.get_saml_provider( + SAMLProviderArn=saml_provider_create['SAMLProviderArn'] + ) + response['SAMLMetadataDocument'].should.equal('a' * 1024) + + +@mock_iam() +def test_list_saml_providers(): + conn = boto3.client('iam', region_name='us-east-1') + conn.create_saml_provider( + Name="TestSAMLProvider", + SAMLMetadataDocument='a' * 1024 + ) + response = conn.list_saml_providers() + response['SAMLProviderList'][0]['Arn'].should.equal("arn:aws:iam::123456789012:saml-provider/TestSAMLProvider") + + +@mock_iam() +def test_delete_saml_provider(): + conn = boto3.client('iam', region_name='us-east-1') + saml_provider_create = conn.create_saml_provider( + Name="TestSAMLProvider", + SAMLMetadataDocument='a' * 1024 + ) + response = conn.list_saml_providers() + len(response['SAMLProviderList']).should.equal(1) + conn.delete_saml_provider( + SAMLProviderArn=saml_provider_create['SAMLProviderArn'] + ) + response = 
conn.list_saml_providers() + len(response['SAMLProviderList']).should.equal(0) + conn.create_user(UserName='testing') + + cert_id = '123456789012345678901234' + with assert_raises(ClientError) as ce: + conn.delete_signing_certificate(UserName='testing', CertificateId=cert_id) + + assert ce.exception.response['Error']['Message'] == 'The Certificate with id {id} cannot be found.'.format( + id=cert_id) + + # Verify that it's not in the list: + resp = conn.list_signing_certificates(UserName='testing') + assert not resp['Certificates'] + + +@mock_iam() +def test_tag_role(): + """Tests both the tag_role and get_role_tags capability""" + conn = boto3.client('iam', region_name='us-east-1') + conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="{}") + + # Get without tags: + role = conn.get_role(RoleName='my-role')['Role'] + assert not role.get('Tags') + + # With proper tag values: + conn.tag_role(RoleName='my-role', Tags=[ + { + 'Key': 'somekey', + 'Value': 'somevalue' + }, + { + 'Key': 'someotherkey', + 'Value': 'someothervalue' + } + ]) + + # Get role: + role = conn.get_role(RoleName='my-role')['Role'] + assert len(role['Tags']) == 2 + assert role['Tags'][0]['Key'] == 'somekey' + assert role['Tags'][0]['Value'] == 'somevalue' + assert role['Tags'][1]['Key'] == 'someotherkey' + assert role['Tags'][1]['Value'] == 'someothervalue' + + # Same -- but for list_role_tags: + tags = conn.list_role_tags(RoleName='my-role') + assert len(tags['Tags']) == 2 + assert role['Tags'][0]['Key'] == 'somekey' + assert role['Tags'][0]['Value'] == 'somevalue' + assert role['Tags'][1]['Key'] == 'someotherkey' + assert role['Tags'][1]['Value'] == 'someothervalue' + assert not tags['IsTruncated'] + assert not tags.get('Marker') + + # Test pagination: + tags = conn.list_role_tags(RoleName='my-role', MaxItems=1) + assert len(tags['Tags']) == 1 + assert tags['IsTruncated'] + assert tags['Tags'][0]['Key'] == 'somekey' + assert tags['Tags'][0]['Value'] == 'somevalue' + assert tags['Marker'] 
== '1' + + tags = conn.list_role_tags(RoleName='my-role', Marker=tags['Marker']) + assert len(tags['Tags']) == 1 + assert tags['Tags'][0]['Key'] == 'someotherkey' + assert tags['Tags'][0]['Value'] == 'someothervalue' + assert not tags['IsTruncated'] + assert not tags.get('Marker') + + # Test updating an existing tag: + conn.tag_role(RoleName='my-role', Tags=[ + { + 'Key': 'somekey', + 'Value': 'somenewvalue' + } + ]) + tags = conn.list_role_tags(RoleName='my-role') + assert len(tags['Tags']) == 2 + assert tags['Tags'][0]['Key'] == 'somekey' + assert tags['Tags'][0]['Value'] == 'somenewvalue' + + # Empty is good: + conn.tag_role(RoleName='my-role', Tags=[ + { + 'Key': 'somekey', + 'Value': '' + } + ]) + tags = conn.list_role_tags(RoleName='my-role') + assert len(tags['Tags']) == 2 + assert tags['Tags'][0]['Key'] == 'somekey' + assert tags['Tags'][0]['Value'] == '' + + # Test creating tags with invalid values: + # With more than 50 tags: + with assert_raises(ClientError) as ce: + too_many_tags = list(map(lambda x: {'Key': str(x), 'Value': str(x)}, range(0, 51))) + conn.tag_role(RoleName='my-role', Tags=too_many_tags) + assert 'failed to satisfy constraint: Member must have length less than or equal to 50.' \ + in ce.exception.response['Error']['Message'] + + # With a duplicate tag: + with assert_raises(ClientError) as ce: + conn.tag_role(RoleName='my-role', Tags=[{'Key': '0', 'Value': ''}, {'Key': '0', 'Value': ''}]) + assert 'Duplicate tag keys found. Please note that Tag keys are case insensitive.' \ + in ce.exception.response['Error']['Message'] + + # Duplicate tag with different casing: + with assert_raises(ClientError) as ce: + conn.tag_role(RoleName='my-role', Tags=[{'Key': 'a', 'Value': ''}, {'Key': 'A', 'Value': ''}]) + assert 'Duplicate tag keys found. Please note that Tag keys are case insensitive.' 
\ + in ce.exception.response['Error']['Message'] + + # With a really big key: + with assert_raises(ClientError) as ce: + conn.tag_role(RoleName='my-role', Tags=[{'Key': '0' * 129, 'Value': ''}]) + assert 'Member must have length less than or equal to 128.' in ce.exception.response['Error']['Message'] + + # With a really big value: + with assert_raises(ClientError) as ce: + conn.tag_role(RoleName='my-role', Tags=[{'Key': '0', 'Value': '0' * 257}]) + assert 'Member must have length less than or equal to 256.' in ce.exception.response['Error']['Message'] + + # With an invalid character: + with assert_raises(ClientError) as ce: + conn.tag_role(RoleName='my-role', Tags=[{'Key': 'NOWAY!', 'Value': ''}]) + assert 'Member must satisfy regular expression pattern: [\\p{L}\\p{Z}\\p{N}_.:/=+\\-@]+' \ + in ce.exception.response['Error']['Message'] + + # With a role that doesn't exist: + with assert_raises(ClientError): + conn.tag_role(RoleName='notarole', Tags=[{'Key': 'some', 'Value': 'value'}]) + + +@mock_iam +def test_untag_role(): + conn = boto3.client('iam', region_name='us-east-1') + conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="{}") + + # With proper tag values: + conn.tag_role(RoleName='my-role', Tags=[ + { + 'Key': 'somekey', + 'Value': 'somevalue' + }, + { + 'Key': 'someotherkey', + 'Value': 'someothervalue' + } + ]) + + # Remove them: + conn.untag_role(RoleName='my-role', TagKeys=['somekey']) + tags = conn.list_role_tags(RoleName='my-role') + assert len(tags['Tags']) == 1 + assert tags['Tags'][0]['Key'] == 'someotherkey' + assert tags['Tags'][0]['Value'] == 'someothervalue' + + # And again: + conn.untag_role(RoleName='my-role', TagKeys=['someotherkey']) + tags = conn.list_role_tags(RoleName='my-role') + assert not tags['Tags'] + + # Test removing tags with invalid values: + # With more than 50 tags: + with assert_raises(ClientError) as ce: + conn.untag_role(RoleName='my-role', TagKeys=[str(x) for x in range(0, 51)]) + assert 'failed to satisfy 
constraint: Member must have length less than or equal to 50.' \ + in ce.exception.response['Error']['Message'] + assert 'tagKeys' in ce.exception.response['Error']['Message'] + + # With a really big key: + with assert_raises(ClientError) as ce: + conn.untag_role(RoleName='my-role', TagKeys=['0' * 129]) + assert 'Member must have length less than or equal to 128.' in ce.exception.response['Error']['Message'] + assert 'tagKeys' in ce.exception.response['Error']['Message'] + + # With an invalid character: + with assert_raises(ClientError) as ce: + conn.untag_role(RoleName='my-role', TagKeys=['NOWAY!']) + assert 'Member must satisfy regular expression pattern: [\\p{L}\\p{Z}\\p{N}_.:/=+\\-@]+' \ + in ce.exception.response['Error']['Message'] + assert 'tagKeys' in ce.exception.response['Error']['Message'] + + # With a role that doesn't exist: + with assert_raises(ClientError): + conn.untag_role(RoleName='notarole', TagKeys=['somevalue']) + + +@mock_iam() +def test_update_role_description(): + conn = boto3.client('iam', region_name='us-east-1') + + with assert_raises(ClientError): + conn.delete_role(RoleName="my-role") + + conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") + response = conn.update_role_description(RoleName="my-role", Description="test") + + assert response['Role']['RoleName'] == 'my-role' + +@mock_iam() +def test_update_role(): + conn = boto3.client('iam', region_name='us-east-1') + + with assert_raises(ClientError): + conn.delete_role(RoleName="my-role") + + conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") + response = conn.update_role_description(RoleName="my-role", Description="test") + assert response['Role']['RoleName'] == 'my-role' + +@mock_iam() +def test_update_role(): + conn = boto3.client('iam', region_name='us-east-1') + + with assert_raises(ClientError): + conn.delete_role(RoleName="my-role") + + conn.create_role(RoleName="my-role", 
AssumeRolePolicyDocument="some policy", Path="/my-path/") + response = conn.update_role(RoleName="my-role", Description="test") + assert len(response.keys()) == 1 + + +@mock_iam() +def test_list_entities_for_policy(): + import json + test_policy = json.dumps({ + "Version": "2012-10-17", + "Statement": [ + { + "Action": "s3:ListBucket", + "Resource": "*", + "Effect": "Allow", + } + ] + }) + + conn = boto3.client('iam', region_name='us-east-1') + conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") + conn.create_user(Path='/', UserName='testUser') + conn.create_group(Path='/', GroupName='testGroup') + conn.create_policy( + PolicyName='testPolicy', + Path='/', + PolicyDocument=test_policy, + Description='Test Policy' + ) + + # Attach things to the user and group: + conn.put_user_policy(UserName='testUser', PolicyName='testPolicy', PolicyDocument=test_policy) + conn.put_group_policy(GroupName='testGroup', PolicyName='testPolicy', PolicyDocument=test_policy) + + conn.attach_user_policy(UserName='testUser', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy') + conn.attach_group_policy(GroupName='testGroup', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy') + + conn.add_user_to_group(UserName='testUser', GroupName='testGroup') + + # Add things to the role: + conn.create_instance_profile(InstanceProfileName='ipn') + conn.add_role_to_instance_profile(InstanceProfileName='ipn', RoleName='my-role') + conn.tag_role(RoleName='my-role', Tags=[ + { + 'Key': 'somekey', + 'Value': 'somevalue' + }, + { + 'Key': 'someotherkey', + 'Value': 'someothervalue' + } + ]) + conn.put_role_policy(RoleName='my-role', PolicyName='test-policy', PolicyDocument=test_policy) + conn.attach_role_policy(RoleName='my-role', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy') + + response = conn.list_entities_for_policy( + PolicyArn='arn:aws:iam::123456789012:policy/testPolicy', + EntityFilter='Role' + ) + assert response['PolicyRoles'] == 
[{'RoleName': 'my-role'}] + + response = conn.list_entities_for_policy( + PolicyArn='arn:aws:iam::123456789012:policy/testPolicy', + EntityFilter='User', + ) + assert response['PolicyUsers'] == [{'UserName': 'testUser'}] + + response = conn.list_entities_for_policy( + PolicyArn='arn:aws:iam::123456789012:policy/testPolicy', + EntityFilter='Group', + ) + assert response['PolicyGroups'] == [{'GroupName': 'testGroup'}] + + response = conn.list_entities_for_policy( + PolicyArn='arn:aws:iam::123456789012:policy/testPolicy', + EntityFilter='LocalManagedPolicy', + ) + assert response['PolicyGroups'] == [{'GroupName': 'testGroup'}] + assert response['PolicyUsers'] == [{'UserName': 'testUser'}] + assert response['PolicyRoles'] == [{'RoleName': 'my-role'}] + + +@mock_iam() +def test_create_role_no_path(): + conn = boto3.client('iam', region_name='us-east-1') + resp = conn.create_role(RoleName='my-role', AssumeRolePolicyDocument='some policy', Description='test') + resp.get('Role').get('Arn').should.equal('arn:aws:iam::123456789012:role/my-role') + diff --git a/tests/test_kms/test_kms.py b/tests/test_kms/test_kms.py index 830c531a2..e7ce9f74b 100644 --- a/tests/test_kms/test_kms.py +++ b/tests/test_kms/test_kms.py @@ -1,719 +1,984 @@ -from __future__ import unicode_literals -import os, re - -import boto3 -import boto.kms -from boto.exception import JSONResponseError -from boto.kms.exceptions import AlreadyExistsException, NotFoundException -import sure # noqa -from moto import mock_kms, mock_kms_deprecated -from nose.tools import assert_raises -from freezegun import freeze_time -from datetime import datetime, timedelta -from dateutil.tz import tzlocal - - -@mock_kms_deprecated -def test_create_key(): - conn = boto.kms.connect_to_region("us-west-2") - - key = conn.create_key(policy="my policy", - description="my key", key_usage='ENCRYPT_DECRYPT') - - key['KeyMetadata']['Description'].should.equal("my key") - key['KeyMetadata']['KeyUsage'].should.equal("ENCRYPT_DECRYPT") - 
key['KeyMetadata']['Enabled'].should.equal(True) - - -@mock_kms_deprecated -def test_describe_key(): - conn = boto.kms.connect_to_region("us-west-2") - key = conn.create_key(policy="my policy", - description="my key", key_usage='ENCRYPT_DECRYPT') - key_id = key['KeyMetadata']['KeyId'] - - key = conn.describe_key(key_id) - key['KeyMetadata']['Description'].should.equal("my key") - key['KeyMetadata']['KeyUsage'].should.equal("ENCRYPT_DECRYPT") - - -@mock_kms_deprecated -def test_describe_key_via_alias(): - conn = boto.kms.connect_to_region("us-west-2") - key = conn.create_key(policy="my policy", - description="my key", key_usage='ENCRYPT_DECRYPT') - conn.create_alias(alias_name='alias/my-key-alias', - target_key_id=key['KeyMetadata']['KeyId']) - - alias_key = conn.describe_key('alias/my-key-alias') - alias_key['KeyMetadata']['Description'].should.equal("my key") - alias_key['KeyMetadata']['KeyUsage'].should.equal("ENCRYPT_DECRYPT") - alias_key['KeyMetadata']['Arn'].should.equal(key['KeyMetadata']['Arn']) - - -@mock_kms_deprecated -def test_describe_key_via_alias_not_found(): - conn = boto.kms.connect_to_region("us-west-2") - key = conn.create_key(policy="my policy", - description="my key", key_usage='ENCRYPT_DECRYPT') - conn.create_alias(alias_name='alias/my-key-alias', - target_key_id=key['KeyMetadata']['KeyId']) - - conn.describe_key.when.called_with( - 'alias/not-found-alias').should.throw(JSONResponseError) - - -@mock_kms_deprecated -def test_describe_key_via_arn(): - conn = boto.kms.connect_to_region("us-west-2") - key = conn.create_key(policy="my policy", - description="my key", key_usage='ENCRYPT_DECRYPT') - arn = key['KeyMetadata']['Arn'] - - the_key = conn.describe_key(arn) - the_key['KeyMetadata']['Description'].should.equal("my key") - the_key['KeyMetadata']['KeyUsage'].should.equal("ENCRYPT_DECRYPT") - the_key['KeyMetadata']['KeyId'].should.equal(key['KeyMetadata']['KeyId']) - - -@mock_kms_deprecated -def test_describe_missing_key(): - conn = 
boto.kms.connect_to_region("us-west-2") - conn.describe_key.when.called_with( - "not-a-key").should.throw(JSONResponseError) - - -@mock_kms_deprecated -def test_list_keys(): - conn = boto.kms.connect_to_region("us-west-2") - - conn.create_key(policy="my policy", description="my key1", - key_usage='ENCRYPT_DECRYPT') - conn.create_key(policy="my policy", description="my key2", - key_usage='ENCRYPT_DECRYPT') - - keys = conn.list_keys() - keys['Keys'].should.have.length_of(2) - - -@mock_kms_deprecated -def test_enable_key_rotation(): - conn = boto.kms.connect_to_region("us-west-2") - - key = conn.create_key(policy="my policy", - description="my key", key_usage='ENCRYPT_DECRYPT') - key_id = key['KeyMetadata']['KeyId'] - - conn.enable_key_rotation(key_id) - - conn.get_key_rotation_status( - key_id)['KeyRotationEnabled'].should.equal(True) - - -@mock_kms_deprecated -def test_enable_key_rotation_via_arn(): - conn = boto.kms.connect_to_region("us-west-2") - - key = conn.create_key(policy="my policy", - description="my key", key_usage='ENCRYPT_DECRYPT') - key_id = key['KeyMetadata']['Arn'] - - conn.enable_key_rotation(key_id) - - conn.get_key_rotation_status( - key_id)['KeyRotationEnabled'].should.equal(True) - - -@mock_kms_deprecated -def test_enable_key_rotation_with_missing_key(): - conn = boto.kms.connect_to_region("us-west-2") - conn.enable_key_rotation.when.called_with( - "not-a-key").should.throw(JSONResponseError) - - -@mock_kms_deprecated -def test_enable_key_rotation_with_alias_name_should_fail(): - conn = boto.kms.connect_to_region("us-west-2") - key = conn.create_key(policy="my policy", - description="my key", key_usage='ENCRYPT_DECRYPT') - conn.create_alias(alias_name='alias/my-key-alias', - target_key_id=key['KeyMetadata']['KeyId']) - - alias_key = conn.describe_key('alias/my-key-alias') - alias_key['KeyMetadata']['Arn'].should.equal(key['KeyMetadata']['Arn']) - - conn.enable_key_rotation.when.called_with( - 'alias/my-alias').should.throw(JSONResponseError) - - 
-@mock_kms_deprecated -def test_disable_key_rotation(): - conn = boto.kms.connect_to_region("us-west-2") - - key = conn.create_key(policy="my policy", - description="my key", key_usage='ENCRYPT_DECRYPT') - key_id = key['KeyMetadata']['KeyId'] - - conn.enable_key_rotation(key_id) - conn.get_key_rotation_status( - key_id)['KeyRotationEnabled'].should.equal(True) - - conn.disable_key_rotation(key_id) - conn.get_key_rotation_status( - key_id)['KeyRotationEnabled'].should.equal(False) - - -@mock_kms_deprecated -def test_encrypt(): - """ - test_encrypt - Using base64 encoding to merely test that the endpoint was called - """ - conn = boto.kms.connect_to_region("us-west-2") - response = conn.encrypt('key_id', 'encryptme'.encode('utf-8')) - response['CiphertextBlob'].should.equal(b'ZW5jcnlwdG1l') - - -@mock_kms_deprecated -def test_decrypt(): - conn = boto.kms.connect_to_region('us-west-2') - response = conn.decrypt('ZW5jcnlwdG1l'.encode('utf-8')) - response['Plaintext'].should.equal(b'encryptme') - - -@mock_kms_deprecated -def test_disable_key_rotation_with_missing_key(): - conn = boto.kms.connect_to_region("us-west-2") - conn.disable_key_rotation.when.called_with( - "not-a-key").should.throw(JSONResponseError) - - -@mock_kms_deprecated -def test_get_key_rotation_status_with_missing_key(): - conn = boto.kms.connect_to_region("us-west-2") - conn.get_key_rotation_status.when.called_with( - "not-a-key").should.throw(JSONResponseError) - - -@mock_kms_deprecated -def test_get_key_rotation_status(): - conn = boto.kms.connect_to_region("us-west-2") - - key = conn.create_key(policy="my policy", - description="my key", key_usage='ENCRYPT_DECRYPT') - key_id = key['KeyMetadata']['KeyId'] - - conn.get_key_rotation_status( - key_id)['KeyRotationEnabled'].should.equal(False) - - -@mock_kms_deprecated -def test_create_key_defaults_key_rotation(): - conn = boto.kms.connect_to_region("us-west-2") - - key = conn.create_key(policy="my policy", - description="my key", 
key_usage='ENCRYPT_DECRYPT') - key_id = key['KeyMetadata']['KeyId'] - - conn.get_key_rotation_status( - key_id)['KeyRotationEnabled'].should.equal(False) - - -@mock_kms_deprecated -def test_get_key_policy(): - conn = boto.kms.connect_to_region('us-west-2') - - key = conn.create_key(policy='my policy', - description='my key1', key_usage='ENCRYPT_DECRYPT') - key_id = key['KeyMetadata']['KeyId'] - - policy = conn.get_key_policy(key_id, 'default') - policy['Policy'].should.equal('my policy') - - -@mock_kms_deprecated -def test_get_key_policy_via_arn(): - conn = boto.kms.connect_to_region('us-west-2') - - key = conn.create_key(policy='my policy', - description='my key1', key_usage='ENCRYPT_DECRYPT') - policy = conn.get_key_policy(key['KeyMetadata']['Arn'], 'default') - - policy['Policy'].should.equal('my policy') - - -@mock_kms_deprecated -def test_put_key_policy(): - conn = boto.kms.connect_to_region('us-west-2') - - key = conn.create_key(policy='my policy', - description='my key1', key_usage='ENCRYPT_DECRYPT') - key_id = key['KeyMetadata']['KeyId'] - - conn.put_key_policy(key_id, 'default', 'new policy') - policy = conn.get_key_policy(key_id, 'default') - policy['Policy'].should.equal('new policy') - - -@mock_kms_deprecated -def test_put_key_policy_via_arn(): - conn = boto.kms.connect_to_region('us-west-2') - - key = conn.create_key(policy='my policy', - description='my key1', key_usage='ENCRYPT_DECRYPT') - key_id = key['KeyMetadata']['Arn'] - - conn.put_key_policy(key_id, 'default', 'new policy') - policy = conn.get_key_policy(key_id, 'default') - policy['Policy'].should.equal('new policy') - - -@mock_kms_deprecated -def test_put_key_policy_via_alias_should_not_update(): - conn = boto.kms.connect_to_region('us-west-2') - - key = conn.create_key(policy='my policy', - description='my key1', key_usage='ENCRYPT_DECRYPT') - conn.create_alias(alias_name='alias/my-key-alias', - target_key_id=key['KeyMetadata']['KeyId']) - - conn.put_key_policy.when.called_with( - 
'alias/my-key-alias', 'default', 'new policy').should.throw(JSONResponseError) - - policy = conn.get_key_policy(key['KeyMetadata']['KeyId'], 'default') - policy['Policy'].should.equal('my policy') - - -@mock_kms_deprecated -def test_put_key_policy(): - conn = boto.kms.connect_to_region('us-west-2') - - key = conn.create_key(policy='my policy', - description='my key1', key_usage='ENCRYPT_DECRYPT') - conn.put_key_policy(key['KeyMetadata']['Arn'], 'default', 'new policy') - - policy = conn.get_key_policy(key['KeyMetadata']['KeyId'], 'default') - policy['Policy'].should.equal('new policy') - - -@mock_kms_deprecated -def test_list_key_policies(): - conn = boto.kms.connect_to_region('us-west-2') - - key = conn.create_key(policy='my policy', - description='my key1', key_usage='ENCRYPT_DECRYPT') - key_id = key['KeyMetadata']['KeyId'] - - policies = conn.list_key_policies(key_id) - policies['PolicyNames'].should.equal(['default']) - - -@mock_kms_deprecated -def test__create_alias__returns_none_if_correct(): - kms = boto.connect_kms() - create_resp = kms.create_key() - key_id = create_resp['KeyMetadata']['KeyId'] - - resp = kms.create_alias('alias/my-alias', key_id) - - resp.should.be.none - - -@mock_kms_deprecated -def test__create_alias__raises_if_reserved_alias(): - kms = boto.connect_kms() - create_resp = kms.create_key() - key_id = create_resp['KeyMetadata']['KeyId'] - - reserved_aliases = [ - 'alias/aws/ebs', - 'alias/aws/s3', - 'alias/aws/redshift', - 'alias/aws/rds', - ] - - for alias_name in reserved_aliases: - with assert_raises(JSONResponseError) as err: - kms.create_alias(alias_name, key_id) - - ex = err.exception - ex.error_message.should.be.none - ex.error_code.should.equal('NotAuthorizedException') - ex.body.should.equal({'__type': 'NotAuthorizedException'}) - ex.reason.should.equal('Bad Request') - ex.status.should.equal(400) - - -@mock_kms_deprecated -def test__create_alias__can_create_multiple_aliases_for_same_key_id(): - kms = boto.connect_kms() - 
create_resp = kms.create_key() - key_id = create_resp['KeyMetadata']['KeyId'] - - kms.create_alias('alias/my-alias3', key_id).should.be.none - kms.create_alias('alias/my-alias4', key_id).should.be.none - kms.create_alias('alias/my-alias5', key_id).should.be.none - - -@mock_kms_deprecated -def test__create_alias__raises_if_wrong_prefix(): - kms = boto.connect_kms() - create_resp = kms.create_key() - key_id = create_resp['KeyMetadata']['KeyId'] - - with assert_raises(JSONResponseError) as err: - kms.create_alias('wrongprefix/my-alias', key_id) - - ex = err.exception - ex.error_message.should.equal('Invalid identifier') - ex.error_code.should.equal('ValidationException') - ex.body.should.equal({'message': 'Invalid identifier', - '__type': 'ValidationException'}) - ex.reason.should.equal('Bad Request') - ex.status.should.equal(400) - - -@mock_kms_deprecated -def test__create_alias__raises_if_duplicate(): - region = 'us-west-2' - kms = boto.kms.connect_to_region(region) - create_resp = kms.create_key() - key_id = create_resp['KeyMetadata']['KeyId'] - alias = 'alias/my-alias' - - kms.create_alias(alias, key_id) - - with assert_raises(AlreadyExistsException) as err: - kms.create_alias(alias, key_id) - - ex = err.exception - ex.error_message.should.match(r'An alias with the name arn:aws:kms:{region}:\d{{12}}:{alias} already exists' - .format(**locals())) - ex.error_code.should.be.none - ex.box_usage.should.be.none - ex.request_id.should.be.none - ex.body['message'].should.match(r'An alias with the name arn:aws:kms:{region}:\d{{12}}:{alias} already exists' - .format(**locals())) - ex.body['__type'].should.equal('AlreadyExistsException') - ex.reason.should.equal('Bad Request') - ex.status.should.equal(400) - - -@mock_kms_deprecated -def test__create_alias__raises_if_alias_has_restricted_characters(): - kms = boto.connect_kms() - create_resp = kms.create_key() - key_id = create_resp['KeyMetadata']['KeyId'] - - alias_names_with_restricted_characters = [ - 'alias/my-alias!', - 
'alias/my-alias$', - 'alias/my-alias@', - ] - - for alias_name in alias_names_with_restricted_characters: - with assert_raises(JSONResponseError) as err: - kms.create_alias(alias_name, key_id) - ex = err.exception - ex.body['__type'].should.equal('ValidationException') - ex.body['message'].should.equal( - "1 validation error detected: Value '{alias_name}' at 'aliasName' failed to satisfy constraint: Member must satisfy regular expression pattern: ^[a-zA-Z0-9:/_-]+$".format(**locals())) - ex.error_code.should.equal('ValidationException') - ex.message.should.equal( - "1 validation error detected: Value '{alias_name}' at 'aliasName' failed to satisfy constraint: Member must satisfy regular expression pattern: ^[a-zA-Z0-9:/_-]+$".format(**locals())) - ex.reason.should.equal('Bad Request') - ex.status.should.equal(400) - - -@mock_kms_deprecated -def test__create_alias__raises_if_alias_has_colon_character(): - # For some reason, colons are not accepted for an alias, even though they - # are accepted by regex ^[a-zA-Z0-9:/_-]+$ - kms = boto.connect_kms() - create_resp = kms.create_key() - key_id = create_resp['KeyMetadata']['KeyId'] - - alias_names_with_restricted_characters = [ - 'alias/my:alias', - ] - - for alias_name in alias_names_with_restricted_characters: - with assert_raises(JSONResponseError) as err: - kms.create_alias(alias_name, key_id) - ex = err.exception - ex.body['__type'].should.equal('ValidationException') - ex.body['message'].should.equal( - "{alias_name} contains invalid characters for an alias".format(**locals())) - ex.error_code.should.equal('ValidationException') - ex.message.should.equal( - "{alias_name} contains invalid characters for an alias".format(**locals())) - ex.reason.should.equal('Bad Request') - ex.status.should.equal(400) - - -@mock_kms_deprecated -def test__create_alias__accepted_characters(): - kms = boto.connect_kms() - create_resp = kms.create_key() - key_id = create_resp['KeyMetadata']['KeyId'] - - 
alias_names_with_accepted_characters = [ - 'alias/my-alias_/', - 'alias/my_alias-/', - ] - - for alias_name in alias_names_with_accepted_characters: - kms.create_alias(alias_name, key_id) - - -@mock_kms_deprecated -def test__create_alias__raises_if_target_key_id_is_existing_alias(): - kms = boto.connect_kms() - create_resp = kms.create_key() - key_id = create_resp['KeyMetadata']['KeyId'] - alias = 'alias/my-alias' - - kms.create_alias(alias, key_id) - - with assert_raises(JSONResponseError) as err: - kms.create_alias(alias, alias) - - ex = err.exception - ex.body['__type'].should.equal('ValidationException') - ex.body['message'].should.equal('Aliases must refer to keys. Not aliases') - ex.error_code.should.equal('ValidationException') - ex.message.should.equal('Aliases must refer to keys. Not aliases') - ex.reason.should.equal('Bad Request') - ex.status.should.equal(400) - - -@mock_kms_deprecated -def test__delete_alias(): - kms = boto.connect_kms() - create_resp = kms.create_key() - key_id = create_resp['KeyMetadata']['KeyId'] - alias = 'alias/my-alias' - - # added another alias here to make sure that the deletion of the alias can - # be done when there are multiple existing aliases. 
- another_create_resp = kms.create_key() - another_key_id = create_resp['KeyMetadata']['KeyId'] - another_alias = 'alias/another-alias' - - kms.create_alias(alias, key_id) - kms.create_alias(another_alias, another_key_id) - - resp = kms.delete_alias(alias) - - resp.should.be.none - - # we can create the alias again, since it has been deleted - kms.create_alias(alias, key_id) - - -@mock_kms_deprecated -def test__delete_alias__raises_if_wrong_prefix(): - kms = boto.connect_kms() - - with assert_raises(JSONResponseError) as err: - kms.delete_alias('wrongprefix/my-alias') - - ex = err.exception - ex.body['__type'].should.equal('ValidationException') - ex.body['message'].should.equal('Invalid identifier') - ex.error_code.should.equal('ValidationException') - ex.message.should.equal('Invalid identifier') - ex.reason.should.equal('Bad Request') - ex.status.should.equal(400) - - -@mock_kms_deprecated -def test__delete_alias__raises_if_alias_is_not_found(): - region = 'us-west-2' - kms = boto.kms.connect_to_region(region) - alias_name = 'alias/unexisting-alias' - - with assert_raises(NotFoundException) as err: - kms.delete_alias(alias_name) - - ex = err.exception - ex.body['__type'].should.equal('NotFoundException') - ex.body['message'].should.match( - r'Alias arn:aws:kms:{region}:\d{{12}}:{alias_name} is not found.'.format(**locals())) - ex.box_usage.should.be.none - ex.error_code.should.be.none - ex.message.should.match( - r'Alias arn:aws:kms:{region}:\d{{12}}:{alias_name} is not found.'.format(**locals())) - ex.reason.should.equal('Bad Request') - ex.request_id.should.be.none - ex.status.should.equal(400) - - -@mock_kms_deprecated -def test__list_aliases(): - region = "eu-west-1" - kms = boto.kms.connect_to_region(region) - - create_resp = kms.create_key() - key_id = create_resp['KeyMetadata']['KeyId'] - kms.create_alias('alias/my-alias1', key_id) - kms.create_alias('alias/my-alias2', key_id) - kms.create_alias('alias/my-alias3', key_id) - - resp = kms.list_aliases() - - 
resp['Truncated'].should.be.false - - aliases = resp['Aliases'] - - def has_correct_arn(alias_obj): - alias_name = alias_obj['AliasName'] - alias_arn = alias_obj['AliasArn'] - return re.match(r'arn:aws:kms:{region}:\d{{12}}:{alias_name}'.format(region=region, alias_name=alias_name), - alias_arn) - - len([alias for alias in aliases if - has_correct_arn(alias) and 'alias/aws/ebs' == alias['AliasName']]).should.equal(1) - len([alias for alias in aliases if - has_correct_arn(alias) and 'alias/aws/rds' == alias['AliasName']]).should.equal(1) - len([alias for alias in aliases if - has_correct_arn(alias) and 'alias/aws/redshift' == alias['AliasName']]).should.equal(1) - len([alias for alias in aliases if - has_correct_arn(alias) and 'alias/aws/s3' == alias['AliasName']]).should.equal(1) - - len([alias for alias in aliases if - has_correct_arn(alias) and 'alias/my-alias1' == alias['AliasName']]).should.equal(1) - len([alias for alias in aliases if - has_correct_arn(alias) and 'alias/my-alias2' == alias['AliasName']]).should.equal(1) - - len([alias for alias in aliases if 'TargetKeyId' in alias and key_id == - alias['TargetKeyId']]).should.equal(3) - - len(aliases).should.equal(7) - - -@mock_kms_deprecated -def test__assert_valid_key_id(): - from moto.kms.responses import _assert_valid_key_id - import uuid - - _assert_valid_key_id.when.called_with( - "not-a-key").should.throw(JSONResponseError) - _assert_valid_key_id.when.called_with( - str(uuid.uuid4())).should_not.throw(JSONResponseError) - - -@mock_kms_deprecated -def test__assert_default_policy(): - from moto.kms.responses import _assert_default_policy - - _assert_default_policy.when.called_with( - "not-default").should.throw(JSONResponseError) - _assert_default_policy.when.called_with( - "default").should_not.throw(JSONResponseError) - - -@mock_kms -def test_kms_encrypt_boto3(): - client = boto3.client('kms', region_name='us-east-1') - response = client.encrypt(KeyId='foo', Plaintext=b'bar') - - response = 
client.decrypt(CiphertextBlob=response['CiphertextBlob']) - response['Plaintext'].should.equal(b'bar') - - -@mock_kms -def test_disable_key(): - client = boto3.client('kms', region_name='us-east-1') - key = client.create_key(Description='disable-key') - client.disable_key( - KeyId=key['KeyMetadata']['KeyId'] - ) - - result = client.describe_key(KeyId=key['KeyMetadata']['KeyId']) - assert result["KeyMetadata"]["Enabled"] == False - assert result["KeyMetadata"]["KeyState"] == 'Disabled' - - -@mock_kms -def test_enable_key(): - client = boto3.client('kms', region_name='us-east-1') - key = client.create_key(Description='enable-key') - client.disable_key( - KeyId=key['KeyMetadata']['KeyId'] - ) - client.enable_key( - KeyId=key['KeyMetadata']['KeyId'] - ) - - result = client.describe_key(KeyId=key['KeyMetadata']['KeyId']) - assert result["KeyMetadata"]["Enabled"] == True - assert result["KeyMetadata"]["KeyState"] == 'Enabled' - - -@mock_kms -def test_schedule_key_deletion(): - client = boto3.client('kms', region_name='us-east-1') - key = client.create_key(Description='schedule-key-deletion') - if os.environ.get('TEST_SERVER_MODE', 'false').lower() == 'false': - with freeze_time("2015-01-01 12:00:00"): - response = client.schedule_key_deletion( - KeyId=key['KeyMetadata']['KeyId'] - ) - assert response['KeyId'] == key['KeyMetadata']['KeyId'] - assert response['DeletionDate'] == datetime(2015, 1, 31, 12, 0, tzinfo=tzlocal()) - else: - # Can't manipulate time in server mode - response = client.schedule_key_deletion( - KeyId=key['KeyMetadata']['KeyId'] - ) - assert response['KeyId'] == key['KeyMetadata']['KeyId'] - - result = client.describe_key(KeyId=key['KeyMetadata']['KeyId']) - assert result["KeyMetadata"]["Enabled"] == False - assert result["KeyMetadata"]["KeyState"] == 'PendingDeletion' - assert 'DeletionDate' in result["KeyMetadata"] - - -@mock_kms -def test_schedule_key_deletion_custom(): - client = boto3.client('kms', region_name='us-east-1') - key = 
client.create_key(Description='schedule-key-deletion') - if os.environ.get('TEST_SERVER_MODE', 'false').lower() == 'false': - with freeze_time("2015-01-01 12:00:00"): - response = client.schedule_key_deletion( - KeyId=key['KeyMetadata']['KeyId'], - PendingWindowInDays=7 - ) - assert response['KeyId'] == key['KeyMetadata']['KeyId'] - assert response['DeletionDate'] == datetime(2015, 1, 8, 12, 0, tzinfo=tzlocal()) - else: - # Can't manipulate time in server mode - response = client.schedule_key_deletion( - KeyId=key['KeyMetadata']['KeyId'], - PendingWindowInDays=7 - ) - assert response['KeyId'] == key['KeyMetadata']['KeyId'] - - result = client.describe_key(KeyId=key['KeyMetadata']['KeyId']) - assert result["KeyMetadata"]["Enabled"] == False - assert result["KeyMetadata"]["KeyState"] == 'PendingDeletion' - assert 'DeletionDate' in result["KeyMetadata"] - - -@mock_kms -def test_cancel_key_deletion(): - client = boto3.client('kms', region_name='us-east-1') - key = client.create_key(Description='cancel-key-deletion') - client.schedule_key_deletion( - KeyId=key['KeyMetadata']['KeyId'] - ) - response = client.cancel_key_deletion( - KeyId=key['KeyMetadata']['KeyId'] - ) - assert response['KeyId'] == key['KeyMetadata']['KeyId'] - - result = client.describe_key(KeyId=key['KeyMetadata']['KeyId']) - assert result["KeyMetadata"]["Enabled"] == False - assert result["KeyMetadata"]["KeyState"] == 'Disabled' - assert 'DeletionDate' not in result["KeyMetadata"] +from __future__ import unicode_literals +import os, re +import boto3 +import boto.kms +import botocore.exceptions +from boto.exception import JSONResponseError +from boto.kms.exceptions import AlreadyExistsException, NotFoundException + +from moto.kms.exceptions import NotFoundException as MotoNotFoundException +import sure # noqa +from moto import mock_kms, mock_kms_deprecated +from nose.tools import assert_raises +from freezegun import freeze_time +from datetime import datetime +from dateutil.tz import tzutc + + 
+@mock_kms_deprecated +def test_create_key(): + conn = boto.kms.connect_to_region("us-west-2") + + key = conn.create_key(policy="my policy", + description="my key", key_usage='ENCRYPT_DECRYPT') + + key['KeyMetadata']['Description'].should.equal("my key") + key['KeyMetadata']['KeyUsage'].should.equal("ENCRYPT_DECRYPT") + key['KeyMetadata']['Enabled'].should.equal(True) + + +@mock_kms_deprecated +def test_describe_key(): + conn = boto.kms.connect_to_region("us-west-2") + key = conn.create_key(policy="my policy", + description="my key", key_usage='ENCRYPT_DECRYPT') + key_id = key['KeyMetadata']['KeyId'] + + key = conn.describe_key(key_id) + key['KeyMetadata']['Description'].should.equal("my key") + key['KeyMetadata']['KeyUsage'].should.equal("ENCRYPT_DECRYPT") + + +@mock_kms_deprecated +def test_describe_key_via_alias(): + conn = boto.kms.connect_to_region("us-west-2") + key = conn.create_key(policy="my policy", + description="my key", key_usage='ENCRYPT_DECRYPT') + conn.create_alias(alias_name='alias/my-key-alias', + target_key_id=key['KeyMetadata']['KeyId']) + + alias_key = conn.describe_key('alias/my-key-alias') + alias_key['KeyMetadata']['Description'].should.equal("my key") + alias_key['KeyMetadata']['KeyUsage'].should.equal("ENCRYPT_DECRYPT") + alias_key['KeyMetadata']['Arn'].should.equal(key['KeyMetadata']['Arn']) + + +@mock_kms_deprecated +def test_describe_key_via_alias_not_found(): + conn = boto.kms.connect_to_region("us-west-2") + key = conn.create_key(policy="my policy", + description="my key", key_usage='ENCRYPT_DECRYPT') + conn.create_alias(alias_name='alias/my-key-alias', + target_key_id=key['KeyMetadata']['KeyId']) + + conn.describe_key.when.called_with( + 'alias/not-found-alias').should.throw(JSONResponseError) + + +@mock_kms_deprecated +def test_describe_key_via_arn(): + conn = boto.kms.connect_to_region("us-west-2") + key = conn.create_key(policy="my policy", + description="my key", key_usage='ENCRYPT_DECRYPT') + arn = key['KeyMetadata']['Arn'] + + 
the_key = conn.describe_key(arn) + the_key['KeyMetadata']['Description'].should.equal("my key") + the_key['KeyMetadata']['KeyUsage'].should.equal("ENCRYPT_DECRYPT") + the_key['KeyMetadata']['KeyId'].should.equal(key['KeyMetadata']['KeyId']) + + +@mock_kms_deprecated +def test_describe_missing_key(): + conn = boto.kms.connect_to_region("us-west-2") + conn.describe_key.when.called_with( + "not-a-key").should.throw(JSONResponseError) + + +@mock_kms_deprecated +def test_list_keys(): + conn = boto.kms.connect_to_region("us-west-2") + + conn.create_key(policy="my policy", description="my key1", + key_usage='ENCRYPT_DECRYPT') + conn.create_key(policy="my policy", description="my key2", + key_usage='ENCRYPT_DECRYPT') + + keys = conn.list_keys() + keys['Keys'].should.have.length_of(2) + + +@mock_kms_deprecated +def test_enable_key_rotation(): + conn = boto.kms.connect_to_region("us-west-2") + + key = conn.create_key(policy="my policy", + description="my key", key_usage='ENCRYPT_DECRYPT') + key_id = key['KeyMetadata']['KeyId'] + + conn.enable_key_rotation(key_id) + + conn.get_key_rotation_status( + key_id)['KeyRotationEnabled'].should.equal(True) + + +@mock_kms_deprecated +def test_enable_key_rotation_via_arn(): + conn = boto.kms.connect_to_region("us-west-2") + + key = conn.create_key(policy="my policy", + description="my key", key_usage='ENCRYPT_DECRYPT') + key_id = key['KeyMetadata']['Arn'] + + conn.enable_key_rotation(key_id) + + conn.get_key_rotation_status( + key_id)['KeyRotationEnabled'].should.equal(True) + + +@mock_kms_deprecated +def test_enable_key_rotation_with_missing_key(): + conn = boto.kms.connect_to_region("us-west-2") + conn.enable_key_rotation.when.called_with( + "not-a-key").should.throw(NotFoundException) + + +@mock_kms_deprecated +def test_enable_key_rotation_with_alias_name_should_fail(): + conn = boto.kms.connect_to_region("us-west-2") + key = conn.create_key(policy="my policy", + description="my key", key_usage='ENCRYPT_DECRYPT') + 
conn.create_alias(alias_name='alias/my-key-alias', + target_key_id=key['KeyMetadata']['KeyId']) + + alias_key = conn.describe_key('alias/my-key-alias') + alias_key['KeyMetadata']['Arn'].should.equal(key['KeyMetadata']['Arn']) + + conn.enable_key_rotation.when.called_with( + 'alias/my-alias').should.throw(NotFoundException) + + +@mock_kms_deprecated +def test_disable_key_rotation(): + conn = boto.kms.connect_to_region("us-west-2") + + key = conn.create_key(policy="my policy", + description="my key", key_usage='ENCRYPT_DECRYPT') + key_id = key['KeyMetadata']['KeyId'] + + conn.enable_key_rotation(key_id) + conn.get_key_rotation_status( + key_id)['KeyRotationEnabled'].should.equal(True) + + conn.disable_key_rotation(key_id) + conn.get_key_rotation_status( + key_id)['KeyRotationEnabled'].should.equal(False) + + +@mock_kms_deprecated +def test_encrypt(): + """ + test_encrypt + Using base64 encoding to merely test that the endpoint was called + """ + conn = boto.kms.connect_to_region("us-west-2") + response = conn.encrypt('key_id', 'encryptme'.encode('utf-8')) + response['CiphertextBlob'].should.equal(b'ZW5jcnlwdG1l') + response['KeyId'].should.equal('key_id') + + +@mock_kms_deprecated +def test_decrypt(): + conn = boto.kms.connect_to_region('us-west-2') + response = conn.decrypt('ZW5jcnlwdG1l'.encode('utf-8')) + response['Plaintext'].should.equal(b'encryptme') + + +@mock_kms_deprecated +def test_disable_key_rotation_with_missing_key(): + conn = boto.kms.connect_to_region("us-west-2") + conn.disable_key_rotation.when.called_with( + "not-a-key").should.throw(NotFoundException) + + +@mock_kms_deprecated +def test_get_key_rotation_status_with_missing_key(): + conn = boto.kms.connect_to_region("us-west-2") + conn.get_key_rotation_status.when.called_with( + "not-a-key").should.throw(NotFoundException) + + +@mock_kms_deprecated +def test_get_key_rotation_status(): + conn = boto.kms.connect_to_region("us-west-2") + + key = conn.create_key(policy="my policy", + description="my 
key", key_usage='ENCRYPT_DECRYPT') + key_id = key['KeyMetadata']['KeyId'] + + conn.get_key_rotation_status( + key_id)['KeyRotationEnabled'].should.equal(False) + + +@mock_kms_deprecated +def test_create_key_defaults_key_rotation(): + conn = boto.kms.connect_to_region("us-west-2") + + key = conn.create_key(policy="my policy", + description="my key", key_usage='ENCRYPT_DECRYPT') + key_id = key['KeyMetadata']['KeyId'] + + conn.get_key_rotation_status( + key_id)['KeyRotationEnabled'].should.equal(False) + + +@mock_kms_deprecated +def test_get_key_policy(): + conn = boto.kms.connect_to_region('us-west-2') + + key = conn.create_key(policy='my policy', + description='my key1', key_usage='ENCRYPT_DECRYPT') + key_id = key['KeyMetadata']['KeyId'] + + policy = conn.get_key_policy(key_id, 'default') + policy['Policy'].should.equal('my policy') + + +@mock_kms_deprecated +def test_get_key_policy_via_arn(): + conn = boto.kms.connect_to_region('us-west-2') + + key = conn.create_key(policy='my policy', + description='my key1', key_usage='ENCRYPT_DECRYPT') + policy = conn.get_key_policy(key['KeyMetadata']['Arn'], 'default') + + policy['Policy'].should.equal('my policy') + + +@mock_kms_deprecated +def test_put_key_policy(): + conn = boto.kms.connect_to_region('us-west-2') + + key = conn.create_key(policy='my policy', + description='my key1', key_usage='ENCRYPT_DECRYPT') + key_id = key['KeyMetadata']['KeyId'] + + conn.put_key_policy(key_id, 'default', 'new policy') + policy = conn.get_key_policy(key_id, 'default') + policy['Policy'].should.equal('new policy') + + +@mock_kms_deprecated +def test_put_key_policy_via_arn(): + conn = boto.kms.connect_to_region('us-west-2') + + key = conn.create_key(policy='my policy', + description='my key1', key_usage='ENCRYPT_DECRYPT') + key_id = key['KeyMetadata']['Arn'] + + conn.put_key_policy(key_id, 'default', 'new policy') + policy = conn.get_key_policy(key_id, 'default') + policy['Policy'].should.equal('new policy') + + +@mock_kms_deprecated +def 
test_put_key_policy_via_alias_should_not_update(): + conn = boto.kms.connect_to_region('us-west-2') + + key = conn.create_key(policy='my policy', + description='my key1', key_usage='ENCRYPT_DECRYPT') + conn.create_alias(alias_name='alias/my-key-alias', + target_key_id=key['KeyMetadata']['KeyId']) + + conn.put_key_policy.when.called_with( + 'alias/my-key-alias', 'default', 'new policy').should.throw(NotFoundException) + + policy = conn.get_key_policy(key['KeyMetadata']['KeyId'], 'default') + policy['Policy'].should.equal('my policy') + + +@mock_kms_deprecated +def test_put_key_policy(): + conn = boto.kms.connect_to_region('us-west-2') + + key = conn.create_key(policy='my policy', + description='my key1', key_usage='ENCRYPT_DECRYPT') + conn.put_key_policy(key['KeyMetadata']['Arn'], 'default', 'new policy') + + policy = conn.get_key_policy(key['KeyMetadata']['KeyId'], 'default') + policy['Policy'].should.equal('new policy') + + +@mock_kms_deprecated +def test_list_key_policies(): + conn = boto.kms.connect_to_region('us-west-2') + + key = conn.create_key(policy='my policy', + description='my key1', key_usage='ENCRYPT_DECRYPT') + key_id = key['KeyMetadata']['KeyId'] + + policies = conn.list_key_policies(key_id) + policies['PolicyNames'].should.equal(['default']) + + +@mock_kms_deprecated +def test__create_alias__returns_none_if_correct(): + kms = boto.connect_kms() + create_resp = kms.create_key() + key_id = create_resp['KeyMetadata']['KeyId'] + + resp = kms.create_alias('alias/my-alias', key_id) + + resp.should.be.none + + +@mock_kms_deprecated +def test__create_alias__raises_if_reserved_alias(): + kms = boto.connect_kms() + create_resp = kms.create_key() + key_id = create_resp['KeyMetadata']['KeyId'] + + reserved_aliases = [ + 'alias/aws/ebs', + 'alias/aws/s3', + 'alias/aws/redshift', + 'alias/aws/rds', + ] + + for alias_name in reserved_aliases: + with assert_raises(JSONResponseError) as err: + kms.create_alias(alias_name, key_id) + + ex = err.exception + 
ex.error_message.should.be.none + ex.error_code.should.equal('NotAuthorizedException') + ex.body.should.equal({'__type': 'NotAuthorizedException'}) + ex.reason.should.equal('Bad Request') + ex.status.should.equal(400) + + +@mock_kms_deprecated +def test__create_alias__can_create_multiple_aliases_for_same_key_id(): + kms = boto.connect_kms() + create_resp = kms.create_key() + key_id = create_resp['KeyMetadata']['KeyId'] + + kms.create_alias('alias/my-alias3', key_id).should.be.none + kms.create_alias('alias/my-alias4', key_id).should.be.none + kms.create_alias('alias/my-alias5', key_id).should.be.none + + +@mock_kms_deprecated +def test__create_alias__raises_if_wrong_prefix(): + kms = boto.connect_kms() + create_resp = kms.create_key() + key_id = create_resp['KeyMetadata']['KeyId'] + + with assert_raises(JSONResponseError) as err: + kms.create_alias('wrongprefix/my-alias', key_id) + + ex = err.exception + ex.error_message.should.equal('Invalid identifier') + ex.error_code.should.equal('ValidationException') + ex.body.should.equal({'message': 'Invalid identifier', + '__type': 'ValidationException'}) + ex.reason.should.equal('Bad Request') + ex.status.should.equal(400) + + +@mock_kms_deprecated +def test__create_alias__raises_if_duplicate(): + region = 'us-west-2' + kms = boto.kms.connect_to_region(region) + create_resp = kms.create_key() + key_id = create_resp['KeyMetadata']['KeyId'] + alias = 'alias/my-alias' + + kms.create_alias(alias, key_id) + + with assert_raises(AlreadyExistsException) as err: + kms.create_alias(alias, key_id) + + ex = err.exception + ex.error_message.should.match(r'An alias with the name arn:aws:kms:{region}:\d{{12}}:{alias} already exists' + .format(**locals())) + ex.error_code.should.be.none + ex.box_usage.should.be.none + ex.request_id.should.be.none + ex.body['message'].should.match(r'An alias with the name arn:aws:kms:{region}:\d{{12}}:{alias} already exists' + .format(**locals())) + 
ex.body['__type'].should.equal('AlreadyExistsException') + ex.reason.should.equal('Bad Request') + ex.status.should.equal(400) + + +@mock_kms_deprecated +def test__create_alias__raises_if_alias_has_restricted_characters(): + kms = boto.connect_kms() + create_resp = kms.create_key() + key_id = create_resp['KeyMetadata']['KeyId'] + + alias_names_with_restricted_characters = [ + 'alias/my-alias!', + 'alias/my-alias$', + 'alias/my-alias@', + ] + + for alias_name in alias_names_with_restricted_characters: + with assert_raises(JSONResponseError) as err: + kms.create_alias(alias_name, key_id) + ex = err.exception + ex.body['__type'].should.equal('ValidationException') + ex.body['message'].should.equal( + "1 validation error detected: Value '{alias_name}' at 'aliasName' failed to satisfy constraint: Member must satisfy regular expression pattern: ^[a-zA-Z0-9:/_-]+$".format(**locals())) + ex.error_code.should.equal('ValidationException') + ex.message.should.equal( + "1 validation error detected: Value '{alias_name}' at 'aliasName' failed to satisfy constraint: Member must satisfy regular expression pattern: ^[a-zA-Z0-9:/_-]+$".format(**locals())) + ex.reason.should.equal('Bad Request') + ex.status.should.equal(400) + + +@mock_kms_deprecated +def test__create_alias__raises_if_alias_has_colon_character(): + # For some reason, colons are not accepted for an alias, even though they + # are accepted by regex ^[a-zA-Z0-9:/_-]+$ + kms = boto.connect_kms() + create_resp = kms.create_key() + key_id = create_resp['KeyMetadata']['KeyId'] + + alias_names_with_restricted_characters = [ + 'alias/my:alias', + ] + + for alias_name in alias_names_with_restricted_characters: + with assert_raises(JSONResponseError) as err: + kms.create_alias(alias_name, key_id) + ex = err.exception + ex.body['__type'].should.equal('ValidationException') + ex.body['message'].should.equal( + "{alias_name} contains invalid characters for an alias".format(**locals())) + 
ex.error_code.should.equal('ValidationException') + ex.message.should.equal( + "{alias_name} contains invalid characters for an alias".format(**locals())) + ex.reason.should.equal('Bad Request') + ex.status.should.equal(400) + + +@mock_kms_deprecated +def test__create_alias__accepted_characters(): + kms = boto.connect_kms() + create_resp = kms.create_key() + key_id = create_resp['KeyMetadata']['KeyId'] + + alias_names_with_accepted_characters = [ + 'alias/my-alias_/', + 'alias/my_alias-/', + ] + + for alias_name in alias_names_with_accepted_characters: + kms.create_alias(alias_name, key_id) + + +@mock_kms_deprecated +def test__create_alias__raises_if_target_key_id_is_existing_alias(): + kms = boto.connect_kms() + create_resp = kms.create_key() + key_id = create_resp['KeyMetadata']['KeyId'] + alias = 'alias/my-alias' + + kms.create_alias(alias, key_id) + + with assert_raises(JSONResponseError) as err: + kms.create_alias(alias, alias) + + ex = err.exception + ex.body['__type'].should.equal('ValidationException') + ex.body['message'].should.equal('Aliases must refer to keys. Not aliases') + ex.error_code.should.equal('ValidationException') + ex.message.should.equal('Aliases must refer to keys. Not aliases') + ex.reason.should.equal('Bad Request') + ex.status.should.equal(400) + + +@mock_kms_deprecated +def test__delete_alias(): + kms = boto.connect_kms() + create_resp = kms.create_key() + key_id = create_resp['KeyMetadata']['KeyId'] + alias = 'alias/my-alias' + + # added another alias here to make sure that the deletion of the alias can + # be done when there are multiple existing aliases. 
+ another_create_resp = kms.create_key() + another_key_id = create_resp['KeyMetadata']['KeyId'] + another_alias = 'alias/another-alias' + + kms.create_alias(alias, key_id) + kms.create_alias(another_alias, another_key_id) + + resp = kms.delete_alias(alias) + + resp.should.be.none + + # we can create the alias again, since it has been deleted + kms.create_alias(alias, key_id) + + +@mock_kms_deprecated +def test__delete_alias__raises_if_wrong_prefix(): + kms = boto.connect_kms() + + with assert_raises(JSONResponseError) as err: + kms.delete_alias('wrongprefix/my-alias') + + ex = err.exception + ex.body['__type'].should.equal('ValidationException') + ex.body['message'].should.equal('Invalid identifier') + ex.error_code.should.equal('ValidationException') + ex.message.should.equal('Invalid identifier') + ex.reason.should.equal('Bad Request') + ex.status.should.equal(400) + + +@mock_kms_deprecated +def test__delete_alias__raises_if_alias_is_not_found(): + region = 'us-west-2' + kms = boto.kms.connect_to_region(region) + alias_name = 'alias/unexisting-alias' + + with assert_raises(NotFoundException) as err: + kms.delete_alias(alias_name) + + ex = err.exception + ex.body['__type'].should.equal('NotFoundException') + ex.body['message'].should.match( + r'Alias arn:aws:kms:{region}:\d{{12}}:{alias_name} is not found.'.format(**locals())) + ex.box_usage.should.be.none + ex.error_code.should.be.none + ex.message.should.match( + r'Alias arn:aws:kms:{region}:\d{{12}}:{alias_name} is not found.'.format(**locals())) + ex.reason.should.equal('Bad Request') + ex.request_id.should.be.none + ex.status.should.equal(400) + + +@mock_kms_deprecated +def test__list_aliases(): + region = "eu-west-1" + kms = boto.kms.connect_to_region(region) + + create_resp = kms.create_key() + key_id = create_resp['KeyMetadata']['KeyId'] + kms.create_alias('alias/my-alias1', key_id) + kms.create_alias('alias/my-alias2', key_id) + kms.create_alias('alias/my-alias3', key_id) + + resp = kms.list_aliases() + + 
resp['Truncated'].should.be.false + + aliases = resp['Aliases'] + + def has_correct_arn(alias_obj): + alias_name = alias_obj['AliasName'] + alias_arn = alias_obj['AliasArn'] + return re.match(r'arn:aws:kms:{region}:\d{{12}}:{alias_name}'.format(region=region, alias_name=alias_name), + alias_arn) + + len([alias for alias in aliases if + has_correct_arn(alias) and 'alias/aws/ebs' == alias['AliasName']]).should.equal(1) + len([alias for alias in aliases if + has_correct_arn(alias) and 'alias/aws/rds' == alias['AliasName']]).should.equal(1) + len([alias for alias in aliases if + has_correct_arn(alias) and 'alias/aws/redshift' == alias['AliasName']]).should.equal(1) + len([alias for alias in aliases if + has_correct_arn(alias) and 'alias/aws/s3' == alias['AliasName']]).should.equal(1) + + len([alias for alias in aliases if + has_correct_arn(alias) and 'alias/my-alias1' == alias['AliasName']]).should.equal(1) + len([alias for alias in aliases if + has_correct_arn(alias) and 'alias/my-alias2' == alias['AliasName']]).should.equal(1) + + len([alias for alias in aliases if 'TargetKeyId' in alias and key_id == + alias['TargetKeyId']]).should.equal(3) + + len(aliases).should.equal(7) + + +@mock_kms_deprecated +def test__assert_valid_key_id(): + from moto.kms.responses import _assert_valid_key_id + import uuid + + _assert_valid_key_id.when.called_with( + "not-a-key").should.throw(MotoNotFoundException) + _assert_valid_key_id.when.called_with( + str(uuid.uuid4())).should_not.throw(MotoNotFoundException) + + +@mock_kms_deprecated +def test__assert_default_policy(): + from moto.kms.responses import _assert_default_policy + + _assert_default_policy.when.called_with( + "not-default").should.throw(MotoNotFoundException) + _assert_default_policy.when.called_with( + "default").should_not.throw(MotoNotFoundException) + + +@mock_kms +def test_kms_encrypt_boto3(): + client = boto3.client('kms', region_name='us-east-1') + response = client.encrypt(KeyId='foo', Plaintext=b'bar') + + 
response = client.decrypt(CiphertextBlob=response['CiphertextBlob']) + response['Plaintext'].should.equal(b'bar') + + +@mock_kms +def test_disable_key(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='disable-key') + client.disable_key( + KeyId=key['KeyMetadata']['KeyId'] + ) + + result = client.describe_key(KeyId=key['KeyMetadata']['KeyId']) + assert result["KeyMetadata"]["Enabled"] == False + assert result["KeyMetadata"]["KeyState"] == 'Disabled' + + +@mock_kms +def test_enable_key(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='enable-key') + client.disable_key( + KeyId=key['KeyMetadata']['KeyId'] + ) + client.enable_key( + KeyId=key['KeyMetadata']['KeyId'] + ) + + result = client.describe_key(KeyId=key['KeyMetadata']['KeyId']) + assert result["KeyMetadata"]["Enabled"] == True + assert result["KeyMetadata"]["KeyState"] == 'Enabled' + + +@mock_kms +def test_schedule_key_deletion(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='schedule-key-deletion') + if os.environ.get('TEST_SERVER_MODE', 'false').lower() == 'false': + with freeze_time("2015-01-01 12:00:00"): + response = client.schedule_key_deletion( + KeyId=key['KeyMetadata']['KeyId'] + ) + assert response['KeyId'] == key['KeyMetadata']['KeyId'] + assert response['DeletionDate'] == datetime(2015, 1, 31, 12, 0, tzinfo=tzutc()) + else: + # Can't manipulate time in server mode + response = client.schedule_key_deletion( + KeyId=key['KeyMetadata']['KeyId'] + ) + assert response['KeyId'] == key['KeyMetadata']['KeyId'] + + result = client.describe_key(KeyId=key['KeyMetadata']['KeyId']) + assert result["KeyMetadata"]["Enabled"] == False + assert result["KeyMetadata"]["KeyState"] == 'PendingDeletion' + assert 'DeletionDate' in result["KeyMetadata"] + + +@mock_kms +def test_schedule_key_deletion_custom(): + client = boto3.client('kms', region_name='us-east-1') + key = 
client.create_key(Description='schedule-key-deletion') + if os.environ.get('TEST_SERVER_MODE', 'false').lower() == 'false': + with freeze_time("2015-01-01 12:00:00"): + response = client.schedule_key_deletion( + KeyId=key['KeyMetadata']['KeyId'], + PendingWindowInDays=7 + ) + assert response['KeyId'] == key['KeyMetadata']['KeyId'] + assert response['DeletionDate'] == datetime(2015, 1, 8, 12, 0, tzinfo=tzutc()) + else: + # Can't manipulate time in server mode + response = client.schedule_key_deletion( + KeyId=key['KeyMetadata']['KeyId'], + PendingWindowInDays=7 + ) + assert response['KeyId'] == key['KeyMetadata']['KeyId'] + + result = client.describe_key(KeyId=key['KeyMetadata']['KeyId']) + assert result["KeyMetadata"]["Enabled"] == False + assert result["KeyMetadata"]["KeyState"] == 'PendingDeletion' + assert 'DeletionDate' in result["KeyMetadata"] + + +@mock_kms +def test_cancel_key_deletion(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='cancel-key-deletion') + client.schedule_key_deletion( + KeyId=key['KeyMetadata']['KeyId'] + ) + response = client.cancel_key_deletion( + KeyId=key['KeyMetadata']['KeyId'] + ) + assert response['KeyId'] == key['KeyMetadata']['KeyId'] + + result = client.describe_key(KeyId=key['KeyMetadata']['KeyId']) + assert result["KeyMetadata"]["Enabled"] == False + assert result["KeyMetadata"]["KeyState"] == 'Disabled' + assert 'DeletionDate' not in result["KeyMetadata"] + + +@mock_kms +def test_update_key_description(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='old_description') + key_id = key['KeyMetadata']['KeyId'] + + result = client.update_key_description(KeyId=key_id, Description='new_description') + assert 'ResponseMetadata' in result + + +@mock_kms +def test_tag_resource(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='cancel-key-deletion') + response = client.schedule_key_deletion( + 
KeyId=key['KeyMetadata']['KeyId'] + ) + + keyid = response['KeyId'] + response = client.tag_resource( + KeyId=keyid, + Tags=[ + { + 'TagKey': 'string', + 'TagValue': 'string' + }, + ] + ) + + # Shouldn't have any data, just header + assert len(response.keys()) == 1 + + +@mock_kms +def test_list_resource_tags(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='cancel-key-deletion') + response = client.schedule_key_deletion( + KeyId=key['KeyMetadata']['KeyId'] + ) + + keyid = response['KeyId'] + response = client.tag_resource( + KeyId=keyid, + Tags=[ + { + 'TagKey': 'string', + 'TagValue': 'string' + }, + ] + ) + + response = client.list_resource_tags(KeyId=keyid) + assert response['Tags'][0]['TagKey'] == 'string' + assert response['Tags'][0]['TagValue'] == 'string' + + +@mock_kms +def test_generate_data_key_sizes(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='generate-data-key-size') + + resp1 = client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'], + KeySpec='AES_256' + ) + resp2 = client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'], + KeySpec='AES_128' + ) + resp3 = client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'], + NumberOfBytes=64 + ) + + assert len(resp1['Plaintext']) == 32 + assert len(resp2['Plaintext']) == 16 + assert len(resp3['Plaintext']) == 64 + + +@mock_kms +def test_generate_data_key_decrypt(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='generate-data-key-decrypt') + + resp1 = client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'], + KeySpec='AES_256' + ) + resp2 = client.decrypt( + CiphertextBlob=resp1['CiphertextBlob'] + ) + + assert resp1['Plaintext'] == resp2['Plaintext'] + + +@mock_kms +def test_generate_data_key_invalid_size_params(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='generate-data-key-size') + + with 
assert_raises(botocore.exceptions.ClientError) as err: + client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'], + KeySpec='AES_257' + ) + + with assert_raises(botocore.exceptions.ClientError) as err: + client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'], + KeySpec='AES_128', + NumberOfBytes=16 + ) + + with assert_raises(botocore.exceptions.ClientError) as err: + client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'], + NumberOfBytes=2048 + ) + + with assert_raises(botocore.exceptions.ClientError) as err: + client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'] + ) + + +@mock_kms +def test_generate_data_key_invalid_key(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='generate-data-key-size') + + with assert_raises(client.exceptions.NotFoundException): + client.generate_data_key( + KeyId='alias/randomnonexistantkey', + KeySpec='AES_256' + ) + + with assert_raises(client.exceptions.NotFoundException): + client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'] + '4', + KeySpec='AES_256' + ) + + +@mock_kms +def test_generate_data_key_without_plaintext_decrypt(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='generate-data-key-decrypt') + + resp1 = client.generate_data_key_without_plaintext( + KeyId=key['KeyMetadata']['KeyId'], + KeySpec='AES_256' + ) + + assert 'Plaintext' not in resp1 + + +@mock_kms +def test_enable_key_rotation_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.enable_key_rotation( + KeyId='12366f9b-1230-123d-123e-123e6ae60c02' + ) + + +@mock_kms +def test_disable_key_rotation_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.disable_key_rotation( + KeyId='12366f9b-1230-123d-123e-123e6ae60c02' + ) + + +@mock_kms +def 
test_enable_key_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.enable_key( + KeyId='12366f9b-1230-123d-123e-123e6ae60c02' + ) + + +@mock_kms +def test_disable_key_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.disable_key( + KeyId='12366f9b-1230-123d-123e-123e6ae60c02' + ) + + +@mock_kms +def test_cancel_key_deletion_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.cancel_key_deletion( + KeyId='12366f9b-1230-123d-123e-123e6ae60c02' + ) + + +@mock_kms +def test_schedule_key_deletion_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.schedule_key_deletion( + KeyId='12366f9b-1230-123d-123e-123e6ae60c02' + ) + + +@mock_kms +def test_get_key_rotation_status_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.get_key_rotation_status( + KeyId='12366f9b-1230-123d-123e-123e6ae60c02' + ) + + +@mock_kms +def test_get_key_policy_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.get_key_policy( + KeyId='12366f9b-1230-123d-123e-123e6ae60c02', + PolicyName='default' + ) + + +@mock_kms +def test_list_key_policies_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.list_key_policies( + KeyId='12366f9b-1230-123d-123e-123e6ae60c02' + ) + + +@mock_kms +def test_put_key_policy_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.put_key_policy( + 
KeyId='00000000-0000-0000-0000-000000000000', + PolicyName='default', + Policy='new policy' + ) + + diff --git a/tests/test_packages/__init__.py b/tests/test_packages/__init__.py new file mode 100644 index 000000000..bf582e0b3 --- /dev/null +++ b/tests/test_packages/__init__.py @@ -0,0 +1,8 @@ +from __future__ import unicode_literals + +import logging +# Disable extra logging for tests +logging.getLogger('boto').setLevel(logging.CRITICAL) +logging.getLogger('boto3').setLevel(logging.CRITICAL) +logging.getLogger('botocore').setLevel(logging.CRITICAL) +logging.getLogger('nose').setLevel(logging.CRITICAL) diff --git a/tests/test_packages/test_httpretty.py b/tests/test_packages/test_httpretty.py new file mode 100644 index 000000000..48277a2de --- /dev/null +++ b/tests/test_packages/test_httpretty.py @@ -0,0 +1,37 @@ +# #!/usr/bin/env python +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +import mock + +from moto.packages.httpretty.core import HTTPrettyRequest, fake_gethostname, fake_gethostbyname + + +def test_parse_querystring(): + + core = HTTPrettyRequest(headers='test test HTTP/1.1') + + qs = 'test test' + response = core.parse_querystring(qs) + + assert response == {} + +def test_parse_request_body(): + core = HTTPrettyRequest(headers='test test HTTP/1.1') + + qs = 'test' + response = core.parse_request_body(qs) + + assert response == 'test' + +def test_fake_gethostname(): + + response = fake_gethostname() + + assert response == 'localhost' + +def test_fake_gethostbyname(): + + host = 'test' + response = fake_gethostbyname(host=host) + + assert response == '127.0.0.1' \ No newline at end of file diff --git a/tests/test_route53/test_route53.py b/tests/test_route53/test_route53.py index 1a76a5454..d730f8dcf 100644 --- a/tests/test_route53/test_route53.py +++ b/tests/test_route53/test_route53.py @@ -1,711 +1,721 @@ -from __future__ import unicode_literals - -import boto -import boto3 -from boto.route53.healthcheck import HealthCheck -from 
boto.route53.record import ResourceRecordSets - -import sure # noqa - -import uuid - -import botocore -from nose.tools import assert_raises - -from moto import mock_route53, mock_route53_deprecated - - -@mock_route53_deprecated -def test_hosted_zone(): - conn = boto.connect_route53('the_key', 'the_secret') - firstzone = conn.create_hosted_zone("testdns.aws.com") - zones = conn.get_all_hosted_zones() - len(zones["ListHostedZonesResponse"]["HostedZones"]).should.equal(1) - - conn.create_hosted_zone("testdns1.aws.com") - zones = conn.get_all_hosted_zones() - len(zones["ListHostedZonesResponse"]["HostedZones"]).should.equal(2) - - id1 = firstzone["CreateHostedZoneResponse"][ - "HostedZone"]["Id"].split("/")[-1] - zone = conn.get_hosted_zone(id1) - zone["GetHostedZoneResponse"]["HostedZone"][ - "Name"].should.equal("testdns.aws.com.") - - conn.delete_hosted_zone(id1) - zones = conn.get_all_hosted_zones() - len(zones["ListHostedZonesResponse"]["HostedZones"]).should.equal(1) - - conn.get_hosted_zone.when.called_with("abcd").should.throw( - boto.route53.exception.DNSServerError, "404 Not Found") - - -@mock_route53_deprecated -def test_rrset(): - conn = boto.connect_route53('the_key', 'the_secret') - - conn.get_all_rrsets.when.called_with("abcd", type="A").should.throw( - boto.route53.exception.DNSServerError, "404 Not Found") - - zone = conn.create_hosted_zone("testdns.aws.com") - zoneid = zone["CreateHostedZoneResponse"][ - "HostedZone"]["Id"].split("/")[-1] - - changes = ResourceRecordSets(conn, zoneid) - change = changes.add_change("CREATE", "foo.bar.testdns.aws.com", "A") - change.add_value("1.2.3.4") - changes.commit() - - rrsets = conn.get_all_rrsets(zoneid, type="A") - rrsets.should.have.length_of(1) - rrsets[0].resource_records[0].should.equal('1.2.3.4') - - rrsets = conn.get_all_rrsets(zoneid, type="CNAME") - rrsets.should.have.length_of(0) - - changes = ResourceRecordSets(conn, zoneid) - changes.add_change("DELETE", "foo.bar.testdns.aws.com", "A") - change = 
changes.add_change("CREATE", "foo.bar.testdns.aws.com", "A") - change.add_value("5.6.7.8") - changes.commit() - - rrsets = conn.get_all_rrsets(zoneid, type="A") - rrsets.should.have.length_of(1) - rrsets[0].resource_records[0].should.equal('5.6.7.8') - - changes = ResourceRecordSets(conn, zoneid) - changes.add_change("DELETE", "foo.bar.testdns.aws.com", "A") - changes.commit() - - rrsets = conn.get_all_rrsets(zoneid) - rrsets.should.have.length_of(0) - - changes = ResourceRecordSets(conn, zoneid) - change = changes.add_change("UPSERT", "foo.bar.testdns.aws.com", "A") - change.add_value("1.2.3.4") - changes.commit() - - rrsets = conn.get_all_rrsets(zoneid, type="A") - rrsets.should.have.length_of(1) - rrsets[0].resource_records[0].should.equal('1.2.3.4') - - changes = ResourceRecordSets(conn, zoneid) - change = changes.add_change("UPSERT", "foo.bar.testdns.aws.com", "A") - change.add_value("5.6.7.8") - changes.commit() - - rrsets = conn.get_all_rrsets(zoneid, type="A") - rrsets.should.have.length_of(1) - rrsets[0].resource_records[0].should.equal('5.6.7.8') - - changes = ResourceRecordSets(conn, zoneid) - changes.add_change("DELETE", "foo.bar.testdns.aws.com", "A") - changes.commit() - - changes = ResourceRecordSets(conn, zoneid) - change = changes.add_change("CREATE", "foo.bar.testdns.aws.com", "A") - change.add_value("1.2.3.4") - change = changes.add_change("CREATE", "bar.foo.testdns.aws.com", "A") - change.add_value("5.6.7.8") - changes.commit() - - rrsets = conn.get_all_rrsets(zoneid, type="A") - rrsets.should.have.length_of(2) - - rrsets = conn.get_all_rrsets( - zoneid, name="foo.bar.testdns.aws.com", type="A") - rrsets.should.have.length_of(1) - rrsets[0].resource_records[0].should.equal('1.2.3.4') - - rrsets = conn.get_all_rrsets( - zoneid, name="bar.foo.testdns.aws.com", type="A") - rrsets.should.have.length_of(2) - resource_records = [rr for rr_set in rrsets for rr in rr_set.resource_records] - resource_records.should.contain('1.2.3.4') - 
resource_records.should.contain('5.6.7.8') - - rrsets = conn.get_all_rrsets( - zoneid, name="foo.foo.testdns.aws.com", type="A") - rrsets.should.have.length_of(0) - - -@mock_route53_deprecated -def test_rrset_with_multiple_values(): - conn = boto.connect_route53('the_key', 'the_secret') - zone = conn.create_hosted_zone("testdns.aws.com") - zoneid = zone["CreateHostedZoneResponse"][ - "HostedZone"]["Id"].split("/")[-1] - - changes = ResourceRecordSets(conn, zoneid) - change = changes.add_change("CREATE", "foo.bar.testdns.aws.com", "A") - change.add_value("1.2.3.4") - change.add_value("5.6.7.8") - changes.commit() - - rrsets = conn.get_all_rrsets(zoneid, type="A") - rrsets.should.have.length_of(1) - set(rrsets[0].resource_records).should.equal(set(['1.2.3.4', '5.6.7.8'])) - - -@mock_route53_deprecated -def test_alias_rrset(): - conn = boto.connect_route53('the_key', 'the_secret') - zone = conn.create_hosted_zone("testdns.aws.com") - zoneid = zone["CreateHostedZoneResponse"][ - "HostedZone"]["Id"].split("/")[-1] - - changes = ResourceRecordSets(conn, zoneid) - changes.add_change("CREATE", "foo.alias.testdns.aws.com", "A", - alias_hosted_zone_id="Z3DG6IL3SJCGPX", alias_dns_name="foo.testdns.aws.com") - changes.add_change("CREATE", "bar.alias.testdns.aws.com", "CNAME", - alias_hosted_zone_id="Z3DG6IL3SJCGPX", alias_dns_name="bar.testdns.aws.com") - changes.commit() - - rrsets = conn.get_all_rrsets(zoneid, type="A") - rrset_records = [(rr_set.name, rr) for rr_set in rrsets for rr in rr_set.resource_records] - rrset_records.should.have.length_of(2) - rrset_records.should.contain(('foo.alias.testdns.aws.com', 'foo.testdns.aws.com')) - rrset_records.should.contain(('bar.alias.testdns.aws.com', 'bar.testdns.aws.com')) - rrsets[0].resource_records[0].should.equal('foo.testdns.aws.com') - rrsets = conn.get_all_rrsets(zoneid, type="CNAME") - rrsets.should.have.length_of(1) - rrsets[0].resource_records[0].should.equal('bar.testdns.aws.com') - - -@mock_route53_deprecated -def 
test_create_health_check(): - conn = boto.connect_route53('the_key', 'the_secret') - - check = HealthCheck( - ip_addr="10.0.0.25", - port=80, - hc_type="HTTP", - resource_path="/", - fqdn="example.com", - string_match="a good response", - request_interval=10, - failure_threshold=2, - ) - conn.create_health_check(check) - - checks = conn.get_list_health_checks()['ListHealthChecksResponse'][ - 'HealthChecks'] - list(checks).should.have.length_of(1) - check = checks[0] - config = check['HealthCheckConfig'] - config['IPAddress'].should.equal("10.0.0.25") - config['Port'].should.equal("80") - config['Type'].should.equal("HTTP") - config['ResourcePath'].should.equal("/") - config['FullyQualifiedDomainName'].should.equal("example.com") - config['SearchString'].should.equal("a good response") - config['RequestInterval'].should.equal("10") - config['FailureThreshold'].should.equal("2") - - -@mock_route53_deprecated -def test_delete_health_check(): - conn = boto.connect_route53('the_key', 'the_secret') - - check = HealthCheck( - ip_addr="10.0.0.25", - port=80, - hc_type="HTTP", - resource_path="/", - ) - conn.create_health_check(check) - - checks = conn.get_list_health_checks()['ListHealthChecksResponse'][ - 'HealthChecks'] - list(checks).should.have.length_of(1) - health_check_id = checks[0]['Id'] - - conn.delete_health_check(health_check_id) - checks = conn.get_list_health_checks()['ListHealthChecksResponse'][ - 'HealthChecks'] - list(checks).should.have.length_of(0) - - -@mock_route53_deprecated -def test_use_health_check_in_resource_record_set(): - conn = boto.connect_route53('the_key', 'the_secret') - - check = HealthCheck( - ip_addr="10.0.0.25", - port=80, - hc_type="HTTP", - resource_path="/", - ) - check = conn.create_health_check( - check)['CreateHealthCheckResponse']['HealthCheck'] - check_id = check['Id'] - - zone = conn.create_hosted_zone("testdns.aws.com") - zone_id = zone["CreateHostedZoneResponse"][ - "HostedZone"]["Id"].split("/")[-1] - - changes = 
ResourceRecordSets(conn, zone_id) - change = changes.add_change( - "CREATE", "foo.bar.testdns.aws.com", "A", health_check=check_id) - change.add_value("1.2.3.4") - changes.commit() - - record_sets = conn.get_all_rrsets(zone_id) - record_sets[0].health_check.should.equal(check_id) - - -@mock_route53_deprecated -def test_hosted_zone_comment_preserved(): - conn = boto.connect_route53('the_key', 'the_secret') - - firstzone = conn.create_hosted_zone( - "testdns.aws.com.", comment="test comment") - zone_id = firstzone["CreateHostedZoneResponse"][ - "HostedZone"]["Id"].split("/")[-1] - - hosted_zone = conn.get_hosted_zone(zone_id) - hosted_zone["GetHostedZoneResponse"]["HostedZone"][ - "Config"]["Comment"].should.equal("test comment") - - hosted_zones = conn.get_all_hosted_zones() - hosted_zones["ListHostedZonesResponse"]["HostedZones"][ - 0]["Config"]["Comment"].should.equal("test comment") - - zone = conn.get_zone("testdns.aws.com.") - zone.config["Comment"].should.equal("test comment") - - -@mock_route53_deprecated -def test_deleting_weighted_route(): - conn = boto.connect_route53() - - conn.create_hosted_zone("testdns.aws.com.") - zone = conn.get_zone("testdns.aws.com.") - - zone.add_cname("cname.testdns.aws.com", "example.com", - identifier=('success-test-foo', '50')) - zone.add_cname("cname.testdns.aws.com", "example.com", - identifier=('success-test-bar', '50')) - - cnames = zone.get_cname('cname.testdns.aws.com.', all=True) - cnames.should.have.length_of(2) - foo_cname = [cname for cname in cnames if cname.identifier == - 'success-test-foo'][0] - - zone.delete_record(foo_cname) - cname = zone.get_cname('cname.testdns.aws.com.', all=True) - # When get_cname only had one result, it returns just that result instead - # of a list. 
- cname.identifier.should.equal('success-test-bar') - - -@mock_route53_deprecated -def test_deleting_latency_route(): - conn = boto.connect_route53() - - conn.create_hosted_zone("testdns.aws.com.") - zone = conn.get_zone("testdns.aws.com.") - - zone.add_cname("cname.testdns.aws.com", "example.com", - identifier=('success-test-foo', 'us-west-2')) - zone.add_cname("cname.testdns.aws.com", "example.com", - identifier=('success-test-bar', 'us-west-1')) - - cnames = zone.get_cname('cname.testdns.aws.com.', all=True) - cnames.should.have.length_of(2) - foo_cname = [cname for cname in cnames if cname.identifier == - 'success-test-foo'][0] - foo_cname.region.should.equal('us-west-2') - - zone.delete_record(foo_cname) - cname = zone.get_cname('cname.testdns.aws.com.', all=True) - # When get_cname only had one result, it returns just that result instead - # of a list. - cname.identifier.should.equal('success-test-bar') - cname.region.should.equal('us-west-1') - - -@mock_route53_deprecated -def test_hosted_zone_private_zone_preserved(): - conn = boto.connect_route53('the_key', 'the_secret') - - firstzone = conn.create_hosted_zone( - "testdns.aws.com.", private_zone=True, vpc_id='vpc-fake', vpc_region='us-east-1') - zone_id = firstzone["CreateHostedZoneResponse"][ - "HostedZone"]["Id"].split("/")[-1] - - hosted_zone = conn.get_hosted_zone(zone_id) - # in (original) boto, these bools returned as strings. 
- hosted_zone["GetHostedZoneResponse"]["HostedZone"][ - "Config"]["PrivateZone"].should.equal('True') - - hosted_zones = conn.get_all_hosted_zones() - hosted_zones["ListHostedZonesResponse"]["HostedZones"][ - 0]["Config"]["PrivateZone"].should.equal('True') - - zone = conn.get_zone("testdns.aws.com.") - zone.config["PrivateZone"].should.equal('True') - - -@mock_route53 -def test_hosted_zone_private_zone_preserved_boto3(): - conn = boto3.client('route53', region_name='us-east-1') - # TODO: actually create_hosted_zone statements with PrivateZone=True, but without - # a _valid_ vpc-id should fail. - firstzone = conn.create_hosted_zone( - Name="testdns.aws.com.", - CallerReference=str(hash('foo')), - HostedZoneConfig=dict( - PrivateZone=True, - Comment="Test", - ) - ) - - zone_id = firstzone["HostedZone"]["Id"].split("/")[-1] - - hosted_zone = conn.get_hosted_zone(Id=zone_id) - hosted_zone["HostedZone"]["Config"]["PrivateZone"].should.equal(True) - - hosted_zones = conn.list_hosted_zones() - hosted_zones["HostedZones"][0]["Config"]["PrivateZone"].should.equal(True) - - hosted_zones = conn.list_hosted_zones_by_name(DNSName="testdns.aws.com.") - len(hosted_zones["HostedZones"]).should.equal(1) - hosted_zones["HostedZones"][0]["Config"]["PrivateZone"].should.equal(True) - - -@mock_route53 -def test_list_or_change_tags_for_resource_request(): - conn = boto3.client('route53', region_name='us-east-1') - health_check = conn.create_health_check( - CallerReference='foobar', - HealthCheckConfig={ - 'IPAddress': '192.0.2.44', - 'Port': 123, - 'Type': 'HTTP', - 'ResourcePath': '/', - 'RequestInterval': 30, - 'FailureThreshold': 123, - 'HealthThreshold': 123, - } - ) - healthcheck_id = health_check['HealthCheck']['Id'] - - tag1 = {"Key": "Deploy", "Value": "True"} - tag2 = {"Key": "Name", "Value": "UnitTest"} - - # Test adding a tag for a resource id - conn.change_tags_for_resource( - ResourceType='healthcheck', - ResourceId=healthcheck_id, - AddTags=[tag1, tag2] - ) - - # Check to 
make sure that the response has the 'ResourceTagSet' key - response = conn.list_tags_for_resource( - ResourceType='healthcheck', ResourceId=healthcheck_id) - response.should.contain('ResourceTagSet') - - # Validate that each key was added - response['ResourceTagSet']['Tags'].should.contain(tag1) - response['ResourceTagSet']['Tags'].should.contain(tag2) - - len(response['ResourceTagSet']['Tags']).should.equal(2) - - # Try to remove the tags - conn.change_tags_for_resource( - ResourceType='healthcheck', - ResourceId=healthcheck_id, - RemoveTagKeys=[tag1['Key']] - ) - - # Check to make sure that the response has the 'ResourceTagSet' key - response = conn.list_tags_for_resource( - ResourceType='healthcheck', ResourceId=healthcheck_id) - response.should.contain('ResourceTagSet') - response['ResourceTagSet']['Tags'].should_not.contain(tag1) - response['ResourceTagSet']['Tags'].should.contain(tag2) - - # Remove the second tag - conn.change_tags_for_resource( - ResourceType='healthcheck', - ResourceId=healthcheck_id, - RemoveTagKeys=[tag2['Key']] - ) - - response = conn.list_tags_for_resource( - ResourceType='healthcheck', ResourceId=healthcheck_id) - response['ResourceTagSet']['Tags'].should_not.contain(tag2) - - # Re-add the tags - conn.change_tags_for_resource( - ResourceType='healthcheck', - ResourceId=healthcheck_id, - AddTags=[tag1, tag2] - ) - - # Remove both - conn.change_tags_for_resource( - ResourceType='healthcheck', - ResourceId=healthcheck_id, - RemoveTagKeys=[tag1['Key'], tag2['Key']] - ) - - response = conn.list_tags_for_resource( - ResourceType='healthcheck', ResourceId=healthcheck_id) - response['ResourceTagSet']['Tags'].should.be.empty - - -@mock_route53 -def test_list_hosted_zones_by_name(): - conn = boto3.client('route53', region_name='us-east-1') - conn.create_hosted_zone( - Name="test.b.com.", - CallerReference=str(hash('foo')), - HostedZoneConfig=dict( - PrivateZone=True, - Comment="test com", - ) - ) - conn.create_hosted_zone( - Name="test.a.org.", 
- CallerReference=str(hash('bar')), - HostedZoneConfig=dict( - PrivateZone=True, - Comment="test org", - ) - ) - conn.create_hosted_zone( - Name="test.a.org.", - CallerReference=str(hash('bar')), - HostedZoneConfig=dict( - PrivateZone=True, - Comment="test org 2", - ) - ) - - # test lookup - zones = conn.list_hosted_zones_by_name(DNSName="test.b.com.") - len(zones["HostedZones"]).should.equal(1) - zones["HostedZones"][0]["Name"].should.equal("test.b.com.") - zones = conn.list_hosted_zones_by_name(DNSName="test.a.org.") - len(zones["HostedZones"]).should.equal(2) - zones["HostedZones"][0]["Name"].should.equal("test.a.org.") - zones["HostedZones"][1]["Name"].should.equal("test.a.org.") - - # test sort order - zones = conn.list_hosted_zones_by_name() - len(zones["HostedZones"]).should.equal(3) - zones["HostedZones"][0]["Name"].should.equal("test.b.com.") - zones["HostedZones"][1]["Name"].should.equal("test.a.org.") - zones["HostedZones"][2]["Name"].should.equal("test.a.org.") - - -@mock_route53 -def test_change_resource_record_sets_crud_valid(): - conn = boto3.client('route53', region_name='us-east-1') - conn.create_hosted_zone( - Name="db.", - CallerReference=str(hash('foo')), - HostedZoneConfig=dict( - PrivateZone=True, - Comment="db", - ) - ) - - zones = conn.list_hosted_zones_by_name(DNSName="db.") - len(zones["HostedZones"]).should.equal(1) - zones["HostedZones"][0]["Name"].should.equal("db.") - hosted_zone_id = zones["HostedZones"][0]["Id"] - - # Create A Record. 
- a_record_endpoint_payload = { - 'Comment': 'create A record prod.redis.db', - 'Changes': [ - { - 'Action': 'CREATE', - 'ResourceRecordSet': { - 'Name': 'prod.redis.db', - 'Type': 'A', - 'TTL': 10, - 'ResourceRecords': [{ - 'Value': '127.0.0.1' - }] - } - } - ] - } - conn.change_resource_record_sets(HostedZoneId=hosted_zone_id, ChangeBatch=a_record_endpoint_payload) - - response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id) - len(response['ResourceRecordSets']).should.equal(1) - a_record_detail = response['ResourceRecordSets'][0] - a_record_detail['Name'].should.equal('prod.redis.db') - a_record_detail['Type'].should.equal('A') - a_record_detail['TTL'].should.equal(10) - a_record_detail['ResourceRecords'].should.equal([{'Value': '127.0.0.1'}]) - - # Update type to CNAME - cname_record_endpoint_payload = { - 'Comment': 'Update to CNAME prod.redis.db', - 'Changes': [ - { - 'Action': 'UPSERT', - 'ResourceRecordSet': { - 'Name': 'prod.redis.db', - 'Type': 'CNAME', - 'TTL': 60, - 'ResourceRecords': [{ - 'Value': '192.168.1.1' - }] - } - } - ] - } - conn.change_resource_record_sets(HostedZoneId=hosted_zone_id, ChangeBatch=cname_record_endpoint_payload) - - response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id) - len(response['ResourceRecordSets']).should.equal(1) - cname_record_detail = response['ResourceRecordSets'][0] - cname_record_detail['Name'].should.equal('prod.redis.db') - cname_record_detail['Type'].should.equal('CNAME') - cname_record_detail['TTL'].should.equal(60) - cname_record_detail['ResourceRecords'].should.equal([{'Value': '192.168.1.1'}]) - - # Delete record. 
- delete_payload = { - 'Comment': 'delete prod.redis.db', - 'Changes': [ - { - 'Action': 'DELETE', - 'ResourceRecordSet': { - 'Name': 'prod.redis.db', - 'Type': 'CNAME', - } - } - ] - } - conn.change_resource_record_sets(HostedZoneId=hosted_zone_id, ChangeBatch=delete_payload) - response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id) - len(response['ResourceRecordSets']).should.equal(0) - - -@mock_route53 -def test_change_resource_record_invalid(): - conn = boto3.client('route53', region_name='us-east-1') - conn.create_hosted_zone( - Name="db.", - CallerReference=str(hash('foo')), - HostedZoneConfig=dict( - PrivateZone=True, - Comment="db", - ) - ) - - zones = conn.list_hosted_zones_by_name(DNSName="db.") - len(zones["HostedZones"]).should.equal(1) - zones["HostedZones"][0]["Name"].should.equal("db.") - hosted_zone_id = zones["HostedZones"][0]["Id"] - - invalid_a_record_payload = { - 'Comment': 'this should fail', - 'Changes': [ - { - 'Action': 'CREATE', - 'ResourceRecordSet': { - 'Name': 'prod.scooby.doo', - 'Type': 'A', - 'TTL': 10, - 'ResourceRecords': [{ - 'Value': '127.0.0.1' - }] - } - } - ] - } - - with assert_raises(botocore.exceptions.ClientError): - conn.change_resource_record_sets(HostedZoneId=hosted_zone_id, ChangeBatch=invalid_a_record_payload) - - response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id) - len(response['ResourceRecordSets']).should.equal(0) - - invalid_cname_record_payload = { - 'Comment': 'this should also fail', - 'Changes': [ - { - 'Action': 'UPSERT', - 'ResourceRecordSet': { - 'Name': 'prod.scooby.doo', - 'Type': 'CNAME', - 'TTL': 10, - 'ResourceRecords': [{ - 'Value': '127.0.0.1' - }] - } - } - ] - } - - with assert_raises(botocore.exceptions.ClientError): - conn.change_resource_record_sets(HostedZoneId=hosted_zone_id, ChangeBatch=invalid_cname_record_payload) - - response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id) - len(response['ResourceRecordSets']).should.equal(0) - - 
-@mock_route53 -def test_list_resource_record_sets_name_type_filters(): - conn = boto3.client('route53', region_name='us-east-1') - create_hosted_zone_response = conn.create_hosted_zone( - Name="db.", - CallerReference=str(hash('foo')), - HostedZoneConfig=dict( - PrivateZone=True, - Comment="db", - ) - ) - hosted_zone_id = create_hosted_zone_response['HostedZone']['Id'] - - def create_resource_record_set(rec_type, rec_name): - payload = { - 'Comment': 'create {} record {}'.format(rec_type, rec_name), - 'Changes': [ - { - 'Action': 'CREATE', - 'ResourceRecordSet': { - 'Name': rec_name, - 'Type': rec_type, - 'TTL': 10, - 'ResourceRecords': [{ - 'Value': '127.0.0.1' - }] - } - } - ] - } - conn.change_resource_record_sets(HostedZoneId=hosted_zone_id, ChangeBatch=payload) - - # record_type, record_name - all_records = [ - ('A', 'a.a.db'), - ('A', 'a.b.db'), - ('A', 'b.b.db'), - ('CNAME', 'b.b.db'), - ('CNAME', 'b.c.db'), - ('CNAME', 'c.c.db') - ] - for record_type, record_name in all_records: - create_resource_record_set(record_type, record_name) - - start_with = 2 - response = conn.list_resource_record_sets( - HostedZoneId=hosted_zone_id, - StartRecordType=all_records[start_with][0], - StartRecordName=all_records[start_with][1] - ) - - returned_records = [(record['Type'], record['Name']) for record in response['ResourceRecordSets']] - len(returned_records).should.equal(len(all_records) - start_with) - for desired_record in all_records[start_with:]: - returned_records.should.contain(desired_record) +from __future__ import unicode_literals + +import boto +import boto3 +from boto.route53.healthcheck import HealthCheck +from boto.route53.record import ResourceRecordSets + +import sure # noqa + +import uuid + +import botocore +from nose.tools import assert_raises + +from moto import mock_route53, mock_route53_deprecated + + +@mock_route53_deprecated +def test_hosted_zone(): + conn = boto.connect_route53('the_key', 'the_secret') + firstzone = 
conn.create_hosted_zone("testdns.aws.com") + zones = conn.get_all_hosted_zones() + len(zones["ListHostedZonesResponse"]["HostedZones"]).should.equal(1) + + conn.create_hosted_zone("testdns1.aws.com") + zones = conn.get_all_hosted_zones() + len(zones["ListHostedZonesResponse"]["HostedZones"]).should.equal(2) + + id1 = firstzone["CreateHostedZoneResponse"][ + "HostedZone"]["Id"].split("/")[-1] + zone = conn.get_hosted_zone(id1) + zone["GetHostedZoneResponse"]["HostedZone"][ + "Name"].should.equal("testdns.aws.com.") + + conn.delete_hosted_zone(id1) + zones = conn.get_all_hosted_zones() + len(zones["ListHostedZonesResponse"]["HostedZones"]).should.equal(1) + + conn.get_hosted_zone.when.called_with("abcd").should.throw( + boto.route53.exception.DNSServerError, "404 Not Found") + + +@mock_route53_deprecated +def test_rrset(): + conn = boto.connect_route53('the_key', 'the_secret') + + conn.get_all_rrsets.when.called_with("abcd", type="A").should.throw( + boto.route53.exception.DNSServerError, "404 Not Found") + + zone = conn.create_hosted_zone("testdns.aws.com") + zoneid = zone["CreateHostedZoneResponse"][ + "HostedZone"]["Id"].split("/")[-1] + + changes = ResourceRecordSets(conn, zoneid) + change = changes.add_change("CREATE", "foo.bar.testdns.aws.com", "A") + change.add_value("1.2.3.4") + changes.commit() + + rrsets = conn.get_all_rrsets(zoneid, type="A") + rrsets.should.have.length_of(1) + rrsets[0].resource_records[0].should.equal('1.2.3.4') + + rrsets = conn.get_all_rrsets(zoneid, type="CNAME") + rrsets.should.have.length_of(0) + + changes = ResourceRecordSets(conn, zoneid) + changes.add_change("DELETE", "foo.bar.testdns.aws.com", "A") + change = changes.add_change("CREATE", "foo.bar.testdns.aws.com", "A") + change.add_value("5.6.7.8") + changes.commit() + + rrsets = conn.get_all_rrsets(zoneid, type="A") + rrsets.should.have.length_of(1) + rrsets[0].resource_records[0].should.equal('5.6.7.8') + + changes = ResourceRecordSets(conn, zoneid) + 
changes.add_change("DELETE", "foo.bar.testdns.aws.com", "A") + changes.commit() + + rrsets = conn.get_all_rrsets(zoneid) + rrsets.should.have.length_of(0) + + changes = ResourceRecordSets(conn, zoneid) + change = changes.add_change("UPSERT", "foo.bar.testdns.aws.com", "A") + change.add_value("1.2.3.4") + changes.commit() + + rrsets = conn.get_all_rrsets(zoneid, type="A") + rrsets.should.have.length_of(1) + rrsets[0].resource_records[0].should.equal('1.2.3.4') + + changes = ResourceRecordSets(conn, zoneid) + change = changes.add_change("UPSERT", "foo.bar.testdns.aws.com", "A") + change.add_value("5.6.7.8") + changes.commit() + + rrsets = conn.get_all_rrsets(zoneid, type="A") + rrsets.should.have.length_of(1) + rrsets[0].resource_records[0].should.equal('5.6.7.8') + + changes = ResourceRecordSets(conn, zoneid) + change = changes.add_change("UPSERT", "foo.bar.testdns.aws.com", "TXT") + change.add_value("foo") + changes.commit() + + rrsets = conn.get_all_rrsets(zoneid) + rrsets.should.have.length_of(2) + rrsets[0].resource_records[0].should.equal('5.6.7.8') + rrsets[1].resource_records[0].should.equal('foo') + + changes = ResourceRecordSets(conn, zoneid) + changes.add_change("DELETE", "foo.bar.testdns.aws.com", "A") + changes.commit() + + changes = ResourceRecordSets(conn, zoneid) + change = changes.add_change("CREATE", "foo.bar.testdns.aws.com", "A") + change.add_value("1.2.3.4") + change = changes.add_change("CREATE", "bar.foo.testdns.aws.com", "A") + change.add_value("5.6.7.8") + changes.commit() + + rrsets = conn.get_all_rrsets(zoneid, type="A") + rrsets.should.have.length_of(2) + + rrsets = conn.get_all_rrsets( + zoneid, name="foo.bar.testdns.aws.com", type="A") + rrsets.should.have.length_of(1) + rrsets[0].resource_records[0].should.equal('1.2.3.4') + + rrsets = conn.get_all_rrsets( + zoneid, name="bar.foo.testdns.aws.com", type="A") + rrsets.should.have.length_of(2) + resource_records = [rr for rr_set in rrsets for rr in rr_set.resource_records] + 
resource_records.should.contain('1.2.3.4') + resource_records.should.contain('5.6.7.8') + + rrsets = conn.get_all_rrsets( + zoneid, name="foo.foo.testdns.aws.com", type="A") + rrsets.should.have.length_of(0) + + +@mock_route53_deprecated +def test_rrset_with_multiple_values(): + conn = boto.connect_route53('the_key', 'the_secret') + zone = conn.create_hosted_zone("testdns.aws.com") + zoneid = zone["CreateHostedZoneResponse"][ + "HostedZone"]["Id"].split("/")[-1] + + changes = ResourceRecordSets(conn, zoneid) + change = changes.add_change("CREATE", "foo.bar.testdns.aws.com", "A") + change.add_value("1.2.3.4") + change.add_value("5.6.7.8") + changes.commit() + + rrsets = conn.get_all_rrsets(zoneid, type="A") + rrsets.should.have.length_of(1) + set(rrsets[0].resource_records).should.equal(set(['1.2.3.4', '5.6.7.8'])) + + +@mock_route53_deprecated +def test_alias_rrset(): + conn = boto.connect_route53('the_key', 'the_secret') + zone = conn.create_hosted_zone("testdns.aws.com") + zoneid = zone["CreateHostedZoneResponse"][ + "HostedZone"]["Id"].split("/")[-1] + + changes = ResourceRecordSets(conn, zoneid) + changes.add_change("CREATE", "foo.alias.testdns.aws.com", "A", + alias_hosted_zone_id="Z3DG6IL3SJCGPX", alias_dns_name="foo.testdns.aws.com") + changes.add_change("CREATE", "bar.alias.testdns.aws.com", "CNAME", + alias_hosted_zone_id="Z3DG6IL3SJCGPX", alias_dns_name="bar.testdns.aws.com") + changes.commit() + + rrsets = conn.get_all_rrsets(zoneid, type="A") + rrset_records = [(rr_set.name, rr) for rr_set in rrsets for rr in rr_set.resource_records] + rrset_records.should.have.length_of(2) + rrset_records.should.contain(('foo.alias.testdns.aws.com.', 'foo.testdns.aws.com')) + rrset_records.should.contain(('bar.alias.testdns.aws.com.', 'bar.testdns.aws.com')) + rrsets[0].resource_records[0].should.equal('foo.testdns.aws.com') + rrsets = conn.get_all_rrsets(zoneid, type="CNAME") + rrsets.should.have.length_of(1) + 
rrsets[0].resource_records[0].should.equal('bar.testdns.aws.com') + + +@mock_route53_deprecated +def test_create_health_check(): + conn = boto.connect_route53('the_key', 'the_secret') + + check = HealthCheck( + ip_addr="10.0.0.25", + port=80, + hc_type="HTTP", + resource_path="/", + fqdn="example.com", + string_match="a good response", + request_interval=10, + failure_threshold=2, + ) + conn.create_health_check(check) + + checks = conn.get_list_health_checks()['ListHealthChecksResponse'][ + 'HealthChecks'] + list(checks).should.have.length_of(1) + check = checks[0] + config = check['HealthCheckConfig'] + config['IPAddress'].should.equal("10.0.0.25") + config['Port'].should.equal("80") + config['Type'].should.equal("HTTP") + config['ResourcePath'].should.equal("/") + config['FullyQualifiedDomainName'].should.equal("example.com") + config['SearchString'].should.equal("a good response") + config['RequestInterval'].should.equal("10") + config['FailureThreshold'].should.equal("2") + + +@mock_route53_deprecated +def test_delete_health_check(): + conn = boto.connect_route53('the_key', 'the_secret') + + check = HealthCheck( + ip_addr="10.0.0.25", + port=80, + hc_type="HTTP", + resource_path="/", + ) + conn.create_health_check(check) + + checks = conn.get_list_health_checks()['ListHealthChecksResponse'][ + 'HealthChecks'] + list(checks).should.have.length_of(1) + health_check_id = checks[0]['Id'] + + conn.delete_health_check(health_check_id) + checks = conn.get_list_health_checks()['ListHealthChecksResponse'][ + 'HealthChecks'] + list(checks).should.have.length_of(0) + + +@mock_route53_deprecated +def test_use_health_check_in_resource_record_set(): + conn = boto.connect_route53('the_key', 'the_secret') + + check = HealthCheck( + ip_addr="10.0.0.25", + port=80, + hc_type="HTTP", + resource_path="/", + ) + check = conn.create_health_check( + check)['CreateHealthCheckResponse']['HealthCheck'] + check_id = check['Id'] + + zone = conn.create_hosted_zone("testdns.aws.com") + 
zone_id = zone["CreateHostedZoneResponse"][ + "HostedZone"]["Id"].split("/")[-1] + + changes = ResourceRecordSets(conn, zone_id) + change = changes.add_change( + "CREATE", "foo.bar.testdns.aws.com", "A", health_check=check_id) + change.add_value("1.2.3.4") + changes.commit() + + record_sets = conn.get_all_rrsets(zone_id) + record_sets[0].health_check.should.equal(check_id) + + +@mock_route53_deprecated +def test_hosted_zone_comment_preserved(): + conn = boto.connect_route53('the_key', 'the_secret') + + firstzone = conn.create_hosted_zone( + "testdns.aws.com.", comment="test comment") + zone_id = firstzone["CreateHostedZoneResponse"][ + "HostedZone"]["Id"].split("/")[-1] + + hosted_zone = conn.get_hosted_zone(zone_id) + hosted_zone["GetHostedZoneResponse"]["HostedZone"][ + "Config"]["Comment"].should.equal("test comment") + + hosted_zones = conn.get_all_hosted_zones() + hosted_zones["ListHostedZonesResponse"]["HostedZones"][ + 0]["Config"]["Comment"].should.equal("test comment") + + zone = conn.get_zone("testdns.aws.com.") + zone.config["Comment"].should.equal("test comment") + + +@mock_route53_deprecated +def test_deleting_weighted_route(): + conn = boto.connect_route53() + + conn.create_hosted_zone("testdns.aws.com.") + zone = conn.get_zone("testdns.aws.com.") + + zone.add_cname("cname.testdns.aws.com", "example.com", + identifier=('success-test-foo', '50')) + zone.add_cname("cname.testdns.aws.com", "example.com", + identifier=('success-test-bar', '50')) + + cnames = zone.get_cname('cname.testdns.aws.com.', all=True) + cnames.should.have.length_of(2) + foo_cname = [cname for cname in cnames if cname.identifier == + 'success-test-foo'][0] + + zone.delete_record(foo_cname) + cname = zone.get_cname('cname.testdns.aws.com.', all=True) + # When get_cname only had one result, it returns just that result instead + # of a list. 
+ cname.identifier.should.equal('success-test-bar') + + +@mock_route53_deprecated +def test_deleting_latency_route(): + conn = boto.connect_route53() + + conn.create_hosted_zone("testdns.aws.com.") + zone = conn.get_zone("testdns.aws.com.") + + zone.add_cname("cname.testdns.aws.com", "example.com", + identifier=('success-test-foo', 'us-west-2')) + zone.add_cname("cname.testdns.aws.com", "example.com", + identifier=('success-test-bar', 'us-west-1')) + + cnames = zone.get_cname('cname.testdns.aws.com.', all=True) + cnames.should.have.length_of(2) + foo_cname = [cname for cname in cnames if cname.identifier == + 'success-test-foo'][0] + foo_cname.region.should.equal('us-west-2') + + zone.delete_record(foo_cname) + cname = zone.get_cname('cname.testdns.aws.com.', all=True) + # When get_cname only had one result, it returns just that result instead + # of a list. + cname.identifier.should.equal('success-test-bar') + cname.region.should.equal('us-west-1') + + +@mock_route53_deprecated +def test_hosted_zone_private_zone_preserved(): + conn = boto.connect_route53('the_key', 'the_secret') + + firstzone = conn.create_hosted_zone( + "testdns.aws.com.", private_zone=True, vpc_id='vpc-fake', vpc_region='us-east-1') + zone_id = firstzone["CreateHostedZoneResponse"][ + "HostedZone"]["Id"].split("/")[-1] + + hosted_zone = conn.get_hosted_zone(zone_id) + # in (original) boto, these bools returned as strings. 
+ hosted_zone["GetHostedZoneResponse"]["HostedZone"][ + "Config"]["PrivateZone"].should.equal('True') + + hosted_zones = conn.get_all_hosted_zones() + hosted_zones["ListHostedZonesResponse"]["HostedZones"][ + 0]["Config"]["PrivateZone"].should.equal('True') + + zone = conn.get_zone("testdns.aws.com.") + zone.config["PrivateZone"].should.equal('True') + + +@mock_route53 +def test_hosted_zone_private_zone_preserved_boto3(): + conn = boto3.client('route53', region_name='us-east-1') + # TODO: actually create_hosted_zone statements with PrivateZone=True, but without + # a _valid_ vpc-id should fail. + firstzone = conn.create_hosted_zone( + Name="testdns.aws.com.", + CallerReference=str(hash('foo')), + HostedZoneConfig=dict( + PrivateZone=True, + Comment="Test", + ) + ) + + zone_id = firstzone["HostedZone"]["Id"].split("/")[-1] + + hosted_zone = conn.get_hosted_zone(Id=zone_id) + hosted_zone["HostedZone"]["Config"]["PrivateZone"].should.equal(True) + + hosted_zones = conn.list_hosted_zones() + hosted_zones["HostedZones"][0]["Config"]["PrivateZone"].should.equal(True) + + hosted_zones = conn.list_hosted_zones_by_name(DNSName="testdns.aws.com.") + len(hosted_zones["HostedZones"]).should.equal(1) + hosted_zones["HostedZones"][0]["Config"]["PrivateZone"].should.equal(True) + + +@mock_route53 +def test_list_or_change_tags_for_resource_request(): + conn = boto3.client('route53', region_name='us-east-1') + health_check = conn.create_health_check( + CallerReference='foobar', + HealthCheckConfig={ + 'IPAddress': '192.0.2.44', + 'Port': 123, + 'Type': 'HTTP', + 'ResourcePath': '/', + 'RequestInterval': 30, + 'FailureThreshold': 123, + 'HealthThreshold': 123, + } + ) + healthcheck_id = health_check['HealthCheck']['Id'] + + tag1 = {"Key": "Deploy", "Value": "True"} + tag2 = {"Key": "Name", "Value": "UnitTest"} + + # Test adding a tag for a resource id + conn.change_tags_for_resource( + ResourceType='healthcheck', + ResourceId=healthcheck_id, + AddTags=[tag1, tag2] + ) + + # Check to 
make sure that the response has the 'ResourceTagSet' key + response = conn.list_tags_for_resource( + ResourceType='healthcheck', ResourceId=healthcheck_id) + response.should.contain('ResourceTagSet') + + # Validate that each key was added + response['ResourceTagSet']['Tags'].should.contain(tag1) + response['ResourceTagSet']['Tags'].should.contain(tag2) + + len(response['ResourceTagSet']['Tags']).should.equal(2) + + # Try to remove the tags + conn.change_tags_for_resource( + ResourceType='healthcheck', + ResourceId=healthcheck_id, + RemoveTagKeys=[tag1['Key']] + ) + + # Check to make sure that the response has the 'ResourceTagSet' key + response = conn.list_tags_for_resource( + ResourceType='healthcheck', ResourceId=healthcheck_id) + response.should.contain('ResourceTagSet') + response['ResourceTagSet']['Tags'].should_not.contain(tag1) + response['ResourceTagSet']['Tags'].should.contain(tag2) + + # Remove the second tag + conn.change_tags_for_resource( + ResourceType='healthcheck', + ResourceId=healthcheck_id, + RemoveTagKeys=[tag2['Key']] + ) + + response = conn.list_tags_for_resource( + ResourceType='healthcheck', ResourceId=healthcheck_id) + response['ResourceTagSet']['Tags'].should_not.contain(tag2) + + # Re-add the tags + conn.change_tags_for_resource( + ResourceType='healthcheck', + ResourceId=healthcheck_id, + AddTags=[tag1, tag2] + ) + + # Remove both + conn.change_tags_for_resource( + ResourceType='healthcheck', + ResourceId=healthcheck_id, + RemoveTagKeys=[tag1['Key'], tag2['Key']] + ) + + response = conn.list_tags_for_resource( + ResourceType='healthcheck', ResourceId=healthcheck_id) + response['ResourceTagSet']['Tags'].should.be.empty + + +@mock_route53 +def test_list_hosted_zones_by_name(): + conn = boto3.client('route53', region_name='us-east-1') + conn.create_hosted_zone( + Name="test.b.com.", + CallerReference=str(hash('foo')), + HostedZoneConfig=dict( + PrivateZone=True, + Comment="test com", + ) + ) + conn.create_hosted_zone( + Name="test.a.org.", 
+ CallerReference=str(hash('bar')), + HostedZoneConfig=dict( + PrivateZone=True, + Comment="test org", + ) + ) + conn.create_hosted_zone( + Name="test.a.org.", + CallerReference=str(hash('bar')), + HostedZoneConfig=dict( + PrivateZone=True, + Comment="test org 2", + ) + ) + + # test lookup + zones = conn.list_hosted_zones_by_name(DNSName="test.b.com.") + len(zones["HostedZones"]).should.equal(1) + zones["HostedZones"][0]["Name"].should.equal("test.b.com.") + zones = conn.list_hosted_zones_by_name(DNSName="test.a.org.") + len(zones["HostedZones"]).should.equal(2) + zones["HostedZones"][0]["Name"].should.equal("test.a.org.") + zones["HostedZones"][1]["Name"].should.equal("test.a.org.") + + # test sort order + zones = conn.list_hosted_zones_by_name() + len(zones["HostedZones"]).should.equal(3) + zones["HostedZones"][0]["Name"].should.equal("test.b.com.") + zones["HostedZones"][1]["Name"].should.equal("test.a.org.") + zones["HostedZones"][2]["Name"].should.equal("test.a.org.") + + +@mock_route53 +def test_change_resource_record_sets_crud_valid(): + conn = boto3.client('route53', region_name='us-east-1') + conn.create_hosted_zone( + Name="db.", + CallerReference=str(hash('foo')), + HostedZoneConfig=dict( + PrivateZone=True, + Comment="db", + ) + ) + + zones = conn.list_hosted_zones_by_name(DNSName="db.") + len(zones["HostedZones"]).should.equal(1) + zones["HostedZones"][0]["Name"].should.equal("db.") + hosted_zone_id = zones["HostedZones"][0]["Id"] + + # Create A Record. 
+ a_record_endpoint_payload = { + 'Comment': 'Create A record prod.redis.db', + 'Changes': [ + { + 'Action': 'CREATE', + 'ResourceRecordSet': { + 'Name': 'prod.redis.db.', + 'Type': 'A', + 'TTL': 10, + 'ResourceRecords': [{ + 'Value': '127.0.0.1' + }] + } + } + ] + } + conn.change_resource_record_sets(HostedZoneId=hosted_zone_id, ChangeBatch=a_record_endpoint_payload) + + response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id) + len(response['ResourceRecordSets']).should.equal(1) + a_record_detail = response['ResourceRecordSets'][0] + a_record_detail['Name'].should.equal('prod.redis.db.') + a_record_detail['Type'].should.equal('A') + a_record_detail['TTL'].should.equal(10) + a_record_detail['ResourceRecords'].should.equal([{'Value': '127.0.0.1'}]) + + # Update A Record. + cname_record_endpoint_payload = { + 'Comment': 'Update A record prod.redis.db', + 'Changes': [ + { + 'Action': 'UPSERT', + 'ResourceRecordSet': { + 'Name': 'prod.redis.db.', + 'Type': 'A', + 'TTL': 60, + 'ResourceRecords': [{ + 'Value': '192.168.1.1' + }] + } + } + ] + } + conn.change_resource_record_sets(HostedZoneId=hosted_zone_id, ChangeBatch=cname_record_endpoint_payload) + + response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id) + len(response['ResourceRecordSets']).should.equal(1) + cname_record_detail = response['ResourceRecordSets'][0] + cname_record_detail['Name'].should.equal('prod.redis.db.') + cname_record_detail['Type'].should.equal('A') + cname_record_detail['TTL'].should.equal(60) + cname_record_detail['ResourceRecords'].should.equal([{'Value': '192.168.1.1'}]) + + # Delete record. 
+ delete_payload = { + 'Comment': 'delete prod.redis.db', + 'Changes': [ + { + 'Action': 'DELETE', + 'ResourceRecordSet': { + 'Name': 'prod.redis.db', + 'Type': 'CNAME', + } + } + ] + } + conn.change_resource_record_sets(HostedZoneId=hosted_zone_id, ChangeBatch=delete_payload) + response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id) + len(response['ResourceRecordSets']).should.equal(0) + + +@mock_route53 +def test_change_resource_record_invalid(): + conn = boto3.client('route53', region_name='us-east-1') + conn.create_hosted_zone( + Name="db.", + CallerReference=str(hash('foo')), + HostedZoneConfig=dict( + PrivateZone=True, + Comment="db", + ) + ) + + zones = conn.list_hosted_zones_by_name(DNSName="db.") + len(zones["HostedZones"]).should.equal(1) + zones["HostedZones"][0]["Name"].should.equal("db.") + hosted_zone_id = zones["HostedZones"][0]["Id"] + + invalid_a_record_payload = { + 'Comment': 'this should fail', + 'Changes': [ + { + 'Action': 'CREATE', + 'ResourceRecordSet': { + 'Name': 'prod.scooby.doo', + 'Type': 'A', + 'TTL': 10, + 'ResourceRecords': [{ + 'Value': '127.0.0.1' + }] + } + } + ] + } + + with assert_raises(botocore.exceptions.ClientError): + conn.change_resource_record_sets(HostedZoneId=hosted_zone_id, ChangeBatch=invalid_a_record_payload) + + response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id) + len(response['ResourceRecordSets']).should.equal(0) + + invalid_cname_record_payload = { + 'Comment': 'this should also fail', + 'Changes': [ + { + 'Action': 'UPSERT', + 'ResourceRecordSet': { + 'Name': 'prod.scooby.doo', + 'Type': 'CNAME', + 'TTL': 10, + 'ResourceRecords': [{ + 'Value': '127.0.0.1' + }] + } + } + ] + } + + with assert_raises(botocore.exceptions.ClientError): + conn.change_resource_record_sets(HostedZoneId=hosted_zone_id, ChangeBatch=invalid_cname_record_payload) + + response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id) + len(response['ResourceRecordSets']).should.equal(0) + + 
+@mock_route53 +def test_list_resource_record_sets_name_type_filters(): + conn = boto3.client('route53', region_name='us-east-1') + create_hosted_zone_response = conn.create_hosted_zone( + Name="db.", + CallerReference=str(hash('foo')), + HostedZoneConfig=dict( + PrivateZone=True, + Comment="db", + ) + ) + hosted_zone_id = create_hosted_zone_response['HostedZone']['Id'] + + def create_resource_record_set(rec_type, rec_name): + payload = { + 'Comment': 'create {} record {}'.format(rec_type, rec_name), + 'Changes': [ + { + 'Action': 'CREATE', + 'ResourceRecordSet': { + 'Name': rec_name, + 'Type': rec_type, + 'TTL': 10, + 'ResourceRecords': [{ + 'Value': '127.0.0.1' + }] + } + } + ] + } + conn.change_resource_record_sets(HostedZoneId=hosted_zone_id, ChangeBatch=payload) + + # record_type, record_name + all_records = [ + ('A', 'a.a.db.'), + ('A', 'a.b.db.'), + ('A', 'b.b.db.'), + ('CNAME', 'b.b.db.'), + ('CNAME', 'b.c.db.'), + ('CNAME', 'c.c.db.') + ] + for record_type, record_name in all_records: + create_resource_record_set(record_type, record_name) + + start_with = 2 + response = conn.list_resource_record_sets( + HostedZoneId=hosted_zone_id, + StartRecordType=all_records[start_with][0], + StartRecordName=all_records[start_with][1] + ) + + returned_records = [(record['Type'], record['Name']) for record in response['ResourceRecordSets']] + len(returned_records).should.equal(len(all_records) - start_with) + for desired_record in all_records[start_with:]: + returned_records.should.contain(desired_record) diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index aa9050e04..6af23849c 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -1,2583 +1,2822 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - -import datetime -from six.moves.urllib.request import urlopen -from six.moves.urllib.error import HTTPError -from functools import wraps -from gzip import GzipFile -from io import BytesIO -import zlib - -import json 
-import boto -import boto3 -from botocore.client import ClientError -import botocore.exceptions -from boto.exception import S3CreateError, S3ResponseError -from botocore.handlers import disable_signing -from boto.s3.connection import S3Connection -from boto.s3.key import Key -from freezegun import freeze_time -import six -import requests -import tests.backport_assert_raises # noqa -from nose.tools import assert_raises - -import sure # noqa - -from moto import settings, mock_s3, mock_s3_deprecated -import moto.s3.models as s3model - -if settings.TEST_SERVER_MODE: - REDUCED_PART_SIZE = s3model.UPLOAD_PART_MIN_SIZE - EXPECTED_ETAG = '"140f92a6df9f9e415f74a1463bcee9bb-2"' -else: - REDUCED_PART_SIZE = 256 - EXPECTED_ETAG = '"66d1a1a2ed08fd05c137f316af4ff255-2"' - - -def reduced_min_part_size(f): - """ speed up tests by temporarily making the multipart minimum part size - small - """ - orig_size = s3model.UPLOAD_PART_MIN_SIZE - - @wraps(f) - def wrapped(*args, **kwargs): - try: - s3model.UPLOAD_PART_MIN_SIZE = REDUCED_PART_SIZE - return f(*args, **kwargs) - finally: - s3model.UPLOAD_PART_MIN_SIZE = orig_size - - return wrapped - - -class MyModel(object): - - def __init__(self, name, value): - self.name = name - self.value = value - - def save(self): - s3 = boto3.client('s3', region_name='us-east-1') - s3.put_object(Bucket='mybucket', Key=self.name, Body=self.value) - - -@mock_s3 -def test_my_model_save(): - # Create Bucket so that test can run - conn = boto3.resource('s3', region_name='us-east-1') - conn.create_bucket(Bucket='mybucket') - #################################### - - model_instance = MyModel('steve', 'is awesome') - model_instance.save() - - body = conn.Object('mybucket', 'steve').get()['Body'].read().decode() - - assert body == 'is awesome' - - -@mock_s3 -def test_key_etag(): - conn = boto3.resource('s3', region_name='us-east-1') - conn.create_bucket(Bucket='mybucket') - - model_instance = MyModel('steve', 'is awesome') - model_instance.save() - - 
conn.Bucket('mybucket').Object('steve').e_tag.should.equal( - '"d32bda93738f7e03adb22e66c90fbc04"') - - -@mock_s3_deprecated -def test_multipart_upload_too_small(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - - multipart = bucket.initiate_multipart_upload("the-key") - multipart.upload_part_from_file(BytesIO(b'hello'), 1) - multipart.upload_part_from_file(BytesIO(b'world'), 2) - # Multipart with total size under 5MB is refused - multipart.complete_upload.should.throw(S3ResponseError) - - -@mock_s3_deprecated -@reduced_min_part_size -def test_multipart_upload(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - - multipart = bucket.initiate_multipart_upload("the-key") - part1 = b'0' * REDUCED_PART_SIZE - multipart.upload_part_from_file(BytesIO(part1), 1) - # last part, can be less than 5 MB - part2 = b'1' - multipart.upload_part_from_file(BytesIO(part2), 2) - multipart.complete_upload() - # we should get both parts as the key contents - bucket.get_key( - "the-key").get_contents_as_string().should.equal(part1 + part2) - - -@mock_s3_deprecated -@reduced_min_part_size -def test_multipart_upload_out_of_order(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - - multipart = bucket.initiate_multipart_upload("the-key") - # last part, can be less than 5 MB - part2 = b'1' - multipart.upload_part_from_file(BytesIO(part2), 4) - part1 = b'0' * REDUCED_PART_SIZE - multipart.upload_part_from_file(BytesIO(part1), 2) - multipart.complete_upload() - # we should get both parts as the key contents - bucket.get_key( - "the-key").get_contents_as_string().should.equal(part1 + part2) - - -@mock_s3_deprecated -@reduced_min_part_size -def test_multipart_upload_with_headers(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - - multipart = bucket.initiate_multipart_upload( - "the-key", metadata={"foo": "bar"}) - part1 = 
b'0' * 10 - multipart.upload_part_from_file(BytesIO(part1), 1) - multipart.complete_upload() - - key = bucket.get_key("the-key") - key.metadata.should.equal({"foo": "bar"}) - - -@mock_s3_deprecated -@reduced_min_part_size -def test_multipart_upload_with_copy_key(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - key = Key(bucket) - key.key = "original-key" - key.set_contents_from_string("key_value") - - multipart = bucket.initiate_multipart_upload("the-key") - part1 = b'0' * REDUCED_PART_SIZE - multipart.upload_part_from_file(BytesIO(part1), 1) - multipart.copy_part_from_key("foobar", "original-key", 2, 0, 3) - multipart.complete_upload() - bucket.get_key( - "the-key").get_contents_as_string().should.equal(part1 + b"key_") - - -@mock_s3_deprecated -@reduced_min_part_size -def test_multipart_upload_cancel(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - - multipart = bucket.initiate_multipart_upload("the-key") - part1 = b'0' * REDUCED_PART_SIZE - multipart.upload_part_from_file(BytesIO(part1), 1) - multipart.cancel_upload() - # TODO we really need some sort of assertion here, but we don't currently - # have the ability to list mulipart uploads for a bucket. 
- - -@mock_s3_deprecated -@reduced_min_part_size -def test_multipart_etag(): - # Create Bucket so that test can run - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket('mybucket') - - multipart = bucket.initiate_multipart_upload("the-key") - part1 = b'0' * REDUCED_PART_SIZE - multipart.upload_part_from_file(BytesIO(part1), 1) - # last part, can be less than 5 MB - part2 = b'1' - multipart.upload_part_from_file(BytesIO(part2), 2) - multipart.complete_upload() - # we should get both parts as the key contents - bucket.get_key("the-key").etag.should.equal(EXPECTED_ETAG) - - -@mock_s3_deprecated -@reduced_min_part_size -def test_multipart_invalid_order(): - # Create Bucket so that test can run - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket('mybucket') - - multipart = bucket.initiate_multipart_upload("the-key") - part1 = b'0' * 5242880 - etag1 = multipart.upload_part_from_file(BytesIO(part1), 1).etag - # last part, can be less than 5 MB - part2 = b'1' - etag2 = multipart.upload_part_from_file(BytesIO(part2), 2).etag - xml = "{0}{1}" - xml = xml.format(2, etag2) + xml.format(1, etag1) - xml = "{0}".format(xml) - bucket.complete_multipart_upload.when.called_with( - multipart.key_name, multipart.id, xml).should.throw(S3ResponseError) - -@mock_s3_deprecated -@reduced_min_part_size -def test_multipart_etag_quotes_stripped(): - # Create Bucket so that test can run - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket('mybucket') - - multipart = bucket.initiate_multipart_upload("the-key") - part1 = b'0' * REDUCED_PART_SIZE - etag1 = multipart.upload_part_from_file(BytesIO(part1), 1).etag - # last part, can be less than 5 MB - part2 = b'1' - etag2 = multipart.upload_part_from_file(BytesIO(part2), 2).etag - # Strip quotes from etags - etag1 = etag1.replace('"','') - etag2 = etag2.replace('"','') - xml = "{0}{1}" - xml = xml.format(1, etag1) + xml.format(2, etag2) - xml = "{0}".format(xml) - 
bucket.complete_multipart_upload.when.called_with( - multipart.key_name, multipart.id, xml).should_not.throw(S3ResponseError) - # we should get both parts as the key contents - bucket.get_key("the-key").etag.should.equal(EXPECTED_ETAG) - -@mock_s3_deprecated -@reduced_min_part_size -def test_multipart_duplicate_upload(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - - multipart = bucket.initiate_multipart_upload("the-key") - part1 = b'0' * REDUCED_PART_SIZE - multipart.upload_part_from_file(BytesIO(part1), 1) - # same part again - multipart.upload_part_from_file(BytesIO(part1), 1) - part2 = b'1' * 1024 - multipart.upload_part_from_file(BytesIO(part2), 2) - multipart.complete_upload() - # We should get only one copy of part 1. - bucket.get_key( - "the-key").get_contents_as_string().should.equal(part1 + part2) - - -@mock_s3_deprecated -def test_list_multiparts(): - # Create Bucket so that test can run - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket('mybucket') - - multipart1 = bucket.initiate_multipart_upload("one-key") - multipart2 = bucket.initiate_multipart_upload("two-key") - uploads = bucket.get_all_multipart_uploads() - uploads.should.have.length_of(2) - dict([(u.key_name, u.id) for u in uploads]).should.equal( - {'one-key': multipart1.id, 'two-key': multipart2.id}) - multipart2.cancel_upload() - uploads = bucket.get_all_multipart_uploads() - uploads.should.have.length_of(1) - uploads[0].key_name.should.equal("one-key") - multipart1.cancel_upload() - uploads = bucket.get_all_multipart_uploads() - uploads.should.be.empty - - -@mock_s3_deprecated -def test_key_save_to_missing_bucket(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.get_bucket('mybucket', validate=False) - - key = Key(bucket) - key.key = "the-key" - key.set_contents_from_string.when.called_with( - "foobar").should.throw(S3ResponseError) - - -@mock_s3_deprecated -def test_missing_key(): - conn = 
boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - bucket.get_key("the-key").should.equal(None) - - -@mock_s3_deprecated -def test_missing_key_urllib2(): - conn = boto.connect_s3('the_key', 'the_secret') - conn.create_bucket("foobar") - - urlopen.when.called_with( - "http://foobar.s3.amazonaws.com/the-key").should.throw(HTTPError) - - -@mock_s3_deprecated -def test_empty_key(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - key = Key(bucket) - key.key = "the-key" - key.set_contents_from_string("") - - key = bucket.get_key("the-key") - key.size.should.equal(0) - key.get_contents_as_string().should.equal(b'') - - -@mock_s3_deprecated -def test_empty_key_set_on_existing_key(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - key = Key(bucket) - key.key = "the-key" - key.set_contents_from_string("foobar") - - key = bucket.get_key("the-key") - key.size.should.equal(6) - key.get_contents_as_string().should.equal(b'foobar') - - key.set_contents_from_string("") - bucket.get_key("the-key").get_contents_as_string().should.equal(b'') - - -@mock_s3_deprecated -def test_large_key_save(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - key = Key(bucket) - key.key = "the-key" - key.set_contents_from_string("foobar" * 100000) - - bucket.get_key( - "the-key").get_contents_as_string().should.equal(b'foobar' * 100000) - - -@mock_s3_deprecated -def test_copy_key(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - key = Key(bucket) - key.key = "the-key" - key.set_contents_from_string("some value") - - bucket.copy_key('new-key', 'foobar', 'the-key') - - bucket.get_key( - "the-key").get_contents_as_string().should.equal(b"some value") - bucket.get_key( - "new-key").get_contents_as_string().should.equal(b"some value") - - -@mock_s3_deprecated -def test_copy_key_with_version(): - conn = 
boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - bucket.configure_versioning(versioning=True) - key = Key(bucket) - key.key = "the-key" - key.set_contents_from_string("some value") - key.set_contents_from_string("another value") - - bucket.copy_key('new-key', 'foobar', 'the-key', src_version_id='0') - - bucket.get_key( - "the-key").get_contents_as_string().should.equal(b"another value") - bucket.get_key( - "new-key").get_contents_as_string().should.equal(b"some value") - - -@mock_s3_deprecated -def test_set_metadata(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - key = Key(bucket) - key.key = 'the-key' - key.set_metadata('md', 'Metadatastring') - key.set_contents_from_string("Testval") - - bucket.get_key('the-key').get_metadata('md').should.equal('Metadatastring') - - -@mock_s3_deprecated -def test_copy_key_replace_metadata(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - key = Key(bucket) - key.key = "the-key" - key.set_metadata('md', 'Metadatastring') - key.set_contents_from_string("some value") - - bucket.copy_key('new-key', 'foobar', 'the-key', - metadata={'momd': 'Mometadatastring'}) - - bucket.get_key("new-key").get_metadata('md').should.be.none - bucket.get_key( - "new-key").get_metadata('momd').should.equal('Mometadatastring') - - -@freeze_time("2012-01-01 12:00:00") -@mock_s3_deprecated -def test_last_modified(): - # See https://github.com/boto/boto/issues/466 - conn = boto.connect_s3() - bucket = conn.create_bucket("foobar") - key = Key(bucket) - key.key = "the-key" - key.set_contents_from_string("some value") - - rs = bucket.get_all_keys() - rs[0].last_modified.should.equal('2012-01-01T12:00:00.000Z') - - bucket.get_key( - "the-key").last_modified.should.equal('Sun, 01 Jan 2012 12:00:00 GMT') - - -@mock_s3_deprecated -def test_missing_bucket(): - conn = boto.connect_s3('the_key', 'the_secret') - 
conn.get_bucket.when.called_with('mybucket').should.throw(S3ResponseError) - - -@mock_s3_deprecated -def test_bucket_with_dash(): - conn = boto.connect_s3('the_key', 'the_secret') - conn.get_bucket.when.called_with( - 'mybucket-test').should.throw(S3ResponseError) - - -@mock_s3_deprecated -def test_create_existing_bucket(): - "Trying to create a bucket that already exists should raise an Error" - conn = boto.s3.connect_to_region("us-west-2") - conn.create_bucket("foobar") - with assert_raises(S3CreateError): - conn.create_bucket('foobar') - - -@mock_s3_deprecated -def test_create_existing_bucket_in_us_east_1(): - "Trying to create a bucket that already exists in us-east-1 returns the bucket" - - """" - http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html - Your previous request to create the named bucket succeeded and you already - own it. You get this error in all AWS regions except US Standard, - us-east-1. In us-east-1 region, you will get 200 OK, but it is no-op (if - bucket exists it Amazon S3 will not do anything). 
- """ - conn = boto.s3.connect_to_region("us-east-1") - conn.create_bucket("foobar") - bucket = conn.create_bucket("foobar") - bucket.name.should.equal("foobar") - - -@mock_s3_deprecated -def test_other_region(): - conn = S3Connection( - 'key', 'secret', host='s3-website-ap-southeast-2.amazonaws.com') - conn.create_bucket("foobar") - list(conn.get_bucket("foobar").get_all_keys()).should.equal([]) - - -@mock_s3_deprecated -def test_bucket_deletion(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - - key = Key(bucket) - key.key = "the-key" - key.set_contents_from_string("some value") - - # Try to delete a bucket that still has keys - conn.delete_bucket.when.called_with("foobar").should.throw(S3ResponseError) - - bucket.delete_key("the-key") - conn.delete_bucket("foobar") - - # Get non-existing bucket - conn.get_bucket.when.called_with("foobar").should.throw(S3ResponseError) - - # Delete non-existant bucket - conn.delete_bucket.when.called_with("foobar").should.throw(S3ResponseError) - - -@mock_s3_deprecated -def test_get_all_buckets(): - conn = boto.connect_s3('the_key', 'the_secret') - conn.create_bucket("foobar") - conn.create_bucket("foobar2") - buckets = conn.get_all_buckets() - - buckets.should.have.length_of(2) - - -@mock_s3 -@mock_s3_deprecated -def test_post_to_bucket(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - - requests.post("https://foobar.s3.amazonaws.com/", { - 'key': 'the-key', - 'file': 'nothing' - }) - - bucket.get_key('the-key').get_contents_as_string().should.equal(b'nothing') - - -@mock_s3 -@mock_s3_deprecated -def test_post_with_metadata_to_bucket(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - - requests.post("https://foobar.s3.amazonaws.com/", { - 'key': 'the-key', - 'file': 'nothing', - 'x-amz-meta-test': 'metadata' - }) - - bucket.get_key('the-key').get_metadata('test').should.equal('metadata') - - 
-@mock_s3_deprecated -def test_delete_missing_key(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket('foobar') - - deleted_key = bucket.delete_key("foobar") - deleted_key.key.should.equal("foobar") - - -@mock_s3_deprecated -def test_delete_keys(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket('foobar') - - Key(bucket=bucket, name='file1').set_contents_from_string('abc') - Key(bucket=bucket, name='file2').set_contents_from_string('abc') - Key(bucket=bucket, name='file3').set_contents_from_string('abc') - Key(bucket=bucket, name='file4').set_contents_from_string('abc') - - result = bucket.delete_keys(['file2', 'file3']) - result.deleted.should.have.length_of(2) - result.errors.should.have.length_of(0) - keys = bucket.get_all_keys() - keys.should.have.length_of(2) - keys[0].name.should.equal('file1') - - -@mock_s3_deprecated -def test_delete_keys_with_invalid(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket('foobar') - - Key(bucket=bucket, name='file1').set_contents_from_string('abc') - Key(bucket=bucket, name='file2').set_contents_from_string('abc') - Key(bucket=bucket, name='file3').set_contents_from_string('abc') - Key(bucket=bucket, name='file4').set_contents_from_string('abc') - - result = bucket.delete_keys(['abc', 'file3']) - - result.deleted.should.have.length_of(1) - result.errors.should.have.length_of(1) - keys = bucket.get_all_keys() - keys.should.have.length_of(3) - keys[0].name.should.equal('file1') - - -@mock_s3_deprecated -def test_bucket_name_with_dot(): - conn = boto.connect_s3() - bucket = conn.create_bucket('firstname.lastname') - - k = Key(bucket, 'somekey') - k.set_contents_from_string('somedata') - - -@mock_s3_deprecated -def test_key_with_special_characters(): - conn = boto.connect_s3() - bucket = conn.create_bucket('test_bucket_name') - - key = Key(bucket, 'test_list_keys_2/x?y') - key.set_contents_from_string('value1') - - key_list = 
bucket.list('test_list_keys_2/', '/') - keys = [x for x in key_list] - keys[0].name.should.equal("test_list_keys_2/x?y") - - -@mock_s3_deprecated -def test_unicode_key_with_slash(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - key = Key(bucket) - key.key = "/the-key-unîcode/test" - key.set_contents_from_string("value") - - key = bucket.get_key("/the-key-unîcode/test") - key.get_contents_as_string().should.equal(b'value') - - -@mock_s3_deprecated -def test_bucket_key_listing_order(): - conn = boto.connect_s3() - bucket = conn.create_bucket('test_bucket') - prefix = 'toplevel/' - - def store(name): - k = Key(bucket, prefix + name) - k.set_contents_from_string('somedata') - - names = ['x/key', 'y.key1', 'y.key2', 'y.key3', 'x/y/key', 'x/y/z/key'] - - for name in names: - store(name) - - delimiter = None - keys = [x.name for x in bucket.list(prefix, delimiter)] - keys.should.equal([ - 'toplevel/x/key', 'toplevel/x/y/key', 'toplevel/x/y/z/key', - 'toplevel/y.key1', 'toplevel/y.key2', 'toplevel/y.key3' - ]) - - delimiter = '/' - keys = [x.name for x in bucket.list(prefix, delimiter)] - keys.should.equal([ - 'toplevel/y.key1', 'toplevel/y.key2', 'toplevel/y.key3', 'toplevel/x/' - ]) - - # Test delimiter with no prefix - delimiter = '/' - keys = [x.name for x in bucket.list(prefix=None, delimiter=delimiter)] - keys.should.equal(['toplevel/']) - - delimiter = None - keys = [x.name for x in bucket.list(prefix + 'x', delimiter)] - keys.should.equal( - [u'toplevel/x/key', u'toplevel/x/y/key', u'toplevel/x/y/z/key']) - - delimiter = '/' - keys = [x.name for x in bucket.list(prefix + 'x', delimiter)] - keys.should.equal([u'toplevel/x/']) - - -@mock_s3_deprecated -def test_key_with_reduced_redundancy(): - conn = boto.connect_s3() - bucket = conn.create_bucket('test_bucket_name') - - key = Key(bucket, 'test_rr_key') - key.set_contents_from_string('value1', reduced_redundancy=True) - # we use the bucket iterator because of: - # 
https:/github.com/boto/boto/issues/1173 - list(bucket)[0].storage_class.should.equal('REDUCED_REDUNDANCY') - - -@mock_s3_deprecated -def test_copy_key_reduced_redundancy(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - key = Key(bucket) - key.key = "the-key" - key.set_contents_from_string("some value") - - bucket.copy_key('new-key', 'foobar', 'the-key', - storage_class='REDUCED_REDUNDANCY') - - # we use the bucket iterator because of: - # https:/github.com/boto/boto/issues/1173 - keys = dict([(k.name, k) for k in bucket]) - keys['new-key'].storage_class.should.equal("REDUCED_REDUNDANCY") - keys['the-key'].storage_class.should.equal("STANDARD") - - -@freeze_time("2012-01-01 12:00:00") -@mock_s3_deprecated -def test_restore_key(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - key = Key(bucket) - key.key = "the-key" - key.set_contents_from_string("some value") - list(bucket)[0].ongoing_restore.should.be.none - key.restore(1) - key = bucket.get_key('the-key') - key.ongoing_restore.should_not.be.none - key.ongoing_restore.should.be.false - key.expiry_date.should.equal("Mon, 02 Jan 2012 12:00:00 GMT") - key.restore(2) - key = bucket.get_key('the-key') - key.ongoing_restore.should_not.be.none - key.ongoing_restore.should.be.false - key.expiry_date.should.equal("Tue, 03 Jan 2012 12:00:00 GMT") - - -@freeze_time("2012-01-01 12:00:00") -@mock_s3_deprecated -def test_restore_key_headers(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket("foobar") - key = Key(bucket) - key.key = "the-key" - key.set_contents_from_string("some value") - key.restore(1, headers={'foo': 'bar'}) - key = bucket.get_key('the-key') - key.ongoing_restore.should_not.be.none - key.ongoing_restore.should.be.false - key.expiry_date.should.equal("Mon, 02 Jan 2012 12:00:00 GMT") - - -@mock_s3_deprecated -def test_get_versioning_status(): - conn = boto.connect_s3('the_key', 'the_secret') 
- bucket = conn.create_bucket('foobar') - d = bucket.get_versioning_status() - d.should.be.empty - - bucket.configure_versioning(versioning=True) - d = bucket.get_versioning_status() - d.shouldnt.be.empty - d.should.have.key('Versioning').being.equal('Enabled') - - bucket.configure_versioning(versioning=False) - d = bucket.get_versioning_status() - d.should.have.key('Versioning').being.equal('Suspended') - - -@mock_s3_deprecated -def test_key_version(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket('foobar') - bucket.configure_versioning(versioning=True) - - key = Key(bucket) - key.key = 'the-key' - key.version_id.should.be.none - key.set_contents_from_string('some string') - key.version_id.should.equal('0') - key.set_contents_from_string('some string') - key.version_id.should.equal('1') - - key = bucket.get_key('the-key') - key.version_id.should.equal('1') - - -@mock_s3_deprecated -def test_list_versions(): - conn = boto.connect_s3('the_key', 'the_secret') - bucket = conn.create_bucket('foobar') - bucket.configure_versioning(versioning=True) - - key = Key(bucket, 'the-key') - key.version_id.should.be.none - key.set_contents_from_string("Version 1") - key.version_id.should.equal('0') - key.set_contents_from_string("Version 2") - key.version_id.should.equal('1') - - versions = list(bucket.list_versions()) - - versions.should.have.length_of(2) - - versions[0].name.should.equal('the-key') - versions[0].version_id.should.equal('0') - versions[0].get_contents_as_string().should.equal(b"Version 1") - - versions[1].name.should.equal('the-key') - versions[1].version_id.should.equal('1') - versions[1].get_contents_as_string().should.equal(b"Version 2") - - key = Key(bucket, 'the2-key') - key.set_contents_from_string("Version 1") - - keys = list(bucket.list()) - keys.should.have.length_of(2) - versions = list(bucket.list_versions(prefix='the2-')) - versions.should.have.length_of(1) - - -@mock_s3_deprecated -def test_acl_setting(): - conn = 
boto.connect_s3() - bucket = conn.create_bucket('foobar') - content = b'imafile' - keyname = 'test.txt' - - key = Key(bucket, name=keyname) - key.content_type = 'text/plain' - key.set_contents_from_string(content) - key.make_public() - - key = bucket.get_key(keyname) - - assert key.get_contents_as_string() == content - - grants = key.get_acl().acl.grants - assert any(g.uri == 'http://acs.amazonaws.com/groups/global/AllUsers' and - g.permission == 'READ' for g in grants), grants - - -@mock_s3_deprecated -def test_acl_setting_via_headers(): - conn = boto.connect_s3() - bucket = conn.create_bucket('foobar') - content = b'imafile' - keyname = 'test.txt' - - key = Key(bucket, name=keyname) - key.content_type = 'text/plain' - key.set_contents_from_string(content, headers={ - 'x-amz-grant-full-control': 'uri="http://acs.amazonaws.com/groups/global/AllUsers"' - }) - - key = bucket.get_key(keyname) - - assert key.get_contents_as_string() == content - - grants = key.get_acl().acl.grants - assert any(g.uri == 'http://acs.amazonaws.com/groups/global/AllUsers' and - g.permission == 'FULL_CONTROL' for g in grants), grants - - -@mock_s3_deprecated -def test_acl_switching(): - conn = boto.connect_s3() - bucket = conn.create_bucket('foobar') - content = b'imafile' - keyname = 'test.txt' - - key = Key(bucket, name=keyname) - key.content_type = 'text/plain' - key.set_contents_from_string(content, policy='public-read') - key.set_acl('private') - - grants = key.get_acl().acl.grants - assert not any(g.uri == 'http://acs.amazonaws.com/groups/global/AllUsers' and - g.permission == 'READ' for g in grants), grants - - -@mock_s3_deprecated -def test_bucket_acl_setting(): - conn = boto.connect_s3() - bucket = conn.create_bucket('foobar') - - bucket.make_public() - - grants = bucket.get_acl().acl.grants - assert any(g.uri == 'http://acs.amazonaws.com/groups/global/AllUsers' and - g.permission == 'READ' for g in grants), grants - - -@mock_s3_deprecated -def test_bucket_acl_switching(): - conn = 
boto.connect_s3() - bucket = conn.create_bucket('foobar') - bucket.make_public() - - bucket.set_acl('private') - - grants = bucket.get_acl().acl.grants - assert not any(g.uri == 'http://acs.amazonaws.com/groups/global/AllUsers' and - g.permission == 'READ' for g in grants), grants - - -@mock_s3 -def test_s3_object_in_public_bucket(): - s3 = boto3.resource('s3') - bucket = s3.Bucket('test-bucket') - bucket.create(ACL='public-read') - bucket.put_object(Body=b'ABCD', Key='file.txt') - - s3_anonymous = boto3.resource('s3') - s3_anonymous.meta.client.meta.events.register('choose-signer.s3.*', disable_signing) - - contents = s3_anonymous.Object(key='file.txt', bucket_name='test-bucket').get()['Body'].read() - contents.should.equal(b'ABCD') - - bucket.put_object(ACL='private', Body=b'ABCD', Key='file.txt') - - with assert_raises(ClientError) as exc: - s3_anonymous.Object(key='file.txt', bucket_name='test-bucket').get() - exc.exception.response['Error']['Code'].should.equal('403') - - params = {'Bucket': 'test-bucket', 'Key': 'file.txt'} - presigned_url = boto3.client('s3').generate_presigned_url('get_object', params, ExpiresIn=900) - response = requests.get(presigned_url) - assert response.status_code == 200 - - -@mock_s3 -def test_s3_object_in_private_bucket(): - s3 = boto3.resource('s3') - bucket = s3.Bucket('test-bucket') - bucket.create(ACL='private') - bucket.put_object(ACL='private', Body=b'ABCD', Key='file.txt') - - s3_anonymous = boto3.resource('s3') - s3_anonymous.meta.client.meta.events.register('choose-signer.s3.*', disable_signing) - - with assert_raises(ClientError) as exc: - s3_anonymous.Object(key='file.txt', bucket_name='test-bucket').get() - exc.exception.response['Error']['Code'].should.equal('403') - - bucket.put_object(ACL='public-read', Body=b'ABCD', Key='file.txt') - contents = s3_anonymous.Object(key='file.txt', bucket_name='test-bucket').get()['Body'].read() - contents.should.equal(b'ABCD') - - -@mock_s3_deprecated -def test_unicode_key(): - conn = 
boto.connect_s3() - bucket = conn.create_bucket('mybucket') - key = Key(bucket) - key.key = u'こんにちは.jpg' - key.set_contents_from_string('Hello world!') - assert [listed_key.key for listed_key in bucket.list()] == [key.key] - fetched_key = bucket.get_key(key.key) - assert fetched_key.key == key.key - assert fetched_key.get_contents_as_string().decode("utf-8") == 'Hello world!' - - -@mock_s3_deprecated -def test_unicode_value(): - conn = boto.connect_s3() - bucket = conn.create_bucket('mybucket') - key = Key(bucket) - key.key = 'some_key' - key.set_contents_from_string(u'こんにちは.jpg') - list(bucket.list()) - key = bucket.get_key(key.key) - assert key.get_contents_as_string().decode("utf-8") == u'こんにちは.jpg' - - -@mock_s3_deprecated -def test_setting_content_encoding(): - conn = boto.connect_s3() - bucket = conn.create_bucket('mybucket') - key = bucket.new_key("keyname") - key.set_metadata("Content-Encoding", "gzip") - compressed_data = "abcdef" - key.set_contents_from_string(compressed_data) - - key = bucket.get_key("keyname") - key.content_encoding.should.equal("gzip") - - -@mock_s3_deprecated -def test_bucket_location(): - conn = boto.s3.connect_to_region("us-west-2") - bucket = conn.create_bucket('mybucket') - bucket.get_location().should.equal("us-west-2") - - -@mock_s3_deprecated -def test_ranged_get(): - conn = boto.connect_s3() - bucket = conn.create_bucket('mybucket') - key = Key(bucket) - key.key = 'bigkey' - rep = b"0123456789" - key.set_contents_from_string(rep * 10) - - # Implicitly bounded range requests. - key.get_contents_as_string( - headers={'Range': 'bytes=0-'}).should.equal(rep * 10) - key.get_contents_as_string( - headers={'Range': 'bytes=50-'}).should.equal(rep * 5) - key.get_contents_as_string( - headers={'Range': 'bytes=99-'}).should.equal(b'9') - - # Explicitly bounded range requests starting from the first byte. 
- key.get_contents_as_string( - headers={'Range': 'bytes=0-0'}).should.equal(b'0') - key.get_contents_as_string( - headers={'Range': 'bytes=0-49'}).should.equal(rep * 5) - key.get_contents_as_string( - headers={'Range': 'bytes=0-99'}).should.equal(rep * 10) - key.get_contents_as_string( - headers={'Range': 'bytes=0-100'}).should.equal(rep * 10) - key.get_contents_as_string( - headers={'Range': 'bytes=0-700'}).should.equal(rep * 10) - - # Explicitly bounded range requests starting from the / a middle byte. - key.get_contents_as_string( - headers={'Range': 'bytes=50-54'}).should.equal(rep[:5]) - key.get_contents_as_string( - headers={'Range': 'bytes=50-99'}).should.equal(rep * 5) - key.get_contents_as_string( - headers={'Range': 'bytes=50-100'}).should.equal(rep * 5) - key.get_contents_as_string( - headers={'Range': 'bytes=50-700'}).should.equal(rep * 5) - - # Explicitly bounded range requests starting from the last byte. - key.get_contents_as_string( - headers={'Range': 'bytes=99-99'}).should.equal(b'9') - key.get_contents_as_string( - headers={'Range': 'bytes=99-100'}).should.equal(b'9') - key.get_contents_as_string( - headers={'Range': 'bytes=99-700'}).should.equal(b'9') - - # Suffix range requests. 
- key.get_contents_as_string( - headers={'Range': 'bytes=-1'}).should.equal(b'9') - key.get_contents_as_string( - headers={'Range': 'bytes=-60'}).should.equal(rep * 6) - key.get_contents_as_string( - headers={'Range': 'bytes=-100'}).should.equal(rep * 10) - key.get_contents_as_string( - headers={'Range': 'bytes=-101'}).should.equal(rep * 10) - key.get_contents_as_string( - headers={'Range': 'bytes=-700'}).should.equal(rep * 10) - - key.size.should.equal(100) - - -@mock_s3_deprecated -def test_policy(): - conn = boto.connect_s3() - bucket_name = 'mybucket' - bucket = conn.create_bucket(bucket_name) - - policy = json.dumps({ - "Version": "2012-10-17", - "Id": "PutObjPolicy", - "Statement": [ - { - "Sid": "DenyUnEncryptedObjectUploads", - "Effect": "Deny", - "Principal": "*", - "Action": "s3:PutObject", - "Resource": "arn:aws:s3:::{bucket_name}/*".format(bucket_name=bucket_name), - "Condition": { - "StringNotEquals": { - "s3:x-amz-server-side-encryption": "aws:kms" - } - } - } - ] - }) - - with assert_raises(S3ResponseError) as err: - bucket.get_policy() - - ex = err.exception - ex.box_usage.should.be.none - ex.error_code.should.equal('NoSuchBucketPolicy') - ex.message.should.equal('The bucket policy does not exist') - ex.reason.should.equal('Not Found') - ex.resource.should.be.none - ex.status.should.equal(404) - ex.body.should.contain(bucket_name) - ex.request_id.should_not.be.none - - bucket.set_policy(policy).should.be.true - - bucket = conn.get_bucket(bucket_name) - - bucket.get_policy().decode('utf-8').should.equal(policy) - - bucket.delete_policy() - - with assert_raises(S3ResponseError) as err: - bucket.get_policy() - - -@mock_s3_deprecated -def test_website_configuration_xml(): - conn = boto.connect_s3() - bucket = conn.create_bucket('test-bucket') - bucket.set_website_configuration_xml(TEST_XML) - bucket.get_website_configuration_xml().should.equal(TEST_XML) - - -@mock_s3_deprecated -def test_key_with_trailing_slash_in_ordinary_calling_format(): - conn = 
boto.connect_s3( - 'access_key', - 'secret_key', - calling_format=boto.s3.connection.OrdinaryCallingFormat() - ) - bucket = conn.create_bucket('test_bucket_name') - - key_name = 'key_with_slash/' - - key = Key(bucket, key_name) - key.set_contents_from_string('some value') - - [k.name for k in bucket.get_all_keys()].should.contain(key_name) - - -""" -boto3 -""" - - -@mock_s3 -def test_boto3_key_etag(): - s3 = boto3.client('s3', region_name='us-east-1') - s3.create_bucket(Bucket='mybucket') - s3.put_object(Bucket='mybucket', Key='steve', Body=b'is awesome') - resp = s3.get_object(Bucket='mybucket', Key='steve') - resp['ETag'].should.equal('"d32bda93738f7e03adb22e66c90fbc04"') - - -@mock_s3 -def test_website_redirect_location(): - s3 = boto3.client('s3', region_name='us-east-1') - s3.create_bucket(Bucket='mybucket') - - s3.put_object(Bucket='mybucket', Key='steve', Body=b'is awesome') - resp = s3.get_object(Bucket='mybucket', Key='steve') - resp.get('WebsiteRedirectLocation').should.be.none - - url = 'https://github.com/spulec/moto' - s3.put_object(Bucket='mybucket', Key='steve', Body=b'is awesome', WebsiteRedirectLocation=url) - resp = s3.get_object(Bucket='mybucket', Key='steve') - resp['WebsiteRedirectLocation'].should.equal(url) - - -@mock_s3 -def test_boto3_list_keys_xml_escaped(): - s3 = boto3.client('s3', region_name='us-east-1') - s3.create_bucket(Bucket='mybucket') - key_name = 'Q&A.txt' - s3.put_object(Bucket='mybucket', Key=key_name, Body=b'is awesome') - - resp = s3.list_objects_v2(Bucket='mybucket', Prefix=key_name) - - assert resp['Contents'][0]['Key'] == key_name - assert resp['KeyCount'] == 1 - assert resp['MaxKeys'] == 1000 - assert resp['Prefix'] == key_name - assert resp['IsTruncated'] == False - assert 'Delimiter' not in resp - assert 'StartAfter' not in resp - assert 'NextContinuationToken' not in resp - assert 'Owner' not in resp['Contents'][0] - - -@mock_s3 -def test_boto3_list_objects_v2_truncated_response(): - s3 = boto3.client('s3', 
region_name='us-east-1') - s3.create_bucket(Bucket='mybucket') - s3.put_object(Bucket='mybucket', Key='one', Body=b'1') - s3.put_object(Bucket='mybucket', Key='two', Body=b'22') - s3.put_object(Bucket='mybucket', Key='three', Body=b'333') - - # First list - resp = s3.list_objects_v2(Bucket='mybucket', MaxKeys=1) - listed_object = resp['Contents'][0] - - assert listed_object['Key'] == 'one' - assert resp['MaxKeys'] == 1 - assert resp['Prefix'] == '' - assert resp['KeyCount'] == 1 - assert resp['IsTruncated'] == True - assert 'Delimiter' not in resp - assert 'StartAfter' not in resp - assert 'Owner' not in listed_object # owner info was not requested - - next_token = resp['NextContinuationToken'] - - # Second list - resp = s3.list_objects_v2( - Bucket='mybucket', MaxKeys=1, ContinuationToken=next_token) - listed_object = resp['Contents'][0] - - assert listed_object['Key'] == 'three' - assert resp['MaxKeys'] == 1 - assert resp['Prefix'] == '' - assert resp['KeyCount'] == 1 - assert resp['IsTruncated'] == True - assert 'Delimiter' not in resp - assert 'StartAfter' not in resp - assert 'Owner' not in listed_object - - next_token = resp['NextContinuationToken'] - - # Third list - resp = s3.list_objects_v2( - Bucket='mybucket', MaxKeys=1, ContinuationToken=next_token) - listed_object = resp['Contents'][0] - - assert listed_object['Key'] == 'two' - assert resp['MaxKeys'] == 1 - assert resp['Prefix'] == '' - assert resp['KeyCount'] == 1 - assert resp['IsTruncated'] == False - assert 'Delimiter' not in resp - assert 'Owner' not in listed_object - assert 'StartAfter' not in resp - assert 'NextContinuationToken' not in resp - - -@mock_s3 -def test_boto3_list_objects_v2_truncated_response_start_after(): - s3 = boto3.client('s3', region_name='us-east-1') - s3.create_bucket(Bucket='mybucket') - s3.put_object(Bucket='mybucket', Key='one', Body=b'1') - s3.put_object(Bucket='mybucket', Key='two', Body=b'22') - s3.put_object(Bucket='mybucket', Key='three', Body=b'333') - - # First 
list - resp = s3.list_objects_v2(Bucket='mybucket', MaxKeys=1, StartAfter='one') - listed_object = resp['Contents'][0] - - assert listed_object['Key'] == 'three' - assert resp['MaxKeys'] == 1 - assert resp['Prefix'] == '' - assert resp['KeyCount'] == 1 - assert resp['IsTruncated'] == True - assert resp['StartAfter'] == 'one' - assert 'Delimiter' not in resp - assert 'Owner' not in listed_object - - next_token = resp['NextContinuationToken'] - - # Second list - # The ContinuationToken must take precedence over StartAfter. - resp = s3.list_objects_v2(Bucket='mybucket', MaxKeys=1, StartAfter='one', - ContinuationToken=next_token) - listed_object = resp['Contents'][0] - - assert listed_object['Key'] == 'two' - assert resp['MaxKeys'] == 1 - assert resp['Prefix'] == '' - assert resp['KeyCount'] == 1 - assert resp['IsTruncated'] == False - # When ContinuationToken is given, StartAfter is ignored. This also means - # AWS does not return it in the response. - assert 'StartAfter' not in resp - assert 'Delimiter' not in resp - assert 'Owner' not in listed_object - - -@mock_s3 -def test_boto3_list_objects_v2_fetch_owner(): - s3 = boto3.client('s3', region_name='us-east-1') - s3.create_bucket(Bucket='mybucket') - s3.put_object(Bucket='mybucket', Key='one', Body=b'11') - - resp = s3.list_objects_v2(Bucket='mybucket', FetchOwner=True) - owner = resp['Contents'][0]['Owner'] - - assert 'ID' in owner - assert 'DisplayName' in owner - assert len(owner.keys()) == 2 - - -@mock_s3 -def test_boto3_bucket_create(): - s3 = boto3.resource('s3', region_name='us-east-1') - s3.create_bucket(Bucket="blah") - - s3.Object('blah', 'hello.txt').put(Body="some text") - - s3.Object('blah', 'hello.txt').get()['Body'].read().decode( - "utf-8").should.equal("some text") - - -@mock_s3 -def test_bucket_create_duplicate(): - s3 = boto3.resource('s3', region_name='us-west-2') - s3.create_bucket(Bucket="blah", CreateBucketConfiguration={ - 'LocationConstraint': 'us-west-2', - }) - with 
assert_raises(ClientError) as exc: - s3.create_bucket( - Bucket="blah", - CreateBucketConfiguration={ - 'LocationConstraint': 'us-west-2', - } - ) - exc.exception.response['Error']['Code'].should.equal('BucketAlreadyExists') - - -@mock_s3 -def test_boto3_bucket_create_eu_central(): - s3 = boto3.resource('s3', region_name='eu-central-1') - s3.create_bucket(Bucket="blah") - - s3.Object('blah', 'hello.txt').put(Body="some text") - - s3.Object('blah', 'hello.txt').get()['Body'].read().decode( - "utf-8").should.equal("some text") - - -@mock_s3 -def test_boto3_head_object(): - s3 = boto3.resource('s3', region_name='us-east-1') - s3.create_bucket(Bucket="blah") - - s3.Object('blah', 'hello.txt').put(Body="some text") - - s3.Object('blah', 'hello.txt').meta.client.head_object( - Bucket='blah', Key='hello.txt') - - with assert_raises(ClientError) as e: - s3.Object('blah', 'hello2.txt').meta.client.head_object( - Bucket='blah', Key='hello_bad.txt') - e.exception.response['Error']['Code'].should.equal('404') - - -@mock_s3 -def test_boto3_bucket_deletion(): - cli = boto3.client('s3', region_name='us-east-1') - cli.create_bucket(Bucket="foobar") - - cli.put_object(Bucket="foobar", Key="the-key", Body="some value") - - # Try to delete a bucket that still has keys - cli.delete_bucket.when.called_with(Bucket="foobar").should.throw( - cli.exceptions.ClientError, - ('An error occurred (BucketNotEmpty) when calling the DeleteBucket operation: ' - 'The bucket you tried to delete is not empty')) - - cli.delete_object(Bucket="foobar", Key="the-key") - cli.delete_bucket(Bucket="foobar") - - # Get non-existing bucket - cli.head_bucket.when.called_with(Bucket="foobar").should.throw( - cli.exceptions.ClientError, - "An error occurred (404) when calling the HeadBucket operation: Not Found") - - # Delete non-existing bucket - cli.delete_bucket.when.called_with(Bucket="foobar").should.throw(cli.exceptions.NoSuchBucket) - - -@mock_s3 -def test_boto3_get_object(): - s3 = boto3.resource('s3', 
region_name='us-east-1') - s3.create_bucket(Bucket="blah") - - s3.Object('blah', 'hello.txt').put(Body="some text") - - s3.Object('blah', 'hello.txt').meta.client.head_object( - Bucket='blah', Key='hello.txt') - - with assert_raises(ClientError) as e: - s3.Object('blah', 'hello2.txt').get() - - e.exception.response['Error']['Code'].should.equal('NoSuchKey') - - -@mock_s3 -def test_boto3_head_object_with_versioning(): - s3 = boto3.resource('s3', region_name='us-east-1') - bucket = s3.create_bucket(Bucket='blah') - bucket.Versioning().enable() - - old_content = 'some text' - new_content = 'some new text' - s3.Object('blah', 'hello.txt').put(Body=old_content) - s3.Object('blah', 'hello.txt').put(Body=new_content) - - head_object = s3.Object('blah', 'hello.txt').meta.client.head_object( - Bucket='blah', Key='hello.txt') - head_object['VersionId'].should.equal('1') - head_object['ContentLength'].should.equal(len(new_content)) - - old_head_object = s3.Object('blah', 'hello.txt').meta.client.head_object( - Bucket='blah', Key='hello.txt', VersionId='0') - old_head_object['VersionId'].should.equal('0') - old_head_object['ContentLength'].should.equal(len(old_content)) - - -@mock_s3 -def test_boto3_copy_object_with_versioning(): - client = boto3.client('s3', region_name='us-east-1') - - client.create_bucket(Bucket='blah', CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}) - client.put_bucket_versioning(Bucket='blah', VersioningConfiguration={'Status': 'Enabled'}) - - client.put_object(Bucket='blah', Key='test1', Body=b'test1') - client.put_object(Bucket='blah', Key='test2', Body=b'test2') - - obj1_version = client.get_object(Bucket='blah', Key='test1')['VersionId'] - obj2_version = client.get_object(Bucket='blah', Key='test2')['VersionId'] - - # Versions should be the same - obj1_version.should.equal(obj2_version) - - client.copy_object(CopySource={'Bucket': 'blah', 'Key': 'test1'}, Bucket='blah', Key='test2') - obj2_version_new = client.get_object(Bucket='blah', 
Key='test2')['VersionId'] - - # Version should be different to previous version - obj2_version_new.should_not.equal(obj2_version) - - -@mock_s3 -def test_boto3_deleted_versionings_list(): - client = boto3.client('s3', region_name='us-east-1') - - client.create_bucket(Bucket='blah') - client.put_bucket_versioning(Bucket='blah', VersioningConfiguration={'Status': 'Enabled'}) - - client.put_object(Bucket='blah', Key='test1', Body=b'test1') - client.put_object(Bucket='blah', Key='test2', Body=b'test2') - client.delete_objects(Bucket='blah', Delete={'Objects': [{'Key': 'test1'}]}) - - listed = client.list_objects_v2(Bucket='blah') - assert len(listed['Contents']) == 1 - - -@mock_s3 -def test_boto3_delete_versioned_bucket(): - client = boto3.client('s3', region_name='us-east-1') - - client.create_bucket(Bucket='blah') - client.put_bucket_versioning(Bucket='blah', VersioningConfiguration={'Status': 'Enabled'}) - - resp = client.put_object(Bucket='blah', Key='test1', Body=b'test1') - client.delete_object(Bucket='blah', Key='test1', VersionId=resp["VersionId"]) - - client.delete_bucket(Bucket='blah') - - -@mock_s3 -def test_boto3_head_object_if_modified_since(): - s3 = boto3.client('s3', region_name='us-east-1') - bucket_name = "blah" - s3.create_bucket(Bucket=bucket_name) - - key = 'hello.txt' - - s3.put_object( - Bucket=bucket_name, - Key=key, - Body='test' - ) - - with assert_raises(botocore.exceptions.ClientError) as err: - s3.head_object( - Bucket=bucket_name, - Key=key, - IfModifiedSince=datetime.datetime.utcnow() + datetime.timedelta(hours=1) - ) - e = err.exception - e.response['Error'].should.equal({'Code': '304', 'Message': 'Not Modified'}) - - -@mock_s3 -@reduced_min_part_size -def test_boto3_multipart_etag(): - # Create Bucket so that test can run - s3 = boto3.client('s3', region_name='us-east-1') - s3.create_bucket(Bucket='mybucket') - - upload_id = s3.create_multipart_upload( - Bucket='mybucket', Key='the-key')['UploadId'] - part1 = b'0' * REDUCED_PART_SIZE - 
etags = [] - etags.append( - s3.upload_part(Bucket='mybucket', Key='the-key', PartNumber=1, - UploadId=upload_id, Body=part1)['ETag']) - # last part, can be less than 5 MB - part2 = b'1' - etags.append( - s3.upload_part(Bucket='mybucket', Key='the-key', PartNumber=2, - UploadId=upload_id, Body=part2)['ETag']) - s3.complete_multipart_upload( - Bucket='mybucket', Key='the-key', UploadId=upload_id, - MultipartUpload={'Parts': [{'ETag': etag, 'PartNumber': i} - for i, etag in enumerate(etags, 1)]}) - # we should get both parts as the key contents - resp = s3.get_object(Bucket='mybucket', Key='the-key') - resp['ETag'].should.equal(EXPECTED_ETAG) - - -@mock_s3 -def test_boto3_put_object_with_tagging(): - s3 = boto3.client('s3', region_name='us-east-1') - bucket_name = 'mybucket' - key = 'key-with-tags' - s3.create_bucket(Bucket=bucket_name) - - s3.put_object( - Bucket=bucket_name, - Key=key, - Body='test', - Tagging='foo=bar', - ) - - resp = s3.get_object_tagging(Bucket=bucket_name, Key=key) - - resp['TagSet'].should.contain({'Key': 'foo', 'Value': 'bar'}) - - -@mock_s3 -def test_boto3_put_bucket_tagging(): - s3 = boto3.client("s3", region_name="us-east-1") - bucket_name = "mybucket" - s3.create_bucket(Bucket=bucket_name) - - # With 1 tag: - resp = s3.put_bucket_tagging(Bucket=bucket_name, - Tagging={ - "TagSet": [ - { - "Key": "TagOne", - "Value": "ValueOne" - } - ] - }) - resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) - - # With multiple tags: - resp = s3.put_bucket_tagging(Bucket=bucket_name, - Tagging={ - "TagSet": [ - { - "Key": "TagOne", - "Value": "ValueOne" - }, - { - "Key": "TagTwo", - "Value": "ValueTwo" - } - ] - }) - - resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) - - # No tags is also OK: - resp = s3.put_bucket_tagging(Bucket=bucket_name, Tagging={ - "TagSet": [] - }) - resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) - - -@mock_s3 -def test_boto3_get_bucket_tagging(): - s3 = boto3.client("s3", 
region_name="us-east-1") - bucket_name = "mybucket" - s3.create_bucket(Bucket=bucket_name) - s3.put_bucket_tagging(Bucket=bucket_name, - Tagging={ - "TagSet": [ - { - "Key": "TagOne", - "Value": "ValueOne" - }, - { - "Key": "TagTwo", - "Value": "ValueTwo" - } - ] - }) - - # Get the tags for the bucket: - resp = s3.get_bucket_tagging(Bucket=bucket_name) - resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) - len(resp["TagSet"]).should.equal(2) - - # With no tags: - s3.put_bucket_tagging(Bucket=bucket_name, Tagging={ - "TagSet": [] - }) - - with assert_raises(ClientError) as err: - s3.get_bucket_tagging(Bucket=bucket_name) - - e = err.exception - e.response["Error"]["Code"].should.equal("NoSuchTagSet") - e.response["Error"]["Message"].should.equal("The TagSet does not exist") - - -@mock_s3 -def test_boto3_delete_bucket_tagging(): - s3 = boto3.client("s3", region_name="us-east-1") - bucket_name = "mybucket" - s3.create_bucket(Bucket=bucket_name) - - s3.put_bucket_tagging(Bucket=bucket_name, - Tagging={ - "TagSet": [ - { - "Key": "TagOne", - "Value": "ValueOne" - }, - { - "Key": "TagTwo", - "Value": "ValueTwo" - } - ] - }) - - resp = s3.delete_bucket_tagging(Bucket=bucket_name) - resp['ResponseMetadata']['HTTPStatusCode'].should.equal(204) - - with assert_raises(ClientError) as err: - s3.get_bucket_tagging(Bucket=bucket_name) - - e = err.exception - e.response["Error"]["Code"].should.equal("NoSuchTagSet") - e.response["Error"]["Message"].should.equal("The TagSet does not exist") - - -@mock_s3 -def test_boto3_put_bucket_cors(): - s3 = boto3.client("s3", region_name="us-east-1") - bucket_name = "mybucket" - s3.create_bucket(Bucket=bucket_name) - - resp = s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={ - "CORSRules": [ - { - "AllowedOrigins": [ - "*" - ], - "AllowedMethods": [ - "GET", - "POST" - ], - "AllowedHeaders": [ - "Authorization" - ], - "ExposeHeaders": [ - "x-amz-request-id" - ], - "MaxAgeSeconds": 123 - }, - { - "AllowedOrigins": [ - "*" - 
], - "AllowedMethods": [ - "PUT" - ], - "AllowedHeaders": [ - "Authorization" - ], - "ExposeHeaders": [ - "x-amz-request-id" - ], - "MaxAgeSeconds": 123 - } - ] - }) - - resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) - - with assert_raises(ClientError) as err: - s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={ - "CORSRules": [ - { - "AllowedOrigins": [ - "*" - ], - "AllowedMethods": [ - "NOTREAL", - "POST" - ] - } - ] - }) - e = err.exception - e.response["Error"]["Code"].should.equal("InvalidRequest") - e.response["Error"]["Message"].should.equal("Found unsupported HTTP method in CORS config. " - "Unsupported method is NOTREAL") - - with assert_raises(ClientError) as err: - s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={ - "CORSRules": [] - }) - e = err.exception - e.response["Error"]["Code"].should.equal("MalformedXML") - - # And 101: - many_rules = [{"AllowedOrigins": ["*"], "AllowedMethods": ["GET"]}] * 101 - with assert_raises(ClientError) as err: - s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={ - "CORSRules": many_rules - }) - e = err.exception - e.response["Error"]["Code"].should.equal("MalformedXML") - - -@mock_s3 -def test_boto3_get_bucket_cors(): - s3 = boto3.client("s3", region_name="us-east-1") - bucket_name = "mybucket" - s3.create_bucket(Bucket=bucket_name) - - # Without CORS: - with assert_raises(ClientError) as err: - s3.get_bucket_cors(Bucket=bucket_name) - - e = err.exception - e.response["Error"]["Code"].should.equal("NoSuchCORSConfiguration") - e.response["Error"]["Message"].should.equal("The CORS configuration does not exist") - - s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={ - "CORSRules": [ - { - "AllowedOrigins": [ - "*" - ], - "AllowedMethods": [ - "GET", - "POST" - ], - "AllowedHeaders": [ - "Authorization" - ], - "ExposeHeaders": [ - "x-amz-request-id" - ], - "MaxAgeSeconds": 123 - }, - { - "AllowedOrigins": [ - "*" - ], - "AllowedMethods": [ - "PUT" - ], - "AllowedHeaders": [ - 
"Authorization" - ], - "ExposeHeaders": [ - "x-amz-request-id" - ], - "MaxAgeSeconds": 123 - } - ] - }) - - resp = s3.get_bucket_cors(Bucket=bucket_name) - resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) - len(resp["CORSRules"]).should.equal(2) - - -@mock_s3 -def test_boto3_delete_bucket_cors(): - s3 = boto3.client("s3", region_name="us-east-1") - bucket_name = "mybucket" - s3.create_bucket(Bucket=bucket_name) - s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={ - "CORSRules": [ - { - "AllowedOrigins": [ - "*" - ], - "AllowedMethods": [ - "GET" - ] - } - ] - }) - - resp = s3.delete_bucket_cors(Bucket=bucket_name) - resp['ResponseMetadata']['HTTPStatusCode'].should.equal(204) - - # Verify deletion: - with assert_raises(ClientError) as err: - s3.get_bucket_cors(Bucket=bucket_name) - - e = err.exception - e.response["Error"]["Code"].should.equal("NoSuchCORSConfiguration") - e.response["Error"]["Message"].should.equal("The CORS configuration does not exist") - - -@mock_s3 -def test_put_bucket_acl_body(): - s3 = boto3.client("s3", region_name="us-east-1") - s3.create_bucket(Bucket="bucket") - bucket_owner = s3.get_bucket_acl(Bucket="bucket")["Owner"] - s3.put_bucket_acl(Bucket="bucket", AccessControlPolicy={ - "Grants": [ - { - "Grantee": { - "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery", - "Type": "Group" - }, - "Permission": "WRITE" - }, - { - "Grantee": { - "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery", - "Type": "Group" - }, - "Permission": "READ_ACP" - } - ], - "Owner": bucket_owner - }) - - result = s3.get_bucket_acl(Bucket="bucket") - assert len(result["Grants"]) == 2 - for g in result["Grants"]: - assert g["Grantee"]["URI"] == "http://acs.amazonaws.com/groups/s3/LogDelivery" - assert g["Grantee"]["Type"] == "Group" - assert g["Permission"] in ["WRITE", "READ_ACP"] - - # With one: - s3.put_bucket_acl(Bucket="bucket", AccessControlPolicy={ - "Grants": [ - { - "Grantee": { - "URI": 
"http://acs.amazonaws.com/groups/s3/LogDelivery", - "Type": "Group" - }, - "Permission": "WRITE" - } - ], - "Owner": bucket_owner - }) - result = s3.get_bucket_acl(Bucket="bucket") - assert len(result["Grants"]) == 1 - - # With no owner: - with assert_raises(ClientError) as err: - s3.put_bucket_acl(Bucket="bucket", AccessControlPolicy={ - "Grants": [ - { - "Grantee": { - "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery", - "Type": "Group" - }, - "Permission": "WRITE" - } - ] - }) - assert err.exception.response["Error"]["Code"] == "MalformedACLError" - - # With incorrect permission: - with assert_raises(ClientError) as err: - s3.put_bucket_acl(Bucket="bucket", AccessControlPolicy={ - "Grants": [ - { - "Grantee": { - "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery", - "Type": "Group" - }, - "Permission": "lskjflkasdjflkdsjfalisdjflkdsjf" - } - ], - "Owner": bucket_owner - }) - assert err.exception.response["Error"]["Code"] == "MalformedACLError" - - # Clear the ACLs: - result = s3.put_bucket_acl(Bucket="bucket", AccessControlPolicy={"Grants": [], "Owner": bucket_owner}) - assert not result.get("Grants") - - -@mock_s3 -def test_put_bucket_notification(): - s3 = boto3.client("s3", region_name="us-east-1") - s3.create_bucket(Bucket="bucket") - - # With no configuration: - result = s3.get_bucket_notification(Bucket="bucket") - assert not result.get("TopicConfigurations") - assert not result.get("QueueConfigurations") - assert not result.get("LambdaFunctionConfigurations") - - # Place proper topic configuration: - s3.put_bucket_notification_configuration(Bucket="bucket", - NotificationConfiguration={ - "TopicConfigurations": [ - { - "TopicArn": "arn:aws:sns:us-east-1:012345678910:mytopic", - "Events": [ - "s3:ObjectCreated:*", - "s3:ObjectRemoved:*" - ] - }, - { - "TopicArn": "arn:aws:sns:us-east-1:012345678910:myothertopic", - "Events": [ - "s3:ObjectCreated:*" - ], - "Filter": { - "Key": { - "FilterRules": [ - { - "Name": "prefix", - "Value": "images/" - 
}, - { - "Name": "suffix", - "Value": "png" - } - ] - } - } - } - ] - }) - - # Verify to completion: - result = s3.get_bucket_notification_configuration(Bucket="bucket") - assert len(result["TopicConfigurations"]) == 2 - assert not result.get("QueueConfigurations") - assert not result.get("LambdaFunctionConfigurations") - assert result["TopicConfigurations"][0]["TopicArn"] == "arn:aws:sns:us-east-1:012345678910:mytopic" - assert result["TopicConfigurations"][1]["TopicArn"] == "arn:aws:sns:us-east-1:012345678910:myothertopic" - assert len(result["TopicConfigurations"][0]["Events"]) == 2 - assert len(result["TopicConfigurations"][1]["Events"]) == 1 - assert result["TopicConfigurations"][0]["Events"][0] == "s3:ObjectCreated:*" - assert result["TopicConfigurations"][0]["Events"][1] == "s3:ObjectRemoved:*" - assert result["TopicConfigurations"][1]["Events"][0] == "s3:ObjectCreated:*" - assert result["TopicConfigurations"][0]["Id"] - assert result["TopicConfigurations"][1]["Id"] - assert not result["TopicConfigurations"][0].get("Filter") - assert len(result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"]) == 2 - assert result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"][0]["Name"] == "prefix" - assert result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"][0]["Value"] == "images/" - assert result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"][1]["Name"] == "suffix" - assert result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"][1]["Value"] == "png" - - # Place proper queue configuration: - s3.put_bucket_notification_configuration(Bucket="bucket", - NotificationConfiguration={ - "QueueConfigurations": [ - { - "Id": "SomeID", - "QueueArn": "arn:aws:sqs:us-east-1:012345678910:myQueue", - "Events": ["s3:ObjectCreated:*"], - "Filter": { - "Key": { - "FilterRules": [ - { - "Name": "prefix", - "Value": "images/" - } - ] - } - } - } - ] - }) - result = s3.get_bucket_notification_configuration(Bucket="bucket") - assert 
len(result["QueueConfigurations"]) == 1 - assert not result.get("TopicConfigurations") - assert not result.get("LambdaFunctionConfigurations") - assert result["QueueConfigurations"][0]["Id"] == "SomeID" - assert result["QueueConfigurations"][0]["QueueArn"] == "arn:aws:sqs:us-east-1:012345678910:myQueue" - assert result["QueueConfigurations"][0]["Events"][0] == "s3:ObjectCreated:*" - assert len(result["QueueConfigurations"][0]["Events"]) == 1 - assert len(result["QueueConfigurations"][0]["Filter"]["Key"]["FilterRules"]) == 1 - assert result["QueueConfigurations"][0]["Filter"]["Key"]["FilterRules"][0]["Name"] == "prefix" - assert result["QueueConfigurations"][0]["Filter"]["Key"]["FilterRules"][0]["Value"] == "images/" - - # Place proper Lambda configuration: - s3.put_bucket_notification_configuration(Bucket="bucket", - NotificationConfiguration={ - "LambdaFunctionConfigurations": [ - { - "LambdaFunctionArn": - "arn:aws:lambda:us-east-1:012345678910:function:lambda", - "Events": ["s3:ObjectCreated:*"], - "Filter": { - "Key": { - "FilterRules": [ - { - "Name": "prefix", - "Value": "images/" - } - ] - } - } - } - ] - }) - result = s3.get_bucket_notification_configuration(Bucket="bucket") - assert len(result["LambdaFunctionConfigurations"]) == 1 - assert not result.get("TopicConfigurations") - assert not result.get("QueueConfigurations") - assert result["LambdaFunctionConfigurations"][0]["Id"] - assert result["LambdaFunctionConfigurations"][0]["LambdaFunctionArn"] == \ - "arn:aws:lambda:us-east-1:012345678910:function:lambda" - assert result["LambdaFunctionConfigurations"][0]["Events"][0] == "s3:ObjectCreated:*" - assert len(result["LambdaFunctionConfigurations"][0]["Events"]) == 1 - assert len(result["LambdaFunctionConfigurations"][0]["Filter"]["Key"]["FilterRules"]) == 1 - assert result["LambdaFunctionConfigurations"][0]["Filter"]["Key"]["FilterRules"][0]["Name"] == "prefix" - assert result["LambdaFunctionConfigurations"][0]["Filter"]["Key"]["FilterRules"][0]["Value"] 
== "images/" - - # And with all 3 set: - s3.put_bucket_notification_configuration(Bucket="bucket", - NotificationConfiguration={ - "TopicConfigurations": [ - { - "TopicArn": "arn:aws:sns:us-east-1:012345678910:mytopic", - "Events": [ - "s3:ObjectCreated:*", - "s3:ObjectRemoved:*" - ] - } - ], - "LambdaFunctionConfigurations": [ - { - "LambdaFunctionArn": - "arn:aws:lambda:us-east-1:012345678910:function:lambda", - "Events": ["s3:ObjectCreated:*"] - } - ], - "QueueConfigurations": [ - { - "QueueArn": "arn:aws:sqs:us-east-1:012345678910:myQueue", - "Events": ["s3:ObjectCreated:*"] - } - ] - }) - result = s3.get_bucket_notification_configuration(Bucket="bucket") - assert len(result["LambdaFunctionConfigurations"]) == 1 - assert len(result["TopicConfigurations"]) == 1 - assert len(result["QueueConfigurations"]) == 1 - - # And clear it out: - s3.put_bucket_notification_configuration(Bucket="bucket", NotificationConfiguration={}) - result = s3.get_bucket_notification_configuration(Bucket="bucket") - assert not result.get("TopicConfigurations") - assert not result.get("QueueConfigurations") - assert not result.get("LambdaFunctionConfigurations") - - -@mock_s3 -def test_put_bucket_notification_errors(): - s3 = boto3.client("s3", region_name="us-east-1") - s3.create_bucket(Bucket="bucket") - - # With incorrect ARNs: - for tech, arn in [("Queue", "sqs"), ("Topic", "sns"), ("LambdaFunction", "lambda")]: - with assert_raises(ClientError) as err: - s3.put_bucket_notification_configuration(Bucket="bucket", - NotificationConfiguration={ - "{}Configurations".format(tech): [ - { - "{}Arn".format(tech): - "arn:aws:{}:us-east-1:012345678910:lksajdfkldskfj", - "Events": ["s3:ObjectCreated:*"] - } - ] - }) - - assert err.exception.response["Error"]["Code"] == "InvalidArgument" - assert err.exception.response["Error"]["Message"] == "The ARN is not well formed" - - # Region not the same as the bucket: - with assert_raises(ClientError) as err: - 
s3.put_bucket_notification_configuration(Bucket="bucket", - NotificationConfiguration={ - "QueueConfigurations": [ - { - "QueueArn": - "arn:aws:sqs:us-west-2:012345678910:lksajdfkldskfj", - "Events": ["s3:ObjectCreated:*"] - } - ] - }) - - assert err.exception.response["Error"]["Code"] == "InvalidArgument" - assert err.exception.response["Error"]["Message"] == \ - "The notification destination service region is not valid for the bucket location constraint" - - # Invalid event name: - with assert_raises(ClientError) as err: - s3.put_bucket_notification_configuration(Bucket="bucket", - NotificationConfiguration={ - "QueueConfigurations": [ - { - "QueueArn": - "arn:aws:sqs:us-east-1:012345678910:lksajdfkldskfj", - "Events": ["notarealeventname"] - } - ] - }) - assert err.exception.response["Error"]["Code"] == "InvalidArgument" - assert err.exception.response["Error"]["Message"] == "The event is not supported for notifications" - - -@mock_s3 -def test_boto3_put_bucket_logging(): - s3 = boto3.client("s3", region_name="us-east-1") - bucket_name = "mybucket" - log_bucket = "logbucket" - wrong_region_bucket = "wrongregionlogbucket" - s3.create_bucket(Bucket=bucket_name) - s3.create_bucket(Bucket=log_bucket) # Adding the ACL for log-delivery later... 
- s3.create_bucket(Bucket=wrong_region_bucket, CreateBucketConfiguration={"LocationConstraint": "us-west-2"}) - - # No logging config: - result = s3.get_bucket_logging(Bucket=bucket_name) - assert not result.get("LoggingEnabled") - - # A log-bucket that doesn't exist: - with assert_raises(ClientError) as err: - s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={ - "LoggingEnabled": { - "TargetBucket": "IAMNOTREAL", - "TargetPrefix": "" - } - }) - assert err.exception.response["Error"]["Code"] == "InvalidTargetBucketForLogging" - - # A log-bucket that's missing the proper ACLs for LogDelivery: - with assert_raises(ClientError) as err: - s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={ - "LoggingEnabled": { - "TargetBucket": log_bucket, - "TargetPrefix": "" - } - }) - assert err.exception.response["Error"]["Code"] == "InvalidTargetBucketForLogging" - assert "log-delivery" in err.exception.response["Error"]["Message"] - - # Add the proper "log-delivery" ACL to the log buckets: - bucket_owner = s3.get_bucket_acl(Bucket=log_bucket)["Owner"] - for bucket in [log_bucket, wrong_region_bucket]: - s3.put_bucket_acl(Bucket=bucket, AccessControlPolicy={ - "Grants": [ - { - "Grantee": { - "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery", - "Type": "Group" - }, - "Permission": "WRITE" - }, - { - "Grantee": { - "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery", - "Type": "Group" - }, - "Permission": "READ_ACP" - }, - { - "Grantee": { - "Type": "CanonicalUser", - "ID": bucket_owner["ID"] - }, - "Permission": "FULL_CONTROL" - } - ], - "Owner": bucket_owner - }) - - # A log-bucket that's in the wrong region: - with assert_raises(ClientError) as err: - s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={ - "LoggingEnabled": { - "TargetBucket": wrong_region_bucket, - "TargetPrefix": "" - } - }) - assert err.exception.response["Error"]["Code"] == "CrossLocationLoggingProhibitted" - - # Correct logging: - 
s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={ - "LoggingEnabled": { - "TargetBucket": log_bucket, - "TargetPrefix": "{}/".format(bucket_name) - } - }) - result = s3.get_bucket_logging(Bucket=bucket_name) - assert result["LoggingEnabled"]["TargetBucket"] == log_bucket - assert result["LoggingEnabled"]["TargetPrefix"] == "{}/".format(bucket_name) - assert not result["LoggingEnabled"].get("TargetGrants") - - # And disabling: - s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={}) - assert not s3.get_bucket_logging(Bucket=bucket_name).get("LoggingEnabled") - - # And enabling with multiple target grants: - s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={ - "LoggingEnabled": { - "TargetBucket": log_bucket, - "TargetPrefix": "{}/".format(bucket_name), - "TargetGrants": [ - { - "Grantee": { - "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274", - "Type": "CanonicalUser" - }, - "Permission": "READ" - }, - { - "Grantee": { - "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274", - "Type": "CanonicalUser" - }, - "Permission": "WRITE" - } - ] - } - }) - - result = s3.get_bucket_logging(Bucket=bucket_name) - assert len(result["LoggingEnabled"]["TargetGrants"]) == 2 - assert result["LoggingEnabled"]["TargetGrants"][0]["Grantee"]["ID"] == \ - "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274" - - # Test with just 1 grant: - s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={ - "LoggingEnabled": { - "TargetBucket": log_bucket, - "TargetPrefix": "{}/".format(bucket_name), - "TargetGrants": [ - { - "Grantee": { - "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274", - "Type": "CanonicalUser" - }, - "Permission": "READ" - } - ] - } - }) - result = s3.get_bucket_logging(Bucket=bucket_name) - assert len(result["LoggingEnabled"]["TargetGrants"]) == 1 - - # With an invalid grant: - with assert_raises(ClientError) as err: - s3.put_bucket_logging(Bucket=bucket_name, 
BucketLoggingStatus={ - "LoggingEnabled": { - "TargetBucket": log_bucket, - "TargetPrefix": "{}/".format(bucket_name), - "TargetGrants": [ - { - "Grantee": { - "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274", - "Type": "CanonicalUser" - }, - "Permission": "NOTAREALPERM" - } - ] - } - }) - assert err.exception.response["Error"]["Code"] == "MalformedXML" - - -@mock_s3 -def test_boto3_put_object_tagging(): - s3 = boto3.client('s3', region_name='us-east-1') - bucket_name = 'mybucket' - key = 'key-with-tags' - s3.create_bucket(Bucket=bucket_name) - - with assert_raises(ClientError) as err: - s3.put_object_tagging( - Bucket=bucket_name, - Key=key, - Tagging={'TagSet': [ - {'Key': 'item1', 'Value': 'foo'}, - {'Key': 'item2', 'Value': 'bar'}, - ]} - ) - - e = err.exception - e.response['Error'].should.equal({ - 'Code': 'NoSuchKey', - 'Message': 'The specified key does not exist.', - 'RequestID': '7a62c49f-347e-4fc4-9331-6e8eEXAMPLE', - }) - - s3.put_object( - Bucket=bucket_name, - Key=key, - Body='test' - ) - - resp = s3.put_object_tagging( - Bucket=bucket_name, - Key=key, - Tagging={'TagSet': [ - {'Key': 'item1', 'Value': 'foo'}, - {'Key': 'item2', 'Value': 'bar'}, - ]} - ) - - resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) - - -@mock_s3 -def test_boto3_put_object_tagging_with_single_tag(): - s3 = boto3.client('s3', region_name='us-east-1') - bucket_name = 'mybucket' - key = 'key-with-tags' - s3.create_bucket(Bucket=bucket_name) - - s3.put_object( - Bucket=bucket_name, - Key=key, - Body='test' - ) - - resp = s3.put_object_tagging( - Bucket=bucket_name, - Key=key, - Tagging={'TagSet': [ - {'Key': 'item1', 'Value': 'foo'} - ]} - ) - - resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) - - -@mock_s3 -def test_boto3_get_object_tagging(): - s3 = boto3.client('s3', region_name='us-east-1') - bucket_name = 'mybucket' - key = 'key-with-tags' - s3.create_bucket(Bucket=bucket_name) - - s3.put_object( - Bucket=bucket_name, - Key=key, - 
Body='test' - ) - - resp = s3.get_object_tagging(Bucket=bucket_name, Key=key) - resp['TagSet'].should.have.length_of(0) - - resp = s3.put_object_tagging( - Bucket=bucket_name, - Key=key, - Tagging={'TagSet': [ - {'Key': 'item1', 'Value': 'foo'}, - {'Key': 'item2', 'Value': 'bar'}, - ]} - ) - resp = s3.get_object_tagging(Bucket=bucket_name, Key=key) - - resp['TagSet'].should.have.length_of(2) - resp['TagSet'].should.contain({'Key': 'item1', 'Value': 'foo'}) - resp['TagSet'].should.contain({'Key': 'item2', 'Value': 'bar'}) - - -@mock_s3 -def test_boto3_list_object_versions(): - s3 = boto3.client('s3', region_name='us-east-1') - bucket_name = 'mybucket' - key = 'key-with-versions' - s3.create_bucket(Bucket=bucket_name) - s3.put_bucket_versioning( - Bucket=bucket_name, - VersioningConfiguration={ - 'Status': 'Enabled' - } - ) - items = (six.b('v1'), six.b('v2')) - for body in items: - s3.put_object( - Bucket=bucket_name, - Key=key, - Body=body - ) - response = s3.list_object_versions( - Bucket=bucket_name - ) - # Two object versions should be returned - len(response['Versions']).should.equal(2) - keys = set([item['Key'] for item in response['Versions']]) - keys.should.equal({key}) - # Test latest object version is returned - response = s3.get_object(Bucket=bucket_name, Key=key) - response['Body'].read().should.equal(items[-1]) - - -@mock_s3 -def test_boto3_bad_prefix_list_object_versions(): - s3 = boto3.client('s3', region_name='us-east-1') - bucket_name = 'mybucket' - key = 'key-with-versions' - bad_prefix = 'key-that-does-not-exist' - s3.create_bucket(Bucket=bucket_name) - s3.put_bucket_versioning( - Bucket=bucket_name, - VersioningConfiguration={ - 'Status': 'Enabled' - } - ) - items = (six.b('v1'), six.b('v2')) - for body in items: - s3.put_object( - Bucket=bucket_name, - Key=key, - Body=body - ) - response = s3.list_object_versions( - Bucket=bucket_name, - Prefix=bad_prefix, - ) - response['ResponseMetadata']['HTTPStatusCode'].should.equal(200) - 
response.should_not.contain('Versions') - response.should_not.contain('DeleteMarkers') - - -@mock_s3 -def test_boto3_delete_markers(): - s3 = boto3.client('s3', region_name='us-east-1') - bucket_name = 'mybucket' - key = u'key-with-versions-and-unicode-ó' - s3.create_bucket(Bucket=bucket_name) - s3.put_bucket_versioning( - Bucket=bucket_name, - VersioningConfiguration={ - 'Status': 'Enabled' - } - ) - items = (six.b('v1'), six.b('v2')) - for body in items: - s3.put_object( - Bucket=bucket_name, - Key=key, - Body=body - ) - - s3.delete_objects(Bucket=bucket_name, Delete={'Objects': [{'Key': key}]}) - - with assert_raises(ClientError) as e: - s3.get_object( - Bucket=bucket_name, - Key=key - ) - e.response['Error']['Code'].should.equal('404') - - s3.delete_object( - Bucket=bucket_name, - Key=key, - VersionId='2' - ) - response = s3.get_object( - Bucket=bucket_name, - Key=key - ) - response['Body'].read().should.equal(items[-1]) - response = s3.list_object_versions( - Bucket=bucket_name - ) - response['Versions'].should.have.length_of(2) - - # We've asserted there is only 2 records so one is newest, one is oldest - latest = list(filter(lambda item: item['IsLatest'], response['Versions']))[0] - oldest = list(filter(lambda item: not item['IsLatest'], response['Versions']))[0] - - # Double check ordering of version ID's - latest['VersionId'].should.equal('1') - oldest['VersionId'].should.equal('0') - - # Double check the name is still unicode - latest['Key'].should.equal('key-with-versions-and-unicode-ó') - oldest['Key'].should.equal('key-with-versions-and-unicode-ó') - - -@mock_s3 -def test_boto3_multiple_delete_markers(): - s3 = boto3.client('s3', region_name='us-east-1') - bucket_name = 'mybucket' - key = u'key-with-versions-and-unicode-ó' - s3.create_bucket(Bucket=bucket_name) - s3.put_bucket_versioning( - Bucket=bucket_name, - VersioningConfiguration={ - 'Status': 'Enabled' - } - ) - items = (six.b('v1'), six.b('v2')) - for body in items: - s3.put_object( - 
Bucket=bucket_name, - Key=key, - Body=body - ) - - # Delete the object twice to add multiple delete markers - s3.delete_object(Bucket=bucket_name, Key=key) - s3.delete_object(Bucket=bucket_name, Key=key) - - response = s3.list_object_versions(Bucket=bucket_name) - response['DeleteMarkers'].should.have.length_of(2) - - with assert_raises(ClientError) as e: - s3.get_object( - Bucket=bucket_name, - Key=key - ) - e.response['Error']['Code'].should.equal('404') - - # Remove both delete markers to restore the object - s3.delete_object( - Bucket=bucket_name, - Key=key, - VersionId='2' - ) - s3.delete_object( - Bucket=bucket_name, - Key=key, - VersionId='3' - ) - - response = s3.get_object( - Bucket=bucket_name, - Key=key - ) - response['Body'].read().should.equal(items[-1]) - response = s3.list_object_versions(Bucket=bucket_name) - response['Versions'].should.have.length_of(2) - - # We've asserted there is only 2 records so one is newest, one is oldest - latest = list(filter(lambda item: item['IsLatest'], response['Versions']))[0] - oldest = list(filter(lambda item: not item['IsLatest'], response['Versions']))[0] - - # Double check ordering of version ID's - latest['VersionId'].should.equal('1') - oldest['VersionId'].should.equal('0') - - # Double check the name is still unicode - latest['Key'].should.equal('key-with-versions-and-unicode-ó') - oldest['Key'].should.equal('key-with-versions-and-unicode-ó') - -@mock_s3 -def test_get_stream_gzipped(): - payload = b"this is some stuff here" - - s3_client = boto3.client("s3", region_name='us-east-1') - s3_client.create_bucket(Bucket='moto-tests') - buffer_ = BytesIO() - with GzipFile(fileobj=buffer_, mode='w') as f: - f.write(payload) - payload_gz = buffer_.getvalue() - - s3_client.put_object( - Bucket='moto-tests', - Key='keyname', - Body=payload_gz, - ContentEncoding='gzip', - ) - - obj = s3_client.get_object( - Bucket='moto-tests', - Key='keyname', - ) - res = zlib.decompress(obj['Body'].read(), 16 + zlib.MAX_WBITS) - assert 
res == payload - - -TEST_XML = """\ - - - - index.html - - - - - test/testing - - - test.txt - - - - -""" +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +import datetime +from six.moves.urllib.request import urlopen +from six.moves.urllib.error import HTTPError +from functools import wraps +from gzip import GzipFile +from io import BytesIO +import zlib +import pickle + +import json +import boto +import boto3 +from botocore.client import ClientError +import botocore.exceptions +from boto.exception import S3CreateError, S3ResponseError +from botocore.handlers import disable_signing +from boto.s3.connection import S3Connection +from boto.s3.key import Key +from freezegun import freeze_time +import six +import requests +import tests.backport_assert_raises # noqa +from nose.tools import assert_raises + +import sure # noqa + +from moto import settings, mock_s3, mock_s3_deprecated +import moto.s3.models as s3model + +if settings.TEST_SERVER_MODE: + REDUCED_PART_SIZE = s3model.UPLOAD_PART_MIN_SIZE + EXPECTED_ETAG = '"140f92a6df9f9e415f74a1463bcee9bb-2"' +else: + REDUCED_PART_SIZE = 256 + EXPECTED_ETAG = '"66d1a1a2ed08fd05c137f316af4ff255-2"' + + +def reduced_min_part_size(f): + """ speed up tests by temporarily making the multipart minimum part size + small + """ + orig_size = s3model.UPLOAD_PART_MIN_SIZE + + @wraps(f) + def wrapped(*args, **kwargs): + try: + s3model.UPLOAD_PART_MIN_SIZE = REDUCED_PART_SIZE + return f(*args, **kwargs) + finally: + s3model.UPLOAD_PART_MIN_SIZE = orig_size + + return wrapped + + +class MyModel(object): + + def __init__(self, name, value): + self.name = name + self.value = value + + def save(self): + s3 = boto3.client('s3', region_name='us-east-1') + s3.put_object(Bucket='mybucket', Key=self.name, Body=self.value) + + +@mock_s3 +def test_keys_are_pickleable(): + """Keys must be pickleable due to boto3 implementation details.""" + key = s3model.FakeKey('name', b'data!') + assert key.value == b'data!' 
+ + pickled = pickle.dumps(key) + loaded = pickle.loads(pickled) + assert loaded.value == key.value + + +@mock_s3 +def test_append_to_value__basic(): + key = s3model.FakeKey('name', b'data!') + assert key.value == b'data!' + assert key.size == 5 + + key.append_to_value(b' And even more data') + assert key.value == b'data! And even more data' + assert key.size == 24 + + +@mock_s3 +def test_append_to_value__nothing_added(): + key = s3model.FakeKey('name', b'data!') + assert key.value == b'data!' + assert key.size == 5 + + key.append_to_value(b'') + assert key.value == b'data!' + assert key.size == 5 + + +@mock_s3 +def test_append_to_value__empty_key(): + key = s3model.FakeKey('name', b'') + assert key.value == b'' + assert key.size == 0 + + key.append_to_value(b'stuff') + assert key.value == b'stuff' + assert key.size == 5 + + +@mock_s3 +def test_my_model_save(): + # Create Bucket so that test can run + conn = boto3.resource('s3', region_name='us-east-1') + conn.create_bucket(Bucket='mybucket') + #################################### + + model_instance = MyModel('steve', 'is awesome') + model_instance.save() + + body = conn.Object('mybucket', 'steve').get()['Body'].read().decode() + + assert body == 'is awesome' + + +@mock_s3 +def test_key_etag(): + conn = boto3.resource('s3', region_name='us-east-1') + conn.create_bucket(Bucket='mybucket') + + model_instance = MyModel('steve', 'is awesome') + model_instance.save() + + conn.Bucket('mybucket').Object('steve').e_tag.should.equal( + '"d32bda93738f7e03adb22e66c90fbc04"') + + +@mock_s3_deprecated +def test_multipart_upload_too_small(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + + multipart = bucket.initiate_multipart_upload("the-key") + multipart.upload_part_from_file(BytesIO(b'hello'), 1) + multipart.upload_part_from_file(BytesIO(b'world'), 2) + # Multipart with total size under 5MB is refused + multipart.complete_upload.should.throw(S3ResponseError) + + 
+@mock_s3_deprecated +@reduced_min_part_size +def test_multipart_upload(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + + multipart = bucket.initiate_multipart_upload("the-key") + part1 = b'0' * REDUCED_PART_SIZE + multipart.upload_part_from_file(BytesIO(part1), 1) + # last part, can be less than 5 MB + part2 = b'1' + multipart.upload_part_from_file(BytesIO(part2), 2) + multipart.complete_upload() + # we should get both parts as the key contents + bucket.get_key( + "the-key").get_contents_as_string().should.equal(part1 + part2) + + +@mock_s3_deprecated +@reduced_min_part_size +def test_multipart_upload_out_of_order(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + + multipart = bucket.initiate_multipart_upload("the-key") + # last part, can be less than 5 MB + part2 = b'1' + multipart.upload_part_from_file(BytesIO(part2), 4) + part1 = b'0' * REDUCED_PART_SIZE + multipart.upload_part_from_file(BytesIO(part1), 2) + multipart.complete_upload() + # we should get both parts as the key contents + bucket.get_key( + "the-key").get_contents_as_string().should.equal(part1 + part2) + + +@mock_s3_deprecated +@reduced_min_part_size +def test_multipart_upload_with_headers(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + + multipart = bucket.initiate_multipart_upload( + "the-key", metadata={"foo": "bar"}) + part1 = b'0' * 10 + multipart.upload_part_from_file(BytesIO(part1), 1) + multipart.complete_upload() + + key = bucket.get_key("the-key") + key.metadata.should.equal({"foo": "bar"}) + + +@mock_s3_deprecated +@reduced_min_part_size +def test_multipart_upload_with_copy_key(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + key = Key(bucket) + key.key = "original-key" + key.set_contents_from_string("key_value") + + multipart = bucket.initiate_multipart_upload("the-key") + part1 = b'0' * 
REDUCED_PART_SIZE + multipart.upload_part_from_file(BytesIO(part1), 1) + multipart.copy_part_from_key("foobar", "original-key", 2, 0, 3) + multipart.complete_upload() + bucket.get_key( + "the-key").get_contents_as_string().should.equal(part1 + b"key_") + + +@mock_s3_deprecated +@reduced_min_part_size +def test_multipart_upload_cancel(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + + multipart = bucket.initiate_multipart_upload("the-key") + part1 = b'0' * REDUCED_PART_SIZE + multipart.upload_part_from_file(BytesIO(part1), 1) + multipart.cancel_upload() + # TODO we really need some sort of assertion here, but we don't currently + # have the ability to list mulipart uploads for a bucket. + + +@mock_s3_deprecated +@reduced_min_part_size +def test_multipart_etag(): + # Create Bucket so that test can run + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket('mybucket') + + multipart = bucket.initiate_multipart_upload("the-key") + part1 = b'0' * REDUCED_PART_SIZE + multipart.upload_part_from_file(BytesIO(part1), 1) + # last part, can be less than 5 MB + part2 = b'1' + multipart.upload_part_from_file(BytesIO(part2), 2) + multipart.complete_upload() + # we should get both parts as the key contents + bucket.get_key("the-key").etag.should.equal(EXPECTED_ETAG) + + +@mock_s3_deprecated +@reduced_min_part_size +def test_multipart_invalid_order(): + # Create Bucket so that test can run + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket('mybucket') + + multipart = bucket.initiate_multipart_upload("the-key") + part1 = b'0' * 5242880 + etag1 = multipart.upload_part_from_file(BytesIO(part1), 1).etag + # last part, can be less than 5 MB + part2 = b'1' + etag2 = multipart.upload_part_from_file(BytesIO(part2), 2).etag + xml = "{0}{1}" + xml = xml.format(2, etag2) + xml.format(1, etag1) + xml = "{0}".format(xml) + bucket.complete_multipart_upload.when.called_with( + multipart.key_name, 
multipart.id, xml).should.throw(S3ResponseError) + +@mock_s3_deprecated +@reduced_min_part_size +def test_multipart_etag_quotes_stripped(): + # Create Bucket so that test can run + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket('mybucket') + + multipart = bucket.initiate_multipart_upload("the-key") + part1 = b'0' * REDUCED_PART_SIZE + etag1 = multipart.upload_part_from_file(BytesIO(part1), 1).etag + # last part, can be less than 5 MB + part2 = b'1' + etag2 = multipart.upload_part_from_file(BytesIO(part2), 2).etag + # Strip quotes from etags + etag1 = etag1.replace('"','') + etag2 = etag2.replace('"','') + xml = "{0}{1}" + xml = xml.format(1, etag1) + xml.format(2, etag2) + xml = "{0}".format(xml) + bucket.complete_multipart_upload.when.called_with( + multipart.key_name, multipart.id, xml).should_not.throw(S3ResponseError) + # we should get both parts as the key contents + bucket.get_key("the-key").etag.should.equal(EXPECTED_ETAG) + +@mock_s3_deprecated +@reduced_min_part_size +def test_multipart_duplicate_upload(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + + multipart = bucket.initiate_multipart_upload("the-key") + part1 = b'0' * REDUCED_PART_SIZE + multipart.upload_part_from_file(BytesIO(part1), 1) + # same part again + multipart.upload_part_from_file(BytesIO(part1), 1) + part2 = b'1' * 1024 + multipart.upload_part_from_file(BytesIO(part2), 2) + multipart.complete_upload() + # We should get only one copy of part 1. 
+ bucket.get_key( + "the-key").get_contents_as_string().should.equal(part1 + part2) + + +@mock_s3_deprecated +def test_list_multiparts(): + # Create Bucket so that test can run + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket('mybucket') + + multipart1 = bucket.initiate_multipart_upload("one-key") + multipart2 = bucket.initiate_multipart_upload("two-key") + uploads = bucket.get_all_multipart_uploads() + uploads.should.have.length_of(2) + dict([(u.key_name, u.id) for u in uploads]).should.equal( + {'one-key': multipart1.id, 'two-key': multipart2.id}) + multipart2.cancel_upload() + uploads = bucket.get_all_multipart_uploads() + uploads.should.have.length_of(1) + uploads[0].key_name.should.equal("one-key") + multipart1.cancel_upload() + uploads = bucket.get_all_multipart_uploads() + uploads.should.be.empty + + +@mock_s3_deprecated +def test_key_save_to_missing_bucket(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.get_bucket('mybucket', validate=False) + + key = Key(bucket) + key.key = "the-key" + key.set_contents_from_string.when.called_with( + "foobar").should.throw(S3ResponseError) + + +@mock_s3_deprecated +def test_missing_key(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + bucket.get_key("the-key").should.equal(None) + + +@mock_s3_deprecated +def test_missing_key_urllib2(): + conn = boto.connect_s3('the_key', 'the_secret') + conn.create_bucket("foobar") + + urlopen.when.called_with( + "http://foobar.s3.amazonaws.com/the-key").should.throw(HTTPError) + + +@mock_s3_deprecated +def test_empty_key(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + key = Key(bucket) + key.key = "the-key" + key.set_contents_from_string("") + + key = bucket.get_key("the-key") + key.size.should.equal(0) + key.get_contents_as_string().should.equal(b'') + + +@mock_s3_deprecated +def test_empty_key_set_on_existing_key(): + conn = 
boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + key = Key(bucket) + key.key = "the-key" + key.set_contents_from_string("foobar") + + key = bucket.get_key("the-key") + key.size.should.equal(6) + key.get_contents_as_string().should.equal(b'foobar') + + key.set_contents_from_string("") + bucket.get_key("the-key").get_contents_as_string().should.equal(b'') + + +@mock_s3_deprecated +def test_large_key_save(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + key = Key(bucket) + key.key = "the-key" + key.set_contents_from_string("foobar" * 100000) + + bucket.get_key( + "the-key").get_contents_as_string().should.equal(b'foobar' * 100000) + + +@mock_s3_deprecated +def test_copy_key(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + key = Key(bucket) + key.key = "the-key" + key.set_contents_from_string("some value") + + bucket.copy_key('new-key', 'foobar', 'the-key') + + bucket.get_key( + "the-key").get_contents_as_string().should.equal(b"some value") + bucket.get_key( + "new-key").get_contents_as_string().should.equal(b"some value") + + +@mock_s3_deprecated +def test_copy_key_with_unicode(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + key = Key(bucket) + key.key = "the-unicode-💩-key" + key.set_contents_from_string("some value") + + bucket.copy_key('new-key', 'foobar', 'the-unicode-💩-key') + + bucket.get_key( + "the-unicode-💩-key").get_contents_as_string().should.equal(b"some value") + bucket.get_key( + "new-key").get_contents_as_string().should.equal(b"some value") + + +@mock_s3_deprecated +def test_copy_key_with_version(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + bucket.configure_versioning(versioning=True) + key = Key(bucket) + key.key = "the-key" + key.set_contents_from_string("some value") + key.set_contents_from_string("another value") + + key = [ + 
key.version_id + for key in bucket.get_all_versions() + if not key.is_latest + ][0] + bucket.copy_key('new-key', 'foobar', 'the-key', src_version_id=key) + + bucket.get_key( + "the-key").get_contents_as_string().should.equal(b"another value") + bucket.get_key( + "new-key").get_contents_as_string().should.equal(b"some value") + + +@mock_s3_deprecated +def test_set_metadata(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + key = Key(bucket) + key.key = 'the-key' + key.set_metadata('md', 'Metadatastring') + key.set_contents_from_string("Testval") + + bucket.get_key('the-key').get_metadata('md').should.equal('Metadatastring') + + +@mock_s3_deprecated +def test_copy_key_replace_metadata(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + key = Key(bucket) + key.key = "the-key" + key.set_metadata('md', 'Metadatastring') + key.set_contents_from_string("some value") + + bucket.copy_key('new-key', 'foobar', 'the-key', + metadata={'momd': 'Mometadatastring'}) + + bucket.get_key("new-key").get_metadata('md').should.be.none + bucket.get_key( + "new-key").get_metadata('momd').should.equal('Mometadatastring') + + +@freeze_time("2012-01-01 12:00:00") +@mock_s3_deprecated +def test_last_modified(): + # See https://github.com/boto/boto/issues/466 + conn = boto.connect_s3() + bucket = conn.create_bucket("foobar") + key = Key(bucket) + key.key = "the-key" + key.set_contents_from_string("some value") + + rs = bucket.get_all_keys() + rs[0].last_modified.should.equal('2012-01-01T12:00:00.000Z') + + bucket.get_key( + "the-key").last_modified.should.equal('Sun, 01 Jan 2012 12:00:00 GMT') + + +@mock_s3_deprecated +def test_missing_bucket(): + conn = boto.connect_s3('the_key', 'the_secret') + conn.get_bucket.when.called_with('mybucket').should.throw(S3ResponseError) + + +@mock_s3_deprecated +def test_bucket_with_dash(): + conn = boto.connect_s3('the_key', 'the_secret') + conn.get_bucket.when.called_with( 
+ 'mybucket-test').should.throw(S3ResponseError) + + +@mock_s3_deprecated +def test_create_existing_bucket(): + "Trying to create a bucket that already exists should raise an Error" + conn = boto.s3.connect_to_region("us-west-2") + conn.create_bucket("foobar") + with assert_raises(S3CreateError): + conn.create_bucket('foobar') + + +@mock_s3_deprecated +def test_create_existing_bucket_in_us_east_1(): + "Trying to create a bucket that already exists in us-east-1 returns the bucket" + + """" + http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html + Your previous request to create the named bucket succeeded and you already + own it. You get this error in all AWS regions except US Standard, + us-east-1. In us-east-1 region, you will get 200 OK, but it is no-op (if + bucket exists it Amazon S3 will not do anything). + """ + conn = boto.s3.connect_to_region("us-east-1") + conn.create_bucket("foobar") + bucket = conn.create_bucket("foobar") + bucket.name.should.equal("foobar") + + +@mock_s3_deprecated +def test_other_region(): + conn = S3Connection( + 'key', 'secret', host='s3-website-ap-southeast-2.amazonaws.com') + conn.create_bucket("foobar") + list(conn.get_bucket("foobar").get_all_keys()).should.equal([]) + + +@mock_s3_deprecated +def test_bucket_deletion(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + + key = Key(bucket) + key.key = "the-key" + key.set_contents_from_string("some value") + + # Try to delete a bucket that still has keys + conn.delete_bucket.when.called_with("foobar").should.throw(S3ResponseError) + + bucket.delete_key("the-key") + conn.delete_bucket("foobar") + + # Get non-existing bucket + conn.get_bucket.when.called_with("foobar").should.throw(S3ResponseError) + + # Delete non-existant bucket + conn.delete_bucket.when.called_with("foobar").should.throw(S3ResponseError) + + +@mock_s3_deprecated +def test_get_all_buckets(): + conn = boto.connect_s3('the_key', 'the_secret') + 
conn.create_bucket("foobar") + conn.create_bucket("foobar2") + buckets = conn.get_all_buckets() + + buckets.should.have.length_of(2) + + +@mock_s3 +@mock_s3_deprecated +def test_post_to_bucket(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + + requests.post("https://foobar.s3.amazonaws.com/", { + 'key': 'the-key', + 'file': 'nothing' + }) + + bucket.get_key('the-key').get_contents_as_string().should.equal(b'nothing') + + +@mock_s3 +@mock_s3_deprecated +def test_post_with_metadata_to_bucket(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + + requests.post("https://foobar.s3.amazonaws.com/", { + 'key': 'the-key', + 'file': 'nothing', + 'x-amz-meta-test': 'metadata' + }) + + bucket.get_key('the-key').get_metadata('test').should.equal('metadata') + + +@mock_s3_deprecated +def test_delete_missing_key(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket('foobar') + + deleted_key = bucket.delete_key("foobar") + deleted_key.key.should.equal("foobar") + + +@mock_s3_deprecated +def test_delete_keys(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket('foobar') + + Key(bucket=bucket, name='file1').set_contents_from_string('abc') + Key(bucket=bucket, name='file2').set_contents_from_string('abc') + Key(bucket=bucket, name='file3').set_contents_from_string('abc') + Key(bucket=bucket, name='file4').set_contents_from_string('abc') + + result = bucket.delete_keys(['file2', 'file3']) + result.deleted.should.have.length_of(2) + result.errors.should.have.length_of(0) + keys = bucket.get_all_keys() + keys.should.have.length_of(2) + keys[0].name.should.equal('file1') + + +@mock_s3_deprecated +def test_delete_keys_with_invalid(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket('foobar') + + Key(bucket=bucket, name='file1').set_contents_from_string('abc') + Key(bucket=bucket, 
name='file2').set_contents_from_string('abc') + Key(bucket=bucket, name='file3').set_contents_from_string('abc') + Key(bucket=bucket, name='file4').set_contents_from_string('abc') + + result = bucket.delete_keys(['abc', 'file3']) + + result.deleted.should.have.length_of(1) + result.errors.should.have.length_of(1) + keys = bucket.get_all_keys() + keys.should.have.length_of(3) + keys[0].name.should.equal('file1') + + +@mock_s3_deprecated +def test_bucket_name_with_dot(): + conn = boto.connect_s3() + bucket = conn.create_bucket('firstname.lastname') + + k = Key(bucket, 'somekey') + k.set_contents_from_string('somedata') + + +@mock_s3_deprecated +def test_key_with_special_characters(): + conn = boto.connect_s3() + bucket = conn.create_bucket('test_bucket_name') + + key = Key(bucket, 'test_list_keys_2/x?y') + key.set_contents_from_string('value1') + + key_list = bucket.list('test_list_keys_2/', '/') + keys = [x for x in key_list] + keys[0].name.should.equal("test_list_keys_2/x?y") + + +@mock_s3_deprecated +def test_unicode_key_with_slash(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + key = Key(bucket) + key.key = "/the-key-unîcode/test" + key.set_contents_from_string("value") + + key = bucket.get_key("/the-key-unîcode/test") + key.get_contents_as_string().should.equal(b'value') + + +@mock_s3_deprecated +def test_bucket_key_listing_order(): + conn = boto.connect_s3() + bucket = conn.create_bucket('test_bucket') + prefix = 'toplevel/' + + def store(name): + k = Key(bucket, prefix + name) + k.set_contents_from_string('somedata') + + names = ['x/key', 'y.key1', 'y.key2', 'y.key3', 'x/y/key', 'x/y/z/key'] + + for name in names: + store(name) + + delimiter = None + keys = [x.name for x in bucket.list(prefix, delimiter)] + keys.should.equal([ + 'toplevel/x/key', 'toplevel/x/y/key', 'toplevel/x/y/z/key', + 'toplevel/y.key1', 'toplevel/y.key2', 'toplevel/y.key3' + ]) + + delimiter = '/' + keys = [x.name for x in bucket.list(prefix, 
delimiter)] + keys.should.equal([ + 'toplevel/y.key1', 'toplevel/y.key2', 'toplevel/y.key3', 'toplevel/x/' + ]) + + # Test delimiter with no prefix + delimiter = '/' + keys = [x.name for x in bucket.list(prefix=None, delimiter=delimiter)] + keys.should.equal(['toplevel/']) + + delimiter = None + keys = [x.name for x in bucket.list(prefix + 'x', delimiter)] + keys.should.equal( + [u'toplevel/x/key', u'toplevel/x/y/key', u'toplevel/x/y/z/key']) + + delimiter = '/' + keys = [x.name for x in bucket.list(prefix + 'x', delimiter)] + keys.should.equal([u'toplevel/x/']) + + +@mock_s3_deprecated +def test_key_with_reduced_redundancy(): + conn = boto.connect_s3() + bucket = conn.create_bucket('test_bucket_name') + + key = Key(bucket, 'test_rr_key') + key.set_contents_from_string('value1', reduced_redundancy=True) + # we use the bucket iterator because of: + # https:/github.com/boto/boto/issues/1173 + list(bucket)[0].storage_class.should.equal('REDUCED_REDUNDANCY') + + +@mock_s3_deprecated +def test_copy_key_reduced_redundancy(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + key = Key(bucket) + key.key = "the-key" + key.set_contents_from_string("some value") + + bucket.copy_key('new-key', 'foobar', 'the-key', + storage_class='REDUCED_REDUNDANCY') + + # we use the bucket iterator because of: + # https:/github.com/boto/boto/issues/1173 + keys = dict([(k.name, k) for k in bucket]) + keys['new-key'].storage_class.should.equal("REDUCED_REDUNDANCY") + keys['the-key'].storage_class.should.equal("STANDARD") + + +@freeze_time("2012-01-01 12:00:00") +@mock_s3_deprecated +def test_restore_key(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + key = Key(bucket) + key.key = "the-key" + key.set_contents_from_string("some value") + list(bucket)[0].ongoing_restore.should.be.none + key.restore(1) + key = bucket.get_key('the-key') + key.ongoing_restore.should_not.be.none + 
key.ongoing_restore.should.be.false + key.expiry_date.should.equal("Mon, 02 Jan 2012 12:00:00 GMT") + key.restore(2) + key = bucket.get_key('the-key') + key.ongoing_restore.should_not.be.none + key.ongoing_restore.should.be.false + key.expiry_date.should.equal("Tue, 03 Jan 2012 12:00:00 GMT") + + +@freeze_time("2012-01-01 12:00:00") +@mock_s3_deprecated +def test_restore_key_headers(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + key = Key(bucket) + key.key = "the-key" + key.set_contents_from_string("some value") + key.restore(1, headers={'foo': 'bar'}) + key = bucket.get_key('the-key') + key.ongoing_restore.should_not.be.none + key.ongoing_restore.should.be.false + key.expiry_date.should.equal("Mon, 02 Jan 2012 12:00:00 GMT") + + +@mock_s3_deprecated +def test_get_versioning_status(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket('foobar') + d = bucket.get_versioning_status() + d.should.be.empty + + bucket.configure_versioning(versioning=True) + d = bucket.get_versioning_status() + d.shouldnt.be.empty + d.should.have.key('Versioning').being.equal('Enabled') + + bucket.configure_versioning(versioning=False) + d = bucket.get_versioning_status() + d.should.have.key('Versioning').being.equal('Suspended') + + +@mock_s3_deprecated +def test_key_version(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket('foobar') + bucket.configure_versioning(versioning=True) + + versions = [] + + key = Key(bucket) + key.key = 'the-key' + key.version_id.should.be.none + key.set_contents_from_string('some string') + versions.append(key.version_id) + key.set_contents_from_string('some string') + versions.append(key.version_id) + set(versions).should.have.length_of(2) + + key = bucket.get_key('the-key') + key.version_id.should.equal(versions[-1]) + + +@mock_s3_deprecated +def test_list_versions(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = 
conn.create_bucket('foobar') + bucket.configure_versioning(versioning=True) + + key_versions = [] + + key = Key(bucket, 'the-key') + key.version_id.should.be.none + key.set_contents_from_string("Version 1") + key_versions.append(key.version_id) + key.set_contents_from_string("Version 2") + key_versions.append(key.version_id) + key_versions.should.have.length_of(2) + + versions = list(bucket.list_versions()) + versions.should.have.length_of(2) + + versions[0].name.should.equal('the-key') + versions[0].version_id.should.equal(key_versions[0]) + versions[0].get_contents_as_string().should.equal(b"Version 1") + + versions[1].name.should.equal('the-key') + versions[1].version_id.should.equal(key_versions[1]) + versions[1].get_contents_as_string().should.equal(b"Version 2") + + key = Key(bucket, 'the2-key') + key.set_contents_from_string("Version 1") + + keys = list(bucket.list()) + keys.should.have.length_of(2) + versions = list(bucket.list_versions(prefix='the2-')) + versions.should.have.length_of(1) + + +@mock_s3_deprecated +def test_acl_setting(): + conn = boto.connect_s3() + bucket = conn.create_bucket('foobar') + content = b'imafile' + keyname = 'test.txt' + + key = Key(bucket, name=keyname) + key.content_type = 'text/plain' + key.set_contents_from_string(content) + key.make_public() + + key = bucket.get_key(keyname) + + assert key.get_contents_as_string() == content + + grants = key.get_acl().acl.grants + assert any(g.uri == 'http://acs.amazonaws.com/groups/global/AllUsers' and + g.permission == 'READ' for g in grants), grants + + +@mock_s3_deprecated +def test_acl_setting_via_headers(): + conn = boto.connect_s3() + bucket = conn.create_bucket('foobar') + content = b'imafile' + keyname = 'test.txt' + + key = Key(bucket, name=keyname) + key.content_type = 'text/plain' + key.set_contents_from_string(content, headers={ + 'x-amz-grant-full-control': 'uri="http://acs.amazonaws.com/groups/global/AllUsers"' + }) + + key = bucket.get_key(keyname) + + assert 
key.get_contents_as_string() == content + + grants = key.get_acl().acl.grants + assert any(g.uri == 'http://acs.amazonaws.com/groups/global/AllUsers' and + g.permission == 'FULL_CONTROL' for g in grants), grants + + +@mock_s3_deprecated +def test_acl_switching(): + conn = boto.connect_s3() + bucket = conn.create_bucket('foobar') + content = b'imafile' + keyname = 'test.txt' + + key = Key(bucket, name=keyname) + key.content_type = 'text/plain' + key.set_contents_from_string(content, policy='public-read') + key.set_acl('private') + + grants = key.get_acl().acl.grants + assert not any(g.uri == 'http://acs.amazonaws.com/groups/global/AllUsers' and + g.permission == 'READ' for g in grants), grants + + +@mock_s3_deprecated +def test_bucket_acl_setting(): + conn = boto.connect_s3() + bucket = conn.create_bucket('foobar') + + bucket.make_public() + + grants = bucket.get_acl().acl.grants + assert any(g.uri == 'http://acs.amazonaws.com/groups/global/AllUsers' and + g.permission == 'READ' for g in grants), grants + + +@mock_s3_deprecated +def test_bucket_acl_switching(): + conn = boto.connect_s3() + bucket = conn.create_bucket('foobar') + bucket.make_public() + + bucket.set_acl('private') + + grants = bucket.get_acl().acl.grants + assert not any(g.uri == 'http://acs.amazonaws.com/groups/global/AllUsers' and + g.permission == 'READ' for g in grants), grants + + +@mock_s3 +def test_s3_object_in_public_bucket(): + s3 = boto3.resource('s3') + bucket = s3.Bucket('test-bucket') + bucket.create(ACL='public-read') + bucket.put_object(Body=b'ABCD', Key='file.txt') + + s3_anonymous = boto3.resource('s3') + s3_anonymous.meta.client.meta.events.register('choose-signer.s3.*', disable_signing) + + contents = s3_anonymous.Object(key='file.txt', bucket_name='test-bucket').get()['Body'].read() + contents.should.equal(b'ABCD') + + bucket.put_object(ACL='private', Body=b'ABCD', Key='file.txt') + + with assert_raises(ClientError) as exc: + s3_anonymous.Object(key='file.txt', 
bucket_name='test-bucket').get() + exc.exception.response['Error']['Code'].should.equal('403') + + params = {'Bucket': 'test-bucket', 'Key': 'file.txt'} + presigned_url = boto3.client('s3').generate_presigned_url('get_object', params, ExpiresIn=900) + response = requests.get(presigned_url) + assert response.status_code == 200 + + +@mock_s3 +def test_s3_object_in_private_bucket(): + s3 = boto3.resource('s3') + bucket = s3.Bucket('test-bucket') + bucket.create(ACL='private') + bucket.put_object(ACL='private', Body=b'ABCD', Key='file.txt') + + s3_anonymous = boto3.resource('s3') + s3_anonymous.meta.client.meta.events.register('choose-signer.s3.*', disable_signing) + + with assert_raises(ClientError) as exc: + s3_anonymous.Object(key='file.txt', bucket_name='test-bucket').get() + exc.exception.response['Error']['Code'].should.equal('403') + + bucket.put_object(ACL='public-read', Body=b'ABCD', Key='file.txt') + contents = s3_anonymous.Object(key='file.txt', bucket_name='test-bucket').get()['Body'].read() + contents.should.equal(b'ABCD') + + +@mock_s3_deprecated +def test_unicode_key(): + conn = boto.connect_s3() + bucket = conn.create_bucket('mybucket') + key = Key(bucket) + key.key = u'こんにちは.jpg' + key.set_contents_from_string('Hello world!') + assert [listed_key.key for listed_key in bucket.list()] == [key.key] + fetched_key = bucket.get_key(key.key) + assert fetched_key.key == key.key + assert fetched_key.get_contents_as_string().decode("utf-8") == 'Hello world!' 
+ + +@mock_s3_deprecated +def test_unicode_value(): + conn = boto.connect_s3() + bucket = conn.create_bucket('mybucket') + key = Key(bucket) + key.key = 'some_key' + key.set_contents_from_string(u'こんにちは.jpg') + list(bucket.list()) + key = bucket.get_key(key.key) + assert key.get_contents_as_string().decode("utf-8") == u'こんにちは.jpg' + + +@mock_s3_deprecated +def test_setting_content_encoding(): + conn = boto.connect_s3() + bucket = conn.create_bucket('mybucket') + key = bucket.new_key("keyname") + key.set_metadata("Content-Encoding", "gzip") + compressed_data = "abcdef" + key.set_contents_from_string(compressed_data) + + key = bucket.get_key("keyname") + key.content_encoding.should.equal("gzip") + + +@mock_s3_deprecated +def test_bucket_location(): + conn = boto.s3.connect_to_region("us-west-2") + bucket = conn.create_bucket('mybucket') + bucket.get_location().should.equal("us-west-2") + + +@mock_s3 +def test_bucket_location_us_east_1(): + cli = boto3.client('s3') + bucket_name = 'mybucket' + # No LocationConstraint ==> us-east-1 + cli.create_bucket(Bucket=bucket_name) + cli.get_bucket_location(Bucket=bucket_name)['LocationConstraint'].should.equal(None) + + +@mock_s3_deprecated +def test_ranged_get(): + conn = boto.connect_s3() + bucket = conn.create_bucket('mybucket') + key = Key(bucket) + key.key = 'bigkey' + rep = b"0123456789" + key.set_contents_from_string(rep * 10) + + # Implicitly bounded range requests. + key.get_contents_as_string( + headers={'Range': 'bytes=0-'}).should.equal(rep * 10) + key.get_contents_as_string( + headers={'Range': 'bytes=50-'}).should.equal(rep * 5) + key.get_contents_as_string( + headers={'Range': 'bytes=99-'}).should.equal(b'9') + + # Explicitly bounded range requests starting from the first byte. 
+ key.get_contents_as_string( + headers={'Range': 'bytes=0-0'}).should.equal(b'0') + key.get_contents_as_string( + headers={'Range': 'bytes=0-49'}).should.equal(rep * 5) + key.get_contents_as_string( + headers={'Range': 'bytes=0-99'}).should.equal(rep * 10) + key.get_contents_as_string( + headers={'Range': 'bytes=0-100'}).should.equal(rep * 10) + key.get_contents_as_string( + headers={'Range': 'bytes=0-700'}).should.equal(rep * 10) + + # Explicitly bounded range requests starting from the / a middle byte. + key.get_contents_as_string( + headers={'Range': 'bytes=50-54'}).should.equal(rep[:5]) + key.get_contents_as_string( + headers={'Range': 'bytes=50-99'}).should.equal(rep * 5) + key.get_contents_as_string( + headers={'Range': 'bytes=50-100'}).should.equal(rep * 5) + key.get_contents_as_string( + headers={'Range': 'bytes=50-700'}).should.equal(rep * 5) + + # Explicitly bounded range requests starting from the last byte. + key.get_contents_as_string( + headers={'Range': 'bytes=99-99'}).should.equal(b'9') + key.get_contents_as_string( + headers={'Range': 'bytes=99-100'}).should.equal(b'9') + key.get_contents_as_string( + headers={'Range': 'bytes=99-700'}).should.equal(b'9') + + # Suffix range requests. 
+ key.get_contents_as_string( + headers={'Range': 'bytes=-1'}).should.equal(b'9') + key.get_contents_as_string( + headers={'Range': 'bytes=-60'}).should.equal(rep * 6) + key.get_contents_as_string( + headers={'Range': 'bytes=-100'}).should.equal(rep * 10) + key.get_contents_as_string( + headers={'Range': 'bytes=-101'}).should.equal(rep * 10) + key.get_contents_as_string( + headers={'Range': 'bytes=-700'}).should.equal(rep * 10) + + key.size.should.equal(100) + + +@mock_s3_deprecated +def test_policy(): + conn = boto.connect_s3() + bucket_name = 'mybucket' + bucket = conn.create_bucket(bucket_name) + + policy = json.dumps({ + "Version": "2012-10-17", + "Id": "PutObjPolicy", + "Statement": [ + { + "Sid": "DenyUnEncryptedObjectUploads", + "Effect": "Deny", + "Principal": "*", + "Action": "s3:PutObject", + "Resource": "arn:aws:s3:::{bucket_name}/*".format(bucket_name=bucket_name), + "Condition": { + "StringNotEquals": { + "s3:x-amz-server-side-encryption": "aws:kms" + } + } + } + ] + }) + + with assert_raises(S3ResponseError) as err: + bucket.get_policy() + + ex = err.exception + ex.box_usage.should.be.none + ex.error_code.should.equal('NoSuchBucketPolicy') + ex.message.should.equal('The bucket policy does not exist') + ex.reason.should.equal('Not Found') + ex.resource.should.be.none + ex.status.should.equal(404) + ex.body.should.contain(bucket_name) + ex.request_id.should_not.be.none + + bucket.set_policy(policy).should.be.true + + bucket = conn.get_bucket(bucket_name) + + bucket.get_policy().decode('utf-8').should.equal(policy) + + bucket.delete_policy() + + with assert_raises(S3ResponseError) as err: + bucket.get_policy() + + +@mock_s3_deprecated +def test_website_configuration_xml(): + conn = boto.connect_s3() + bucket = conn.create_bucket('test-bucket') + bucket.set_website_configuration_xml(TEST_XML) + bucket.get_website_configuration_xml().should.equal(TEST_XML) + + +@mock_s3_deprecated +def test_key_with_trailing_slash_in_ordinary_calling_format(): + conn = 
boto.connect_s3( + 'access_key', + 'secret_key', + calling_format=boto.s3.connection.OrdinaryCallingFormat() + ) + bucket = conn.create_bucket('test_bucket_name') + + key_name = 'key_with_slash/' + + key = Key(bucket, key_name) + key.set_contents_from_string('some value') + + [k.name for k in bucket.get_all_keys()].should.contain(key_name) + + +""" +boto3 +""" + + +@mock_s3 +def test_boto3_key_etag(): + s3 = boto3.client('s3', region_name='us-east-1') + s3.create_bucket(Bucket='mybucket') + s3.put_object(Bucket='mybucket', Key='steve', Body=b'is awesome') + resp = s3.get_object(Bucket='mybucket', Key='steve') + resp['ETag'].should.equal('"d32bda93738f7e03adb22e66c90fbc04"') + + +@mock_s3 +def test_website_redirect_location(): + s3 = boto3.client('s3', region_name='us-east-1') + s3.create_bucket(Bucket='mybucket') + + s3.put_object(Bucket='mybucket', Key='steve', Body=b'is awesome') + resp = s3.get_object(Bucket='mybucket', Key='steve') + resp.get('WebsiteRedirectLocation').should.be.none + + url = 'https://github.com/spulec/moto' + s3.put_object(Bucket='mybucket', Key='steve', Body=b'is awesome', WebsiteRedirectLocation=url) + resp = s3.get_object(Bucket='mybucket', Key='steve') + resp['WebsiteRedirectLocation'].should.equal(url) + + +@mock_s3 +def test_boto3_list_keys_xml_escaped(): + s3 = boto3.client('s3', region_name='us-east-1') + s3.create_bucket(Bucket='mybucket') + key_name = 'Q&A.txt' + s3.put_object(Bucket='mybucket', Key=key_name, Body=b'is awesome') + + resp = s3.list_objects_v2(Bucket='mybucket', Prefix=key_name) + + assert resp['Contents'][0]['Key'] == key_name + assert resp['KeyCount'] == 1 + assert resp['MaxKeys'] == 1000 + assert resp['Prefix'] == key_name + assert resp['IsTruncated'] == False + assert 'Delimiter' not in resp + assert 'StartAfter' not in resp + assert 'NextContinuationToken' not in resp + assert 'Owner' not in resp['Contents'][0] + + +@mock_s3 +def test_boto3_list_objects_v2_common_prefix_pagination(): + s3 = boto3.client('s3', 
region_name='us-east-1') + s3.create_bucket(Bucket='mybucket') + + max_keys = 1 + keys = ['test/{i}/{i}'.format(i=i) for i in range(3)] + for key in keys: + s3.put_object(Bucket='mybucket', Key=key, Body=b'v') + + prefixes = [] + args = {"Bucket": 'mybucket', "Delimiter": "/", "Prefix": "test/", "MaxKeys": max_keys} + resp = {"IsTruncated": True} + while resp.get("IsTruncated", False): + if "NextContinuationToken" in resp: + args["ContinuationToken"] = resp["NextContinuationToken"] + resp = s3.list_objects_v2(**args) + if "CommonPrefixes" in resp: + assert len(resp["CommonPrefixes"]) == max_keys + prefixes.extend(i["Prefix"] for i in resp["CommonPrefixes"]) + + assert prefixes == [k[:k.rindex('/') + 1] for k in keys] + + +@mock_s3 +def test_boto3_list_objects_v2_truncated_response(): + s3 = boto3.client('s3', region_name='us-east-1') + s3.create_bucket(Bucket='mybucket') + s3.put_object(Bucket='mybucket', Key='one', Body=b'1') + s3.put_object(Bucket='mybucket', Key='two', Body=b'22') + s3.put_object(Bucket='mybucket', Key='three', Body=b'333') + + # First list + resp = s3.list_objects_v2(Bucket='mybucket', MaxKeys=1) + listed_object = resp['Contents'][0] + + assert listed_object['Key'] == 'one' + assert resp['MaxKeys'] == 1 + assert resp['Prefix'] == '' + assert resp['KeyCount'] == 1 + assert resp['IsTruncated'] == True + assert 'Delimiter' not in resp + assert 'StartAfter' not in resp + assert 'Owner' not in listed_object # owner info was not requested + + next_token = resp['NextContinuationToken'] + + # Second list + resp = s3.list_objects_v2( + Bucket='mybucket', MaxKeys=1, ContinuationToken=next_token) + listed_object = resp['Contents'][0] + + assert listed_object['Key'] == 'three' + assert resp['MaxKeys'] == 1 + assert resp['Prefix'] == '' + assert resp['KeyCount'] == 1 + assert resp['IsTruncated'] == True + assert 'Delimiter' not in resp + assert 'StartAfter' not in resp + assert 'Owner' not in listed_object + + next_token = resp['NextContinuationToken'] + + 
# Third list + resp = s3.list_objects_v2( + Bucket='mybucket', MaxKeys=1, ContinuationToken=next_token) + listed_object = resp['Contents'][0] + + assert listed_object['Key'] == 'two' + assert resp['MaxKeys'] == 1 + assert resp['Prefix'] == '' + assert resp['KeyCount'] == 1 + assert resp['IsTruncated'] == False + assert 'Delimiter' not in resp + assert 'Owner' not in listed_object + assert 'StartAfter' not in resp + assert 'NextContinuationToken' not in resp + + +@mock_s3 +def test_boto3_list_objects_v2_truncated_response_start_after(): + s3 = boto3.client('s3', region_name='us-east-1') + s3.create_bucket(Bucket='mybucket') + s3.put_object(Bucket='mybucket', Key='one', Body=b'1') + s3.put_object(Bucket='mybucket', Key='two', Body=b'22') + s3.put_object(Bucket='mybucket', Key='three', Body=b'333') + + # First list + resp = s3.list_objects_v2(Bucket='mybucket', MaxKeys=1, StartAfter='one') + listed_object = resp['Contents'][0] + + assert listed_object['Key'] == 'three' + assert resp['MaxKeys'] == 1 + assert resp['Prefix'] == '' + assert resp['KeyCount'] == 1 + assert resp['IsTruncated'] == True + assert resp['StartAfter'] == 'one' + assert 'Delimiter' not in resp + assert 'Owner' not in listed_object + + next_token = resp['NextContinuationToken'] + + # Second list + # The ContinuationToken must take precedence over StartAfter. + resp = s3.list_objects_v2(Bucket='mybucket', MaxKeys=1, StartAfter='one', + ContinuationToken=next_token) + listed_object = resp['Contents'][0] + + assert listed_object['Key'] == 'two' + assert resp['MaxKeys'] == 1 + assert resp['Prefix'] == '' + assert resp['KeyCount'] == 1 + assert resp['IsTruncated'] == False + # When ContinuationToken is given, StartAfter is ignored. This also means + # AWS does not return it in the response. 
+ assert 'StartAfter' not in resp + assert 'Delimiter' not in resp + assert 'Owner' not in listed_object + + +@mock_s3 +def test_boto3_list_objects_v2_fetch_owner(): + s3 = boto3.client('s3', region_name='us-east-1') + s3.create_bucket(Bucket='mybucket') + s3.put_object(Bucket='mybucket', Key='one', Body=b'11') + + resp = s3.list_objects_v2(Bucket='mybucket', FetchOwner=True) + owner = resp['Contents'][0]['Owner'] + + assert 'ID' in owner + assert 'DisplayName' in owner + assert len(owner.keys()) == 2 + + +@mock_s3 +def test_boto3_bucket_create(): + s3 = boto3.resource('s3', region_name='us-east-1') + s3.create_bucket(Bucket="blah") + + s3.Object('blah', 'hello.txt').put(Body="some text") + + s3.Object('blah', 'hello.txt').get()['Body'].read().decode( + "utf-8").should.equal("some text") + + +@mock_s3 +def test_bucket_create_duplicate(): + s3 = boto3.resource('s3', region_name='us-west-2') + s3.create_bucket(Bucket="blah", CreateBucketConfiguration={ + 'LocationConstraint': 'us-west-2', + }) + with assert_raises(ClientError) as exc: + s3.create_bucket( + Bucket="blah", + CreateBucketConfiguration={ + 'LocationConstraint': 'us-west-2', + } + ) + exc.exception.response['Error']['Code'].should.equal('BucketAlreadyExists') + + +@mock_s3 +def test_bucket_create_force_us_east_1(): + s3 = boto3.resource('s3', region_name='us-east-1') + with assert_raises(ClientError) as exc: + s3.create_bucket(Bucket="blah", CreateBucketConfiguration={ + 'LocationConstraint': 'us-east-1', + }) + exc.exception.response['Error']['Code'].should.equal('InvalidLocationConstraint') + + +@mock_s3 +def test_boto3_bucket_create_eu_central(): + s3 = boto3.resource('s3', region_name='eu-central-1') + s3.create_bucket(Bucket="blah") + + s3.Object('blah', 'hello.txt').put(Body="some text") + + s3.Object('blah', 'hello.txt').get()['Body'].read().decode( + "utf-8").should.equal("some text") + + +@mock_s3 +def test_boto3_head_object(): + s3 = boto3.resource('s3', region_name='us-east-1') + 
s3.create_bucket(Bucket="blah") + + s3.Object('blah', 'hello.txt').put(Body="some text") + + s3.Object('blah', 'hello.txt').meta.client.head_object( + Bucket='blah', Key='hello.txt') + + with assert_raises(ClientError) as e: + s3.Object('blah', 'hello2.txt').meta.client.head_object( + Bucket='blah', Key='hello_bad.txt') + e.exception.response['Error']['Code'].should.equal('404') + + +@mock_s3 +def test_boto3_bucket_deletion(): + cli = boto3.client('s3', region_name='us-east-1') + cli.create_bucket(Bucket="foobar") + + cli.put_object(Bucket="foobar", Key="the-key", Body="some value") + + # Try to delete a bucket that still has keys + cli.delete_bucket.when.called_with(Bucket="foobar").should.throw( + cli.exceptions.ClientError, + ('An error occurred (BucketNotEmpty) when calling the DeleteBucket operation: ' + 'The bucket you tried to delete is not empty')) + + cli.delete_object(Bucket="foobar", Key="the-key") + cli.delete_bucket(Bucket="foobar") + + # Get non-existing bucket + cli.head_bucket.when.called_with(Bucket="foobar").should.throw( + cli.exceptions.ClientError, + "An error occurred (404) when calling the HeadBucket operation: Not Found") + + # Delete non-existing bucket + cli.delete_bucket.when.called_with(Bucket="foobar").should.throw(cli.exceptions.NoSuchBucket) + + +@mock_s3 +def test_boto3_get_object(): + s3 = boto3.resource('s3', region_name='us-east-1') + s3.create_bucket(Bucket="blah") + + s3.Object('blah', 'hello.txt').put(Body="some text") + + s3.Object('blah', 'hello.txt').meta.client.head_object( + Bucket='blah', Key='hello.txt') + + with assert_raises(ClientError) as e: + s3.Object('blah', 'hello2.txt').get() + + e.exception.response['Error']['Code'].should.equal('NoSuchKey') + + +@mock_s3 +def test_boto3_head_object_with_versioning(): + s3 = boto3.resource('s3', region_name='us-east-1') + bucket = s3.create_bucket(Bucket='blah') + bucket.Versioning().enable() + + old_content = 'some text' + new_content = 'some new text' + s3.Object('blah', 
'hello.txt').put(Body=old_content) + s3.Object('blah', 'hello.txt').put(Body=new_content) + + versions = list(s3.Bucket('blah').object_versions.all()) + latest = list(filter(lambda item: item.is_latest, versions))[0] + oldest = list(filter(lambda item: not item.is_latest, versions))[0] + + head_object = s3.Object('blah', 'hello.txt').meta.client.head_object( + Bucket='blah', Key='hello.txt') + head_object['VersionId'].should.equal(latest.id) + head_object['ContentLength'].should.equal(len(new_content)) + + old_head_object = s3.Object('blah', 'hello.txt').meta.client.head_object( + Bucket='blah', Key='hello.txt', VersionId=oldest.id) + old_head_object['VersionId'].should.equal(oldest.id) + old_head_object['ContentLength'].should.equal(len(old_content)) + + old_head_object['VersionId'].should_not.equal(head_object['VersionId']) + + +@mock_s3 +def test_boto3_copy_object_with_versioning(): + client = boto3.client('s3', region_name='us-east-1') + + client.create_bucket(Bucket='blah', CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}) + client.put_bucket_versioning(Bucket='blah', VersioningConfiguration={'Status': 'Enabled'}) + + client.put_object(Bucket='blah', Key='test1', Body=b'test1') + client.put_object(Bucket='blah', Key='test2', Body=b'test2') + + obj1_version = client.get_object(Bucket='blah', Key='test1')['VersionId'] + obj2_version = client.get_object(Bucket='blah', Key='test2')['VersionId'] + + client.copy_object(CopySource={'Bucket': 'blah', 'Key': 'test1'}, Bucket='blah', Key='test2') + obj2_version_new = client.get_object(Bucket='blah', Key='test2')['VersionId'] + + # Version should be different to previous version + obj2_version_new.should_not.equal(obj2_version) + + +@mock_s3 +def test_boto3_copy_object_from_unversioned_to_versioned_bucket(): + client = boto3.client('s3', region_name='us-east-1') + + client.create_bucket(Bucket='src', CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}) + client.create_bucket(Bucket='dest', 
CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}) + client.put_bucket_versioning(Bucket='dest', VersioningConfiguration={'Status': 'Enabled'}) + + client.put_object(Bucket='src', Key='test', Body=b'content') + + obj2_version_new = client.copy_object(CopySource={'Bucket': 'src', 'Key': 'test'}, Bucket='dest', Key='test') \ + .get('VersionId') + + # VersionId should be present in the response + obj2_version_new.should_not.equal(None) + + +@mock_s3 +def test_boto3_deleted_versionings_list(): + client = boto3.client('s3', region_name='us-east-1') + + client.create_bucket(Bucket='blah') + client.put_bucket_versioning(Bucket='blah', VersioningConfiguration={'Status': 'Enabled'}) + + client.put_object(Bucket='blah', Key='test1', Body=b'test1') + client.put_object(Bucket='blah', Key='test2', Body=b'test2') + client.delete_objects(Bucket='blah', Delete={'Objects': [{'Key': 'test1'}]}) + + listed = client.list_objects_v2(Bucket='blah') + assert len(listed['Contents']) == 1 + + +@mock_s3 +def test_boto3_delete_versioned_bucket(): + client = boto3.client('s3', region_name='us-east-1') + + client.create_bucket(Bucket='blah') + client.put_bucket_versioning(Bucket='blah', VersioningConfiguration={'Status': 'Enabled'}) + + resp = client.put_object(Bucket='blah', Key='test1', Body=b'test1') + client.delete_object(Bucket='blah', Key='test1', VersionId=resp["VersionId"]) + + client.delete_bucket(Bucket='blah') + + +@mock_s3 +def test_boto3_head_object_if_modified_since(): + s3 = boto3.client('s3', region_name='us-east-1') + bucket_name = "blah" + s3.create_bucket(Bucket=bucket_name) + + key = 'hello.txt' + + s3.put_object( + Bucket=bucket_name, + Key=key, + Body='test' + ) + + with assert_raises(botocore.exceptions.ClientError) as err: + s3.head_object( + Bucket=bucket_name, + Key=key, + IfModifiedSince=datetime.datetime.utcnow() + datetime.timedelta(hours=1) + ) + e = err.exception + e.response['Error'].should.equal({'Code': '304', 'Message': 'Not Modified'}) + + 
+@mock_s3 +@reduced_min_part_size +def test_boto3_multipart_etag(): + # Create Bucket so that test can run + s3 = boto3.client('s3', region_name='us-east-1') + s3.create_bucket(Bucket='mybucket') + + upload_id = s3.create_multipart_upload( + Bucket='mybucket', Key='the-key')['UploadId'] + part1 = b'0' * REDUCED_PART_SIZE + etags = [] + etags.append( + s3.upload_part(Bucket='mybucket', Key='the-key', PartNumber=1, + UploadId=upload_id, Body=part1)['ETag']) + # last part, can be less than 5 MB + part2 = b'1' + etags.append( + s3.upload_part(Bucket='mybucket', Key='the-key', PartNumber=2, + UploadId=upload_id, Body=part2)['ETag']) + s3.complete_multipart_upload( + Bucket='mybucket', Key='the-key', UploadId=upload_id, + MultipartUpload={'Parts': [{'ETag': etag, 'PartNumber': i} + for i, etag in enumerate(etags, 1)]}) + # we should get both parts as the key contents + resp = s3.get_object(Bucket='mybucket', Key='the-key') + resp['ETag'].should.equal(EXPECTED_ETAG) + + +@mock_s3 +def test_boto3_put_object_with_tagging(): + s3 = boto3.client('s3', region_name='us-east-1') + bucket_name = 'mybucket' + key = 'key-with-tags' + s3.create_bucket(Bucket=bucket_name) + + s3.put_object( + Bucket=bucket_name, + Key=key, + Body='test', + Tagging='foo=bar', + ) + + resp = s3.get_object_tagging(Bucket=bucket_name, Key=key) + + resp['TagSet'].should.contain({'Key': 'foo', 'Value': 'bar'}) + + +@mock_s3 +def test_boto3_put_bucket_tagging(): + s3 = boto3.client("s3", region_name="us-east-1") + bucket_name = "mybucket" + s3.create_bucket(Bucket=bucket_name) + + # With 1 tag: + resp = s3.put_bucket_tagging(Bucket=bucket_name, + Tagging={ + "TagSet": [ + { + "Key": "TagOne", + "Value": "ValueOne" + } + ] + }) + resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) + + # With multiple tags: + resp = s3.put_bucket_tagging(Bucket=bucket_name, + Tagging={ + "TagSet": [ + { + "Key": "TagOne", + "Value": "ValueOne" + }, + { + "Key": "TagTwo", + "Value": "ValueTwo" + } + ] + }) + + 
resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) + + # No tags is also OK: + resp = s3.put_bucket_tagging(Bucket=bucket_name, Tagging={ + "TagSet": [] + }) + resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) + + # With duplicate tag keys: + with assert_raises(ClientError) as err: + resp = s3.put_bucket_tagging(Bucket=bucket_name, + Tagging={ + "TagSet": [ + { + "Key": "TagOne", + "Value": "ValueOne" + }, + { + "Key": "TagOne", + "Value": "ValueOneAgain" + } + ] + }) + e = err.exception + e.response["Error"]["Code"].should.equal("InvalidTag") + e.response["Error"]["Message"].should.equal("Cannot provide multiple Tags with the same key") + +@mock_s3 +def test_boto3_get_bucket_tagging(): + s3 = boto3.client("s3", region_name="us-east-1") + bucket_name = "mybucket" + s3.create_bucket(Bucket=bucket_name) + s3.put_bucket_tagging(Bucket=bucket_name, + Tagging={ + "TagSet": [ + { + "Key": "TagOne", + "Value": "ValueOne" + }, + { + "Key": "TagTwo", + "Value": "ValueTwo" + } + ] + }) + + # Get the tags for the bucket: + resp = s3.get_bucket_tagging(Bucket=bucket_name) + resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) + len(resp["TagSet"]).should.equal(2) + + # With no tags: + s3.put_bucket_tagging(Bucket=bucket_name, Tagging={ + "TagSet": [] + }) + + with assert_raises(ClientError) as err: + s3.get_bucket_tagging(Bucket=bucket_name) + + e = err.exception + e.response["Error"]["Code"].should.equal("NoSuchTagSet") + e.response["Error"]["Message"].should.equal("The TagSet does not exist") + + +@mock_s3 +def test_boto3_delete_bucket_tagging(): + s3 = boto3.client("s3", region_name="us-east-1") + bucket_name = "mybucket" + s3.create_bucket(Bucket=bucket_name) + + s3.put_bucket_tagging(Bucket=bucket_name, + Tagging={ + "TagSet": [ + { + "Key": "TagOne", + "Value": "ValueOne" + }, + { + "Key": "TagTwo", + "Value": "ValueTwo" + } + ] + }) + + resp = s3.delete_bucket_tagging(Bucket=bucket_name) + 
resp['ResponseMetadata']['HTTPStatusCode'].should.equal(204) + + with assert_raises(ClientError) as err: + s3.get_bucket_tagging(Bucket=bucket_name) + + e = err.exception + e.response["Error"]["Code"].should.equal("NoSuchTagSet") + e.response["Error"]["Message"].should.equal("The TagSet does not exist") + + +@mock_s3 +def test_boto3_put_bucket_cors(): + s3 = boto3.client("s3", region_name="us-east-1") + bucket_name = "mybucket" + s3.create_bucket(Bucket=bucket_name) + + resp = s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={ + "CORSRules": [ + { + "AllowedOrigins": [ + "*" + ], + "AllowedMethods": [ + "GET", + "POST" + ], + "AllowedHeaders": [ + "Authorization" + ], + "ExposeHeaders": [ + "x-amz-request-id" + ], + "MaxAgeSeconds": 123 + }, + { + "AllowedOrigins": [ + "*" + ], + "AllowedMethods": [ + "PUT" + ], + "AllowedHeaders": [ + "Authorization" + ], + "ExposeHeaders": [ + "x-amz-request-id" + ], + "MaxAgeSeconds": 123 + } + ] + }) + + resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) + + with assert_raises(ClientError) as err: + s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={ + "CORSRules": [ + { + "AllowedOrigins": [ + "*" + ], + "AllowedMethods": [ + "NOTREAL", + "POST" + ] + } + ] + }) + e = err.exception + e.response["Error"]["Code"].should.equal("InvalidRequest") + e.response["Error"]["Message"].should.equal("Found unsupported HTTP method in CORS config. 
" + "Unsupported method is NOTREAL") + + with assert_raises(ClientError) as err: + s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={ + "CORSRules": [] + }) + e = err.exception + e.response["Error"]["Code"].should.equal("MalformedXML") + + # And 101: + many_rules = [{"AllowedOrigins": ["*"], "AllowedMethods": ["GET"]}] * 101 + with assert_raises(ClientError) as err: + s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={ + "CORSRules": many_rules + }) + e = err.exception + e.response["Error"]["Code"].should.equal("MalformedXML") + + +@mock_s3 +def test_boto3_get_bucket_cors(): + s3 = boto3.client("s3", region_name="us-east-1") + bucket_name = "mybucket" + s3.create_bucket(Bucket=bucket_name) + + # Without CORS: + with assert_raises(ClientError) as err: + s3.get_bucket_cors(Bucket=bucket_name) + + e = err.exception + e.response["Error"]["Code"].should.equal("NoSuchCORSConfiguration") + e.response["Error"]["Message"].should.equal("The CORS configuration does not exist") + + s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={ + "CORSRules": [ + { + "AllowedOrigins": [ + "*" + ], + "AllowedMethods": [ + "GET", + "POST" + ], + "AllowedHeaders": [ + "Authorization" + ], + "ExposeHeaders": [ + "x-amz-request-id" + ], + "MaxAgeSeconds": 123 + }, + { + "AllowedOrigins": [ + "*" + ], + "AllowedMethods": [ + "PUT" + ], + "AllowedHeaders": [ + "Authorization" + ], + "ExposeHeaders": [ + "x-amz-request-id" + ], + "MaxAgeSeconds": 123 + } + ] + }) + + resp = s3.get_bucket_cors(Bucket=bucket_name) + resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) + len(resp["CORSRules"]).should.equal(2) + + +@mock_s3 +def test_boto3_delete_bucket_cors(): + s3 = boto3.client("s3", region_name="us-east-1") + bucket_name = "mybucket" + s3.create_bucket(Bucket=bucket_name) + s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={ + "CORSRules": [ + { + "AllowedOrigins": [ + "*" + ], + "AllowedMethods": [ + "GET" + ] + } + ] + }) + + resp = 
s3.delete_bucket_cors(Bucket=bucket_name) + resp['ResponseMetadata']['HTTPStatusCode'].should.equal(204) + + # Verify deletion: + with assert_raises(ClientError) as err: + s3.get_bucket_cors(Bucket=bucket_name) + + e = err.exception + e.response["Error"]["Code"].should.equal("NoSuchCORSConfiguration") + e.response["Error"]["Message"].should.equal("The CORS configuration does not exist") + + +@mock_s3 +def test_put_bucket_acl_body(): + s3 = boto3.client("s3", region_name="us-east-1") + s3.create_bucket(Bucket="bucket") + bucket_owner = s3.get_bucket_acl(Bucket="bucket")["Owner"] + s3.put_bucket_acl(Bucket="bucket", AccessControlPolicy={ + "Grants": [ + { + "Grantee": { + "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery", + "Type": "Group" + }, + "Permission": "WRITE" + }, + { + "Grantee": { + "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery", + "Type": "Group" + }, + "Permission": "READ_ACP" + } + ], + "Owner": bucket_owner + }) + + result = s3.get_bucket_acl(Bucket="bucket") + assert len(result["Grants"]) == 2 + for g in result["Grants"]: + assert g["Grantee"]["URI"] == "http://acs.amazonaws.com/groups/s3/LogDelivery" + assert g["Grantee"]["Type"] == "Group" + assert g["Permission"] in ["WRITE", "READ_ACP"] + + # With one: + s3.put_bucket_acl(Bucket="bucket", AccessControlPolicy={ + "Grants": [ + { + "Grantee": { + "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery", + "Type": "Group" + }, + "Permission": "WRITE" + } + ], + "Owner": bucket_owner + }) + result = s3.get_bucket_acl(Bucket="bucket") + assert len(result["Grants"]) == 1 + + # With no owner: + with assert_raises(ClientError) as err: + s3.put_bucket_acl(Bucket="bucket", AccessControlPolicy={ + "Grants": [ + { + "Grantee": { + "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery", + "Type": "Group" + }, + "Permission": "WRITE" + } + ] + }) + assert err.exception.response["Error"]["Code"] == "MalformedACLError" + + # With incorrect permission: + with assert_raises(ClientError) as err: + 
s3.put_bucket_acl(Bucket="bucket", AccessControlPolicy={ + "Grants": [ + { + "Grantee": { + "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery", + "Type": "Group" + }, + "Permission": "lskjflkasdjflkdsjfalisdjflkdsjf" + } + ], + "Owner": bucket_owner + }) + assert err.exception.response["Error"]["Code"] == "MalformedACLError" + + # Clear the ACLs: + result = s3.put_bucket_acl(Bucket="bucket", AccessControlPolicy={"Grants": [], "Owner": bucket_owner}) + assert not result.get("Grants") + + +@mock_s3 +def test_put_bucket_notification(): + s3 = boto3.client("s3", region_name="us-east-1") + s3.create_bucket(Bucket="bucket") + + # With no configuration: + result = s3.get_bucket_notification(Bucket="bucket") + assert not result.get("TopicConfigurations") + assert not result.get("QueueConfigurations") + assert not result.get("LambdaFunctionConfigurations") + + # Place proper topic configuration: + s3.put_bucket_notification_configuration(Bucket="bucket", + NotificationConfiguration={ + "TopicConfigurations": [ + { + "TopicArn": "arn:aws:sns:us-east-1:012345678910:mytopic", + "Events": [ + "s3:ObjectCreated:*", + "s3:ObjectRemoved:*" + ] + }, + { + "TopicArn": "arn:aws:sns:us-east-1:012345678910:myothertopic", + "Events": [ + "s3:ObjectCreated:*" + ], + "Filter": { + "Key": { + "FilterRules": [ + { + "Name": "prefix", + "Value": "images/" + }, + { + "Name": "suffix", + "Value": "png" + } + ] + } + } + } + ] + }) + + # Verify to completion: + result = s3.get_bucket_notification_configuration(Bucket="bucket") + assert len(result["TopicConfigurations"]) == 2 + assert not result.get("QueueConfigurations") + assert not result.get("LambdaFunctionConfigurations") + assert result["TopicConfigurations"][0]["TopicArn"] == "arn:aws:sns:us-east-1:012345678910:mytopic" + assert result["TopicConfigurations"][1]["TopicArn"] == "arn:aws:sns:us-east-1:012345678910:myothertopic" + assert len(result["TopicConfigurations"][0]["Events"]) == 2 + assert 
len(result["TopicConfigurations"][1]["Events"]) == 1 + assert result["TopicConfigurations"][0]["Events"][0] == "s3:ObjectCreated:*" + assert result["TopicConfigurations"][0]["Events"][1] == "s3:ObjectRemoved:*" + assert result["TopicConfigurations"][1]["Events"][0] == "s3:ObjectCreated:*" + assert result["TopicConfigurations"][0]["Id"] + assert result["TopicConfigurations"][1]["Id"] + assert not result["TopicConfigurations"][0].get("Filter") + assert len(result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"]) == 2 + assert result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"][0]["Name"] == "prefix" + assert result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"][0]["Value"] == "images/" + assert result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"][1]["Name"] == "suffix" + assert result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"][1]["Value"] == "png" + + # Place proper queue configuration: + s3.put_bucket_notification_configuration(Bucket="bucket", + NotificationConfiguration={ + "QueueConfigurations": [ + { + "Id": "SomeID", + "QueueArn": "arn:aws:sqs:us-east-1:012345678910:myQueue", + "Events": ["s3:ObjectCreated:*"], + "Filter": { + "Key": { + "FilterRules": [ + { + "Name": "prefix", + "Value": "images/" + } + ] + } + } + } + ] + }) + result = s3.get_bucket_notification_configuration(Bucket="bucket") + assert len(result["QueueConfigurations"]) == 1 + assert not result.get("TopicConfigurations") + assert not result.get("LambdaFunctionConfigurations") + assert result["QueueConfigurations"][0]["Id"] == "SomeID" + assert result["QueueConfigurations"][0]["QueueArn"] == "arn:aws:sqs:us-east-1:012345678910:myQueue" + assert result["QueueConfigurations"][0]["Events"][0] == "s3:ObjectCreated:*" + assert len(result["QueueConfigurations"][0]["Events"]) == 1 + assert len(result["QueueConfigurations"][0]["Filter"]["Key"]["FilterRules"]) == 1 + assert 
result["QueueConfigurations"][0]["Filter"]["Key"]["FilterRules"][0]["Name"] == "prefix" + assert result["QueueConfigurations"][0]["Filter"]["Key"]["FilterRules"][0]["Value"] == "images/" + + # Place proper Lambda configuration: + s3.put_bucket_notification_configuration(Bucket="bucket", + NotificationConfiguration={ + "LambdaFunctionConfigurations": [ + { + "LambdaFunctionArn": + "arn:aws:lambda:us-east-1:012345678910:function:lambda", + "Events": ["s3:ObjectCreated:*"], + "Filter": { + "Key": { + "FilterRules": [ + { + "Name": "prefix", + "Value": "images/" + } + ] + } + } + } + ] + }) + result = s3.get_bucket_notification_configuration(Bucket="bucket") + assert len(result["LambdaFunctionConfigurations"]) == 1 + assert not result.get("TopicConfigurations") + assert not result.get("QueueConfigurations") + assert result["LambdaFunctionConfigurations"][0]["Id"] + assert result["LambdaFunctionConfigurations"][0]["LambdaFunctionArn"] == \ + "arn:aws:lambda:us-east-1:012345678910:function:lambda" + assert result["LambdaFunctionConfigurations"][0]["Events"][0] == "s3:ObjectCreated:*" + assert len(result["LambdaFunctionConfigurations"][0]["Events"]) == 1 + assert len(result["LambdaFunctionConfigurations"][0]["Filter"]["Key"]["FilterRules"]) == 1 + assert result["LambdaFunctionConfigurations"][0]["Filter"]["Key"]["FilterRules"][0]["Name"] == "prefix" + assert result["LambdaFunctionConfigurations"][0]["Filter"]["Key"]["FilterRules"][0]["Value"] == "images/" + + # And with all 3 set: + s3.put_bucket_notification_configuration(Bucket="bucket", + NotificationConfiguration={ + "TopicConfigurations": [ + { + "TopicArn": "arn:aws:sns:us-east-1:012345678910:mytopic", + "Events": [ + "s3:ObjectCreated:*", + "s3:ObjectRemoved:*" + ] + } + ], + "LambdaFunctionConfigurations": [ + { + "LambdaFunctionArn": + "arn:aws:lambda:us-east-1:012345678910:function:lambda", + "Events": ["s3:ObjectCreated:*"] + } + ], + "QueueConfigurations": [ + { + "QueueArn": 
"arn:aws:sqs:us-east-1:012345678910:myQueue", + "Events": ["s3:ObjectCreated:*"] + } + ] + }) + result = s3.get_bucket_notification_configuration(Bucket="bucket") + assert len(result["LambdaFunctionConfigurations"]) == 1 + assert len(result["TopicConfigurations"]) == 1 + assert len(result["QueueConfigurations"]) == 1 + + # And clear it out: + s3.put_bucket_notification_configuration(Bucket="bucket", NotificationConfiguration={}) + result = s3.get_bucket_notification_configuration(Bucket="bucket") + assert not result.get("TopicConfigurations") + assert not result.get("QueueConfigurations") + assert not result.get("LambdaFunctionConfigurations") + + +@mock_s3 +def test_put_bucket_notification_errors(): + s3 = boto3.client("s3", region_name="us-east-1") + s3.create_bucket(Bucket="bucket") + + # With incorrect ARNs: + for tech, arn in [("Queue", "sqs"), ("Topic", "sns"), ("LambdaFunction", "lambda")]: + with assert_raises(ClientError) as err: + s3.put_bucket_notification_configuration(Bucket="bucket", + NotificationConfiguration={ + "{}Configurations".format(tech): [ + { + "{}Arn".format(tech): + "arn:aws:{}:us-east-1:012345678910:lksajdfkldskfj", + "Events": ["s3:ObjectCreated:*"] + } + ] + }) + + assert err.exception.response["Error"]["Code"] == "InvalidArgument" + assert err.exception.response["Error"]["Message"] == "The ARN is not well formed" + + # Region not the same as the bucket: + with assert_raises(ClientError) as err: + s3.put_bucket_notification_configuration(Bucket="bucket", + NotificationConfiguration={ + "QueueConfigurations": [ + { + "QueueArn": + "arn:aws:sqs:us-west-2:012345678910:lksajdfkldskfj", + "Events": ["s3:ObjectCreated:*"] + } + ] + }) + + assert err.exception.response["Error"]["Code"] == "InvalidArgument" + assert err.exception.response["Error"]["Message"] == \ + "The notification destination service region is not valid for the bucket location constraint" + + # Invalid event name: + with assert_raises(ClientError) as err: + 
s3.put_bucket_notification_configuration(Bucket="bucket", + NotificationConfiguration={ + "QueueConfigurations": [ + { + "QueueArn": + "arn:aws:sqs:us-east-1:012345678910:lksajdfkldskfj", + "Events": ["notarealeventname"] + } + ] + }) + assert err.exception.response["Error"]["Code"] == "InvalidArgument" + assert err.exception.response["Error"]["Message"] == "The event is not supported for notifications" + + +@mock_s3 +def test_boto3_put_bucket_logging(): + s3 = boto3.client("s3", region_name="us-east-1") + bucket_name = "mybucket" + log_bucket = "logbucket" + wrong_region_bucket = "wrongregionlogbucket" + s3.create_bucket(Bucket=bucket_name) + s3.create_bucket(Bucket=log_bucket) # Adding the ACL for log-delivery later... + s3.create_bucket(Bucket=wrong_region_bucket, CreateBucketConfiguration={"LocationConstraint": "us-west-2"}) + + # No logging config: + result = s3.get_bucket_logging(Bucket=bucket_name) + assert not result.get("LoggingEnabled") + + # A log-bucket that doesn't exist: + with assert_raises(ClientError) as err: + s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={ + "LoggingEnabled": { + "TargetBucket": "IAMNOTREAL", + "TargetPrefix": "" + } + }) + assert err.exception.response["Error"]["Code"] == "InvalidTargetBucketForLogging" + + # A log-bucket that's missing the proper ACLs for LogDelivery: + with assert_raises(ClientError) as err: + s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={ + "LoggingEnabled": { + "TargetBucket": log_bucket, + "TargetPrefix": "" + } + }) + assert err.exception.response["Error"]["Code"] == "InvalidTargetBucketForLogging" + assert "log-delivery" in err.exception.response["Error"]["Message"] + + # Add the proper "log-delivery" ACL to the log buckets: + bucket_owner = s3.get_bucket_acl(Bucket=log_bucket)["Owner"] + for bucket in [log_bucket, wrong_region_bucket]: + s3.put_bucket_acl(Bucket=bucket, AccessControlPolicy={ + "Grants": [ + { + "Grantee": { + "URI": 
"http://acs.amazonaws.com/groups/s3/LogDelivery", + "Type": "Group" + }, + "Permission": "WRITE" + }, + { + "Grantee": { + "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery", + "Type": "Group" + }, + "Permission": "READ_ACP" + }, + { + "Grantee": { + "Type": "CanonicalUser", + "ID": bucket_owner["ID"] + }, + "Permission": "FULL_CONTROL" + } + ], + "Owner": bucket_owner + }) + + # A log-bucket that's in the wrong region: + with assert_raises(ClientError) as err: + s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={ + "LoggingEnabled": { + "TargetBucket": wrong_region_bucket, + "TargetPrefix": "" + } + }) + assert err.exception.response["Error"]["Code"] == "CrossLocationLoggingProhibitted" + + # Correct logging: + s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={ + "LoggingEnabled": { + "TargetBucket": log_bucket, + "TargetPrefix": "{}/".format(bucket_name) + } + }) + result = s3.get_bucket_logging(Bucket=bucket_name) + assert result["LoggingEnabled"]["TargetBucket"] == log_bucket + assert result["LoggingEnabled"]["TargetPrefix"] == "{}/".format(bucket_name) + assert not result["LoggingEnabled"].get("TargetGrants") + + # And disabling: + s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={}) + assert not s3.get_bucket_logging(Bucket=bucket_name).get("LoggingEnabled") + + # And enabling with multiple target grants: + s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={ + "LoggingEnabled": { + "TargetBucket": log_bucket, + "TargetPrefix": "{}/".format(bucket_name), + "TargetGrants": [ + { + "Grantee": { + "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274", + "Type": "CanonicalUser" + }, + "Permission": "READ" + }, + { + "Grantee": { + "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274", + "Type": "CanonicalUser" + }, + "Permission": "WRITE" + } + ] + } + }) + + result = s3.get_bucket_logging(Bucket=bucket_name) + assert len(result["LoggingEnabled"]["TargetGrants"]) == 2 + assert 
result["LoggingEnabled"]["TargetGrants"][0]["Grantee"]["ID"] == \ + "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274" + + # Test with just 1 grant: + s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={ + "LoggingEnabled": { + "TargetBucket": log_bucket, + "TargetPrefix": "{}/".format(bucket_name), + "TargetGrants": [ + { + "Grantee": { + "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274", + "Type": "CanonicalUser" + }, + "Permission": "READ" + } + ] + } + }) + result = s3.get_bucket_logging(Bucket=bucket_name) + assert len(result["LoggingEnabled"]["TargetGrants"]) == 1 + + # With an invalid grant: + with assert_raises(ClientError) as err: + s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={ + "LoggingEnabled": { + "TargetBucket": log_bucket, + "TargetPrefix": "{}/".format(bucket_name), + "TargetGrants": [ + { + "Grantee": { + "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274", + "Type": "CanonicalUser" + }, + "Permission": "NOTAREALPERM" + } + ] + } + }) + assert err.exception.response["Error"]["Code"] == "MalformedXML" + + +@mock_s3 +def test_boto3_put_object_tagging(): + s3 = boto3.client('s3', region_name='us-east-1') + bucket_name = 'mybucket' + key = 'key-with-tags' + s3.create_bucket(Bucket=bucket_name) + + with assert_raises(ClientError) as err: + s3.put_object_tagging( + Bucket=bucket_name, + Key=key, + Tagging={'TagSet': [ + {'Key': 'item1', 'Value': 'foo'}, + {'Key': 'item2', 'Value': 'bar'}, + ]} + ) + + e = err.exception + e.response['Error'].should.equal({ + 'Code': 'NoSuchKey', + 'Message': 'The specified key does not exist.', + 'RequestID': '7a62c49f-347e-4fc4-9331-6e8eEXAMPLE', + }) + + s3.put_object( + Bucket=bucket_name, + Key=key, + Body='test' + ) + + resp = s3.put_object_tagging( + Bucket=bucket_name, + Key=key, + Tagging={'TagSet': [ + {'Key': 'item1', 'Value': 'foo'}, + {'Key': 'item2', 'Value': 'bar'}, + ]} + ) + + 
resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) + + +@mock_s3 +def test_boto3_put_object_tagging_with_single_tag(): + s3 = boto3.client('s3', region_name='us-east-1') + bucket_name = 'mybucket' + key = 'key-with-tags' + s3.create_bucket(Bucket=bucket_name) + + s3.put_object( + Bucket=bucket_name, + Key=key, + Body='test' + ) + + resp = s3.put_object_tagging( + Bucket=bucket_name, + Key=key, + Tagging={'TagSet': [ + {'Key': 'item1', 'Value': 'foo'} + ]} + ) + + resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) + + +@mock_s3 +def test_boto3_get_object_tagging(): + s3 = boto3.client('s3', region_name='us-east-1') + bucket_name = 'mybucket' + key = 'key-with-tags' + s3.create_bucket(Bucket=bucket_name) + + s3.put_object( + Bucket=bucket_name, + Key=key, + Body='test' + ) + + resp = s3.get_object_tagging(Bucket=bucket_name, Key=key) + resp['TagSet'].should.have.length_of(0) + + resp = s3.put_object_tagging( + Bucket=bucket_name, + Key=key, + Tagging={'TagSet': [ + {'Key': 'item1', 'Value': 'foo'}, + {'Key': 'item2', 'Value': 'bar'}, + ]} + ) + resp = s3.get_object_tagging(Bucket=bucket_name, Key=key) + + resp['TagSet'].should.have.length_of(2) + resp['TagSet'].should.contain({'Key': 'item1', 'Value': 'foo'}) + resp['TagSet'].should.contain({'Key': 'item2', 'Value': 'bar'}) + + +@mock_s3 +def test_boto3_list_object_versions(): + s3 = boto3.client('s3', region_name='us-east-1') + bucket_name = 'mybucket' + key = 'key-with-versions' + s3.create_bucket(Bucket=bucket_name) + s3.put_bucket_versioning( + Bucket=bucket_name, + VersioningConfiguration={ + 'Status': 'Enabled' + } + ) + items = (six.b('v1'), six.b('v2')) + for body in items: + s3.put_object( + Bucket=bucket_name, + Key=key, + Body=body + ) + response = s3.list_object_versions( + Bucket=bucket_name + ) + # Two object versions should be returned + len(response['Versions']).should.equal(2) + keys = set([item['Key'] for item in response['Versions']]) + keys.should.equal({key}) + # Test latest 
object version is returned + response = s3.get_object(Bucket=bucket_name, Key=key) + response['Body'].read().should.equal(items[-1]) + + +@mock_s3 +def test_boto3_list_object_versions_with_versioning_disabled(): + s3 = boto3.client('s3', region_name='us-east-1') + bucket_name = 'mybucket' + key = 'key-with-versions' + s3.create_bucket(Bucket=bucket_name) + items = (six.b('v1'), six.b('v2')) + for body in items: + s3.put_object( + Bucket=bucket_name, + Key=key, + Body=body + ) + response = s3.list_object_versions( + Bucket=bucket_name + ) + + # One object version should be returned + len(response['Versions']).should.equal(1) + response['Versions'][0]['Key'].should.equal(key) + + # The version id should be the string null + response['Versions'][0]['VersionId'].should.equal('null') + + # Test latest object version is returned + response = s3.get_object(Bucket=bucket_name, Key=key) + response['Body'].read().should.equal(items[-1]) + + +@mock_s3 +def test_boto3_list_object_versions_with_versioning_enabled_late(): + s3 = boto3.client('s3', region_name='us-east-1') + bucket_name = 'mybucket' + key = 'key-with-versions' + s3.create_bucket(Bucket=bucket_name) + items = (six.b('v1'), six.b('v2')) + s3.put_object( + Bucket=bucket_name, + Key=key, + Body=six.b('v1') + ) + s3.put_bucket_versioning( + Bucket=bucket_name, + VersioningConfiguration={ + 'Status': 'Enabled' + } + ) + s3.put_object( + Bucket=bucket_name, + Key=key, + Body=six.b('v2') + ) + response = s3.list_object_versions( + Bucket=bucket_name + ) + + # Two object versions should be returned + len(response['Versions']).should.equal(2) + keys = set([item['Key'] for item in response['Versions']]) + keys.should.equal({key}) + + # There should still be a null version id. 
+ versionsId = set([item['VersionId'] for item in response['Versions']]) + versionsId.should.contain('null') + + # Test latest object version is returned + response = s3.get_object(Bucket=bucket_name, Key=key) + response['Body'].read().should.equal(items[-1]) + +@mock_s3 +def test_boto3_bad_prefix_list_object_versions(): + s3 = boto3.client('s3', region_name='us-east-1') + bucket_name = 'mybucket' + key = 'key-with-versions' + bad_prefix = 'key-that-does-not-exist' + s3.create_bucket(Bucket=bucket_name) + s3.put_bucket_versioning( + Bucket=bucket_name, + VersioningConfiguration={ + 'Status': 'Enabled' + } + ) + items = (six.b('v1'), six.b('v2')) + for body in items: + s3.put_object( + Bucket=bucket_name, + Key=key, + Body=body + ) + response = s3.list_object_versions( + Bucket=bucket_name, + Prefix=bad_prefix, + ) + response['ResponseMetadata']['HTTPStatusCode'].should.equal(200) + response.should_not.contain('Versions') + response.should_not.contain('DeleteMarkers') + + +@mock_s3 +def test_boto3_delete_markers(): + s3 = boto3.client('s3', region_name='us-east-1') + bucket_name = 'mybucket' + key = u'key-with-versions-and-unicode-ó' + s3.create_bucket(Bucket=bucket_name) + s3.put_bucket_versioning( + Bucket=bucket_name, + VersioningConfiguration={ + 'Status': 'Enabled' + } + ) + items = (six.b('v1'), six.b('v2')) + for body in items: + s3.put_object( + Bucket=bucket_name, + Key=key, + Body=body + ) + + s3.delete_objects(Bucket=bucket_name, Delete={'Objects': [{'Key': key}]}) + + with assert_raises(ClientError) as e: + s3.get_object( + Bucket=bucket_name, + Key=key + ) + e.exception.response['Error']['Code'].should.equal('NoSuchKey') + + response = s3.list_object_versions( + Bucket=bucket_name + ) + response['Versions'].should.have.length_of(2) + response['DeleteMarkers'].should.have.length_of(1) + + s3.delete_object( + Bucket=bucket_name, + Key=key, + VersionId=response['DeleteMarkers'][0]['VersionId'] + ) + response = s3.get_object( + Bucket=bucket_name, + Key=key 
+ ) + response['Body'].read().should.equal(items[-1]) + + response = s3.list_object_versions( + Bucket=bucket_name + ) + response['Versions'].should.have.length_of(2) + + # We've asserted there is only 2 records so one is newest, one is oldest + latest = list(filter(lambda item: item['IsLatest'], response['Versions']))[0] + oldest = list(filter(lambda item: not item['IsLatest'], response['Versions']))[0] + # Double check ordering of version ID's + latest['VersionId'].should_not.equal(oldest['VersionId']) + + # Double check the name is still unicode + latest['Key'].should.equal('key-with-versions-and-unicode-ó') + oldest['Key'].should.equal('key-with-versions-and-unicode-ó') + + +@mock_s3 +def test_boto3_multiple_delete_markers(): + s3 = boto3.client('s3', region_name='us-east-1') + bucket_name = 'mybucket' + key = u'key-with-versions-and-unicode-ó' + s3.create_bucket(Bucket=bucket_name) + s3.put_bucket_versioning( + Bucket=bucket_name, + VersioningConfiguration={ + 'Status': 'Enabled' + } + ) + items = (six.b('v1'), six.b('v2')) + for body in items: + s3.put_object( + Bucket=bucket_name, + Key=key, + Body=body + ) + + # Delete the object twice to add multiple delete markers + s3.delete_object(Bucket=bucket_name, Key=key) + s3.delete_object(Bucket=bucket_name, Key=key) + + response = s3.list_object_versions(Bucket=bucket_name) + response['DeleteMarkers'].should.have.length_of(2) + + with assert_raises(ClientError) as e: + s3.get_object( + Bucket=bucket_name, + Key=key + ) + e.response['Error']['Code'].should.equal('404') + + # Remove both delete markers to restore the object + s3.delete_object( + Bucket=bucket_name, + Key=key, + VersionId=response['DeleteMarkers'][0]['VersionId'] + ) + s3.delete_object( + Bucket=bucket_name, + Key=key, + VersionId=response['DeleteMarkers'][1]['VersionId'] + ) + + response = s3.get_object( + Bucket=bucket_name, + Key=key + ) + response['Body'].read().should.equal(items[-1]) + response = s3.list_object_versions(Bucket=bucket_name) + 
response['Versions'].should.have.length_of(2) + + # We've asserted there is only 2 records so one is newest, one is oldest + latest = list(filter(lambda item: item['IsLatest'], response['Versions']))[0] + oldest = list(filter(lambda item: not item['IsLatest'], response['Versions']))[0] + + # Double check ordering of version ID's + latest['VersionId'].should_not.equal(oldest['VersionId']) + + # Double check the name is still unicode + latest['Key'].should.equal('key-with-versions-and-unicode-ó') + oldest['Key'].should.equal('key-with-versions-and-unicode-ó') + +@mock_s3 +def test_get_stream_gzipped(): + payload = b"this is some stuff here" + + s3_client = boto3.client("s3", region_name='us-east-1') + s3_client.create_bucket(Bucket='moto-tests') + buffer_ = BytesIO() + with GzipFile(fileobj=buffer_, mode='w') as f: + f.write(payload) + payload_gz = buffer_.getvalue() + + s3_client.put_object( + Bucket='moto-tests', + Key='keyname', + Body=payload_gz, + ContentEncoding='gzip', + ) + + obj = s3_client.get_object( + Bucket='moto-tests', + Key='keyname', + ) + res = zlib.decompress(obj['Body'].read(), 16 + zlib.MAX_WBITS) + assert res == payload + + +TEST_XML = """\ + + + + index.html + + + + + test/testing + + + test.txt + + + + +""" + +@mock_s3 +def test_boto3_bucket_name_too_long(): + s3 = boto3.client('s3', region_name='us-east-1') + with assert_raises(ClientError) as exc: + s3.create_bucket(Bucket='x'*64) + exc.exception.response['Error']['Code'].should.equal('InvalidBucketName') + +@mock_s3 +def test_boto3_bucket_name_too_short(): + s3 = boto3.client('s3', region_name='us-east-1') + with assert_raises(ClientError) as exc: + s3.create_bucket(Bucket='x'*2) + exc.exception.response['Error']['Code'].should.equal('InvalidBucketName') diff --git a/tests/test_s3/test_server.py b/tests/test_s3/test_server.py index efa05b862..b179a2329 100644 --- a/tests/test_s3/test_server.py +++ b/tests/test_s3/test_server.py @@ -1,105 +1,106 @@ -# coding=utf-8 - -from __future__ import 
unicode_literals -import sure # noqa - -from flask.testing import FlaskClient -import moto.server as server - -''' -Test the different server responses -''' - - -class AuthenticatedClient(FlaskClient): - def open(self, *args, **kwargs): - kwargs['headers'] = kwargs.get('headers', {}) - kwargs['headers']['Authorization'] = "Any authorization header" - return super(AuthenticatedClient, self).open(*args, **kwargs) - - -def authenticated_client(): - backend = server.create_backend_app("s3") - backend.test_client_class = AuthenticatedClient - return backend.test_client() - - -def test_s3_server_get(): - test_client = authenticated_client() - res = test_client.get('/') - - res.data.should.contain(b'ListAllMyBucketsResult') - - -def test_s3_server_bucket_create(): - test_client = authenticated_client() - - res = test_client.put('/', 'http://foobaz.localhost:5000/') - res.status_code.should.equal(200) - - res = test_client.get('/') - res.data.should.contain(b'foobaz') - - res = test_client.get('/', 'http://foobaz.localhost:5000/') - res.status_code.should.equal(200) - res.data.should.contain(b"ListBucketResult") - - res = test_client.put( - '/bar', 'http://foobaz.localhost:5000/', data='test value') - res.status_code.should.equal(200) - assert 'ETag' in dict(res.headers) - - res = test_client.get('/bar', 'http://foobaz.localhost:5000/') - res.status_code.should.equal(200) - res.data.should.equal(b"test value") - - -def test_s3_server_bucket_versioning(): - test_client = authenticated_client() - - # Just enough XML to enable versioning - body = 'Enabled' - res = test_client.put( - '/?versioning', 'http://foobaz.localhost:5000', data=body) - res.status_code.should.equal(200) - - -def test_s3_server_post_to_bucket(): - test_client = authenticated_client() - - res = test_client.put('/', 'http://tester.localhost:5000/') - res.status_code.should.equal(200) - - test_client.post('/', "https://tester.localhost:5000/", data={ - 'key': 'the-key', - 'file': 'nothing' - }) - - res = 
test_client.get('/the-key', 'http://tester.localhost:5000/') - res.status_code.should.equal(200) - res.data.should.equal(b"nothing") - - -def test_s3_server_post_without_content_length(): - test_client = authenticated_client() - - res = test_client.put('/', 'http://tester.localhost:5000/', environ_overrides={'CONTENT_LENGTH': ''}) - res.status_code.should.equal(411) - - res = test_client.post('/', "https://tester.localhost:5000/", environ_overrides={'CONTENT_LENGTH': ''}) - res.status_code.should.equal(411) - - -def test_s3_server_post_unicode_bucket_key(): - # Make sure that we can deal with non-ascii characters in request URLs (e.g., S3 object names) - dispatcher = server.DomainDispatcherApplication(server.create_backend_app) - backend_app = dispatcher.get_application({ - 'HTTP_HOST': 's3.amazonaws.com', - 'PATH_INFO': '/test-bucket/test-object-てすと' - }) - assert backend_app - backend_app = dispatcher.get_application({ - 'HTTP_HOST': 's3.amazonaws.com', - 'PATH_INFO': '/test-bucket/test-object-てすと'.encode('utf-8') - }) - assert backend_app +# coding=utf-8 + +from __future__ import unicode_literals +import sure # noqa + +from flask.testing import FlaskClient +import moto.server as server + +''' +Test the different server responses +''' + + +class AuthenticatedClient(FlaskClient): + def open(self, *args, **kwargs): + kwargs['headers'] = kwargs.get('headers', {}) + kwargs['headers']['Authorization'] = "Any authorization header" + kwargs['content_length'] = 0 # Fixes content-length complaints. 
+ return super(AuthenticatedClient, self).open(*args, **kwargs) + + +def authenticated_client(): + backend = server.create_backend_app("s3") + backend.test_client_class = AuthenticatedClient + return backend.test_client() + + +def test_s3_server_get(): + test_client = authenticated_client() + res = test_client.get('/') + + res.data.should.contain(b'ListAllMyBucketsResult') + + +def test_s3_server_bucket_create(): + test_client = authenticated_client() + + res = test_client.put('/', 'http://foobaz.localhost:5000/') + res.status_code.should.equal(200) + + res = test_client.get('/') + res.data.should.contain(b'foobaz') + + res = test_client.get('/', 'http://foobaz.localhost:5000/') + res.status_code.should.equal(200) + res.data.should.contain(b"ListBucketResult") + + res = test_client.put( + '/bar', 'http://foobaz.localhost:5000/', data='test value') + res.status_code.should.equal(200) + assert 'ETag' in dict(res.headers) + + res = test_client.get('/bar', 'http://foobaz.localhost:5000/') + res.status_code.should.equal(200) + res.data.should.equal(b"test value") + + +def test_s3_server_bucket_versioning(): + test_client = authenticated_client() + + # Just enough XML to enable versioning + body = 'Enabled' + res = test_client.put( + '/?versioning', 'http://foobaz.localhost:5000', data=body) + res.status_code.should.equal(200) + + +def test_s3_server_post_to_bucket(): + test_client = authenticated_client() + + res = test_client.put('/', 'http://tester.localhost:5000/') + res.status_code.should.equal(200) + + test_client.post('/', "https://tester.localhost:5000/", data={ + 'key': 'the-key', + 'file': 'nothing' + }) + + res = test_client.get('/the-key', 'http://tester.localhost:5000/') + res.status_code.should.equal(200) + res.data.should.equal(b"nothing") + + +def test_s3_server_post_without_content_length(): + test_client = authenticated_client() + + res = test_client.put('/', 'http://tester.localhost:5000/', environ_overrides={'CONTENT_LENGTH': ''}) + 
res.status_code.should.equal(411) + + res = test_client.post('/', "https://tester.localhost:5000/", environ_overrides={'CONTENT_LENGTH': ''}) + res.status_code.should.equal(411) + + +def test_s3_server_post_unicode_bucket_key(): + # Make sure that we can deal with non-ascii characters in request URLs (e.g., S3 object names) + dispatcher = server.DomainDispatcherApplication(server.create_backend_app) + backend_app = dispatcher.get_application({ + 'HTTP_HOST': 's3.amazonaws.com', + 'PATH_INFO': '/test-bucket/test-object-てすと' + }) + assert backend_app + backend_app = dispatcher.get_application({ + 'HTTP_HOST': 's3.amazonaws.com', + 'PATH_INFO': '/test-bucket/test-object-てすと'.encode('utf-8') + }) + assert backend_app diff --git a/tests/test_s3bucket_path/test_bucket_path_server.py b/tests/test_s3bucket_path/test_bucket_path_server.py index 0fd73c3b9..f6238dd28 100644 --- a/tests/test_s3bucket_path/test_bucket_path_server.py +++ b/tests/test_s3bucket_path/test_bucket_path_server.py @@ -1,113 +1,114 @@ -from __future__ import unicode_literals -import sure # noqa - -from flask.testing import FlaskClient -import moto.server as server - -''' -Test the different server responses -''' - - -class AuthenticatedClient(FlaskClient): - def open(self, *args, **kwargs): - kwargs['headers'] = kwargs.get('headers', {}) - kwargs['headers']['Authorization'] = "Any authorization header" - return super(AuthenticatedClient, self).open(*args, **kwargs) - - -def authenticated_client(): - backend = server.create_backend_app("s3bucket_path") - backend.test_client_class = AuthenticatedClient - return backend.test_client() - - -def test_s3_server_get(): - test_client = authenticated_client() - - res = test_client.get('/') - - res.data.should.contain(b'ListAllMyBucketsResult') - - -def test_s3_server_bucket_create(): - test_client = authenticated_client() - - res = test_client.put('/foobar', 'http://localhost:5000') - res.status_code.should.equal(200) - - res = test_client.get('/') - 
res.data.should.contain(b'foobar') - - res = test_client.get('/foobar', 'http://localhost:5000') - res.status_code.should.equal(200) - res.data.should.contain(b"ListBucketResult") - - res = test_client.put('/foobar2/', 'http://localhost:5000') - res.status_code.should.equal(200) - - res = test_client.get('/') - res.data.should.contain(b'foobar2') - - res = test_client.get('/foobar2/', 'http://localhost:5000') - res.status_code.should.equal(200) - res.data.should.contain(b"ListBucketResult") - - res = test_client.get('/missing-bucket', 'http://localhost:5000') - res.status_code.should.equal(404) - - res = test_client.put( - '/foobar/bar', 'http://localhost:5000', data='test value') - res.status_code.should.equal(200) - - res = test_client.get('/foobar/bar', 'http://localhost:5000') - res.status_code.should.equal(200) - res.data.should.equal(b"test value") - - -def test_s3_server_post_to_bucket(): - test_client = authenticated_client() - - res = test_client.put('/foobar2', 'http://localhost:5000/') - res.status_code.should.equal(200) - - test_client.post('/foobar2', "https://localhost:5000/", data={ - 'key': 'the-key', - 'file': 'nothing' - }) - - res = test_client.get('/foobar2/the-key', 'http://localhost:5000/') - res.status_code.should.equal(200) - res.data.should.equal(b"nothing") - - -def test_s3_server_put_ipv6(): - test_client = authenticated_client() - - res = test_client.put('/foobar2', 'http://[::]:5000/') - res.status_code.should.equal(200) - - test_client.post('/foobar2', "https://[::]:5000/", data={ - 'key': 'the-key', - 'file': 'nothing' - }) - - res = test_client.get('/foobar2/the-key', 'http://[::]:5000/') - res.status_code.should.equal(200) - res.data.should.equal(b"nothing") - - -def test_s3_server_put_ipv4(): - test_client = authenticated_client() - - res = test_client.put('/foobar2', 'http://127.0.0.1:5000/') - res.status_code.should.equal(200) - - test_client.post('/foobar2', "https://127.0.0.1:5000/", data={ - 'key': 'the-key', - 'file': 
'nothing' - }) - - res = test_client.get('/foobar2/the-key', 'http://127.0.0.1:5000/') - res.status_code.should.equal(200) - res.data.should.equal(b"nothing") +from __future__ import unicode_literals +import sure # noqa + +from flask.testing import FlaskClient +import moto.server as server + +''' +Test the different server responses +''' + + +class AuthenticatedClient(FlaskClient): + def open(self, *args, **kwargs): + kwargs['headers'] = kwargs.get('headers', {}) + kwargs['headers']['Authorization'] = "Any authorization header" + kwargs['content_length'] = 0 # Fixes content-length complaints. + return super(AuthenticatedClient, self).open(*args, **kwargs) + + +def authenticated_client(): + backend = server.create_backend_app("s3bucket_path") + backend.test_client_class = AuthenticatedClient + return backend.test_client() + + +def test_s3_server_get(): + test_client = authenticated_client() + + res = test_client.get('/') + + res.data.should.contain(b'ListAllMyBucketsResult') + + +def test_s3_server_bucket_create(): + test_client = authenticated_client() + + res = test_client.put('/foobar', 'http://localhost:5000') + res.status_code.should.equal(200) + + res = test_client.get('/') + res.data.should.contain(b'foobar') + + res = test_client.get('/foobar', 'http://localhost:5000') + res.status_code.should.equal(200) + res.data.should.contain(b"ListBucketResult") + + res = test_client.put('/foobar2/', 'http://localhost:5000') + res.status_code.should.equal(200) + + res = test_client.get('/') + res.data.should.contain(b'foobar2') + + res = test_client.get('/foobar2/', 'http://localhost:5000') + res.status_code.should.equal(200) + res.data.should.contain(b"ListBucketResult") + + res = test_client.get('/missing-bucket', 'http://localhost:5000') + res.status_code.should.equal(404) + + res = test_client.put( + '/foobar/bar', 'http://localhost:5000', data='test value') + res.status_code.should.equal(200) + + res = test_client.get('/foobar/bar', 'http://localhost:5000') + 
res.status_code.should.equal(200) + res.data.should.equal(b"test value") + + +def test_s3_server_post_to_bucket(): + test_client = authenticated_client() + + res = test_client.put('/foobar2', 'http://localhost:5000/') + res.status_code.should.equal(200) + + test_client.post('/foobar2', "https://localhost:5000/", data={ + 'key': 'the-key', + 'file': 'nothing' + }) + + res = test_client.get('/foobar2/the-key', 'http://localhost:5000/') + res.status_code.should.equal(200) + res.data.should.equal(b"nothing") + + +def test_s3_server_put_ipv6(): + test_client = authenticated_client() + + res = test_client.put('/foobar2', 'http://[::]:5000/') + res.status_code.should.equal(200) + + test_client.post('/foobar2', "https://[::]:5000/", data={ + 'key': 'the-key', + 'file': 'nothing' + }) + + res = test_client.get('/foobar2/the-key', 'http://[::]:5000/') + res.status_code.should.equal(200) + res.data.should.equal(b"nothing") + + +def test_s3_server_put_ipv4(): + test_client = authenticated_client() + + res = test_client.put('/foobar2', 'http://127.0.0.1:5000/') + res.status_code.should.equal(200) + + test_client.post('/foobar2', "https://127.0.0.1:5000/", data={ + 'key': 'the-key', + 'file': 'nothing' + }) + + res = test_client.get('/foobar2/the-key', 'http://127.0.0.1:5000/') + res.status_code.should.equal(200) + res.data.should.equal(b"nothing") diff --git a/tests/test_secretsmanager/test_secretsmanager.py b/tests/test_secretsmanager/test_secretsmanager.py index 9d496704c..81ce93cc3 100644 --- a/tests/test_secretsmanager/test_secretsmanager.py +++ b/tests/test_secretsmanager/test_secretsmanager.py @@ -1,286 +1,505 @@ -from __future__ import unicode_literals - -import boto3 - -from moto import mock_secretsmanager -from botocore.exceptions import ClientError -import sure # noqa -import string -import unittest -from nose.tools import assert_raises - -@mock_secretsmanager -def test_get_secret_value(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - - 
create_secret = conn.create_secret(Name='java-util-test-password', - SecretString="foosecret") - result = conn.get_secret_value(SecretId='java-util-test-password') - assert result['SecretString'] == 'foosecret' - -@mock_secretsmanager -def test_get_secret_that_does_not_exist(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - - with assert_raises(ClientError): - result = conn.get_secret_value(SecretId='i-dont-exist') - -@mock_secretsmanager -def test_get_secret_that_does_not_match(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - create_secret = conn.create_secret(Name='java-util-test-password', - SecretString="foosecret") - - with assert_raises(ClientError): - result = conn.get_secret_value(SecretId='i-dont-match') - -@mock_secretsmanager -def test_create_secret(): - conn = boto3.client('secretsmanager', region_name='us-east-1') - - result = conn.create_secret(Name='test-secret', SecretString="foosecret") - assert result['ARN'] == ( - 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad') - assert result['Name'] == 'test-secret' - secret = conn.get_secret_value(SecretId='test-secret') - assert secret['SecretString'] == 'foosecret' - -@mock_secretsmanager -def test_get_random_password_default_length(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - - random_password = conn.get_random_password() - assert len(random_password['RandomPassword']) == 32 - -@mock_secretsmanager -def test_get_random_password_default_requirements(): - # When require_each_included_type, default true - conn = boto3.client('secretsmanager', region_name='us-west-2') - - random_password = conn.get_random_password() - # Should contain lowercase, upppercase, digit, special character - assert any(c.islower() for c in random_password['RandomPassword']) - assert any(c.isupper() for c in random_password['RandomPassword']) - assert any(c.isdigit() for c in random_password['RandomPassword']) - assert any(c in string.punctuation - 
for c in random_password['RandomPassword']) - -@mock_secretsmanager -def test_get_random_password_custom_length(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - - random_password = conn.get_random_password(PasswordLength=50) - assert len(random_password['RandomPassword']) == 50 - -@mock_secretsmanager -def test_get_random_exclude_lowercase(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - - random_password = conn.get_random_password(PasswordLength=55, - ExcludeLowercase=True) - assert any(c.islower() for c in random_password['RandomPassword']) == False - -@mock_secretsmanager -def test_get_random_exclude_uppercase(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - - random_password = conn.get_random_password(PasswordLength=55, - ExcludeUppercase=True) - assert any(c.isupper() for c in random_password['RandomPassword']) == False - -@mock_secretsmanager -def test_get_random_exclude_characters_and_symbols(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - - random_password = conn.get_random_password(PasswordLength=20, - ExcludeCharacters='xyzDje@?!.') - assert any(c in 'xyzDje@?!.' 
for c in random_password['RandomPassword']) == False - -@mock_secretsmanager -def test_get_random_exclude_numbers(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - - random_password = conn.get_random_password(PasswordLength=100, - ExcludeNumbers=True) - assert any(c.isdigit() for c in random_password['RandomPassword']) == False - -@mock_secretsmanager -def test_get_random_exclude_punctuation(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - - random_password = conn.get_random_password(PasswordLength=100, - ExcludePunctuation=True) - assert any(c in string.punctuation - for c in random_password['RandomPassword']) == False - -@mock_secretsmanager -def test_get_random_include_space_false(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - - random_password = conn.get_random_password(PasswordLength=300) - assert any(c.isspace() for c in random_password['RandomPassword']) == False - -@mock_secretsmanager -def test_get_random_include_space_true(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - - random_password = conn.get_random_password(PasswordLength=4, - IncludeSpace=True) - assert any(c.isspace() for c in random_password['RandomPassword']) == True - -@mock_secretsmanager -def test_get_random_require_each_included_type(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - - random_password = conn.get_random_password(PasswordLength=4, - RequireEachIncludedType=True) - assert any(c in string.punctuation for c in random_password['RandomPassword']) == True - assert any(c in string.ascii_lowercase for c in random_password['RandomPassword']) == True - assert any(c in string.ascii_uppercase for c in random_password['RandomPassword']) == True - assert any(c in string.digits for c in random_password['RandomPassword']) == True - -@mock_secretsmanager -def test_get_random_too_short_password(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - - with 
assert_raises(ClientError): - random_password = conn.get_random_password(PasswordLength=3) - -@mock_secretsmanager -def test_get_random_too_long_password(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - - with assert_raises(Exception): - random_password = conn.get_random_password(PasswordLength=5555) - -@mock_secretsmanager -def test_describe_secret(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - conn.create_secret(Name='test-secret', - SecretString='foosecret') - - secret_description = conn.describe_secret(SecretId='test-secret') - assert secret_description # Returned dict is not empty - assert secret_description['ARN'] == ( - 'arn:aws:secretsmanager:us-west-2:1234567890:secret:test-secret-rIjad') - -@mock_secretsmanager -def test_describe_secret_that_does_not_exist(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - - with assert_raises(ClientError): - result = conn.get_secret_value(SecretId='i-dont-exist') - -@mock_secretsmanager -def test_describe_secret_that_does_not_match(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - conn.create_secret(Name='test-secret', - SecretString='foosecret') - - with assert_raises(ClientError): - result = conn.get_secret_value(SecretId='i-dont-match') - -@mock_secretsmanager -def test_rotate_secret(): - secret_name = 'test-secret' - conn = boto3.client('secretsmanager', region_name='us-west-2') - conn.create_secret(Name=secret_name, - SecretString='foosecret') - - rotated_secret = conn.rotate_secret(SecretId=secret_name) - - assert rotated_secret - assert rotated_secret['ARN'] == ( - 'arn:aws:secretsmanager:us-west-2:1234567890:secret:test-secret-rIjad' - ) - assert rotated_secret['Name'] == secret_name - assert rotated_secret['VersionId'] != '' - -@mock_secretsmanager -def test_rotate_secret_enable_rotation(): - secret_name = 'test-secret' - conn = boto3.client('secretsmanager', region_name='us-west-2') - conn.create_secret(Name=secret_name, - 
SecretString='foosecret') - - initial_description = conn.describe_secret(SecretId=secret_name) - assert initial_description - assert initial_description['RotationEnabled'] is False - assert initial_description['RotationRules']['AutomaticallyAfterDays'] == 0 - - conn.rotate_secret(SecretId=secret_name, - RotationRules={'AutomaticallyAfterDays': 42}) - - rotated_description = conn.describe_secret(SecretId=secret_name) - assert rotated_description - assert rotated_description['RotationEnabled'] is True - assert rotated_description['RotationRules']['AutomaticallyAfterDays'] == 42 - -@mock_secretsmanager -def test_rotate_secret_that_does_not_exist(): - conn = boto3.client('secretsmanager', 'us-west-2') - - with assert_raises(ClientError): - result = conn.rotate_secret(SecretId='i-dont-exist') - -@mock_secretsmanager -def test_rotate_secret_that_does_not_match(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - conn.create_secret(Name='test-secret', - SecretString='foosecret') - - with assert_raises(ClientError): - result = conn.rotate_secret(SecretId='i-dont-match') - -@mock_secretsmanager -def test_rotate_secret_client_request_token_too_short(): - # Test is intentionally empty. Boto3 catches too short ClientRequestToken - # and raises ParamValidationError before Moto can see it. - # test_server actually handles this error. 
- assert True - -@mock_secretsmanager -def test_rotate_secret_client_request_token_too_long(): - secret_name = 'test-secret' - conn = boto3.client('secretsmanager', region_name='us-west-2') - conn.create_secret(Name=secret_name, - SecretString='foosecret') - - client_request_token = ( - 'ED9F8B6C-85B7-446A-B7E4-38F2A3BEB13C-' - 'ED9F8B6C-85B7-446A-B7E4-38F2A3BEB13C' - ) - with assert_raises(ClientError): - result = conn.rotate_secret(SecretId=secret_name, - ClientRequestToken=client_request_token) - -@mock_secretsmanager -def test_rotate_secret_rotation_lambda_arn_too_long(): - secret_name = 'test-secret' - conn = boto3.client('secretsmanager', region_name='us-west-2') - conn.create_secret(Name=secret_name, - SecretString='foosecret') - - rotation_lambda_arn = '85B7-446A-B7E4' * 147 # == 2058 characters - with assert_raises(ClientError): - result = conn.rotate_secret(SecretId=secret_name, - RotationLambdaARN=rotation_lambda_arn) - -@mock_secretsmanager -def test_rotate_secret_rotation_period_zero(): - # Test is intentionally empty. Boto3 catches zero day rotation period - # and raises ParamValidationError before Moto can see it. - # test_server actually handles this error. 
- assert True - -@mock_secretsmanager -def test_rotate_secret_rotation_period_too_long(): - secret_name = 'test-secret' - conn = boto3.client('secretsmanager', region_name='us-west-2') - conn.create_secret(Name=secret_name, - SecretString='foosecret') - - rotation_rules = {'AutomaticallyAfterDays': 1001} - with assert_raises(ClientError): - result = conn.rotate_secret(SecretId=secret_name, - RotationRules=rotation_rules) +from __future__ import unicode_literals + +import boto3 + +from moto import mock_secretsmanager +from botocore.exceptions import ClientError +import sure # noqa +import string +import pytz +from datetime import datetime +import unittest +from nose.tools import assert_raises + +@mock_secretsmanager +def test_get_secret_value(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + create_secret = conn.create_secret(Name='java-util-test-password', + SecretString="foosecret") + result = conn.get_secret_value(SecretId='java-util-test-password') + assert result['SecretString'] == 'foosecret' + +@mock_secretsmanager +def test_get_secret_that_does_not_exist(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + with assert_raises(ClientError): + result = conn.get_secret_value(SecretId='i-dont-exist') + +@mock_secretsmanager +def test_get_secret_that_does_not_match(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + create_secret = conn.create_secret(Name='java-util-test-password', + SecretString="foosecret") + + with assert_raises(ClientError): + result = conn.get_secret_value(SecretId='i-dont-match') + + +@mock_secretsmanager +def test_get_secret_value_that_is_marked_deleted(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + conn.delete_secret(SecretId='test-secret') + + with assert_raises(ClientError): + result = conn.get_secret_value(SecretId='test-secret') + + +@mock_secretsmanager +def test_create_secret(): + 
conn = boto3.client('secretsmanager', region_name='us-east-1') + + result = conn.create_secret(Name='test-secret', SecretString="foosecret") + assert result['ARN'] + assert result['Name'] == 'test-secret' + secret = conn.get_secret_value(SecretId='test-secret') + assert secret['SecretString'] == 'foosecret' + +@mock_secretsmanager +def test_create_secret_with_tags(): + conn = boto3.client('secretsmanager', region_name='us-east-1') + secret_name = 'test-secret-with-tags' + + result = conn.create_secret( + Name=secret_name, + SecretString="foosecret", + Tags=[{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}] + ) + assert result['ARN'] + assert result['Name'] == secret_name + secret_value = conn.get_secret_value(SecretId=secret_name) + assert secret_value['SecretString'] == 'foosecret' + secret_details = conn.describe_secret(SecretId=secret_name) + assert secret_details['Tags'] == [{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}] + + +@mock_secretsmanager +def test_delete_secret(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + deleted_secret = conn.delete_secret(SecretId='test-secret') + + assert deleted_secret['ARN'] + assert deleted_secret['Name'] == 'test-secret' + assert deleted_secret['DeletionDate'] > datetime.fromtimestamp(1, pytz.utc) + + secret_details = conn.describe_secret(SecretId='test-secret') + + assert secret_details['ARN'] + assert secret_details['Name'] == 'test-secret' + assert secret_details['DeletedDate'] > datetime.fromtimestamp(1, pytz.utc) + + +@mock_secretsmanager +def test_delete_secret_force(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + result = conn.delete_secret(SecretId='test-secret', ForceDeleteWithoutRecovery=True) + + assert result['ARN'] + assert result['DeletionDate'] > datetime.fromtimestamp(1, pytz.utc) 
+ assert result['Name'] == 'test-secret' + + with assert_raises(ClientError): + result = conn.get_secret_value(SecretId='test-secret') + + +@mock_secretsmanager +def test_delete_secret_that_does_not_exist(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + with assert_raises(ClientError): + result = conn.delete_secret(SecretId='i-dont-exist', ForceDeleteWithoutRecovery=True) + + +@mock_secretsmanager +def test_delete_secret_fails_with_both_force_delete_flag_and_recovery_window_flag(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + with assert_raises(ClientError): + result = conn.delete_secret(SecretId='test-secret', RecoveryWindowInDays=1, ForceDeleteWithoutRecovery=True) + + +@mock_secretsmanager +def test_delete_secret_recovery_window_too_short(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + with assert_raises(ClientError): + result = conn.delete_secret(SecretId='test-secret', RecoveryWindowInDays=6) + + +@mock_secretsmanager +def test_delete_secret_recovery_window_too_long(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + with assert_raises(ClientError): + result = conn.delete_secret(SecretId='test-secret', RecoveryWindowInDays=31) + + +@mock_secretsmanager +def test_delete_secret_that_is_marked_deleted(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + deleted_secret = conn.delete_secret(SecretId='test-secret') + + with assert_raises(ClientError): + result = conn.delete_secret(SecretId='test-secret') + + +@mock_secretsmanager +def test_get_random_password_default_length(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + random_password = 
conn.get_random_password() + assert len(random_password['RandomPassword']) == 32 + +@mock_secretsmanager +def test_get_random_password_default_requirements(): + # When require_each_included_type, default true + conn = boto3.client('secretsmanager', region_name='us-west-2') + + random_password = conn.get_random_password() + # Should contain lowercase, uppercase, digit, special character + assert any(c.islower() for c in random_password['RandomPassword']) + assert any(c.isupper() for c in random_password['RandomPassword']) + assert any(c.isdigit() for c in random_password['RandomPassword']) + assert any(c in string.punctuation + for c in random_password['RandomPassword']) + +@mock_secretsmanager +def test_get_random_password_custom_length(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + random_password = conn.get_random_password(PasswordLength=50) + assert len(random_password['RandomPassword']) == 50 + +@mock_secretsmanager +def test_get_random_exclude_lowercase(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + random_password = conn.get_random_password(PasswordLength=55, + ExcludeLowercase=True) + assert any(c.islower() for c in random_password['RandomPassword']) == False + +@mock_secretsmanager +def test_get_random_exclude_uppercase(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + random_password = conn.get_random_password(PasswordLength=55, + ExcludeUppercase=True) + assert any(c.isupper() for c in random_password['RandomPassword']) == False + +@mock_secretsmanager +def test_get_random_exclude_characters_and_symbols(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + random_password = conn.get_random_password(PasswordLength=20, + ExcludeCharacters='xyzDje@?!.') + assert any(c in 'xyzDje@?!.' 
for c in random_password['RandomPassword']) == False + +@mock_secretsmanager +def test_get_random_exclude_numbers(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + random_password = conn.get_random_password(PasswordLength=100, + ExcludeNumbers=True) + assert any(c.isdigit() for c in random_password['RandomPassword']) == False + +@mock_secretsmanager +def test_get_random_exclude_punctuation(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + random_password = conn.get_random_password(PasswordLength=100, + ExcludePunctuation=True) + assert any(c in string.punctuation + for c in random_password['RandomPassword']) == False + +@mock_secretsmanager +def test_get_random_include_space_false(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + random_password = conn.get_random_password(PasswordLength=300) + assert any(c.isspace() for c in random_password['RandomPassword']) == False + +@mock_secretsmanager +def test_get_random_include_space_true(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + random_password = conn.get_random_password(PasswordLength=4, + IncludeSpace=True) + assert any(c.isspace() for c in random_password['RandomPassword']) == True + +@mock_secretsmanager +def test_get_random_require_each_included_type(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + random_password = conn.get_random_password(PasswordLength=4, + RequireEachIncludedType=True) + assert any(c in string.punctuation for c in random_password['RandomPassword']) == True + assert any(c in string.ascii_lowercase for c in random_password['RandomPassword']) == True + assert any(c in string.ascii_uppercase for c in random_password['RandomPassword']) == True + assert any(c in string.digits for c in random_password['RandomPassword']) == True + +@mock_secretsmanager +def test_get_random_too_short_password(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + with 
assert_raises(ClientError): + random_password = conn.get_random_password(PasswordLength=3) + +@mock_secretsmanager +def test_get_random_too_long_password(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + with assert_raises(Exception): + random_password = conn.get_random_password(PasswordLength=5555) + +@mock_secretsmanager +def test_describe_secret(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + conn.create_secret(Name='test-secret-2', + SecretString='barsecret') + + secret_description = conn.describe_secret(SecretId='test-secret') + secret_description_2 = conn.describe_secret(SecretId='test-secret-2') + + assert secret_description # Returned dict is not empty + assert secret_description['Name'] == ('test-secret') + assert secret_description['ARN'] != '' # Test arn not empty + assert secret_description_2['Name'] == ('test-secret-2') + assert secret_description_2['ARN'] != '' # Test arn not empty + +@mock_secretsmanager +def test_describe_secret_that_does_not_exist(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + with assert_raises(ClientError): + result = conn.get_secret_value(SecretId='i-dont-exist') + +@mock_secretsmanager +def test_describe_secret_that_does_not_match(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + with assert_raises(ClientError): + result = conn.get_secret_value(SecretId='i-dont-match') + + +@mock_secretsmanager +def test_list_secrets_empty(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + secrets = conn.list_secrets() + + assert secrets['SecretList'] == [] + + +@mock_secretsmanager +def test_list_secrets(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + conn.create_secret(Name='test-secret-2', + 
SecretString='barsecret', + Tags=[{ + 'Key': 'a', + 'Value': '1' + }]) + + secrets = conn.list_secrets() + + assert secrets['SecretList'][0]['ARN'] is not None + assert secrets['SecretList'][0]['Name'] == 'test-secret' + assert secrets['SecretList'][1]['ARN'] is not None + assert secrets['SecretList'][1]['Name'] == 'test-secret-2' + assert secrets['SecretList'][1]['Tags'] == [{ + 'Key': 'a', + 'Value': '1' + }] + + +@mock_secretsmanager +def test_restore_secret(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + conn.delete_secret(SecretId='test-secret') + + described_secret_before = conn.describe_secret(SecretId='test-secret') + assert described_secret_before['DeletedDate'] > datetime.fromtimestamp(1, pytz.utc) + + restored_secret = conn.restore_secret(SecretId='test-secret') + assert restored_secret['ARN'] + assert restored_secret['Name'] == 'test-secret' + + described_secret_after = conn.describe_secret(SecretId='test-secret') + assert 'DeletedDate' not in described_secret_after + + +@mock_secretsmanager +def test_restore_secret_that_is_not_deleted(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + restored_secret = conn.restore_secret(SecretId='test-secret') + assert restored_secret['ARN'] + assert restored_secret['Name'] == 'test-secret' + + +@mock_secretsmanager +def test_restore_secret_that_does_not_exist(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + with assert_raises(ClientError): + result = conn.restore_secret(SecretId='i-dont-exist') + + +@mock_secretsmanager +def test_rotate_secret(): + secret_name = 'test-secret' + conn = boto3.client('secretsmanager', region_name='us-west-2') + conn.create_secret(Name=secret_name, + SecretString='foosecret') + + rotated_secret = conn.rotate_secret(SecretId=secret_name) + + assert rotated_secret + assert 
rotated_secret['ARN'] != '' # Test arn not empty + assert rotated_secret['Name'] == secret_name + assert rotated_secret['VersionId'] != '' + +@mock_secretsmanager +def test_rotate_secret_enable_rotation(): + secret_name = 'test-secret' + conn = boto3.client('secretsmanager', region_name='us-west-2') + conn.create_secret(Name=secret_name, + SecretString='foosecret') + + initial_description = conn.describe_secret(SecretId=secret_name) + assert initial_description + assert initial_description['RotationEnabled'] is False + assert initial_description['RotationRules']['AutomaticallyAfterDays'] == 0 + + conn.rotate_secret(SecretId=secret_name, + RotationRules={'AutomaticallyAfterDays': 42}) + + rotated_description = conn.describe_secret(SecretId=secret_name) + assert rotated_description + assert rotated_description['RotationEnabled'] is True + assert rotated_description['RotationRules']['AutomaticallyAfterDays'] == 42 + + +@mock_secretsmanager +def test_rotate_secret_that_is_marked_deleted(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + conn.delete_secret(SecretId='test-secret') + + with assert_raises(ClientError): + result = conn.rotate_secret(SecretId='test-secret') + + +@mock_secretsmanager +def test_rotate_secret_that_does_not_exist(): + conn = boto3.client('secretsmanager', 'us-west-2') + + with assert_raises(ClientError): + result = conn.rotate_secret(SecretId='i-dont-exist') + +@mock_secretsmanager +def test_rotate_secret_that_does_not_match(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + with assert_raises(ClientError): + result = conn.rotate_secret(SecretId='i-dont-match') + +@mock_secretsmanager +def test_rotate_secret_client_request_token_too_short(): + # Test is intentionally empty. 
Boto3 catches too short ClientRequestToken + # and raises ParamValidationError before Moto can see it. + # test_server actually handles this error. + assert True + +@mock_secretsmanager +def test_rotate_secret_client_request_token_too_long(): + secret_name = 'test-secret' + conn = boto3.client('secretsmanager', region_name='us-west-2') + conn.create_secret(Name=secret_name, + SecretString='foosecret') + + client_request_token = ( + 'ED9F8B6C-85B7-446A-B7E4-38F2A3BEB13C-' + 'ED9F8B6C-85B7-446A-B7E4-38F2A3BEB13C' + ) + with assert_raises(ClientError): + result = conn.rotate_secret(SecretId=secret_name, + ClientRequestToken=client_request_token) + +@mock_secretsmanager +def test_rotate_secret_rotation_lambda_arn_too_long(): + secret_name = 'test-secret' + conn = boto3.client('secretsmanager', region_name='us-west-2') + conn.create_secret(Name=secret_name, + SecretString='foosecret') + + rotation_lambda_arn = '85B7-446A-B7E4' * 147 # == 2058 characters + with assert_raises(ClientError): + result = conn.rotate_secret(SecretId=secret_name, + RotationLambdaARN=rotation_lambda_arn) + +@mock_secretsmanager +def test_rotate_secret_rotation_period_zero(): + # Test is intentionally empty. Boto3 catches zero day rotation period + # and raises ParamValidationError before Moto can see it. + # test_server actually handles this error. 
+ assert True + +@mock_secretsmanager +def test_rotate_secret_rotation_period_too_long(): + secret_name = 'test-secret' + conn = boto3.client('secretsmanager', region_name='us-west-2') + conn.create_secret(Name=secret_name, + SecretString='foosecret') + + rotation_rules = {'AutomaticallyAfterDays': 1001} + with assert_raises(ClientError): + result = conn.rotate_secret(SecretId=secret_name, + RotationRules=rotation_rules) diff --git a/tests/test_secretsmanager/test_server.py b/tests/test_secretsmanager/test_server.py index 3365fe4de..d0f495f57 100644 --- a/tests/test_secretsmanager/test_server.py +++ b/tests/test_secretsmanager/test_server.py @@ -1,421 +1,446 @@ -from __future__ import unicode_literals - -import json -import sure # noqa - -import moto.server as server -from moto import mock_secretsmanager - -''' -Test the different server responses for secretsmanager -''' - - -@mock_secretsmanager -def test_get_secret_value(): - - backend = server.create_backend_app("secretsmanager") - test_client = backend.test_client() - - create_secret = test_client.post('/', - data={"Name": "test-secret", - "SecretString": "foo-secret"}, - headers={ - "X-Amz-Target": "secretsmanager.CreateSecret"}, - ) - get_secret = test_client.post('/', - data={"SecretId": "test-secret", - "VersionStage": "AWSCURRENT"}, - headers={ - "X-Amz-Target": "secretsmanager.GetSecretValue"}, - ) - - json_data = json.loads(get_secret.data.decode("utf-8")) - assert json_data['SecretString'] == 'foo-secret' - -@mock_secretsmanager -def test_get_secret_that_does_not_exist(): - - backend = server.create_backend_app("secretsmanager") - test_client = backend.test_client() - - get_secret = test_client.post('/', - data={"SecretId": "i-dont-exist", - "VersionStage": "AWSCURRENT"}, - headers={ - "X-Amz-Target": "secretsmanager.GetSecretValue"}, - ) - json_data = json.loads(get_secret.data.decode("utf-8")) - assert json_data['message'] == "Secrets Manager can't find the specified secret" - assert 
json_data['__type'] == 'ResourceNotFoundException' - -@mock_secretsmanager -def test_get_secret_that_does_not_match(): - backend = server.create_backend_app("secretsmanager") - test_client = backend.test_client() - - create_secret = test_client.post('/', - data={"Name": "test-secret", - "SecretString": "foo-secret"}, - headers={ - "X-Amz-Target": "secretsmanager.CreateSecret"}, - ) - get_secret = test_client.post('/', - data={"SecretId": "i-dont-match", - "VersionStage": "AWSCURRENT"}, - headers={ - "X-Amz-Target": "secretsmanager.GetSecretValue"}, - ) - json_data = json.loads(get_secret.data.decode("utf-8")) - assert json_data['message'] == "Secrets Manager can't find the specified secret" - assert json_data['__type'] == 'ResourceNotFoundException' - -@mock_secretsmanager -def test_create_secret(): - - backend = server.create_backend_app("secretsmanager") - test_client = backend.test_client() - - res = test_client.post('/', - data={"Name": "test-secret", - "SecretString": "foo-secret"}, - headers={ - "X-Amz-Target": "secretsmanager.CreateSecret"}, - ) - - json_data = json.loads(res.data.decode("utf-8")) - assert json_data['ARN'] == ( - 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad') - assert json_data['Name'] == 'test-secret' - -@mock_secretsmanager -def test_describe_secret(): - - backend = server.create_backend_app('secretsmanager') - test_client = backend.test_client() - - create_secret = test_client.post('/', - data={"Name": "test-secret", - "SecretString": "foosecret"}, - headers={ - "X-Amz-Target": "secretsmanager.CreateSecret" - }, - ) - describe_secret = test_client.post('/', - data={"SecretId": "test-secret"}, - headers={ - "X-Amz-Target": "secretsmanager.DescribeSecret" - }, - ) - - json_data = json.loads(describe_secret.data.decode("utf-8")) - assert json_data # Returned dict is not empty - assert json_data['ARN'] == ( - 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad' - ) - -@mock_secretsmanager -def 
test_describe_secret_that_does_not_exist(): - - backend = server.create_backend_app('secretsmanager') - test_client = backend.test_client() - - describe_secret = test_client.post('/', - data={"SecretId": "i-dont-exist"}, - headers={ - "X-Amz-Target": "secretsmanager.DescribeSecret" - }, - ) - - json_data = json.loads(describe_secret.data.decode("utf-8")) - assert json_data['message'] == "Secrets Manager can't find the specified secret" - assert json_data['__type'] == 'ResourceNotFoundException' - -@mock_secretsmanager -def test_describe_secret_that_does_not_match(): - - backend = server.create_backend_app('secretsmanager') - test_client = backend.test_client() - - create_secret = test_client.post('/', - data={"Name": "test-secret", - "SecretString": "foosecret"}, - headers={ - "X-Amz-Target": "secretsmanager.CreateSecret" - }, - ) - describe_secret = test_client.post('/', - data={"SecretId": "i-dont-match"}, - headers={ - "X-Amz-Target": "secretsmanager.DescribeSecret" - }, - ) - - json_data = json.loads(describe_secret.data.decode("utf-8")) - assert json_data['message'] == "Secrets Manager can't find the specified secret" - assert json_data['__type'] == 'ResourceNotFoundException' - -@mock_secretsmanager -def test_rotate_secret(): - backend = server.create_backend_app('secretsmanager') - test_client = backend.test_client() - - create_secret = test_client.post('/', - data={"Name": "test-secret", - "SecretString": "foosecret"}, - headers={ - "X-Amz-Target": "secretsmanager.CreateSecret" - }, - ) - - client_request_token = "EXAMPLE2-90ab-cdef-fedc-ba987SECRET2" - rotate_secret = test_client.post('/', - data={"SecretId": "test-secret", - "ClientRequestToken": client_request_token}, - headers={ - "X-Amz-Target": "secretsmanager.RotateSecret" - }, - ) - - json_data = json.loads(rotate_secret.data.decode("utf-8")) - assert json_data # Returned dict is not empty - assert json_data['ARN'] == ( - 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad' - ) - 
assert json_data['Name'] == 'test-secret' - assert json_data['VersionId'] == client_request_token - -# @mock_secretsmanager -# def test_rotate_secret_enable_rotation(): -# backend = server.create_backend_app('secretsmanager') -# test_client = backend.test_client() - -# create_secret = test_client.post( -# '/', -# data={ -# "Name": "test-secret", -# "SecretString": "foosecret" -# }, -# headers={ -# "X-Amz-Target": "secretsmanager.CreateSecret" -# }, -# ) - -# initial_description = test_client.post( -# '/', -# data={ -# "SecretId": "test-secret" -# }, -# headers={ -# "X-Amz-Target": "secretsmanager.DescribeSecret" -# }, -# ) - -# json_data = json.loads(initial_description.data.decode("utf-8")) -# assert json_data # Returned dict is not empty -# assert json_data['RotationEnabled'] is False -# assert json_data['RotationRules']['AutomaticallyAfterDays'] == 0 - -# rotate_secret = test_client.post( -# '/', -# data={ -# "SecretId": "test-secret", -# "RotationRules": {"AutomaticallyAfterDays": 42} -# }, -# headers={ -# "X-Amz-Target": "secretsmanager.RotateSecret" -# }, -# ) - -# rotated_description = test_client.post( -# '/', -# data={ -# "SecretId": "test-secret" -# }, -# headers={ -# "X-Amz-Target": "secretsmanager.DescribeSecret" -# }, -# ) - -# json_data = json.loads(rotated_description.data.decode("utf-8")) -# assert json_data # Returned dict is not empty -# assert json_data['RotationEnabled'] is True -# assert json_data['RotationRules']['AutomaticallyAfterDays'] == 42 - -@mock_secretsmanager -def test_rotate_secret_that_does_not_exist(): - backend = server.create_backend_app('secretsmanager') - test_client = backend.test_client() - - rotate_secret = test_client.post('/', - data={"SecretId": "i-dont-exist"}, - headers={ - "X-Amz-Target": "secretsmanager.RotateSecret" - }, - ) - - json_data = json.loads(rotate_secret.data.decode("utf-8")) - assert json_data['message'] == "Secrets Manager can't find the specified secret" - assert json_data['__type'] == 
'ResourceNotFoundException' - -@mock_secretsmanager -def test_rotate_secret_that_does_not_match(): - backend = server.create_backend_app('secretsmanager') - test_client = backend.test_client() - - create_secret = test_client.post('/', - data={"Name": "test-secret", - "SecretString": "foosecret"}, - headers={ - "X-Amz-Target": "secretsmanager.CreateSecret" - }, - ) - - rotate_secret = test_client.post('/', - data={"SecretId": "i-dont-match"}, - headers={ - "X-Amz-Target": "secretsmanager.RotateSecret" - }, - ) - - json_data = json.loads(rotate_secret.data.decode("utf-8")) - assert json_data['message'] == "Secrets Manager can't find the specified secret" - assert json_data['__type'] == 'ResourceNotFoundException' - -@mock_secretsmanager -def test_rotate_secret_client_request_token_too_short(): - backend = server.create_backend_app('secretsmanager') - test_client = backend.test_client() - - create_secret = test_client.post('/', - data={"Name": "test-secret", - "SecretString": "foosecret"}, - headers={ - "X-Amz-Target": "secretsmanager.CreateSecret" - }, - ) - - client_request_token = "ED9F8B6C-85B7-B7E4-38F2A3BEB13C" - rotate_secret = test_client.post('/', - data={"SecretId": "test-secret", - "ClientRequestToken": client_request_token}, - headers={ - "X-Amz-Target": "secretsmanager.RotateSecret" - }, - ) - - json_data = json.loads(rotate_secret.data.decode("utf-8")) - assert json_data['message'] == "ClientRequestToken must be 32-64 characters long." 
- assert json_data['__type'] == 'InvalidParameterException' - -@mock_secretsmanager -def test_rotate_secret_client_request_token_too_long(): - backend = server.create_backend_app('secretsmanager') - test_client = backend.test_client() - - create_secret = test_client.post('/', - data={"Name": "test-secret", - "SecretString": "foosecret"}, - headers={ - "X-Amz-Target": "secretsmanager.CreateSecret" - }, - ) - - client_request_token = ( - 'ED9F8B6C-85B7-446A-B7E4-38F2A3BEB13C-' - 'ED9F8B6C-85B7-446A-B7E4-38F2A3BEB13C' - ) - rotate_secret = test_client.post('/', - data={"SecretId": "test-secret", - "ClientRequestToken": client_request_token}, - headers={ - "X-Amz-Target": "secretsmanager.RotateSecret" - }, - ) - - json_data = json.loads(rotate_secret.data.decode("utf-8")) - assert json_data['message'] == "ClientRequestToken must be 32-64 characters long." - assert json_data['__type'] == 'InvalidParameterException' - -@mock_secretsmanager -def test_rotate_secret_rotation_lambda_arn_too_long(): - backend = server.create_backend_app('secretsmanager') - test_client = backend.test_client() - - create_secret = test_client.post('/', - data={"Name": "test-secret", - "SecretString": "foosecret"}, - headers={ - "X-Amz-Target": "secretsmanager.CreateSecret" - }, - ) - - rotation_lambda_arn = '85B7-446A-B7E4' * 147 # == 2058 characters - rotate_secret = test_client.post('/', - data={"SecretId": "test-secret", - "RotationLambdaARN": rotation_lambda_arn}, - headers={ - "X-Amz-Target": "secretsmanager.RotateSecret" - }, - ) - - json_data = json.loads(rotate_secret.data.decode("utf-8")) - assert json_data['message'] == "RotationLambdaARN must <= 2048 characters long." - assert json_data['__type'] == 'InvalidParameterException' - - -# -# The following tests should work, but fail on the embedded dict in -# RotationRules. The error message suggests a problem deeper in the code, which -# needs further investigation. 
-# - -# @mock_secretsmanager -# def test_rotate_secret_rotation_period_zero(): -# backend = server.create_backend_app('secretsmanager') -# test_client = backend.test_client() - -# create_secret = test_client.post('/', -# data={"Name": "test-secret", -# "SecretString": "foosecret"}, -# headers={ -# "X-Amz-Target": "secretsmanager.CreateSecret" -# }, -# ) - -# rotate_secret = test_client.post('/', -# data={"SecretId": "test-secret", -# "RotationRules": {"AutomaticallyAfterDays": 0}}, -# headers={ -# "X-Amz-Target": "secretsmanager.RotateSecret" -# }, -# ) - -# json_data = json.loads(rotate_secret.data.decode("utf-8")) -# assert json_data['message'] == "RotationRules.AutomaticallyAfterDays must be within 1-1000." -# assert json_data['__type'] == 'InvalidParameterException' - -# @mock_secretsmanager -# def test_rotate_secret_rotation_period_too_long(): -# backend = server.create_backend_app('secretsmanager') -# test_client = backend.test_client() - -# create_secret = test_client.post('/', -# data={"Name": "test-secret", -# "SecretString": "foosecret"}, -# headers={ -# "X-Amz-Target": "secretsmanager.CreateSecret" -# }, -# ) - -# rotate_secret = test_client.post('/', -# data={"SecretId": "test-secret", -# "RotationRules": {"AutomaticallyAfterDays": 1001}}, -# headers={ -# "X-Amz-Target": "secretsmanager.RotateSecret" -# }, -# ) - -# json_data = json.loads(rotate_secret.data.decode("utf-8")) -# assert json_data['message'] == "RotationRules.AutomaticallyAfterDays must be within 1-1000." 
-# assert json_data['__type'] == 'InvalidParameterException' +from __future__ import unicode_literals + +import json +import sure # noqa + +import moto.server as server +from moto import mock_secretsmanager + +''' +Test the different server responses for secretsmanager +''' + + +@mock_secretsmanager +def test_get_secret_value(): + + backend = server.create_backend_app("secretsmanager") + test_client = backend.test_client() + + create_secret = test_client.post('/', + data={"Name": "test-secret", + "SecretString": "foo-secret"}, + headers={ + "X-Amz-Target": "secretsmanager.CreateSecret"}, + ) + get_secret = test_client.post('/', + data={"SecretId": "test-secret", + "VersionStage": "AWSCURRENT"}, + headers={ + "X-Amz-Target": "secretsmanager.GetSecretValue"}, + ) + + json_data = json.loads(get_secret.data.decode("utf-8")) + assert json_data['SecretString'] == 'foo-secret' + +@mock_secretsmanager +def test_get_secret_that_does_not_exist(): + + backend = server.create_backend_app("secretsmanager") + test_client = backend.test_client() + + get_secret = test_client.post('/', + data={"SecretId": "i-dont-exist", + "VersionStage": "AWSCURRENT"}, + headers={ + "X-Amz-Target": "secretsmanager.GetSecretValue"}, + ) + json_data = json.loads(get_secret.data.decode("utf-8")) + assert json_data['message'] == "Secrets Manager can't find the specified secret" + assert json_data['__type'] == 'ResourceNotFoundException' + +@mock_secretsmanager +def test_get_secret_that_does_not_match(): + backend = server.create_backend_app("secretsmanager") + test_client = backend.test_client() + + create_secret = test_client.post('/', + data={"Name": "test-secret", + "SecretString": "foo-secret"}, + headers={ + "X-Amz-Target": "secretsmanager.CreateSecret"}, + ) + get_secret = test_client.post('/', + data={"SecretId": "i-dont-match", + "VersionStage": "AWSCURRENT"}, + headers={ + "X-Amz-Target": "secretsmanager.GetSecretValue"}, + ) + json_data = json.loads(get_secret.data.decode("utf-8")) + assert 
json_data['message'] == "Secrets Manager can't find the specified secret" + assert json_data['__type'] == 'ResourceNotFoundException' + +@mock_secretsmanager +def test_create_secret(): + + backend = server.create_backend_app("secretsmanager") + test_client = backend.test_client() + + res = test_client.post('/', + data={"Name": "test-secret", + "SecretString": "foo-secret"}, + headers={ + "X-Amz-Target": "secretsmanager.CreateSecret"}, + ) + res_2 = test_client.post('/', + data={"Name": "test-secret-2", + "SecretString": "bar-secret"}, + headers={ + "X-Amz-Target": "secretsmanager.CreateSecret"}, + ) + + json_data = json.loads(res.data.decode("utf-8")) + assert json_data['ARN'] != '' + assert json_data['Name'] == 'test-secret' + + json_data_2 = json.loads(res_2.data.decode("utf-8")) + assert json_data_2['ARN'] != '' + assert json_data_2['Name'] == 'test-secret-2' + +@mock_secretsmanager +def test_describe_secret(): + + backend = server.create_backend_app('secretsmanager') + test_client = backend.test_client() + + create_secret = test_client.post('/', + data={"Name": "test-secret", + "SecretString": "foosecret"}, + headers={ + "X-Amz-Target": "secretsmanager.CreateSecret" + }, + ) + describe_secret = test_client.post('/', + data={"SecretId": "test-secret"}, + headers={ + "X-Amz-Target": "secretsmanager.DescribeSecret" + }, + ) + + create_secret_2 = test_client.post('/', + data={"Name": "test-secret-2", + "SecretString": "barsecret"}, + headers={ + "X-Amz-Target": "secretsmanager.CreateSecret" + }, + ) + describe_secret_2 = test_client.post('/', + data={"SecretId": "test-secret-2"}, + headers={ + "X-Amz-Target": "secretsmanager.DescribeSecret" + }, + ) + + json_data = json.loads(describe_secret.data.decode("utf-8")) + assert json_data # Returned dict is not empty + assert json_data['ARN'] != '' + assert json_data['Name'] == 'test-secret' + + json_data_2 = json.loads(describe_secret_2.data.decode("utf-8")) + assert json_data_2 # Returned dict is not empty + assert 
json_data_2['ARN'] != '' + assert json_data_2['Name'] == 'test-secret-2' + +@mock_secretsmanager +def test_describe_secret_that_does_not_exist(): + + backend = server.create_backend_app('secretsmanager') + test_client = backend.test_client() + + describe_secret = test_client.post('/', + data={"SecretId": "i-dont-exist"}, + headers={ + "X-Amz-Target": "secretsmanager.DescribeSecret" + }, + ) + + json_data = json.loads(describe_secret.data.decode("utf-8")) + assert json_data['message'] == "Secrets Manager can't find the specified secret" + assert json_data['__type'] == 'ResourceNotFoundException' + +@mock_secretsmanager +def test_describe_secret_that_does_not_match(): + + backend = server.create_backend_app('secretsmanager') + test_client = backend.test_client() + + create_secret = test_client.post('/', + data={"Name": "test-secret", + "SecretString": "foosecret"}, + headers={ + "X-Amz-Target": "secretsmanager.CreateSecret" + }, + ) + describe_secret = test_client.post('/', + data={"SecretId": "i-dont-match"}, + headers={ + "X-Amz-Target": "secretsmanager.DescribeSecret" + }, + ) + + json_data = json.loads(describe_secret.data.decode("utf-8")) + assert json_data['message'] == "Secrets Manager can't find the specified secret" + assert json_data['__type'] == 'ResourceNotFoundException' + +@mock_secretsmanager +def test_rotate_secret(): + backend = server.create_backend_app('secretsmanager') + test_client = backend.test_client() + + create_secret = test_client.post('/', + data={"Name": "test-secret", + "SecretString": "foosecret"}, + headers={ + "X-Amz-Target": "secretsmanager.CreateSecret" + }, + ) + + client_request_token = "EXAMPLE2-90ab-cdef-fedc-ba987SECRET2" + rotate_secret = test_client.post('/', + data={"SecretId": "test-secret", + "ClientRequestToken": client_request_token}, + headers={ + "X-Amz-Target": "secretsmanager.RotateSecret" + }, + ) + + json_data = json.loads(rotate_secret.data.decode("utf-8")) + assert json_data # Returned dict is not empty + assert 
json_data['ARN'] != '' + assert json_data['Name'] == 'test-secret' + assert json_data['VersionId'] == client_request_token + +# @mock_secretsmanager +# def test_rotate_secret_enable_rotation(): +# backend = server.create_backend_app('secretsmanager') +# test_client = backend.test_client() + +# create_secret = test_client.post( +# '/', +# data={ +# "Name": "test-secret", +# "SecretString": "foosecret" +# }, +# headers={ +# "X-Amz-Target": "secretsmanager.CreateSecret" +# }, +# ) + +# initial_description = test_client.post( +# '/', +# data={ +# "SecretId": "test-secret" +# }, +# headers={ +# "X-Amz-Target": "secretsmanager.DescribeSecret" +# }, +# ) + +# json_data = json.loads(initial_description.data.decode("utf-8")) +# assert json_data # Returned dict is not empty +# assert json_data['RotationEnabled'] is False +# assert json_data['RotationRules']['AutomaticallyAfterDays'] == 0 + +# rotate_secret = test_client.post( +# '/', +# data={ +# "SecretId": "test-secret", +# "RotationRules": {"AutomaticallyAfterDays": 42} +# }, +# headers={ +# "X-Amz-Target": "secretsmanager.RotateSecret" +# }, +# ) + +# rotated_description = test_client.post( +# '/', +# data={ +# "SecretId": "test-secret" +# }, +# headers={ +# "X-Amz-Target": "secretsmanager.DescribeSecret" +# }, +# ) + +# json_data = json.loads(rotated_description.data.decode("utf-8")) +# assert json_data # Returned dict is not empty +# assert json_data['RotationEnabled'] is True +# assert json_data['RotationRules']['AutomaticallyAfterDays'] == 42 + +@mock_secretsmanager +def test_rotate_secret_that_does_not_exist(): + backend = server.create_backend_app('secretsmanager') + test_client = backend.test_client() + + rotate_secret = test_client.post('/', + data={"SecretId": "i-dont-exist"}, + headers={ + "X-Amz-Target": "secretsmanager.RotateSecret" + }, + ) + + json_data = json.loads(rotate_secret.data.decode("utf-8")) + assert json_data['message'] == "Secrets Manager can't find the specified secret" + assert 
json_data['__type'] == 'ResourceNotFoundException' + +@mock_secretsmanager +def test_rotate_secret_that_does_not_match(): + backend = server.create_backend_app('secretsmanager') + test_client = backend.test_client() + + create_secret = test_client.post('/', + data={"Name": "test-secret", + "SecretString": "foosecret"}, + headers={ + "X-Amz-Target": "secretsmanager.CreateSecret" + }, + ) + + rotate_secret = test_client.post('/', + data={"SecretId": "i-dont-match"}, + headers={ + "X-Amz-Target": "secretsmanager.RotateSecret" + }, + ) + + json_data = json.loads(rotate_secret.data.decode("utf-8")) + assert json_data['message'] == "Secrets Manager can't find the specified secret" + assert json_data['__type'] == 'ResourceNotFoundException' + +@mock_secretsmanager +def test_rotate_secret_client_request_token_too_short(): + backend = server.create_backend_app('secretsmanager') + test_client = backend.test_client() + + create_secret = test_client.post('/', + data={"Name": "test-secret", + "SecretString": "foosecret"}, + headers={ + "X-Amz-Target": "secretsmanager.CreateSecret" + }, + ) + + client_request_token = "ED9F8B6C-85B7-B7E4-38F2A3BEB13C" + rotate_secret = test_client.post('/', + data={"SecretId": "test-secret", + "ClientRequestToken": client_request_token}, + headers={ + "X-Amz-Target": "secretsmanager.RotateSecret" + }, + ) + + json_data = json.loads(rotate_secret.data.decode("utf-8")) + assert json_data['message'] == "ClientRequestToken must be 32-64 characters long." 
+ assert json_data['__type'] == 'InvalidParameterException' + +@mock_secretsmanager +def test_rotate_secret_client_request_token_too_long(): + backend = server.create_backend_app('secretsmanager') + test_client = backend.test_client() + + create_secret = test_client.post('/', + data={"Name": "test-secret", + "SecretString": "foosecret"}, + headers={ + "X-Amz-Target": "secretsmanager.CreateSecret" + }, + ) + + client_request_token = ( + 'ED9F8B6C-85B7-446A-B7E4-38F2A3BEB13C-' + 'ED9F8B6C-85B7-446A-B7E4-38F2A3BEB13C' + ) + rotate_secret = test_client.post('/', + data={"SecretId": "test-secret", + "ClientRequestToken": client_request_token}, + headers={ + "X-Amz-Target": "secretsmanager.RotateSecret" + }, + ) + + json_data = json.loads(rotate_secret.data.decode("utf-8")) + assert json_data['message'] == "ClientRequestToken must be 32-64 characters long." + assert json_data['__type'] == 'InvalidParameterException' + +@mock_secretsmanager +def test_rotate_secret_rotation_lambda_arn_too_long(): + backend = server.create_backend_app('secretsmanager') + test_client = backend.test_client() + + create_secret = test_client.post('/', + data={"Name": "test-secret", + "SecretString": "foosecret"}, + headers={ + "X-Amz-Target": "secretsmanager.CreateSecret" + }, + ) + + rotation_lambda_arn = '85B7-446A-B7E4' * 147 # == 2058 characters + rotate_secret = test_client.post('/', + data={"SecretId": "test-secret", + "RotationLambdaARN": rotation_lambda_arn}, + headers={ + "X-Amz-Target": "secretsmanager.RotateSecret" + }, + ) + + json_data = json.loads(rotate_secret.data.decode("utf-8")) + assert json_data['message'] == "RotationLambdaARN must <= 2048 characters long." + assert json_data['__type'] == 'InvalidParameterException' + + +# +# The following tests should work, but fail on the embedded dict in +# RotationRules. The error message suggests a problem deeper in the code, which +# needs further investigation. 
+# + +# @mock_secretsmanager +# def test_rotate_secret_rotation_period_zero(): +# backend = server.create_backend_app('secretsmanager') +# test_client = backend.test_client() + +# create_secret = test_client.post('/', +# data={"Name": "test-secret", +# "SecretString": "foosecret"}, +# headers={ +# "X-Amz-Target": "secretsmanager.CreateSecret" +# }, +# ) + +# rotate_secret = test_client.post('/', +# data={"SecretId": "test-secret", +# "RotationRules": {"AutomaticallyAfterDays": 0}}, +# headers={ +# "X-Amz-Target": "secretsmanager.RotateSecret" +# }, +# ) + +# json_data = json.loads(rotate_secret.data.decode("utf-8")) +# assert json_data['message'] == "RotationRules.AutomaticallyAfterDays must be within 1-1000." +# assert json_data['__type'] == 'InvalidParameterException' + +# @mock_secretsmanager +# def test_rotate_secret_rotation_period_too_long(): +# backend = server.create_backend_app('secretsmanager') +# test_client = backend.test_client() + +# create_secret = test_client.post('/', +# data={"Name": "test-secret", +# "SecretString": "foosecret"}, +# headers={ +# "X-Amz-Target": "secretsmanager.CreateSecret" +# }, +# ) + +# rotate_secret = test_client.post('/', +# data={"SecretId": "test-secret", +# "RotationRules": {"AutomaticallyAfterDays": 1001}}, +# headers={ +# "X-Amz-Target": "secretsmanager.RotateSecret" +# }, +# ) + +# json_data = json.loads(rotate_secret.data.decode("utf-8")) +# assert json_data['message'] == "RotationRules.AutomaticallyAfterDays must be within 1-1000." 
+# assert json_data['__type'] == 'InvalidParameterException' diff --git a/tests/test_ssm/test_ssm_boto3.py b/tests/test_ssm/test_ssm_boto3.py index 7f25ac61b..77d439d83 100644 --- a/tests/test_ssm/test_ssm_boto3.py +++ b/tests/test_ssm/test_ssm_boto3.py @@ -1,786 +1,800 @@ -from __future__ import unicode_literals - -import boto3 -import botocore.exceptions -import sure # noqa -import datetime -import uuid -import json - -from botocore.exceptions import ClientError -from nose.tools import assert_raises - -from moto import mock_ssm, mock_cloudformation - - -@mock_ssm -def test_delete_parameter(): - client = boto3.client('ssm', region_name='us-east-1') - - client.put_parameter( - Name='test', - Description='A test parameter', - Value='value', - Type='String') - - response = client.get_parameters(Names=['test']) - len(response['Parameters']).should.equal(1) - - client.delete_parameter(Name='test') - - response = client.get_parameters(Names=['test']) - len(response['Parameters']).should.equal(0) - - -@mock_ssm -def test_delete_parameters(): - client = boto3.client('ssm', region_name='us-east-1') - - client.put_parameter( - Name='test', - Description='A test parameter', - Value='value', - Type='String') - - response = client.get_parameters(Names=['test']) - len(response['Parameters']).should.equal(1) - - result = client.delete_parameters(Names=['test', 'invalid']) - len(result['DeletedParameters']).should.equal(1) - len(result['InvalidParameters']).should.equal(1) - - response = client.get_parameters(Names=['test']) - len(response['Parameters']).should.equal(0) - - -@mock_ssm -def test_get_parameters_by_path(): - client = boto3.client('ssm', region_name='us-east-1') - - client.put_parameter( - Name='/foo/name1', - Description='A test parameter', - Value='value1', - Type='String') - - client.put_parameter( - Name='/foo/name2', - Description='A test parameter', - Value='value2', - Type='String') - - client.put_parameter( - Name='/bar/name3', - Description='A test 
parameter', - Value='value3', - Type='String') - - client.put_parameter( - Name='/bar/name3/name4', - Description='A test parameter', - Value='value4', - Type='String') - - client.put_parameter( - Name='/baz/name1', - Description='A test parameter (list)', - Value='value1,value2,value3', - Type='StringList') - - client.put_parameter( - Name='/baz/name2', - Description='A test parameter', - Value='value1', - Type='String') - - client.put_parameter( - Name='/baz/pwd', - Description='A secure test parameter', - Value='my_secret', - Type='SecureString', - KeyId='alias/aws/ssm') - - client.put_parameter( - Name='foo', - Description='A test parameter', - Value='bar', - Type='String') - - client.put_parameter( - Name='baz', - Description='A test parameter', - Value='qux', - Type='String') - - response = client.get_parameters_by_path(Path='/', Recursive=False) - len(response['Parameters']).should.equal(2) - {p['Value'] for p in response['Parameters']}.should.equal( - set(['bar', 'qux']) - ) - - response = client.get_parameters_by_path(Path='/', Recursive=True) - len(response['Parameters']).should.equal(9) - - response = client.get_parameters_by_path(Path='/foo') - len(response['Parameters']).should.equal(2) - {p['Value'] for p in response['Parameters']}.should.equal( - set(['value1', 'value2']) - ) - - response = client.get_parameters_by_path(Path='/bar', Recursive=False) - len(response['Parameters']).should.equal(1) - response['Parameters'][0]['Value'].should.equal('value3') - - response = client.get_parameters_by_path(Path='/bar', Recursive=True) - len(response['Parameters']).should.equal(2) - {p['Value'] for p in response['Parameters']}.should.equal( - set(['value3', 'value4']) - ) - - response = client.get_parameters_by_path(Path='/baz') - len(response['Parameters']).should.equal(3) - - filters = [{ - 'Key': 'Type', - 'Option': 'Equals', - 'Values': ['StringList'], - }] - response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters) - 
len(response['Parameters']).should.equal(1) - {p['Name'] for p in response['Parameters']}.should.equal( - set(['/baz/name1']) - ) - - # note: 'Option' is optional (default: 'Equals') - filters = [{ - 'Key': 'Type', - 'Values': ['StringList'], - }] - response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters) - len(response['Parameters']).should.equal(1) - {p['Name'] for p in response['Parameters']}.should.equal( - set(['/baz/name1']) - ) - - filters = [{ - 'Key': 'Type', - 'Option': 'Equals', - 'Values': ['String'], - }] - response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters) - len(response['Parameters']).should.equal(1) - {p['Name'] for p in response['Parameters']}.should.equal( - set(['/baz/name2']) - ) - - filters = [{ - 'Key': 'Type', - 'Option': 'Equals', - 'Values': ['String', 'SecureString'], - }] - response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters) - len(response['Parameters']).should.equal(2) - {p['Name'] for p in response['Parameters']}.should.equal( - set(['/baz/name2', '/baz/pwd']) - ) - - filters = [{ - 'Key': 'Type', - 'Option': 'BeginsWith', - 'Values': ['String'], - }] - response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters) - len(response['Parameters']).should.equal(2) - {p['Name'] for p in response['Parameters']}.should.equal( - set(['/baz/name1', '/baz/name2']) - ) - - filters = [{ - 'Key': 'KeyId', - 'Option': 'Equals', - 'Values': ['alias/aws/ssm'], - }] - response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters) - len(response['Parameters']).should.equal(1) - {p['Name'] for p in response['Parameters']}.should.equal( - set(['/baz/pwd']) - ) - - -@mock_ssm -def test_put_parameter(): - client = boto3.client('ssm', region_name='us-east-1') - - response = client.put_parameter( - Name='test', - Description='A test parameter', - Value='value', - Type='String') - - response['Version'].should.equal(1) - - response = 
client.get_parameters( - Names=[ - 'test' - ], - WithDecryption=False) - - len(response['Parameters']).should.equal(1) - response['Parameters'][0]['Name'].should.equal('test') - response['Parameters'][0]['Value'].should.equal('value') - response['Parameters'][0]['Type'].should.equal('String') - response['Parameters'][0]['Version'].should.equal(1) - - try: - client.put_parameter( - Name='test', - Description='desc 2', - Value='value 2', - Type='String') - raise RuntimeError('Should fail') - except botocore.exceptions.ClientError as err: - err.operation_name.should.equal('PutParameter') - err.response['Error']['Message'].should.equal('Parameter test already exists.') - - response = client.get_parameters( - Names=[ - 'test' - ], - WithDecryption=False) - - # without overwrite nothing change - len(response['Parameters']).should.equal(1) - response['Parameters'][0]['Name'].should.equal('test') - response['Parameters'][0]['Value'].should.equal('value') - response['Parameters'][0]['Type'].should.equal('String') - response['Parameters'][0]['Version'].should.equal(1) - - response = client.put_parameter( - Name='test', - Description='desc 3', - Value='value 3', - Type='String', - Overwrite=True) - - response['Version'].should.equal(2) - - response = client.get_parameters( - Names=[ - 'test' - ], - WithDecryption=False) - - # without overwrite nothing change - len(response['Parameters']).should.equal(1) - response['Parameters'][0]['Name'].should.equal('test') - response['Parameters'][0]['Value'].should.equal('value 3') - response['Parameters'][0]['Type'].should.equal('String') - response['Parameters'][0]['Version'].should.equal(2) - - -@mock_ssm -def test_get_parameter(): - client = boto3.client('ssm', region_name='us-east-1') - - client.put_parameter( - Name='test', - Description='A test parameter', - Value='value', - Type='String') - - response = client.get_parameter( - Name='test', - WithDecryption=False) - - response['Parameter']['Name'].should.equal('test') - 
response['Parameter']['Value'].should.equal('value') - response['Parameter']['Type'].should.equal('String') - - -@mock_ssm -def test_get_nonexistant_parameter(): - client = boto3.client('ssm', region_name='us-east-1') - - try: - client.get_parameter( - Name='test_noexist', - WithDecryption=False) - raise RuntimeError('Should of failed') - except botocore.exceptions.ClientError as err: - err.operation_name.should.equal('GetParameter') - err.response['Error']['Message'].should.equal('Parameter test_noexist not found.') - - -@mock_ssm -def test_describe_parameters(): - client = boto3.client('ssm', region_name='us-east-1') - - client.put_parameter( - Name='test', - Description='A test parameter', - Value='value', - Type='String') - - response = client.describe_parameters() - - len(response['Parameters']).should.equal(1) - response['Parameters'][0]['Name'].should.equal('test') - response['Parameters'][0]['Type'].should.equal('String') - - -@mock_ssm -def test_describe_parameters_paging(): - client = boto3.client('ssm', region_name='us-east-1') - - for i in range(50): - client.put_parameter( - Name="param-%d" % i, - Value="value-%d" % i, - Type="String" - ) - - response = client.describe_parameters() - len(response['Parameters']).should.equal(10) - response['NextToken'].should.equal('10') - - response = client.describe_parameters(NextToken=response['NextToken']) - len(response['Parameters']).should.equal(10) - response['NextToken'].should.equal('20') - - response = client.describe_parameters(NextToken=response['NextToken']) - len(response['Parameters']).should.equal(10) - response['NextToken'].should.equal('30') - - response = client.describe_parameters(NextToken=response['NextToken']) - len(response['Parameters']).should.equal(10) - response['NextToken'].should.equal('40') - - response = client.describe_parameters(NextToken=response['NextToken']) - len(response['Parameters']).should.equal(10) - response['NextToken'].should.equal('50') - - response = 
client.describe_parameters(NextToken=response['NextToken']) - len(response['Parameters']).should.equal(0) - ''.should.equal(response.get('NextToken', '')) - - -@mock_ssm -def test_describe_parameters_filter_names(): - client = boto3.client('ssm', region_name='us-east-1') - - for i in range(50): - p = { - 'Name': "param-%d" % i, - 'Value': "value-%d" % i, - 'Type': "String" - } - if i % 5 == 0: - p['Type'] = 'SecureString' - p['KeyId'] = 'a key' - client.put_parameter(**p) - - response = client.describe_parameters(Filters=[ - { - 'Key': 'Name', - 'Values': ['param-22'] - }, - ]) - len(response['Parameters']).should.equal(1) - response['Parameters'][0]['Name'].should.equal('param-22') - response['Parameters'][0]['Type'].should.equal('String') - ''.should.equal(response.get('NextToken', '')) - - -@mock_ssm -def test_describe_parameters_filter_type(): - client = boto3.client('ssm', region_name='us-east-1') - - for i in range(50): - p = { - 'Name': "param-%d" % i, - 'Value': "value-%d" % i, - 'Type': "String" - } - if i % 5 == 0: - p['Type'] = 'SecureString' - p['KeyId'] = 'a key' - client.put_parameter(**p) - - response = client.describe_parameters(Filters=[ - { - 'Key': 'Type', - 'Values': ['SecureString'] - }, - ]) - len(response['Parameters']).should.equal(10) - response['Parameters'][0]['Type'].should.equal('SecureString') - '10'.should.equal(response.get('NextToken', '')) - - -@mock_ssm -def test_describe_parameters_filter_keyid(): - client = boto3.client('ssm', region_name='us-east-1') - - for i in range(50): - p = { - 'Name': "param-%d" % i, - 'Value': "value-%d" % i, - 'Type': "String" - } - if i % 5 == 0: - p['Type'] = 'SecureString' - p['KeyId'] = "key:%d" % i - client.put_parameter(**p) - - response = client.describe_parameters(Filters=[ - { - 'Key': 'KeyId', - 'Values': ['key:10'] - }, - ]) - len(response['Parameters']).should.equal(1) - response['Parameters'][0]['Name'].should.equal('param-10') - 
response['Parameters'][0]['Type'].should.equal('SecureString') - ''.should.equal(response.get('NextToken', '')) - - -@mock_ssm -def test_describe_parameters_attributes(): - client = boto3.client('ssm', region_name='us-east-1') - - client.put_parameter( - Name='aa', - Value='11', - Type='String', - Description='my description' - ) - - client.put_parameter( - Name='bb', - Value='22', - Type='String' - ) - - response = client.describe_parameters() - len(response['Parameters']).should.equal(2) - - response['Parameters'][0]['Description'].should.equal('my description') - response['Parameters'][0]['Version'].should.equal(1) - response['Parameters'][0]['LastModifiedDate'].should.be.a(datetime.date) - response['Parameters'][0]['LastModifiedUser'].should.equal('N/A') - - response['Parameters'][1].get('Description').should.be.none - response['Parameters'][1]['Version'].should.equal(1) - - -@mock_ssm -def test_get_parameter_invalid(): - client = client = boto3.client('ssm', region_name='us-east-1') - response = client.get_parameters( - Names=[ - 'invalid' - ], - WithDecryption=False) - - len(response['Parameters']).should.equal(0) - len(response['InvalidParameters']).should.equal(1) - response['InvalidParameters'][0].should.equal('invalid') - - -@mock_ssm -def test_put_parameter_secure_default_kms(): - client = boto3.client('ssm', region_name='us-east-1') - - client.put_parameter( - Name='test', - Description='A test parameter', - Value='value', - Type='SecureString') - - response = client.get_parameters( - Names=[ - 'test' - ], - WithDecryption=False) - - len(response['Parameters']).should.equal(1) - response['Parameters'][0]['Name'].should.equal('test') - response['Parameters'][0]['Value'].should.equal('kms:default:value') - response['Parameters'][0]['Type'].should.equal('SecureString') - - response = client.get_parameters( - Names=[ - 'test' - ], - WithDecryption=True) - - len(response['Parameters']).should.equal(1) - response['Parameters'][0]['Name'].should.equal('test') 
- response['Parameters'][0]['Value'].should.equal('value') - response['Parameters'][0]['Type'].should.equal('SecureString') - - -@mock_ssm -def test_put_parameter_secure_custom_kms(): - client = boto3.client('ssm', region_name='us-east-1') - - client.put_parameter( - Name='test', - Description='A test parameter', - Value='value', - Type='SecureString', - KeyId='foo') - - response = client.get_parameters( - Names=[ - 'test' - ], - WithDecryption=False) - - len(response['Parameters']).should.equal(1) - response['Parameters'][0]['Name'].should.equal('test') - response['Parameters'][0]['Value'].should.equal('kms:foo:value') - response['Parameters'][0]['Type'].should.equal('SecureString') - - response = client.get_parameters( - Names=[ - 'test' - ], - WithDecryption=True) - - len(response['Parameters']).should.equal(1) - response['Parameters'][0]['Name'].should.equal('test') - response['Parameters'][0]['Value'].should.equal('value') - response['Parameters'][0]['Type'].should.equal('SecureString') - - -@mock_ssm -def test_add_remove_list_tags_for_resource(): - client = boto3.client('ssm', region_name='us-east-1') - - client.add_tags_to_resource( - ResourceId='test', - ResourceType='Parameter', - Tags=[{'Key': 'test-key', 'Value': 'test-value'}] - ) - - response = client.list_tags_for_resource( - ResourceId='test', - ResourceType='Parameter' - ) - len(response['TagList']).should.equal(1) - response['TagList'][0]['Key'].should.equal('test-key') - response['TagList'][0]['Value'].should.equal('test-value') - - client.remove_tags_from_resource( - ResourceId='test', - ResourceType='Parameter', - TagKeys=['test-key'] - ) - - response = client.list_tags_for_resource( - ResourceId='test', - ResourceType='Parameter' - ) - len(response['TagList']).should.equal(0) - - -@mock_ssm -def test_send_command(): - ssm_document = 'AWS-RunShellScript' - params = {'commands': ['#!/bin/bash\necho \'hello world\'']} - - client = boto3.client('ssm', region_name='us-east-1') - # note the timeout 
is determined server side, so this is a simpler check. - before = datetime.datetime.now() - - response = client.send_command( - InstanceIds=['i-123456'], - DocumentName=ssm_document, - Parameters=params, - OutputS3Region='us-east-2', - OutputS3BucketName='the-bucket', - OutputS3KeyPrefix='pref' - ) - cmd = response['Command'] - - cmd['CommandId'].should_not.be(None) - cmd['DocumentName'].should.equal(ssm_document) - cmd['Parameters'].should.equal(params) - - cmd['OutputS3Region'].should.equal('us-east-2') - cmd['OutputS3BucketName'].should.equal('the-bucket') - cmd['OutputS3KeyPrefix'].should.equal('pref') - - cmd['ExpiresAfter'].should.be.greater_than(before) - - # test sending a command without any optional parameters - response = client.send_command( - DocumentName=ssm_document) - - cmd = response['Command'] - - cmd['CommandId'].should_not.be(None) - cmd['DocumentName'].should.equal(ssm_document) - - -@mock_ssm -def test_list_commands(): - client = boto3.client('ssm', region_name='us-east-1') - - ssm_document = 'AWS-RunShellScript' - params = {'commands': ['#!/bin/bash\necho \'hello world\'']} - - response = client.send_command( - InstanceIds=['i-123456'], - DocumentName=ssm_document, - Parameters=params, - OutputS3Region='us-east-2', - OutputS3BucketName='the-bucket', - OutputS3KeyPrefix='pref') - - cmd = response['Command'] - cmd_id = cmd['CommandId'] - - # get the command by id - response = client.list_commands( - CommandId=cmd_id) - - cmds = response['Commands'] - len(cmds).should.equal(1) - cmds[0]['CommandId'].should.equal(cmd_id) - - # add another command with the same instance id to test listing by - # instance id - client.send_command( - InstanceIds=['i-123456'], - DocumentName=ssm_document) - - response = client.list_commands( - InstanceId='i-123456') - - cmds = response['Commands'] - len(cmds).should.equal(2) - - for cmd in cmds: - cmd['InstanceIds'].should.contain('i-123456') - - # test the error case for an invalid command id - with 
assert_raises(ClientError): - response = client.list_commands( - CommandId=str(uuid.uuid4())) - -@mock_ssm -def test_get_command_invocation(): - client = boto3.client('ssm', region_name='us-east-1') - - ssm_document = 'AWS-RunShellScript' - params = {'commands': ['#!/bin/bash\necho \'hello world\'']} - - response = client.send_command( - InstanceIds=['i-123456', 'i-234567', 'i-345678'], - DocumentName=ssm_document, - Parameters=params, - OutputS3Region='us-east-2', - OutputS3BucketName='the-bucket', - OutputS3KeyPrefix='pref') - - cmd = response['Command'] - cmd_id = cmd['CommandId'] - - instance_id = 'i-345678' - invocation_response = client.get_command_invocation( - CommandId=cmd_id, - InstanceId=instance_id, - PluginName='aws:runShellScript') - - invocation_response['CommandId'].should.equal(cmd_id) - invocation_response['InstanceId'].should.equal(instance_id) - - # test the error case for an invalid instance id - with assert_raises(ClientError): - invocation_response = client.get_command_invocation( - CommandId=cmd_id, - InstanceId='i-FAKE') - - # test the error case for an invalid plugin name - with assert_raises(ClientError): - invocation_response = client.get_command_invocation( - CommandId=cmd_id, - InstanceId=instance_id, - PluginName='FAKE') - -@mock_ssm -@mock_cloudformation -def test_get_command_invocations_from_stack(): - stack_template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Description": "Test Stack", - "Resources": { - "EC2Instance1": { - "Type": "AWS::EC2::Instance", - "Properties": { - "ImageId": "ami-test-image-id", - "KeyName": "test", - "InstanceType": "t2.micro", - "Tags": [ - { - "Key": "Test Description", - "Value": "Test tag" - }, - { - "Key": "Test Name", - "Value": "Name tag for tests" - } - ] - } - } - }, - "Outputs": { - "test": { - "Description": "Test Output", - "Value": "Test output value", - "Export": { - "Name": "Test value to export" - } - }, - "PublicIP": { - "Value": "Test public ip" - } - } - } - - 
cloudformation_client = boto3.client( - 'cloudformation', - region_name='us-east-1') - - stack_template_str = json.dumps(stack_template) - - response = cloudformation_client.create_stack( - StackName='test_stack', - TemplateBody=stack_template_str, - Capabilities=('CAPABILITY_IAM', )) - - client = boto3.client('ssm', region_name='us-east-1') - - ssm_document = 'AWS-RunShellScript' - params = {'commands': ['#!/bin/bash\necho \'hello world\'']} - - response = client.send_command( - Targets=[{ - 'Key': 'tag:aws:cloudformation:stack-name', - 'Values': ('test_stack', )}], - DocumentName=ssm_document, - Parameters=params, - OutputS3Region='us-east-2', - OutputS3BucketName='the-bucket', - OutputS3KeyPrefix='pref') - - cmd = response['Command'] - cmd_id = cmd['CommandId'] - instance_ids = cmd['InstanceIds'] - - invocation_response = client.get_command_invocation( - CommandId=cmd_id, - InstanceId=instance_ids[0], - PluginName='aws:runShellScript') +from __future__ import unicode_literals + +import boto3 +import botocore.exceptions +import sure # noqa +import datetime +import uuid +import json + +from botocore.exceptions import ClientError +from nose.tools import assert_raises + +from moto import mock_ssm, mock_cloudformation + + +@mock_ssm +def test_delete_parameter(): + client = boto3.client('ssm', region_name='us-east-1') + + client.put_parameter( + Name='test', + Description='A test parameter', + Value='value', + Type='String') + + response = client.get_parameters(Names=['test']) + len(response['Parameters']).should.equal(1) + + client.delete_parameter(Name='test') + + response = client.get_parameters(Names=['test']) + len(response['Parameters']).should.equal(0) + + +@mock_ssm +def test_delete_parameters(): + client = boto3.client('ssm', region_name='us-east-1') + + client.put_parameter( + Name='test', + Description='A test parameter', + Value='value', + Type='String') + + response = client.get_parameters(Names=['test']) + len(response['Parameters']).should.equal(1) + + 
result = client.delete_parameters(Names=['test', 'invalid']) + len(result['DeletedParameters']).should.equal(1) + len(result['InvalidParameters']).should.equal(1) + + response = client.get_parameters(Names=['test']) + len(response['Parameters']).should.equal(0) + + +@mock_ssm +def test_get_parameters_by_path(): + client = boto3.client('ssm', region_name='us-east-1') + + client.put_parameter( + Name='/foo/name1', + Description='A test parameter', + Value='value1', + Type='String') + + client.put_parameter( + Name='/foo/name2', + Description='A test parameter', + Value='value2', + Type='String') + + client.put_parameter( + Name='/bar/name3', + Description='A test parameter', + Value='value3', + Type='String') + + client.put_parameter( + Name='/bar/name3/name4', + Description='A test parameter', + Value='value4', + Type='String') + + client.put_parameter( + Name='/baz/name1', + Description='A test parameter (list)', + Value='value1,value2,value3', + Type='StringList') + + client.put_parameter( + Name='/baz/name2', + Description='A test parameter', + Value='value1', + Type='String') + + client.put_parameter( + Name='/baz/pwd', + Description='A secure test parameter', + Value='my_secret', + Type='SecureString', + KeyId='alias/aws/ssm') + + client.put_parameter( + Name='foo', + Description='A test parameter', + Value='bar', + Type='String') + + client.put_parameter( + Name='baz', + Description='A test parameter', + Value='qux', + Type='String') + + response = client.get_parameters_by_path(Path='/', Recursive=False) + len(response['Parameters']).should.equal(2) + {p['Value'] for p in response['Parameters']}.should.equal( + set(['bar', 'qux']) + ) + + response = client.get_parameters_by_path(Path='/', Recursive=True) + len(response['Parameters']).should.equal(9) + + response = client.get_parameters_by_path(Path='/foo') + len(response['Parameters']).should.equal(2) + {p['Value'] for p in response['Parameters']}.should.equal( + set(['value1', 'value2']) + ) + + response = 
client.get_parameters_by_path(Path='/bar', Recursive=False) + len(response['Parameters']).should.equal(1) + response['Parameters'][0]['Value'].should.equal('value3') + + response = client.get_parameters_by_path(Path='/bar', Recursive=True) + len(response['Parameters']).should.equal(2) + {p['Value'] for p in response['Parameters']}.should.equal( + set(['value3', 'value4']) + ) + + response = client.get_parameters_by_path(Path='/baz') + len(response['Parameters']).should.equal(3) + + filters = [{ + 'Key': 'Type', + 'Option': 'Equals', + 'Values': ['StringList'], + }] + response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters) + len(response['Parameters']).should.equal(1) + {p['Name'] for p in response['Parameters']}.should.equal( + set(['/baz/name1']) + ) + + # note: 'Option' is optional (default: 'Equals') + filters = [{ + 'Key': 'Type', + 'Values': ['StringList'], + }] + response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters) + len(response['Parameters']).should.equal(1) + {p['Name'] for p in response['Parameters']}.should.equal( + set(['/baz/name1']) + ) + + filters = [{ + 'Key': 'Type', + 'Option': 'Equals', + 'Values': ['String'], + }] + response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters) + len(response['Parameters']).should.equal(1) + {p['Name'] for p in response['Parameters']}.should.equal( + set(['/baz/name2']) + ) + + filters = [{ + 'Key': 'Type', + 'Option': 'Equals', + 'Values': ['String', 'SecureString'], + }] + response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters) + len(response['Parameters']).should.equal(2) + {p['Name'] for p in response['Parameters']}.should.equal( + set(['/baz/name2', '/baz/pwd']) + ) + + filters = [{ + 'Key': 'Type', + 'Option': 'BeginsWith', + 'Values': ['String'], + }] + response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters) + len(response['Parameters']).should.equal(2) + {p['Name'] for p in 
response['Parameters']}.should.equal( + set(['/baz/name1', '/baz/name2']) + ) + + filters = [{ + 'Key': 'KeyId', + 'Option': 'Equals', + 'Values': ['alias/aws/ssm'], + }] + response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters) + len(response['Parameters']).should.equal(1) + {p['Name'] for p in response['Parameters']}.should.equal( + set(['/baz/pwd']) + ) + + +@mock_ssm +def test_put_parameter(): + client = boto3.client('ssm', region_name='us-east-1') + + response = client.put_parameter( + Name='test', + Description='A test parameter', + Value='value', + Type='String') + + response['Version'].should.equal(1) + + response = client.get_parameters( + Names=[ + 'test' + ], + WithDecryption=False) + + len(response['Parameters']).should.equal(1) + response['Parameters'][0]['Name'].should.equal('test') + response['Parameters'][0]['Value'].should.equal('value') + response['Parameters'][0]['Type'].should.equal('String') + response['Parameters'][0]['Version'].should.equal(1) + + try: + client.put_parameter( + Name='test', + Description='desc 2', + Value='value 2', + Type='String') + raise RuntimeError('Should fail') + except botocore.exceptions.ClientError as err: + err.operation_name.should.equal('PutParameter') + err.response['Error']['Message'].should.equal('Parameter test already exists.') + + response = client.get_parameters( + Names=[ + 'test' + ], + WithDecryption=False) + + # without overwrite nothing change + len(response['Parameters']).should.equal(1) + response['Parameters'][0]['Name'].should.equal('test') + response['Parameters'][0]['Value'].should.equal('value') + response['Parameters'][0]['Type'].should.equal('String') + response['Parameters'][0]['Version'].should.equal(1) + + response = client.put_parameter( + Name='test', + Description='desc 3', + Value='value 3', + Type='String', + Overwrite=True) + + response['Version'].should.equal(2) + + response = client.get_parameters( + Names=[ + 'test' + ], + WithDecryption=False) + + # 
without overwrite nothing change + len(response['Parameters']).should.equal(1) + response['Parameters'][0]['Name'].should.equal('test') + response['Parameters'][0]['Value'].should.equal('value 3') + response['Parameters'][0]['Type'].should.equal('String') + response['Parameters'][0]['Version'].should.equal(2) + +@mock_ssm +def test_put_parameter_china(): + client = boto3.client('ssm', region_name='cn-north-1') + + response = client.put_parameter( + Name='test', + Description='A test parameter', + Value='value', + Type='String') + + response['Version'].should.equal(1) + + +@mock_ssm +def test_get_parameter(): + client = boto3.client('ssm', region_name='us-east-1') + + client.put_parameter( + Name='test', + Description='A test parameter', + Value='value', + Type='String') + + response = client.get_parameter( + Name='test', + WithDecryption=False) + + response['Parameter']['Name'].should.equal('test') + response['Parameter']['Value'].should.equal('value') + response['Parameter']['Type'].should.equal('String') + + +@mock_ssm +def test_get_nonexistant_parameter(): + client = boto3.client('ssm', region_name='us-east-1') + + try: + client.get_parameter( + Name='test_noexist', + WithDecryption=False) + raise RuntimeError('Should of failed') + except botocore.exceptions.ClientError as err: + err.operation_name.should.equal('GetParameter') + err.response['Error']['Message'].should.equal('Parameter test_noexist not found.') + + +@mock_ssm +def test_describe_parameters(): + client = boto3.client('ssm', region_name='us-east-1') + + client.put_parameter( + Name='test', + Description='A test parameter', + Value='value', + Type='String', + AllowedPattern=r'.*') + + response = client.describe_parameters() + + len(response['Parameters']).should.equal(1) + response['Parameters'][0]['Name'].should.equal('test') + response['Parameters'][0]['Type'].should.equal('String') + response['Parameters'][0]['AllowedPattern'].should.equal(r'.*') + + +@mock_ssm +def 
test_describe_parameters_paging(): + client = boto3.client('ssm', region_name='us-east-1') + + for i in range(50): + client.put_parameter( + Name="param-%d" % i, + Value="value-%d" % i, + Type="String" + ) + + response = client.describe_parameters() + len(response['Parameters']).should.equal(10) + response['NextToken'].should.equal('10') + + response = client.describe_parameters(NextToken=response['NextToken']) + len(response['Parameters']).should.equal(10) + response['NextToken'].should.equal('20') + + response = client.describe_parameters(NextToken=response['NextToken']) + len(response['Parameters']).should.equal(10) + response['NextToken'].should.equal('30') + + response = client.describe_parameters(NextToken=response['NextToken']) + len(response['Parameters']).should.equal(10) + response['NextToken'].should.equal('40') + + response = client.describe_parameters(NextToken=response['NextToken']) + len(response['Parameters']).should.equal(10) + response['NextToken'].should.equal('50') + + response = client.describe_parameters(NextToken=response['NextToken']) + len(response['Parameters']).should.equal(0) + ''.should.equal(response.get('NextToken', '')) + + +@mock_ssm +def test_describe_parameters_filter_names(): + client = boto3.client('ssm', region_name='us-east-1') + + for i in range(50): + p = { + 'Name': "param-%d" % i, + 'Value': "value-%d" % i, + 'Type': "String" + } + if i % 5 == 0: + p['Type'] = 'SecureString' + p['KeyId'] = 'a key' + client.put_parameter(**p) + + response = client.describe_parameters(Filters=[ + { + 'Key': 'Name', + 'Values': ['param-22'] + }, + ]) + len(response['Parameters']).should.equal(1) + response['Parameters'][0]['Name'].should.equal('param-22') + response['Parameters'][0]['Type'].should.equal('String') + ''.should.equal(response.get('NextToken', '')) + + +@mock_ssm +def test_describe_parameters_filter_type(): + client = boto3.client('ssm', region_name='us-east-1') + + for i in range(50): + p = { + 'Name': "param-%d" % i, + 'Value': 
"value-%d" % i, + 'Type': "String" + } + if i % 5 == 0: + p['Type'] = 'SecureString' + p['KeyId'] = 'a key' + client.put_parameter(**p) + + response = client.describe_parameters(Filters=[ + { + 'Key': 'Type', + 'Values': ['SecureString'] + }, + ]) + len(response['Parameters']).should.equal(10) + response['Parameters'][0]['Type'].should.equal('SecureString') + '10'.should.equal(response.get('NextToken', '')) + + +@mock_ssm +def test_describe_parameters_filter_keyid(): + client = boto3.client('ssm', region_name='us-east-1') + + for i in range(50): + p = { + 'Name': "param-%d" % i, + 'Value': "value-%d" % i, + 'Type': "String" + } + if i % 5 == 0: + p['Type'] = 'SecureString' + p['KeyId'] = "key:%d" % i + client.put_parameter(**p) + + response = client.describe_parameters(Filters=[ + { + 'Key': 'KeyId', + 'Values': ['key:10'] + }, + ]) + len(response['Parameters']).should.equal(1) + response['Parameters'][0]['Name'].should.equal('param-10') + response['Parameters'][0]['Type'].should.equal('SecureString') + ''.should.equal(response.get('NextToken', '')) + + +@mock_ssm +def test_describe_parameters_attributes(): + client = boto3.client('ssm', region_name='us-east-1') + + client.put_parameter( + Name='aa', + Value='11', + Type='String', + Description='my description' + ) + + client.put_parameter( + Name='bb', + Value='22', + Type='String' + ) + + response = client.describe_parameters() + len(response['Parameters']).should.equal(2) + + response['Parameters'][0]['Description'].should.equal('my description') + response['Parameters'][0]['Version'].should.equal(1) + response['Parameters'][0]['LastModifiedDate'].should.be.a(datetime.date) + response['Parameters'][0]['LastModifiedUser'].should.equal('N/A') + + response['Parameters'][1].get('Description').should.be.none + response['Parameters'][1]['Version'].should.equal(1) + + +@mock_ssm +def test_get_parameter_invalid(): + client = boto3.client('ssm', region_name='us-east-1') + response = client.get_parameters( + 
Names=[ + 'invalid' + ], + WithDecryption=False) + + len(response['Parameters']).should.equal(0) + len(response['InvalidParameters']).should.equal(1) + response['InvalidParameters'][0].should.equal('invalid') + + +@mock_ssm +def test_put_parameter_secure_default_kms(): + client = boto3.client('ssm', region_name='us-east-1') + + client.put_parameter( + Name='test', + Description='A test parameter', + Value='value', + Type='SecureString') + + response = client.get_parameters( + Names=[ + 'test' + ], + WithDecryption=False) + + len(response['Parameters']).should.equal(1) + response['Parameters'][0]['Name'].should.equal('test') + response['Parameters'][0]['Value'].should.equal('kms:default:value') + response['Parameters'][0]['Type'].should.equal('SecureString') + + response = client.get_parameters( + Names=[ + 'test' + ], + WithDecryption=True) + + len(response['Parameters']).should.equal(1) + response['Parameters'][0]['Name'].should.equal('test') + response['Parameters'][0]['Value'].should.equal('value') + response['Parameters'][0]['Type'].should.equal('SecureString') + + +@mock_ssm +def test_put_parameter_secure_custom_kms(): + client = boto3.client('ssm', region_name='us-east-1') + + client.put_parameter( + Name='test', + Description='A test parameter', + Value='value', + Type='SecureString', + KeyId='foo') + + response = client.get_parameters( + Names=[ + 'test' + ], + WithDecryption=False) + + len(response['Parameters']).should.equal(1) + response['Parameters'][0]['Name'].should.equal('test') + response['Parameters'][0]['Value'].should.equal('kms:foo:value') + response['Parameters'][0]['Type'].should.equal('SecureString') + + response = client.get_parameters( + Names=[ + 'test' + ], + WithDecryption=True) + + len(response['Parameters']).should.equal(1) + response['Parameters'][0]['Name'].should.equal('test') + response['Parameters'][0]['Value'].should.equal('value') + response['Parameters'][0]['Type'].should.equal('SecureString') + + +@mock_ssm +def 
test_add_remove_list_tags_for_resource(): + client = boto3.client('ssm', region_name='us-east-1') + + client.add_tags_to_resource( + ResourceId='test', + ResourceType='Parameter', + Tags=[{'Key': 'test-key', 'Value': 'test-value'}] + ) + + response = client.list_tags_for_resource( + ResourceId='test', + ResourceType='Parameter' + ) + len(response['TagList']).should.equal(1) + response['TagList'][0]['Key'].should.equal('test-key') + response['TagList'][0]['Value'].should.equal('test-value') + + client.remove_tags_from_resource( + ResourceId='test', + ResourceType='Parameter', + TagKeys=['test-key'] + ) + + response = client.list_tags_for_resource( + ResourceId='test', + ResourceType='Parameter' + ) + len(response['TagList']).should.equal(0) + + +@mock_ssm +def test_send_command(): + ssm_document = 'AWS-RunShellScript' + params = {'commands': ['#!/bin/bash\necho \'hello world\'']} + + client = boto3.client('ssm', region_name='us-east-1') + # note the timeout is determined server side, so this is a simpler check. 
+ before = datetime.datetime.now() + + response = client.send_command( + InstanceIds=['i-123456'], + DocumentName=ssm_document, + Parameters=params, + OutputS3Region='us-east-2', + OutputS3BucketName='the-bucket', + OutputS3KeyPrefix='pref' + ) + cmd = response['Command'] + + cmd['CommandId'].should_not.be(None) + cmd['DocumentName'].should.equal(ssm_document) + cmd['Parameters'].should.equal(params) + + cmd['OutputS3Region'].should.equal('us-east-2') + cmd['OutputS3BucketName'].should.equal('the-bucket') + cmd['OutputS3KeyPrefix'].should.equal('pref') + + cmd['ExpiresAfter'].should.be.greater_than(before) + + # test sending a command without any optional parameters + response = client.send_command( + DocumentName=ssm_document) + + cmd = response['Command'] + + cmd['CommandId'].should_not.be(None) + cmd['DocumentName'].should.equal(ssm_document) + + +@mock_ssm +def test_list_commands(): + client = boto3.client('ssm', region_name='us-east-1') + + ssm_document = 'AWS-RunShellScript' + params = {'commands': ['#!/bin/bash\necho \'hello world\'']} + + response = client.send_command( + InstanceIds=['i-123456'], + DocumentName=ssm_document, + Parameters=params, + OutputS3Region='us-east-2', + OutputS3BucketName='the-bucket', + OutputS3KeyPrefix='pref') + + cmd = response['Command'] + cmd_id = cmd['CommandId'] + + # get the command by id + response = client.list_commands( + CommandId=cmd_id) + + cmds = response['Commands'] + len(cmds).should.equal(1) + cmds[0]['CommandId'].should.equal(cmd_id) + + # add another command with the same instance id to test listing by + # instance id + client.send_command( + InstanceIds=['i-123456'], + DocumentName=ssm_document) + + response = client.list_commands( + InstanceId='i-123456') + + cmds = response['Commands'] + len(cmds).should.equal(2) + + for cmd in cmds: + cmd['InstanceIds'].should.contain('i-123456') + + # test the error case for an invalid command id + with assert_raises(ClientError): + response = client.list_commands( + 
CommandId=str(uuid.uuid4())) + +@mock_ssm +def test_get_command_invocation(): + client = boto3.client('ssm', region_name='us-east-1') + + ssm_document = 'AWS-RunShellScript' + params = {'commands': ['#!/bin/bash\necho \'hello world\'']} + + response = client.send_command( + InstanceIds=['i-123456', 'i-234567', 'i-345678'], + DocumentName=ssm_document, + Parameters=params, + OutputS3Region='us-east-2', + OutputS3BucketName='the-bucket', + OutputS3KeyPrefix='pref') + + cmd = response['Command'] + cmd_id = cmd['CommandId'] + + instance_id = 'i-345678' + invocation_response = client.get_command_invocation( + CommandId=cmd_id, + InstanceId=instance_id, + PluginName='aws:runShellScript') + + invocation_response['CommandId'].should.equal(cmd_id) + invocation_response['InstanceId'].should.equal(instance_id) + + # test the error case for an invalid instance id + with assert_raises(ClientError): + invocation_response = client.get_command_invocation( + CommandId=cmd_id, + InstanceId='i-FAKE') + + # test the error case for an invalid plugin name + with assert_raises(ClientError): + invocation_response = client.get_command_invocation( + CommandId=cmd_id, + InstanceId=instance_id, + PluginName='FAKE') + +@mock_ssm +@mock_cloudformation +def test_get_command_invocations_from_stack(): + stack_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Test Stack", + "Resources": { + "EC2Instance1": { + "Type": "AWS::EC2::Instance", + "Properties": { + "ImageId": "ami-test-image-id", + "KeyName": "test", + "InstanceType": "t2.micro", + "Tags": [ + { + "Key": "Test Description", + "Value": "Test tag" + }, + { + "Key": "Test Name", + "Value": "Name tag for tests" + } + ] + } + } + }, + "Outputs": { + "test": { + "Description": "Test Output", + "Value": "Test output value", + "Export": { + "Name": "Test value to export" + } + }, + "PublicIP": { + "Value": "Test public ip" + } + } + } + + cloudformation_client = boto3.client( + 'cloudformation', + 
region_name='us-east-1') + + stack_template_str = json.dumps(stack_template) + + response = cloudformation_client.create_stack( + StackName='test_stack', + TemplateBody=stack_template_str, + Capabilities=('CAPABILITY_IAM', )) + + client = boto3.client('ssm', region_name='us-east-1') + + ssm_document = 'AWS-RunShellScript' + params = {'commands': ['#!/bin/bash\necho \'hello world\'']} + + response = client.send_command( + Targets=[{ + 'Key': 'tag:aws:cloudformation:stack-name', + 'Values': ('test_stack', )}], + DocumentName=ssm_document, + Parameters=params, + OutputS3Region='us-east-2', + OutputS3BucketName='the-bucket', + OutputS3KeyPrefix='pref') + + cmd = response['Command'] + cmd_id = cmd['CommandId'] + instance_ids = cmd['InstanceIds'] + + invocation_response = client.get_command_invocation( + CommandId=cmd_id, + InstanceId=instance_ids[0], + PluginName='aws:runShellScript') diff --git a/tox.ini b/tox.ini index 7c5ed1ef7..18809b52f 100644 --- a/tox.ini +++ b/tox.ini @@ -1,14 +1,19 @@ -[tox] -envlist = py27, py36 - -[testenv] -deps = - -r{toxinidir}/requirements.txt - -r{toxinidir}/requirements-dev.txt -commands = - {envpython} setup.py test - nosetests {posargs} - -[flake8] -ignore = E128,E501 -exclude = moto/packages,dist +[tox] +envlist = py27, py36 + +[testenv] +setenv = + BOTO_CONFIG=/dev/null + AWS_SECRET_ACCESS_KEY=foobar_secret + AWS_ACCESS_KEY_ID=foobar_key + AWS_DEFAULT_REGION=us-east-1 +deps = + -r{toxinidir}/requirements.txt + -r{toxinidir}/requirements-dev.txt +commands = + {envpython} setup.py test + nosetests {posargs} + +[flake8] +ignore = E128,E501 +exclude = moto/packages,dist \ No newline at end of file