diff --git a/.gitignore b/.gitignore index 7f57e98e9..47e5efbe0 100644 --- a/.gitignore +++ b/.gitignore @@ -15,4 +15,5 @@ python_env .ropeproject/ .pytest_cache/ venv/ - +.python-version +.vscode/ \ No newline at end of file diff --git a/.travis.yml b/.travis.yml index 3a5de0fa2..5bc9779f3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,3 +1,4 @@ +dist: xenial language: python sudo: false services: @@ -5,26 +6,12 @@ services: python: - 2.7 - 3.6 + - 3.7 env: - TEST_SERVER_MODE=false - TEST_SERVER_MODE=true -# Due to incomplete Python 3.7 support on Travis CI ( -# https://github.com/travis-ci/travis-ci/issues/9815), -# using a matrix is necessary -matrix: - include: - - python: 3.7 - env: TEST_SERVER_MODE=false - dist: xenial - sudo: true - - python: 3.7 - env: TEST_SERVER_MODE=true - dist: xenial - sudo: true before_install: - export BOTO_CONFIG=/dev/null - - export AWS_SECRET_ACCESS_KEY=foobar_secret - - export AWS_ACCESS_KEY_ID=foobar_key install: # We build moto first so the docker container doesn't try to compile it as well, also note we don't use # -d for docker run so the logs show up in travis diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index 7c68c0e31..735af6002 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -827,25 +827,25 @@ - [ ] unlink_identity - [ ] update_identity_pool -## cognito-idp - 0% implemented +## cognito-idp - 34% implemented - [ ] add_custom_attributes -- [ ] admin_add_user_to_group +- [X] admin_add_user_to_group - [ ] admin_confirm_sign_up -- [ ] admin_create_user -- [ ] admin_delete_user +- [X] admin_create_user +- [X] admin_delete_user - [ ] admin_delete_user_attributes - [ ] admin_disable_provider_for_user - [X] admin_disable_user - [X] admin_enable_user - [ ] admin_forget_device - [ ] admin_get_device -- [ ] admin_get_user -- [ ] admin_initiate_auth +- [X] admin_get_user +- [X] admin_initiate_auth - [ ] admin_link_provider_for_user - [ ] admin_list_devices -- [ ] admin_list_groups_for_user +- [X] admin_list_groups_for_user - [ ] admin_list_user_auth_events -- [ ] admin_remove_user_from_group +- [X] admin_remove_user_from_group - [ ] admin_reset_user_password - [ ] admin_respond_to_auth_challenge - [ ] admin_set_user_mfa_preference @@ -855,37 +855,37 @@ - [ ] admin_update_user_attributes - [ ] admin_user_global_sign_out - [ ] associate_software_token -- [ ] change_password +- [X] change_password - [ ] confirm_device -- [ ] confirm_forgot_password +- [X] confirm_forgot_password - [ ] confirm_sign_up -- [ ] create_group -- [ ] create_identity_provider +- [X] create_group +- [X] create_identity_provider - [ ] create_resource_server - [ ] create_user_import_job -- [ ] create_user_pool -- [ ] create_user_pool_client -- [ ] create_user_pool_domain -- [ ] delete_group -- [ ] delete_identity_provider +- [X] create_user_pool +- [X] create_user_pool_client +- [X] create_user_pool_domain +- [X] delete_group +- [X] delete_identity_provider - [ ] delete_resource_server - [ ] delete_user - [ ] delete_user_attributes -- [ ] delete_user_pool -- [ ] delete_user_pool_client -- [ ] delete_user_pool_domain -- [ ] describe_identity_provider +- [X] delete_user_pool +- [X] delete_user_pool_client +- [X] delete_user_pool_domain +- [X] describe_identity_provider - [ ] describe_resource_server - [ ] describe_risk_configuration - [ ] describe_user_import_job -- [ ] describe_user_pool -- [ ] describe_user_pool_client -- [ ] describe_user_pool_domain +- [X] describe_user_pool +- [X] describe_user_pool_client +- [X] 
describe_user_pool_domain - [ ] forget_device - [ ] forgot_password - [ ] get_csv_header - [ ] get_device -- [ ] get_group +- [X] get_group - [ ] get_identity_provider_by_identifier - [ ] get_signing_certificate - [ ] get_ui_customization @@ -895,16 +895,16 @@ - [ ] global_sign_out - [ ] initiate_auth - [ ] list_devices -- [ ] list_groups -- [ ] list_identity_providers +- [X] list_groups +- [X] list_identity_providers - [ ] list_resource_servers - [ ] list_user_import_jobs -- [ ] list_user_pool_clients -- [ ] list_user_pools -- [ ] list_users -- [ ] list_users_in_group +- [X] list_user_pool_clients +- [X] list_user_pools +- [X] list_users +- [X] list_users_in_group - [ ] resend_confirmation_code -- [ ] respond_to_auth_challenge +- [X] respond_to_auth_challenge - [ ] set_risk_configuration - [ ] set_ui_customization - [ ] set_user_mfa_preference @@ -920,7 +920,7 @@ - [ ] update_resource_server - [ ] update_user_attributes - [ ] update_user_pool -- [ ] update_user_pool_client +- [X] update_user_pool_client - [ ] verify_software_token - [ ] verify_user_attribute @@ -2225,7 +2225,7 @@ - [X] create_policy - [X] create_policy_version - [X] create_role -- [ ] create_saml_provider +- [X] create_saml_provider - [ ] create_service_linked_role - [ ] create_service_specific_credential - [X] create_user @@ -2243,7 +2243,7 @@ - [X] delete_policy_version - [X] delete_role - [X] delete_role_policy -- [ ] delete_saml_provider +- [X] delete_saml_provider - [X] delete_server_certificate - [ ] delete_service_linked_role - [ ] delete_service_specific_credential @@ -2257,7 +2257,7 @@ - [X] detach_user_policy - [X] enable_mfa_device - [ ] generate_credential_report -- [ ] get_access_key_last_used +- [X] get_access_key_last_used - [X] get_account_authorization_details - [ ] get_account_password_policy - [ ] get_account_summary @@ -2273,7 +2273,7 @@ - [X] get_policy_version - [X] get_role - [X] get_role_policy -- [ ] get_saml_provider +- [X] get_saml_provider - [X] get_server_certificate - [ ] get_service_linked_role_deletion_status - [ ] get_ssh_public_key @@ -2296,7 +2296,7 @@ - [X] list_policy_versions - [X] list_role_policies - [ ] list_roles -- [ ] list_saml_providers +- [X] list_saml_providers - [ ] list_server_certificates - [ ] list_service_specific_credentials - [ ] list_signing_certificates @@ -2323,7 +2323,7 @@ - [ ] update_open_id_connect_provider_thumbprint - [ ] update_role - [ ] update_role_description -- [ ] update_saml_provider +- [X] update_saml_provider - [ ] update_server_certificate - [ ] update_service_specific_credential - [ ] update_signing_certificate @@ -2376,11 +2376,11 @@ - [ ] unsubscribe_from_event - [ ] update_assessment_target -## iot - 30% implemented +## iot - 32% implemented - [ ] accept_certificate_transfer - [X] add_thing_to_thing_group - [ ] associate_targets_with_job -- [ ] attach_policy +- [X] attach_policy - [X] attach_principal_policy - [X] attach_thing_principal - [ ] cancel_certificate_transfer @@ -2429,7 +2429,7 @@ - [X] describe_thing_group - [ ] describe_thing_registration_task - [X] describe_thing_type -- [ ] detach_policy +- [X] detach_policy - [X] detach_principal_policy - [X] detach_thing_principal - [ ] disable_topic_rule diff --git a/Makefile b/Makefile index f224d7091..de08c6f74 100644 --- a/Makefile +++ b/Makefile @@ -19,6 +19,7 @@ test: lint rm -f .coverage rm -rf cover @nosetests -sv --with-coverage --cover-html ./tests/ $(TEST_EXCLUDE) + test_server: @TEST_SERVER_MODE=true nosetests -sv --with-coverage --cover-html ./tests/ diff --git a/README.md 
b/README.md index 791226d6b..d6e9f30a1 100644 --- a/README.md +++ b/README.md @@ -259,7 +259,7 @@ It uses flask, which isn't a default dependency. You can install the server 'extra' package with: ```python -pip install moto[server] +pip install "moto[server]" ``` You can then start it running a service: diff --git a/moto/__init__.py b/moto/__init__.py index dd3593d5d..e86c499a7 100644 --- a/moto/__init__.py +++ b/moto/__init__.py @@ -16,6 +16,7 @@ from .cognitoidp import mock_cognitoidp, mock_cognitoidp_deprecated # flake8: n from .datapipeline import mock_datapipeline, mock_datapipeline_deprecated # flake8: noqa from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated # flake8: noqa from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated # flake8: noqa +from .dynamodbstreams import mock_dynamodbstreams # flake8: noqa from .ec2 import mock_ec2, mock_ec2_deprecated # flake8: noqa from .ecr import mock_ecr, mock_ecr_deprecated # flake8: noqa from .ecs import mock_ecs, mock_ecs_deprecated # flake8: noqa diff --git a/moto/autoscaling/models.py b/moto/autoscaling/models.py index 0ebc4c465..27e81a87c 100644 --- a/moto/autoscaling/models.py +++ b/moto/autoscaling/models.py @@ -17,10 +17,12 @@ ASG_NAME_TAG = "aws:autoscaling:groupName" class InstanceState(object): - def __init__(self, instance, lifecycle_state="InService", health_status="Healthy"): + def __init__(self, instance, lifecycle_state="InService", + health_status="Healthy", protected_from_scale_in=False): self.instance = instance self.lifecycle_state = lifecycle_state self.health_status = health_status + self.protected_from_scale_in = protected_from_scale_in class FakeScalingPolicy(BaseModel): @@ -152,7 +154,8 @@ class FakeAutoScalingGroup(BaseModel): min_size, launch_config_name, vpc_zone_identifier, default_cooldown, health_check_period, health_check_type, load_balancers, target_group_arns, placement_group, termination_policies, - autoscaling_backend, tags): + autoscaling_backend, tags, + new_instances_protected_from_scale_in=False): self.autoscaling_backend = autoscaling_backend self.name = name @@ -178,6 +181,7 @@ class FakeAutoScalingGroup(BaseModel): self.target_group_arns = target_group_arns self.placement_group = placement_group self.termination_policies = termination_policies + self.new_instances_protected_from_scale_in = new_instances_protected_from_scale_in self.suspended_processes = [] self.instance_states = [] @@ -210,6 +214,8 @@ class FakeAutoScalingGroup(BaseModel): placement_group=None, termination_policies=properties.get("TerminationPolicies", []), tags=properties.get("Tags", []), + new_instances_protected_from_scale_in=properties.get( + "NewInstancesProtectedFromScaleIn", False) ) return group @@ -238,7 +244,8 @@ class FakeAutoScalingGroup(BaseModel): def update(self, availability_zones, desired_capacity, max_size, min_size, launch_config_name, vpc_zone_identifier, default_cooldown, health_check_period, health_check_type, - placement_group, termination_policies): + placement_group, termination_policies, + new_instances_protected_from_scale_in=None): if availability_zones: self.availability_zones = availability_zones if max_size is not None: @@ -256,6 +263,8 @@ class FakeAutoScalingGroup(BaseModel): self.health_check_period = health_check_period if health_check_type is not None: self.health_check_type = health_check_type + if new_instances_protected_from_scale_in is not None: + self.new_instances_protected_from_scale_in = new_instances_protected_from_scale_in if desired_capacity is not None: 
self.set_desired_capacity(desired_capacity) @@ -280,12 +289,16 @@ class FakeAutoScalingGroup(BaseModel): else: # Need to remove some instances count_to_remove = curr_instance_count - self.desired_capacity - instances_to_remove = self.instance_states[:count_to_remove] - instance_ids_to_remove = [ - instance.instance.id for instance in instances_to_remove] - self.autoscaling_backend.ec2_backend.terminate_instances( - instance_ids_to_remove) - self.instance_states = self.instance_states[count_to_remove:] + instances_to_remove = [ # only remove unprotected + state for state in self.instance_states + if not state.protected_from_scale_in + ][:count_to_remove] + if instances_to_remove: # no-op when there are no unprotected instances to remove + instance_ids_to_remove = [ + instance.instance.id for instance in instances_to_remove] + self.autoscaling_backend.ec2_backend.terminate_instances( + instance_ids_to_remove) + self.instance_states = list(set(self.instance_states) - set(instances_to_remove)) def get_propagated_tags(self): propagated_tags = {} @@ -310,7 +323,10 @@ class FakeAutoScalingGroup(BaseModel): ) for instance in reservation.instances: instance.autoscaling_group = self - self.instance_states.append(InstanceState(instance)) + self.instance_states.append(InstanceState( + instance, + protected_from_scale_in=self.new_instances_protected_from_scale_in, + )) def append_target_groups(self, target_group_arns): append = [x for x in target_group_arns if x not in self.target_group_arns] @@ -372,7 +388,8 @@ class AutoScalingBackend(BaseBackend): default_cooldown, health_check_period, health_check_type, load_balancers, target_group_arns, placement_group, - termination_policies, tags): + termination_policies, tags, + new_instances_protected_from_scale_in=False): def make_int(value): return int(value) if value is not None else value @@ -403,6 +420,7 @@ class AutoScalingBackend(BaseBackend): termination_policies=termination_policies, autoscaling_backend=self, tags=tags, + new_instances_protected_from_scale_in=new_instances_protected_from_scale_in, ) self.autoscaling_groups[name] = group @@ -415,12 +433,14 @@ class AutoScalingBackend(BaseBackend): launch_config_name, vpc_zone_identifier, default_cooldown, health_check_period, health_check_type, placement_group, - termination_policies): + termination_policies, + new_instances_protected_from_scale_in=None): group = self.autoscaling_groups[name] group.update(availability_zones, desired_capacity, max_size, min_size, launch_config_name, vpc_zone_identifier, default_cooldown, health_check_period, health_check_type, - placement_group, termination_policies) + placement_group, termination_policies, + new_instances_protected_from_scale_in=new_instances_protected_from_scale_in) return group def describe_auto_scaling_groups(self, names): @@ -448,7 +468,13 @@ class AutoScalingBackend(BaseBackend): raise ResourceContentionError else: group.desired_capacity = original_size + len(instance_ids) - new_instances = [InstanceState(self.ec2_backend.get_instance(x)) for x in instance_ids] + new_instances = [ + InstanceState( + self.ec2_backend.get_instance(x), + protected_from_scale_in=group.new_instances_protected_from_scale_in, + ) + for x in instance_ids + ] for instance in new_instances: self.ec2_backend.create_tags([instance.instance.id], {ASG_NAME_TAG: group.name}) group.instance_states.extend(new_instances) @@ -626,6 +652,13 @@ class AutoScalingBackend(BaseBackend): group = self.autoscaling_groups[group_name] group.suspended_processes = scaling_processes or [] + def
set_instance_protection(self, group_name, instance_ids, protected_from_scale_in): + group = self.autoscaling_groups[group_name] + protected_instances = [ + x for x in group.instance_states if x.instance.id in instance_ids] + for instance in protected_instances: + instance.protected_from_scale_in = protected_from_scale_in + autoscaling_backends = {} for region, ec2_backend in ec2_backends.items(): diff --git a/moto/autoscaling/responses.py b/moto/autoscaling/responses.py index 5586c51dd..6a7913021 100644 --- a/moto/autoscaling/responses.py +++ b/moto/autoscaling/responses.py @@ -85,6 +85,8 @@ class AutoScalingResponse(BaseResponse): termination_policies=self._get_multi_param( 'TerminationPolicies.member'), tags=self._get_list_prefix('Tags.member'), + new_instances_protected_from_scale_in=self._get_bool_param( + 'NewInstancesProtectedFromScaleIn', False) ) template = self.response_template(CREATE_AUTOSCALING_GROUP_TEMPLATE) return template.render() @@ -192,6 +194,8 @@ class AutoScalingResponse(BaseResponse): placement_group=self._get_param('PlacementGroup'), termination_policies=self._get_multi_param( 'TerminationPolicies.member'), + new_instances_protected_from_scale_in=self._get_bool_param( + 'NewInstancesProtectedFromScaleIn', None) ) template = self.response_template(UPDATE_AUTOSCALING_GROUP_TEMPLATE) return template.render() @@ -290,6 +294,15 @@ class AutoScalingResponse(BaseResponse): template = self.response_template(SUSPEND_PROCESSES_TEMPLATE) return template.render() + def set_instance_protection(self): + group_name = self._get_param('AutoScalingGroupName') + instance_ids = self._get_multi_param('InstanceIds.member') + protected_from_scale_in = self._get_bool_param('ProtectedFromScaleIn') + self.autoscaling_backend.set_instance_protection( + group_name, instance_ids, protected_from_scale_in) + template = self.response_template(SET_INSTANCE_PROTECTION_TEMPLATE) + return template.render() + CREATE_LAUNCH_CONFIGURATION_TEMPLATE = """ @@ -490,6 +503,7 @@ DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE = """{{ instance_state.instance.id }} {{ group.launch_config_name }} {{ instance_state.lifecycle_state }} + {{ instance_state.protected_from_scale_in|string|lower }} {% endfor %} @@ -508,6 +522,15 @@ DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE = """ {% endif %} + {% if group.target_group_arns %} + + {% for target_group_arn in group.target_group_arns %} + {{ target_group_arn }} + {% endfor %} + + {% else %} + + {% endif %} {{ group.min_size }} {% if group.vpc_zone_identifier %} {{ group.vpc_zone_identifier }} @@ -530,6 +553,7 @@ DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE = """{{ group.placement_group }} {% endif %} + {{ group.new_instances_protected_from_scale_in|string|lower }} {% endfor %} @@ -565,6 +589,7 @@ DESCRIBE_AUTOSCALING_INSTANCES_TEMPLATE = """{{ instance_state.instance.id }} {{ instance_state.instance.autoscaling_group.launch_config_name }} {{ instance_state.lifecycle_state }} + {{ instance_state.protected_from_scale_in|string|lower }} {% endfor %} @@ -668,3 +693,10 @@ SET_INSTANCE_HEALTH_TEMPLATE = """ + + +{{ requestid }} + +"""
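The autoscaling changes above thread a single protection flag from CreateAutoScalingGroup down to the per-instance state that set_desired_capacity consults when scaling in. A minimal usage sketch against the mocked API, assuming moto's mock_autoscaling decorator; the AMI id, group name, and launch-configuration name are placeholders:

```python
import boto3
from moto import mock_autoscaling


@mock_autoscaling
def test_scale_in_protection():
    client = boto3.client("autoscaling", region_name="us-east-1")
    client.create_launch_configuration(
        LaunchConfigurationName="test-lc",
        ImageId="ami-12345678",  # placeholder; any id works under the mock
        InstanceType="t2.medium",
    )
    client.create_auto_scaling_group(
        AutoScalingGroupName="test-asg",
        LaunchConfigurationName="test-lc",
        AvailabilityZones=["us-east-1a"],
        MinSize=0,
        MaxSize=4,
        DesiredCapacity=2,
        NewInstancesProtectedFromScaleIn=True,  # the new flag parsed above
    )

    group = client.describe_auto_scaling_groups(
        AutoScalingGroupNames=["test-asg"])["AutoScalingGroups"][0]
    instance_ids = [i["InstanceId"] for i in group["Instances"]]
    assert all(i["ProtectedFromScaleIn"] for i in group["Instances"])

    # Unprotect one instance; scaling in should terminate only that one.
    client.set_instance_protection(
        AutoScalingGroupName="test-asg",
        InstanceIds=instance_ids[:1],
        ProtectedFromScaleIn=False,
    )
    client.update_auto_scaling_group(
        AutoScalingGroupName="test-asg", DesiredCapacity=1)

    remaining = client.describe_auto_scaling_groups(
        AutoScalingGroupNames=["test-asg"])["AutoScalingGroups"][0]["Instances"]
    # Order is not guaranteed after removal, so compare as sets.
    assert {i["InstanceId"] for i in remaining} == set(instance_ids[1:])
```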
diff --git a/moto/backends.py b/moto/backends.py index d95424385..1a333415e 100644 --- a/moto/backends.py +++ b/moto/backends.py @@ -12,6 +12,7 @@ from moto.core import moto_api_backends from moto.datapipeline import datapipeline_backends from moto.dynamodb import dynamodb_backends from moto.dynamodb2 import dynamodb_backends2 +from moto.dynamodbstreams import dynamodbstreams_backends from moto.ec2 import ec2_backends from moto.ecr import ecr_backends from moto.ecs import ecs_backends @@ -59,6 +60,7 @@ BACKENDS = { 'datapipeline': datapipeline_backends, 'dynamodb': dynamodb_backends, 'dynamodb2': dynamodb_backends2, + 'dynamodbstreams': dynamodbstreams_backends, 'ec2': ec2_backends, 'ecr': ecr_backends, 'ecs': ecs_backends, diff --git a/moto/batch/responses.py b/moto/batch/responses.py index e626b7d4c..7fb606184 100644 --- a/moto/batch/responses.py +++ b/moto/batch/responses.py @@ -27,7 +27,7 @@ class BatchResponse(BaseResponse): elif not hasattr(self, '_json'): try: self._json = json.loads(self.body) - except json.JSONDecodeError: + except ValueError: print() return self._json diff --git a/moto/cloudformation/models.py b/moto/cloudformation/models.py index e5ab7255d..c45c5d5fe 100644 --- a/moto/cloudformation/models.py +++ b/moto/cloudformation/models.py @@ -13,6 +13,7 @@ from .utils import ( generate_changeset_id, generate_stack_id, yaml_tag_constructor, + validate_template_cfn_lint, ) from .exceptions import ValidationError @@ -223,7 +224,11 @@ class CloudFormationBackend(BaseBackend): return list(stacks) def list_stacks(self): - return self.stacks.values() + return [ + v for v in self.stacks.values() + ] + [ + v for v in self.deleted_stacks.values() + ] def get_stack(self, name_or_stack_id): all_stacks = dict(self.deleted_stacks, **self.stacks) @@ -270,6 +275,9 @@ class CloudFormationBackend(BaseBackend): next_token = str(token + 100) if len(all_exports) > token + 100 else None return exports, next_token + def validate_template(self, template): + return validate_template_cfn_lint(template) + def _validate_export_uniqueness(self, stack): new_stack_export_names = [x.name for x in stack.exports] export_names = self.exports.keys() diff --git a/moto/cloudformation/responses.py b/moto/cloudformation/responses.py index a1295a20d..9e67e931a 100644 --- a/moto/cloudformation/responses.py +++ b/moto/cloudformation/responses.py @@ -1,6 +1,7 @@ from __future__ import unicode_literals import json +import yaml from six.moves.urllib.parse import urlparse from moto.core.responses import BaseResponse @@ -87,7 +88,8 @@ class CloudFormationResponse(BaseResponse): role_arn = self._get_param('RoleARN') update_or_create = self._get_param('ChangeSetType', 'CREATE') parameters_list = self._get_list_prefix("Parameters.member") - tags = {tag[0]: tag[1] for tag in self._get_list_prefix("Tags.member")} + tags = dict((item['key'], item['value']) + for item in self._get_list_prefix("Tags.member")) parameters = {param['parameter_key']: param['parameter_value'] for param in parameters_list} if template_url: @@ -294,6 +296,32 @@ class CloudFormationResponse(BaseResponse): template = self.response_template(LIST_EXPORTS_RESPONSE) return template.render(exports=exports, next_token=next_token) + def validate_template(self): + cfn_lint = self.cloudformation_backend.validate_template(self._get_param('TemplateBody')) + if cfn_lint: + raise ValidationError(cfn_lint[0].message) + description = "" + try: + description = json.loads(self._get_param('TemplateBody'))['Description'] + except (ValueError, KeyError): + pass + try: + description = yaml.load(self._get_param('TemplateBody'))['Description'] + except (yaml.parser.ParserError, KeyError): + pass + template = self.response_template(VALIDATE_STACK_RESPONSE_TEMPLATE) + return template.render(description=description) + + +VALIDATE_STACK_RESPONSE_TEMPLATE = """ + + + + +{{ description }} + + +""" CREATE_STACK_RESPONSE_TEMPLATE = """
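The new ValidateTemplate action lints the body with cfn-lint (wired up in moto/cloudformation/utils.py below) and echoes the template's Description back. A sketch of the round trip, assuming cfn-lint is installed alongside moto and using a throwaway template:

```python
import json

import boto3
from moto import mock_cloudformation

# Throwaway template; its Description is what the mocked endpoint returns.
dummy_template = {
    "AWSTemplateFormatVersion": "2010-09-09",
    "Description": "Stack 1",
    "Resources": {
        "Queue": {
            "Type": "AWS::SQS::Queue",
            "Properties": {"QueueName": "my-queue", "VisibilityTimeout": 60},
        }
    },
}


@mock_cloudformation
def test_validate_template():
    cf = boto3.client("cloudformation", region_name="us-east-1")
    response = cf.validate_template(TemplateBody=json.dumps(dummy_template))
    # A body that fails the cfn-lint checks raises a ValidationError instead.
    assert response["Description"] == "Stack 1"
```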
diff --git a/moto/cloudformation/utils.py b/moto/cloudformation/utils.py index f3b8874ed..f963ce7c8 100644 --- a/moto/cloudformation/utils.py +++ b/moto/cloudformation/utils.py @@ -3,6 +3,9 @@ import uuid import six import random import yaml +import os + +from cfnlint import decode, core def generate_stack_id(stack_name): @@ -38,3 +41,33 @@ def yaml_tag_constructor(loader, tag, node): key = 'Fn::{}'.format(tag[1:]) return {key: _f(loader, tag, node)} + + +def validate_template_cfn_lint(template): + + # Save the template to a temporary file -- cfn-lint requires a file + filename = "file.tmp" + with open(filename, "w") as file: + file.write(template) + abs_filename = os.path.abspath(filename) + + # decode handles both yaml and json + template, matches = decode.decode(abs_filename, False) + + # Set cfn-lint to info + core.configure_logging(None) + + # Initialize the ruleset to be applied (no overrules, no excludes) + rules = core.get_rules([], [], []) + + # Use us-east-1 region (spec file) for validation + regions = ['us-east-1'] + + # Process all the rules and gather the errors + matches = core.run_checks( + abs_filename, + template, + rules, + regions) + + return matches diff --git a/moto/cognitoidp/exceptions.py b/moto/cognitoidp/exceptions.py index 1f1ec2309..452670213 100644 --- a/moto/cognitoidp/exceptions.py +++ b/moto/cognitoidp/exceptions.py @@ -24,6 +24,16 @@ class UserNotFoundError(BadRequest): }) +class GroupExistsException(BadRequest): + + def __init__(self, message): + super(GroupExistsException, self).__init__() + self.description = json.dumps({ + "message": message, + '__type': 'GroupExistsException', + }) + + class NotAuthorizedError(BadRequest): def __init__(self, message): diff --git a/moto/cognitoidp/models.py b/moto/cognitoidp/models.py index 476d470b9..00868f7b3 100644 --- a/moto/cognitoidp/models.py +++ b/moto/cognitoidp/models.py @@ -1,6 +1,8 @@ from __future__ import unicode_literals import datetime +import functools +import itertools import json import os import time @@ -11,8 +13,7 @@ from jose import jws from moto.compat import OrderedDict from moto.core import BaseBackend, BaseModel -from .exceptions import NotAuthorizedError, ResourceNotFoundError, UserNotFoundError - +from .exceptions import GroupExistsException, NotAuthorizedError, ResourceNotFoundError, UserNotFoundError UserStatus = { "FORCE_CHANGE_PASSWORD": "FORCE_CHANGE_PASSWORD", } @@ -20,6 +21,39 @@ UserStatus = { +def paginate(limit, start_arg="next_token", limit_arg="max_results"): + """Returns a limited result list and an offset into the list of remaining items + + Takes the next_token and max_results kwargs given to a function and handles + the slicing of the results. The kwarg `next_token` is the offset into the + list to begin slicing from. `max_results` is the maximum size of the result. + + If max_results is not supplied then the `limit` parameter is used as a + default + + :param limit_arg: the name of the argument in the decorated function that + controls the number of items returned + :param start_arg: the name of the argument in the decorated function that + provides the starting offset + :param limit: the default maximum number of items to return + :return: a tuple containing a list of items and the offset into the list + """ + default_start = 0 + + def outer_wrapper(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + start = int(default_start if kwargs.get(start_arg) is None else kwargs[start_arg]) + lim = int(limit if kwargs.get(limit_arg) is None else kwargs[limit_arg]) + stop = start + lim + result = func(*args, **kwargs) + limited_results = list(itertools.islice(result, start, stop)) + next_token = stop if stop < len(result) else None + return limited_results, next_token + return wrapper + return outer_wrapper + +
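A standalone sketch of how the decorator slices results; list_widgets is hypothetical, and the page size of 2 stands in for the 60 used by the backend methods below:

```python
# Assumes the paginate decorator defined above is in scope.
@paginate(2)
def list_widgets(max_results=None, next_token=None):
    return ["a", "b", "c", "d", "e"]


page, token = list_widgets()                  # (["a", "b"], 2)
page, token = list_widgets(next_token=token)  # (["c", "d"], 4)
page, token = list_widgets(next_token=token)  # (["e"], None): no more pages
```

The token is simply the integer offset of the next page, so it only stays valid while the underlying collection is not reordered.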
class CognitoIdpUserPool(BaseModel): def __init__(self, region, name, extended_config): @@ -33,6 +67,7 @@ class CognitoIdpUserPool(BaseModel): self.clients = OrderedDict() self.identity_providers = OrderedDict() + self.groups = OrderedDict() self.users = OrderedDict() self.refresh_tokens = {} self.access_tokens = {} @@ -185,6 +220,33 @@ class CognitoIdpIdentityProvider(BaseModel): return identity_provider_json +class CognitoIdpGroup(BaseModel): + + def __init__(self, user_pool_id, group_name, description, role_arn, precedence): + self.user_pool_id = user_pool_id + self.group_name = group_name + self.description = description or "" + self.role_arn = role_arn + self.precedence = precedence + self.last_modified_date = datetime.datetime.now() + self.creation_date = self.last_modified_date + + # Users who are members of this group. + # Note that these links are bidirectional. + self.users = set() + + def to_json(self): + return { + "GroupName": self.group_name, + "UserPoolId": self.user_pool_id, + "Description": self.description, + "RoleArn": self.role_arn, + "Precedence": self.precedence, + "LastModifiedDate": time.mktime(self.last_modified_date.timetuple()), + "CreationDate": time.mktime(self.creation_date.timetuple()), + } + + class CognitoIdpUser(BaseModel): def __init__(self, user_pool_id, username, password, status, attributes): @@ -198,6 +260,10 @@ class CognitoIdpUser(BaseModel): self.create_date = datetime.datetime.utcnow() self.last_modified_date = datetime.datetime.utcnow() + # Groups this user is a member of. + # Note that these links are bidirectional. + self.groups = set() + def _base_json(self): return { "UserPoolId": self.user_pool_id, @@ -242,7 +308,8 @@ class CognitoIdpBackend(BaseBackend): self.user_pools[user_pool.id] = user_pool return user_pool - def list_user_pools(self): + @paginate(60) + def list_user_pools(self, max_results=None, next_token=None): return self.user_pools.values() def describe_user_pool(self, user_pool_id): @@ -289,7 +356,8 @@ class CognitoIdpBackend(BaseBackend): user_pool.clients[user_pool_client.id] = user_pool_client return user_pool_client - def list_user_pool_clients(self, user_pool_id): + @paginate(60) + def list_user_pool_clients(self, user_pool_id, max_results=None, next_token=None): user_pool = self.user_pools.get(user_pool_id) if not user_pool: raise ResourceNotFoundError(user_pool_id) @@ -339,7 +407,8 @@ class CognitoIdpBackend(BaseBackend): user_pool.identity_providers[name] = identity_provider return identity_provider - def list_identity_providers(self, user_pool_id): + @paginate(60) + def list_identity_providers(self, user_pool_id, max_results=None, next_token=None): user_pool = self.user_pools.get(user_pool_id) if not user_pool: raise ResourceNotFoundError(user_pool_id) @@ -367,6 +436,72 @@ class CognitoIdpBackend(BaseBackend): del user_pool.identity_providers[name] + # Group + def create_group(self, user_pool_id, group_name, description, role_arn, precedence): + user_pool = self.user_pools.get(user_pool_id) + if not user_pool: + raise ResourceNotFoundError(user_pool_id) + + group = CognitoIdpGroup(user_pool_id, group_name, description, role_arn, precedence) + if group.group_name in user_pool.groups: + raise GroupExistsException("A group with this name already exists") + user_pool.groups[group.group_name] = group + + return group + + def get_group(self, user_pool_id, group_name): + user_pool = self.user_pools.get(user_pool_id) + if not user_pool: + raise ResourceNotFoundError(user_pool_id) + + if group_name not in user_pool.groups: + raise ResourceNotFoundError(group_name) + + return user_pool.groups[group_name] + + def list_groups(self, user_pool_id): + user_pool = self.user_pools.get(user_pool_id) + if not user_pool: + raise ResourceNotFoundError(user_pool_id) + + return user_pool.groups.values() + + def delete_group(self, user_pool_id, group_name): + user_pool = self.user_pools.get(user_pool_id) + if not user_pool: + raise ResourceNotFoundError(user_pool_id) + + if group_name not in user_pool.groups: + raise ResourceNotFoundError(group_name) + + group = user_pool.groups[group_name] + for user in group.users: + user.groups.remove(group) + + del user_pool.groups[group_name] + + def admin_add_user_to_group(self, user_pool_id, group_name, username): + group = self.get_group(user_pool_id, group_name) + user = self.admin_get_user(user_pool_id, username) + + group.users.add(user) + user.groups.add(group) + + def list_users_in_group(self, user_pool_id, group_name): + group = self.get_group(user_pool_id, group_name) + return list(group.users) + + def admin_list_groups_for_user(self, user_pool_id, username): + user = self.admin_get_user(user_pool_id, username) + return list(user.groups) + + def admin_remove_user_from_group(self, user_pool_id, group_name, username): + group = self.get_group(user_pool_id, group_name) + user = self.admin_get_user(user_pool_id, username) + + group.users.discard(user) + user.groups.discard(group) + # User def admin_create_user(self, user_pool_id, username, temporary_password, attributes): user_pool = self.user_pools.get(user_pool_id) @@ -387,7 +522,8 @@ class
CognitoIdpBackend(BaseBackend): return user_pool.users[username] - def list_users(self, user_pool_id): + @paginate(60, "pagination_token", "limit") + def list_users(self, user_pool_id, pagination_token=None, limit=None): user_pool = self.user_pools.get(user_pool_id) if not user_pool: raise ResourceNotFoundError(user_pool_id) @@ -410,6 +546,10 @@ class CognitoIdpBackend(BaseBackend): if username not in user_pool.users: raise UserNotFoundError(username) + user = user_pool.users[username] + for group in user.groups: + group.users.remove(user) + del user_pool.users[username] def _log_user_in(self, user_pool, client, username): diff --git a/moto/cognitoidp/responses.py b/moto/cognitoidp/responses.py index 50939786b..8b3941c21 100644 --- a/moto/cognitoidp/responses.py +++ b/moto/cognitoidp/responses.py @@ -22,10 +22,17 @@ class CognitoIdpResponse(BaseResponse): }) def list_user_pools(self): - user_pools = cognitoidp_backends[self.region].list_user_pools() - return json.dumps({ - "UserPools": [user_pool.to_json() for user_pool in user_pools] - }) + max_results = self._get_param("MaxResults") + next_token = self._get_param("NextToken", "0") + user_pools, next_token = cognitoidp_backends[self.region].list_user_pools( + max_results=max_results, next_token=next_token + ) + response = { + "UserPools": [user_pool.to_json() for user_pool in user_pools], + } + if next_token: + response["NextToken"] = str(next_token) + return json.dumps(response) def describe_user_pool(self): user_pool_id = self._get_param("UserPoolId") @@ -72,10 +79,16 @@ class CognitoIdpResponse(BaseResponse): def list_user_pool_clients(self): user_pool_id = self._get_param("UserPoolId") - user_pool_clients = cognitoidp_backends[self.region].list_user_pool_clients(user_pool_id) - return json.dumps({ + max_results = self._get_param("MaxResults") + next_token = self._get_param("NextToken", "0") + user_pool_clients, next_token = cognitoidp_backends[self.region].list_user_pool_clients(user_pool_id, + max_results=max_results, next_token=next_token) + response = { "UserPoolClients": [user_pool_client.to_json() for user_pool_client in user_pool_clients] - }) + } + if next_token: + response["NextToken"] = str(next_token) + return json.dumps(response) def describe_user_pool_client(self): user_pool_id = self._get_param("UserPoolId") @@ -110,10 +123,17 @@ class CognitoIdpResponse(BaseResponse): def list_identity_providers(self): user_pool_id = self._get_param("UserPoolId") - identity_providers = cognitoidp_backends[self.region].list_identity_providers(user_pool_id) - return json.dumps({ + max_results = self._get_param("MaxResults") + next_token = self._get_param("NextToken", "0") + identity_providers, next_token = cognitoidp_backends[self.region].list_identity_providers( + user_pool_id, max_results=max_results, next_token=next_token + ) + response = { "Providers": [identity_provider.to_json() for identity_provider in identity_providers] - }) + } + if next_token: + response["NextToken"] = str(next_token) + return json.dumps(response) def describe_identity_provider(self): user_pool_id = self._get_param("UserPoolId") @@ -129,6 +149,89 @@ class CognitoIdpResponse(BaseResponse): cognitoidp_backends[self.region].delete_identity_provider(user_pool_id, name) return "" + # Group + def create_group(self): + group_name = self._get_param("GroupName") + user_pool_id = self._get_param("UserPoolId") + description = self._get_param("Description") + role_arn = self._get_param("RoleArn") + precedence = self._get_param("Precedence") + + group = 
cognitoidp_backends[self.region].create_group( + user_pool_id, + group_name, + description, + role_arn, + precedence, + ) + + return json.dumps({ + "Group": group.to_json(), + }) + + def get_group(self): + group_name = self._get_param("GroupName") + user_pool_id = self._get_param("UserPoolId") + group = cognitoidp_backends[self.region].get_group(user_pool_id, group_name) + return json.dumps({ + "Group": group.to_json(), + }) + + def list_groups(self): + user_pool_id = self._get_param("UserPoolId") + groups = cognitoidp_backends[self.region].list_groups(user_pool_id) + return json.dumps({ + "Groups": [group.to_json() for group in groups], + }) + + def delete_group(self): + group_name = self._get_param("GroupName") + user_pool_id = self._get_param("UserPoolId") + cognitoidp_backends[self.region].delete_group(user_pool_id, group_name) + return "" + + def admin_add_user_to_group(self): + user_pool_id = self._get_param("UserPoolId") + username = self._get_param("Username") + group_name = self._get_param("GroupName") + + cognitoidp_backends[self.region].admin_add_user_to_group( + user_pool_id, + group_name, + username, + ) + + return "" + + def list_users_in_group(self): + user_pool_id = self._get_param("UserPoolId") + group_name = self._get_param("GroupName") + users = cognitoidp_backends[self.region].list_users_in_group(user_pool_id, group_name) + return json.dumps({ + "Users": [user.to_json(extended=True) for user in users], + }) + + def admin_list_groups_for_user(self): + username = self._get_param("Username") + user_pool_id = self._get_param("UserPoolId") + groups = cognitoidp_backends[self.region].admin_list_groups_for_user(user_pool_id, username) + return json.dumps({ + "Groups": [group.to_json() for group in groups], + }) + + def admin_remove_user_from_group(self): + user_pool_id = self._get_param("UserPoolId") + username = self._get_param("Username") + group_name = self._get_param("GroupName") + + cognitoidp_backends[self.region].admin_remove_user_from_group( + user_pool_id, + group_name, + username, + ) + + return "" + # User def admin_create_user(self): user_pool_id = self._get_param("UserPoolId") @@ -155,10 +258,15 @@ class CognitoIdpResponse(BaseResponse): def list_users(self): user_pool_id = self._get_param("UserPoolId") - users = cognitoidp_backends[self.region].list_users(user_pool_id) - return json.dumps({ - "Users": [user.to_json(extended=True) for user in users] - }) + limit = self._get_param("Limit") + token = self._get_param("PaginationToken") + users, token = cognitoidp_backends[self.region].list_users(user_pool_id, + limit=limit, + pagination_token=token) + response = {"Users": [user.to_json(extended=True) for user in users]} + if token: + response["PaginationToken"] = str(token) + return json.dumps(response) def admin_disable_user(self): user_pool_id = self._get_param("UserPoolId") diff --git a/moto/core/models.py b/moto/core/models.py index 19267ca08..9fe1e96bd 100644 --- a/moto/core/models.py +++ b/moto/core/models.py @@ -4,6 +4,7 @@ from __future__ import absolute_import import functools import inspect +import os import re import six from io import BytesIO @@ -21,6 +22,11 @@ from .utils import ( ) +# "Mock" the AWS credentials as they can't be mocked in Botocore currently +os.environ.setdefault("AWS_ACCESS_KEY_ID", "foobar_key") +os.environ.setdefault("AWS_SECRET_ACCESS_KEY", "foobar_secret") + + class BaseMockAWS(object): nested_count = 0 diff --git a/moto/core/responses.py b/moto/core/responses.py index 0f133e72c..8fb247f75 100644 --- a/moto/core/responses.py +++ 
b/moto/core/responses.py @@ -718,6 +718,8 @@ def to_str(value, spec): return str(value) elif vtype == 'float': return str(value) + elif vtype == 'double': + return str(value) elif vtype == 'timestamp': return datetime.datetime.utcfromtimestamp( value).replace(tzinfo=pytz.utc).isoformat() @@ -737,6 +739,8 @@ def from_str(value, spec): return int(value) elif vtype == 'float': return float(value) + elif vtype == 'double': + return float(value) elif vtype == 'timestamp': return value elif vtype == 'string': diff --git a/moto/dynamodb2/comparisons.py b/moto/dynamodb2/comparisons.py index 53226c557..6d37345fe 100644 --- a/moto/dynamodb2/comparisons.py +++ b/moto/dynamodb2/comparisons.py @@ -383,7 +383,7 @@ class OpNotEqual(Op): def expr(self, item): lhs = self._lhs(item) rhs = self._rhs(item) - return lhs == rhs + return lhs != rhs class OpLessThanOrEqual(Op): diff --git a/moto/dynamodb2/models.py b/moto/dynamodb2/models.py index a54c4f7d0..8187ceaf9 100644 --- a/moto/dynamodb2/models.py +++ b/moto/dynamodb2/models.py @@ -5,6 +5,7 @@ import datetime import decimal import json import re +import uuid import boto3 from moto.compat import OrderedDict @@ -292,9 +293,82 @@ class Item(BaseModel): 'ADD not supported for %s' % ', '.join(update_action['Value'].keys())) +class StreamRecord(BaseModel): + def __init__(self, table, stream_type, event_name, old, new, seq): + old_a = old.to_json()['Attributes'] if old is not None else {} + new_a = new.to_json()['Attributes'] if new is not None else {} + + rec = old if old is not None else new + keys = {table.hash_key_attr: rec.hash_key.to_json()} + if table.range_key_attr is not None: + keys[table.range_key_attr] = rec.range_key.to_json() + + self.record = { + 'eventID': uuid.uuid4().hex, + 'eventName': event_name, + 'eventSource': 'aws:dynamodb', + 'eventVersion': '1.0', + 'awsRegion': 'us-east-1', + 'dynamodb': { + 'StreamViewType': stream_type, + 'ApproximateCreationDateTime': datetime.datetime.utcnow().isoformat(), + 'SequenceNumber': seq, + 'SizeBytes': 1, + 'Keys': keys + } + } + + if stream_type in ('NEW_IMAGE', 'NEW_AND_OLD_IMAGES'): + self.record['dynamodb']['NewImage'] = new_a + if stream_type in ('OLD_IMAGE', 'NEW_AND_OLD_IMAGES'): + self.record['dynamodb']['OldImage'] = old_a + + # This is a substantial overestimate but it's the easiest to do now + self.record['dynamodb']['SizeBytes'] = len( + json.dumps(self.record['dynamodb'])) + + def to_json(self): + return self.record + + +class StreamShard(BaseModel): + def __init__(self, table): + self.table = table + self.id = 'shardId-00000001541626099285-f35f62ef' + self.starting_sequence_number = 1100000000017454423009 + self.items = [] + self.created_on = datetime.datetime.utcnow() + + def to_json(self): + return { + 'ShardId': self.id, + 'SequenceNumberRange': { + 'StartingSequenceNumber': str(self.starting_sequence_number) + } + } + + def add(self, old, new): + t = self.table.stream_specification['StreamViewType'] + if old is None: + event_name = 'INSERT' + elif new is None: + event_name = 'DELETE' + else: + event_name = 'MODIFY' + seq = len(self.items) + self.starting_sequence_number + self.items.append( + StreamRecord(self.table, t, event_name, old, new, seq)) + + def get(self, start, quantity): + start -= self.starting_sequence_number + assert start >= 0 + end = start + quantity + return [i.to_json() for i in self.items[start:end]] + + class Table(BaseModel): - def __init__(self, table_name, schema=None, attr=None, throughput=None, indexes=None, global_indexes=None): + def __init__(self, table_name, 
schema=None, attr=None, throughput=None, indexes=None, global_indexes=None, streams=None): self.name = table_name self.attr = attr self.schema = schema @@ -325,10 +399,22 @@ class Table(BaseModel): 'TimeToLiveStatus': 'DISABLED' # One of 'ENABLING'|'DISABLING'|'ENABLED'|'DISABLED', # 'AttributeName': 'string' # Can contain this } + self.set_stream_specification(streams) def _generate_arn(self, name): return 'arn:aws:dynamodb:us-east-1:123456789011:table/' + name + def set_stream_specification(self, streams): + self.stream_specification = streams + if streams and (streams.get('StreamEnabled') or streams.get('StreamViewType')): + self.stream_specification['StreamEnabled'] = True + self.latest_stream_label = datetime.datetime.utcnow().isoformat() + self.stream_shard = StreamShard(self) + else: + self.stream_specification = {'StreamEnabled': False} + self.latest_stream_label = None + self.stream_shard = None + def describe(self, base_key='TableDescription'): results = { base_key: { @@ -345,6 +431,11 @@ class Table(BaseModel): 'LocalSecondaryIndexes': [index for index in self.indexes], } } + if self.stream_specification and self.stream_specification['StreamEnabled']: + results[base_key]['StreamSpecification'] = self.stream_specification + if self.latest_stream_label: + results[base_key]['LatestStreamLabel'] = self.latest_stream_label + results[base_key]['LatestStreamArn'] = self.table_arn + '/stream/' + self.latest_stream_label return results def __len__(self): @@ -385,23 +476,22 @@ class Table(BaseModel): else: range_value = None + if expected is None: + expected = {} + lookup_range_value = range_value + else: + expected_range_value = expected.get( + self.range_key_attr, {}).get("Value") + if(expected_range_value is None): + lookup_range_value = range_value + else: + lookup_range_value = DynamoType(expected_range_value) + current = self.get_item(hash_value, lookup_range_value) + item = Item(hash_value, self.hash_key_type, range_value, self.range_key_type, item_attrs) if not overwrite: - if expected is None: - expected = {} - lookup_range_value = range_value - else: - expected_range_value = expected.get( - self.range_key_attr, {}).get("Value") - if(expected_range_value is None): - lookup_range_value = range_value - else: - lookup_range_value = DynamoType(expected_range_value) - - current = self.get_item(hash_value, lookup_range_value) - if current is None: current_attr = {} elif hasattr(current, 'attrs'): @@ -432,6 +522,10 @@ class Table(BaseModel): self.items[hash_value][range_value] = item else: self.items[hash_value] = item + + if self.stream_shard is not None: + self.stream_shard.add(current, item) + return item def __nonzero__(self): @@ -462,9 +556,14 @@ class Table(BaseModel): def delete_item(self, hash_key, range_key): try: if range_key: - return self.items[hash_key].pop(range_key) + item = self.items[hash_key].pop(range_key) else: - return self.items.pop(hash_key) + item = self.items.pop(hash_key) + + if self.stream_shard is not None: + self.stream_shard.add(item, None) + + return item except KeyError: return None @@ -680,6 +779,13 @@ class DynamoDBBackend(BaseBackend): table.throughput = throughput return table + def update_table_streams(self, name, stream_specification): + table = self.tables[name] + if (stream_specification.get('StreamEnabled') or stream_specification.get('StreamViewType')) and table.latest_stream_label: + raise ValueError('Table already has stream enabled') + table.set_stream_specification(stream_specification) + return table + def update_table_global_indexes(self, 
name, global_index_updates): table = self.tables[name] gsis_by_name = dict((i['IndexName'], i) for i in table.global_indexes) diff --git a/moto/dynamodb2/responses.py b/moto/dynamodb2/responses.py index e2f1ef1cc..60886e171 100644 --- a/moto/dynamodb2/responses.py +++ b/moto/dynamodb2/responses.py @@ -104,13 +104,16 @@ class DynamoHandler(BaseResponse): # getting the indexes global_indexes = body.get("GlobalSecondaryIndexes", []) local_secondary_indexes = body.get("LocalSecondaryIndexes", []) + # get the stream specification + streams = body.get("StreamSpecification") table = self.dynamodb_backend.create_table(table_name, schema=key_schema, throughput=throughput, attr=attr, global_indexes=global_indexes, - indexes=local_secondary_indexes) + indexes=local_secondary_indexes, + streams=streams) if table is not None: return dynamo_json_dump(table.describe()) else: @@ -163,12 +166,20 @@ class DynamoHandler(BaseResponse): def update_table(self): name = self.body['TableName'] + table = self.dynamodb_backend.get_table(name) if 'GlobalSecondaryIndexUpdates' in self.body: table = self.dynamodb_backend.update_table_global_indexes( name, self.body['GlobalSecondaryIndexUpdates']) if 'ProvisionedThroughput' in self.body: throughput = self.body["ProvisionedThroughput"] table = self.dynamodb_backend.update_table_throughput(name, throughput) + if 'StreamSpecification' in self.body: + try: + table = self.dynamodb_backend.update_table_streams(name, self.body['StreamSpecification']) + except ValueError: + er = 'com.amazonaws.dynamodb.v20111205#ResourceInUseException' + return self.error(er, 'Cannot enable stream') + return dynamo_json_dump(table.describe()) def describe_table(self): @@ -183,6 +194,11 @@ class DynamoHandler(BaseResponse): def put_item(self): name = self.body['TableName'] item = self.body['Item'] + return_values = self.body.get('ReturnValues', 'NONE') + + if return_values not in ('ALL_OLD', 'NONE'): + er = 'com.amazonaws.dynamodb.v20111205#ValidationException' + return self.error(er, 'Return values set to invalid value') if has_empty_keys_or_values(item): return get_empty_str_error() @@ -193,6 +209,13 @@ class DynamoHandler(BaseResponse): else: expected = None + if return_values == 'ALL_OLD': + existing_item = self.dynamodb_backend.get_item(name, item) + if existing_item: + existing_attributes = existing_item.to_json()['Attributes'] + else: + existing_attributes = {} + # Attempt to parse simple ConditionExpressions into an Expected # expression if not expected: @@ -228,6 +251,10 @@ class DynamoHandler(BaseResponse): 'TableName': name, 'CapacityUnits': 1 } + if return_values == 'ALL_OLD': + item_dict['Attributes'] = existing_attributes + else: + item_dict.pop('Attributes', None) return dynamo_json_dump(item_dict) else: er = 'com.amazonaws.dynamodb.v20111205#ResourceNotFoundException' @@ -385,7 +412,7 @@ class DynamoHandler(BaseResponse): range_values = [value_alias_map[ range_key_expression_components[2]]] else: - hash_key_expression = key_condition_expression + hash_key_expression = key_condition_expression.strip('()') range_comparison = None range_values = [] @@ -512,7 +539,11 @@ class DynamoHandler(BaseResponse): def delete_item(self): name = self.body['TableName'] keys = self.body['Key'] - return_values = self.body.get('ReturnValues', '') + return_values = self.body.get('ReturnValues', 'NONE') + if return_values not in ('ALL_OLD', 'NONE'): + er = 'com.amazonaws.dynamodb.v20111205#ValidationException' + return self.error(er, 'Return values set to invalid value') + table = 
self.dynamodb_backend.get_table(name) if not table: er = 'com.amazonaws.dynamodb.v20120810#ConditionalCheckFailedException' @@ -527,9 +558,9 @@ class DynamoHandler(BaseResponse): return dynamo_json_dump(item_dict) def update_item(self): - name = self.body['TableName'] key = self.body['Key'] + return_values = self.body.get('ReturnValues', 'NONE') update_expression = self.body.get('UpdateExpression') attribute_updates = self.body.get('AttributeUpdates') expression_attribute_names = self.body.get( @@ -537,6 +568,15 @@ class DynamoHandler(BaseResponse): expression_attribute_values = self.body.get( 'ExpressionAttributeValues', {}) existing_item = self.dynamodb_backend.get_item(name, key) + if existing_item: + existing_attributes = existing_item.to_json()['Attributes'] + else: + existing_attributes = {} + + if return_values not in ('NONE', 'ALL_OLD', 'ALL_NEW', 'UPDATED_OLD', + 'UPDATED_NEW'): + er = 'com.amazonaws.dynamodb.v20111205#ValidationException' + return self.error(er, 'Return values set to invalid value') if has_empty_keys_or_values(expression_attribute_values): return get_empty_str_error() @@ -591,8 +631,26 @@ class DynamoHandler(BaseResponse): 'TableName': name, 'CapacityUnits': 0.5 } - if not existing_item: + unchanged_attributes = { + k for k in existing_attributes.keys() + if existing_attributes[k] == item_dict['Attributes'].get(k) + } + changed_attributes = set(existing_attributes.keys()).union(item_dict['Attributes'].keys()).difference(unchanged_attributes) + + if return_values == 'NONE': item_dict['Attributes'] = {} + elif return_values == 'ALL_OLD': + item_dict['Attributes'] = existing_attributes + elif return_values == 'UPDATED_OLD': + item_dict['Attributes'] = { + k: v for k, v in existing_attributes.items() + if k in changed_attributes + } + elif return_values == 'UPDATED_NEW': + item_dict['Attributes'] = { + k: v for k, v in item_dict['Attributes'].items() + if k in changed_attributes + } return dynamo_json_dump(item_dict) diff --git a/moto/dynamodbstreams/__init__.py b/moto/dynamodbstreams/__init__.py new file mode 100644 index 000000000..b35879eba --- /dev/null +++ b/moto/dynamodbstreams/__init__.py @@ -0,0 +1,6 @@ +from __future__ import unicode_literals +from .models import dynamodbstreams_backends +from ..core.models import base_decorator + +dynamodbstreams_backend = dynamodbstreams_backends['us-east-1'] +mock_dynamodbstreams = base_decorator(dynamodbstreams_backends) diff --git a/moto/dynamodbstreams/models.py b/moto/dynamodbstreams/models.py new file mode 100644 index 000000000..41cc6e280 --- /dev/null +++ b/moto/dynamodbstreams/models.py @@ -0,0 +1,129 @@ +from __future__ import unicode_literals + +import os +import json +import boto3 +import base64 + +from moto.core import BaseBackend, BaseModel +from moto.dynamodb2.models import dynamodb_backends + + +class ShardIterator(BaseModel): + def __init__(self, streams_backend, stream_shard, shard_iterator_type, sequence_number=None): + self.id = base64.b64encode(os.urandom(472)).decode('utf-8') + self.streams_backend = streams_backend + self.stream_shard = stream_shard + self.shard_iterator_type = shard_iterator_type + if shard_iterator_type == 'TRIM_HORIZON': + self.sequence_number = stream_shard.starting_sequence_number + elif shard_iterator_type == 'LATEST': + self.sequence_number = stream_shard.starting_sequence_number + len(stream_shard.items) + elif shard_iterator_type == 'AT_SEQUENCE_NUMBER': + self.sequence_number = sequence_number + elif shard_iterator_type == 'AFTER_SEQUENCE_NUMBER': + self.sequence_number = 
sequence_number + 1 + + @property + def arn(self): + return '{}/stream/{}|1|{}'.format( + self.stream_shard.table.table_arn, + self.stream_shard.table.latest_stream_label, + self.id) + + def to_json(self): + return { + 'ShardIterator': self.arn + } + + def get(self, limit=1000): + items = self.stream_shard.get(self.sequence_number, limit) + try: + last_sequence_number = max(i['dynamodb']['SequenceNumber'] for i in items) + new_shard_iterator = ShardIterator(self.streams_backend, + self.stream_shard, + 'AFTER_SEQUENCE_NUMBER', + last_sequence_number) + except ValueError: + new_shard_iterator = ShardIterator(self.streams_backend, + self.stream_shard, + 'AT_SEQUENCE_NUMBER', + self.sequence_number) + + self.streams_backend.shard_iterators[new_shard_iterator.arn] = new_shard_iterator + return { + 'NextShardIterator': new_shard_iterator.arn, + 'Records': items + } + + +class DynamoDBStreamsBackend(BaseBackend): + def __init__(self, region): + self.region = region + self.shard_iterators = {} + + def reset(self): + region = self.region + self.__dict__ = {} + self.__init__(region) + + @property + def dynamodb(self): + return dynamodb_backends[self.region] + + def _get_table_from_arn(self, arn): + table_name = arn.split(':', 6)[5].split('/')[1] + return self.dynamodb.get_table(table_name) + + def describe_stream(self, arn): + table = self._get_table_from_arn(arn) + resp = {'StreamDescription': { + 'StreamArn': arn, + 'StreamLabel': table.latest_stream_label, + 'StreamStatus': ('ENABLED' if table.latest_stream_label + else 'DISABLED'), + 'StreamViewType': table.stream_specification['StreamViewType'], + 'CreationRequestDateTime': table.stream_shard.created_on.isoformat(), + 'TableName': table.name, + 'KeySchema': table.schema, + 'Shards': ([table.stream_shard.to_json()] if table.stream_shard + else []) + }} + + return json.dumps(resp) + + def list_streams(self, table_name=None): + streams = [] + for table in self.dynamodb.tables.values(): + if table_name is not None and table.name != table_name: + continue + if table.latest_stream_label: + d = table.describe(base_key='Table') + streams.append({ + 'StreamArn': d['Table']['LatestStreamArn'], + 'TableName': d['Table']['TableName'], + 'StreamLabel': d['Table']['LatestStreamLabel'] + }) + + return json.dumps({'Streams': streams}) + + def get_shard_iterator(self, arn, shard_id, shard_iterator_type, sequence_number=None): + table = self._get_table_from_arn(arn) + assert table.stream_shard.id == shard_id + + shard_iterator = ShardIterator(self, table.stream_shard, + shard_iterator_type, + sequence_number) + self.shard_iterators[shard_iterator.arn] = shard_iterator + + return json.dumps(shard_iterator.to_json()) + + def get_records(self, iterator_arn, limit): + shard_iterator = self.shard_iterators[iterator_arn] + return json.dumps(shard_iterator.get(limit)) + + +available_regions = boto3.session.Session().get_available_regions( + 'dynamodbstreams') +dynamodbstreams_backends = {region: DynamoDBStreamsBackend(region=region) + for region in available_regions} diff --git a/moto/dynamodbstreams/responses.py b/moto/dynamodbstreams/responses.py new file mode 100644 index 000000000..c9c113615 --- /dev/null +++ b/moto/dynamodbstreams/responses.py @@ -0,0 +1,34 @@ +from __future__ import unicode_literals + +from moto.core.responses import BaseResponse + +from .models import dynamodbstreams_backends + + +class DynamoDBStreamsHandler(BaseResponse): + + @property + def backend(self): + return dynamodbstreams_backends[self.region] + + def describe_stream(self): + arn = 
self._get_param('StreamArn') + return self.backend.describe_stream(arn) + + def list_streams(self): + table_name = self._get_param('TableName') + return self.backend.list_streams(table_name) + + def get_shard_iterator(self): + arn = self._get_param('StreamArn') + shard_id = self._get_param('ShardId') + shard_iterator_type = self._get_param('ShardIteratorType') + return self.backend.get_shard_iterator(arn, shard_id, + shard_iterator_type) + + def get_records(self): + arn = self._get_param('ShardIterator') + limit = self._get_param('Limit') + if limit is None: + limit = 1000 + return self.backend.get_records(arn, limit) diff --git a/moto/dynamodbstreams/urls.py b/moto/dynamodbstreams/urls.py new file mode 100644 index 000000000..1d0f94c35 --- /dev/null +++ b/moto/dynamodbstreams/urls.py @@ -0,0 +1,10 @@ +from __future__ import unicode_literals +from .responses import DynamoDBStreamsHandler + +url_bases = [ + "https?://streams.dynamodb.(.+).amazonaws.com" +] + +url_paths = { + "{0}/$": DynamoDBStreamsHandler.dispatch, +} diff --git a/moto/ec2/models.py b/moto/ec2/models.py index b94cac479..a450943b7 100755 --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -1115,7 +1115,7 @@ class Ami(TaggedEC2Resource): elif filter_name == 'image-id': return self.id elif filter_name == 'is-public': - return str(self.is_public) + return self.is_public_string elif filter_name == 'state': return self.state elif filter_name == 'name': @@ -2230,6 +2230,10 @@ class VPCPeeringConnectionStatus(object): self.code = code self.message = message + def deleted(self): + self.code = 'deleted' + self.message = 'Deleted by {deleter ID}' + def initiating(self): self.code = 'initiating-request' self.message = 'Initiating Request to {accepter ID}' @@ -2292,9 +2296,8 @@ class VPCPeeringConnectionBackend(object): return self.vpc_pcxs.get(vpc_pcx_id) def delete_vpc_peering_connection(self, vpc_pcx_id): - deleted = self.vpc_pcxs.pop(vpc_pcx_id, None) - if not deleted: - raise InvalidVPCPeeringConnectionIdError(vpc_pcx_id) + deleted = self.get_vpc_peering_connection(vpc_pcx_id) + deleted._status.deleted() return deleted def accept_vpc_peering_connection(self, vpc_pcx_id): diff --git a/moto/ecs/models.py b/moto/ecs/models.py index d00853843..4a6737ceb 100644 --- a/moto/ecs/models.py +++ b/moto/ecs/models.py @@ -769,6 +769,8 @@ class EC2ContainerServiceBackend(BaseBackend): Container instances status should be one of [ACTIVE,DRAINING]") failures = [] container_instance_objects = [] + list_container_instance_ids = [x.split('/')[-1] + for x in list_container_instance_ids] for container_instance_id in list_container_instance_ids: container_instance = self.container_instances[cluster_name].get(container_instance_id, None) if container_instance is not None: diff --git a/moto/emr/responses.py b/moto/emr/responses.py index 49e37ab9a..933e0177b 100644 --- a/moto/emr/responses.py +++ b/moto/emr/responses.py @@ -613,13 +613,11 @@ DESCRIBE_STEP_TEMPLATE = """ @@ -734,7 +807,7 @@ CREATE_INSTANCE_PROFILE_TEMPLATE = """ {{ policy }} {% endfor %} + false - false 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE @@ -1240,11 +1313,23 @@ LIST_ACCESS_KEYS_TEMPLATE = """ """ + +GET_ACCESS_KEY_LAST_USED_TEMPLATE = """ + + + {{ user_name }} + + {{ last_used }} + + + +""" + CREDENTIAL_REPORT_GENERATING = """ - STARTED - No report exists. Starting a new report generation task + STARTED + No report exists. 
Starting a new report generation task fa788a82-aa8a-11e4-a278-1786c418872b" @@ -1253,7 +1338,7 @@ CREDENTIAL_REPORT_GENERATING = """ CREDENTIAL_REPORT_GENERATED = """ - COMPLETE + COMPLETE fa788a82-aa8a-11e4-a278-1786c418872b" @@ -1262,7 +1347,7 @@ CREDENTIAL_REPORT_GENERATED = """ CREDENTIAL_REPORT = """ - {{ report }} + {{ report }} 2015-02-02T20:02:02Z text/csv @@ -1277,23 +1362,23 @@ LIST_INSTANCE_PROFILES_FOR_ROLE_TEMPLATE = """ {% for profile in instance_profiles %} - {{ profile.id }} - - {% for role in profile.roles %} - - {{ role.path }} - {{ role.arn }} - {{ role.name }} - {{ role.assume_policy_document }} - 2012-05-09T15:45:35Z - {{ role.id }} - - {% endfor %} - - {{ profile.name }} - {{ profile.path }} - {{ profile.arn }} - 2012-05-09T16:27:11Z + {{ profile.id }} + + {% for role in profile.roles %} + + {{ role.path }} + {{ role.arn }} + {{ role.name }} + {{ role.assume_policy_document }} + {{ role.create_date }} + {{ role.id }} + + {% endfor %} + + {{ profile.name }} + {{ profile.path }} + {{ profile.arn }} + {{ profile.create_date }} {% endfor %} @@ -1382,7 +1467,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """{{ user.path }} {{ user.name }} {{ user.arn }} - 2012-05-09T15:45:35Z + {{ user.created_iso_8601 }} {% endfor %} @@ -1401,7 +1486,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """{{ group.name }} {{ group.path }} {{ group.arn }} - 2012-05-09T16:27:11Z + {{ group.create_date }} {% endfor %} @@ -1421,23 +1506,23 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """ {% for profile in instance_profiles %} - {{ profile.id }} - - {% for role in profile.roles %} - - {{ role.path }} - {{ role.arn }} - {{ role.name }} - {{ role.assume_role_policy_document }} - 2012-05-09T15:45:35Z - {{ role.id }} - - {% endfor %} - - {{ profile.name }} - {{ profile.path }} - {{ profile.arn }} - 2012-05-09T16:27:11Z + {{ profile.id }} + + {% for role in profile.roles %} + + {{ role.path }} + {{ role.arn }} + {{ role.name }} + {{ role.assume_role_policy_document }} + {{ role.create_date }} + {{ role.id }} + + {% endfor %} + + {{ profile.name }} + {{ profile.path }} + {{ profile.arn }} + {{ profile.create_date }} {% endfor %} @@ -1445,7 +1530,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """{{ role.arn }} {{ role.name }} {{ role.assume_role_policy_document }} - 2014-07-30T17:09:20Z + {{ role.create_date }} {{ role.id }} {% endfor %} @@ -1474,9 +1559,9 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """ {{ policy.arn }} 1 - 2012-05-09T16:27:11Z + {{ policy.create_datetime }} true - 2012-05-09T16:27:11Z + {{ policy.update_datetime }} {% endfor %} @@ -1485,3 +1570,104 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """92e79ae7-7399-11e4-8c85-4b53eEXAMPLE """ + +CREATE_SAML_PROVIDER_TEMPLATE = """ + + {{ saml_provider.arn }} + + + 29f47818-99f5-11e1-a4c3-27EXAMPLE804 + +""" + +LIST_SAML_PROVIDERS_TEMPLATE = """ + + + {% for saml_provider in saml_providers %} + + {{ saml_provider.arn }} + 2032-05-09T16:27:11Z + 2012-05-09T16:27:03Z + + {% endfor %} + + + + fd74fa8d-99f3-11e1-a4c3-27EXAMPLE804 + +""" + +GET_SAML_PROVIDER_TEMPLATE = """ + + 2012-05-09T16:27:11Z + 2015-12-31T21:59:59Z + {{ saml_provider.saml_metadata_document }} + + + 29f47818-99f5-11e1-a4c3-27EXAMPLE804 + +""" + +DELETE_SAML_PROVIDER_TEMPLATE = """ + + c749ee7f-99ef-11e1-a4c3-27EXAMPLE804 + +""" + +UPDATE_SAML_PROVIDER_TEMPLATE = """ + + {{ saml_provider.arn }} + + + 29f47818-99f5-11e1-a4c3-27EXAMPLE804 + +""" + +UPLOAD_SIGNING_CERTIFICATE_TEMPLATE = """ + + + {{ cert.user_name }} + {{ cert.id }} + {{ cert.body }} + {{ 
cert.status }} + + + + 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE + +""" + + +UPDATE_SIGNING_CERTIFICATE_TEMPLATE = """ + + EXAMPLE8-90ab-cdef-fedc-ba987EXAMPLE + +""" + + +DELETE_SIGNING_CERTIFICATE_TEMPLATE = """ + + 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE + +""" + + +LIST_SIGNING_CERTIFICATES_TEMPLATE = """ + + {{ user_name }} + + {% for cert in certificates %} + + {{ user_name }} + {{ cert.id }} + {{ cert.body }} + {{ cert.status }} + + {% endfor %} + + false + + + 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE + +""" diff --git a/moto/iam/utils.py b/moto/iam/utils.py index 1fae85a6c..f59bdfffe 100644 --- a/moto/iam/utils.py +++ b/moto/iam/utils.py @@ -12,8 +12,7 @@ def random_alphanumeric(length): ) -def random_resource_id(): - size = 20 +def random_resource_id(size=20): chars = list(range(10)) + list(string.ascii_lowercase) return ''.join(six.text_type(random.choice(chars)) for x in range(size)) diff --git a/moto/iot/exceptions.py b/moto/iot/exceptions.py index 7bbdb706d..3af3751d9 100644 --- a/moto/iot/exceptions.py +++ b/moto/iot/exceptions.py @@ -31,3 +31,20 @@ class VersionConflictException(IoTClientError): 'VersionConflictException', 'The version for thing %s does not match the expected version.' % name ) + + +class CertificateStateException(IoTClientError): + def __init__(self, msg, cert_id): + self.code = 406 + super(CertificateStateException, self).__init__( + 'CertificateStateException', + '%s Id: %s' % (msg, cert_id) + ) + + +class DeleteConflictException(IoTClientError): + def __init__(self, msg): + self.code = 409 + super(DeleteConflictException, self).__init__( + 'DeleteConflictException', msg + ) diff --git a/moto/iot/models.py b/moto/iot/models.py index c36bb985f..b493f6b8d 100644 --- a/moto/iot/models.py +++ b/moto/iot/models.py @@ -13,6 +13,8 @@ import boto3 from moto.core import BaseBackend, BaseModel from .exceptions import ( + CertificateStateException, + DeleteConflictException, ResourceNotFoundException, InvalidRequestException, VersionConflictException @@ -378,7 +380,25 @@ class IoTBackend(BaseBackend): return certificate, key_pair def delete_certificate(self, certificate_id): - self.describe_certificate(certificate_id) + cert = self.describe_certificate(certificate_id) + if cert.status == 'ACTIVE': + raise CertificateStateException( + 'Certificate must be deactivated (not ACTIVE) before deletion.', certificate_id) + + certs = [k[0] for k, v in self.principal_things.items() + if self._get_principal(k[0]).certificate_id == certificate_id] + if len(certs) > 0: + raise DeleteConflictException( + 'Things must be detached before deletion (arn: %s)' % certs[0] + ) + + certs = [k[0] for k, v in self.principal_policies.items() + if self._get_principal(k[0]).certificate_id == certificate_id] + if len(certs) > 0: + raise DeleteConflictException( + 'Certificate policies must be detached before deletion (arn: %s)' % certs[0] + ) + del self.certificates[certificate_id] def describe_certificate(self, certificate_id): @@ -411,6 +431,14 @@ class IoTBackend(BaseBackend): return policies[0] def delete_policy(self, policy_name): + + policies = [k[1] for k, v in self.principal_policies.items() if k[1] == policy_name] + if len(policies) > 0: + raise DeleteConflictException( + 'The policy cannot be deleted as the policy is attached to one or more principals (name=%s)' + % policy_name + ) + policy = self.get_policy(policy_name) del self.policies[policy.name] @@ -429,6 +457,14 @@ class IoTBackend(BaseBackend): pass raise ResourceNotFoundException() + def attach_policy(self, policy_name, target): + 
principal = self._get_principal(target) + policy = self.get_policy(policy_name) + k = (target, policy_name) + if k in self.principal_policies: + return + self.principal_policies[k] = (principal, policy) + def attach_principal_policy(self, policy_name, principal_arn): principal = self._get_principal(principal_arn) policy = self.get_policy(policy_name) @@ -437,6 +473,15 @@ class IoTBackend(BaseBackend): return self.principal_policies[k] = (principal, policy) + def detach_policy(self, policy_name, target): + # this may raise ResourceNotFoundException + self._get_principal(target) + self.get_policy(policy_name) + k = (target, policy_name) + if k not in self.principal_policies: + raise ResourceNotFoundException() + del self.principal_policies[k] + def detach_principal_policy(self, policy_name, principal_arn): # this may raises ResourceNotFoundException self._get_principal(principal_arn) diff --git a/moto/iot/responses.py b/moto/iot/responses.py index 006c4c4cc..214576f52 100644 --- a/moto/iot/responses.py +++ b/moto/iot/responses.py @@ -224,6 +224,15 @@ class IoTResponse(BaseResponse): ) return json.dumps(dict()) + def attach_policy(self): + policy_name = self._get_param("policyName") + target = self._get_param('target') + self.iot_backend.attach_policy( + policy_name=policy_name, + target=target, + ) + return json.dumps(dict()) + def attach_principal_policy(self): policy_name = self._get_param("policyName") principal = self.headers.get('x-amzn-iot-principal') @@ -233,6 +242,15 @@ class IoTResponse(BaseResponse): ) return json.dumps(dict()) + def detach_policy(self): + policy_name = self._get_param("policyName") + target = self._get_param('target') + self.iot_backend.detach_policy( + policy_name=policy_name, + target=target, + ) + return json.dumps(dict()) + def detach_principal_policy(self): policy_name = self._get_param("policyName") principal = self.headers.get('x-amzn-iot-principal') diff --git a/moto/route53/responses.py b/moto/route53/responses.py index 6679e7945..98ffa4c47 100644 --- a/moto/route53/responses.py +++ b/moto/route53/responses.py @@ -123,6 +123,9 @@ class Route53(BaseResponse): """ % (record_set['Name'], the_zone.name) return 400, headers, error_msg + if not record_set['Name'].endswith('.'): + record_set['Name'] += '.'
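The two lines added above normalize record names to FQDN form: Route 53 always stores and returns names with a trailing dot, even when the caller omits it, so the mock now does the same. A minimal sketch of the behavior this enables (a hypothetical test, not part of this change set; names and values are illustrative):

import boto3
from moto import mock_route53


@mock_route53
def test_trailing_dot_is_appended():
    client = boto3.client('route53', region_name='us-east-1')
    zone_id = client.create_hosted_zone(
        Name='example.com', CallerReference='ref')['HostedZone']['Id']

    client.change_resource_record_sets(
        HostedZoneId=zone_id,
        ChangeBatch={'Changes': [{
            'Action': 'CREATE',
            'ResourceRecordSet': {
                'Name': 'www.example.com',  # no trailing dot supplied
                'Type': 'A',
                'TTL': 300,
                'ResourceRecords': [{'Value': '10.0.0.1'}],
            },
        }]},
    )

    records = client.list_resource_record_sets(HostedZoneId=zone_id)
    names = [rs['Name'] for rs in records['ResourceRecordSets']]
    assert 'www.example.com.' in names  # returned in FQDN form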
+ if action in ('CREATE', 'UPSERT'): if 'ResourceRecords' in record_set: resource_records = list( diff --git a/moto/s3/exceptions.py b/moto/s3/exceptions.py index 26515dfd2..27c842111 100644 --- a/moto/s3/exceptions.py +++ b/moto/s3/exceptions.py @@ -178,3 +178,24 @@ class InvalidStorageClass(S3ClientError): "InvalidStorageClass", "The storage class you specified is not valid", *args, **kwargs) + + +class InvalidBucketName(S3ClientError): + code = 400 + + def __init__(self, *args, **kwargs): + super(InvalidBucketName, self).__init__( + "InvalidBucketName", + "The specified bucket is not valid.", + *args, **kwargs + ) + + +class DuplicateTagKeys(S3ClientError): + code = 400 + + def __init__(self, *args, **kwargs): + super(DuplicateTagKeys, self).__init__( + "InvalidTag", + "Cannot provide multiple Tags with the same key", + *args, **kwargs) diff --git a/moto/s3/models.py b/moto/s3/models.py index bb4d7848c..50a54918b 100644 --- a/moto/s3/models.py +++ b/moto/s3/models.py @@ -8,19 +8,25 @@ import itertools import codecs import random import string +import tempfile +import sys import six from bisect import insort from moto.core import BaseBackend, BaseModel from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime -from .exceptions import BucketAlreadyExists, MissingBucket, InvalidPart, EntityTooSmall, MissingKey, \ - InvalidNotificationDestination, MalformedXML, InvalidStorageClass +from .exceptions import BucketAlreadyExists, MissingBucket, InvalidBucketName, InvalidPart, \ + EntityTooSmall, MissingKey, InvalidNotificationDestination, MalformedXML, InvalidStorageClass, DuplicateTagKeys from .utils import clean_key_name, _VersionedKeyStore +MAX_BUCKET_NAME_LENGTH = 63 +MIN_BUCKET_NAME_LENGTH = 3 UPLOAD_ID_BYTES = 43 UPLOAD_PART_MIN_SIZE = 5242880 STORAGE_CLASS = ["STANDARD", "REDUCED_REDUNDANCY", "STANDARD_IA", "ONEZONE_IA"] +DEFAULT_KEY_BUFFER_SIZE = 16 * 1024 * 1024 +DEFAULT_TEXT_ENCODING = sys.getdefaultencoding() class FakeDeleteMarker(BaseModel): @@ -42,9 +48,9 @@ class FakeDeleteMarker(BaseModel): class FakeKey(BaseModel): - def __init__(self, name, value, storage="STANDARD", etag=None, is_versioned=False, version_id=0): + def __init__(self, name, value, storage="STANDARD", etag=None, is_versioned=False, version_id=0, + max_buffer_size=DEFAULT_KEY_BUFFER_SIZE): self.name = name - self.value = value self.last_modified = datetime.datetime.utcnow() self.acl = get_canned_acl('private') self.website_redirect_location = None @@ -56,10 +62,30 @@ class FakeKey(BaseModel): self._is_versioned = is_versioned self._tagging = FakeTagging() + self._value_buffer = tempfile.SpooledTemporaryFile(max_size=max_buffer_size) + self._max_buffer_size = max_buffer_size + self.value = value + @property def version_id(self): return self._version_id + @property + def value(self): + self._value_buffer.seek(0) + return self._value_buffer.read() + + @value.setter + def value(self, new_value): + self._value_buffer.seek(0) + self._value_buffer.truncate() + + # Hack for working around moto's own unit tests; this probably won't + # actually get hit in normal use. 
+ if isinstance(new_value, six.text_type): + new_value = new_value.encode(DEFAULT_TEXT_ENCODING) + self._value_buffer.write(new_value) + def copy(self, new_name=None): r = copy.deepcopy(self) if new_name is not None: @@ -83,7 +109,9 @@ class FakeKey(BaseModel): self.acl = acl def append_to_value(self, value): - self.value += value + self._value_buffer.seek(0, os.SEEK_END) + self._value_buffer.write(value) + self.last_modified = datetime.datetime.utcnow() self._etag = None # must recalculate etag if self._is_versioned: @@ -101,11 +129,13 @@ class FakeKey(BaseModel): def etag(self): if self._etag is None: value_md5 = hashlib.md5() - if isinstance(self.value, six.text_type): - value = self.value.encode("utf-8") - else: - value = self.value - value_md5.update(value) + self._value_buffer.seek(0) + while True: + block = self._value_buffer.read(DEFAULT_KEY_BUFFER_SIZE) + if not block: + break + value_md5.update(block) + self._etag = value_md5.hexdigest() return '"{0}"'.format(self._etag) @@ -132,7 +162,7 @@ class FakeKey(BaseModel): res = { 'ETag': self.etag, 'last-modified': self.last_modified_RFC1123, - 'content-length': str(len(self.value)), + 'content-length': str(self.size), } if self._storage_class != 'STANDARD': res['x-amz-storage-class'] = self._storage_class @@ -150,7 +180,8 @@ class FakeKey(BaseModel): @property def size(self): - return len(self.value) + self._value_buffer.seek(0, os.SEEK_END) + return self._value_buffer.tell() @property def storage_class(self): @@ -161,6 +192,26 @@ class FakeKey(BaseModel): if self._expiry is not None: return self._expiry.strftime("%a, %d %b %Y %H:%M:%S GMT") + # Keys need to be pickleable due to some implementation details of boto3. + # Since file objects aren't pickleable, we need to override the default + # behavior. 
The following is adapted from the Python docs: + # https://docs.python.org/3/library/pickle.html#handling-stateful-objects + def __getstate__(self): + state = self.__dict__.copy() + state['value'] = self.value + del state['_value_buffer'] + return state + + def __setstate__(self, state): + self.__dict__.update({ + k: v for k, v in six.iteritems(state) + if k != 'value' + }) + + self._value_buffer = \ + tempfile.SpooledTemporaryFile(max_size=self._max_buffer_size) + self.value = state['value'] + class FakeMultipart(BaseModel): @@ -634,6 +685,8 @@ class S3Backend(BaseBackend): def create_bucket(self, bucket_name, region_name): if bucket_name in self.buckets: raise BucketAlreadyExists(bucket=bucket_name) + if not MIN_BUCKET_NAME_LENGTH <= len(bucket_name) <= MAX_BUCKET_NAME_LENGTH: + raise InvalidBucketName() new_bucket = FakeBucket(name=bucket_name, region_name=region_name) self.buckets[bucket_name] = new_bucket return new_bucket @@ -773,6 +826,9 @@ class S3Backend(BaseBackend): return key def put_bucket_tagging(self, bucket_name, tagging): + tag_keys = [tag.key for tag in tagging.tag_set.tags] + if len(tag_keys) != len(set(tag_keys)): + raise DuplicateTagKeys() bucket = self.get_bucket(bucket_name) bucket.set_tags(tagging) diff --git a/moto/s3/responses.py b/moto/s3/responses.py index 13e5f87d9..43f690bc8 100755 --- a/moto/s3/responses.py +++ b/moto/s3/responses.py @@ -193,7 +193,13 @@ class ResponseObject(_TemplateEnvironmentMixin): elif 'location' in querystring: bucket = self.backend.get_bucket(bucket_name) template = self.response_template(S3_BUCKET_LOCATION) - return template.render(location=bucket.location) + + location = bucket.location + # us-east-1 is different - returns a None location + if location == DEFAULT_REGION_NAME: + location = None + + return template.render(location=location) elif 'lifecycle' in querystring: bucket = self.backend.get_bucket(bucket_name) if not bucket.rules: @@ -338,9 +344,15 @@ class ResponseObject(_TemplateEnvironmentMixin): if continuation_token or start_after: limit = continuation_token or start_after - result_keys = self._get_results_from_token(result_keys, limit) + if not delimiter: + result_keys = self._get_results_from_token(result_keys, limit) + else: + result_folders = self._get_results_from_token(result_folders, limit) - result_keys, is_truncated, next_continuation_token = self._truncate_result(result_keys, max_keys) + if not delimiter: + result_keys, is_truncated, next_continuation_token = self._truncate_result(result_keys, max_keys) + else: + result_folders, is_truncated, next_continuation_token = self._truncate_result(result_folders, max_keys) return template.render( bucket=bucket, @@ -358,7 +370,7 @@ class ResponseObject(_TemplateEnvironmentMixin): def _get_results_from_token(self, result_keys, token): continuation_index = 0 for key in result_keys: - if key.name > token: + if (key.name if isinstance(key, FakeKey) else key) > token: break continuation_index += 1 return result_keys[continuation_index:] @@ -367,7 +379,8 @@ class ResponseObject(_TemplateEnvironmentMixin): if len(result_keys) > max_keys: is_truncated = 'true' result_keys = result_keys[:max_keys] - next_continuation_token = result_keys[-1].name + item = result_keys[-1] + next_continuation_token = (item.name if isinstance(item, FakeKey) else item) else: is_truncated = 'false' next_continuation_token = None @@ -432,8 +445,19 @@ class ResponseObject(_TemplateEnvironmentMixin): else: if body: + # us-east-1, the default AWS region behaves a bit differently + # - you should not use it 
as a location constraint --> it fails + # - querying the location constraint returns None try: - region_name = xmltodict.parse(body)['CreateBucketConfiguration']['LocationConstraint'] + forced_region = xmltodict.parse(body)['CreateBucketConfiguration']['LocationConstraint'] + + if forced_region == DEFAULT_REGION_NAME: + raise S3ClientError( + 'InvalidLocationConstraint', + 'The specified location-constraint is not valid' + ) + else: + region_name = forced_region except KeyError: pass @@ -1176,7 +1200,7 @@ S3_DELETE_BUCKET_WITH_ITEMS_ERROR = """ """ S3_BUCKET_LOCATION = """ -{{ location }}""" +{% if location != None %}{{ location }}{% endif %}""" S3_BUCKET_LIFECYCLE_CONFIGURATION = """ diff --git a/moto/secretsmanager/models.py b/moto/secretsmanager/models.py index 1404a0ec8..1350ab469 100644 --- a/moto/secretsmanager/models.py +++ b/moto/secretsmanager/models.py @@ -2,6 +2,7 @@ from __future__ import unicode_literals import time import json +import uuid import boto3 @@ -18,10 +19,6 @@ class SecretsManager(BaseModel): def __init__(self, region_name, **kwargs): self.region = region_name - self.secret_id = kwargs.get('secret_id', '') - self.version_id = kwargs.get('version_id', '') - self.version_stage = kwargs.get('version_stage', '') - self.secret_string = '' class SecretsManagerBackend(BaseBackend): @@ -29,14 +26,7 @@ class SecretsManagerBackend(BaseBackend): def __init__(self, region_name=None, **kwargs): super(SecretsManagerBackend, self).__init__() self.region = region_name - self.secret_id = kwargs.get('secret_id', '') - self.name = kwargs.get('name', '') - self.createdate = int(time.time()) - self.secret_string = '' - self.rotation_enabled = False - self.rotation_lambda_arn = '' - self.auto_rotate_after_days = 0 - self.version_id = '' + self.secrets = {} def reset(self): region_name = self.region @@ -44,36 +34,50 @@ class SecretsManagerBackend(BaseBackend): self.__init__(region_name) def _is_valid_identifier(self, identifier): - return identifier in (self.name, self.secret_id) + return identifier in self.secrets def get_secret_value(self, secret_id, version_id, version_stage): if not self._is_valid_identifier(secret_id): raise ResourceNotFoundException() + secret = self.secrets[secret_id] + response = json.dumps({ - "ARN": secret_arn(self.region, self.secret_id), - "Name": self.name, - "VersionId": "A435958A-D821-4193-B719-B7769357AER4", - "SecretString": self.secret_string, + "ARN": secret_arn(self.region, secret['secret_id']), + "Name": secret['name'], + "VersionId": secret['version_id'], + "SecretString": secret['secret_string'], "VersionStages": [ "AWSCURRENT", ], - "CreatedDate": "2018-05-23 13:16:57.198000" + "CreatedDate": secret['createdate'] }) return response - def create_secret(self, name, secret_string, **kwargs): + def create_secret(self, name, secret_string, tags, **kwargs): - self.secret_string = secret_string - self.secret_id = name - self.name = name + generated_version_id = str(uuid.uuid4()) + + secret = { + 'secret_string': secret_string, + 'secret_id': name, + 'name': name, + 'createdate': int(time.time()), + 'rotation_enabled': False, + 'rotation_lambda_arn': '', + 'auto_rotate_after_days': 0, + 'version_id': generated_version_id, + 'tags': tags + } + + self.secrets[name] = secret response = json.dumps({ "ARN": secret_arn(self.region, name), - "Name": self.name, - "VersionId": "A435958A-D821-4193-B719-B7769357AER4", + "Name": name, + "VersionId": generated_version_id, }) return response @@ -82,26 +86,23 @@ class SecretsManagerBackend(BaseBackend): if not 
self._is_valid_identifier(secret_id): raise ResourceNotFoundException + secret = self.secrets[secret_id] + response = json.dumps({ - "ARN": secret_arn(self.region, self.secret_id), - "Name": self.name, + "ARN": secret_arn(self.region, secret['secret_id']), + "Name": secret['name'], "Description": "", "KmsKeyId": "", - "RotationEnabled": self.rotation_enabled, - "RotationLambdaARN": self.rotation_lambda_arn, + "RotationEnabled": secret['rotation_enabled'], + "RotationLambdaARN": secret['rotation_lambda_arn'], "RotationRules": { - "AutomaticallyAfterDays": self.auto_rotate_after_days + "AutomaticallyAfterDays": secret['auto_rotate_after_days'] }, "LastRotatedDate": None, "LastChangedDate": None, "LastAccessedDate": None, "DeletedDate": None, - "Tags": [ - { - "Key": "", - "Value": "" - }, - ] + "Tags": secret['tags'] }) return response @@ -141,17 +142,19 @@ class SecretsManagerBackend(BaseBackend): ) raise InvalidParameterException(msg) - self.version_id = client_request_token or '' - self.rotation_lambda_arn = rotation_lambda_arn or '' + secret = self.secrets[secret_id] + + secret['version_id'] = client_request_token or '' + secret['rotation_lambda_arn'] = rotation_lambda_arn or '' if rotation_rules: - self.auto_rotate_after_days = rotation_rules.get(rotation_days, 0) - if self.auto_rotate_after_days > 0: - self.rotation_enabled = True + secret['auto_rotate_after_days'] = rotation_rules.get(rotation_days, 0) + if secret['auto_rotate_after_days'] > 0: + secret['rotation_enabled'] = True response = json.dumps({ - "ARN": secret_arn(self.region, self.secret_id), - "Name": self.name, - "VersionId": self.version_id + "ARN": secret_arn(self.region, secret['secret_id']), + "Name": secret['name'], + "VersionId": secret['version_id'] }) return response diff --git a/moto/secretsmanager/responses.py b/moto/secretsmanager/responses.py index b8b6872a8..932e7bfd7 100644 --- a/moto/secretsmanager/responses.py +++ b/moto/secretsmanager/responses.py @@ -19,9 +19,11 @@ class SecretsManagerResponse(BaseResponse): def create_secret(self): name = self._get_param('Name') secret_string = self._get_param('SecretString') + tags = self._get_param('Tags', if_none=[]) return secretsmanager_backends[self.region].create_secret( name=name, - secret_string=secret_string + secret_string=secret_string, + tags=tags ) def get_random_password(self): diff --git a/moto/secretsmanager/utils.py b/moto/secretsmanager/utils.py index 2cb92020a..231fea296 100644 --- a/moto/secretsmanager/utils.py +++ b/moto/secretsmanager/utils.py @@ -52,8 +52,9 @@ def random_password(password_length, exclude_characters, exclude_numbers, def secret_arn(region, secret_id): - return "arn:aws:secretsmanager:{0}:1234567890:secret:{1}-rIjad".format( - region, secret_id) + id_string = ''.join(random.choice(string.ascii_letters) for _ in range(5)) + return "arn:aws:secretsmanager:{0}:1234567890:secret:{1}-{2}".format( + region, secret_id, id_string) def _exclude_characters(password, exclude_characters): diff --git a/moto/server.py b/moto/server.py index ba2470478..5ad02d383 100644 --- a/moto/server.py +++ b/moto/server.py @@ -80,10 +80,13 @@ class DomainDispatcherApplication(object): region = 'us-east-1' if service == 'dynamodb': - dynamo_api_version = environ['HTTP_X_AMZ_TARGET'].split("_")[1].split(".")[0] - # If Newer API version, use dynamodb2 - if dynamo_api_version > "20111205": - host = "dynamodb2" + if environ['HTTP_X_AMZ_TARGET'].startswith('DynamoDBStreams'): + host = 'dynamodbstreams' + else: + dynamo_api_version = 
environ['HTTP_X_AMZ_TARGET'].split("_")[1].split(".")[0] + # If Newer API version, use dynamodb2 + if dynamo_api_version > "20111205": + host = "dynamodb2" else: host = "{service}.{region}.amazonaws.com".format( service=service, region=region) diff --git a/moto/sqs/models.py b/moto/sqs/models.py index f3262a988..1404ded75 100644 --- a/moto/sqs/models.py +++ b/moto/sqs/models.py @@ -534,7 +534,7 @@ class SQSBackend(BaseBackend): break import time - time.sleep(0.001) + time.sleep(0.01) continue previous_result_count = len(result) diff --git a/moto/ssm/models.py b/moto/ssm/models.py index f16a7d981..2f316a3ac 100644 --- a/moto/ssm/models.py +++ b/moto/ssm/models.py @@ -14,10 +14,12 @@ import itertools class Parameter(BaseModel): - def __init__(self, name, value, type, description, keyid, last_modified_date, version): + def __init__(self, name, value, type, description, allowed_pattern, keyid, + last_modified_date, version): self.name = name self.type = type self.description = description + self.allowed_pattern = allowed_pattern self.keyid = keyid self.last_modified_date = last_modified_date self.version = version @@ -58,6 +60,10 @@ class Parameter(BaseModel): if self.keyid: r['KeyId'] = self.keyid + + if self.allowed_pattern: + r['AllowedPattern'] = self.allowed_pattern + return r @@ -291,7 +297,8 @@ class SimpleSystemManagerBackend(BaseBackend): return self._parameters[name] return None - def put_parameter(self, name, description, value, type, keyid, overwrite): + def put_parameter(self, name, description, value, type, allowed_pattern, + keyid, overwrite): previous_parameter = self._parameters.get(name) version = 1 @@ -302,8 +309,8 @@ class SimpleSystemManagerBackend(BaseBackend): return last_modified_date = time.time() - self._parameters[name] = Parameter( - name, value, type, description, keyid, last_modified_date, version) + self._parameters[name] = Parameter(name, value, type, description, + allowed_pattern, keyid, last_modified_date, version) return version def add_tags_to_resource(self, resource_type, resource_id, tags): diff --git a/moto/ssm/responses.py b/moto/ssm/responses.py index eb05e51b6..c47d4127a 100644 --- a/moto/ssm/responses.py +++ b/moto/ssm/responses.py @@ -160,11 +160,12 @@ class SimpleSystemManagerResponse(BaseResponse): description = self._get_param('Description') value = self._get_param('Value') type_ = self._get_param('Type') + allowed_pattern = self._get_param('AllowedPattern') keyid = self._get_param('KeyId') overwrite = self._get_param('Overwrite', False) result = self.ssm_backend.put_parameter( - name, description, value, type_, keyid, overwrite) + name, description, value, type_, allowed_pattern, keyid, overwrite) if result is None: error = { diff --git a/moto/ssm/urls.py b/moto/ssm/urls.py index d22866486..9ac327325 100644 --- a/moto/ssm/urls.py +++ b/moto/ssm/urls.py @@ -3,6 +3,7 @@ from .responses import SimpleSystemManagerResponse url_bases = [ "https?://ssm.(.+).amazonaws.com", + "https?://ssm.(.+).amazonaws.com.cn", ] url_paths = { diff --git a/setup.py b/setup.py index a1b8c5dae..74683836e 100755 --- a/setup.py +++ b/setup.py @@ -1,10 +1,23 @@ #!/usr/bin/env python from __future__ import unicode_literals +import codecs +import os +import re import setuptools from setuptools import setup, find_packages import sys +# Borrowed from pip at https://github.com/pypa/pip/blob/62c27dee45625e1b63d1e023b0656310f276e050/setup.py#L11-L15 +here = os.path.abspath(os.path.dirname(__file__)) + +def read(*parts): + # intentionally *not* adding an encoding option to open, 
See: + # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690 + with codecs.open(os.path.join(here, *parts), 'r') as fp: + return fp.read() + + install_requires = [ "Jinja2>=2.7.3", "boto>=2.36.0", @@ -15,15 +28,17 @@ install_requires = [ "xmltodict", "six>1.9", "werkzeug", - "pyaml", + "PyYAML", "pytz", "python-dateutil<3.0.0,>=2.1", - "python-jose<3.0.0", + "python-jose<4.0.0", "mock", "docker>=2.5.1", - "jsondiff==1.1.1", + "jsondiff==1.1.2", "aws-xray-sdk!=0.96,>=0.93", "responses>=0.9.0", + "idna<2.8,>=2.5", + "cfn-lint", ] extras_require = { @@ -43,6 +58,8 @@ setup( version='1.3.7', description='A library that allows your python tests to easily' ' mock out the boto library', + long_description=read('README.md'), + long_description_content_type='text/markdown', author='Steve Pulec', author_email='spulec@gmail.com', url='https://github.com/spulec/moto', diff --git a/tests/test_autoscaling/test_autoscaling.py b/tests/test_autoscaling/test_autoscaling.py index f86ca2b81..b1a65fb7e 100644 --- a/tests/test_autoscaling/test_autoscaling.py +++ b/tests/test_autoscaling/test_autoscaling.py @@ -710,6 +710,7 @@ def test_create_autoscaling_group_boto3(): 'PropagateAtLaunch': False }], VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=False, ) response['ResponseMetadata']['HTTPStatusCode'].should.equal(200) @@ -728,13 +729,48 @@ def test_describe_autoscaling_groups_boto3(): MaxSize=20, DesiredCapacity=5, VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=True, ) + response = client.describe_auto_scaling_groups( AutoScalingGroupNames=["test_asg"] ) response['ResponseMetadata']['HTTPStatusCode'].should.equal(200) - response['AutoScalingGroups'][0][ - 'AutoScalingGroupName'].should.equal('test_asg') + group = response['AutoScalingGroups'][0] + group['AutoScalingGroupName'].should.equal('test_asg') + group['NewInstancesProtectedFromScaleIn'].should.equal(True) + group['Instances'][0]['ProtectedFromScaleIn'].should.equal(True) + + +@mock_autoscaling +def test_describe_autoscaling_instances_boto3(): + mocked_networking = setup_networking() + client = boto3.client('autoscaling', region_name='us-east-1') + _ = client.create_launch_configuration( + LaunchConfigurationName='test_launch_configuration' + ) + _ = client.create_auto_scaling_group( + AutoScalingGroupName='test_asg', + LaunchConfigurationName='test_launch_configuration', + MinSize=0, + MaxSize=20, + DesiredCapacity=5, + VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=True, + ) + + response = client.describe_auto_scaling_groups( + AutoScalingGroupNames=["test_asg"] + ) + instance_ids = [ + instance['InstanceId'] + for instance in response['AutoScalingGroups'][0]['Instances'] + ] + + response = client.describe_auto_scaling_instances(InstanceIds=instance_ids) + for instance in response['AutoScalingInstances']: + instance['AutoScalingGroupName'].should.equal('test_asg') + instance['ProtectedFromScaleIn'].should.equal(True) @mock_autoscaling @@ -751,17 +787,21 @@ def test_update_autoscaling_group_boto3(): MaxSize=20, DesiredCapacity=5, VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=True, ) - response = client.update_auto_scaling_group( + _ = client.update_auto_scaling_group( AutoScalingGroupName='test_asg', MinSize=1, + NewInstancesProtectedFromScaleIn=False, ) response = client.describe_auto_scaling_groups( AutoScalingGroupNames=["test_asg"] ) - 
response['AutoScalingGroups'][0]['MinSize'].should.equal(1) + group = response['AutoScalingGroups'][0] + group['MinSize'].should.equal(1) + group['NewInstancesProtectedFromScaleIn'].should.equal(False) @mock_autoscaling @@ -992,9 +1032,7 @@ def test_attach_one_instance(): 'PropagateAtLaunch': True }], VPCZoneIdentifier=mocked_networking['subnet1'], - ) - response = client.describe_auto_scaling_groups( - AutoScalingGroupNames=['test_asg'] + NewInstancesProtectedFromScaleIn=True, ) ec2 = boto3.resource('ec2', 'us-east-1') @@ -1009,7 +1047,11 @@ def test_attach_one_instance(): response = client.describe_auto_scaling_groups( AutoScalingGroupNames=['test_asg'] ) - response['AutoScalingGroups'][0]['Instances'].should.have.length_of(3) + instances = response['AutoScalingGroups'][0]['Instances'] + instances.should.have.length_of(3) + for instance in instances: + instance['ProtectedFromScaleIn'].should.equal(True) + @mock_autoscaling @mock_ec2 @@ -1100,3 +1142,111 @@ def test_suspend_processes(): launch_suspended = True assert launch_suspended is True + +@mock_autoscaling +def test_set_instance_protection(): + mocked_networking = setup_networking() + client = boto3.client('autoscaling', region_name='us-east-1') + _ = client.create_launch_configuration( + LaunchConfigurationName='test_launch_configuration' + ) + _ = client.create_auto_scaling_group( + AutoScalingGroupName='test_asg', + LaunchConfigurationName='test_launch_configuration', + MinSize=0, + MaxSize=20, + DesiredCapacity=5, + VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=False, + ) + + response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg']) + instance_ids = [ + instance['InstanceId'] + for instance in response['AutoScalingGroups'][0]['Instances'] + ] + protected = instance_ids[:3] + + _ = client.set_instance_protection( + AutoScalingGroupName='test_asg', + InstanceIds=protected, + ProtectedFromScaleIn=True, + ) + + response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg']) + for instance in response['AutoScalingGroups'][0]['Instances']: + instance['ProtectedFromScaleIn'].should.equal( + instance['InstanceId'] in protected + ) + + +@mock_autoscaling +def test_set_desired_capacity_up_boto3(): + mocked_networking = setup_networking() + client = boto3.client('autoscaling', region_name='us-east-1') + _ = client.create_launch_configuration( + LaunchConfigurationName='test_launch_configuration' + ) + _ = client.create_auto_scaling_group( + AutoScalingGroupName='test_asg', + LaunchConfigurationName='test_launch_configuration', + MinSize=0, + MaxSize=20, + DesiredCapacity=5, + VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=True, + ) + + _ = client.set_desired_capacity( + AutoScalingGroupName='test_asg', + DesiredCapacity=10, + ) + + response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg']) + instances = response['AutoScalingGroups'][0]['Instances'] + instances.should.have.length_of(10) + for instance in instances: + instance['ProtectedFromScaleIn'].should.equal(True) + + +@mock_autoscaling +def test_set_desired_capacity_down_boto3(): + mocked_networking = setup_networking() + client = boto3.client('autoscaling', region_name='us-east-1') + _ = client.create_launch_configuration( + LaunchConfigurationName='test_launch_configuration' + ) + _ = client.create_auto_scaling_group( + AutoScalingGroupName='test_asg', + LaunchConfigurationName='test_launch_configuration', + MinSize=0, + MaxSize=20, + 
DesiredCapacity=5, + VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=True, + ) + + response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg']) + instance_ids = [ + instance['InstanceId'] + for instance in response['AutoScalingGroups'][0]['Instances'] + ] + unprotected, protected = instance_ids[:2], instance_ids[2:] + + _ = client.set_instance_protection( + AutoScalingGroupName='test_asg', + InstanceIds=unprotected, + ProtectedFromScaleIn=False, + ) + + _ = client.set_desired_capacity( + AutoScalingGroupName='test_asg', + DesiredCapacity=1, + ) + + response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg']) + group = response['AutoScalingGroups'][0] + group['DesiredCapacity'].should.equal(1) + instance_ids = {instance['InstanceId'] for instance in group['Instances']} + set(protected).should.equal(instance_ids) + set(unprotected).should_not.be.within(instance_ids) # only unprotected killed diff --git a/tests/test_cloudformation/test_cloudformation_stack_crud.py b/tests/test_cloudformation/test_cloudformation_stack_crud.py index 801faf8a1..b7906632b 100644 --- a/tests/test_cloudformation/test_cloudformation_stack_crud.py +++ b/tests/test_cloudformation/test_cloudformation_stack_crud.py @@ -266,9 +266,9 @@ def test_delete_stack_by_name(): template_body=dummy_template_json, ) - conn.list_stacks().should.have.length_of(1) + conn.describe_stacks().should.have.length_of(1) conn.delete_stack("test_stack") - conn.list_stacks().should.have.length_of(0) + conn.describe_stacks().should.have.length_of(0) @mock_cloudformation_deprecated @@ -279,9 +279,9 @@ def test_delete_stack_by_id(): template_body=dummy_template_json, ) - conn.list_stacks().should.have.length_of(1) + conn.describe_stacks().should.have.length_of(1) conn.delete_stack(stack_id) - conn.list_stacks().should.have.length_of(0) + conn.describe_stacks().should.have.length_of(0) with assert_raises(BotoServerError): conn.describe_stacks("test_stack") @@ -296,9 +296,9 @@ def test_delete_stack_with_resource_missing_delete_attr(): template_body=dummy_template_json3, ) - conn.list_stacks().should.have.length_of(1) + conn.describe_stacks().should.have.length_of(1) conn.delete_stack("test_stack") - conn.list_stacks().should.have.length_of(0) + conn.describe_stacks().should.have.length_of(0) @mock_cloudformation_deprecated diff --git a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py index 9bfae6174..4585da056 100644 --- a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py +++ b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py @@ -391,6 +391,9 @@ def test_create_change_set_from_s3_url(): TemplateURL=key_url, ChangeSetName='NewChangeSet', ChangeSetType='CREATE', + Tags=[ + {'Key': 'tag-key', 'Value': 'tag-value'} + ], ) assert 'arn:aws:cloudformation:us-west-1:123456789:changeSet/NewChangeSet/' in response['Id'] assert 'arn:aws:cloudformation:us-east-1:123456789:stack/NewStack' in response['StackId'] @@ -532,6 +535,21 @@ def test_delete_stack_by_name(): cf_conn.describe_stacks()['Stacks'].should.have.length_of(0) +@mock_cloudformation +def test_delete_stack(): + cf = boto3.client('cloudformation', region_name='us-east-1') + cf.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_json, + ) + + cf.delete_stack( + StackName="test_stack", + ) + stacks = cf.list_stacks() + assert stacks['StackSummaries'][0]['StackStatus'] == 'DELETE_COMPLETE' + + 
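The move from list_stacks to describe_stacks in the deprecated-boto tests above follows from the deletion semantics exercised by test_delete_stack: as in real CloudFormation, a deleted stack disappears from DescribeStacks but is still reported by ListStacks with status DELETE_COMPLETE, so counting ListStacks results no longer proves a stack is gone. A minimal sketch of the contrast (hypothetical test; the inline template stands in for this module's dummy_template_json):

import json

import boto3
from moto import mock_cloudformation

minimal_template_json = json.dumps({
    "AWSTemplateFormatVersion": "2010-09-09",
    "Resources": {
        "Instance": {
            "Type": "AWS::EC2::Instance",
            "Properties": {"ImageId": "ami-d3adb33f"},
        },
    },
})


@mock_cloudformation
def test_deleted_stack_visibility():
    cf = boto3.client('cloudformation', region_name='us-east-1')
    cf.create_stack(StackName='demo_stack', TemplateBody=minimal_template_json)
    cf.delete_stack(StackName='demo_stack')

    # Gone from DescribeStacks...
    assert cf.describe_stacks()['Stacks'] == []
    # ...but still visible to ListStacks, marked DELETE_COMPLETE.
    summaries = cf.list_stacks()['StackSummaries']
    assert summaries[0]['StackStatus'] == 'DELETE_COMPLETE'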
@mock_cloudformation def test_describe_deleted_stack(): cf_conn = boto3.client('cloudformation', region_name='us-east-1') diff --git a/tests/test_cloudformation/test_validate.py b/tests/test_cloudformation/test_validate.py new file mode 100644 index 000000000..e2c3af05d --- /dev/null +++ b/tests/test_cloudformation/test_validate.py @@ -0,0 +1,115 @@ +from collections import OrderedDict +import json +import yaml +import os +import boto3 +from nose.tools import raises +import botocore + + +from moto.cloudformation.exceptions import ValidationError +from moto.cloudformation.models import FakeStack +from moto.cloudformation.parsing import resource_class_from_type, parse_condition, Export +from moto.sqs.models import Queue +from moto.s3.models import FakeBucket +from moto.cloudformation.utils import yaml_tag_constructor +from boto.cloudformation.stack import Output +from moto import mock_cloudformation, mock_s3, mock_sqs, mock_ec2 + +json_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack 1", + "Resources": { + "EC2Instance1": { + "Type": "AWS::EC2::Instance", + "Properties": { + "ImageId": "ami-d3adb33f", + "KeyName": "dummy", + "InstanceType": "t2.micro", + "Tags": [ + { + "Key": "Description", + "Value": "Test tag" + }, + { + "Key": "Name", + "Value": "Name tag for tests" + } + ] + } + } + } +} + +# One resource is required +json_bad_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack 1" +} + +dummy_template_json = json.dumps(json_template) +dummy_bad_template_json = json.dumps(json_bad_template) + + +@mock_cloudformation +def test_boto3_json_validate_successful(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + response = cf_conn.validate_template( + TemplateBody=dummy_template_json, + ) + assert response['Description'] == "Stack 1" + assert response['Parameters'] == [] + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 + +@mock_cloudformation +def test_boto3_json_invalid_missing_resource(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + try: + cf_conn.validate_template( + TemplateBody=dummy_bad_template_json, + ) + assert False + except botocore.exceptions.ClientError as e: + assert str(e) == 'An error occurred (ValidationError) when calling the ValidateTemplate operation: Stack' \ + ' with id Missing top level item Resources to file module does not exist' + assert True + + +yaml_template = """ + AWSTemplateFormatVersion: '2010-09-09' + Description: Simple CloudFormation Test Template + Resources: + S3Bucket: + Type: AWS::S3::Bucket + Properties: + AccessControl: PublicRead + BucketName: cf-test-bucket-1 +""" + +yaml_bad_template = """ + AWSTemplateFormatVersion: '2010-09-09' + Description: Simple CloudFormation Test Template +""" + +@mock_cloudformation +def test_boto3_yaml_validate_successful(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + response = cf_conn.validate_template( + TemplateBody=yaml_template, + ) + assert response['Description'] == "Simple CloudFormation Test Template" + assert response['Parameters'] == [] + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 + +@mock_cloudformation +def test_boto3_yaml_invalid_missing_resource(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + try: + cf_conn.validate_template( + TemplateBody=yaml_bad_template, + ) + assert False + except botocore.exceptions.ClientError as e: + assert str(e) == 'An error occurred (ValidationError) when calling the ValidateTemplate 
operation: Stack' \ ' with id Missing top level item Resources to file module does not exist' + assert True diff --git a/tests/test_cognitoidp/test_cognitoidp.py b/tests/test_cognitoidp/test_cognitoidp.py index f72a44762..0ef082d5c 100644 --- a/tests/test_cognitoidp/test_cognitoidp.py +++ b/tests/test_cognitoidp/test_cognitoidp.py @@ -1,14 +1,18 @@ from __future__ import unicode_literals -import boto3 import json import os +import random import uuid +import boto3 +# noinspection PyUnresolvedReferences +import sure # noqa +from botocore.exceptions import ClientError from jose import jws +from nose.tools import assert_raises from moto import mock_cognitoidp -import sure # noqa @mock_cognitoidp @@ -41,6 +45,56 @@ def test_list_user_pools(): result["UserPools"][0]["Name"].should.equal(name) +@mock_cognitoidp +def test_list_user_pools_returns_max_items(): + conn = boto3.client("cognito-idp", "us-west-2") + + # Given 10 user pools + pool_count = 10 + for i in range(pool_count): + conn.create_user_pool(PoolName=str(uuid.uuid4())) + + max_results = 5 + result = conn.list_user_pools(MaxResults=max_results) + result["UserPools"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + +@mock_cognitoidp +def test_list_user_pools_returns_next_tokens(): + conn = boto3.client("cognito-idp", "us-west-2") + + # Given 10 user pools + pool_count = 10 + for i in range(pool_count): + conn.create_user_pool(PoolName=str(uuid.uuid4())) + + max_results = 5 + result = conn.list_user_pools(MaxResults=max_results) + result["UserPools"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + next_token = result["NextToken"] + result_2 = conn.list_user_pools(MaxResults=max_results, NextToken=next_token) + result_2["UserPools"].should.have.length_of(max_results) + result_2.shouldnt.have.key("NextToken") + + +@mock_cognitoidp +def test_list_user_pools_when_max_items_more_than_total_items(): + conn = boto3.client("cognito-idp", "us-west-2") + + # Given 10 user pools + pool_count = 10 + for i in range(pool_count): + conn.create_user_pool(PoolName=str(uuid.uuid4())) + + max_results = pool_count + 5 + result = conn.list_user_pools(MaxResults=max_results) + result["UserPools"].should.have.length_of(pool_count) + result.shouldnt.have.key("NextToken") + + @mock_cognitoidp def test_describe_user_pool(): conn = boto3.client("cognito-idp", "us-west-2") @@ -140,6 +194,67 @@ def test_list_user_pool_clients(): result["UserPoolClients"][0]["ClientName"].should.equal(client_name) +@mock_cognitoidp +def test_list_user_pool_clients_returns_max_items(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 user pool clients + client_count = 10 + for i in range(client_count): + client_name = str(uuid.uuid4()) + conn.create_user_pool_client(UserPoolId=user_pool_id, + ClientName=client_name) + max_results = 5 + result = conn.list_user_pool_clients(UserPoolId=user_pool_id, + MaxResults=max_results) + result["UserPoolClients"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + +@mock_cognitoidp +def test_list_user_pool_clients_returns_next_tokens(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 user pool clients + client_count = 10 + for i in range(client_count): + client_name = str(uuid.uuid4()) + conn.create_user_pool_client(UserPoolId=user_pool_id, +
ClientName=client_name) + max_results = 5 + result = conn.list_user_pool_clients(UserPoolId=user_pool_id, + MaxResults=max_results) + result["UserPoolClients"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + next_token = result["NextToken"] + result_2 = conn.list_user_pool_clients(UserPoolId=user_pool_id, + MaxResults=max_results, + NextToken=next_token) + result_2["UserPoolClients"].should.have.length_of(max_results) + result_2.shouldnt.have.key("NextToken") + + +@mock_cognitoidp +def test_list_user_pool_clients_when_max_items_more_than_total_items(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 user pool clients + client_count = 10 + for i in range(client_count): + client_name = str(uuid.uuid4()) + conn.create_user_pool_client(UserPoolId=user_pool_id, + ClientName=client_name) + max_results = client_count + 5 + result = conn.list_user_pool_clients(UserPoolId=user_pool_id, + MaxResults=max_results) + result["UserPoolClients"].should.have.length_of(client_count) + result.shouldnt.have.key("NextToken") + + @mock_cognitoidp def test_describe_user_pool_client(): conn = boto3.client("cognito-idp", "us-west-2") @@ -264,6 +379,83 @@ def test_list_identity_providers(): result["Providers"][0]["ProviderType"].should.equal(provider_type) +@mock_cognitoidp +def test_list_identity_providers_returns_max_items(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 identity providers linked to a user pool + identity_provider_count = 10 + for i in range(identity_provider_count): + provider_name = str(uuid.uuid4()) + provider_type = "Facebook" + conn.create_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ProviderType=provider_type, + ProviderDetails={}, + ) + + max_results = 5 + result = conn.list_identity_providers(UserPoolId=user_pool_id, + MaxResults=max_results) + result["Providers"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + +@mock_cognitoidp +def test_list_identity_providers_returns_next_tokens(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 identity providers linked to a user pool + identity_provider_count = 10 + for i in range(identity_provider_count): + provider_name = str(uuid.uuid4()) + provider_type = "Facebook" + conn.create_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ProviderType=provider_type, + ProviderDetails={}, + ) + + max_results = 5 + result = conn.list_identity_providers(UserPoolId=user_pool_id, MaxResults=max_results) + result["Providers"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + next_token = result["NextToken"] + result_2 = conn.list_identity_providers(UserPoolId=user_pool_id, + MaxResults=max_results, + NextToken=next_token) + result_2["Providers"].should.have.length_of(max_results) + result_2.shouldnt.have.key("NextToken") + + +@mock_cognitoidp +def test_list_identity_providers_when_max_items_more_than_total_items(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 identity providers linked to a user pool + identity_provider_count = 10 + for i in range(identity_provider_count): + provider_name = str(uuid.uuid4()) + 
provider_type = "Facebook" + conn.create_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ProviderType=provider_type, + ProviderDetails={}, + ) + + max_results = identity_provider_count + 5 + result = conn.list_identity_providers(UserPoolId=user_pool_id, MaxResults=max_results) + result["Providers"].should.have.length_of(identity_provider_count) + result.shouldnt.have.key("NextToken") + + @mock_cognitoidp def test_describe_identity_providers(): conn = boto3.client("cognito-idp", "us-west-2") @@ -323,6 +515,245 @@ def test_delete_identity_providers(): caught.should.be.true +@mock_cognitoidp +def test_create_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + description = str(uuid.uuid4()) + role_arn = "arn:aws:iam:::role/my-iam-role" + precedence = random.randint(0, 100000) + + result = conn.create_group( + GroupName=group_name, + UserPoolId=user_pool_id, + Description=description, + RoleArn=role_arn, + Precedence=precedence, + ) + + result["Group"]["GroupName"].should.equal(group_name) + result["Group"]["UserPoolId"].should.equal(user_pool_id) + result["Group"]["Description"].should.equal(description) + result["Group"]["RoleArn"].should.equal(role_arn) + result["Group"]["Precedence"].should.equal(precedence) + result["Group"]["LastModifiedDate"].should.be.a("datetime.datetime") + result["Group"]["CreationDate"].should.be.a("datetime.datetime") + + +@mock_cognitoidp +def test_create_group_with_duplicate_name_raises_error(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + with assert_raises(ClientError) as cm: + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + cm.exception.operation_name.should.equal('CreateGroup') + cm.exception.response['Error']['Code'].should.equal('GroupExistsException') + cm.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) + + +@mock_cognitoidp +def test_get_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + result = conn.get_group(GroupName=group_name, UserPoolId=user_pool_id) + + result["Group"]["GroupName"].should.equal(group_name) + result["Group"]["UserPoolId"].should.equal(user_pool_id) + result["Group"]["LastModifiedDate"].should.be.a("datetime.datetime") + result["Group"]["CreationDate"].should.be.a("datetime.datetime") + + +@mock_cognitoidp +def test_list_groups(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + result = conn.list_groups(UserPoolId=user_pool_id) + + result["Groups"].should.have.length_of(1) + result["Groups"][0]["GroupName"].should.equal(group_name) + + +@mock_cognitoidp +def test_delete_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + result = 
conn.delete_group(GroupName=group_name, UserPoolId=user_pool_id) + list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected + + with assert_raises(ClientError) as cm: + conn.get_group(GroupName=group_name, UserPoolId=user_pool_id) + cm.exception.response['Error']['Code'].should.equal('ResourceNotFoundException') + + +@mock_cognitoidp +def test_admin_add_user_to_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + result = conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected + + +@mock_cognitoidp +def test_admin_add_user_to_group_again_is_noop(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + + +@mock_cognitoidp +def test_list_users_in_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + + result = conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name) + + result["Users"].should.have.length_of(1) + result["Users"][0]["Username"].should.equal(username) + + +@mock_cognitoidp +def test_list_users_in_group_ignores_deleted_user(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + username2 = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username2) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username2, GroupName=group_name) + conn.admin_delete_user(UserPoolId=user_pool_id, Username=username) + + result = conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name) + + result["Users"].should.have.length_of(1) + result["Users"][0]["Username"].should.equal(username2) + + +@mock_cognitoidp +def test_admin_list_groups_for_user(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + 
conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + + result = conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id) + + result["Groups"].should.have.length_of(1) + result["Groups"][0]["GroupName"].should.equal(group_name) + + +@mock_cognitoidp +def test_admin_list_groups_for_user_ignores_deleted_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + group_name2 = str(uuid.uuid4()) + conn.create_group(GroupName=group_name2, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name2) + conn.delete_group(GroupName=group_name, UserPoolId=user_pool_id) + + result = conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id) + + result["Groups"].should.have.length_of(1) + result["Groups"][0]["GroupName"].should.equal(group_name2) + + +@mock_cognitoidp +def test_admin_remove_user_from_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + + result = conn.admin_remove_user_from_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected + conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name) \ + ["Users"].should.have.length_of(0) + conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id) \ + ["Groups"].should.have.length_of(0) + + +@mock_cognitoidp +def test_admin_remove_user_from_group_again_is_noop(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + + @mock_cognitoidp def test_admin_create_user(): conn = boto3.client("cognito-idp", "us-west-2") @@ -396,6 +827,62 @@ def test_list_users(): result["Users"][0]["Username"].should.equal(username) +@mock_cognitoidp +def test_list_users_returns_limit_items(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 users + user_count = 10 + for i in range(user_count): + conn.admin_create_user(UserPoolId=user_pool_id, + Username=str(uuid.uuid4())) + max_results = 5 + result = conn.list_users(UserPoolId=user_pool_id, Limit=max_results) + 
result["Users"].should.have.length_of(max_results) + result.should.have.key("PaginationToken") + + +@mock_cognitoidp +def test_list_users_returns_pagination_tokens(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 users + user_count = 10 + for i in range(user_count): + conn.admin_create_user(UserPoolId=user_pool_id, + Username=str(uuid.uuid4())) + + max_results = 5 + result = conn.list_users(UserPoolId=user_pool_id, Limit=max_results) + result["Users"].should.have.length_of(max_results) + result.should.have.key("PaginationToken") + + next_token = result["PaginationToken"] + result_2 = conn.list_users(UserPoolId=user_pool_id, + Limit=max_results, PaginationToken=next_token) + result_2["Users"].should.have.length_of(max_results) + result_2.shouldnt.have.key("PaginationToken") + + +@mock_cognitoidp +def test_list_users_when_limit_more_than_total_items(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 users + user_count = 10 + for i in range(user_count): + conn.admin_create_user(UserPoolId=user_pool_id, + Username=str(uuid.uuid4())) + + max_results = user_count + 5 + result = conn.list_users(UserPoolId=user_pool_id, Limit=max_results) + result["Users"].should.have.length_of(user_count) + result.shouldnt.have.key("PaginationToken") + + @mock_cognitoidp def test_admin_disable_user(): conn = boto3.client("cognito-idp", "us-west-2") diff --git a/tests/test_dynamodb2/test_dynamodb.py b/tests/test_dynamodb2/test_dynamodb.py index afc919dd7..ea9be19c1 100644 --- a/tests/test_dynamodb2/test_dynamodb.py +++ b/tests/test_dynamodb2/test_dynamodb.py @@ -815,6 +815,16 @@ def test_scan_filter(): ) assert response['Count'] == 1 + response = table.scan( + FilterExpression=Attr('app').ne('app2') + ) + assert response['Count'] == 1 + + response = table.scan( + FilterExpression=Attr('app').ne('app1') + ) + assert response['Count'] == 0 + @mock_dynamodb2 def test_scan_filter2(): @@ -872,6 +882,26 @@ def test_scan_filter3(): ) assert response['Count'] == 1 + response = table.scan( + FilterExpression=Attr('active').ne(True) + ) + assert response['Count'] == 0 + + response = table.scan( + FilterExpression=Attr('active').ne(False) + ) + assert response['Count'] == 1 + + response = table.scan( + FilterExpression=Attr('app').ne(1) + ) + assert response['Count'] == 0 + + response = table.scan( + FilterExpression=Attr('app').ne(2) + ) + assert response['Count'] == 1 + @mock_dynamodb2 def test_scan_filter4(): @@ -1000,6 +1030,11 @@ def test_delete_item(): response = table.scan() assert response['Count'] == 2 + # Test ReturnValues validation + with assert_raises(ClientError) as ex: + table.delete_item(Key={'client': 'client1', 'app': 'app1'}, + ReturnValues='ALL_NEW') + # Test deletion and returning old value response = table.delete_item(Key={'client': 'client1', 'app': 'app1'}, ReturnValues='ALL_OLD') response['Attributes'].should.contain('client') @@ -1246,6 +1281,81 @@ def test_update_if_not_exists(): assert resp['Items'][0]['created_at'] == 123 +# https://github.com/spulec/moto/issues/1937 +@mock_dynamodb2 +def test_update_return_attributes(): + dynamodb = boto3.client('dynamodb', region_name='us-east-1') + + dynamodb.create_table( + TableName='moto-test', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}], + 
ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1} + ) + + def update(col, to, rv): + return dynamodb.update_item( + TableName='moto-test', + Key={'id': {'S': 'foo'}}, + AttributeUpdates={col: {'Value': {'S': to}, 'Action': 'PUT'}}, + ReturnValues=rv + ) + + r = update('col1', 'val1', 'ALL_NEW') + assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}} + + r = update('col1', 'val2', 'ALL_OLD') + assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}} + + r = update('col2', 'val3', 'UPDATED_NEW') + assert r['Attributes'] == {'col2': {'S': 'val3'}} + + r = update('col2', 'val4', 'UPDATED_OLD') + assert r['Attributes'] == {'col2': {'S': 'val3'}} + + r = update('col1', 'val5', 'NONE') + assert r['Attributes'] == {} + + with assert_raises(ClientError) as ex: + r = update('col1', 'val6', 'WRONG') + + +@mock_dynamodb2 +def test_put_return_attributes(): + dynamodb = boto3.client('dynamodb', region_name='us-east-1') + + dynamodb.create_table( + TableName='moto-test', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1} + ) + + r = dynamodb.put_item( + TableName='moto-test', + Item={'id': {'S': 'foo'}, 'col1': {'S': 'val1'}}, + ReturnValues='NONE' + ) + assert 'Attributes' not in r + + r = dynamodb.put_item( + TableName='moto-test', + Item={'id': {'S': 'foo'}, 'col1': {'S': 'val2'}}, + ReturnValues='ALL_OLD' + ) + assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}} + + with assert_raises(ClientError) as ex: + dynamodb.put_item( + TableName='moto-test', + Item={'id': {'S': 'foo'}, 'col1': {'S': 'val3'}}, + ReturnValues='ALL_NEW' + ) + ex.exception.response['Error']['Code'].should.equal('ValidationException') + ex.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) + ex.exception.response['Error']['Message'].should.equal('Return values set to invalid value') + + @mock_dynamodb2 def test_query_global_secondary_index_when_created_via_update_table_resource(): dynamodb = boto3.resource('dynamodb', region_name='us-east-1') @@ -1336,3 +1446,62 @@ def test_query_global_secondary_index_when_created_via_update_table_resource(): assert len(forum_and_subject_items) == 1 assert forum_and_subject_items[0] == {'user_id': Decimal('1'), 'forum_name': 'cats', 'subject': 'my pet is the cutest'} + + +@mock_dynamodb2 +def test_dynamodb_streams_1(): + conn = boto3.client('dynamodb', region_name='us-east-1') + + resp = conn.create_table( + TableName='test-streams', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}, + StreamSpecification={ + 'StreamEnabled': True, + 'StreamViewType': 'NEW_AND_OLD_IMAGES' + } + ) + + assert 'StreamSpecification' in resp['TableDescription'] + assert resp['TableDescription']['StreamSpecification'] == { + 'StreamEnabled': True, + 'StreamViewType': 'NEW_AND_OLD_IMAGES' + } + assert 'LatestStreamLabel' in resp['TableDescription'] + assert 'LatestStreamArn' in resp['TableDescription'] + + resp = conn.delete_table(TableName='test-streams') + + assert 'StreamSpecification' in resp['TableDescription'] + + +@mock_dynamodb2 +def test_dynamodb_streams_2(): + conn = boto3.client('dynamodb', region_name='us-east-1') + + resp = conn.create_table( + TableName='test-stream-update', + KeySchema=[{'AttributeName': 'id', 
'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}, + ) + + assert 'StreamSpecification' not in resp['TableDescription'] + + resp = conn.update_table( + TableName='test-stream-update', + StreamSpecification={ + 'StreamEnabled': True, + 'StreamViewType': 'NEW_IMAGE' + } + ) + + assert 'StreamSpecification' in resp['TableDescription'] + assert resp['TableDescription']['StreamSpecification'] == { + 'StreamEnabled': True, + 'StreamViewType': 'NEW_IMAGE' + } + assert 'LatestStreamLabel' in resp['TableDescription'] + assert 'LatestStreamArn' in resp['TableDescription'] + diff --git a/tests/test_dynamodbstreams/test_dynamodbstreams.py b/tests/test_dynamodbstreams/test_dynamodbstreams.py new file mode 100644 index 000000000..b60c21053 --- /dev/null +++ b/tests/test_dynamodbstreams/test_dynamodbstreams.py @@ -0,0 +1,234 @@ +from __future__ import unicode_literals, print_function + +from nose.tools import assert_raises + +import boto3 +from moto import mock_dynamodb2, mock_dynamodbstreams + + +class TestCore(): + stream_arn = None + mocks = [] + + def setup(self): + self.mocks = [mock_dynamodb2(), mock_dynamodbstreams()] + for m in self.mocks: + m.start() + + # create a table with a stream + conn = boto3.client('dynamodb', region_name='us-east-1') + + resp = conn.create_table( + TableName='test-streams', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', + 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, + 'WriteCapacityUnits': 1}, + StreamSpecification={ + 'StreamEnabled': True, + 'StreamViewType': 'NEW_AND_OLD_IMAGES' + } + ) + self.stream_arn = resp['TableDescription']['LatestStreamArn'] + + def teardown(self): + conn = boto3.client('dynamodb', region_name='us-east-1') + conn.delete_table(TableName='test-streams') + self.stream_arn = None + + for m in self.mocks: + m.stop() + + + def test_verify_stream(self): + conn = boto3.client('dynamodb', region_name='us-east-1') + resp = conn.describe_table(TableName='test-streams') + assert 'LatestStreamArn' in resp['Table'] + + def test_describe_stream(self): + conn = boto3.client('dynamodbstreams', region_name='us-east-1') + + resp = conn.describe_stream(StreamArn=self.stream_arn) + assert 'StreamDescription' in resp + desc = resp['StreamDescription'] + assert desc['StreamArn'] == self.stream_arn + assert desc['TableName'] == 'test-streams' + + def test_list_streams(self): + conn = boto3.client('dynamodbstreams', region_name='us-east-1') + + resp = conn.list_streams() + assert resp['Streams'][0]['StreamArn'] == self.stream_arn + + resp = conn.list_streams(TableName='no-stream') + assert not resp['Streams'] + + def test_get_shard_iterator(self): + conn = boto3.client('dynamodbstreams', region_name='us-east-1') + + resp = conn.describe_stream(StreamArn=self.stream_arn) + shard_id = resp['StreamDescription']['Shards'][0]['ShardId'] + + resp = conn.get_shard_iterator( + StreamArn=self.stream_arn, + ShardId=shard_id, + ShardIteratorType='TRIM_HORIZON' + ) + assert 'ShardIterator' in resp + + def test_get_records_empty(self): + conn = boto3.client('dynamodbstreams', region_name='us-east-1') + + resp = conn.describe_stream(StreamArn=self.stream_arn) + shard_id = resp['StreamDescription']['Shards'][0]['ShardId'] + + resp = conn.get_shard_iterator( + StreamArn=self.stream_arn, + ShardId=shard_id, + ShardIteratorType='LATEST' + ) + iterator_id = resp['ShardIterator'] + + 
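# A LATEST iterator begins after all existing records, so this first read should come back empty. +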
resp = conn.get_records(ShardIterator=iterator_id) + assert 'Records' in resp + assert len(resp['Records']) == 0 + + def test_get_records_seq(self): + conn = boto3.client('dynamodb', region_name='us-east-1') + + conn.put_item( + TableName='test-streams', + Item={ + 'id': {'S': 'entry1'}, + 'first_col': {'S': 'foo'} + } + ) + conn.put_item( + TableName='test-streams', + Item={ + 'id': {'S': 'entry1'}, + 'first_col': {'S': 'bar'}, + 'second_col': {'S': 'baz'} + } + ) + conn.delete_item( + TableName='test-streams', + Key={'id': {'S': 'entry1'}} + ) + + conn = boto3.client('dynamodbstreams', region_name='us-east-1') + + resp = conn.describe_stream(StreamArn=self.stream_arn) + shard_id = resp['StreamDescription']['Shards'][0]['ShardId'] + + resp = conn.get_shard_iterator( + StreamArn=self.stream_arn, + ShardId=shard_id, + ShardIteratorType='TRIM_HORIZON' + ) + iterator_id = resp['ShardIterator'] + + resp = conn.get_records(ShardIterator=iterator_id) + assert len(resp['Records']) == 3 + assert resp['Records'][0]['eventName'] == 'INSERT' + assert resp['Records'][1]['eventName'] == 'MODIFY' + assert resp['Records'][2]['eventName'] == 'DELETE' + + # now try fetching from the next shard iterator, it should be + # empty + resp = conn.get_records(ShardIterator=resp['NextShardIterator']) + assert len(resp['Records']) == 0 + + +class TestEdges(): + mocks = [] + + def setup(self): + self.mocks = [mock_dynamodb2(), mock_dynamodbstreams()] + for m in self.mocks: + m.start() + + def teardown(self): + for m in self.mocks: + m.stop() + + + def test_enable_stream_on_table(self): + conn = boto3.client('dynamodb', region_name='us-east-1') + resp = conn.create_table( + TableName='test-streams', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', + 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, + 'WriteCapacityUnits': 1} + ) + assert 'StreamSpecification' not in resp['TableDescription'] + + resp = conn.update_table( + TableName='test-streams', + StreamSpecification={ + 'StreamViewType': 'KEYS_ONLY' + } + ) + assert 'StreamSpecification' in resp['TableDescription'] + assert resp['TableDescription']['StreamSpecification'] == { + 'StreamEnabled': True, + 'StreamViewType': 'KEYS_ONLY' + } + assert 'LatestStreamLabel' in resp['TableDescription'] + + # now try to enable it again + with assert_raises(conn.exceptions.ResourceInUseException): + resp = conn.update_table( + TableName='test-streams', + StreamSpecification={ + 'StreamViewType': 'OLD_IMAGES' + } + ) + + def test_stream_with_range_key(self): + dyn = boto3.client('dynamodb', region_name='us-east-1') + + resp = dyn.create_table( + TableName='test-streams', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}, + {'AttributeName': 'color', 'KeyType': 'RANGE'}], + AttributeDefinitions=[{'AttributeName': 'id', + 'AttributeType': 'S'}, + {'AttributeName': 'color', + 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, + 'WriteCapacityUnits': 1}, + StreamSpecification={ + 'StreamViewType': 'NEW_IMAGES' + } + ) + stream_arn = resp['TableDescription']['LatestStreamArn'] + + streams = boto3.client('dynamodbstreams', region_name='us-east-1') + resp = streams.describe_stream(StreamArn=stream_arn) + shard_id = resp['StreamDescription']['Shards'][0]['ShardId'] + + resp = streams.get_shard_iterator( + StreamArn=stream_arn, + ShardId=shard_id, + ShardIteratorType='LATEST' + ) + iterator_id = resp['ShardIterator'] + + dyn.put_item( + TableName='test-streams', + Item={'id': {'S': 
'row1'}, 'color': {'S': 'blue'}} + ) + dyn.put_item( + TableName='test-streams', + Item={'id': {'S': 'row2'}, 'color': {'S': 'green'}} + ) + + resp = streams.get_records(ShardIterator=iterator_id) + assert len(resp['Records']) == 2 + assert resp['Records'][0]['eventName'] == 'INSERT' + assert resp['Records'][1]['eventName'] == 'INSERT' + diff --git a/tests/test_ec2/test_amis.py b/tests/test_ec2/test_amis.py index a8d4d1b67..fd7234511 100644 --- a/tests/test_ec2/test_amis.py +++ b/tests/test_ec2/test_amis.py @@ -258,11 +258,11 @@ def test_ami_filters(): amis_by_name = conn.get_all_images(filters={'name': imageA.name}) set([ami.id for ami in amis_by_name]).should.equal(set([imageA.id])) - amis_by_public = conn.get_all_images(filters={'is-public': True}) + amis_by_public = conn.get_all_images(filters={'is-public': 'true'}) set([ami.id for ami in amis_by_public]).should.contain(imageB.id) len(amis_by_public).should.equal(35) - amis_by_nonpublic = conn.get_all_images(filters={'is-public': False}) + amis_by_nonpublic = conn.get_all_images(filters={'is-public': 'false'}) set([ami.id for ami in amis_by_nonpublic]).should.contain(imageA.id) len(amis_by_nonpublic).should.equal(1) diff --git a/tests/test_ec2/test_vpc_peering.py b/tests/test_ec2/test_vpc_peering.py index 1f98791b3..082499a72 100644 --- a/tests/test_ec2/test_vpc_peering.py +++ b/tests/test_ec2/test_vpc_peering.py @@ -89,7 +89,8 @@ def test_vpc_peering_connections_delete(): verdict.should.equal(True) all_vpc_pcxs = conn.get_all_vpc_peering_connections() - all_vpc_pcxs.should.have.length_of(0) + all_vpc_pcxs.should.have.length_of(1) + all_vpc_pcxs[0]._status.code.should.equal('deleted') with assert_raises(EC2ResponseError) as cm: conn.delete_vpc_peering_connection("pcx-1234abcd") diff --git a/tests/test_ecs/test_ecs_boto3.py b/tests/test_ecs/test_ecs_boto3.py index a0e8318da..a0d470935 100644 --- a/tests/test_ecs/test_ecs_boto3.py +++ b/tests/test_ecs/test_ecs_boto3.py @@ -925,6 +925,65 @@ def test_update_container_instances_state(): status='test_status').should.throw(Exception) +@mock_ec2 +@mock_ecs +def test_update_container_instances_state_by_arn(): + ecs_client = boto3.client('ecs', region_name='us-east-1') + ec2 = boto3.resource('ec2', region_name='us-east-1') + + test_cluster_name = 'test_ecs_cluster' + _ = ecs_client.create_cluster( + clusterName=test_cluster_name + ) + + instance_to_create = 3 + test_instance_arns = [] + for i in range(0, instance_to_create): + test_instance = ec2.create_instances( + ImageId="ami-1234abcd", + MinCount=1, + MaxCount=1, + )[0] + + instance_id_document = json.dumps( + ec2_utils.generate_instance_identity_document(test_instance) + ) + + response = ecs_client.register_container_instance( + cluster=test_cluster_name, + instanceIdentityDocument=instance_id_document) + + test_instance_arns.append(response['containerInstance']['containerInstanceArn']) + + response = ecs_client.update_container_instances_state(cluster=test_cluster_name, + containerInstances=test_instance_arns, + status='DRAINING') + len(response['failures']).should.equal(0) + len(response['containerInstances']).should.equal(instance_to_create) + response_statuses = [ci['status'] for ci in response['containerInstances']] + for status in response_statuses: + status.should.equal('DRAINING') + response = ecs_client.update_container_instances_state(cluster=test_cluster_name, + containerInstances=test_instance_arns, + status='DRAINING') + len(response['failures']).should.equal(0) + 
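# The instances are already DRAINING, so this second call should be a no-op with no failures. +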
len(response['containerInstances']).should.equal(instance_to_create) + response_statuses = [ci['status'] for ci in response['containerInstances']] + for status in response_statuses: + status.should.equal('DRAINING') + response = ecs_client.update_container_instances_state(cluster=test_cluster_name, + containerInstances=test_instance_arns, + status='ACTIVE') + len(response['failures']).should.equal(0) + len(response['containerInstances']).should.equal(instance_to_create) + response_statuses = [ci['status'] for ci in response['containerInstances']] + for status in response_statuses: + status.should.equal('ACTIVE') + ecs_client.update_container_instances_state.when.called_with(cluster=test_cluster_name, + containerInstances=test_instance_arns, + status='test_status').should.throw(Exception) + + @mock_ec2 @mock_ecs def test_run_task(): diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index bc23ff712..2b5f16d77 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -3,7 +3,9 @@ import base64 import boto import boto3 +import os import sure # noqa +import sys from boto.exception import BotoServerError from botocore.exceptions import ClientError from moto import mock_iam, mock_iam_deprecated @@ -11,9 +13,23 @@ from moto.iam.models import aws_managed_policies from nose.tools import assert_raises, assert_equals from nose.tools import raises +from datetime import datetime from tests.helpers import requires_boto_gte +MOCK_CERT = """-----BEGIN CERTIFICATE----- +MIIBpzCCARACCQCY5yOdxCTrGjANBgkqhkiG9w0BAQsFADAXMRUwEwYDVQQKDAxt +b3RvIHRlc3RpbmcwIBcNMTgxMTA1MTkwNTIwWhgPMjI5MjA4MTkxOTA1MjBaMBcx +FTATBgNVBAoMDG1vdG8gdGVzdGluZzCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkC +gYEA1Jn3g2h7LD3FLqdpcYNbFXCS4V4eDpuTCje9vKFcC3pi/01147X3zdfPy8Mt +ZhKxcREOwm4NXykh23P9KW7fBovpNwnbYsbPqj8Hf1ZaClrgku1arTVhEnKjx8zO +vaR/bVLCss4uE0E0VM1tJn/QGQsfthFsjuHtwx8uIWz35tUCAwEAATANBgkqhkiG +9w0BAQsFAAOBgQBWdOQ7bDc2nWkUhFjZoNIZrqjyNdjlMUndpwREVD7FQ/DuxJMj +FyDHrtlrS80dPUQWNYHw++oACDpWO01LGLPPrGmuO/7cOdojPEd852q5gd+7W9xt +8vUH+pBa6IBLbvBp+szli51V3TLSWcoyy4ceJNQU2vCkTLoFdS0RLd/7tQ== +-----END CERTIFICATE-----""" + + @mock_iam_deprecated() def test_get_all_server_certs(): conn = boto.connect_iam() @@ -108,6 +124,10 @@ def test_create_role_and_instance_profile(): conn.list_roles().roles[0].role_name.should.equal('my-role') + # Test with an empty path: + profile = conn.create_instance_profile('my-other-profile') + profile.path.should.equal('/') + @mock_iam_deprecated() def test_remove_role_from_instance_profile(): @@ -536,6 +556,14 @@ def test_generate_credential_report(): result['generate_credential_report_response'][ 'generate_credential_report_result']['state'].should.equal('COMPLETE') +@mock_iam +def test_boto3_generate_credential_report(): + conn = boto3.client('iam', region_name='us-east-1') + result = conn.generate_credential_report() + result['State'].should.equal('STARTED') + result = conn.generate_credential_report() + result['State'].should.equal('COMPLETE') + @mock_iam_deprecated() def test_get_credential_report(): @@ -551,6 +579,19 @@ def test_get_credential_report(): 'get_credential_report_result']['content'].encode('ascii')).decode('ascii') report.should.match(r'.*my-user.*') +@mock_iam +def test_boto3_get_credential_report(): + conn = boto3.client('iam', region_name='us-east-1') + conn.create_user(UserName='my-user') + with assert_raises(ClientError): + conn.get_credential_report() + result = conn.generate_credential_report() + while result['State'] != 'COMPLETE': + result = 
conn.generate_credential_report() + result = conn.get_credential_report() + report = result['Content'].decode('utf-8') + report.should.match(r'.*my-user.*') + @requires_boto_gte('2.39') @mock_iam_deprecated() @@ -695,15 +736,33 @@ def test_update_access_key(): resp['AccessKeyMetadata'][0]['Status'].should.equal('Inactive') +@mock_iam +def test_get_access_key_last_used(): + iam = boto3.resource('iam', region_name='us-east-1') + client = iam.meta.client + username = 'test-user' + iam.create_user(UserName=username) + with assert_raises(ClientError): + client.get_access_key_last_used(AccessKeyId='non-existent-key-id') + create_key_response = client.create_access_key(UserName=username)['AccessKey'] + resp = client.get_access_key_last_used(AccessKeyId=create_key_response['AccessKeyId']) + + datetime.strftime(resp["AccessKeyLastUsed"]["LastUsedDate"], "%Y-%m-%d").should.equal(datetime.strftime( + datetime.utcnow(), + "%Y-%m-%d" + )) + resp["UserName"].should.equal(create_key_response["UserName"]) + + @mock_iam def test_get_account_authorization_details(): import json conn = boto3.client('iam', region_name='us-east-1') conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") - conn.create_user(Path='/', UserName='testCloudAuxUser') - conn.create_group(Path='/', GroupName='testCloudAuxGroup') + conn.create_user(Path='/', UserName='testUser') + conn.create_group(Path='/', GroupName='testGroup') conn.create_policy( - PolicyName='testCloudAuxPolicy', + PolicyName='testPolicy', Path='/', PolicyDocument=json.dumps({ "Version": "2012-10-17", @@ -715,46 +774,158 @@ def test_get_account_authorization_details(): } ] }), - Description='Test CloudAux Policy' + Description='Test Policy' ) + conn.create_instance_profile(InstanceProfileName='ipn') + conn.add_role_to_instance_profile(InstanceProfileName='ipn', RoleName='my-role') + result = conn.get_account_authorization_details(Filter=['Role']) - len(result['RoleDetailList']) == 1 - len(result['UserDetailList']) == 0 - len(result['GroupDetailList']) == 0 - len(result['Policies']) == 0 + assert len(result['RoleDetailList']) == 1 + assert len(result['UserDetailList']) == 0 + assert len(result['GroupDetailList']) == 0 + assert len(result['Policies']) == 0 + assert len(result['RoleDetailList'][0]['InstanceProfileList']) == 1 result = conn.get_account_authorization_details(Filter=['User']) - len(result['RoleDetailList']) == 0 - len(result['UserDetailList']) == 1 - len(result['GroupDetailList']) == 0 - len(result['Policies']) == 0 + assert len(result['RoleDetailList']) == 0 + assert len(result['UserDetailList']) == 1 + assert len(result['GroupDetailList']) == 0 + assert len(result['Policies']) == 0 result = conn.get_account_authorization_details(Filter=['Group']) - len(result['RoleDetailList']) == 0 - len(result['UserDetailList']) == 0 - len(result['GroupDetailList']) == 1 - len(result['Policies']) == 0 + assert len(result['RoleDetailList']) == 0 + assert len(result['UserDetailList']) == 0 + assert len(result['GroupDetailList']) == 1 + assert len(result['Policies']) == 0 result = conn.get_account_authorization_details(Filter=['LocalManagedPolicy']) - len(result['RoleDetailList']) == 0 - len(result['UserDetailList']) == 0 - len(result['GroupDetailList']) == 0 - len(result['Policies']) == 1 + assert len(result['RoleDetailList']) == 0 + assert len(result['UserDetailList']) == 0 + assert len(result['GroupDetailList']) == 0 + assert len(result['Policies']) == 1 # Check for greater than 1 since this should always be greater than one but 
might change. # See iam/aws_managed_policies.py result = conn.get_account_authorization_details(Filter=['AWSManagedPolicy']) - len(result['RoleDetailList']) == 0 - len(result['UserDetailList']) == 0 - len(result['GroupDetailList']) == 0 - len(result['Policies']) > 1 + assert len(result['RoleDetailList']) == 0 + assert len(result['UserDetailList']) == 0 + assert len(result['GroupDetailList']) == 0 + assert len(result['Policies']) > 1 result = conn.get_account_authorization_details() - len(result['RoleDetailList']) == 1 - len(result['UserDetailList']) == 1 - len(result['GroupDetailList']) == 1 - len(result['Policies']) > 1 + assert len(result['RoleDetailList']) == 1 + assert len(result['UserDetailList']) == 1 + assert len(result['GroupDetailList']) == 1 + assert len(result['Policies']) > 1 +@mock_iam +def test_signing_certs(): + client = boto3.client('iam', region_name='us-east-1') + # Create the IAM user first: + client.create_user(UserName='testing') + + # Upload the cert: + resp = client.upload_signing_certificate(UserName='testing', CertificateBody=MOCK_CERT)['Certificate'] + cert_id = resp['CertificateId'] + + assert resp['UserName'] == 'testing' + assert resp['Status'] == 'Active' + assert resp['CertificateBody'] == MOCK_CERT + assert resp['CertificateId'] + + # Upload a cert with an invalid body: + with assert_raises(ClientError) as ce: + client.upload_signing_certificate(UserName='testing', CertificateBody='notacert') + assert ce.exception.response['Error']['Code'] == 'MalformedCertificate' + + # Upload with an invalid user: + with assert_raises(ClientError): + client.upload_signing_certificate(UserName='notauser', CertificateBody=MOCK_CERT) + + # Update: + client.update_signing_certificate(UserName='testing', CertificateId=cert_id, Status='Inactive') + + with assert_raises(ClientError): + client.update_signing_certificate(UserName='notauser', CertificateId=cert_id, Status='Inactive') + + with assert_raises(ClientError) as ce: + client.update_signing_certificate(UserName='testing', CertificateId='x' * 32, Status='Inactive') + + assert ce.exception.response['Error']['Message'] == 'The Certificate with id {id} cannot be found.'.format( + id='x' * 32) + + # List the certs: + resp = client.list_signing_certificates(UserName='testing')['Certificates'] + assert len(resp) == 1 + assert resp[0]['CertificateBody'] == MOCK_CERT + assert resp[0]['Status'] == 'Inactive' # Changed with the update call above.
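+ # Listing certificates for a user that does not exist should raise a ClientError.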
+ + with assert_raises(ClientError): + client.list_signing_certificates(UserName='notauser') + + # Delete: + client.delete_signing_certificate(UserName='testing', CertificateId=cert_id) + + with assert_raises(ClientError): + client.delete_signing_certificate(UserName='notauser', CertificateId=cert_id) + +@mock_iam() +def test_create_saml_provider(): + conn = boto3.client('iam', region_name='us-east-1') + response = conn.create_saml_provider( + Name="TestSAMLProvider", + SAMLMetadataDocument='a' * 1024 + ) + response['SAMLProviderArn'].should.equal("arn:aws:iam::123456789012:saml-provider/TestSAMLProvider") + +@mock_iam() +def test_get_saml_provider(): + conn = boto3.client('iam', region_name='us-east-1') + saml_provider_create = conn.create_saml_provider( + Name="TestSAMLProvider", + SAMLMetadataDocument='a' * 1024 + ) + response = conn.get_saml_provider( + SAMLProviderArn=saml_provider_create['SAMLProviderArn'] + ) + response['SAMLMetadataDocument'].should.equal('a' * 1024) + +@mock_iam() +def test_list_saml_providers(): + conn = boto3.client('iam', region_name='us-east-1') + conn.create_saml_provider( + Name="TestSAMLProvider", + SAMLMetadataDocument='a' * 1024 + ) + response = conn.list_saml_providers() + response['SAMLProviderList'][0]['Arn'].should.equal("arn:aws:iam::123456789012:saml-provider/TestSAMLProvider") + +@mock_iam() +def test_delete_saml_provider(): + conn = boto3.client('iam', region_name='us-east-1') + saml_provider_create = conn.create_saml_provider( + Name="TestSAMLProvider", + SAMLMetadataDocument='a' * 1024 + ) + response = conn.list_saml_providers() + len(response['SAMLProviderList']).should.equal(1) + conn.delete_saml_provider( + SAMLProviderArn=saml_provider_create['SAMLProviderArn'] + ) + response = conn.list_saml_providers() + len(response['SAMLProviderList']).should.equal(0) + conn.create_user(UserName='testing') + + cert_id = '123456789012345678901234' + with assert_raises(ClientError) as ce: + conn.delete_signing_certificate(UserName='testing', CertificateId=cert_id) + + assert ce.exception.response['Error']['Message'] == 'The Certificate with id {id} cannot be found.'.format( + id=cert_id) + + # Verify that it's not in the list: + resp = conn.list_signing_certificates(UserName='testing') + assert not resp['Certificates'] diff --git a/tests/test_iot/test_iot.py b/tests/test_iot/test_iot.py index 5c6effd7a..826d2c56b 100644 --- a/tests/test_iot/test_iot.py +++ b/tests/test_iot/test_iot.py @@ -5,6 +5,8 @@ import sure # noqa import boto3 from moto import mock_iot +from botocore.exceptions import ClientError +from nose.tools import assert_raises @mock_iot @@ -261,6 +263,96 @@ def test_certs(): res.should.have.key('certificates').which.should.have.length_of(0) +@mock_iot +def test_delete_policy_validation(): + doc = """{ + "Version": "2012-10-17", + "Statement":[ + { + "Effect":"Allow", + "Action":[ + "iot: *" + ], + "Resource":"*" + } + ] + } + """ + client = boto3.client('iot', region_name='ap-northeast-1') + cert = client.create_keys_and_certificate(setAsActive=True) + cert_arn = cert['certificateArn'] + policy_name = 'my-policy' + client.create_policy(policyName=policy_name, policyDocument=doc) + client.attach_principal_policy(policyName=policy_name, principal=cert_arn) + + with assert_raises(ClientError) as e: + client.delete_policy(policyName=policy_name) + e.exception.response['Error']['Message'].should.contain( + 'The policy cannot be deleted as the policy is attached to one or more principals (name=%s)' % policy_name) + res = client.list_policies() + 
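# The policy should still exist, since deletion was rejected while it was attached. +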
res.should.have.key('policies').which.should.have.length_of(1) + + client.detach_principal_policy(policyName=policy_name, principal=cert_arn) + client.delete_policy(policyName=policy_name) + res = client.list_policies() + res.should.have.key('policies').which.should.have.length_of(0) + + +@mock_iot +def test_delete_certificate_validation(): + doc = """{ + "Version": "2012-10-17", + "Statement":[ + { + "Effect":"Allow", + "Action":[ + "iot: *" + ], + "Resource":"*" + } + ] + } + """ + client = boto3.client('iot', region_name='ap-northeast-1') + cert = client.create_keys_and_certificate(setAsActive=True) + cert_id = cert['certificateId'] + cert_arn = cert['certificateArn'] + policy_name = 'my-policy' + thing_name = 'thing-1' + client.create_policy(policyName=policy_name, policyDocument=doc) + client.attach_principal_policy(policyName=policy_name, principal=cert_arn) + client.create_thing(thingName=thing_name) + client.attach_thing_principal(thingName=thing_name, principal=cert_arn) + + with assert_raises(ClientError) as e: + client.delete_certificate(certificateId=cert_id) + e.exception.response['Error']['Message'].should.contain( + 'Certificate must be deactivated (not ACTIVE) before deletion.') + res = client.list_certificates() + res.should.have.key('certificates').which.should.have.length_of(1) + + client.update_certificate(certificateId=cert_id, newStatus='REVOKED') + with assert_raises(ClientError) as e: + client.delete_certificate(certificateId=cert_id) + e.exception.response['Error']['Message'].should.contain( + 'Things must be detached before deletion (arn: %s)' % cert_arn) + res = client.list_certificates() + res.should.have.key('certificates').which.should.have.length_of(1) + + client.detach_thing_principal(thingName=thing_name, principal=cert_arn) + with assert_raises(ClientError) as e: + client.delete_certificate(certificateId=cert_id) + e.exception.response['Error']['Message'].should.contain( + 'Certificate policies must be detached before deletion (arn: %s)' % cert_arn) + res = client.list_certificates() + res.should.have.key('certificates').which.should.have.length_of(1) + + client.detach_principal_policy(policyName=policy_name, principal=cert_arn) + client.delete_certificate(certificateId=cert_id) + res = client.list_certificates() + res.should.have.key('certificates').which.should.have.length_of(0) + + @mock_iot def test_certs_create_inactive(): client = boto3.client('iot', region_name='ap-northeast-1') @@ -309,6 +401,47 @@ def test_policy(): @mock_iot def test_principal_policy(): + client = boto3.client('iot', region_name='ap-northeast-1') + policy_name = 'my-policy' + doc = '{}' + client.create_policy(policyName=policy_name, policyDocument=doc) + cert = client.create_keys_and_certificate(setAsActive=True) + cert_arn = cert['certificateArn'] + + client.attach_policy(policyName=policy_name, target=cert_arn) + + res = client.list_principal_policies(principal=cert_arn) + res.should.have.key('policies').which.should.have.length_of(1) + for policy in res['policies']: + policy.should.have.key('policyName').which.should_not.be.none + policy.should.have.key('policyArn').which.should_not.be.none + + # attaching again is a no-op if the policy is already attached to the certificate + client.attach_policy(policyName=policy_name, target=cert_arn) + + res = client.list_principal_policies(principal=cert_arn) + res.should.have.key('policies').which.should.have.length_of(1) + for policy in res['policies']: + policy.should.have.key('policyName').which.should_not.be.none + 
policy.should.have.key('policyArn').which.should_not.be.none + + res = client.list_policy_principals(policyName=policy_name) + res.should.have.key('principals').which.should.have.length_of(1) + for principal in res['principals']: + principal.should_not.be.none + + client.detach_policy(policyName=policy_name, target=cert_arn) + res = client.list_principal_policies(principal=cert_arn) + res.should.have.key('policies').which.should.have.length_of(0) + res = client.list_policy_principals(policyName=policy_name) + res.should.have.key('principals').which.should.have.length_of(0) + with assert_raises(ClientError) as e: + client.detach_policy(policyName=policy_name, target=cert_arn) + e.exception.response['Error']['Code'].should.equal('ResourceNotFoundException') + + +@mock_iot +def test_principal_policy_deprecated(): client = boto3.client('iot', region_name='ap-northeast-1') policy_name = 'my-policy' doc = '{}' diff --git a/tests/test_route53/test_route53.py b/tests/test_route53/test_route53.py index 76217b9d9..1ced9d937 100644 --- a/tests/test_route53/test_route53.py +++ b/tests/test_route53/test_route53.py @@ -164,8 +164,8 @@ def test_alias_rrset(): rrsets = conn.get_all_rrsets(zoneid, type="A") rrset_records = [(rr_set.name, rr) for rr_set in rrsets for rr in rr_set.resource_records] rrset_records.should.have.length_of(2) - rrset_records.should.contain(('foo.alias.testdns.aws.com', 'foo.testdns.aws.com')) - rrset_records.should.contain(('bar.alias.testdns.aws.com', 'bar.testdns.aws.com')) + rrset_records.should.contain(('foo.alias.testdns.aws.com.', 'foo.testdns.aws.com')) + rrset_records.should.contain(('bar.alias.testdns.aws.com.', 'bar.testdns.aws.com')) rrsets[0].resource_records[0].should.equal('foo.testdns.aws.com') rrsets = conn.get_all_rrsets(zoneid, type="CNAME") rrsets.should.have.length_of(1) @@ -525,7 +525,7 @@ def test_change_resource_record_sets_crud_valid(): { 'Action': 'CREATE', 'ResourceRecordSet': { - 'Name': 'prod.redis.db', + 'Name': 'prod.redis.db.', 'Type': 'A', 'TTL': 10, 'ResourceRecords': [{ @@ -540,7 +540,7 @@ def test_change_resource_record_sets_crud_valid(): response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id) len(response['ResourceRecordSets']).should.equal(1) a_record_detail = response['ResourceRecordSets'][0] - a_record_detail['Name'].should.equal('prod.redis.db') + a_record_detail['Name'].should.equal('prod.redis.db.') a_record_detail['Type'].should.equal('A') a_record_detail['TTL'].should.equal(10) a_record_detail['ResourceRecords'].should.equal([{'Value': '127.0.0.1'}]) @@ -552,7 +552,7 @@ def test_change_resource_record_sets_crud_valid(): { 'Action': 'UPSERT', 'ResourceRecordSet': { - 'Name': 'prod.redis.db', + 'Name': 'prod.redis.db.', 'Type': 'CNAME', 'TTL': 60, 'ResourceRecords': [{ @@ -567,7 +567,7 @@ def test_change_resource_record_sets_crud_valid(): response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id) len(response['ResourceRecordSets']).should.equal(1) cname_record_detail = response['ResourceRecordSets'][0] - cname_record_detail['Name'].should.equal('prod.redis.db') + cname_record_detail['Name'].should.equal('prod.redis.db.') cname_record_detail['Type'].should.equal('CNAME') cname_record_detail['TTL'].should.equal(60) cname_record_detail['ResourceRecords'].should.equal([{'Value': '192.168.1.1'}]) @@ -688,12 +688,12 @@ def test_list_resource_record_sets_name_type_filters(): # record_type, record_name all_records = [ - ('A', 'a.a.db'), - ('A', 'a.b.db'), - ('A', 'b.b.db'), - ('CNAME', 'b.b.db'), - ('CNAME', 'b.c.db'), - 
('CNAME', 'c.c.db') + ('A', 'a.a.db.'), + ('A', 'a.b.db.'), + ('A', 'b.b.db.'), + ('CNAME', 'b.b.db.'), + ('CNAME', 'b.c.db.'), + ('CNAME', 'c.c.db.') ] for record_type, record_name in all_records: create_resource_record_set(record_type, record_name) diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index 6e339abb6..7a53804ff 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -8,6 +8,7 @@ from functools import wraps from gzip import GzipFile from io import BytesIO import zlib +import pickle import json import boto @@ -65,6 +66,50 @@ class MyModel(object): s3.put_object(Bucket='mybucket', Key=self.name, Body=self.value) +@mock_s3 +def test_keys_are_pickleable(): + """Keys must be pickleable due to boto3 implementation details.""" + key = s3model.FakeKey('name', b'data!') + assert key.value == b'data!' + + pickled = pickle.dumps(key) + loaded = pickle.loads(pickled) + assert loaded.value == key.value + + +@mock_s3 +def test_append_to_value__basic(): + key = s3model.FakeKey('name', b'data!') + assert key.value == b'data!' + assert key.size == 5 + + key.append_to_value(b' And even more data') + assert key.value == b'data! And even more data' + assert key.size == 24 + + +@mock_s3 +def test_append_to_value__nothing_added(): + key = s3model.FakeKey('name', b'data!') + assert key.value == b'data!' + assert key.size == 5 + + key.append_to_value(b'') + assert key.value == b'data!' + assert key.size == 5 + + +@mock_s3 +def test_append_to_value__empty_key(): + key = s3model.FakeKey('name', b'') + assert key.value == b'' + assert key.size == 0 + + key.append_to_value(b'stuff') + assert key.value == b'stuff' + assert key.size == 5 + + @mock_s3 def test_my_model_save(): # Create Bucket so that test can run @@ -977,6 +1022,15 @@ def test_bucket_location(): bucket.get_location().should.equal("us-west-2") +@mock_s3 +def test_bucket_location_us_east_1(): + cli = boto3.client('s3') + bucket_name = 'mybucket' + # No LocationConstraint ==> us-east-1 + cli.create_bucket(Bucket=bucket_name) + cli.get_bucket_location(Bucket=bucket_name)['LocationConstraint'].should.equal(None) + + @mock_s3_deprecated def test_ranged_get(): conn = boto.connect_s3() @@ -1163,6 +1217,30 @@ def test_boto3_list_keys_xml_escaped(): assert 'Owner' not in resp['Contents'][0] +@mock_s3 +def test_boto3_list_objects_v2_common_prefix_pagination(): + s3 = boto3.client('s3', region_name='us-east-1') + s3.create_bucket(Bucket='mybucket') + + max_keys = 1 + keys = ['test/{i}/{i}'.format(i=i) for i in range(3)] + for key in keys: + s3.put_object(Bucket='mybucket', Key=key, Body=b'v') + + prefixes = [] + args = {"Bucket": 'mybucket', "Delimiter": "/", "Prefix": "test/", "MaxKeys": max_keys} + resp = {"IsTruncated": True} + while resp.get("IsTruncated", False): + if "NextContinuationToken" in resp: + args["ContinuationToken"] = resp["NextContinuationToken"] + resp = s3.list_objects_v2(**args) + if "CommonPrefixes" in resp: + assert len(resp["CommonPrefixes"]) == max_keys + prefixes.extend(i["Prefix"] for i in resp["CommonPrefixes"]) + + assert prefixes == [k[:k.rindex('/') + 1] for k in keys] + + @mock_s3 def test_boto3_list_objects_v2_truncated_response(): s3 = boto3.client('s3', region_name='us-east-1') @@ -1300,6 +1378,16 @@ def test_bucket_create_duplicate(): exc.exception.response['Error']['Code'].should.equal('BucketAlreadyExists') +@mock_s3 +def test_bucket_create_force_us_east_1(): + s3 = boto3.resource('s3', region_name='us-east-1') + with assert_raises(ClientError) as exc: + 
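# us-east-1 is the default region; S3 rejects it as an explicit LocationConstraint. +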
s3.create_bucket(Bucket="blah", CreateBucketConfiguration={ + 'LocationConstraint': 'us-east-1', + }) + exc.exception.response['Error']['Code'].should.equal('InvalidLocationConstraint') + + @mock_s3 def test_boto3_bucket_create_eu_central(): s3 = boto3.resource('s3', region_name='eu-central-1') @@ -1553,6 +1641,24 @@ def test_boto3_put_bucket_tagging(): }) resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) + # With duplicate tag keys: + with assert_raises(ClientError) as err: + resp = s3.put_bucket_tagging(Bucket=bucket_name, + Tagging={ + "TagSet": [ + { + "Key": "TagOne", + "Value": "ValueOne" + }, + { + "Key": "TagOne", + "Value": "ValueOneAgain" + } + ] + }) + e = err.exception + e.response["Error"]["Code"].should.equal("InvalidTag") + e.response["Error"]["Message"].should.equal("Cannot provide multiple Tags with the same key") @mock_s3 def test_boto3_get_bucket_tagging(): @@ -2581,3 +2687,17 @@ TEST_XML = """\ """ + +@mock_s3 +def test_boto3_bucket_name_too_long(): + s3 = boto3.client('s3', region_name='us-east-1') + with assert_raises(ClientError) as exc: + s3.create_bucket(Bucket='x'*64) + exc.exception.response['Error']['Code'].should.equal('InvalidBucketName') + +@mock_s3 +def test_boto3_bucket_name_too_short(): + s3 = boto3.client('s3', region_name='us-east-1') + with assert_raises(ClientError) as exc: + s3.create_bucket(Bucket='x'*2) + exc.exception.response['Error']['Code'].should.equal('InvalidBucketName') diff --git a/tests/test_secretsmanager/test_secretsmanager.py b/tests/test_secretsmanager/test_secretsmanager.py index ec384a660..169282421 100644 --- a/tests/test_secretsmanager/test_secretsmanager.py +++ b/tests/test_secretsmanager/test_secretsmanager.py @@ -39,12 +39,28 @@ def test_create_secret(): conn = boto3.client('secretsmanager', region_name='us-east-1') result = conn.create_secret(Name='test-secret', SecretString="foosecret") - assert result['ARN'] == ( - 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad') + assert result['ARN'] assert result['Name'] == 'test-secret' secret = conn.get_secret_value(SecretId='test-secret') assert secret['SecretString'] == 'foosecret' +@mock_secretsmanager +def test_create_secret_with_tags(): + conn = boto3.client('secretsmanager', region_name='us-east-1') + secret_name = 'test-secret-with-tags' + + result = conn.create_secret( + Name=secret_name, + SecretString="foosecret", + Tags=[{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}] + ) + assert result['ARN'] + assert result['Name'] == secret_name + secret_value = conn.get_secret_value(SecretId=secret_name) + assert secret_value['SecretString'] == 'foosecret' + secret_details = conn.describe_secret(SecretId=secret_name) + assert secret_details['Tags'] == [{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}] + @mock_secretsmanager def test_get_random_password_default_length(): conn = boto3.client('secretsmanager', region_name='us-west-2') @@ -159,10 +175,17 @@ def test_describe_secret(): conn.create_secret(Name='test-secret', SecretString='foosecret') + conn.create_secret(Name='test-secret-2', + SecretString='barsecret') + secret_description = conn.describe_secret(SecretId='test-secret') + secret_description_2 = conn.describe_secret(SecretId='test-secret-2') + assert secret_description # Returned dict is not empty - assert secret_description['ARN'] == ( - 'arn:aws:secretsmanager:us-west-2:1234567890:secret:test-secret-rIjad') + assert secret_description['Name'] == ('test-secret') + assert secret_description['ARN'] != '' 
# Test arn not empty + assert secret_description_2['Name'] == ('test-secret-2') + assert secret_description_2['ARN'] != '' # Test arn not empty @mock_secretsmanager def test_describe_secret_that_does_not_exist(): @@ -190,9 +213,7 @@ def test_rotate_secret(): rotated_secret = conn.rotate_secret(SecretId=secret_name) assert rotated_secret - assert rotated_secret['ARN'] == ( - 'arn:aws:secretsmanager:us-west-2:1234567890:secret:test-secret-rIjad' - ) + assert rotated_secret['ARN'] != '' # Test arn not empty assert rotated_secret['Name'] == secret_name assert rotated_secret['VersionId'] != '' diff --git a/tests/test_secretsmanager/test_server.py b/tests/test_secretsmanager/test_server.py index e573f9b67..d0f495f57 100644 --- a/tests/test_secretsmanager/test_server.py +++ b/tests/test_secretsmanager/test_server.py @@ -82,11 +82,20 @@ def test_create_secret(): headers={ "X-Amz-Target": "secretsmanager.CreateSecret"}, ) + res_2 = test_client.post('/', + data={"Name": "test-secret-2", + "SecretString": "bar-secret"}, + headers={ + "X-Amz-Target": "secretsmanager.CreateSecret"}, + ) json_data = json.loads(res.data.decode("utf-8")) - assert json_data['ARN'] == ( - 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad') + assert json_data['ARN'] != '' assert json_data['Name'] == 'test-secret' + + json_data_2 = json.loads(res_2.data.decode("utf-8")) + assert json_data_2['ARN'] != '' + assert json_data_2['Name'] == 'test-secret-2' @mock_secretsmanager def test_describe_secret(): @@ -107,12 +116,30 @@ def test_describe_secret(): "X-Amz-Target": "secretsmanager.DescribeSecret" }, ) + + create_secret_2 = test_client.post('/', + data={"Name": "test-secret-2", + "SecretString": "barsecret"}, + headers={ + "X-Amz-Target": "secretsmanager.CreateSecret" + }, + ) + describe_secret_2 = test_client.post('/', + data={"SecretId": "test-secret-2"}, + headers={ + "X-Amz-Target": "secretsmanager.DescribeSecret" + }, + ) json_data = json.loads(describe_secret.data.decode("utf-8")) assert json_data # Returned dict is not empty - assert json_data['ARN'] == ( - 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad' - ) + assert json_data['ARN'] != '' + assert json_data['Name'] == 'test-secret' + + json_data_2 = json.loads(describe_secret_2.data.decode("utf-8")) + assert json_data_2 # Returned dict is not empty + assert json_data_2['ARN'] != '' + assert json_data_2['Name'] == 'test-secret-2' @mock_secretsmanager def test_describe_secret_that_does_not_exist(): @@ -179,9 +206,7 @@ def test_rotate_secret(): json_data = json.loads(rotate_secret.data.decode("utf-8")) assert json_data # Returned dict is not empty - assert json_data['ARN'] == ( - 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad' - ) + assert json_data['ARN'] != '' assert json_data['Name'] == 'test-secret' assert json_data['VersionId'] == client_request_token diff --git a/tests/test_ssm/test_ssm_boto3.py b/tests/test_ssm/test_ssm_boto3.py index f8ef3a237..77d439d83 100644 --- a/tests/test_ssm/test_ssm_boto3.py +++ b/tests/test_ssm/test_ssm_boto3.py @@ -277,6 +277,18 @@ def test_put_parameter(): response['Parameters'][0]['Type'].should.equal('String') response['Parameters'][0]['Version'].should.equal(2) +@mock_ssm +def test_put_parameter_china(): + client = boto3.client('ssm', region_name='cn-north-1') + + response = client.put_parameter( + Name='test', + Description='A test parameter', + Value='value', + Type='String') + + response['Version'].should.equal(1) + @mock_ssm def test_get_parameter(): @@ -319,13 
+331,15 @@ def test_describe_parameters(): Name='test', Description='A test parameter', Value='value', - Type='String') + Type='String', + AllowedPattern=r'.*') response = client.describe_parameters() len(response['Parameters']).should.equal(1) response['Parameters'][0]['Name'].should.equal('test') response['Parameters'][0]['Type'].should.equal('String') + response['Parameters'][0]['AllowedPattern'].should.equal(r'.*') @mock_ssm diff --git a/tox.ini b/tox.ini index 0f3f1466a..1fea4d81d 100644 --- a/tox.ini +++ b/tox.ini @@ -2,6 +2,11 @@ envlist = py27, py36 [testenv] +setenv = + BOTO_CONFIG=/dev/null + AWS_SECRET_ACCESS_KEY=foobar_secret + AWS_ACCESS_KEY_ID=foobar_key + AWS_DEFAULT_REGION=us-east-1 deps = -r{toxinidir}/requirements.txt -r{toxinidir}/requirements-dev.txt
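For contributors running these suites outside tox or Travis, a minimal sketch of the same credential safety net (a hypothetical conftest.py; the values are the dummy placeholders from the tox.ini change above, not real credentials):

# conftest.py (illustrative only) - mirror tox's setenv so boto3 never
# falls back to real credentials or a real region while moto is mocking.
import os

os.environ.setdefault("BOTO_CONFIG", "/dev/null")
os.environ.setdefault("AWS_SECRET_ACCESS_KEY", "foobar_secret")
os.environ.setdefault("AWS_ACCESS_KEY_ID", "foobar_key")
os.environ.setdefault("AWS_DEFAULT_REGION", "us-east-1")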