From 24e942b50e4ea9b4913f48777041551316f0fe22 Mon Sep 17 00:00:00 2001 From: Gary Donovan Date: Mon, 23 Jul 2018 14:38:52 +1000 Subject: [PATCH 001/175] Update list of implemented endpoints for cognito-idp --- IMPLEMENTATION_COVERAGE.md | 48 +++++++++++++++++++------------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index 17b864dc3..b8c8dd1b7 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -831,16 +831,16 @@ - [ ] add_custom_attributes - [ ] admin_add_user_to_group - [ ] admin_confirm_sign_up -- [ ] admin_create_user -- [ ] admin_delete_user +- [X] admin_create_user +- [X] admin_delete_user - [ ] admin_delete_user_attributes - [ ] admin_disable_provider_for_user - [ ] admin_disable_user - [ ] admin_enable_user - [ ] admin_forget_device - [ ] admin_get_device -- [ ] admin_get_user -- [ ] admin_initiate_auth +- [X] admin_get_user +- [X] admin_initiate_auth - [ ] admin_link_provider_for_user - [ ] admin_list_devices - [ ] admin_list_groups_for_user @@ -855,32 +855,32 @@ - [ ] admin_update_user_attributes - [ ] admin_user_global_sign_out - [ ] associate_software_token -- [ ] change_password +- [X] change_password - [ ] confirm_device -- [ ] confirm_forgot_password +- [X] confirm_forgot_password - [ ] confirm_sign_up - [ ] create_group -- [ ] create_identity_provider +- [X] create_identity_provider - [ ] create_resource_server - [ ] create_user_import_job -- [ ] create_user_pool -- [ ] create_user_pool_client -- [ ] create_user_pool_domain +- [X] create_user_pool +- [X] create_user_pool_client +- [X] create_user_pool_domain - [ ] delete_group -- [ ] delete_identity_provider +- [X] delete_identity_provider - [ ] delete_resource_server - [ ] delete_user - [ ] delete_user_attributes -- [ ] delete_user_pool -- [ ] delete_user_pool_client -- [ ] delete_user_pool_domain -- [ ] describe_identity_provider +- [X] delete_user_pool +- [X] delete_user_pool_client +- [X] 
delete_user_pool_domain +- [X] describe_identity_provider - [ ] describe_resource_server - [ ] describe_risk_configuration - [ ] describe_user_import_job -- [ ] describe_user_pool -- [ ] describe_user_pool_client -- [ ] describe_user_pool_domain +- [X] describe_user_pool +- [X] describe_user_pool_client +- [X] describe_user_pool_domain - [ ] forget_device - [ ] forgot_password - [ ] get_csv_header @@ -896,15 +896,15 @@ - [ ] initiate_auth - [ ] list_devices - [ ] list_groups -- [ ] list_identity_providers +- [X] list_identity_providers - [ ] list_resource_servers - [ ] list_user_import_jobs -- [ ] list_user_pool_clients -- [ ] list_user_pools -- [ ] list_users +- [X] list_user_pool_clients +- [X] list_user_pools +- [X] list_users - [ ] list_users_in_group - [ ] resend_confirmation_code -- [ ] respond_to_auth_challenge +- [X] respond_to_auth_challenge - [ ] set_risk_configuration - [ ] set_ui_customization - [ ] set_user_mfa_preference @@ -920,7 +920,7 @@ - [ ] update_resource_server - [ ] update_user_attributes - [ ] update_user_pool -- [ ] update_user_pool_client +- [X] update_user_pool_client - [ ] verify_software_token - [ ] verify_user_attribute From 04fdd5617a2131dc68d2f7461718b70e62215e93 Mon Sep 17 00:00:00 2001 From: Gary Donovan Date: Mon, 23 Jul 2018 16:26:54 +1000 Subject: [PATCH 002/175] Implement group management for cognito-idp --- IMPLEMENTATION_COVERAGE.md | 8 +- moto/cognitoidp/exceptions.py | 10 +++ moto/cognitoidp/models.py | 67 ++++++++++++++++- moto/cognitoidp/responses.py | 41 ++++++++++ tests/test_cognitoidp/test_cognitoidp.py | 96 +++++++++++++++++++++++- 5 files changed, 214 insertions(+), 8 deletions(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index b8c8dd1b7..cceaa3c8a 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -859,14 +859,14 @@ - [ ] confirm_device - [X] confirm_forgot_password - [ ] confirm_sign_up -- [ ] create_group +- [X] create_group - [X] create_identity_provider - [ ] 
create_resource_server - [ ] create_user_import_job - [X] create_user_pool - [X] create_user_pool_client - [X] create_user_pool_domain -- [ ] delete_group +- [X] delete_group - [X] delete_identity_provider - [ ] delete_resource_server - [ ] delete_user @@ -885,7 +885,7 @@ - [ ] forgot_password - [ ] get_csv_header - [ ] get_device -- [ ] get_group +- [X] get_group - [ ] get_identity_provider_by_identifier - [ ] get_signing_certificate - [ ] get_ui_customization @@ -895,7 +895,7 @@ - [ ] global_sign_out - [ ] initiate_auth - [ ] list_devices -- [ ] list_groups +- [X] list_groups - [X] list_identity_providers - [ ] list_resource_servers - [ ] list_user_import_jobs diff --git a/moto/cognitoidp/exceptions.py b/moto/cognitoidp/exceptions.py index 1f1ec2309..452670213 100644 --- a/moto/cognitoidp/exceptions.py +++ b/moto/cognitoidp/exceptions.py @@ -24,6 +24,16 @@ class UserNotFoundError(BadRequest): }) +class GroupExistsException(BadRequest): + + def __init__(self, message): + super(GroupExistsException, self).__init__() + self.description = json.dumps({ + "message": message, + '__type': 'GroupExistsException', + }) + + class NotAuthorizedError(BadRequest): def __init__(self, message): diff --git a/moto/cognitoidp/models.py b/moto/cognitoidp/models.py index 10da0c6ff..a11eb435c 100644 --- a/moto/cognitoidp/models.py +++ b/moto/cognitoidp/models.py @@ -11,8 +11,7 @@ from jose import jws from moto.compat import OrderedDict from moto.core import BaseBackend, BaseModel -from .exceptions import NotAuthorizedError, ResourceNotFoundError, UserNotFoundError - +from .exceptions import GroupExistsException, NotAuthorizedError, ResourceNotFoundError, UserNotFoundError UserStatus = { "FORCE_CHANGE_PASSWORD": "FORCE_CHANGE_PASSWORD", @@ -33,6 +32,7 @@ class CognitoIdpUserPool(BaseModel): self.clients = OrderedDict() self.identity_providers = OrderedDict() + self.groups = OrderedDict() self.users = OrderedDict() self.refresh_tokens = {} self.access_tokens = {} @@ -185,6 +185,29 @@ 
class CognitoIdpIdentityProvider(BaseModel): return identity_provider_json +class CognitoIdpGroup(BaseModel): + + def __init__(self, user_pool_id, group_name, description, role_arn, precedence): + self.user_pool_id = user_pool_id + self.group_name = group_name + self.description = description or "" + self.role_arn = role_arn + self.precedence = precedence + self.last_modified_date = datetime.datetime.now() + self.creation_date = self.last_modified_date + + def to_json(self): + return { + "GroupName": self.group_name, + "UserPoolId": self.user_pool_id, + "Description": self.description, + "RoleArn": self.role_arn, + "Precedence": self.precedence, + "LastModifiedDate": time.mktime(self.last_modified_date.timetuple()), + "CreationDate": time.mktime(self.creation_date.timetuple()), + } + + class CognitoIdpUser(BaseModel): def __init__(self, user_pool_id, username, password, status, attributes): @@ -367,6 +390,46 @@ class CognitoIdpBackend(BaseBackend): del user_pool.identity_providers[name] + # Group + def create_group(self, user_pool_id, group_name, description, role_arn, precedence): + user_pool = self.user_pools.get(user_pool_id) + if not user_pool: + raise ResourceNotFoundError(user_pool_id) + + group = CognitoIdpGroup(user_pool_id, group_name, description, role_arn, precedence) + if group.group_name in user_pool.groups: + raise GroupExistsException("A group with the name already exists") + user_pool.groups[group.group_name] = group + + return group + + def get_group(self, user_pool_id, group_name): + user_pool = self.user_pools.get(user_pool_id) + if not user_pool: + raise ResourceNotFoundError(user_pool_id) + + if group_name not in user_pool.groups: + raise ResourceNotFoundError(group_name) + + return user_pool.groups[group_name] + + def list_groups(self, user_pool_id): + user_pool = self.user_pools.get(user_pool_id) + if not user_pool: + raise ResourceNotFoundError(user_pool_id) + + return user_pool.groups.values() + + def delete_group(self, user_pool_id, 
group_name): + user_pool = self.user_pools.get(user_pool_id) + if not user_pool: + raise ResourceNotFoundError(user_pool_id) + + if group_name not in user_pool.groups: + raise ResourceNotFoundError(group_name) + + del user_pool.groups[group_name] + # User def admin_create_user(self, user_pool_id, username, temporary_password, attributes): user_pool = self.user_pools.get(user_pool_id) diff --git a/moto/cognitoidp/responses.py b/moto/cognitoidp/responses.py index e6f20367e..a2f67ba84 100644 --- a/moto/cognitoidp/responses.py +++ b/moto/cognitoidp/responses.py @@ -129,6 +129,47 @@ class CognitoIdpResponse(BaseResponse): cognitoidp_backends[self.region].delete_identity_provider(user_pool_id, name) return "" + # Group + def create_group(self): + group_name = self._get_param("GroupName") + user_pool_id = self._get_param("UserPoolId") + description = self._get_param("Description") + role_arn = self._get_param("RoleArn") + precedence = self._get_param("Precedence") + + group = cognitoidp_backends[self.region].create_group( + user_pool_id, + group_name, + description, + role_arn, + precedence, + ) + + return json.dumps({ + "Group": group.to_json(), + }) + + def get_group(self): + group_name = self._get_param("GroupName") + user_pool_id = self._get_param("UserPoolId") + group = cognitoidp_backends[self.region].get_group(user_pool_id, group_name) + return json.dumps({ + "Group": group.to_json(), + }) + + def list_groups(self): + user_pool_id = self._get_param("UserPoolId") + groups = cognitoidp_backends[self.region].list_groups(user_pool_id) + return json.dumps({ + "Groups": [group.to_json() for group in groups], + }) + + def delete_group(self): + group_name = self._get_param("GroupName") + user_pool_id = self._get_param("UserPoolId") + cognitoidp_backends[self.region].delete_group(user_pool_id, group_name) + return "" + # User def admin_create_user(self): user_pool_id = self._get_param("UserPoolId") diff --git a/tests/test_cognitoidp/test_cognitoidp.py 
b/tests/test_cognitoidp/test_cognitoidp.py index 56d7c08a8..c226f91b7 100644 --- a/tests/test_cognitoidp/test_cognitoidp.py +++ b/tests/test_cognitoidp/test_cognitoidp.py @@ -1,14 +1,18 @@ from __future__ import unicode_literals -import boto3 import json import os +import random import uuid +import boto3 +# noinspection PyUnresolvedReferences +import sure # noqa +from botocore.exceptions import ClientError from jose import jws +from nose.tools import assert_raises from moto import mock_cognitoidp -import sure # noqa @mock_cognitoidp @@ -323,6 +327,94 @@ def test_delete_identity_providers(): caught.should.be.true +@mock_cognitoidp +def test_create_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + description = str(uuid.uuid4()) + role_arn = "arn:aws:iam:::role/my-iam-role" + precedence = random.randint(0, 100000) + + result = conn.create_group( + GroupName=group_name, + UserPoolId=user_pool_id, + Description=description, + RoleArn=role_arn, + Precedence=precedence, + ) + + result["Group"]["GroupName"].should.equal(group_name) + result["Group"]["UserPoolId"].should.equal(user_pool_id) + result["Group"]["Description"].should.equal(description) + result["Group"]["RoleArn"].should.equal(role_arn) + result["Group"]["Precedence"].should.equal(precedence) + result["Group"]["LastModifiedDate"].should.be.a("datetime.datetime") + result["Group"]["CreationDate"].should.be.a("datetime.datetime") + + +@mock_cognitoidp +def test_create_group_with_duplicate_name_raises_error(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + with assert_raises(ClientError) as cm: + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + 
cm.exception.operation_name.should.equal('CreateGroup') + cm.exception.response['Error']['Code'].should.equal('GroupExistsException') + cm.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) + + +@mock_cognitoidp +def test_get_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + result = conn.get_group(GroupName=group_name, UserPoolId=user_pool_id) + + result["Group"]["GroupName"].should.equal(group_name) + result["Group"]["UserPoolId"].should.equal(user_pool_id) + result["Group"]["LastModifiedDate"].should.be.a("datetime.datetime") + result["Group"]["CreationDate"].should.be.a("datetime.datetime") + + +@mock_cognitoidp +def test_list_groups(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + result = conn.list_groups(UserPoolId=user_pool_id) + + result["Groups"].should.have.length_of(1) + result["Groups"][0]["GroupName"].should.equal(group_name) + + +@mock_cognitoidp +def test_delete_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + conn.delete_group(GroupName=group_name, UserPoolId=user_pool_id) + + with assert_raises(ClientError) as cm: + conn.get_group(GroupName=group_name, UserPoolId=user_pool_id) + cm.exception.response['Error']['Code'].should.equal('ResourceNotFoundException') + + @mock_cognitoidp def test_admin_create_user(): conn = boto3.client("cognito-idp", "us-west-2") From ffa7560d026b9d0a3380b3c7cb3006d9e89af76a Mon Sep 17 00:00:00 2001 From: Gary 
Donovan Date: Mon, 23 Jul 2018 20:54:51 +1000 Subject: [PATCH 003/175] Implement user-group relationships for cognito-idp --- IMPLEMENTATION_COVERAGE.md | 10 +- moto/cognitoidp/models.py | 38 ++++++ moto/cognitoidp/responses.py | 42 +++++++ tests/test_cognitoidp/test_cognitoidp.py | 153 ++++++++++++++++++++++- 4 files changed, 237 insertions(+), 6 deletions(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index cceaa3c8a..47420e8a4 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -827,9 +827,9 @@ - [ ] unlink_identity - [ ] update_identity_pool -## cognito-idp - 0% implemented +## cognito-idp - 34% implemented - [ ] add_custom_attributes -- [ ] admin_add_user_to_group +- [X] admin_add_user_to_group - [ ] admin_confirm_sign_up - [X] admin_create_user - [X] admin_delete_user @@ -843,9 +843,9 @@ - [X] admin_initiate_auth - [ ] admin_link_provider_for_user - [ ] admin_list_devices -- [ ] admin_list_groups_for_user +- [X] admin_list_groups_for_user - [ ] admin_list_user_auth_events -- [ ] admin_remove_user_from_group +- [X] admin_remove_user_from_group - [ ] admin_reset_user_password - [ ] admin_respond_to_auth_challenge - [ ] admin_set_user_mfa_preference @@ -902,7 +902,7 @@ - [X] list_user_pool_clients - [X] list_user_pools - [X] list_users -- [ ] list_users_in_group +- [X] list_users_in_group - [ ] resend_confirmation_code - [X] respond_to_auth_challenge - [ ] set_risk_configuration diff --git a/moto/cognitoidp/models.py b/moto/cognitoidp/models.py index a11eb435c..349d1e319 100644 --- a/moto/cognitoidp/models.py +++ b/moto/cognitoidp/models.py @@ -196,6 +196,10 @@ class CognitoIdpGroup(BaseModel): self.last_modified_date = datetime.datetime.now() self.creation_date = self.last_modified_date + # Users who are members of this group. + # Note that these links are bidirectional. 
+ self.users = set() + def to_json(self): return { "GroupName": self.group_name, @@ -221,6 +225,10 @@ class CognitoIdpUser(BaseModel): self.create_date = datetime.datetime.utcnow() self.last_modified_date = datetime.datetime.utcnow() + # Groups this user is a member of. + # Note that these links are bidirectional. + self.groups = set() + def _base_json(self): return { "UserPoolId": self.user_pool_id, @@ -428,8 +436,34 @@ class CognitoIdpBackend(BaseBackend): if group_name not in user_pool.groups: raise ResourceNotFoundError(group_name) + group = user_pool.groups[group_name] + for user in group.users: + user.groups.remove(group) + del user_pool.groups[group_name] + def admin_add_user_to_group(self, user_pool_id, group_name, username): + group = self.get_group(user_pool_id, group_name) + user = self.admin_get_user(user_pool_id, username) + + group.users.add(user) + user.groups.add(group) + + def list_users_in_group(self, user_pool_id, group_name): + group = self.get_group(user_pool_id, group_name) + return list(group.users) + + def admin_list_groups_for_user(self, user_pool_id, username): + user = self.admin_get_user(user_pool_id, username) + return list(user.groups) + + def admin_remove_user_from_group(self, user_pool_id, group_name, username): + group = self.get_group(user_pool_id, group_name) + user = self.admin_get_user(user_pool_id, username) + + group.users.discard(user) + user.groups.discard(group) + # User def admin_create_user(self, user_pool_id, username, temporary_password, attributes): user_pool = self.user_pools.get(user_pool_id) @@ -465,6 +499,10 @@ class CognitoIdpBackend(BaseBackend): if username not in user_pool.users: raise ResourceNotFoundError(username) + user = user_pool.users[username] + for group in user.groups: + group.users.remove(user) + del user_pool.users[username] def _log_user_in(self, user_pool, client, username): diff --git a/moto/cognitoidp/responses.py b/moto/cognitoidp/responses.py index a2f67ba84..7d19fa3e8 100644 --- 
a/moto/cognitoidp/responses.py +++ b/moto/cognitoidp/responses.py @@ -170,6 +170,48 @@ class CognitoIdpResponse(BaseResponse): cognitoidp_backends[self.region].delete_group(user_pool_id, group_name) return "" + def admin_add_user_to_group(self): + user_pool_id = self._get_param("UserPoolId") + username = self._get_param("Username") + group_name = self._get_param("GroupName") + + cognitoidp_backends[self.region].admin_add_user_to_group( + user_pool_id, + group_name, + username, + ) + + return "" + + def list_users_in_group(self): + user_pool_id = self._get_param("UserPoolId") + group_name = self._get_param("GroupName") + users = cognitoidp_backends[self.region].list_users_in_group(user_pool_id, group_name) + return json.dumps({ + "Users": [user.to_json(extended=True) for user in users], + }) + + def admin_list_groups_for_user(self): + username = self._get_param("Username") + user_pool_id = self._get_param("UserPoolId") + groups = cognitoidp_backends[self.region].admin_list_groups_for_user(user_pool_id, username) + return json.dumps({ + "Groups": [group.to_json() for group in groups], + }) + + def admin_remove_user_from_group(self): + user_pool_id = self._get_param("UserPoolId") + username = self._get_param("Username") + group_name = self._get_param("GroupName") + + cognitoidp_backends[self.region].admin_remove_user_from_group( + user_pool_id, + group_name, + username, + ) + + return "" + # User def admin_create_user(self): user_pool_id = self._get_param("UserPoolId") diff --git a/tests/test_cognitoidp/test_cognitoidp.py b/tests/test_cognitoidp/test_cognitoidp.py index c226f91b7..07edc990c 100644 --- a/tests/test_cognitoidp/test_cognitoidp.py +++ b/tests/test_cognitoidp/test_cognitoidp.py @@ -408,13 +408,164 @@ def test_delete_group(): group_name = str(uuid.uuid4()) conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) - conn.delete_group(GroupName=group_name, UserPoolId=user_pool_id) + result = conn.delete_group(GroupName=group_name, 
UserPoolId=user_pool_id) + list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected with assert_raises(ClientError) as cm: conn.get_group(GroupName=group_name, UserPoolId=user_pool_id) cm.exception.response['Error']['Code'].should.equal('ResourceNotFoundException') +@mock_cognitoidp +def test_admin_add_user_to_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + result = conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected + + +@mock_cognitoidp +def test_admin_add_user_to_group_again_is_noop(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + + +@mock_cognitoidp +def test_list_users_in_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + + result = 
conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name) + + result["Users"].should.have.length_of(1) + result["Users"][0]["Username"].should.equal(username) + + +@mock_cognitoidp +def test_list_users_in_group_ignores_deleted_user(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + username2 = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username2) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username2, GroupName=group_name) + conn.admin_delete_user(UserPoolId=user_pool_id, Username=username) + + result = conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name) + + result["Users"].should.have.length_of(1) + result["Users"][0]["Username"].should.equal(username2) + + +@mock_cognitoidp +def test_admin_list_groups_for_user(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + + result = conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id) + + result["Groups"].should.have.length_of(1) + result["Groups"][0]["GroupName"].should.equal(group_name) + + +@mock_cognitoidp +def test_admin_list_groups_for_user_ignores_deleted_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = 
conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + group_name2 = str(uuid.uuid4()) + conn.create_group(GroupName=group_name2, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name2) + conn.delete_group(GroupName=group_name, UserPoolId=user_pool_id) + + result = conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id) + + result["Groups"].should.have.length_of(1) + result["Groups"][0]["GroupName"].should.equal(group_name2) + + +@mock_cognitoidp +def test_admin_remove_user_from_group(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + + result = conn.admin_remove_user_from_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected + conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name) \ + ["Users"].should.have.length_of(0) + conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id) \ + ["Groups"].should.have.length_of(0) + + +@mock_cognitoidp +def test_admin_remove_user_from_group_again_is_noop(): + conn = boto3.client("cognito-idp", "us-west-2") + + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + group_name = 
str(uuid.uuid4()) + conn.create_group(GroupName=group_name, UserPoolId=user_pool_id) + + username = str(uuid.uuid4()) + conn.admin_create_user(UserPoolId=user_pool_id, Username=username) + + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name) + + @mock_cognitoidp def test_admin_create_user(): conn = boto3.client("cognito-idp", "us-west-2") From b4c44a820f48d22090e131e27ae8f58738649ecb Mon Sep 17 00:00:00 2001 From: Jon Banafato Date: Fri, 19 Oct 2018 17:09:19 -0400 Subject: [PATCH 004/175] Add a long description to setup Include the readme as the `long_description` argument to `setup` and set its content type appropriately. This allows PyPI to render the content correctly. --- setup.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/setup.py b/setup.py index 98780dd5a..17e585912 100755 --- a/setup.py +++ b/setup.py @@ -1,10 +1,23 @@ #!/usr/bin/env python from __future__ import unicode_literals +import codecs +import os +import re import setuptools from setuptools import setup, find_packages import sys +# Borrowed from pip at https://github.com/pypa/pip/blob/62c27dee45625e1b63d1e023b0656310f276e050/setup.py#L11-L15 +here = os.path.abspath(os.path.dirname(__file__)) + +def read(*parts): + # intentionally *not* adding an encoding option to open, See: + # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690 + with codecs.open(os.path.join(here, *parts), 'r') as fp: + return fp.read() + + install_requires = [ "Jinja2>=2.7.3", "boto>=2.36.0", @@ -43,6 +56,8 @@ setup( version='1.3.6', description='A library that allows your python tests to easily' ' mock out the boto library', + long_description=read('README.md'), + long_description_content_type='text/markdown', author='Steve Pulec', author_email='spulec@gmail.com', url='https://github.com/spulec/moto', From ede02e2c2a319fbbe5921800af5aae680b01ca15 Mon 
Sep 17 00:00:00 2001 From: Adam Davis Date: Mon, 22 Oct 2018 13:37:52 -0700 Subject: [PATCH 005/175] Without double quotes, pip would not install --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 791226d6b..d6e9f30a1 100644 --- a/README.md +++ b/README.md @@ -259,7 +259,7 @@ It uses flask, which isn't a default dependency. You can install the server 'extra' package with: ```python -pip install moto[server] +pip install "moto[server]" ``` You can then start it running a service: From 544050ab270e45af6a87d43809f3330592cb057c Mon Sep 17 00:00:00 2001 From: Drew Pearce Date: Tue, 23 Oct 2018 15:37:28 -0400 Subject: [PATCH 006/175] added tests for dynamodb not equals --- tests/test_dynamodb2/test_dynamodb.py | 30 +++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/tests/test_dynamodb2/test_dynamodb.py b/tests/test_dynamodb2/test_dynamodb.py index afc919dd7..ad73de41b 100644 --- a/tests/test_dynamodb2/test_dynamodb.py +++ b/tests/test_dynamodb2/test_dynamodb.py @@ -815,6 +815,16 @@ def test_scan_filter(): ) assert response['Count'] == 1 + response = table.scan( + FilterExpression=Attr('app').ne('app2') + ) + assert response['Count'] == 1 + + response = table.scan( + FilterExpression=Attr('app').ne('app1') + ) + assert response['Count'] == 0 + @mock_dynamodb2 def test_scan_filter2(): @@ -872,6 +882,26 @@ def test_scan_filter3(): ) assert response['Count'] == 1 + response = table.scan( + FilterExpression=Attr('active').ne(True) + ) + assert response['Count'] == 0 + + response = table.scan( + FilterExpression=Attr('active').ne(False) + ) + assert response['Count'] == 1 + + response = table.scan( + FilterExpression=Attr('app').ne(1) + ) + assert response['Count'] == 0 + + response = table.scan( + FilterExpression=Attr('app').ne(2) + ) + assert response['Count'] == 1 + @mock_dynamodb2 def test_scan_filter4(): From ecc7c244675c1054e89f7b2efef2e5b0c0fcc45e Mon Sep 17 00:00:00 2001 From: Drew Pearce 
Date: Tue, 23 Oct 2018 15:54:56 -0400 Subject: [PATCH 007/175] simple fix for not equals in dynamodb filter expressions. i suspect this was just a typo --- moto/dynamodb2/comparisons.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/dynamodb2/comparisons.py b/moto/dynamodb2/comparisons.py index 53226c557..6d37345fe 100644 --- a/moto/dynamodb2/comparisons.py +++ b/moto/dynamodb2/comparisons.py @@ -383,7 +383,7 @@ class OpNotEqual(Op): def expr(self, item): lhs = self._lhs(item) rhs = self._rhs(item) - return lhs == rhs + return lhs != rhs class OpLessThanOrEqual(Op): From aa4be6fcad087139f2087c9794def59de1c118d3 Mon Sep 17 00:00:00 2001 From: George Alton Date: Wed, 24 Oct 2018 15:10:28 +0100 Subject: [PATCH 008/175] Adds limiting/pagination to cognitoidp list_* functions --- moto/cognitoidp/models.py | 50 ++++- moto/cognitoidp/responses.py | 53 +++-- tests/test_cognitoidp/test_cognitoidp.py | 244 +++++++++++++++++++++++ 3 files changed, 329 insertions(+), 18 deletions(-) diff --git a/moto/cognitoidp/models.py b/moto/cognitoidp/models.py index 476d470b9..521701de7 100644 --- a/moto/cognitoidp/models.py +++ b/moto/cognitoidp/models.py @@ -1,6 +1,7 @@ from __future__ import unicode_literals import datetime +import functools import json import os import time @@ -20,6 +21,43 @@ UserStatus = { } +def paginate(limit, start_arg="next_token", limit_arg="max_results"): + """Returns a limited result list, and an offset into list of remaining items + + Takes the next_token, and max_results kwargs given to a function and handles + the slicing of the results. The kwarg `next_token` is the offset into the + list to begin slicing from. 
`max_results` is the size of the result required + + If the max_results is not supplied then the `limit` parameter is used as a + default + + :param limit_arg: the name of argument in the decorated function that + controls amount of items returned + :param start_arg: the name of the argument in the decorated that provides + the starting offset + :param limit: A default maximum items to return + :return: a tuple containing a list of items, and the offset into the list + """ + default_start = 0 + + def outer_wrapper(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + # setup + start = int(default_start if kwargs.get(start_arg) is None else kwargs[start_arg]) + stop = int(limit if kwargs.get(limit_arg) is None else kwargs[limit_arg]) + end = start + stop + # call + result = func(*args, **kwargs) + # modify + results = list(result) + limited_results = results[start: end] + next_token = end if end < len(results) else None + return limited_results, next_token + return wrapper + return outer_wrapper + + class CognitoIdpUserPool(BaseModel): def __init__(self, region, name, extended_config): @@ -242,7 +280,8 @@ class CognitoIdpBackend(BaseBackend): self.user_pools[user_pool.id] = user_pool return user_pool - def list_user_pools(self): + @paginate(60) + def list_user_pools(self, max_results=None, next_token=None): return self.user_pools.values() def describe_user_pool(self, user_pool_id): @@ -289,7 +328,8 @@ class CognitoIdpBackend(BaseBackend): user_pool.clients[user_pool_client.id] = user_pool_client return user_pool_client - def list_user_pool_clients(self, user_pool_id): + @paginate(60) + def list_user_pool_clients(self, user_pool_id, max_results=None, next_token=None): user_pool = self.user_pools.get(user_pool_id) if not user_pool: raise ResourceNotFoundError(user_pool_id) @@ -339,7 +379,8 @@ class CognitoIdpBackend(BaseBackend): user_pool.identity_providers[name] = identity_provider return identity_provider - def list_identity_providers(self, 
user_pool_id): + @paginate(60) + def list_identity_providers(self, user_pool_id, max_results=None, next_token=None): user_pool = self.user_pools.get(user_pool_id) if not user_pool: raise ResourceNotFoundError(user_pool_id) @@ -387,7 +428,8 @@ class CognitoIdpBackend(BaseBackend): return user_pool.users[username] - def list_users(self, user_pool_id): + @paginate(60, "pagination_token", "limit") + def list_users(self, user_pool_id, pagination_token=None, limit=None): user_pool = self.user_pools.get(user_pool_id) if not user_pool: raise ResourceNotFoundError(user_pool_id) diff --git a/moto/cognitoidp/responses.py b/moto/cognitoidp/responses.py index 50939786b..cb5374a6b 100644 --- a/moto/cognitoidp/responses.py +++ b/moto/cognitoidp/responses.py @@ -22,10 +22,17 @@ class CognitoIdpResponse(BaseResponse): }) def list_user_pools(self): - user_pools = cognitoidp_backends[self.region].list_user_pools() - return json.dumps({ - "UserPools": [user_pool.to_json() for user_pool in user_pools] - }) + max_results = self._get_param("MaxResults") + next_token = self._get_param("NextToken", "0") + user_pools, next_token = cognitoidp_backends[self.region].list_user_pools( + max_results=max_results, next_token=next_token + ) + response = { + "UserPools": [user_pool.to_json() for user_pool in user_pools], + } + if next_token: + response["NextToken"] = str(next_token) + return json.dumps(response) def describe_user_pool(self): user_pool_id = self._get_param("UserPoolId") @@ -72,10 +79,16 @@ class CognitoIdpResponse(BaseResponse): def list_user_pool_clients(self): user_pool_id = self._get_param("UserPoolId") - user_pool_clients = cognitoidp_backends[self.region].list_user_pool_clients(user_pool_id) - return json.dumps({ + max_results = self._get_param("MaxResults") + next_token = self._get_param("NextToken", "0") + user_pool_clients, next_token = cognitoidp_backends[self.region].list_user_pool_clients(user_pool_id, + max_results=max_results, next_token=next_token) + response = { 
"UserPoolClients": [user_pool_client.to_json() for user_pool_client in user_pool_clients] - }) + } + if next_token: + response["NextToken"] = str(next_token) + return json.dumps(response) def describe_user_pool_client(self): user_pool_id = self._get_param("UserPoolId") @@ -110,10 +123,17 @@ class CognitoIdpResponse(BaseResponse): def list_identity_providers(self): user_pool_id = self._get_param("UserPoolId") - identity_providers = cognitoidp_backends[self.region].list_identity_providers(user_pool_id) - return json.dumps({ + max_results = self._get_param("MaxResults") + next_token = self._get_param("NextToken", "0") + identity_providers, next_token = cognitoidp_backends[self.region].list_identity_providers( + user_pool_id, max_results=max_results, next_token=next_token + ) + response = { "Providers": [identity_provider.to_json() for identity_provider in identity_providers] - }) + } + if next_token: + response["NextToken"] = str(next_token) + return json.dumps(response) def describe_identity_provider(self): user_pool_id = self._get_param("UserPoolId") @@ -155,10 +175,15 @@ class CognitoIdpResponse(BaseResponse): def list_users(self): user_pool_id = self._get_param("UserPoolId") - users = cognitoidp_backends[self.region].list_users(user_pool_id) - return json.dumps({ - "Users": [user.to_json(extended=True) for user in users] - }) + limit = self._get_param("Limit") + token = self._get_param("PaginationToken") + users, token = cognitoidp_backends[self.region].list_users(user_pool_id, + limit=limit, + pagination_token=token) + response = {"Users": [user.to_json(extended=True) for user in users]} + if token: + response["PaginationToken"] = str(token) + return json.dumps(response) def admin_disable_user(self): user_pool_id = self._get_param("UserPoolId") diff --git a/tests/test_cognitoidp/test_cognitoidp.py b/tests/test_cognitoidp/test_cognitoidp.py index f72a44762..362740ce3 100644 --- a/tests/test_cognitoidp/test_cognitoidp.py +++ 
b/tests/test_cognitoidp/test_cognitoidp.py @@ -41,6 +41,56 @@ def test_list_user_pools(): result["UserPools"][0]["Name"].should.equal(name) +@mock_cognitoidp +def test_list_user_pools_returns_max_items(): + conn = boto3.client("cognito-idp", "us-west-2") + + # Given 10 user pools + pool_count = 10 + for i in range(pool_count): + conn.create_user_pool(PoolName=str(uuid.uuid4())) + + max_results = 5 + result = conn.list_user_pools(MaxResults=max_results) + result["UserPools"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + +@mock_cognitoidp +def test_list_user_pools_returns_next_tokens(): + conn = boto3.client("cognito-idp", "us-west-2") + + # Given 10 user pool clients + pool_count = 10 + for i in range(pool_count): + conn.create_user_pool(PoolName=str(uuid.uuid4())) + + max_results = 5 + result = conn.list_user_pools(MaxResults=max_results) + result["UserPools"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + next_token = result["NextToken"] + result_2 = conn.list_user_pools(MaxResults=max_results, NextToken=next_token) + result_2["UserPools"].should.have.length_of(max_results) + result_2.shouldnt.have.key("NextToken") + + +@mock_cognitoidp +def test_list_user_pools_when_max_items_more_than_total_items(): + conn = boto3.client("cognito-idp", "us-west-2") + + # Given 10 user pool clients + pool_count = 10 + for i in range(pool_count): + conn.create_user_pool(PoolName=str(uuid.uuid4())) + + max_results = pool_count + 5 + result = conn.list_user_pools(MaxResults=max_results) + result["UserPools"].should.have.length_of(pool_count) + result.shouldnt.have.key("NextToken") + + @mock_cognitoidp def test_describe_user_pool(): conn = boto3.client("cognito-idp", "us-west-2") @@ -140,6 +190,67 @@ def test_list_user_pool_clients(): result["UserPoolClients"][0]["ClientName"].should.equal(client_name) +@mock_cognitoidp +def test_list_user_pool_clients_returns_max_items(): + conn = boto3.client("cognito-idp", "us-west-2") + 
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 user pool clients + client_count = 10 + for i in range(client_count): + client_name = str(uuid.uuid4()) + conn.create_user_pool_client(UserPoolId=user_pool_id, + ClientName=client_name) + max_results = 5 + result = conn.list_user_pool_clients(UserPoolId=user_pool_id, + MaxResults=max_results) + result["UserPoolClients"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + +@mock_cognitoidp +def test_list_user_pool_clients_returns_next_tokens(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 user pool clients + client_count = 10 + for i in range(client_count): + client_name = str(uuid.uuid4()) + conn.create_user_pool_client(UserPoolId=user_pool_id, + ClientName=client_name) + max_results = 5 + result = conn.list_user_pool_clients(UserPoolId=user_pool_id, + MaxResults=max_results) + result["UserPoolClients"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + next_token = result["NextToken"] + result_2 = conn.list_user_pool_clients(UserPoolId=user_pool_id, + MaxResults=max_results, + NextToken=next_token) + result_2["UserPoolClients"].should.have.length_of(max_results) + result_2.shouldnt.have.key("NextToken") + + +@mock_cognitoidp +def test_list_user_pool_clients_when_max_items_more_than_total_items(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 user pool clients + client_count = 10 + for i in range(client_count): + client_name = str(uuid.uuid4()) + conn.create_user_pool_client(UserPoolId=user_pool_id, + ClientName=client_name) + max_results = client_count + 5 + result = conn.list_user_pool_clients(UserPoolId=user_pool_id, + MaxResults=max_results) + result["UserPoolClients"].should.have.length_of(client_count) + 
result.shouldnt.have.key("NextToken") + + @mock_cognitoidp def test_describe_user_pool_client(): conn = boto3.client("cognito-idp", "us-west-2") @@ -264,6 +375,83 @@ def test_list_identity_providers(): result["Providers"][0]["ProviderType"].should.equal(provider_type) +@mock_cognitoidp +def test_list_identity_providers_returns_max_items(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 identity providers linked to a user pool + identity_provider_count = 10 + for i in range(identity_provider_count): + provider_name = str(uuid.uuid4()) + provider_type = "Facebook" + conn.create_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ProviderType=provider_type, + ProviderDetails={}, + ) + + max_results = 5 + result = conn.list_identity_providers(UserPoolId=user_pool_id, + MaxResults=max_results) + result["Providers"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + +@mock_cognitoidp +def test_list_identity_providers_returns_next_tokens(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 identity providers linked to a user pool + identity_provider_count = 10 + for i in range(identity_provider_count): + provider_name = str(uuid.uuid4()) + provider_type = "Facebook" + conn.create_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ProviderType=provider_type, + ProviderDetails={}, + ) + + max_results = 5 + result = conn.list_identity_providers(UserPoolId=user_pool_id, MaxResults=max_results) + result["Providers"].should.have.length_of(max_results) + result.should.have.key("NextToken") + + next_token = result["NextToken"] + result_2 = conn.list_identity_providers(UserPoolId=user_pool_id, + MaxResults=max_results, + NextToken=next_token) + result_2["Providers"].should.have.length_of(max_results) + 
result_2.shouldnt.have.key("NextToken") + + +@mock_cognitoidp +def test_list_identity_providers_when_max_items_more_than_total_items(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 identity providers linked to a user pool + identity_provider_count = 10 + for i in range(identity_provider_count): + provider_name = str(uuid.uuid4()) + provider_type = "Facebook" + conn.create_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ProviderType=provider_type, + ProviderDetails={}, + ) + + max_results = identity_provider_count + 5 + result = conn.list_identity_providers(UserPoolId=user_pool_id, MaxResults=max_results) + result["Providers"].should.have.length_of(identity_provider_count) + result.shouldnt.have.key("NextToken") + + @mock_cognitoidp def test_describe_identity_providers(): conn = boto3.client("cognito-idp", "us-west-2") @@ -396,6 +584,62 @@ def test_list_users(): result["Users"][0]["Username"].should.equal(username) +@mock_cognitoidp +def test_list_users_returns_limit_items(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 users + user_count = 10 + for i in range(user_count): + conn.admin_create_user(UserPoolId=user_pool_id, + Username=str(uuid.uuid4())) + max_results = 5 + result = conn.list_users(UserPoolId=user_pool_id, Limit=max_results) + result["Users"].should.have.length_of(max_results) + result.should.have.key("PaginationToken") + + +@mock_cognitoidp +def test_list_users_returns_pagination_tokens(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 users + user_count = 10 + for i in range(user_count): + conn.admin_create_user(UserPoolId=user_pool_id, + Username=str(uuid.uuid4())) + + max_results = 5 + result = 
conn.list_users(UserPoolId=user_pool_id, Limit=max_results) + result["Users"].should.have.length_of(max_results) + result.should.have.key("PaginationToken") + + next_token = result["PaginationToken"] + result_2 = conn.list_users(UserPoolId=user_pool_id, + Limit=max_results, PaginationToken=next_token) + result_2["Users"].should.have.length_of(max_results) + result_2.shouldnt.have.key("PaginationToken") + + +@mock_cognitoidp +def test_list_users_when_limit_more_than_total_items(): + conn = boto3.client("cognito-idp", "us-west-2") + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + # Given 10 users + user_count = 10 + for i in range(user_count): + conn.admin_create_user(UserPoolId=user_pool_id, + Username=str(uuid.uuid4())) + + max_results = user_count + 5 + result = conn.list_users(UserPoolId=user_pool_id, Limit=max_results) + result["Users"].should.have.length_of(user_count) + result.shouldnt.have.key("PaginationToken") + + @mock_cognitoidp def test_admin_disable_user(): conn = boto3.client("cognito-idp", "us-west-2") From 80f860727fbacece3717d258d9079faae8fc5334 Mon Sep 17 00:00:00 2001 From: Tomoya Iwata Date: Thu, 25 Oct 2018 20:34:53 +0900 Subject: [PATCH 009/175] Add support for IoT attach_policy --- IMPLEMENTATION_COVERAGE.md | 4 ++-- moto/iot/models.py | 8 ++++++++ moto/iot/responses.py | 9 +++++++++ tests/test_iot/test_iot.py | 29 +++++++++++++++++++++++++++++ 4 files changed, 48 insertions(+), 2 deletions(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index 7c68c0e31..7a1e2e7aa 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -2376,11 +2376,11 @@ - [ ] unsubscribe_from_event - [ ] update_assessment_target -## iot - 30% implemented +## iot - 31% implemented - [ ] accept_certificate_transfer - [X] add_thing_to_thing_group - [ ] associate_targets_with_job -- [ ] attach_policy +- [X] attach_policy - [X] attach_principal_policy - [X] attach_thing_principal - [ ] 
cancel_certificate_transfer diff --git a/moto/iot/models.py b/moto/iot/models.py index c36bb985f..db9ad3817 100644 --- a/moto/iot/models.py +++ b/moto/iot/models.py @@ -429,6 +429,14 @@ class IoTBackend(BaseBackend): pass raise ResourceNotFoundException() + def attach_policy(self, policy_name, target): + principal = self._get_principal(target) + policy = self.get_policy(policy_name) + k = (target, policy_name) + if k in self.principal_policies: + return + self.principal_policies[k] = (principal, policy) + def attach_principal_policy(self, policy_name, principal_arn): principal = self._get_principal(principal_arn) policy = self.get_policy(policy_name) diff --git a/moto/iot/responses.py b/moto/iot/responses.py index 006c4c4cc..042e5a314 100644 --- a/moto/iot/responses.py +++ b/moto/iot/responses.py @@ -224,6 +224,15 @@ class IoTResponse(BaseResponse): ) return json.dumps(dict()) + def attach_policy(self): + policy_name = self._get_param("policyName") + target = self._get_param('target') + self.iot_backend.attach_policy( + policy_name=policy_name, + target=target, + ) + return json.dumps(dict()) + def attach_principal_policy(self): policy_name = self._get_param("policyName") principal = self.headers.get('x-amzn-iot-principal') diff --git a/tests/test_iot/test_iot.py b/tests/test_iot/test_iot.py index 5c6effd7a..9082203d9 100644 --- a/tests/test_iot/test_iot.py +++ b/tests/test_iot/test_iot.py @@ -309,6 +309,35 @@ def test_policy(): @mock_iot def test_principal_policy(): + client = boto3.client('iot', region_name='ap-northeast-1') + policy_name = 'my-policy' + doc = '{}' + client.create_policy(policyName=policy_name, policyDocument=doc) + cert = client.create_keys_and_certificate(setAsActive=True) + cert_arn = cert['certificateArn'] + + client.attach_policy(policyName=policy_name, target=cert_arn) + + res = client.list_principal_policies(principal=cert_arn) + res.should.have.key('policies').which.should.have.length_of(1) + for policy in res['policies']: + 
policy.should.have.key('policyName').which.should_not.be.none + policy.should.have.key('policyArn').which.should_not.be.none + + res = client.list_policy_principals(policyName=policy_name) + res.should.have.key('principals').which.should.have.length_of(1) + for principal in res['principals']: + principal.should_not.be.none + + client.detach_principal_policy(policyName=policy_name, principal=cert_arn) + res = client.list_principal_policies(principal=cert_arn) + res.should.have.key('policies').which.should.have.length_of(0) + res = client.list_policy_principals(policyName=policy_name) + res.should.have.key('principals').which.should.have.length_of(0) + + +@mock_iot +def test_principal_policy_deprecated(): client = boto3.client('iot', region_name='ap-northeast-1') policy_name = 'my-policy' doc = '{}' From b485122ec6e3bae5f80dc39742b323f4eaf5ac06 Mon Sep 17 00:00:00 2001 From: zane Date: Wed, 24 Oct 2018 14:06:23 -0700 Subject: [PATCH 010/175] refactor to store multiple scrects, use uuid --- .gitignore | 2 +- moto/secretsmanager/models.py | 79 ++++++++++--------- moto/secretsmanager/utils.py | 5 +- .../test_secretsmanager.py | 18 +++-- tests/test_secretsmanager/test_server.py | 41 ++++++++-- 5 files changed, 91 insertions(+), 54 deletions(-) diff --git a/.gitignore b/.gitignore index 7f57e98e9..cfda51440 100644 --- a/.gitignore +++ b/.gitignore @@ -15,4 +15,4 @@ python_env .ropeproject/ .pytest_cache/ venv/ - +.vscode/ diff --git a/moto/secretsmanager/models.py b/moto/secretsmanager/models.py index 1404a0ec8..7f89e2eb6 100644 --- a/moto/secretsmanager/models.py +++ b/moto/secretsmanager/models.py @@ -2,6 +2,7 @@ from __future__ import unicode_literals import time import json +import uuid import boto3 @@ -18,10 +19,6 @@ class SecretsManager(BaseModel): def __init__(self, region_name, **kwargs): self.region = region_name - self.secret_id = kwargs.get('secret_id', '') - self.version_id = kwargs.get('version_id', '') - self.version_stage = kwargs.get('version_stage', '') - 
self.secret_string = '' class SecretsManagerBackend(BaseBackend): @@ -29,14 +26,7 @@ class SecretsManagerBackend(BaseBackend): def __init__(self, region_name=None, **kwargs): super(SecretsManagerBackend, self).__init__() self.region = region_name - self.secret_id = kwargs.get('secret_id', '') - self.name = kwargs.get('name', '') - self.createdate = int(time.time()) - self.secret_string = '' - self.rotation_enabled = False - self.rotation_lambda_arn = '' - self.auto_rotate_after_days = 0 - self.version_id = '' + self.secrets = {} def reset(self): region_name = self.region @@ -44,36 +34,49 @@ class SecretsManagerBackend(BaseBackend): self.__init__(region_name) def _is_valid_identifier(self, identifier): - return identifier in (self.name, self.secret_id) + return identifier in self.secrets def get_secret_value(self, secret_id, version_id, version_stage): if not self._is_valid_identifier(secret_id): raise ResourceNotFoundException() + secret = self.secrets[secret_id] + response = json.dumps({ - "ARN": secret_arn(self.region, self.secret_id), - "Name": self.name, - "VersionId": "A435958A-D821-4193-B719-B7769357AER4", - "SecretString": self.secret_string, + "ARN": secret_arn(self.region, secret['secret_id']), + "Name": secret['name'], + "VersionId": secret['version_id'], + "SecretString": secret['secret_string'], "VersionStages": [ "AWSCURRENT", ], - "CreatedDate": "2018-05-23 13:16:57.198000" + "CreatedDate": secret['createdate'] }) return response def create_secret(self, name, secret_string, **kwargs): - self.secret_string = secret_string - self.secret_id = name - self.name = name + generated_version_id = str(uuid.uuid4()) + + secret = { + 'secret_string': secret_string, + 'secret_id': name, + 'name': name, + 'createdate': int(time.time()), + 'rotation_enabled': False, + 'rotation_lambda_arn': '', + 'auto_rotate_after_days': 0, + 'version_id': generated_version_id + } + + self.secrets[name] = secret response = json.dumps({ "ARN": secret_arn(self.region, name), - 
"Name": self.name, - "VersionId": "A435958A-D821-4193-B719-B7769357AER4", + "Name": name, + "VersionId": generated_version_id, }) return response @@ -82,15 +85,17 @@ class SecretsManagerBackend(BaseBackend): if not self._is_valid_identifier(secret_id): raise ResourceNotFoundException + secret = self.secrets[secret_id] + response = json.dumps({ - "ARN": secret_arn(self.region, self.secret_id), - "Name": self.name, + "ARN": secret_arn(self.region, secret['secret_id']), + "Name": secret['name'], "Description": "", "KmsKeyId": "", - "RotationEnabled": self.rotation_enabled, - "RotationLambdaARN": self.rotation_lambda_arn, + "RotationEnabled": secret['rotation_enabled'], + "RotationLambdaARN": secret['rotation_lambda_arn'], "RotationRules": { - "AutomaticallyAfterDays": self.auto_rotate_after_days + "AutomaticallyAfterDays": secret['auto_rotate_after_days'] }, "LastRotatedDate": None, "LastChangedDate": None, @@ -141,17 +146,19 @@ class SecretsManagerBackend(BaseBackend): ) raise InvalidParameterException(msg) - self.version_id = client_request_token or '' - self.rotation_lambda_arn = rotation_lambda_arn or '' + secret = self.secrets[secret_id] + + secret['version_id'] = client_request_token or '' + secret['rotation_lambda_arn'] = rotation_lambda_arn or '' if rotation_rules: - self.auto_rotate_after_days = rotation_rules.get(rotation_days, 0) - if self.auto_rotate_after_days > 0: - self.rotation_enabled = True + secret['auto_rotate_after_days'] = rotation_rules.get(rotation_days, 0) + if secret['auto_rotate_after_days'] > 0: + secret['rotation_enabled'] = True response = json.dumps({ - "ARN": secret_arn(self.region, self.secret_id), - "Name": self.name, - "VersionId": self.version_id + "ARN": secret_arn(self.region, secret['secret_id']), + "Name": secret['name'], + "VersionId": secret['version_id'] }) return response diff --git a/moto/secretsmanager/utils.py b/moto/secretsmanager/utils.py index 2cb92020a..231fea296 100644 --- a/moto/secretsmanager/utils.py +++ 
b/moto/secretsmanager/utils.py @@ -52,8 +52,9 @@ def random_password(password_length, exclude_characters, exclude_numbers, def secret_arn(region, secret_id): - return "arn:aws:secretsmanager:{0}:1234567890:secret:{1}-rIjad".format( - region, secret_id) + id_string = ''.join(random.choice(string.ascii_letters) for _ in range(5)) + return "arn:aws:secretsmanager:{0}:1234567890:secret:{1}-{2}".format( + region, secret_id, id_string) def _exclude_characters(password, exclude_characters): diff --git a/tests/test_secretsmanager/test_secretsmanager.py b/tests/test_secretsmanager/test_secretsmanager.py index ec384a660..0e0b98b1e 100644 --- a/tests/test_secretsmanager/test_secretsmanager.py +++ b/tests/test_secretsmanager/test_secretsmanager.py @@ -39,8 +39,7 @@ def test_create_secret(): conn = boto3.client('secretsmanager', region_name='us-east-1') result = conn.create_secret(Name='test-secret', SecretString="foosecret") - assert result['ARN'] == ( - 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad') + assert result['ARN'] assert result['Name'] == 'test-secret' secret = conn.get_secret_value(SecretId='test-secret') assert secret['SecretString'] == 'foosecret' @@ -159,10 +158,17 @@ def test_describe_secret(): conn.create_secret(Name='test-secret', SecretString='foosecret') + conn.create_secret(Name='test-secret-2', + SecretString='barsecret') + secret_description = conn.describe_secret(SecretId='test-secret') + secret_description_2 = conn.describe_secret(SecretId='test-secret-2') + assert secret_description # Returned dict is not empty - assert secret_description['ARN'] == ( - 'arn:aws:secretsmanager:us-west-2:1234567890:secret:test-secret-rIjad') + assert secret_description['Name'] == ('test-secret') + assert secret_description['ARN'] != '' # Test arn not empty + assert secret_description_2['Name'] == ('test-secret-2') + assert secret_description_2['ARN'] != '' # Test arn not empty @mock_secretsmanager def test_describe_secret_that_does_not_exist(): @@ 
-190,9 +196,7 @@ def test_rotate_secret(): rotated_secret = conn.rotate_secret(SecretId=secret_name) assert rotated_secret - assert rotated_secret['ARN'] == ( - 'arn:aws:secretsmanager:us-west-2:1234567890:secret:test-secret-rIjad' - ) + assert rotated_secret['ARN'] != '' # Test arn not empty assert rotated_secret['Name'] == secret_name assert rotated_secret['VersionId'] != '' diff --git a/tests/test_secretsmanager/test_server.py b/tests/test_secretsmanager/test_server.py index e573f9b67..d0f495f57 100644 --- a/tests/test_secretsmanager/test_server.py +++ b/tests/test_secretsmanager/test_server.py @@ -82,11 +82,20 @@ def test_create_secret(): headers={ "X-Amz-Target": "secretsmanager.CreateSecret"}, ) + res_2 = test_client.post('/', + data={"Name": "test-secret-2", + "SecretString": "bar-secret"}, + headers={ + "X-Amz-Target": "secretsmanager.CreateSecret"}, + ) json_data = json.loads(res.data.decode("utf-8")) - assert json_data['ARN'] == ( - 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad') + assert json_data['ARN'] != '' assert json_data['Name'] == 'test-secret' + + json_data_2 = json.loads(res_2.data.decode("utf-8")) + assert json_data_2['ARN'] != '' + assert json_data_2['Name'] == 'test-secret-2' @mock_secretsmanager def test_describe_secret(): @@ -107,12 +116,30 @@ def test_describe_secret(): "X-Amz-Target": "secretsmanager.DescribeSecret" }, ) + + create_secret_2 = test_client.post('/', + data={"Name": "test-secret-2", + "SecretString": "barsecret"}, + headers={ + "X-Amz-Target": "secretsmanager.CreateSecret" + }, + ) + describe_secret_2 = test_client.post('/', + data={"SecretId": "test-secret-2"}, + headers={ + "X-Amz-Target": "secretsmanager.DescribeSecret" + }, + ) json_data = json.loads(describe_secret.data.decode("utf-8")) assert json_data # Returned dict is not empty - assert json_data['ARN'] == ( - 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad' - ) + assert json_data['ARN'] != '' + assert json_data['Name'] 
== 'test-secret' + + json_data_2 = json.loads(describe_secret_2.data.decode("utf-8")) + assert json_data_2 # Returned dict is not empty + assert json_data_2['ARN'] != '' + assert json_data_2['Name'] == 'test-secret-2' @mock_secretsmanager def test_describe_secret_that_does_not_exist(): @@ -179,9 +206,7 @@ def test_rotate_secret(): json_data = json.loads(rotate_secret.data.decode("utf-8")) assert json_data # Returned dict is not empty - assert json_data['ARN'] == ( - 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad' - ) + assert json_data['ARN'] != '' assert json_data['Name'] == 'test-secret' assert json_data['VersionId'] == client_request_token From 5ca68fbf06adecb0c0478f102a9cc7d331aa36c4 Mon Sep 17 00:00:00 2001 From: Illia Volochii Date: Fri, 26 Oct 2018 22:12:26 +0300 Subject: [PATCH 011/175] Update jsondiff to 1.1.2 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 98780dd5a..34002b79d 100755 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ install_requires = [ "python-jose<3.0.0", "mock", "docker>=2.5.1", - "jsondiff==1.1.1", + "jsondiff==1.1.2", "aws-xray-sdk<0.96,>=0.93", "responses>=0.9.0", ] From 090cad8c88169e3f6bd049d023d4e4d2fe309025 Mon Sep 17 00:00:00 2001 From: Jamie Starke Date: Fri, 26 Oct 2018 21:54:01 -0700 Subject: [PATCH 012/175] [1009] Converts ECS Instance full arn to instance_id for storage --- moto/ecs/models.py | 2 ++ tests/test_ecs/test_ecs_boto3.py | 59 ++++++++++++++++++++++++++++++++ 2 files changed, 61 insertions(+) diff --git a/moto/ecs/models.py b/moto/ecs/models.py index d00853843..4a6737ceb 100644 --- a/moto/ecs/models.py +++ b/moto/ecs/models.py @@ -769,6 +769,8 @@ class EC2ContainerServiceBackend(BaseBackend): Container instances status should be one of [ACTIVE,DRAINING]") failures = [] container_instance_objects = [] + list_container_instance_ids = [x.split('/')[-1] + for x in list_container_instance_ids] for container_instance_id in 
list_container_instance_ids: container_instance = self.container_instances[cluster_name].get(container_instance_id, None) if container_instance is not None: diff --git a/tests/test_ecs/test_ecs_boto3.py b/tests/test_ecs/test_ecs_boto3.py index 70c1463ee..7274e62c1 100644 --- a/tests/test_ecs/test_ecs_boto3.py +++ b/tests/test_ecs/test_ecs_boto3.py @@ -910,6 +910,65 @@ def test_update_container_instances_state(): status='test_status').should.throw(Exception) +@mock_ec2 +@mock_ecs +def test_update_container_instances_state_by_arn(): + ecs_client = boto3.client('ecs', region_name='us-east-1') + ec2 = boto3.resource('ec2', region_name='us-east-1') + + test_cluster_name = 'test_ecs_cluster' + _ = ecs_client.create_cluster( + clusterName=test_cluster_name + ) + + instance_to_create = 3 + test_instance_arns = [] + for i in range(0, instance_to_create): + test_instance = ec2.create_instances( + ImageId="ami-1234abcd", + MinCount=1, + MaxCount=1, + )[0] + + instance_id_document = json.dumps( + ec2_utils.generate_instance_identity_document(test_instance) + ) + + response = ecs_client.register_container_instance( + cluster=test_cluster_name, + instanceIdentityDocument=instance_id_document) + + test_instance_arns.append(response['containerInstance']['containerInstanceArn']) + + response = ecs_client.update_container_instances_state(cluster=test_cluster_name, + containerInstances=test_instance_arns, + status='DRAINING') + len(response['failures']).should.equal(0) + len(response['containerInstances']).should.equal(instance_to_create) + response_statuses = [ci['status'] for ci in response['containerInstances']] + for status in response_statuses: + status.should.equal('DRAINING') + response = ecs_client.update_container_instances_state(cluster=test_cluster_name, + containerInstances=test_instance_arns, + status='DRAINING') + len(response['failures']).should.equal(0) + len(response['containerInstances']).should.equal(instance_to_create) + response_statuses = [ci['status'] for ci in 
response['containerInstances']] + for status in response_statuses: + status.should.equal('DRAINING') + response = ecs_client.update_container_instances_state(cluster=test_cluster_name, + containerInstances=test_instance_arns, + status='ACTIVE') + len(response['failures']).should.equal(0) + len(response['containerInstances']).should.equal(instance_to_create) + response_statuses = [ci['status'] for ci in response['containerInstances']] + for status in response_statuses: + status.should.equal('ACTIVE') + ecs_client.update_container_instances_state.when.called_with(cluster=test_cluster_name, + containerInstances=test_instance_arns, + status='test_status').should.throw(Exception) + + @mock_ec2 @mock_ecs def test_run_task(): From 249dd7059e9c91712e459355126b0d4b57b25583 Mon Sep 17 00:00:00 2001 From: Tomoya Iwata Date: Sun, 28 Oct 2018 17:13:17 +0900 Subject: [PATCH 013/175] add test case for IoT attach_policy do nothing if policy have already attached to certificate --- tests/test_iot/test_iot.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/test_iot/test_iot.py b/tests/test_iot/test_iot.py index 9082203d9..47ea9d59b 100644 --- a/tests/test_iot/test_iot.py +++ b/tests/test_iot/test_iot.py @@ -318,6 +318,15 @@ def test_principal_policy(): client.attach_policy(policyName=policy_name, target=cert_arn) + res = client.list_principal_policies(principal=cert_arn) + res.should.have.key('policies').which.should.have.length_of(1) + for policy in res['policies']: + policy.should.have.key('policyName').which.should_not.be.none + policy.should.have.key('policyArn').which.should_not.be.none + + # do nothing if policy have already attached to certificate + client.attach_policy(policyName=policy_name, target=cert_arn) + res = client.list_principal_policies(principal=cert_arn) res.should.have.key('policies').which.should.have.length_of(1) for policy in res['policies']: From 9ba28a05b8b5aa709bde7188c6886f81155fd231 Mon Sep 17 00:00:00 2001 From: George Alton Date: Sun, 
28 Oct 2018 11:00:21 +0000 Subject: [PATCH 014/175] avoids copying entire result into a new list --- moto/cognitoidp/models.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/moto/cognitoidp/models.py b/moto/cognitoidp/models.py index 521701de7..edeb7b128 100644 --- a/moto/cognitoidp/models.py +++ b/moto/cognitoidp/models.py @@ -2,6 +2,7 @@ from __future__ import unicode_literals import datetime import functools +import itertools import json import os import time @@ -43,16 +44,12 @@ def paginate(limit, start_arg="next_token", limit_arg="max_results"): def outer_wrapper(func): @functools.wraps(func) def wrapper(*args, **kwargs): - # setup start = int(default_start if kwargs.get(start_arg) is None else kwargs[start_arg]) - stop = int(limit if kwargs.get(limit_arg) is None else kwargs[limit_arg]) - end = start + stop - # call + lim = int(limit if kwargs.get(limit_arg) is None else kwargs[limit_arg]) + stop = start + lim result = func(*args, **kwargs) - # modify - results = list(result) - limited_results = results[start: end] - next_token = end if end < len(results) else None + limited_results = list(itertools.islice(result, start, stop)) + next_token = stop if stop < len(result) else None return limited_results, next_token return wrapper return outer_wrapper From a0708a70fcc1b265a25fccb01ec1ca67e409a5ce Mon Sep 17 00:00:00 2001 From: Mark Challoner Date: Mon, 29 Oct 2018 13:29:53 +0000 Subject: [PATCH 015/175] Fix Tags parameter on CloudFormation create_change_set method. 
--- moto/cloudformation/responses.py | 3 ++- .../test_cloudformation_stack_crud_boto3.py | 3 +++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/moto/cloudformation/responses.py b/moto/cloudformation/responses.py index a1295a20d..fe436939f 100644 --- a/moto/cloudformation/responses.py +++ b/moto/cloudformation/responses.py @@ -87,7 +87,8 @@ class CloudFormationResponse(BaseResponse): role_arn = self._get_param('RoleARN') update_or_create = self._get_param('ChangeSetType', 'CREATE') parameters_list = self._get_list_prefix("Parameters.member") - tags = {tag[0]: tag[1] for tag in self._get_list_prefix("Tags.member")} + tags = dict((item['key'], item['value']) + for item in self._get_list_prefix("Tags.member")) parameters = {param['parameter_key']: param['parameter_value'] for param in parameters_list} if template_url: diff --git a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py index 9bfae6174..064e0fb33 100644 --- a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py +++ b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py @@ -391,6 +391,9 @@ def test_create_change_set_from_s3_url(): TemplateURL=key_url, ChangeSetName='NewChangeSet', ChangeSetType='CREATE', + Tags=[ + {'Key': 'tag-key', 'Value': 'tag-value'} + ], ) assert 'arn:aws:cloudformation:us-west-1:123456789:changeSet/NewChangeSet/' in response['Id'] assert 'arn:aws:cloudformation:us-east-1:123456789:stack/NewStack' in response['StackId'] From 3d71a67794be7d4d3e2542d513f9e376ddb7635c Mon Sep 17 00:00:00 2001 From: Tomoya Iwata Date: Tue, 30 Oct 2018 14:38:59 +0900 Subject: [PATCH 016/175] Add some validations for IoT delete operations fix #1908 --- moto/iot/exceptions.py | 17 +++++++ moto/iot/models.py | 30 ++++++++++++- tests/test_iot/test_iot.py | 92 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 138 insertions(+), 1 deletion(-) diff --git a/moto/iot/exceptions.py 
b/moto/iot/exceptions.py index 7bbdb706d..3af3751d9 100644 --- a/moto/iot/exceptions.py +++ b/moto/iot/exceptions.py @@ -31,3 +31,20 @@ class VersionConflictException(IoTClientError): 'VersionConflictException', 'The version for thing %s does not match the expected version.' % name ) + + +class CertificateStateException(IoTClientError): + def __init__(self, msg, cert_id): + self.code = 406 + super(CertificateStateException, self).__init__( + 'CertificateStateException', + '%s Id: %s' % (msg, cert_id) + ) + + +class DeleteConflictException(IoTClientError): + def __init__(self, msg): + self.code = 409 + super(DeleteConflictException, self).__init__( + 'DeleteConflictException', msg + ) diff --git a/moto/iot/models.py b/moto/iot/models.py index c36bb985f..4b0b260e1 100644 --- a/moto/iot/models.py +++ b/moto/iot/models.py @@ -13,6 +13,8 @@ import boto3 from moto.core import BaseBackend, BaseModel from .exceptions import ( + CertificateStateException, + DeleteConflictException, ResourceNotFoundException, InvalidRequestException, VersionConflictException @@ -378,7 +380,25 @@ class IoTBackend(BaseBackend): return certificate, key_pair def delete_certificate(self, certificate_id): - self.describe_certificate(certificate_id) + cert = self.describe_certificate(certificate_id) + if cert.status == 'ACTIVE': + raise CertificateStateException( + 'Certificate must be deactivated (not ACTIVE) before deletion.', certificate_id) + + certs = [k[0] for k, v in self.principal_things.items() + if self._get_principal(k[0]).certificate_id == certificate_id] + if len(certs) > 0: + raise DeleteConflictException( + 'Things must be detached before deletion (arn: %s)' % certs[0] + ) + + certs = [k[0] for k, v in self.principal_policies.items() + if self._get_principal(k[0]).certificate_id == certificate_id] + if len(certs) > 0: + raise DeleteConflictException( + 'Certificate policies must be detached before deletion (arn: %s)' % certs[0] + ) + del self.certificates[certificate_id] def 
describe_certificate(self, certificate_id): @@ -411,6 +431,14 @@ class IoTBackend(BaseBackend): return policies[0] def delete_policy(self, policy_name): + + policies = [k[1] for k, v in self.principal_policies.items() if k[1] == policy_name] + if len(policies) > 0: + raise DeleteConflictException( + 'The policy cannot be deleted as the policy is attached to one or more principals (name=%s)' + % policy_name + ) + policy = self.get_policy(policy_name) del self.policies[policy.name] diff --git a/tests/test_iot/test_iot.py b/tests/test_iot/test_iot.py index 5c6effd7a..21b670d46 100644 --- a/tests/test_iot/test_iot.py +++ b/tests/test_iot/test_iot.py @@ -5,6 +5,8 @@ import sure # noqa import boto3 from moto import mock_iot +from botocore.exceptions import ClientError +from nose.tools import assert_raises @mock_iot @@ -261,6 +263,96 @@ def test_certs(): res.should.have.key('certificates').which.should.have.length_of(0) +@mock_iot +def test_delete_policy_validation(): + doc = """{ + "Version": "2012-10-17", + "Statement":[ + { + "Effect":"Allow", + "Action":[ + "iot: *" + ], + "Resource":"*" + } + ] + } + """ + client = boto3.client('iot', region_name='ap-northeast-1') + cert = client.create_keys_and_certificate(setAsActive=True) + cert_arn = cert['certificateArn'] + policy_name = 'my-policy' + client.create_policy(policyName=policy_name, policyDocument=doc) + client.attach_principal_policy(policyName=policy_name, principal=cert_arn) + + with assert_raises(ClientError) as e: + client.delete_policy(policyName=policy_name) + e.exception.response['Error']['Message'].should.contain( + 'The policy cannot be deleted as the policy is attached to one or more principals (name=%s)' % policy_name) + res = client.list_policies() + res.should.have.key('policies').which.should.have.length_of(1) + + client.detach_principal_policy(policyName=policy_name, principal=cert_arn) + client.delete_policy(policyName=policy_name) + res = client.list_policies() + 
res.should.have.key('policies').which.should.have.length_of(0) + + +@mock_iot +def test_delete_certificate_validation(): + doc = """{ + "Version": "2012-10-17", + "Statement":[ + { + "Effect":"Allow", + "Action":[ + "iot: *" + ], + "Resource":"*" + } + ] + } + """ + client = boto3.client('iot', region_name='ap-northeast-1') + cert = client.create_keys_and_certificate(setAsActive=True) + cert_id = cert['certificateId'] + cert_arn = cert['certificateArn'] + policy_name = 'my-policy' + thing_name = 'thing-1' + client.create_policy(policyName=policy_name, policyDocument=doc) + client.attach_principal_policy(policyName=policy_name, principal=cert_arn) + client.create_thing(thingName=thing_name) + client.attach_thing_principal(thingName=thing_name, principal=cert_arn) + + with assert_raises(ClientError) as e: + client.delete_certificate(certificateId=cert_id) + e.exception.response['Error']['Message'].should.contain( + 'Certificate must be deactivated (not ACTIVE) before deletion.') + res = client.list_certificates() + res.should.have.key('certificates').which.should.have.length_of(1) + + client.update_certificate(certificateId=cert_id, newStatus='REVOKED') + with assert_raises(ClientError) as e: + client.delete_certificate(certificateId=cert_id) + e.exception.response['Error']['Message'].should.contain( + 'Things must be detached before deletion (arn: %s)' % cert_arn) + res = client.list_certificates() + res.should.have.key('certificates').which.should.have.length_of(1) + + client.detach_thing_principal(thingName=thing_name, principal=cert_arn) + with assert_raises(ClientError) as e: + client.delete_certificate(certificateId=cert_id) + e.exception.response['Error']['Message'].should.contain( + 'Certificate policies must be detached before deletion (arn: %s)' % cert_arn) + res = client.list_certificates() + res.should.have.key('certificates').which.should.have.length_of(1) + + client.detach_principal_policy(policyName=policy_name, principal=cert_arn) + 
client.delete_certificate(certificateId=cert_id) + res = client.list_certificates() + res.should.have.key('certificates').which.should.have.length_of(0) + + @mock_iot def test_certs_create_inactive(): client = boto3.client('iot', region_name='ap-northeast-1') From e38eea751f55d2e139f3271a73f69d35ff64129b Mon Sep 17 00:00:00 2001 From: jamesandres Date: Wed, 31 Oct 2018 11:39:49 +0000 Subject: [PATCH 017/175] Go easier on the CPU when moto sqs is idle For our local development setup we have found that moto is using around 25% CPU constantly. Digging in with gdb it turned out that it was burning that CPU in the sleeping loop. Here i'm increasing the sleep by 10x which brings the idle CPU usage down by 10x (to ~2%). I'm not familiar enough with the moto/sqs codebase to know if lengthening this sleep will have an adverse effect; however, in other Python dev I've noticed that (in Python 2.7 anyway..) Python threading won't context switch a thread until a sleep of at least 0.01 seconds is performed (shockingly long!). So based on this guesswork I suspect sleeping for 0.01 seconds won't cause any grief. --- moto/sqs/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/sqs/models.py b/moto/sqs/models.py index f3262a988..1404ded75 100644 --- a/moto/sqs/models.py +++ b/moto/sqs/models.py @@ -534,7 +534,7 @@ class SQSBackend(BaseBackend): break import time - time.sleep(0.001) + time.sleep(0.01) continue previous_result_count = len(result) From 67cb2e25bbc365484806daccc97d56cc95d6afe1 Mon Sep 17 00:00:00 2001 From: Pall Valmundsson Date: Thu, 1 Nov 2018 19:51:17 +0000 Subject: [PATCH 018/175] Support IAM Credential Report in boto3 Lowercase XML element names in API responses seem to cause issues for boto3. 
--- moto/iam/responses.py | 8 ++++---- tests/test_iam/test_iam.py | 21 +++++++++++++++++++++ 2 files changed, 25 insertions(+), 4 deletions(-) diff --git a/moto/iam/responses.py b/moto/iam/responses.py index 22558f3f6..91ba09543 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -1243,8 +1243,8 @@ LIST_ACCESS_KEYS_TEMPLATE = """ CREDENTIAL_REPORT_GENERATING = """ - STARTED - No report exists. Starting a new report generation task + STARTED + No report exists. Starting a new report generation task fa788a82-aa8a-11e4-a278-1786c418872b" @@ -1253,7 +1253,7 @@ CREDENTIAL_REPORT_GENERATING = """ CREDENTIAL_REPORT_GENERATED = """ - COMPLETE + COMPLETE fa788a82-aa8a-11e4-a278-1786c418872b" @@ -1262,7 +1262,7 @@ CREDENTIAL_REPORT_GENERATED = """ CREDENTIAL_REPORT = """ - {{ report }} + {{ report }} 2015-02-02T20:02:02Z text/csv diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index bc23ff712..34e198886 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -536,6 +536,14 @@ def test_generate_credential_report(): result['generate_credential_report_response'][ 'generate_credential_report_result']['state'].should.equal('COMPLETE') +@mock_iam +def test_generate_credential_report_boto3(): + conn = boto3.client('iam', region_name='us-east-1') + result = conn.generate_credential_report() + result['State'].should.equal('STARTED') + result = conn.generate_credential_report() + result['State'].should.equal('COMPLETE') + @mock_iam_deprecated() def test_get_credential_report(): @@ -551,6 +559,19 @@ def test_get_credential_report(): 'get_credential_report_result']['content'].encode('ascii')).decode('ascii') report.should.match(r'.*my-user.*') +@mock_iam +def test_get_credential_report(): + conn = boto3.client('iam', region_name='us-east-1') + conn.create_user(UserName='my-user') + with assert_raises(ClientError): + conn.get_credential_report() + result = conn.generate_credential_report() + while result['State'] != 'COMPLETE': + 
result = conn.generate_credential_report() + result = conn.get_credential_report() + report = result['Content'].decode('utf-8') + report.should.match(r'.*my-user.*') + @requires_boto_gte('2.39') @mock_iam_deprecated() From e9f8bec91a46c939fc8c23d3b6cac1de30383585 Mon Sep 17 00:00:00 2001 From: Pall Valmundsson Date: Thu, 1 Nov 2018 19:58:09 +0000 Subject: [PATCH 019/175] Fix boto3 IAM test function names --- tests/test_iam/test_iam.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index 34e198886..c33bd7fab 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -537,7 +537,7 @@ def test_generate_credential_report(): 'generate_credential_report_result']['state'].should.equal('COMPLETE') @mock_iam -def test_generate_credential_report_boto3(): +def test_boto3_generate_credential_report(): conn = boto3.client('iam', region_name='us-east-1') result = conn.generate_credential_report() result['State'].should.equal('STARTED') @@ -560,7 +560,7 @@ def test_get_credential_report(): report.should.match(r'.*my-user.*') @mock_iam -def test_get_credential_report(): +def test_boto3_get_credential_report(): conn = boto3.client('iam', region_name='us-east-1') conn.create_user(UserName='my-user') with assert_raises(ClientError): From b4b0ae50773f0cd9192f0c908e54010d924ed8ef Mon Sep 17 00:00:00 2001 From: Mike Grima Date: Fri, 19 Oct 2018 16:40:58 -0700 Subject: [PATCH 020/175] Some IAM fixes. - Fixed InstanceProfiles having `Path` set to `None`. - Added in some dynamic `CreateDate`s. 
- Fixed missing Instance Profile ID's being sent over --- moto/iam/models.py | 2 + moto/iam/responses.py | 104 ++++++++++++++++++------------------- tests/test_iam/test_iam.py | 67 +++++++++++++----------- 3 files changed, 90 insertions(+), 83 deletions(-) diff --git a/moto/iam/models.py b/moto/iam/models.py index 4a5240a08..accdf0334 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -117,6 +117,7 @@ class Role(BaseModel): self.path = path self.policies = {} self.managed_policies = {} + self.create_date = datetime.now(pytz.utc) @classmethod def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name): @@ -168,6 +169,7 @@ class InstanceProfile(BaseModel): self.name = name self.path = path self.roles = roles if roles else [] + self.create_date = datetime.now(pytz.utc) @classmethod def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name): diff --git a/moto/iam/responses.py b/moto/iam/responses.py index 22558f3f6..43b438c96 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -201,7 +201,7 @@ class IamResponse(BaseResponse): def create_instance_profile(self): profile_name = self._get_param('InstanceProfileName') - path = self._get_param('Path') + path = self._get_param('Path', '/') profile = iam_backend.create_instance_profile( profile_name, path, role_ids=[]) @@ -734,7 +734,7 @@ CREATE_INSTANCE_PROFILE_TEMPLATE = """ {% for profile in instance_profiles %} - {{ profile.id }} - - {% for role in profile.roles %} - - {{ role.path }} - {{ role.arn }} - {{ role.name }} - {{ role.assume_policy_document }} - 2012-05-09T15:45:35Z - {{ role.id }} - - {% endfor %} - - {{ profile.name }} - {{ profile.path }} - {{ profile.arn }} - 2012-05-09T16:27:11Z + {{ profile.id }} + + {% for role in profile.roles %} + + {{ role.path }} + {{ role.arn }} + {{ role.name }} + {{ role.assume_policy_document }} + {{ role.create_date }} + {{ role.id }} + + {% endfor %} + + {{ profile.name }} + {{ profile.path }} + 
{{ profile.arn }} + {{ profile.create_date }} {% endfor %} @@ -1382,7 +1382,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """{{ user.path }} {{ user.name }} {{ user.arn }} - 2012-05-09T15:45:35Z + {{ user.created_iso_8601 }} {% endfor %} @@ -1401,7 +1401,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """{{ group.name }} {{ group.path }} {{ group.arn }} - 2012-05-09T16:27:11Z + {{ group.create_date }} {% endfor %} @@ -1421,23 +1421,23 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """ {% for profile in instance_profiles %} - {{ profile.id }} - - {% for role in profile.roles %} - - {{ role.path }} - {{ role.arn }} - {{ role.name }} - {{ role.assume_role_policy_document }} - 2012-05-09T15:45:35Z - {{ role.id }} - - {% endfor %} - - {{ profile.name }} - {{ profile.path }} - {{ profile.arn }} - 2012-05-09T16:27:11Z + {{ profile.id }} + + {% for role in profile.roles %} + + {{ role.path }} + {{ role.arn }} + {{ role.name }} + {{ role.assume_role_policy_document }} + {{ role.create_date }} + {{ role.id }} + + {% endfor %} + + {{ profile.name }} + {{ profile.path }} + {{ profile.arn }} + {{ profile.create_date }} {% endfor %} @@ -1445,7 +1445,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """{{ role.arn }} {{ role.name }} {{ role.assume_role_policy_document }} - 2014-07-30T17:09:20Z + {{ role.create_date }} {{ role.id }} {% endfor %} @@ -1474,9 +1474,9 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """ {{ policy.arn }} 1 - 2012-05-09T16:27:11Z + {{ policy.create_datetime }} true - 2012-05-09T16:27:11Z + {{ policy.update_datetime }} {% endfor %} diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index bc23ff712..72b9205dd 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -108,6 +108,10 @@ def test_create_role_and_instance_profile(): conn.list_roles().roles[0].role_name.should.equal('my-role') + # Test with an empty path: + profile = conn.create_instance_profile('my-other-profile') + profile.path.should.equal('/') + 
@mock_iam_deprecated() def test_remove_role_from_instance_profile(): @@ -700,10 +704,10 @@ def test_get_account_authorization_details(): import json conn = boto3.client('iam', region_name='us-east-1') conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") - conn.create_user(Path='/', UserName='testCloudAuxUser') - conn.create_group(Path='/', GroupName='testCloudAuxGroup') + conn.create_user(Path='/', UserName='testUser') + conn.create_group(Path='/', GroupName='testGroup') conn.create_policy( - PolicyName='testCloudAuxPolicy', + PolicyName='testPolicy', Path='/', PolicyDocument=json.dumps({ "Version": "2012-10-17", @@ -715,46 +719,47 @@ def test_get_account_authorization_details(): } ] }), - Description='Test CloudAux Policy' + Description='Test Policy' ) + conn.create_instance_profile(InstanceProfileName='ipn') + conn.add_role_to_instance_profile(InstanceProfileName='ipn', RoleName='my-role') + result = conn.get_account_authorization_details(Filter=['Role']) - len(result['RoleDetailList']) == 1 - len(result['UserDetailList']) == 0 - len(result['GroupDetailList']) == 0 - len(result['Policies']) == 0 + assert len(result['RoleDetailList']) == 1 + assert len(result['UserDetailList']) == 0 + assert len(result['GroupDetailList']) == 0 + assert len(result['Policies']) == 0 + assert len(result['RoleDetailList'][0]['InstanceProfileList']) == 1 result = conn.get_account_authorization_details(Filter=['User']) - len(result['RoleDetailList']) == 0 - len(result['UserDetailList']) == 1 - len(result['GroupDetailList']) == 0 - len(result['Policies']) == 0 + assert len(result['RoleDetailList']) == 0 + assert len(result['UserDetailList']) == 1 + assert len(result['GroupDetailList']) == 0 + assert len(result['Policies']) == 0 result = conn.get_account_authorization_details(Filter=['Group']) - len(result['RoleDetailList']) == 0 - len(result['UserDetailList']) == 0 - len(result['GroupDetailList']) == 1 - len(result['Policies']) == 0 + assert 
len(result['RoleDetailList']) == 0 + assert len(result['UserDetailList']) == 0 + assert len(result['GroupDetailList']) == 1 + assert len(result['Policies']) == 0 result = conn.get_account_authorization_details(Filter=['LocalManagedPolicy']) - len(result['RoleDetailList']) == 0 - len(result['UserDetailList']) == 0 - len(result['GroupDetailList']) == 0 - len(result['Policies']) == 1 + assert len(result['RoleDetailList']) == 0 + assert len(result['UserDetailList']) == 0 + assert len(result['GroupDetailList']) == 0 + assert len(result['Policies']) == 1 # Check for greater than 1 since this should always be greater than one but might change. # See iam/aws_managed_policies.py result = conn.get_account_authorization_details(Filter=['AWSManagedPolicy']) - len(result['RoleDetailList']) == 0 - len(result['UserDetailList']) == 0 - len(result['GroupDetailList']) == 0 - len(result['Policies']) > 1 + assert len(result['RoleDetailList']) == 0 + assert len(result['UserDetailList']) == 0 + assert len(result['GroupDetailList']) == 0 + assert len(result['Policies']) > 1 result = conn.get_account_authorization_details() - len(result['RoleDetailList']) == 1 - len(result['UserDetailList']) == 1 - len(result['GroupDetailList']) == 1 - len(result['Policies']) > 1 - - - + assert len(result['RoleDetailList']) == 1 + assert len(result['UserDetailList']) == 1 + assert len(result['GroupDetailList']) == 1 + assert len(result['Policies']) > 1 From 6e17ba51c6ee91b1a6109da5887706c07b39679e Mon Sep 17 00:00:00 2001 From: Mike Grima Date: Sun, 21 Oct 2018 17:24:45 -0700 Subject: [PATCH 021/175] Fixed a truncation bug for `list_user_policies`. 
--- moto/iam/responses.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/iam/responses.py b/moto/iam/responses.py index 43b438c96..3d54b9331 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -1199,8 +1199,8 @@ LIST_USER_POLICIES_TEMPLATE = """ {{ policy }} {% endfor %} + false - false 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE From 94b5438d76271c342b98e81655c1e258a68538ad Mon Sep 17 00:00:00 2001 From: Mike Grima Date: Wed, 24 Oct 2018 18:00:52 -0700 Subject: [PATCH 022/175] Added IAM User signing certificate support --- moto/iam/exceptions.py | 8 ++++ moto/iam/models.py | 59 ++++++++++++++++++++++++++- moto/iam/responses.py | 82 ++++++++++++++++++++++++++++++++++++++ moto/iam/utils.py | 3 +- tests/test_iam/test_iam.py | 76 +++++++++++++++++++++++++++++++++++ 5 files changed, 225 insertions(+), 3 deletions(-) diff --git a/moto/iam/exceptions.py b/moto/iam/exceptions.py index 84f15f51f..61922ea18 100644 --- a/moto/iam/exceptions.py +++ b/moto/iam/exceptions.py @@ -24,3 +24,11 @@ class IAMReportNotPresentException(RESTError): def __init__(self, message): super(IAMReportNotPresentException, self).__init__( "ReportNotPresent", message) + + +class MalformedCertificate(RESTError): + code = 400 + + def __init__(self, cert): + super(MalformedCertificate, self).__init__( + 'MalformedCertificate', 'Certificate {cert} is malformed'.format(cert=cert)) diff --git a/moto/iam/models.py b/moto/iam/models.py index accdf0334..20804f4d3 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -1,14 +1,18 @@ from __future__ import unicode_literals import base64 +import sys from datetime import datetime import json +from cryptography import x509 +from cryptography.hazmat.backends import default_backend + import pytz from moto.core import BaseBackend, BaseModel from moto.core.utils import iso_8601_datetime_without_milliseconds from .aws_managed_policies import aws_managed_policies_data -from .exceptions import IAMNotFoundException, 
IAMConflictException, IAMReportNotPresentException +from .exceptions import IAMNotFoundException, IAMConflictException, IAMReportNotPresentException, MalformedCertificate from .utils import random_access_key, random_alphanumeric, random_resource_id, random_policy_id ACCOUNT_ID = 123456789012 @@ -215,6 +219,16 @@ class Certificate(BaseModel): return "arn:aws:iam::{0}:server-certificate{1}{2}".format(ACCOUNT_ID, self.path, self.cert_name) +class SigningCertificate(BaseModel): + + def __init__(self, id, user_name, body): + self.id = id + self.user_name = user_name + self.body = body + self.upload_date = datetime.strftime(datetime.utcnow(), "%Y-%m-%d-%H-%M-%S") + self.status = 'Active' + + class AccessKey(BaseModel): def __init__(self, user_name): @@ -299,6 +313,7 @@ class User(BaseModel): self.access_keys = [] self.password = None self.password_reset_required = False + self.signing_certificates = {} @property def arn(self): @@ -767,6 +782,48 @@ class IAMBackend(BaseBackend): return users + def upload_signing_certificate(self, user_name, body): + user = self.get_user(user_name) + cert_id = random_resource_id(size=32) + + # Validate the signing cert: + try: + if sys.version_info < (3, 0): + data = bytes(body) + else: + data = bytes(body, 'utf8') + + x509.load_pem_x509_certificate(data, default_backend()) + + except Exception: + raise MalformedCertificate(body) + + user.signing_certificates[cert_id] = SigningCertificate(cert_id, user_name, body) + + return user.signing_certificates[cert_id] + + def delete_signing_certificate(self, user_name, cert_id): + user = self.get_user(user_name) + + try: + del user.signing_certificates[cert_id] + except KeyError: + raise IAMNotFoundException("The Certificate with id {id} cannot be found.".format(id=cert_id)) + + def list_signing_certificates(self, user_name): + user = self.get_user(user_name) + + return list(user.signing_certificates.values()) + + def update_signing_certificate(self, user_name, cert_id, status): + user = 
self.get_user(user_name) + + try: + user.signing_certificates[cert_id].status = status + + except KeyError: + raise IAMNotFoundException("The Certificate with id {id} cannot be found.".format(id=cert_id)) + def create_login_profile(self, user_name, password): # This does not currently deal with PasswordPolicyViolation. user = self.get_user(user_name) diff --git a/moto/iam/responses.py b/moto/iam/responses.py index 3d54b9331..fa33cefea 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -552,6 +552,38 @@ class IamResponse(BaseResponse): roles=account_details['roles'] ) + def upload_signing_certificate(self): + user_name = self._get_param('UserName') + cert_body = self._get_param('CertificateBody') + + cert = iam_backend.upload_signing_certificate(user_name, cert_body) + template = self.response_template(UPLOAD_SIGNING_CERTIFICATE_TEMPLATE) + return template.render(cert=cert) + + def update_signing_certificate(self): + user_name = self._get_param('UserName') + cert_id = self._get_param('CertificateId') + status = self._get_param('Status') + + iam_backend.update_signing_certificate(user_name, cert_id, status) + template = self.response_template(UPDATE_SIGNING_CERTIFICATE_TEMPLATE) + return template.render() + + def delete_signing_certificate(self): + user_name = self._get_param('UserName') + cert_id = self._get_param('CertificateId') + + iam_backend.delete_signing_certificate(user_name, cert_id) + template = self.response_template(DELETE_SIGNING_CERTIFICATE_TEMPLATE) + return template.render() + + def list_signing_certificates(self): + user_name = self._get_param('UserName') + + certs = iam_backend.list_signing_certificates(user_name) + template = self.response_template(LIST_SIGNING_CERTIFICATES_TEMPLATE) + return template.render(user_name=user_name, certificates=certs) + ATTACH_ROLE_POLICY_TEMPLATE = """ @@ -1485,3 +1517,53 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """92e79ae7-7399-11e4-8c85-4b53eEXAMPLE """ + + 
+UPLOAD_SIGNING_CERTIFICATE_TEMPLATE = """ + + + {{ cert.user_name }} + {{ cert.id }} + {{ cert.body }} + {{ cert.status }} + + + + 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE + +""" + + +UPDATE_SIGNING_CERTIFICATE_TEMPLATE = """ + + EXAMPLE8-90ab-cdef-fedc-ba987EXAMPLE + +""" + + +DELETE_SIGNING_CERTIFICATE_TEMPLATE = """ + + 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE + +""" + + +LIST_SIGNING_CERTIFICATES_TEMPLATE = """ + + {{ user_name }} + + {% for cert in certificates %} + + {{ user_name }} + {{ cert.id }} + {{ cert.body }} + {{ cert.status }} + + {% endfor %} + + false + + + 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE + +""" diff --git a/moto/iam/utils.py b/moto/iam/utils.py index 1fae85a6c..f59bdfffe 100644 --- a/moto/iam/utils.py +++ b/moto/iam/utils.py @@ -12,8 +12,7 @@ def random_alphanumeric(length): ) -def random_resource_id(): - size = 20 +def random_resource_id(size=20): chars = list(range(10)) + list(string.ascii_lowercase) return ''.join(six.text_type(random.choice(chars)) for x in range(size)) diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index 72b9205dd..7bf38f48e 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -14,6 +14,19 @@ from nose.tools import raises from tests.helpers import requires_boto_gte +MOCK_CERT = """-----BEGIN CERTIFICATE----- +MIIBpzCCARACCQCY5yOdxCTrGjANBgkqhkiG9w0BAQsFADAXMRUwEwYDVQQKDAxt +b3RvIHRlc3RpbmcwIBcNMTgxMTA1MTkwNTIwWhgPMjI5MjA4MTkxOTA1MjBaMBcx +FTATBgNVBAoMDG1vdG8gdGVzdGluZzCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkC +gYEA1Jn3g2h7LD3FLqdpcYNbFXCS4V4eDpuTCje9vKFcC3pi/01147X3zdfPy8Mt +ZhKxcREOwm4NXykh23P9KW7fBovpNwnbYsbPqj8Hf1ZaClrgku1arTVhEnKjx8zO +vaR/bVLCss4uE0E0VM1tJn/QGQsfthFsjuHtwx8uIWz35tUCAwEAATANBgkqhkiG +9w0BAQsFAAOBgQBWdOQ7bDc2nWkUhFjZoNIZrqjyNdjlMUndpwREVD7FQ/DuxJMj +FyDHrtlrS80dPUQWNYHw++oACDpWO01LGLPPrGmuO/7cOdojPEd852q5gd+7W9xt +8vUH+pBa6IBLbvBp+szli51V3TLSWcoyy4ceJNQU2vCkTLoFdS0RLd/7tQ== +-----END CERTIFICATE-----""" + + @mock_iam_deprecated() def test_get_all_server_certs(): conn = 
boto.connect_iam() @@ -763,3 +776,66 @@ def test_get_account_authorization_details(): assert len(result['UserDetailList']) == 1 assert len(result['GroupDetailList']) == 1 assert len(result['Policies']) > 1 + + +@mock_iam +def test_signing_certs(): + client = boto3.client('iam', region_name='us-east-1') + + # Create the IAM user first: + client.create_user(UserName='testing') + + # Upload the cert: + resp = client.upload_signing_certificate(UserName='testing', CertificateBody=MOCK_CERT)['Certificate'] + cert_id = resp['CertificateId'] + + assert resp['UserName'] == 'testing' + assert resp['Status'] == 'Active' + assert resp['CertificateBody'] == MOCK_CERT + assert resp['CertificateId'] + + # Upload a the cert with an invalid body: + with assert_raises(ClientError) as ce: + client.upload_signing_certificate(UserName='testing', CertificateBody='notacert') + assert ce.exception.response['Error']['Code'] == 'MalformedCertificate' + + # Upload with an invalid user: + with assert_raises(ClientError): + client.upload_signing_certificate(UserName='notauser', CertificateBody=MOCK_CERT) + + # Update: + client.update_signing_certificate(UserName='testing', CertificateId=cert_id, Status='Inactive') + + with assert_raises(ClientError): + client.update_signing_certificate(UserName='notauser', CertificateId=cert_id, Status='Inactive') + + with assert_raises(ClientError) as ce: + client.update_signing_certificate(UserName='testing', CertificateId='x' * 32, Status='Inactive') + + assert ce.exception.response['Error']['Message'] == 'The Certificate with id {id} cannot be found.'.format( + id='x' * 32) + + # List the certs: + resp = client.list_signing_certificates(UserName='testing')['Certificates'] + assert len(resp) == 1 + assert resp[0]['CertificateBody'] == MOCK_CERT + assert resp[0]['Status'] == 'Inactive' # Changed with the update call above. 
+ + with assert_raises(ClientError): + client.list_signing_certificates(UserName='notauser') + + # Delete: + client.delete_signing_certificate(UserName='testing', CertificateId=cert_id) + + with assert_raises(ClientError): + client.delete_signing_certificate(UserName='notauser', CertificateId=cert_id) + + with assert_raises(ClientError) as ce: + client.delete_signing_certificate(UserName='testing', CertificateId=cert_id) + + assert ce.exception.response['Error']['Message'] == 'The Certificate with id {id} cannot be found.'.format( + id=cert_id) + + # Verify that it's not in the list: + resp = client.list_signing_certificates(UserName='testing') + assert not resp['Certificates'] From b66965e6e879a1647ca031448d8536f2e129d2c4 Mon Sep 17 00:00:00 2001 From: adam davis Date: Fri, 2 Nov 2018 16:04:17 -0700 Subject: [PATCH 023/175] Adding cloudformation-validate. Cfn-lint does the heavy lifting. --- moto/batch/responses.py | 2 +- moto/cloudformation/models.py | 4 + moto/cloudformation/responses.py | 27 +++++ moto/cloudformation/utils.py | 33 ++++++ setup.py | 1 + tests/test_cloudformation/test_validate.py | 115 +++++++++++++++++++++ 6 files changed, 181 insertions(+), 1 deletion(-) create mode 100644 tests/test_cloudformation/test_validate.py diff --git a/moto/batch/responses.py b/moto/batch/responses.py index e626b7d4c..7fb606184 100644 --- a/moto/batch/responses.py +++ b/moto/batch/responses.py @@ -27,7 +27,7 @@ class BatchResponse(BaseResponse): elif not hasattr(self, '_json'): try: self._json = json.loads(self.body) - except json.JSONDecodeError: + except ValueError: print() return self._json diff --git a/moto/cloudformation/models.py b/moto/cloudformation/models.py index e5ab7255d..6ec821b42 100644 --- a/moto/cloudformation/models.py +++ b/moto/cloudformation/models.py @@ -13,6 +13,7 @@ from .utils import ( generate_changeset_id, generate_stack_id, yaml_tag_constructor, + validate_template_cfn_lint, ) from .exceptions import ValidationError @@ -270,6 +271,9 @@ class 
CloudFormationBackend(BaseBackend): next_token = str(token + 100) if len(all_exports) > token + 100 else None return exports, next_token + def validate_template(self, template): + return validate_template_cfn_lint(template) + def _validate_export_uniqueness(self, stack): new_stack_export_names = [x.name for x in stack.exports] export_names = self.exports.keys() diff --git a/moto/cloudformation/responses.py b/moto/cloudformation/responses.py index a1295a20d..0964287a3 100644 --- a/moto/cloudformation/responses.py +++ b/moto/cloudformation/responses.py @@ -1,6 +1,7 @@ from __future__ import unicode_literals import json +import yaml from six.moves.urllib.parse import urlparse from moto.core.responses import BaseResponse @@ -294,6 +295,32 @@ class CloudFormationResponse(BaseResponse): template = self.response_template(LIST_EXPORTS_RESPONSE) return template.render(exports=exports, next_token=next_token) + def validate_template(self): + cfn_lint = self.cloudformation_backend.validate_template(self._get_param('TemplateBody')) + if cfn_lint: + raise ValidationError(cfn_lint[0].message) + description = "" + try: + description = json.loads(self._get_param('TemplateBody'))['Description'] + except (ValueError, KeyError): + pass + try: + description = yaml.load(self._get_param('TemplateBody'))['Description'] + except (yaml.ParserError, KeyError): + pass + template = self.response_template(VALIDATE_STACK_RESPONSE_TEMPLATE) + return template.render(description=description) + + +VALIDATE_STACK_RESPONSE_TEMPLATE = """ + + + + +{{ description }} + + +""" CREATE_STACK_RESPONSE_TEMPLATE = """ diff --git a/moto/cloudformation/utils.py b/moto/cloudformation/utils.py index f3b8874ed..f963ce7c8 100644 --- a/moto/cloudformation/utils.py +++ b/moto/cloudformation/utils.py @@ -3,6 +3,9 @@ import uuid import six import random import yaml +import os + +from cfnlint import decode, core def generate_stack_id(stack_name): @@ -38,3 +41,33 @@ def yaml_tag_constructor(loader, tag, node): key = 
'Fn::{}'.format(tag[1:]) return {key: _f(loader, tag, node)} + + +def validate_template_cfn_lint(template): + + # Save the template to a temporary file -- cfn-lint requires a file + filename = "file.tmp" + with open(filename, "w") as file: + file.write(template) + abs_filename = os.path.abspath(filename) + + # decode handles both yaml and json + template, matches = decode.decode(abs_filename, False) + + # Set cfn-lint to info + core.configure_logging(None) + + # Initialize the ruleset to be applied (no overrules, no excludes) + rules = core.get_rules([], [], []) + + # Use us-east-1 region (spec file) for validation + regions = ['us-east-1'] + + # Process all the rules and gather the errors + matches = core.run_checks( + abs_filename, + template, + rules, + regions) + + return matches diff --git a/setup.py b/setup.py index 98780dd5a..873ca16b2 100755 --- a/setup.py +++ b/setup.py @@ -24,6 +24,7 @@ install_requires = [ "jsondiff==1.1.1", "aws-xray-sdk<0.96,>=0.93", "responses>=0.9.0", + "cfn-lint" ] extras_require = { diff --git a/tests/test_cloudformation/test_validate.py b/tests/test_cloudformation/test_validate.py new file mode 100644 index 000000000..e2c3af05d --- /dev/null +++ b/tests/test_cloudformation/test_validate.py @@ -0,0 +1,115 @@ +from collections import OrderedDict +import json +import yaml +import os +import boto3 +from nose.tools import raises +import botocore + + +from moto.cloudformation.exceptions import ValidationError +from moto.cloudformation.models import FakeStack +from moto.cloudformation.parsing import resource_class_from_type, parse_condition, Export +from moto.sqs.models import Queue +from moto.s3.models import FakeBucket +from moto.cloudformation.utils import yaml_tag_constructor +from boto.cloudformation.stack import Output +from moto import mock_cloudformation, mock_s3, mock_sqs, mock_ec2 + +json_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack 1", + "Resources": { + "EC2Instance1": { + "Type": 
"AWS::EC2::Instance", + "Properties": { + "ImageId": "ami-d3adb33f", + "KeyName": "dummy", + "InstanceType": "t2.micro", + "Tags": [ + { + "Key": "Description", + "Value": "Test tag" + }, + { + "Key": "Name", + "Value": "Name tag for tests" + } + ] + } + } + } +} + +# One resource is required +json_bad_template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack 1" +} + +dummy_template_json = json.dumps(json_template) +dummy_bad_template_json = json.dumps(json_bad_template) + + +@mock_cloudformation +def test_boto3_json_validate_successful(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + response = cf_conn.validate_template( + TemplateBody=dummy_template_json, + ) + assert response['Description'] == "Stack 1" + assert response['Parameters'] == [] + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 + +@mock_cloudformation +def test_boto3_json_invalid_missing_resource(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + try: + cf_conn.validate_template( + TemplateBody=dummy_bad_template_json, + ) + assert False + except botocore.exceptions.ClientError as e: + assert str(e) == 'An error occurred (ValidationError) when calling the ValidateTemplate operation: Stack' \ + ' with id Missing top level item Resources to file module does not exist' + assert True + + +yaml_template = """ + AWSTemplateFormatVersion: '2010-09-09' + Description: Simple CloudFormation Test Template + Resources: + S3Bucket: + Type: AWS::S3::Bucket + Properties: + AccessControl: PublicRead + BucketName: cf-test-bucket-1 +""" + +yaml_bad_template = """ + AWSTemplateFormatVersion: '2010-09-09' + Description: Simple CloudFormation Test Template +""" + +@mock_cloudformation +def test_boto3_yaml_validate_successful(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + response = cf_conn.validate_template( + TemplateBody=yaml_template, + ) + assert response['Description'] == "Simple CloudFormation Test Template" 
+ assert response['Parameters'] == [] + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 + +@mock_cloudformation +def test_boto3_yaml_invalid_missing_resource(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + try: + cf_conn.validate_template( + TemplateBody=yaml_bad_template, + ) + assert False + except botocore.exceptions.ClientError as e: + assert str(e) == 'An error occurred (ValidationError) when calling the ValidateTemplate operation: Stack' \ + ' with id Missing top level item Resources to file module does not exist' + assert True From 4af9407ef4ac22ba2a13a442d44ced30ef74cef8 Mon Sep 17 00:00:00 2001 From: Justin Dray Date: Tue, 6 Nov 2018 00:03:09 +0000 Subject: [PATCH 024/175] Update to jose 3.x --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 046ecf6e0..811be3855 100755 --- a/setup.py +++ b/setup.py @@ -18,7 +18,7 @@ install_requires = [ "pyaml", "pytz", "python-dateutil<3.0.0,>=2.1", - "python-jose<3.0.0", + "python-jose<4.0.0", "mock", "docker>=2.5.1", "jsondiff==1.1.1", From e52158f811818b84f86736727de8868b76180d64 Mon Sep 17 00:00:00 2001 From: Tomoya Iwata Date: Tue, 6 Nov 2018 17:12:13 +0900 Subject: [PATCH 025/175] Add support for IoT detach_policy --- IMPLEMENTATION_COVERAGE.md | 4 ++-- moto/iot/models.py | 9 +++++++++ moto/iot/responses.py | 9 +++++++++ tests/test_iot/test_iot.py | 7 ++++++- 4 files changed, 26 insertions(+), 3 deletions(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index 7a1e2e7aa..0a00dec94 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -2376,7 +2376,7 @@ - [ ] unsubscribe_from_event - [ ] update_assessment_target -## iot - 31% implemented +## iot - 32% implemented - [ ] accept_certificate_transfer - [X] add_thing_to_thing_group - [ ] associate_targets_with_job @@ -2429,7 +2429,7 @@ - [X] describe_thing_group - [ ] describe_thing_registration_task - [X] describe_thing_type -- [ ] 
detach_policy +- [X] detach_policy - [X] detach_principal_policy - [X] detach_thing_principal - [ ] disable_topic_rule diff --git a/moto/iot/models.py b/moto/iot/models.py index db9ad3817..5c33aecda 100644 --- a/moto/iot/models.py +++ b/moto/iot/models.py @@ -445,6 +445,15 @@ class IoTBackend(BaseBackend): return self.principal_policies[k] = (principal, policy) + def detach_policy(self, policy_name, target): + # this may raises ResourceNotFoundException + self._get_principal(target) + self.get_policy(policy_name) + k = (target, policy_name) + if k not in self.principal_policies: + raise ResourceNotFoundException() + del self.principal_policies[k] + def detach_principal_policy(self, policy_name, principal_arn): # this may raises ResourceNotFoundException self._get_principal(principal_arn) diff --git a/moto/iot/responses.py b/moto/iot/responses.py index 042e5a314..214576f52 100644 --- a/moto/iot/responses.py +++ b/moto/iot/responses.py @@ -242,6 +242,15 @@ class IoTResponse(BaseResponse): ) return json.dumps(dict()) + def detach_policy(self): + policy_name = self._get_param("policyName") + target = self._get_param('target') + self.iot_backend.detach_policy( + policy_name=policy_name, + target=target, + ) + return json.dumps(dict()) + def detach_principal_policy(self): policy_name = self._get_param("policyName") principal = self.headers.get('x-amzn-iot-principal') diff --git a/tests/test_iot/test_iot.py b/tests/test_iot/test_iot.py index 47ea9d59b..161c5832a 100644 --- a/tests/test_iot/test_iot.py +++ b/tests/test_iot/test_iot.py @@ -5,6 +5,8 @@ import sure # noqa import boto3 from moto import mock_iot +from botocore.exceptions import ClientError +from nose.tools import assert_raises @mock_iot @@ -338,11 +340,14 @@ def test_principal_policy(): for principal in res['principals']: principal.should_not.be.none - client.detach_principal_policy(policyName=policy_name, principal=cert_arn) + client.detach_policy(policyName=policy_name, target=cert_arn) res = 
client.list_principal_policies(principal=cert_arn) res.should.have.key('policies').which.should.have.length_of(0) res = client.list_policy_principals(policyName=policy_name) res.should.have.key('principals').which.should.have.length_of(0) + with assert_raises(ClientError) as e: + client.detach_policy(policyName=policy_name, target=cert_arn) + e.exception.response['Error']['Code'].should.equal('ResourceNotFoundException') @mock_iot From 0b57ffe26a894ecc7d39a9c1155dadedc0c5f7ba Mon Sep 17 00:00:00 2001 From: Karl Gutwin Date: Wed, 7 Nov 2018 15:03:25 -0500 Subject: [PATCH 026/175] Add StreamSpecification to dynamodb2 package --- moto/dynamodb2/models.py | 22 +++++++++- moto/dynamodb2/responses.py | 9 +++- tests/test_dynamodb2/test_dynamodb.py | 59 +++++++++++++++++++++++++++ 3 files changed, 88 insertions(+), 2 deletions(-) diff --git a/moto/dynamodb2/models.py b/moto/dynamodb2/models.py index a54c4f7d0..16e97ea2f 100644 --- a/moto/dynamodb2/models.py +++ b/moto/dynamodb2/models.py @@ -294,7 +294,7 @@ class Item(BaseModel): class Table(BaseModel): - def __init__(self, table_name, schema=None, attr=None, throughput=None, indexes=None, global_indexes=None): + def __init__(self, table_name, schema=None, attr=None, throughput=None, indexes=None, global_indexes=None, streams=None): self.name = table_name self.attr = attr self.schema = schema @@ -325,10 +325,18 @@ class Table(BaseModel): 'TimeToLiveStatus': 'DISABLED' # One of 'ENABLING'|'DISABLING'|'ENABLED'|'DISABLED', # 'AttributeName': 'string' # Can contain this } + self.set_stream_specification(streams) def _generate_arn(self, name): return 'arn:aws:dynamodb:us-east-1:123456789011:table/' + name + def set_stream_specification(self, streams): + self.stream_specification = streams + if streams and streams.get('StreamEnabled'): + self.latest_stream_label = datetime.datetime.utcnow().isoformat() + else: + self.latest_stream_label = None + def describe(self, base_key='TableDescription'): results = { base_key: { @@ -345,6 
+353,11 @@ class Table(BaseModel): 'LocalSecondaryIndexes': [index for index in self.indexes], } } + if self.stream_specification: + results[base_key]['StreamSpecification'] = self.stream_specification + if self.latest_stream_label: + results[base_key]['LatestStreamLabel'] = self.latest_stream_label + results[base_key]['LatestStreamArn'] = self.table_arn + '/stream/' + self.latest_stream_label return results def __len__(self): @@ -680,6 +693,13 @@ class DynamoDBBackend(BaseBackend): table.throughput = throughput return table + def update_table_streams(self, name, stream_specification): + table = self.tables[name] + if stream_specification['StreamEnabled'] and table.latest_stream_label: + raise ValueError('Table already has stream enabled') + table.set_stream_specification(stream_specification) + return table + def update_table_global_indexes(self, name, global_index_updates): table = self.tables[name] gsis_by_name = dict((i['IndexName'], i) for i in table.global_indexes) diff --git a/moto/dynamodb2/responses.py b/moto/dynamodb2/responses.py index e2f1ef1cc..73bd3ae38 100644 --- a/moto/dynamodb2/responses.py +++ b/moto/dynamodb2/responses.py @@ -104,13 +104,16 @@ class DynamoHandler(BaseResponse): # getting the indexes global_indexes = body.get("GlobalSecondaryIndexes", []) local_secondary_indexes = body.get("LocalSecondaryIndexes", []) + # get the stream specification + streams = body.get("StreamSpecification") table = self.dynamodb_backend.create_table(table_name, schema=key_schema, throughput=throughput, attr=attr, global_indexes=global_indexes, - indexes=local_secondary_indexes) + indexes=local_secondary_indexes, + streams=streams) if table is not None: return dynamo_json_dump(table.describe()) else: @@ -163,12 +166,16 @@ class DynamoHandler(BaseResponse): def update_table(self): name = self.body['TableName'] + table = self.dynamodb_backend.get_table(name) if 'GlobalSecondaryIndexUpdates' in self.body: table = self.dynamodb_backend.update_table_global_indexes( 
name, self.body['GlobalSecondaryIndexUpdates']) if 'ProvisionedThroughput' in self.body: throughput = self.body["ProvisionedThroughput"] table = self.dynamodb_backend.update_table_throughput(name, throughput) + if 'StreamSpecification' in self.body: + table = self.dynamodb_backend.update_table_streams(name, self.body['StreamSpecification']) + return dynamo_json_dump(table.describe()) def describe_table(self): diff --git a/tests/test_dynamodb2/test_dynamodb.py b/tests/test_dynamodb2/test_dynamodb.py index afc919dd7..7f30bbccf 100644 --- a/tests/test_dynamodb2/test_dynamodb.py +++ b/tests/test_dynamodb2/test_dynamodb.py @@ -1336,3 +1336,62 @@ def test_query_global_secondary_index_when_created_via_update_table_resource(): assert len(forum_and_subject_items) == 1 assert forum_and_subject_items[0] == {'user_id': Decimal('1'), 'forum_name': 'cats', 'subject': 'my pet is the cutest'} + + +@mock_dynamodb2 +def test_dynamodb_streams_1(): + conn = boto3.client('dynamodb', region_name='us-east-1') + + resp = conn.create_table( + TableName='test-streams', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}, + StreamSpecification={ + 'StreamEnabled': True, + 'StreamViewType': 'NEW_AND_OLD_IMAGES' + } + ) + + assert 'StreamSpecification' in resp['TableDescription'] + assert resp['TableDescription']['StreamSpecification'] == { + 'StreamEnabled': True, + 'StreamViewType': 'NEW_AND_OLD_IMAGES' + } + assert 'LatestStreamLabel' in resp['TableDescription'] + assert 'LatestStreamArn' in resp['TableDescription'] + + resp = conn.delete_table(TableName='test-streams') + + assert 'StreamSpecification' in resp['TableDescription'] + + +@mock_dynamodb2 +def test_dynamodb_streams_2(): + conn = boto3.client('dynamodb', region_name='us-east-1') + + resp = conn.create_table( + TableName='test-stream-update', + KeySchema=[{'AttributeName': 'id', 
'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}, + ) + + assert 'StreamSpecification' not in resp['TableDescription'] + + resp = conn.update_table( + TableName='test-stream-update', + StreamSpecification={ + 'StreamEnabled': True, + 'StreamViewType': 'NEW_IMAGE' + } + ) + + assert 'StreamSpecification' in resp['TableDescription'] + assert resp['TableDescription']['StreamSpecification'] == { + 'StreamEnabled': True, + 'StreamViewType': 'NEW_IMAGE' + } + assert 'LatestStreamLabel' in resp['TableDescription'] + assert 'LatestStreamArn' in resp['TableDescription'] + From 519899f74f6d50c33ffe82d8d6fee2c86afd3a8e Mon Sep 17 00:00:00 2001 From: Karl Gutwin Date: Wed, 7 Nov 2018 17:10:00 -0500 Subject: [PATCH 027/175] Much of the way towards complete DynamoDB Streams implementation --- moto/__init__.py | 1 + moto/backends.py | 1 + moto/dynamodb2/models.py | 37 ++++++- moto/dynamodbstreams/__init__.py | 6 ++ moto/dynamodbstreams/models.py | 99 +++++++++++++++++++ moto/dynamodbstreams/responses.py | 29 ++++++ moto/dynamodbstreams/urls.py | 10 ++ setup.cfg | 2 +- .../test_dynamodbstreams.py | 96 ++++++++++++++++++ 9 files changed, 278 insertions(+), 3 deletions(-) create mode 100644 moto/dynamodbstreams/__init__.py create mode 100644 moto/dynamodbstreams/models.py create mode 100644 moto/dynamodbstreams/responses.py create mode 100644 moto/dynamodbstreams/urls.py create mode 100644 tests/test_dynamodbstreams/test_dynamodbstreams.py diff --git a/moto/__init__.py b/moto/__init__.py index dd3593d5d..e86c499a7 100644 --- a/moto/__init__.py +++ b/moto/__init__.py @@ -16,6 +16,7 @@ from .cognitoidp import mock_cognitoidp, mock_cognitoidp_deprecated # flake8: n from .datapipeline import mock_datapipeline, mock_datapipeline_deprecated # flake8: noqa from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated # flake8: noqa from .dynamodb2 import mock_dynamodb2, 
mock_dynamodb2_deprecated # flake8: noqa +from .dynamodbstreams import mock_dynamodbstreams # flake8: noqa from .ec2 import mock_ec2, mock_ec2_deprecated # flake8: noqa from .ecr import mock_ecr, mock_ecr_deprecated # flake8: noqa from .ecs import mock_ecs, mock_ecs_deprecated # flake8: noqa diff --git a/moto/backends.py b/moto/backends.py index d95424385..7df167a06 100644 --- a/moto/backends.py +++ b/moto/backends.py @@ -12,6 +12,7 @@ from moto.core import moto_api_backends from moto.datapipeline import datapipeline_backends from moto.dynamodb import dynamodb_backends from moto.dynamodb2 import dynamodb_backends2 +from moto.dynamodbstreams import dynamodbstreams_backends from moto.ec2 import ec2_backends from moto.ecr import ecr_backends from moto.ecs import ecs_backends diff --git a/moto/dynamodb2/models.py b/moto/dynamodb2/models.py index 16e97ea2f..d58241cf6 100644 --- a/moto/dynamodb2/models.py +++ b/moto/dynamodb2/models.py @@ -292,6 +292,34 @@ class Item(BaseModel): 'ADD not supported for %s' % ', '.join(update_action['Value'].keys())) +class StreamShard(BaseModel): + def __init__(self, table): + self.table = table + self.id = 'shardId-00000001541626099285-f35f62ef' + self.starting_sequence_number = 1100000000017454423009 + self.items = [] + self.created_on = datetime.datetime.utcnow() + + def to_json(self): + return { + 'ShardId': self.id, + 'SequenceNumberRange': { + 'StartingSequenceNumber': str(self.starting_sequence_number) + } + } + + def add(self, old, new): + t = self.table.stream_specification['StreamViewType'] + if t == 'KEYS_ONLY': + self.items.append(new.key) + elif t == 'NEW_IMAGE': + self.items.append(new) + elif t == 'OLD_IMAGE': + self.items.append(old) + elif t == 'NEW_AND_OLD_IMAGES': + self.items.append((old, new)) + + class Table(BaseModel): def __init__(self, table_name, schema=None, attr=None, throughput=None, indexes=None, global_indexes=None, streams=None): @@ -334,8 +362,10 @@ class Table(BaseModel): self.stream_specification = 
streams if streams and streams.get('StreamEnabled'): self.latest_stream_label = datetime.datetime.utcnow().isoformat() + self.stream_shard = StreamShard(self) else: self.latest_stream_label = None + self.stream_shard = None def describe(self, base_key='TableDescription'): results = { @@ -398,6 +428,7 @@ class Table(BaseModel): else: range_value = None + current = self.get_item(hash_value, lookup_range_value) item = Item(hash_value, self.hash_key_type, range_value, self.range_key_type, item_attrs) @@ -413,8 +444,6 @@ class Table(BaseModel): else: lookup_range_value = DynamoType(expected_range_value) - current = self.get_item(hash_value, lookup_range_value) - if current is None: current_attr = {} elif hasattr(current, 'attrs'): @@ -445,6 +474,10 @@ class Table(BaseModel): self.items[hash_value][range_value] = item else: self.items[hash_value] = item + + if self.stream_shard is not None: + self.stream_shard.add(current, item) + return item def __nonzero__(self): diff --git a/moto/dynamodbstreams/__init__.py b/moto/dynamodbstreams/__init__.py new file mode 100644 index 000000000..b35879eba --- /dev/null +++ b/moto/dynamodbstreams/__init__.py @@ -0,0 +1,6 @@ +from __future__ import unicode_literals +from .models import dynamodbstreams_backends +from ..core.models import base_decorator + +dynamodbstreams_backend = dynamodbstreams_backends['us-east-1'] +mock_dynamodbstreams = base_decorator(dynamodbstreams_backends) diff --git a/moto/dynamodbstreams/models.py b/moto/dynamodbstreams/models.py new file mode 100644 index 000000000..35704de63 --- /dev/null +++ b/moto/dynamodbstreams/models.py @@ -0,0 +1,99 @@ +from __future__ import unicode_literals + +import os +import json +import boto3 +import base64 +import datetime + +from moto.core import BaseBackend, BaseModel +from moto.dynamodb2.models import dynamodb_backends + + +class ShardIterator(BaseModel): + def __init__(self, stream_shard, shard_iterator_type, sequence_number=None): + self.id = 
base64.b64encode(os.urandom(472)).decode('utf-8') + self.stream_shard = stream_shard + self.shard_iterator_type = shard_iterator_type + if shard_iterator_type == 'TRIM_HORIZON': + self.sequence_number = stream_shard.starting_sequence_number + elif shard_iterator_type == 'LATEST': + self.sequence_number = stream_shard.starting_sequence_number + len(stream_shard.items) + elif shard_iterator_type == 'AT_SEQUENCE_NUMBER': + self.sequence_number = sequence_number + elif shard_iterator_type == 'AFTER_SEQUENCE_NUMBER': + self.sequence_number = sequence_number + 1 + + def to_json(self): + return { + 'ShardIterator': '{}/stream/{}|1|{}'.format( + self.stream_shard.table.table_arn, + self.stream_shard.table.latest_stream_label, + self.id) + } + + +class DynamoDBStreamsBackend(BaseBackend): + def __init__(self, region): + self.region = region + + def reset(self): + region = self.region + self.__dict__ = {} + self.__init__(region) + + @property + def dynamodb(self): + return dynamodb_backends[self.region] + + def _get_table_from_arn(self, arn): + table_name = arn.split(':', 6)[5].split('/')[1] + return self.dynamodb.get_table(table_name) + + def describe_stream(self, arn): + table = self._get_table_from_arn(arn) + resp = {'StreamDescription': { + 'StreamArn': arn, + 'StreamLabel': table.latest_stream_label, + 'StreamStatus': ('ENABLED' if table.latest_stream_label + else 'DISABLED'), + 'StreamViewType': table.stream_specification['StreamViewType'], + 'CreationRequestDateTime': table.stream_shard.created_on.isoformat(), + 'TableName': table.name, + 'KeySchema': table.schema, + 'Shards': ([table.stream_shard.to_json()] if table.stream_shard + else []) + }} + + return json.dumps(resp) + + def list_streams(self, table_name=None): + streams = [] + for table in self.dynamodb.tables.values(): + if table_name is not None and table.name != table_name: + continue + if table.latest_stream_label: + d = table.describe(base_key='Table') + streams.append({ + 'StreamArn': 
d['Table']['LatestStreamArn'], + 'TableName': d['Table']['TableName'], + 'StreamLabel': d['Table']['LatestStreamLabel'] + }) + + return json.dumps({'Streams': streams}) + + def get_shard_iterator(self, arn, shard_id, shard_iterator_type, sequence_number=None): + table = self._get_table_from_arn(arn) + assert table.stream_shard.id == shard_id + + shard_iterator = ShardIterator(table.stream_shard, shard_iterator_type, + sequence_number) + + return json.dumps(shard_iterator.to_json()) + + + +available_regions = boto3.session.Session().get_available_regions( + 'dynamodbstreams') +dynamodbstreams_backends = {region: DynamoDBStreamsBackend(region=region) + for region in available_regions} diff --git a/moto/dynamodbstreams/responses.py b/moto/dynamodbstreams/responses.py new file mode 100644 index 000000000..ef05c1718 --- /dev/null +++ b/moto/dynamodbstreams/responses.py @@ -0,0 +1,29 @@ +from __future__ import unicode_literals + +import json + +from moto.core.responses import BaseResponse + +from .models import dynamodbstreams_backends + + +class DynamoDBStreamsHandler(BaseResponse): + + @property + def backend(self): + return dynamodbstreams_backends[self.region] + + def describe_stream(self): + arn = self._get_param('StreamArn') + return self.backend.describe_stream(arn) + + def list_streams(self): + table_name = self._get_param('TableName') + return self.backend.list_streams(table_name) + + def get_shard_iterator(self): + arn = self._get_param('StreamArn') + shard_id = self._get_param('ShardId') + shard_iterator_type = self._get_param('ShardIteratorType') + return self.backend.get_shard_iterator(arn, shard_id, + shard_iterator_type) diff --git a/moto/dynamodbstreams/urls.py b/moto/dynamodbstreams/urls.py new file mode 100644 index 000000000..1d0f94c35 --- /dev/null +++ b/moto/dynamodbstreams/urls.py @@ -0,0 +1,10 @@ +from __future__ import unicode_literals +from .responses import DynamoDBStreamsHandler + +url_bases = [ + "https?://streams.dynamodb.(.+).amazonaws.com" 
+] + +url_paths = { + "{0}/$": DynamoDBStreamsHandler.dispatch, +} diff --git a/setup.cfg b/setup.cfg index fb04c16a8..9dbd988db 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,7 +1,7 @@ [nosetests] verbosity=1 detailed-errors=1 -with-coverage=1 +#with-coverage=1 cover-package=moto [bdist_wheel] diff --git a/tests/test_dynamodbstreams/test_dynamodbstreams.py b/tests/test_dynamodbstreams/test_dynamodbstreams.py new file mode 100644 index 000000000..a10445aac --- /dev/null +++ b/tests/test_dynamodbstreams/test_dynamodbstreams.py @@ -0,0 +1,96 @@ +from __future__ import unicode_literals, print_function + +import boto3 +from moto import mock_dynamodb2, mock_dynamodbstreams + + +class TestClass(): + stream_arn = None + mocks = [] + + def setup(self): + self.mocks = [mock_dynamodb2(), mock_dynamodbstreams()] + for m in self.mocks: + m.start() + + # create a table with a stream + conn = boto3.client('dynamodb', region_name='us-east-1') + + resp = conn.create_table( + TableName='test-streams', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', + 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, + 'WriteCapacityUnits': 1}, + StreamSpecification={ + 'StreamEnabled': True, + 'StreamViewType': 'NEW_AND_OLD_IMAGES' + } + ) + self.stream_arn = resp['TableDescription']['LatestStreamArn'] + + def teardown(self): + conn = boto3.client('dynamodb', region_name='us-east-1') + conn.delete_table(TableName='test-streams') + self.stream_arn = None + + for m in self.mocks: + m.stop() + + + def test_verify_stream(self): + conn = boto3.client('dynamodb', region_name='us-east-1') + resp = conn.describe_table(TableName='test-streams') + assert 'LatestStreamArn' in resp['Table'] + + def test_describe_stream(self): + conn = boto3.client('dynamodbstreams', region_name='us-east-1') + + resp = conn.describe_stream(StreamArn=self.stream_arn) + assert 'StreamDescription' in resp + desc = resp['StreamDescription'] + assert 
desc['StreamArn'] == self.stream_arn + assert desc['TableName'] == 'test-streams' + + def test_list_streams(self): + conn = boto3.client('dynamodbstreams', region_name='us-east-1') + + resp = conn.list_streams() + assert resp['Streams'][0]['StreamArn'] == self.stream_arn + + resp = conn.list_streams(TableName='no-stream') + assert not resp['Streams'] + + def test_get_shard_iterator(self): + conn = boto3.client('dynamodbstreams', region_name='us-east-1') + + resp = conn.describe_stream(StreamArn=self.stream_arn) + shard_id = resp['StreamDescription']['Shards'][0]['ShardId'] + + resp = conn.get_shard_iterator( + StreamArn=self.stream_arn, + ShardId=shard_id, + ShardIteratorType='TRIM_HORIZON' + ) + assert 'ShardIterator' in resp + + def test_get_records(self): + conn = boto3.client('dynamodbstreams', region_name='us-east-1') + + resp = conn.describe_stream(StreamArn=self.stream_arn) + shard_id = resp['StreamDescription']['Shards'][0]['ShardId'] + + resp = conn.get_shard_iterator( + StreamArn=self.stream_arn, + ShardId=shard_id, + ShardIteratorType='TRIM_HORIZON' + ) + iterator_id = resp['ShardIterator'] + + resp = conn.get_records(ShardIterator=iterator_id) + assert 'Records' in resp + + # TODO: Add tests for inserting records into the stream, and + # the various stream types + From 0f6086f708112c024eb7f3079339b1698a507012 Mon Sep 17 00:00:00 2001 From: Karl Gutwin Date: Thu, 8 Nov 2018 10:54:54 -0500 Subject: [PATCH 028/175] Finalize implementation of DynamoDB Streams --- moto/dynamodb2/models.py | 97 ++++++++++++++----- moto/dynamodbstreams/models.py | 44 +++++++-- moto/dynamodbstreams/responses.py | 7 ++ setup.cfg | 2 +- .../test_dynamodbstreams.py | 49 +++++++++- 5 files changed, 166 insertions(+), 33 deletions(-) diff --git a/moto/dynamodb2/models.py b/moto/dynamodb2/models.py index d58241cf6..4283c038b 100644 --- a/moto/dynamodb2/models.py +++ b/moto/dynamodb2/models.py @@ -1,10 +1,11 @@ from __future__ import unicode_literals -from collections import 
defaultdict +from collections import defaultdict, namedtuple import copy import datetime import decimal import json import re +import uuid import boto3 from moto.compat import OrderedDict @@ -292,6 +293,44 @@ class Item(BaseModel): 'ADD not supported for %s' % ', '.join(update_action['Value'].keys())) +class StreamRecord(BaseModel): + def __init__(self, table, stream_type, event_name, old, new, seq): + old_a = old.to_json()['Attributes'] if old is not None else {} + new_a = new.to_json()['Attributes'] if new is not None else {} + + rec = old if old is not None else new + keys = {table.hash_key_attr: rec.hash_key.to_json()} + if table.range_key_attr is not None: + keys[table.range_key_attr] = rec.range_key.to_json() + + self.record = { + 'eventID': uuid.uuid4().hex, + 'eventName': event_name, + 'eventSource': 'aws:dynamodb', + 'eventVersion': '1.0', + 'awsRegion': 'us-east-1', + 'dynamodb': { + 'StreamViewType': stream_type, + 'ApproximateCreationDateTime': datetime.datetime.utcnow().isoformat(), + 'SequenceNumber': seq, + 'SizeBytes': 1, + 'Keys': keys + } + } + + if stream_type in ('NEW_IMAGE', 'NEW_AND_OLD_IMAGES'): + self.record['dynamodb']['NewImage'] = new_a + if stream_type in ('OLD_IMAGE', 'NEW_AND_OLD_IMAGES'): + self.record['dynamodb']['OldImage'] = old_a + + # This is a substantial overestimate but it's the easiest to do now + self.record['dynamodb']['SizeBytes'] = len( + json.dumps(self.record['dynamodb'])) + + def to_json(self): + return self.record + + class StreamShard(BaseModel): def __init__(self, table): self.table = table @@ -310,15 +349,22 @@ class StreamShard(BaseModel): def add(self, old, new): t = self.table.stream_specification['StreamViewType'] - if t == 'KEYS_ONLY': - self.items.append(new.key) - elif t == 'NEW_IMAGE': - self.items.append(new) - elif t == 'OLD_IMAGE': - self.items.append(old) - elif t == 'NEW_AND_OLD_IMAGES': - self.items.append((old, new)) - + if old is None: + event_name = 'INSERT' + elif new is None: + event_name = 
'DELETE' + else: + event_name = 'MODIFY' + seq = len(self.items) + self.starting_sequence_number + self.items.append( + StreamRecord(self.table, t, event_name, old, new, seq)) + + def get(self, start, quantity): + start -= self.starting_sequence_number + assert start >= 0 + end = start + quantity + return [i.to_json() for i in self.items[start:end]] + class Table(BaseModel): @@ -428,22 +474,22 @@ class Table(BaseModel): else: range_value = None + if expected is None: + expected = {} + lookup_range_value = range_value + else: + expected_range_value = expected.get( + self.range_key_attr, {}).get("Value") + if(expected_range_value is None): + lookup_range_value = range_value + else: + lookup_range_value = DynamoType(expected_range_value) current = self.get_item(hash_value, lookup_range_value) + item = Item(hash_value, self.hash_key_type, range_value, self.range_key_type, item_attrs) if not overwrite: - if expected is None: - expected = {} - lookup_range_value = range_value - else: - expected_range_value = expected.get( - self.range_key_attr, {}).get("Value") - if(expected_range_value is None): - lookup_range_value = range_value - else: - lookup_range_value = DynamoType(expected_range_value) - if current is None: current_attr = {} elif hasattr(current, 'attrs'): @@ -508,9 +554,14 @@ class Table(BaseModel): def delete_item(self, hash_key, range_key): try: if range_key: - return self.items[hash_key].pop(range_key) + item = self.items[hash_key].pop(range_key) else: - return self.items.pop(hash_key) + item = self.items.pop(hash_key) + + if self.stream_shard is not None: + self.stream_shard.add(item, None) + + return item except KeyError: return None diff --git a/moto/dynamodbstreams/models.py b/moto/dynamodbstreams/models.py index 35704de63..7b43eb744 100644 --- a/moto/dynamodbstreams/models.py +++ b/moto/dynamodbstreams/models.py @@ -11,8 +11,9 @@ from moto.dynamodb2.models import dynamodb_backends class ShardIterator(BaseModel): - def __init__(self, stream_shard, 
shard_iterator_type, sequence_number=None): + def __init__(self, streams_backend, stream_shard, shard_iterator_type, sequence_number=None): self.id = base64.b64encode(os.urandom(472)).decode('utf-8') + self.streams_backend = streams_backend self.stream_shard = stream_shard self.shard_iterator_type = shard_iterator_type if shard_iterator_type == 'TRIM_HORIZON': @@ -24,18 +25,43 @@ class ShardIterator(BaseModel): elif shard_iterator_type == 'AFTER_SEQUENCE_NUMBER': self.sequence_number = sequence_number + 1 + @property + def arn(self): + return '{}/stream/{}|1|{}'.format( + self.stream_shard.table.table_arn, + self.stream_shard.table.latest_stream_label, + self.id) + def to_json(self): return { - 'ShardIterator': '{}/stream/{}|1|{}'.format( - self.stream_shard.table.table_arn, - self.stream_shard.table.latest_stream_label, - self.id) + 'ShardIterator': self.arn + } + + def get(self, limit=1000): + items = self.stream_shard.get(self.sequence_number, limit) + try: + last_sequence_number = max(i['dynamodb']['SequenceNumber'] for i in items) + new_shard_iterator = ShardIterator(self.streams_backend, + self.stream_shard, + 'AFTER_SEQUENCE_NUMBER', + last_sequence_number) + except ValueError: + new_shard_iterator = ShardIterator(self.streams_backend, + self.stream_shard, + 'AT_SEQUENCE_NUMBER', + self.sequence_number) + + self.streams_backend.shard_iterators[new_shard_iterator.arn] = new_shard_iterator + return { + 'NextShardIterator': new_shard_iterator.arn, + 'Records': items } class DynamoDBStreamsBackend(BaseBackend): def __init__(self, region): self.region = region + self.shard_iterators = {} def reset(self): region = self.region @@ -86,11 +112,17 @@ class DynamoDBStreamsBackend(BaseBackend): table = self._get_table_from_arn(arn) assert table.stream_shard.id == shard_id - shard_iterator = ShardIterator(table.stream_shard, shard_iterator_type, + shard_iterator = ShardIterator(self, table.stream_shard, + shard_iterator_type, sequence_number) + 
self.shard_iterators[shard_iterator.arn] = shard_iterator return json.dumps(shard_iterator.to_json()) + def get_records(self, iterator_arn, limit): + shard_iterator = self.shard_iterators[iterator_arn] + return json.dumps(shard_iterator.get(limit)) + available_regions = boto3.session.Session().get_available_regions( diff --git a/moto/dynamodbstreams/responses.py b/moto/dynamodbstreams/responses.py index ef05c1718..c07377d38 100644 --- a/moto/dynamodbstreams/responses.py +++ b/moto/dynamodbstreams/responses.py @@ -27,3 +27,10 @@ class DynamoDBStreamsHandler(BaseResponse): shard_iterator_type = self._get_param('ShardIteratorType') return self.backend.get_shard_iterator(arn, shard_id, shard_iterator_type) + + def get_records(self): + arn = self._get_param('ShardIterator') + limit = self._get_param('Limit') + if limit is None: + limit = 1000 + return self.backend.get_records(arn, limit) diff --git a/setup.cfg b/setup.cfg index 9dbd988db..fb04c16a8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,7 +1,7 @@ [nosetests] verbosity=1 detailed-errors=1 -#with-coverage=1 +with-coverage=1 cover-package=moto [bdist_wheel] diff --git a/tests/test_dynamodbstreams/test_dynamodbstreams.py b/tests/test_dynamodbstreams/test_dynamodbstreams.py index a10445aac..94c0c51b2 100644 --- a/tests/test_dynamodbstreams/test_dynamodbstreams.py +++ b/tests/test_dynamodbstreams/test_dynamodbstreams.py @@ -75,7 +75,7 @@ class TestClass(): ) assert 'ShardIterator' in resp - def test_get_records(self): + def test_get_records_empty(self): conn = boto3.client('dynamodbstreams', region_name='us-east-1') resp = conn.describe_stream(StreamArn=self.stream_arn) @@ -90,7 +90,50 @@ class TestClass(): resp = conn.get_records(ShardIterator=iterator_id) assert 'Records' in resp + assert len(resp['Records']) == 0 - # TODO: Add tests for inserting records into the stream, and - # the various stream types + def test_get_records_seq(self): + conn = boto3.client('dynamodb', region_name='us-east-1') + + conn.put_item( + 
TableName='test-streams', + Item={ + 'id': {'S': 'entry1'}, + 'first_col': {'S': 'foo'} + } + ) + conn.put_item( + TableName='test-streams', + Item={ + 'id': {'S': 'entry1'}, + 'first_col': {'S': 'bar'}, + 'second_col': {'S': 'baz'} + } + ) + conn.delete_item( + TableName='test-streams', + Key={'id': {'S': 'entry1'}} + ) + conn = boto3.client('dynamodbstreams', region_name='us-east-1') + + resp = conn.describe_stream(StreamArn=self.stream_arn) + shard_id = resp['StreamDescription']['Shards'][0]['ShardId'] + + resp = conn.get_shard_iterator( + StreamArn=self.stream_arn, + ShardId=shard_id, + ShardIteratorType='TRIM_HORIZON' + ) + iterator_id = resp['ShardIterator'] + + resp = conn.get_records(ShardIterator=iterator_id) + assert len(resp['Records']) == 3 + assert resp['Records'][0]['eventName'] == 'INSERT' + assert resp['Records'][1]['eventName'] == 'MODIFY' + assert resp['Records'][2]['eventName'] == 'DELETE' + + # now try fetching from the next shard iterator, it should be + # empty + resp = conn.get_records(ShardIterator=resp['NextShardIterator']) + assert len(resp['Records']) == 0 From ff6a57f44391e2e8523c1ca49022ec39ed0facd6 Mon Sep 17 00:00:00 2001 From: Karl Gutwin Date: Thu, 8 Nov 2018 11:08:24 -0500 Subject: [PATCH 029/175] Fix flake8 failures --- moto/backends.py | 1 + moto/dynamodb2/models.py | 16 ++++++++-------- moto/dynamodb2/responses.py | 2 +- moto/dynamodbstreams/models.py | 14 ++++++-------- moto/dynamodbstreams/responses.py | 4 +--- 5 files changed, 17 insertions(+), 20 deletions(-) diff --git a/moto/backends.py b/moto/backends.py index 7df167a06..1a333415e 100644 --- a/moto/backends.py +++ b/moto/backends.py @@ -60,6 +60,7 @@ BACKENDS = { 'datapipeline': datapipeline_backends, 'dynamodb': dynamodb_backends, 'dynamodb2': dynamodb_backends2, + 'dynamodbstreams': dynamodbstreams_backends, 'ec2': ec2_backends, 'ecr': ecr_backends, 'ecs': ecs_backends, diff --git a/moto/dynamodb2/models.py b/moto/dynamodb2/models.py index 4283c038b..e2882f1e4 100644 
--- a/moto/dynamodb2/models.py +++ b/moto/dynamodb2/models.py @@ -1,5 +1,5 @@ from __future__ import unicode_literals -from collections import defaultdict, namedtuple +from collections import defaultdict import copy import datetime import decimal @@ -302,7 +302,7 @@ class StreamRecord(BaseModel): keys = {table.hash_key_attr: rec.hash_key.to_json()} if table.range_key_attr is not None: keys[table.range_key_attr] = rec.range_key.to_json() - + self.record = { 'eventID': uuid.uuid4().hex, 'eventName': event_name, @@ -317,7 +317,7 @@ class StreamRecord(BaseModel): 'Keys': keys } } - + if stream_type in ('NEW_IMAGE', 'NEW_AND_OLD_IMAGES'): self.record['dynamodb']['NewImage'] = new_a if stream_type in ('OLD_IMAGE', 'NEW_AND_OLD_IMAGES'): @@ -364,7 +364,7 @@ class StreamShard(BaseModel): assert start >= 0 end = start + quantity return [i.to_json() for i in self.items[start:end]] - + class Table(BaseModel): @@ -485,7 +485,7 @@ class Table(BaseModel): else: lookup_range_value = DynamoType(expected_range_value) current = self.get_item(hash_value, lookup_range_value) - + item = Item(hash_value, self.hash_key_type, range_value, self.range_key_type, item_attrs) @@ -520,10 +520,10 @@ class Table(BaseModel): self.items[hash_value][range_value] = item else: self.items[hash_value] = item - + if self.stream_shard is not None: self.stream_shard.add(current, item) - + return item def __nonzero__(self): @@ -560,7 +560,7 @@ class Table(BaseModel): if self.stream_shard is not None: self.stream_shard.add(item, None) - + return item except KeyError: return None diff --git a/moto/dynamodb2/responses.py b/moto/dynamodb2/responses.py index 73bd3ae38..d2bf99bfe 100644 --- a/moto/dynamodb2/responses.py +++ b/moto/dynamodb2/responses.py @@ -175,7 +175,7 @@ class DynamoHandler(BaseResponse): table = self.dynamodb_backend.update_table_throughput(name, throughput) if 'StreamSpecification' in self.body: table = self.dynamodb_backend.update_table_streams(name, self.body['StreamSpecification']) - + 
return dynamo_json_dump(table.describe()) def describe_table(self): diff --git a/moto/dynamodbstreams/models.py b/moto/dynamodbstreams/models.py index 7b43eb744..41cc6e280 100644 --- a/moto/dynamodbstreams/models.py +++ b/moto/dynamodbstreams/models.py @@ -4,7 +4,6 @@ import os import json import boto3 import base64 -import datetime from moto.core import BaseBackend, BaseModel from moto.dynamodb2.models import dynamodb_backends @@ -31,7 +30,7 @@ class ShardIterator(BaseModel): self.stream_shard.table.table_arn, self.stream_shard.table.latest_stream_label, self.id) - + def to_json(self): return { 'ShardIterator': self.arn @@ -67,7 +66,7 @@ class DynamoDBStreamsBackend(BaseBackend): region = self.region self.__dict__ = {} self.__init__(region) - + @property def dynamodb(self): return dynamodb_backends[self.region] @@ -75,7 +74,7 @@ class DynamoDBStreamsBackend(BaseBackend): def _get_table_from_arn(self, arn): table_name = arn.split(':', 6)[5].split('/')[1] return self.dynamodb.get_table(table_name) - + def describe_stream(self, arn): table = self._get_table_from_arn(arn) resp = {'StreamDescription': { @@ -90,7 +89,7 @@ class DynamoDBStreamsBackend(BaseBackend): 'Shards': ([table.stream_shard.to_json()] if table.stream_shard else []) }} - + return json.dumps(resp) def list_streams(self, table_name=None): @@ -105,7 +104,7 @@ class DynamoDBStreamsBackend(BaseBackend): 'TableName': d['Table']['TableName'], 'StreamLabel': d['Table']['LatestStreamLabel'] }) - + return json.dumps({'Streams': streams}) def get_shard_iterator(self, arn, shard_id, shard_iterator_type, sequence_number=None): @@ -116,14 +115,13 @@ class DynamoDBStreamsBackend(BaseBackend): shard_iterator_type, sequence_number) self.shard_iterators[shard_iterator.arn] = shard_iterator - + return json.dumps(shard_iterator.to_json()) def get_records(self, iterator_arn, limit): shard_iterator = self.shard_iterators[iterator_arn] return json.dumps(shard_iterator.get(limit)) - available_regions = 
boto3.session.Session().get_available_regions( 'dynamodbstreams') diff --git a/moto/dynamodbstreams/responses.py b/moto/dynamodbstreams/responses.py index c07377d38..c9c113615 100644 --- a/moto/dynamodbstreams/responses.py +++ b/moto/dynamodbstreams/responses.py @@ -1,7 +1,5 @@ from __future__ import unicode_literals -import json - from moto.core.responses import BaseResponse from .models import dynamodbstreams_backends @@ -12,7 +10,7 @@ class DynamoDBStreamsHandler(BaseResponse): @property def backend(self): return dynamodbstreams_backends[self.region] - + def describe_stream(self): arn = self._get_param('StreamArn') return self.backend.describe_stream(arn) From 9d190aa04e4c66831449a1a5cd3673c35e995bf9 Mon Sep 17 00:00:00 2001 From: Karl Gutwin Date: Thu, 8 Nov 2018 13:22:24 -0500 Subject: [PATCH 030/175] Tweak functionality and add tests --- moto/dynamodb2/models.py | 17 ++-- .../test_dynamodbstreams.py | 99 ++++++++++++++++++- 2 files changed, 108 insertions(+), 8 deletions(-) diff --git a/moto/dynamodb2/models.py b/moto/dynamodb2/models.py index e2882f1e4..d15b9fce5 100644 --- a/moto/dynamodb2/models.py +++ b/moto/dynamodb2/models.py @@ -406,10 +406,13 @@ class Table(BaseModel): def set_stream_specification(self, streams): self.stream_specification = streams - if streams and streams.get('StreamEnabled'): + if streams and (streams.get('StreamEnabled') + or streams.get('StreamViewType')): + self.stream_specification['StreamEnabled'] = True self.latest_stream_label = datetime.datetime.utcnow().isoformat() self.stream_shard = StreamShard(self) else: + self.stream_specification = {'StreamEnabled': False} self.latest_stream_label = None self.stream_shard = None @@ -429,11 +432,11 @@ class Table(BaseModel): 'LocalSecondaryIndexes': [index for index in self.indexes], } } - if self.stream_specification: + if self.stream_specification and self.stream_specification['StreamEnabled']: results[base_key]['StreamSpecification'] = self.stream_specification - if 
self.latest_stream_label: - results[base_key]['LatestStreamLabel'] = self.latest_stream_label - results[base_key]['LatestStreamArn'] = self.table_arn + '/stream/' + self.latest_stream_label + if self.latest_stream_label: + results[base_key]['LatestStreamLabel'] = self.latest_stream_label + results[base_key]['LatestStreamArn'] = self.table_arn + '/stream/' + self.latest_stream_label return results def __len__(self): @@ -779,7 +782,9 @@ class DynamoDBBackend(BaseBackend): def update_table_streams(self, name, stream_specification): table = self.tables[name] - if stream_specification['StreamEnabled'] and table.latest_stream_label: + if ((stream_specification.get('StreamEnabled') + or stream_specification.get('StreamViewType')) + and table.latest_stream_label): raise ValueError('Table already has stream enabled') table.set_stream_specification(stream_specification) return table diff --git a/tests/test_dynamodbstreams/test_dynamodbstreams.py b/tests/test_dynamodbstreams/test_dynamodbstreams.py index 94c0c51b2..7e4025626 100644 --- a/tests/test_dynamodbstreams/test_dynamodbstreams.py +++ b/tests/test_dynamodbstreams/test_dynamodbstreams.py @@ -1,10 +1,12 @@ from __future__ import unicode_literals, print_function +from nose.tools import assert_raises + import boto3 from moto import mock_dynamodb2, mock_dynamodbstreams -class TestClass(): +class TestCore(): stream_arn = None mocks = [] @@ -84,7 +86,7 @@ class TestClass(): resp = conn.get_shard_iterator( StreamArn=self.stream_arn, ShardId=shard_id, - ShardIteratorType='TRIM_HORIZON' + ShardIteratorType='LATEST' ) iterator_id = resp['ShardIterator'] @@ -137,3 +139,96 @@ class TestClass(): # empty resp = conn.get_records(ShardIterator=resp['NextShardIterator']) assert len(resp['Records']) == 0 + + +class TestEdges(): + mocks = [] + + def setup(self): + self.mocks = [mock_dynamodb2(), mock_dynamodbstreams()] + for m in self.mocks: + m.start() + + def teardown(self): + for m in self.mocks: + m.stop() + + + def 
test_enable_stream_on_table(self): + conn = boto3.client('dynamodb', region_name='us-east-1') + resp = conn.create_table( + TableName='test-streams', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', + 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, + 'WriteCapacityUnits': 1} + ) + assert 'StreamSpecification' not in resp['TableDescription'] + + resp = conn.update_table( + TableName='test-streams', + StreamSpecification={ + 'StreamViewType': 'KEYS_ONLY' + } + ) + assert 'StreamSpecification' in resp['TableDescription'] + assert resp['TableDescription']['StreamSpecification'] == { + 'StreamEnabled': True, + 'StreamViewType': 'KEYS_ONLY' + } + assert 'LatestStreamLabel' in resp['TableDescription'] + + # now try to enable it again + with assert_raises(ValueError): + resp = conn.update_table( + TableName='test-streams', + StreamSpecification={ + 'StreamViewType': 'OLD_IMAGES' + } + ) + + def test_stream_with_range_key(self): + dyn = boto3.client('dynamodb', region_name='us-east-1') + + resp = dyn.create_table( + TableName='test-streams', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}, + {'AttributeName': 'color', 'KeyType': 'RANGE'}], + AttributeDefinitions=[{'AttributeName': 'id', + 'AttributeType': 'S'}, + {'AttributeName': 'color', + 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, + 'WriteCapacityUnits': 1}, + StreamSpecification={ + 'StreamViewType': 'NEW_IMAGES' + } + ) + stream_arn = resp['TableDescription']['LatestStreamArn'] + + streams = boto3.client('dynamodbstreams', region_name='us-east-1') + resp = streams.describe_stream(StreamArn=stream_arn) + shard_id = resp['StreamDescription']['Shards'][0]['ShardId'] + + resp = streams.get_shard_iterator( + StreamArn=stream_arn, + ShardId=shard_id, + ShardIteratorType='LATEST' + ) + iterator_id = resp['ShardIterator'] + + dyn.put_item( + TableName='test-streams', + Item={'id': {'S': 'row1'}, 'color': 
{'S': 'blue'}} + ) + dyn.put_item( + TableName='test-streams', + Item={'id': {'S': 'row2'}, 'color': {'S': 'green'}} + ) + + resp = streams.get_records(ShardIterator=iterator_id) + assert len(resp['Records']) == 2 + assert resp['Records'][0]['eventName'] == 'INSERT' + assert resp['Records'][1]['eventName'] == 'INSERT' + From e7b01292e96172a1d716fc80c7df628e00c2b83e Mon Sep 17 00:00:00 2001 From: Karl Gutwin Date: Thu, 8 Nov 2018 13:57:44 -0500 Subject: [PATCH 031/175] Fix test failures in server mode --- moto/dynamodb2/responses.py | 6 +++++- moto/server.py | 11 +++++++---- tests/test_dynamodbstreams/test_dynamodbstreams.py | 2 +- 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/moto/dynamodb2/responses.py b/moto/dynamodb2/responses.py index d2bf99bfe..79dd749e2 100644 --- a/moto/dynamodb2/responses.py +++ b/moto/dynamodb2/responses.py @@ -174,7 +174,11 @@ class DynamoHandler(BaseResponse): throughput = self.body["ProvisionedThroughput"] table = self.dynamodb_backend.update_table_throughput(name, throughput) if 'StreamSpecification' in self.body: - table = self.dynamodb_backend.update_table_streams(name, self.body['StreamSpecification']) + try: + table = self.dynamodb_backend.update_table_streams(name, self.body['StreamSpecification']) + except ValueError: + er = 'com.amazonaws.dynamodb.v20111205#ResourceInUseException' + return self.error(er, 'Cannot enable stream') return dynamo_json_dump(table.describe()) diff --git a/moto/server.py b/moto/server.py index ba2470478..5ad02d383 100644 --- a/moto/server.py +++ b/moto/server.py @@ -80,10 +80,13 @@ class DomainDispatcherApplication(object): region = 'us-east-1' if service == 'dynamodb': - dynamo_api_version = environ['HTTP_X_AMZ_TARGET'].split("_")[1].split(".")[0] - # If Newer API version, use dynamodb2 - if dynamo_api_version > "20111205": - host = "dynamodb2" + if environ['HTTP_X_AMZ_TARGET'].startswith('DynamoDBStreams'): + host = 'dynamodbstreams' + else: + dynamo_api_version = 
environ['HTTP_X_AMZ_TARGET'].split("_")[1].split(".")[0] + # If Newer API version, use dynamodb2 + if dynamo_api_version > "20111205": + host = "dynamodb2" else: host = "{service}.{region}.amazonaws.com".format( service=service, region=region) diff --git a/tests/test_dynamodbstreams/test_dynamodbstreams.py b/tests/test_dynamodbstreams/test_dynamodbstreams.py index 7e4025626..b60c21053 100644 --- a/tests/test_dynamodbstreams/test_dynamodbstreams.py +++ b/tests/test_dynamodbstreams/test_dynamodbstreams.py @@ -180,7 +180,7 @@ class TestEdges(): assert 'LatestStreamLabel' in resp['TableDescription'] # now try to enable it again - with assert_raises(ValueError): + with assert_raises(conn.exceptions.ResourceInUseException): resp = conn.update_table( TableName='test-streams', StreamSpecification={ From 770ad1db565e345b1f11edfcba0225307d52be5d Mon Sep 17 00:00:00 2001 From: Karl Gutwin Date: Thu, 8 Nov 2018 16:21:06 -0500 Subject: [PATCH 032/175] Correct behavior of ReturnValues parameter to put_item and update_item --- moto/dynamodb2/responses.py | 38 +++++++++++++++- tests/test_dynamodb2/test_dynamodb.py | 63 +++++++++++++++++++++++++++ 2 files changed, 99 insertions(+), 2 deletions(-) diff --git a/moto/dynamodb2/responses.py b/moto/dynamodb2/responses.py index e2f1ef1cc..cb716d563 100644 --- a/moto/dynamodb2/responses.py +++ b/moto/dynamodb2/responses.py @@ -183,6 +183,7 @@ class DynamoHandler(BaseResponse): def put_item(self): name = self.body['TableName'] item = self.body['Item'] + return_values = self.body.get('ReturnValues', 'NONE') if has_empty_keys_or_values(item): return get_empty_str_error() @@ -193,6 +194,13 @@ class DynamoHandler(BaseResponse): else: expected = None + if return_values == 'ALL_OLD': + existing_item = self.dynamodb_backend.get_item(name, item) + if existing_item: + existing_attributes = existing_item.to_json()['Attributes'] + else: + existing_attributes = {} + # Attempt to parse simple ConditionExpressions into an Expected # expression if not 
expected: @@ -228,6 +236,10 @@ class DynamoHandler(BaseResponse): 'TableName': name, 'CapacityUnits': 1 } + if return_values == 'ALL_OLD': + item_dict['Attributes'] = existing_attributes + else: + item_dict.pop('Attributes', None) return dynamo_json_dump(item_dict) else: er = 'com.amazonaws.dynamodb.v20111205#ResourceNotFoundException' @@ -527,9 +539,9 @@ class DynamoHandler(BaseResponse): return dynamo_json_dump(item_dict) def update_item(self): - name = self.body['TableName'] key = self.body['Key'] + return_values = self.body.get('ReturnValues', 'NONE') update_expression = self.body.get('UpdateExpression') attribute_updates = self.body.get('AttributeUpdates') expression_attribute_names = self.body.get( @@ -537,6 +549,10 @@ class DynamoHandler(BaseResponse): expression_attribute_values = self.body.get( 'ExpressionAttributeValues', {}) existing_item = self.dynamodb_backend.get_item(name, key) + if existing_item: + existing_attributes = existing_item.to_json()['Attributes'] + else: + existing_attributes = {} if has_empty_keys_or_values(expression_attribute_values): return get_empty_str_error() @@ -591,8 +607,26 @@ class DynamoHandler(BaseResponse): 'TableName': name, 'CapacityUnits': 0.5 } - if not existing_item: + unchanged_attributes = { + k for k in existing_attributes.keys() + if existing_attributes[k] == item_dict['Attributes'].get(k) + } + changed_attributes = set(existing_attributes.keys()).union(item_dict['Attributes'].keys()).difference(unchanged_attributes) + + if return_values == 'NONE': item_dict['Attributes'] = {} + elif return_values == 'ALL_OLD': + item_dict['Attributes'] = existing_attributes + elif return_values == 'UPDATED_OLD': + item_dict['Attributes'] = { + k: v for k, v in existing_attributes.items() + if k in changed_attributes + } + elif return_values == 'UPDATED_NEW': + item_dict['Attributes'] = { + k: v for k, v in item_dict['Attributes'].items() + if k in changed_attributes + } return dynamo_json_dump(item_dict) diff --git 
a/tests/test_dynamodb2/test_dynamodb.py b/tests/test_dynamodb2/test_dynamodb.py index afc919dd7..5bfe8f9bd 100644 --- a/tests/test_dynamodb2/test_dynamodb.py +++ b/tests/test_dynamodb2/test_dynamodb.py @@ -1246,6 +1246,69 @@ def test_update_if_not_exists(): assert resp['Items'][0]['created_at'] == 123 +# https://github.com/spulec/moto/issues/1937 +@mock_dynamodb2 +def test_update_return_attributes(): + dynamodb = boto3.client('dynamodb', region_name='us-east-1') + + dynamodb.create_table( + TableName='moto-test', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1} + ) + + def update(col, to, rv): + return dynamodb.update_item( + TableName='moto-test', + Key={'id': {'S': 'foo'}}, + AttributeUpdates={col: {'Value': {'S': to}, 'Action': 'PUT'}}, + ReturnValues=rv + ) + + r = update('col1', 'val1', 'ALL_NEW') + assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}} + + r = update('col1', 'val2', 'ALL_OLD') + assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}} + + r = update('col2', 'val3', 'UPDATED_NEW') + assert r['Attributes'] == {'col2': {'S': 'val3'}} + + r = update('col2', 'val4', 'UPDATED_OLD') + assert r['Attributes'] == {'col2': {'S': 'val3'}} + + r = update('col1', 'val5', 'NONE') + assert r['Attributes'] == {} + + +@mock_dynamodb2 +def test_put_return_attributes(): + dynamodb = boto3.client('dynamodb', region_name='us-east-1') + + dynamodb.create_table( + TableName='moto-test', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1} + ) + + r = dynamodb.put_item( + TableName='moto-test', + Item={'id': {'S': 'foo'}, 'col1': {'S': 'val1'}}, + ReturnValues='NONE' + ) + assert 'Attributes' not in r + + r = dynamodb.put_item( + 
TableName='moto-test', + Item={'id': {'S': 'foo'}, 'col1': {'S': 'val2'}}, + ReturnValues='ALL_OLD' + ) + assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}} + + + @mock_dynamodb2 def test_query_global_secondary_index_when_created_via_update_table_resource(): dynamodb = boto3.resource('dynamodb', region_name='us-east-1') From 46c0f8915f913b92190986ba74ea2fb266dd28c9 Mon Sep 17 00:00:00 2001 From: Karl Gutwin Date: Fri, 9 Nov 2018 09:32:19 -0500 Subject: [PATCH 033/175] Fix flake8 failures (boo) --- moto/dynamodb2/models.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/moto/dynamodb2/models.py b/moto/dynamodb2/models.py index d15b9fce5..8187ceaf9 100644 --- a/moto/dynamodb2/models.py +++ b/moto/dynamodb2/models.py @@ -406,8 +406,7 @@ class Table(BaseModel): def set_stream_specification(self, streams): self.stream_specification = streams - if streams and (streams.get('StreamEnabled') - or streams.get('StreamViewType')): + if streams and (streams.get('StreamEnabled') or streams.get('StreamViewType')): self.stream_specification['StreamEnabled'] = True self.latest_stream_label = datetime.datetime.utcnow().isoformat() self.stream_shard = StreamShard(self) @@ -782,9 +781,7 @@ class DynamoDBBackend(BaseBackend): def update_table_streams(self, name, stream_specification): table = self.tables[name] - if ((stream_specification.get('StreamEnabled') - or stream_specification.get('StreamViewType')) - and table.latest_stream_label): + if (stream_specification.get('StreamEnabled') or stream_specification.get('StreamViewType')) and table.latest_stream_label: raise ValueError('Table already has stream enabled') table.set_stream_specification(stream_specification) return table From 2ec32c80f797039da8d7d7dd7096241f960ab33f Mon Sep 17 00:00:00 2001 From: Karl Gutwin Date: Fri, 9 Nov 2018 13:21:38 -0500 Subject: [PATCH 034/175] Merge in functionality from #1899 --- moto/dynamodb2/responses.py | 15 ++++++++++++++- 
tests/test_dynamodb2/test_dynamodb.py | 19 ++++++++++++++++++- 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/moto/dynamodb2/responses.py b/moto/dynamodb2/responses.py index cb716d563..03abf73c3 100644 --- a/moto/dynamodb2/responses.py +++ b/moto/dynamodb2/responses.py @@ -185,6 +185,10 @@ class DynamoHandler(BaseResponse): item = self.body['Item'] return_values = self.body.get('ReturnValues', 'NONE') + if return_values not in ('ALL_OLD', 'NONE'): + er = 'com.amazonaws.dynamodb.v20111205#ValidationException' + return self.error(er, 'Return values set to invalid value') + if has_empty_keys_or_values(item): return get_empty_str_error() @@ -524,7 +528,11 @@ class DynamoHandler(BaseResponse): def delete_item(self): name = self.body['TableName'] keys = self.body['Key'] - return_values = self.body.get('ReturnValues', '') + return_values = self.body.get('ReturnValues', 'NONE') + if return_values not in ('ALL_OLD', 'NONE'): + er = 'com.amazonaws.dynamodb.v20111205#ValidationException' + return self.error(er, 'Return values set to invalid value') + table = self.dynamodb_backend.get_table(name) if not table: er = 'com.amazonaws.dynamodb.v20120810#ConditionalCheckFailedException' @@ -554,6 +562,11 @@ class DynamoHandler(BaseResponse): else: existing_attributes = {} + if return_values not in ('NONE', 'ALL_OLD', 'ALL_NEW', 'UPDATED_OLD', + 'UPDATED_NEW'): + er = 'com.amazonaws.dynamodb.v20111205#ValidationException' + return self.error(er, 'Return values set to invalid value') + if has_empty_keys_or_values(expression_attribute_values): return get_empty_str_error() diff --git a/tests/test_dynamodb2/test_dynamodb.py b/tests/test_dynamodb2/test_dynamodb.py index 5bfe8f9bd..47a714b1d 100644 --- a/tests/test_dynamodb2/test_dynamodb.py +++ b/tests/test_dynamodb2/test_dynamodb.py @@ -1000,6 +1000,11 @@ def test_delete_item(): response = table.scan() assert response['Count'] == 2 + # Test ReturnValues validation + with assert_raises(ClientError) as ex: + 
table.delete_item(Key={'client': 'client1', 'app': 'app1'}, + ReturnValues='ALL_NEW') + # Test deletion and returning old value response = table.delete_item(Key={'client': 'client1', 'app': 'app1'}, ReturnValues='ALL_OLD') response['Attributes'].should.contain('client') @@ -1281,6 +1286,9 @@ def test_update_return_attributes(): r = update('col1', 'val5', 'NONE') assert r['Attributes'] == {} + with assert_raises(ClientError) as ex: + r = update('col1', 'val6', 'WRONG') + @mock_dynamodb2 def test_put_return_attributes(): @@ -1306,7 +1314,16 @@ def test_put_return_attributes(): ReturnValues='ALL_OLD' ) assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}} - + + with assert_raises(ClientError) as ex: + dynamodb.put_item( + TableName='moto-test', + Item={'id': {'S': 'foo'}, 'col1': {'S': 'val3'}}, + ReturnValues='ALL_NEW' + ) + ex.exception.response['Error']['Code'].should.equal('ValidationException') + ex.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) + ex.exception.response['Error']['Message'].should.equal('Return values set to invalid value') @mock_dynamodb2 From 6e6f23a1b0b7f4c9bce4060b5d32433c776c2dba Mon Sep 17 00:00:00 2001 From: Mark Challoner Date: Wed, 14 Nov 2018 12:03:42 +0000 Subject: [PATCH 035/175] Set deleted status on vpc peer deletion. 
--- moto/ec2/models.py | 9 ++++++--- tests/test_ec2/test_vpc_peering.py | 3 ++- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/moto/ec2/models.py b/moto/ec2/models.py index b94cac479..f7d1eb044 100755 --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -2230,6 +2230,10 @@ class VPCPeeringConnectionStatus(object): self.code = code self.message = message + def deleted(self): + self.code = 'deleted' + self.message = 'Deleted by {deleter ID}' + def initiating(self): self.code = 'initiating-request' self.message = 'Initiating Request to {accepter ID}' @@ -2292,9 +2296,8 @@ class VPCPeeringConnectionBackend(object): return self.vpc_pcxs.get(vpc_pcx_id) def delete_vpc_peering_connection(self, vpc_pcx_id): - deleted = self.vpc_pcxs.pop(vpc_pcx_id, None) - if not deleted: - raise InvalidVPCPeeringConnectionIdError(vpc_pcx_id) + deleted = self.get_vpc_peering_connection(vpc_pcx_id) + deleted._status.deleted() return deleted def accept_vpc_peering_connection(self, vpc_pcx_id): diff --git a/tests/test_ec2/test_vpc_peering.py b/tests/test_ec2/test_vpc_peering.py index 1f98791b3..082499a72 100644 --- a/tests/test_ec2/test_vpc_peering.py +++ b/tests/test_ec2/test_vpc_peering.py @@ -89,7 +89,8 @@ def test_vpc_peering_connections_delete(): verdict.should.equal(True) all_vpc_pcxs = conn.get_all_vpc_peering_connections() - all_vpc_pcxs.should.have.length_of(0) + all_vpc_pcxs.should.have.length_of(1) + all_vpc_pcxs[0]._status.code.should.equal('deleted') with assert_raises(EC2ResponseError) as cm: conn.delete_vpc_peering_connection("pcx-1234abcd") From b5eb724773d23f34fc9093bb06b6cdb5813c668b Mon Sep 17 00:00:00 2001 From: Jon Miller Date: Thu, 15 Nov 2018 15:24:45 -0800 Subject: [PATCH 036/175] Add default path to Role & InstanceProfile --- moto/iam/models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/moto/iam/models.py b/moto/iam/models.py index 4a5240a08..522915a24 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -114,7 +114,7 
@@ class Role(BaseModel): self.id = role_id self.name = name self.assume_role_policy_document = assume_role_policy_document - self.path = path + self.path = path or '/' self.policies = {} self.managed_policies = {} @@ -166,7 +166,7 @@ class InstanceProfile(BaseModel): def __init__(self, instance_profile_id, name, path, roles): self.id = instance_profile_id self.name = name - self.path = path + self.path = path or '/' self.roles = roles if roles else [] @classmethod From cf5bd7665cff971b8f0e470e96773bcbe1e5ad36 Mon Sep 17 00:00:00 2001 From: Lorenz Hufnagel Date: Fri, 16 Nov 2018 12:23:39 +0100 Subject: [PATCH 037/175] Mock AWS credentials https://github.com/spulec/moto/issues/1924 --- .travis.yml | 2 -- moto/core/models.py | 6 ++++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 3a5de0fa2..d386102fc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,8 +23,6 @@ matrix: sudo: true before_install: - export BOTO_CONFIG=/dev/null - - export AWS_SECRET_ACCESS_KEY=foobar_secret - - export AWS_ACCESS_KEY_ID=foobar_key install: # We build moto first so the docker container doesn't try to compile it as well, also note we don't use # -d for docker run so the logs show up in travis diff --git a/moto/core/models.py b/moto/core/models.py index 19267ca08..9fe1e96bd 100644 --- a/moto/core/models.py +++ b/moto/core/models.py @@ -4,6 +4,7 @@ from __future__ import absolute_import import functools import inspect +import os import re import six from io import BytesIO @@ -21,6 +22,11 @@ from .utils import ( ) +# "Mock" the AWS credentials as they can't be mocked in Botocore currently +os.environ.setdefault("AWS_ACCESS_KEY_ID", "foobar_key") +os.environ.setdefault("AWS_SECRET_ACCESS_KEY", "foobar_secret") + + class BaseMockAWS(object): nested_count = 0 From 57fa11136b3c04d53195653dfca36e55d1485693 Mon Sep 17 00:00:00 2001 From: Joe Engel Date: Mon, 19 Nov 2018 15:47:21 -0800 Subject: [PATCH 038/175] Add functionalities for SAML Providers 
--- IMPLEMENTATION_COVERAGE.md | 10 ++--- moto/iam/models.py | 39 +++++++++++++++++ moto/iam/responses.py | 88 ++++++++++++++++++++++++++++++++++++++ tests/test_iam/test_iam.py | 46 ++++++++++++++++++++ 4 files changed, 178 insertions(+), 5 deletions(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index 7c68c0e31..d8c226e52 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -2225,7 +2225,7 @@ - [X] create_policy - [X] create_policy_version - [X] create_role -- [ ] create_saml_provider +- [X] create_saml_provider - [ ] create_service_linked_role - [ ] create_service_specific_credential - [X] create_user @@ -2243,7 +2243,7 @@ - [X] delete_policy_version - [X] delete_role - [X] delete_role_policy -- [ ] delete_saml_provider +- [X] delete_saml_provider - [X] delete_server_certificate - [ ] delete_service_linked_role - [ ] delete_service_specific_credential @@ -2273,7 +2273,7 @@ - [X] get_policy_version - [X] get_role - [X] get_role_policy -- [ ] get_saml_provider +- [X] get_saml_provider - [X] get_server_certificate - [ ] get_service_linked_role_deletion_status - [ ] get_ssh_public_key @@ -2296,7 +2296,7 @@ - [X] list_policy_versions - [X] list_role_policies - [ ] list_roles -- [ ] list_saml_providers +- [X] list_saml_providers - [ ] list_server_certificates - [ ] list_service_specific_credentials - [ ] list_signing_certificates @@ -2323,7 +2323,7 @@ - [ ] update_open_id_connect_provider_thumbprint - [ ] update_role - [ ] update_role_description -- [ ] update_saml_provider +- [X] update_saml_provider - [ ] update_server_certificate - [ ] update_service_specific_credential - [ ] update_signing_certificate diff --git a/moto/iam/models.py b/moto/iam/models.py index 4a5240a08..c6f9e8113 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -50,6 +50,16 @@ class Policy(BaseModel): self.update_datetime = datetime.now(pytz.utc) +class SAMLProvider(BaseModel): + def __init__(self, name, saml_metadata_document=None): + 
self.name = name + self.saml_metadata_document = saml_metadata_document + + @property + def arn(self): + return "arn:aws:iam::{0}:saml-provider/{1}".format(ACCOUNT_ID, self.name) + + class PolicyVersion(object): def __init__(self, @@ -427,6 +437,7 @@ class IAMBackend(BaseBackend): self.credential_report = None self.managed_policies = self._init_managed_policies() self.account_aliases = [] + self.saml_providers = {} super(IAMBackend, self).__init__() def _init_managed_policies(self): @@ -937,5 +948,33 @@ class IAMBackend(BaseBackend): 'managed_policies': returned_policies } + def create_saml_provider(self, name, saml_metadata_document): + saml_provider = SAMLProvider(name, saml_metadata_document) + self.saml_providers[name] = saml_provider + return saml_provider + + def update_saml_provider(self, saml_provider_arn, saml_metadata_document): + saml_provider = self.get_saml_provider(saml_provider_arn) + saml_provider.saml_metadata_document = saml_metadata_document + return saml_provider + + def delete_saml_provider(self, saml_provider_arn): + try: + for saml_provider in list(self.list_saml_providers()): + if saml_provider.arn == saml_provider_arn: + del self.saml_providers[saml_provider.name] + except KeyError: + raise IAMNotFoundException( + "SAMLProvider {0} not found".format(saml_provider_arn)) + + def list_saml_providers(self): + return self.saml_providers.values() + + def get_saml_provider(self, saml_provider_arn): + for saml_provider in self.list_saml_providers(): + if saml_provider.arn == saml_provider_arn: + return saml_provider + raise IAMNotFoundException("SamlProvider {0} not found".format(saml_provider_arn)) + iam_backend = IAMBackend() diff --git a/moto/iam/responses.py b/moto/iam/responses.py index 22558f3f6..e05e3b3e7 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -552,6 +552,42 @@ class IamResponse(BaseResponse): roles=account_details['roles'] ) + def create_saml_provider(self): + saml_provider_name = self._get_param('Name') + 
saml_metadata_document = self._get_param('SAMLMetadataDocument') + saml_provider = iam_backend.create_saml_provider(saml_provider_name, saml_metadata_document) + + template = self.response_template(CREATE_SAML_PROVIDER_TEMPLATE) + return template.render(saml_provider=saml_provider) + + def update_saml_provider(self): + saml_provider_arn = self._get_param('SAMLProviderArn') + saml_metadata_document = self._get_param('SAMLMetadataDocument') + saml_provider = iam_backend.update_saml_provider(saml_provider_arn, saml_metadata_document) + + template = self.response_template(UPDATE_SAML_PROVIDER_TEMPLATE) + return template.render(saml_provider=saml_provider) + + def delete_saml_provider(self): + saml_provider_arn = self._get_param('SAMLProviderArn') + iam_backend.delete_saml_provider(saml_provider_arn) + + template = self.response_template(DELETE_SAML_PROVIDER_TEMPLATE) + return template.render() + + def list_saml_providers(self): + saml_providers = iam_backend.list_saml_providers() + + template = self.response_template(LIST_SAML_PROVIDERS_TEMPLATE) + return template.render(saml_providers=saml_providers) + + def get_saml_provider(self): + saml_provider_arn = self._get_param('SAMLProviderArn') + saml_provider = iam_backend.get_saml_provider(saml_provider_arn) + + template = self.response_template(GET_SAML_PROVIDER_TEMPLATE) + return template.render(saml_provider=saml_provider) + ATTACH_ROLE_POLICY_TEMPLATE = """ @@ -1485,3 +1521,55 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """92e79ae7-7399-11e4-8c85-4b53eEXAMPLE """ + +CREATE_SAML_PROVIDER_TEMPLATE = """ + + {{ saml_provider.arn }} + + + 29f47818-99f5-11e1-a4c3-27EXAMPLE804 + +""" + +LIST_SAML_PROVIDERS_TEMPLATE = """ + + + {% for saml_provider in saml_providers %} + + {{ saml_provider.arn }} + 2032-05-09T16:27:11Z + 2012-05-09T16:27:03Z + + {% endfor %} + + + + fd74fa8d-99f3-11e1-a4c3-27EXAMPLE804 + +""" + +GET_SAML_PROVIDER_TEMPLATE = """ + + 2012-05-09T16:27:11Z + 2015-12-31T21:59:59Z + {{ 
saml_provider.saml_metadata_document }} + + + 29f47818-99f5-11e1-a4c3-27EXAMPLE804 + +""" + +DELETE_SAML_PROVIDER_TEMPLATE = """ + + c749ee7f-99ef-11e1-a4c3-27EXAMPLE804 + +""" + +UPDATE_SAML_PROVIDER_TEMPLATE = """ + + {{ saml_provider.arn }} + + + 29f47818-99f5-11e1-a4c3-27EXAMPLE804 + +""" diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index bc23ff712..a2c6b596e 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -3,7 +3,9 @@ import base64 import boto import boto3 +import os import sure # noqa +import sys from boto.exception import BotoServerError from botocore.exceptions import ClientError from moto import mock_iam, mock_iam_deprecated @@ -756,5 +758,49 @@ def test_get_account_authorization_details(): len(result['GroupDetailList']) == 1 len(result['Policies']) > 1 +@mock_iam() +def test_create_saml_provider(): + conn = boto3.client('iam', region_name='us-east-1') + response = conn.create_saml_provider( + Name="TestSAMLProvider", + SAMLMetadataDocument='a' * 1024 + ) + response['SAMLProviderArn'].should.equal("arn:aws:iam::123456789012:saml-provider/TestSAMLProvider") +@mock_iam() +def test_get_saml_provider(): + conn = boto3.client('iam', region_name='us-east-1') + saml_provider_create = conn.create_saml_provider( + Name="TestSAMLProvider", + SAMLMetadataDocument='a' * 1024 + ) + response = conn.get_saml_provider( + SAMLProviderArn=saml_provider_create['SAMLProviderArn'] + ) + response['SAMLMetadataDocument'].should.equal('a' * 1024) +@mock_iam() +def test_list_saml_providers(): + conn = boto3.client('iam', region_name='us-east-1') + conn.create_saml_provider( + Name="TestSAMLProvider", + SAMLMetadataDocument='a' * 1024 + ) + response = conn.list_saml_providers() + response['SAMLProviderList'][0]['Arn'].should.equal("arn:aws:iam::123456789012:saml-provider/TestSAMLProvider") + +@mock_iam() +def test_delete_saml_provider(): + conn = boto3.client('iam', region_name='us-east-1') + saml_provider_create = 
conn.create_saml_provider( + Name="TestSAMLProvider", + SAMLMetadataDocument='a' * 1024 + ) + response = conn.list_saml_providers() + print(response) + len(response['SAMLProviderList']).should.equal(1) + conn.delete_saml_provider( + SAMLProviderArn=saml_provider_create['SAMLProviderArn'] + ) + response = conn.list_saml_providers() + len(response['SAMLProviderList']).should.equal(0) From 293b25a8f934fbdf2127fd2888c7323f7681d521 Mon Sep 17 00:00:00 2001 From: Tatsuya Hoshino Date: Tue, 20 Nov 2018 21:43:59 +0900 Subject: [PATCH 039/175] Add a missing trailing dot to the Name of ResourceRecordSet AWS Route53 treats www.example.com (without a trailing dot) and www.example.com. (with a trailing dot) as identical. Hence, after creating a `www.example.com` record, `www.example.com.` name is saved in Route53. But moto treated `www.example.com` and `www.example.com.` as different. This commit fixes the moto behavior. --- moto/route53/responses.py | 3 +++ tests/test_route53/test_route53.py | 24 ++++++++++++------------ 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/moto/route53/responses.py b/moto/route53/responses.py index 6679e7945..98ffa4c47 100644 --- a/moto/route53/responses.py +++ b/moto/route53/responses.py @@ -123,6 +123,9 @@ class Route53(BaseResponse): """ % (record_set['Name'], the_zone.name) return 400, headers, error_msg + if not record_set['Name'].endswith('.'): + record_set['Name'] += '.' 
+ if action in ('CREATE', 'UPSERT'): if 'ResourceRecords' in record_set: resource_records = list( diff --git a/tests/test_route53/test_route53.py b/tests/test_route53/test_route53.py index 76217b9d9..1ced9d937 100644 --- a/tests/test_route53/test_route53.py +++ b/tests/test_route53/test_route53.py @@ -164,8 +164,8 @@ def test_alias_rrset(): rrsets = conn.get_all_rrsets(zoneid, type="A") rrset_records = [(rr_set.name, rr) for rr_set in rrsets for rr in rr_set.resource_records] rrset_records.should.have.length_of(2) - rrset_records.should.contain(('foo.alias.testdns.aws.com', 'foo.testdns.aws.com')) - rrset_records.should.contain(('bar.alias.testdns.aws.com', 'bar.testdns.aws.com')) + rrset_records.should.contain(('foo.alias.testdns.aws.com.', 'foo.testdns.aws.com')) + rrset_records.should.contain(('bar.alias.testdns.aws.com.', 'bar.testdns.aws.com')) rrsets[0].resource_records[0].should.equal('foo.testdns.aws.com') rrsets = conn.get_all_rrsets(zoneid, type="CNAME") rrsets.should.have.length_of(1) @@ -525,7 +525,7 @@ def test_change_resource_record_sets_crud_valid(): { 'Action': 'CREATE', 'ResourceRecordSet': { - 'Name': 'prod.redis.db', + 'Name': 'prod.redis.db.', 'Type': 'A', 'TTL': 10, 'ResourceRecords': [{ @@ -540,7 +540,7 @@ def test_change_resource_record_sets_crud_valid(): response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id) len(response['ResourceRecordSets']).should.equal(1) a_record_detail = response['ResourceRecordSets'][0] - a_record_detail['Name'].should.equal('prod.redis.db') + a_record_detail['Name'].should.equal('prod.redis.db.') a_record_detail['Type'].should.equal('A') a_record_detail['TTL'].should.equal(10) a_record_detail['ResourceRecords'].should.equal([{'Value': '127.0.0.1'}]) @@ -552,7 +552,7 @@ def test_change_resource_record_sets_crud_valid(): { 'Action': 'UPSERT', 'ResourceRecordSet': { - 'Name': 'prod.redis.db', + 'Name': 'prod.redis.db.', 'Type': 'CNAME', 'TTL': 60, 'ResourceRecords': [{ @@ -567,7 +567,7 @@ def 
test_change_resource_record_sets_crud_valid(): response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id) len(response['ResourceRecordSets']).should.equal(1) cname_record_detail = response['ResourceRecordSets'][0] - cname_record_detail['Name'].should.equal('prod.redis.db') + cname_record_detail['Name'].should.equal('prod.redis.db.') cname_record_detail['Type'].should.equal('CNAME') cname_record_detail['TTL'].should.equal(60) cname_record_detail['ResourceRecords'].should.equal([{'Value': '192.168.1.1'}]) @@ -688,12 +688,12 @@ def test_list_resource_record_sets_name_type_filters(): # record_type, record_name all_records = [ - ('A', 'a.a.db'), - ('A', 'a.b.db'), - ('A', 'b.b.db'), - ('CNAME', 'b.b.db'), - ('CNAME', 'b.c.db'), - ('CNAME', 'c.c.db') + ('A', 'a.a.db.'), + ('A', 'a.b.db.'), + ('A', 'b.b.db.'), + ('CNAME', 'b.b.db.'), + ('CNAME', 'b.c.db.'), + ('CNAME', 'c.c.db.') ] for record_type, record_name in all_records: create_resource_record_set(record_type, record_name) From 96f9896885063bec511e195885088a074fa4b5dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Bataille?= Date: Tue, 20 Nov 2018 18:17:21 +0100 Subject: [PATCH 040/175] test(#1959): define dummy aws credentials for test --- tox.ini | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tox.ini b/tox.ini index 0f3f1466a..1830c2e1b 100644 --- a/tox.ini +++ b/tox.ini @@ -8,6 +8,10 @@ deps = commands = {envpython} setup.py test nosetests {posargs} +setenv = + AWS_ACCESS_KEY_ID=dummy_key + AWS_SECRET_ACCESS_KEY=dummy_secret + AWS_DEFAULT_REGION=us-east-1 [flake8] ignore = E128,E501 From 9291ff533a467a08eb1fe50924036f4b05a0ce07 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Bataille?= Date: Tue, 20 Nov 2018 18:39:31 +0100 Subject: [PATCH 041/175] test(#1959): LocationConstraint us-east-1 should be refused --- tests/test_s3/test_s3.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index 6e339abb6..18fb768ef 
100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -1300,6 +1300,16 @@ def test_bucket_create_duplicate(): exc.exception.response['Error']['Code'].should.equal('BucketAlreadyExists') +@mock_s3 +def test_bucket_create_force_us_east_1(): + s3 = boto3.resource('s3', region_name='us-east-1') + with assert_raises(ClientError) as exc: + s3.create_bucket(Bucket="blah", CreateBucketConfiguration={ + 'LocationConstraint': 'us-east-1', + }) + exc.exception.response['Error']['Code'].should.equal('InvalidLocationConstraint') + + @mock_s3 def test_boto3_bucket_create_eu_central(): s3 = boto3.resource('s3', region_name='eu-central-1') From 437eb892e23dbae7a159a67f16073d46f23977e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Bataille?= Date: Tue, 20 Nov 2018 18:52:50 +0100 Subject: [PATCH 042/175] feat(#1959): LocationConstraint us-east-1 is not accepted by the CreateBucket operation --- moto/s3/responses.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/moto/s3/responses.py b/moto/s3/responses.py index 13e5f87d9..d27351e14 100755 --- a/moto/s3/responses.py +++ b/moto/s3/responses.py @@ -432,8 +432,19 @@ class ResponseObject(_TemplateEnvironmentMixin): else: if body: + # us-east-1, the default AWS region behaves a bit differently + # - you should not use it as a location constraint --> it fails + # - querying the location constraint returns None try: - region_name = xmltodict.parse(body)['CreateBucketConfiguration']['LocationConstraint'] + forced_region = xmltodict.parse(body)['CreateBucketConfiguration']['LocationConstraint'] + + if forced_region == DEFAULT_REGION_NAME: + raise S3ClientError( + 'InvalidLocationConstraint', + 'The specified location-constraint is not valid' + ) + else: + region_name = forced_region except KeyError: pass From b0eb7b263e9a9f0aa1d33ef8cee54cba64a7976d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Bataille?= Date: Tue, 20 Nov 2018 19:42:51 +0100 Subject: [PATCH 043/175] 
test(#1959): us-east-1 located bucket should return a None LocationConstraint --- tests/test_s3/test_s3.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index 18fb768ef..6541e77b8 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -977,6 +977,15 @@ def test_bucket_location(): bucket.get_location().should.equal("us-west-2") +@mock_s3_deprecated +def test_bucket_location_us_east_1(): + cli = boto3.client('s3') + bucket_name = 'mybucket' + # No LocationConstraint ==> us-east-1 + cli.create_bucket(Bucket=bucket_name) + cli.get_bucket_location(Bucket=bucket_name)['LocationConstraint'].should.equal(None) + + @mock_s3_deprecated def test_ranged_get(): conn = boto.connect_s3() From 34ac5c72b992be546648bea960d56cbd28e0b993 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Bataille?= Date: Tue, 20 Nov 2018 19:50:42 +0100 Subject: [PATCH 044/175] feat(#1959): bucket in us-east-1 return None as LocationConstraint --- moto/s3/responses.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/moto/s3/responses.py b/moto/s3/responses.py index d27351e14..1eb010842 100755 --- a/moto/s3/responses.py +++ b/moto/s3/responses.py @@ -193,7 +193,13 @@ class ResponseObject(_TemplateEnvironmentMixin): elif 'location' in querystring: bucket = self.backend.get_bucket(bucket_name) template = self.response_template(S3_BUCKET_LOCATION) - return template.render(location=bucket.location) + + location = bucket.location + # us-east-1 is different - returns a None location + if location == DEFAULT_REGION_NAME: + location = None + + return template.render(location=location) elif 'lifecycle' in querystring: bucket = self.backend.get_bucket(bucket_name) if not bucket.rules: @@ -1187,7 +1193,7 @@ S3_DELETE_BUCKET_WITH_ITEMS_ERROR = """ """ S3_BUCKET_LOCATION = """ -{{ location }}""" +{% if location != None %}{{ location }}{% endif %}""" S3_BUCKET_LIFECYCLE_CONFIGURATION = """ From 
fb7e52beccd5cc15e10823630de3d99e1a998ba4 Mon Sep 17 00:00:00 2001 From: Jon Michaelchuck Date: Thu, 22 Nov 2018 06:08:03 -0600 Subject: [PATCH 045/175] Check bucket name length at CreateBucket Check that s3 bucket names follow the documented length restriction: 'Bucket names must be at least 3 and no more than 63 characters long.' See https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html --- moto/s3/exceptions.py | 10 ++++++++++ moto/s3/models.py | 9 +++++++-- tests/test_s3/test_s3.py | 14 ++++++++++++++ 3 files changed, 31 insertions(+), 2 deletions(-) diff --git a/moto/s3/exceptions.py b/moto/s3/exceptions.py index 26515dfd2..d6815ce72 100644 --- a/moto/s3/exceptions.py +++ b/moto/s3/exceptions.py @@ -178,3 +178,13 @@ class InvalidStorageClass(S3ClientError): "InvalidStorageClass", "The storage class you specified is not valid", *args, **kwargs) + + +class InvalidBucketName(S3ClientError): + code = 400 + + def __init__(self, *args, **kwargs): + super(InvalidBucketName, self).__init__( + "InvalidBucketName", + "The specified bucket is not valid.", + *args, **kwargs) diff --git a/moto/s3/models.py b/moto/s3/models.py index bb4d7848c..70b71645c 100644 --- a/moto/s3/models.py +++ b/moto/s3/models.py @@ -14,10 +14,13 @@ import six from bisect import insort from moto.core import BaseBackend, BaseModel from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime -from .exceptions import BucketAlreadyExists, MissingBucket, InvalidPart, EntityTooSmall, MissingKey, \ - InvalidNotificationDestination, MalformedXML, InvalidStorageClass +from .exceptions import BucketAlreadyExists, MissingBucket, InvalidBucketName, InvalidPart, \ + EntityTooSmall, MissingKey, InvalidNotificationDestination, MalformedXML, \ + InvalidStorageClass from .utils import clean_key_name, _VersionedKeyStore +MAX_BUCKET_NAME_LENGTH = 63 +MIN_BUCKET_NAME_LENGTH = 3 UPLOAD_ID_BYTES = 43 UPLOAD_PART_MIN_SIZE = 5242880 STORAGE_CLASS = ["STANDARD", 
"REDUCED_REDUNDANCY", "STANDARD_IA", "ONEZONE_IA"] @@ -634,6 +637,8 @@ class S3Backend(BaseBackend): def create_bucket(self, bucket_name, region_name): if bucket_name in self.buckets: raise BucketAlreadyExists(bucket=bucket_name) + if not MIN_BUCKET_NAME_LENGTH <= len(bucket_name) <= MAX_BUCKET_NAME_LENGTH: + raise InvalidBucketName() new_bucket = FakeBucket(name=bucket_name, region_name=region_name) self.buckets[bucket_name] = new_bucket return new_bucket diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index 6e339abb6..35f79d3e2 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -2581,3 +2581,17 @@ TEST_XML = """\ """ + +@mock_s3 +def test_boto3_bucket_name_too_long(): + s3 = boto3.client('s3', region_name='us-east-1') + with assert_raises(ClientError) as exc: + s3.create_bucket(Bucket='x'*64) + exc.exception.response['Error']['Code'].should.equal('InvalidBucketName') + +@mock_s3 +def test_boto3_bucket_name_too_short(): + s3 = boto3.client('s3', region_name='us-east-1') + with assert_raises(ClientError) as exc: + s3.create_bucket(Bucket='x'*2) + exc.exception.response['Error']['Code'].should.equal('InvalidBucketName') From 69e093fcea48e76eb434e84965343f66d4a8babe Mon Sep 17 00:00:00 2001 From: Jon Michaelchuck Date: Thu, 22 Nov 2018 06:16:37 -0600 Subject: [PATCH 046/175] flake8 indentation fix --- moto/s3/models.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/moto/s3/models.py b/moto/s3/models.py index 70b71645c..401f7b3df 100644 --- a/moto/s3/models.py +++ b/moto/s3/models.py @@ -15,8 +15,7 @@ from bisect import insort from moto.core import BaseBackend, BaseModel from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime from .exceptions import BucketAlreadyExists, MissingBucket, InvalidBucketName, InvalidPart, \ - EntityTooSmall, MissingKey, InvalidNotificationDestination, MalformedXML, \ - InvalidStorageClass + EntityTooSmall, MissingKey, InvalidNotificationDestination, 
MalformedXML, InvalidStorageClass from .utils import clean_key_name, _VersionedKeyStore MAX_BUCKET_NAME_LENGTH = 63 From 4092657472cab070dd9531bd82afd4b45fe95bb3 Mon Sep 17 00:00:00 2001 From: Andrew McCall Date: Thu, 22 Nov 2018 17:10:33 +0000 Subject: [PATCH 047/175] Added double to to_str/from_str, returned by emr create_cluster --- moto/core/responses.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/moto/core/responses.py b/moto/core/responses.py index 0f133e72c..8fb247f75 100644 --- a/moto/core/responses.py +++ b/moto/core/responses.py @@ -718,6 +718,8 @@ def to_str(value, spec): return str(value) elif vtype == 'float': return str(value) + elif vtype == 'double': + return str(value) elif vtype == 'timestamp': return datetime.datetime.utcfromtimestamp( value).replace(tzinfo=pytz.utc).isoformat() @@ -737,6 +739,8 @@ def from_str(value, spec): return int(value) elif vtype == 'float': return float(value) + elif vtype == 'double': + return float(value) elif vtype == 'timestamp': return value elif vtype == 'string': From df2120f38ca4fed270255955aefffed072071e02 Mon Sep 17 00:00:00 2001 From: Ka Wai Wan Date: Sat, 24 Nov 2018 02:32:39 -0800 Subject: [PATCH 048/175] Add instance protection support in autoscaling, with tests --- moto/autoscaling/models.py | 61 ++++++-- moto/autoscaling/responses.py | 23 +++ tests/test_autoscaling/test_autoscaling.py | 166 ++++++++++++++++++++- 3 files changed, 228 insertions(+), 22 deletions(-) diff --git a/moto/autoscaling/models.py b/moto/autoscaling/models.py index 0ebc4c465..27e81a87c 100644 --- a/moto/autoscaling/models.py +++ b/moto/autoscaling/models.py @@ -17,10 +17,12 @@ ASG_NAME_TAG = "aws:autoscaling:groupName" class InstanceState(object): - def __init__(self, instance, lifecycle_state="InService", health_status="Healthy"): + def __init__(self, instance, lifecycle_state="InService", + health_status="Healthy", protected_from_scale_in=False): self.instance = instance self.lifecycle_state = lifecycle_state
self.health_status = health_status + self.protected_from_scale_in = protected_from_scale_in class FakeScalingPolicy(BaseModel): @@ -152,7 +154,8 @@ class FakeAutoScalingGroup(BaseModel): min_size, launch_config_name, vpc_zone_identifier, default_cooldown, health_check_period, health_check_type, load_balancers, target_group_arns, placement_group, termination_policies, - autoscaling_backend, tags): + autoscaling_backend, tags, + new_instances_protected_from_scale_in=False): self.autoscaling_backend = autoscaling_backend self.name = name @@ -178,6 +181,7 @@ class FakeAutoScalingGroup(BaseModel): self.target_group_arns = target_group_arns self.placement_group = placement_group self.termination_policies = termination_policies + self.new_instances_protected_from_scale_in = new_instances_protected_from_scale_in self.suspended_processes = [] self.instance_states = [] @@ -210,6 +214,8 @@ class FakeAutoScalingGroup(BaseModel): placement_group=None, termination_policies=properties.get("TerminationPolicies", []), tags=properties.get("Tags", []), + new_instances_protected_from_scale_in=properties.get( + "NewInstancesProtectedFromScaleIn", False) ) return group @@ -238,7 +244,8 @@ class FakeAutoScalingGroup(BaseModel): def update(self, availability_zones, desired_capacity, max_size, min_size, launch_config_name, vpc_zone_identifier, default_cooldown, health_check_period, health_check_type, - placement_group, termination_policies): + placement_group, termination_policies, + new_instances_protected_from_scale_in=None): if availability_zones: self.availability_zones = availability_zones if max_size is not None: @@ -256,6 +263,8 @@ class FakeAutoScalingGroup(BaseModel): self.health_check_period = health_check_period if health_check_type is not None: self.health_check_type = health_check_type + if new_instances_protected_from_scale_in is not None: + self.new_instances_protected_from_scale_in = new_instances_protected_from_scale_in if desired_capacity is not None: 
self.set_desired_capacity(desired_capacity) @@ -280,12 +289,16 @@ class FakeAutoScalingGroup(BaseModel): else: # Need to remove some instances count_to_remove = curr_instance_count - self.desired_capacity - instances_to_remove = self.instance_states[:count_to_remove] - instance_ids_to_remove = [ - instance.instance.id for instance in instances_to_remove] - self.autoscaling_backend.ec2_backend.terminate_instances( - instance_ids_to_remove) - self.instance_states = self.instance_states[count_to_remove:] + instances_to_remove = [ # only remove unprotected + state for state in self.instance_states + if not state.protected_from_scale_in + ][:count_to_remove] + if instances_to_remove: # just in case not instances to remove + instance_ids_to_remove = [ + instance.instance.id for instance in instances_to_remove] + self.autoscaling_backend.ec2_backend.terminate_instances( + instance_ids_to_remove) + self.instance_states = list(set(self.instance_states) - set(instances_to_remove)) def get_propagated_tags(self): propagated_tags = {} @@ -310,7 +323,10 @@ class FakeAutoScalingGroup(BaseModel): ) for instance in reservation.instances: instance.autoscaling_group = self - self.instance_states.append(InstanceState(instance)) + self.instance_states.append(InstanceState( + instance, + protected_from_scale_in=self.new_instances_protected_from_scale_in, + )) def append_target_groups(self, target_group_arns): append = [x for x in target_group_arns if x not in self.target_group_arns] @@ -372,7 +388,8 @@ class AutoScalingBackend(BaseBackend): default_cooldown, health_check_period, health_check_type, load_balancers, target_group_arns, placement_group, - termination_policies, tags): + termination_policies, tags, + new_instances_protected_from_scale_in=False): def make_int(value): return int(value) if value is not None else value @@ -403,6 +420,7 @@ class AutoScalingBackend(BaseBackend): termination_policies=termination_policies, autoscaling_backend=self, tags=tags, + 
new_instances_protected_from_scale_in=new_instances_protected_from_scale_in, ) self.autoscaling_groups[name] = group @@ -415,12 +433,14 @@ class AutoScalingBackend(BaseBackend): launch_config_name, vpc_zone_identifier, default_cooldown, health_check_period, health_check_type, placement_group, - termination_policies): + termination_policies, + new_instances_protected_from_scale_in=None): group = self.autoscaling_groups[name] group.update(availability_zones, desired_capacity, max_size, min_size, launch_config_name, vpc_zone_identifier, default_cooldown, health_check_period, health_check_type, - placement_group, termination_policies) + placement_group, termination_policies, + new_instances_protected_from_scale_in=new_instances_protected_from_scale_in) return group def describe_auto_scaling_groups(self, names): @@ -448,7 +468,13 @@ class AutoScalingBackend(BaseBackend): raise ResourceContentionError else: group.desired_capacity = original_size + len(instance_ids) - new_instances = [InstanceState(self.ec2_backend.get_instance(x)) for x in instance_ids] + new_instances = [ + InstanceState( + self.ec2_backend.get_instance(x), + protected_from_scale_in=group.new_instances_protected_from_scale_in, + ) + for x in instance_ids + ] for instance in new_instances: self.ec2_backend.create_tags([instance.instance.id], {ASG_NAME_TAG: group.name}) group.instance_states.extend(new_instances) @@ -626,6 +652,13 @@ class AutoScalingBackend(BaseBackend): group = self.autoscaling_groups[group_name] group.suspended_processes = scaling_processes or [] + def set_instance_protection(self, group_name, instance_ids, protected_from_scale_in): + group = self.autoscaling_groups[group_name] + protected_instances = [ + x for x in group.instance_states if x.instance.id in instance_ids] + for instance in protected_instances: + instance.protected_from_scale_in = protected_from_scale_in + autoscaling_backends = {} for region, ec2_backend in ec2_backends.items(): diff --git 
a/moto/autoscaling/responses.py b/moto/autoscaling/responses.py index 5586c51dd..065f32ebe 100644 --- a/moto/autoscaling/responses.py +++ b/moto/autoscaling/responses.py @@ -85,6 +85,8 @@ class AutoScalingResponse(BaseResponse): termination_policies=self._get_multi_param( 'TerminationPolicies.member'), tags=self._get_list_prefix('Tags.member'), + new_instances_protected_from_scale_in=self._get_bool_param( + 'NewInstancesProtectedFromScaleIn', False) ) template = self.response_template(CREATE_AUTOSCALING_GROUP_TEMPLATE) return template.render() @@ -192,6 +194,8 @@ class AutoScalingResponse(BaseResponse): placement_group=self._get_param('PlacementGroup'), termination_policies=self._get_multi_param( 'TerminationPolicies.member'), + new_instances_protected_from_scale_in=self._get_bool_param( + 'NewInstancesProtectedFromScaleIn', None) ) template = self.response_template(UPDATE_AUTOSCALING_GROUP_TEMPLATE) return template.render() @@ -290,6 +294,15 @@ class AutoScalingResponse(BaseResponse): template = self.response_template(SUSPEND_PROCESSES_TEMPLATE) return template.render() + def set_instance_protection(self): + group_name = self._get_param('AutoScalingGroupName') + instance_ids = self._get_multi_param('InstanceIds.member') + protected_from_scale_in = self._get_bool_param('ProtectedFromScaleIn') + self.autoscaling_backend.set_instance_protection( + group_name, instance_ids, protected_from_scale_in) + template = self.response_template(SET_INSTANCE_PROTECTION_TEMPLATE) + return template.render() + CREATE_LAUNCH_CONFIGURATION_TEMPLATE = """ @@ -490,6 +503,7 @@ DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE = """{{ instance_state.instance.id }} {{ group.launch_config_name }} {{ instance_state.lifecycle_state }} + {{ instance_state.protected_from_scale_in|string|lower }} {% endfor %} @@ -530,6 +544,7 @@ DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE = """{{ group.placement_group }} {% endif %} + {{ group.new_instances_protected_from_scale_in|string|lower }} {% endfor %} @@ -565,6 +580,7 @@ 
DESCRIBE_AUTOSCALING_INSTANCES_TEMPLATE = """{{ instance_state.instance.id }} {{ instance_state.instance.autoscaling_group.launch_config_name }} {{ instance_state.lifecycle_state }} + {{ instance_state.protected_from_scale_in|string|lower }} {% endfor %} @@ -668,3 +684,10 @@ SET_INSTANCE_HEALTH_TEMPLATE = """ + + +{{ requestid }} + +""" diff --git a/tests/test_autoscaling/test_autoscaling.py b/tests/test_autoscaling/test_autoscaling.py index f86ca2b81..b1a65fb7e 100644 --- a/tests/test_autoscaling/test_autoscaling.py +++ b/tests/test_autoscaling/test_autoscaling.py @@ -710,6 +710,7 @@ def test_create_autoscaling_group_boto3(): 'PropagateAtLaunch': False }], VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=False, ) response['ResponseMetadata']['HTTPStatusCode'].should.equal(200) @@ -728,13 +729,48 @@ def test_describe_autoscaling_groups_boto3(): MaxSize=20, DesiredCapacity=5, VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=True, ) + response = client.describe_auto_scaling_groups( AutoScalingGroupNames=["test_asg"] ) response['ResponseMetadata']['HTTPStatusCode'].should.equal(200) - response['AutoScalingGroups'][0][ - 'AutoScalingGroupName'].should.equal('test_asg') + group = response['AutoScalingGroups'][0] + group['AutoScalingGroupName'].should.equal('test_asg') + group['NewInstancesProtectedFromScaleIn'].should.equal(True) + group['Instances'][0]['ProtectedFromScaleIn'].should.equal(True) + + +@mock_autoscaling +def test_describe_autoscaling_instances_boto3(): + mocked_networking = setup_networking() + client = boto3.client('autoscaling', region_name='us-east-1') + _ = client.create_launch_configuration( + LaunchConfigurationName='test_launch_configuration' + ) + _ = client.create_auto_scaling_group( + AutoScalingGroupName='test_asg', + LaunchConfigurationName='test_launch_configuration', + MinSize=0, + MaxSize=20, + DesiredCapacity=5, + VPCZoneIdentifier=mocked_networking['subnet1'], + 
NewInstancesProtectedFromScaleIn=True, + ) + + response = client.describe_auto_scaling_groups( + AutoScalingGroupNames=["test_asg"] + ) + instance_ids = [ + instance['InstanceId'] + for instance in response['AutoScalingGroups'][0]['Instances'] + ] + + response = client.describe_auto_scaling_instances(InstanceIds=instance_ids) + for instance in response['AutoScalingInstances']: + instance['AutoScalingGroupName'].should.equal('test_asg') + instance['ProtectedFromScaleIn'].should.equal(True) @mock_autoscaling @@ -751,17 +787,21 @@ def test_update_autoscaling_group_boto3(): MaxSize=20, DesiredCapacity=5, VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=True, ) - response = client.update_auto_scaling_group( + _ = client.update_auto_scaling_group( AutoScalingGroupName='test_asg', MinSize=1, + NewInstancesProtectedFromScaleIn=False, ) response = client.describe_auto_scaling_groups( AutoScalingGroupNames=["test_asg"] ) - response['AutoScalingGroups'][0]['MinSize'].should.equal(1) + group = response['AutoScalingGroups'][0] + group['MinSize'].should.equal(1) + group['NewInstancesProtectedFromScaleIn'].should.equal(False) @mock_autoscaling @@ -992,9 +1032,7 @@ def test_attach_one_instance(): 'PropagateAtLaunch': True }], VPCZoneIdentifier=mocked_networking['subnet1'], - ) - response = client.describe_auto_scaling_groups( - AutoScalingGroupNames=['test_asg'] + NewInstancesProtectedFromScaleIn=True, ) ec2 = boto3.resource('ec2', 'us-east-1') @@ -1009,7 +1047,11 @@ def test_attach_one_instance(): response = client.describe_auto_scaling_groups( AutoScalingGroupNames=['test_asg'] ) - response['AutoScalingGroups'][0]['Instances'].should.have.length_of(3) + instances = response['AutoScalingGroups'][0]['Instances'] + instances.should.have.length_of(3) + for instance in instances: + instance['ProtectedFromScaleIn'].should.equal(True) + @mock_autoscaling @mock_ec2 @@ -1100,3 +1142,111 @@ def test_suspend_processes(): launch_suspended = True assert 
launch_suspended is True + +@mock_autoscaling +def test_set_instance_protection(): + mocked_networking = setup_networking() + client = boto3.client('autoscaling', region_name='us-east-1') + _ = client.create_launch_configuration( + LaunchConfigurationName='test_launch_configuration' + ) + _ = client.create_auto_scaling_group( + AutoScalingGroupName='test_asg', + LaunchConfigurationName='test_launch_configuration', + MinSize=0, + MaxSize=20, + DesiredCapacity=5, + VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=False, + ) + + response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg']) + instance_ids = [ + instance['InstanceId'] + for instance in response['AutoScalingGroups'][0]['Instances'] + ] + protected = instance_ids[:3] + + _ = client.set_instance_protection( + AutoScalingGroupName='test_asg', + InstanceIds=protected, + ProtectedFromScaleIn=True, + ) + + response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg']) + for instance in response['AutoScalingGroups'][0]['Instances']: + instance['ProtectedFromScaleIn'].should.equal( + instance['InstanceId'] in protected + ) + + +@mock_autoscaling +def test_set_desired_capacity_up_boto3(): + mocked_networking = setup_networking() + client = boto3.client('autoscaling', region_name='us-east-1') + _ = client.create_launch_configuration( + LaunchConfigurationName='test_launch_configuration' + ) + _ = client.create_auto_scaling_group( + AutoScalingGroupName='test_asg', + LaunchConfigurationName='test_launch_configuration', + MinSize=0, + MaxSize=20, + DesiredCapacity=5, + VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=True, + ) + + _ = client.set_desired_capacity( + AutoScalingGroupName='test_asg', + DesiredCapacity=10, + ) + + response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg']) + instances = response['AutoScalingGroups'][0]['Instances'] + instances.should.have.length_of(10) + 
for instance in instances: + instance['ProtectedFromScaleIn'].should.equal(True) + + +@mock_autoscaling +def test_set_desired_capacity_down_boto3(): + mocked_networking = setup_networking() + client = boto3.client('autoscaling', region_name='us-east-1') + _ = client.create_launch_configuration( + LaunchConfigurationName='test_launch_configuration' + ) + _ = client.create_auto_scaling_group( + AutoScalingGroupName='test_asg', + LaunchConfigurationName='test_launch_configuration', + MinSize=0, + MaxSize=20, + DesiredCapacity=5, + VPCZoneIdentifier=mocked_networking['subnet1'], + NewInstancesProtectedFromScaleIn=True, + ) + + response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg']) + instance_ids = [ + instance['InstanceId'] + for instance in response['AutoScalingGroups'][0]['Instances'] + ] + unprotected, protected = instance_ids[:2], instance_ids[2:] + + _ = client.set_instance_protection( + AutoScalingGroupName='test_asg', + InstanceIds=unprotected, + ProtectedFromScaleIn=False, + ) + + _ = client.set_desired_capacity( + AutoScalingGroupName='test_asg', + DesiredCapacity=1, + ) + + response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg']) + group = response['AutoScalingGroups'][0] + group['DesiredCapacity'].should.equal(1) + instance_ids = {instance['InstanceId'] for instance in group['Instances']} + set(protected).should.equal(instance_ids) + set(unprotected).should_not.be.within(instance_ids) # only unprotected killed From 009b02bcd521626996d38d4983d02a4cf3d890de Mon Sep 17 00:00:00 2001 From: Jon Michaelchuck Date: Mon, 26 Nov 2018 15:56:46 -0800 Subject: [PATCH 049/175] Raise a client error if PutBucketTags request contains duplicate keys A PutBucketTags request with duplicate keys will raise a ClientError with code InvalidTag and message 'Cannot provide multiple Tags with the same key'. 
--- moto/s3/exceptions.py | 9 +++++++++ moto/s3/models.py | 5 ++++- tests/test_s3/test_s3.py | 18 ++++++++++++++++++ 3 files changed, 31 insertions(+), 1 deletion(-) diff --git a/moto/s3/exceptions.py b/moto/s3/exceptions.py index 26515dfd2..f78e24943 100644 --- a/moto/s3/exceptions.py +++ b/moto/s3/exceptions.py @@ -178,3 +178,12 @@ class InvalidStorageClass(S3ClientError): "InvalidStorageClass", "The storage class you specified is not valid", *args, **kwargs) + +class DuplicateTagKeys(S3ClientError): + code = 400 + + def __init__(self, *args, **kwargs): + super(DuplicateTagKeys, self).__init__( + "InvalidTag", + "Cannot provide multiple Tags with the same key", + *args, **kwargs) diff --git a/moto/s3/models.py b/moto/s3/models.py index bb4d7848c..fd53417fa 100644 --- a/moto/s3/models.py +++ b/moto/s3/models.py @@ -15,7 +15,7 @@ from bisect import insort from moto.core import BaseBackend, BaseModel from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime from .exceptions import BucketAlreadyExists, MissingBucket, InvalidPart, EntityTooSmall, MissingKey, \ - InvalidNotificationDestination, MalformedXML, InvalidStorageClass + InvalidNotificationDestination, MalformedXML, InvalidStorageClass, DuplicateTagKeys from .utils import clean_key_name, _VersionedKeyStore UPLOAD_ID_BYTES = 43 @@ -773,6 +773,9 @@ class S3Backend(BaseBackend): return key def put_bucket_tagging(self, bucket_name, tagging): + tag_keys = [tag.key for tag in tagging.tag_set.tags] + if len(tag_keys) != len(set(tag_keys)): + raise DuplicateTagKeys() bucket = self.get_bucket(bucket_name) bucket.set_tags(tagging) diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index 6e339abb6..ffafc0dfd 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -1553,6 +1553,24 @@ def test_boto3_put_bucket_tagging(): }) resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200) + # With duplicate tag keys: + with assert_raises(ClientError) as err: + resp = 
s3.put_bucket_tagging(Bucket=bucket_name, + Tagging={ + "TagSet": [ + { + "Key": "TagOne", + "Value": "ValueOne" + }, + { + "Key": "TagOne", + "Value": "ValueOneAgain" + } + ] + }) + e = err.exception + e.response["Error"]["Code"].should.equal("InvalidTag") + e.response["Error"]["Message"].should.equal("Cannot provide multiple Tags with the same key") @mock_s3 def test_boto3_get_bucket_tagging(): From 2d554cd098bbaff1db4c5901d5b535e68a727899 Mon Sep 17 00:00:00 2001 From: grahamlyons Date: Mon, 26 Nov 2018 23:58:41 +0000 Subject: [PATCH 050/175] Return the deleted stacks in the list stacks call This matches the behaviour of the AWS API. --- moto/cloudformation/models.py | 6 +++++- .../test_cloudformation_stack_crud_boto3.py | 15 +++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/moto/cloudformation/models.py b/moto/cloudformation/models.py index e5ab7255d..536931c25 100644 --- a/moto/cloudformation/models.py +++ b/moto/cloudformation/models.py @@ -223,7 +223,11 @@ class CloudFormationBackend(BaseBackend): return list(stacks) def list_stacks(self): - return self.stacks.values() + return [ + v for v in self.stacks.values() + ] + [ + v for v in self.deleted_stacks.values() + ] def get_stack(self, name_or_stack_id): all_stacks = dict(self.deleted_stacks, **self.stacks) diff --git a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py index 9bfae6174..19d025753 100644 --- a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py +++ b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py @@ -532,6 +532,21 @@ def test_delete_stack_by_name(): cf_conn.describe_stacks()['Stacks'].should.have.length_of(0) +@mock_cloudformation +def test_delete_stack(): + cf = boto3.client('cloudformation', region_name='us-east-1') + cf.create_stack( + StackName="test_stack", + TemplateBody=dummy_template_json, + ) + + cf.delete_stack( + StackName="test_stack", + ) 
+ stacks = cf.list_stacks() + assert stacks['StackSummaries'][0]['StackStatus'] == 'DELETE_COMPLETE' + + @mock_cloudformation def test_describe_deleted_stack(): cf_conn = boto3.client('cloudformation', region_name='us-east-1') From 4de92accab606e5a21409c36f682939c0c5f1027 Mon Sep 17 00:00:00 2001 From: grahamlyons Date: Tue, 27 Nov 2018 11:11:13 +0000 Subject: [PATCH 051/175] Fix existing tests which use `list_stacks` The tests are expecting that deleted stacks are not listed when in fact they should be. --- .../test_cloudformation_stack_crud.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/test_cloudformation/test_cloudformation_stack_crud.py b/tests/test_cloudformation/test_cloudformation_stack_crud.py index 801faf8a1..b7906632b 100644 --- a/tests/test_cloudformation/test_cloudformation_stack_crud.py +++ b/tests/test_cloudformation/test_cloudformation_stack_crud.py @@ -266,9 +266,9 @@ def test_delete_stack_by_name(): template_body=dummy_template_json, ) - conn.list_stacks().should.have.length_of(1) + conn.describe_stacks().should.have.length_of(1) conn.delete_stack("test_stack") - conn.list_stacks().should.have.length_of(0) + conn.describe_stacks().should.have.length_of(0) @mock_cloudformation_deprecated @@ -279,9 +279,9 @@ def test_delete_stack_by_id(): template_body=dummy_template_json, ) - conn.list_stacks().should.have.length_of(1) + conn.describe_stacks().should.have.length_of(1) conn.delete_stack(stack_id) - conn.list_stacks().should.have.length_of(0) + conn.describe_stacks().should.have.length_of(0) with assert_raises(BotoServerError): conn.describe_stacks("test_stack") @@ -296,9 +296,9 @@ def test_delete_stack_with_resource_missing_delete_attr(): template_body=dummy_template_json3, ) - conn.list_stacks().should.have.length_of(1) + conn.describe_stacks().should.have.length_of(1) conn.delete_stack("test_stack") - conn.list_stacks().should.have.length_of(0) + conn.describe_stacks().should.have.length_of(0) 
@mock_cloudformation_deprecated From 7189d019dfa326978ea4d8006355d1f5ba302c4f Mon Sep 17 00:00:00 2001 From: martynaspaulikas Date: Tue, 27 Nov 2018 11:28:09 +0000 Subject: [PATCH 052/175] Implemented get_access_key_last_used Written test that still does not work due to: ParamValidationError: Parameter validation failed: Unknown parameter in input: "UserName", must be one of: AccessKeyId Refactored update_access_key and delete_access_key functions --- IMPLEMENTATION_COVERAGE.md | 2 +- moto/iam/models.py | 32 ++++++++++++++++++++++---------- moto/iam/responses.py | 7 +++++++ tests/test_iam/test_iam.py | 13 +++++++++++++ 4 files changed, 43 insertions(+), 11 deletions(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index 7c68c0e31..e3bac1918 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -2257,7 +2257,7 @@ - [X] detach_user_policy - [X] enable_mfa_device - [ ] generate_credential_report -- [ ] get_access_key_last_used +- [X] get_access_key_last_used - [X] get_account_authorization_details - [ ] get_account_password_policy - [ ] get_account_summary diff --git a/moto/iam/models.py b/moto/iam/models.py index 4a5240a08..32e1ea5ee 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -224,6 +224,10 @@ class AccessKey(BaseModel): datetime.utcnow(), "%Y-%m-%dT%H:%M:%SZ" ) + self.last_used = datetime.strftime( + datetime.utcnow(), + "%Y-%m-%dT%H:%M:%SZ" + ) def get_cfn_attribute(self, attribute_name): from moto.cloudformation.exceptions import UnformattedGetAttTemplateException @@ -351,22 +355,25 @@ class User(BaseModel): def get_all_access_keys(self): return self.access_keys + def get_access_key_last_used(self, access_key_id): + key = self.get_access_key_by_id(access_key_id) + return key.last_used + def delete_access_key(self, access_key_id): - for key in self.access_keys: - if key.access_key_id == access_key_id: - self.access_keys.remove(key) - break - else: - raise IAMNotFoundException( - "Key {0} not 
found".format(access_key_id)) + key = self.get_access_key_by_id(access_key_id) + self.access_keys.remove(key) def update_access_key(self, access_key_id, status): + key = self.get_access_key_by_id(access_key_id) + key.status = status + + def get_access_key_by_id(self, access_key_id): for key in self.access_keys: if key.access_key_id == access_key_id: - key.status = status - break + return key else: - raise IAMNotFoundException("The Access Key with id {0} cannot be found".format(access_key_id)) + raise IAMNotFoundException( + "The Access Key with id {0} cannot be found".format(access_key_id)) def get_cfn_attribute(self, attribute_name): from moto.cloudformation.exceptions import UnformattedGetAttTemplateException @@ -838,6 +845,11 @@ class IAMBackend(BaseBackend): user = self.get_user(user_name) user.update_access_key(access_key_id, status) + def get_access_key_last_used(self, user_name, access_key_id): + user = self.get_user(user_name) + last_used = user.get_access_key_last_used(access_key_id) + return last_used + def get_all_access_keys(self, user_name, marker=None, max_items=None): user = self.get_user(user_name) keys = user.get_all_access_keys() diff --git a/moto/iam/responses.py b/moto/iam/responses.py index 22558f3f6..c3c6f8f8a 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -454,6 +454,13 @@ class IamResponse(BaseResponse): template = self.response_template(GENERIC_EMPTY_TEMPLATE) return template.render(name='UpdateAccessKey') + def get_access_key_last_used(self): + user_name = self._get_param('UserName') + access_key_id = self._get_param('AccessKeyId') + iam_backend.get_access_key_last_used(user_name, access_key_id) + template = self.response_template(GENERIC_EMPTY_TEMPLATE) + return template.render(name='GetAccessKeyLastUsed') + def list_access_keys(self): user_name = self._get_param('UserName') diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index bc23ff712..343d42b3a 100644 --- a/tests/test_iam/test_iam.py +++ 
b/tests/test_iam/test_iam.py @@ -695,6 +695,19 @@ def test_update_access_key(): resp['AccessKeyMetadata'][0]['Status'].should.equal('Inactive') +@mock_iam +def test_get_access_key_last_used(): + iam = boto3.resource('iam', region_name='us-east-1') + client = iam.meta.client + username = 'test-user' + iam.create_user(UserName=username) + with assert_raises(ClientError): + client.get_access_key_last_used(UserName=username, AccessKeyId='non-existent-key') + key = client.create_access_key(UserName=username)['AccessKey'] + resp = client.get_access_key_last_used(UserName=username, AccessKeyId=key['AccessKeyId']) + resp.should.equal(key.last_used) + + @mock_iam def test_get_account_authorization_details(): import json From 81dea64a9e072bc6fe2ae76317645e59c5b97b7d Mon Sep 17 00:00:00 2001 From: Craig Anderson Date: Tue, 27 Nov 2018 15:02:46 +0000 Subject: [PATCH 053/175] Ignore pyenv file. --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 7f57e98e9..f0118e85e 100644 --- a/.gitignore +++ b/.gitignore @@ -15,4 +15,4 @@ python_env .ropeproject/ .pytest_cache/ venv/ - +.python-version From 96ed66c08d384fd3543804adf43253a2b61a5a79 Mon Sep 17 00:00:00 2001 From: Craig Anderson Date: Tue, 27 Nov 2018 15:31:56 +0000 Subject: [PATCH 054/175] Add AllowedPattern to SSM describe_parameters response (#1955) --- moto/ssm/models.py | 15 +++++++++++---- moto/ssm/responses.py | 3 ++- tests/test_ssm/test_ssm_boto3.py | 4 +++- 3 files changed, 16 insertions(+), 6 deletions(-) diff --git a/moto/ssm/models.py b/moto/ssm/models.py index f16a7d981..2f316a3ac 100644 --- a/moto/ssm/models.py +++ b/moto/ssm/models.py @@ -14,10 +14,12 @@ import itertools class Parameter(BaseModel): - def __init__(self, name, value, type, description, keyid, last_modified_date, version): + def __init__(self, name, value, type, description, allowed_pattern, keyid, + last_modified_date, version): self.name = name self.type = type self.description = 
description + self.allowed_pattern = allowed_pattern self.keyid = keyid self.last_modified_date = last_modified_date self.version = version @@ -58,6 +60,10 @@ class Parameter(BaseModel): if self.keyid: r['KeyId'] = self.keyid + + if self.allowed_pattern: + r['AllowedPattern'] = self.allowed_pattern + return r @@ -291,7 +297,8 @@ class SimpleSystemManagerBackend(BaseBackend): return self._parameters[name] return None - def put_parameter(self, name, description, value, type, keyid, overwrite): + def put_parameter(self, name, description, value, type, allowed_pattern, + keyid, overwrite): previous_parameter = self._parameters.get(name) version = 1 @@ -302,8 +309,8 @@ class SimpleSystemManagerBackend(BaseBackend): return last_modified_date = time.time() - self._parameters[name] = Parameter( - name, value, type, description, keyid, last_modified_date, version) + self._parameters[name] = Parameter(name, value, type, description, + allowed_pattern, keyid, last_modified_date, version) return version def add_tags_to_resource(self, resource_type, resource_id, tags): diff --git a/moto/ssm/responses.py b/moto/ssm/responses.py index eb05e51b6..c47d4127a 100644 --- a/moto/ssm/responses.py +++ b/moto/ssm/responses.py @@ -160,11 +160,12 @@ class SimpleSystemManagerResponse(BaseResponse): description = self._get_param('Description') value = self._get_param('Value') type_ = self._get_param('Type') + allowed_pattern = self._get_param('AllowedPattern') keyid = self._get_param('KeyId') overwrite = self._get_param('Overwrite', False) result = self.ssm_backend.put_parameter( - name, description, value, type_, keyid, overwrite) + name, description, value, type_, allowed_pattern, keyid, overwrite) if result is None: error = { diff --git a/tests/test_ssm/test_ssm_boto3.py b/tests/test_ssm/test_ssm_boto3.py index f8ef3a237..94682429f 100644 --- a/tests/test_ssm/test_ssm_boto3.py +++ b/tests/test_ssm/test_ssm_boto3.py @@ -319,13 +319,15 @@ def test_describe_parameters(): Name='test', 
Description='A test parameter', Value='value', - Type='String') + Type='String', + AllowedPattern=r'.*') response = client.describe_parameters() len(response['Parameters']).should.equal(1) response['Parameters'][0]['Name'].should.equal('test') response['Parameters'][0]['Type'].should.equal('String') + response['Parameters'][0]['AllowedPattern'].should.equal(r'.*') @mock_ssm From 9418a6916d561ca4202f92d94192827eae371ef6 Mon Sep 17 00:00:00 2001 From: martynaspaulikas Date: Tue, 27 Nov 2018 16:12:41 +0000 Subject: [PATCH 055/175] Fix tests and functionality of get_access_key_last_used() --- Makefile | 1 + moto/iam/models.py | 25 +++++++++++++++++-------- moto/iam/responses.py | 20 +++++++++++++++----- tests/test_iam/test_iam.py | 14 ++++++++++---- 4 files changed, 43 insertions(+), 17 deletions(-) diff --git a/Makefile b/Makefile index f224d7091..de08c6f74 100644 --- a/Makefile +++ b/Makefile @@ -19,6 +19,7 @@ test: lint rm -f .coverage rm -rf cover @nosetests -sv --with-coverage --cover-html ./tests/ $(TEST_EXCLUDE) + test_server: @TEST_SERVER_MODE=true nosetests -sv --with-coverage --cover-html ./tests/ diff --git a/moto/iam/models.py b/moto/iam/models.py index 32e1ea5ee..f3d640940 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -355,10 +355,6 @@ class User(BaseModel): def get_all_access_keys(self): return self.access_keys - def get_access_key_last_used(self, access_key_id): - key = self.get_access_key_by_id(access_key_id) - return key.last_used - def delete_access_key(self, access_key_id): key = self.get_access_key_by_id(access_key_id) self.access_keys.remove(key) @@ -845,10 +841,23 @@ class IAMBackend(BaseBackend): user = self.get_user(user_name) user.update_access_key(access_key_id, status) - def get_access_key_last_used(self, user_name, access_key_id): - user = self.get_user(user_name) - last_used = user.get_access_key_last_used(access_key_id) - return last_used + def get_access_key_last_used(self, access_key_id): + access_keys_list = 
self.get_all_access_keys_for_all_users() + for key in access_keys_list: + if key.access_key_id == access_key_id: + return { + 'user_name': key.user_name, + 'last_used': key.last_used + } + else: + raise IAMNotFoundException( + "The Access Key with id {0} cannot be found".format(access_key_id)) + + def get_all_access_keys_for_all_users(self): + access_keys_list = [] + for user_name in self.users: + access_keys_list += self.get_all_access_keys(user_name) + return access_keys_list def get_all_access_keys(self, user_name, marker=None, max_items=None): user = self.get_user(user_name) diff --git a/moto/iam/responses.py b/moto/iam/responses.py index c3c6f8f8a..1674b1b53 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -455,15 +455,13 @@ class IamResponse(BaseResponse): return template.render(name='UpdateAccessKey') def get_access_key_last_used(self): - user_name = self._get_param('UserName') access_key_id = self._get_param('AccessKeyId') - iam_backend.get_access_key_last_used(user_name, access_key_id) - template = self.response_template(GENERIC_EMPTY_TEMPLATE) - return template.render(name='GetAccessKeyLastUsed') + last_used_response = iam_backend.get_access_key_last_used(access_key_id) + template = self.response_template(GET_ACCESS_KEY_LAST_USED_TEMPLATE) + return template.render(user_name=last_used_response["user_name"], last_used=last_used_response["last_used"]) def list_access_keys(self): user_name = self._get_param('UserName') - keys = iam_backend.get_all_access_keys(user_name) template = self.response_template(LIST_ACCESS_KEYS_TEMPLATE) return template.render(user_name=user_name, keys=keys) @@ -1247,6 +1245,18 @@ LIST_ACCESS_KEYS_TEMPLATE = """ """ + +GET_ACCESS_KEY_LAST_USED_TEMPLATE = """ + + + {{ user_name }} + + {{ last_used }} + + + +""" + CREDENTIAL_REPORT_GENERATING = """ diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index 343d42b3a..b6c836b17 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ 
-11,6 +11,7 @@ from moto.iam.models import aws_managed_policies from nose.tools import assert_raises, assert_equals from nose.tools import raises +from datetime import datetime from tests.helpers import requires_boto_gte @@ -702,10 +703,15 @@ def test_get_access_key_last_used(): username = 'test-user' iam.create_user(UserName=username) with assert_raises(ClientError): - client.get_access_key_last_used(UserName=username, AccessKeyId='non-existent-key') - key = client.create_access_key(UserName=username)['AccessKey'] - resp = client.get_access_key_last_used(UserName=username, AccessKeyId=key['AccessKeyId']) - resp.should.equal(key.last_used) + client.get_access_key_last_used(AccessKeyId='non-existent-key-id') + create_key_response = client.create_access_key(UserName=username)['AccessKey'] + resp = client.get_access_key_last_used(AccessKeyId=create_key_response['AccessKeyId']) + + datetime.strftime(resp["AccessKeyLastUsed"]["LastUsedDate"], "%Y-%m-%d").should.equal(datetime.strftime( + datetime.utcnow(), + "%Y-%m-%d" + )) + resp["UserName"].should.equal(create_key_response["UserName"]) @mock_iam From d29869bf9b678c5573f0ff343dd204c2459142a3 Mon Sep 17 00:00:00 2001 From: Jon Michaelchuck Date: Tue, 27 Nov 2018 08:32:30 -0800 Subject: [PATCH 056/175] flake8 fix --- moto/s3/exceptions.py | 1 + 1 file changed, 1 insertion(+) diff --git a/moto/s3/exceptions.py b/moto/s3/exceptions.py index f78e24943..c7d82ddfd 100644 --- a/moto/s3/exceptions.py +++ b/moto/s3/exceptions.py @@ -179,6 +179,7 @@ class InvalidStorageClass(S3ClientError): "The storage class you specified is not valid", *args, **kwargs) + class DuplicateTagKeys(S3ClientError): code = 400 From 5a3b5cab29aec9219e1d01c3143de41ee472955d Mon Sep 17 00:00:00 2001 From: vadym-serdiuk Date: Wed, 28 Nov 2018 17:33:22 +0200 Subject: [PATCH 057/175] Strip parenthesis in the KeyConditionExpression The "bloop" package uses parenthesis in the KeyConditionExpression, so query method returns nothing due to the wrong parsing of 
the parameters. --- moto/dynamodb2/responses.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/dynamodb2/responses.py b/moto/dynamodb2/responses.py index e2f1ef1cc..aeddb76bf 100644 --- a/moto/dynamodb2/responses.py +++ b/moto/dynamodb2/responses.py @@ -311,7 +311,7 @@ class DynamoHandler(BaseResponse): def query(self): name = self.body['TableName'] # {u'KeyConditionExpression': u'#n0 = :v0', u'ExpressionAttributeValues': {u':v0': {u'S': u'johndoe'}}, u'ExpressionAttributeNames': {u'#n0': u'username'}} - key_condition_expression = self.body.get('KeyConditionExpression') + key_condition_expression = self.body.get('KeyConditionExpression').strip('()') projection_expression = self.body.get('ProjectionExpression') expression_attribute_names = self.body.get('ExpressionAttributeNames', {}) filter_expression = self.body.get('FilterExpression') From 5db35ef168ff0ae9a1ebd397885cdaa326221f5c Mon Sep 17 00:00:00 2001 From: amitchakote7 Date: Thu, 29 Nov 2018 15:39:39 +1100 Subject: [PATCH 058/175] Added TargetGroupARNs to DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE --- moto/autoscaling/responses.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/moto/autoscaling/responses.py b/moto/autoscaling/responses.py index 5586c51dd..845db0136 100644 --- a/moto/autoscaling/responses.py +++ b/moto/autoscaling/responses.py @@ -508,6 +508,15 @@ DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE = """ {% endif %} + {% if group.target_group_arns %} + + {% for target_group_arn in group.target_group_arns %} + {{ target_group_arn }} + {% endfor %} + + {% else %} + + {% endif %} {{ group.min_size }} {% if group.vpc_zone_identifier %} {{ group.vpc_zone_identifier }} From 7d472896e1177d3e1144eb4d36ad3bbaf8ff771a Mon Sep 17 00:00:00 2001 From: vadym-serdiuk Date: Tue, 4 Dec 2018 12:28:17 +0200 Subject: [PATCH 059/175] Move parenthesis stripping down --- moto/dynamodb2/responses.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/moto/dynamodb2/responses.py 
b/moto/dynamodb2/responses.py index aeddb76bf..12530716c 100644 --- a/moto/dynamodb2/responses.py +++ b/moto/dynamodb2/responses.py @@ -311,7 +311,7 @@ class DynamoHandler(BaseResponse): def query(self): name = self.body['TableName'] # {u'KeyConditionExpression': u'#n0 = :v0', u'ExpressionAttributeValues': {u':v0': {u'S': u'johndoe'}}, u'ExpressionAttributeNames': {u'#n0': u'username'}} - key_condition_expression = self.body.get('KeyConditionExpression').strip('()') + key_condition_expression = self.body.get('KeyConditionExpression') projection_expression = self.body.get('ProjectionExpression') expression_attribute_names = self.body.get('ExpressionAttributeNames', {}) filter_expression = self.body.get('FilterExpression') @@ -385,7 +385,7 @@ class DynamoHandler(BaseResponse): range_values = [value_alias_map[ range_key_expression_components[2]]] else: - hash_key_expression = key_condition_expression + hash_key_expression = key_condition_expression.strip('()') range_comparison = None range_values = [] From 2038fa92be7ce144260a574d9945c9c390f792a1 Mon Sep 17 00:00:00 2001 From: Taro Sato Date: Wed, 5 Dec 2018 16:17:28 -0800 Subject: [PATCH 060/175] Activate proper pagination for S3 common prefixes --- moto/s3/responses.py | 15 +++++++++++---- tests/test_s3/test_s3.py | 24 ++++++++++++++++++++++++ 2 files changed, 35 insertions(+), 4 deletions(-) diff --git a/moto/s3/responses.py b/moto/s3/responses.py index 13e5f87d9..1d8cf8c53 100755 --- a/moto/s3/responses.py +++ b/moto/s3/responses.py @@ -338,9 +338,15 @@ class ResponseObject(_TemplateEnvironmentMixin): if continuation_token or start_after: limit = continuation_token or start_after - result_keys = self._get_results_from_token(result_keys, limit) + if not delimiter: + result_keys = self._get_results_from_token(result_keys, limit) + else: + result_folders = self._get_results_from_token(result_folders, limit) - result_keys, is_truncated, next_continuation_token = self._truncate_result(result_keys, max_keys) + if not 
delimiter: + result_keys, is_truncated, next_continuation_token = self._truncate_result(result_keys, max_keys) + else: + result_folders, is_truncated, next_continuation_token = self._truncate_result(result_folders, max_keys) return template.render( bucket=bucket, @@ -358,7 +364,7 @@ class ResponseObject(_TemplateEnvironmentMixin): def _get_results_from_token(self, result_keys, token): continuation_index = 0 for key in result_keys: - if key.name > token: + if (key.name if isinstance(key, FakeKey) else key) > token: break continuation_index += 1 return result_keys[continuation_index:] @@ -367,7 +373,8 @@ class ResponseObject(_TemplateEnvironmentMixin): if len(result_keys) > max_keys: is_truncated = 'true' result_keys = result_keys[:max_keys] - next_continuation_token = result_keys[-1].name + item = result_keys[-1] + next_continuation_token = (item.name if isinstance(item, FakeKey) else item) else: is_truncated = 'false' next_continuation_token = None diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index 6e339abb6..f5575c803 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -1163,6 +1163,30 @@ def test_boto3_list_keys_xml_escaped(): assert 'Owner' not in resp['Contents'][0] +@mock_s3 +def test_boto3_list_objects_v2_common_prefix_pagination(): + s3 = boto3.client('s3', region_name='us-east-1') + s3.create_bucket(Bucket='mybucket') + + max_keys = 1 + keys = ['test/{i}/{i}'.format(i=i) for i in range(3)] + for key in keys: + s3.put_object(Bucket='mybucket', Key=key, Body=b'v') + + prefixes = [] + args = {"Bucket": 'mybucket', "Delimiter": "/", "Prefix": "test/", "MaxKeys": max_keys} + resp = {"IsTruncated": True} + while resp.get("IsTruncated", False): + if "NextContinuationToken" in resp: + args["ContinuationToken"] = resp["NextContinuationToken"] + resp = s3.list_objects_v2(**args) + if "CommonPrefixes" in resp: + assert len(resp["CommonPrefixes"]) == max_keys + prefixes.extend(i["Prefix"] for i in resp["CommonPrefixes"]) + + 
assert prefixes == [k[:k.rindex('/') + 1] for k in keys] + + @mock_s3 def test_boto3_list_objects_v2_truncated_response(): s3 = boto3.client('s3', region_name='us-east-1') From a744adbcc5f3ca88aed521bd6b4e5c02e95687ad Mon Sep 17 00:00:00 2001 From: Jon Beilke Date: Thu, 6 Dec 2018 12:18:59 -0600 Subject: [PATCH 061/175] AWS is case-sensitive when using is-public to filter for AMIs and expects lower-case values --- moto/ec2/models.py | 2 +- tests/test_ec2/test_amis.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/moto/ec2/models.py b/moto/ec2/models.py index b94cac479..31720073b 100755 --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -1115,7 +1115,7 @@ class Ami(TaggedEC2Resource): elif filter_name == 'image-id': return self.id elif filter_name == 'is-public': - return str(self.is_public) + return self.is_public_string elif filter_name == 'state': return self.state elif filter_name == 'name': diff --git a/tests/test_ec2/test_amis.py b/tests/test_ec2/test_amis.py index a8d4d1b67..fd7234511 100644 --- a/tests/test_ec2/test_amis.py +++ b/tests/test_ec2/test_amis.py @@ -258,11 +258,11 @@ def test_ami_filters(): amis_by_name = conn.get_all_images(filters={'name': imageA.name}) set([ami.id for ami in amis_by_name]).should.equal(set([imageA.id])) - amis_by_public = conn.get_all_images(filters={'is-public': True}) + amis_by_public = conn.get_all_images(filters={'is-public': 'true'}) set([ami.id for ami in amis_by_public]).should.contain(imageB.id) len(amis_by_public).should.equal(35) - amis_by_nonpublic = conn.get_all_images(filters={'is-public': False}) + amis_by_nonpublic = conn.get_all_images(filters={'is-public': 'false'}) set([ami.id for ami in amis_by_nonpublic]).should.contain(imageA.id) len(amis_by_nonpublic).should.equal(1) From 99afb38524ab4211bdd2ea9320535aed4d321adb Mon Sep 17 00:00:00 2001 From: Trevor Edwards Date: Fri, 7 Dec 2018 13:22:38 -0800 Subject: [PATCH 062/175] Replace pyaml with PyYAML dependency --- setup.py | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index a1b8c5dae..0598d7a10 100755 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ install_requires = [ "xmltodict", "six>1.9", "werkzeug", - "pyaml", + "PyYAML", "pytz", "python-dateutil<3.0.0,>=2.1", "python-jose<3.0.0", From 1ca80146971c71c933fa320bb1c76dd8b9a86b55 Mon Sep 17 00:00:00 2001 From: sbkg0002 Date: Sat, 8 Dec 2018 21:04:08 +0100 Subject: [PATCH 063/175] Add idna version depencies. --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index a1b8c5dae..62485148c 100755 --- a/setup.py +++ b/setup.py @@ -24,6 +24,7 @@ install_requires = [ "jsondiff==1.1.1", "aws-xray-sdk!=0.96,>=0.93", "responses>=0.9.0", + "idna<2.8,>=2.5", ] extras_require = { From b0a280bde2c7a722c8d7e1b278c3b458781ecf6d Mon Sep 17 00:00:00 2001 From: Diego Argueta Date: Tue, 18 Dec 2018 14:08:37 -0800 Subject: [PATCH 064/175] Move S3 storage to SpooledTemporaryFile --- moto/s3/models.py | 42 ++++++++++++++----- tests/test_s3/test_s3.py | 4 +- tests/test_s3/test_server.py | 2 +- .../test_bucket_path_server.py | 6 +-- .../test_s3bucket_path/test_s3bucket_path.py | 4 +- 5 files changed, 40 insertions(+), 18 deletions(-) diff --git a/moto/s3/models.py b/moto/s3/models.py index bb4d7848c..4ce2afb34 100644 --- a/moto/s3/models.py +++ b/moto/s3/models.py @@ -8,6 +8,7 @@ import itertools import codecs import random import string +import tempfile import six @@ -21,6 +22,7 @@ from .utils import clean_key_name, _VersionedKeyStore UPLOAD_ID_BYTES = 43 UPLOAD_PART_MIN_SIZE = 5242880 STORAGE_CLASS = ["STANDARD", "REDUCED_REDUNDANCY", "STANDARD_IA", "ONEZONE_IA"] +DEFAULT_KEY_BUFFER_SIZE = 2 ** 24 class FakeDeleteMarker(BaseModel): @@ -42,9 +44,9 @@ class FakeDeleteMarker(BaseModel): class FakeKey(BaseModel): - def __init__(self, name, value, storage="STANDARD", etag=None, is_versioned=False, version_id=0): + def __init__(self, name, value, storage="STANDARD", etag=None, is_versioned=False, 
version_id=0, + max_buffer_size=DEFAULT_KEY_BUFFER_SIZE): self.name = name - self.value = value self.last_modified = datetime.datetime.utcnow() self.acl = get_canned_acl('private') self.website_redirect_location = None @@ -56,10 +58,24 @@ class FakeKey(BaseModel): self._is_versioned = is_versioned self._tagging = FakeTagging() + self.value_buffer = tempfile.SpooledTemporaryFile(max_size=max_buffer_size) + self.value = value + @property def version_id(self): return self._version_id + @property + def value(self): + self.value_buffer.seek(0) + return self.value_buffer.read() + + @value.setter + def value(self, new_value): + self.value_buffer.seek(0) + self.value_buffer.truncate() + self.value_buffer.write(new_value) + def copy(self, new_name=None): r = copy.deepcopy(self) if new_name is not None: @@ -83,7 +99,9 @@ class FakeKey(BaseModel): self.acl = acl def append_to_value(self, value): - self.value += value + self.value_buffer.seek(0, os.SEEK_END) + self.value_buffer.write(value) + self.last_modified = datetime.datetime.utcnow() self._etag = None # must recalculate etag if self._is_versioned: @@ -101,11 +119,14 @@ class FakeKey(BaseModel): def etag(self): if self._etag is None: value_md5 = hashlib.md5() - if isinstance(self.value, six.text_type): - value = self.value.encode("utf-8") - else: - value = self.value - value_md5.update(value) + + self.value_buffer.seek(0) + while True: + block = self.value_buffer.read(DEFAULT_KEY_BUFFER_SIZE) + if not block: + break + value_md5.update(block) + self._etag = value_md5.hexdigest() return '"{0}"'.format(self._etag) @@ -132,7 +153,7 @@ class FakeKey(BaseModel): res = { 'ETag': self.etag, 'last-modified': self.last_modified_RFC1123, - 'content-length': str(len(self.value)), + 'content-length': str(self.size), } if self._storage_class != 'STANDARD': res['x-amz-storage-class'] = self._storage_class @@ -150,7 +171,8 @@ class FakeKey(BaseModel): @property def size(self): - return len(self.value) + self.value_buffer.seek(0, 
os.SEEK_END) + return self.value_buffer.tell() @property def storage_class(self): diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index 6e339abb6..07137bcfa 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -524,7 +524,7 @@ def test_post_to_bucket(): requests.post("https://foobar.s3.amazonaws.com/", { 'key': 'the-key', - 'file': 'nothing' + 'file': b'nothing' }) bucket.get_key('the-key').get_contents_as_string().should.equal(b'nothing') @@ -538,7 +538,7 @@ def test_post_with_metadata_to_bucket(): requests.post("https://foobar.s3.amazonaws.com/", { 'key': 'the-key', - 'file': 'nothing', + 'file': b'nothing', 'x-amz-meta-test': 'metadata' }) diff --git a/tests/test_s3/test_server.py b/tests/test_s3/test_server.py index 9c8252a04..934dd5a42 100644 --- a/tests/test_s3/test_server.py +++ b/tests/test_s3/test_server.py @@ -72,7 +72,7 @@ def test_s3_server_post_to_bucket(): test_client.post('/', "https://tester.localhost:5000/", data={ 'key': 'the-key', - 'file': 'nothing' + 'file': b'nothing' }) res = test_client.get('/the-key', 'http://tester.localhost:5000/') diff --git a/tests/test_s3bucket_path/test_bucket_path_server.py b/tests/test_s3bucket_path/test_bucket_path_server.py index 434110e87..604adc289 100644 --- a/tests/test_s3bucket_path/test_bucket_path_server.py +++ b/tests/test_s3bucket_path/test_bucket_path_server.py @@ -73,7 +73,7 @@ def test_s3_server_post_to_bucket(): test_client.post('/foobar2', "https://localhost:5000/", data={ 'key': 'the-key', - 'file': 'nothing' + 'file': b'nothing' }) res = test_client.get('/foobar2/the-key', 'http://localhost:5000/') @@ -89,7 +89,7 @@ def test_s3_server_put_ipv6(): test_client.post('/foobar2', "https://[::]:5000/", data={ 'key': 'the-key', - 'file': 'nothing' + 'file': b'nothing' }) res = test_client.get('/foobar2/the-key', 'http://[::]:5000/') @@ -105,7 +105,7 @@ def test_s3_server_put_ipv4(): test_client.post('/foobar2', "https://127.0.0.1:5000/", data={ 'key': 'the-key', - 
'file': 'nothing' + 'file': b'nothing' }) res = test_client.get('/foobar2/the-key', 'http://127.0.0.1:5000/') diff --git a/tests/test_s3bucket_path/test_s3bucket_path.py b/tests/test_s3bucket_path/test_s3bucket_path.py index 21d786c61..58ae4db6f 100644 --- a/tests/test_s3bucket_path/test_s3bucket_path.py +++ b/tests/test_s3bucket_path/test_s3bucket_path.py @@ -198,7 +198,7 @@ def test_post_to_bucket(): requests.post("https://s3.amazonaws.com/foobar", { 'key': 'the-key', - 'file': 'nothing' + 'file': b'nothing' }) bucket.get_key('the-key').get_contents_as_string().should.equal(b'nothing') @@ -212,7 +212,7 @@ def test_post_with_metadata_to_bucket(): requests.post("https://s3.amazonaws.com/foobar", { 'key': 'the-key', - 'file': 'nothing', + 'file': b'nothing', 'x-amz-meta-test': 'metadata' }) From 26adaeefa0cb24983aa46bc873a16f857f2bb686 Mon Sep 17 00:00:00 2001 From: Diego Argueta Date: Tue, 18 Dec 2018 14:08:51 -0800 Subject: [PATCH 065/175] Start testing on 3.7 --- .travis.yml | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/.travis.yml b/.travis.yml index 3a5de0fa2..bf0222afb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,3 +1,4 @@ +dist: xenial language: python sudo: false services: @@ -5,22 +6,10 @@ services: python: - 2.7 - 3.6 + - 3.7 env: - TEST_SERVER_MODE=false - TEST_SERVER_MODE=true -# Due to incomplete Python 3.7 support on Travis CI ( -# https://github.com/travis-ci/travis-ci/issues/9815), -# using a matrix is necessary -matrix: - include: - - python: 3.7 - env: TEST_SERVER_MODE=false - dist: xenial - sudo: true - - python: 3.7 - env: TEST_SERVER_MODE=true - dist: xenial - sudo: true before_install: - export BOTO_CONFIG=/dev/null - export AWS_SECRET_ACCESS_KEY=foobar_secret From 2cc8784e5c31b95659df5b9fdf4f755ad37c726a Mon Sep 17 00:00:00 2001 From: Diego Argueta Date: Tue, 18 Dec 2018 14:53:52 -0800 Subject: [PATCH 066/175] Restore files modified in non-working fix. 
--- tests/test_s3/test_s3.py | 4 ++-- tests/test_s3/test_server.py | 2 +- tests/test_s3bucket_path/test_bucket_path_server.py | 6 +++--- tests/test_s3bucket_path/test_s3bucket_path.py | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index 07137bcfa..6e339abb6 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -524,7 +524,7 @@ def test_post_to_bucket(): requests.post("https://foobar.s3.amazonaws.com/", { 'key': 'the-key', - 'file': b'nothing' + 'file': 'nothing' }) bucket.get_key('the-key').get_contents_as_string().should.equal(b'nothing') @@ -538,7 +538,7 @@ def test_post_with_metadata_to_bucket(): requests.post("https://foobar.s3.amazonaws.com/", { 'key': 'the-key', - 'file': b'nothing', + 'file': 'nothing', 'x-amz-meta-test': 'metadata' }) diff --git a/tests/test_s3/test_server.py b/tests/test_s3/test_server.py index 934dd5a42..9c8252a04 100644 --- a/tests/test_s3/test_server.py +++ b/tests/test_s3/test_server.py @@ -72,7 +72,7 @@ def test_s3_server_post_to_bucket(): test_client.post('/', "https://tester.localhost:5000/", data={ 'key': 'the-key', - 'file': b'nothing' + 'file': 'nothing' }) res = test_client.get('/the-key', 'http://tester.localhost:5000/') diff --git a/tests/test_s3bucket_path/test_bucket_path_server.py b/tests/test_s3bucket_path/test_bucket_path_server.py index 604adc289..434110e87 100644 --- a/tests/test_s3bucket_path/test_bucket_path_server.py +++ b/tests/test_s3bucket_path/test_bucket_path_server.py @@ -73,7 +73,7 @@ def test_s3_server_post_to_bucket(): test_client.post('/foobar2', "https://localhost:5000/", data={ 'key': 'the-key', - 'file': b'nothing' + 'file': 'nothing' }) res = test_client.get('/foobar2/the-key', 'http://localhost:5000/') @@ -89,7 +89,7 @@ def test_s3_server_put_ipv6(): test_client.post('/foobar2', "https://[::]:5000/", data={ 'key': 'the-key', - 'file': b'nothing' + 'file': 'nothing' }) res = test_client.get('/foobar2/the-key', 
'http://[::]:5000/') @@ -105,7 +105,7 @@ def test_s3_server_put_ipv4(): test_client.post('/foobar2', "https://127.0.0.1:5000/", data={ 'key': 'the-key', - 'file': b'nothing' + 'file': 'nothing' }) res = test_client.get('/foobar2/the-key', 'http://127.0.0.1:5000/') diff --git a/tests/test_s3bucket_path/test_s3bucket_path.py b/tests/test_s3bucket_path/test_s3bucket_path.py index 58ae4db6f..21d786c61 100644 --- a/tests/test_s3bucket_path/test_s3bucket_path.py +++ b/tests/test_s3bucket_path/test_s3bucket_path.py @@ -198,7 +198,7 @@ def test_post_to_bucket(): requests.post("https://s3.amazonaws.com/foobar", { 'key': 'the-key', - 'file': b'nothing' + 'file': 'nothing' }) bucket.get_key('the-key').get_contents_as_string().should.equal(b'nothing') @@ -212,7 +212,7 @@ def test_post_with_metadata_to_bucket(): requests.post("https://s3.amazonaws.com/foobar", { 'key': 'the-key', - 'file': b'nothing', + 'file': 'nothing', 'x-amz-meta-test': 'metadata' }) From 36b0117eec06c97dbfeccbb3af0d7f6e4825cc5b Mon Sep 17 00:00:00 2001 From: Alan Alexander Date: Wed, 19 Dec 2018 15:02:36 -0800 Subject: [PATCH 067/175] Updating the list of urls the SSM moto will match to include china --- moto/ssm/urls.py | 1 + tests/test_ssm/test_ssm_boto3.py | 12 ++++++++++++ 2 files changed, 13 insertions(+) diff --git a/moto/ssm/urls.py b/moto/ssm/urls.py index d22866486..9ac327325 100644 --- a/moto/ssm/urls.py +++ b/moto/ssm/urls.py @@ -3,6 +3,7 @@ from .responses import SimpleSystemManagerResponse url_bases = [ "https?://ssm.(.+).amazonaws.com", + "https?://ssm.(.+).amazonaws.com.cn", ] url_paths = { diff --git a/tests/test_ssm/test_ssm_boto3.py b/tests/test_ssm/test_ssm_boto3.py index f8ef3a237..645ec2aa5 100644 --- a/tests/test_ssm/test_ssm_boto3.py +++ b/tests/test_ssm/test_ssm_boto3.py @@ -277,6 +277,18 @@ def test_put_parameter(): response['Parameters'][0]['Type'].should.equal('String') response['Parameters'][0]['Version'].should.equal(2) +@mock_ssm +def test_put_parameter_china(): + client = 
boto3.client('ssm', region_name='cn-north-1') + + response = client.put_parameter( + Name='test', + Description='A test parameter', + Value='value', + Type='String') + + response['Version'].should.equal(1) + @mock_ssm def test_get_parameter(): From f15f006f7834ab69a5ee38ae30955fdc3863562e Mon Sep 17 00:00:00 2001 From: Diego Argueta <620513-dargueta@users.noreply.github.com> Date: Thu, 20 Dec 2018 00:34:04 -0800 Subject: [PATCH 068/175] Hack around text problem in unit tests. Now that payloads are not allowed to be text, some unit tests will cause crashes on Python 3 because the payload sent by requests gets passed to FakeKey as a string instead of raw bytes. I haven't been able to figure out a way around the issue that doesn't get super messy inside s3/responses.py so I'm just converting the value to bytes using the system's default encoding. --- moto/s3/models.py | 7 +++++++ tox.ini | 4 ++++ 2 files changed, 11 insertions(+) diff --git a/moto/s3/models.py b/moto/s3/models.py index 4ce2afb34..525b3c81a 100644 --- a/moto/s3/models.py +++ b/moto/s3/models.py @@ -9,6 +9,7 @@ import codecs import random import string import tempfile +import sys import six @@ -23,6 +24,7 @@ UPLOAD_ID_BYTES = 43 UPLOAD_PART_MIN_SIZE = 5242880 STORAGE_CLASS = ["STANDARD", "REDUCED_REDUNDANCY", "STANDARD_IA", "ONEZONE_IA"] DEFAULT_KEY_BUFFER_SIZE = 2 ** 24 +DEFAULT_TEXT_ENCODING = sys.getdefaultencoding() class FakeDeleteMarker(BaseModel): @@ -74,6 +76,11 @@ class FakeKey(BaseModel): def value(self, new_value): self.value_buffer.seek(0) self.value_buffer.truncate() + + # Hack for working around moto's own unit tests; this probably won't + # actually get hit in normal use. 
+ if isinstance(new_value, six.text_type): + new_value = new_value.encode(DEFAULT_TEXT_ENCODING) self.value_buffer.write(new_value) def copy(self, new_name=None): diff --git a/tox.ini b/tox.ini index 0f3f1466a..454409d72 100644 --- a/tox.ini +++ b/tox.ini @@ -2,6 +2,10 @@ envlist = py27, py36 [testenv] +setenv = + BOTO_CONFIG=/dev/null + AWS_SECRET_ACCESS_KEY=foobar_secret + AWS_ACCESS_KEY_ID=foobar_key deps = -r{toxinidir}/requirements.txt -r{toxinidir}/requirements-dev.txt From 191ad6d778c9f0769726dac312032c86bbbb48f4 Mon Sep 17 00:00:00 2001 From: Diego Argueta Date: Thu, 20 Dec 2018 11:15:15 -0800 Subject: [PATCH 069/175] Make keys pickleable --- moto/s3/models.py | 48 ++++++++++++++++++++++++++++------------ tests/test_s3/test_s3.py | 11 +++++++++ 2 files changed, 45 insertions(+), 14 deletions(-) diff --git a/moto/s3/models.py b/moto/s3/models.py index 525b3c81a..767a8332f 100644 --- a/moto/s3/models.py +++ b/moto/s3/models.py @@ -23,7 +23,7 @@ from .utils import clean_key_name, _VersionedKeyStore UPLOAD_ID_BYTES = 43 UPLOAD_PART_MIN_SIZE = 5242880 STORAGE_CLASS = ["STANDARD", "REDUCED_REDUNDANCY", "STANDARD_IA", "ONEZONE_IA"] -DEFAULT_KEY_BUFFER_SIZE = 2 ** 24 +DEFAULT_KEY_BUFFER_SIZE = 16 * 1024 * 1024 DEFAULT_TEXT_ENCODING = sys.getdefaultencoding() @@ -60,7 +60,8 @@ class FakeKey(BaseModel): self._is_versioned = is_versioned self._tagging = FakeTagging() - self.value_buffer = tempfile.SpooledTemporaryFile(max_size=max_buffer_size) + self._value_buffer = tempfile.SpooledTemporaryFile(max_size=max_buffer_size) + self._max_buffer_size = max_buffer_size self.value = value @property @@ -69,19 +70,19 @@ class FakeKey(BaseModel): @property def value(self): - self.value_buffer.seek(0) - return self.value_buffer.read() + self._value_buffer.seek(0) + return self._value_buffer.read() @value.setter def value(self, new_value): - self.value_buffer.seek(0) - self.value_buffer.truncate() + self._value_buffer.seek(0) + self._value_buffer.truncate() # Hack for working 
around moto's own unit tests; this probably won't # actually get hit in normal use. if isinstance(new_value, six.text_type): new_value = new_value.encode(DEFAULT_TEXT_ENCODING) - self.value_buffer.write(new_value) + self._value_buffer.write(new_value) def copy(self, new_name=None): r = copy.deepcopy(self) @@ -106,8 +107,8 @@ class FakeKey(BaseModel): self.acl = acl def append_to_value(self, value): - self.value_buffer.seek(0, os.SEEK_END) - self.value_buffer.write(value) + self._value_buffer.seek(0, os.SEEK_END) + self._value_buffer.write(value) self.last_modified = datetime.datetime.utcnow() self._etag = None # must recalculate etag @@ -126,10 +127,9 @@ class FakeKey(BaseModel): def etag(self): if self._etag is None: value_md5 = hashlib.md5() - - self.value_buffer.seek(0) + self._value_buffer.seek(0) while True: - block = self.value_buffer.read(DEFAULT_KEY_BUFFER_SIZE) + block = self._value_buffer.read(DEFAULT_KEY_BUFFER_SIZE) if not block: break value_md5.update(block) @@ -178,8 +178,8 @@ class FakeKey(BaseModel): @property def size(self): - self.value_buffer.seek(0, os.SEEK_END) - return self.value_buffer.tell() + self._value_buffer.seek(0, os.SEEK_END) + return self._value_buffer.tell() @property def storage_class(self): @@ -190,6 +190,26 @@ class FakeKey(BaseModel): if self._expiry is not None: return self._expiry.strftime("%a, %d %b %Y %H:%M:%S GMT") + # Keys need to be pickleable due to some implementation details of boto3. + # Since file objects aren't pickleable, we need to override the default + # behavior. 
The following is adapted from the Python docs: + # https://docs.python.org/3/library/pickle.html#handling-stateful-objects + def __getstate__(self): + state = self.__dict__.copy() + state['value'] = self.value + del state['_value_buffer'] + return state + + def __setstate__(self, state): + self.__dict__.update({ + k: v for k, v in six.iteritems(state) + if k != 'value' + }) + + self._value_buffer = \ + tempfile.SpooledTemporaryFile(max_size=self._max_buffer_size) + self.value = state['value'] + class FakeMultipart(BaseModel): diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index 6e339abb6..3af214c39 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -8,6 +8,7 @@ from functools import wraps from gzip import GzipFile from io import BytesIO import zlib +import pickle import json import boto @@ -65,6 +66,16 @@ class MyModel(object): s3.put_object(Bucket='mybucket', Key=self.name, Body=self.value) +@mock_s3 +def test_keys_are_pickleable(): + """Keys must be pickleable due to boto3 implementation details.""" + key = s3model.FakeKey('name', b'data!') + pickled = pickle.dumps(key) + loaded = pickle.loads(pickled) + + assert loaded.value == key.value + + @mock_s3 def test_my_model_save(): # Create Bucket so that test can run From 1998d59cfcbff4cf6240a16889c2c98cd7e5a416 Mon Sep 17 00:00:00 2001 From: Diego Argueta Date: Thu, 20 Dec 2018 11:40:13 -0800 Subject: [PATCH 070/175] Add more tests to please Coveralls --- tests/test_s3/test_s3.py | 36 +++++++++++++++++++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index 3af214c39..10fd68f48 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -70,12 +70,46 @@ class MyModel(object): def test_keys_are_pickleable(): """Keys must be pickleable due to boto3 implementation details.""" key = s3model.FakeKey('name', b'data!') + assert key.value == b'data!' 
+ pickled = pickle.dumps(key) loaded = pickle.loads(pickled) - assert loaded.value == key.value +@mock_s3 +def test_append_to_value__basic(): + key = s3model.FakeKey('name', b'data!') + assert key.value == b'data!' + assert key.size == 5 + + key.append_to_value(b' And even more data') + assert key.value == b'data! And even more data' + assert key.size == 24 + + +@mock_s3 +def test_append_to_value__nothing_added(): + key = s3model.FakeKey('name', b'data!') + assert key.value == b'data!' + assert key.size == 5 + + key.append_to_value(b'') + assert key.value == b'data!' + assert key.size == 5 + + +@mock_s3 +def test_append_to_value__empty_key(): + key = s3model.FakeKey('name', b'') + assert key.value == b'' + assert key.size == 0 + + key.append_to_value(b'stuff') + assert key.value == b'stuff' + assert key.size == 5 + + @mock_s3 def test_my_model_save(): # Create Bucket so that test can run From 08d17a7a13f6ea9d655a3b336f1cc595ee9c4ff4 Mon Sep 17 00:00:00 2001 From: zane Date: Fri, 21 Dec 2018 14:04:52 -0800 Subject: [PATCH 071/175] adding Tags support --- moto/secretsmanager/models.py | 12 ++++-------- moto/secretsmanager/responses.py | 4 +++- .../test_secretsmanager/test_secretsmanager.py | 17 +++++++++++++++++ 3 files changed, 24 insertions(+), 9 deletions(-) diff --git a/moto/secretsmanager/models.py b/moto/secretsmanager/models.py index 7f89e2eb6..1350ab469 100644 --- a/moto/secretsmanager/models.py +++ b/moto/secretsmanager/models.py @@ -56,7 +56,7 @@ class SecretsManagerBackend(BaseBackend): return response - def create_secret(self, name, secret_string, **kwargs): + def create_secret(self, name, secret_string, tags, **kwargs): generated_version_id = str(uuid.uuid4()) @@ -68,7 +68,8 @@ class SecretsManagerBackend(BaseBackend): 'rotation_enabled': False, 'rotation_lambda_arn': '', 'auto_rotate_after_days': 0, - 'version_id': generated_version_id + 'version_id': generated_version_id, + 'tags': tags } self.secrets[name] = secret @@ -101,12 +102,7 @@ class 
SecretsManagerBackend(BaseBackend): "LastChangedDate": None, "LastAccessedDate": None, "DeletedDate": None, - "Tags": [ - { - "Key": "", - "Value": "" - }, - ] + "Tags": secret['tags'] }) return response diff --git a/moto/secretsmanager/responses.py b/moto/secretsmanager/responses.py index b8b6872a8..932e7bfd7 100644 --- a/moto/secretsmanager/responses.py +++ b/moto/secretsmanager/responses.py @@ -19,9 +19,11 @@ class SecretsManagerResponse(BaseResponse): def create_secret(self): name = self._get_param('Name') secret_string = self._get_param('SecretString') + tags = self._get_param('Tags', if_none=[]) return secretsmanager_backends[self.region].create_secret( name=name, - secret_string=secret_string + secret_string=secret_string, + tags=tags ) def get_random_password(self): diff --git a/tests/test_secretsmanager/test_secretsmanager.py b/tests/test_secretsmanager/test_secretsmanager.py index 0e0b98b1e..169282421 100644 --- a/tests/test_secretsmanager/test_secretsmanager.py +++ b/tests/test_secretsmanager/test_secretsmanager.py @@ -44,6 +44,23 @@ def test_create_secret(): secret = conn.get_secret_value(SecretId='test-secret') assert secret['SecretString'] == 'foosecret' +@mock_secretsmanager +def test_create_secret_with_tags(): + conn = boto3.client('secretsmanager', region_name='us-east-1') + secret_name = 'test-secret-with-tags' + + result = conn.create_secret( + Name=secret_name, + SecretString="foosecret", + Tags=[{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}] + ) + assert result['ARN'] + assert result['Name'] == secret_name + secret_value = conn.get_secret_value(SecretId=secret_name) + assert secret_value['SecretString'] == 'foosecret' + secret_details = conn.describe_secret(SecretId=secret_name) + assert secret_details['Tags'] == [{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}] + @mock_secretsmanager def test_get_random_password_default_length(): conn = boto3.client('secretsmanager', region_name='us-west-2') From 
f4767c805ed7c81513dea2bb8be3432b739675a0 Mon Sep 17 00:00:00 2001 From: Steve Pulec Date: Fri, 28 Dec 2018 19:38:09 -0500 Subject: [PATCH 072/175] Uncomment EMR FailureDetails. Closes #1891. --- moto/emr/responses.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/moto/emr/responses.py b/moto/emr/responses.py index 49e37ab9a..933e0177b 100644 --- a/moto/emr/responses.py +++ b/moto/emr/responses.py @@ -613,13 +613,11 @@ DESCRIBE_STEP_TEMPLATE = """ Date: Fri, 28 Dec 2018 23:05:25 -0500 Subject: [PATCH 074/175] adds Environment to the Lambda cfm optional keys Adds Environment to the list of keys that can be included in Lambda cloudformation functions. --- moto/awslambda/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/awslambda/models.py b/moto/awslambda/models.py index b11bde042..a37a15e27 100644 --- a/moto/awslambda/models.py +++ b/moto/awslambda/models.py @@ -386,7 +386,7 @@ class LambdaFunction(BaseModel): 'Role': properties['Role'], 'Runtime': properties['Runtime'], } - optional_properties = 'Description MemorySize Publish Timeout VpcConfig'.split() + optional_properties = 'Description MemorySize Publish Timeout VpcConfig Environment'.split() # NOTE: Not doing `properties.get(k, DEFAULT)` to avoid duplicating the # default logic for prop in optional_properties: From c7a7cc9c57aea701503769a4ed7ae0355cb51184 Mon Sep 17 00:00:00 2001 From: Steve Pulec Date: Sat, 29 Dec 2018 05:42:16 -0500 Subject: [PATCH 075/175] Remvoe duplicate setenv in tox. 
--- tox.ini | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/tox.ini b/tox.ini index 868c46766..1fea4d81d 100644 --- a/tox.ini +++ b/tox.ini @@ -6,16 +6,13 @@ setenv = BOTO_CONFIG=/dev/null AWS_SECRET_ACCESS_KEY=foobar_secret AWS_ACCESS_KEY_ID=foobar_key + AWS_DEFAULT_REGION=us-east-1 deps = -r{toxinidir}/requirements.txt -r{toxinidir}/requirements-dev.txt commands = {envpython} setup.py test nosetests {posargs} -setenv = - AWS_ACCESS_KEY_ID=dummy_key - AWS_SECRET_ACCESS_KEY=dummy_secret - AWS_DEFAULT_REGION=us-east-1 [flake8] ignore = E128,E501 From 9f3ae31a53d64986c44869eb07a04e1828108be3 Mon Sep 17 00:00:00 2001 From: Steve Pulec Date: Sat, 29 Dec 2018 06:33:55 -0500 Subject: [PATCH 076/175] Fix bad merge. --- moto/iam/responses.py | 1 - moto/s3/exceptions.py | 6 ++++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/moto/iam/responses.py b/moto/iam/responses.py index 804386cc3..d0e749d57 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -1622,7 +1622,6 @@ UPDATE_SAML_PROVIDER_TEMPLATE = """ diff --git a/moto/s3/exceptions.py b/moto/s3/exceptions.py index fbac15c71..27c842111 100644 --- a/moto/s3/exceptions.py +++ b/moto/s3/exceptions.py @@ -179,7 +179,7 @@ class InvalidStorageClass(S3ClientError): "The storage class you specified is not valid", *args, **kwargs) - + class InvalidBucketName(S3ClientError): code = 400 @@ -187,8 +187,10 @@ class InvalidBucketName(S3ClientError): super(InvalidBucketName, self).__init__( "InvalidBucketName", "The specified bucket is not valid.", + *args, **kwargs + ) + - class DuplicateTagKeys(S3ClientError): code = 400 From 1f9ade61c01698b43e70247e0cf55f6b908bae74 Mon Sep 17 00:00:00 2001 From: Steve Pulec Date: Sat, 29 Dec 2018 06:47:16 -0500 Subject: [PATCH 077/175] Fix iam SAML tests. 
--- tests/test_iam/test_iam.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index 7434be754..2b5f16d77 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -911,20 +911,21 @@ def test_delete_saml_provider(): SAMLMetadataDocument='a' * 1024 ) response = conn.list_saml_providers() - print(response) len(response['SAMLProviderList']).should.equal(1) conn.delete_saml_provider( SAMLProviderArn=saml_provider_create['SAMLProviderArn'] ) response = conn.list_saml_providers() len(response['SAMLProviderList']).should.equal(0) + conn.create_user(UserName='testing') + cert_id = '123456789012345678901234' with assert_raises(ClientError) as ce: - client.delete_signing_certificate(UserName='testing', CertificateId=cert_id) + conn.delete_signing_certificate(UserName='testing', CertificateId=cert_id) assert ce.exception.response['Error']['Message'] == 'The Certificate with id {id} cannot be found.'.format( id=cert_id) # Verify that it's not in the list: - resp = client.list_signing_certificates(UserName='testing') + resp = conn.list_signing_certificates(UserName='testing') assert not resp['Certificates'] From c2a1f4eb144cd6149d14627ea559e3fa4e7b5ca5 Mon Sep 17 00:00:00 2001 From: Steve Pulec Date: Sat, 29 Dec 2018 07:07:29 -0500 Subject: [PATCH 078/175] Fix S3 bucket location test. 
--- tests/test_s3/test_s3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index 22d687248..7a53804ff 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -1022,7 +1022,7 @@ def test_bucket_location(): bucket.get_location().should.equal("us-west-2") -@mock_s3_deprecated +@mock_s3 def test_bucket_location_us_east_1(): cli = boto3.client('s3') bucket_name = 'mybucket' From 0b15bb13b6c4173bde85e8592689839f6f1c1154 Mon Sep 17 00:00:00 2001 From: Gary Donovan Date: Thu, 10 Jan 2019 21:39:12 +1100 Subject: [PATCH 079/175] Make EQ conditions work reliably in DynamoDB. The AWS API represents a set object as a list of values. Internally moto also represents a set as a list. This means that when we do value comparisons, the order of the values can cause a set equality test to fail. --- moto/dynamodb2/models.py | 16 +++----- .../test_dynamodb_table_without_range_key.py | 41 +++++++++++++++++++ 2 files changed, 47 insertions(+), 10 deletions(-) diff --git a/moto/dynamodb2/models.py b/moto/dynamodb2/models.py index 8187ceaf9..677bbfb07 100644 --- a/moto/dynamodb2/models.py +++ b/moto/dynamodb2/models.py @@ -66,6 +66,8 @@ class DynamoType(object): return int(self.value) except ValueError: return float(self.value) + elif self.is_set(): + return set(self.value) else: return self.value @@ -509,15 +511,12 @@ class Table(BaseModel): elif 'Value' in val and DynamoType(val['Value']).value != current_attr[key].value: raise ValueError("The conditional request failed") elif 'ComparisonOperator' in val: - comparison_func = get_comparison_func( - val['ComparisonOperator']) dynamo_types = [ DynamoType(ele) for ele in val.get("AttributeValueList", []) ] - for t in dynamo_types: - if not comparison_func(current_attr[key].value, t.value): - raise ValueError('The conditional request failed') + if not current_attr[key].compare(val['ComparisonOperator'], dynamo_types): + raise ValueError('The conditional 
request failed') if range_value: self.items[hash_value][range_value] = item else: @@ -946,15 +945,12 @@ class DynamoDBBackend(BaseBackend): elif 'Value' in val and DynamoType(val['Value']).value != item_attr[key].value: raise ValueError("The conditional request failed") elif 'ComparisonOperator' in val: - comparison_func = get_comparison_func( - val['ComparisonOperator']) dynamo_types = [ DynamoType(ele) for ele in val.get("AttributeValueList", []) ] - for t in dynamo_types: - if not comparison_func(item_attr[key].value, t.value): - raise ValueError('The conditional request failed') + if not item_attr[key].compare(val['ComparisonOperator'], dynamo_types): + raise ValueError('The conditional request failed') # Update does not fail on new items, so create one if item is None: diff --git a/tests/test_dynamodb2/test_dynamodb_table_without_range_key.py b/tests/test_dynamodb2/test_dynamodb_table_without_range_key.py index 15e5284b7..874804db0 100644 --- a/tests/test_dynamodb2/test_dynamodb_table_without_range_key.py +++ b/tests/test_dynamodb2/test_dynamodb_table_without_range_key.py @@ -750,6 +750,47 @@ def test_boto3_update_item_conditions_pass_because_expect_exists_by_compare_to_n returned_item = table.get_item(Key={'username': 'johndoe'}) assert dict(returned_item)['Item']['foo'].should.equal("baz") + +@mock_dynamodb2 +def test_boto3_update_settype_item_with_conditions(): + class OrderedSet(set): + """A set with predictable iteration order""" + def __init__(self, values): + super(OrderedSet, self).__init__(values) + self.__ordered_values = values + + def __iter__(self): + return iter(self.__ordered_values) + + table = _create_user_table() + table.put_item(Item={'username': 'johndoe'}) + table.update_item( + Key={'username': 'johndoe'}, + UpdateExpression='SET foo=:new_value', + ExpressionAttributeValues={ + ':new_value': OrderedSet(['hello', 'world']), + }, + ) + + table.update_item( + Key={'username': 'johndoe'}, + UpdateExpression='SET foo=:new_value', + 
ExpressionAttributeValues={ + ':new_value': set(['baz']), + }, + Expected={ + 'foo': { + 'ComparisonOperator': 'EQ', + 'AttributeValueList': [ + OrderedSet(['world', 'hello']), # Opposite order to original + ], + } + }, + ) + returned_item = table.get_item(Key={'username': 'johndoe'}) + assert dict(returned_item)['Item']['foo'].should.equal(set(['baz'])) + + @mock_dynamodb2 def test_boto3_put_item_conditions_pass(): table = _create_user_table() From 4207a8e182e8f38b975c93a45cf180aa399f43e9 Mon Sep 17 00:00:00 2001 From: John Corrales Date: Thu, 10 Jan 2019 21:33:15 -0800 Subject: [PATCH 080/175] Add stacksets (#3) Added most stack set responses --- IMPLEMENTATION_COVERAGE.md | 22 +- moto/cloudformation/models.py | 189 +++++++++- moto/cloudformation/responses.py | 325 +++++++++++++++++ moto/cloudformation/utils.py | 13 +- .../test_cloudformation_stack_crud_boto3.py | 337 ++++++++++++++++++ 5 files changed, 872 insertions(+), 14 deletions(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index 735af6002..61bfaf197 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -475,19 +475,19 @@ - [ ] continue_update_rollback - [X] create_change_set - [X] create_stack -- [ ] create_stack_instances -- [ ] create_stack_set +- [X] create_stack_instances +- [X] create_stack_set - [ ] delete_change_set - [X] delete_stack -- [ ] delete_stack_instances -- [ ] delete_stack_set +- [X] delete_stack_instances +- [X] delete_stack_set - [ ] describe_account_limits - [ ] describe_change_set - [ ] describe_stack_events -- [ ] describe_stack_instance +- [X] describe_stack_instance - [ ] describe_stack_resource - [ ] describe_stack_resources -- [ ] describe_stack_set +- [X] describe_stack_set - [ ] describe_stack_set_operation - [X] describe_stacks - [ ] estimate_template_cost @@ -498,18 +498,18 @@ - [ ] list_change_sets - [X] list_exports - [ ] list_imports -- [ ] list_stack_instances +- [X] list_stack_instances - [X] list_stack_resources - [ ] 
list_stack_set_operation_results -- [ ] list_stack_set_operations -- [ ] list_stack_sets +- [X] list_stack_set_operations +- [X] list_stack_sets - [X] list_stacks - [ ] set_stack_policy - [ ] signal_resource - [ ] stop_stack_set_operation - [X] update_stack -- [ ] update_stack_instances -- [ ] update_stack_set +- [X] update_stack_instances +- [X] update_stack_set - [ ] update_termination_protection - [ ] validate_template diff --git a/moto/cloudformation/models.py b/moto/cloudformation/models.py index c45c5d5fe..ed2ac1a11 100644 --- a/moto/cloudformation/models.py +++ b/moto/cloudformation/models.py @@ -1,5 +1,5 @@ from __future__ import unicode_literals -from datetime import datetime +from datetime import datetime, timedelta import json import yaml import uuid @@ -12,12 +12,137 @@ from .parsing import ResourceMap, OutputMap from .utils import ( generate_changeset_id, generate_stack_id, + generate_stackset_arn, + generate_stackset_id, yaml_tag_constructor, validate_template_cfn_lint, ) from .exceptions import ValidationError +class FakeStackSet(BaseModel): + + def __init__(self, stackset_id, name, template, region='us-east-1', status='ACTIVE', description=None, parameters=None, tags=None, admin_role=None, execution_role=None): + self.id = stackset_id + self.arn = generate_stackset_arn(stackset_id, region) + self.name = name + self.template = template + self.description = description + self.parameters = parameters + self.tags = tags + self.admin_role = admin_role + self.execution_role = execution_role + self.status = status + self.instances = FakeStackInstances(parameters, self.id, self.name) + self.operations = [] + + @property + def stack_instances(self): + return self.instances.stack_instances + + def _create_operation(self, operation_id, action, status): + operation = { + 'OperationId': str(operation_id), + 'Action': action, + 'Status': status, + 'CreationTimestamp': datetime.now(), + 'EndTimestamp': datetime.now() + timedelta(minutes=2), + } + + self.operations 
+= [operation] + return operation + + def delete(self): + self.status = 'DELETED' + + def update(self, template, description, parameters, tags, admin_role, + execution_role, accounts, regions, operation_id=None): + if not operation_id: + operation_id = uuid.uuid4() + + self.template = template if template else self.template + self.description = description if description is not None else self.description + self.parameters = parameters if parameters else self.parameters + self.tags = tags if tags else self.tags + self.admin_role = admin_role if admin_role else self.admin_role + self.execution_role = execution_role if execution_role else self.execution_role + + if accounts and regions: + self.update_instances(accounts, regions, self.parameters) + + operation = self._create_operation(operation_id=operation_id, action='UPDATE', status='SUCCEEDED') + return operation + + def create_stack_instances(self, accounts, regions, parameters, operation_id=None): + if not operation_id: + operation_id = uuid.uuid4() + if not parameters: + parameters = self.parameters + + self.instances.create_instances(accounts, regions, parameters, operation_id) + self._create_operation(operation_id=operation_id, action='CREATE', status='SUCCEEDED') + + def delete_stack_instances(self, accounts, regions, operation_id=None): + if not operation_id: + operation_id = uuid.uuid4() + + self.instances.delete(accounts, regions) + + self._create_operation(operation_id=operation_id, action='DELETE', status='SUCCEEDED') + + def update_instances(self, accounts, regions, parameters, operation_id=None): + if not operation_id: + operation_id = uuid.uuid4() + + self.instances.update(accounts, regions, parameters) + operation = self._create_operation(operation_id=operation_id, action='UPDATE', status='SUCCEEDED') + return operation + + +class FakeStackInstances(BaseModel): + def __init__(self, parameters, stackset_id, stackset_name): + self.parameters = parameters if parameters else {} + self.stackset_id = 
stackset_id + self.stack_name = "StackSet-{}".format(stackset_id) + self.stackset_name = stackset_name + self.stack_instances = [] + + def create_instances(self, accounts, regions, parameters, operation_id): + new_instances = [] + for region in regions: + for account in accounts: + instance = { + 'StackId': generate_stack_id(self.stack_name, region, account), + 'StackSetId': self.stackset_id, + 'Region': region, + 'Account': account, + 'Status': "CURRENT", + 'ParameterOverrides': parameters if parameters else [], + } + new_instances.append(instance) + self.stack_instances += new_instances + return new_instances + + def update(self, accounts, regions, parameters): + for account in accounts: + for region in regions: + instance = self.get_instance(account, region) + if parameters: + instance['ParameterOverrides'] = parameters + else: + instance['ParameterOverrides'] = [] + + def delete(self, accounts, regions): + for i, instance in enumerate(self.stack_instances): + if instance['Region'] in regions and instance['Account'] in accounts: + self.stack_instances.pop(i) + + def get_instance(self, account, region): + for i, instance in enumerate(self.stack_instances): + if instance['Region'] == region and instance['Account'] == account: + return self.stack_instances[i] + + class FakeStack(BaseModel): def __init__(self, stack_id, name, template, parameters, region_name, notification_arns=None, tags=None, role_arn=None, cross_stack_resources=None, create_change_set=False): @@ -146,10 +271,72 @@ class CloudFormationBackend(BaseBackend): def __init__(self): self.stacks = OrderedDict() + self.stacksets = OrderedDict() self.deleted_stacks = {} self.exports = OrderedDict() self.change_sets = OrderedDict() + def create_stack_set(self, name, template, parameters, tags=None, description=None, region='us-east-1', admin_role=None, execution_role=None): + stackset_id = generate_stackset_id(name) + new_stackset = FakeStackSet( + stackset_id=stackset_id, + name=name, + template=template, + 
parameters=parameters, + description=description, + tags=tags, + admin_role=admin_role, + execution_role=execution_role, + ) + self.stacksets[stackset_id] = new_stackset + return new_stackset + + def get_stack_set(self, name): + stacksets = self.stacksets.keys() + for stackset in stacksets: + if self.stacksets[stackset].name == name: + return self.stacksets[stackset] + raise ValidationError(name) + + def delete_stack_set(self, name): + stacksets = self.stacksets.keys() + for stackset in stacksets: + if self.stacksets[stackset].name == name: + self.stacksets[stackset].delete() + + def create_stack_instances(self, stackset_name, accounts, regions, parameters, operation_id=None): + stackset = self.get_stack_set(stackset_name) + + stackset.create_stack_instances( + accounts=accounts, + regions=regions, + parameters=parameters, + operation_id=operation_id, + ) + return stackset + + def update_stack_set(self, stackset_name, template=None, description=None, + parameters=None, tags=None, admin_role=None, execution_role=None, + accounts=None, regions=None, operation_id=None): + stackset = self.get_stack_set(stackset_name) + update = stackset.update( + template=template, + description=description, + parameters=parameters, + tags=tags, + admin_role=admin_role, + execution_role=execution_role, + accounts=accounts, + regions=regions, + operation_id=operation_id + ) + return update + + def delete_stack_instances(self, stackset_name, accounts, regions, operation_id=None): + stackset = self.get_stack_set(stackset_name) + stackset.delete_stack_instances(accounts, regions, operation_id) + return stackset + def create_stack(self, name, template, parameters, region_name, notification_arns=None, tags=None, role_arn=None, create_change_set=False): stack_id = generate_stack_id(name) new_stack = FakeStack( diff --git a/moto/cloudformation/responses.py b/moto/cloudformation/responses.py index 9e67e931a..c85c86989 100644 --- a/moto/cloudformation/responses.py +++ 
b/moto/cloudformation/responses.py @@ -312,6 +312,151 @@ class CloudFormationResponse(BaseResponse): template = self.response_template(VALIDATE_STACK_RESPONSE_TEMPLATE) return template.render(description=description) + def create_stack_set(self): + stackset_name = self._get_param('StackSetName') + stack_body = self._get_param('TemplateBody') + template_url = self._get_param('TemplateURL') + # role_arn = self._get_param('RoleARN') + parameters_list = self._get_list_prefix("Parameters.member") + tags = dict((item['key'], item['value']) + for item in self._get_list_prefix("Tags.member")) + + # Copy-Pasta - Hack dict-comprehension + parameters = dict([ + (parameter['parameter_key'], parameter['parameter_value']) + for parameter + in parameters_list + ]) + if template_url: + stack_body = self._get_stack_from_s3_url(template_url) + + stackset = self.cloudformation_backend.create_stack_set( + name=stackset_name, + template=stack_body, + parameters=parameters, + tags=tags, + # role_arn=role_arn, + ) + if self.request_json: + return json.dumps({ + 'CreateStackSetResponse': { + 'CreateStackSetResult': { + 'StackSetId': stackset.stackset_id, + } + } + }) + else: + template = self.response_template(CREATE_STACK_SET_RESPONSE_TEMPLATE) + return template.render(stackset=stackset) + + def create_stack_instances(self): + stackset_name = self._get_param('StackSetName') + accounts = self._get_multi_param('Accounts.member') + regions = self._get_multi_param('Regions.member') + parameters = self._get_multi_param('ParameterOverrides.member') + self.cloudformation_backend.create_stack_instances(stackset_name, accounts, regions, parameters) + template = self.response_template(CREATE_STACK_INSTANCES_TEMPLATE) + return template.render() + + def delete_stack_set(self): + stackset_name = self._get_param('StackSetName') + self.cloudformation_backend.delete_stack_set(stackset_name) + template = self.response_template(DELETE_STACK_SET_RESPONSE_TEMPLATE) + return template.render() + + def 
delete_stack_instances(self): + stackset_name = self._get_param('StackSetName') + accounts = self._get_multi_param('Accounts.member') + regions = self._get_multi_param('Regions.member') + self.cloudformation_backend.delete_stack_instances(stackset_name, accounts, regions) + + template = self.response_template(DELETE_STACK_INSTANCES_TEMPLATE) + return template.render() + + def describe_stack_set(self): + stackset_name = self._get_param('StackSetName') + stackset = self.cloudformation_backend.get_stack_set(stackset_name) + + if not stackset.admin_role: + stackset.admin_role = 'arn:aws:iam::123456789012:role/AWSCloudFormationStackSetAdministrationRole' + if not stackset.execution_role: + stackset.execution_role = 'AWSCloudFormationStackSetExecutionRole' + + template = self.response_template(DESCRIBE_STACK_SET_RESPONSE_TEMPLATE) + return template.render(stackset=stackset) + + def describe_stack_instance(self): + stackset_name = self._get_param('StackSetName') + account = self._get_param('StackInstanceAccount') + region = self._get_param('StackInstanceRegion') + + instance = self.cloudformation_backend.get_stack_set(stackset_name).instances.get_instance(account, region) + template = self.response_template(DESCRIBE_STACK_INSTANCE_TEMPLATE) + rendered = template.render(instance=instance) + return rendered + + def list_stack_sets(self): + stacksets = self.cloudformation_backend.stacksets + template = self.response_template(LIST_STACK_SETS_TEMPLATE) + return template.render(stacksets=stacksets) + + def list_stack_instances(self): + stackset_name = self._get_param('StackSetName') + stackset = self.cloudformation_backend.get_stack_set(stackset_name) + template = self.response_template(LIST_STACK_INSTANCES_TEMPLATE) + return template.render(stackset=stackset) + + def list_stack_set_operations(self): + stackset_name = self._get_param('StackSetName') + stackset = self.cloudformation_backend.get_stack_set(stackset_name) + template = 
self.response_template(LIST_STACK_SET_OPERATIONS_RESPONSE_TEMPLATE) + return template.render(stackset=stackset) + + def update_stack_set(self): + stackset_name = self._get_param('StackSetName') + operation_id = self._get_param('OperationId') + description = self._get_param('Description') + execution_role = self._get_param('ExecutionRoleName') + admin_role = self._get_param('AdministrationRoleARN') + accounts = self._get_multi_param('Accounts.member') + regions = self._get_multi_param('Regions.member') + template_body = self._get_param('TemplateBody') + template_url = self._get_param('TemplateURL') + if template_url: + template_body = self._get_stack_from_s3_url(template_url) + tags = dict((item['key'], item['value']) + for item in self._get_list_prefix("Tags.member")) + parameters_list = self._get_list_prefix("Parameters.member") + parameters = dict([ + (parameter['parameter_key'], parameter['parameter_value']) + for parameter + in parameters_list + ]) + operation = self.cloudformation_backend.update_stack_set( + stackset_name=stackset_name, + template=template_body, + description=description, + parameters=parameters, + tags=tags, + admin_role=admin_role, + execution_role=execution_role, + accounts=accounts, + regions=regions, + operation_id=operation_id + ) + + template = self.response_template(UPDATE_STACK_SET_RESPONSE_TEMPLATE) + return template.render(operation=operation) + + def update_stack_instances(self): + stackset_name = self._get_param('StackSetName') + accounts = self._get_multi_param('Accounts.member') + regions = self._get_multi_param('Regions.member') + parameters = self._get_multi_param('ParameterOverrides.member') + operation = self.cloudformation_backend.get_stack_set(stackset_name).update_instances(accounts, regions, parameters) + template = self.response_template(UPDATE_STACK_INSTANCES_RESPONSE_TEMPLATE) + return template.render(operation=operation) + VALIDATE_STACK_RESPONSE_TEMPLATE = """ @@ -553,3 +698,183 @@ LIST_EXPORTS_RESPONSE = """ + + {{ 
stackset.stackset_id }} + + + f457258c-391d-41d1-861f-example + + +""" + +DESCRIBE_STACK_SET_RESPONSE_TEMPLATE = """ + + + + {{ stackset.arn }} + {{ stackset.execution_role }} + {{ stackset.admin_role }} + {{ stackset.id }} + {{ stackset.template }} + {{ stackset.name }} + + {% for param_name, param_value in stackset.parameters.items() %} + + {{ param_name }} + {{ param_value }} + + {% endfor %} + + + {% for tag_key, tag_value in stackset.tags.items() %} + + {{ tag_key }} + {{ tag_value }} + + {% endfor %} + + {{ stackset.status }} + + + + d8b64e11-5332-46e1-9603-example + +""" + +DELETE_STACK_SET_RESPONSE_TEMPLATE = """ + + + c35ec2d0-d69f-4c4d-9bd7-example + +""" + +CREATE_STACK_INSTANCES_TEMPLATE = """ + + 1459ad6d-63cc-4c96-a73e-example + + + 6b29f7e3-69be-4d32-b374-example + + +""" + +LIST_STACK_INSTANCES_TEMPLATE = """ + + + {% for instance in stackset.stack_instances %} + + {{ instance.StackId }} + {{ instance.StackSetId }} + {{ instance.Region }} + {{ instance.Account }} + {{ instance.Status }} + + {% endfor %} + + + + 83c27e73-b498-410f-993c-example + + +""" + +DELETE_STACK_INSTANCES_TEMPLATE = """ + + d76a070d-279a-45f3-b9b4-example + + + e5325090-66f6-4ecd-a531-example + + +""" + +DESCRIBE_STACK_INSTANCE_TEMPLATE = """ + + + {{ instance.StackId }} + {{ instance.StackSetId }} + {% if instance.ParameterOverrides %} + + {% for override in instance.ParameterOverrides %} + {% if override['ParameterKey'] or override['ParameterValue'] %} + + {{ override.ParameterKey }} + false + {{ override.ParameterValue }} + + {% endif %} + {% endfor %} + + {% else %} + + {% endif %} + {{ instance.Region }} + {{ instance.Account }} + {{ instance.Status }} + + + + c6c7be10-0343-4319-8a25-example + + +""" + +LIST_STACK_SETS_TEMPLATE = """ + + + {% for key, value in stacksets.items() %} + + {{ value.name }} + {{ value.id }} + {{ value.status }} + + {% endfor %} + + + + 4dcacb73-841e-4ed8-b335-example + + +""" + +UPDATE_STACK_INSTANCES_RESPONSE_TEMPLATE = """ + + {{ operation }} 
+ + + bdbf8e94-19b6-4ce4-af85-example + + +""" + +UPDATE_STACK_SET_RESPONSE_TEMPLATE = """ + + {{ operation.OperationId }} + + + adac907b-17e3-43e6-a254-example + + +""" + +LIST_STACK_SET_OPERATIONS_RESPONSE_TEMPLATE = """ + + + {% for operation in stackset.operations %} + + {{ operation.CreationTimestamp }} + {{ operation.OperationId }} + {{ operation.Action }} + {{ operation.EndTimestamp }} + {{ operation.Status }} + + {% endfor %} + + + + 65b9d9be-08bb-4a43-9a21-example + + +""" diff --git a/moto/cloudformation/utils.py b/moto/cloudformation/utils.py index f963ce7c8..de75d2c15 100644 --- a/moto/cloudformation/utils.py +++ b/moto/cloudformation/utils.py @@ -8,9 +8,9 @@ import os from cfnlint import decode, core -def generate_stack_id(stack_name): +def generate_stack_id(stack_name, region="us-east-1", account="123456789"): random_id = uuid.uuid4() - return "arn:aws:cloudformation:us-east-1:123456789:stack/{0}/{1}".format(stack_name, random_id) + return "arn:aws:cloudformation:{}:{}:stack/{}/{}".format(region, account, stack_name, random_id) def generate_changeset_id(changeset_name, region_name): @@ -18,6 +18,15 @@ def generate_changeset_id(changeset_name, region_name): return 'arn:aws:cloudformation:{0}:123456789:changeSet/{1}/{2}'.format(region_name, changeset_name, random_id) +def generate_stackset_id(stackset_name): + random_id = uuid.uuid4() + return '{}:{}'.format(stackset_name, random_id) + + +def generate_stackset_arn(stackset_id, region_name): + return 'arn:aws:cloudformation:{}:123456789012:stackset/{}'.format(region_name, stackset_id) + + def random_suffix(): size = 12 chars = list(range(10)) + ['A-Z'] diff --git a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py index 4585da056..dd8d6cf6c 100644 --- a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py +++ b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py @@ -184,6 +184,343 @@ 
dummy_import_template_json = json.dumps(dummy_import_template) dummy_redrive_template_json = json.dumps(dummy_redrive_template) +@mock_cloudformation +def test_boto3_describe_stack_instances(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-2'], + ) + usw2_instance = cf_conn.describe_stack_instance( + StackSetName="test_stack_set", + StackInstanceAccount='123456789012', + StackInstanceRegion='us-west-2', + ) + use1_instance = cf_conn.describe_stack_instance( + StackSetName="test_stack_set", + StackInstanceAccount='123456789012', + StackInstanceRegion='us-east-1', + ) + + usw2_instance['StackInstance'].should.have.key('Region').which.should.equal('us-west-2') + usw2_instance['StackInstance'].should.have.key('Account').which.should.equal('123456789012') + use1_instance['StackInstance'].should.have.key('Region').which.should.equal('us-east-1') + use1_instance['StackInstance'].should.have.key('Account').which.should.equal('123456789012') + + +@mock_cloudformation +def test_boto3_list_stacksets_length(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.create_stack_set( + StackSetName="test_stack_set2", + TemplateBody=dummy_template_yaml, + ) + stacksets = cf_conn.list_stack_sets() + stacksets.should.have.length_of(2) + + +@mock_cloudformation +def test_boto3_list_stacksets_contents(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + stacksets = cf_conn.list_stack_sets() + stacksets['Summaries'][0].should.have.key('StackSetName').which.should.equal('test_stack_set') + 
stacksets['Summaries'][0].should.have.key('Status').which.should.equal('ACTIVE') + + +@mock_cloudformation +def test_boto3_update_stack_instances(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + param = [ + {'ParameterKey': 'SomeParam', 'ParameterValue': 'StackSetValue'}, + {'ParameterKey': 'AnotherParam', 'ParameterValue': 'StackSetValue2'}, + ] + param_overrides = [ + {'ParameterKey': 'SomeParam', 'ParameterValue': 'OverrideValue'}, + {'ParameterKey': 'AnotherParam', 'ParameterValue': 'OverrideValue2'} + ] + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_yaml_with_ref, + Parameters=param, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-1', 'us-west-2'], + ) + cf_conn.update_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-west-1', 'us-west-2'], + ParameterOverrides=param_overrides, + ) + usw2_instance = cf_conn.describe_stack_instance( + StackSetName="test_stack_set", + StackInstanceAccount='123456789012', + StackInstanceRegion='us-west-2', + ) + usw1_instance = cf_conn.describe_stack_instance( + StackSetName="test_stack_set", + StackInstanceAccount='123456789012', + StackInstanceRegion='us-west-1', + ) + use1_instance = cf_conn.describe_stack_instance( + StackSetName="test_stack_set", + StackInstanceAccount='123456789012', + StackInstanceRegion='us-east-1', + ) + + usw2_instance['StackInstance']['ParameterOverrides'][0]['ParameterKey'].should.equal(param_overrides[0]['ParameterKey']) + usw2_instance['StackInstance']['ParameterOverrides'][0]['ParameterValue'].should.equal(param_overrides[0]['ParameterValue']) + usw2_instance['StackInstance']['ParameterOverrides'][1]['ParameterKey'].should.equal(param_overrides[1]['ParameterKey']) + usw2_instance['StackInstance']['ParameterOverrides'][1]['ParameterValue'].should.equal(param_overrides[1]['ParameterValue']) + + 
usw1_instance['StackInstance']['ParameterOverrides'][0]['ParameterKey'].should.equal(param_overrides[0]['ParameterKey']) + usw1_instance['StackInstance']['ParameterOverrides'][0]['ParameterValue'].should.equal(param_overrides[0]['ParameterValue']) + usw1_instance['StackInstance']['ParameterOverrides'][1]['ParameterKey'].should.equal(param_overrides[1]['ParameterKey']) + usw1_instance['StackInstance']['ParameterOverrides'][1]['ParameterValue'].should.equal(param_overrides[1]['ParameterValue']) + + use1_instance['StackInstance']['ParameterOverrides'].should.be.empty + + +@mock_cloudformation +def test_boto3_delete_stack_instances(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-2'], + ) + + cf_conn.delete_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1'], + RetainStacks=False, + ) + + cf_conn.list_stack_instances(StackSetName="test_stack_set")['Summaries'].should.have.length_of(1) + cf_conn.list_stack_instances(StackSetName="test_stack_set")['Summaries'][0]['Region'].should.equal( + 'us-west-2') + + +@mock_cloudformation +def test_boto3_create_stack_instances(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-2'], + ) + + cf_conn.list_stack_instances(StackSetName="test_stack_set")['Summaries'].should.have.length_of(2) + cf_conn.list_stack_instances(StackSetName="test_stack_set")['Summaries'][0]['Account'].should.equal( + '123456789012') + + +@mock_cloudformation +def 
test_boto3_create_stack_instances_with_param_overrides(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + param = [ + {'ParameterKey': 'TagDescription', 'ParameterValue': 'StackSetValue'}, + {'ParameterKey': 'TagName', 'ParameterValue': 'StackSetValue2'}, + ] + param_overrides = [ + {'ParameterKey': 'TagDescription', 'ParameterValue': 'OverrideValue'}, + {'ParameterKey': 'TagName', 'ParameterValue': 'OverrideValue2'} + ] + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_yaml_with_ref, + Parameters=param, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-2'], + ParameterOverrides=param_overrides, + ) + usw2_instance = cf_conn.describe_stack_instance( + StackSetName="test_stack_set", + StackInstanceAccount='123456789012', + StackInstanceRegion='us-west-2', + ) + + usw2_instance['StackInstance']['ParameterOverrides'][0]['ParameterKey'].should.equal(param_overrides[0]['ParameterKey']) + usw2_instance['StackInstance']['ParameterOverrides'][1]['ParameterKey'].should.equal(param_overrides[1]['ParameterKey']) + usw2_instance['StackInstance']['ParameterOverrides'][0]['ParameterValue'].should.equal(param_overrides[0]['ParameterValue']) + usw2_instance['StackInstance']['ParameterOverrides'][1]['ParameterValue'].should.equal(param_overrides[1]['ParameterValue']) + + +@mock_cloudformation +def test_update_stack_set(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + param = [ + {'ParameterKey': 'TagDescription', 'ParameterValue': 'StackSetValue'}, + {'ParameterKey': 'TagName', 'ParameterValue': 'StackSetValue2'}, + ] + param_overrides = [ + {'ParameterKey': 'TagDescription', 'ParameterValue': 'OverrideValue'}, + {'ParameterKey': 'TagName', 'ParameterValue': 'OverrideValue2'} + ] + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_yaml_with_ref, + Parameters=param, + ) + 
cf_conn.update_stack_set( + StackSetName='test_stack_set', + TemplateBody=dummy_template_yaml_with_ref, + Parameters=param_overrides, + ) + stackset = cf_conn.describe_stack_set(StackSetName='test_stack_set') + + stackset['StackSet']['Parameters'][0]['ParameterValue'].should.equal(param_overrides[0]['ParameterValue']) + stackset['StackSet']['Parameters'][1]['ParameterValue'].should.equal(param_overrides[1]['ParameterValue']) + stackset['StackSet']['Parameters'][0]['ParameterKey'].should.equal(param_overrides[0]['ParameterKey']) + stackset['StackSet']['Parameters'][1]['ParameterKey'].should.equal(param_overrides[1]['ParameterKey']) + + +@mock_cloudformation +def test_boto3_list_stack_set_operations(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-2'], + ) + cf_conn.update_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-2'], + ) + + list_operation = cf_conn.list_stack_set_operations(StackSetName="test_stack_set") + list_operation['Summaries'].should.have.length_of(2) + list_operation['Summaries'][-1]['Action'].should.equal('UPDATE') + + +@mock_cloudformation +def test_boto3_delete_stack_set(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.delete_stack_set(StackSetName='test_stack_set') + + cf_conn.describe_stack_set(StackSetName="test_stack_set")['StackSet']['Status'].should.equal( + 'DELETED') + + +@mock_cloudformation +def test_boto3_create_stack_set(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + + 
cf_conn.describe_stack_set(StackSetName="test_stack_set")['StackSet']['TemplateBody'].should.equal( + dummy_template_json) + + +@mock_cloudformation +def test_boto3_create_stack_set_with_yaml(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_yaml, + ) + + cf_conn.describe_stack_set(StackSetName="test_stack_set")['StackSet']['TemplateBody'].should.equal( + dummy_template_yaml) + + +@mock_cloudformation +@mock_s3 +def test_create_stack_set_from_s3_url(): + s3 = boto3.client('s3') + s3_conn = boto3.resource('s3') + bucket = s3_conn.create_bucket(Bucket="foobar") + + key = s3_conn.Object( + 'foobar', 'template-key').put(Body=dummy_template_json) + key_url = s3.generate_presigned_url( + ClientMethod='get_object', + Params={ + 'Bucket': 'foobar', + 'Key': 'template-key' + } + ) + + cf_conn = boto3.client('cloudformation', region_name='us-west-1') + cf_conn.create_stack_set( + StackSetName='stack_from_url', + TemplateURL=key_url, + ) + cf_conn.describe_stack_set(StackSetName="stack_from_url")['StackSet']['TemplateBody'].should.equal( + dummy_template_json) + + +@mock_cloudformation +def test_boto3_create_stack_set_with_ref_yaml(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + params = [ + {'ParameterKey': 'TagDescription', 'ParameterValue': 'desc_ref'}, + {'ParameterKey': 'TagName', 'ParameterValue': 'name_ref'}, + ] + cf_conn.create_stack_set( + StackSetName="test_stack", + TemplateBody=dummy_template_yaml_with_ref, + Parameters=params + ) + + cf_conn.describe_stack_set(StackSetName="test_stack")['StackSet']['TemplateBody'].should.equal( + dummy_template_yaml_with_ref) + + +@mock_cloudformation +def test_boto3_describe_stack_set_params(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + params = [ + {'ParameterKey': 'TagDescription', 'ParameterValue': 'desc_ref'}, + {'ParameterKey': 'TagName', 'ParameterValue': 
'name_ref'}, + ] + cf_conn.create_stack_set( + StackSetName="test_stack", + TemplateBody=dummy_template_yaml_with_ref, + Parameters=params + ) + + cf_conn.describe_stack_set(StackSetName="test_stack")['StackSet']['Parameters'].should.equal( + params) + @mock_cloudformation def test_boto3_create_stack(): From 5fb43ee7b6630518cfa14b96a06510b46ebc1c9e Mon Sep 17 00:00:00 2001 From: John Corrales Date: Mon, 14 Jan 2019 22:01:53 -0800 Subject: [PATCH 081/175] Operations (#4) Added stop, list operation results, and describe operation --- IMPLEMENTATION_COVERAGE.md | 8 +- moto/cloudformation/models.py | 38 ++++++--- moto/cloudformation/responses.py | 77 ++++++++++++++++++ .../test_cloudformation_stack_crud_boto3.py | 80 +++++++++++++++++++ 4 files changed, 189 insertions(+), 14 deletions(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index 61bfaf197..98e426e3c 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -470,7 +470,7 @@ - [ ] upgrade_applied_schema - [ ] upgrade_published_schema -## cloudformation - 21% implemented +## cloudformation - 56% implemented - [ ] cancel_update_stack - [ ] continue_update_rollback - [X] create_change_set @@ -488,7 +488,7 @@ - [ ] describe_stack_resource - [ ] describe_stack_resources - [X] describe_stack_set -- [ ] describe_stack_set_operation +- [X] describe_stack_set_operation - [X] describe_stacks - [ ] estimate_template_cost - [X] execute_change_set @@ -500,13 +500,13 @@ - [ ] list_imports - [X] list_stack_instances - [X] list_stack_resources -- [ ] list_stack_set_operation_results +- [X] list_stack_set_operation_results - [X] list_stack_set_operations - [X] list_stack_sets - [X] list_stacks - [ ] set_stack_policy - [ ] signal_resource -- [ ] stop_stack_set_operation +- [X] stop_stack_set_operation - [X] update_stack - [X] update_stack_instances - [X] update_stack_set diff --git a/moto/cloudformation/models.py b/moto/cloudformation/models.py index ed2ac1a11..bbb1a0fd2 100644 --- 
a/moto/cloudformation/models.py +++ b/moto/cloudformation/models.py @@ -22,7 +22,10 @@ from .exceptions import ValidationError class FakeStackSet(BaseModel): - def __init__(self, stackset_id, name, template, region='us-east-1', status='ACTIVE', description=None, parameters=None, tags=None, admin_role=None, execution_role=None): + def __init__(self, stackset_id, name, template, region='us-east-1', + status='ACTIVE', description=None, parameters=None, tags=None, + admin_role='AWSCloudFormationStackSetAdministrationRole', + execution_role='AWSCloudFormationStackSetExecutionRole'): self.id = stackset_id self.arn = generate_stackset_arn(stackset_id, region) self.name = name @@ -34,24 +37,33 @@ class FakeStackSet(BaseModel): self.execution_role = execution_role self.status = status self.instances = FakeStackInstances(parameters, self.id, self.name) + self.stack_instances = self.instances.stack_instances self.operations = [] - @property - def stack_instances(self): - return self.instances.stack_instances - - def _create_operation(self, operation_id, action, status): + def _create_operation(self, operation_id, action, status, accounts=[], regions=[]): operation = { 'OperationId': str(operation_id), 'Action': action, 'Status': status, 'CreationTimestamp': datetime.now(), 'EndTimestamp': datetime.now() + timedelta(minutes=2), + 'Instances': [{account: region} for account in accounts for region in regions], } self.operations += [operation] return operation + def get_operation(self, operation_id): + for operation in self.operations: + if operation_id == operation['OperationId']: + return operation + raise ValidationError(operation_id) + + def update_operation(self, operation_id, status): + operation = self.get_operation(operation_id) + operation['Status'] = status + return operation_id + def delete(self): self.status = 'DELETED' @@ -70,7 +82,9 @@ class FakeStackSet(BaseModel): if accounts and regions: self.update_instances(accounts, regions, self.parameters) - operation = 
self._create_operation(operation_id=operation_id, action='UPDATE', status='SUCCEEDED') + operation = self._create_operation(operation_id=operation_id, + action='UPDATE', status='SUCCEEDED', accounts=accounts, + regions=regions) return operation def create_stack_instances(self, accounts, regions, parameters, operation_id=None): @@ -80,7 +94,8 @@ class FakeStackSet(BaseModel): parameters = self.parameters self.instances.create_instances(accounts, regions, parameters, operation_id) - self._create_operation(operation_id=operation_id, action='CREATE', status='SUCCEEDED') + self._create_operation(operation_id=operation_id, action='CREATE', + status='SUCCEEDED', accounts=accounts, regions=regions) def delete_stack_instances(self, accounts, regions, operation_id=None): if not operation_id: @@ -88,14 +103,17 @@ class FakeStackSet(BaseModel): self.instances.delete(accounts, regions) - self._create_operation(operation_id=operation_id, action='DELETE', status='SUCCEEDED') + self._create_operation(operation_id=operation_id, action='DELETE', + status='SUCCEEDED', accounts=accounts, regions=regions) def update_instances(self, accounts, regions, parameters, operation_id=None): if not operation_id: operation_id = uuid.uuid4() self.instances.update(accounts, regions, parameters) - operation = self._create_operation(operation_id=operation_id, action='UPDATE', status='SUCCEEDED') + operation = self._create_operation(operation_id=operation_id, + action='UPDATE', status='SUCCEEDED', accounts=accounts, + regions=regions) return operation diff --git a/moto/cloudformation/responses.py b/moto/cloudformation/responses.py index c85c86989..86eb3df0a 100644 --- a/moto/cloudformation/responses.py +++ b/moto/cloudformation/responses.py @@ -412,6 +412,30 @@ class CloudFormationResponse(BaseResponse): template = self.response_template(LIST_STACK_SET_OPERATIONS_RESPONSE_TEMPLATE) return template.render(stackset=stackset) + def stop_stack_set_operation(self): + stackset_name = 
self._get_param('StackSetName') + operation_id = self._get_param('OperationId') + stackset = self.cloudformation_backend.get_stack_set(stackset_name) + stackset.update_operation(operation_id, 'STOPPED') + template = self.response_template(STOP_STACK_SET_OPERATION_RESPONSE_TEMPLATE) + return template.render() + + def describe_stack_set_operation(self): + stackset_name = self._get_param('StackSetName') + operation_id = self._get_param('OperationId') + stackset = self.cloudformation_backend.get_stack_set(stackset_name) + operation = stackset.get_operation(operation_id) + template = self.response_template(DESCRIBE_STACKSET_OPERATION_RESPONSE_TEMPLATE) + return template.render(stackset=stackset, operation=operation) + + def list_stack_set_operation_results(self): + stackset_name = self._get_param('StackSetName') + operation_id = self._get_param('OperationId') + stackset = self.cloudformation_backend.get_stack_set(stackset_name) + operation = stackset.get_operation(operation_id) + template = self.response_template(LIST_STACK_SET_OPERATION_RESULTS_RESPONSE_TEMPLATE) + return template.render(operation=operation) + def update_stack_set(self): stackset_name = self._get_param('StackSetName') operation_id = self._get_param('OperationId') @@ -878,3 +902,56 @@ LIST_STACK_SET_OPERATIONS_RESPONSE_TEMPLATE = """ """ + +STOP_STACK_SET_OPERATION_RESPONSE_TEMPLATE = """ + + + 2188554a-07c6-4396-b2c5-example + +""" + +DESCRIBE_STACKSET_OPERATION_RESPONSE_TEMPLATE = """ + + + {{ stackset.execution_role }} + arn:aws:iam::123456789012:role/{{ stackset.admin_role }} + {{ stackset.id }} + {{ operation.CreationTimestamp }} + {{ operation.OperationId }} + {{ operation.Action }} + + + + {{ operation.EndTimestamp }} + {{ operation.Status }} + + + + 2edc27b6-9ce2-486a-a192-example + + +""" + +LIST_STACK_SET_OPERATION_RESULTS_RESPONSE_TEMPLATE = """ + + + {% for instance in operation.Instances %} + {% for account, region in instance.items() %} + + + Function not found: 
arn:aws:lambda:us-west-2:123456789012:function:AWSCloudFormationStackSetAccountGate + SKIPPED + + {{ region }} + {{ account }} + {{ operation.Status }} + + {% endfor %} + {% endfor %} + + + + ac05a9ce-5f98-4197-a29b-example + + +""" diff --git a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py index dd8d6cf6c..eb7f6bcc5 100644 --- a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py +++ b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py @@ -240,6 +240,86 @@ def test_boto3_list_stacksets_contents(): stacksets['Summaries'][0].should.have.key('Status').which.should.equal('ACTIVE') +@mock_cloudformation +def test_boto3_stop_stack_set_operation(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-1', 'us-west-2'], + ) + operation_id = cf_conn.list_stack_set_operations( + StackSetName="test_stack_set")['Summaries'][-1]['OperationId'] + cf_conn.stop_stack_set_operation( + StackSetName="test_stack_set", + OperationId=operation_id + ) + list_operation = cf_conn.list_stack_set_operations( + StackSetName="test_stack_set" + ) + list_operation['Summaries'][-1]['Status'].should.equal('STOPPED') + + +@mock_cloudformation +def test_boto3_describe_stack_set_operation(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-1', 'us-west-2'], + ) + operation_id = cf_conn.list_stack_set_operations( + StackSetName="test_stack_set")['Summaries'][-1]['OperationId'] + 
cf_conn.stop_stack_set_operation( + StackSetName="test_stack_set", + OperationId=operation_id + ) + response = cf_conn.describe_stack_set_operation( + StackSetName="test_stack_set", + OperationId=operation_id, + ) + + response['StackSetOperation']['Status'].should.equal('STOPPED') + response['StackSetOperation']['Action'].should.equal('CREATE') + + +@mock_cloudformation +def test_boto3_list_stack_set_operation_results(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_stack_set( + StackSetName="test_stack_set", + TemplateBody=dummy_template_json, + ) + cf_conn.create_stack_instances( + StackSetName="test_stack_set", + Accounts=['123456789012'], + Regions=['us-east-1', 'us-west-1', 'us-west-2'], + ) + operation_id = cf_conn.list_stack_set_operations( + StackSetName="test_stack_set")['Summaries'][-1]['OperationId'] + + cf_conn.stop_stack_set_operation( + StackSetName="test_stack_set", + OperationId=operation_id + ) + response = cf_conn.list_stack_set_operation_results( + StackSetName="test_stack_set", + OperationId=operation_id, + ) + + response['Summaries'].should.have.length_of(3) + response['Summaries'][0].should.have.key('Account').which.should.equal('123456789012') + response['Summaries'][1].should.have.key('Status').which.should.equal('STOPPED') + + @mock_cloudformation def test_boto3_update_stack_instances(): cf_conn = boto3.client('cloudformation', region_name='us-east-1') From 0ab5edc962f6c71b233486cd4ff80d8a3137dde3 Mon Sep 17 00:00:00 2001 From: John Corrales Date: Tue, 15 Jan 2019 07:20:13 -0800 Subject: [PATCH 082/175] return delete_instance operation --- moto/cloudformation/models.py | 3 ++- moto/cloudformation/responses.py | 6 +++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/moto/cloudformation/models.py b/moto/cloudformation/models.py index bbb1a0fd2..11b97aa34 100644 --- a/moto/cloudformation/models.py +++ b/moto/cloudformation/models.py @@ -103,8 +103,9 @@ class FakeStackSet(BaseModel): 
self.instances.delete(accounts, regions) - self._create_operation(operation_id=operation_id, action='DELETE', + operation = self._create_operation(operation_id=operation_id, action='DELETE', status='SUCCEEDED', accounts=accounts, regions=regions) + return operation def update_instances(self, accounts, regions, parameters, operation_id=None): if not operation_id: diff --git a/moto/cloudformation/responses.py b/moto/cloudformation/responses.py index 86eb3df0a..2c7f6b91a 100644 --- a/moto/cloudformation/responses.py +++ b/moto/cloudformation/responses.py @@ -368,10 +368,10 @@ class CloudFormationResponse(BaseResponse): stackset_name = self._get_param('StackSetName') accounts = self._get_multi_param('Accounts.member') regions = self._get_multi_param('Regions.member') - self.cloudformation_backend.delete_stack_instances(stackset_name, accounts, regions) + operation = self.cloudformation_backend.delete_stack_instances(stackset_name, accounts, regions) template = self.response_template(DELETE_STACK_INSTANCES_TEMPLATE) - return template.render() + return template.render(operation=operation) def describe_stack_set(self): stackset_name = self._get_param('StackSetName') @@ -806,7 +806,7 @@ LIST_STACK_INSTANCES_TEMPLATE = """ - d76a070d-279a-45f3-b9b4-example + {{ operation.OperationId }} e5325090-66f6-4ecd-a531-example From 6bd7e5941f7c13c01959539067e9e565770da7f4 Mon Sep 17 00:00:00 2001 From: acsbendi Date: Tue, 15 Jan 2019 17:34:22 +0100 Subject: [PATCH 083/175] Added test case for running multiple instances in the same command. 
--- tests/test_ec2/test_instances.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/tests/test_ec2/test_instances.py b/tests/test_ec2/test_instances.py index 84b4fbd7d..c0f0eea4d 100644 --- a/tests/test_ec2/test_instances.py +++ b/tests/test_ec2/test_instances.py @@ -1254,3 +1254,18 @@ def test_create_instance_ebs_optimized(): ) instance.load() instance.ebs_optimized.should.be(False) + +@mock_ec2 +def test_run_multiple_instances_in_same_command(): + instance_count = 4 + client = boto3.client('ec2', region_name='us-east-1') + client.run_instances(ImageId='ami-1234abcd', + MinCount=instance_count, + MaxCount=instance_count) + reservations = client.describe_instances()['Reservations'] + + reservations[0]['Instances'].should.have.length_of(instance_count) + + instances = reservations[0]['Instances'] + for i in range(0, instance_count): + instances[i]['AmiLaunchIndex'].should.be(i) From 22288ef83b32e32d221c129fe1206e0aae38fe43 Mon Sep 17 00:00:00 2001 From: acsbendi Date: Tue, 15 Jan 2019 17:36:10 +0100 Subject: [PATCH 084/175] Implemented initializing ami_launch_index property. --- moto/ec2/models.py | 1 + 1 file changed, 1 insertion(+) diff --git a/moto/ec2/models.py b/moto/ec2/models.py index a450943b7..cc333e790 100755 --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -388,6 +388,7 @@ class Instance(TaggedEC2Resource, BotoInstance): self.ebs_optimized = kwargs.get("ebs_optimized", False) self.source_dest_check = "true" self.launch_time = utc_date_and_time() + self.ami_launch_index = kwargs.get("ami_launch_index", 0) self.disable_api_termination = kwargs.get("disable_api_termination", False) self._spot_fleet_id = kwargs.get("spot_fleet_id", None) associate_public_ip = kwargs.get("associate_public_ip", False) From ca5a8033e54ac049448c4c36394d80c588640668 Mon Sep 17 00:00:00 2001 From: acsbendi Date: Tue, 15 Jan 2019 17:37:22 +0100 Subject: [PATCH 085/175] Implemented providing ami_launch_index to the new Instance's constructor. 
--- moto/ec2/models.py | 1 + 1 file changed, 1 insertion(+) diff --git a/moto/ec2/models.py b/moto/ec2/models.py index cc333e790..11ad8b101 100755 --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -720,6 +720,7 @@ class InstanceBackend(object): instance_tags = tags.get('instance', {}) for index in range(count): + kwargs["ami_launch_index"] = index new_instance = Instance( self, image_id, From e4768662818c42fe920379ce7cd7b093380da12c Mon Sep 17 00:00:00 2001 From: acsbendi Date: Tue, 15 Jan 2019 17:38:44 +0100 Subject: [PATCH 086/175] Implemented showing ami_launch_index property in responses. --- moto/ec2/responses/instances.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/moto/ec2/responses/instances.py b/moto/ec2/responses/instances.py index 3e4705f92..49f1face7 100644 --- a/moto/ec2/responses/instances.py +++ b/moto/ec2/responses/instances.py @@ -244,7 +244,7 @@ EC2_RUN_INSTANCES = """ -{{ requestid }} + """ @@ -412,7 +412,7 @@ ATTACH_INSTANCES_TEMPLATE = """ -{{ requestid }} + """ @@ -454,7 +454,7 @@ DETACH_INSTANCES_TEMPLATE = """ -{{ requestid }} + """ @@ -654,7 +654,7 @@ DELETE_POLICY_TEMPLATE = """ -{{ requestid }} + """ @@ -670,14 +670,14 @@ DESCRIBE_LOAD_BALANCERS_TEMPLATE = """ -{{ requestid }} + """ @@ -690,13 +690,13 @@ SUSPEND_PROCESSES_TEMPLATE = """ -{{ requestid }} + """ SET_INSTANCE_PROTECTION_TEMPLATE = """ -{{ requestid }} + """ diff --git a/moto/sqs/responses.py b/moto/sqs/responses.py index b4f64b14e..5ddaf8849 100644 --- a/moto/sqs/responses.py +++ b/moto/sqs/responses.py @@ -420,7 +420,7 @@ CREATE_QUEUE_RESPONSE = """ {{ queue.visibility_timeout }} - {{ requestid }} + """ @@ -429,7 +429,7 @@ GET_QUEUE_URL_RESPONSE = """ {{ queue.url(request_url) }} - {{ requestid }} + """ @@ -440,13 +440,13 @@ LIST_QUEUES_RESPONSE = """ {% endfor %} - {{ requestid }} + """ DELETE_QUEUE_RESPONSE = """ - {{ requestid }} + """ @@ -460,13 +460,13 @@ GET_QUEUE_ATTRIBUTES_RESPONSE = """ {% endfor %} - {{ requestid }} + """ 
SET_QUEUE_ATTRIBUTE_RESPONSE = """ - {{ requestid }} + """ @@ -483,7 +483,7 @@ SEND_MESSAGE_RESPONSE = """ - {{ requestid }} + """ @@ -543,7 +543,7 @@ RECEIVE_MESSAGE_RESPONSE = """ {% endfor %} - {{ requestid }} + """ @@ -561,13 +561,13 @@ SEND_MESSAGE_BATCH_RESPONSE = """ {% endfor %} - {{ requestid }} + """ DELETE_MESSAGE_RESPONSE = """ - {{ requestid }} + """ @@ -580,13 +580,13 @@ DELETE_MESSAGE_BATCH_RESPONSE = """ {% endfor %} - {{ requestid }} + """ CHANGE_MESSAGE_VISIBILITY_RESPONSE = """ - {{ requestid }} + """ @@ -613,7 +613,7 @@ CHANGE_MESSAGE_VISIBILITY_BATCH_RESPONSE = """ - {{ requestid }} + """ From 19a0179608393d1093805589c441d59cef549fac Mon Sep 17 00:00:00 2001 From: Robert Lewis Date: Fri, 18 Jan 2019 07:52:47 -0800 Subject: [PATCH 089/175] Use regex to populate requestId XML tag --- moto/core/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/core/utils.py b/moto/core/utils.py index 777a03752..ca670e871 100644 --- a/moto/core/utils.py +++ b/moto/core/utils.py @@ -280,7 +280,7 @@ def amzn_request_id(f): # Update request ID in XML try: - body = body.replace('{{ requestid }}', request_id) + body = re.sub(r'(?<=).*(?=<\/RequestId>)', request_id, body) except Exception: # Will just ignore if it cant work on bytes (which are str's on python2) pass From acdb1c9768c1923fecad9d3afb93b092b5f10eb3 Mon Sep 17 00:00:00 2001 From: Robert Lewis Date: Fri, 18 Jan 2019 08:32:45 -0800 Subject: [PATCH 090/175] Add requestid checking for sqs --- tests/test_sqs/test_sqs.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/test_sqs/test_sqs.py b/tests/test_sqs/test_sqs.py index 9beb9a3fa..d53ae50f7 100644 --- a/tests/test_sqs/test_sqs.py +++ b/tests/test_sqs/test_sqs.py @@ -416,7 +416,9 @@ def test_send_receive_message_timestamps(): conn.create_queue(QueueName="test-queue") queue = sqs.Queue("test-queue") - queue.send_message(MessageBody="derp") + response = queue.send_message(MessageBody="derp") + assert 
response['ResponseMetadata']['RequestId'] + messages = conn.receive_message( QueueUrl=queue.url, MaxNumberOfMessages=1)['Messages'] From 570b73691beb1c562ef7db4dd4731a9b848bff2c Mon Sep 17 00:00:00 2001 From: Robert Lewis Date: Fri, 18 Jan 2019 16:09:42 -0800 Subject: [PATCH 091/175] Add requestid checking for autoscaling --- tests/test_autoscaling/test_autoscaling.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_autoscaling/test_autoscaling.py b/tests/test_autoscaling/test_autoscaling.py index b1a65fb7e..d01f2dcb3 100644 --- a/tests/test_autoscaling/test_autoscaling.py +++ b/tests/test_autoscaling/test_autoscaling.py @@ -543,6 +543,7 @@ def test_describe_load_balancers(): ) response = client.describe_load_balancers(AutoScalingGroupName='test_asg') + assert response['ResponseMetadata']['RequestId'] list(response['LoadBalancers']).should.have.length_of(1) response['LoadBalancers'][0]['LoadBalancerName'].should.equal('my-lb') From 3c5ce6c09ef4765c61ed599507a807a88ce056c3 Mon Sep 17 00:00:00 2001 From: acsbendi Date: Mon, 21 Jan 2019 13:30:22 +0100 Subject: [PATCH 092/175] Fixed new subnets associated with a network ACL from a different VPC. 
--- moto/ec2/models.py | 6 +++--- tests/test_ec2/test_network_acls.py | 19 ++++++++++++++++++- 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/moto/ec2/models.py b/moto/ec2/models.py index a450943b7..a00cb7c3b 100755 --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -2464,7 +2464,7 @@ class SubnetBackend(object): default_for_az, map_public_ip_on_launch) # AWS associates a new subnet with the default Network ACL - self.associate_default_network_acl_with_subnet(subnet_id) + self.associate_default_network_acl_with_subnet(subnet_id, vpc_id) self.subnets[availability_zone][subnet_id] = subnet return subnet @@ -3636,9 +3636,9 @@ class NetworkAclBackend(object): new_acl.associations[new_assoc_id] = association return association - def associate_default_network_acl_with_subnet(self, subnet_id): + def associate_default_network_acl_with_subnet(self, subnet_id, vpc_id): association_id = random_network_acl_subnet_association_id() - acl = next(acl for acl in self.network_acls.values() if acl.default) + acl = next(acl for acl in self.network_acls.values() if acl.default and acl.vpc_id == vpc_id) acl.associations[association_id] = NetworkAclAssociation(self, association_id, subnet_id, acl.id) diff --git a/tests/test_ec2/test_network_acls.py b/tests/test_ec2/test_network_acls.py index fd2ec105e..a4eb193e2 100644 --- a/tests/test_ec2/test_network_acls.py +++ b/tests/test_ec2/test_network_acls.py @@ -1,8 +1,9 @@ from __future__ import unicode_literals import boto +import boto3 import sure # noqa -from moto import mock_ec2_deprecated +from moto import mock_ec2_deprecated, mock_ec2 @mock_ec2_deprecated @@ -173,3 +174,19 @@ def test_network_acl_tagging(): if na.id == network_acl.id) test_network_acl.tags.should.have.length_of(1) test_network_acl.tags["a key"].should.equal("some value") + + +@mock_ec2 +def test_new_subnet_in_new_vpc_associates_with_default_network_acl(): + ec2 = boto3.resource('ec2', region_name='us-west-1') + new_vpc = 
ec2.create_vpc(CidrBlock='10.0.0.0/16') + new_vpc.reload() + + subnet = ec2.create_subnet(VpcId=new_vpc.id, CidrBlock='10.0.0.0/24') + subnet.reload() + + new_vpcs_default_network_acl = next(iter(new_vpc.network_acls.all()), None) + new_vpcs_default_network_acl.reload() + new_vpcs_default_network_acl.vpc_id.should.equal(new_vpc.id) + new_vpcs_default_network_acl.associations.should.have.length_of(1) + new_vpcs_default_network_acl.associations[0]['SubnetId'].should.equal(subnet.id) From 68b8e6b636b74cd20dd5d2c769e617527472b31f Mon Sep 17 00:00:00 2001 From: acsbendi Date: Mon, 21 Jan 2019 15:36:49 +0100 Subject: [PATCH 093/175] Implemented adding default entries to default network ACLs. --- moto/ec2/models.py | 14 ++++++++++++++ tests/test_ec2/test_network_acls.py | 24 ++++++++++++++++++++++++ 2 files changed, 38 insertions(+) diff --git a/moto/ec2/models.py b/moto/ec2/models.py index a00cb7c3b..d008b759c 100755 --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -3560,8 +3560,22 @@ class NetworkAclBackend(object): self.get_vpc(vpc_id) network_acl = NetworkAcl(self, network_acl_id, vpc_id, default) self.network_acls[network_acl_id] = network_acl + if default: + self.add_default_entries(network_acl_id) return network_acl + def add_default_entries(self, network_acl_id): + default_acl_entries = [ + {'rule_number': 100, 'rule_action': 'allow', 'egress': 'true'}, + {'rule_number': 32767, 'rule_action': 'deny', 'egress': 'true'}, + {'rule_number': 100, 'rule_action': 'allow', 'egress': 'false'}, + {'rule_number': 32767, 'rule_action': 'deny', 'egress': 'false'} + ] + for entry in default_acl_entries: + self.create_network_acl_entry(network_acl_id=network_acl_id, rule_number=entry['rule_number'], protocol='-1', + rule_action=entry['rule_action'], egress=entry['egress'], cidr_block='0.0.0.0/0', + icmp_code=None, icmp_type=None, port_range_from=None, port_range_to=None) + def get_all_network_acls(self, network_acl_ids=None, filters=None): network_acls = 
self.network_acls.values() diff --git a/tests/test_ec2/test_network_acls.py b/tests/test_ec2/test_network_acls.py index a4eb193e2..9c92c949e 100644 --- a/tests/test_ec2/test_network_acls.py +++ b/tests/test_ec2/test_network_acls.py @@ -190,3 +190,27 @@ def test_new_subnet_in_new_vpc_associates_with_default_network_acl(): new_vpcs_default_network_acl.vpc_id.should.equal(new_vpc.id) new_vpcs_default_network_acl.associations.should.have.length_of(1) new_vpcs_default_network_acl.associations[0]['SubnetId'].should.equal(subnet.id) + + +@mock_ec2 +def test_default_network_acl_default_entries(): + ec2 = boto3.resource('ec2', region_name='us-west-1') + default_network_acl = next(iter(ec2.network_acls.all()), None) + default_network_acl.is_default.should.be.ok + + default_network_acl.entries.should.have.length_of(4) + unique_entries = [] + for entry in default_network_acl.entries: + entry['CidrBlock'].should.equal('0.0.0.0/0') + entry['Protocol'].should.equal('-1') + entry['RuleNumber'].should.be.within([100, 32767]) + entry['RuleAction'].should.be.within(['allow', 'deny']) + assert type(entry['Egress']) is bool + if entry['RuleAction'] == 'allow': + entry['RuleNumber'].should.be.equal(100) + else: + entry['RuleNumber'].should.be.equal(32767) + if entry not in unique_entries: + unique_entries.append(entry) + + unique_entries.should.have.length_of(4) From 337601b5fb9cbff9fad5dce719fc4c30dc2d5c96 Mon Sep 17 00:00:00 2001 From: Mark Challoner Date: Tue, 30 Oct 2018 11:09:35 +0000 Subject: [PATCH 094/175] Add CloudFormation methods delete_change_set, describe_change_set and list_change_sets. 
--- IMPLEMENTATION_COVERAGE.md | 6 +- moto/cloudformation/models.py | 102 ++++++++++++++-- moto/cloudformation/parsing.py | 54 +++++++-- moto/cloudformation/responses.py | 111 ++++++++++++++++++ .../test_cloudformation_stack_crud_boto3.py | 58 ++++++++- 5 files changed, 308 insertions(+), 23 deletions(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index 735af6002..42367db9b 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -477,12 +477,12 @@ - [X] create_stack - [ ] create_stack_instances - [ ] create_stack_set -- [ ] delete_change_set +- [X] delete_change_set - [X] delete_stack - [ ] delete_stack_instances - [ ] delete_stack_set - [ ] describe_account_limits -- [ ] describe_change_set +- [X] describe_change_set - [ ] describe_stack_events - [ ] describe_stack_instance - [ ] describe_stack_resource @@ -495,7 +495,7 @@ - [ ] get_stack_policy - [ ] get_template - [ ] get_template_summary -- [ ] list_change_sets +- [X] list_change_sets - [X] list_exports - [ ] list_imports - [ ] list_stack_instances diff --git a/moto/cloudformation/models.py b/moto/cloudformation/models.py index c45c5d5fe..864e98a92 100644 --- a/moto/cloudformation/models.py +++ b/moto/cloudformation/models.py @@ -127,6 +127,49 @@ class FakeStack(BaseModel): self.status = "DELETE_COMPLETE" +class FakeChange(BaseModel): + + def __init__(self, action, logical_resource_id, resource_type): + self.action = action + self.logical_resource_id = logical_resource_id + self.resource_type = resource_type + + +class FakeChangeSet(FakeStack): + + def __init__(self, stack_id, stack_name, stack_template, change_set_id, change_set_name, template, parameters, region_name, notification_arns=None, tags=None, role_arn=None, cross_stack_resources=None): + super(FakeChangeSet, self).__init__( + stack_id, + stack_name, + stack_template, + parameters, + region_name, + notification_arns=notification_arns, + tags=tags, + role_arn=role_arn, + 
cross_stack_resources=cross_stack_resources, + create_change_set=True, + ) + self.stack_name = stack_name + self.change_set_id = change_set_id + self.change_set_name = change_set_name + self.changes = self.diff(template=template, parameters=parameters) + + def diff(self, template, parameters=None): + self.template = template + self._parse_template() + changes = [] + resources_by_action = self.resource_map.diff(self.template_dict, parameters) + for action, resources in resources_by_action.items(): + for resource_name, resource in resources.items(): + changes.append(FakeChange( + action=action, + logical_resource_id=resource_name, + resource_type=resource['ResourceType'], + )) + return changes + + class FakeEvent(BaseModel): def __init__(self, stack_id, stack_name, logical_resource_id, physical_resource_id, resource_type, resource_status, resource_status_reason=None, resource_properties=None): @@ -171,24 +214,62 @@ class CloudFormationBackend(BaseBackend): return new_stack def create_change_set(self, stack_name, change_set_name, template, parameters, region_name, change_set_type, notification_arns=None, tags=None, role_arn=None): + stack_id = None + stack_template = None if change_set_type == 'UPDATE': stacks = self.stacks.values() stack = None for s in stacks: if s.name == stack_name: stack = s + stack_id = stack.stack_id + stack_template = stack.template if stack is None: raise ValidationError(stack_name) - else: - stack = self.create_stack(stack_name, template, parameters, - region_name, notification_arns, tags, - role_arn, create_change_set=True) + stack_id = generate_stack_id(stack_name) + stack_template = template + change_set_id = generate_changeset_id(change_set_name, region_name) - self.stacks[change_set_name] = {'Id': change_set_id, - 'StackId': stack.stack_id} - self.change_sets[change_set_id] = stack - return change_set_id, stack.stack_id + new_change_set = FakeChangeSet( + stack_id=stack_id, + stack_name=stack_name, + stack_template=stack_template, + 
change_set_id=change_set_id, + change_set_name=change_set_name, + template=template, + parameters=parameters, + region_name=region_name, + notification_arns=notification_arns, + tags=tags, + role_arn=role_arn, + cross_stack_resources=self.exports + ) + self.change_sets[change_set_id] = new_change_set + self.stacks[stack_id] = new_change_set + return change_set_id, stack_id + + def delete_change_set(self, change_set_name, stack_name=None): + if change_set_name in self.change_sets: + # This means arn was passed in + del self.change_sets[change_set_name] + else: + for cs in self.change_sets: + if self.change_sets[cs].change_set_name == change_set_name: + del self.change_sets[cs] + + def describe_change_set(self, change_set_name, stack_name=None): + change_set = None + if change_set_name in self.change_sets: + # This means arn was passed in + change_set = self.change_sets[change_set_name] + else: + for cs in self.change_sets: + if self.change_sets[cs].change_set_name == change_set_name: + change_set = self.change_sets[cs] + if change_set is None: + raise ValidationError(change_set_name) + return change_set def execute_change_set(self, change_set_name, stack_name=None): stack = None @@ -197,7 +278,7 @@ class CloudFormationBackend(BaseBackend): stack = self.change_sets[change_set_name] else: for cs in self.change_sets: - if self.change_sets[cs].name == change_set_name: + if self.change_sets[cs].change_set_name == change_set_name: stack = self.change_sets[cs] if stack is None: raise ValidationError(stack_name) @@ -223,6 +304,9 @@ class CloudFormationBackend(BaseBackend): else: return list(stacks) + def list_change_sets(self): + return self.change_sets.values() + def list_stacks(self): return [ v for v in self.stacks.values() diff --git a/moto/cloudformation/parsing.py b/moto/cloudformation/parsing.py index 35b05d101..0be68944b 100644 --- a/moto/cloudformation/parsing.py +++ b/moto/cloudformation/parsing.py @@ -465,36 +465,70 @@ class ResourceMap(collections.Mapping): 
ec2_models.ec2_backends[self._region_name].create_tags( [self[resource].physical_resource_id], self.tags) - def update(self, template, parameters=None): + def diff(self, template, parameters=None): if parameters: self.input_parameters = parameters self.load_mapping() self.load_parameters() self.load_conditions() + old_template = self._resource_json_map + new_template = template['Resources'] + + resource_names_by_action = { + 'Add': set(new_template) - set(old_template), + 'Modify': set(name for name in new_template if name in old_template and new_template[ + name] != old_template[name]), + 'Remove': set(old_template) - set(new_template) + } + resources_by_action = { + 'Add': {}, + 'Modify': {}, + 'Remove': {}, + } + + for resource_name in resource_names_by_action['Add']: + resources_by_action['Add'][resource_name] = { + 'LogicalResourceId': resource_name, + 'ResourceType': new_template[resource_name]['Type'] + } + + for resource_name in resource_names_by_action['Modify']: + resources_by_action['Modify'][resource_name] = { + 'LogicalResourceId': resource_name, + 'ResourceType': new_template[resource_name]['Type'] + } + + for resource_name in resource_names_by_action['Remove']: + resources_by_action['Remove'][resource_name] = { + 'LogicalResourceId': resource_name, + 'ResourceType': old_template[resource_name]['Type'] + } + + return resources_by_action + + def update(self, template, parameters=None): + resources_by_action = self.diff(template, parameters) + old_template = self._resource_json_map new_template = template['Resources'] self._resource_json_map = new_template - new_resource_names = set(new_template) - set(old_template) - for resource_name in new_resource_names: + for resource_name, resource in resources_by_action['Add'].items(): resource_json = new_template[resource_name] new_resource = parse_and_create_resource( resource_name, resource_json, self, self._region_name) self._parsed_resources[resource_name] = new_resource - removed_resource_names = 
set(old_template) - set(new_template) - for resource_name in removed_resource_names: + for resource_name, resource in resources_by_action['Remove'].items(): resource_json = old_template[resource_name] parse_and_delete_resource( resource_name, resource_json, self, self._region_name) self._parsed_resources.pop(resource_name) - resources_to_update = set(name for name in new_template if name in old_template and new_template[ - name] != old_template[name]) tries = 1 - while resources_to_update and tries < 5: - for resource_name in resources_to_update.copy(): + while resources_by_action['Modify'] and tries < 5: + for resource_name, resource in resources_by_action['Modify'].copy().items(): resource_json = new_template[resource_name] try: changed_resource = parse_and_update_resource( @@ -505,7 +539,7 @@ class ResourceMap(collections.Mapping): last_exception = e else: self._parsed_resources[resource_name] = changed_resource - resources_to_update.remove(resource_name) + del resources_by_action['Modify'][resource_name] tries += 1 if tries == 5: raise last_exception diff --git a/moto/cloudformation/responses.py b/moto/cloudformation/responses.py index 9e67e931a..84805efaf 100644 --- a/moto/cloudformation/responses.py +++ b/moto/cloudformation/responses.py @@ -120,6 +120,31 @@ class CloudFormationResponse(BaseResponse): template = self.response_template(CREATE_CHANGE_SET_RESPONSE_TEMPLATE) return template.render(stack_id=stack_id, change_set_id=change_set_id) + def delete_change_set(self): + stack_name = self._get_param('StackName') + change_set_name = self._get_param('ChangeSetName') + + self.cloudformation_backend.delete_change_set(change_set_name=change_set_name, stack_name=stack_name) + if self.request_json: + return json.dumps({ + 'DeleteChangeSetResponse': { + 'DeleteChangeSetResult': {}, + } + }) + else: + template = self.response_template(DELETE_CHANGE_SET_RESPONSE_TEMPLATE) + return template.render() + + def describe_change_set(self): + stack_name = 
self._get_param('StackName') + change_set_name = self._get_param('ChangeSetName') + change_set = self.cloudformation_backend.describe_change_set( + change_set_name=change_set_name, + stack_name=stack_name, + ) + template = self.response_template(DESCRIBE_CHANGE_SET_RESPONSE_TEMPLATE) + return template.render(change_set=change_set) + @amzn_request_id def execute_change_set(self): stack_name = self._get_param('StackName') @@ -187,6 +212,11 @@ class CloudFormationResponse(BaseResponse): template = self.response_template(DESCRIBE_STACK_EVENTS_RESPONSE) return template.render(stack=stack) + def list_change_sets(self): + change_sets = self.cloudformation_backend.list_change_sets() + template = self.response_template(LIST_CHANGE_SETS_RESPONSE) + return template.render(change_sets=change_sets) + def list_stacks(self): stacks = self.cloudformation_backend.list_stacks() template = self.response_template(LIST_STACKS_RESPONSE) @@ -354,6 +384,66 @@ CREATE_CHANGE_SET_RESPONSE_TEMPLATE = """ """ +DELETE_CHANGE_SET_RESPONSE_TEMPLATE = """ + + + + 3d3200a1-810e-3023-6cc3-example + + +""" + +DESCRIBE_CHANGE_SET_RESPONSE_TEMPLATE = """ + + {{ change_set.change_set_id }} + {{ change_set.change_set_name }} + {{ change_set.stack_id }} + {{ change_set.stack_name }} + {{ change_set.description }} + + {% for param_name, param_value in change_set.stack_parameters.items() %} + + {{ param_name }} + {{ param_value }} + + {% endfor %} + + 2011-05-23T15:47:44Z + {{ change_set.execution_status }} + {{ change_set.status }} + {{ change_set.status_reason }} + {% if change_set.notification_arns %} + + {% for notification_arn in change_set.notification_arns %} + {{ notification_arn }} + {% endfor %} + + {% else %} + + {% endif %} + {% if change_set.role_arn %} + {{ change_set.role_arn }} + {% endif %} + {% if change_set.changes %} + + {% for change in change_set.changes %} + + Resource + + {{ change.action }} + {{ change.logical_resource_id }} + {{ change.resource_type }} + + + {% endfor %} + + {% 
endif %} + {% if next_token %} + {{ next_token }} + {% endif %} + +""" + EXECUTE_CHANGE_SET_RESPONSE_TEMPLATE = """ @@ -479,6 +569,27 @@ DESCRIBE_STACK_EVENTS_RESPONSE = """ + + + {% for change_set in change_sets %} + + {{ change_set.stack_id }} + {{ change_set.stack_name }} + {{ change_set.change_set_id }} + {{ change_set.change_set_name }} + {{ change_set.execution_status }} + {{ change_set.status }} + {{ change_set.status_reason }} + 2011-05-23T15:47:44Z + {{ change_set.description }} + + {% endfor %} + + +""" + + LIST_STACKS_RESPONSE = """ diff --git a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py index 4585da056..2511de6da 100644 --- a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py +++ b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py @@ -399,6 +399,32 @@ def test_create_change_set_from_s3_url(): assert 'arn:aws:cloudformation:us-east-1:123456789:stack/NewStack' in response['StackId'] +@mock_cloudformation +def test_describe_change_set(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_change_set( + StackName='NewStack', + TemplateBody=dummy_template_json, + ChangeSetName='NewChangeSet', + ChangeSetType='CREATE', + ) + + stack = cf_conn.describe_change_set(ChangeSetName="NewChangeSet") + stack['ChangeSetName'].should.equal('NewChangeSet') + stack['StackName'].should.equal('NewStack') + + cf_conn.create_change_set( + StackName='NewStack', + TemplateBody=dummy_update_template_json, + ChangeSetName='NewChangeSet2', + ChangeSetType='UPDATE', + ) + stack = cf_conn.describe_change_set(ChangeSetName="NewChangeSet2") + stack['ChangeSetName'].should.equal('NewChangeSet2') + stack['StackName'].should.equal('NewStack') + stack['Changes'].should.have.length_of(2) + + @mock_cloudformation def test_execute_change_set_w_arn(): cf_conn = boto3.client('cloudformation', region_name='us-east-1') @@ -420,7 +446,7 @@ def 
test_execute_change_set_w_name(): ChangeSetName='NewChangeSet', ChangeSetType='CREATE', ) - cf_conn.execute_change_set(ChangeSetName='NewStack', StackName='NewStack') + cf_conn.execute_change_set(ChangeSetName='NewChangeSet', StackName='NewStack') @mock_cloudformation @@ -489,6 +515,20 @@ def test_describe_stack_by_stack_id(): stack_by_id['StackName'].should.equal("test_stack") +@mock_cloudformation +def test_list_change_sets(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_change_set( + StackName='NewStack2', + TemplateBody=dummy_template_json, + ChangeSetName='NewChangeSet2', + ChangeSetType='CREATE', + ) + change_set = cf_conn.list_change_sets(StackName='NewStack2')['Summaries'][0] + change_set['StackName'].should.equal('NewStack2') + change_set['ChangeSetName'].should.equal('NewChangeSet2') + + @mock_cloudformation def test_list_stacks(): cf = boto3.resource('cloudformation', region_name='us-east-1') @@ -521,6 +561,22 @@ def test_delete_stack_from_resource(): list(cf.stacks.all()).should.have.length_of(0) +@mock_cloudformation +@mock_ec2 +def test_delete_change_set(): + cf_conn = boto3.client('cloudformation', region_name='us-east-1') + cf_conn.create_change_set( + StackName='NewStack', + TemplateBody=dummy_template_json, + ChangeSetName='NewChangeSet', + ChangeSetType='CREATE', + ) + + cf_conn.list_change_sets(StackName='NewStack')['Summaries'].should.have.length_of(1) + cf_conn.delete_change_set(ChangeSetName='NewChangeSet', StackName='NewStack') + cf_conn.list_change_sets(StackName='NewStack')['Summaries'].should.have.length_of(0) + + @mock_cloudformation @mock_ec2 def test_delete_stack_by_name(): From 4d9039bf907cc2ecd7992aa3a746a14778b5ce2d Mon Sep 17 00:00:00 2001 From: Robert Lewis Date: Mon, 21 Jan 2019 13:08:59 -0800 Subject: [PATCH 095/175] Add name to authors file --- AUTHORS.md | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS.md b/AUTHORS.md index 0a152505a..5eb313dda 100644 --- a/AUTHORS.md +++ 
b/AUTHORS.md @@ -54,3 +54,4 @@ Moto is written by Steve Pulec with contributions from: * [William Richard](https://github.com/william-richard) * [Alex Casalboni](https://github.com/alexcasalboni) * [Jon Beilke](https://github.com/jrbeilke) +* [Robert Lewis](https://github.com/ralewis85) From b4b63202d9bb4879db151c38a778692f23ab8f05 Mon Sep 17 00:00:00 2001 From: David Morrison Date: Wed, 23 Jan 2019 15:59:31 -0800 Subject: [PATCH 096/175] tag specifications for spot fleet requests --- moto/ec2/models.py | 35 ++++++++++++++++++++---- moto/ec2/responses/spot_fleets.py | 15 +++++++++++ tests/test_ec2/test_spot_fleet.py | 44 ++++++++++++++++++++++++++++++- 3 files changed, 88 insertions(+), 6 deletions(-) mode change 100755 => 100644 moto/ec2/models.py diff --git a/moto/ec2/models.py b/moto/ec2/models.py old mode 100755 new mode 100644 index a450943b7..cd20e7e0e --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -2879,7 +2879,7 @@ class SpotInstanceRequest(BotoSpotRequest, TaggedEC2Resource): def __init__(self, ec2_backend, spot_request_id, price, image_id, type, valid_from, valid_until, launch_group, availability_zone_group, key_name, security_groups, user_data, instance_type, placement, - kernel_id, ramdisk_id, monitoring_enabled, subnet_id, spot_fleet_id, + kernel_id, ramdisk_id, monitoring_enabled, subnet_id, tags, spot_fleet_id, **kwargs): super(SpotInstanceRequest, self).__init__(**kwargs) ls = LaunchSpecification() @@ -2903,6 +2903,7 @@ class SpotInstanceRequest(BotoSpotRequest, TaggedEC2Resource): ls.monitored = monitoring_enabled ls.subnet_id = subnet_id self.spot_fleet_id = spot_fleet_id + self.tags = tags if security_groups: for group_name in security_groups: @@ -2936,6 +2937,7 @@ class SpotInstanceRequest(BotoSpotRequest, TaggedEC2Resource): security_group_names=[], security_group_ids=self.launch_specification.groups, spot_fleet_id=self.spot_fleet_id, + tags=self.tags, ) instance = reservation.instances[0] return instance @@ -2951,15 +2953,16 @@ class 
SpotRequestBackend(object): valid_until, launch_group, availability_zone_group, key_name, security_groups, user_data, instance_type, placement, kernel_id, ramdisk_id, - monitoring_enabled, subnet_id, spot_fleet_id=None): + monitoring_enabled, subnet_id, tags=None, spot_fleet_id=None): requests = [] + tags = tags or {} for _ in range(count): spot_request_id = random_spot_request_id() request = SpotInstanceRequest(self, spot_request_id, price, image_id, type, valid_from, valid_until, launch_group, availability_zone_group, key_name, security_groups, user_data, instance_type, placement, kernel_id, ramdisk_id, - monitoring_enabled, subnet_id, spot_fleet_id) + monitoring_enabled, subnet_id, tags, spot_fleet_id) self.spot_instance_requests[spot_request_id] = request requests.append(request) return requests @@ -2979,8 +2982,8 @@ class SpotRequestBackend(object): class SpotFleetLaunchSpec(object): def __init__(self, ebs_optimized, group_set, iam_instance_profile, image_id, - instance_type, key_name, monitoring, spot_price, subnet_id, user_data, - weighted_capacity): + instance_type, key_name, monitoring, spot_price, subnet_id, tag_specifications, + user_data, weighted_capacity): self.ebs_optimized = ebs_optimized self.group_set = group_set self.iam_instance_profile = iam_instance_profile @@ -2990,6 +2993,7 @@ class SpotFleetLaunchSpec(object): self.monitoring = monitoring self.spot_price = spot_price self.subnet_id = subnet_id + self.tag_specifications = tag_specifications self.user_data = user_data self.weighted_capacity = float(weighted_capacity) @@ -3020,6 +3024,7 @@ class SpotFleetRequest(TaggedEC2Resource): monitoring=spec.get('monitoring._enabled'), spot_price=spec.get('spot_price', self.spot_price), subnet_id=spec['subnet_id'], + tag_specifications=self._parse_tag_specifications(spec), user_data=spec.get('user_data'), weighted_capacity=spec['weighted_capacity'], ) @@ -3102,6 +3107,7 @@ class SpotFleetRequest(TaggedEC2Resource): 
monitoring_enabled=launch_spec.monitoring, subnet_id=launch_spec.subnet_id, spot_fleet_id=self.id, + tags=launch_spec.tag_specifications, ) self.spot_requests.extend(requests) self.fulfilled_capacity += added_weight @@ -3124,6 +3130,25 @@ class SpotFleetRequest(TaggedEC2Resource): self.spot_requests = [req for req in self.spot_requests if req.instance.id not in instance_ids] self.ec2_backend.terminate_instances(instance_ids) + def _parse_tag_specifications(self, spec): + try: + tag_spec_num = max([int(key.split('.')[1]) for key in spec if key.startswith("tag_specification_set")]) + except ValueError: # no tag specifications + return {} + + tag_specifications = {} + for si in range(1, tag_spec_num + 1): + resource_type = spec["tag_specification_set.{si}._resource_type".format(si=si)] + + tags = [key for key in spec if key.startswith("tag_specification_set.{si}._tag".format(si=si))] + tag_num = max([int(key.split('.')[3]) for key in tags]) + tag_specifications[resource_type] = dict(( + spec["tag_specification_set.{si}._tag.{ti}._key".format(si=si, ti=ti)], + spec["tag_specification_set.{si}._tag.{ti}._value".format(si=si, ti=ti)], + ) for ti in range(1, tag_num + 1)) + + return tag_specifications + class SpotFleetBackend(object): def __init__(self): diff --git a/moto/ec2/responses/spot_fleets.py b/moto/ec2/responses/spot_fleets.py index 0366af9d6..bb9aeb4ca 100644 --- a/moto/ec2/responses/spot_fleets.py +++ b/moto/ec2/responses/spot_fleets.py @@ -107,6 +107,21 @@ DESCRIBE_SPOT_FLEET_TEMPLATE = """ Date: Thu, 24 Jan 2019 20:39:55 -0500 Subject: [PATCH 097/175] [cognitoidp] feat: add update_identity_provider --- moto/cognitoidp/models.py | 13 ++++++++++ moto/cognitoidp/responses.py | 8 ++++++ tests/test_cognitoidp/test_cognitoidp.py | 32 ++++++++++++++++++++++++ 3 files changed, 53 insertions(+) diff --git a/moto/cognitoidp/models.py b/moto/cognitoidp/models.py index 00868f7b3..bdd279ba6 100644 --- a/moto/cognitoidp/models.py +++ b/moto/cognitoidp/models.py @@ -426,6 
+426,19 @@ class CognitoIdpBackend(BaseBackend): return identity_provider + def update_identity_provider(self, user_pool_id, name, extended_config): + user_pool = self.user_pools.get(user_pool_id) + if not user_pool: + raise ResourceNotFoundError(user_pool_id) + + identity_provider = user_pool.identity_providers.get(name) + if not identity_provider: + raise ResourceNotFoundError(name) + + identity_provider.extended_config.update(extended_config) + + return identity_provider + def delete_identity_provider(self, user_pool_id, name): user_pool = self.user_pools.get(user_pool_id) if not user_pool: diff --git a/moto/cognitoidp/responses.py b/moto/cognitoidp/responses.py index 8b3941c21..264910739 100644 --- a/moto/cognitoidp/responses.py +++ b/moto/cognitoidp/responses.py @@ -143,6 +143,14 @@ class CognitoIdpResponse(BaseResponse): "IdentityProvider": identity_provider.to_json(extended=True) }) + def update_identity_provider(self): + user_pool_id = self._get_param("UserPoolId") + name = self._get_param("ProviderName") + identity_provider = cognitoidp_backends[self.region].update_identity_provider(user_pool_id, name, self.parameters) + return json.dumps({ + "IdentityProvider": identity_provider.to_json(extended=True) + }) + def delete_identity_provider(self): user_pool_id = self._get_param("UserPoolId") name = self._get_param("ProviderName") diff --git a/tests/test_cognitoidp/test_cognitoidp.py b/tests/test_cognitoidp/test_cognitoidp.py index 0ef082d5c..5706c9c8d 100644 --- a/tests/test_cognitoidp/test_cognitoidp.py +++ b/tests/test_cognitoidp/test_cognitoidp.py @@ -484,6 +484,38 @@ def test_describe_identity_providers(): result["IdentityProvider"]["ProviderDetails"]["thing"].should.equal(value) +@mock_cognitoidp +def test_update_identity_provider(): + conn = boto3.client("cognito-idp", "us-west-2") + + provider_name = str(uuid.uuid4()) + provider_type = "Facebook" + value = str(uuid.uuid4()) + new_value = str(uuid.uuid4()) + user_pool_id = 
conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + conn.create_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ProviderType=provider_type, + ProviderDetails={ + "thing": value + }, + ) + + result = conn.update_identity_provider( + UserPoolId=user_pool_id, + ProviderName=provider_name, + ProviderDetails={ + "thing": new_value + }, + ) + + result["IdentityProvider"]["UserPoolId"].should.equal(user_pool_id) + result["IdentityProvider"]["ProviderName"].should.equal(provider_name) + result["IdentityProvider"]["ProviderType"].should.equal(provider_type) + result["IdentityProvider"]["ProviderDetails"]["thing"].should.equal(new_value) + + @mock_cognitoidp def test_delete_identity_providers(): conn = boto3.client("cognito-idp", "us-west-2") From 935039d6848436abc1cc01a16eed24179939ac9a Mon Sep 17 00:00:00 2001 From: William Richard Date: Fri, 25 Jan 2019 14:47:23 -0500 Subject: [PATCH 098/175] ECR now uses api.ecr as its endpoint. This changed in botocore 1.12.85. 
See https://github.com/boto/botocore/commit/b5fa8a51393e24fbc7337a357a2d51a0f750c627 This fixes https://github.com/spulec/moto/issues/2035 --- moto/ecr/urls.py | 1 + 1 file changed, 1 insertion(+) diff --git a/moto/ecr/urls.py b/moto/ecr/urls.py index 86b8a8dbc..5b12cd843 100644 --- a/moto/ecr/urls.py +++ b/moto/ecr/urls.py @@ -3,6 +3,7 @@ from .responses import ECRResponse url_bases = [ "https?://ecr.(.+).amazonaws.com", + "https?://api.ecr.(.+).amazonaws.com", ] url_paths = { From 1a36c0c377738809c7df6a9087970ef1b79cced5 Mon Sep 17 00:00:00 2001 From: Mike Grima Date: Tue, 29 Jan 2019 18:09:31 -0800 Subject: [PATCH 099/175] IAM Role Tagging support --- IMPLEMENTATION_COVERAGE.md | 30 +++--- moto/iam/exceptions.py | 45 +++++++++ moto/iam/models.py | 89 ++++++++++++++++- moto/iam/responses.py | 81 ++++++++++++++++ setup.py | 4 +- tests/test_iam/test_iam.py | 193 +++++++++++++++++++++++++++++++++++++ 6 files changed, 426 insertions(+), 16 deletions(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index 735af6002..a9002ea7d 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -2208,7 +2208,7 @@ - [ ] describe_event_types - [ ] describe_events -## iam - 48% implemented +## iam - 62% implemented - [ ] add_client_id_to_open_id_connect_provider - [X] add_role_to_instance_profile - [X] add_user_to_group @@ -2247,7 +2247,7 @@ - [X] delete_server_certificate - [ ] delete_service_linked_role - [ ] delete_service_specific_credential -- [ ] delete_signing_certificate +- [X] delete_signing_certificate - [ ] delete_ssh_public_key - [X] delete_user - [X] delete_user_policy @@ -2279,7 +2279,7 @@ - [ ] get_ssh_public_key - [X] get_user - [X] get_user_policy -- [ ] list_access_keys +- [X] list_access_keys - [X] list_account_aliases - [X] list_attached_group_policies - [X] list_attached_role_policies @@ -2287,19 +2287,21 @@ - [ ] list_entities_for_policy - [X] list_group_policies - [X] list_groups -- [ ] list_groups_for_user -- [ ] 
list_instance_profiles -- [ ] list_instance_profiles_for_role +- [X] list_groups_for_user +- [X] list_instance_profiles +- [X] list_instance_profiles_for_role - [X] list_mfa_devices - [ ] list_open_id_connect_providers - [X] list_policies - [X] list_policy_versions - [X] list_role_policies -- [ ] list_roles +- [X] list_roles +- [X] list_role_tags +- [ ] list_user_tags - [X] list_saml_providers -- [ ] list_server_certificates +- [X] list_server_certificates - [ ] list_service_specific_credentials -- [ ] list_signing_certificates +- [X] list_signing_certificates - [ ] list_ssh_public_keys - [X] list_user_policies - [X] list_users @@ -2315,6 +2317,10 @@ - [ ] set_default_policy_version - [ ] simulate_custom_policy - [ ] simulate_principal_policy +- [X] tag_role +- [ ] tag_user +- [X] untag_role +- [ ] untag_user - [X] update_access_key - [ ] update_account_password_policy - [ ] update_assume_role_policy @@ -2326,11 +2332,11 @@ - [X] update_saml_provider - [ ] update_server_certificate - [ ] update_service_specific_credential -- [ ] update_signing_certificate +- [X] update_signing_certificate - [ ] update_ssh_public_key - [ ] update_user -- [ ] upload_server_certificate -- [ ] upload_signing_certificate +- [X] upload_server_certificate +- [X] upload_signing_certificate - [ ] upload_ssh_public_key ## importexport - 0% implemented diff --git a/moto/iam/exceptions.py b/moto/iam/exceptions.py index 61922ea18..5b13277da 100644 --- a/moto/iam/exceptions.py +++ b/moto/iam/exceptions.py @@ -32,3 +32,48 @@ class MalformedCertificate(RESTError): def __init__(self, cert): super(MalformedCertificate, self).__init__( 'MalformedCertificate', 'Certificate {cert} is malformed'.format(cert=cert)) + + +class DuplicateTags(RESTError): + code = 400 + + def __init__(self): + super(DuplicateTags, self).__init__( + 'InvalidInput', 'Duplicate tag keys found. 
Please note that Tag keys are case insensitive.') + + +class TagKeyTooBig(RESTError): + code = 400 + + def __init__(self, tag, param='tags.X.member.key'): + super(TagKeyTooBig, self).__init__( + 'ValidationError', "1 validation error detected: Value '{}' at '{}' failed to satisfy " + "constraint: Member must have length less than or equal to 128.".format(tag, param)) + + +class TagValueTooBig(RESTError): + code = 400 + + def __init__(self, tag): + super(TagValueTooBig, self).__init__( + 'ValidationError', "1 validation error detected: Value '{}' at 'tags.X.member.value' failed to satisfy " + "constraint: Member must have length less than or equal to 256.".format(tag)) + + +class InvalidTagCharacters(RESTError): + code = 400 + + def __init__(self, tag, param='tags.X.member.key'): + message = "1 validation error detected: Value '{}' at '{}' failed to satisfy ".format(tag, param) + message += "constraint: Member must satisfy regular expression pattern: [\\p{L}\\p{Z}\\p{N}_.:/=+\\-@]+" + + super(InvalidTagCharacters, self).__init__('ValidationError', message) + + +class TooManyTags(RESTError): + code = 400 + + def __init__(self, tags, param='tags'): + super(TooManyTags, self).__init__( + 'ValidationError', "1 validation error detected: Value '{}' at '{}' failed to satisfy " + "constraint: Member must have length less than or equal to 50.".format(tags, param)) diff --git a/moto/iam/models.py b/moto/iam/models.py index 5e4bb8263..86a5d7a32 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -3,6 +3,7 @@ import base64 import sys from datetime import datetime import json +import re from cryptography import x509 from cryptography.hazmat.backends import default_backend @@ -12,7 +13,8 @@ from moto.core import BaseBackend, BaseModel from moto.core.utils import iso_8601_datetime_without_milliseconds from .aws_managed_policies import aws_managed_policies_data -from .exceptions import IAMNotFoundException, IAMConflictException, IAMReportNotPresentException, 
MalformedCertificate +from .exceptions import IAMNotFoundException, IAMConflictException, IAMReportNotPresentException, MalformedCertificate, \ + DuplicateTags, TagKeyTooBig, InvalidTagCharacters, TooManyTags, TagValueTooBig from .utils import random_access_key, random_alphanumeric, random_resource_id, random_policy_id ACCOUNT_ID = 123456789012 @@ -32,7 +34,6 @@ class MFADevice(object): class Policy(BaseModel): - is_attachable = False def __init__(self, @@ -132,6 +133,7 @@ class Role(BaseModel): self.policies = {} self.managed_policies = {} self.create_date = datetime.now(pytz.utc) + self.tags = {} @classmethod def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name): @@ -175,6 +177,9 @@ class Role(BaseModel): raise NotImplementedError('"Fn::GetAtt" : [ "{0}" , "Arn" ]"') raise UnformattedGetAttTemplateException() + def get_tags(self): + return [self.tags[tag] for tag in self.tags] + class InstanceProfile(BaseModel): @@ -614,6 +619,86 @@ class IAMBackend(BaseBackend): role = self.get_role(role_name) return role.policies.keys() + def _validate_tag_key(self, tag_key, exception_param='tags.X.member.key'): + """Validates the tag key. + + :param all_tags: Dict to check if there is a duplicate tag. + :param tag_key: The tag key to check against. + :param exception_param: The exception parameter to send over to help format the message. This is to reflect + the difference between the tag and untag APIs. 
+ :return: + """ + # Validate that the key length is correct: + if len(tag_key) > 128: + raise TagKeyTooBig(tag_key, param=exception_param) + + # Validate that the tag key fits the proper Regex: + # [\w\s_.:/=+\-@]+ SHOULD be the same as the Java regex on the AWS documentation: [\p{L}\p{Z}\p{N}_.:/=+\-@]+ + match = re.findall(r'[\w\s_.:/=+\-@]+', tag_key) + # Kudos if you can come up with a better way of doing a global search :) + if not len(match) or len(match[0]) < len(tag_key): + raise InvalidTagCharacters(tag_key, param=exception_param) + + def _check_tag_duplicate(self, all_tags, tag_key): + """Validates that a tag key is not a duplicate + + :param all_tags: Dict to check if there is a duplicate tag. + :param tag_key: The tag key to check against. + :return: + """ + if tag_key in all_tags: + raise DuplicateTags() + + def list_role_tags(self, role_name, marker, max_items=100): + role = self.get_role(role_name) + + max_items = int(max_items) + tag_index = sorted(role.tags) + start_idx = int(marker) if marker else 0 + + tag_index = tag_index[start_idx:start_idx + max_items] + + if len(role.tags) <= (start_idx + max_items): + marker = None + else: + marker = str(start_idx + max_items) + + # Make the tag list of dict's: + tags = [role.tags[tag] for tag in tag_index] + + return tags, marker + + def tag_role(self, role_name, tags): + if len(tags) > 50: + raise TooManyTags(tags) + + role = self.get_role(role_name) + + tag_keys = {} + for tag in tags: + # Need to index by the lowercase tag key since the keys are case insensitive, but their case is retained. 
+ ref_key = tag['Key'].lower() + self._check_tag_duplicate(tag_keys, ref_key) + self._validate_tag_key(tag['Key']) + if len(tag['Value']) > 256: + raise TagValueTooBig(tag['Value']) + + tag_keys[ref_key] = tag + + role.tags.update(tag_keys) + + def untag_role(self, role_name, tag_keys): + if len(tag_keys) > 50: + raise TooManyTags(tag_keys, param='tagKeys') + + role = self.get_role(role_name) + + for key in tag_keys: + ref_key = key.lower() + self._validate_tag_key(key, exception_param='tagKeys') + + role.tags.pop(ref_key, None) + def create_policy_version(self, policy_arn, policy_document, set_as_default): policy = self.get_policy(policy_arn) if not policy: diff --git a/moto/iam/responses.py b/moto/iam/responses.py index d0e749d57..aeeb54936 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -625,6 +625,34 @@ class IamResponse(BaseResponse): template = self.response_template(LIST_SIGNING_CERTIFICATES_TEMPLATE) return template.render(user_name=user_name, certificates=certs) + def list_role_tags(self): + role_name = self._get_param('RoleName') + marker = self._get_param('Marker') + max_items = self._get_param('MaxItems', 100) + + tags, marker = iam_backend.list_role_tags(role_name, marker, max_items) + + template = self.response_template(LIST_ROLE_TAG_TEMPLATE) + return template.render(tags=tags, marker=marker) + + def tag_role(self): + role_name = self._get_param('RoleName') + tags = self._get_multi_param('Tags.member') + + iam_backend.tag_role(role_name, tags) + + template = self.response_template(TAG_ROLE_TEMPLATE) + return template.render() + + def untag_role(self): + role_name = self._get_param('RoleName') + tag_keys = self._get_multi_param('TagKeys.member') + + iam_backend.untag_role(role_name, tag_keys) + + template = self.response_template(UNTAG_ROLE_TEMPLATE) + return template.render() + ATTACH_ROLE_POLICY_TEMPLATE = """ @@ -878,6 +906,16 @@ GET_ROLE_TEMPLATE = """ {% endfor %} + + {% for tag in role.get_tags() %} + + {{ tag['Key'] }} + {{ 
tag['Value'] }} + + {% endfor %} + {% for profile in instance_profiles %} @@ -1671,3 +1717,38 @@ LIST_SIGNING_CERTIFICATES_TEMPLATE = """ 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE """ + + +TAG_ROLE_TEMPLATE = """ + + EXAMPLE8-90ab-cdef-fedc-ba987EXAMPLE + +""" + + +LIST_ROLE_TAG_TEMPLATE = """ + + {{ 'true' if marker else 'false' }} + {% if marker %} + {{ marker }} + {% endif %} + + {% for tag in tags %} + + {{ tag['Key'] }} + {{ tag['Value'] }} + + {% endfor %} + + + + EXAMPLE8-90ab-cdef-fedc-ba987EXAMPLE + +""" + + +UNTAG_ROLE_TEMPLATE = """ + + EXAMPLE8-90ab-cdef-fedc-ba987EXAMPLE + +""" diff --git a/setup.py b/setup.py index 74683836e..8ac06c4dd 100755 --- a/setup.py +++ b/setup.py @@ -21,8 +21,8 @@ def read(*parts): install_requires = [ "Jinja2>=2.7.3", "boto>=2.36.0", - "boto3>=1.6.16", - "botocore>=1.12.13", + "boto3>=1.9.86", + "botocore>=1.12.86", "cryptography>=2.3.0", "requests>=2.5", "xmltodict", diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index 2b5f16d77..15364928a 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -306,6 +306,7 @@ def test_create_policy_versions(): PolicyDocument='{"some":"policy"}') version.get('PolicyVersion').get('Document').should.equal({'some': 'policy'}) + @mock_iam def test_get_policy(): conn = boto3.client('iam', region_name='us-east-1') @@ -579,6 +580,7 @@ def test_get_credential_report(): 'get_credential_report_result']['content'].encode('ascii')).decode('ascii') report.should.match(r'.*my-user.*') + @mock_iam def test_boto3_get_credential_report(): conn = boto3.client('iam', region_name='us-east-1') @@ -780,12 +782,24 @@ def test_get_account_authorization_details(): conn.create_instance_profile(InstanceProfileName='ipn') conn.add_role_to_instance_profile(InstanceProfileName='ipn', RoleName='my-role') + conn.tag_role(RoleName='my-role', Tags=[ + { + 'Key': 'somekey', + 'Value': 'somevalue' + }, + { + 'Key': 'someotherkey', + 'Value': 'someothervalue' + } + ]) + result = 
conn.get_account_authorization_details(Filter=['Role']) assert len(result['RoleDetailList']) == 1 assert len(result['UserDetailList']) == 0 assert len(result['GroupDetailList']) == 0 assert len(result['Policies']) == 0 assert len(result['RoleDetailList'][0]['InstanceProfileList']) == 1 + assert len(result['RoleDetailList'][0]['Tags']) == 2 result = conn.get_account_authorization_details(Filter=['User']) assert len(result['RoleDetailList']) == 0 @@ -872,6 +886,7 @@ def test_signing_certs(): with assert_raises(ClientError): client.delete_signing_certificate(UserName='notauser', CertificateId=cert_id) + @mock_iam() def test_create_saml_provider(): conn = boto3.client('iam', region_name='us-east-1') @@ -881,6 +896,7 @@ def test_create_saml_provider(): ) response['SAMLProviderArn'].should.equal("arn:aws:iam::123456789012:saml-provider/TestSAMLProvider") + @mock_iam() def test_get_saml_provider(): conn = boto3.client('iam', region_name='us-east-1') @@ -893,6 +909,7 @@ def test_get_saml_provider(): ) response['SAMLMetadataDocument'].should.equal('a' * 1024) + @mock_iam() def test_list_saml_providers(): conn = boto3.client('iam', region_name='us-east-1') @@ -903,6 +920,7 @@ def test_list_saml_providers(): response = conn.list_saml_providers() response['SAMLProviderList'][0]['Arn'].should.equal("arn:aws:iam::123456789012:saml-provider/TestSAMLProvider") + @mock_iam() def test_delete_saml_provider(): conn = boto3.client('iam', region_name='us-east-1') @@ -929,3 +947,178 @@ def test_delete_saml_provider(): # Verify that it's not in the list: resp = conn.list_signing_certificates(UserName='testing') assert not resp['Certificates'] + + +@mock_iam() +def test_tag_role(): + """Tests both the tag_role and get_role_tags capability""" + conn = boto3.client('iam', region_name='us-east-1') + conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="{}") + + # Get without tags: + role = conn.get_role(RoleName='my-role')['Role'] + assert not role.get('Tags') + + # With proper tag 
values: + conn.tag_role(RoleName='my-role', Tags=[ + { + 'Key': 'somekey', + 'Value': 'somevalue' + }, + { + 'Key': 'someotherkey', + 'Value': 'someothervalue' + } + ]) + + # Get role: + role = conn.get_role(RoleName='my-role')['Role'] + assert len(role['Tags']) == 2 + assert role['Tags'][0]['Key'] == 'somekey' + assert role['Tags'][0]['Value'] == 'somevalue' + assert role['Tags'][1]['Key'] == 'someotherkey' + assert role['Tags'][1]['Value'] == 'someothervalue' + + # Same -- but for list_role_tags: + tags = conn.list_role_tags(RoleName='my-role') + assert len(tags['Tags']) == 2 + assert role['Tags'][0]['Key'] == 'somekey' + assert role['Tags'][0]['Value'] == 'somevalue' + assert role['Tags'][1]['Key'] == 'someotherkey' + assert role['Tags'][1]['Value'] == 'someothervalue' + assert not tags['IsTruncated'] + assert not tags.get('Marker') + + # Test pagination: + tags = conn.list_role_tags(RoleName='my-role', MaxItems=1) + assert len(tags['Tags']) == 1 + assert tags['IsTruncated'] + assert tags['Tags'][0]['Key'] == 'somekey' + assert tags['Tags'][0]['Value'] == 'somevalue' + assert tags['Marker'] == '1' + + tags = conn.list_role_tags(RoleName='my-role', Marker=tags['Marker']) + assert len(tags['Tags']) == 1 + assert tags['Tags'][0]['Key'] == 'someotherkey' + assert tags['Tags'][0]['Value'] == 'someothervalue' + assert not tags['IsTruncated'] + assert not tags.get('Marker') + + # Test updating an existing tag: + conn.tag_role(RoleName='my-role', Tags=[ + { + 'Key': 'somekey', + 'Value': 'somenewvalue' + } + ]) + tags = conn.list_role_tags(RoleName='my-role') + assert len(tags['Tags']) == 2 + assert tags['Tags'][0]['Key'] == 'somekey' + assert tags['Tags'][0]['Value'] == 'somenewvalue' + + # Empty is good: + conn.tag_role(RoleName='my-role', Tags=[ + { + 'Key': 'somekey', + 'Value': '' + } + ]) + tags = conn.list_role_tags(RoleName='my-role') + assert len(tags['Tags']) == 2 + assert tags['Tags'][0]['Key'] == 'somekey' + assert tags['Tags'][0]['Value'] == '' + + # Test 
creating tags with invalid values: + # With more than 50 tags: + with assert_raises(ClientError) as ce: + too_many_tags = list(map(lambda x: {'Key': str(x), 'Value': str(x)}, range(0, 51))) + conn.tag_role(RoleName='my-role', Tags=too_many_tags) + assert 'failed to satisfy constraint: Member must have length less than or equal to 50.' \ + in ce.exception.response['Error']['Message'] + + # With a duplicate tag: + with assert_raises(ClientError) as ce: + conn.tag_role(RoleName='my-role', Tags=[{'Key': '0', 'Value': ''}, {'Key': '0', 'Value': ''}]) + assert 'Duplicate tag keys found. Please note that Tag keys are case insensitive.' \ + in ce.exception.response['Error']['Message'] + + # Duplicate tag with different casing: + with assert_raises(ClientError) as ce: + conn.tag_role(RoleName='my-role', Tags=[{'Key': 'a', 'Value': ''}, {'Key': 'A', 'Value': ''}]) + assert 'Duplicate tag keys found. Please note that Tag keys are case insensitive.' \ + in ce.exception.response['Error']['Message'] + + # With a really big key: + with assert_raises(ClientError) as ce: + conn.tag_role(RoleName='my-role', Tags=[{'Key': '0' * 129, 'Value': ''}]) + assert 'Member must have length less than or equal to 128.' in ce.exception.response['Error']['Message'] + + # With a really big value: + with assert_raises(ClientError) as ce: + conn.tag_role(RoleName='my-role', Tags=[{'Key': '0', 'Value': '0' * 257}]) + assert 'Member must have length less than or equal to 256.' 
in ce.exception.response['Error']['Message'] + + # With an invalid character: + with assert_raises(ClientError) as ce: + conn.tag_role(RoleName='my-role', Tags=[{'Key': 'NOWAY!', 'Value': ''}]) + assert 'Member must satisfy regular expression pattern: [\\p{L}\\p{Z}\\p{N}_.:/=+\\-@]+' \ + in ce.exception.response['Error']['Message'] + + # With a role that doesn't exist: + with assert_raises(ClientError): + conn.tag_role(RoleName='notarole', Tags=[{'Key': 'some', 'Value': 'value'}]) + + +@mock_iam +def test_untag_role(): + conn = boto3.client('iam', region_name='us-east-1') + conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="{}") + + # With proper tag values: + conn.tag_role(RoleName='my-role', Tags=[ + { + 'Key': 'somekey', + 'Value': 'somevalue' + }, + { + 'Key': 'someotherkey', + 'Value': 'someothervalue' + } + ]) + + # Remove them: + conn.untag_role(RoleName='my-role', TagKeys=['somekey']) + tags = conn.list_role_tags(RoleName='my-role') + assert len(tags['Tags']) == 1 + assert tags['Tags'][0]['Key'] == 'someotherkey' + assert tags['Tags'][0]['Value'] == 'someothervalue' + + # And again: + conn.untag_role(RoleName='my-role', TagKeys=['someotherkey']) + tags = conn.list_role_tags(RoleName='my-role') + assert not tags['Tags'] + + # Test removing tags with invalid values: + # With more than 50 tags: + with assert_raises(ClientError) as ce: + conn.untag_role(RoleName='my-role', TagKeys=[str(x) for x in range(0, 51)]) + assert 'failed to satisfy constraint: Member must have length less than or equal to 50.' \ + in ce.exception.response['Error']['Message'] + assert 'tagKeys' in ce.exception.response['Error']['Message'] + + # With a really big key: + with assert_raises(ClientError) as ce: + conn.untag_role(RoleName='my-role', TagKeys=['0' * 129]) + assert 'Member must have length less than or equal to 128.' 
in ce.exception.response['Error']['Message'] + assert 'tagKeys' in ce.exception.response['Error']['Message'] + + # With an invalid character: + with assert_raises(ClientError) as ce: + conn.untag_role(RoleName='my-role', TagKeys=['NOWAY!']) + assert 'Member must satisfy regular expression pattern: [\\p{L}\\p{Z}\\p{N}_.:/=+\\-@]+' \ + in ce.exception.response['Error']['Message'] + assert 'tagKeys' in ce.exception.response['Error']['Message'] + + # With a role that doesn't exist: + with assert_raises(ClientError): + conn.untag_role(RoleName='notarole', TagKeys=['somevalue']) From dfceab9bf0f54a3cc8738322f3d4d00a6dfdf444 Mon Sep 17 00:00:00 2001 From: Mike Grima Date: Mon, 4 Feb 2019 13:44:01 -0800 Subject: [PATCH 100/175] Some fixes to get_account_authorization_details --- moto/iam/responses.py | 63 +++++++++++++++++++++++++------------- tests/test_iam/test_iam.py | 40 +++++++++++++++++------- 2 files changed, 71 insertions(+), 32 deletions(-) diff --git a/moto/iam/responses.py b/moto/iam/responses.py index aeeb54936..818c6de36 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -554,7 +554,8 @@ class IamResponse(BaseResponse): policies=account_details['managed_policies'], users=account_details['users'], groups=account_details['groups'], - roles=account_details['roles'] + roles=account_details['roles'], + get_groups_for_user=iam_backend.get_groups_for_user ) def create_saml_provider(self): @@ -1499,8 +1500,19 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """ {% for user in users %} - - + + {% for group in get_groups_for_user(user.name) %} + {{ group.name }} + {% endfor %} + + + {% for policy in user.managed_policies %} + + {{ user.managed_policies[policy].name }} + {{ policy }} + + {% endfor %} + {{ user.id }} {{ user.path }} {{ user.name }} @@ -1514,25 +1526,39 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """ {{ group.id }} - {% for policy in group.managed_policies %} - - {{ policy.name }} - {{ policy.arn }} - + {% for policy_arn in 
group.managed_policies %} + + {{ group.managed_policies[policy_arn].name }} + {{ policy_arn }} + {% endfor %} {{ group.name }} {{ group.path }} {{ group.arn }} {{ group.create_date }} - + + {% for policy in group.policies %} + + {{ policy }} + {{ group.get_policy(policy) }} + + {% endfor %} + {% endfor %} {% for role in roles %} - + + {% for inline_policy in role.policies %} + + {{ inline_policy }} + {{ role.policies[inline_policy] }} + + {% endfor %} + {% for policy in role.managed_policies %} @@ -1589,19 +1615,14 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """{{ policy.id }} {{ policy.path }} + {% for policy_version in policy.versions %} - - {"Version":"2012-10-17","Statement":{"Effect":"Allow", - "Action":["iam:CreatePolicy","iam:CreatePolicyVersion", - "iam:DeletePolicy","iam:DeletePolicyVersion","iam:GetPolicy", - "iam:GetPolicyVersion","iam:ListPolicies", - "iam:ListPolicyVersions","iam:SetDefaultPolicyVersion"], - "Resource":"*"}} - - true - v1 - 2012-05-09T16:27:11Z + {{ policy_version.document }} + {{ policy_version.is_default }} + {{ policy_version.version_id }} + {{ policy_version.create_datetime }} + {% endfor %} {{ policy.arn }} 1 diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index 15364928a..ac538649c 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -759,6 +759,17 @@ def test_get_access_key_last_used(): @mock_iam def test_get_account_authorization_details(): import json + test_policy = json.dumps({ + "Version": "2012-10-17", + "Statement": [ + { + "Action": "s3:ListBucket", + "Resource": "*", + "Effect": "Allow", + } + ] + }) + conn = boto3.client('iam', region_name='us-east-1') conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") conn.create_user(Path='/', UserName='testUser') @@ -766,22 +777,22 @@ def test_get_account_authorization_details(): conn.create_policy( PolicyName='testPolicy', Path='/', - PolicyDocument=json.dumps({ - "Version": "2012-10-17", - 
"Statement": [ - { - "Action": "s3:ListBucket", - "Resource": "*", - "Effect": "Allow", - } - ] - }), + PolicyDocument=test_policy, Description='Test Policy' ) + # Attach things to the user and group: + conn.put_user_policy(UserName='testUser', PolicyName='testPolicy', PolicyDocument=test_policy) + conn.put_group_policy(GroupName='testGroup', PolicyName='testPolicy', PolicyDocument=test_policy) + + conn.attach_user_policy(UserName='testUser', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy') + conn.attach_group_policy(GroupName='testGroup', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy') + + conn.add_user_to_group(UserName='testUser', GroupName='testGroup') + + # Add things to the role: conn.create_instance_profile(InstanceProfileName='ipn') conn.add_role_to_instance_profile(InstanceProfileName='ipn', RoleName='my-role') - conn.tag_role(RoleName='my-role', Tags=[ { 'Key': 'somekey', @@ -792,6 +803,7 @@ def test_get_account_authorization_details(): 'Value': 'someothervalue' } ]) + conn.put_role_policy(RoleName='my-role', PolicyName='test-policy', PolicyDocument=test_policy) result = conn.get_account_authorization_details(Filter=['Role']) assert len(result['RoleDetailList']) == 1 @@ -800,10 +812,13 @@ def test_get_account_authorization_details(): assert len(result['Policies']) == 0 assert len(result['RoleDetailList'][0]['InstanceProfileList']) == 1 assert len(result['RoleDetailList'][0]['Tags']) == 2 + assert len(result['RoleDetailList'][0]['RolePolicyList']) == 1 result = conn.get_account_authorization_details(Filter=['User']) assert len(result['RoleDetailList']) == 0 assert len(result['UserDetailList']) == 1 + assert len(result['UserDetailList'][0]['GroupList']) == 1 + assert len(result['UserDetailList'][0]['AttachedManagedPolicies']) == 1 assert len(result['GroupDetailList']) == 0 assert len(result['Policies']) == 0 @@ -811,6 +826,8 @@ def test_get_account_authorization_details(): assert len(result['RoleDetailList']) == 0 assert 
len(result['UserDetailList']) == 0 assert len(result['GroupDetailList']) == 1 + assert len(result['GroupDetailList'][0]['GroupPolicyList']) == 1 + assert len(result['GroupDetailList'][0]['AttachedManagedPolicies']) == 1 assert len(result['Policies']) == 0 result = conn.get_account_authorization_details(Filter=['LocalManagedPolicy']) @@ -818,6 +835,7 @@ def test_get_account_authorization_details(): assert len(result['UserDetailList']) == 0 assert len(result['GroupDetailList']) == 0 assert len(result['Policies']) == 1 + assert len(result['Policies'][0]['PolicyVersionList']) == 1 # Check for greater than 1 since this should always be greater than one but might change. # See iam/aws_managed_policies.py From 94672799cf735aa0079fd27e448e2d2bf6596e7e Mon Sep 17 00:00:00 2001 From: Mike Grima Date: Thu, 7 Feb 2019 17:32:31 -0800 Subject: [PATCH 101/175] Fixed a bug in get_account_authorization_details for attached managed policies. Fixed a bug in the XML template for `get_account_authorization_details` where attached managed policies were empty strings. 
--- moto/iam/responses.py | 6 +++--- tests/test_iam/test_iam.py | 11 +++++++++++ 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/moto/iam/responses.py b/moto/iam/responses.py index 818c6de36..e3cc4b90b 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -1560,10 +1560,10 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """ - {% for policy in role.managed_policies %} + {% for policy_arn in role.managed_policies %} - {{ policy.name }} - {{ policy.arn }} + {{ role.managed_policies[policy_arn].name }} + {{ policy_arn }} {% endfor %} diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index ac538649c..d5f1bb4f9 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -804,6 +804,7 @@ def test_get_account_authorization_details(): } ]) conn.put_role_policy(RoleName='my-role', PolicyName='test-policy', PolicyDocument=test_policy) + conn.attach_role_policy(RoleName='my-role', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy') result = conn.get_account_authorization_details(Filter=['Role']) assert len(result['RoleDetailList']) == 1 @@ -813,6 +814,10 @@ def test_get_account_authorization_details(): assert len(result['RoleDetailList'][0]['InstanceProfileList']) == 1 assert len(result['RoleDetailList'][0]['Tags']) == 2 assert len(result['RoleDetailList'][0]['RolePolicyList']) == 1 + assert len(result['RoleDetailList'][0]['AttachedManagedPolicies']) == 1 + assert result['RoleDetailList'][0]['AttachedManagedPolicies'][0]['PolicyName'] == 'testPolicy' + assert result['RoleDetailList'][0]['AttachedManagedPolicies'][0]['PolicyArn'] == \ + 'arn:aws:iam::123456789012:policy/testPolicy' result = conn.get_account_authorization_details(Filter=['User']) assert len(result['RoleDetailList']) == 0 @@ -821,6 +826,9 @@ def test_get_account_authorization_details(): assert len(result['UserDetailList'][0]['AttachedManagedPolicies']) == 1 assert len(result['GroupDetailList']) == 0 assert len(result['Policies']) == 0 + assert 
result['UserDetailList'][0]['AttachedManagedPolicies'][0]['PolicyName'] == 'testPolicy' + assert result['UserDetailList'][0]['AttachedManagedPolicies'][0]['PolicyArn'] == \ + 'arn:aws:iam::123456789012:policy/testPolicy' result = conn.get_account_authorization_details(Filter=['Group']) assert len(result['RoleDetailList']) == 0 @@ -829,6 +837,9 @@ def test_get_account_authorization_details(): assert len(result['GroupDetailList'][0]['GroupPolicyList']) == 1 assert len(result['GroupDetailList'][0]['AttachedManagedPolicies']) == 1 assert len(result['Policies']) == 0 + assert result['GroupDetailList'][0]['AttachedManagedPolicies'][0]['PolicyName'] == 'testPolicy' + assert result['GroupDetailList'][0]['AttachedManagedPolicies'][0]['PolicyArn'] == \ + 'arn:aws:iam::123456789012:policy/testPolicy' result = conn.get_account_authorization_details(Filter=['LocalManagedPolicy']) assert len(result['RoleDetailList']) == 0 From f035b9613d750131b71f3cb4468e0260b6d7351d Mon Sep 17 00:00:00 2001 From: Greg Sterin Date: Thu, 14 Feb 2019 18:10:01 -0800 Subject: [PATCH 102/175] support a bit more of the dynamoDB ConditionExpression syntax --- moto/dynamodb2/responses.py | 109 +++++++++++++++++--------- tests/test_dynamodb2/test_dynamodb.py | 107 +++++++++++++++++++++++++ 2 files changed, 181 insertions(+), 35 deletions(-) diff --git a/moto/dynamodb2/responses.py b/moto/dynamodb2/responses.py index 60886e171..bd09b6890 100644 --- a/moto/dynamodb2/responses.py +++ b/moto/dynamodb2/responses.py @@ -31,6 +31,68 @@ def get_empty_str_error(): )) +def condition_expression_to_expected(condition_expression, expression_attribute_names, expression_attribute_values): + """ + Limited condition expression syntax parsing. + Supports Global Negation ex: NOT(inner expressions). + Supports simple AND conditions ex: cond_a AND cond_b and cond_c. + Atomic expressions supported are attribute_exists(key), attribute_not_exists(key) and #key = :value. 
+ """ + expected = {} + if condition_expression and 'OR' not in condition_expression: + reverse_re = re.compile('^NOT\s*\((.*)\)$') + reverse_m = reverse_re.match(condition_expression.strip()) + + reverse = False + if reverse_m: + reverse = True + condition_expression = reverse_m.group(1) + + cond_items = [c.strip() for c in condition_expression.split('AND')] + if cond_items: + overwrite = False + exists_re = re.compile('^attribute_exists\s*\((.*)\)$') + not_exists_re = re.compile( + '^attribute_not_exists\s*\((.*)\)$') + equals_re= re.compile('^(#?\w+)\s*=\s*(\:?\w+)') + + for cond in cond_items: + exists_m = exists_re.match(cond) + not_exists_m = not_exists_re.match(cond) + equals_m = equals_re.match(cond) + + if exists_m: + attribute_name = expression_attribute_names_lookup(exists_m.group(1), expression_attribute_names) + expected[attribute_name] = {'Exists': True if not reverse else False} + elif not_exists_m: + attribute_name = expression_attribute_names_lookup(not_exists_m.group(1), expression_attribute_names) + expected[attribute_name] = {'Exists': False if not reverse else True} + elif equals_m: + attribute_name = expression_attribute_names_lookup(equals_m.group(1), expression_attribute_names) + attribute_value = expression_attribute_values_lookup(equals_m.group(2), expression_attribute_values) + expected[attribute_name] = { + 'AttributeValueList': [attribute_value], + 'ComparisonOperator': 'EQ' if not reverse else 'NEQ'} + + return expected + + +def expression_attribute_names_lookup(attribute_name, expression_attribute_names): + if attribute_name.startswith('#') and attribute_name in expression_attribute_names: + return expression_attribute_names[attribute_name] + else: + return attribute_name + + +def expression_attribute_values_lookup(attribute_value, expression_attribute_values): + if isinstance(attribute_value, six.string_types) and \ + attribute_value.startswith(':') and\ + attribute_value in expression_attribute_values: + return 
expression_attribute_values[attribute_value] + else: + return attribute_value + + class DynamoHandler(BaseResponse): def get_endpoint_name(self, headers): @@ -220,24 +282,13 @@ class DynamoHandler(BaseResponse): # expression if not expected: condition_expression = self.body.get('ConditionExpression') - if condition_expression and 'OR' not in condition_expression: - cond_items = [c.strip() - for c in condition_expression.split('AND')] - - if cond_items: - expected = {} - overwrite = False - exists_re = re.compile('^attribute_exists\s*\((.*)\)$') - not_exists_re = re.compile( - '^attribute_not_exists\s*\((.*)\)$') - - for cond in cond_items: - exists_m = exists_re.match(cond) - not_exists_m = not_exists_re.match(cond) - if exists_m: - expected[exists_m.group(1)] = {'Exists': True} - elif not_exists_m: - expected[not_exists_m.group(1)] = {'Exists': False} + expression_attribute_names = self.body.get('ExpressionAttributeNames', {}) + expression_attribute_values = self.body.get('ExpressionAttributeValues', {}) + expected = condition_expression_to_expected(condition_expression, + expression_attribute_names, + expression_attribute_values) + if expected: + overwrite = False try: result = self.dynamodb_backend.put_item(name, item, expected, overwrite) @@ -590,23 +641,11 @@ class DynamoHandler(BaseResponse): # expression if not expected: condition_expression = self.body.get('ConditionExpression') - if condition_expression and 'OR' not in condition_expression: - cond_items = [c.strip() - for c in condition_expression.split('AND')] - - if cond_items: - expected = {} - exists_re = re.compile('^attribute_exists\s*\((.*)\)$') - not_exists_re = re.compile( - '^attribute_not_exists\s*\((.*)\)$') - - for cond in cond_items: - exists_m = exists_re.match(cond) - not_exists_m = not_exists_re.match(cond) - if exists_m: - expected[exists_m.group(1)] = {'Exists': True} - elif not_exists_m: - expected[not_exists_m.group(1)] = {'Exists': False} + expression_attribute_names = 
self.body.get('ExpressionAttributeNames', {}) + expression_attribute_values = self.body.get('ExpressionAttributeValues', {}) + expected = condition_expression_to_expected(condition_expression, + expression_attribute_names, + expression_attribute_values) # Support spaces between operators in an update expression # E.g. `a = b + c` -> `a=b+c` diff --git a/tests/test_dynamodb2/test_dynamodb.py b/tests/test_dynamodb2/test_dynamodb.py index ea9be19c1..3d6f1de65 100644 --- a/tests/test_dynamodb2/test_dynamodb.py +++ b/tests/test_dynamodb2/test_dynamodb.py @@ -1505,3 +1505,110 @@ def test_dynamodb_streams_2(): assert 'LatestStreamLabel' in resp['TableDescription'] assert 'LatestStreamArn' in resp['TableDescription'] +@mock_dynamodb2 +def test_condition_expressions(): + client = boto3.client('dynamodb', region_name='us-east-1') + dynamodb = boto3.resource('dynamodb', region_name='us-east-1') + + # Create the DynamoDB table. + client.create_table( + TableName='test1', + AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], + KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], + ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123} + ) + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 'app': {'S': 'app1'}, + 'match': {'S': 'match'}, + 'existing': {'S': 'existing'}, + } + ) + + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 'app': {'S': 'app1'}, + 'match': {'S': 'match'}, + 'existing': {'S': 'existing'}, + }, + ConditionExpression='attribute_exists(#existing) AND attribute_not_exists(#nonexistent) AND #match = :match', + ExpressionAttributeNames={ + '#existing': 'existing', + '#nonexistent': 'nope', + '#match': 'match', + }, + ExpressionAttributeValues={ + ':match': {'S': 'match'} + } + ) + + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 
'app': {'S': 'app1'}, + 'match': {'S': 'match'}, + 'existing': {'S': 'existing'}, + }, + ConditionExpression='NOT(attribute_exists(#nonexistent1) AND attribute_exists(#nonexistent2))', + ExpressionAttributeNames={ + '#nonexistent1': 'nope', + '#nonexistent2': 'nope2' + } + ) + + with assert_raises(client.exceptions.ConditionalCheckFailedException): + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 'app': {'S': 'app1'}, + 'match': {'S': 'match'}, + 'existing': {'S': 'existing'}, + }, + ConditionExpression='attribute_exists(#nonexistent1) AND attribute_exists(#nonexistent2)', + ExpressionAttributeNames={ + '#nonexistent1': 'nope', + '#nonexistent2': 'nope2' + } + ) + + with assert_raises(client.exceptions.ConditionalCheckFailedException): + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 'app': {'S': 'app1'}, + 'match': {'S': 'match'}, + 'existing': {'S': 'existing'}, + }, + ConditionExpression='NOT(attribute_not_exists(#nonexistent1) AND attribute_not_exists(#nonexistent2))', + ExpressionAttributeNames={ + '#nonexistent1': 'nope', + '#nonexistent2': 'nope2' + } + ) + + with assert_raises(client.exceptions.ConditionalCheckFailedException): + client.put_item( + TableName='test1', + Item={ + 'client': {'S': 'client1'}, + 'app': {'S': 'app1'}, + 'match': {'S': 'match'}, + 'existing': {'S': 'existing'}, + }, + ConditionExpression='attribute_exists(#existing) AND attribute_not_exists(#nonexistent) AND #match = :match', + ExpressionAttributeNames={ + '#existing': 'existing', + '#nonexistent': 'nope', + '#match': 'match', + }, + ExpressionAttributeValues={ + ':match': {'S': 'match2'} + } + ) From c1232a7a23b4022c008f273ea71e74b590056773 Mon Sep 17 00:00:00 2001 From: Greg Sterin Date: Fri, 15 Feb 2019 13:49:42 -0800 Subject: [PATCH 103/175] linting --- moto/dynamodb2/responses.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/moto/dynamodb2/responses.py b/moto/dynamodb2/responses.py index 
bd09b6890..49095f09c 100644 --- a/moto/dynamodb2/responses.py +++ b/moto/dynamodb2/responses.py @@ -50,11 +50,10 @@ def condition_expression_to_expected(condition_expression, expression_attribute_ cond_items = [c.strip() for c in condition_expression.split('AND')] if cond_items: - overwrite = False exists_re = re.compile('^attribute_exists\s*\((.*)\)$') not_exists_re = re.compile( '^attribute_not_exists\s*\((.*)\)$') - equals_re= re.compile('^(#?\w+)\s*=\s*(\:?\w+)') + equals_re = re.compile('^(#?\w+)\s*=\s*(\:?\w+)') for cond in cond_items: exists_m = exists_re.match(cond) From 4c6f08d463afca69113a8d6b4beb6fd110fa42d0 Mon Sep 17 00:00:00 2001 From: William Rubel Date: Sat, 16 Feb 2019 09:27:23 -0600 Subject: [PATCH 104/175] Update kms and lambda to work with terraform --- moto/awslambda/models.py | 8 ++++ moto/awslambda/responses.py | 17 ++++++++- moto/kms/models.py | 16 +++++++- moto/kms/responses.py | 22 +++++++++++ tests/test_awslambda/test_lambda.py | 34 +++++++++++++++++ tests/test_kms/test_kms.py | 57 +++++++++++++++++++++++++++++ 6 files changed, 152 insertions(+), 2 deletions(-) diff --git a/moto/awslambda/models.py b/moto/awslambda/models.py index a37a15e27..9fc41c11e 100644 --- a/moto/awslambda/models.py +++ b/moto/awslambda/models.py @@ -500,6 +500,11 @@ class LambdaStorage(object): except ValueError: return self._functions[name]['latest'] + def list_versions_by_function(self, name): + if name not in self._functions: + return None + return [self._functions[name]['latest']] + def get_arn(self, arn): return self._arns.get(arn, None) @@ -607,6 +612,9 @@ class LambdaBackend(BaseBackend): def get_function(self, function_name, qualifier=None): return self._lambdas.get_function(function_name, qualifier) + def list_versions_by_function(self, function_name): + return self._lambdas.list_versions_by_function(function_name) + def get_function_by_arn(self, function_arn): return self._lambdas.get_arn(function_arn) diff --git a/moto/awslambda/responses.py 
b/moto/awslambda/responses.py index 1a9a4df83..bcd2da903 100644 --- a/moto/awslambda/responses.py +++ b/moto/awslambda/responses.py @@ -52,7 +52,11 @@ class LambdaResponse(BaseResponse): self.setup_class(request, full_url, headers) if request.method == 'GET': # This is ListVersionByFunction - raise ValueError("Cannot handle request") + + path = request.path if hasattr(request, 'path') else path_url(request.url) + function_name = path.split('/')[-2] + return self._list_versions_by_function(function_name) + elif request.method == 'POST': return self._publish_function(request, full_url, headers) else: @@ -151,6 +155,17 @@ class LambdaResponse(BaseResponse): return 200, {}, json.dumps(result) + def _list_versions_by_function(self, function_name): + result = { + 'Versions': [] + } + + for fn in self.lambda_backend.list_versions_by_function(function_name): + json_data = fn.get_configuration() + result['Versions'].append(json_data) + + return 200, {}, json.dumps(result) + def _create_function(self, request, full_url, headers): try: fn = self.lambda_backend.create_function(self.json_body) diff --git a/moto/kms/models.py b/moto/kms/models.py index bb39d1b24..fb4040617 100644 --- a/moto/kms/models.py +++ b/moto/kms/models.py @@ -21,6 +21,7 @@ class Key(BaseModel): self.account_id = "0123456789012" self.key_rotation_status = False self.deletion_date = None + self.tags = {} @property def physical_resource_id(self): @@ -35,7 +36,7 @@ class Key(BaseModel): "KeyMetadata": { "AWSAccountId": self.account_id, "Arn": self.arn, - "CreationDate": "2015-01-01 00:00:00", + "CreationDate": datetime.strftime(datetime.utcnow(), "%Y-%m-%dT%H:%M:%SZ"), "Description": self.description, "Enabled": self.enabled, "KeyId": self.id, @@ -84,6 +85,19 @@ class KmsBackend(BaseBackend): self.keys[key.id] = key return key + def update_key_description(self, key_id, description): + key = self.keys[self.get_key_id(key_id)] + key.description = description + + def tag_resource(self, key_id, tags): + key = 
self.keys[self.get_key_id(key_id)] + key.tags = tags + + + def list_resource_tags(self, key_id): + key = self.keys[self.get_key_id(key_id)] + return key.tags + def delete_key(self, key_id): if key_id in self.keys: if key_id in self.key_to_aliases: diff --git a/moto/kms/responses.py b/moto/kms/responses.py index 5883f51ec..b66ca267c 100644 --- a/moto/kms/responses.py +++ b/moto/kms/responses.py @@ -38,6 +38,28 @@ class KmsResponse(BaseResponse): policy, key_usage, description, self.region) return json.dumps(key.to_dict()) + def update_key_description(self): + key_id = self.parameters.get('KeyId') + description = self.parameters.get('Description') + + self.kms_backend.update_key_description(key_id,description) + return json.dumps(None) + + def tag_resource(self): + key_id = self.parameters.get('KeyId') + tags = self.parameters.get('Tags') + self.kms_backend.tag_resource(key_id,tags) + return json.dumps({}) + + def list_resource_tags(self): + key_id = self.parameters.get('KeyId') + tags = self.kms_backend.list_resource_tags(key_id) + return json.dumps({ + "Tags": tags, + "NextMarker": None, + "Truncated": False, + }) + def describe_key(self): key_id = self.parameters.get('KeyId') try: diff --git a/tests/test_awslambda/test_lambda.py b/tests/test_awslambda/test_lambda.py index 8ea9cc6fd..9c5120f51 100644 --- a/tests/test_awslambda/test_lambda.py +++ b/tests/test_awslambda/test_lambda.py @@ -819,3 +819,37 @@ def get_function_policy(): assert isinstance(response['Policy'], str) res = json.loads(response['Policy']) assert res['Statement'][0]['Action'] == 'lambda:InvokeFunction' + + + +@mock_lambda +@mock_s3 +def test_list_versions_by_function(): + s3_conn = boto3.client('s3', 'us-west-2') + s3_conn.create_bucket(Bucket='test-bucket') + + zip_content = get_test_zip_file2() + s3_conn.put_object(Bucket='test-bucket', Key='test.zip', Body=zip_content) + conn = boto3.client('lambda', 'us-west-2') + + conn.create_function( + FunctionName='testFunction', + Runtime='python2.7', + 
Role='test-iam-role', + Handler='lambda_function.lambda_handler', + Code={ + 'S3Bucket': 'test-bucket', + 'S3Key': 'test.zip', + }, + Description='test lambda function', + Timeout=3, + MemorySize=128, + Publish=True, + ) + + conn.publish_version(FunctionName='testFunction') + + versions = conn.list_versions_by_function(FunctionName='testFunction') + + assert versions['Versions'][0]['FunctionArn'] == 'arn:aws:lambda:us-west-2:123456789012:function:testFunction:$LATEST' + diff --git a/tests/test_kms/test_kms.py b/tests/test_kms/test_kms.py index 8bccae27a..520c7262c 100644 --- a/tests/test_kms/test_kms.py +++ b/tests/test_kms/test_kms.py @@ -717,3 +717,60 @@ def test_cancel_key_deletion(): assert result["KeyMetadata"]["Enabled"] == False assert result["KeyMetadata"]["KeyState"] == 'Disabled' assert 'DeletionDate' not in result["KeyMetadata"] + + +@mock_kms +def test_update_key_description(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='old_description') + key_id = key['KeyMetadata']['KeyId'] + + result = client.update_key_description(KeyId=key_id, Description='new_description') + assert 'ResponseMetadata' in result + + +@mock_kms +def test_tag_resource(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='cancel-key-deletion') + response = client.schedule_key_deletion( + KeyId=key['KeyMetadata']['KeyId'] + ) + + keyid = response['KeyId'] + response = client.tag_resource( + KeyId=keyid, + Tags=[ + { + 'TagKey': 'string', + 'TagValue': 'string' + }, + ] + ) + + # Shouldn't have any data, just header + assert len(response.keys()) == 1 + + +@mock_kms +def test_list_resource_tags(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='cancel-key-deletion') + response = client.schedule_key_deletion( + KeyId=key['KeyMetadata']['KeyId'] + ) + + keyid = response['KeyId'] + response = client.tag_resource( + KeyId=keyid, + Tags=[ + { + 
'TagKey': 'string', + 'TagValue': 'string' + }, + ] + ) + + response = client.list_resource_tags(KeyId=keyid) + assert response['Tags'][0]['TagKey'] == 'string' + assert response['Tags'][0]['TagValue'] == 'string' From 0a3ff94e66da2bacb27e8a788034ae2c8d0e29be Mon Sep 17 00:00:00 2001 From: William Rubel Date: Sat, 16 Feb 2019 09:37:27 -0600 Subject: [PATCH 105/175] Update kms and lambda to work with terraform --- moto/kms/models.py | 2 -- moto/kms/responses.py | 4 ++-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/moto/kms/models.py b/moto/kms/models.py index fb4040617..9d13589c1 100644 --- a/moto/kms/models.py +++ b/moto/kms/models.py @@ -64,7 +64,6 @@ class Key(BaseModel): ) key.key_rotation_status = properties['EnableKeyRotation'] key.enabled = properties['Enabled'] - return key def get_cfn_attribute(self, attribute_name): @@ -93,7 +92,6 @@ class KmsBackend(BaseBackend): key = self.keys[self.get_key_id(key_id)] key.tags = tags - def list_resource_tags(self, key_id): key = self.keys[self.get_key_id(key_id)] return key.tags diff --git a/moto/kms/responses.py b/moto/kms/responses.py index b66ca267c..2674f765c 100644 --- a/moto/kms/responses.py +++ b/moto/kms/responses.py @@ -42,13 +42,13 @@ class KmsResponse(BaseResponse): key_id = self.parameters.get('KeyId') description = self.parameters.get('Description') - self.kms_backend.update_key_description(key_id,description) + self.kms_backend.update_key_description(key_id, description) return json.dumps(None) def tag_resource(self): key_id = self.parameters.get('KeyId') tags = self.parameters.get('Tags') - self.kms_backend.tag_resource(key_id,tags) + self.kms_backend.tag_resource(key_id, tags) return json.dumps({}) def list_resource_tags(self): From 6897a118ed59819c911642e946a5378bad53a3c7 Mon Sep 17 00:00:00 2001 From: Domenico Testa Date: Sat, 16 Feb 2019 16:42:00 +0100 Subject: [PATCH 106/175] Fixing S3 copy_key error when using unicode. 
--- moto/s3/responses.py | 5 +++-- tests/test_s3/test_s3.py | 16 ++++++++++++++++ 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/moto/s3/responses.py b/moto/s3/responses.py index 43f690bc8..e4f97cd43 100755 --- a/moto/s3/responses.py +++ b/moto/s3/responses.py @@ -19,7 +19,8 @@ from .exceptions import BucketAlreadyExists, S3ClientError, MissingBucket, Missi MalformedACLError, InvalidNotificationARN, InvalidNotificationEvent from .models import s3_backend, get_canned_acl, FakeGrantee, FakeGrant, FakeAcl, FakeKey, FakeTagging, FakeTagSet, \ FakeTag -from .utils import bucket_name_from_url, metadata_from_headers, parse_region_from_url +from .utils import bucket_name_from_url, clean_key_name, metadata_from_headers, \ + parse_region_from_url from xml.dom import minidom @@ -733,7 +734,7 @@ class ResponseObject(_TemplateEnvironmentMixin): # Copy key # you can have a quoted ?version=abc with a version Id, so work on # we need to parse the unquoted string first - src_key = request.headers.get("x-amz-copy-source") + src_key = clean_key_name(request.headers.get("x-amz-copy-source")) if isinstance(src_key, six.binary_type): src_key = src_key.decode('utf-8') src_key_parsed = urlparse(src_key) diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index 7a53804ff..eea720240 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -418,6 +418,22 @@ def test_copy_key(): "new-key").get_contents_as_string().should.equal(b"some value") +@mock_s3_deprecated +def test_copy_key_with_unicode(): + conn = boto.connect_s3('the_key', 'the_secret') + bucket = conn.create_bucket("foobar") + key = Key(bucket) + key.key = "the-unicode-💩-key" + key.set_contents_from_string("some value") + + bucket.copy_key('new-key', 'foobar', 'the-unicode-💩-key') + + bucket.get_key( + "the-unicode-💩-key").get_contents_as_string().should.equal(b"some value") + bucket.get_key( + "new-key").get_contents_as_string().should.equal(b"some value") + + @mock_s3_deprecated def 
test_copy_key_with_version(): conn = boto.connect_s3('the_key', 'the_secret') From 5372e6840fabdd0394135e647dc07f8c13178a31 Mon Sep 17 00:00:00 2001 From: William Rubel Date: Sat, 16 Feb 2019 12:37:46 -0600 Subject: [PATCH 107/175] Increasing code coverage --- moto/awslambda/responses.py | 8 +++-- tests/test_awslambda/test_lambda.py | 53 +++++++++++++++++++++++++++++ 2 files changed, 58 insertions(+), 3 deletions(-) diff --git a/moto/awslambda/responses.py b/moto/awslambda/responses.py index bcd2da903..d4eb73bc3 100644 --- a/moto/awslambda/responses.py +++ b/moto/awslambda/responses.py @@ -160,9 +160,11 @@ class LambdaResponse(BaseResponse): 'Versions': [] } - for fn in self.lambda_backend.list_versions_by_function(function_name): - json_data = fn.get_configuration() - result['Versions'].append(json_data) + functions = self.lambda_backend.list_versions_by_function(function_name) + if functions: + for fn in functions: + json_data = fn.get_configuration() + result['Versions'].append(json_data) return 200, {}, json.dumps(result) diff --git a/tests/test_awslambda/test_lambda.py b/tests/test_awslambda/test_lambda.py index 9c5120f51..c05f9f0ac 100644 --- a/tests/test_awslambda/test_lambda.py +++ b/tests/test_awslambda/test_lambda.py @@ -853,3 +853,56 @@ def test_list_versions_by_function(): assert versions['Versions'][0]['FunctionArn'] == 'arn:aws:lambda:us-west-2:123456789012:function:testFunction:$LATEST' + + +@mock_lambda +@mock_s3 +def test_create_function_with_already_exists(): + s3_conn = boto3.client('s3', 'us-west-2') + s3_conn.create_bucket(Bucket='test-bucket') + + zip_content = get_test_zip_file2() + s3_conn.put_object(Bucket='test-bucket', Key='test.zip', Body=zip_content) + conn = boto3.client('lambda', 'us-west-2') + + conn.create_function( + FunctionName='testFunction', + Runtime='python2.7', + Role='test-iam-role', + Handler='lambda_function.lambda_handler', + Code={ + 'S3Bucket': 'test-bucket', + 'S3Key': 'test.zip', + }, + Description='test lambda 
function', + Timeout=3, + MemorySize=128, + Publish=True, + ) + + response = conn.create_function( + FunctionName='testFunction', + Runtime='python2.7', + Role='test-iam-role', + Handler='lambda_function.lambda_handler', + Code={ + 'S3Bucket': 'test-bucket', + 'S3Key': 'test.zip', + }, + Description='test lambda function', + Timeout=3, + MemorySize=128, + Publish=True, + ) + + assert response['FunctionName'] == 'testFunction' + + +@mock_lambda +@mock_s3 +def test_list_versions_by_function_for_nonexistent_function(): + conn = boto3.client('lambda', 'us-west-2') + + versions = conn.list_versions_by_function(FunctionName='testFunction') + + assert len(versions['Versions']) == 0 \ No newline at end of file From 8ad28f8400b6deae46cb666c56758278035a2478 Mon Sep 17 00:00:00 2001 From: William Rubel Date: Sat, 16 Feb 2019 20:53:27 -0600 Subject: [PATCH 108/175] Adding additional tests to increase coverage --- file.tmp | 9 ++++ moto/packages/httpretty/core.py | 60 +++++++++++++++++++++++++++ moto/packages/httpretty/http.py | 3 +- tests/test_packages/__init__.py | 8 ++++ tests/test_packages/test_httpretty.py | 37 +++++++++++++++++ 5 files changed, 116 insertions(+), 1 deletion(-) create mode 100644 file.tmp create mode 100644 tests/test_packages/__init__.py create mode 100644 tests/test_packages/test_httpretty.py diff --git a/file.tmp b/file.tmp new file mode 100644 index 000000000..80053c647 --- /dev/null +++ b/file.tmp @@ -0,0 +1,9 @@ + + AWSTemplateFormatVersion: '2010-09-09' + Description: Simple CloudFormation Test Template + Resources: + S3Bucket: + Type: AWS::S3::Bucket + Properties: + AccessControl: PublicRead + BucketName: cf-test-bucket-1 diff --git a/moto/packages/httpretty/core.py b/moto/packages/httpretty/core.py index 8ad9168a5..168f18431 100644 --- a/moto/packages/httpretty/core.py +++ b/moto/packages/httpretty/core.py @@ -72,6 +72,19 @@ from datetime import datetime from datetime import timedelta from errno import EAGAIN +import logging +from inspect import 
currentframe +import inspect + +logging.basicConfig(filename='/tmp/models.log',level=logging.DEBUG) + +DEBUG=0 + +def get_linenumber(): + cf = currentframe() + return " - "+str(cf.f_back.f_lineno) + + # Some versions of python internally shadowed the # SocketType variable incorrectly https://bugs.python.org/issue20386 BAD_SOCKET_SHADOW = socket.socket != socket.SocketType @@ -155,15 +168,34 @@ class HTTPrettyRequest(BaseHTTPRequestHandler, BaseClass): """ def __init__(self, headers, body=''): + + if DEBUG: + logging.debug('__init__ - ' + ' -caller: ' + str( + inspect.stack()[1][3]) + "-" + get_linenumber()) + logging.debug('headers: '+str(headers)) + # first of all, lets make sure that if headers or body are # unicode strings, it must be converted into a utf-8 encoded # byte string self.raw_headers = utf8(headers.strip()) + + if DEBUG: + logging.debug('raw_headers: '+str(self.raw_headers)) + self.body = utf8(body) + if DEBUG: + logging.debug('body: '+str(self.body)) + # Now let's concatenate the headers with the body, and create # `rfile` based on it self.rfile = StringIO(b'\r\n\r\n'.join([self.raw_headers, self.body])) + + if DEBUG: + logging.debug('rfile: '+str(self.rfile)) + + self.wfile = StringIO() # Creating `wfile` as an empty # StringIO, just to avoid any real # I/O calls @@ -171,6 +203,10 @@ class HTTPrettyRequest(BaseHTTPRequestHandler, BaseClass): # parsing the request line preemptively self.raw_requestline = self.rfile.readline() + if DEBUG: + logging.debug('raw_requestline: '+str(self.raw_requestline)) + + # initiating the error attributes with None self.error_code = None self.error_message = None @@ -182,6 +218,9 @@ class HTTPrettyRequest(BaseHTTPRequestHandler, BaseClass): # making the HTTP method string available as the command self.method = self.command + if DEBUG: + logging.debug('method: '+str(self.method)) + # Now 2 convenient attributes for the HTTPretty API: # `querystring` holds a dictionary with the parsed query string @@ -207,8 +246,23 @@ 
class HTTPrettyRequest(BaseHTTPRequestHandler, BaseClass): ) def parse_querystring(self, qs): + + if DEBUG: + logging.debug('parse_querystring - ' + ' -caller: ' + str( + inspect.stack()[1][3]) + "-" + get_linenumber()) + logging.debug('qs: '+str(qs)) + expanded = unquote_utf8(qs) + + if DEBUG: + logging.debug('expanded: '+str(expanded)) + parsed = parse_qs(expanded) + + if DEBUG: + logging.debug('parsed: '+str(parsed)) + result = {} for k in parsed: result[k] = list(map(decode_utf8, parsed[k])) @@ -218,6 +272,12 @@ class HTTPrettyRequest(BaseHTTPRequestHandler, BaseClass): def parse_request_body(self, body): """ Attempt to parse the post based on the content-type passed. Return the regular body if not """ + if DEBUG: + logging.debug('parse_request_body - ' + ' -caller: ' + str( + inspect.stack()[1][3]) + "-" + get_linenumber()) + logging.debug('body: '+str(body)) + PARSING_FUNCTIONS = { 'application/json': json.loads, 'text/json': json.loads, diff --git a/moto/packages/httpretty/http.py b/moto/packages/httpretty/http.py index 7e9a56885..17d580f5f 100644 --- a/moto/packages/httpretty/http.py +++ b/moto/packages/httpretty/http.py @@ -29,7 +29,6 @@ import re from .compat import BaseClass from .utils import decode_utf8 - STATUSES = { 100: "Continue", 101: "Switching Protocols", @@ -134,6 +133,7 @@ def parse_requestline(s): ... 
ValueError: Not a Request-Line """ + methods = '|'.join(HttpBaseClass.METHODS) m = re.match(r'(' + methods + ')\s+(.*)\s+HTTP/(1.[0|1])', s, re.I) if m: @@ -146,6 +146,7 @@ def last_requestline(sent_data): """ Find the last line in sent_data that can be parsed with parse_requestline """ + for line in reversed(sent_data): try: parse_requestline(decode_utf8(line)) diff --git a/tests/test_packages/__init__.py b/tests/test_packages/__init__.py new file mode 100644 index 000000000..bf582e0b3 --- /dev/null +++ b/tests/test_packages/__init__.py @@ -0,0 +1,8 @@ +from __future__ import unicode_literals + +import logging +# Disable extra logging for tests +logging.getLogger('boto').setLevel(logging.CRITICAL) +logging.getLogger('boto3').setLevel(logging.CRITICAL) +logging.getLogger('botocore').setLevel(logging.CRITICAL) +logging.getLogger('nose').setLevel(logging.CRITICAL) diff --git a/tests/test_packages/test_httpretty.py b/tests/test_packages/test_httpretty.py new file mode 100644 index 000000000..48277a2de --- /dev/null +++ b/tests/test_packages/test_httpretty.py @@ -0,0 +1,37 @@ +# #!/usr/bin/env python +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +import mock + +from moto.packages.httpretty.core import HTTPrettyRequest, fake_gethostname, fake_gethostbyname + + +def test_parse_querystring(): + + core = HTTPrettyRequest(headers='test test HTTP/1.1') + + qs = 'test test' + response = core.parse_querystring(qs) + + assert response == {} + +def test_parse_request_body(): + core = HTTPrettyRequest(headers='test test HTTP/1.1') + + qs = 'test' + response = core.parse_request_body(qs) + + assert response == 'test' + +def test_fake_gethostname(): + + response = fake_gethostname() + + assert response == 'localhost' + +def test_fake_gethostbyname(): + + host = 'test' + response = fake_gethostbyname(host=host) + + assert response == '127.0.0.1' \ No newline at end of file From b29fd4a997a105f6c3a3b19a87852a56e75db65b Mon Sep 17 00:00:00 2001 From: William Rubel 
Date: Sat, 16 Feb 2019 20:53:54 -0600 Subject: [PATCH 109/175] Adding additional tests to increase coverage --- file.tmp | 9 --------- 1 file changed, 9 deletions(-) delete mode 100644 file.tmp diff --git a/file.tmp b/file.tmp deleted file mode 100644 index 80053c647..000000000 --- a/file.tmp +++ /dev/null @@ -1,9 +0,0 @@ - - AWSTemplateFormatVersion: '2010-09-09' - Description: Simple CloudFormation Test Template - Resources: - S3Bucket: - Type: AWS::S3::Bucket - Properties: - AccessControl: PublicRead - BucketName: cf-test-bucket-1 From e35d99ff09023e09b2e252db4ac1e1d58863dfb5 Mon Sep 17 00:00:00 2001 From: William Rubel Date: Sun, 17 Feb 2019 09:25:35 -0600 Subject: [PATCH 110/175] Trying to improve coverage --- file.tmp | 9 ++++++ tests/test_batch/test_batch.py | 59 ++++++++++++++++++++++++++++++++++ 2 files changed, 68 insertions(+) create mode 100644 file.tmp diff --git a/file.tmp b/file.tmp new file mode 100644 index 000000000..80053c647 --- /dev/null +++ b/file.tmp @@ -0,0 +1,9 @@ + + AWSTemplateFormatVersion: '2010-09-09' + Description: Simple CloudFormation Test Template + Resources: + S3Bucket: + Type: AWS::S3::Bucket + Properties: + AccessControl: PublicRead + BucketName: cf-test-bucket-1 diff --git a/tests/test_batch/test_batch.py b/tests/test_batch/test_batch.py index ec24cd911..310ac0b48 100644 --- a/tests/test_batch/test_batch.py +++ b/tests/test_batch/test_batch.py @@ -323,6 +323,54 @@ def test_create_job_queue(): resp.should.contain('jobQueues') len(resp['jobQueues']).should.equal(0) + # Create job queue which already exists + try: + resp = batch_client.create_job_queue( + jobQueueName='test_job_queue', + state='ENABLED', + priority=123, + computeEnvironmentOrder=[ + { + 'order': 123, + 'computeEnvironment': arn + }, + ] + ) + + except ClientError as err: + err.response['Error']['Code'].should.equal('ClientException') + + + # Create job queue with incorrect state + try: + resp = batch_client.create_job_queue( + jobQueueName='test_job_queue2', + 
state='JUNK', + priority=123, + computeEnvironmentOrder=[ + { + 'order': 123, + 'computeEnvironment': arn + }, + ] + ) + + except ClientError as err: + err.response['Error']['Code'].should.equal('ClientException') + + # Create job queue with no compute env + try: + resp = batch_client.create_job_queue( + jobQueueName='test_job_queue3', + state='JUNK', + priority=123, + computeEnvironmentOrder=[ + + ] + ) + + except ClientError as err: + err.response['Error']['Code'].should.equal('ClientException') @mock_ec2 @mock_ecs @@ -397,6 +445,17 @@ def test_update_job_queue(): len(resp['jobQueues']).should.equal(1) resp['jobQueues'][0]['priority'].should.equal(5) + batch_client.update_job_queue( + jobQueue='test_job_queue', + priority=5 + ) + + resp = batch_client.describe_job_queues() + resp.should.contain('jobQueues') + len(resp['jobQueues']).should.equal(1) + resp['jobQueues'][0]['priority'].should.equal(5) + + @mock_ec2 @mock_ecs From 921a993330a96130b12331f29944d93578a8bd2c Mon Sep 17 00:00:00 2001 From: William Rubel Date: Sun, 17 Feb 2019 14:30:43 -0600 Subject: [PATCH 111/175] cleaning up files --- file.tmp | 9 ---- moto/iam/models.py | 13 +++-- moto/iam/responses.py | 79 +++++++++++++++++++++++++++-- moto/packages/httpretty/core.py | 62 +--------------------- moto/packages/httpretty/http.py | 2 - tests/test_awslambda/test_lambda.py | 2 - tests/test_kms/test_kms.py | 1 - 7 files changed, 85 insertions(+), 83 deletions(-) delete mode 100644 file.tmp diff --git a/file.tmp b/file.tmp deleted file mode 100644 index 80053c647..000000000 --- a/file.tmp +++ /dev/null @@ -1,9 +0,0 @@ - - AWSTemplateFormatVersion: '2010-09-09' - Description: Simple CloudFormation Test Template - Resources: - S3Bucket: - Type: AWS::S3::Bucket - Properties: - AccessControl: PublicRead - BucketName: cf-test-bucket-1 diff --git a/moto/iam/models.py b/moto/iam/models.py index 86a5d7a32..a5f40b996 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -51,8 +51,8 @@ class Policy(BaseModel): 
self.default_version_id = default_version_id or 'v1' self.versions = [PolicyVersion(self.arn, document, True)] - self.create_datetime = datetime.now(pytz.utc) - self.update_datetime = datetime.now(pytz.utc) + self.create_datetime = datetime.strftime(datetime.utcnow(), "%Y-%m-%dT%H:%M:%SZ") + self.update_datetime = datetime.strftime(datetime.utcnow(), "%Y-%m-%dT%H:%M:%SZ") class SAMLProvider(BaseModel): @@ -76,7 +76,7 @@ class PolicyVersion(object): self.is_default = is_default self.version_id = 'v1' - self.create_datetime = datetime.now(pytz.utc) + self.create_datetime = datetime.strftime(datetime.utcnow(), "%Y-%m-%dT%H:%M:%SZ") class ManagedPolicy(Policy): @@ -132,8 +132,9 @@ class Role(BaseModel): self.path = path or '/' self.policies = {} self.managed_policies = {} - self.create_date = datetime.now(pytz.utc) + self.create_date = datetime.strftime(datetime.utcnow(), "%Y-%m-%dT%H:%M:%SZ") self.tags = {} + self.description = "" @classmethod def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name): @@ -473,6 +474,10 @@ class IAMBackend(BaseBackend): policy = arns[policy_arn] policy.attach_to(self.get_role(role_name)) + def update_role_description(self, role_name, role_description): + role = self.get_role(role_name) + role.description = role_description + def detach_role_policy(self, policy_arn, role_name): arns = dict((p.arn, p) for p in self.managed_policies.values()) try: diff --git a/moto/iam/responses.py b/moto/iam/responses.py index e3cc4b90b..e624c18c3 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -107,6 +107,10 @@ class IamResponse(BaseResponse): template = self.response_template(LIST_POLICIES_TEMPLATE) return template.render(policies=policies, marker=marker) + def list_entities_for_policy(self): + template = self.response_template(LIST_ENTITIES_FOR_POLICY_TEMPLATE) + return template.render() + def create_role(self): role_name = self._get_param('RoleName') path = self._get_param('Path') @@ -169,6 +173,20 @@ 
class IamResponse(BaseResponse): template = self.response_template(GENERIC_EMPTY_TEMPLATE) return template.render(name="UpdateAssumeRolePolicyResponse") + def update_role_description(self): + role_name = self._get_param('RoleName') + description = self._get_param('Description') + role = iam_backend.update_role_description(role_name,description) + template = self.response_template(UPDATE_ROLE_DESCRIPTION_TEMPLATE) + return template.render(role=role) + + def update_role(self): + role_name = self._get_param('RoleName') + description = self._get_param('Description') + role = iam_backend.update_role_description(role_name,description) + template = self.response_template(UPDATE_ROLE_DESCRIPTION_TEMPLATE) + return template.render(role=role) + def create_policy_version(self): policy_arn = self._get_param('PolicyArn') policy_document = self._get_param('PolicyDocument') @@ -654,6 +672,33 @@ class IamResponse(BaseResponse): template = self.response_template(UNTAG_ROLE_TEMPLATE) return template.render() +LIST_ENTITIES_FOR_POLICY_TEMPLATE = """ + + + + DevRole + + + + + Dev + + + false + + + Alice + + + Bob + + + + + eb358e22-9d1f-11e4-93eb-190ecEXAMPLE + +""" + ATTACH_ROLE_POLICY_TEMPLATE = """ @@ -696,12 +741,12 @@ CREATE_POLICY_TEMPLATE = """ {{ policy.arn }} {{ policy.attachment_count }} - {{ policy.create_datetime.isoformat() }} + {{ policy.create_datetime }} {{ policy.default_version_id }} {{ policy.path }} {{ policy.id }} {{ policy.name }} - {{ policy.update_datetime.isoformat() }} + {{ policy.update_datetime }} @@ -719,8 +764,8 @@ GET_POLICY_TEMPLATE = """ {{ policy.path }} {{ policy.arn }} {{ policy.attachment_count }} - {{ policy.create_datetime.isoformat() }} - {{ policy.update_datetime.isoformat() }} + {{ policy.create_datetime }} + {{ policy.update_datetime }} @@ -898,6 +943,32 @@ GET_ROLE_POLICY_TEMPLATE = """ + + + {{ role.path }} + {{ role.arn }} + {{ role.name }} + {{ role.assume_role_policy_document }} + {{ role.create_date }} + {{ role.id }} + {% if role.tags 
%} + + {% for tag in role.get_tags() %} + + {{ tag['Key'] }} + {{ tag['Value'] }} + + {% endfor %} + + {% endif %} + + + + df37e965-9967-11e1-a4c3-270EXAMPLE04 + +""" + GET_ROLE_TEMPLATE = """ diff --git a/moto/packages/httpretty/core.py b/moto/packages/httpretty/core.py index 168f18431..4eb92108f 100644 --- a/moto/packages/httpretty/core.py +++ b/moto/packages/httpretty/core.py @@ -72,19 +72,6 @@ from datetime import datetime from datetime import timedelta from errno import EAGAIN -import logging -from inspect import currentframe -import inspect - -logging.basicConfig(filename='/tmp/models.log',level=logging.DEBUG) - -DEBUG=0 - -def get_linenumber(): - cf = currentframe() - return " - "+str(cf.f_back.f_lineno) - - # Some versions of python internally shadowed the # SocketType variable incorrectly https://bugs.python.org/issue20386 BAD_SOCKET_SHADOW = socket.socket != socket.SocketType @@ -168,34 +155,15 @@ class HTTPrettyRequest(BaseHTTPRequestHandler, BaseClass): """ def __init__(self, headers, body=''): - - if DEBUG: - logging.debug('__init__ - ' - ' -caller: ' + str( - inspect.stack()[1][3]) + "-" + get_linenumber()) - logging.debug('headers: '+str(headers)) - # first of all, lets make sure that if headers or body are # unicode strings, it must be converted into a utf-8 encoded # byte string self.raw_headers = utf8(headers.strip()) - - if DEBUG: - logging.debug('raw_headers: '+str(self.raw_headers)) - self.body = utf8(body) - if DEBUG: - logging.debug('body: '+str(self.body)) - # Now let's concatenate the headers with the body, and create # `rfile` based on it self.rfile = StringIO(b'\r\n\r\n'.join([self.raw_headers, self.body])) - - if DEBUG: - logging.debug('rfile: '+str(self.rfile)) - - self.wfile = StringIO() # Creating `wfile` as an empty # StringIO, just to avoid any real # I/O calls @@ -203,10 +171,6 @@ class HTTPrettyRequest(BaseHTTPRequestHandler, BaseClass): # parsing the request line preemptively self.raw_requestline = self.rfile.readline() - if 
DEBUG: - logging.debug('raw_requestline: '+str(self.raw_requestline)) - - # initiating the error attributes with None self.error_code = None self.error_message = None @@ -218,9 +182,6 @@ class HTTPrettyRequest(BaseHTTPRequestHandler, BaseClass): # making the HTTP method string available as the command self.method = self.command - if DEBUG: - logging.debug('method: '+str(self.method)) - # Now 2 convenient attributes for the HTTPretty API: # `querystring` holds a dictionary with the parsed query string @@ -246,23 +207,8 @@ class HTTPrettyRequest(BaseHTTPRequestHandler, BaseClass): ) def parse_querystring(self, qs): - - if DEBUG: - logging.debug('parse_querystring - ' - ' -caller: ' + str( - inspect.stack()[1][3]) + "-" + get_linenumber()) - logging.debug('qs: '+str(qs)) - expanded = unquote_utf8(qs) - - if DEBUG: - logging.debug('expanded: '+str(expanded)) - parsed = parse_qs(expanded) - - if DEBUG: - logging.debug('parsed: '+str(parsed)) - result = {} for k in parsed: result[k] = list(map(decode_utf8, parsed[k])) @@ -272,12 +218,6 @@ class HTTPrettyRequest(BaseHTTPRequestHandler, BaseClass): def parse_request_body(self, body): """ Attempt to parse the post based on the content-type passed. Return the regular body if not """ - if DEBUG: - logging.debug('parse_request_body - ' - ' -caller: ' + str( - inspect.stack()[1][3]) + "-" + get_linenumber()) - logging.debug('body: '+str(body)) - PARSING_FUNCTIONS = { 'application/json': json.loads, 'text/json': json.loads, @@ -1173,4 +1113,4 @@ def httprettified(test): if isinstance(test, ClassTypes): return decorate_class(test) - return decorate_callable(test) + return decorate_callable(test) \ No newline at end of file diff --git a/moto/packages/httpretty/http.py b/moto/packages/httpretty/http.py index 17d580f5f..ee1625905 100644 --- a/moto/packages/httpretty/http.py +++ b/moto/packages/httpretty/http.py @@ -133,7 +133,6 @@ def parse_requestline(s): ... 
ValueError: Not a Request-Line """ - methods = '|'.join(HttpBaseClass.METHODS) m = re.match(r'(' + methods + ')\s+(.*)\s+HTTP/(1.[0|1])', s, re.I) if m: @@ -146,7 +145,6 @@ def last_requestline(sent_data): """ Find the last line in sent_data that can be parsed with parse_requestline """ - for line in reversed(sent_data): try: parse_requestline(decode_utf8(line)) diff --git a/tests/test_awslambda/test_lambda.py b/tests/test_awslambda/test_lambda.py index c05f9f0ac..71f6746a9 100644 --- a/tests/test_awslambda/test_lambda.py +++ b/tests/test_awslambda/test_lambda.py @@ -821,7 +821,6 @@ def get_function_policy(): assert res['Statement'][0]['Action'] == 'lambda:InvokeFunction' - @mock_lambda @mock_s3 def test_list_versions_by_function(): @@ -854,7 +853,6 @@ def test_list_versions_by_function(): assert versions['Versions'][0]['FunctionArn'] == 'arn:aws:lambda:us-west-2:123456789012:function:testFunction:$LATEST' - @mock_lambda @mock_s3 def test_create_function_with_already_exists(): diff --git a/tests/test_kms/test_kms.py b/tests/test_kms/test_kms.py index 520c7262c..0f7bab4cd 100644 --- a/tests/test_kms/test_kms.py +++ b/tests/test_kms/test_kms.py @@ -1,6 +1,5 @@ from __future__ import unicode_literals import os, re - import boto3 import boto.kms from boto.exception import JSONResponseError From 37cb5ab4e69f1ef977256aba07f38f546d40e918 Mon Sep 17 00:00:00 2001 From: William Rubel Date: Sun, 17 Feb 2019 14:36:53 -0600 Subject: [PATCH 112/175] Add test for roles --- moto/iam/responses.py | 5 +++-- tests/test_iam/test_iam.py | 27 +++++++++++++++++++++++++++ 2 files changed, 30 insertions(+), 2 deletions(-) diff --git a/moto/iam/responses.py b/moto/iam/responses.py index e624c18c3..c981f9b35 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -176,14 +176,14 @@ class IamResponse(BaseResponse): def update_role_description(self): role_name = self._get_param('RoleName') description = self._get_param('Description') - role = 
iam_backend.update_role_description(role_name,description) + role = iam_backend.update_role_description(role_name, description) template = self.response_template(UPDATE_ROLE_DESCRIPTION_TEMPLATE) return template.render(role=role) def update_role(self): role_name = self._get_param('RoleName') description = self._get_param('Description') - role = iam_backend.update_role_description(role_name,description) + role = iam_backend.update_role_description(role_name, description) template = self.response_template(UPDATE_ROLE_DESCRIPTION_TEMPLATE) return template.render(role=role) @@ -672,6 +672,7 @@ class IamResponse(BaseResponse): template = self.response_template(UNTAG_ROLE_TEMPLATE) return template.render() + LIST_ENTITIES_FOR_POLICY_TEMPLATE = """ diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index d5f1bb4f9..80a7fe99e 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -1151,3 +1151,30 @@ def test_untag_role(): # With a role that doesn't exist: with assert_raises(ClientError): conn.untag_role(RoleName='notarole', TagKeys=['somevalue']) + + +@mock_iam() +def test_update_role_description(): + conn = boto3.client('iam', region_name='us-east-1') + + with assert_raises(ClientError): + conn.delete_role(RoleName="my-role") + + conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") + role = conn.get_role(RoleName="my-role") + response = conn.update_role_description(RoleName="my-role", Description="test") + + assert response['Role']['RoleName'] == 'my-role' + +@mock_iam() +def test_update_role(): + conn = boto3.client('iam', region_name='us-east-1') + + with assert_raises(ClientError): + conn.delete_role(RoleName="my-role") + + conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") + role = conn.get_role(RoleName="my-role") + response = conn.update_role_description(RoleName="my-role", Description="test") + + assert response['Role']['RoleName'] == 
'my-role' \ No newline at end of file From 31258e9e9e35f254259ad20759a39a88423266fe Mon Sep 17 00:00:00 2001 From: William Rubel Date: Sun, 17 Feb 2019 15:23:59 -0600 Subject: [PATCH 113/175] Add test for roles --- moto/iam/models.py | 8 ++++---- moto/iam/responses.py | 12 ++++++------ tests/test_iam/test_iam.py | 4 +--- 3 files changed, 11 insertions(+), 13 deletions(-) diff --git a/moto/iam/models.py b/moto/iam/models.py index a5f40b996..e0f725299 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -51,8 +51,8 @@ class Policy(BaseModel): self.default_version_id = default_version_id or 'v1' self.versions = [PolicyVersion(self.arn, document, True)] - self.create_datetime = datetime.strftime(datetime.utcnow(), "%Y-%m-%dT%H:%M:%SZ") - self.update_datetime = datetime.strftime(datetime.utcnow(), "%Y-%m-%dT%H:%M:%SZ") + self.create_datetime = datetime.now(pytz.utc) + self.update_datetime = datetime.now(pytz.utc) class SAMLProvider(BaseModel): @@ -76,7 +76,7 @@ class PolicyVersion(object): self.is_default = is_default self.version_id = 'v1' - self.create_datetime = datetime.strftime(datetime.utcnow(), "%Y-%m-%dT%H:%M:%SZ") + self.create_datetime = datetime.now(pytz.utc) class ManagedPolicy(Policy): @@ -132,7 +132,7 @@ class Role(BaseModel): self.path = path or '/' self.policies = {} self.managed_policies = {} - self.create_date = datetime.strftime(datetime.utcnow(), "%Y-%m-%dT%H:%M:%SZ") + self.create_date = datetime.now(pytz.utc) self.tags = {} self.description = "" diff --git a/moto/iam/responses.py b/moto/iam/responses.py index c981f9b35..5be07430f 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -742,12 +742,12 @@ CREATE_POLICY_TEMPLATE = """ {{ policy.arn }} {{ policy.attachment_count }} - {{ policy.create_datetime }} + {{ policy.create_datetime.isoformat() }} {{ policy.default_version_id }} {{ policy.path }} {{ policy.id }} {{ policy.name }} - {{ policy.update_datetime }} + {{ policy.update_datetime.isoformat() }} @@ -765,8 +765,8 @@ 
GET_POLICY_TEMPLATE = """ {{ policy.path }} {{ policy.arn }} {{ policy.attachment_count }} - {{ policy.create_datetime }} - {{ policy.update_datetime }} + {{ policy.create_datetime.isoformat() }} + {{ policy.update_datetime.isoformat() }} @@ -858,7 +858,7 @@ LIST_POLICIES_TEMPLATE = """ {{ policy.path }} {{ policy.id }} {{ policy.name }} - {{ policy.update_datetime.isoformat() }} + {{ policy.update_datetime }} {% endfor %} @@ -951,7 +951,7 @@ UPDATE_ROLE_DESCRIPTION_TEMPLATE = """ Date: Sun, 17 Feb 2019 15:35:49 -0600 Subject: [PATCH 114/175] Add test for roles --- moto/iam/models.py | 1 + 1 file changed, 1 insertion(+) diff --git a/moto/iam/models.py b/moto/iam/models.py index e0f725299..f71ad5352 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -477,6 +477,7 @@ class IAMBackend(BaseBackend): def update_role_description(self, role_name, role_description): role = self.get_role(role_name) role.description = role_description + return role def detach_role_policy(self, policy_arn, role_name): arns = dict((p.arn, p) for p in self.managed_policies.values()) From 6e7bd088b30452bff0f1601cde089cdfd9a8ee4d Mon Sep 17 00:00:00 2001 From: William Rubel Date: Sun, 17 Feb 2019 16:04:28 -0600 Subject: [PATCH 115/175] Add test for roles --- tests/test_iam/test_iam.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index 85c51012f..f728f0dca 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -1176,3 +1176,21 @@ def test_update_role(): response = conn.update_role_description(RoleName="my-role", Description="test") assert response['Role']['RoleName'] == 'my-role' + +@mock_iam() +def test_list_entities_for_policy(): + conn = boto3.client('iam', region_name='us-east-1') + + with assert_raises(ClientError): + conn.delete_role(RoleName="my-role") + + conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") + + role = 
conn.get_role(RoleName="my-role") + arn = role.get('Role').get('Arn') + + response = conn.list_entities_for_policy( + PolicyArn=arn + ) + + assert response['PolicyGroups'][0]['GroupName'] == 'Dev' From 0e73cddf2fb8039e056b4b67a6948f8c4eefcae9 Mon Sep 17 00:00:00 2001 From: William Rubel Date: Sun, 17 Feb 2019 16:25:49 -0600 Subject: [PATCH 116/175] Add test for roles --- tests/test_iam/test_iam.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index f728f0dca..be25c2e96 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -1174,7 +1174,6 @@ def test_update_role(): conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") response = conn.update_role_description(RoleName="my-role", Description="test") - assert response['Role']['RoleName'] == 'my-role' @mock_iam() @@ -1192,5 +1191,4 @@ def test_list_entities_for_policy(): response = conn.list_entities_for_policy( PolicyArn=arn ) - assert response['PolicyGroups'][0]['GroupName'] == 'Dev' From 63b692356d07a65468ae0f507d5390397e07663c Mon Sep 17 00:00:00 2001 From: William Rubel Date: Sun, 17 Feb 2019 16:49:54 -0600 Subject: [PATCH 117/175] Fix policy date --- moto/iam/responses.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/iam/responses.py b/moto/iam/responses.py index 5be07430f..7ee4b6345 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -858,7 +858,7 @@ LIST_POLICIES_TEMPLATE = """ {{ policy.path }} {{ policy.id }} {{ policy.name }} - {{ policy.update_datetime }} + {{ policy.update_datetime.isoformat() }} {% endfor %} From 37a765db8d13faa30d64240ff2dc7b098c0237d7 Mon Sep 17 00:00:00 2001 From: William Rubel Date: Sun, 17 Feb 2019 17:12:27 -0600 Subject: [PATCH 118/175] Fix policy date --- tests/test_iam/test_iam.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index 
be25c2e96..1114f72de 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -1176,6 +1176,17 @@ def test_update_role(): response = conn.update_role_description(RoleName="my-role", Description="test") assert response['Role']['RoleName'] == 'my-role' +@mock_iam() +def test_update_role(): + conn = boto3.client('iam', region_name='us-east-1') + + with assert_raises(ClientError): + conn.delete_role(RoleName="my-role") + + conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") + response = conn.update_role(RoleName="my-role", Description="test") + assert response['Role']['RoleName'] == 'my-role' + @mock_iam() def test_list_entities_for_policy(): conn = boto3.client('iam', region_name='us-east-1') From ce7b5ebf54021f64cf855f17f60a77f4b09262ce Mon Sep 17 00:00:00 2001 From: William Rubel Date: Sun, 17 Feb 2019 21:37:33 -0600 Subject: [PATCH 119/175] Fix policy date --- file.tmp | 9 +++++++++ moto/iam/models.py | 5 +++++ moto/iam/responses.py | 12 ++++++++++-- tests/test_iam/test_iam.py | 2 +- 4 files changed, 25 insertions(+), 3 deletions(-) create mode 100644 file.tmp diff --git a/file.tmp b/file.tmp new file mode 100644 index 000000000..80053c647 --- /dev/null +++ b/file.tmp @@ -0,0 +1,9 @@ + + AWSTemplateFormatVersion: '2010-09-09' + Description: Simple CloudFormation Test Template + Resources: + S3Bucket: + Type: AWS::S3::Bucket + Properties: + AccessControl: PublicRead + BucketName: cf-test-bucket-1 diff --git a/moto/iam/models.py b/moto/iam/models.py index f71ad5352..80c7da29a 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -479,6 +479,11 @@ class IAMBackend(BaseBackend): role.description = role_description return role + def update_role(self, role_name, role_description): + role = self.get_role(role_name) + role.description = role_description + return role + def detach_role_policy(self, policy_arn, role_name): arns = dict((p.arn, p) for p in self.managed_policies.values()) try: diff --git 
a/moto/iam/responses.py b/moto/iam/responses.py index 7ee4b6345..26781ab6f 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -183,8 +183,8 @@ class IamResponse(BaseResponse): def update_role(self): role_name = self._get_param('RoleName') description = self._get_param('Description') - role = iam_backend.update_role_description(role_name, description) - template = self.response_template(UPDATE_ROLE_DESCRIPTION_TEMPLATE) + role = iam_backend.update_role(role_name, description) + template = self.response_template(UPDATE_ROLE_TEMPLATE) return template.render(role=role) def create_policy_version(self): @@ -944,6 +944,14 @@ GET_ROLE_POLICY_TEMPLATE = """ + + + + df37e965-9967-11e1-a4c3-270EXAMPLE04 + +""" + UPDATE_ROLE_DESCRIPTION_TEMPLATE = """ diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index 1114f72de..77ba17a5a 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -1185,7 +1185,7 @@ def test_update_role(): conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") response = conn.update_role(RoleName="my-role", Description="test") - assert response['Role']['RoleName'] == 'my-role' + assert len(response.keys()) == 1 @mock_iam() def test_list_entities_for_policy(): From 8048e39dc0053d3772fc9d586b05f7fe7fa338be Mon Sep 17 00:00:00 2001 From: William Rubel Date: Sun, 17 Feb 2019 22:32:39 -0600 Subject: [PATCH 120/175] Fix policy date --- tests/test_awslambda/test_lambda.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/test_awslambda/test_lambda.py b/tests/test_awslambda/test_lambda.py index 71f6746a9..171f6360a 100644 --- a/tests/test_awslambda/test_lambda.py +++ b/tests/test_awslambda/test_lambda.py @@ -12,6 +12,8 @@ import sure # noqa from freezegun import freeze_time from moto import mock_lambda, mock_s3, mock_ec2, mock_sns, mock_logs, settings +from nose.tools import assert_raises +from botocore.exceptions import ClientError _lambda_region = 'us-west-2' 
@@ -397,6 +399,11 @@ def test_get_function(): result = conn.get_function(FunctionName='testFunction', Qualifier='$LATEST') result['Configuration']['Version'].should.equal('$LATEST') + # Test get function when can't find function name + with assert_raises(ClientError): + conn.get_function(FunctionName='junk', Qualifier='$LATEST') + + @mock_lambda @mock_s3 From e229d248ad4d207a00c84ac441ab3517157b3444 Mon Sep 17 00:00:00 2001 From: William Rubel Date: Mon, 18 Feb 2019 08:52:37 -0600 Subject: [PATCH 121/175] Trying to improve coverage --- file.tmp | 9 --------- tests/test_awslambda/test_lambda.py | 2 +- tests/test_events/test_events.py | 8 +++++++- 3 files changed, 8 insertions(+), 11 deletions(-) delete mode 100644 file.tmp diff --git a/file.tmp b/file.tmp deleted file mode 100644 index 80053c647..000000000 --- a/file.tmp +++ /dev/null @@ -1,9 +0,0 @@ - - AWSTemplateFormatVersion: '2010-09-09' - Description: Simple CloudFormation Test Template - Resources: - S3Bucket: - Type: AWS::S3::Bucket - Properties: - AccessControl: PublicRead - BucketName: cf-test-bucket-1 diff --git a/tests/test_awslambda/test_lambda.py b/tests/test_awslambda/test_lambda.py index 171f6360a..435d394e3 100644 --- a/tests/test_awslambda/test_lambda.py +++ b/tests/test_awslambda/test_lambda.py @@ -910,4 +910,4 @@ def test_list_versions_by_function_for_nonexistent_function(): versions = conn.list_versions_by_function(FunctionName='testFunction') - assert len(versions['Versions']) == 0 \ No newline at end of file + assert len(versions['Versions']) == 0 diff --git a/tests/test_events/test_events.py b/tests/test_events/test_events.py index 80630c5b8..810e0bbe5 100644 --- a/tests/test_events/test_events.py +++ b/tests/test_events/test_events.py @@ -7,7 +7,6 @@ from moto.events import mock_events from botocore.exceptions import ClientError from nose.tools import assert_raises - RULES = [ {'Name': 'test1', 'ScheduleExpression': 'rate(5 minutes)'}, {'Name': 'test2', 'ScheduleExpression': 'rate(1 
minute)'}, @@ -109,6 +108,13 @@ def test_enable_disable_rule(): rule = client.describe_rule(Name=rule_name) assert(rule['State'] == 'ENABLED') + # Test invalid name + try: + client.enable_rule(Name='junk') + + except ClientError as ce: + assert ce.response['Error']['Code'] == 'ResourceNotFoundException' + @mock_events def test_list_rule_names_by_target(): From c46bc9ae83a180d8c451d907cfa430c97680ddab Mon Sep 17 00:00:00 2001 From: William Rubel Date: Mon, 18 Feb 2019 09:15:07 -0600 Subject: [PATCH 122/175] Trying to improve coverage --- tests/test_events/test_events.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_events/test_events.py b/tests/test_events/test_events.py index 810e0bbe5..a9d90ec32 100644 --- a/tests/test_events/test_events.py +++ b/tests/test_events/test_events.py @@ -1,5 +1,4 @@ import random - import boto3 import json From 37845e41a687441815606e7bfe2412288488b32a Mon Sep 17 00:00:00 2001 From: William Rubel Date: Mon, 18 Feb 2019 09:44:48 -0600 Subject: [PATCH 123/175] Trying to improve coverage --- tests/test_awslambda/test_lambda.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_awslambda/test_lambda.py b/tests/test_awslambda/test_lambda.py index 435d394e3..7f3b44b79 100644 --- a/tests/test_awslambda/test_lambda.py +++ b/tests/test_awslambda/test_lambda.py @@ -907,7 +907,6 @@ def test_create_function_with_already_exists(): @mock_s3 def test_list_versions_by_function_for_nonexistent_function(): conn = boto3.client('lambda', 'us-west-2') - versions = conn.list_versions_by_function(FunctionName='testFunction') assert len(versions['Versions']) == 0 From e9d8021c86e15da9dc1e5f634ecf877d5fb147ff Mon Sep 17 00:00:00 2001 From: William Rubel Date: Mon, 18 Feb 2019 21:20:29 -0600 Subject: [PATCH 124/175] Fixing list entities for policy --- moto/iam/models.py | 10 +++++ moto/iam/responses.py | 90 ++++++++++++++++++++++++++++++++------ tests/test_iam/test_iam.py | 79 +++++++++++++++++++++++++++++---- 3 files changed, 158 
insertions(+), 21 deletions(-) diff --git a/moto/iam/models.py b/moto/iam/models.py index 80c7da29a..92ac19da7 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -892,6 +892,16 @@ class IAMBackend(BaseBackend): return users + def list_roles(self, path_prefix, marker, max_items): + roles = None + try: + roles = self.roles.values() + except KeyError: + raise IAMNotFoundException( + "Users {0}, {1}, {2} not found".format(path_prefix, marker, max_items)) + + return roles + def upload_signing_certificate(self, user_name, body): user = self.get_user(user_name) cert_id = random_resource_id(size=32) diff --git a/moto/iam/responses.py b/moto/iam/responses.py index 26781ab6f..278f13f2d 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -108,8 +108,69 @@ class IamResponse(BaseResponse): return template.render(policies=policies, marker=marker) def list_entities_for_policy(self): + policy_arn = self._get_param('PolicyArn') + + # Options 'User'|'Role'|'Group'|'LocalManagedPolicy'|'AWSManagedPolicy + entity = self._get_param('EntityFilter') + path_prefix = self._get_param('PathPrefix') + policy_usage_filter = self._get_param('PolicyUsageFilter') + marker = self._get_param('Marker') + max_items = self._get_param('MaxItems') + + entity_roles = [] + entity_groups = [] + entity_users = [] + + if entity == 'User': + users = iam_backend.list_users(path_prefix, marker, max_items) + if users: + for user in users: + for p in user.managed_policies: + if p == policy_arn: + entity_users.append(user.name) + + elif entity == 'Role': + roles = iam_backend.list_roles(path_prefix, marker, max_items) + if roles: + for role in roles: + for p in role.managed_policies: + if p == policy_arn: + entity_roles.append(role.name) + + elif entity == 'Group': + groups = iam_backend.list_groups() + if groups: + for group in groups: + for p in group.managed_policies: + if p == policy_arn: + entity_groups.append(group.name) + + elif entity == 'LocalManagedPolicy' or entity == 
'AWSManagedPolicy': + users = iam_backend.list_users(path_prefix, marker, max_items) + if users: + for user in users: + for p in user.managed_policies: + if p == policy_arn: + entity_users.append(user.name) + + roles = iam_backend.list_roles(path_prefix, marker, max_items) + if roles: + for role in roles: + for p in role.managed_policies: + if p == policy_arn: + entity_roles.append(role.name) + + groups = iam_backend.list_groups() + if groups: + for group in groups: + for p in group.managed_policies: + if p == policy_arn: + entity_groups.append(group.name) + + template = self.response_template(LIST_ENTITIES_FOR_POLICY_TEMPLATE) - return template.render() + return template.render(roles=entity_roles, users=entity_users, groups=entity_groups) + def create_role(self): role_name = self._get_param('RoleName') @@ -676,23 +737,26 @@ class IamResponse(BaseResponse): LIST_ENTITIES_FOR_POLICY_TEMPLATE = """ - - DevRole - + {% for role in roles %} + + {{ role }} + + {% endfor %} - - Dev - + {% for group in groups %} + + {{ group }} + + {% endfor %} false - - Alice - - - Bob - + {% for user in users %} + + {{ user }} + + {% endfor %} diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index 77ba17a5a..ceec5e06a 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -1187,19 +1187,82 @@ def test_update_role(): response = conn.update_role(RoleName="my-role", Description="test") assert len(response.keys()) == 1 + @mock_iam() def test_list_entities_for_policy(): + import json + test_policy = json.dumps({ + "Version": "2012-10-17", + "Statement": [ + { + "Action": "s3:ListBucket", + "Resource": "*", + "Effect": "Allow", + } + ] + }) + conn = boto3.client('iam', region_name='us-east-1') - - with assert_raises(ClientError): - conn.delete_role(RoleName="my-role") - conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") + conn.create_user(Path='/', UserName='testUser') + conn.create_group(Path='/', 
GroupName='testGroup') + conn.create_policy( + PolicyName='testPolicy', + Path='/', + PolicyDocument=test_policy, + Description='Test Policy' + ) - role = conn.get_role(RoleName="my-role") - arn = role.get('Role').get('Arn') + # Attach things to the user and group: + conn.put_user_policy(UserName='testUser', PolicyName='testPolicy', PolicyDocument=test_policy) + conn.put_group_policy(GroupName='testGroup', PolicyName='testPolicy', PolicyDocument=test_policy) + + conn.attach_user_policy(UserName='testUser', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy') + conn.attach_group_policy(GroupName='testGroup', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy') + + conn.add_user_to_group(UserName='testUser', GroupName='testGroup') + + # Add things to the role: + conn.create_instance_profile(InstanceProfileName='ipn') + conn.add_role_to_instance_profile(InstanceProfileName='ipn', RoleName='my-role') + conn.tag_role(RoleName='my-role', Tags=[ + { + 'Key': 'somekey', + 'Value': 'somevalue' + }, + { + 'Key': 'someotherkey', + 'Value': 'someothervalue' + } + ]) + conn.put_role_policy(RoleName='my-role', PolicyName='test-policy', PolicyDocument=test_policy) + conn.attach_role_policy(RoleName='my-role', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy') response = conn.list_entities_for_policy( - PolicyArn=arn + PolicyArn='arn:aws:iam::123456789012:policy/testPolicy', + EntityFilter='Role' ) - assert response['PolicyGroups'][0]['GroupName'] == 'Dev' + assert response['PolicyRoles'] == [{'RoleName': 'my-role'}] + + response = conn.list_entities_for_policy( + PolicyArn='arn:aws:iam::123456789012:policy/testPolicy', + EntityFilter='User', + ) + assert response['PolicyUsers'] == [{'UserName': 'testUser'}] + + response = conn.list_entities_for_policy( + PolicyArn='arn:aws:iam::123456789012:policy/testPolicy', + EntityFilter='Group', + ) + assert response['PolicyGroups'] == [{'GroupName': 'testGroup'}] + + response = conn.list_entities_for_policy( + 
PolicyArn='arn:aws:iam::123456789012:policy/testPolicy', + EntityFilter='LocalManagedPolicy', + ) + assert response['PolicyGroups'] == [{'GroupName': 'testGroup'}] + assert response['PolicyUsers'] == [{'UserName': 'testUser'}] + assert response['PolicyRoles'] == [{'RoleName': 'my-role'}] + + + From 59deb4d6c0d3199f8d21a881f87e33a18cc95b0b Mon Sep 17 00:00:00 2001 From: William Rubel Date: Mon, 18 Feb 2019 21:25:29 -0600 Subject: [PATCH 125/175] Fixing list entities for policy --- moto/iam/responses.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/moto/iam/responses.py b/moto/iam/responses.py index 278f13f2d..72b2e464c 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -113,7 +113,7 @@ class IamResponse(BaseResponse): # Options 'User'|'Role'|'Group'|'LocalManagedPolicy'|'AWSManagedPolicy entity = self._get_param('EntityFilter') path_prefix = self._get_param('PathPrefix') - policy_usage_filter = self._get_param('PolicyUsageFilter') + #policy_usage_filter = self._get_param('PolicyUsageFilter') marker = self._get_param('Marker') max_items = self._get_param('MaxItems') @@ -167,11 +167,9 @@ class IamResponse(BaseResponse): if p == policy_arn: entity_groups.append(group.name) - template = self.response_template(LIST_ENTITIES_FOR_POLICY_TEMPLATE) return template.render(roles=entity_roles, users=entity_users, groups=entity_groups) - def create_role(self): role_name = self._get_param('RoleName') path = self._get_param('Path') From a5208222b4d017ed8d6f02fe0aed0e2da2dbaa3c Mon Sep 17 00:00:00 2001 From: William Rubel Date: Mon, 18 Feb 2019 21:29:09 -0600 Subject: [PATCH 126/175] Fixing list entities for policy --- moto/iam/responses.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/moto/iam/responses.py b/moto/iam/responses.py index 72b2e464c..5b19c9cdc 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -113,7 +113,7 @@ class IamResponse(BaseResponse): # Options 
'User'|'Role'|'Group'|'LocalManagedPolicy'|'AWSManagedPolicy entity = self._get_param('EntityFilter') path_prefix = self._get_param('PathPrefix') - #policy_usage_filter = self._get_param('PolicyUsageFilter') + # policy_usage_filter = self._get_param('PolicyUsageFilter') marker = self._get_param('Marker') max_items = self._get_param('MaxItems') @@ -739,7 +739,7 @@ LIST_ENTITIES_FOR_POLICY_TEMPLATE = """ {{ role }} - {% endfor %} + {% endfor %} {% for group in groups %} From e6e7f235acc165a52cdc4a3e0d9180142d17d735 Mon Sep 17 00:00:00 2001 From: woohooyayaya Date: Mon, 25 Feb 2019 16:14:24 -0800 Subject: [PATCH 127/175] fix creation date in create key response to unix timestamp type --- moto/kms/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/kms/models.py b/moto/kms/models.py index 9d13589c1..9fbb2b587 100644 --- a/moto/kms/models.py +++ b/moto/kms/models.py @@ -36,7 +36,7 @@ class Key(BaseModel): "KeyMetadata": { "AWSAccountId": self.account_id, "Arn": self.arn, - "CreationDate": datetime.strftime(datetime.utcnow(), "%Y-%m-%dT%H:%M:%SZ"), + "CreationDate": datetime.strftime(datetime.utcnow(), "%s"), "Description": self.description, "Enabled": self.enabled, "KeyId": self.id, From c43f311a5e57008a5a875321d02cba2cf2cd9db9 Mon Sep 17 00:00:00 2001 From: Julian Mehnle Date: Mon, 25 Feb 2019 21:10:36 -0800 Subject: [PATCH 128/175] IMPLEMENTATION_COVERAGE.md: SQS get_queue_url is implemented SQS get_queue_url was implemented in dbf2368aa. 
--- IMPLEMENTATION_COVERAGE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index fe12c069a..a5650f572 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -3919,7 +3919,7 @@ - [ ] delete_message_batch - [X] delete_queue - [ ] get_queue_attributes -- [ ] get_queue_url +- [X] get_queue_url - [X] list_dead_letter_source_queues - [ ] list_queue_tags - [X] list_queues From 7b7cf5bd1149dd385a047173347a2d5c984aac01 Mon Sep 17 00:00:00 2001 From: Chih-Hsuan Yen Date: Fri, 1 Mar 2019 17:14:18 +0800 Subject: [PATCH 129/175] Bump idna to 2.8 requests 2.21.0 allows idna 2.8 [1] [1] https://github.com/kennethreitz/requests/commit/8761e9736f7d5508a5547cdf3adecbe0b7306278 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 8ac06c4dd..99be632db 100755 --- a/setup.py +++ b/setup.py @@ -37,7 +37,7 @@ install_requires = [ "jsondiff==1.1.2", "aws-xray-sdk!=0.96,>=0.93", "responses>=0.9.0", - "idna<2.8,>=2.5", + "idna<2.9,>=2.5", "cfn-lint", ] From 9992e23e68fb7f9fb8e07a163d89069aaf662096 Mon Sep 17 00:00:00 2001 From: Chih-Hsuan Yen Date: Fri, 1 Mar 2019 19:22:26 +0800 Subject: [PATCH 130/175] Fix compatibility with cryptography 2.6 [1] https://github.com/pyca/cryptography/blob/master/CHANGELOG.rst#26---2019-02-27 --- moto/acm/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/acm/models.py b/moto/acm/models.py index 39be8945d..15a1bd44d 100644 --- a/moto/acm/models.py +++ b/moto/acm/models.py @@ -243,7 +243,7 @@ class CertBundle(BaseModel): 'KeyAlgorithm': key_algo, 'NotAfter': datetime_to_epoch(self._cert.not_valid_after), 'NotBefore': datetime_to_epoch(self._cert.not_valid_before), - 'Serial': self._cert.serial, + 'Serial': self._cert.serial_number, 'SignatureAlgorithm': self._cert.signature_algorithm_oid._name.upper().replace('ENCRYPTION', ''), 'Status': self.status, # One of PENDING_VALIDATION, 
ISSUED, INACTIVE, EXPIRED, VALIDATION_TIMED_OUT, REVOKED, FAILED. 'Subject': 'CN={0}'.format(self.common_name), From d61ce0584bf53640409b1011e9e46a589a21dab0 Mon Sep 17 00:00:00 2001 From: Mark Challoner Date: Wed, 27 Feb 2019 10:54:55 +0000 Subject: [PATCH 131/175] Check record type when upserting. Previously this was not checked so an existing record (e.g. with type A) would be overwritten on upsert by a record with the same name but different type (e.g. TXT). This commit also: * publicizes the type variable appending the underscore affix (required to maintain compatibility with CloudFormation which sets type as the CF type), * fixes a wrong assumption in tests that UPSERT applies a change to Type (it creates a distinct record instead), * Updates ACM model to use serial_number instead of deprecated and remove serial causing Travis failures. --- moto/acm/models.py | 2 +- moto/route53/models.py | 12 ++++++------ tests/test_route53/test_route53.py | 20 +++++++++++++++----- 3 files changed, 22 insertions(+), 12 deletions(-) diff --git a/moto/acm/models.py b/moto/acm/models.py index 39be8945d..15a1bd44d 100644 --- a/moto/acm/models.py +++ b/moto/acm/models.py @@ -243,7 +243,7 @@ class CertBundle(BaseModel): 'KeyAlgorithm': key_algo, 'NotAfter': datetime_to_epoch(self._cert.not_valid_after), 'NotBefore': datetime_to_epoch(self._cert.not_valid_before), - 'Serial': self._cert.serial, + 'Serial': self._cert.serial_number, 'SignatureAlgorithm': self._cert.signature_algorithm_oid._name.upper().replace('ENCRYPTION', ''), 'Status': self.status, # One of PENDING_VALIDATION, ISSUED, INACTIVE, EXPIRED, VALIDATION_TIMED_OUT, REVOKED, FAILED. 
'Subject': 'CN={0}'.format(self.common_name), diff --git a/moto/route53/models.py b/moto/route53/models.py index d483d22e2..3760d3817 100644 --- a/moto/route53/models.py +++ b/moto/route53/models.py @@ -24,7 +24,7 @@ class HealthCheck(BaseModel): self.id = health_check_id self.ip_address = health_check_args.get("ip_address") self.port = health_check_args.get("port", 80) - self._type = health_check_args.get("type") + self.type_ = health_check_args.get("type") self.resource_path = health_check_args.get("resource_path") self.fqdn = health_check_args.get("fqdn") self.search_string = health_check_args.get("search_string") @@ -58,7 +58,7 @@ class HealthCheck(BaseModel): {{ health_check.ip_address }} {{ health_check.port }} - {{ health_check._type }} + {{ health_check.type_ }} {{ health_check.resource_path }} {{ health_check.fqdn }} {{ health_check.request_interval }} @@ -76,7 +76,7 @@ class RecordSet(BaseModel): def __init__(self, kwargs): self.name = kwargs.get('Name') - self._type = kwargs.get('Type') + self.type_ = kwargs.get('Type') self.ttl = kwargs.get('TTL') self.records = kwargs.get('ResourceRecords', []) self.set_identifier = kwargs.get('SetIdentifier') @@ -130,7 +130,7 @@ class RecordSet(BaseModel): def to_xml(self): template = Template(""" {{ record_set.name }} - {{ record_set._type }} + {{ record_set.type_ }} {% if record_set.set_identifier %} {{ record_set.set_identifier }} {% endif %} @@ -183,7 +183,7 @@ class FakeZone(BaseModel): def upsert_rrset(self, record_set): new_rrset = RecordSet(record_set) for i, rrset in enumerate(self.rrsets): - if rrset.name == new_rrset.name: + if rrset.name == new_rrset.name and rrset.type_ == new_rrset.type_: self.rrsets[i] = new_rrset break else: @@ -202,7 +202,7 @@ class FakeZone(BaseModel): record_sets = list(self.rrsets) # Copy the list if start_type: record_sets = [ - record_set for record_set in record_sets if record_set._type >= start_type] + record_set for record_set in record_sets if record_set.type_ >= start_type] 
if start_name: record_sets = [ record_set for record_set in record_sets if record_set.name >= start_name] diff --git a/tests/test_route53/test_route53.py b/tests/test_route53/test_route53.py index 1ced9d937..d730f8dcf 100644 --- a/tests/test_route53/test_route53.py +++ b/tests/test_route53/test_route53.py @@ -98,6 +98,16 @@ def test_rrset(): rrsets.should.have.length_of(1) rrsets[0].resource_records[0].should.equal('5.6.7.8') + changes = ResourceRecordSets(conn, zoneid) + change = changes.add_change("UPSERT", "foo.bar.testdns.aws.com", "TXT") + change.add_value("foo") + changes.commit() + + rrsets = conn.get_all_rrsets(zoneid) + rrsets.should.have.length_of(2) + rrsets[0].resource_records[0].should.equal('5.6.7.8') + rrsets[1].resource_records[0].should.equal('foo') + changes = ResourceRecordSets(conn, zoneid) changes.add_change("DELETE", "foo.bar.testdns.aws.com", "A") changes.commit() @@ -520,7 +530,7 @@ def test_change_resource_record_sets_crud_valid(): # Create A Record. a_record_endpoint_payload = { - 'Comment': 'create A record prod.redis.db', + 'Comment': 'Create A record prod.redis.db', 'Changes': [ { 'Action': 'CREATE', @@ -545,15 +555,15 @@ def test_change_resource_record_sets_crud_valid(): a_record_detail['TTL'].should.equal(10) a_record_detail['ResourceRecords'].should.equal([{'Value': '127.0.0.1'}]) - # Update type to CNAME + # Update A Record. 
cname_record_endpoint_payload = { - 'Comment': 'Update to CNAME prod.redis.db', + 'Comment': 'Update A record prod.redis.db', 'Changes': [ { 'Action': 'UPSERT', 'ResourceRecordSet': { 'Name': 'prod.redis.db.', - 'Type': 'CNAME', + 'Type': 'A', 'TTL': 60, 'ResourceRecords': [{ 'Value': '192.168.1.1' @@ -568,7 +578,7 @@ def test_change_resource_record_sets_crud_valid(): len(response['ResourceRecordSets']).should.equal(1) cname_record_detail = response['ResourceRecordSets'][0] cname_record_detail['Name'].should.equal('prod.redis.db.') - cname_record_detail['Type'].should.equal('CNAME') + cname_record_detail['Type'].should.equal('A') cname_record_detail['TTL'].should.equal(60) cname_record_detail['ResourceRecords'].should.equal([{'Value': '192.168.1.1'}]) From 840045c688969de17923e92308685ebc90fbbda4 Mon Sep 17 00:00:00 2001 From: Waldemar Hummer Date: Sat, 2 Mar 2019 18:56:49 -0500 Subject: [PATCH 132/175] fix sorting of log groups --- moto/logs/models.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/moto/logs/models.py b/moto/logs/models.py index ca1fdc4ad..e105d4d14 100644 --- a/moto/logs/models.py +++ b/moto/logs/models.py @@ -242,7 +242,8 @@ class LogsBackend(BaseBackend): if next_token is None: next_token = 0 - groups = sorted(group.to_describe_dict() for name, group in self.groups.items() if name.startswith(log_group_name_prefix)) + groups = [group.to_describe_dict() for name, group in self.groups.items() if name.startswith(log_group_name_prefix)] + groups = sorted(groups, key=lambda x: x['creationTime'], reverse=True) groups_page = groups[next_token:next_token + limit] next_token += limit From bc116ab750f8c83623a3a759825a29e4ff916720 Mon Sep 17 00:00:00 2001 From: Mike Grima Date: Mon, 25 Feb 2019 16:27:25 -0800 Subject: [PATCH 133/175] Basic AWS Config service support. 
--- .gitignore | 3 +- README.md | 4 +- moto/backends.py | 3 +- moto/config/__init__.py | 4 + moto/config/exceptions.py | 149 ++++++++++ moto/config/models.py | 333 +++++++++++++++++++++ moto/config/responses.py | 53 ++++ moto/config/urls.py | 10 + tests/test_config/test_config.py | 487 +++++++++++++++++++++++++++++++ 9 files changed, 1043 insertions(+), 3 deletions(-) create mode 100644 moto/config/__init__.py create mode 100644 moto/config/exceptions.py create mode 100644 moto/config/models.py create mode 100644 moto/config/responses.py create mode 100644 moto/config/urls.py create mode 100644 tests/test_config/test_config.py diff --git a/.gitignore b/.gitignore index 47e5efbe0..efb489651 100644 --- a/.gitignore +++ b/.gitignore @@ -16,4 +16,5 @@ python_env .pytest_cache/ venv/ .python-version -.vscode/ \ No newline at end of file +.vscode/ +tests/file.tmp diff --git a/README.md b/README.md index d6e9f30a1..aeff847ed 100644 --- a/README.md +++ b/README.md @@ -70,10 +70,12 @@ It gets even better! Moto isn't just for Python code and it isn't just for S3. 
L |------------------------------------------------------------------------------| | CloudwatchEvents | @mock_events | all endpoints done | |------------------------------------------------------------------------------| -| Cognito Identity | @mock_cognitoidentity| basic endpoints done | +| Cognito Identity | @mock_cognitoidentity| basic endpoints done | |------------------------------------------------------------------------------| | Cognito Identity Provider | @mock_cognitoidp| basic endpoints done | |------------------------------------------------------------------------------| +| Config | @mock_config | basic endpoints done | +|------------------------------------------------------------------------------| | Data Pipeline | @mock_datapipeline| basic endpoints done | |------------------------------------------------------------------------------| | DynamoDB | @mock_dynamodb | core endpoints done | diff --git a/moto/backends.py b/moto/backends.py index 1a333415e..90cc803a7 100644 --- a/moto/backends.py +++ b/moto/backends.py @@ -46,7 +46,7 @@ from moto.iot import iot_backends from moto.iotdata import iotdata_backends from moto.batch import batch_backends from moto.resourcegroupstaggingapi import resourcegroupstaggingapi_backends - +from moto.config import config_backends BACKENDS = { 'acm': acm_backends, @@ -57,6 +57,7 @@ BACKENDS = { 'cloudwatch': cloudwatch_backends, 'cognito-identity': cognitoidentity_backends, 'cognito-idp': cognitoidp_backends, + 'config': config_backends, 'datapipeline': datapipeline_backends, 'dynamodb': dynamodb_backends, 'dynamodb2': dynamodb_backends2, diff --git a/moto/config/__init__.py b/moto/config/__init__.py new file mode 100644 index 000000000..9ca6a5917 --- /dev/null +++ b/moto/config/__init__.py @@ -0,0 +1,4 @@ +from .models import config_backends +from ..core.models import base_decorator + +mock_config = base_decorator(config_backends) diff --git a/moto/config/exceptions.py b/moto/config/exceptions.py new file mode 100644 
index 000000000..b2b01d6a0 --- /dev/null +++ b/moto/config/exceptions.py @@ -0,0 +1,149 @@ +from __future__ import unicode_literals +from moto.core.exceptions import JsonRESTError + + +class NameTooLongException(JsonRESTError): + code = 400 + + def __init__(self, name, location): + message = '1 validation error detected: Value \'{name}\' at \'{location}\' failed to satisfy' \ + ' constraint: Member must have length less than or equal to 256'.format(name=name, location=location) + super(NameTooLongException, self).__init__("ValidationException", message) + + +class InvalidConfigurationRecorderNameException(JsonRESTError): + code = 400 + + def __init__(self, name): + message = 'The configuration recorder name \'{name}\' is not valid, blank string.'.format(name=name) + super(InvalidConfigurationRecorderNameException, self).__init__("InvalidConfigurationRecorderNameException", + message) + + +class MaxNumberOfConfigurationRecordersExceededException(JsonRESTError): + code = 400 + + def __init__(self, name): + message = 'Failed to put configuration recorder \'{name}\' because the maximum number of ' \ + 'configuration recorders: 1 is reached.'.format(name=name) + super(MaxNumberOfConfigurationRecordersExceededException, self).__init__( + "MaxNumberOfConfigurationRecordersExceededException", message) + + +class InvalidRecordingGroupException(JsonRESTError): + code = 400 + + def __init__(self): + message = 'The recording group provided is not valid' + super(InvalidRecordingGroupException, self).__init__("InvalidRecordingGroupException", message) + + +class InvalidResourceTypeException(JsonRESTError): + code = 400 + + def __init__(self, bad_list, good_list): + message = '{num} validation error detected: Value \'{bad_list}\' at ' \ + '\'configurationRecorder.recordingGroup.resourceTypes\' failed to satisfy constraint: ' \ + 'Member must satisfy constraint: [Member must satisfy enum value set: {good_list}]'.format( + num=len(bad_list), bad_list=bad_list, good_list=good_list) 
+ # For PY2: + message = str(message) + + super(InvalidResourceTypeException, self).__init__("ValidationException", message) + + +class NoSuchConfigurationRecorderException(JsonRESTError): + code = 400 + + def __init__(self, name): + message = 'Cannot find configuration recorder with the specified name \'{name}\'.'.format(name=name) + super(NoSuchConfigurationRecorderException, self).__init__("NoSuchConfigurationRecorderException", message) + + +class InvalidDeliveryChannelNameException(JsonRESTError): + code = 400 + + def __init__(self, name): + message = 'The delivery channel name \'{name}\' is not valid, blank string.'.format(name=name) + super(InvalidDeliveryChannelNameException, self).__init__("InvalidDeliveryChannelNameException", + message) + + +class NoSuchBucketException(JsonRESTError): + """We are *only* validating that there is value that is not '' here.""" + code = 400 + + def __init__(self): + message = 'Cannot find a S3 bucket with an empty bucket name.' + super(NoSuchBucketException, self).__init__("NoSuchBucketException", message) + + +class InvalidS3KeyPrefixException(JsonRESTError): + code = 400 + + def __init__(self): + message = 'The s3 key prefix \'\' is not valid, empty s3 key prefix.' + super(InvalidS3KeyPrefixException, self).__init__("InvalidS3KeyPrefixException", message) + + +class InvalidSNSTopicARNException(JsonRESTError): + """We are *only* validating that there is value that is not '' here.""" + code = 400 + + def __init__(self): + message = 'The sns topic arn \'\' is not valid.' 
+ super(InvalidSNSTopicARNException, self).__init__("InvalidSNSTopicARNException", message) + + +class InvalidDeliveryFrequency(JsonRESTError): + code = 400 + + def __init__(self, value, good_list): + message = '1 validation error detected: Value \'{value}\' at ' \ + '\'deliveryChannel.configSnapshotDeliveryProperties.deliveryFrequency\' failed to satisfy ' \ + 'constraint: Member must satisfy enum value set: {good_list}'.format(value=value, good_list=good_list) + super(InvalidDeliveryFrequency, self).__init__("InvalidDeliveryFrequency", message) + + +class MaxNumberOfDeliveryChannelsExceededException(JsonRESTError): + code = 400 + + def __init__(self, name): + message = 'Failed to put delivery channel \'{name}\' because the maximum number of ' \ + 'delivery channels: 1 is reached.'.format(name=name) + super(MaxNumberOfDeliveryChannelsExceededException, self).__init__( + "MaxNumberOfDeliveryChannelsExceededException", message) + + +class NoSuchDeliveryChannelException(JsonRESTError): + code = 400 + + def __init__(self, name): + message = 'Cannot find delivery channel with specified name \'{name}\'.'.format(name=name) + super(NoSuchDeliveryChannelException, self).__init__("NoSuchDeliveryChannelException", message) + + +class NoAvailableConfigurationRecorderException(JsonRESTError): + code = 400 + + def __init__(self): + message = 'Configuration recorder is not available to put delivery channel.' + super(NoAvailableConfigurationRecorderException, self).__init__("NoAvailableConfigurationRecorderException", + message) + + +class NoAvailableDeliveryChannelException(JsonRESTError): + code = 400 + + def __init__(self): + message = 'Delivery channel is not available to start configuration recorder.' 
+        super(NoAvailableDeliveryChannelException, self).__init__("NoAvailableDeliveryChannelException", message)
+
+
+class LastDeliveryChannelDeleteFailedException(JsonRESTError):
+    code = 400
+
+    def __init__(self, name):
+        message = 'Failed to delete last specified delivery channel with name \'{name}\', ' \
+                  'because there is a running configuration recorder.'.format(name=name)
+        super(LastDeliveryChannelDeleteFailedException, self).__init__("LastDeliveryChannelDeleteFailedException", message)
diff --git a/moto/config/models.py b/moto/config/models.py
new file mode 100644
index 000000000..5e43c6b12
--- /dev/null
+++ b/moto/config/models.py
@@ -0,0 +1,333 @@
+import json
+import time
+import pkg_resources
+
+from datetime import datetime
+
+from boto3 import Session
+
+from moto.config.exceptions import InvalidResourceTypeException, InvalidDeliveryFrequency, \
+    InvalidConfigurationRecorderNameException, NameTooLongException, \
+    MaxNumberOfConfigurationRecordersExceededException, InvalidRecordingGroupException, \
+    NoSuchConfigurationRecorderException, NoAvailableConfigurationRecorderException, \
+    InvalidDeliveryChannelNameException, NoSuchBucketException, InvalidS3KeyPrefixException, \
+    InvalidSNSTopicARNException, MaxNumberOfDeliveryChannelsExceededException, NoAvailableDeliveryChannelException, \
+    NoSuchDeliveryChannelException, LastDeliveryChannelDeleteFailedException
+
+from moto.core import BaseBackend, BaseModel
+
+DEFAULT_ACCOUNT_ID = 123456789012
+
+
+def datetime2int(date):
+    return int(time.mktime(date.timetuple()))
+
+
+def snake_to_camels(original):
+    parts = original.split('_')
+
+    camel_cased = parts[0].lower() + ''.join(p.title() for p in parts[1:])
+    camel_cased = camel_cased.replace('Arn', 'ARN')  # Config uses 'ARN' instead of 'Arn'
+
+    return camel_cased
+
+
+class ConfigEmptyDictable(BaseModel):
+    """Base class to make serialization easy.
This assumes that the sub-class will NOT return 'None's in the JSON.""" + + def to_dict(self): + data = {} + for item, value in self.__dict__.items(): + if value is not None: + if isinstance(value, ConfigEmptyDictable): + data[snake_to_camels(item)] = value.to_dict() + else: + data[snake_to_camels(item)] = value + + return data + + +class ConfigRecorderStatus(ConfigEmptyDictable): + + def __init__(self, name): + self.name = name + + self.recording = False + self.last_start_time = None + self.last_stop_time = None + self.last_status = None + self.last_error_code = None + self.last_error_message = None + self.last_status_change_time = None + + def start(self): + self.recording = True + self.last_status = 'PENDING' + self.last_start_time = datetime2int(datetime.utcnow()) + self.last_status_change_time = datetime2int(datetime.utcnow()) + + def stop(self): + self.recording = False + self.last_stop_time = datetime2int(datetime.utcnow()) + self.last_status_change_time = datetime2int(datetime.utcnow()) + + +class ConfigDeliverySnapshotProperties(ConfigEmptyDictable): + + def __init__(self, delivery_frequency): + self.delivery_frequency = delivery_frequency + + +class ConfigDeliveryChannel(ConfigEmptyDictable): + + def __init__(self, name, s3_bucket_name, prefix=None, sns_arn=None, snapshot_properties=None): + self.name = name + self.s3_bucket_name = s3_bucket_name + self.s3_key_prefix = prefix + self.sns_topic_arn = sns_arn + self.config_snapshot_delivery_properties = snapshot_properties + + +class RecordingGroup(ConfigEmptyDictable): + + def __init__(self, all_supported=True, include_global_resource_types=False, resource_types=None): + self.all_supported = all_supported + self.include_global_resource_types = include_global_resource_types + self.resource_types = resource_types + + +class ConfigRecorder(ConfigEmptyDictable): + + def __init__(self, role_arn, recording_group, name='default', status=None): + self.name = name + self.role_arn = role_arn + self.recording_group = 
recording_group + + if not status: + self.status = ConfigRecorderStatus(name) + else: + self.status = status + + +class ConfigBackend(BaseBackend): + + def __init__(self): + self.recorders = {} + self.delivery_channels = {} + + @staticmethod + def _validate_resource_types(resource_list): + # Load the service file: + resource_package = 'botocore' + resource_path = '/'.join(('data', 'config', '2014-11-12', 'service-2.json')) + conifg_schema = json.loads(pkg_resources.resource_string(resource_package, resource_path)) + + # Verify that each entry exists in the supported list: + bad_list = [] + for resource in resource_list: + # For PY2: + r_str = str(resource) + + if r_str not in conifg_schema['shapes']['ResourceType']['enum']: + bad_list.append(r_str) + + if bad_list: + raise InvalidResourceTypeException(bad_list, conifg_schema['shapes']['ResourceType']['enum']) + + @staticmethod + def _validate_delivery_snapshot_properties(properties): + # Load the service file: + resource_package = 'botocore' + resource_path = '/'.join(('data', 'config', '2014-11-12', 'service-2.json')) + conifg_schema = json.loads(pkg_resources.resource_string(resource_package, resource_path)) + + # Verify that the deliveryFrequency is set to an acceptable value: + if properties.get('deliveryFrequency', None) not in \ + conifg_schema['shapes']['MaximumExecutionFrequency']['enum']: + raise InvalidDeliveryFrequency(properties.get('deliveryFrequency', None), + conifg_schema['shapes']['MaximumExecutionFrequency']['enum']) + + def put_configuration_recorder(self, config_recorder): + # Validate the name: + if not config_recorder.get('name'): + raise InvalidConfigurationRecorderNameException(config_recorder.get('name')) + if len(config_recorder.get('name')) > 256: + raise NameTooLongException(config_recorder.get('name'), 'configurationRecorder.name') + + # We're going to assume that the passed in Role ARN is correct. 
+
+        # Config currently only allows 1 configuration recorder for an account:
+        if len(self.recorders) == 1 and not self.recorders.get(config_recorder['name']):
+            raise MaxNumberOfConfigurationRecordersExceededException(config_recorder['name'])
+
+        # Is this updating an existing one?
+        recorder_status = None
+        if self.recorders.get(config_recorder['name']):
+            recorder_status = self.recorders[config_recorder['name']].status
+
+        # Validate the Recording Group:
+        if not config_recorder.get('recordingGroup'):
+            recording_group = RecordingGroup()
+        else:
+            rg = config_recorder['recordingGroup']
+
+            # Can't have both the resource types specified and the other flags as True.
+            if rg.get('resourceTypes') and (
+                    rg.get('allSupported', True) or
+                    rg.get('includeGlobalResourceTypes', False)):
+                raise InvalidRecordingGroupException()
+
+            # If an empty dict is provided, then bad:
+            if not rg.get('allSupported', False) \
+                    and not rg.get('resourceTypes') \
+                    and not rg.get('includeGlobalResourceTypes', False):
+                raise InvalidRecordingGroupException()
+
+            # Validate that the list provided is correct:
+            self._validate_resource_types(rg.get('resourceTypes', []))
+
+            recording_group = RecordingGroup(
+                all_supported=rg.get('allSupported', True),
+                include_global_resource_types=rg.get('includeGlobalResourceTypes', False),
+                resource_types=rg.get('resourceTypes', [])
+            )
+
+        self.recorders[config_recorder['name']] = \
+            ConfigRecorder(config_recorder['roleARN'], recording_group, name=config_recorder['name'],
+                           status=recorder_status)
+
+    def describe_configuration_recorders(self, recorder_names):
+        recorders = []
+
+        if recorder_names:
+            for rn in recorder_names:
+                if not self.recorders.get(rn):
+                    raise NoSuchConfigurationRecorderException(rn)
+
+                # Format the recorder:
+                recorders.append(self.recorders[rn].to_dict())
+
+        else:
+            for recorder in self.recorders.values():
+                recorders.append(recorder.to_dict())
+
+        return recorders
+
+    def describe_configuration_recorder_status(self,
recorder_names): + recorders = [] + + if recorder_names: + for rn in recorder_names: + if not self.recorders.get(rn): + raise NoSuchConfigurationRecorderException(rn) + + # Format the recorder: + recorders.append(self.recorders[rn].status.to_dict()) + + else: + for recorder in self.recorders.values(): + recorders.append(recorder.status.to_dict()) + + return recorders + + def put_delivery_channel(self, delivery_channel): + # Must have a configuration recorder: + if not self.recorders: + raise NoAvailableConfigurationRecorderException() + + # Validate the name: + if not delivery_channel.get('name'): + raise InvalidDeliveryChannelNameException(delivery_channel.get('name')) + if len(delivery_channel.get('name')) > 256: + raise NameTooLongException(delivery_channel.get('name'), 'deliveryChannel.name') + + # We are going to assume that the bucket exists -- but will verify if the bucket provided is blank: + if not delivery_channel.get('s3BucketName'): + raise NoSuchBucketException() + + # We are going to assume that the bucket has the correct policy attached to it. 
We are only going to verify + # if the prefix provided is not an empty string: + if delivery_channel.get('s3KeyPrefix', None) == '': + raise InvalidS3KeyPrefixException() + + # Ditto for SNS -- Only going to assume that the ARN provided is not an empty string: + if delivery_channel.get('snsTopicARN', None) == '': + raise InvalidSNSTopicARNException() + + # Config currently only allows 1 delivery channel for an account: + if len(self.delivery_channels) == 1 and not self.delivery_channels.get(delivery_channel['name']): + raise MaxNumberOfDeliveryChannelsExceededException(delivery_channel['name']) + + if not delivery_channel.get('configSnapshotDeliveryProperties'): + dp = None + + else: + # Validate the config snapshot delivery properties: + self._validate_delivery_snapshot_properties(delivery_channel['configSnapshotDeliveryProperties']) + + dp = ConfigDeliverySnapshotProperties( + delivery_channel['configSnapshotDeliveryProperties']['deliveryFrequency']) + + self.delivery_channels[delivery_channel['name']] = \ + ConfigDeliveryChannel(delivery_channel['name'], delivery_channel['s3BucketName'], + prefix=delivery_channel.get('s3KeyPrefix', None), + sns_arn=delivery_channel.get('snsTopicARN', None), + snapshot_properties=dp) + + def describe_delivery_channels(self, channel_names): + channels = [] + + if channel_names: + for cn in channel_names: + if not self.delivery_channels.get(cn): + raise NoSuchDeliveryChannelException(cn) + + # Format the delivery channel: + channels.append(self.delivery_channels[cn].to_dict()) + + else: + for channel in self.delivery_channels.values(): + channels.append(channel.to_dict()) + + return channels + + def start_configuration_recorder(self, recorder_name): + if not self.recorders.get(recorder_name): + raise NoSuchConfigurationRecorderException(recorder_name) + + # Must have a delivery channel available as well: + if not self.delivery_channels: + raise NoAvailableDeliveryChannelException() + + # Start recording: + 
self.recorders[recorder_name].status.start() + + def stop_configuration_recorder(self, recorder_name): + if not self.recorders.get(recorder_name): + raise NoSuchConfigurationRecorderException(recorder_name) + + # Stop recording: + self.recorders[recorder_name].status.stop() + + def delete_configuration_recorder(self, recorder_name): + if not self.recorders.get(recorder_name): + raise NoSuchConfigurationRecorderException(recorder_name) + + del self.recorders[recorder_name] + + def delete_delivery_channel(self, channel_name): + if not self.delivery_channels.get(channel_name): + raise NoSuchDeliveryChannelException(channel_name) + + # Check if a channel is recording -- if so, bad -- (there can only be 1 recorder): + for recorder in self.recorders.values(): + if recorder.status.recording: + raise LastDeliveryChannelDeleteFailedException(channel_name) + + del self.delivery_channels[channel_name] + + +config_backends = {} +boto3_session = Session() +for region in boto3_session.get_available_regions('config'): + config_backends[region] = ConfigBackend() diff --git a/moto/config/responses.py b/moto/config/responses.py new file mode 100644 index 000000000..286b2349f --- /dev/null +++ b/moto/config/responses.py @@ -0,0 +1,53 @@ +import json +from moto.core.responses import BaseResponse +from .models import config_backends + + +class ConfigResponse(BaseResponse): + + @property + def config_backend(self): + return config_backends[self.region] + + def put_configuration_recorder(self): + self.config_backend.put_configuration_recorder(self._get_param('ConfigurationRecorder')) + return "" + + def describe_configuration_recorders(self): + recorders = self.config_backend.describe_configuration_recorders(self._get_param('ConfigurationRecorderNames')) + schema = {'ConfigurationRecorders': recorders} + return json.dumps(schema) + + def describe_configuration_recorder_status(self): + recorder_statuses = self.config_backend.describe_configuration_recorder_status( + 
self._get_param('ConfigurationRecorderNames')) + schema = {'ConfigurationRecordersStatus': recorder_statuses} + return json.dumps(schema) + + def put_delivery_channel(self): + self.config_backend.put_delivery_channel(self._get_param('DeliveryChannel')) + return "" + + def describe_delivery_channels(self): + delivery_channels = self.config_backend.describe_delivery_channels(self._get_param('DeliveryChannelNames')) + schema = {'DeliveryChannels': delivery_channels} + return json.dumps(schema) + + def describe_delivery_channel_status(self): + raise NotImplementedError() + + def delete_delivery_channel(self): + self.config_backend.delete_delivery_channel(self._get_param('DeliveryChannelName')) + return "" + + def delete_configuration_recorder(self): + self.config_backend.delete_configuration_recorder(self._get_param('ConfigurationRecorderName')) + return "" + + def start_configuration_recorder(self): + self.config_backend.start_configuration_recorder(self._get_param('ConfigurationRecorderName')) + return "" + + def stop_configuration_recorder(self): + self.config_backend.stop_configuration_recorder(self._get_param('ConfigurationRecorderName')) + return "" diff --git a/moto/config/urls.py b/moto/config/urls.py new file mode 100644 index 000000000..fd7b6969f --- /dev/null +++ b/moto/config/urls.py @@ -0,0 +1,10 @@ +from __future__ import unicode_literals +from .responses import ConfigResponse + +url_bases = [ + "https?://config.(.+).amazonaws.com", +] + +url_paths = { + '{0}/$': ConfigResponse.dispatch, +} diff --git a/tests/test_config/test_config.py b/tests/test_config/test_config.py new file mode 100644 index 000000000..a61d2a29c --- /dev/null +++ b/tests/test_config/test_config.py @@ -0,0 +1,487 @@ +from datetime import datetime, timedelta + +import boto3 +from botocore.exceptions import ClientError +from nose.tools import assert_raises + +from moto.config import mock_config + + +@mock_config +def test_put_configuration_recorder(): + client = boto3.client('config', 
region_name='us-west-2') + + # Try without a name supplied: + with assert_raises(ClientError) as ce: + client.put_configuration_recorder(ConfigurationRecorder={'roleARN': 'somearn'}) + assert ce.exception.response['Error']['Code'] == 'InvalidConfigurationRecorderNameException' + assert 'is not valid, blank string.' in ce.exception.response['Error']['Message'] + + # Try with a really long name: + with assert_raises(ClientError) as ce: + client.put_configuration_recorder(ConfigurationRecorder={'name': 'a' * 257, 'roleARN': 'somearn'}) + assert ce.exception.response['Error']['Code'] == 'ValidationException' + assert 'Member must have length less than or equal to 256' in ce.exception.response['Error']['Message'] + + # With resource types and flags set to True: + bad_groups = [ + {'allSupported': True, 'includeGlobalResourceTypes': True, 'resourceTypes': ['item']}, + {'allSupported': False, 'includeGlobalResourceTypes': True, 'resourceTypes': ['item']}, + {'allSupported': True, 'includeGlobalResourceTypes': False, 'resourceTypes': ['item']}, + {'allSupported': False, 'includeGlobalResourceTypes': False, 'resourceTypes': []} + ] + + for bg in bad_groups: + with assert_raises(ClientError) as ce: + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'default', + 'roleARN': 'somearn', + 'recordingGroup': bg + }) + assert ce.exception.response['Error']['Code'] == 'InvalidRecordingGroupException' + assert ce.exception.response['Error']['Message'] == 'The recording group provided is not valid' + + # With an invalid Resource Type: + with assert_raises(ClientError) as ce: + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'default', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + # 2 good, and 2 bad: + 'resourceTypes': ['AWS::EC2::Volume', 'LOLNO', 'AWS::EC2::VPC', 'LOLSTILLNO'] + } + }) + assert ce.exception.response['Error']['Code'] == 'ValidationException' + assert "2 validation 
error detected: Value '['LOLNO', 'LOLSTILLNO']" in str(ce.exception.response['Error']['Message']) + assert 'AWS::EC2::Instance' in ce.exception.response['Error']['Message'] + + # Create a proper one: + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + + result = client.describe_configuration_recorders()['ConfigurationRecorders'] + assert len(result) == 1 + assert result[0]['name'] == 'testrecorder' + assert result[0]['roleARN'] == 'somearn' + assert not result[0]['recordingGroup']['allSupported'] + assert not result[0]['recordingGroup']['includeGlobalResourceTypes'] + assert len(result[0]['recordingGroup']['resourceTypes']) == 2 + assert 'AWS::EC2::Volume' in result[0]['recordingGroup']['resourceTypes'] \ + and 'AWS::EC2::VPC' in result[0]['recordingGroup']['resourceTypes'] + + # Now update the configuration recorder: + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': True, + 'includeGlobalResourceTypes': True + } + }) + result = client.describe_configuration_recorders()['ConfigurationRecorders'] + assert len(result) == 1 + assert result[0]['name'] == 'testrecorder' + assert result[0]['roleARN'] == 'somearn' + assert result[0]['recordingGroup']['allSupported'] + assert result[0]['recordingGroup']['includeGlobalResourceTypes'] + assert len(result[0]['recordingGroup']['resourceTypes']) == 0 + + # With a default recording group (i.e. 
lacking one) + client.put_configuration_recorder(ConfigurationRecorder={'name': 'testrecorder', 'roleARN': 'somearn'}) + result = client.describe_configuration_recorders()['ConfigurationRecorders'] + assert len(result) == 1 + assert result[0]['name'] == 'testrecorder' + assert result[0]['roleARN'] == 'somearn' + assert result[0]['recordingGroup']['allSupported'] + assert not result[0]['recordingGroup']['includeGlobalResourceTypes'] + assert not result[0]['recordingGroup'].get('resourceTypes') + + # Can currently only have exactly 1 Config Recorder in an account/region: + with assert_raises(ClientError) as ce: + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'someotherrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + } + }) + assert ce.exception.response['Error']['Code'] == 'MaxNumberOfConfigurationRecordersExceededException' + assert "maximum number of configuration recorders: 1 is reached." in ce.exception.response['Error']['Message'] + + +@mock_config +def test_describe_configurations(): + client = boto3.client('config', region_name='us-west-2') + + # Without any configurations: + result = client.describe_configuration_recorders() + assert not result['ConfigurationRecorders'] + + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + + result = client.describe_configuration_recorders()['ConfigurationRecorders'] + assert len(result) == 1 + assert result[0]['name'] == 'testrecorder' + assert result[0]['roleARN'] == 'somearn' + assert not result[0]['recordingGroup']['allSupported'] + assert not result[0]['recordingGroup']['includeGlobalResourceTypes'] + assert len(result[0]['recordingGroup']['resourceTypes']) == 2 + assert 'AWS::EC2::Volume' in 
result[0]['recordingGroup']['resourceTypes'] \ + and 'AWS::EC2::VPC' in result[0]['recordingGroup']['resourceTypes'] + + # Specify an incorrect name: + with assert_raises(ClientError) as ce: + client.describe_configuration_recorders(ConfigurationRecorderNames=['wrong']) + assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException' + assert 'wrong' in ce.exception.response['Error']['Message'] + + # And with both a good and wrong name: + with assert_raises(ClientError) as ce: + client.describe_configuration_recorders(ConfigurationRecorderNames=['testrecorder', 'wrong']) + assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException' + assert 'wrong' in ce.exception.response['Error']['Message'] + + +@mock_config +def test_delivery_channels(): + client = boto3.client('config', region_name='us-west-2') + + # Try without a config recorder: + with assert_raises(ClientError) as ce: + client.put_delivery_channel(DeliveryChannel={}) + assert ce.exception.response['Error']['Code'] == 'NoAvailableConfigurationRecorderException' + assert ce.exception.response['Error']['Message'] == 'Configuration recorder is not available to ' \ + 'put delivery channel.' + + # Create a config recorder to continue testing: + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + + # Try without a name supplied: + with assert_raises(ClientError) as ce: + client.put_delivery_channel(DeliveryChannel={}) + assert ce.exception.response['Error']['Code'] == 'InvalidDeliveryChannelNameException' + assert 'is not valid, blank string.' 
in ce.exception.response['Error']['Message'] + + # Try with a really long name: + with assert_raises(ClientError) as ce: + client.put_delivery_channel(DeliveryChannel={'name': 'a' * 257}) + assert ce.exception.response['Error']['Code'] == 'ValidationException' + assert 'Member must have length less than or equal to 256' in ce.exception.response['Error']['Message'] + + # Without specifying a bucket name: + with assert_raises(ClientError) as ce: + client.put_delivery_channel(DeliveryChannel={'name': 'testchannel'}) + assert ce.exception.response['Error']['Code'] == 'NoSuchBucketException' + assert ce.exception.response['Error']['Message'] == 'Cannot find a S3 bucket with an empty bucket name.' + + with assert_raises(ClientError) as ce: + client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': ''}) + assert ce.exception.response['Error']['Code'] == 'NoSuchBucketException' + assert ce.exception.response['Error']['Message'] == 'Cannot find a S3 bucket with an empty bucket name.' + + # With an empty string for the S3 key prefix: + with assert_raises(ClientError) as ce: + client.put_delivery_channel(DeliveryChannel={ + 'name': 'testchannel', 's3BucketName': 'somebucket', 's3KeyPrefix': ''}) + assert ce.exception.response['Error']['Code'] == 'InvalidS3KeyPrefixException' + assert 'empty s3 key prefix.' 
in ce.exception.response['Error']['Message'] + + # With an empty string for the SNS ARN: + with assert_raises(ClientError) as ce: + client.put_delivery_channel(DeliveryChannel={ + 'name': 'testchannel', 's3BucketName': 'somebucket', 'snsTopicARN': ''}) + assert ce.exception.response['Error']['Code'] == 'InvalidSNSTopicARNException' + assert 'The sns topic arn' in ce.exception.response['Error']['Message'] + + # With an invalid delivery frequency: + with assert_raises(ClientError) as ce: + client.put_delivery_channel(DeliveryChannel={ + 'name': 'testchannel', + 's3BucketName': 'somebucket', + 'configSnapshotDeliveryProperties': {'deliveryFrequency': 'WRONG'} + }) + assert ce.exception.response['Error']['Code'] == 'InvalidDeliveryFrequency' + assert 'WRONG' in ce.exception.response['Error']['Message'] + assert 'TwentyFour_Hours' in ce.exception.response['Error']['Message'] + + # Create a proper one: + client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': 'somebucket'}) + result = client.describe_delivery_channels()['DeliveryChannels'] + assert len(result) == 1 + assert len(result[0].keys()) == 2 + assert result[0]['name'] == 'testchannel' + assert result[0]['s3BucketName'] == 'somebucket' + + # Overwrite it with another proper configuration: + client.put_delivery_channel(DeliveryChannel={ + 'name': 'testchannel', + 's3BucketName': 'somebucket', + 'snsTopicARN': 'sometopicarn', + 'configSnapshotDeliveryProperties': {'deliveryFrequency': 'TwentyFour_Hours'} + }) + result = client.describe_delivery_channels()['DeliveryChannels'] + assert len(result) == 1 + assert len(result[0].keys()) == 4 + assert result[0]['name'] == 'testchannel' + assert result[0]['s3BucketName'] == 'somebucket' + assert result[0]['snsTopicARN'] == 'sometopicarn' + assert result[0]['configSnapshotDeliveryProperties']['deliveryFrequency'] == 'TwentyFour_Hours' + + # Can only have 1: + with assert_raises(ClientError) as ce: + 
client.put_delivery_channel(DeliveryChannel={'name': 'testchannel2', 's3BucketName': 'somebucket'}) + assert ce.exception.response['Error']['Code'] == 'MaxNumberOfDeliveryChannelsExceededException' + assert 'because the maximum number of delivery channels: 1 is reached.' in ce.exception.response['Error']['Message'] + + +@mock_config +def test_describe_delivery_channels(): + client = boto3.client('config', region_name='us-west-2') + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + + # Without any channels: + result = client.describe_delivery_channels() + assert not result['DeliveryChannels'] + + client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': 'somebucket'}) + result = client.describe_delivery_channels()['DeliveryChannels'] + assert len(result) == 1 + assert len(result[0].keys()) == 2 + assert result[0]['name'] == 'testchannel' + assert result[0]['s3BucketName'] == 'somebucket' + + # Overwrite it with another proper configuration: + client.put_delivery_channel(DeliveryChannel={ + 'name': 'testchannel', + 's3BucketName': 'somebucket', + 'snsTopicARN': 'sometopicarn', + 'configSnapshotDeliveryProperties': {'deliveryFrequency': 'TwentyFour_Hours'} + }) + result = client.describe_delivery_channels()['DeliveryChannels'] + assert len(result) == 1 + assert len(result[0].keys()) == 4 + assert result[0]['name'] == 'testchannel' + assert result[0]['s3BucketName'] == 'somebucket' + assert result[0]['snsTopicARN'] == 'sometopicarn' + assert result[0]['configSnapshotDeliveryProperties']['deliveryFrequency'] == 'TwentyFour_Hours' + + # Specify an incorrect name: + with assert_raises(ClientError) as ce: + client.describe_delivery_channels(DeliveryChannelNames=['wrong']) + assert ce.exception.response['Error']['Code'] == 
'NoSuchDeliveryChannelException' + assert 'wrong' in ce.exception.response['Error']['Message'] + + # And with both a good and wrong name: + with assert_raises(ClientError) as ce: + client.describe_delivery_channels(DeliveryChannelNames=['testchannel', 'wrong']) + assert ce.exception.response['Error']['Code'] == 'NoSuchDeliveryChannelException' + assert 'wrong' in ce.exception.response['Error']['Message'] + + +@mock_config +def test_start_configuration_recorder(): + client = boto3.client('config', region_name='us-west-2') + + # Without a config recorder: + with assert_raises(ClientError) as ce: + client.start_configuration_recorder(ConfigurationRecorderName='testrecorder') + assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException' + + # Make the config recorder; + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + + # Without a delivery channel: + with assert_raises(ClientError) as ce: + client.start_configuration_recorder(ConfigurationRecorderName='testrecorder') + assert ce.exception.response['Error']['Code'] == 'NoAvailableDeliveryChannelException' + + # Make the delivery channel: + client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': 'somebucket'}) + + # Start it: + client.start_configuration_recorder(ConfigurationRecorderName='testrecorder') + + # Verify it's enabled: + result = client.describe_configuration_recorder_status()['ConfigurationRecordersStatus'] + lower_bound = (datetime.utcnow() - timedelta(minutes=5)) + assert result[0]['recording'] + assert result[0]['lastStatus'] == 'PENDING' + assert lower_bound < result[0]['lastStartTime'].replace(tzinfo=None) <= datetime.utcnow() + assert lower_bound < result[0]['lastStatusChangeTime'].replace(tzinfo=None) <= datetime.utcnow() + + 
+@mock_config +def test_stop_configuration_recorder(): + client = boto3.client('config', region_name='us-west-2') + + # Without a config recorder: + with assert_raises(ClientError) as ce: + client.stop_configuration_recorder(ConfigurationRecorderName='testrecorder') + assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException' + + # Make the config recorder; + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + + # Make the delivery channel for creation: + client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': 'somebucket'}) + + # Start it: + client.start_configuration_recorder(ConfigurationRecorderName='testrecorder') + client.stop_configuration_recorder(ConfigurationRecorderName='testrecorder') + + # Verify it's disabled: + result = client.describe_configuration_recorder_status()['ConfigurationRecordersStatus'] + lower_bound = (datetime.utcnow() - timedelta(minutes=5)) + assert not result[0]['recording'] + assert result[0]['lastStatus'] == 'PENDING' + assert lower_bound < result[0]['lastStartTime'].replace(tzinfo=None) <= datetime.utcnow() + assert lower_bound < result[0]['lastStopTime'].replace(tzinfo=None) <= datetime.utcnow() + assert lower_bound < result[0]['lastStatusChangeTime'].replace(tzinfo=None) <= datetime.utcnow() + + +@mock_config +def test_describe_configuration_recorder_status(): + client = boto3.client('config', region_name='us-west-2') + + # Without any: + result = client.describe_configuration_recorder_status() + assert not result['ConfigurationRecordersStatus'] + + # Make the config recorder; + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': 
False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + + # Without specifying a config recorder: + result = client.describe_configuration_recorder_status()['ConfigurationRecordersStatus'] + assert len(result) == 1 + assert result[0]['name'] == 'testrecorder' + assert not result[0]['recording'] + + # With a proper name: + result = client.describe_configuration_recorder_status( + ConfigurationRecorderNames=['testrecorder'])['ConfigurationRecordersStatus'] + assert len(result) == 1 + assert result[0]['name'] == 'testrecorder' + assert not result[0]['recording'] + + # Invalid name: + with assert_raises(ClientError) as ce: + client.describe_configuration_recorder_status(ConfigurationRecorderNames=['testrecorder', 'wrong']) + assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException' + assert 'wrong' in ce.exception.response['Error']['Message'] + + +@mock_config +def test_delete_configuration_recorder(): + client = boto3.client('config', region_name='us-west-2') + + # Make the config recorder; + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 'allSupported': False, + 'includeGlobalResourceTypes': False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + + # Delete it: + client.delete_configuration_recorder(ConfigurationRecorderName='testrecorder') + + # Try again -- it should be deleted: + with assert_raises(ClientError) as ce: + client.delete_configuration_recorder(ConfigurationRecorderName='testrecorder') + assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException' + + +@mock_config +def test_delete_delivery_channel(): + client = boto3.client('config', region_name='us-west-2') + + # Need a recorder to test the constraint on recording being enabled: + client.put_configuration_recorder(ConfigurationRecorder={ + 'name': 'testrecorder', + 'roleARN': 'somearn', + 'recordingGroup': { + 
'allSupported': False, + 'includeGlobalResourceTypes': False, + 'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC'] + } + }) + client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': 'somebucket'}) + client.start_configuration_recorder(ConfigurationRecorderName='testrecorder') + + # With the recorder enabled: + with assert_raises(ClientError) as ce: + client.delete_delivery_channel(DeliveryChannelName='testchannel') + assert ce.exception.response['Error']['Code'] == 'LastDeliveryChannelDeleteFailedException' + assert 'because there is a running configuration recorder.' in ce.exception.response['Error']['Message'] + + # Stop recording: + client.stop_configuration_recorder(ConfigurationRecorderName='testrecorder') + + # Try again: + client.delete_delivery_channel(DeliveryChannelName='testchannel') + + # Verify: + with assert_raises(ClientError) as ce: + client.delete_delivery_channel(DeliveryChannelName='testchannel') + assert ce.exception.response['Error']['Code'] == 'NoSuchDeliveryChannelException' From 74ff2ccc959ef3950318a71a884b70877841bb3d Mon Sep 17 00:00:00 2001 From: Domenico Testa Date: Mon, 4 Mar 2019 09:16:43 +0100 Subject: [PATCH 134/175] Reformatting to get better coverage results --- moto/s3/responses.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/moto/s3/responses.py b/moto/s3/responses.py index e4f97cd43..6240c6b53 100755 --- a/moto/s3/responses.py +++ b/moto/s3/responses.py @@ -19,8 +19,7 @@ from .exceptions import BucketAlreadyExists, S3ClientError, MissingBucket, Missi MalformedACLError, InvalidNotificationARN, InvalidNotificationEvent from .models import s3_backend, get_canned_acl, FakeGrantee, FakeGrant, FakeAcl, FakeKey, FakeTagging, FakeTagSet, \ FakeTag -from .utils import bucket_name_from_url, clean_key_name, metadata_from_headers, \ - parse_region_from_url +from .utils import bucket_name_from_url, clean_key_name, metadata_from_headers, parse_region_from_url from xml.dom import minidom 
From d6022417f5c45b1f5c245f2125ffcfeef40a2ce9 Mon Sep 17 00:00:00 2001 From: Mike Grima Date: Thu, 7 Mar 2019 12:53:01 -0800 Subject: [PATCH 135/175] Forgot to add Config to the `__init__.py`. --- moto/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/moto/__init__.py b/moto/__init__.py index e86c499a7..8e9b91bce 100644 --- a/moto/__init__.py +++ b/moto/__init__.py @@ -13,6 +13,7 @@ from .cloudformation import mock_cloudformation, mock_cloudformation_deprecated from .cloudwatch import mock_cloudwatch, mock_cloudwatch_deprecated # flake8: noqa from .cognitoidentity import mock_cognitoidentity, mock_cognitoidentity_deprecated # flake8: noqa from .cognitoidp import mock_cognitoidp, mock_cognitoidp_deprecated # flake8: noqa +from .config import mock_config # flake8: noqa from .datapipeline import mock_datapipeline, mock_datapipeline_deprecated # flake8: noqa from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated # flake8: noqa from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated # flake8: noqa From 15b3ede3cc3b1eaadbd2fedff8404c213883e017 Mon Sep 17 00:00:00 2001 From: Tay Frost Date: Thu, 7 Mar 2019 16:59:03 -0500 Subject: [PATCH 136/175] Add test for case where ebs volume has no tags. This commit adds a test for a case where an EBS volume has no tags. When an EBS volume has no tags, calls to the aws ec2 endpoints `create_volume` and `describe_volumes` do not include the `Tags` key in the `response.Volumes[]` object. However, moto does include the `Tags` key in this case. This discrepancy in behaviour can result in code passing a moto test but failing in production. 
Sample snippets that trigger this condition: ``` def create_volume_and_then_get_tags_from_response(): client = boto3.client('ec2', region_name='us-east-1') volume_response = client.create_volume( Size=10, AvailabilityZone='us-east-1a' ) keys = volume_response['Keys'] ``` ``` def create_volume_and_then_get_tags_from_describe_volumes(): client = boto3.client('ec2', region_name='us-east-1') volume_response = client.create_volume( Size=10, AvailabilityZone='us-east-1a' ) volume_describe_response = client.describe_volumes() keys = volume_describe_response['Volumes'][0]['Keys'] ``` Both sample snippets will succeed in a moto test, but fail with a `KeyError` when using the aws api. --- tests/test_ec2/test_elastic_block_store.py | 24 ++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/tests/test_ec2/test_elastic_block_store.py b/tests/test_ec2/test_elastic_block_store.py index 442e41dde..8f4a00b13 100644 --- a/tests/test_ec2/test_elastic_block_store.py +++ b/tests/test_ec2/test_elastic_block_store.py @@ -589,6 +589,18 @@ def test_volume_tag_escaping(): dict(snaps[0].tags).should.equal({'key': ''}) +@mock_ec2 +def test_volume_property_hidden_when_no_tags_exist(): + ec2_client = boto3.client('ec2', region_name='us-east-1') + + volume_response = ec2_client.create_volume( + Size=10, + AvailabilityZone='us-east-1a' + ) + + volume_response.get('Tags').should.equal(None) + + @freeze_time @mock_ec2 def test_copy_snapshot(): @@ -602,26 +614,26 @@ def test_copy_snapshot(): create_snapshot_response = ec2_client.create_snapshot( VolumeId=volume_response['VolumeId'] ) - + copy_snapshot_response = dest_ec2_client.copy_snapshot( SourceSnapshotId=create_snapshot_response['SnapshotId'], SourceRegion="eu-west-1" ) - + ec2 = boto3.resource('ec2', region_name='eu-west-1') dest_ec2 = boto3.resource('ec2', region_name='eu-west-2') - + source = ec2.Snapshot(create_snapshot_response['SnapshotId']) dest = dest_ec2.Snapshot(copy_snapshot_response['SnapshotId']) - + 
attribs = ['data_encryption_key_id', 'encrypted', 'kms_key_id', 'owner_alias', 'owner_id', 'progress', 'state', 'state_message', 'tags', 'volume_id', 'volume_size'] - + for attrib in attribs: getattr(source, attrib).should.equal(getattr(dest, attrib)) - + # Copy from non-existent source ID. with assert_raises(ClientError) as cm: create_snapshot_error = ec2_client.create_snapshot( From 7b236c4dedac874486ed9947c94847375404212a Mon Sep 17 00:00:00 2001 From: Tay Frost Date: Thu, 7 Mar 2019 17:07:15 -0500 Subject: [PATCH 137/175] bugfix ebs volume tag behaviour This commit modifies the response format of the ec2 calls `create_volume` and `describe_volumes`. Previously, these calls would always include a `Tags` key in the response, even when a volume has no tags. Now, the `Tags` key will not be included in the response if the volume has no tags. When an EBS volume has no tags, calls to the aws ec2 endpoints `create_volume` and `describe_volumes` do not include the `Tags` key in the `response.Volumes[]` object. However, moto does include the `Tags` key in this case. This discrepancy in behaviour can result in code passing a moto test but failing in production. Sample snippets that trigger this condition: ``` def create_volume_and_then_get_tags_from_response(): client = boto3.client('ec2', region_name='us-east-1') volume_response = client.create_volume( Size=10, AvailabilityZone='us-east-1a' ) keys = volume_response['Keys'] ``` ``` def create_volume_and_then_get_tags_from_describe_volumes(): client = boto3.client('ec2', region_name='us-east-1') volume_response = client.create_volume( Size=10, AvailabilityZone='us-east-1a' ) volume_describe_response = client.describe_volumes() keys = volume_describe_response['Volumes'][0]['Keys'] ``` Both sample snippets will succeed in a moto test, but fail with a `KeyError` when using the aws api. 
--- moto/ec2/responses/elastic_block_store.py | 44 ++++++++++++----------- 1 file changed, 24 insertions(+), 20 deletions(-) diff --git a/moto/ec2/responses/elastic_block_store.py b/moto/ec2/responses/elastic_block_store.py index aa0d7f73b..acd37b283 100644 --- a/moto/ec2/responses/elastic_block_store.py +++ b/moto/ec2/responses/elastic_block_store.py @@ -150,16 +150,18 @@ CREATE_VOLUME_RESPONSE = """ {% for key in key_list %} {{ key.name }} - {{ key.version_id }} + {% if key.version_id is none %}null{% else %}{{ key.version_id }}{% endif %} {% if latest_versions[key.name] == key.version_id %}true{% else %}false{% endif %} {{ key.last_modified_ISO8601 }} {{ key.etag }} diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index eea720240..cf45822b5 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -444,7 +444,12 @@ def test_copy_key_with_version(): key.set_contents_from_string("some value") key.set_contents_from_string("another value") - bucket.copy_key('new-key', 'foobar', 'the-key', src_version_id='0') + key = [ + key.version_id + for key in bucket.get_all_versions() + if not key.is_latest + ][0] + bucket.copy_key('new-key', 'foobar', 'the-key', src_version_id=key) bucket.get_key( "the-key").get_contents_as_string().should.equal(b"another value") @@ -818,16 +823,19 @@ def test_key_version(): bucket = conn.create_bucket('foobar') bucket.configure_versioning(versioning=True) + versions = [] + key = Key(bucket) key.key = 'the-key' key.version_id.should.be.none key.set_contents_from_string('some string') - key.version_id.should.equal('0') + versions.append(key.version_id) key.set_contents_from_string('some string') - key.version_id.should.equal('1') + versions.append(key.version_id) + set(versions).should.have.length_of(2) key = bucket.get_key('the-key') - key.version_id.should.equal('1') + key.version_id.should.equal(versions[-1]) @mock_s3_deprecated @@ -836,23 +844,25 @@ def test_list_versions(): bucket = 
conn.create_bucket('foobar') bucket.configure_versioning(versioning=True) + key_versions = [] + key = Key(bucket, 'the-key') key.version_id.should.be.none key.set_contents_from_string("Version 1") - key.version_id.should.equal('0') + key_versions.append(key.version_id) key.set_contents_from_string("Version 2") - key.version_id.should.equal('1') + key_versions.append(key.version_id) + key_versions.should.have.length_of(2) versions = list(bucket.list_versions()) - versions.should.have.length_of(2) versions[0].name.should.equal('the-key') - versions[0].version_id.should.equal('0') + versions[0].version_id.should.equal(key_versions[0]) versions[0].get_contents_as_string().should.equal(b"Version 1") versions[1].name.should.equal('the-key') - versions[1].version_id.should.equal('1') + versions[1].version_id.should.equal(key_versions[1]) versions[1].get_contents_as_string().should.equal(b"Version 2") key = Key(bucket, 'the2-key') @@ -1483,16 +1493,22 @@ def test_boto3_head_object_with_versioning(): s3.Object('blah', 'hello.txt').put(Body=old_content) s3.Object('blah', 'hello.txt').put(Body=new_content) + versions = list(s3.Bucket('blah').object_versions.all()) + latest = list(filter(lambda item: item.is_latest, versions))[0] + oldest = list(filter(lambda item: not item.is_latest, versions))[0] + head_object = s3.Object('blah', 'hello.txt').meta.client.head_object( Bucket='blah', Key='hello.txt') - head_object['VersionId'].should.equal('1') + head_object['VersionId'].should.equal(latest.id) head_object['ContentLength'].should.equal(len(new_content)) old_head_object = s3.Object('blah', 'hello.txt').meta.client.head_object( - Bucket='blah', Key='hello.txt', VersionId='0') - old_head_object['VersionId'].should.equal('0') + Bucket='blah', Key='hello.txt', VersionId=oldest.id) + old_head_object['VersionId'].should.equal(oldest.id) old_head_object['ContentLength'].should.equal(len(old_content)) + old_head_object['VersionId'].should_not.equal(head_object['VersionId']) + @mock_s3 
def test_boto3_copy_object_with_versioning(): @@ -1507,9 +1523,6 @@ def test_boto3_copy_object_with_versioning(): obj1_version = client.get_object(Bucket='blah', Key='test1')['VersionId'] obj2_version = client.get_object(Bucket='blah', Key='test2')['VersionId'] - # Versions should be the same - obj1_version.should.equal(obj2_version) - client.copy_object(CopySource={'Bucket': 'blah', 'Key': 'test1'}, Bucket='blah', Key='test2') obj2_version_new = client.get_object(Bucket='blah', Key='test2')['VersionId'] @@ -2507,6 +2520,75 @@ def test_boto3_list_object_versions(): response['Body'].read().should.equal(items[-1]) +@mock_s3 +def test_boto3_list_object_versions_with_versioning_disabled(): + s3 = boto3.client('s3', region_name='us-east-1') + bucket_name = 'mybucket' + key = 'key-with-versions' + s3.create_bucket(Bucket=bucket_name) + items = (six.b('v1'), six.b('v2')) + for body in items: + s3.put_object( + Bucket=bucket_name, + Key=key, + Body=body + ) + response = s3.list_object_versions( + Bucket=bucket_name + ) + + # One object version should be returned + len(response['Versions']).should.equal(1) + response['Versions'][0]['Key'].should.equal(key) + + # The version id should be the string null + response['Versions'][0]['VersionId'].should.equal('null') + + # Test latest object version is returned + response = s3.get_object(Bucket=bucket_name, Key=key) + response['Body'].read().should.equal(items[-1]) + + +@mock_s3 +def test_boto3_list_object_versions_with_versioning_enabled_late(): + s3 = boto3.client('s3', region_name='us-east-1') + bucket_name = 'mybucket' + key = 'key-with-versions' + s3.create_bucket(Bucket=bucket_name) + items = (six.b('v1'), six.b('v2')) + s3.put_object( + Bucket=bucket_name, + Key=key, + Body=six.b('v1') + ) + s3.put_bucket_versioning( + Bucket=bucket_name, + VersioningConfiguration={ + 'Status': 'Enabled' + } + ) + s3.put_object( + Bucket=bucket_name, + Key=key, + Body=six.b('v2') + ) + response = s3.list_object_versions( + 
Bucket=bucket_name + ) + + # Two object versions should be returned + len(response['Versions']).should.equal(2) + keys = set([item['Key'] for item in response['Versions']]) + keys.should.equal({key}) + + # There should still be a null version id. + versionsId = set([item['VersionId'] for item in response['Versions']]) + versionsId.should.contain('null') + + # Test latest object version is returned + response = s3.get_object(Bucket=bucket_name, Key=key) + response['Body'].read().should.equal(items[-1]) + @mock_s3 def test_boto3_bad_prefix_list_object_versions(): s3 = boto3.client('s3', region_name='us-east-1') @@ -2563,18 +2645,25 @@ def test_boto3_delete_markers(): Bucket=bucket_name, Key=key ) - e.response['Error']['Code'].should.equal('404') + e.exception.response['Error']['Code'].should.equal('NoSuchKey') + + response = s3.list_object_versions( + Bucket=bucket_name + ) + response['Versions'].should.have.length_of(2) + response['DeleteMarkers'].should.have.length_of(1) s3.delete_object( Bucket=bucket_name, Key=key, - VersionId='2' + VersionId=response['DeleteMarkers'][0]['VersionId'] ) response = s3.get_object( Bucket=bucket_name, Key=key ) response['Body'].read().should.equal(items[-1]) + response = s3.list_object_versions( Bucket=bucket_name ) @@ -2583,10 +2672,8 @@ def test_boto3_delete_markers(): # We've asserted there is only 2 records so one is newest, one is oldest latest = list(filter(lambda item: item['IsLatest'], response['Versions']))[0] oldest = list(filter(lambda item: not item['IsLatest'], response['Versions']))[0] - # Double check ordering of version ID's - latest['VersionId'].should.equal('1') - oldest['VersionId'].should.equal('0') + latest['VersionId'].should_not.equal(oldest['VersionId']) # Double check the name is still unicode latest['Key'].should.equal('key-with-versions-and-unicode-ó') @@ -2631,12 +2718,12 @@ def test_boto3_multiple_delete_markers(): s3.delete_object( Bucket=bucket_name, Key=key, - VersionId='2' + 
VersionId=response['DeleteMarkers'][0]['VersionId'] ) s3.delete_object( Bucket=bucket_name, Key=key, - VersionId='3' + VersionId=response['DeleteMarkers'][1]['VersionId'] ) response = s3.get_object( @@ -2652,8 +2739,7 @@ def test_boto3_multiple_delete_markers(): oldest = list(filter(lambda item: not item['IsLatest'], response['Versions']))[0] # Double check ordering of version ID's - latest['VersionId'].should.equal('1') - oldest['VersionId'].should.equal('0') + latest['VersionId'].should_not.equal(oldest['VersionId']) # Double check the name is still unicode latest['Key'].should.equal('key-with-versions-and-unicode-ó') From 6d67418c415f65870adbd6d3ecfa4e48163f46e1 Mon Sep 17 00:00:00 2001 From: Mike Grima Date: Mon, 11 Mar 2019 13:25:36 -0700 Subject: [PATCH 139/175] Fixed validation bugs in put_configuration_recorder --- moto/config/models.py | 14 ++++++++------ tests/test_config/test_config.py | 6 +++++- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/moto/config/models.py b/moto/config/models.py index 5e43c6b12..cd6e07afa 100644 --- a/moto/config/models.py +++ b/moto/config/models.py @@ -166,21 +166,23 @@ class ConfigBackend(BaseBackend): recorder_status = self.recorders[config_recorder['name']].status # Validate the Recording Group: - if not config_recorder.get('recordingGroup'): + if config_recorder.get('recordingGroup') is None: recording_group = RecordingGroup() else: rg = config_recorder['recordingGroup'] + # If an empty dict is passed in, then bad: + if not rg: + raise InvalidRecordingGroupException() + # Can't have both the resource types specified and the other flags as True. 
if rg.get('resourceTypes') and ( - rg.get('allSupported', True) or + rg.get('allSupported', False) or rg.get('includeGlobalResourceTypes', False)): raise InvalidRecordingGroupException() - # If an empty dict is provided, then bad: - if not rg.get('resourceTypes', False) \ - and not rg.get('resourceTypes') \ - and not rg.get('includeGlobalResourceTypes', False): + # Must supply resourceTypes if 'allSupported' is not supplied: + if not rg.get('allSupported') and not rg.get('resourceTypes'): raise InvalidRecordingGroupException() # Validate that the list provided is correct: diff --git a/tests/test_config/test_config.py b/tests/test_config/test_config.py index a61d2a29c..96c62455c 100644 --- a/tests/test_config/test_config.py +++ b/tests/test_config/test_config.py @@ -28,7 +28,11 @@ def test_put_configuration_recorder(): {'allSupported': True, 'includeGlobalResourceTypes': True, 'resourceTypes': ['item']}, {'allSupported': False, 'includeGlobalResourceTypes': True, 'resourceTypes': ['item']}, {'allSupported': True, 'includeGlobalResourceTypes': False, 'resourceTypes': ['item']}, - {'allSupported': False, 'includeGlobalResourceTypes': False, 'resourceTypes': []} + {'allSupported': False, 'includeGlobalResourceTypes': False, 'resourceTypes': []}, + {'includeGlobalResourceTypes': False, 'resourceTypes': []}, + {'includeGlobalResourceTypes': True}, + {'resourceTypes': []}, + {} ] for bg in bad_groups: From d53626ad9a1a0ac616ba7cdd77185a17b4efdc23 Mon Sep 17 00:00:00 2001 From: Andy Tumelty Date: Tue, 12 Mar 2019 16:27:37 +0000 Subject: [PATCH 140/175] Add support for iam update_user This covers both the NewPath and NewUserName parameters for update_user, but without regex validation for these values. 
--- IMPLEMENTATION_COVERAGE.md | 2 +- moto/iam/models.py | 12 ++++++++++++ moto/iam/responses.py | 12 ++++++++++++ tests/test_iam/test_iam.py | 13 +++++++++++++ 4 files changed, 38 insertions(+), 1 deletion(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index a5650f572..1c76b04cf 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -2334,7 +2334,7 @@ - [ ] update_service_specific_credential - [X] update_signing_certificate - [ ] update_ssh_public_key -- [ ] update_user +- [X] update_user - [X] upload_server_certificate - [X] upload_signing_certificate - [ ] upload_ssh_public_key diff --git a/moto/iam/models.py b/moto/iam/models.py index 92ac19da7..ae993ebfd 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -892,6 +892,18 @@ class IAMBackend(BaseBackend): return users + def update_user(self, user_name, new_path=None, new_user_name=None): + try: + user = self.users[user_name] + except KeyError: + raise IAMNotFoundException("User {0} not found".format(user_name)) + + if new_path: + user.path = new_path + if new_user_name: + user.name = new_user_name + self.users[new_user_name] = self.users.pop(user_name) + def list_roles(self, path_prefix, marker, max_items): roles = None try: diff --git a/moto/iam/responses.py b/moto/iam/responses.py index 5b19c9cdc..e5b4c9070 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -440,6 +440,18 @@ class IamResponse(BaseResponse): template = self.response_template(LIST_USERS_TEMPLATE) return template.render(action='List', users=users) + def update_user(self): + user_name = self._get_param('UserName') + new_path = self._get_param('NewPath') + new_user_name = self._get_param('NewUserName') + iam_backend.update_user(user_name, new_path, new_user_name) + if new_user_name: + user = iam_backend.get_user(new_user_name) + else: + user = iam_backend.get_user(user_name) + template = self.response_template(USER_TEMPLATE) + return template.render(action='Update', user=user) + 
def create_login_profile(self): user_name = self._get_param('UserName') password = self._get_param('Password') diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index ceec5e06a..5875b747a 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -401,6 +401,19 @@ def test_get_user(): conn.get_user('my-user') +@mock_iam() +def test_update_user(): + conn = boto3.client('iam', region_name='us-east-1') + with assert_raises(conn.exceptions.NoSuchEntityException): + conn.update_user(UserName='my-user') + conn.create_user(UserName='my-user') + conn.update_user(UserName='my-user', NewPath='/new-path/', NewUserName='new-user') + response = conn.get_user(UserName='new-user') + response['User'].get('Path').should.equal('/new-path/') + with assert_raises(conn.exceptions.NoSuchEntityException): + conn.get_user(UserName='my-user') + + @mock_iam_deprecated() def test_get_current_user(): """If no user is specific, IAM returns the current user""" From 9ed80f14e8a3d7af81e51886b29f2a033678a34f Mon Sep 17 00:00:00 2001 From: Robert Jensen Date: Tue, 12 Mar 2019 17:52:34 -0400 Subject: [PATCH 141/175] fix coverage, update IMPLEMENTATION_COVERAGE --- IMPLEMENTATION_COVERAGE.md | 2 +- tests/test_cognitoidp/test_cognitoidp.py | 44 ++++++++++++++++++++++++ 2 files changed, 45 insertions(+), 1 deletion(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index a5650f572..1451f421a 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -916,7 +916,7 @@ - [ ] update_auth_event_feedback - [ ] update_device_status - [ ] update_group -- [ ] update_identity_provider +- [x] update_identity_provider - [ ] update_resource_server - [ ] update_user_attributes - [ ] update_user_pool diff --git a/tests/test_cognitoidp/test_cognitoidp.py b/tests/test_cognitoidp/test_cognitoidp.py index 5706c9c8d..e4e38e821 100644 --- a/tests/test_cognitoidp/test_cognitoidp.py +++ b/tests/test_cognitoidp/test_cognitoidp.py @@ -516,6 +516,50 @@ 
def test_update_identity_provider(): result["IdentityProvider"]["ProviderDetails"]["thing"].should.equal(new_value) +@mock_cognitoidp +def test_update_identity_provider_no_user_pool(): + conn = boto3.client("cognito-idp", "us-west-2") + + new_value = str(uuid.uuid4()) + + with assert_raises(conn.exceptions.ResourceNotFoundException) as cm: + conn.update_identity_provider( + UserPoolId="foo", + ProviderName="bar", + ProviderDetails={ + "thing": new_value + }, + ) + + cm.exception.operation_name.should.equal('UpdateIdentityProvider') + cm.exception.response['Error']['Code'].should.equal('ResourceNotFoundException') + cm.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) + + +@mock_cognitoidp +def test_update_identity_provider_no_identity_provider(): + conn = boto3.client("cognito-idp", "us-west-2") + + provider_name = str(uuid.uuid4()) + provider_type = "Facebook" + value = str(uuid.uuid4()) + new_value = str(uuid.uuid4()) + user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"] + + with assert_raises(conn.exceptions.ResourceNotFoundException) as cm: + conn.update_identity_provider( + UserPoolId=user_pool_id, + ProviderName="foo", + ProviderDetails={ + "thing": new_value + }, + ) + + cm.exception.operation_name.should.equal('UpdateIdentityProvider') + cm.exception.response['Error']['Code'].should.equal('ResourceNotFoundException') + cm.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400) + + @mock_cognitoidp def test_delete_identity_providers(): conn = boto3.client("cognito-idp", "us-west-2") From 11ff548d147c88727b2858b2ebc730e09899f228 Mon Sep 17 00:00:00 2001 From: Tomoya Iwata Date: Sun, 17 Mar 2019 17:54:34 +0900 Subject: [PATCH 142/175] fix #2113 moto must return Http status code 201 when lambda publish_version has succeeded --- moto/awslambda/responses.py | 2 +- tests/test_awslambda/test_lambda.py | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git 
a/moto/awslambda/responses.py b/moto/awslambda/responses.py index d4eb73bc3..1c43ef84b 100644 --- a/moto/awslambda/responses.py +++ b/moto/awslambda/responses.py @@ -183,7 +183,7 @@ class LambdaResponse(BaseResponse): fn = self.lambda_backend.publish_function(function_name) if fn: config = fn.get_configuration() - return 200, {}, json.dumps(config) + return 201, {}, json.dumps(config) else: return 404, {}, "{}" diff --git a/tests/test_awslambda/test_lambda.py b/tests/test_awslambda/test_lambda.py index 7f3b44b79..34fd9c9b3 100644 --- a/tests/test_awslambda/test_lambda.py +++ b/tests/test_awslambda/test_lambda.py @@ -4,6 +4,7 @@ import base64 import botocore.client import boto3 import hashlib +from http import HTTPStatus import io import json import time @@ -471,7 +472,8 @@ def test_publish(): function_list['Functions'].should.have.length_of(1) latest_arn = function_list['Functions'][0]['FunctionArn'] - conn.publish_version(FunctionName='testFunction') + res = conn.publish_version(FunctionName='testFunction') + assert res['ResponseMetadata']['HTTPStatusCode'] == HTTPStatus.CREATED function_list = conn.list_functions() function_list['Functions'].should.have.length_of(2) @@ -853,8 +855,8 @@ def test_list_versions_by_function(): Publish=True, ) - conn.publish_version(FunctionName='testFunction') - + res = conn.publish_version(FunctionName='testFunction') + assert res['ResponseMetadata']['HTTPStatusCode'] == HTTPStatus.CREATED versions = conn.list_versions_by_function(FunctionName='testFunction') assert versions['Versions'][0]['FunctionArn'] == 'arn:aws:lambda:us-west-2:123456789012:function:testFunction:$LATEST' From 8644b2ff1d48cc326cbf20dc423289964ea84d68 Mon Sep 17 00:00:00 2001 From: Jessie Nadler Date: Mon, 25 Mar 2019 19:11:06 -0400 Subject: [PATCH 143/175] Add get_cfn_attribute support for ECS Cluster and Service --- moto/ecs/models.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/moto/ecs/models.py b/moto/ecs/models.py index 
4a6737ceb..f51493c3f 100644 --- a/moto/ecs/models.py +++ b/moto/ecs/models.py @@ -94,6 +94,12 @@ class Cluster(BaseObject): # no-op when nothing changed between old and new resources return original_resource + def get_cfn_attribute(self, attribute_name): + from moto.cloudformation.exceptions import UnformattedGetAttTemplateException + if attribute_name == 'Arn': + return self.arn + raise UnformattedGetAttTemplateException() + class TaskDefinition(BaseObject): @@ -271,6 +277,12 @@ class Service(BaseObject): else: return ecs_backend.update_service(cluster_name, service_name, task_definition, desired_count) + def get_cfn_attribute(self, attribute_name): + from moto.cloudformation.exceptions import UnformattedGetAttTemplateException + if attribute_name == 'Name': + return self.name + raise UnformattedGetAttTemplateException() + class ContainerInstance(BaseObject): From 497965fadc6f1463da63820dfafd3bee9fd50ffb Mon Sep 17 00:00:00 2001 From: Jessie Nadler Date: Tue, 26 Mar 2019 14:36:31 -0400 Subject: [PATCH 144/175] Return InstanceProfile arn instead of NotImplementedError for get_cfn_attribute --- moto/iam/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/iam/models.py b/moto/iam/models.py index 92ac19da7..8937d262d 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -213,7 +213,7 @@ class InstanceProfile(BaseModel): def get_cfn_attribute(self, attribute_name): from moto.cloudformation.exceptions import UnformattedGetAttTemplateException if attribute_name == 'Arn': - raise NotImplementedError('"Fn::GetAtt" : [ "{0}" , "Arn" ]"') + return self.arn raise UnformattedGetAttTemplateException() From d4e39146b70d78d198041012a3b4f05e4f14cf0b Mon Sep 17 00:00:00 2001 From: Hugo Lopes Tavares Date: Wed, 27 Mar 2019 16:23:49 -0400 Subject: [PATCH 145/175] Make sure every NetworkInterface has a private IP AWS always assigns a primary IP address to Network Interfaces. 
Using a test account (modified the IP): >>> import boto >>> vpc = boto.connect_vpc() >>> eni = vpc.create_network_interface(subnet_id) >>> eni.private_ip_addresses [PrivateIPAddress(10.1.2.3, primary=True)] --- moto/ec2/models.py | 2 +- tests/test_ec2/test_elastic_network_interfaces.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/moto/ec2/models.py b/moto/ec2/models.py index ff766d7b8..ef1425ea7 100644 --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -189,7 +189,7 @@ class NetworkInterface(TaggedEC2Resource): self.ec2_backend = ec2_backend self.id = random_eni_id() self.device_index = device_index - self.private_ip_address = private_ip_address + self.private_ip_address = private_ip_address or random_private_ip() self.subnet = subnet self.instance = None self.attachment_id = None diff --git a/tests/test_ec2/test_elastic_network_interfaces.py b/tests/test_ec2/test_elastic_network_interfaces.py index 828f9d917..581106b42 100644 --- a/tests/test_ec2/test_elastic_network_interfaces.py +++ b/tests/test_ec2/test_elastic_network_interfaces.py @@ -36,7 +36,8 @@ def test_elastic_network_interfaces(): all_enis.should.have.length_of(1) eni = all_enis[0] eni.groups.should.have.length_of(0) - eni.private_ip_addresses.should.have.length_of(0) + eni.private_ip_addresses.should.have.length_of(1) + eni.private_ip_addresses[0].private_ip_address.startswith('10.').should.be.true with assert_raises(EC2ResponseError) as ex: conn.delete_network_interface(eni.id, dry_run=True) From 7c62f4a75c3ae3e9ae22e009793d3578c143bb0b Mon Sep 17 00:00:00 2001 From: Hugo Lopes Tavares Date: Wed, 27 Mar 2019 16:28:18 -0400 Subject: [PATCH 146/175] Add test to CloudFormation and PrimaryPrivateIpAddress GetAtt This test would raise an error before d4e39146b70d78d198041012a3b4f05e4f14cf0b --- tests/test_cloudformation/fixtures/vpc_eni.py | 4 ++++ tests/test_ec2/test_elastic_network_interfaces.py | 4 ++++ 2 files changed, 8 insertions(+) diff --git 
a/tests/test_cloudformation/fixtures/vpc_eni.py b/tests/test_cloudformation/fixtures/vpc_eni.py index ef9eb1d08..3f8eb2d03 100644 --- a/tests/test_cloudformation/fixtures/vpc_eni.py +++ b/tests/test_cloudformation/fixtures/vpc_eni.py @@ -29,6 +29,10 @@ template = { "NinjaENI": { "Description": "Elastic IP mapping to Auto-Scaling Group", "Value": {"Ref": "ENI"} + }, + "ENIIpAddress": { + "Description": "ENI's Private IP address", + "Value": {"Fn::GetAtt": ["ENI", "PrimaryPrivateIpAddress"]} } } } diff --git a/tests/test_ec2/test_elastic_network_interfaces.py b/tests/test_ec2/test_elastic_network_interfaces.py index 581106b42..70e78ae12 100644 --- a/tests/test_ec2/test_elastic_network_interfaces.py +++ b/tests/test_ec2/test_elastic_network_interfaces.py @@ -355,9 +355,13 @@ def test_elastic_network_interfaces_cloudformation(): ) ec2_conn = boto.ec2.connect_to_region("us-west-1") eni = ec2_conn.get_all_network_interfaces()[0] + eni.private_ip_addresses.should.have.length_of(1) stack = conn.describe_stacks()[0] resources = stack.describe_resources() cfn_eni = [resource for resource in resources if resource.resource_type == 'AWS::EC2::NetworkInterface'][0] cfn_eni.physical_resource_id.should.equal(eni.id) + + outputs = {output.key: output.value for output in stack.outputs} + outputs['ENIIpAddress'].should.equal(eni.private_ip_addresses[0].private_ip_address) From 921b5a322788b6269f68405db4bc7ffcef3584f7 Mon Sep 17 00:00:00 2001 From: Mike Grima Date: Mon, 25 Mar 2019 13:19:01 -0700 Subject: [PATCH 147/175] Fixing broken tests #2126 - KMS - S3 - CloudFormation (Thanks kgutwin!) 
--- moto/cloudformation/models.py | 4 ++-- tests/test_cloudformation/test_stack_parsing.py | 4 ++-- tests/test_kms/test_kms.py | 8 ++++---- tests/test_s3/test_server.py | 1 + tests/test_s3bucket_path/test_bucket_path_server.py | 1 + 5 files changed, 10 insertions(+), 8 deletions(-) diff --git a/moto/cloudformation/models.py b/moto/cloudformation/models.py index 864e98a92..242cca1fc 100644 --- a/moto/cloudformation/models.py +++ b/moto/cloudformation/models.py @@ -85,9 +85,9 @@ class FakeStack(BaseModel): def _parse_template(self): yaml.add_multi_constructor('', yaml_tag_constructor) try: - self.template_dict = yaml.load(self.template) + self.template_dict = yaml.load(self.template, Loader=yaml.Loader) except yaml.parser.ParserError: - self.template_dict = json.loads(self.template) + self.template_dict = json.loads(self.template, Loader=yaml.Loader) @property def stack_parameters(self): diff --git a/tests/test_cloudformation/test_stack_parsing.py b/tests/test_cloudformation/test_stack_parsing.py index d25c69cf1..d21db2d48 100644 --- a/tests/test_cloudformation/test_stack_parsing.py +++ b/tests/test_cloudformation/test_stack_parsing.py @@ -448,8 +448,8 @@ def test_short_form_func_in_yaml_teamplate(): KeySplit: !Split [A, B] KeySub: !Sub A """ - yaml.add_multi_constructor('', yaml_tag_constructor) - template_dict = yaml.load(template) + yaml.add_multi_constructor('', yaml_tag_constructor, Loader=yaml.Loader) + template_dict = yaml.load(template, Loader=yaml.Loader) key_and_expects = [ ['KeyRef', {'Ref': 'foo'}], ['KeyB64', {'Fn::Base64': 'valueToEncode'}], diff --git a/tests/test_kms/test_kms.py b/tests/test_kms/test_kms.py index 0f7bab4cd..95e39a0e7 100644 --- a/tests/test_kms/test_kms.py +++ b/tests/test_kms/test_kms.py @@ -8,8 +8,8 @@ import sure # noqa from moto import mock_kms, mock_kms_deprecated from nose.tools import assert_raises from freezegun import freeze_time -from datetime import datetime, timedelta -from dateutil.tz import tzlocal +from datetime import 
datetime +from dateutil.tz import tzutc @mock_kms_deprecated @@ -660,7 +660,7 @@ def test_schedule_key_deletion(): KeyId=key['KeyMetadata']['KeyId'] ) assert response['KeyId'] == key['KeyMetadata']['KeyId'] - assert response['DeletionDate'] == datetime(2015, 1, 31, 12, 0, tzinfo=tzlocal()) + assert response['DeletionDate'] == datetime(2015, 1, 31, 12, 0, tzinfo=tzutc()) else: # Can't manipulate time in server mode response = client.schedule_key_deletion( @@ -685,7 +685,7 @@ def test_schedule_key_deletion_custom(): PendingWindowInDays=7 ) assert response['KeyId'] == key['KeyMetadata']['KeyId'] - assert response['DeletionDate'] == datetime(2015, 1, 8, 12, 0, tzinfo=tzlocal()) + assert response['DeletionDate'] == datetime(2015, 1, 8, 12, 0, tzinfo=tzutc()) else: # Can't manipulate time in server mode response = client.schedule_key_deletion( diff --git a/tests/test_s3/test_server.py b/tests/test_s3/test_server.py index 9c8252a04..b179a2329 100644 --- a/tests/test_s3/test_server.py +++ b/tests/test_s3/test_server.py @@ -15,6 +15,7 @@ class AuthenticatedClient(FlaskClient): def open(self, *args, **kwargs): kwargs['headers'] = kwargs.get('headers', {}) kwargs['headers']['Authorization'] = "Any authorization header" + kwargs['content_length'] = 0 # Fixes content-length complaints. return super(AuthenticatedClient, self).open(*args, **kwargs) diff --git a/tests/test_s3bucket_path/test_bucket_path_server.py b/tests/test_s3bucket_path/test_bucket_path_server.py index 434110e87..f6238dd28 100644 --- a/tests/test_s3bucket_path/test_bucket_path_server.py +++ b/tests/test_s3bucket_path/test_bucket_path_server.py @@ -13,6 +13,7 @@ class AuthenticatedClient(FlaskClient): def open(self, *args, **kwargs): kwargs['headers'] = kwargs.get('headers', {}) kwargs['headers']['Authorization'] = "Any authorization header" + kwargs['content_length'] = 0 # Fixes content-length complaints. 
return super(AuthenticatedClient, self).open(*args, **kwargs) From 79e47fd98ff005848f1592a62b18a485e199ebba Mon Sep 17 00:00:00 2001 From: Kyle Decot Date: Wed, 27 Feb 2019 15:05:58 -0500 Subject: [PATCH 148/175] Returns an empty list when the cluster does not exist --- moto/ecs/models.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/moto/ecs/models.py b/moto/ecs/models.py index 4a6737ceb..13de67268 100644 --- a/moto/ecs/models.py +++ b/moto/ecs/models.py @@ -428,9 +428,6 @@ class EC2ContainerServiceBackend(BaseBackend): if cluster_name in self.clusters: list_clusters.append( self.clusters[cluster_name].response_object) - else: - raise Exception( - "{0} is not a cluster".format(cluster_name)) return list_clusters def delete_cluster(self, cluster_str): From d181897ec91f3776a44b21d8307b72d407c044c0 Mon Sep 17 00:00:00 2001 From: Nick Venenga Date: Fri, 22 Mar 2019 12:08:02 -0400 Subject: [PATCH 149/175] Add proper failure response to describe_clusters --- moto/ecs/models.py | 19 ++++++++++++++++++- moto/ecs/responses.py | 4 ++-- tests/test_ecs/test_ecs_boto3.py | 9 +++++++++ 3 files changed, 29 insertions(+), 3 deletions(-) diff --git a/moto/ecs/models.py b/moto/ecs/models.py index 13de67268..c9fdb9808 100644 --- a/moto/ecs/models.py +++ b/moto/ecs/models.py @@ -358,6 +358,20 @@ class ContainerInstance(BaseObject): return formatted_attr +class ClusterFailure(BaseObject): + def __init__(self, reason, cluster_name): + self.reason = reason + self.arn = "arn:aws:ecs:us-east-1:012345678910:cluster/{0}".format( + cluster_name) + + @property + def response_object(self): + response_object = self.gen_response_object() + response_object['reason'] = self.reason + response_object['arn'] = self.arn + return response_object + + class ContainerInstanceFailure(BaseObject): def __init__(self, reason, container_instance_id): @@ -419,6 +433,7 @@ class EC2ContainerServiceBackend(BaseBackend): def describe_clusters(self, list_clusters_name=None): list_clusters = [] + failures = [] 
if list_clusters_name is None: if 'default' in self.clusters: list_clusters.append(self.clusters['default'].response_object) @@ -428,7 +443,9 @@ class EC2ContainerServiceBackend(BaseBackend): if cluster_name in self.clusters: list_clusters.append( self.clusters[cluster_name].response_object) - return list_clusters + else: + failures.append(ClusterFailure('MISSING', cluster_name)) + return list_clusters, failures def delete_cluster(self, cluster_str): cluster_name = cluster_str.split('/')[-1] diff --git a/moto/ecs/responses.py b/moto/ecs/responses.py index e0bfefc02..964ef59d2 100644 --- a/moto/ecs/responses.py +++ b/moto/ecs/responses.py @@ -45,10 +45,10 @@ class EC2ContainerServiceResponse(BaseResponse): def describe_clusters(self): list_clusters_name = self._get_param('clusters') - clusters = self.ecs_backend.describe_clusters(list_clusters_name) + clusters, failures = self.ecs_backend.describe_clusters(list_clusters_name) return json.dumps({ 'clusters': clusters, - 'failures': [] + 'failures': [cluster.response_object for cluster in failures] }) def delete_cluster(self): diff --git a/tests/test_ecs/test_ecs_boto3.py b/tests/test_ecs/test_ecs_boto3.py index a0d470935..3bf25b8fc 100644 --- a/tests/test_ecs/test_ecs_boto3.py +++ b/tests/test_ecs/test_ecs_boto3.py @@ -47,6 +47,15 @@ def test_list_clusters(): 'arn:aws:ecs:us-east-1:012345678910:cluster/test_cluster1') +@mock_ecs +def test_describe_clusters(): + client = boto3.client('ecs', region_name='us-east-1') + response = client.describe_clusters(clusters=["some-cluster"]) + response['failures'].should.contain({ + 'arn': 'arn:aws:ecs:us-east-1:012345678910:cluster/some-cluster', + 'reason': 'MISSING' + }) + @mock_ecs def test_delete_cluster(): client = boto3.client('ecs', region_name='us-east-1') From 52ad71879dda750ff8d5626888365b81938ee14b Mon Sep 17 00:00:00 2001 From: Tomoya Iwata Date: Fri, 29 Mar 2019 15:52:47 +0900 Subject: [PATCH 150/175] modified test code for python2.7 --- 
tests/test_awslambda/test_lambda.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/test_awslambda/test_lambda.py b/tests/test_awslambda/test_lambda.py index 34fd9c9b3..479aaaa8a 100644 --- a/tests/test_awslambda/test_lambda.py +++ b/tests/test_awslambda/test_lambda.py @@ -4,7 +4,6 @@ import base64 import botocore.client import boto3 import hashlib -from http import HTTPStatus import io import json import time @@ -473,7 +472,7 @@ def test_publish(): latest_arn = function_list['Functions'][0]['FunctionArn'] res = conn.publish_version(FunctionName='testFunction') - assert res['ResponseMetadata']['HTTPStatusCode'] == HTTPStatus.CREATED + assert res['ResponseMetadata']['HTTPStatusCode'] == 201 function_list = conn.list_functions() function_list['Functions'].should.have.length_of(2) @@ -856,7 +855,7 @@ def test_list_versions_by_function(): ) res = conn.publish_version(FunctionName='testFunction') - assert res['ResponseMetadata']['HTTPStatusCode'] == HTTPStatus.CREATED + assert res['ResponseMetadata']['HTTPStatusCode'] == 201 versions = conn.list_versions_by_function(FunctionName='testFunction') assert versions['Versions'][0]['FunctionArn'] == 'arn:aws:lambda:us-west-2:123456789012:function:testFunction:$LATEST' From dbdc8925e35a0bb244c265be87429f7a81d543f4 Mon Sep 17 00:00:00 2001 From: Earl Robinson Date: Fri, 29 Mar 2019 21:02:25 -0400 Subject: [PATCH 151/175] add KeyId value to kms.responses.encrypt and kms.responses.decrypt --- moto/kms/responses.py | 4 ++-- tests/test_kms/test_kms.py | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/moto/kms/responses.py b/moto/kms/responses.py index 2674f765c..ed6accc78 100644 --- a/moto/kms/responses.py +++ b/moto/kms/responses.py @@ -249,11 +249,11 @@ class KmsResponse(BaseResponse): value = self.parameters.get("Plaintext") if isinstance(value, six.text_type): value = value.encode('utf-8') - return json.dumps({"CiphertextBlob": base64.b64encode(value).decode("utf-8")}) + return 
json.dumps({"CiphertextBlob": base64.b64encode(value).decode("utf-8"), 'KeyId': 'key_id'}) def decrypt(self): value = self.parameters.get("CiphertextBlob") - return json.dumps({"Plaintext": base64.b64decode(value).decode("utf-8")}) + return json.dumps({"Plaintext": base64.b64decode(value).decode("utf-8"), 'KeyId': 'key_id'}) def disable_key(self): key_id = self.parameters.get('KeyId') diff --git a/tests/test_kms/test_kms.py b/tests/test_kms/test_kms.py index 95e39a0e7..79f95fa1b 100644 --- a/tests/test_kms/test_kms.py +++ b/tests/test_kms/test_kms.py @@ -171,6 +171,7 @@ def test_encrypt(): conn = boto.kms.connect_to_region("us-west-2") response = conn.encrypt('key_id', 'encryptme'.encode('utf-8')) response['CiphertextBlob'].should.equal(b'ZW5jcnlwdG1l') + response['KeyId'].should.equal('key_id') @mock_kms_deprecated @@ -178,6 +179,7 @@ def test_decrypt(): conn = boto.kms.connect_to_region('us-west-2') response = conn.decrypt('ZW5jcnlwdG1l'.encode('utf-8')) response['Plaintext'].should.equal(b'encryptme') + response['KeyId'].should.equal('key_id') @mock_kms_deprecated From 6b7282f93c4e45a2b26a9f3b732e8aabb9acd232 Mon Sep 17 00:00:00 2001 From: hsuhans Date: Sat, 30 Mar 2019 23:26:50 +0800 Subject: [PATCH 152/175] Fix #2129 EC2 tag should raise ClientError when resource is empty Raise MissingParameterError exception in models/validate_resource_ids of ec2. Add ec2 create tag with empty resource test case. Add ec2 delete tag with empty resource test case. 
Related: #2129 Reference boto3 create_tags https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2.html#EC2.Client.create_tags boto3 delete_tags https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2.html#EC2.Client.delete_tags Amazon EC2 API Reference Actions CreateTags https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_CreateTags.html Amazon EC2 API Reference Actions DeleteTags https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DeleteTags.html --- moto/ec2/models.py | 2 ++ tests/test_ec2/test_tags.py | 29 +++++++++++++++++++++++++++++ 2 files changed, 31 insertions(+) diff --git a/moto/ec2/models.py b/moto/ec2/models.py index ff766d7b8..1b4a6c112 100644 --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -134,6 +134,8 @@ def utc_date_and_time(): def validate_resource_ids(resource_ids): + if not resource_ids: + raise MissingParameterError(parameter='resourceIdSet') for resource_id in resource_ids: if not is_valid_resource_id(resource_id): raise InvalidID(resource_id=resource_id) diff --git a/tests/test_ec2/test_tags.py b/tests/test_ec2/test_tags.py index c92a4f81f..2294979ba 100644 --- a/tests/test_ec2/test_tags.py +++ b/tests/test_ec2/test_tags.py @@ -5,6 +5,7 @@ import itertools import boto import boto3 +from botocore.exceptions import ClientError from boto.exception import EC2ResponseError from boto.ec2.instance import Reservation import sure # noqa @@ -451,3 +452,31 @@ def test_create_snapshot_with_tags(): }] assert snapshot['Tags'] == expected_tags + + +@mock_ec2 +def test_create_tag_empty_resource(): + # create ec2 client in us-west-1 + client = boto3.client('ec2', region_name='us-west-1') + # create tag with empty resource + with assert_raises(ClientError) as ex: + client.create_tags( + Resources=[], + Tags=[{'Key': 'Value'}] + ) + ex.exception.response['Error']['Code'].should.equal('MissingParameter') + ex.exception.response['Error']['Message'].should.equal('The request must contain the 
parameter resourceIdSet') + + +@mock_ec2 +def test_delete_tag_empty_resource(): + # create ec2 client in us-west-1 + client = boto3.client('ec2', region_name='us-west-1') + # delete tag with empty resource + with assert_raises(ClientError) as ex: + client.delete_tags( + Resources=[], + Tags=[{'Key': 'Value'}] + ) + ex.exception.response['Error']['Code'].should.equal('MissingParameter') + ex.exception.response['Error']['Message'].should.equal('The request must contain the parameter resourceIdSet') From b85d21b8fe9ebef52cf2b11d95edf758de1e4400 Mon Sep 17 00:00:00 2001 From: Yaroslav Admin Date: Tue, 2 Apr 2019 15:30:01 +0200 Subject: [PATCH 153/175] Fixed copy-object from unversioned bucket to versioned bucket The response of the copy-object operation was missing VersionId property when source bucket is not versioned. --- moto/s3/models.py | 19 ++++++++++--------- tests/test_s3/test_s3.py | 17 +++++++++++++++++ 2 files changed, 27 insertions(+), 9 deletions(-) diff --git a/moto/s3/models.py b/moto/s3/models.py index 37fed3335..9e4a6a766 100644 --- a/moto/s3/models.py +++ b/moto/s3/models.py @@ -87,10 +87,13 @@ class FakeKey(BaseModel): new_value = new_value.encode(DEFAULT_TEXT_ENCODING) self._value_buffer.write(new_value) - def copy(self, new_name=None): + def copy(self, new_name=None, new_is_versioned=None): r = copy.deepcopy(self) if new_name is not None: r.name = new_name + if new_is_versioned is not None: + r._is_versioned = new_is_versioned + r.refresh_version() return r def set_metadata(self, metadata, replace=False): @@ -973,17 +976,15 @@ class S3Backend(BaseBackend): dest_bucket = self.get_bucket(dest_bucket_name) key = self.get_key(src_bucket_name, src_key_name, version_id=src_version_id) - if dest_key_name != src_key_name: - key = key.copy(dest_key_name) - dest_bucket.keys[dest_key_name] = key - # By this point, the destination key must exist, or KeyError - if dest_bucket.is_versioned: - dest_bucket.keys[dest_key_name].refresh_version() + new_key = 
key.copy(dest_key_name, dest_bucket.is_versioned) + if storage is not None: - key.set_storage_class(storage) + new_key.set_storage_class(storage) if acl is not None: - key.set_acl(acl) + new_key.set_acl(acl) + + dest_bucket.keys[dest_key_name] = new_key def set_bucket_acl(self, bucket_name, acl): bucket = self.get_bucket(bucket_name) diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index cf45822b5..6af23849c 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -1530,6 +1530,23 @@ def test_boto3_copy_object_with_versioning(): obj2_version_new.should_not.equal(obj2_version) +@mock_s3 +def test_boto3_copy_object_from_unversioned_to_versioned_bucket(): + client = boto3.client('s3', region_name='us-east-1') + + client.create_bucket(Bucket='src', CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}) + client.create_bucket(Bucket='dest', CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}) + client.put_bucket_versioning(Bucket='dest', VersioningConfiguration={'Status': 'Enabled'}) + + client.put_object(Bucket='src', Key='test', Body=b'content') + + obj2_version_new = client.copy_object(CopySource={'Bucket': 'src', 'Key': 'test'}, Bucket='dest', Key='test') \ + .get('VersionId') + + # VersionId should be present in the response + obj2_version_new.should_not.equal(None) + + @mock_s3 def test_boto3_deleted_versionings_list(): client = boto3.client('s3', region_name='us-east-1') From 120874e408515f5bdf91d5915f9c813c96183ad4 Mon Sep 17 00:00:00 2001 From: Chris K Date: Fri, 5 Apr 2019 11:00:02 +0100 Subject: [PATCH 154/175] Feature: AWS Secrets Manager list-secrets --- moto/secretsmanager/models.py | 4 ++++ moto/secretsmanager/responses.py | 10 ++++++++++ 2 files changed, 14 insertions(+) diff --git a/moto/secretsmanager/models.py b/moto/secretsmanager/models.py index 1350ab469..0d276c944 100644 --- a/moto/secretsmanager/models.py +++ b/moto/secretsmanager/models.py @@ -188,6 +188,10 @@ class 
SecretsManagerBackend(BaseBackend): return response + def list_secrets(self, max_results, next_token): + # implement here + return secret_list, next_token + available_regions = ( boto3.session.Session().get_available_regions("secretsmanager") diff --git a/moto/secretsmanager/responses.py b/moto/secretsmanager/responses.py index 932e7bfd7..73921a11a 100644 --- a/moto/secretsmanager/responses.py +++ b/moto/secretsmanager/responses.py @@ -64,3 +64,13 @@ class SecretsManagerResponse(BaseResponse): rotation_lambda_arn=rotation_lambda_arn, rotation_rules=rotation_rules ) + + def list_secrets(self): + max_results = self._get_int_param("MaxResults") + next_token = self._get_param("NextToken") + secret_list, next_token = self.secretsmanager_backend.list_secrets( + max_results=max_results, + next_token=next_token, + ) + # TODO: adjust response + return json.dumps(dict(secretList=secret_list, nextToken=next_token)) From 89e4ab93eeefa07e827a0653dd775472c34d3c48 Mon Sep 17 00:00:00 2001 From: Chris K Date: Fri, 5 Apr 2019 13:33:28 +0100 Subject: [PATCH 155/175] Implement ListSecrets --- IMPLEMENTATION_COVERAGE.md | 2 +- moto/secretsmanager/models.py | 25 +++++++++++-- moto/secretsmanager/responses.py | 7 ++-- .../test_secretsmanager.py | 36 +++++++++++++++++++ 4 files changed, 64 insertions(+), 6 deletions(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index a5650f572..e206467b5 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -3659,7 +3659,7 @@ - [X] get_random_password - [X] get_secret_value - [ ] list_secret_version_ids -- [ ] list_secrets +- [x] list_secrets - [ ] put_secret_value - [ ] restore_secret - [X] rotate_secret diff --git a/moto/secretsmanager/models.py b/moto/secretsmanager/models.py index 0d276c944..a6aec8b81 100644 --- a/moto/secretsmanager/models.py +++ b/moto/secretsmanager/models.py @@ -189,9 +189,30 @@ class SecretsManagerBackend(BaseBackend): return response def list_secrets(self, max_results, 
next_token): - # implement here + # TODO implement pagination + + secret_list = [{ + "ARN": secret_arn(self.region, secret['secret_id']), + "DeletedDate": None, + "Description": "", + "KmsKeyId": "", + "LastAccessedDate": None, + "LastChangedDate": None, + "LastRotatedDate": None, + "Name": secret['name'], + "RotationEnabled": secret['rotation_enabled'], + "RotationLambdaARN": secret['rotation_lambda_arn'], + "RotationRules": { + "AutomaticallyAfterDays": secret['auto_rotate_after_days'] + }, + "SecretVersionsToStages": { + secret['version_id']: ["AWSCURRENT"] + }, + "Tags": secret['tags'] + } for secret in self.secrets.values()] + return secret_list, next_token - + available_regions = ( boto3.session.Session().get_available_regions("secretsmanager") diff --git a/moto/secretsmanager/responses.py b/moto/secretsmanager/responses.py index 73921a11a..ab1fe0d9d 100644 --- a/moto/secretsmanager/responses.py +++ b/moto/secretsmanager/responses.py @@ -4,6 +4,8 @@ from moto.core.responses import BaseResponse from .models import secretsmanager_backends +import json + class SecretsManagerResponse(BaseResponse): @@ -68,9 +70,8 @@ class SecretsManagerResponse(BaseResponse): def list_secrets(self): max_results = self._get_int_param("MaxResults") next_token = self._get_param("NextToken") - secret_list, next_token = self.secretsmanager_backend.list_secrets( + secret_list, next_token = secretsmanager_backends[self.region].list_secrets( max_results=max_results, next_token=next_token, ) - # TODO: adjust response - return json.dumps(dict(secretList=secret_list, nextToken=next_token)) + return json.dumps(dict(SecretList=secret_list, NextToken=next_token)) diff --git a/tests/test_secretsmanager/test_secretsmanager.py b/tests/test_secretsmanager/test_secretsmanager.py index 169282421..6146698d9 100644 --- a/tests/test_secretsmanager/test_secretsmanager.py +++ b/tests/test_secretsmanager/test_secretsmanager.py @@ -203,6 +203,42 @@ def test_describe_secret_that_does_not_match(): with 
assert_raises(ClientError): result = conn.get_secret_value(SecretId='i-dont-match') + +@mock_secretsmanager +def test_list_secrets_empty(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + secrets = conn.list_secrets() + + assert secrets['SecretList'] == [] + + +@mock_secretsmanager +def test_list_secrets(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + conn.create_secret(Name='test-secret-2', + SecretString='barsecret', + Tags=[{ + 'Key': 'a', + 'Value': '1' + }]) + + secrets = conn.list_secrets() + + assert secrets['SecretList'][0]['ARN'] is not None + assert secrets['SecretList'][0]['Name'] == 'test-secret' + assert secrets['SecretList'][1]['ARN'] is not None + assert secrets['SecretList'][1]['Name'] == 'test-secret-2' + assert secrets['SecretList'][1]['Tags'] == [{ + 'Key': 'a', + 'Value': '1' + }] + + @mock_secretsmanager def test_rotate_secret(): secret_name = 'test-secret' From 2d6be24ffcc94a69c8340a7f3c1bf0d584429a1e Mon Sep 17 00:00:00 2001 From: Chris K Date: Fri, 5 Apr 2019 13:54:11 +0100 Subject: [PATCH 156/175] Fix lint error --- moto/secretsmanager/responses.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/secretsmanager/responses.py b/moto/secretsmanager/responses.py index ab1fe0d9d..f017ec0dc 100644 --- a/moto/secretsmanager/responses.py +++ b/moto/secretsmanager/responses.py @@ -66,7 +66,7 @@ class SecretsManagerResponse(BaseResponse): rotation_lambda_arn=rotation_lambda_arn, rotation_rules=rotation_rules ) - + def list_secrets(self): max_results = self._get_int_param("MaxResults") next_token = self._get_param("NextToken") From 7fcedcb783d8bd5fc0e944029ccaf25c647aadef Mon Sep 17 00:00:00 2001 From: Chris K Date: Fri, 5 Apr 2019 15:59:38 +0100 Subject: [PATCH 157/175] Fix: Ensure the returned next_token is None (avoid client going round in a loop) --- moto/secretsmanager/models.py | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/moto/secretsmanager/models.py b/moto/secretsmanager/models.py index a6aec8b81..74cf89039 100644 --- a/moto/secretsmanager/models.py +++ b/moto/secretsmanager/models.py @@ -189,7 +189,7 @@ class SecretsManagerBackend(BaseBackend): return response def list_secrets(self, max_results, next_token): - # TODO implement pagination + # TODO implement pagination and limits secret_list = [{ "ARN": secret_arn(self.region, secret['secret_id']), @@ -211,7 +211,7 @@ class SecretsManagerBackend(BaseBackend): "Tags": secret['tags'] } for secret in self.secrets.values()] - return secret_list, next_token + return secret_list, None available_regions = ( From eff4cdfbeead3ea332f91b1fc055bb9732f3b743 Mon Sep 17 00:00:00 2001 From: Olle Jonsson Date: Sun, 7 Apr 2019 13:34:26 +0200 Subject: [PATCH 158/175] README: Use SVG badges for readability --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index aeff847ed..56f73e28e 100644 --- a/README.md +++ b/README.md @@ -2,8 +2,8 @@ [![Join the chat at https://gitter.im/awsmoto/Lobby](https://badges.gitter.im/awsmoto/Lobby.svg)](https://gitter.im/awsmoto/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -[![Build Status](https://travis-ci.org/spulec/moto.png?branch=master)](https://travis-ci.org/spulec/moto) -[![Coverage Status](https://coveralls.io/repos/spulec/moto/badge.png?branch=master)](https://coveralls.io/r/spulec/moto) +[![Build Status](https://travis-ci.org/spulec/moto.svg?branch=master)](https://travis-ci.org/spulec/moto) +[![Coverage Status](https://coveralls.io/repos/spulec/moto/badge.svg?branch=master)](https://coveralls.io/r/spulec/moto) [![Docs](https://readthedocs.org/projects/pip/badge/?version=stable)](http://docs.getmoto.org) # In a nutshell From e28bcf20ea28afab7602ef4b2e488055e406ad52 Mon Sep 17 00:00:00 2001 From: Dan Palmer Date: Mon, 15 Apr 2019 10:48:55 +0100 Subject: [PATCH 159/175] Bump 
Jinja2 to >=2.10.1, addresses CVE-2019-10906 Given how moto is intended to be used, and how it uses Jinja2, [CVE-2019-10906](https://nvd.nist.gov/vuln/detail/CVE-2019-10906) is unlikely to affect many users, but we should use a secure version anyway just in case moto is being used in unforeseen ways. --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 99be632db..2981d9fc5 100755 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ def read(*parts): install_requires = [ - "Jinja2>=2.7.3", + "Jinja2>=2.10.1", "boto>=2.36.0", "boto3>=1.9.86", "botocore>=1.12.86", From c7b0905c236ad0c1fad1c2418d1610196e9eccf7 Mon Sep 17 00:00:00 2001 From: TBurnip Date: Mon, 15 Apr 2019 12:40:04 +0100 Subject: [PATCH 160/175] A fix for tags not mimicking AWS --- moto/ec2/responses/instances.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/moto/ec2/responses/instances.py b/moto/ec2/responses/instances.py index 49f1face7..68fd1e879 100644 --- a/moto/ec2/responses/instances.py +++ b/moto/ec2/responses/instances.py @@ -450,6 +450,7 @@ EC2_DESCRIBE_INSTANCES = """ Date: Wed, 17 Apr 2019 19:47:21 +0100 Subject: [PATCH 162/175] Updating to meet your requirements --- moto/ec2/responses/instances.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/ec2/responses/instances.py b/moto/ec2/responses/instances.py index 68fd1e879..ed05b6856 100644 --- a/moto/ec2/responses/instances.py +++ b/moto/ec2/responses/instances.py @@ -450,7 +450,7 @@ EC2_DESCRIBE_INSTANCES = """ datetime.fromtimestamp(1, deletion_date.tzinfo) + + assert result['Name'] == 'test-secret' + + with assert_raises(ClientError): + result = conn.get_secret_value(SecretId='test-secret') + + +@mock_secretsmanager +def test_delete_secret_that_does_not_exist(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + with assert_raises(ClientError): + result = conn.delete_secret(SecretId='i-dont-exist', ForceDeleteWithoutRecovery=True) + + 
+@mock_secretsmanager +def test_delete_secret_requires_force_delete_flag(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + with assert_raises(ClientError): + result = conn.delete_secret(SecretId='test-secret', ForceDeleteWithoutRecovery=False) + + +@mock_secretsmanager +def test_delete_secret_fails_with_both_force_delete_flag_and_recovery_window_flag(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + with assert_raises(ClientError): + result = conn.delete_secret(SecretId='test-secret', RecoveryWindowInDays=1, ForceDeleteWithoutRecovery=True) + + @mock_secretsmanager def test_get_random_password_default_length(): conn = boto3.client('secretsmanager', region_name='us-west-2') From 749f4f63e6800f5f4127c6a5f38cb2dd6648f906 Mon Sep 17 00:00:00 2001 From: Chris Kilding Date: Thu, 18 Apr 2019 12:58:50 +0100 Subject: [PATCH 165/175] Allow soft deletion of secrets --- moto/secretsmanager/exceptions.py | 7 ++ moto/secretsmanager/models.py | 41 +++++++-- .../test_secretsmanager.py | 87 +++++++++++++++---- 3 files changed, 110 insertions(+), 25 deletions(-) diff --git a/moto/secretsmanager/exceptions.py b/moto/secretsmanager/exceptions.py index a72a32645..5ee96f12c 100644 --- a/moto/secretsmanager/exceptions.py +++ b/moto/secretsmanager/exceptions.py @@ -27,3 +27,10 @@ class InvalidParameterException(SecretsManagerClientError): super(InvalidParameterException, self).__init__( 'InvalidParameterException', message) + + +class InvalidRequestException(SecretsManagerClientError): + def __init__(self, message): + super(InvalidRequestException, self).__init__( + 'InvalidRequestException', + message) \ No newline at end of file diff --git a/moto/secretsmanager/models.py b/moto/secretsmanager/models.py index 10b636359..c272957ad 100644 --- a/moto/secretsmanager/models.py +++ 
b/moto/secretsmanager/models.py @@ -3,6 +3,7 @@ from __future__ import unicode_literals import time import json import uuid +import datetime import boto3 @@ -10,6 +11,7 @@ from moto.core import BaseBackend, BaseModel from .exceptions import ( ResourceNotFoundException, InvalidParameterException, + InvalidRequestException, ClientError ) from .utils import random_password, secret_arn @@ -36,11 +38,21 @@ class SecretsManagerBackend(BaseBackend): def _is_valid_identifier(self, identifier): return identifier in self.secrets + def _unix_time_secs(self, dt): + epoch = datetime.datetime.utcfromtimestamp(0) + return (dt - epoch).total_seconds() + def get_secret_value(self, secret_id, version_id, version_stage): if not self._is_valid_identifier(secret_id): raise ResourceNotFoundException() + if 'deleted_date' in self.secrets[secret_id]: + raise InvalidRequestException( + "An error occurred (InvalidRequestException) when calling the DeleteSecret operation: You tried to \ + perform the operation on a secret that's currently marked deleted." 
+ ) + secret = self.secrets[secret_id] response = json.dumps({ @@ -101,7 +113,7 @@ class SecretsManagerBackend(BaseBackend): "LastRotatedDate": None, "LastChangedDate": None, "LastAccessedDate": None, - "DeletedDate": None, + "DeletedDate": secret.get('deleted_date', None), "Tags": secret['tags'] }) @@ -193,7 +205,7 @@ class SecretsManagerBackend(BaseBackend): secret_list = [{ "ARN": secret_arn(self.region, secret['secret_id']), - "DeletedDate": None, + "DeletedDate": secret.get('deleted_date', None), "Description": "", "KmsKeyId": "", "LastAccessedDate": None, @@ -218,10 +230,10 @@ class SecretsManagerBackend(BaseBackend): if not self._is_valid_identifier(secret_id): raise ResourceNotFoundException - if not force_delete_without_recovery: - raise InvalidParameterException( - "An error occurred (InvalidParameterException) when calling the DeleteSecret operation: \ - ForceDeleteWithoutRecovery must be true (Moto cannot simulate soft deletion with a recovery window)" + if 'deleted_date' in self.secrets[secret_id]: + raise InvalidRequestException( + "An error occurred (InvalidRequestException) when calling the DeleteSecret operation: You tried to \ + perform the operation on a secret that's currently marked deleted." ) if recovery_window_in_days and force_delete_without_recovery: @@ -230,9 +242,20 @@ class SecretsManagerBackend(BaseBackend): use ForceDeleteWithoutRecovery in conjunction with RecoveryWindowInDays." ) - secret = self.secrets.pop(secret_id, None) + if recovery_window_in_days and (recovery_window_in_days < 7 or recovery_window_in_days > 30): + raise InvalidParameterException( + "An error occurred (InvalidParameterException) when calling the DeleteSecret operation: The \ + RecoveryWindowInDays value must be between 7 and 30 days (inclusive)." 
+ ) - deletion_date = int(time.time()) + deletion_date = datetime.datetime.utcnow() + + if force_delete_without_recovery: + secret = self.secrets.pop(secret_id, None) + else: + deletion_date += datetime.timedelta(days=recovery_window_in_days or 30) + self.secrets[secret_id]['deleted_date'] = self._unix_time_secs(deletion_date) + secret = self.secrets.get(secret_id, None) if not secret: raise ResourceNotFoundException @@ -240,7 +263,7 @@ class SecretsManagerBackend(BaseBackend): arn = secret_arn(self.region, secret['secret_id']) name = secret['name'] - return arn, name, deletion_date + return arn, name, self._unix_time_secs(deletion_date) available_regions = ( diff --git a/tests/test_secretsmanager/test_secretsmanager.py b/tests/test_secretsmanager/test_secretsmanager.py index f9a7d0d64..7ce8788e2 100644 --- a/tests/test_secretsmanager/test_secretsmanager.py +++ b/tests/test_secretsmanager/test_secretsmanager.py @@ -6,7 +6,7 @@ from moto import mock_secretsmanager from botocore.exceptions import ClientError import sure # noqa import string -from datetime import datetime +from datetime import datetime, timezone import unittest from nose.tools import assert_raises @@ -35,6 +35,20 @@ def test_get_secret_that_does_not_match(): with assert_raises(ClientError): result = conn.get_secret_value(SecretId='i-dont-match') + +@mock_secretsmanager +def test_get_secret_value_that_is_marked_deleted(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + deleted_secret = conn.delete_secret(SecretId='test-secret') + + with assert_raises(ClientError): + result = conn.get_secret_value(SecretId='test-secret') + + @mock_secretsmanager def test_create_secret(): conn = boto3.client('secretsmanager', region_name='us-east-1') @@ -67,16 +81,33 @@ def test_create_secret_with_tags(): def test_delete_secret(): conn = boto3.client('secretsmanager', region_name='us-west-2') + 
conn.create_secret(Name='test-secret', + SecretString='foosecret') + + deleted_secret = conn.delete_secret(SecretId='test-secret') + + assert deleted_secret['ARN'] + assert deleted_secret['Name'] == 'test-secret' + assert deleted_secret['DeletionDate'] > datetime.fromtimestamp(1, timezone.utc) + + secret_details = conn.describe_secret(SecretId='test-secret') + + assert secret_details['ARN'] + assert secret_details['Name'] == 'test-secret' + assert secret_details['DeletedDate'] > datetime.fromtimestamp(1, timezone.utc) + + +@mock_secretsmanager +def test_delete_secret_force(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + conn.create_secret(Name='test-secret', SecretString='foosecret') result = conn.delete_secret(SecretId='test-secret', ForceDeleteWithoutRecovery=True) - deletion_date = result['DeletionDate'] assert result['ARN'] - - assert deletion_date > datetime.fromtimestamp(1, deletion_date.tzinfo) - + assert result['DeletionDate'] > datetime.fromtimestamp(1, timezone.utc) assert result['Name'] == 'test-secret' with assert_raises(ClientError): @@ -91,17 +122,6 @@ def test_delete_secret_that_does_not_exist(): result = conn.delete_secret(SecretId='i-dont-exist', ForceDeleteWithoutRecovery=True) -@mock_secretsmanager -def test_delete_secret_requires_force_delete_flag(): - conn = boto3.client('secretsmanager', region_name='us-west-2') - - conn.create_secret(Name='test-secret', - SecretString='foosecret') - - with assert_raises(ClientError): - result = conn.delete_secret(SecretId='test-secret', ForceDeleteWithoutRecovery=False) - - @mock_secretsmanager def test_delete_secret_fails_with_both_force_delete_flag_and_recovery_window_flag(): conn = boto3.client('secretsmanager', region_name='us-west-2') @@ -113,6 +133,41 @@ def test_delete_secret_fails_with_both_force_delete_flag_and_recovery_window_fla result = conn.delete_secret(SecretId='test-secret', RecoveryWindowInDays=1, ForceDeleteWithoutRecovery=True) +@mock_secretsmanager +def 
test_delete_secret_recovery_window_too_short(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + with assert_raises(ClientError): + result = conn.delete_secret(SecretId='test-secret', RecoveryWindowInDays=6) + + +@mock_secretsmanager +def test_delete_secret_recovery_window_too_long(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + with assert_raises(ClientError): + result = conn.delete_secret(SecretId='test-secret', RecoveryWindowInDays=31) + + +@mock_secretsmanager +def test_delete_secret_that_is_marked_deleted(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + deleted_secret = conn.delete_secret(SecretId='test-secret') + + with assert_raises(ClientError): + result = conn.delete_secret(SecretId='test-secret') + + @mock_secretsmanager def test_get_random_password_default_length(): conn = boto3.client('secretsmanager', region_name='us-west-2') From bd8aa341f22a3ffe8f41a86468538ec5835137ef Mon Sep 17 00:00:00 2001 From: Chris Kilding Date: Thu, 18 Apr 2019 16:47:15 +0100 Subject: [PATCH 166/175] Also throw exception if client tries to RotateSecret on a soft-deleted secret --- moto/secretsmanager/models.py | 8 +++++++- tests/test_secretsmanager/test_secretsmanager.py | 16 +++++++++++++++- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/moto/secretsmanager/models.py b/moto/secretsmanager/models.py index c272957ad..af8846a66 100644 --- a/moto/secretsmanager/models.py +++ b/moto/secretsmanager/models.py @@ -49,7 +49,7 @@ class SecretsManagerBackend(BaseBackend): if 'deleted_date' in self.secrets[secret_id]: raise InvalidRequestException( - "An error occurred (InvalidRequestException) when calling the DeleteSecret operation: You tried to \ + "An error occurred (InvalidRequestException) 
when calling the GetSecretValue operation: You tried to \ perform the operation on a secret that's currently marked deleted." ) @@ -127,6 +127,12 @@ class SecretsManagerBackend(BaseBackend): if not self._is_valid_identifier(secret_id): raise ResourceNotFoundException + if 'deleted_date' in self.secrets[secret_id]: + raise InvalidRequestException( + "An error occurred (InvalidRequestException) when calling the RotateSecret operation: You tried to \ + perform the operation on a secret that's currently marked deleted." + ) + if client_request_token: token_length = len(client_request_token) if token_length < 32 or token_length > 64: diff --git a/tests/test_secretsmanager/test_secretsmanager.py b/tests/test_secretsmanager/test_secretsmanager.py index 7ce8788e2..48cce5077 100644 --- a/tests/test_secretsmanager/test_secretsmanager.py +++ b/tests/test_secretsmanager/test_secretsmanager.py @@ -43,7 +43,7 @@ def test_get_secret_value_that_is_marked_deleted(): conn.create_secret(Name='test-secret', SecretString='foosecret') - deleted_secret = conn.delete_secret(SecretId='test-secret') + conn.delete_secret(SecretId='test-secret') with assert_raises(ClientError): result = conn.get_secret_value(SecretId='test-secret') @@ -380,6 +380,20 @@ def test_rotate_secret_enable_rotation(): assert rotated_description['RotationEnabled'] is True assert rotated_description['RotationRules']['AutomaticallyAfterDays'] == 42 + +@mock_secretsmanager +def test_rotate_secret_that_is_marked_deleted(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + conn.delete_secret(SecretId='test-secret') + + with assert_raises(ClientError): + result = conn.rotate_secret(SecretId='test-secret') + + @mock_secretsmanager def test_rotate_secret_that_does_not_exist(): conn = boto3.client('secretsmanager', 'us-west-2') From 97d31e9aa54f71ff473cd3f2d5dc6441526084b5 Mon Sep 17 00:00:00 2001 From: Chris Kilding Date: Thu, 18 Apr 
2019 16:53:27 +0100 Subject: [PATCH 167/175] Fix flake8 --- moto/secretsmanager/exceptions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/secretsmanager/exceptions.py b/moto/secretsmanager/exceptions.py index 5ee96f12c..06010c411 100644 --- a/moto/secretsmanager/exceptions.py +++ b/moto/secretsmanager/exceptions.py @@ -33,4 +33,4 @@ class InvalidRequestException(SecretsManagerClientError): def __init__(self, message): super(InvalidRequestException, self).__init__( 'InvalidRequestException', - message) \ No newline at end of file + message) From 8f21272e5703f62769f62c0f6b6d602960d402dc Mon Sep 17 00:00:00 2001 From: Chris Kilding Date: Thu, 18 Apr 2019 17:27:13 +0100 Subject: [PATCH 168/175] Use pytz instead of timezone --- tests/test_secretsmanager/test_secretsmanager.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/test_secretsmanager/test_secretsmanager.py b/tests/test_secretsmanager/test_secretsmanager.py index 48cce5077..7456c7d13 100644 --- a/tests/test_secretsmanager/test_secretsmanager.py +++ b/tests/test_secretsmanager/test_secretsmanager.py @@ -6,7 +6,8 @@ from moto import mock_secretsmanager from botocore.exceptions import ClientError import sure # noqa import string -from datetime import datetime, timezone +import pytz +from datetime import datetime import unittest from nose.tools import assert_raises @@ -88,13 +89,13 @@ def test_delete_secret(): assert deleted_secret['ARN'] assert deleted_secret['Name'] == 'test-secret' - assert deleted_secret['DeletionDate'] > datetime.fromtimestamp(1, timezone.utc) + assert deleted_secret['DeletionDate'] > datetime.fromtimestamp(1, pytz.utc) secret_details = conn.describe_secret(SecretId='test-secret') assert secret_details['ARN'] assert secret_details['Name'] == 'test-secret' - assert secret_details['DeletedDate'] > datetime.fromtimestamp(1, timezone.utc) + assert secret_details['DeletedDate'] > datetime.fromtimestamp(1, pytz.utc) @mock_secretsmanager @@ 
-107,7 +108,7 @@ def test_delete_secret_force(): result = conn.delete_secret(SecretId='test-secret', ForceDeleteWithoutRecovery=True) assert result['ARN'] - assert result['DeletionDate'] > datetime.fromtimestamp(1, timezone.utc) + assert result['DeletionDate'] > datetime.fromtimestamp(1, pytz.utc) assert result['Name'] == 'test-secret' with assert_raises(ClientError): From 431269bcd0d705b8bd0f1bef5d39391e382d9669 Mon Sep 17 00:00:00 2001 From: Tomoya Iwata Date: Thu, 18 Apr 2019 20:31:46 +0900 Subject: [PATCH 169/175] fix #2161 mock_dynamodb2 query fails when using GSI with range key if target table has record that have no range key attribute --- moto/dynamodb2/models.py | 29 +++++++----- tests/test_dynamodb2/test_dynamodb.py | 67 +++++++++++++++++++++++++++ 2 files changed, 84 insertions(+), 12 deletions(-) diff --git a/moto/dynamodb2/models.py b/moto/dynamodb2/models.py index 677bbfb07..0f4594aa4 100644 --- a/moto/dynamodb2/models.py +++ b/moto/dynamodb2/models.py @@ -570,6 +570,7 @@ class Table(BaseModel): exclusive_start_key, scan_index_forward, projection_expression, index_name=None, filter_expression=None, **filter_kwargs): results = [] + if index_name: all_indexes = (self.global_indexes or []) + (self.indexes or []) indexes_by_name = dict((i['IndexName'], i) for i in all_indexes) @@ -586,24 +587,28 @@ class Table(BaseModel): raise ValueError('Missing Hash Key. 
KeySchema: %s' % index['KeySchema']) - possible_results = [] - for item in self.all_items(): - if not isinstance(item, Item): - continue - item_hash_key = item.attrs.get(index_hash_key['AttributeName']) - if item_hash_key and item_hash_key == hash_key: - possible_results.append(item) - else: - possible_results = [item for item in list(self.all_items()) if isinstance( - item, Item) and item.hash_key == hash_key] - - if index_name: try: index_range_key = [key for key in index[ 'KeySchema'] if key['KeyType'] == 'RANGE'][0] except IndexError: index_range_key = None + possible_results = [] + for item in self.all_items(): + if not isinstance(item, Item): + continue + item_hash_key = item.attrs.get(index_hash_key['AttributeName']) + if index_range_key is None: + if item_hash_key and item_hash_key == hash_key: + possible_results.append(item) + else: + item_range_key = item.attrs.get(index_range_key['AttributeName']) + if item_hash_key and item_hash_key == hash_key and item_range_key: + possible_results.append(item) + else: + possible_results = [item for item in list(self.all_items()) if isinstance( + item, Item) and item.hash_key == hash_key] + if range_comparison: if index_name and not index_range_key: raise ValueError( diff --git a/tests/test_dynamodb2/test_dynamodb.py b/tests/test_dynamodb2/test_dynamodb.py index 3d6f1de65..32fd61d16 100644 --- a/tests/test_dynamodb2/test_dynamodb.py +++ b/tests/test_dynamodb2/test_dynamodb.py @@ -1612,3 +1612,70 @@ def test_condition_expressions(): ':match': {'S': 'match2'} } ) + + +@mock_dynamodb2 +def test_query_gsi_with_range_key(): + dynamodb = boto3.client('dynamodb', region_name='us-east-1') + dynamodb.create_table( + TableName='test', + KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], + AttributeDefinitions=[ + {'AttributeName': 'id', 'AttributeType': 'S'}, + {'AttributeName': 'gsi_hash_key', 'AttributeType': 'S'}, + {'AttributeName': 'gsi_range_key', 'AttributeType': 'S'} + ], + 
ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}, + GlobalSecondaryIndexes=[ + { + 'IndexName': 'test_gsi', + 'KeySchema': [ + { + 'AttributeName': 'gsi_hash_key', + 'KeyType': 'HASH' + }, + { + 'AttributeName': 'gsi_range_key', + 'KeyType': 'RANGE' + }, + ], + 'Projection': { + 'ProjectionType': 'ALL', + }, + 'ProvisionedThroughput': { + 'ReadCapacityUnits': 1, + 'WriteCapacityUnits': 1 + } + }, + ] + ) + + dynamodb.put_item( + TableName='test', + Item={ + 'id': {'S': 'test1'}, + 'gsi_hash_key': {'S': 'key1'}, + 'gsi_range_key': {'S': 'range1'}, + } + ) + dynamodb.put_item( + TableName='test', + Item={ + 'id': {'S': 'test2'}, + 'gsi_hash_key': {'S': 'key1'}, + } + ) + + res = dynamodb.query(TableName='test', IndexName='test_gsi', + KeyConditionExpression='gsi_hash_key = :gsi_hash_key AND gsi_range_key = :gsi_range_key', + ExpressionAttributeValues={ + ':gsi_hash_key': {'S': 'key1'}, + ':gsi_range_key': {'S': 'range1'} + }) + res.should.have.key("Count").equal(1) + res.should.have.key("Items") + res['Items'][0].should.equal({ + 'id': {'S': 'test1'}, + 'gsi_hash_key': {'S': 'key1'}, + 'gsi_range_key': {'S': 'range1'}, + }) From dd898add4dcce9d0e8db3fce131cffa8d2d03777 Mon Sep 17 00:00:00 2001 From: Chris Opland Date: Sun, 21 Apr 2019 18:23:00 -0500 Subject: [PATCH 170/175] Add test verifying create_role path defaults to / --- tests/test_iam/test_iam.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index 3240e0f13..1cd6f9e62 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -1287,4 +1287,9 @@ def test_list_entities_for_policy(): assert response['PolicyRoles'] == [{'RoleName': 'my-role'}] +@mock_iam() +def test_create_role_no_path(): + conn = boto3.client('iam', region_name='us-east-1') + resp = conn.create_role(RoleName='my-role', AssumeRolePolicyDocument='some policy', Description='test') + 
resp.get('Role').get('Arn').should.equal('arn:aws:iam::123456789012:role/my-role') From 036d6a8698323b7072f51584f406bee7b2d51eee Mon Sep 17 00:00:00 2001 From: Steve Pulec Date: Mon, 22 Apr 2019 19:23:45 -0500 Subject: [PATCH 171/175] 1.3.8 --- moto/__init__.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/moto/__init__.py b/moto/__init__.py index 8e9b91bce..5eeac8471 100644 --- a/moto/__init__.py +++ b/moto/__init__.py @@ -3,7 +3,7 @@ import logging # logging.getLogger('boto').setLevel(logging.CRITICAL) __title__ = 'moto' -__version__ = '1.3.7' +__version__ = '1.3.8' from .acm import mock_acm # flake8: noqa from .apigateway import mock_apigateway, mock_apigateway_deprecated # flake8: noqa diff --git a/setup.py b/setup.py index 2981d9fc5..7ee84cf6e 100755 --- a/setup.py +++ b/setup.py @@ -55,7 +55,7 @@ else: setup( name='moto', - version='1.3.7', + version='1.3.8', description='A library that allows your python tests to easily' ' mock out the boto library', long_description=read('README.md'), From 55fe629112dee63308b747b7ed428200ecbca211 Mon Sep 17 00:00:00 2001 From: Christopher Kilding Date: Sun, 21 Apr 2019 18:11:20 +0100 Subject: [PATCH 172/175] Feature: Secrets Manager restore-secret --- IMPLEMENTATION_COVERAGE.md | 2 +- moto/secretsmanager/models.py | 14 +++++++ moto/secretsmanager/responses.py | 7 ++++ .../test_secretsmanager.py | 40 +++++++++++++++++++ 4 files changed, 62 insertions(+), 1 deletion(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index 2afd3f19c..34d0097b4 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -3661,7 +3661,7 @@ - [ ] list_secret_version_ids - [x] list_secrets - [ ] put_secret_value -- [ ] restore_secret +- [X] restore_secret - [X] rotate_secret - [ ] tag_resource - [ ] untag_resource diff --git a/moto/secretsmanager/models.py b/moto/secretsmanager/models.py index af8846a66..44ac1ef47 100644 --- a/moto/secretsmanager/models.py +++ 
b/moto/secretsmanager/models.py @@ -271,6 +271,20 @@ class SecretsManagerBackend(BaseBackend): return arn, name, self._unix_time_secs(deletion_date) + def restore_secret(self, secret_id): + + if not self._is_valid_identifier(secret_id): + raise ResourceNotFoundException + + self.secrets[secret_id].pop('deleted_date', None) + + secret = self.secrets[secret_id] + + arn = secret_arn(self.region, secret['secret_id']) + name = secret['name'] + + return arn, name + available_regions = ( boto3.session.Session().get_available_regions("secretsmanager") diff --git a/moto/secretsmanager/responses.py b/moto/secretsmanager/responses.py index a33890202..0eb02e39b 100644 --- a/moto/secretsmanager/responses.py +++ b/moto/secretsmanager/responses.py @@ -86,3 +86,10 @@ class SecretsManagerResponse(BaseResponse): force_delete_without_recovery=force_delete_without_recovery, ) return json.dumps(dict(ARN=arn, Name=name, DeletionDate=deletion_date)) + + def restore_secret(self): + secret_id = self._get_param("SecretId") + arn, name = secretsmanager_backends[self.region].restore_secret( + secret_id=secret_id, + ) + return json.dumps(dict(ARN=arn, Name=name)) diff --git a/tests/test_secretsmanager/test_secretsmanager.py b/tests/test_secretsmanager/test_secretsmanager.py index 7456c7d13..81ce93cc3 100644 --- a/tests/test_secretsmanager/test_secretsmanager.py +++ b/tests/test_secretsmanager/test_secretsmanager.py @@ -347,6 +347,46 @@ def test_list_secrets(): }] +@mock_secretsmanager +def test_restore_secret(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + conn.delete_secret(SecretId='test-secret') + + described_secret_before = conn.describe_secret(SecretId='test-secret') + assert described_secret_before['DeletedDate'] > datetime.fromtimestamp(1, pytz.utc) + + restored_secret = conn.restore_secret(SecretId='test-secret') + assert restored_secret['ARN'] + assert restored_secret['Name'] == 
'test-secret' + + described_secret_after = conn.describe_secret(SecretId='test-secret') + assert 'DeletedDate' not in described_secret_after + + +@mock_secretsmanager +def test_restore_secret_that_is_not_deleted(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + conn.create_secret(Name='test-secret', + SecretString='foosecret') + + restored_secret = conn.restore_secret(SecretId='test-secret') + assert restored_secret['ARN'] + assert restored_secret['Name'] == 'test-secret' + + +@mock_secretsmanager +def test_restore_secret_that_does_not_exist(): + conn = boto3.client('secretsmanager', region_name='us-west-2') + + with assert_raises(ClientError): + result = conn.restore_secret(SecretId='i-dont-exist') + + @mock_secretsmanager def test_rotate_secret(): secret_name = 'test-secret' From 4a286c4bc288933bb023396e2784a6fdbb966bc9 Mon Sep 17 00:00:00 2001 From: Terry Cain Date: Fri, 26 Apr 2019 20:52:24 +0100 Subject: [PATCH 173/175] KMS generate_data_key (#2071) * Added KMS.generate_data_key and KMS.generate_date_key_without_plaintext Increase test coverage to cover Key not found * Added test for kms.put_key_policy key not found --- moto/kms/exceptions.py | 36 ++++++ moto/kms/models.py | 42 ++++--- moto/kms/responses.py | 153 +++++++++++++++---------- tests/test_kms/test_kms.py | 227 +++++++++++++++++++++++++++++++++++-- 4 files changed, 370 insertions(+), 88 deletions(-) create mode 100644 moto/kms/exceptions.py diff --git a/moto/kms/exceptions.py b/moto/kms/exceptions.py new file mode 100644 index 000000000..70edd3dcd --- /dev/null +++ b/moto/kms/exceptions.py @@ -0,0 +1,36 @@ +from __future__ import unicode_literals +from moto.core.exceptions import JsonRESTError + + +class NotFoundException(JsonRESTError): + code = 400 + + def __init__(self, message): + super(NotFoundException, self).__init__( + "NotFoundException", message) + + +class ValidationException(JsonRESTError): + code = 400 + + def __init__(self, message): + super(ValidationException, 
self).__init__( + "ValidationException", message) + + +class AlreadyExistsException(JsonRESTError): + code = 400 + + def __init__(self, message): + super(AlreadyExistsException, self).__init__( + "AlreadyExistsException", message) + + +class NotAuthorizedException(JsonRESTError): + code = 400 + + def __init__(self): + super(NotAuthorizedException, self).__init__( + "NotAuthorizedException", None) + + self.description = '{"__type":"NotAuthorizedException"}' diff --git a/moto/kms/models.py b/moto/kms/models.py index 9fbb2b587..b49e9dd09 100644 --- a/moto/kms/models.py +++ b/moto/kms/models.py @@ -1,5 +1,6 @@ from __future__ import unicode_literals +import os import boto.kms from moto.core import BaseBackend, BaseModel from moto.core.utils import iso_8601_datetime_without_milliseconds @@ -159,27 +160,38 @@ class KmsBackend(BaseBackend): return self.keys[self.get_key_id(key_id)].policy def disable_key(self, key_id): - if key_id in self.keys: - self.keys[key_id].enabled = False - self.keys[key_id].key_state = 'Disabled' + self.keys[key_id].enabled = False + self.keys[key_id].key_state = 'Disabled' def enable_key(self, key_id): - if key_id in self.keys: - self.keys[key_id].enabled = True - self.keys[key_id].key_state = 'Enabled' + self.keys[key_id].enabled = True + self.keys[key_id].key_state = 'Enabled' def cancel_key_deletion(self, key_id): - if key_id in self.keys: - self.keys[key_id].key_state = 'Disabled' - self.keys[key_id].deletion_date = None + self.keys[key_id].key_state = 'Disabled' + self.keys[key_id].deletion_date = None def schedule_key_deletion(self, key_id, pending_window_in_days): - if key_id in self.keys: - if 7 <= pending_window_in_days <= 30: - self.keys[key_id].enabled = False - self.keys[key_id].key_state = 'PendingDeletion' - self.keys[key_id].deletion_date = datetime.now() + timedelta(days=pending_window_in_days) - return iso_8601_datetime_without_milliseconds(self.keys[key_id].deletion_date) + if 7 <= pending_window_in_days <= 30: + 
self.keys[key_id].enabled = False + self.keys[key_id].key_state = 'PendingDeletion' + self.keys[key_id].deletion_date = datetime.now() + timedelta(days=pending_window_in_days) + return iso_8601_datetime_without_milliseconds(self.keys[key_id].deletion_date) + + def generate_data_key(self, key_id, encryption_context, number_of_bytes, key_spec, grant_tokens): + key = self.keys[self.get_key_id(key_id)] + + if key_spec: + if key_spec == 'AES_128': + bytes = 16 + else: + bytes = 32 + else: + bytes = number_of_bytes + + plaintext = os.urandom(bytes) + + return plaintext, key.arn kms_backends = {} diff --git a/moto/kms/responses.py b/moto/kms/responses.py index ed6accc78..92195ed6b 100644 --- a/moto/kms/responses.py +++ b/moto/kms/responses.py @@ -5,11 +5,9 @@ import json import re import six -from boto.exception import JSONResponseError -from boto.kms.exceptions import AlreadyExistsException, NotFoundException - from moto.core.responses import BaseResponse from .models import kms_backends +from .exceptions import NotFoundException, ValidationException, AlreadyExistsException, NotAuthorizedException reserved_aliases = [ 'alias/aws/ebs', @@ -88,36 +86,28 @@ class KmsResponse(BaseResponse): def create_alias(self): alias_name = self.parameters['AliasName'] target_key_id = self.parameters['TargetKeyId'] - region = self.region if not alias_name.startswith('alias/'): - raise JSONResponseError(400, 'Bad Request', - body={'message': 'Invalid identifier', '__type': 'ValidationException'}) + raise ValidationException('Invalid identifier') if alias_name in reserved_aliases: - raise JSONResponseError(400, 'Bad Request', body={ - '__type': 'NotAuthorizedException'}) + raise NotAuthorizedException() if ':' in alias_name: - raise JSONResponseError(400, 'Bad Request', body={ - 'message': '{alias_name} contains invalid characters for an alias'.format(**locals()), - '__type': 'ValidationException'}) + raise ValidationException('{alias_name} contains invalid characters for an 
alias'.format(alias_name=alias_name)) if not re.match(r'^[a-zA-Z0-9:/_-]+$', alias_name): - raise JSONResponseError(400, 'Bad Request', body={ - 'message': "1 validation error detected: Value '{alias_name}' at 'aliasName' failed to satisfy constraint: Member must satisfy regular expression pattern: ^[a-zA-Z0-9:/_-]+$" - .format(**locals()), - '__type': 'ValidationException'}) + raise ValidationException("1 validation error detected: Value '{alias_name}' at 'aliasName' " + "failed to satisfy constraint: Member must satisfy regular " + "expression pattern: ^[a-zA-Z0-9:/_-]+$" + .format(alias_name=alias_name)) if self.kms_backend.alias_exists(target_key_id): - raise JSONResponseError(400, 'Bad Request', body={ - 'message': 'Aliases must refer to keys. Not aliases', - '__type': 'ValidationException'}) + raise ValidationException('Aliases must refer to keys. Not aliases') if self.kms_backend.alias_exists(alias_name): - raise AlreadyExistsException(400, 'Bad Request', body={ - 'message': 'An alias with the name arn:aws:kms:{region}:012345678912:{alias_name} already exists' - .format(**locals()), '__type': 'AlreadyExistsException'}) + raise AlreadyExistsException('An alias with the name arn:aws:kms:{region}:012345678912:{alias_name} ' + 'already exists'.format(region=self.region, alias_name=alias_name)) self.kms_backend.add_alias(target_key_id, alias_name) @@ -125,16 +115,13 @@ class KmsResponse(BaseResponse): def delete_alias(self): alias_name = self.parameters['AliasName'] - region = self.region if not alias_name.startswith('alias/'): - raise JSONResponseError(400, 'Bad Request', - body={'message': 'Invalid identifier', '__type': 'ValidationException'}) + raise ValidationException('Invalid identifier') if not self.kms_backend.alias_exists(alias_name): - raise NotFoundException(400, 'Bad Request', body={ - 'message': 'Alias arn:aws:kms:{region}:012345678912:{alias_name} is not found.'.format(**locals()), - '__type': 'NotFoundException'}) + raise NotFoundException('Alias 
arn:aws:kms:{region}:012345678912:' + '{alias_name} is not found.'.format(region=self.region, alias_name=alias_name)) self.kms_backend.delete_alias(alias_name) @@ -172,9 +159,8 @@ class KmsResponse(BaseResponse): try: self.kms_backend.enable_key_rotation(key_id) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) return json.dumps(None) @@ -184,9 +170,8 @@ class KmsResponse(BaseResponse): try: self.kms_backend.disable_key_rotation(key_id) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) return json.dumps(None) def get_key_rotation_status(self): @@ -195,9 +180,8 @@ class KmsResponse(BaseResponse): try: rotation_enabled = self.kms_backend.get_key_rotation_status(key_id) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) return json.dumps({'KeyRotationEnabled': rotation_enabled}) def put_key_policy(self): @@ -210,9 +194,8 @@ class KmsResponse(BaseResponse): try: self.kms_backend.put_key_policy(key_id, policy) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 
'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) return json.dumps(None) @@ -225,9 +208,8 @@ class KmsResponse(BaseResponse): try: return json.dumps({'Policy': self.kms_backend.get_key_policy(key_id)}) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) def list_key_policies(self): key_id = self.parameters.get('KeyId') @@ -235,9 +217,8 @@ class KmsResponse(BaseResponse): try: self.kms_backend.describe_key(key_id) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) return json.dumps({'Truncated': False, 'PolicyNames': ['default']}) @@ -252,8 +233,14 @@ class KmsResponse(BaseResponse): return json.dumps({"CiphertextBlob": base64.b64encode(value).decode("utf-8"), 'KeyId': 'key_id'}) def decrypt(self): + # TODO refuse decode if EncryptionContext is not the same as when it was encrypted / generated + value = self.parameters.get("CiphertextBlob") - return json.dumps({"Plaintext": base64.b64decode(value).decode("utf-8"), 'KeyId': 'key_id'}) + try: + return json.dumps({"Plaintext": base64.b64decode(value).decode("utf-8")}) + except UnicodeDecodeError: + # Generate data key will produce random bytes which when 
decrypted is still returned as base64 + return json.dumps({"Plaintext": value}) def disable_key(self): key_id = self.parameters.get('KeyId') @@ -261,9 +248,8 @@ class KmsResponse(BaseResponse): try: self.kms_backend.disable_key(key_id) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) return json.dumps(None) def enable_key(self): @@ -272,9 +258,8 @@ class KmsResponse(BaseResponse): try: self.kms_backend.enable_key(key_id) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) return json.dumps(None) def cancel_key_deletion(self): @@ -283,9 +268,8 @@ class KmsResponse(BaseResponse): try: self.kms_backend.cancel_key_deletion(key_id) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) return json.dumps({'KeyId': key_id}) def schedule_key_deletion(self): @@ -301,19 +285,62 @@ class KmsResponse(BaseResponse): 'DeletionDate': self.kms_backend.schedule_key_deletion(key_id, pending_window_in_days) }) except KeyError: - raise JSONResponseError(404, 'Not Found', body={ - 'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' 
does not exist".format(region=self.region, key_id=key_id), - '__type': 'NotFoundException'}) + raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/" + "{key_id}' does not exist".format(region=self.region, key_id=key_id)) + + def generate_data_key(self): + key_id = self.parameters.get('KeyId') + encryption_context = self.parameters.get('EncryptionContext') + number_of_bytes = self.parameters.get('NumberOfBytes') + key_spec = self.parameters.get('KeySpec') + grant_tokens = self.parameters.get('GrantTokens') + + # Param validation + if key_id.startswith('alias'): + if self.kms_backend.get_key_id_from_alias(key_id) is None: + raise NotFoundException('Alias arn:aws:kms:{region}:012345678912:{alias_name} is not found.'.format( + region=self.region, alias_name=key_id)) + else: + if self.kms_backend.get_key_id(key_id) not in self.kms_backend.keys: + raise NotFoundException('Invalid keyId') + + if number_of_bytes and (number_of_bytes > 1024 or number_of_bytes < 0): + raise ValidationException("1 validation error detected: Value '2048' at 'numberOfBytes' failed " + "to satisfy constraint: Member must have value less than or " + "equal to 1024") + + if key_spec and key_spec not in ('AES_256', 'AES_128'): + raise ValidationException("1 validation error detected: Value 'AES_257' at 'keySpec' failed " + "to satisfy constraint: Member must satisfy enum value set: " + "[AES_256, AES_128]") + if not key_spec and not number_of_bytes: + raise ValidationException("Please specify either number of bytes or key spec.") + if key_spec and number_of_bytes: + raise ValidationException("Please specify either number of bytes or key spec.") + + plaintext, key_arn = self.kms_backend.generate_data_key(key_id, encryption_context, + number_of_bytes, key_spec, grant_tokens) + + plaintext = base64.b64encode(plaintext).decode() + + return json.dumps({ + 'CiphertextBlob': plaintext, + 'Plaintext': plaintext, + 'KeyId': key_arn # not alias + }) + + def 
generate_data_key_without_plaintext(self): + result = json.loads(self.generate_data_key()) + del result['Plaintext'] + + return json.dumps(result) def _assert_valid_key_id(key_id): if not re.match(r'^[A-F0-9]{8}-[A-F0-9]{4}-[A-F0-9]{4}-[A-F0-9]{4}-[A-F0-9]{12}$', key_id, re.IGNORECASE): - raise JSONResponseError(404, 'Not Found', body={ - 'message': ' Invalid keyId', '__type': 'NotFoundException'}) + raise NotFoundException('Invalid keyId') def _assert_default_policy(policy_name): if policy_name != 'default': - raise JSONResponseError(404, 'Not Found', body={ - 'message': "No such policy exists", - '__type': 'NotFoundException'}) + raise NotFoundException("No such policy exists") diff --git a/tests/test_kms/test_kms.py b/tests/test_kms/test_kms.py index 79f95fa1b..e7ce9f74b 100644 --- a/tests/test_kms/test_kms.py +++ b/tests/test_kms/test_kms.py @@ -2,8 +2,11 @@ from __future__ import unicode_literals import os, re import boto3 import boto.kms +import botocore.exceptions from boto.exception import JSONResponseError from boto.kms.exceptions import AlreadyExistsException, NotFoundException + +from moto.kms.exceptions import NotFoundException as MotoNotFoundException import sure # noqa from moto import mock_kms, mock_kms_deprecated from nose.tools import assert_raises @@ -127,7 +130,7 @@ def test_enable_key_rotation_via_arn(): def test_enable_key_rotation_with_missing_key(): conn = boto.kms.connect_to_region("us-west-2") conn.enable_key_rotation.when.called_with( - "not-a-key").should.throw(JSONResponseError) + "not-a-key").should.throw(NotFoundException) @mock_kms_deprecated @@ -142,7 +145,7 @@ def test_enable_key_rotation_with_alias_name_should_fail(): alias_key['KeyMetadata']['Arn'].should.equal(key['KeyMetadata']['Arn']) conn.enable_key_rotation.when.called_with( - 'alias/my-alias').should.throw(JSONResponseError) + 'alias/my-alias').should.throw(NotFoundException) @mock_kms_deprecated @@ -179,21 +182,20 @@ def test_decrypt(): conn = 
boto.kms.connect_to_region('us-west-2') response = conn.decrypt('ZW5jcnlwdG1l'.encode('utf-8')) response['Plaintext'].should.equal(b'encryptme') - response['KeyId'].should.equal('key_id') @mock_kms_deprecated def test_disable_key_rotation_with_missing_key(): conn = boto.kms.connect_to_region("us-west-2") conn.disable_key_rotation.when.called_with( - "not-a-key").should.throw(JSONResponseError) + "not-a-key").should.throw(NotFoundException) @mock_kms_deprecated def test_get_key_rotation_status_with_missing_key(): conn = boto.kms.connect_to_region("us-west-2") conn.get_key_rotation_status.when.called_with( - "not-a-key").should.throw(JSONResponseError) + "not-a-key").should.throw(NotFoundException) @mock_kms_deprecated @@ -279,7 +281,7 @@ def test_put_key_policy_via_alias_should_not_update(): target_key_id=key['KeyMetadata']['KeyId']) conn.put_key_policy.when.called_with( - 'alias/my-key-alias', 'default', 'new policy').should.throw(JSONResponseError) + 'alias/my-key-alias', 'default', 'new policy').should.throw(NotFoundException) policy = conn.get_key_policy(key['KeyMetadata']['KeyId'], 'default') policy['Policy'].should.equal('my policy') @@ -599,9 +601,9 @@ def test__assert_valid_key_id(): import uuid _assert_valid_key_id.when.called_with( - "not-a-key").should.throw(JSONResponseError) + "not-a-key").should.throw(MotoNotFoundException) _assert_valid_key_id.when.called_with( - str(uuid.uuid4())).should_not.throw(JSONResponseError) + str(uuid.uuid4())).should_not.throw(MotoNotFoundException) @mock_kms_deprecated @@ -609,9 +611,9 @@ def test__assert_default_policy(): from moto.kms.responses import _assert_default_policy _assert_default_policy.when.called_with( - "not-default").should.throw(JSONResponseError) + "not-default").should.throw(MotoNotFoundException) _assert_default_policy.when.called_with( - "default").should_not.throw(JSONResponseError) + "default").should_not.throw(MotoNotFoundException) @mock_kms @@ -775,3 +777,208 @@ def test_list_resource_tags(): 
response = client.list_resource_tags(KeyId=keyid) assert response['Tags'][0]['TagKey'] == 'string' assert response['Tags'][0]['TagValue'] == 'string' + + +@mock_kms +def test_generate_data_key_sizes(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='generate-data-key-size') + + resp1 = client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'], + KeySpec='AES_256' + ) + resp2 = client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'], + KeySpec='AES_128' + ) + resp3 = client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'], + NumberOfBytes=64 + ) + + assert len(resp1['Plaintext']) == 32 + assert len(resp2['Plaintext']) == 16 + assert len(resp3['Plaintext']) == 64 + + +@mock_kms +def test_generate_data_key_decrypt(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='generate-data-key-decrypt') + + resp1 = client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'], + KeySpec='AES_256' + ) + resp2 = client.decrypt( + CiphertextBlob=resp1['CiphertextBlob'] + ) + + assert resp1['Plaintext'] == resp2['Plaintext'] + + +@mock_kms +def test_generate_data_key_invalid_size_params(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='generate-data-key-size') + + with assert_raises(botocore.exceptions.ClientError) as err: + client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'], + KeySpec='AES_257' + ) + + with assert_raises(botocore.exceptions.ClientError) as err: + client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'], + KeySpec='AES_128', + NumberOfBytes=16 + ) + + with assert_raises(botocore.exceptions.ClientError) as err: + client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'], + NumberOfBytes=2048 + ) + + with assert_raises(botocore.exceptions.ClientError) as err: + client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'] + ) + + +@mock_kms +def test_generate_data_key_invalid_key(): + client = 
boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='generate-data-key-size') + + with assert_raises(client.exceptions.NotFoundException): + client.generate_data_key( + KeyId='alias/randomnonexistantkey', + KeySpec='AES_256' + ) + + with assert_raises(client.exceptions.NotFoundException): + client.generate_data_key( + KeyId=key['KeyMetadata']['KeyId'] + '4', + KeySpec='AES_256' + ) + + +@mock_kms +def test_generate_data_key_without_plaintext_decrypt(): + client = boto3.client('kms', region_name='us-east-1') + key = client.create_key(Description='generate-data-key-decrypt') + + resp1 = client.generate_data_key_without_plaintext( + KeyId=key['KeyMetadata']['KeyId'], + KeySpec='AES_256' + ) + + assert 'Plaintext' not in resp1 + + +@mock_kms +def test_enable_key_rotation_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.enable_key_rotation( + KeyId='12366f9b-1230-123d-123e-123e6ae60c02' + ) + + +@mock_kms +def test_disable_key_rotation_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.disable_key_rotation( + KeyId='12366f9b-1230-123d-123e-123e6ae60c02' + ) + + +@mock_kms +def test_enable_key_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.enable_key( + KeyId='12366f9b-1230-123d-123e-123e6ae60c02' + ) + + +@mock_kms +def test_disable_key_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.disable_key( + KeyId='12366f9b-1230-123d-123e-123e6ae60c02' + ) + + +@mock_kms +def test_cancel_key_deletion_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.cancel_key_deletion( + 
KeyId='12366f9b-1230-123d-123e-123e6ae60c02' + ) + + +@mock_kms +def test_schedule_key_deletion_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.schedule_key_deletion( + KeyId='12366f9b-1230-123d-123e-123e6ae60c02' + ) + + +@mock_kms +def test_get_key_rotation_status_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.get_key_rotation_status( + KeyId='12366f9b-1230-123d-123e-123e6ae60c02' + ) + + +@mock_kms +def test_get_key_policy_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.get_key_policy( + KeyId='12366f9b-1230-123d-123e-123e6ae60c02', + PolicyName='default' + ) + + +@mock_kms +def test_list_key_policies_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.list_key_policies( + KeyId='12366f9b-1230-123d-123e-123e6ae60c02' + ) + + +@mock_kms +def test_put_key_policy_key_not_found(): + client = boto3.client('kms', region_name='us-east-1') + + with assert_raises(client.exceptions.NotFoundException): + client.put_key_policy( + KeyId='00000000-0000-0000-0000-000000000000', + PolicyName='default', + Policy='new policy' + ) + + From b822db8d8c9edeffcbc715ac77c568f489822b5a Mon Sep 17 00:00:00 2001 From: shiba24 Date: Tue, 30 Apr 2019 19:35:21 +0900 Subject: [PATCH 174/175] Support create_table with PAY_PER_REQUEST billing mode of DynamoDB --- moto/dynamodb2/responses.py | 12 ++++++++++-- tests/test_dynamodb2/test_dynamodb.py | 27 +++++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 2 deletions(-) diff --git a/moto/dynamodb2/responses.py b/moto/dynamodb2/responses.py index 49095f09c..e382efc1d 100644 --- a/moto/dynamodb2/responses.py +++ b/moto/dynamodb2/responses.py @@ -156,8 +156,16 @@ class 
DynamoHandler(BaseResponse): body = self.body # get the table name table_name = body['TableName'] - # get the throughput - throughput = body["ProvisionedThroughput"] + # check billing mode and get the throughput + if "BillingMode" in body.keys() and body["BillingMode"] == "PAY_PER_REQUEST": + if "ProvisionedThroughput" in body.keys(): + er = 'com.amazonaws.dynamodb.v20111205#ValidationException' + return self.error(er, + 'ProvisionedThroughput cannot be specified \ + when BillingMode is PAY_PER_REQUEST') + throughput = None + else: # Provisioned (default billing mode) + throughput = body["ProvisionedThroughput"] # getting the schema key_schema = body['KeySchema'] # getting attribute definition diff --git a/tests/test_dynamodb2/test_dynamodb.py b/tests/test_dynamodb2/test_dynamodb.py index 32fd61d16..208453f0a 100644 --- a/tests/test_dynamodb2/test_dynamodb.py +++ b/tests/test_dynamodb2/test_dynamodb.py @@ -949,6 +949,33 @@ def test_bad_scan_filter(): raise RuntimeError('Should of raised ResourceInUseException') +@mock_dynamodb2 +def test_create_table_pay_per_request(): + client = boto3.client('dynamodb', region_name='us-east-1') + client.create_table( + TableName='test1', + AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], + KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], + BillingMode="PAY_PER_REQUEST" + ) + + +@mock_dynamodb2 +def test_create_table_error_pay_per_request_with_provisioned_param(): + client = boto3.client('dynamodb', region_name='us-east-1') + + try: + client.create_table( + TableName='test1', + AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}], + KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}], + ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123}, + 
BillingMode="PAY_PER_REQUEST" + ) + except ClientError as err: + err.response['Error']['Code'].should.equal('ValidationException') + + @mock_dynamodb2 def test_duplicate_create(): client = boto3.client('dynamodb', region_name='us-east-1') From 8cb4db1896521af99ff295b028f604c6ab599401 Mon Sep 17 00:00:00 2001 From: Kyle Decot Date: Thu, 2 May 2019 14:00:28 -0400 Subject: [PATCH 175/175] Adds Support for filtering on schedulingStrategy in ECS#list_services (#2180) --- moto/ecs/models.py | 7 +++++-- moto/ecs/responses.py | 3 ++- tests/test_ecs/test_ecs_boto3.py | 17 +++++++++++++---- 3 files changed, 20 insertions(+), 7 deletions(-) diff --git a/moto/ecs/models.py b/moto/ecs/models.py index efa8a6cd0..a314c7776 100644 --- a/moto/ecs/models.py +++ b/moto/ecs/models.py @@ -699,12 +699,15 @@ class EC2ContainerServiceBackend(BaseBackend): return service - def list_services(self, cluster_str): + def list_services(self, cluster_str, scheduling_strategy=None): cluster_name = cluster_str.split('/')[-1] service_arns = [] for key, value in self.services.items(): if cluster_name + ':' in key: - service_arns.append(self.services[key].arn) + service = self.services[key] + if scheduling_strategy is None or service.scheduling_strategy == scheduling_strategy: + service_arns.append(service.arn) + return sorted(service_arns) def describe_services(self, cluster_str, service_names_or_arns): diff --git a/moto/ecs/responses.py b/moto/ecs/responses.py index 964ef59d2..92b769fad 100644 --- a/moto/ecs/responses.py +++ b/moto/ecs/responses.py @@ -163,7 +163,8 @@ class EC2ContainerServiceResponse(BaseResponse): def list_services(self): cluster_str = self._get_param('cluster') - service_arns = self.ecs_backend.list_services(cluster_str) + scheduling_strategy = self._get_param('schedulingStrategy') + service_arns = self.ecs_backend.list_services(cluster_str, scheduling_strategy) return json.dumps({ 'serviceArns': service_arns # , diff --git a/tests/test_ecs/test_ecs_boto3.py 
b/tests/test_ecs/test_ecs_boto3.py index 3bf25b8fc..b147c4159 100644 --- a/tests/test_ecs/test_ecs_boto3.py +++ b/tests/test_ecs/test_ecs_boto3.py @@ -388,23 +388,32 @@ def test_list_services(): cluster='test_ecs_cluster', serviceName='test_ecs_service1', taskDefinition='test_ecs_task', + schedulingStrategy='REPLICA', desiredCount=2 ) _ = client.create_service( cluster='test_ecs_cluster', serviceName='test_ecs_service2', taskDefinition='test_ecs_task', + schedulingStrategy='DAEMON', desiredCount=2 ) - response = client.list_services( + unfiltered_response = client.list_services( cluster='test_ecs_cluster' ) - len(response['serviceArns']).should.equal(2) - response['serviceArns'][0].should.equal( + len(unfiltered_response['serviceArns']).should.equal(2) + unfiltered_response['serviceArns'][0].should.equal( 'arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service1') - response['serviceArns'][1].should.equal( + unfiltered_response['serviceArns'][1].should.equal( 'arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service2') + filtered_response = client.list_services( + cluster='test_ecs_cluster', + schedulingStrategy='REPLICA' + ) + len(filtered_response['serviceArns']).should.equal(1) + filtered_response['serviceArns'][0].should.equal( + 'arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service1') @mock_ecs def test_describe_services():