diff --git a/.gitignore b/.gitignore
index f0118e85e..47e5efbe0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -16,3 +16,4 @@ python_env
.pytest_cache/
venv/
.python-version
+.vscode/
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
index 3a5de0fa2..d386102fc 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -23,8 +23,6 @@ matrix:
sudo: true
before_install:
- export BOTO_CONFIG=/dev/null
- - export AWS_SECRET_ACCESS_KEY=foobar_secret
- - export AWS_ACCESS_KEY_ID=foobar_key
install:
# We build moto first so the docker container doesn't try to compile it as well, also note we don't use
# -d for docker run so the logs show up in travis
diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md
index 7c68c0e31..4646e8a6c 100644
--- a/IMPLEMENTATION_COVERAGE.md
+++ b/IMPLEMENTATION_COVERAGE.md
@@ -827,25 +827,25 @@
- [ ] unlink_identity
- [ ] update_identity_pool
-## cognito-idp - 0% implemented
+## cognito-idp - 34% implemented
- [ ] add_custom_attributes
-- [ ] admin_add_user_to_group
+- [X] admin_add_user_to_group
- [ ] admin_confirm_sign_up
-- [ ] admin_create_user
-- [ ] admin_delete_user
+- [X] admin_create_user
+- [X] admin_delete_user
- [ ] admin_delete_user_attributes
- [ ] admin_disable_provider_for_user
- [X] admin_disable_user
- [X] admin_enable_user
- [ ] admin_forget_device
- [ ] admin_get_device
-- [ ] admin_get_user
-- [ ] admin_initiate_auth
+- [X] admin_get_user
+- [X] admin_initiate_auth
- [ ] admin_link_provider_for_user
- [ ] admin_list_devices
-- [ ] admin_list_groups_for_user
+- [X] admin_list_groups_for_user
- [ ] admin_list_user_auth_events
-- [ ] admin_remove_user_from_group
+- [X] admin_remove_user_from_group
- [ ] admin_reset_user_password
- [ ] admin_respond_to_auth_challenge
- [ ] admin_set_user_mfa_preference
@@ -855,37 +855,37 @@
- [ ] admin_update_user_attributes
- [ ] admin_user_global_sign_out
- [ ] associate_software_token
-- [ ] change_password
+- [X] change_password
- [ ] confirm_device
-- [ ] confirm_forgot_password
+- [X] confirm_forgot_password
- [ ] confirm_sign_up
-- [ ] create_group
-- [ ] create_identity_provider
+- [X] create_group
+- [X] create_identity_provider
- [ ] create_resource_server
- [ ] create_user_import_job
-- [ ] create_user_pool
-- [ ] create_user_pool_client
-- [ ] create_user_pool_domain
-- [ ] delete_group
-- [ ] delete_identity_provider
+- [X] create_user_pool
+- [X] create_user_pool_client
+- [X] create_user_pool_domain
+- [X] delete_group
+- [X] delete_identity_provider
- [ ] delete_resource_server
- [ ] delete_user
- [ ] delete_user_attributes
-- [ ] delete_user_pool
-- [ ] delete_user_pool_client
-- [ ] delete_user_pool_domain
-- [ ] describe_identity_provider
+- [X] delete_user_pool
+- [X] delete_user_pool_client
+- [X] delete_user_pool_domain
+- [X] describe_identity_provider
- [ ] describe_resource_server
- [ ] describe_risk_configuration
- [ ] describe_user_import_job
-- [ ] describe_user_pool
-- [ ] describe_user_pool_client
-- [ ] describe_user_pool_domain
+- [X] describe_user_pool
+- [X] describe_user_pool_client
+- [X] describe_user_pool_domain
- [ ] forget_device
- [ ] forgot_password
- [ ] get_csv_header
- [ ] get_device
-- [ ] get_group
+- [X] get_group
- [ ] get_identity_provider_by_identifier
- [ ] get_signing_certificate
- [ ] get_ui_customization
@@ -895,16 +895,16 @@
- [ ] global_sign_out
- [ ] initiate_auth
- [ ] list_devices
-- [ ] list_groups
-- [ ] list_identity_providers
+- [X] list_groups
+- [X] list_identity_providers
- [ ] list_resource_servers
- [ ] list_user_import_jobs
-- [ ] list_user_pool_clients
-- [ ] list_user_pools
-- [ ] list_users
-- [ ] list_users_in_group
+- [X] list_user_pool_clients
+- [X] list_user_pools
+- [X] list_users
+- [X] list_users_in_group
- [ ] resend_confirmation_code
-- [ ] respond_to_auth_challenge
+- [X] respond_to_auth_challenge
- [ ] set_risk_configuration
- [ ] set_ui_customization
- [ ] set_user_mfa_preference
@@ -920,7 +920,7 @@
- [ ] update_resource_server
- [ ] update_user_attributes
- [ ] update_user_pool
-- [ ] update_user_pool_client
+- [X] update_user_pool_client
- [ ] verify_software_token
- [ ] verify_user_attribute
@@ -2376,11 +2376,11 @@
- [ ] unsubscribe_from_event
- [ ] update_assessment_target
-## iot - 30% implemented
+## iot - 32% implemented
- [ ] accept_certificate_transfer
- [X] add_thing_to_thing_group
- [ ] associate_targets_with_job
-- [ ] attach_policy
+- [X] attach_policy
- [X] attach_principal_policy
- [X] attach_thing_principal
- [ ] cancel_certificate_transfer
@@ -2429,7 +2429,7 @@
- [X] describe_thing_group
- [ ] describe_thing_registration_task
- [X] describe_thing_type
-- [ ] detach_policy
+- [X] detach_policy
- [X] detach_principal_policy
- [X] detach_thing_principal
- [ ] disable_topic_rule
diff --git a/README.md b/README.md
index 791226d6b..d6e9f30a1 100644
--- a/README.md
+++ b/README.md
@@ -259,7 +259,7 @@ It uses flask, which isn't a default dependency. You can install the
server 'extra' package with:
```python
-pip install moto[server]
+pip install "moto[server]"
```
You can then start it running a service:
diff --git a/moto/__init__.py b/moto/__init__.py
index dd3593d5d..e86c499a7 100644
--- a/moto/__init__.py
+++ b/moto/__init__.py
@@ -16,6 +16,7 @@ from .cognitoidp import mock_cognitoidp, mock_cognitoidp_deprecated # flake8: n
from .datapipeline import mock_datapipeline, mock_datapipeline_deprecated # flake8: noqa
from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated # flake8: noqa
from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated # flake8: noqa
+from .dynamodbstreams import mock_dynamodbstreams # flake8: noqa
from .ec2 import mock_ec2, mock_ec2_deprecated # flake8: noqa
from .ecr import mock_ecr, mock_ecr_deprecated # flake8: noqa
from .ecs import mock_ecs, mock_ecs_deprecated # flake8: noqa
diff --git a/moto/autoscaling/responses.py b/moto/autoscaling/responses.py
index 5586c51dd..845db0136 100644
--- a/moto/autoscaling/responses.py
+++ b/moto/autoscaling/responses.py
@@ -508,6 +508,15 @@ DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE = """
{% endif %}
+ {% if group.target_group_arns %}
+
+ {% for target_group_arn in group.target_group_arns %}
+ {{ target_group_arn }}
+ {% endfor %}
+
+ {% else %}
+
+ {% endif %}
{{ group.min_size }}
{% if group.vpc_zone_identifier %}
{{ group.vpc_zone_identifier }}
diff --git a/moto/backends.py b/moto/backends.py
index d95424385..1a333415e 100644
--- a/moto/backends.py
+++ b/moto/backends.py
@@ -12,6 +12,7 @@ from moto.core import moto_api_backends
from moto.datapipeline import datapipeline_backends
from moto.dynamodb import dynamodb_backends
from moto.dynamodb2 import dynamodb_backends2
+from moto.dynamodbstreams import dynamodbstreams_backends
from moto.ec2 import ec2_backends
from moto.ecr import ecr_backends
from moto.ecs import ecs_backends
@@ -59,6 +60,7 @@ BACKENDS = {
'datapipeline': datapipeline_backends,
'dynamodb': dynamodb_backends,
'dynamodb2': dynamodb_backends2,
+ 'dynamodbstreams': dynamodbstreams_backends,
'ec2': ec2_backends,
'ecr': ecr_backends,
'ecs': ecs_backends,
diff --git a/moto/batch/responses.py b/moto/batch/responses.py
index e626b7d4c..7fb606184 100644
--- a/moto/batch/responses.py
+++ b/moto/batch/responses.py
@@ -27,7 +27,7 @@ class BatchResponse(BaseResponse):
elif not hasattr(self, '_json'):
try:
self._json = json.loads(self.body)
- except json.JSONDecodeError:
+ except ValueError:
print()
return self._json
diff --git a/moto/cloudformation/models.py b/moto/cloudformation/models.py
index e5ab7255d..6ec821b42 100644
--- a/moto/cloudformation/models.py
+++ b/moto/cloudformation/models.py
@@ -13,6 +13,7 @@ from .utils import (
generate_changeset_id,
generate_stack_id,
yaml_tag_constructor,
+ validate_template_cfn_lint,
)
from .exceptions import ValidationError
@@ -270,6 +271,9 @@ class CloudFormationBackend(BaseBackend):
next_token = str(token + 100) if len(all_exports) > token + 100 else None
return exports, next_token
+ def validate_template(self, template):
+ return validate_template_cfn_lint(template)
+
def _validate_export_uniqueness(self, stack):
new_stack_export_names = [x.name for x in stack.exports]
export_names = self.exports.keys()
diff --git a/moto/cloudformation/responses.py b/moto/cloudformation/responses.py
index a1295a20d..9e67e931a 100644
--- a/moto/cloudformation/responses.py
+++ b/moto/cloudformation/responses.py
@@ -1,6 +1,7 @@
from __future__ import unicode_literals
import json
+import yaml
from six.moves.urllib.parse import urlparse
from moto.core.responses import BaseResponse
@@ -87,7 +88,8 @@ class CloudFormationResponse(BaseResponse):
role_arn = self._get_param('RoleARN')
update_or_create = self._get_param('ChangeSetType', 'CREATE')
parameters_list = self._get_list_prefix("Parameters.member")
- tags = {tag[0]: tag[1] for tag in self._get_list_prefix("Tags.member")}
+ tags = dict((item['key'], item['value'])
+ for item in self._get_list_prefix("Tags.member"))
parameters = {param['parameter_key']: param['parameter_value']
for param in parameters_list}
if template_url:
@@ -294,6 +296,32 @@ class CloudFormationResponse(BaseResponse):
template = self.response_template(LIST_EXPORTS_RESPONSE)
return template.render(exports=exports, next_token=next_token)
+ def validate_template(self):
+ cfn_lint = self.cloudformation_backend.validate_template(self._get_param('TemplateBody'))
+ if cfn_lint:
+ raise ValidationError(cfn_lint[0].message)
+ description = ""
+ try:
+ description = json.loads(self._get_param('TemplateBody'))['Description']
+ except (ValueError, KeyError):
+ pass
+ try:
+ description = yaml.load(self._get_param('TemplateBody'))['Description']
+ except (yaml.parser.ParserError, KeyError):
+ pass
+ template = self.response_template(VALIDATE_STACK_RESPONSE_TEMPLATE)
+ return template.render(description=description)
+
+
+VALIDATE_STACK_RESPONSE_TEMPLATE = """
+
+
+
+
+{{ description }}
+
+
+"""
CREATE_STACK_RESPONSE_TEMPLATE = """
diff --git a/moto/cloudformation/utils.py b/moto/cloudformation/utils.py
index f3b8874ed..f963ce7c8 100644
--- a/moto/cloudformation/utils.py
+++ b/moto/cloudformation/utils.py
@@ -3,6 +3,9 @@ import uuid
import six
import random
import yaml
+import os
+
+from cfnlint import decode, core
def generate_stack_id(stack_name):
@@ -38,3 +41,33 @@ def yaml_tag_constructor(loader, tag, node):
key = 'Fn::{}'.format(tag[1:])
return {key: _f(loader, tag, node)}
+
+
+def validate_template_cfn_lint(template):
+
+ # Save the template to a temporary file -- cfn-lint requires a file
+ filename = "file.tmp"
+ with open(filename, "w") as file:
+ file.write(template)
+ abs_filename = os.path.abspath(filename)
+
+ # decode handles both yaml and json
+ template, matches = decode.decode(abs_filename, False)
+
+ # Set cfn-lint to info
+ core.configure_logging(None)
+
+ # Initialize the ruleset to be applied (no overrules, no excludes)
+ rules = core.get_rules([], [], [])
+
+ # Use us-east-1 region (spec file) for validation
+ regions = ['us-east-1']
+
+ # Process all the rules and gather the errors
+ matches = core.run_checks(
+ abs_filename,
+ template,
+ rules,
+ regions)
+
+ return matches
diff --git a/moto/cognitoidp/exceptions.py b/moto/cognitoidp/exceptions.py
index 1f1ec2309..452670213 100644
--- a/moto/cognitoidp/exceptions.py
+++ b/moto/cognitoidp/exceptions.py
@@ -24,6 +24,16 @@ class UserNotFoundError(BadRequest):
})
+class GroupExistsException(BadRequest):
+
+ def __init__(self, message):
+ super(GroupExistsException, self).__init__()
+ self.description = json.dumps({
+ "message": message,
+ '__type': 'GroupExistsException',
+ })
+
+
class NotAuthorizedError(BadRequest):
def __init__(self, message):
diff --git a/moto/cognitoidp/models.py b/moto/cognitoidp/models.py
index 476d470b9..00868f7b3 100644
--- a/moto/cognitoidp/models.py
+++ b/moto/cognitoidp/models.py
@@ -1,6 +1,8 @@
from __future__ import unicode_literals
import datetime
+import functools
+import itertools
import json
import os
import time
@@ -11,8 +13,7 @@ from jose import jws
from moto.compat import OrderedDict
from moto.core import BaseBackend, BaseModel
-from .exceptions import NotAuthorizedError, ResourceNotFoundError, UserNotFoundError
-
+from .exceptions import GroupExistsException, NotAuthorizedError, ResourceNotFoundError, UserNotFoundError
UserStatus = {
"FORCE_CHANGE_PASSWORD": "FORCE_CHANGE_PASSWORD",
@@ -20,6 +21,39 @@ UserStatus = {
}
+def paginate(limit, start_arg="next_token", limit_arg="max_results"):
+ """Returns a limited result list, and an offset into list of remaining items
+
+ Takes the next_token, and max_results kwargs given to a function and handles
+ the slicing of the results. The kwarg `next_token` is the offset into the
+ list to begin slicing from. `max_results` is the size of the result required
+
+ If the max_results is not supplied then the `limit` parameter is used as a
+ default
+
+ :param limit_arg: the name of argument in the decorated function that
+ controls amount of items returned
+ :param start_arg: the name of the argument in the decorated function that provides
+ the starting offset
+ :param limit: A default maximum items to return
+ :return: a tuple containing a list of items, and the offset into the list
+ """
+ default_start = 0
+
+ def outer_wrapper(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ start = int(default_start if kwargs.get(start_arg) is None else kwargs[start_arg])
+ lim = int(limit if kwargs.get(limit_arg) is None else kwargs[limit_arg])
+ stop = start + lim
+ result = func(*args, **kwargs)
+ limited_results = list(itertools.islice(result, start, stop))
+ next_token = stop if stop < len(result) else None
+ return limited_results, next_token
+ return wrapper
+ return outer_wrapper
+
+
class CognitoIdpUserPool(BaseModel):
def __init__(self, region, name, extended_config):
@@ -33,6 +67,7 @@ class CognitoIdpUserPool(BaseModel):
self.clients = OrderedDict()
self.identity_providers = OrderedDict()
+ self.groups = OrderedDict()
self.users = OrderedDict()
self.refresh_tokens = {}
self.access_tokens = {}
@@ -185,6 +220,33 @@ class CognitoIdpIdentityProvider(BaseModel):
return identity_provider_json
+class CognitoIdpGroup(BaseModel):
+
+ def __init__(self, user_pool_id, group_name, description, role_arn, precedence):
+ self.user_pool_id = user_pool_id
+ self.group_name = group_name
+ self.description = description or ""
+ self.role_arn = role_arn
+ self.precedence = precedence
+ self.last_modified_date = datetime.datetime.now()
+ self.creation_date = self.last_modified_date
+
+ # Users who are members of this group.
+ # Note that these links are bidirectional.
+ self.users = set()
+
+ def to_json(self):
+ return {
+ "GroupName": self.group_name,
+ "UserPoolId": self.user_pool_id,
+ "Description": self.description,
+ "RoleArn": self.role_arn,
+ "Precedence": self.precedence,
+ "LastModifiedDate": time.mktime(self.last_modified_date.timetuple()),
+ "CreationDate": time.mktime(self.creation_date.timetuple()),
+ }
+
+
class CognitoIdpUser(BaseModel):
def __init__(self, user_pool_id, username, password, status, attributes):
@@ -198,6 +260,10 @@ class CognitoIdpUser(BaseModel):
self.create_date = datetime.datetime.utcnow()
self.last_modified_date = datetime.datetime.utcnow()
+ # Groups this user is a member of.
+ # Note that these links are bidirectional.
+ self.groups = set()
+
def _base_json(self):
return {
"UserPoolId": self.user_pool_id,
@@ -242,7 +308,8 @@ class CognitoIdpBackend(BaseBackend):
self.user_pools[user_pool.id] = user_pool
return user_pool
- def list_user_pools(self):
+ @paginate(60)
+ def list_user_pools(self, max_results=None, next_token=None):
return self.user_pools.values()
def describe_user_pool(self, user_pool_id):
@@ -289,7 +356,8 @@ class CognitoIdpBackend(BaseBackend):
user_pool.clients[user_pool_client.id] = user_pool_client
return user_pool_client
- def list_user_pool_clients(self, user_pool_id):
+ @paginate(60)
+ def list_user_pool_clients(self, user_pool_id, max_results=None, next_token=None):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
@@ -339,7 +407,8 @@ class CognitoIdpBackend(BaseBackend):
user_pool.identity_providers[name] = identity_provider
return identity_provider
- def list_identity_providers(self, user_pool_id):
+ @paginate(60)
+ def list_identity_providers(self, user_pool_id, max_results=None, next_token=None):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
@@ -367,6 +436,72 @@ class CognitoIdpBackend(BaseBackend):
del user_pool.identity_providers[name]
+ # Group
+ def create_group(self, user_pool_id, group_name, description, role_arn, precedence):
+ user_pool = self.user_pools.get(user_pool_id)
+ if not user_pool:
+ raise ResourceNotFoundError(user_pool_id)
+
+ group = CognitoIdpGroup(user_pool_id, group_name, description, role_arn, precedence)
+ if group.group_name in user_pool.groups:
+ raise GroupExistsException("A group with the name already exists")
+ user_pool.groups[group.group_name] = group
+
+ return group
+
+ def get_group(self, user_pool_id, group_name):
+ user_pool = self.user_pools.get(user_pool_id)
+ if not user_pool:
+ raise ResourceNotFoundError(user_pool_id)
+
+ if group_name not in user_pool.groups:
+ raise ResourceNotFoundError(group_name)
+
+ return user_pool.groups[group_name]
+
+ def list_groups(self, user_pool_id):
+ user_pool = self.user_pools.get(user_pool_id)
+ if not user_pool:
+ raise ResourceNotFoundError(user_pool_id)
+
+ return user_pool.groups.values()
+
+ def delete_group(self, user_pool_id, group_name):
+ user_pool = self.user_pools.get(user_pool_id)
+ if not user_pool:
+ raise ResourceNotFoundError(user_pool_id)
+
+ if group_name not in user_pool.groups:
+ raise ResourceNotFoundError(group_name)
+
+ group = user_pool.groups[group_name]
+ for user in group.users:
+ user.groups.remove(group)
+
+ del user_pool.groups[group_name]
+
+ def admin_add_user_to_group(self, user_pool_id, group_name, username):
+ group = self.get_group(user_pool_id, group_name)
+ user = self.admin_get_user(user_pool_id, username)
+
+ group.users.add(user)
+ user.groups.add(group)
+
+ def list_users_in_group(self, user_pool_id, group_name):
+ group = self.get_group(user_pool_id, group_name)
+ return list(group.users)
+
+ def admin_list_groups_for_user(self, user_pool_id, username):
+ user = self.admin_get_user(user_pool_id, username)
+ return list(user.groups)
+
+ def admin_remove_user_from_group(self, user_pool_id, group_name, username):
+ group = self.get_group(user_pool_id, group_name)
+ user = self.admin_get_user(user_pool_id, username)
+
+ group.users.discard(user)
+ user.groups.discard(group)
+
# User
def admin_create_user(self, user_pool_id, username, temporary_password, attributes):
user_pool = self.user_pools.get(user_pool_id)
@@ -387,7 +522,8 @@ class CognitoIdpBackend(BaseBackend):
return user_pool.users[username]
- def list_users(self, user_pool_id):
+ @paginate(60, "pagination_token", "limit")
+ def list_users(self, user_pool_id, pagination_token=None, limit=None):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
@@ -410,6 +546,10 @@ class CognitoIdpBackend(BaseBackend):
if username not in user_pool.users:
raise UserNotFoundError(username)
+ user = user_pool.users[username]
+ for group in user.groups:
+ group.users.remove(user)
+
del user_pool.users[username]
def _log_user_in(self, user_pool, client, username):
diff --git a/moto/cognitoidp/responses.py b/moto/cognitoidp/responses.py
index 50939786b..8b3941c21 100644
--- a/moto/cognitoidp/responses.py
+++ b/moto/cognitoidp/responses.py
@@ -22,10 +22,17 @@ class CognitoIdpResponse(BaseResponse):
})
def list_user_pools(self):
- user_pools = cognitoidp_backends[self.region].list_user_pools()
- return json.dumps({
- "UserPools": [user_pool.to_json() for user_pool in user_pools]
- })
+ max_results = self._get_param("MaxResults")
+ next_token = self._get_param("NextToken", "0")
+ user_pools, next_token = cognitoidp_backends[self.region].list_user_pools(
+ max_results=max_results, next_token=next_token
+ )
+ response = {
+ "UserPools": [user_pool.to_json() for user_pool in user_pools],
+ }
+ if next_token:
+ response["NextToken"] = str(next_token)
+ return json.dumps(response)
def describe_user_pool(self):
user_pool_id = self._get_param("UserPoolId")
@@ -72,10 +79,16 @@ class CognitoIdpResponse(BaseResponse):
def list_user_pool_clients(self):
user_pool_id = self._get_param("UserPoolId")
- user_pool_clients = cognitoidp_backends[self.region].list_user_pool_clients(user_pool_id)
- return json.dumps({
+ max_results = self._get_param("MaxResults")
+ next_token = self._get_param("NextToken", "0")
+ user_pool_clients, next_token = cognitoidp_backends[self.region].list_user_pool_clients(user_pool_id,
+ max_results=max_results, next_token=next_token)
+ response = {
"UserPoolClients": [user_pool_client.to_json() for user_pool_client in user_pool_clients]
- })
+ }
+ if next_token:
+ response["NextToken"] = str(next_token)
+ return json.dumps(response)
def describe_user_pool_client(self):
user_pool_id = self._get_param("UserPoolId")
@@ -110,10 +123,17 @@ class CognitoIdpResponse(BaseResponse):
def list_identity_providers(self):
user_pool_id = self._get_param("UserPoolId")
- identity_providers = cognitoidp_backends[self.region].list_identity_providers(user_pool_id)
- return json.dumps({
+ max_results = self._get_param("MaxResults")
+ next_token = self._get_param("NextToken", "0")
+ identity_providers, next_token = cognitoidp_backends[self.region].list_identity_providers(
+ user_pool_id, max_results=max_results, next_token=next_token
+ )
+ response = {
"Providers": [identity_provider.to_json() for identity_provider in identity_providers]
- })
+ }
+ if next_token:
+ response["NextToken"] = str(next_token)
+ return json.dumps(response)
def describe_identity_provider(self):
user_pool_id = self._get_param("UserPoolId")
@@ -129,6 +149,89 @@ class CognitoIdpResponse(BaseResponse):
cognitoidp_backends[self.region].delete_identity_provider(user_pool_id, name)
return ""
+ # Group
+ def create_group(self):
+ group_name = self._get_param("GroupName")
+ user_pool_id = self._get_param("UserPoolId")
+ description = self._get_param("Description")
+ role_arn = self._get_param("RoleArn")
+ precedence = self._get_param("Precedence")
+
+ group = cognitoidp_backends[self.region].create_group(
+ user_pool_id,
+ group_name,
+ description,
+ role_arn,
+ precedence,
+ )
+
+ return json.dumps({
+ "Group": group.to_json(),
+ })
+
+ def get_group(self):
+ group_name = self._get_param("GroupName")
+ user_pool_id = self._get_param("UserPoolId")
+ group = cognitoidp_backends[self.region].get_group(user_pool_id, group_name)
+ return json.dumps({
+ "Group": group.to_json(),
+ })
+
+ def list_groups(self):
+ user_pool_id = self._get_param("UserPoolId")
+ groups = cognitoidp_backends[self.region].list_groups(user_pool_id)
+ return json.dumps({
+ "Groups": [group.to_json() for group in groups],
+ })
+
+ def delete_group(self):
+ group_name = self._get_param("GroupName")
+ user_pool_id = self._get_param("UserPoolId")
+ cognitoidp_backends[self.region].delete_group(user_pool_id, group_name)
+ return ""
+
+ def admin_add_user_to_group(self):
+ user_pool_id = self._get_param("UserPoolId")
+ username = self._get_param("Username")
+ group_name = self._get_param("GroupName")
+
+ cognitoidp_backends[self.region].admin_add_user_to_group(
+ user_pool_id,
+ group_name,
+ username,
+ )
+
+ return ""
+
+ def list_users_in_group(self):
+ user_pool_id = self._get_param("UserPoolId")
+ group_name = self._get_param("GroupName")
+ users = cognitoidp_backends[self.region].list_users_in_group(user_pool_id, group_name)
+ return json.dumps({
+ "Users": [user.to_json(extended=True) for user in users],
+ })
+
+ def admin_list_groups_for_user(self):
+ username = self._get_param("Username")
+ user_pool_id = self._get_param("UserPoolId")
+ groups = cognitoidp_backends[self.region].admin_list_groups_for_user(user_pool_id, username)
+ return json.dumps({
+ "Groups": [group.to_json() for group in groups],
+ })
+
+ def admin_remove_user_from_group(self):
+ user_pool_id = self._get_param("UserPoolId")
+ username = self._get_param("Username")
+ group_name = self._get_param("GroupName")
+
+ cognitoidp_backends[self.region].admin_remove_user_from_group(
+ user_pool_id,
+ group_name,
+ username,
+ )
+
+ return ""
+
# User
def admin_create_user(self):
user_pool_id = self._get_param("UserPoolId")
@@ -155,10 +258,15 @@ class CognitoIdpResponse(BaseResponse):
def list_users(self):
user_pool_id = self._get_param("UserPoolId")
- users = cognitoidp_backends[self.region].list_users(user_pool_id)
- return json.dumps({
- "Users": [user.to_json(extended=True) for user in users]
- })
+ limit = self._get_param("Limit")
+ token = self._get_param("PaginationToken")
+ users, token = cognitoidp_backends[self.region].list_users(user_pool_id,
+ limit=limit,
+ pagination_token=token)
+ response = {"Users": [user.to_json(extended=True) for user in users]}
+ if token:
+ response["PaginationToken"] = str(token)
+ return json.dumps(response)
def admin_disable_user(self):
user_pool_id = self._get_param("UserPoolId")
diff --git a/moto/core/models.py b/moto/core/models.py
index 19267ca08..9fe1e96bd 100644
--- a/moto/core/models.py
+++ b/moto/core/models.py
@@ -4,6 +4,7 @@ from __future__ import absolute_import
import functools
import inspect
+import os
import re
import six
from io import BytesIO
@@ -21,6 +22,11 @@ from .utils import (
)
+# "Mock" the AWS credentials as they can't be mocked in Botocore currently
+os.environ.setdefault("AWS_ACCESS_KEY_ID", "foobar_key")
+os.environ.setdefault("AWS_SECRET_ACCESS_KEY", "foobar_secret")
+
+
class BaseMockAWS(object):
nested_count = 0
diff --git a/moto/dynamodb2/models.py b/moto/dynamodb2/models.py
index a54c4f7d0..8187ceaf9 100644
--- a/moto/dynamodb2/models.py
+++ b/moto/dynamodb2/models.py
@@ -5,6 +5,7 @@ import datetime
import decimal
import json
import re
+import uuid
import boto3
from moto.compat import OrderedDict
@@ -292,9 +293,82 @@ class Item(BaseModel):
'ADD not supported for %s' % ', '.join(update_action['Value'].keys()))
+class StreamRecord(BaseModel):
+ def __init__(self, table, stream_type, event_name, old, new, seq):
+ old_a = old.to_json()['Attributes'] if old is not None else {}
+ new_a = new.to_json()['Attributes'] if new is not None else {}
+
+ rec = old if old is not None else new
+ keys = {table.hash_key_attr: rec.hash_key.to_json()}
+ if table.range_key_attr is not None:
+ keys[table.range_key_attr] = rec.range_key.to_json()
+
+ self.record = {
+ 'eventID': uuid.uuid4().hex,
+ 'eventName': event_name,
+ 'eventSource': 'aws:dynamodb',
+ 'eventVersion': '1.0',
+ 'awsRegion': 'us-east-1',
+ 'dynamodb': {
+ 'StreamViewType': stream_type,
+ 'ApproximateCreationDateTime': datetime.datetime.utcnow().isoformat(),
+ 'SequenceNumber': seq,
+ 'SizeBytes': 1,
+ 'Keys': keys
+ }
+ }
+
+ if stream_type in ('NEW_IMAGE', 'NEW_AND_OLD_IMAGES'):
+ self.record['dynamodb']['NewImage'] = new_a
+ if stream_type in ('OLD_IMAGE', 'NEW_AND_OLD_IMAGES'):
+ self.record['dynamodb']['OldImage'] = old_a
+
+ # This is a substantial overestimate but it's the easiest to do now
+ self.record['dynamodb']['SizeBytes'] = len(
+ json.dumps(self.record['dynamodb']))
+
+ def to_json(self):
+ return self.record
+
+
+class StreamShard(BaseModel):
+ def __init__(self, table):
+ self.table = table
+ self.id = 'shardId-00000001541626099285-f35f62ef'
+ self.starting_sequence_number = 1100000000017454423009
+ self.items = []
+ self.created_on = datetime.datetime.utcnow()
+
+ def to_json(self):
+ return {
+ 'ShardId': self.id,
+ 'SequenceNumberRange': {
+ 'StartingSequenceNumber': str(self.starting_sequence_number)
+ }
+ }
+
+ def add(self, old, new):
+ t = self.table.stream_specification['StreamViewType']
+ if old is None:
+ event_name = 'INSERT'
+ elif new is None:
+ event_name = 'DELETE'
+ else:
+ event_name = 'MODIFY'
+ seq = len(self.items) + self.starting_sequence_number
+ self.items.append(
+ StreamRecord(self.table, t, event_name, old, new, seq))
+
+ def get(self, start, quantity):
+ start -= self.starting_sequence_number
+ assert start >= 0
+ end = start + quantity
+ return [i.to_json() for i in self.items[start:end]]
+
+
class Table(BaseModel):
- def __init__(self, table_name, schema=None, attr=None, throughput=None, indexes=None, global_indexes=None):
+ def __init__(self, table_name, schema=None, attr=None, throughput=None, indexes=None, global_indexes=None, streams=None):
self.name = table_name
self.attr = attr
self.schema = schema
@@ -325,10 +399,22 @@ class Table(BaseModel):
'TimeToLiveStatus': 'DISABLED' # One of 'ENABLING'|'DISABLING'|'ENABLED'|'DISABLED',
# 'AttributeName': 'string' # Can contain this
}
+ self.set_stream_specification(streams)
def _generate_arn(self, name):
return 'arn:aws:dynamodb:us-east-1:123456789011:table/' + name
+ def set_stream_specification(self, streams):
+ self.stream_specification = streams
+ if streams and (streams.get('StreamEnabled') or streams.get('StreamViewType')):
+ self.stream_specification['StreamEnabled'] = True
+ self.latest_stream_label = datetime.datetime.utcnow().isoformat()
+ self.stream_shard = StreamShard(self)
+ else:
+ self.stream_specification = {'StreamEnabled': False}
+ self.latest_stream_label = None
+ self.stream_shard = None
+
def describe(self, base_key='TableDescription'):
results = {
base_key: {
@@ -345,6 +431,11 @@ class Table(BaseModel):
'LocalSecondaryIndexes': [index for index in self.indexes],
}
}
+ if self.stream_specification and self.stream_specification['StreamEnabled']:
+ results[base_key]['StreamSpecification'] = self.stream_specification
+ if self.latest_stream_label:
+ results[base_key]['LatestStreamLabel'] = self.latest_stream_label
+ results[base_key]['LatestStreamArn'] = self.table_arn + '/stream/' + self.latest_stream_label
return results
def __len__(self):
@@ -385,23 +476,22 @@ class Table(BaseModel):
else:
range_value = None
+ if expected is None:
+ expected = {}
+ lookup_range_value = range_value
+ else:
+ expected_range_value = expected.get(
+ self.range_key_attr, {}).get("Value")
+ if(expected_range_value is None):
+ lookup_range_value = range_value
+ else:
+ lookup_range_value = DynamoType(expected_range_value)
+ current = self.get_item(hash_value, lookup_range_value)
+
item = Item(hash_value, self.hash_key_type, range_value,
self.range_key_type, item_attrs)
if not overwrite:
- if expected is None:
- expected = {}
- lookup_range_value = range_value
- else:
- expected_range_value = expected.get(
- self.range_key_attr, {}).get("Value")
- if(expected_range_value is None):
- lookup_range_value = range_value
- else:
- lookup_range_value = DynamoType(expected_range_value)
-
- current = self.get_item(hash_value, lookup_range_value)
-
if current is None:
current_attr = {}
elif hasattr(current, 'attrs'):
@@ -432,6 +522,10 @@ class Table(BaseModel):
self.items[hash_value][range_value] = item
else:
self.items[hash_value] = item
+
+ if self.stream_shard is not None:
+ self.stream_shard.add(current, item)
+
return item
def __nonzero__(self):
@@ -462,9 +556,14 @@ class Table(BaseModel):
def delete_item(self, hash_key, range_key):
try:
if range_key:
- return self.items[hash_key].pop(range_key)
+ item = self.items[hash_key].pop(range_key)
else:
- return self.items.pop(hash_key)
+ item = self.items.pop(hash_key)
+
+ if self.stream_shard is not None:
+ self.stream_shard.add(item, None)
+
+ return item
except KeyError:
return None
@@ -680,6 +779,13 @@ class DynamoDBBackend(BaseBackend):
table.throughput = throughput
return table
+ def update_table_streams(self, name, stream_specification):
+ table = self.tables[name]
+ if (stream_specification.get('StreamEnabled') or stream_specification.get('StreamViewType')) and table.latest_stream_label:
+ raise ValueError('Table already has stream enabled')
+ table.set_stream_specification(stream_specification)
+ return table
+
def update_table_global_indexes(self, name, global_index_updates):
table = self.tables[name]
gsis_by_name = dict((i['IndexName'], i) for i in table.global_indexes)
diff --git a/moto/dynamodb2/responses.py b/moto/dynamodb2/responses.py
index e2f1ef1cc..a16d02c4c 100644
--- a/moto/dynamodb2/responses.py
+++ b/moto/dynamodb2/responses.py
@@ -104,13 +104,16 @@ class DynamoHandler(BaseResponse):
# getting the indexes
global_indexes = body.get("GlobalSecondaryIndexes", [])
local_secondary_indexes = body.get("LocalSecondaryIndexes", [])
+ # get the stream specification
+ streams = body.get("StreamSpecification")
table = self.dynamodb_backend.create_table(table_name,
schema=key_schema,
throughput=throughput,
attr=attr,
global_indexes=global_indexes,
- indexes=local_secondary_indexes)
+ indexes=local_secondary_indexes,
+ streams=streams)
if table is not None:
return dynamo_json_dump(table.describe())
else:
@@ -163,12 +166,20 @@ class DynamoHandler(BaseResponse):
def update_table(self):
name = self.body['TableName']
+ table = self.dynamodb_backend.get_table(name)
if 'GlobalSecondaryIndexUpdates' in self.body:
table = self.dynamodb_backend.update_table_global_indexes(
name, self.body['GlobalSecondaryIndexUpdates'])
if 'ProvisionedThroughput' in self.body:
throughput = self.body["ProvisionedThroughput"]
table = self.dynamodb_backend.update_table_throughput(name, throughput)
+ if 'StreamSpecification' in self.body:
+ try:
+ table = self.dynamodb_backend.update_table_streams(name, self.body['StreamSpecification'])
+ except ValueError:
+ er = 'com.amazonaws.dynamodb.v20111205#ResourceInUseException'
+ return self.error(er, 'Cannot enable stream')
+
return dynamo_json_dump(table.describe())
def describe_table(self):
@@ -183,6 +194,11 @@ class DynamoHandler(BaseResponse):
def put_item(self):
name = self.body['TableName']
item = self.body['Item']
+ return_values = self.body.get('ReturnValues', 'NONE')
+
+ if return_values not in ('ALL_OLD', 'NONE'):
+ er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
+ return self.error(er, 'Return values set to invalid value')
if has_empty_keys_or_values(item):
return get_empty_str_error()
@@ -193,6 +209,13 @@ class DynamoHandler(BaseResponse):
else:
expected = None
+ if return_values == 'ALL_OLD':
+ existing_item = self.dynamodb_backend.get_item(name, item)
+ if existing_item:
+ existing_attributes = existing_item.to_json()['Attributes']
+ else:
+ existing_attributes = {}
+
# Attempt to parse simple ConditionExpressions into an Expected
# expression
if not expected:
@@ -228,6 +251,10 @@ class DynamoHandler(BaseResponse):
'TableName': name,
'CapacityUnits': 1
}
+ if return_values == 'ALL_OLD':
+ item_dict['Attributes'] = existing_attributes
+ else:
+ item_dict.pop('Attributes', None)
return dynamo_json_dump(item_dict)
else:
er = 'com.amazonaws.dynamodb.v20111205#ResourceNotFoundException'
@@ -512,7 +539,11 @@ class DynamoHandler(BaseResponse):
def delete_item(self):
name = self.body['TableName']
keys = self.body['Key']
- return_values = self.body.get('ReturnValues', '')
+ return_values = self.body.get('ReturnValues', 'NONE')
+ if return_values not in ('ALL_OLD', 'NONE'):
+ er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
+ return self.error(er, 'Return values set to invalid value')
+
table = self.dynamodb_backend.get_table(name)
if not table:
er = 'com.amazonaws.dynamodb.v20120810#ConditionalCheckFailedException'
@@ -527,9 +558,9 @@ class DynamoHandler(BaseResponse):
return dynamo_json_dump(item_dict)
def update_item(self):
-
name = self.body['TableName']
key = self.body['Key']
+ return_values = self.body.get('ReturnValues', 'NONE')
update_expression = self.body.get('UpdateExpression')
attribute_updates = self.body.get('AttributeUpdates')
expression_attribute_names = self.body.get(
@@ -537,6 +568,15 @@ class DynamoHandler(BaseResponse):
expression_attribute_values = self.body.get(
'ExpressionAttributeValues', {})
existing_item = self.dynamodb_backend.get_item(name, key)
+ if existing_item:
+ existing_attributes = existing_item.to_json()['Attributes']
+ else:
+ existing_attributes = {}
+
+ if return_values not in ('NONE', 'ALL_OLD', 'ALL_NEW', 'UPDATED_OLD',
+ 'UPDATED_NEW'):
+ er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
+ return self.error(er, 'Return values set to invalid value')
if has_empty_keys_or_values(expression_attribute_values):
return get_empty_str_error()
@@ -591,8 +631,26 @@ class DynamoHandler(BaseResponse):
'TableName': name,
'CapacityUnits': 0.5
}
- if not existing_item:
+ unchanged_attributes = {
+ k for k in existing_attributes.keys()
+ if existing_attributes[k] == item_dict['Attributes'].get(k)
+ }
+ changed_attributes = set(existing_attributes.keys()).union(item_dict['Attributes'].keys()).difference(unchanged_attributes)
+
+ if return_values == 'NONE':
item_dict['Attributes'] = {}
+ elif return_values == 'ALL_OLD':
+ item_dict['Attributes'] = existing_attributes
+ elif return_values == 'UPDATED_OLD':
+ item_dict['Attributes'] = {
+ k: v for k, v in existing_attributes.items()
+ if k in changed_attributes
+ }
+ elif return_values == 'UPDATED_NEW':
+ item_dict['Attributes'] = {
+ k: v for k, v in item_dict['Attributes'].items()
+ if k in changed_attributes
+ }
return dynamo_json_dump(item_dict)
diff --git a/moto/dynamodbstreams/__init__.py b/moto/dynamodbstreams/__init__.py
new file mode 100644
index 000000000..b35879eba
--- /dev/null
+++ b/moto/dynamodbstreams/__init__.py
@@ -0,0 +1,6 @@
+from __future__ import unicode_literals
+from .models import dynamodbstreams_backends
+from ..core.models import base_decorator
+
+dynamodbstreams_backend = dynamodbstreams_backends['us-east-1']
+mock_dynamodbstreams = base_decorator(dynamodbstreams_backends)
diff --git a/moto/dynamodbstreams/models.py b/moto/dynamodbstreams/models.py
new file mode 100644
index 000000000..41cc6e280
--- /dev/null
+++ b/moto/dynamodbstreams/models.py
@@ -0,0 +1,129 @@
+from __future__ import unicode_literals
+
+import os
+import json
+import boto3
+import base64
+
+from moto.core import BaseBackend, BaseModel
+from moto.dynamodb2.models import dynamodb_backends
+
+
+class ShardIterator(BaseModel):
+ def __init__(self, streams_backend, stream_shard, shard_iterator_type, sequence_number=None):
+ self.id = base64.b64encode(os.urandom(472)).decode('utf-8')
+ self.streams_backend = streams_backend
+ self.stream_shard = stream_shard
+ self.shard_iterator_type = shard_iterator_type
+ if shard_iterator_type == 'TRIM_HORIZON':
+ self.sequence_number = stream_shard.starting_sequence_number
+ elif shard_iterator_type == 'LATEST':
+ self.sequence_number = stream_shard.starting_sequence_number + len(stream_shard.items)
+ elif shard_iterator_type == 'AT_SEQUENCE_NUMBER':
+ self.sequence_number = sequence_number
+ elif shard_iterator_type == 'AFTER_SEQUENCE_NUMBER':
+ self.sequence_number = sequence_number + 1
+
+ @property
+ def arn(self):
+ return '{}/stream/{}|1|{}'.format(
+ self.stream_shard.table.table_arn,
+ self.stream_shard.table.latest_stream_label,
+ self.id)
+
+ def to_json(self):
+ return {
+ 'ShardIterator': self.arn
+ }
+
+ def get(self, limit=1000):
+ items = self.stream_shard.get(self.sequence_number, limit)
+ try:
+ last_sequence_number = max(i['dynamodb']['SequenceNumber'] for i in items)
+ new_shard_iterator = ShardIterator(self.streams_backend,
+ self.stream_shard,
+ 'AFTER_SEQUENCE_NUMBER',
+ last_sequence_number)
+ except ValueError:
+ new_shard_iterator = ShardIterator(self.streams_backend,
+ self.stream_shard,
+ 'AT_SEQUENCE_NUMBER',
+ self.sequence_number)
+
+ self.streams_backend.shard_iterators[new_shard_iterator.arn] = new_shard_iterator
+ return {
+ 'NextShardIterator': new_shard_iterator.arn,
+ 'Records': items
+ }
+
+
+class DynamoDBStreamsBackend(BaseBackend):
+ def __init__(self, region):
+ self.region = region
+ self.shard_iterators = {}
+
+ def reset(self):
+ region = self.region
+ self.__dict__ = {}
+ self.__init__(region)
+
+ @property
+ def dynamodb(self):
+ return dynamodb_backends[self.region]
+
+ def _get_table_from_arn(self, arn):
+ table_name = arn.split(':', 6)[5].split('/')[1]
+ return self.dynamodb.get_table(table_name)
+
+ def describe_stream(self, arn):
+ table = self._get_table_from_arn(arn)
+ resp = {'StreamDescription': {
+ 'StreamArn': arn,
+ 'StreamLabel': table.latest_stream_label,
+ 'StreamStatus': ('ENABLED' if table.latest_stream_label
+ else 'DISABLED'),
+ 'StreamViewType': table.stream_specification['StreamViewType'],
+ 'CreationRequestDateTime': table.stream_shard.created_on.isoformat(),
+ 'TableName': table.name,
+ 'KeySchema': table.schema,
+ 'Shards': ([table.stream_shard.to_json()] if table.stream_shard
+ else [])
+ }}
+
+ return json.dumps(resp)
+
+ def list_streams(self, table_name=None):
+ streams = []
+ for table in self.dynamodb.tables.values():
+ if table_name is not None and table.name != table_name:
+ continue
+ if table.latest_stream_label:
+ d = table.describe(base_key='Table')
+ streams.append({
+ 'StreamArn': d['Table']['LatestStreamArn'],
+ 'TableName': d['Table']['TableName'],
+ 'StreamLabel': d['Table']['LatestStreamLabel']
+ })
+
+ return json.dumps({'Streams': streams})
+
+ def get_shard_iterator(self, arn, shard_id, shard_iterator_type, sequence_number=None):
+ table = self._get_table_from_arn(arn)
+ assert table.stream_shard.id == shard_id
+
+ shard_iterator = ShardIterator(self, table.stream_shard,
+ shard_iterator_type,
+ sequence_number)
+ self.shard_iterators[shard_iterator.arn] = shard_iterator
+
+ return json.dumps(shard_iterator.to_json())
+
+ def get_records(self, iterator_arn, limit):
+ shard_iterator = self.shard_iterators[iterator_arn]
+ return json.dumps(shard_iterator.get(limit))
+
+
+available_regions = boto3.session.Session().get_available_regions(
+ 'dynamodbstreams')
+dynamodbstreams_backends = {region: DynamoDBStreamsBackend(region=region)
+ for region in available_regions}
diff --git a/moto/dynamodbstreams/responses.py b/moto/dynamodbstreams/responses.py
new file mode 100644
index 000000000..c9c113615
--- /dev/null
+++ b/moto/dynamodbstreams/responses.py
@@ -0,0 +1,34 @@
+from __future__ import unicode_literals
+
+from moto.core.responses import BaseResponse
+
+from .models import dynamodbstreams_backends
+
+
+class DynamoDBStreamsHandler(BaseResponse):
+
+ @property
+ def backend(self):
+ return dynamodbstreams_backends[self.region]
+
+ def describe_stream(self):
+ arn = self._get_param('StreamArn')
+ return self.backend.describe_stream(arn)
+
+ def list_streams(self):
+ table_name = self._get_param('TableName')
+ return self.backend.list_streams(table_name)
+
+ def get_shard_iterator(self):
+ arn = self._get_param('StreamArn')
+ shard_id = self._get_param('ShardId')
+ shard_iterator_type = self._get_param('ShardIteratorType')
+ return self.backend.get_shard_iterator(arn, shard_id,
+ shard_iterator_type)
+
+ def get_records(self):
+ arn = self._get_param('ShardIterator')
+ limit = self._get_param('Limit')
+ if limit is None:
+ limit = 1000
+ return self.backend.get_records(arn, limit)
diff --git a/moto/dynamodbstreams/urls.py b/moto/dynamodbstreams/urls.py
new file mode 100644
index 000000000..1d0f94c35
--- /dev/null
+++ b/moto/dynamodbstreams/urls.py
@@ -0,0 +1,10 @@
+from __future__ import unicode_literals
+from .responses import DynamoDBStreamsHandler
+
+url_bases = [
+ "https?://streams.dynamodb.(.+).amazonaws.com"
+]
+
+url_paths = {
+ "{0}/$": DynamoDBStreamsHandler.dispatch,
+}
diff --git a/moto/ec2/models.py b/moto/ec2/models.py
index b94cac479..f7d1eb044 100755
--- a/moto/ec2/models.py
+++ b/moto/ec2/models.py
@@ -2230,6 +2230,10 @@ class VPCPeeringConnectionStatus(object):
self.code = code
self.message = message
+ def deleted(self):
+ self.code = 'deleted'
+ self.message = 'Deleted by {deleter ID}'
+
def initiating(self):
self.code = 'initiating-request'
self.message = 'Initiating Request to {accepter ID}'
@@ -2292,9 +2296,8 @@ class VPCPeeringConnectionBackend(object):
return self.vpc_pcxs.get(vpc_pcx_id)
def delete_vpc_peering_connection(self, vpc_pcx_id):
- deleted = self.vpc_pcxs.pop(vpc_pcx_id, None)
- if not deleted:
- raise InvalidVPCPeeringConnectionIdError(vpc_pcx_id)
+ deleted = self.get_vpc_peering_connection(vpc_pcx_id)
+ deleted._status.deleted()
return deleted
def accept_vpc_peering_connection(self, vpc_pcx_id):
diff --git a/moto/ecs/models.py b/moto/ecs/models.py
index d00853843..4a6737ceb 100644
--- a/moto/ecs/models.py
+++ b/moto/ecs/models.py
@@ -769,6 +769,8 @@ class EC2ContainerServiceBackend(BaseBackend):
Container instances status should be one of [ACTIVE,DRAINING]")
failures = []
container_instance_objects = []
+ list_container_instance_ids = [x.split('/')[-1]
+ for x in list_container_instance_ids]
for container_instance_id in list_container_instance_ids:
container_instance = self.container_instances[cluster_name].get(container_instance_id, None)
if container_instance is not None:
diff --git a/moto/emr/responses.py b/moto/emr/responses.py
index 49e37ab9a..933e0177b 100644
--- a/moto/emr/responses.py
+++ b/moto/emr/responses.py
@@ -613,13 +613,11 @@ DESCRIBE_STEP_TEMPLATE = """
@@ -734,7 +766,7 @@ CREATE_INSTANCE_PROFILE_TEMPLATE = """
{{ policy }}
{% endfor %}
+ false
- false
7a62c49f-347e-4fc4-9331-6e8eEXAMPLE
@@ -1243,8 +1275,8 @@ LIST_ACCESS_KEYS_TEMPLATE = """
CREDENTIAL_REPORT_GENERATING = """
- STARTED
- No report exists. Starting a new report generation task
+ STARTED
+ No report exists. Starting a new report generation task
fa788a82-aa8a-11e4-a278-1786c418872b"
@@ -1253,7 +1285,7 @@ CREDENTIAL_REPORT_GENERATING = """
CREDENTIAL_REPORT_GENERATED = """
- COMPLETE
+ COMPLETE
fa788a82-aa8a-11e4-a278-1786c418872b"
@@ -1262,7 +1294,7 @@ CREDENTIAL_REPORT_GENERATED = """
CREDENTIAL_REPORT = """
- {{ report }}
+ {{ report }}
2015-02-02T20:02:02Z
text/csv
@@ -1277,23 +1309,23 @@ LIST_INSTANCE_PROFILES_FOR_ROLE_TEMPLATE = """
{% for profile in instance_profiles %}
- {{ profile.id }}
-
- {% for role in profile.roles %}
-
- {{ role.path }}
- {{ role.arn }}
- {{ role.name }}
- {{ role.assume_policy_document }}
- 2012-05-09T15:45:35Z
- {{ role.id }}
-
- {% endfor %}
-
- {{ profile.name }}
- {{ profile.path }}
- {{ profile.arn }}
- 2012-05-09T16:27:11Z
+ {{ profile.id }}
+
+ {% for role in profile.roles %}
+
+ {{ role.path }}
+ {{ role.arn }}
+ {{ role.name }}
+ {{ role.assume_policy_document }}
+ {{ role.create_date }}
+ {{ role.id }}
+
+ {% endfor %}
+
+ {{ profile.name }}
+ {{ profile.path }}
+ {{ profile.arn }}
+ {{ profile.create_date }}
{% endfor %}
@@ -1382,7 +1414,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """{{ user.path }}
{{ user.name }}
{{ user.arn }}
- 2012-05-09T15:45:35Z
+ {{ user.created_iso_8601 }}
{% endfor %}
@@ -1401,7 +1433,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """{{ group.name }}
{{ group.path }}
{{ group.arn }}
- 2012-05-09T16:27:11Z
+ {{ group.create_date }}
{% endfor %}
@@ -1421,23 +1453,23 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """
{% for profile in instance_profiles %}
- {{ profile.id }}
-
- {% for role in profile.roles %}
-
- {{ role.path }}
- {{ role.arn }}
- {{ role.name }}
- {{ role.assume_role_policy_document }}
- 2012-05-09T15:45:35Z
- {{ role.id }}
-
- {% endfor %}
-
- {{ profile.name }}
- {{ profile.path }}
- {{ profile.arn }}
- 2012-05-09T16:27:11Z
+ {{ profile.id }}
+
+ {% for role in profile.roles %}
+
+ {{ role.path }}
+ {{ role.arn }}
+ {{ role.name }}
+ {{ role.assume_role_policy_document }}
+ {{ role.create_date }}
+ {{ role.id }}
+
+ {% endfor %}
+
+ {{ profile.name }}
+ {{ profile.path }}
+ {{ profile.arn }}
+ {{ profile.create_date }}
{% endfor %}
@@ -1445,7 +1477,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """{{ role.arn }}
{{ role.name }}
{{ role.assume_role_policy_document }}
- 2014-07-30T17:09:20Z
+ {{ role.create_date }}
{{ role.id }}
{% endfor %}
@@ -1474,9 +1506,9 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """
{{ policy.arn }}
1
- 2012-05-09T16:27:11Z
+ {{ policy.create_datetime }}
true
- 2012-05-09T16:27:11Z
+ {{ policy.update_datetime }}
{% endfor %}
@@ -1485,3 +1517,53 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """92e79ae7-7399-11e4-8c85-4b53eEXAMPLE
"""
+
+
+UPLOAD_SIGNING_CERTIFICATE_TEMPLATE = """
+
+
+ {{ cert.user_name }}
+ {{ cert.id }}
+ {{ cert.body }}
+ {{ cert.status }}
+
+
+
+ 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE
+
+"""
+
+
+UPDATE_SIGNING_CERTIFICATE_TEMPLATE = """
+
+ EXAMPLE8-90ab-cdef-fedc-ba987EXAMPLE
+
+"""
+
+
+DELETE_SIGNING_CERTIFICATE_TEMPLATE = """
+
+ 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE
+
+"""
+
+
+LIST_SIGNING_CERTIFICATES_TEMPLATE = """
+
+ {{ user_name }}
+
+ {% for cert in certificates %}
+
+ {{ user_name }}
+ {{ cert.id }}
+ {{ cert.body }}
+ {{ cert.status }}
+
+ {% endfor %}
+
+ false
+
+
+ 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE
+
+"""
diff --git a/moto/iam/utils.py b/moto/iam/utils.py
index 1fae85a6c..f59bdfffe 100644
--- a/moto/iam/utils.py
+++ b/moto/iam/utils.py
@@ -12,8 +12,7 @@ def random_alphanumeric(length):
)
-def random_resource_id():
- size = 20
+def random_resource_id(size=20):
chars = list(range(10)) + list(string.ascii_lowercase)
return ''.join(six.text_type(random.choice(chars)) for x in range(size))
diff --git a/moto/iot/exceptions.py b/moto/iot/exceptions.py
index 7bbdb706d..3af3751d9 100644
--- a/moto/iot/exceptions.py
+++ b/moto/iot/exceptions.py
@@ -31,3 +31,20 @@ class VersionConflictException(IoTClientError):
'VersionConflictException',
'The version for thing %s does not match the expected version.' % name
)
+
+
+class CertificateStateException(IoTClientError):
+ def __init__(self, msg, cert_id):
+ self.code = 406
+ super(CertificateStateException, self).__init__(
+ 'CertificateStateException',
+ '%s Id: %s' % (msg, cert_id)
+ )
+
+
+class DeleteConflictException(IoTClientError):
+ def __init__(self, msg):
+ self.code = 409
+ super(DeleteConflictException, self).__init__(
+ 'DeleteConflictException', msg
+ )
diff --git a/moto/iot/models.py b/moto/iot/models.py
index c36bb985f..b493f6b8d 100644
--- a/moto/iot/models.py
+++ b/moto/iot/models.py
@@ -13,6 +13,8 @@ import boto3
from moto.core import BaseBackend, BaseModel
from .exceptions import (
+ CertificateStateException,
+ DeleteConflictException,
ResourceNotFoundException,
InvalidRequestException,
VersionConflictException
@@ -378,7 +380,25 @@ class IoTBackend(BaseBackend):
return certificate, key_pair
def delete_certificate(self, certificate_id):
- self.describe_certificate(certificate_id)
+ cert = self.describe_certificate(certificate_id)
+ if cert.status == 'ACTIVE':
+ raise CertificateStateException(
+ 'Certificate must be deactivated (not ACTIVE) before deletion.', certificate_id)
+
+ certs = [k[0] for k, v in self.principal_things.items()
+ if self._get_principal(k[0]).certificate_id == certificate_id]
+ if len(certs) > 0:
+ raise DeleteConflictException(
+ 'Things must be detached before deletion (arn: %s)' % certs[0]
+ )
+
+ certs = [k[0] for k, v in self.principal_policies.items()
+ if self._get_principal(k[0]).certificate_id == certificate_id]
+ if len(certs) > 0:
+ raise DeleteConflictException(
+ 'Certificate policies must be detached before deletion (arn: %s)' % certs[0]
+ )
+
del self.certificates[certificate_id]
def describe_certificate(self, certificate_id):
@@ -411,6 +431,14 @@ class IoTBackend(BaseBackend):
return policies[0]
def delete_policy(self, policy_name):
+
+ policies = [k[1] for k, v in self.principal_policies.items() if k[1] == policy_name]
+ if len(policies) > 0:
+ raise DeleteConflictException(
+ 'The policy cannot be deleted as the policy is attached to one or more principals (name=%s)'
+ % policy_name
+ )
+
policy = self.get_policy(policy_name)
del self.policies[policy.name]
@@ -429,6 +457,14 @@ class IoTBackend(BaseBackend):
pass
raise ResourceNotFoundException()
+ def attach_policy(self, policy_name, target):
+ principal = self._get_principal(target)
+ policy = self.get_policy(policy_name)
+ k = (target, policy_name)
+ if k in self.principal_policies:
+ return
+ self.principal_policies[k] = (principal, policy)
+
def attach_principal_policy(self, policy_name, principal_arn):
principal = self._get_principal(principal_arn)
policy = self.get_policy(policy_name)
@@ -437,6 +473,15 @@ class IoTBackend(BaseBackend):
return
self.principal_policies[k] = (principal, policy)
+ def detach_policy(self, policy_name, target):
+ # this may raise ResourceNotFoundException
+ self._get_principal(target)
+ self.get_policy(policy_name)
+ k = (target, policy_name)
+ if k not in self.principal_policies:
+ raise ResourceNotFoundException()
+ del self.principal_policies[k]
+
def detach_principal_policy(self, policy_name, principal_arn):
# this may raises ResourceNotFoundException
self._get_principal(principal_arn)
diff --git a/moto/iot/responses.py b/moto/iot/responses.py
index 006c4c4cc..214576f52 100644
--- a/moto/iot/responses.py
+++ b/moto/iot/responses.py
@@ -224,6 +224,15 @@ class IoTResponse(BaseResponse):
)
return json.dumps(dict())
+ def attach_policy(self):
+ policy_name = self._get_param("policyName")
+ target = self._get_param('target')
+ self.iot_backend.attach_policy(
+ policy_name=policy_name,
+ target=target,
+ )
+ return json.dumps(dict())
+
def attach_principal_policy(self):
policy_name = self._get_param("policyName")
principal = self.headers.get('x-amzn-iot-principal')
@@ -233,6 +242,15 @@ class IoTResponse(BaseResponse):
)
return json.dumps(dict())
+ def detach_policy(self):
+ policy_name = self._get_param("policyName")
+ target = self._get_param('target')
+ self.iot_backend.detach_policy(
+ policy_name=policy_name,
+ target=target,
+ )
+ return json.dumps(dict())
+
def detach_principal_policy(self):
policy_name = self._get_param("policyName")
principal = self.headers.get('x-amzn-iot-principal')
diff --git a/moto/s3/exceptions.py b/moto/s3/exceptions.py
index 26515dfd2..c7d82ddfd 100644
--- a/moto/s3/exceptions.py
+++ b/moto/s3/exceptions.py
@@ -178,3 +178,13 @@ class InvalidStorageClass(S3ClientError):
"InvalidStorageClass",
"The storage class you specified is not valid",
*args, **kwargs)
+
+
+class DuplicateTagKeys(S3ClientError):
+ code = 400
+
+ def __init__(self, *args, **kwargs):
+ super(DuplicateTagKeys, self).__init__(
+ "InvalidTag",
+ "Cannot provide multiple Tags with the same key",
+ *args, **kwargs)
diff --git a/moto/s3/models.py b/moto/s3/models.py
index bb4d7848c..fd53417fa 100644
--- a/moto/s3/models.py
+++ b/moto/s3/models.py
@@ -15,7 +15,7 @@ from bisect import insort
from moto.core import BaseBackend, BaseModel
from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime
from .exceptions import BucketAlreadyExists, MissingBucket, InvalidPart, EntityTooSmall, MissingKey, \
- InvalidNotificationDestination, MalformedXML, InvalidStorageClass
+ InvalidNotificationDestination, MalformedXML, InvalidStorageClass, DuplicateTagKeys
from .utils import clean_key_name, _VersionedKeyStore
UPLOAD_ID_BYTES = 43
@@ -773,6 +773,9 @@ class S3Backend(BaseBackend):
return key
def put_bucket_tagging(self, bucket_name, tagging):
+ tag_keys = [tag.key for tag in tagging.tag_set.tags]
+ if len(tag_keys) != len(set(tag_keys)):
+ raise DuplicateTagKeys()
bucket = self.get_bucket(bucket_name)
bucket.set_tags(tagging)
diff --git a/moto/secretsmanager/models.py b/moto/secretsmanager/models.py
index 1404a0ec8..1350ab469 100644
--- a/moto/secretsmanager/models.py
+++ b/moto/secretsmanager/models.py
@@ -2,6 +2,7 @@ from __future__ import unicode_literals
import time
import json
+import uuid
import boto3
@@ -18,10 +19,6 @@ class SecretsManager(BaseModel):
def __init__(self, region_name, **kwargs):
self.region = region_name
- self.secret_id = kwargs.get('secret_id', '')
- self.version_id = kwargs.get('version_id', '')
- self.version_stage = kwargs.get('version_stage', '')
- self.secret_string = ''
class SecretsManagerBackend(BaseBackend):
@@ -29,14 +26,7 @@ class SecretsManagerBackend(BaseBackend):
def __init__(self, region_name=None, **kwargs):
super(SecretsManagerBackend, self).__init__()
self.region = region_name
- self.secret_id = kwargs.get('secret_id', '')
- self.name = kwargs.get('name', '')
- self.createdate = int(time.time())
- self.secret_string = ''
- self.rotation_enabled = False
- self.rotation_lambda_arn = ''
- self.auto_rotate_after_days = 0
- self.version_id = ''
+ self.secrets = {}
def reset(self):
region_name = self.region
@@ -44,36 +34,50 @@ class SecretsManagerBackend(BaseBackend):
self.__init__(region_name)
def _is_valid_identifier(self, identifier):
- return identifier in (self.name, self.secret_id)
+ return identifier in self.secrets
def get_secret_value(self, secret_id, version_id, version_stage):
if not self._is_valid_identifier(secret_id):
raise ResourceNotFoundException()
+ secret = self.secrets[secret_id]
+
response = json.dumps({
- "ARN": secret_arn(self.region, self.secret_id),
- "Name": self.name,
- "VersionId": "A435958A-D821-4193-B719-B7769357AER4",
- "SecretString": self.secret_string,
+ "ARN": secret_arn(self.region, secret['secret_id']),
+ "Name": secret['name'],
+ "VersionId": secret['version_id'],
+ "SecretString": secret['secret_string'],
"VersionStages": [
"AWSCURRENT",
],
- "CreatedDate": "2018-05-23 13:16:57.198000"
+ "CreatedDate": secret['createdate']
})
return response
- def create_secret(self, name, secret_string, **kwargs):
+ def create_secret(self, name, secret_string, tags, **kwargs):
- self.secret_string = secret_string
- self.secret_id = name
- self.name = name
+ generated_version_id = str(uuid.uuid4())
+
+ secret = {
+ 'secret_string': secret_string,
+ 'secret_id': name,
+ 'name': name,
+ 'createdate': int(time.time()),
+ 'rotation_enabled': False,
+ 'rotation_lambda_arn': '',
+ 'auto_rotate_after_days': 0,
+ 'version_id': generated_version_id,
+ 'tags': tags
+ }
+
+ self.secrets[name] = secret
response = json.dumps({
"ARN": secret_arn(self.region, name),
- "Name": self.name,
- "VersionId": "A435958A-D821-4193-B719-B7769357AER4",
+ "Name": name,
+ "VersionId": generated_version_id,
})
return response
@@ -82,26 +86,23 @@ class SecretsManagerBackend(BaseBackend):
if not self._is_valid_identifier(secret_id):
raise ResourceNotFoundException
+ secret = self.secrets[secret_id]
+
response = json.dumps({
- "ARN": secret_arn(self.region, self.secret_id),
- "Name": self.name,
+ "ARN": secret_arn(self.region, secret['secret_id']),
+ "Name": secret['name'],
"Description": "",
"KmsKeyId": "",
- "RotationEnabled": self.rotation_enabled,
- "RotationLambdaARN": self.rotation_lambda_arn,
+ "RotationEnabled": secret['rotation_enabled'],
+ "RotationLambdaARN": secret['rotation_lambda_arn'],
"RotationRules": {
- "AutomaticallyAfterDays": self.auto_rotate_after_days
+ "AutomaticallyAfterDays": secret['auto_rotate_after_days']
},
"LastRotatedDate": None,
"LastChangedDate": None,
"LastAccessedDate": None,
"DeletedDate": None,
- "Tags": [
- {
- "Key": "",
- "Value": ""
- },
- ]
+ "Tags": secret['tags']
})
return response
@@ -141,17 +142,19 @@ class SecretsManagerBackend(BaseBackend):
)
raise InvalidParameterException(msg)
- self.version_id = client_request_token or ''
- self.rotation_lambda_arn = rotation_lambda_arn or ''
+ secret = self.secrets[secret_id]
+
+ secret['version_id'] = client_request_token or ''
+ secret['rotation_lambda_arn'] = rotation_lambda_arn or ''
if rotation_rules:
- self.auto_rotate_after_days = rotation_rules.get(rotation_days, 0)
- if self.auto_rotate_after_days > 0:
- self.rotation_enabled = True
+ secret['auto_rotate_after_days'] = rotation_rules.get(rotation_days, 0)
+ if secret['auto_rotate_after_days'] > 0:
+ secret['rotation_enabled'] = True
response = json.dumps({
- "ARN": secret_arn(self.region, self.secret_id),
- "Name": self.name,
- "VersionId": self.version_id
+ "ARN": secret_arn(self.region, secret['secret_id']),
+ "Name": secret['name'],
+ "VersionId": secret['version_id']
})
return response
diff --git a/moto/secretsmanager/responses.py b/moto/secretsmanager/responses.py
index b8b6872a8..932e7bfd7 100644
--- a/moto/secretsmanager/responses.py
+++ b/moto/secretsmanager/responses.py
@@ -19,9 +19,11 @@ class SecretsManagerResponse(BaseResponse):
def create_secret(self):
name = self._get_param('Name')
secret_string = self._get_param('SecretString')
+ tags = self._get_param('Tags', if_none=[])
return secretsmanager_backends[self.region].create_secret(
name=name,
- secret_string=secret_string
+ secret_string=secret_string,
+ tags=tags
)
def get_random_password(self):
diff --git a/moto/secretsmanager/utils.py b/moto/secretsmanager/utils.py
index 2cb92020a..231fea296 100644
--- a/moto/secretsmanager/utils.py
+++ b/moto/secretsmanager/utils.py
@@ -52,8 +52,9 @@ def random_password(password_length, exclude_characters, exclude_numbers,
def secret_arn(region, secret_id):
- return "arn:aws:secretsmanager:{0}:1234567890:secret:{1}-rIjad".format(
- region, secret_id)
+ id_string = ''.join(random.choice(string.ascii_letters) for _ in range(5))
+ return "arn:aws:secretsmanager:{0}:1234567890:secret:{1}-{2}".format(
+ region, secret_id, id_string)
def _exclude_characters(password, exclude_characters):
diff --git a/moto/server.py b/moto/server.py
index ba2470478..5ad02d383 100644
--- a/moto/server.py
+++ b/moto/server.py
@@ -80,10 +80,13 @@ class DomainDispatcherApplication(object):
region = 'us-east-1'
if service == 'dynamodb':
- dynamo_api_version = environ['HTTP_X_AMZ_TARGET'].split("_")[1].split(".")[0]
- # If Newer API version, use dynamodb2
- if dynamo_api_version > "20111205":
- host = "dynamodb2"
+ if environ['HTTP_X_AMZ_TARGET'].startswith('DynamoDBStreams'):
+ host = 'dynamodbstreams'
+ else:
+ dynamo_api_version = environ['HTTP_X_AMZ_TARGET'].split("_")[1].split(".")[0]
+ # If Newer API version, use dynamodb2
+ if dynamo_api_version > "20111205":
+ host = "dynamodb2"
else:
host = "{service}.{region}.amazonaws.com".format(
service=service, region=region)
diff --git a/moto/sqs/models.py b/moto/sqs/models.py
index f3262a988..1404ded75 100644
--- a/moto/sqs/models.py
+++ b/moto/sqs/models.py
@@ -534,7 +534,7 @@ class SQSBackend(BaseBackend):
break
import time
- time.sleep(0.001)
+ time.sleep(0.01)
continue
previous_result_count = len(result)
diff --git a/setup.py b/setup.py
index a1b8c5dae..ce4fe27fa 100755
--- a/setup.py
+++ b/setup.py
@@ -1,10 +1,23 @@
#!/usr/bin/env python
from __future__ import unicode_literals
+import codecs
+import os
+import re
import setuptools
from setuptools import setup, find_packages
import sys
+# Borrowed from pip at https://github.com/pypa/pip/blob/62c27dee45625e1b63d1e023b0656310f276e050/setup.py#L11-L15
+here = os.path.abspath(os.path.dirname(__file__))
+
+def read(*parts):
+ # intentionally *not* adding an encoding option to open, See:
+ # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
+ with codecs.open(os.path.join(here, *parts), 'r') as fp:
+ return fp.read()
+
+
install_requires = [
"Jinja2>=2.7.3",
"boto>=2.36.0",
@@ -18,12 +31,13 @@ install_requires = [
"pyaml",
"pytz",
"python-dateutil<3.0.0,>=2.1",
- "python-jose<3.0.0",
+ "python-jose<4.0.0",
"mock",
"docker>=2.5.1",
- "jsondiff==1.1.1",
+ "jsondiff==1.1.2",
"aws-xray-sdk!=0.96,>=0.93",
"responses>=0.9.0",
+ "cfn-lint"
]
extras_require = {
@@ -43,6 +57,8 @@ setup(
version='1.3.7',
description='A library that allows your python tests to easily'
' mock out the boto library',
+ long_description=read('README.md'),
+ long_description_content_type='text/markdown',
author='Steve Pulec',
author_email='spulec@gmail.com',
url='https://github.com/spulec/moto',
diff --git a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py
index 9bfae6174..064e0fb33 100644
--- a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py
+++ b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py
@@ -391,6 +391,9 @@ def test_create_change_set_from_s3_url():
TemplateURL=key_url,
ChangeSetName='NewChangeSet',
ChangeSetType='CREATE',
+ Tags=[
+ {'Key': 'tag-key', 'Value': 'tag-value'}
+ ],
)
assert 'arn:aws:cloudformation:us-west-1:123456789:changeSet/NewChangeSet/' in response['Id']
assert 'arn:aws:cloudformation:us-east-1:123456789:stack/NewStack' in response['StackId']
diff --git a/tests/test_cloudformation/test_validate.py b/tests/test_cloudformation/test_validate.py
new file mode 100644
index 000000000..e2c3af05d
--- /dev/null
+++ b/tests/test_cloudformation/test_validate.py
@@ -0,0 +1,115 @@
+from collections import OrderedDict
+import json
+import yaml
+import os
+import boto3
+from nose.tools import raises
+import botocore
+
+
+from moto.cloudformation.exceptions import ValidationError
+from moto.cloudformation.models import FakeStack
+from moto.cloudformation.parsing import resource_class_from_type, parse_condition, Export
+from moto.sqs.models import Queue
+from moto.s3.models import FakeBucket
+from moto.cloudformation.utils import yaml_tag_constructor
+from boto.cloudformation.stack import Output
+from moto import mock_cloudformation, mock_s3, mock_sqs, mock_ec2
+
+json_template = {
+ "AWSTemplateFormatVersion": "2010-09-09",
+ "Description": "Stack 1",
+ "Resources": {
+ "EC2Instance1": {
+ "Type": "AWS::EC2::Instance",
+ "Properties": {
+ "ImageId": "ami-d3adb33f",
+ "KeyName": "dummy",
+ "InstanceType": "t2.micro",
+ "Tags": [
+ {
+ "Key": "Description",
+ "Value": "Test tag"
+ },
+ {
+ "Key": "Name",
+ "Value": "Name tag for tests"
+ }
+ ]
+ }
+ }
+ }
+}
+
+# One resource is required
+json_bad_template = {
+ "AWSTemplateFormatVersion": "2010-09-09",
+ "Description": "Stack 1"
+}
+
+dummy_template_json = json.dumps(json_template)
+dummy_bad_template_json = json.dumps(json_bad_template)
+
+
+@mock_cloudformation
+def test_boto3_json_validate_successful():
+ cf_conn = boto3.client('cloudformation', region_name='us-east-1')
+ response = cf_conn.validate_template(
+ TemplateBody=dummy_template_json,
+ )
+ assert response['Description'] == "Stack 1"
+ assert response['Parameters'] == []
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+
+@mock_cloudformation
+def test_boto3_json_invalid_missing_resource():
+ cf_conn = boto3.client('cloudformation', region_name='us-east-1')
+ try:
+ cf_conn.validate_template(
+ TemplateBody=dummy_bad_template_json,
+ )
+ assert False
+ except botocore.exceptions.ClientError as e:
+ assert str(e) == 'An error occurred (ValidationError) when calling the ValidateTemplate operation: Stack' \
+ ' with id Missing top level item Resources to file module does not exist'
+ assert True
+
+
+yaml_template = """
+ AWSTemplateFormatVersion: '2010-09-09'
+ Description: Simple CloudFormation Test Template
+ Resources:
+ S3Bucket:
+ Type: AWS::S3::Bucket
+ Properties:
+ AccessControl: PublicRead
+ BucketName: cf-test-bucket-1
+"""
+
+yaml_bad_template = """
+ AWSTemplateFormatVersion: '2010-09-09'
+ Description: Simple CloudFormation Test Template
+"""
+
+@mock_cloudformation
+def test_boto3_yaml_validate_successful():
+ cf_conn = boto3.client('cloudformation', region_name='us-east-1')
+ response = cf_conn.validate_template(
+ TemplateBody=yaml_template,
+ )
+ assert response['Description'] == "Simple CloudFormation Test Template"
+ assert response['Parameters'] == []
+ assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+
+@mock_cloudformation
+def test_boto3_yaml_invalid_missing_resource():
+ cf_conn = boto3.client('cloudformation', region_name='us-east-1')
+ try:
+ cf_conn.validate_template(
+ TemplateBody=yaml_bad_template,
+ )
+ assert False
+ except botocore.exceptions.ClientError as e:
+ assert str(e) == 'An error occurred (ValidationError) when calling the ValidateTemplate operation: Stack' \
+ ' with id Missing top level item Resources to file module does not exist'
+ assert True
diff --git a/tests/test_cognitoidp/test_cognitoidp.py b/tests/test_cognitoidp/test_cognitoidp.py
index f72a44762..0ef082d5c 100644
--- a/tests/test_cognitoidp/test_cognitoidp.py
+++ b/tests/test_cognitoidp/test_cognitoidp.py
@@ -1,14 +1,18 @@
from __future__ import unicode_literals
-import boto3
import json
import os
+import random
import uuid
+import boto3
+# noinspection PyUnresolvedReferences
+import sure # noqa
+from botocore.exceptions import ClientError
from jose import jws
+from nose.tools import assert_raises
from moto import mock_cognitoidp
-import sure # noqa
@mock_cognitoidp
@@ -41,6 +45,56 @@ def test_list_user_pools():
result["UserPools"][0]["Name"].should.equal(name)
+@mock_cognitoidp
+def test_list_user_pools_returns_max_items():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ # Given 10 user pools
+ pool_count = 10
+ for i in range(pool_count):
+ conn.create_user_pool(PoolName=str(uuid.uuid4()))
+
+ max_results = 5
+ result = conn.list_user_pools(MaxResults=max_results)
+ result["UserPools"].should.have.length_of(max_results)
+ result.should.have.key("NextToken")
+
+
+@mock_cognitoidp
+def test_list_user_pools_returns_next_tokens():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ # Given 10 user pool clients
+ pool_count = 10
+ for i in range(pool_count):
+ conn.create_user_pool(PoolName=str(uuid.uuid4()))
+
+ max_results = 5
+ result = conn.list_user_pools(MaxResults=max_results)
+ result["UserPools"].should.have.length_of(max_results)
+ result.should.have.key("NextToken")
+
+ next_token = result["NextToken"]
+ result_2 = conn.list_user_pools(MaxResults=max_results, NextToken=next_token)
+ result_2["UserPools"].should.have.length_of(max_results)
+ result_2.shouldnt.have.key("NextToken")
+
+
+@mock_cognitoidp
+def test_list_user_pools_when_max_items_more_than_total_items():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ # Given 10 user pool clients
+ pool_count = 10
+ for i in range(pool_count):
+ conn.create_user_pool(PoolName=str(uuid.uuid4()))
+
+ max_results = pool_count + 5
+ result = conn.list_user_pools(MaxResults=max_results)
+ result["UserPools"].should.have.length_of(pool_count)
+ result.shouldnt.have.key("NextToken")
+
+
@mock_cognitoidp
def test_describe_user_pool():
conn = boto3.client("cognito-idp", "us-west-2")
@@ -140,6 +194,67 @@ def test_list_user_pool_clients():
result["UserPoolClients"][0]["ClientName"].should.equal(client_name)
+@mock_cognitoidp
+def test_list_user_pool_clients_returns_max_items():
+ conn = boto3.client("cognito-idp", "us-west-2")
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+
+ # Given 10 user pool clients
+ client_count = 10
+ for i in range(client_count):
+ client_name = str(uuid.uuid4())
+ conn.create_user_pool_client(UserPoolId=user_pool_id,
+ ClientName=client_name)
+ max_results = 5
+ result = conn.list_user_pool_clients(UserPoolId=user_pool_id,
+ MaxResults=max_results)
+ result["UserPoolClients"].should.have.length_of(max_results)
+ result.should.have.key("NextToken")
+
+
+@mock_cognitoidp
+def test_list_user_pool_clients_returns_next_tokens():
+ conn = boto3.client("cognito-idp", "us-west-2")
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+
+ # Given 10 user pool clients
+ client_count = 10
+ for i in range(client_count):
+ client_name = str(uuid.uuid4())
+ conn.create_user_pool_client(UserPoolId=user_pool_id,
+ ClientName=client_name)
+ max_results = 5
+ result = conn.list_user_pool_clients(UserPoolId=user_pool_id,
+ MaxResults=max_results)
+ result["UserPoolClients"].should.have.length_of(max_results)
+ result.should.have.key("NextToken")
+
+ next_token = result["NextToken"]
+ result_2 = conn.list_user_pool_clients(UserPoolId=user_pool_id,
+ MaxResults=max_results,
+ NextToken=next_token)
+ result_2["UserPoolClients"].should.have.length_of(max_results)
+ result_2.shouldnt.have.key("NextToken")
+
+
+@mock_cognitoidp
+def test_list_user_pool_clients_when_max_items_more_than_total_items():
+ conn = boto3.client("cognito-idp", "us-west-2")
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+
+ # Given 10 user pool clients
+ client_count = 10
+ for i in range(client_count):
+ client_name = str(uuid.uuid4())
+ conn.create_user_pool_client(UserPoolId=user_pool_id,
+ ClientName=client_name)
+ max_results = client_count + 5
+ result = conn.list_user_pool_clients(UserPoolId=user_pool_id,
+ MaxResults=max_results)
+ result["UserPoolClients"].should.have.length_of(client_count)
+ result.shouldnt.have.key("NextToken")
+
+
@mock_cognitoidp
def test_describe_user_pool_client():
conn = boto3.client("cognito-idp", "us-west-2")
@@ -264,6 +379,83 @@ def test_list_identity_providers():
result["Providers"][0]["ProviderType"].should.equal(provider_type)
+@mock_cognitoidp
+def test_list_identity_providers_returns_max_items():
+ conn = boto3.client("cognito-idp", "us-west-2")
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+
+ # Given 10 identity providers linked to a user pool
+ identity_provider_count = 10
+ for i in range(identity_provider_count):
+ provider_name = str(uuid.uuid4())
+ provider_type = "Facebook"
+ conn.create_identity_provider(
+ UserPoolId=user_pool_id,
+ ProviderName=provider_name,
+ ProviderType=provider_type,
+ ProviderDetails={},
+ )
+
+ max_results = 5
+ result = conn.list_identity_providers(UserPoolId=user_pool_id,
+ MaxResults=max_results)
+ result["Providers"].should.have.length_of(max_results)
+ result.should.have.key("NextToken")
+
+
+@mock_cognitoidp
+def test_list_identity_providers_returns_next_tokens():
+ conn = boto3.client("cognito-idp", "us-west-2")
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+
+ # Given 10 identity providers linked to a user pool
+ identity_provider_count = 10
+ for i in range(identity_provider_count):
+ provider_name = str(uuid.uuid4())
+ provider_type = "Facebook"
+ conn.create_identity_provider(
+ UserPoolId=user_pool_id,
+ ProviderName=provider_name,
+ ProviderType=provider_type,
+ ProviderDetails={},
+ )
+
+ max_results = 5
+ result = conn.list_identity_providers(UserPoolId=user_pool_id, MaxResults=max_results)
+ result["Providers"].should.have.length_of(max_results)
+ result.should.have.key("NextToken")
+
+ next_token = result["NextToken"]
+ result_2 = conn.list_identity_providers(UserPoolId=user_pool_id,
+ MaxResults=max_results,
+ NextToken=next_token)
+ result_2["Providers"].should.have.length_of(max_results)
+ result_2.shouldnt.have.key("NextToken")
+
+
+@mock_cognitoidp
+def test_list_identity_providers_when_max_items_more_than_total_items():
+ conn = boto3.client("cognito-idp", "us-west-2")
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+
+ # Given 10 identity providers linked to a user pool
+ identity_provider_count = 10
+ for i in range(identity_provider_count):
+ provider_name = str(uuid.uuid4())
+ provider_type = "Facebook"
+ conn.create_identity_provider(
+ UserPoolId=user_pool_id,
+ ProviderName=provider_name,
+ ProviderType=provider_type,
+ ProviderDetails={},
+ )
+
+ max_results = identity_provider_count + 5
+ result = conn.list_identity_providers(UserPoolId=user_pool_id, MaxResults=max_results)
+ result["Providers"].should.have.length_of(identity_provider_count)
+ result.shouldnt.have.key("NextToken")
+
+
@mock_cognitoidp
def test_describe_identity_providers():
conn = boto3.client("cognito-idp", "us-west-2")
@@ -323,6 +515,245 @@ def test_delete_identity_providers():
caught.should.be.true
+@mock_cognitoidp
+def test_create_group():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ group_name = str(uuid.uuid4())
+ description = str(uuid.uuid4())
+ role_arn = "arn:aws:iam:::role/my-iam-role"
+ precedence = random.randint(0, 100000)
+
+ result = conn.create_group(
+ GroupName=group_name,
+ UserPoolId=user_pool_id,
+ Description=description,
+ RoleArn=role_arn,
+ Precedence=precedence,
+ )
+
+ result["Group"]["GroupName"].should.equal(group_name)
+ result["Group"]["UserPoolId"].should.equal(user_pool_id)
+ result["Group"]["Description"].should.equal(description)
+ result["Group"]["RoleArn"].should.equal(role_arn)
+ result["Group"]["Precedence"].should.equal(precedence)
+ result["Group"]["LastModifiedDate"].should.be.a("datetime.datetime")
+ result["Group"]["CreationDate"].should.be.a("datetime.datetime")
+
+
+@mock_cognitoidp
+def test_create_group_with_duplicate_name_raises_error():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ group_name = str(uuid.uuid4())
+
+ conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
+
+ with assert_raises(ClientError) as cm:
+ conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
+ cm.exception.operation_name.should.equal('CreateGroup')
+ cm.exception.response['Error']['Code'].should.equal('GroupExistsException')
+ cm.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400)
+
+
+@mock_cognitoidp
+def test_get_group():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ group_name = str(uuid.uuid4())
+ conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
+
+ result = conn.get_group(GroupName=group_name, UserPoolId=user_pool_id)
+
+ result["Group"]["GroupName"].should.equal(group_name)
+ result["Group"]["UserPoolId"].should.equal(user_pool_id)
+ result["Group"]["LastModifiedDate"].should.be.a("datetime.datetime")
+ result["Group"]["CreationDate"].should.be.a("datetime.datetime")
+
+
+@mock_cognitoidp
+def test_list_groups():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ group_name = str(uuid.uuid4())
+ conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
+
+ result = conn.list_groups(UserPoolId=user_pool_id)
+
+ result["Groups"].should.have.length_of(1)
+ result["Groups"][0]["GroupName"].should.equal(group_name)
+
+
+@mock_cognitoidp
+def test_delete_group():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ group_name = str(uuid.uuid4())
+ conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
+
+ result = conn.delete_group(GroupName=group_name, UserPoolId=user_pool_id)
+ list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected
+
+ with assert_raises(ClientError) as cm:
+ conn.get_group(GroupName=group_name, UserPoolId=user_pool_id)
+ cm.exception.response['Error']['Code'].should.equal('ResourceNotFoundException')
+
+
+@mock_cognitoidp
+def test_admin_add_user_to_group():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ group_name = str(uuid.uuid4())
+ conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
+
+ username = str(uuid.uuid4())
+ conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
+
+ result = conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
+ list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected
+
+
+@mock_cognitoidp
+def test_admin_add_user_to_group_again_is_noop():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ group_name = str(uuid.uuid4())
+ conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
+
+ username = str(uuid.uuid4())
+ conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
+
+ conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
+ conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
+
+
+@mock_cognitoidp
+def test_list_users_in_group():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ group_name = str(uuid.uuid4())
+ conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
+
+ username = str(uuid.uuid4())
+ conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
+
+ conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
+
+ result = conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name)
+
+ result["Users"].should.have.length_of(1)
+ result["Users"][0]["Username"].should.equal(username)
+
+
+@mock_cognitoidp
+def test_list_users_in_group_ignores_deleted_user():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ group_name = str(uuid.uuid4())
+ conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
+
+ username = str(uuid.uuid4())
+ conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
+ username2 = str(uuid.uuid4())
+ conn.admin_create_user(UserPoolId=user_pool_id, Username=username2)
+
+ conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
+ conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username2, GroupName=group_name)
+ conn.admin_delete_user(UserPoolId=user_pool_id, Username=username)
+
+ result = conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name)
+
+ result["Users"].should.have.length_of(1)
+ result["Users"][0]["Username"].should.equal(username2)
+
+
+@mock_cognitoidp
+def test_admin_list_groups_for_user():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ group_name = str(uuid.uuid4())
+ conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
+
+ username = str(uuid.uuid4())
+ conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
+
+ conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
+
+ result = conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id)
+
+ result["Groups"].should.have.length_of(1)
+ result["Groups"][0]["GroupName"].should.equal(group_name)
+
+
+@mock_cognitoidp
+def test_admin_list_groups_for_user_ignores_deleted_group():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ group_name = str(uuid.uuid4())
+ conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
+ group_name2 = str(uuid.uuid4())
+ conn.create_group(GroupName=group_name2, UserPoolId=user_pool_id)
+
+ username = str(uuid.uuid4())
+ conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
+
+ conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
+ conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name2)
+ conn.delete_group(GroupName=group_name, UserPoolId=user_pool_id)
+
+ result = conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id)
+
+ result["Groups"].should.have.length_of(1)
+ result["Groups"][0]["GroupName"].should.equal(group_name2)
+
+
+@mock_cognitoidp
+def test_admin_remove_user_from_group():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ group_name = str(uuid.uuid4())
+ conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
+
+ username = str(uuid.uuid4())
+ conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
+
+ conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
+
+ result = conn.admin_remove_user_from_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
+ list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected
+ conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name) \
+ ["Users"].should.have.length_of(0)
+ conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id) \
+ ["Groups"].should.have.length_of(0)
+
+
+@mock_cognitoidp
+def test_admin_remove_user_from_group_again_is_noop():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ group_name = str(uuid.uuid4())
+ conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
+
+ username = str(uuid.uuid4())
+ conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
+
+    conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
+    conn.admin_remove_user_from_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
+    conn.admin_remove_user_from_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
+
@mock_cognitoidp
def test_admin_create_user():
conn = boto3.client("cognito-idp", "us-west-2")
@@ -396,6 +827,62 @@ def test_list_users():
result["Users"][0]["Username"].should.equal(username)
+@mock_cognitoidp
+def test_list_users_returns_limit_items():
+ conn = boto3.client("cognito-idp", "us-west-2")
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+
+ # Given 10 users
+ user_count = 10
+ for i in range(user_count):
+ conn.admin_create_user(UserPoolId=user_pool_id,
+ Username=str(uuid.uuid4()))
+ max_results = 5
+ result = conn.list_users(UserPoolId=user_pool_id, Limit=max_results)
+ result["Users"].should.have.length_of(max_results)
+ result.should.have.key("PaginationToken")
+
+
+@mock_cognitoidp
+def test_list_users_returns_pagination_tokens():
+ conn = boto3.client("cognito-idp", "us-west-2")
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+
+ # Given 10 users
+ user_count = 10
+ for i in range(user_count):
+ conn.admin_create_user(UserPoolId=user_pool_id,
+ Username=str(uuid.uuid4()))
+
+ max_results = 5
+ result = conn.list_users(UserPoolId=user_pool_id, Limit=max_results)
+ result["Users"].should.have.length_of(max_results)
+ result.should.have.key("PaginationToken")
+
+ next_token = result["PaginationToken"]
+ result_2 = conn.list_users(UserPoolId=user_pool_id,
+ Limit=max_results, PaginationToken=next_token)
+ result_2["Users"].should.have.length_of(max_results)
+ result_2.shouldnt.have.key("PaginationToken")
+
+
+@mock_cognitoidp
+def test_list_users_when_limit_more_than_total_items():
+ conn = boto3.client("cognito-idp", "us-west-2")
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+
+ # Given 10 users
+ user_count = 10
+ for i in range(user_count):
+ conn.admin_create_user(UserPoolId=user_pool_id,
+ Username=str(uuid.uuid4()))
+
+ max_results = user_count + 5
+ result = conn.list_users(UserPoolId=user_pool_id, Limit=max_results)
+ result["Users"].should.have.length_of(user_count)
+ result.shouldnt.have.key("PaginationToken")
+
+
@mock_cognitoidp
def test_admin_disable_user():
conn = boto3.client("cognito-idp", "us-west-2")
diff --git a/tests/test_dynamodb2/test_dynamodb.py b/tests/test_dynamodb2/test_dynamodb.py
index afc919dd7..fb6c2a52b 100644
--- a/tests/test_dynamodb2/test_dynamodb.py
+++ b/tests/test_dynamodb2/test_dynamodb.py
@@ -1000,6 +1000,11 @@ def test_delete_item():
response = table.scan()
assert response['Count'] == 2
+ # Test ReturnValues validation
+    with assert_raises(ClientError):
+        table.delete_item(Key={'client': 'client1', 'app': 'app1'},
+                          ReturnValues='ALL_NEW')
+
# Test deletion and returning old value
response = table.delete_item(Key={'client': 'client1', 'app': 'app1'}, ReturnValues='ALL_OLD')
response['Attributes'].should.contain('client')
@@ -1246,6 +1251,81 @@ def test_update_if_not_exists():
assert resp['Items'][0]['created_at'] == 123
+# https://github.com/spulec/moto/issues/1937
+@mock_dynamodb2
+def test_update_return_attributes():
+ dynamodb = boto3.client('dynamodb', region_name='us-east-1')
+
+ dynamodb.create_table(
+ TableName='moto-test',
+ KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
+ AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
+ ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}
+ )
+
+ def update(col, to, rv):
+ return dynamodb.update_item(
+ TableName='moto-test',
+ Key={'id': {'S': 'foo'}},
+ AttributeUpdates={col: {'Value': {'S': to}, 'Action': 'PUT'}},
+ ReturnValues=rv
+ )
+
+ r = update('col1', 'val1', 'ALL_NEW')
+ assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}}
+
+ r = update('col1', 'val2', 'ALL_OLD')
+ assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}}
+
+ r = update('col2', 'val3', 'UPDATED_NEW')
+ assert r['Attributes'] == {'col2': {'S': 'val3'}}
+
+ r = update('col2', 'val4', 'UPDATED_OLD')
+ assert r['Attributes'] == {'col2': {'S': 'val3'}}
+
+ r = update('col1', 'val5', 'NONE')
+ assert r['Attributes'] == {}
+
+    with assert_raises(ClientError):
+        update('col1', 'val6', 'WRONG')
+
+
+@mock_dynamodb2
+def test_put_return_attributes():
+ dynamodb = boto3.client('dynamodb', region_name='us-east-1')
+
+ dynamodb.create_table(
+ TableName='moto-test',
+ KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
+ AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
+ ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}
+ )
+
+ r = dynamodb.put_item(
+ TableName='moto-test',
+ Item={'id': {'S': 'foo'}, 'col1': {'S': 'val1'}},
+ ReturnValues='NONE'
+ )
+ assert 'Attributes' not in r
+
+ r = dynamodb.put_item(
+ TableName='moto-test',
+ Item={'id': {'S': 'foo'}, 'col1': {'S': 'val2'}},
+ ReturnValues='ALL_OLD'
+ )
+ assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}}
+
+ with assert_raises(ClientError) as ex:
+ dynamodb.put_item(
+ TableName='moto-test',
+ Item={'id': {'S': 'foo'}, 'col1': {'S': 'val3'}},
+ ReturnValues='ALL_NEW'
+ )
+ ex.exception.response['Error']['Code'].should.equal('ValidationException')
+ ex.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400)
+ ex.exception.response['Error']['Message'].should.equal('Return values set to invalid value')
+
+
@mock_dynamodb2
def test_query_global_secondary_index_when_created_via_update_table_resource():
dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
@@ -1336,3 +1416,62 @@ def test_query_global_secondary_index_when_created_via_update_table_resource():
assert len(forum_and_subject_items) == 1
assert forum_and_subject_items[0] == {'user_id': Decimal('1'), 'forum_name': 'cats',
'subject': 'my pet is the cutest'}
+
+
+@mock_dynamodb2
+def test_dynamodb_streams_1():
+ conn = boto3.client('dynamodb', region_name='us-east-1')
+
+ resp = conn.create_table(
+ TableName='test-streams',
+ KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
+ AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
+ ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1},
+ StreamSpecification={
+ 'StreamEnabled': True,
+ 'StreamViewType': 'NEW_AND_OLD_IMAGES'
+ }
+ )
+
+ assert 'StreamSpecification' in resp['TableDescription']
+ assert resp['TableDescription']['StreamSpecification'] == {
+ 'StreamEnabled': True,
+ 'StreamViewType': 'NEW_AND_OLD_IMAGES'
+ }
+ assert 'LatestStreamLabel' in resp['TableDescription']
+ assert 'LatestStreamArn' in resp['TableDescription']
+
+ resp = conn.delete_table(TableName='test-streams')
+
+ assert 'StreamSpecification' in resp['TableDescription']
+
+
+@mock_dynamodb2
+def test_dynamodb_streams_2():
+ conn = boto3.client('dynamodb', region_name='us-east-1')
+
+ resp = conn.create_table(
+ TableName='test-stream-update',
+ KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
+ AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
+ ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1},
+ )
+
+ assert 'StreamSpecification' not in resp['TableDescription']
+
+ resp = conn.update_table(
+ TableName='test-stream-update',
+ StreamSpecification={
+ 'StreamEnabled': True,
+ 'StreamViewType': 'NEW_IMAGE'
+ }
+ )
+
+ assert 'StreamSpecification' in resp['TableDescription']
+ assert resp['TableDescription']['StreamSpecification'] == {
+ 'StreamEnabled': True,
+ 'StreamViewType': 'NEW_IMAGE'
+ }
+ assert 'LatestStreamLabel' in resp['TableDescription']
+ assert 'LatestStreamArn' in resp['TableDescription']
+
diff --git a/tests/test_dynamodbstreams/test_dynamodbstreams.py b/tests/test_dynamodbstreams/test_dynamodbstreams.py
new file mode 100644
index 000000000..b60c21053
--- /dev/null
+++ b/tests/test_dynamodbstreams/test_dynamodbstreams.py
@@ -0,0 +1,234 @@
+from __future__ import unicode_literals, print_function
+
+from nose.tools import assert_raises
+
+import boto3
+from moto import mock_dynamodb2, mock_dynamodbstreams
+
+
+class TestCore():
+ stream_arn = None
+ mocks = []
+
+ def setup(self):
+ self.mocks = [mock_dynamodb2(), mock_dynamodbstreams()]
+ for m in self.mocks:
+ m.start()
+
+ # create a table with a stream
+ conn = boto3.client('dynamodb', region_name='us-east-1')
+
+ resp = conn.create_table(
+ TableName='test-streams',
+ KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
+ AttributeDefinitions=[{'AttributeName': 'id',
+ 'AttributeType': 'S'}],
+ ProvisionedThroughput={'ReadCapacityUnits': 1,
+ 'WriteCapacityUnits': 1},
+ StreamSpecification={
+ 'StreamEnabled': True,
+ 'StreamViewType': 'NEW_AND_OLD_IMAGES'
+ }
+ )
+ self.stream_arn = resp['TableDescription']['LatestStreamArn']
+
+ def teardown(self):
+ conn = boto3.client('dynamodb', region_name='us-east-1')
+ conn.delete_table(TableName='test-streams')
+ self.stream_arn = None
+
+ for m in self.mocks:
+ m.stop()
+
+
+ def test_verify_stream(self):
+ conn = boto3.client('dynamodb', region_name='us-east-1')
+ resp = conn.describe_table(TableName='test-streams')
+ assert 'LatestStreamArn' in resp['Table']
+
+ def test_describe_stream(self):
+ conn = boto3.client('dynamodbstreams', region_name='us-east-1')
+
+ resp = conn.describe_stream(StreamArn=self.stream_arn)
+ assert 'StreamDescription' in resp
+ desc = resp['StreamDescription']
+ assert desc['StreamArn'] == self.stream_arn
+ assert desc['TableName'] == 'test-streams'
+
+ def test_list_streams(self):
+ conn = boto3.client('dynamodbstreams', region_name='us-east-1')
+
+ resp = conn.list_streams()
+ assert resp['Streams'][0]['StreamArn'] == self.stream_arn
+
+ resp = conn.list_streams(TableName='no-stream')
+ assert not resp['Streams']
+
+ def test_get_shard_iterator(self):
+ conn = boto3.client('dynamodbstreams', region_name='us-east-1')
+
+ resp = conn.describe_stream(StreamArn=self.stream_arn)
+ shard_id = resp['StreamDescription']['Shards'][0]['ShardId']
+
+ resp = conn.get_shard_iterator(
+ StreamArn=self.stream_arn,
+ ShardId=shard_id,
+ ShardIteratorType='TRIM_HORIZON'
+ )
+ assert 'ShardIterator' in resp
+
+ def test_get_records_empty(self):
+ conn = boto3.client('dynamodbstreams', region_name='us-east-1')
+
+ resp = conn.describe_stream(StreamArn=self.stream_arn)
+ shard_id = resp['StreamDescription']['Shards'][0]['ShardId']
+
+ resp = conn.get_shard_iterator(
+ StreamArn=self.stream_arn,
+ ShardId=shard_id,
+ ShardIteratorType='LATEST'
+ )
+ iterator_id = resp['ShardIterator']
+
+ resp = conn.get_records(ShardIterator=iterator_id)
+ assert 'Records' in resp
+ assert len(resp['Records']) == 0
+
+ def test_get_records_seq(self):
+ conn = boto3.client('dynamodb', region_name='us-east-1')
+
+ conn.put_item(
+ TableName='test-streams',
+ Item={
+ 'id': {'S': 'entry1'},
+ 'first_col': {'S': 'foo'}
+ }
+ )
+ conn.put_item(
+ TableName='test-streams',
+ Item={
+ 'id': {'S': 'entry1'},
+ 'first_col': {'S': 'bar'},
+ 'second_col': {'S': 'baz'}
+ }
+ )
+ conn.delete_item(
+ TableName='test-streams',
+ Key={'id': {'S': 'entry1'}}
+ )
+
+ conn = boto3.client('dynamodbstreams', region_name='us-east-1')
+
+ resp = conn.describe_stream(StreamArn=self.stream_arn)
+ shard_id = resp['StreamDescription']['Shards'][0]['ShardId']
+
+ resp = conn.get_shard_iterator(
+ StreamArn=self.stream_arn,
+ ShardId=shard_id,
+ ShardIteratorType='TRIM_HORIZON'
+ )
+ iterator_id = resp['ShardIterator']
+
+ resp = conn.get_records(ShardIterator=iterator_id)
+ assert len(resp['Records']) == 3
+ assert resp['Records'][0]['eventName'] == 'INSERT'
+ assert resp['Records'][1]['eventName'] == 'MODIFY'
+ assert resp['Records'][2]['eventName'] == 'DELETE'
+
+ # now try fetching from the next shard iterator, it should be
+ # empty
+ resp = conn.get_records(ShardIterator=resp['NextShardIterator'])
+ assert len(resp['Records']) == 0
+
+
+class TestEdges():
+ mocks = []
+
+ def setup(self):
+ self.mocks = [mock_dynamodb2(), mock_dynamodbstreams()]
+ for m in self.mocks:
+ m.start()
+
+ def teardown(self):
+ for m in self.mocks:
+ m.stop()
+
+
+ def test_enable_stream_on_table(self):
+ conn = boto3.client('dynamodb', region_name='us-east-1')
+ resp = conn.create_table(
+ TableName='test-streams',
+ KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
+ AttributeDefinitions=[{'AttributeName': 'id',
+ 'AttributeType': 'S'}],
+ ProvisionedThroughput={'ReadCapacityUnits': 1,
+ 'WriteCapacityUnits': 1}
+ )
+ assert 'StreamSpecification' not in resp['TableDescription']
+
+ resp = conn.update_table(
+ TableName='test-streams',
+ StreamSpecification={
+ 'StreamViewType': 'KEYS_ONLY'
+ }
+ )
+ assert 'StreamSpecification' in resp['TableDescription']
+ assert resp['TableDescription']['StreamSpecification'] == {
+ 'StreamEnabled': True,
+ 'StreamViewType': 'KEYS_ONLY'
+ }
+ assert 'LatestStreamLabel' in resp['TableDescription']
+
+ # now try to enable it again
+ with assert_raises(conn.exceptions.ResourceInUseException):
+ resp = conn.update_table(
+ TableName='test-streams',
+ StreamSpecification={
+ 'StreamViewType': 'OLD_IMAGES'
+ }
+ )
+
+ def test_stream_with_range_key(self):
+ dyn = boto3.client('dynamodb', region_name='us-east-1')
+
+ resp = dyn.create_table(
+ TableName='test-streams',
+ KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'},
+ {'AttributeName': 'color', 'KeyType': 'RANGE'}],
+ AttributeDefinitions=[{'AttributeName': 'id',
+ 'AttributeType': 'S'},
+ {'AttributeName': 'color',
+ 'AttributeType': 'S'}],
+ ProvisionedThroughput={'ReadCapacityUnits': 1,
+ 'WriteCapacityUnits': 1},
+ StreamSpecification={
+ 'StreamViewType': 'NEW_IMAGES'
+ }
+ )
+ stream_arn = resp['TableDescription']['LatestStreamArn']
+
+ streams = boto3.client('dynamodbstreams', region_name='us-east-1')
+ resp = streams.describe_stream(StreamArn=stream_arn)
+ shard_id = resp['StreamDescription']['Shards'][0]['ShardId']
+
+ resp = streams.get_shard_iterator(
+ StreamArn=stream_arn,
+ ShardId=shard_id,
+ ShardIteratorType='LATEST'
+ )
+ iterator_id = resp['ShardIterator']
+
+ dyn.put_item(
+ TableName='test-streams',
+ Item={'id': {'S': 'row1'}, 'color': {'S': 'blue'}}
+ )
+ dyn.put_item(
+ TableName='test-streams',
+ Item={'id': {'S': 'row2'}, 'color': {'S': 'green'}}
+ )
+
+ resp = streams.get_records(ShardIterator=iterator_id)
+ assert len(resp['Records']) == 2
+ assert resp['Records'][0]['eventName'] == 'INSERT'
+ assert resp['Records'][1]['eventName'] == 'INSERT'
+
diff --git a/tests/test_ec2/test_vpc_peering.py b/tests/test_ec2/test_vpc_peering.py
index 1f98791b3..082499a72 100644
--- a/tests/test_ec2/test_vpc_peering.py
+++ b/tests/test_ec2/test_vpc_peering.py
@@ -89,7 +89,8 @@ def test_vpc_peering_connections_delete():
verdict.should.equal(True)
all_vpc_pcxs = conn.get_all_vpc_peering_connections()
- all_vpc_pcxs.should.have.length_of(0)
+ all_vpc_pcxs.should.have.length_of(1)
+ all_vpc_pcxs[0]._status.code.should.equal('deleted')
with assert_raises(EC2ResponseError) as cm:
conn.delete_vpc_peering_connection("pcx-1234abcd")
diff --git a/tests/test_ecs/test_ecs_boto3.py b/tests/test_ecs/test_ecs_boto3.py
index a0e8318da..a0d470935 100644
--- a/tests/test_ecs/test_ecs_boto3.py
+++ b/tests/test_ecs/test_ecs_boto3.py
@@ -925,6 +925,65 @@ def test_update_container_instances_state():
status='test_status').should.throw(Exception)
+@mock_ec2
+@mock_ecs
+def test_update_container_instances_state_by_arn():
+ ecs_client = boto3.client('ecs', region_name='us-east-1')
+ ec2 = boto3.resource('ec2', region_name='us-east-1')
+
+ test_cluster_name = 'test_ecs_cluster'
+ _ = ecs_client.create_cluster(
+ clusterName=test_cluster_name
+ )
+
+ instance_to_create = 3
+ test_instance_arns = []
+ for i in range(0, instance_to_create):
+ test_instance = ec2.create_instances(
+ ImageId="ami-1234abcd",
+ MinCount=1,
+ MaxCount=1,
+ )[0]
+
+ instance_id_document = json.dumps(
+ ec2_utils.generate_instance_identity_document(test_instance)
+ )
+
+ response = ecs_client.register_container_instance(
+ cluster=test_cluster_name,
+ instanceIdentityDocument=instance_id_document)
+
+ test_instance_arns.append(response['containerInstance']['containerInstanceArn'])
+
+ response = ecs_client.update_container_instances_state(cluster=test_cluster_name,
+ containerInstances=test_instance_arns,
+ status='DRAINING')
+ len(response['failures']).should.equal(0)
+ len(response['containerInstances']).should.equal(instance_to_create)
+ response_statuses = [ci['status'] for ci in response['containerInstances']]
+ for status in response_statuses:
+ status.should.equal('DRAINING')
+ response = ecs_client.update_container_instances_state(cluster=test_cluster_name,
+ containerInstances=test_instance_arns,
+ status='DRAINING')
+ len(response['failures']).should.equal(0)
+ len(response['containerInstances']).should.equal(instance_to_create)
+ response_statuses = [ci['status'] for ci in response['containerInstances']]
+ for status in response_statuses:
+ status.should.equal('DRAINING')
+ response = ecs_client.update_container_instances_state(cluster=test_cluster_name,
+ containerInstances=test_instance_arns,
+ status='ACTIVE')
+ len(response['failures']).should.equal(0)
+ len(response['containerInstances']).should.equal(instance_to_create)
+ response_statuses = [ci['status'] for ci in response['containerInstances']]
+ for status in response_statuses:
+ status.should.equal('ACTIVE')
+ ecs_client.update_container_instances_state.when.called_with(cluster=test_cluster_name,
+ containerInstances=test_instance_arns,
+ status='test_status').should.throw(Exception)
+
+
@mock_ec2
@mock_ecs
def test_run_task():
diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py
index bc23ff712..72eef5276 100644
--- a/tests/test_iam/test_iam.py
+++ b/tests/test_iam/test_iam.py
@@ -14,6 +14,19 @@ from nose.tools import raises
from tests.helpers import requires_boto_gte
+MOCK_CERT = """-----BEGIN CERTIFICATE-----
+MIIBpzCCARACCQCY5yOdxCTrGjANBgkqhkiG9w0BAQsFADAXMRUwEwYDVQQKDAxt
+b3RvIHRlc3RpbmcwIBcNMTgxMTA1MTkwNTIwWhgPMjI5MjA4MTkxOTA1MjBaMBcx
+FTATBgNVBAoMDG1vdG8gdGVzdGluZzCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkC
+gYEA1Jn3g2h7LD3FLqdpcYNbFXCS4V4eDpuTCje9vKFcC3pi/01147X3zdfPy8Mt
+ZhKxcREOwm4NXykh23P9KW7fBovpNwnbYsbPqj8Hf1ZaClrgku1arTVhEnKjx8zO
+vaR/bVLCss4uE0E0VM1tJn/QGQsfthFsjuHtwx8uIWz35tUCAwEAATANBgkqhkiG
+9w0BAQsFAAOBgQBWdOQ7bDc2nWkUhFjZoNIZrqjyNdjlMUndpwREVD7FQ/DuxJMj
+FyDHrtlrS80dPUQWNYHw++oACDpWO01LGLPPrGmuO/7cOdojPEd852q5gd+7W9xt
+8vUH+pBa6IBLbvBp+szli51V3TLSWcoyy4ceJNQU2vCkTLoFdS0RLd/7tQ==
+-----END CERTIFICATE-----"""
+
+
@mock_iam_deprecated()
def test_get_all_server_certs():
conn = boto.connect_iam()
@@ -108,6 +121,10 @@ def test_create_role_and_instance_profile():
conn.list_roles().roles[0].role_name.should.equal('my-role')
+ # Test with an empty path:
+ profile = conn.create_instance_profile('my-other-profile')
+ profile.path.should.equal('/')
+
@mock_iam_deprecated()
def test_remove_role_from_instance_profile():
@@ -536,6 +553,14 @@ def test_generate_credential_report():
result['generate_credential_report_response'][
'generate_credential_report_result']['state'].should.equal('COMPLETE')
+@mock_iam
+def test_boto3_generate_credential_report():
+ conn = boto3.client('iam', region_name='us-east-1')
+ result = conn.generate_credential_report()
+ result['State'].should.equal('STARTED')
+ result = conn.generate_credential_report()
+ result['State'].should.equal('COMPLETE')
+
@mock_iam_deprecated()
def test_get_credential_report():
@@ -551,6 +576,19 @@ def test_get_credential_report():
'get_credential_report_result']['content'].encode('ascii')).decode('ascii')
report.should.match(r'.*my-user.*')
+@mock_iam
+def test_boto3_get_credential_report():
+ conn = boto3.client('iam', region_name='us-east-1')
+ conn.create_user(UserName='my-user')
+ with assert_raises(ClientError):
+ conn.get_credential_report()
+ result = conn.generate_credential_report()
+ while result['State'] != 'COMPLETE':
+ result = conn.generate_credential_report()
+ result = conn.get_credential_report()
+ report = result['Content'].decode('utf-8')
+ report.should.match(r'.*my-user.*')
+
@requires_boto_gte('2.39')
@mock_iam_deprecated()
@@ -700,10 +738,10 @@ def test_get_account_authorization_details():
import json
conn = boto3.client('iam', region_name='us-east-1')
conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
- conn.create_user(Path='/', UserName='testCloudAuxUser')
- conn.create_group(Path='/', GroupName='testCloudAuxGroup')
+ conn.create_user(Path='/', UserName='testUser')
+ conn.create_group(Path='/', GroupName='testGroup')
conn.create_policy(
- PolicyName='testCloudAuxPolicy',
+ PolicyName='testPolicy',
Path='/',
PolicyDocument=json.dumps({
"Version": "2012-10-17",
@@ -715,46 +753,110 @@ def test_get_account_authorization_details():
}
]
}),
- Description='Test CloudAux Policy'
+ Description='Test Policy'
)
+ conn.create_instance_profile(InstanceProfileName='ipn')
+ conn.add_role_to_instance_profile(InstanceProfileName='ipn', RoleName='my-role')
+
result = conn.get_account_authorization_details(Filter=['Role'])
- len(result['RoleDetailList']) == 1
- len(result['UserDetailList']) == 0
- len(result['GroupDetailList']) == 0
- len(result['Policies']) == 0
+ assert len(result['RoleDetailList']) == 1
+ assert len(result['UserDetailList']) == 0
+ assert len(result['GroupDetailList']) == 0
+ assert len(result['Policies']) == 0
+ assert len(result['RoleDetailList'][0]['InstanceProfileList']) == 1
result = conn.get_account_authorization_details(Filter=['User'])
- len(result['RoleDetailList']) == 0
- len(result['UserDetailList']) == 1
- len(result['GroupDetailList']) == 0
- len(result['Policies']) == 0
+ assert len(result['RoleDetailList']) == 0
+ assert len(result['UserDetailList']) == 1
+ assert len(result['GroupDetailList']) == 0
+ assert len(result['Policies']) == 0
result = conn.get_account_authorization_details(Filter=['Group'])
- len(result['RoleDetailList']) == 0
- len(result['UserDetailList']) == 0
- len(result['GroupDetailList']) == 1
- len(result['Policies']) == 0
+ assert len(result['RoleDetailList']) == 0
+ assert len(result['UserDetailList']) == 0
+ assert len(result['GroupDetailList']) == 1
+ assert len(result['Policies']) == 0
result = conn.get_account_authorization_details(Filter=['LocalManagedPolicy'])
- len(result['RoleDetailList']) == 0
- len(result['UserDetailList']) == 0
- len(result['GroupDetailList']) == 0
- len(result['Policies']) == 1
+ assert len(result['RoleDetailList']) == 0
+ assert len(result['UserDetailList']) == 0
+ assert len(result['GroupDetailList']) == 0
+ assert len(result['Policies']) == 1
# Check for greater than 1 since this should always be greater than one but might change.
# See iam/aws_managed_policies.py
result = conn.get_account_authorization_details(Filter=['AWSManagedPolicy'])
- len(result['RoleDetailList']) == 0
- len(result['UserDetailList']) == 0
- len(result['GroupDetailList']) == 0
- len(result['Policies']) > 1
+ assert len(result['RoleDetailList']) == 0
+ assert len(result['UserDetailList']) == 0
+ assert len(result['GroupDetailList']) == 0
+ assert len(result['Policies']) > 1
result = conn.get_account_authorization_details()
- len(result['RoleDetailList']) == 1
- len(result['UserDetailList']) == 1
- len(result['GroupDetailList']) == 1
- len(result['Policies']) > 1
+ assert len(result['RoleDetailList']) == 1
+ assert len(result['UserDetailList']) == 1
+ assert len(result['GroupDetailList']) == 1
+ assert len(result['Policies']) > 1
+@mock_iam
+def test_signing_certs():
+ client = boto3.client('iam', region_name='us-east-1')
+ # Create the IAM user first:
+ client.create_user(UserName='testing')
+
+ # Upload the cert:
+ resp = client.upload_signing_certificate(UserName='testing', CertificateBody=MOCK_CERT)['Certificate']
+ cert_id = resp['CertificateId']
+
+ assert resp['UserName'] == 'testing'
+ assert resp['Status'] == 'Active'
+ assert resp['CertificateBody'] == MOCK_CERT
+ assert resp['CertificateId']
+
+    # Upload the cert with an invalid body:
+ with assert_raises(ClientError) as ce:
+ client.upload_signing_certificate(UserName='testing', CertificateBody='notacert')
+ assert ce.exception.response['Error']['Code'] == 'MalformedCertificate'
+
+ # Upload with an invalid user:
+ with assert_raises(ClientError):
+ client.upload_signing_certificate(UserName='notauser', CertificateBody=MOCK_CERT)
+
+ # Update:
+ client.update_signing_certificate(UserName='testing', CertificateId=cert_id, Status='Inactive')
+
+ with assert_raises(ClientError):
+ client.update_signing_certificate(UserName='notauser', CertificateId=cert_id, Status='Inactive')
+
+ with assert_raises(ClientError) as ce:
+ client.update_signing_certificate(UserName='testing', CertificateId='x' * 32, Status='Inactive')
+
+ assert ce.exception.response['Error']['Message'] == 'The Certificate with id {id} cannot be found.'.format(
+ id='x' * 32)
+
+ # List the certs:
+ resp = client.list_signing_certificates(UserName='testing')['Certificates']
+ assert len(resp) == 1
+ assert resp[0]['CertificateBody'] == MOCK_CERT
+ assert resp[0]['Status'] == 'Inactive' # Changed with the update call above.
+
+ with assert_raises(ClientError):
+ client.list_signing_certificates(UserName='notauser')
+
+ # Delete:
+ client.delete_signing_certificate(UserName='testing', CertificateId=cert_id)
+
+ with assert_raises(ClientError):
+ client.delete_signing_certificate(UserName='notauser', CertificateId=cert_id)
+
+ with assert_raises(ClientError) as ce:
+ client.delete_signing_certificate(UserName='testing', CertificateId=cert_id)
+
+ assert ce.exception.response['Error']['Message'] == 'The Certificate with id {id} cannot be found.'.format(
+ id=cert_id)
+
+ # Verify that it's not in the list:
+ resp = client.list_signing_certificates(UserName='testing')
+ assert not resp['Certificates']
diff --git a/tests/test_iot/test_iot.py b/tests/test_iot/test_iot.py
index 5c6effd7a..826d2c56b 100644
--- a/tests/test_iot/test_iot.py
+++ b/tests/test_iot/test_iot.py
@@ -5,6 +5,8 @@ import sure # noqa
import boto3
from moto import mock_iot
+from botocore.exceptions import ClientError
+from nose.tools import assert_raises
@mock_iot
@@ -261,6 +263,96 @@ def test_certs():
res.should.have.key('certificates').which.should.have.length_of(0)
+@mock_iot
+def test_delete_policy_validation():
+ doc = """{
+ "Version": "2012-10-17",
+ "Statement":[
+ {
+ "Effect":"Allow",
+ "Action":[
+ "iot: *"
+ ],
+ "Resource":"*"
+ }
+ ]
+ }
+ """
+ client = boto3.client('iot', region_name='ap-northeast-1')
+ cert = client.create_keys_and_certificate(setAsActive=True)
+ cert_arn = cert['certificateArn']
+ policy_name = 'my-policy'
+ client.create_policy(policyName=policy_name, policyDocument=doc)
+ client.attach_principal_policy(policyName=policy_name, principal=cert_arn)
+
+ with assert_raises(ClientError) as e:
+ client.delete_policy(policyName=policy_name)
+ e.exception.response['Error']['Message'].should.contain(
+ 'The policy cannot be deleted as the policy is attached to one or more principals (name=%s)' % policy_name)
+ res = client.list_policies()
+ res.should.have.key('policies').which.should.have.length_of(1)
+
+ client.detach_principal_policy(policyName=policy_name, principal=cert_arn)
+ client.delete_policy(policyName=policy_name)
+ res = client.list_policies()
+ res.should.have.key('policies').which.should.have.length_of(0)
+
+
+@mock_iot
+def test_delete_certificate_validation():
+ doc = """{
+ "Version": "2012-10-17",
+ "Statement":[
+ {
+ "Effect":"Allow",
+ "Action":[
+ "iot: *"
+ ],
+ "Resource":"*"
+ }
+ ]
+ }
+ """
+ client = boto3.client('iot', region_name='ap-northeast-1')
+ cert = client.create_keys_and_certificate(setAsActive=True)
+ cert_id = cert['certificateId']
+ cert_arn = cert['certificateArn']
+ policy_name = 'my-policy'
+ thing_name = 'thing-1'
+ client.create_policy(policyName=policy_name, policyDocument=doc)
+ client.attach_principal_policy(policyName=policy_name, principal=cert_arn)
+ client.create_thing(thingName=thing_name)
+ client.attach_thing_principal(thingName=thing_name, principal=cert_arn)
+
+ with assert_raises(ClientError) as e:
+ client.delete_certificate(certificateId=cert_id)
+ e.exception.response['Error']['Message'].should.contain(
+ 'Certificate must be deactivated (not ACTIVE) before deletion.')
+ res = client.list_certificates()
+ res.should.have.key('certificates').which.should.have.length_of(1)
+
+ client.update_certificate(certificateId=cert_id, newStatus='REVOKED')
+ with assert_raises(ClientError) as e:
+ client.delete_certificate(certificateId=cert_id)
+ e.exception.response['Error']['Message'].should.contain(
+ 'Things must be detached before deletion (arn: %s)' % cert_arn)
+ res = client.list_certificates()
+ res.should.have.key('certificates').which.should.have.length_of(1)
+
+ client.detach_thing_principal(thingName=thing_name, principal=cert_arn)
+ with assert_raises(ClientError) as e:
+ client.delete_certificate(certificateId=cert_id)
+ e.exception.response['Error']['Message'].should.contain(
+ 'Certificate policies must be detached before deletion (arn: %s)' % cert_arn)
+ res = client.list_certificates()
+ res.should.have.key('certificates').which.should.have.length_of(1)
+
+ client.detach_principal_policy(policyName=policy_name, principal=cert_arn)
+ client.delete_certificate(certificateId=cert_id)
+ res = client.list_certificates()
+ res.should.have.key('certificates').which.should.have.length_of(0)
+
+
@mock_iot
def test_certs_create_inactive():
client = boto3.client('iot', region_name='ap-northeast-1')
@@ -309,6 +401,47 @@ def test_policy():
@mock_iot
def test_principal_policy():
+ client = boto3.client('iot', region_name='ap-northeast-1')
+ policy_name = 'my-policy'
+ doc = '{}'
+ client.create_policy(policyName=policy_name, policyDocument=doc)
+ cert = client.create_keys_and_certificate(setAsActive=True)
+ cert_arn = cert['certificateArn']
+
+ client.attach_policy(policyName=policy_name, target=cert_arn)
+
+ res = client.list_principal_policies(principal=cert_arn)
+ res.should.have.key('policies').which.should.have.length_of(1)
+ for policy in res['policies']:
+ policy.should.have.key('policyName').which.should_not.be.none
+ policy.should.have.key('policyArn').which.should_not.be.none
+
+    # do nothing if the policy has already been attached to the certificate
+ client.attach_policy(policyName=policy_name, target=cert_arn)
+
+ res = client.list_principal_policies(principal=cert_arn)
+ res.should.have.key('policies').which.should.have.length_of(1)
+ for policy in res['policies']:
+ policy.should.have.key('policyName').which.should_not.be.none
+ policy.should.have.key('policyArn').which.should_not.be.none
+
+ res = client.list_policy_principals(policyName=policy_name)
+ res.should.have.key('principals').which.should.have.length_of(1)
+ for principal in res['principals']:
+ principal.should_not.be.none
+
+ client.detach_policy(policyName=policy_name, target=cert_arn)
+ res = client.list_principal_policies(principal=cert_arn)
+ res.should.have.key('policies').which.should.have.length_of(0)
+ res = client.list_policy_principals(policyName=policy_name)
+ res.should.have.key('principals').which.should.have.length_of(0)
+ with assert_raises(ClientError) as e:
+ client.detach_policy(policyName=policy_name, target=cert_arn)
+ e.exception.response['Error']['Code'].should.equal('ResourceNotFoundException')
+
+
+@mock_iot
+def test_principal_policy_deprecated():
client = boto3.client('iot', region_name='ap-northeast-1')
policy_name = 'my-policy'
doc = '{}'
diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py
index 6e339abb6..ffafc0dfd 100644
--- a/tests/test_s3/test_s3.py
+++ b/tests/test_s3/test_s3.py
@@ -1553,6 +1553,24 @@ def test_boto3_put_bucket_tagging():
})
resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
+ # With duplicate tag keys:
+ with assert_raises(ClientError) as err:
+ resp = s3.put_bucket_tagging(Bucket=bucket_name,
+ Tagging={
+ "TagSet": [
+ {
+ "Key": "TagOne",
+ "Value": "ValueOne"
+ },
+ {
+ "Key": "TagOne",
+ "Value": "ValueOneAgain"
+ }
+ ]
+ })
+ e = err.exception
+ e.response["Error"]["Code"].should.equal("InvalidTag")
+ e.response["Error"]["Message"].should.equal("Cannot provide multiple Tags with the same key")
@mock_s3
def test_boto3_get_bucket_tagging():
diff --git a/tests/test_secretsmanager/test_secretsmanager.py b/tests/test_secretsmanager/test_secretsmanager.py
index ec384a660..169282421 100644
--- a/tests/test_secretsmanager/test_secretsmanager.py
+++ b/tests/test_secretsmanager/test_secretsmanager.py
@@ -39,12 +39,28 @@ def test_create_secret():
conn = boto3.client('secretsmanager', region_name='us-east-1')
result = conn.create_secret(Name='test-secret', SecretString="foosecret")
- assert result['ARN'] == (
- 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad')
+ assert result['ARN']
assert result['Name'] == 'test-secret'
secret = conn.get_secret_value(SecretId='test-secret')
assert secret['SecretString'] == 'foosecret'
+@mock_secretsmanager
+def test_create_secret_with_tags():
+ conn = boto3.client('secretsmanager', region_name='us-east-1')
+ secret_name = 'test-secret-with-tags'
+
+ result = conn.create_secret(
+ Name=secret_name,
+ SecretString="foosecret",
+ Tags=[{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}]
+ )
+ assert result['ARN']
+ assert result['Name'] == secret_name
+ secret_value = conn.get_secret_value(SecretId=secret_name)
+ assert secret_value['SecretString'] == 'foosecret'
+ secret_details = conn.describe_secret(SecretId=secret_name)
+ assert secret_details['Tags'] == [{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}]
+
@mock_secretsmanager
def test_get_random_password_default_length():
conn = boto3.client('secretsmanager', region_name='us-west-2')
@@ -159,10 +175,17 @@ def test_describe_secret():
conn.create_secret(Name='test-secret',
SecretString='foosecret')
+ conn.create_secret(Name='test-secret-2',
+ SecretString='barsecret')
+
secret_description = conn.describe_secret(SecretId='test-secret')
+ secret_description_2 = conn.describe_secret(SecretId='test-secret-2')
+
assert secret_description # Returned dict is not empty
- assert secret_description['ARN'] == (
- 'arn:aws:secretsmanager:us-west-2:1234567890:secret:test-secret-rIjad')
+ assert secret_description['Name'] == ('test-secret')
+ assert secret_description['ARN'] != '' # Test arn not empty
+ assert secret_description_2['Name'] == ('test-secret-2')
+ assert secret_description_2['ARN'] != '' # Test arn not empty
@mock_secretsmanager
def test_describe_secret_that_does_not_exist():
@@ -190,9 +213,7 @@ def test_rotate_secret():
rotated_secret = conn.rotate_secret(SecretId=secret_name)
assert rotated_secret
- assert rotated_secret['ARN'] == (
- 'arn:aws:secretsmanager:us-west-2:1234567890:secret:test-secret-rIjad'
- )
+ assert rotated_secret['ARN'] != '' # Test arn not empty
assert rotated_secret['Name'] == secret_name
assert rotated_secret['VersionId'] != ''
diff --git a/tests/test_secretsmanager/test_server.py b/tests/test_secretsmanager/test_server.py
index e573f9b67..d0f495f57 100644
--- a/tests/test_secretsmanager/test_server.py
+++ b/tests/test_secretsmanager/test_server.py
@@ -82,11 +82,20 @@ def test_create_secret():
headers={
"X-Amz-Target": "secretsmanager.CreateSecret"},
)
+ res_2 = test_client.post('/',
+ data={"Name": "test-secret-2",
+ "SecretString": "bar-secret"},
+ headers={
+ "X-Amz-Target": "secretsmanager.CreateSecret"},
+ )
json_data = json.loads(res.data.decode("utf-8"))
- assert json_data['ARN'] == (
- 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad')
+ assert json_data['ARN'] != ''
assert json_data['Name'] == 'test-secret'
+
+ json_data_2 = json.loads(res_2.data.decode("utf-8"))
+ assert json_data_2['ARN'] != ''
+ assert json_data_2['Name'] == 'test-secret-2'
@mock_secretsmanager
def test_describe_secret():
@@ -107,12 +116,30 @@ def test_describe_secret():
"X-Amz-Target": "secretsmanager.DescribeSecret"
},
)
+
+ create_secret_2 = test_client.post('/',
+ data={"Name": "test-secret-2",
+ "SecretString": "barsecret"},
+ headers={
+ "X-Amz-Target": "secretsmanager.CreateSecret"
+ },
+ )
+ describe_secret_2 = test_client.post('/',
+ data={"SecretId": "test-secret-2"},
+ headers={
+ "X-Amz-Target": "secretsmanager.DescribeSecret"
+ },
+ )
json_data = json.loads(describe_secret.data.decode("utf-8"))
assert json_data # Returned dict is not empty
- assert json_data['ARN'] == (
- 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad'
- )
+ assert json_data['ARN'] != ''
+ assert json_data['Name'] == 'test-secret'
+
+ json_data_2 = json.loads(describe_secret_2.data.decode("utf-8"))
+ assert json_data_2 # Returned dict is not empty
+ assert json_data_2['ARN'] != ''
+ assert json_data_2['Name'] == 'test-secret-2'
@mock_secretsmanager
def test_describe_secret_that_does_not_exist():
@@ -179,9 +206,7 @@ def test_rotate_secret():
json_data = json.loads(rotate_secret.data.decode("utf-8"))
assert json_data # Returned dict is not empty
- assert json_data['ARN'] == (
- 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad'
- )
+ assert json_data['ARN'] != ''
assert json_data['Name'] == 'test-secret'
assert json_data['VersionId'] == client_request_token