Merge branch 'master' into bucket-name-length-limit

Steve Pulec 2018-12-28 21:04:10 -05:00 committed by GitHub
commit e681f55ba1
58 changed files with 2794 additions and 277 deletions

.gitignore
View File

@ -15,4 +15,5 @@ python_env
.ropeproject/
.pytest_cache/
venv/
.python-version
.vscode/

View File

@ -23,8 +23,6 @@ matrix:
sudo: true
before_install:
- export BOTO_CONFIG=/dev/null
- export AWS_SECRET_ACCESS_KEY=foobar_secret
- export AWS_ACCESS_KEY_ID=foobar_key
install:
# We build moto first so the docker container doesn't try to compile it as well; also note we don't use
# -d for docker run, so the logs show up in Travis

View File

@ -827,25 +827,25 @@
- [ ] unlink_identity
- [ ] update_identity_pool
## cognito-idp - 0% implemented
## cognito-idp - 34% implemented
- [ ] add_custom_attributes
- [ ] admin_add_user_to_group
- [X] admin_add_user_to_group
- [ ] admin_confirm_sign_up
- [ ] admin_create_user
- [ ] admin_delete_user
- [X] admin_create_user
- [X] admin_delete_user
- [ ] admin_delete_user_attributes
- [ ] admin_disable_provider_for_user
- [X] admin_disable_user
- [X] admin_enable_user
- [ ] admin_forget_device
- [ ] admin_get_device
- [ ] admin_get_user
- [ ] admin_initiate_auth
- [X] admin_get_user
- [X] admin_initiate_auth
- [ ] admin_link_provider_for_user
- [ ] admin_list_devices
- [ ] admin_list_groups_for_user
- [X] admin_list_groups_for_user
- [ ] admin_list_user_auth_events
- [ ] admin_remove_user_from_group
- [X] admin_remove_user_from_group
- [ ] admin_reset_user_password
- [ ] admin_respond_to_auth_challenge
- [ ] admin_set_user_mfa_preference
@ -855,37 +855,37 @@
- [ ] admin_update_user_attributes
- [ ] admin_user_global_sign_out
- [ ] associate_software_token
- [ ] change_password
- [X] change_password
- [ ] confirm_device
- [ ] confirm_forgot_password
- [X] confirm_forgot_password
- [ ] confirm_sign_up
- [ ] create_group
- [ ] create_identity_provider
- [X] create_group
- [X] create_identity_provider
- [ ] create_resource_server
- [ ] create_user_import_job
- [ ] create_user_pool
- [ ] create_user_pool_client
- [ ] create_user_pool_domain
- [ ] delete_group
- [ ] delete_identity_provider
- [X] create_user_pool
- [X] create_user_pool_client
- [X] create_user_pool_domain
- [X] delete_group
- [X] delete_identity_provider
- [ ] delete_resource_server
- [ ] delete_user
- [ ] delete_user_attributes
- [ ] delete_user_pool
- [ ] delete_user_pool_client
- [ ] delete_user_pool_domain
- [ ] describe_identity_provider
- [X] delete_user_pool
- [X] delete_user_pool_client
- [X] delete_user_pool_domain
- [X] describe_identity_provider
- [ ] describe_resource_server
- [ ] describe_risk_configuration
- [ ] describe_user_import_job
- [ ] describe_user_pool
- [ ] describe_user_pool_client
- [ ] describe_user_pool_domain
- [X] describe_user_pool
- [X] describe_user_pool_client
- [X] describe_user_pool_domain
- [ ] forget_device
- [ ] forgot_password
- [ ] get_csv_header
- [ ] get_device
- [ ] get_group
- [X] get_group
- [ ] get_identity_provider_by_identifier
- [ ] get_signing_certificate
- [ ] get_ui_customization
@ -895,16 +895,16 @@
- [ ] global_sign_out
- [ ] initiate_auth
- [ ] list_devices
- [ ] list_groups
- [ ] list_identity_providers
- [X] list_groups
- [X] list_identity_providers
- [ ] list_resource_servers
- [ ] list_user_import_jobs
- [ ] list_user_pool_clients
- [ ] list_user_pools
- [ ] list_users
- [ ] list_users_in_group
- [X] list_user_pool_clients
- [X] list_user_pools
- [X] list_users
- [X] list_users_in_group
- [ ] resend_confirmation_code
- [ ] respond_to_auth_challenge
- [X] respond_to_auth_challenge
- [ ] set_risk_configuration
- [ ] set_ui_customization
- [ ] set_user_mfa_preference
@ -920,7 +920,7 @@
- [ ] update_resource_server
- [ ] update_user_attributes
- [ ] update_user_pool
- [ ] update_user_pool_client
- [X] update_user_pool_client
- [ ] verify_software_token
- [ ] verify_user_attribute
@ -2225,7 +2225,7 @@
- [X] create_policy
- [X] create_policy_version
- [X] create_role
- [ ] create_saml_provider
- [X] create_saml_provider
- [ ] create_service_linked_role
- [ ] create_service_specific_credential
- [X] create_user
@ -2243,7 +2243,7 @@
- [X] delete_policy_version
- [X] delete_role
- [X] delete_role_policy
- [ ] delete_saml_provider
- [X] delete_saml_provider
- [X] delete_server_certificate
- [ ] delete_service_linked_role
- [ ] delete_service_specific_credential
@ -2273,7 +2273,7 @@
- [X] get_policy_version
- [X] get_role
- [X] get_role_policy
- [ ] get_saml_provider
- [X] get_saml_provider
- [X] get_server_certificate
- [ ] get_service_linked_role_deletion_status
- [ ] get_ssh_public_key
@ -2296,7 +2296,7 @@
- [X] list_policy_versions
- [X] list_role_policies
- [ ] list_roles
- [ ] list_saml_providers
- [X] list_saml_providers
- [ ] list_server_certificates
- [ ] list_service_specific_credentials
- [ ] list_signing_certificates
@ -2323,7 +2323,7 @@
- [ ] update_open_id_connect_provider_thumbprint
- [ ] update_role
- [ ] update_role_description
- [ ] update_saml_provider
- [X] update_saml_provider
- [ ] update_server_certificate
- [ ] update_service_specific_credential
- [ ] update_signing_certificate
@ -2376,11 +2376,11 @@
- [ ] unsubscribe_from_event
- [ ] update_assessment_target
## iot - 30% implemented
## iot - 32% implemented
- [ ] accept_certificate_transfer
- [X] add_thing_to_thing_group
- [ ] associate_targets_with_job
- [ ] attach_policy
- [X] attach_policy
- [X] attach_principal_policy
- [X] attach_thing_principal
- [ ] cancel_certificate_transfer
@ -2429,7 +2429,7 @@
- [X] describe_thing_group
- [ ] describe_thing_registration_task
- [X] describe_thing_type
- [ ] detach_policy
- [X] detach_policy
- [X] detach_principal_policy
- [X] detach_thing_principal
- [ ] disable_topic_rule

View File

@ -259,7 +259,7 @@ It uses flask, which isn't a default dependency. You can install the
server 'extra' package with:
```python
pip install moto[server]
pip install "moto[server]"
```
You can then start it running a service:
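As a rough illustration of using the stand-alone server (not the README's own command), a client can simply point at the server's endpoint; the port and bucket name below are assumptions:

```python
# Hypothetical usage sketch: talk to a locally running moto server.
# Assumes the server was started separately (via the moto[server] extra)
# and is listening on http://localhost:5000 -- both are assumptions.
import boto3

s3 = boto3.client(
    "s3",
    region_name="us-east-1",
    endpoint_url="http://localhost:5000",  # assumed local endpoint
    aws_access_key_id="testing",
    aws_secret_access_key="testing",
)
s3.create_bucket(Bucket="example-bucket")
print([b["Name"] for b in s3.list_buckets()["Buckets"]])
```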

View File

@ -16,6 +16,7 @@ from .cognitoidp import mock_cognitoidp, mock_cognitoidp_deprecated # flake8: n
from .datapipeline import mock_datapipeline, mock_datapipeline_deprecated # flake8: noqa
from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated # flake8: noqa
from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated # flake8: noqa
from .dynamodbstreams import mock_dynamodbstreams # flake8: noqa
from .ec2 import mock_ec2, mock_ec2_deprecated # flake8: noqa
from .ecr import mock_ecr, mock_ecr_deprecated # flake8: noqa
from .ecs import mock_ecs, mock_ecs_deprecated # flake8: noqa

View File

@ -508,6 +508,15 @@ DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE = """<DescribeAutoScalingGroupsResponse xml
{% else %}
<LoadBalancerNames/>
{% endif %}
{% if group.target_group_arns %}
<TargetGroupARNs>
{% for target_group_arn in group.target_group_arns %}
<member>{{ target_group_arn }}</member>
{% endfor %}
</TargetGroupARNs>
{% else %}
<TargetGroupARNs/>
{% endif %}
<MinSize>{{ group.min_size }}</MinSize>
{% if group.vpc_zone_identifier %}
<VPCZoneIdentifier>{{ group.vpc_zone_identifier }}</VPCZoneIdentifier>

View File

@ -12,6 +12,7 @@ from moto.core import moto_api_backends
from moto.datapipeline import datapipeline_backends
from moto.dynamodb import dynamodb_backends
from moto.dynamodb2 import dynamodb_backends2
from moto.dynamodbstreams import dynamodbstreams_backends
from moto.ec2 import ec2_backends
from moto.ecr import ecr_backends
from moto.ecs import ecs_backends
@ -59,6 +60,7 @@ BACKENDS = {
'datapipeline': datapipeline_backends,
'dynamodb': dynamodb_backends,
'dynamodb2': dynamodb_backends2,
'dynamodbstreams': dynamodbstreams_backends,
'ec2': ec2_backends,
'ecr': ecr_backends,
'ecs': ecs_backends,

View File

@ -27,7 +27,7 @@ class BatchResponse(BaseResponse):
elif not hasattr(self, '_json'):
try:
self._json = json.loads(self.body)
except json.JSONDecodeError:
except ValueError:
print()
return self._json

View File

@ -13,6 +13,7 @@ from .utils import (
generate_changeset_id,
generate_stack_id,
yaml_tag_constructor,
validate_template_cfn_lint,
)
from .exceptions import ValidationError
@ -270,6 +271,9 @@ class CloudFormationBackend(BaseBackend):
next_token = str(token + 100) if len(all_exports) > token + 100 else None
return exports, next_token
def validate_template(self, template):
return validate_template_cfn_lint(template)
def _validate_export_uniqueness(self, stack):
new_stack_export_names = [x.name for x in stack.exports]
export_names = self.exports.keys()

View File

@ -1,6 +1,7 @@
from __future__ import unicode_literals
import json
import yaml
from six.moves.urllib.parse import urlparse
from moto.core.responses import BaseResponse
@ -87,7 +88,8 @@ class CloudFormationResponse(BaseResponse):
role_arn = self._get_param('RoleARN')
update_or_create = self._get_param('ChangeSetType', 'CREATE')
parameters_list = self._get_list_prefix("Parameters.member")
tags = {tag[0]: tag[1] for tag in self._get_list_prefix("Tags.member")}
tags = dict((item['key'], item['value'])
for item in self._get_list_prefix("Tags.member"))
parameters = {param['parameter_key']: param['parameter_value']
for param in parameters_list}
if template_url:
@ -294,6 +296,32 @@ class CloudFormationResponse(BaseResponse):
template = self.response_template(LIST_EXPORTS_RESPONSE)
return template.render(exports=exports, next_token=next_token)
def validate_template(self):
cfn_lint = self.cloudformation_backend.validate_template(self._get_param('TemplateBody'))
if cfn_lint:
raise ValidationError(cfn_lint[0].message)
description = ""
try:
description = json.loads(self._get_param('TemplateBody'))['Description']
except (ValueError, KeyError):
pass
try:
description = yaml.load(self._get_param('TemplateBody'))['Description']
except (yaml.parser.ParserError, KeyError):
pass
template = self.response_template(VALIDATE_STACK_RESPONSE_TEMPLATE)
return template.render(description=description)
VALIDATE_STACK_RESPONSE_TEMPLATE = """<ValidateTemplateResponse>
<ValidateTemplateResult>
<Capabilities></Capabilities>
<CapabilitiesReason></CapabilitiesReason>
<DeclaredTransforms></DeclaredTransforms>
<Description>{{ description }}</Description>
<Parameters></Parameters>
</ValidateTemplateResult>
</ValidateTemplateResponse>"""
CREATE_STACK_RESPONSE_TEMPLATE = """<CreateStackResponse>
<CreateStackResult>

View File

@ -3,6 +3,9 @@ import uuid
import six
import random
import yaml
import os
from cfnlint import decode, core
def generate_stack_id(stack_name):
@ -38,3 +41,33 @@ def yaml_tag_constructor(loader, tag, node):
key = 'Fn::{}'.format(tag[1:])
return {key: _f(loader, tag, node)}
def validate_template_cfn_lint(template):
# Save the template to a temporary file -- cfn-lint requires a file
filename = "file.tmp"
with open(filename, "w") as file:
file.write(template)
abs_filename = os.path.abspath(filename)
# decode handles both yaml and json
template, matches = decode.decode(abs_filename, False)
# Set cfn-lint to info
core.configure_logging(None)
# Initialize the ruleset to be applied (no overrules, no excludes)
rules = core.get_rules([], [], [])
# Use us-east-1 region (spec file) for validation
regions = ['us-east-1']
# Process all the rules and gather the errors
matches = core.run_checks(
abs_filename,
template,
rules,
regions)
return matches
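A rough usage sketch of the helper above, assuming cfn-lint is installed; the template body is a made-up example:

```python
# Hedged sketch: run the cfn-lint based validator on a tiny template.
# The template contents are illustrative only.
template = """
Resources:
  Bucket:
    Type: AWS::S3::Bucket
"""
matches = validate_template_cfn_lint(template)
# An empty list means cfn-lint found no rule violations.
print([m.message for m in matches])
```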

View File

@ -24,6 +24,16 @@ class UserNotFoundError(BadRequest):
})
class GroupExistsException(BadRequest):
def __init__(self, message):
super(GroupExistsException, self).__init__()
self.description = json.dumps({
"message": message,
'__type': 'GroupExistsException',
})
class NotAuthorizedError(BadRequest):
def __init__(self, message):

View File

@ -1,6 +1,8 @@
from __future__ import unicode_literals
import datetime
import functools
import itertools
import json
import os
import time
@ -11,8 +13,7 @@ from jose import jws
from moto.compat import OrderedDict
from moto.core import BaseBackend, BaseModel
from .exceptions import NotAuthorizedError, ResourceNotFoundError, UserNotFoundError
from .exceptions import GroupExistsException, NotAuthorizedError, ResourceNotFoundError, UserNotFoundError
UserStatus = {
"FORCE_CHANGE_PASSWORD": "FORCE_CHANGE_PASSWORD",
@ -20,6 +21,39 @@ UserStatus = {
}
def paginate(limit, start_arg="next_token", limit_arg="max_results"):
"""Returns a limited result list, and an offset into list of remaining items
Takes the next_token, and max_results kwargs given to a function and handles
the slicing of the results. The kwarg `next_token` is the offset into the
list to begin slicing from. `max_results` is the size of the result required
If the max_results is not supplied then the `limit` parameter is used as a
default
:param limit_arg: the name of argument in the decorated function that
controls amount of items returned
:param start_arg: the name of the argument in the decorated that provides
the starting offset
:param limit: A default maximum items to return
:return: a tuple containing a list of items, and the offset into the list
"""
default_start = 0
def outer_wrapper(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
start = int(default_start if kwargs.get(start_arg) is None else kwargs[start_arg])
lim = int(limit if kwargs.get(limit_arg) is None else kwargs[limit_arg])
stop = start + lim
result = func(*args, **kwargs)
limited_results = list(itertools.islice(result, start, stop))
next_token = stop if stop < len(result) else None
return limited_results, next_token
return wrapper
return outer_wrapper
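A small behavioural sketch of this decorator with a throwaway function (purely illustrative):

```python
# Hedged illustration of the paginate decorator defined above.
@paginate(2)
def list_letters(max_results=None, next_token=None):
    return ["a", "b", "c", "d", "e"]

page, token = list_letters()                   # (['a', 'b'], 2)
page, token = list_letters(next_token=token)   # (['c', 'd'], 4)
page, token = list_letters(next_token=4)       # (['e'], None)
```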
class CognitoIdpUserPool(BaseModel):
def __init__(self, region, name, extended_config):
@ -33,6 +67,7 @@ class CognitoIdpUserPool(BaseModel):
self.clients = OrderedDict()
self.identity_providers = OrderedDict()
self.groups = OrderedDict()
self.users = OrderedDict()
self.refresh_tokens = {}
self.access_tokens = {}
@ -185,6 +220,33 @@ class CognitoIdpIdentityProvider(BaseModel):
return identity_provider_json
class CognitoIdpGroup(BaseModel):
def __init__(self, user_pool_id, group_name, description, role_arn, precedence):
self.user_pool_id = user_pool_id
self.group_name = group_name
self.description = description or ""
self.role_arn = role_arn
self.precedence = precedence
self.last_modified_date = datetime.datetime.now()
self.creation_date = self.last_modified_date
# Users who are members of this group.
# Note that these links are bidirectional.
self.users = set()
def to_json(self):
return {
"GroupName": self.group_name,
"UserPoolId": self.user_pool_id,
"Description": self.description,
"RoleArn": self.role_arn,
"Precedence": self.precedence,
"LastModifiedDate": time.mktime(self.last_modified_date.timetuple()),
"CreationDate": time.mktime(self.creation_date.timetuple()),
}
class CognitoIdpUser(BaseModel):
def __init__(self, user_pool_id, username, password, status, attributes):
@ -198,6 +260,10 @@ class CognitoIdpUser(BaseModel):
self.create_date = datetime.datetime.utcnow()
self.last_modified_date = datetime.datetime.utcnow()
# Groups this user is a member of.
# Note that these links are bidirectional.
self.groups = set()
def _base_json(self):
return {
"UserPoolId": self.user_pool_id,
@ -242,7 +308,8 @@ class CognitoIdpBackend(BaseBackend):
self.user_pools[user_pool.id] = user_pool
return user_pool
def list_user_pools(self):
@paginate(60)
def list_user_pools(self, max_results=None, next_token=None):
return self.user_pools.values()
def describe_user_pool(self, user_pool_id):
@ -289,7 +356,8 @@ class CognitoIdpBackend(BaseBackend):
user_pool.clients[user_pool_client.id] = user_pool_client
return user_pool_client
def list_user_pool_clients(self, user_pool_id):
@paginate(60)
def list_user_pool_clients(self, user_pool_id, max_results=None, next_token=None):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
@ -339,7 +407,8 @@ class CognitoIdpBackend(BaseBackend):
user_pool.identity_providers[name] = identity_provider
return identity_provider
def list_identity_providers(self, user_pool_id):
@paginate(60)
def list_identity_providers(self, user_pool_id, max_results=None, next_token=None):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
@ -367,6 +436,72 @@ class CognitoIdpBackend(BaseBackend):
del user_pool.identity_providers[name]
# Group
def create_group(self, user_pool_id, group_name, description, role_arn, precedence):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
group = CognitoIdpGroup(user_pool_id, group_name, description, role_arn, precedence)
if group.group_name in user_pool.groups:
raise GroupExistsException("A group with the name already exists")
user_pool.groups[group.group_name] = group
return group
def get_group(self, user_pool_id, group_name):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
if group_name not in user_pool.groups:
raise ResourceNotFoundError(group_name)
return user_pool.groups[group_name]
def list_groups(self, user_pool_id):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
return user_pool.groups.values()
def delete_group(self, user_pool_id, group_name):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
if group_name not in user_pool.groups:
raise ResourceNotFoundError(group_name)
group = user_pool.groups[group_name]
for user in group.users:
user.groups.remove(group)
del user_pool.groups[group_name]
def admin_add_user_to_group(self, user_pool_id, group_name, username):
group = self.get_group(user_pool_id, group_name)
user = self.admin_get_user(user_pool_id, username)
group.users.add(user)
user.groups.add(group)
def list_users_in_group(self, user_pool_id, group_name):
group = self.get_group(user_pool_id, group_name)
return list(group.users)
def admin_list_groups_for_user(self, user_pool_id, username):
user = self.admin_get_user(user_pool_id, username)
return list(user.groups)
def admin_remove_user_from_group(self, user_pool_id, group_name, username):
group = self.get_group(user_pool_id, group_name)
user = self.admin_get_user(user_pool_id, username)
group.users.discard(user)
user.groups.discard(group)
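A minimal usage sketch of these new group operations through boto3 and `mock_cognitoidp`; the pool, group, and user names are made up:

```python
# Hedged sketch exercising the new group APIs through boto3 and the mock;
# pool/group/user names are illustrative only.
import boto3
from moto import mock_cognitoidp

@mock_cognitoidp
def group_demo():
    client = boto3.client("cognito-idp", region_name="us-east-1")
    pool_id = client.create_user_pool(PoolName="example-pool")["UserPool"]["Id"]
    client.create_group(GroupName="admins", UserPoolId=pool_id)
    client.admin_create_user(UserPoolId=pool_id, Username="alice")
    client.admin_add_user_to_group(UserPoolId=pool_id, Username="alice", GroupName="admins")
    groups = client.admin_list_groups_for_user(Username="alice", UserPoolId=pool_id)["Groups"]
    return [g["GroupName"] for g in groups]  # ['admins']

group_demo()
```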
# User
def admin_create_user(self, user_pool_id, username, temporary_password, attributes):
user_pool = self.user_pools.get(user_pool_id)
@ -387,7 +522,8 @@ class CognitoIdpBackend(BaseBackend):
return user_pool.users[username]
def list_users(self, user_pool_id):
@paginate(60, "pagination_token", "limit")
def list_users(self, user_pool_id, pagination_token=None, limit=None):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
@ -410,6 +546,10 @@ class CognitoIdpBackend(BaseBackend):
if username not in user_pool.users:
raise UserNotFoundError(username)
user = user_pool.users[username]
for group in user.groups:
group.users.remove(user)
del user_pool.users[username]
def _log_user_in(self, user_pool, client, username):

View File

@ -22,10 +22,17 @@ class CognitoIdpResponse(BaseResponse):
})
def list_user_pools(self):
user_pools = cognitoidp_backends[self.region].list_user_pools()
return json.dumps({
"UserPools": [user_pool.to_json() for user_pool in user_pools]
})
max_results = self._get_param("MaxResults")
next_token = self._get_param("NextToken", "0")
user_pools, next_token = cognitoidp_backends[self.region].list_user_pools(
max_results=max_results, next_token=next_token
)
response = {
"UserPools": [user_pool.to_json() for user_pool in user_pools],
}
if next_token:
response["NextToken"] = str(next_token)
return json.dumps(response)
def describe_user_pool(self):
user_pool_id = self._get_param("UserPoolId")
@ -72,10 +79,16 @@ class CognitoIdpResponse(BaseResponse):
def list_user_pool_clients(self):
user_pool_id = self._get_param("UserPoolId")
user_pool_clients = cognitoidp_backends[self.region].list_user_pool_clients(user_pool_id)
return json.dumps({
max_results = self._get_param("MaxResults")
next_token = self._get_param("NextToken", "0")
user_pool_clients, next_token = cognitoidp_backends[self.region].list_user_pool_clients(user_pool_id,
max_results=max_results, next_token=next_token)
response = {
"UserPoolClients": [user_pool_client.to_json() for user_pool_client in user_pool_clients]
})
}
if next_token:
response["NextToken"] = str(next_token)
return json.dumps(response)
def describe_user_pool_client(self):
user_pool_id = self._get_param("UserPoolId")
@ -110,10 +123,17 @@ class CognitoIdpResponse(BaseResponse):
def list_identity_providers(self):
user_pool_id = self._get_param("UserPoolId")
identity_providers = cognitoidp_backends[self.region].list_identity_providers(user_pool_id)
return json.dumps({
max_results = self._get_param("MaxResults")
next_token = self._get_param("NextToken", "0")
identity_providers, next_token = cognitoidp_backends[self.region].list_identity_providers(
user_pool_id, max_results=max_results, next_token=next_token
)
response = {
"Providers": [identity_provider.to_json() for identity_provider in identity_providers]
})
}
if next_token:
response["NextToken"] = str(next_token)
return json.dumps(response)
def describe_identity_provider(self):
user_pool_id = self._get_param("UserPoolId")
@ -129,6 +149,89 @@ class CognitoIdpResponse(BaseResponse):
cognitoidp_backends[self.region].delete_identity_provider(user_pool_id, name)
return ""
# Group
def create_group(self):
group_name = self._get_param("GroupName")
user_pool_id = self._get_param("UserPoolId")
description = self._get_param("Description")
role_arn = self._get_param("RoleArn")
precedence = self._get_param("Precedence")
group = cognitoidp_backends[self.region].create_group(
user_pool_id,
group_name,
description,
role_arn,
precedence,
)
return json.dumps({
"Group": group.to_json(),
})
def get_group(self):
group_name = self._get_param("GroupName")
user_pool_id = self._get_param("UserPoolId")
group = cognitoidp_backends[self.region].get_group(user_pool_id, group_name)
return json.dumps({
"Group": group.to_json(),
})
def list_groups(self):
user_pool_id = self._get_param("UserPoolId")
groups = cognitoidp_backends[self.region].list_groups(user_pool_id)
return json.dumps({
"Groups": [group.to_json() for group in groups],
})
def delete_group(self):
group_name = self._get_param("GroupName")
user_pool_id = self._get_param("UserPoolId")
cognitoidp_backends[self.region].delete_group(user_pool_id, group_name)
return ""
def admin_add_user_to_group(self):
user_pool_id = self._get_param("UserPoolId")
username = self._get_param("Username")
group_name = self._get_param("GroupName")
cognitoidp_backends[self.region].admin_add_user_to_group(
user_pool_id,
group_name,
username,
)
return ""
def list_users_in_group(self):
user_pool_id = self._get_param("UserPoolId")
group_name = self._get_param("GroupName")
users = cognitoidp_backends[self.region].list_users_in_group(user_pool_id, group_name)
return json.dumps({
"Users": [user.to_json(extended=True) for user in users],
})
def admin_list_groups_for_user(self):
username = self._get_param("Username")
user_pool_id = self._get_param("UserPoolId")
groups = cognitoidp_backends[self.region].admin_list_groups_for_user(user_pool_id, username)
return json.dumps({
"Groups": [group.to_json() for group in groups],
})
def admin_remove_user_from_group(self):
user_pool_id = self._get_param("UserPoolId")
username = self._get_param("Username")
group_name = self._get_param("GroupName")
cognitoidp_backends[self.region].admin_remove_user_from_group(
user_pool_id,
group_name,
username,
)
return ""
# User
def admin_create_user(self):
user_pool_id = self._get_param("UserPoolId")
@ -155,10 +258,15 @@ class CognitoIdpResponse(BaseResponse):
def list_users(self):
user_pool_id = self._get_param("UserPoolId")
users = cognitoidp_backends[self.region].list_users(user_pool_id)
return json.dumps({
"Users": [user.to_json(extended=True) for user in users]
})
limit = self._get_param("Limit")
token = self._get_param("PaginationToken")
users, token = cognitoidp_backends[self.region].list_users(user_pool_id,
limit=limit,
pagination_token=token)
response = {"Users": [user.to_json(extended=True) for user in users]}
if token:
response["PaginationToken"] = str(token)
return json.dumps(response)
def admin_disable_user(self):
user_pool_id = self._get_param("UserPoolId")

View File

@ -4,6 +4,7 @@ from __future__ import absolute_import
import functools
import inspect
import os
import re
import six
from io import BytesIO
@ -21,6 +22,11 @@ from .utils import (
)
# "Mock" the AWS credentials as they can't be mocked in Botocore currently
os.environ.setdefault("AWS_ACCESS_KEY_ID", "foobar_key")
os.environ.setdefault("AWS_SECRET_ACCESS_KEY", "foobar_secret")
class BaseMockAWS(object):
nested_count = 0

View File

@ -5,6 +5,7 @@ import datetime
import decimal
import json
import re
import uuid
import boto3
from moto.compat import OrderedDict
@ -292,9 +293,82 @@ class Item(BaseModel):
'ADD not supported for %s' % ', '.join(update_action['Value'].keys()))
class StreamRecord(BaseModel):
def __init__(self, table, stream_type, event_name, old, new, seq):
old_a = old.to_json()['Attributes'] if old is not None else {}
new_a = new.to_json()['Attributes'] if new is not None else {}
rec = old if old is not None else new
keys = {table.hash_key_attr: rec.hash_key.to_json()}
if table.range_key_attr is not None:
keys[table.range_key_attr] = rec.range_key.to_json()
self.record = {
'eventID': uuid.uuid4().hex,
'eventName': event_name,
'eventSource': 'aws:dynamodb',
'eventVersion': '1.0',
'awsRegion': 'us-east-1',
'dynamodb': {
'StreamViewType': stream_type,
'ApproximateCreationDateTime': datetime.datetime.utcnow().isoformat(),
'SequenceNumber': seq,
'SizeBytes': 1,
'Keys': keys
}
}
if stream_type in ('NEW_IMAGE', 'NEW_AND_OLD_IMAGES'):
self.record['dynamodb']['NewImage'] = new_a
if stream_type in ('OLD_IMAGE', 'NEW_AND_OLD_IMAGES'):
self.record['dynamodb']['OldImage'] = old_a
# This is a substantial overestimate but it's the easiest to do now
self.record['dynamodb']['SizeBytes'] = len(
json.dumps(self.record['dynamodb']))
def to_json(self):
return self.record
class StreamShard(BaseModel):
def __init__(self, table):
self.table = table
self.id = 'shardId-00000001541626099285-f35f62ef'
self.starting_sequence_number = 1100000000017454423009
self.items = []
self.created_on = datetime.datetime.utcnow()
def to_json(self):
return {
'ShardId': self.id,
'SequenceNumberRange': {
'StartingSequenceNumber': str(self.starting_sequence_number)
}
}
def add(self, old, new):
t = self.table.stream_specification['StreamViewType']
if old is None:
event_name = 'INSERT'
elif new is None:
event_name = 'DELETE'
else:
event_name = 'MODIFY'
seq = len(self.items) + self.starting_sequence_number
self.items.append(
StreamRecord(self.table, t, event_name, old, new, seq))
def get(self, start, quantity):
start -= self.starting_sequence_number
assert start >= 0
end = start + quantity
return [i.to_json() for i in self.items[start:end]]
class Table(BaseModel):
def __init__(self, table_name, schema=None, attr=None, throughput=None, indexes=None, global_indexes=None):
def __init__(self, table_name, schema=None, attr=None, throughput=None, indexes=None, global_indexes=None, streams=None):
self.name = table_name
self.attr = attr
self.schema = schema
@ -325,10 +399,22 @@ class Table(BaseModel):
'TimeToLiveStatus': 'DISABLED' # One of 'ENABLING'|'DISABLING'|'ENABLED'|'DISABLED',
# 'AttributeName': 'string' # Can contain this
}
self.set_stream_specification(streams)
def _generate_arn(self, name):
return 'arn:aws:dynamodb:us-east-1:123456789011:table/' + name
def set_stream_specification(self, streams):
self.stream_specification = streams
if streams and (streams.get('StreamEnabled') or streams.get('StreamViewType')):
self.stream_specification['StreamEnabled'] = True
self.latest_stream_label = datetime.datetime.utcnow().isoformat()
self.stream_shard = StreamShard(self)
else:
self.stream_specification = {'StreamEnabled': False}
self.latest_stream_label = None
self.stream_shard = None
def describe(self, base_key='TableDescription'):
results = {
base_key: {
@ -345,6 +431,11 @@ class Table(BaseModel):
'LocalSecondaryIndexes': [index for index in self.indexes],
}
}
if self.stream_specification and self.stream_specification['StreamEnabled']:
results[base_key]['StreamSpecification'] = self.stream_specification
if self.latest_stream_label:
results[base_key]['LatestStreamLabel'] = self.latest_stream_label
results[base_key]['LatestStreamArn'] = self.table_arn + '/stream/' + self.latest_stream_label
return results
def __len__(self):
@ -385,23 +476,22 @@ class Table(BaseModel):
else:
range_value = None
if expected is None:
expected = {}
lookup_range_value = range_value
else:
expected_range_value = expected.get(
self.range_key_attr, {}).get("Value")
if(expected_range_value is None):
lookup_range_value = range_value
else:
lookup_range_value = DynamoType(expected_range_value)
current = self.get_item(hash_value, lookup_range_value)
item = Item(hash_value, self.hash_key_type, range_value,
self.range_key_type, item_attrs)
if not overwrite:
if expected is None:
expected = {}
lookup_range_value = range_value
else:
expected_range_value = expected.get(
self.range_key_attr, {}).get("Value")
if(expected_range_value is None):
lookup_range_value = range_value
else:
lookup_range_value = DynamoType(expected_range_value)
current = self.get_item(hash_value, lookup_range_value)
if current is None:
current_attr = {}
elif hasattr(current, 'attrs'):
@ -432,6 +522,10 @@ class Table(BaseModel):
self.items[hash_value][range_value] = item
else:
self.items[hash_value] = item
if self.stream_shard is not None:
self.stream_shard.add(current, item)
return item
def __nonzero__(self):
@ -462,9 +556,14 @@ class Table(BaseModel):
def delete_item(self, hash_key, range_key):
try:
if range_key:
return self.items[hash_key].pop(range_key)
item = self.items[hash_key].pop(range_key)
else:
return self.items.pop(hash_key)
item = self.items.pop(hash_key)
if self.stream_shard is not None:
self.stream_shard.add(item, None)
return item
except KeyError:
return None
@ -680,6 +779,13 @@ class DynamoDBBackend(BaseBackend):
table.throughput = throughput
return table
def update_table_streams(self, name, stream_specification):
table = self.tables[name]
if (stream_specification.get('StreamEnabled') or stream_specification.get('StreamViewType')) and table.latest_stream_label:
raise ValueError('Table already has stream enabled')
table.set_stream_specification(stream_specification)
return table
def update_table_global_indexes(self, name, global_index_updates):
table = self.tables[name]
gsis_by_name = dict((i['IndexName'], i) for i in table.global_indexes)

View File

@ -104,13 +104,16 @@ class DynamoHandler(BaseResponse):
# getting the indexes
global_indexes = body.get("GlobalSecondaryIndexes", [])
local_secondary_indexes = body.get("LocalSecondaryIndexes", [])
# get the stream specification
streams = body.get("StreamSpecification")
table = self.dynamodb_backend.create_table(table_name,
schema=key_schema,
throughput=throughput,
attr=attr,
global_indexes=global_indexes,
indexes=local_secondary_indexes)
indexes=local_secondary_indexes,
streams=streams)
if table is not None:
return dynamo_json_dump(table.describe())
else:
@ -163,12 +166,20 @@ class DynamoHandler(BaseResponse):
def update_table(self):
name = self.body['TableName']
table = self.dynamodb_backend.get_table(name)
if 'GlobalSecondaryIndexUpdates' in self.body:
table = self.dynamodb_backend.update_table_global_indexes(
name, self.body['GlobalSecondaryIndexUpdates'])
if 'ProvisionedThroughput' in self.body:
throughput = self.body["ProvisionedThroughput"]
table = self.dynamodb_backend.update_table_throughput(name, throughput)
if 'StreamSpecification' in self.body:
try:
table = self.dynamodb_backend.update_table_streams(name, self.body['StreamSpecification'])
except ValueError:
er = 'com.amazonaws.dynamodb.v20111205#ResourceInUseException'
return self.error(er, 'Cannot enable stream')
return dynamo_json_dump(table.describe())
def describe_table(self):
@ -183,6 +194,11 @@ class DynamoHandler(BaseResponse):
def put_item(self):
name = self.body['TableName']
item = self.body['Item']
return_values = self.body.get('ReturnValues', 'NONE')
if return_values not in ('ALL_OLD', 'NONE'):
er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
return self.error(er, 'Return values set to invalid value')
if has_empty_keys_or_values(item):
return get_empty_str_error()
@ -193,6 +209,13 @@ class DynamoHandler(BaseResponse):
else:
expected = None
if return_values == 'ALL_OLD':
existing_item = self.dynamodb_backend.get_item(name, item)
if existing_item:
existing_attributes = existing_item.to_json()['Attributes']
else:
existing_attributes = {}
# Attempt to parse simple ConditionExpressions into an Expected
# expression
if not expected:
@ -228,6 +251,10 @@ class DynamoHandler(BaseResponse):
'TableName': name,
'CapacityUnits': 1
}
if return_values == 'ALL_OLD':
item_dict['Attributes'] = existing_attributes
else:
item_dict.pop('Attributes', None)
return dynamo_json_dump(item_dict)
else:
er = 'com.amazonaws.dynamodb.v20111205#ResourceNotFoundException'
@ -512,7 +539,11 @@ class DynamoHandler(BaseResponse):
def delete_item(self):
name = self.body['TableName']
keys = self.body['Key']
return_values = self.body.get('ReturnValues', '')
return_values = self.body.get('ReturnValues', 'NONE')
if return_values not in ('ALL_OLD', 'NONE'):
er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
return self.error(er, 'Return values set to invalid value')
table = self.dynamodb_backend.get_table(name)
if not table:
er = 'com.amazonaws.dynamodb.v20120810#ConditionalCheckFailedException'
@ -527,9 +558,9 @@ class DynamoHandler(BaseResponse):
return dynamo_json_dump(item_dict)
def update_item(self):
name = self.body['TableName']
key = self.body['Key']
return_values = self.body.get('ReturnValues', 'NONE')
update_expression = self.body.get('UpdateExpression')
attribute_updates = self.body.get('AttributeUpdates')
expression_attribute_names = self.body.get(
@ -537,6 +568,15 @@ class DynamoHandler(BaseResponse):
expression_attribute_values = self.body.get(
'ExpressionAttributeValues', {})
existing_item = self.dynamodb_backend.get_item(name, key)
if existing_item:
existing_attributes = existing_item.to_json()['Attributes']
else:
existing_attributes = {}
if return_values not in ('NONE', 'ALL_OLD', 'ALL_NEW', 'UPDATED_OLD',
'UPDATED_NEW'):
er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
return self.error(er, 'Return values set to invalid value')
if has_empty_keys_or_values(expression_attribute_values):
return get_empty_str_error()
@ -591,8 +631,26 @@ class DynamoHandler(BaseResponse):
'TableName': name,
'CapacityUnits': 0.5
}
if not existing_item:
unchanged_attributes = {
k for k in existing_attributes.keys()
if existing_attributes[k] == item_dict['Attributes'].get(k)
}
changed_attributes = set(existing_attributes.keys()).union(item_dict['Attributes'].keys()).difference(unchanged_attributes)
if return_values == 'NONE':
item_dict['Attributes'] = {}
elif return_values == 'ALL_OLD':
item_dict['Attributes'] = existing_attributes
elif return_values == 'UPDATED_OLD':
item_dict['Attributes'] = {
k: v for k, v in existing_attributes.items()
if k in changed_attributes
}
elif return_values == 'UPDATED_NEW':
item_dict['Attributes'] = {
k: v for k, v in item_dict['Attributes'].items()
if k in changed_attributes
}
return dynamo_json_dump(item_dict)
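To make the ReturnValues filtering above concrete, a small stand-alone sketch with made-up attribute maps:

```python
# Hedged sketch of the UPDATED_OLD / UPDATED_NEW filtering logic above.
existing_attributes = {"id": {"S": "1"}, "color": {"S": "red"}, "size": {"N": "10"}}
new_attributes = {"id": {"S": "1"}, "color": {"S": "blue"}, "size": {"N": "10"}}

unchanged = {k for k in existing_attributes
             if existing_attributes[k] == new_attributes.get(k)}
changed = set(existing_attributes) | set(new_attributes)
changed -= unchanged

updated_old = {k: v for k, v in existing_attributes.items() if k in changed}
updated_new = {k: v for k, v in new_attributes.items() if k in changed}
# updated_old == {'color': {'S': 'red'}}; updated_new == {'color': {'S': 'blue'}}
```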

View File

@ -0,0 +1,6 @@
from __future__ import unicode_literals
from .models import dynamodbstreams_backends
from ..core.models import base_decorator
dynamodbstreams_backend = dynamodbstreams_backends['us-east-1']
mock_dynamodbstreams = base_decorator(dynamodbstreams_backends)

View File

@ -0,0 +1,129 @@
from __future__ import unicode_literals
import os
import json
import boto3
import base64
from moto.core import BaseBackend, BaseModel
from moto.dynamodb2.models import dynamodb_backends
class ShardIterator(BaseModel):
def __init__(self, streams_backend, stream_shard, shard_iterator_type, sequence_number=None):
self.id = base64.b64encode(os.urandom(472)).decode('utf-8')
self.streams_backend = streams_backend
self.stream_shard = stream_shard
self.shard_iterator_type = shard_iterator_type
if shard_iterator_type == 'TRIM_HORIZON':
self.sequence_number = stream_shard.starting_sequence_number
elif shard_iterator_type == 'LATEST':
self.sequence_number = stream_shard.starting_sequence_number + len(stream_shard.items)
elif shard_iterator_type == 'AT_SEQUENCE_NUMBER':
self.sequence_number = sequence_number
elif shard_iterator_type == 'AFTER_SEQUENCE_NUMBER':
self.sequence_number = sequence_number + 1
@property
def arn(self):
return '{}/stream/{}|1|{}'.format(
self.stream_shard.table.table_arn,
self.stream_shard.table.latest_stream_label,
self.id)
def to_json(self):
return {
'ShardIterator': self.arn
}
def get(self, limit=1000):
items = self.stream_shard.get(self.sequence_number, limit)
try:
last_sequence_number = max(i['dynamodb']['SequenceNumber'] for i in items)
new_shard_iterator = ShardIterator(self.streams_backend,
self.stream_shard,
'AFTER_SEQUENCE_NUMBER',
last_sequence_number)
except ValueError:
new_shard_iterator = ShardIterator(self.streams_backend,
self.stream_shard,
'AT_SEQUENCE_NUMBER',
self.sequence_number)
self.streams_backend.shard_iterators[new_shard_iterator.arn] = new_shard_iterator
return {
'NextShardIterator': new_shard_iterator.arn,
'Records': items
}
class DynamoDBStreamsBackend(BaseBackend):
def __init__(self, region):
self.region = region
self.shard_iterators = {}
def reset(self):
region = self.region
self.__dict__ = {}
self.__init__(region)
@property
def dynamodb(self):
return dynamodb_backends[self.region]
def _get_table_from_arn(self, arn):
table_name = arn.split(':', 6)[5].split('/')[1]
return self.dynamodb.get_table(table_name)
def describe_stream(self, arn):
table = self._get_table_from_arn(arn)
resp = {'StreamDescription': {
'StreamArn': arn,
'StreamLabel': table.latest_stream_label,
'StreamStatus': ('ENABLED' if table.latest_stream_label
else 'DISABLED'),
'StreamViewType': table.stream_specification['StreamViewType'],
'CreationRequestDateTime': table.stream_shard.created_on.isoformat(),
'TableName': table.name,
'KeySchema': table.schema,
'Shards': ([table.stream_shard.to_json()] if table.stream_shard
else [])
}}
return json.dumps(resp)
def list_streams(self, table_name=None):
streams = []
for table in self.dynamodb.tables.values():
if table_name is not None and table.name != table_name:
continue
if table.latest_stream_label:
d = table.describe(base_key='Table')
streams.append({
'StreamArn': d['Table']['LatestStreamArn'],
'TableName': d['Table']['TableName'],
'StreamLabel': d['Table']['LatestStreamLabel']
})
return json.dumps({'Streams': streams})
def get_shard_iterator(self, arn, shard_id, shard_iterator_type, sequence_number=None):
table = self._get_table_from_arn(arn)
assert table.stream_shard.id == shard_id
shard_iterator = ShardIterator(self, table.stream_shard,
shard_iterator_type,
sequence_number)
self.shard_iterators[shard_iterator.arn] = shard_iterator
return json.dumps(shard_iterator.to_json())
def get_records(self, iterator_arn, limit):
shard_iterator = self.shard_iterators[iterator_arn]
return json.dumps(shard_iterator.get(limit))
available_regions = boto3.session.Session().get_available_regions(
'dynamodbstreams')
dynamodbstreams_backends = {region: DynamoDBStreamsBackend(region=region)
for region in available_regions}

View File

@ -0,0 +1,34 @@
from __future__ import unicode_literals
from moto.core.responses import BaseResponse
from .models import dynamodbstreams_backends
class DynamoDBStreamsHandler(BaseResponse):
@property
def backend(self):
return dynamodbstreams_backends[self.region]
def describe_stream(self):
arn = self._get_param('StreamArn')
return self.backend.describe_stream(arn)
def list_streams(self):
table_name = self._get_param('TableName')
return self.backend.list_streams(table_name)
def get_shard_iterator(self):
arn = self._get_param('StreamArn')
shard_id = self._get_param('ShardId')
shard_iterator_type = self._get_param('ShardIteratorType')
return self.backend.get_shard_iterator(arn, shard_id,
shard_iterator_type)
def get_records(self):
arn = self._get_param('ShardIterator')
limit = self._get_param('Limit')
if limit is None:
limit = 1000
return self.backend.get_records(arn, limit)
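A minimal end-to-end sketch of the new streams mock via boto3; the table definition, key schema, and item are illustrative:

```python
# Hedged sketch of the new DynamoDB Streams mock; names and values are made up.
import boto3
from moto import mock_dynamodb2, mock_dynamodbstreams

@mock_dynamodb2
@mock_dynamodbstreams
def streams_demo():
    ddb = boto3.client("dynamodb", region_name="us-east-1")
    ddb.create_table(
        TableName="example",
        KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
        ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1},
        StreamSpecification={"StreamEnabled": True, "StreamViewType": "NEW_AND_OLD_IMAGES"},
    )
    ddb.put_item(TableName="example", Item={"id": {"S": "1"}})

    streams = boto3.client("dynamodbstreams", region_name="us-east-1")
    arn = streams.list_streams(TableName="example")["Streams"][0]["StreamArn"]
    shard_id = streams.describe_stream(StreamArn=arn)["StreamDescription"]["Shards"][0]["ShardId"]
    it = streams.get_shard_iterator(
        StreamArn=arn, ShardId=shard_id, ShardIteratorType="TRIM_HORIZON"
    )["ShardIterator"]
    return streams.get_records(ShardIterator=it)["Records"]  # one INSERT record

streams_demo()
```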

View File

@ -0,0 +1,10 @@
from __future__ import unicode_literals
from .responses import DynamoDBStreamsHandler
url_bases = [
"https?://streams.dynamodb.(.+).amazonaws.com"
]
url_paths = {
"{0}/$": DynamoDBStreamsHandler.dispatch,
}

View File

@ -2230,6 +2230,10 @@ class VPCPeeringConnectionStatus(object):
self.code = code
self.message = message
def deleted(self):
self.code = 'deleted'
self.message = 'Deleted by {deleter ID}'
def initiating(self):
self.code = 'initiating-request'
self.message = 'Initiating Request to {accepter ID}'
@ -2292,9 +2296,8 @@ class VPCPeeringConnectionBackend(object):
return self.vpc_pcxs.get(vpc_pcx_id)
def delete_vpc_peering_connection(self, vpc_pcx_id):
deleted = self.vpc_pcxs.pop(vpc_pcx_id, None)
if not deleted:
raise InvalidVPCPeeringConnectionIdError(vpc_pcx_id)
deleted = self.get_vpc_peering_connection(vpc_pcx_id)
deleted._status.deleted()
return deleted
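A rough sketch of the changed delete behaviour, assuming the describe response surfaces the peering status; all IDs and CIDRs are illustrative:

```python
# Hedged sketch: deleting a peering connection now keeps it around with a
# 'deleted' status instead of removing it outright.
import boto3
from moto import mock_ec2

@mock_ec2
def pcx_demo():
    ec2 = boto3.client("ec2", region_name="us-east-1")
    vpc1 = ec2.create_vpc(CidrBlock="10.0.0.0/16")["Vpc"]["VpcId"]
    vpc2 = ec2.create_vpc(CidrBlock="10.1.0.0/16")["Vpc"]["VpcId"]
    pcx = ec2.create_vpc_peering_connection(VpcId=vpc1, PeerVpcId=vpc2)
    pcx_id = pcx["VpcPeeringConnection"]["VpcPeeringConnectionId"]
    ec2.delete_vpc_peering_connection(VpcPeeringConnectionId=pcx_id)
    conns = ec2.describe_vpc_peering_connections()["VpcPeeringConnections"]
    return [c["Status"]["Code"] for c in conns
            if c["VpcPeeringConnectionId"] == pcx_id]  # ['deleted']

pcx_demo()
```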
def accept_vpc_peering_connection(self, vpc_pcx_id):

View File

@ -769,6 +769,8 @@ class EC2ContainerServiceBackend(BaseBackend):
Container instances status should be one of [ACTIVE,DRAINING]")
failures = []
container_instance_objects = []
list_container_instance_ids = [x.split('/')[-1]
for x in list_container_instance_ids]
for container_instance_id in list_container_instance_ids:
container_instance = self.container_instances[cluster_name].get(container_instance_id, None)
if container_instance is not None:

View File

@ -613,13 +613,11 @@ DESCRIBE_STEP_TEMPLATE = """<DescribeStepResponse xmlns="http://elasticmapreduce
<Id>{{ step.id }}</Id>
<Name>{{ step.name | escape }}</Name>
<Status>
<!-- does not exist for botocore 1.4.28
<FailureDetails>
<Reason/>
<Message/>
<LogFile/>
</FailureDetails>
-->
<State>{{ step.state }}</State>
<StateChangeReason>{{ step.state_change_reason }}</StateChangeReason>
<Timeline>

View File

@ -24,3 +24,11 @@ class IAMReportNotPresentException(RESTError):
def __init__(self, message):
super(IAMReportNotPresentException, self).__init__(
"ReportNotPresent", message)
class MalformedCertificate(RESTError):
code = 400
def __init__(self, cert):
super(MalformedCertificate, self).__init__(
'MalformedCertificate', 'Certificate {cert} is malformed'.format(cert=cert))

View File

@ -1,14 +1,18 @@
from __future__ import unicode_literals
import base64
import sys
from datetime import datetime
import json
from cryptography import x509
from cryptography.hazmat.backends import default_backend
import pytz
from moto.core import BaseBackend, BaseModel
from moto.core.utils import iso_8601_datetime_without_milliseconds
from .aws_managed_policies import aws_managed_policies_data
from .exceptions import IAMNotFoundException, IAMConflictException, IAMReportNotPresentException
from .exceptions import IAMNotFoundException, IAMConflictException, IAMReportNotPresentException, MalformedCertificate
from .utils import random_access_key, random_alphanumeric, random_resource_id, random_policy_id
ACCOUNT_ID = 123456789012
@ -50,6 +54,16 @@ class Policy(BaseModel):
self.update_datetime = datetime.now(pytz.utc)
class SAMLProvider(BaseModel):
def __init__(self, name, saml_metadata_document=None):
self.name = name
self.saml_metadata_document = saml_metadata_document
@property
def arn(self):
return "arn:aws:iam::{0}:saml-provider/{1}".format(ACCOUNT_ID, self.name)
class PolicyVersion(object):
def __init__(self,
@ -114,9 +128,10 @@ class Role(BaseModel):
self.id = role_id
self.name = name
self.assume_role_policy_document = assume_role_policy_document
self.path = path
self.path = path or '/'
self.policies = {}
self.managed_policies = {}
self.create_date = datetime.now(pytz.utc)
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
@ -166,8 +181,9 @@ class InstanceProfile(BaseModel):
def __init__(self, instance_profile_id, name, path, roles):
self.id = instance_profile_id
self.name = name
self.path = path
self.path = path or '/'
self.roles = roles if roles else []
self.create_date = datetime.now(pytz.utc)
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
@ -213,6 +229,16 @@ class Certificate(BaseModel):
return "arn:aws:iam::{0}:server-certificate{1}{2}".format(ACCOUNT_ID, self.path, self.cert_name)
class SigningCertificate(BaseModel):
def __init__(self, id, user_name, body):
self.id = id
self.user_name = user_name
self.body = body
self.upload_date = datetime.strftime(datetime.utcnow(), "%Y-%m-%d-%H-%M-%S")
self.status = 'Active'
class AccessKey(BaseModel):
def __init__(self, user_name):
@ -297,6 +323,7 @@ class User(BaseModel):
self.access_keys = []
self.password = None
self.password_reset_required = False
self.signing_certificates = {}
@property
def arn(self):
@ -427,6 +454,7 @@ class IAMBackend(BaseBackend):
self.credential_report = None
self.managed_policies = self._init_managed_policies()
self.account_aliases = []
self.saml_providers = {}
super(IAMBackend, self).__init__()
def _init_managed_policies(self):
@ -765,6 +793,48 @@ class IAMBackend(BaseBackend):
return users
def upload_signing_certificate(self, user_name, body):
user = self.get_user(user_name)
cert_id = random_resource_id(size=32)
# Validate the signing cert:
try:
if sys.version_info < (3, 0):
data = bytes(body)
else:
data = bytes(body, 'utf8')
x509.load_pem_x509_certificate(data, default_backend())
except Exception:
raise MalformedCertificate(body)
user.signing_certificates[cert_id] = SigningCertificate(cert_id, user_name, body)
return user.signing_certificates[cert_id]
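A small sketch of the certificate check performed above with the cryptography library; the PEM body is a placeholder and would need to be a real certificate to pass:

```python
# Hedged sketch of the x509 validation step; the PEM body is a placeholder.
from cryptography import x509
from cryptography.hazmat.backends import default_backend

pem_body = "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----"
try:
    x509.load_pem_x509_certificate(pem_body.encode("utf8"), default_backend())
except Exception:
    # the backend above maps this failure to MalformedCertificate
    print("certificate is malformed")
```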
def delete_signing_certificate(self, user_name, cert_id):
user = self.get_user(user_name)
try:
del user.signing_certificates[cert_id]
except KeyError:
raise IAMNotFoundException("The Certificate with id {id} cannot be found.".format(id=cert_id))
def list_signing_certificates(self, user_name):
user = self.get_user(user_name)
return list(user.signing_certificates.values())
def update_signing_certificate(self, user_name, cert_id, status):
user = self.get_user(user_name)
try:
user.signing_certificates[cert_id].status = status
except KeyError:
raise IAMNotFoundException("The Certificate with id {id} cannot be found.".format(id=cert_id))
def create_login_profile(self, user_name, password):
# This does not currently deal with PasswordPolicyViolation.
user = self.get_user(user_name)
@ -937,5 +1007,33 @@ class IAMBackend(BaseBackend):
'managed_policies': returned_policies
}
def create_saml_provider(self, name, saml_metadata_document):
saml_provider = SAMLProvider(name, saml_metadata_document)
self.saml_providers[name] = saml_provider
return saml_provider
def update_saml_provider(self, saml_provider_arn, saml_metadata_document):
saml_provider = self.get_saml_provider(saml_provider_arn)
saml_provider.saml_metadata_document = saml_metadata_document
return saml_provider
def delete_saml_provider(self, saml_provider_arn):
try:
for saml_provider in list(self.list_saml_providers()):
if saml_provider.arn == saml_provider_arn:
del self.saml_providers[saml_provider.name]
except KeyError:
raise IAMNotFoundException(
"SAMLProvider {0} not found".format(saml_provider_arn))
def list_saml_providers(self):
return self.saml_providers.values()
def get_saml_provider(self, saml_provider_arn):
for saml_provider in self.list_saml_providers():
if saml_provider.arn == saml_provider_arn:
return saml_provider
raise IAMNotFoundException("SamlProvider {0} not found".format(saml_provider_arn))
iam_backend = IAMBackend()
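A minimal sketch of the new SAML provider support via boto3 and the IAM mock; the provider name and metadata document are stand-ins:

```python
# Hedged sketch of the new SAML provider calls; name and metadata are made up.
import boto3
from moto import mock_iam

@mock_iam
def saml_demo():
    iam = boto3.client("iam", region_name="us-east-1")
    arn = iam.create_saml_provider(
        Name="example-idp",
        SAMLMetadataDocument="a" * 1024,  # placeholder metadata blob
    )["SAMLProviderArn"]
    providers = iam.list_saml_providers()["SAMLProviderList"]
    iam.delete_saml_provider(SAMLProviderArn=arn)
    return providers

saml_demo()
```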

View File

@ -201,7 +201,7 @@ class IamResponse(BaseResponse):
def create_instance_profile(self):
profile_name = self._get_param('InstanceProfileName')
path = self._get_param('Path')
path = self._get_param('Path', '/')
profile = iam_backend.create_instance_profile(
profile_name, path, role_ids=[])
@ -552,6 +552,74 @@ class IamResponse(BaseResponse):
roles=account_details['roles']
)
def create_saml_provider(self):
saml_provider_name = self._get_param('Name')
saml_metadata_document = self._get_param('SAMLMetadataDocument')
saml_provider = iam_backend.create_saml_provider(saml_provider_name, saml_metadata_document)
template = self.response_template(CREATE_SAML_PROVIDER_TEMPLATE)
return template.render(saml_provider=saml_provider)
def update_saml_provider(self):
saml_provider_arn = self._get_param('SAMLProviderArn')
saml_metadata_document = self._get_param('SAMLMetadataDocument')
saml_provider = iam_backend.update_saml_provider(saml_provider_arn, saml_metadata_document)
template = self.response_template(UPDATE_SAML_PROVIDER_TEMPLATE)
return template.render(saml_provider=saml_provider)
def delete_saml_provider(self):
saml_provider_arn = self._get_param('SAMLProviderArn')
iam_backend.delete_saml_provider(saml_provider_arn)
template = self.response_template(DELETE_SAML_PROVIDER_TEMPLATE)
return template.render()
def list_saml_providers(self):
saml_providers = iam_backend.list_saml_providers()
template = self.response_template(LIST_SAML_PROVIDERS_TEMPLATE)
return template.render(saml_providers=saml_providers)
def get_saml_provider(self):
saml_provider_arn = self._get_param('SAMLProviderArn')
saml_provider = iam_backend.get_saml_provider(saml_provider_arn)
template = self.response_template(GET_SAML_PROVIDER_TEMPLATE)
return template.render(saml_provider=saml_provider)
def upload_signing_certificate(self):
user_name = self._get_param('UserName')
cert_body = self._get_param('CertificateBody')
cert = iam_backend.upload_signing_certificate(user_name, cert_body)
template = self.response_template(UPLOAD_SIGNING_CERTIFICATE_TEMPLATE)
return template.render(cert=cert)
def update_signing_certificate(self):
user_name = self._get_param('UserName')
cert_id = self._get_param('CertificateId')
status = self._get_param('Status')
iam_backend.update_signing_certificate(user_name, cert_id, status)
template = self.response_template(UPDATE_SIGNING_CERTIFICATE_TEMPLATE)
return template.render()
def delete_signing_certificate(self):
user_name = self._get_param('UserName')
cert_id = self._get_param('CertificateId')
iam_backend.delete_signing_certificate(user_name, cert_id)
template = self.response_template(DELETE_SIGNING_CERTIFICATE_TEMPLATE)
return template.render()
def list_signing_certificates(self):
user_name = self._get_param('UserName')
certs = iam_backend.list_signing_certificates(user_name)
template = self.response_template(LIST_SIGNING_CERTIFICATES_TEMPLATE)
return template.render(user_name=user_name, certificates=certs)
ATTACH_ROLE_POLICY_TEMPLATE = """<AttachRolePolicyResponse>
<ResponseMetadata>
@ -734,7 +802,7 @@ CREATE_INSTANCE_PROFILE_TEMPLATE = """<CreateInstanceProfileResponse xmlns="http
<InstanceProfileName>{{ profile.name }}</InstanceProfileName>
<Path>{{ profile.path }}</Path>
<Arn>{{ profile.arn }}</Arn>
<CreateDate>2012-05-09T16:11:10.222Z</CreateDate>
<CreateDate>{{ profile.create_date }}</CreateDate>
</InstanceProfile>
</CreateInstanceProfileResult>
<ResponseMetadata>
@ -753,7 +821,7 @@ GET_INSTANCE_PROFILE_TEMPLATE = """<GetInstanceProfileResponse xmlns="https://ia
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
<CreateDate>{{ role.create_date }}</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</member>
{% endfor %}
@ -761,7 +829,7 @@ GET_INSTANCE_PROFILE_TEMPLATE = """<GetInstanceProfileResponse xmlns="https://ia
<InstanceProfileName>{{ profile.name }}</InstanceProfileName>
<Path>{{ profile.path }}</Path>
<Arn>{{ profile.arn }}</Arn>
<CreateDate>2012-05-09T16:11:10Z</CreateDate>
<CreateDate>{{ profile.create_date }}</CreateDate>
</InstanceProfile>
</GetInstanceProfileResult>
<ResponseMetadata>
@ -776,7 +844,7 @@ CREATE_ROLE_TEMPLATE = """<CreateRoleResponse xmlns="https://iam.amazonaws.com/d
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-08T23:34:01.495Z</CreateDate>
<CreateDate>{{ role.create_date }}</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</Role>
</CreateRoleResult>
@ -803,7 +871,7 @@ GET_ROLE_TEMPLATE = """<GetRoleResponse xmlns="https://iam.amazonaws.com/doc/201
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-08T23:34:01Z</CreateDate>
<CreateDate>{{ role.create_date }}</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</Role>
</GetRoleResult>
@ -834,7 +902,7 @@ LIST_ROLES_TEMPLATE = """<ListRolesResponse xmlns="https://iam.amazonaws.com/doc
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
<CreateDate>{{ role.create_date }}</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</member>
{% endfor %}
@ -865,7 +933,7 @@ CREATE_POLICY_VERSION_TEMPLATE = """<CreatePolicyVersionResponse xmlns="https://
<Document>{{ policy_version.document }}</Document>
<VersionId>{{ policy_version.version_id }}</VersionId>
<IsDefaultVersion>{{ policy_version.is_default }}</IsDefaultVersion>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
<CreateDate>{{ policy_version.create_datetime }}</CreateDate>
</PolicyVersion>
</CreatePolicyVersionResult>
<ResponseMetadata>
@ -879,7 +947,7 @@ GET_POLICY_VERSION_TEMPLATE = """<GetPolicyVersionResponse xmlns="https://iam.am
<Document>{{ policy_version.document }}</Document>
<VersionId>{{ policy_version.version_id }}</VersionId>
<IsDefaultVersion>{{ policy_version.is_default }}</IsDefaultVersion>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
<CreateDate>{{ policy_version.create_datetime }}</CreateDate>
</PolicyVersion>
</GetPolicyVersionResult>
<ResponseMetadata>
@ -896,7 +964,7 @@ LIST_POLICY_VERSIONS_TEMPLATE = """<ListPolicyVersionsResponse xmlns="https://ia
<Document>{{ policy_version.document }}</Document>
<VersionId>{{ policy_version.version_id }}</VersionId>
<IsDefaultVersion>{{ policy_version.is_default }}</IsDefaultVersion>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
<CreateDate>{{ policy_version.create_datetime }}</CreateDate>
</member>
{% endfor %}
</Versions>
@ -912,7 +980,7 @@ LIST_INSTANCE_PROFILES_TEMPLATE = """<ListInstanceProfilesResponse xmlns="https:
<InstanceProfiles>
{% for instance in instance_profiles %}
<member>
<Id>{{ instance.id }}</Id>
<InstanceProfileId>{{ instance.id }}</InstanceProfileId>
<Roles>
{% for role in instance.roles %}
<member>
@ -920,7 +988,7 @@ LIST_INSTANCE_PROFILES_TEMPLATE = """<ListInstanceProfilesResponse xmlns="https:
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
<CreateDate>{{ role.create_date }}</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</member>
{% endfor %}
@ -928,7 +996,7 @@ LIST_INSTANCE_PROFILES_TEMPLATE = """<ListInstanceProfilesResponse xmlns="https:
<InstanceProfileName>{{ instance.name }}</InstanceProfileName>
<Path>{{ instance.path }}</Path>
<Arn>{{ instance.arn }}</Arn>
<CreateDate>2012-05-09T16:27:03Z</CreateDate>
<CreateDate>{{ instance.create_date }}</CreateDate>
</member>
{% endfor %}
</InstanceProfiles>
@ -1199,8 +1267,8 @@ LIST_USER_POLICIES_TEMPLATE = """<ListUserPoliciesResponse>
<member>{{ policy }}</member>
{% endfor %}
</PolicyNames>
<IsTruncated>false</IsTruncated>
</ListUserPoliciesResult>
<IsTruncated>false</IsTruncated>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
@ -1243,8 +1311,8 @@ LIST_ACCESS_KEYS_TEMPLATE = """<ListAccessKeysResponse>
CREDENTIAL_REPORT_GENERATING = """
<GenerateCredentialReportResponse>
<GenerateCredentialReportResult>
<state>STARTED</state>
<description>No report exists. Starting a new report generation task</description>
<State>STARTED</State>
<Description>No report exists. Starting a new report generation task</Description>
</GenerateCredentialReportResult>
<ResponseMetadata>
<RequestId>fa788a82-aa8a-11e4-a278-1786c418872b"</RequestId>
@ -1253,7 +1321,7 @@ CREDENTIAL_REPORT_GENERATING = """
CREDENTIAL_REPORT_GENERATED = """<GenerateCredentialReportResponse>
<GenerateCredentialReportResult>
<state>COMPLETE</state>
<State>COMPLETE</State>
</GenerateCredentialReportResult>
<ResponseMetadata>
<RequestId>fa788a82-aa8a-11e4-a278-1786c418872b"</RequestId>
@ -1262,7 +1330,7 @@ CREDENTIAL_REPORT_GENERATED = """<GenerateCredentialReportResponse>
CREDENTIAL_REPORT = """<GetCredentialReportResponse>
<GetCredentialReportResult>
<content>{{ report }}</content>
<Content>{{ report }}</Content>
<GeneratedTime>2015-02-02T20:02:02Z</GeneratedTime>
<ReportFormat>text/csv</ReportFormat>
</GetCredentialReportResult>
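The State/Content casing fixes above matter because boto3 parses these fields case-sensitively. An illustrative sketch (not part of the changeset) of exercising the mocked flow, assuming the backend keeps mimicking the asynchronous STARTED/COMPLETE report generation:

import boto3
from moto import mock_iam

@mock_iam
def check_credential_report():
    client = boto3.client("iam", region_name="us-east-1")
    # The mock imitates the async flow: the first call typically reports STARTED,
    # a subsequent call COMPLETE.
    while client.generate_credential_report()["State"] != "COMPLETE":
        pass
    report = client.get_credential_report()
    assert report["ReportFormat"] == "text/csv"
    assert report["Content"].startswith(b"user")  # decoded CSV, header row first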
@ -1277,23 +1345,23 @@ LIST_INSTANCE_PROFILES_FOR_ROLE_TEMPLATE = """<ListInstanceProfilesForRoleRespon
<InstanceProfiles>
{% for profile in instance_profiles %}
<member>
<Id>{{ profile.id }}</Id>
<Roles>
{% for role in profile.roles %}
<member>
<Path>{{ role.path }}</Path>
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</member>
{% endfor %}
</Roles>
<InstanceProfileName>{{ profile.name }}</InstanceProfileName>
<Path>{{ profile.path }}</Path>
<Arn>{{ profile.arn }}</Arn>
<CreateDate>2012-05-09T16:27:11Z</CreateDate>
<InstanceProfileId>{{ profile.id }}</InstanceProfileId>
<Roles>
{% for role in profile.roles %}
<member>
<Path>{{ role.path }}</Path>
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>{{ role.create_date }}</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</member>
{% endfor %}
</Roles>
<InstanceProfileName>{{ profile.name }}</InstanceProfileName>
<Path>{{ profile.path }}</Path>
<Arn>{{ profile.arn }}</Arn>
<CreateDate>{{ profile.create_date }}</CreateDate>
</member>
{% endfor %}
</InstanceProfiles>
@ -1382,7 +1450,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
<Path>{{ user.path }}</Path>
<UserName>{{ user.name }}</UserName>
<Arn>{{ user.arn }}</Arn>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
<CreateDate>{{ user.created_iso_8601 }}</CreateDate>
</member>
{% endfor %}
</UserDetailList>
@ -1401,7 +1469,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
<GroupName>{{ group.name }}</GroupName>
<Path>{{ group.path }}</Path>
<Arn>{{ group.arn }}</Arn>
<CreateDate>2012-05-09T16:27:11Z</CreateDate>
<CreateDate>{{ group.create_date }}</CreateDate>
<GroupPolicyList/>
</member>
{% endfor %}
@ -1421,23 +1489,23 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
<InstanceProfileList>
{% for profile in instance_profiles %}
<member>
<Id>{{ profile.id }}</Id>
<Roles>
{% for role in profile.roles %}
<member>
<Path>{{ role.path }}</Path>
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</member>
{% endfor %}
</Roles>
<InstanceProfileName>{{ profile.name }}</InstanceProfileName>
<Path>{{ profile.path }}</Path>
<Arn>{{ profile.arn }}</Arn>
<CreateDate>2012-05-09T16:27:11Z</CreateDate>
<InstanceProfileId>{{ profile.id }}</InstanceProfileId>
<Roles>
{% for role in profile.roles %}
<member>
<Path>{{ role.path }}</Path>
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>{{ role.create_date }}</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</member>
{% endfor %}
</Roles>
<InstanceProfileName>{{ profile.name }}</InstanceProfileName>
<Path>{{ profile.path }}</Path>
<Arn>{{ profile.arn }}</Arn>
<CreateDate>{{ profile.create_date }}</CreateDate>
</member>
{% endfor %}
</InstanceProfileList>
@ -1445,7 +1513,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2014-07-30T17:09:20Z</CreateDate>
<CreateDate>{{ role.create_date }}</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</member>
{% endfor %}
@ -1474,9 +1542,9 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
</PolicyVersionList>
<Arn>{{ policy.arn }}</Arn>
<AttachmentCount>1</AttachmentCount>
<CreateDate>2012-05-09T16:27:11Z</CreateDate>
<CreateDate>{{ policy.create_datetime }}</CreateDate>
<IsAttachable>true</IsAttachable>
<UpdateDate>2012-05-09T16:27:11Z</UpdateDate>
<UpdateDate>{{ policy.update_datetime }}</UpdateDate>
</member>
{% endfor %}
</Policies>
@ -1485,3 +1553,105 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
<RequestId>92e79ae7-7399-11e4-8c85-4b53eEXAMPLE</RequestId>
</ResponseMetadata>
</GetAccountAuthorizationDetailsResponse>"""
CREATE_SAML_PROVIDER_TEMPLATE = """<CreateSAMLProviderResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<CreateSAMLProviderResult>
<SAMLProviderArn>{{ saml_provider.arn }}</SAMLProviderArn>
</CreateSAMLProviderResult>
<ResponseMetadata>
<RequestId>29f47818-99f5-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</CreateSAMLProviderResponse>"""
LIST_SAML_PROVIDERS_TEMPLATE = """<ListSAMLProvidersResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ListSAMLProvidersResult>
<SAMLProviderList>
{% for saml_provider in saml_providers %}
<member>
<Arn>{{ saml_provider.arn }}</Arn>
<ValidUntil>2032-05-09T16:27:11Z</ValidUntil>
<CreateDate>2012-05-09T16:27:03Z</CreateDate>
</member>
{% endfor %}
</SAMLProviderList>
</ListSAMLProvidersResult>
<ResponseMetadata>
<RequestId>fd74fa8d-99f3-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</ListSAMLProvidersResponse>"""
GET_SAML_PROVIDER_TEMPLATE = """<GetSAMLProviderResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<GetSAMLProviderResult>
<CreateDate>2012-05-09T16:27:11Z</CreateDate>
<ValidUntil>2015-12-31T21:59:59Z</ValidUntil>
<SAMLMetadataDocument>{{ saml_provider.saml_metadata_document }}</SAMLMetadataDocument>
</GetSAMLProviderResult>
<ResponseMetadata>
<RequestId>29f47818-99f5-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</GetSAMLProviderResponse>"""
DELETE_SAML_PROVIDER_TEMPLATE = """<DeleteSAMLProviderResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ResponseMetadata>
<RequestId>c749ee7f-99ef-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</DeleteSAMLProviderResponse>"""
UPDATE_SAML_PROVIDER_TEMPLATE = """<UpdateSAMLProviderResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<UpdateSAMLProviderResult>
<SAMLProviderArn>{{ saml_provider.arn }}</SAMLProviderArn>
</UpdateSAMLProviderResult>
<ResponseMetadata>
<RequestId>29f47818-99f5-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</UpdateSAMLProviderResponse>"""
UPLOAD_SIGNING_CERTIFICATE_TEMPLATE = """<UploadSigningCertificateResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<UploadSigningCertificateResult>
<Certificate>
<UserName>{{ cert.user_name }}</UserName>
<CertificateId>{{ cert.id }}</CertificateId>
<CertificateBody>{{ cert.body }}</CertificateBody>
<Status>{{ cert.status }}</Status>
</Certificate>
</UploadSigningCertificateResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</UploadSigningCertificateResponse>"""
UPDATE_SIGNING_CERTIFICATE_TEMPLATE = """<UpdateSigningCertificateResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ResponseMetadata>
<RequestId>EXAMPLE8-90ab-cdef-fedc-ba987EXAMPLE</RequestId>
</ResponseMetadata>
</UpdateSigningCertificateResponse>"""
DELETE_SIGNING_CERTIFICATE_TEMPLATE = """<DeleteSigningCertificateResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</DeleteSigningCertificateResponse>"""
LIST_SIGNING_CERTIFICATES_TEMPLATE = """<ListSigningCertificatesResponse>
<ListSigningCertificatesResult>
<UserName>{{ user_name }}</UserName>
<Certificates>
{% for cert in certificates %}
<member>
<UserName>{{ user_name }}</UserName>
<CertificateId>{{ cert.id }}</CertificateId>
<CertificateBody>{{ cert.body }}</CertificateBody>
<Status>{{ cert.status }}</Status>
</member>
{% endfor %}
</Certificates>
<IsTruncated>false</IsTruncated>
</ListSigningCertificatesResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListSigningCertificatesResponse>"""
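An illustrative round trip (not part of the changeset) through the new SAML-provider templates above; the metadata document is a dummy string padded to satisfy the API's minimum length:

import boto3
from moto import mock_iam

@mock_iam
def saml_provider_lifecycle():
    client = boto3.client("iam", region_name="us-east-1")
    metadata = "a" * 1024  # placeholder; SAMLMetadataDocument must be >= 1000 characters
    arn = client.create_saml_provider(Name="corp-idp", SAMLMetadataDocument=metadata)["SAMLProviderArn"]
    assert client.list_saml_providers()["SAMLProviderList"][0]["Arn"] == arn
    assert client.get_saml_provider(SAMLProviderArn=arn)["SAMLMetadataDocument"] == metadata
    client.update_saml_provider(SAMLProviderArn=arn, SAMLMetadataDocument="b" * 1024)
    client.delete_saml_provider(SAMLProviderArn=arn)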

View File

@ -12,8 +12,7 @@ def random_alphanumeric(length):
)
def random_resource_id():
size = 20
def random_resource_id(size=20):
chars = list(range(10)) + list(string.ascii_lowercase)
return ''.join(six.text_type(random.choice(chars)) for x in range(size))

View File

@ -31,3 +31,20 @@ class VersionConflictException(IoTClientError):
'VersionConflictException',
'The version for thing %s does not match the expected version.' % name
)
class CertificateStateException(IoTClientError):
def __init__(self, msg, cert_id):
self.code = 406
super(CertificateStateException, self).__init__(
'CertificateStateException',
'%s Id: %s' % (msg, cert_id)
)
class DeleteConflictException(IoTClientError):
def __init__(self, msg):
self.code = 409
super(DeleteConflictException, self).__init__(
'DeleteConflictException', msg
)

View File

@ -13,6 +13,8 @@ import boto3
from moto.core import BaseBackend, BaseModel
from .exceptions import (
CertificateStateException,
DeleteConflictException,
ResourceNotFoundException,
InvalidRequestException,
VersionConflictException
@ -378,7 +380,25 @@ class IoTBackend(BaseBackend):
return certificate, key_pair
def delete_certificate(self, certificate_id):
self.describe_certificate(certificate_id)
cert = self.describe_certificate(certificate_id)
if cert.status == 'ACTIVE':
raise CertificateStateException(
'Certificate must be deactivated (not ACTIVE) before deletion.', certificate_id)
certs = [k[0] for k, v in self.principal_things.items()
if self._get_principal(k[0]).certificate_id == certificate_id]
if len(certs) > 0:
raise DeleteConflictException(
'Things must be detached before deletion (arn: %s)' % certs[0]
)
certs = [k[0] for k, v in self.principal_policies.items()
if self._get_principal(k[0]).certificate_id == certificate_id]
if len(certs) > 0:
raise DeleteConflictException(
'Certificate policies must be detached before deletion (arn: %s)' % certs[0]
)
del self.certificates[certificate_id]
def describe_certificate(self, certificate_id):
@ -411,6 +431,14 @@ class IoTBackend(BaseBackend):
return policies[0]
def delete_policy(self, policy_name):
policies = [k[1] for k, v in self.principal_policies.items() if k[1] == policy_name]
if len(policies) > 0:
raise DeleteConflictException(
'The policy cannot be deleted as the policy is attached to one or more principals (name=%s)'
% policy_name
)
policy = self.get_policy(policy_name)
del self.policies[policy.name]
@ -429,6 +457,14 @@ class IoTBackend(BaseBackend):
pass
raise ResourceNotFoundException()
def attach_policy(self, policy_name, target):
principal = self._get_principal(target)
policy = self.get_policy(policy_name)
k = (target, policy_name)
if k in self.principal_policies:
return
self.principal_policies[k] = (principal, policy)
def attach_principal_policy(self, policy_name, principal_arn):
principal = self._get_principal(principal_arn)
policy = self.get_policy(policy_name)
@ -437,6 +473,15 @@ class IoTBackend(BaseBackend):
return
self.principal_policies[k] = (principal, policy)
def detach_policy(self, policy_name, target):
# this may raise ResourceNotFoundException
self._get_principal(target)
self.get_policy(policy_name)
k = (target, policy_name)
if k not in self.principal_policies:
raise ResourceNotFoundException()
del self.principal_policies[k]
def detach_principal_policy(self, policy_name, principal_arn):
# this may raise ResourceNotFoundException
self._get_principal(principal_arn)
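A usage sketch of the new attach_policy/detach_policy handlers and the stricter delete_certificate checks (illustrative only; it assumes the mock accepts an arbitrary policy document):

import boto3
from moto import mock_iot

@mock_iot
def certificate_cleanup_order():
    client = boto3.client("iot", region_name="us-east-1")
    cert = client.create_keys_and_certificate(setAsActive=True)
    client.create_policy(policyName="my-policy", policyDocument="{}")
    # attach_policy mirrors attach_principal_policy but takes a 'target' parameter
    client.attach_policy(policyName="my-policy", target=cert["certificateArn"])
    # An ACTIVE certificate, or one with attached policies, can no longer be deleted:
    client.update_certificate(certificateId=cert["certificateId"], newStatus="INACTIVE")
    client.detach_policy(policyName="my-policy", target=cert["certificateArn"])
    client.delete_certificate(certificateId=cert["certificateId"])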

View File

@ -224,6 +224,15 @@ class IoTResponse(BaseResponse):
)
return json.dumps(dict())
def attach_policy(self):
policy_name = self._get_param("policyName")
target = self._get_param('target')
self.iot_backend.attach_policy(
policy_name=policy_name,
target=target,
)
return json.dumps(dict())
def attach_principal_policy(self):
policy_name = self._get_param("policyName")
principal = self.headers.get('x-amzn-iot-principal')
@ -233,6 +242,15 @@ class IoTResponse(BaseResponse):
)
return json.dumps(dict())
def detach_policy(self):
policy_name = self._get_param("policyName")
target = self._get_param('target')
self.iot_backend.detach_policy(
policy_name=policy_name,
target=target,
)
return json.dumps(dict())
def detach_principal_policy(self):
policy_name = self._get_param("policyName")
principal = self.headers.get('x-amzn-iot-principal')

View File

@ -123,6 +123,9 @@ class Route53(BaseResponse):
""" % (record_set['Name'], the_zone.name)
return 400, headers, error_msg
if not record_set['Name'].endswith('.'):
record_set['Name'] += '.'
if action in ('CREATE', 'UPSERT'):
if 'ResourceRecords' in record_set:
resource_records = list(
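The two added lines normalise record names to the canonical trailing-dot form. An illustrative sketch (not from the changeset) of the observable effect:

import boto3
from moto import mock_route53

@mock_route53
def record_names_gain_trailing_dot():
    client = boto3.client("route53", region_name="us-east-1")
    zone_id = client.create_hosted_zone(Name="example.com", CallerReference="ref-1")["HostedZone"]["Id"]
    client.change_resource_record_sets(
        HostedZoneId=zone_id,
        ChangeBatch={"Changes": [{
            "Action": "CREATE",
            "ResourceRecordSet": {
                "Name": "foo.example.com",  # no trailing dot supplied
                "Type": "A",
                "TTL": 60,
                "ResourceRecords": [{"Value": "1.2.3.4"}],
            },
        }]},
    )
    records = client.list_resource_record_sets(HostedZoneId=zone_id)["ResourceRecordSets"]
    assert any(r["Name"] == "foo.example.com." for r in records)  # stored with the dot appended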

View File

@ -179,7 +179,7 @@ class InvalidStorageClass(S3ClientError):
"The storage class you specified is not valid",
*args, **kwargs)
class InvalidBucketName(S3ClientError):
code = 400
@ -187,4 +187,13 @@ class InvalidBucketName(S3ClientError):
super(InvalidBucketName, self).__init__(
"InvalidBucketName",
"The specified bucket is not valid.",
class DuplicateTagKeys(S3ClientError):
code = 400
def __init__(self, *args, **kwargs):
super(DuplicateTagKeys, self).__init__(
"InvalidTag",
"Cannot provide multiple Tags with the same key",
*args, **kwargs)

View File

@ -15,7 +15,7 @@ from bisect import insort
from moto.core import BaseBackend, BaseModel
from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime
from .exceptions import BucketAlreadyExists, MissingBucket, InvalidBucketName, InvalidPart, \
EntityTooSmall, MissingKey, InvalidNotificationDestination, MalformedXML, InvalidStorageClass
EntityTooSmall, MissingKey, InvalidNotificationDestination, MalformedXML, InvalidStorageClass, DuplicateTagKeys
from .utils import clean_key_name, _VersionedKeyStore
MAX_BUCKET_NAME_LENGTH = 63
@ -777,6 +777,9 @@ class S3Backend(BaseBackend):
return key
def put_bucket_tagging(self, bucket_name, tagging):
tag_keys = [tag.key for tag in tagging.tag_set.tags]
if len(tag_keys) != len(set(tag_keys)):
raise DuplicateTagKeys()
bucket = self.get_bucket(bucket_name)
bucket.set_tags(tagging)
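With the new DuplicateTagKeys check, repeated tag keys are rejected the way real S3 rejects them. A short illustrative sketch (not from the changeset):

import boto3
from botocore.exceptions import ClientError
from moto import mock_s3

@mock_s3
def duplicate_tag_keys_are_rejected():
    client = boto3.client("s3", region_name="us-east-1")
    client.create_bucket(Bucket="mybucket")
    try:
        client.put_bucket_tagging(
            Bucket="mybucket",
            Tagging={"TagSet": [{"Key": "env", "Value": "dev"},
                                {"Key": "env", "Value": "prod"}]},
        )
        raise AssertionError("duplicate tag keys should have been rejected")
    except ClientError as err:
        assert err.response["Error"]["Code"] == "InvalidTag"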

View File

@ -193,7 +193,13 @@ class ResponseObject(_TemplateEnvironmentMixin):
elif 'location' in querystring:
bucket = self.backend.get_bucket(bucket_name)
template = self.response_template(S3_BUCKET_LOCATION)
return template.render(location=bucket.location)
location = bucket.location
# us-east-1 is different - returns a None location
if location == DEFAULT_REGION_NAME:
location = None
return template.render(location=location)
elif 'lifecycle' in querystring:
bucket = self.backend.get_bucket(bucket_name)
if not bucket.rules:
@ -432,8 +438,19 @@ class ResponseObject(_TemplateEnvironmentMixin):
else:
if body:
# us-east-1, the default AWS region, behaves a bit differently
# - you should not use it as a location constraint --> it fails
# - querying the location constraint returns None
try:
region_name = xmltodict.parse(body)['CreateBucketConfiguration']['LocationConstraint']
forced_region = xmltodict.parse(body)['CreateBucketConfiguration']['LocationConstraint']
if forced_region == DEFAULT_REGION_NAME:
raise S3ClientError(
'InvalidLocationConstraint',
'The specified location-constraint is not valid'
)
else:
region_name = forced_region
except KeyError:
pass
@ -1176,7 +1193,7 @@ S3_DELETE_BUCKET_WITH_ITEMS_ERROR = """<?xml version="1.0" encoding="UTF-8"?>
</Error>"""
S3_BUCKET_LOCATION = """<?xml version="1.0" encoding="UTF-8"?>
<LocationConstraint xmlns="http://s3.amazonaws.com/doc/2006-03-01/">{{ location }}</LocationConstraint>"""
<LocationConstraint xmlns="http://s3.amazonaws.com/doc/2006-03-01/">{% if location != None %}{{ location }}{% endif %}</LocationConstraint>"""
S3_BUCKET_LIFECYCLE_CONFIGURATION = """<?xml version="1.0" encoding="UTF-8"?>
<LifecycleConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
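A sketch of the us-east-1 special-casing introduced above (illustrative only; the empty LocationConstraint may come back as None or an empty string depending on the parser):

import boto3
from botocore.exceptions import ClientError
from moto import mock_s3

@mock_s3
def us_east_1_location_behaviour():
    client = boto3.client("s3", region_name="us-east-1")
    client.create_bucket(Bucket="default-region-bucket")  # no LocationConstraint needed
    location = client.get_bucket_location(Bucket="default-region-bucket")["LocationConstraint"]
    assert not location  # None/empty, matching real us-east-1 buckets
    try:
        client.create_bucket(
            Bucket="another-bucket",
            CreateBucketConfiguration={"LocationConstraint": "us-east-1"},
        )
        raise AssertionError("us-east-1 must not be usable as an explicit constraint")
    except ClientError as err:
        assert err.response["Error"]["Code"] == "InvalidLocationConstraint"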

View File

@ -2,6 +2,7 @@ from __future__ import unicode_literals
import time
import json
import uuid
import boto3
@ -18,10 +19,6 @@ class SecretsManager(BaseModel):
def __init__(self, region_name, **kwargs):
self.region = region_name
self.secret_id = kwargs.get('secret_id', '')
self.version_id = kwargs.get('version_id', '')
self.version_stage = kwargs.get('version_stage', '')
self.secret_string = ''
class SecretsManagerBackend(BaseBackend):
@ -29,14 +26,7 @@ class SecretsManagerBackend(BaseBackend):
def __init__(self, region_name=None, **kwargs):
super(SecretsManagerBackend, self).__init__()
self.region = region_name
self.secret_id = kwargs.get('secret_id', '')
self.name = kwargs.get('name', '')
self.createdate = int(time.time())
self.secret_string = ''
self.rotation_enabled = False
self.rotation_lambda_arn = ''
self.auto_rotate_after_days = 0
self.version_id = ''
self.secrets = {}
def reset(self):
region_name = self.region
@ -44,36 +34,50 @@ class SecretsManagerBackend(BaseBackend):
self.__init__(region_name)
def _is_valid_identifier(self, identifier):
return identifier in (self.name, self.secret_id)
return identifier in self.secrets
def get_secret_value(self, secret_id, version_id, version_stage):
if not self._is_valid_identifier(secret_id):
raise ResourceNotFoundException()
secret = self.secrets[secret_id]
response = json.dumps({
"ARN": secret_arn(self.region, self.secret_id),
"Name": self.name,
"VersionId": "A435958A-D821-4193-B719-B7769357AER4",
"SecretString": self.secret_string,
"ARN": secret_arn(self.region, secret['secret_id']),
"Name": secret['name'],
"VersionId": secret['version_id'],
"SecretString": secret['secret_string'],
"VersionStages": [
"AWSCURRENT",
],
"CreatedDate": "2018-05-23 13:16:57.198000"
"CreatedDate": secret['createdate']
})
return response
def create_secret(self, name, secret_string, **kwargs):
def create_secret(self, name, secret_string, tags, **kwargs):
self.secret_string = secret_string
self.secret_id = name
self.name = name
generated_version_id = str(uuid.uuid4())
secret = {
'secret_string': secret_string,
'secret_id': name,
'name': name,
'createdate': int(time.time()),
'rotation_enabled': False,
'rotation_lambda_arn': '',
'auto_rotate_after_days': 0,
'version_id': generated_version_id,
'tags': tags
}
self.secrets[name] = secret
response = json.dumps({
"ARN": secret_arn(self.region, name),
"Name": self.name,
"VersionId": "A435958A-D821-4193-B719-B7769357AER4",
"Name": name,
"VersionId": generated_version_id,
})
return response
@ -82,26 +86,23 @@ class SecretsManagerBackend(BaseBackend):
if not self._is_valid_identifier(secret_id):
raise ResourceNotFoundException
secret = self.secrets[secret_id]
response = json.dumps({
"ARN": secret_arn(self.region, self.secret_id),
"Name": self.name,
"ARN": secret_arn(self.region, secret['secret_id']),
"Name": secret['name'],
"Description": "",
"KmsKeyId": "",
"RotationEnabled": self.rotation_enabled,
"RotationLambdaARN": self.rotation_lambda_arn,
"RotationEnabled": secret['rotation_enabled'],
"RotationLambdaARN": secret['rotation_lambda_arn'],
"RotationRules": {
"AutomaticallyAfterDays": self.auto_rotate_after_days
"AutomaticallyAfterDays": secret['auto_rotate_after_days']
},
"LastRotatedDate": None,
"LastChangedDate": None,
"LastAccessedDate": None,
"DeletedDate": None,
"Tags": [
{
"Key": "",
"Value": ""
},
]
"Tags": secret['tags']
})
return response
@ -141,17 +142,19 @@ class SecretsManagerBackend(BaseBackend):
)
raise InvalidParameterException(msg)
self.version_id = client_request_token or ''
self.rotation_lambda_arn = rotation_lambda_arn or ''
secret = self.secrets[secret_id]
secret['version_id'] = client_request_token or ''
secret['rotation_lambda_arn'] = rotation_lambda_arn or ''
if rotation_rules:
self.auto_rotate_after_days = rotation_rules.get(rotation_days, 0)
if self.auto_rotate_after_days > 0:
self.rotation_enabled = True
secret['auto_rotate_after_days'] = rotation_rules.get(rotation_days, 0)
if secret['auto_rotate_after_days'] > 0:
secret['rotation_enabled'] = True
response = json.dumps({
"ARN": secret_arn(self.region, self.secret_id),
"Name": self.name,
"VersionId": self.version_id
"ARN": secret_arn(self.region, secret['secret_id']),
"Name": secret['name'],
"VersionId": secret['version_id']
})
return response
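With the backend now keyed by secret name, several secrets can coexist, each with its own generated version id and tags. An illustrative sketch (not part of the changeset):

import boto3
from moto import mock_secretsmanager

@mock_secretsmanager
def secrets_are_tracked_independently():
    client = boto3.client("secretsmanager", region_name="us-west-2")
    created = client.create_secret(Name="db-password", SecretString="hunter2",
                                   Tags=[{"Key": "team", "Value": "data"}])
    client.create_secret(Name="api-key", SecretString="abc123")
    value = client.get_secret_value(SecretId="db-password")
    assert value["SecretString"] == "hunter2"
    assert value["VersionId"] == created["VersionId"]  # the uuid generated per secret
    assert client.describe_secret(SecretId="db-password")["Tags"] == [{"Key": "team", "Value": "data"}]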

View File

@ -19,9 +19,11 @@ class SecretsManagerResponse(BaseResponse):
def create_secret(self):
name = self._get_param('Name')
secret_string = self._get_param('SecretString')
tags = self._get_param('Tags', if_none=[])
return secretsmanager_backends[self.region].create_secret(
name=name,
secret_string=secret_string
secret_string=secret_string,
tags=tags
)
def get_random_password(self):

View File

@ -52,8 +52,9 @@ def random_password(password_length, exclude_characters, exclude_numbers,
def secret_arn(region, secret_id):
return "arn:aws:secretsmanager:{0}:1234567890:secret:{1}-rIjad".format(
region, secret_id)
id_string = ''.join(random.choice(string.ascii_letters) for _ in range(5))
return "arn:aws:secretsmanager:{0}:1234567890:secret:{1}-{2}".format(
region, secret_id, id_string)
def _exclude_characters(password, exclude_characters):

View File

@ -80,10 +80,13 @@ class DomainDispatcherApplication(object):
region = 'us-east-1'
if service == 'dynamodb':
dynamo_api_version = environ['HTTP_X_AMZ_TARGET'].split("_")[1].split(".")[0]
# If Newer API version, use dynamodb2
if dynamo_api_version > "20111205":
host = "dynamodb2"
if environ['HTTP_X_AMZ_TARGET'].startswith('DynamoDBStreams'):
host = 'dynamodbstreams'
else:
dynamo_api_version = environ['HTTP_X_AMZ_TARGET'].split("_")[1].split(".")[0]
# If Newer API version, use dynamodb2
if dynamo_api_version > "20111205":
host = "dynamodb2"
else:
host = "{service}.{region}.amazonaws.com".format(
service=service, region=region)

View File

@ -534,7 +534,7 @@ class SQSBackend(BaseBackend):
break
import time
time.sleep(0.001)
time.sleep(0.01)
continue
previous_result_count = len(result)

View File

@ -14,10 +14,12 @@ import itertools
class Parameter(BaseModel):
def __init__(self, name, value, type, description, keyid, last_modified_date, version):
def __init__(self, name, value, type, description, allowed_pattern, keyid,
last_modified_date, version):
self.name = name
self.type = type
self.description = description
self.allowed_pattern = allowed_pattern
self.keyid = keyid
self.last_modified_date = last_modified_date
self.version = version
@ -58,6 +60,10 @@ class Parameter(BaseModel):
if self.keyid:
r['KeyId'] = self.keyid
if self.allowed_pattern:
r['AllowedPattern'] = self.allowed_pattern
return r
@ -291,7 +297,8 @@ class SimpleSystemManagerBackend(BaseBackend):
return self._parameters[name]
return None
def put_parameter(self, name, description, value, type, keyid, overwrite):
def put_parameter(self, name, description, value, type, allowed_pattern,
keyid, overwrite):
previous_parameter = self._parameters.get(name)
version = 1
@ -302,8 +309,8 @@ class SimpleSystemManagerBackend(BaseBackend):
return
last_modified_date = time.time()
self._parameters[name] = Parameter(
name, value, type, description, keyid, last_modified_date, version)
self._parameters[name] = Parameter(name, value, type, description,
allowed_pattern, keyid, last_modified_date, version)
return version
def add_tags_to_resource(self, resource_type, resource_id, tags):
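A sketch of the new AllowedPattern plumbing (illustrative; it assumes the patched response object is the one backing describe_parameters):

import boto3
from moto import mock_ssm

@mock_ssm
def allowed_pattern_round_trips():
    client = boto3.client("ssm", region_name="us-east-1")
    client.put_parameter(Name="/app/port", Value="8080", Type="String", AllowedPattern=r"^\d+$")
    params = client.describe_parameters()["Parameters"]
    assert params[0]["AllowedPattern"] == r"^\d+$"  # echoed back once stored on the Parameter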

View File

@ -160,11 +160,12 @@ class SimpleSystemManagerResponse(BaseResponse):
description = self._get_param('Description')
value = self._get_param('Value')
type_ = self._get_param('Type')
allowed_pattern = self._get_param('AllowedPattern')
keyid = self._get_param('KeyId')
overwrite = self._get_param('Overwrite', False)
result = self.ssm_backend.put_parameter(
name, description, value, type_, keyid, overwrite)
name, description, value, type_, allowed_pattern, keyid, overwrite)
if result is None:
error = {

View File

@ -1,10 +1,23 @@
#!/usr/bin/env python
from __future__ import unicode_literals
import codecs
import os
import re
import setuptools
from setuptools import setup, find_packages
import sys
# Borrowed from pip at https://github.com/pypa/pip/blob/62c27dee45625e1b63d1e023b0656310f276e050/setup.py#L11-L15
here = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
with codecs.open(os.path.join(here, *parts), 'r') as fp:
return fp.read()
install_requires = [
"Jinja2>=2.7.3",
"boto>=2.36.0",
@ -18,12 +31,13 @@ install_requires = [
"pyaml",
"pytz",
"python-dateutil<3.0.0,>=2.1",
"python-jose<3.0.0",
"python-jose<4.0.0",
"mock",
"docker>=2.5.1",
"jsondiff==1.1.1",
"jsondiff==1.1.2",
"aws-xray-sdk!=0.96,>=0.93",
"responses>=0.9.0",
"cfn-lint"
]
extras_require = {
@ -43,6 +57,8 @@ setup(
version='1.3.7',
description='A library that allows your python tests to easily'
' mock out the boto library',
long_description=read('README.md'),
long_description_content_type='text/markdown',
author='Steve Pulec',
author_email='spulec@gmail.com',
url='https://github.com/spulec/moto',

View File

@ -391,6 +391,9 @@ def test_create_change_set_from_s3_url():
TemplateURL=key_url,
ChangeSetName='NewChangeSet',
ChangeSetType='CREATE',
Tags=[
{'Key': 'tag-key', 'Value': 'tag-value'}
],
)
assert 'arn:aws:cloudformation:us-west-1:123456789:changeSet/NewChangeSet/' in response['Id']
assert 'arn:aws:cloudformation:us-east-1:123456789:stack/NewStack' in response['StackId']

View File

@ -0,0 +1,115 @@
from collections import OrderedDict
import json
import yaml
import os
import boto3
from nose.tools import raises
import botocore
from moto.cloudformation.exceptions import ValidationError
from moto.cloudformation.models import FakeStack
from moto.cloudformation.parsing import resource_class_from_type, parse_condition, Export
from moto.sqs.models import Queue
from moto.s3.models import FakeBucket
from moto.cloudformation.utils import yaml_tag_constructor
from boto.cloudformation.stack import Output
from moto import mock_cloudformation, mock_s3, mock_sqs, mock_ec2
json_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Description": "Stack 1",
"Resources": {
"EC2Instance1": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId": "ami-d3adb33f",
"KeyName": "dummy",
"InstanceType": "t2.micro",
"Tags": [
{
"Key": "Description",
"Value": "Test tag"
},
{
"Key": "Name",
"Value": "Name tag for tests"
}
]
}
}
}
}
# One resource is required
json_bad_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Description": "Stack 1"
}
dummy_template_json = json.dumps(json_template)
dummy_bad_template_json = json.dumps(json_bad_template)
@mock_cloudformation
def test_boto3_json_validate_successful():
cf_conn = boto3.client('cloudformation', region_name='us-east-1')
response = cf_conn.validate_template(
TemplateBody=dummy_template_json,
)
assert response['Description'] == "Stack 1"
assert response['Parameters'] == []
assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@mock_cloudformation
def test_boto3_json_invalid_missing_resource():
cf_conn = boto3.client('cloudformation', region_name='us-east-1')
try:
cf_conn.validate_template(
TemplateBody=dummy_bad_template_json,
)
assert False
except botocore.exceptions.ClientError as e:
assert str(e) == 'An error occurred (ValidationError) when calling the ValidateTemplate operation: Stack' \
' with id Missing top level item Resources to file module does not exist'
assert True
yaml_template = """
AWSTemplateFormatVersion: '2010-09-09'
Description: Simple CloudFormation Test Template
Resources:
S3Bucket:
Type: AWS::S3::Bucket
Properties:
AccessControl: PublicRead
BucketName: cf-test-bucket-1
"""
yaml_bad_template = """
AWSTemplateFormatVersion: '2010-09-09'
Description: Simple CloudFormation Test Template
"""
@mock_cloudformation
def test_boto3_yaml_validate_successful():
cf_conn = boto3.client('cloudformation', region_name='us-east-1')
response = cf_conn.validate_template(
TemplateBody=yaml_template,
)
assert response['Description'] == "Simple CloudFormation Test Template"
assert response['Parameters'] == []
assert response['ResponseMetadata']['HTTPStatusCode'] == 200
@mock_cloudformation
def test_boto3_yaml_invalid_missing_resource():
cf_conn = boto3.client('cloudformation', region_name='us-east-1')
try:
cf_conn.validate_template(
TemplateBody=yaml_bad_template,
)
assert False
except botocore.exceptions.ClientError as e:
assert str(e) == 'An error occurred (ValidationError) when calling the ValidateTemplate operation: Stack' \
' with id Missing top level item Resources to file module does not exist'
assert True

View File

@ -1,14 +1,18 @@
from __future__ import unicode_literals
import boto3
import json
import os
import random
import uuid
import boto3
# noinspection PyUnresolvedReferences
import sure # noqa
from botocore.exceptions import ClientError
from jose import jws
from nose.tools import assert_raises
from moto import mock_cognitoidp
import sure # noqa
@mock_cognitoidp
@ -41,6 +45,56 @@ def test_list_user_pools():
result["UserPools"][0]["Name"].should.equal(name)
@mock_cognitoidp
def test_list_user_pools_returns_max_items():
conn = boto3.client("cognito-idp", "us-west-2")
# Given 10 user pools
pool_count = 10
for i in range(pool_count):
conn.create_user_pool(PoolName=str(uuid.uuid4()))
max_results = 5
result = conn.list_user_pools(MaxResults=max_results)
result["UserPools"].should.have.length_of(max_results)
result.should.have.key("NextToken")
@mock_cognitoidp
def test_list_user_pools_returns_next_tokens():
conn = boto3.client("cognito-idp", "us-west-2")
# Given 10 user pools
pool_count = 10
for i in range(pool_count):
conn.create_user_pool(PoolName=str(uuid.uuid4()))
max_results = 5
result = conn.list_user_pools(MaxResults=max_results)
result["UserPools"].should.have.length_of(max_results)
result.should.have.key("NextToken")
next_token = result["NextToken"]
result_2 = conn.list_user_pools(MaxResults=max_results, NextToken=next_token)
result_2["UserPools"].should.have.length_of(max_results)
result_2.shouldnt.have.key("NextToken")
@mock_cognitoidp
def test_list_user_pools_when_max_items_more_than_total_items():
conn = boto3.client("cognito-idp", "us-west-2")
# Given 10 user pools
pool_count = 10
for i in range(pool_count):
conn.create_user_pool(PoolName=str(uuid.uuid4()))
max_results = pool_count + 5
result = conn.list_user_pools(MaxResults=max_results)
result["UserPools"].should.have.length_of(pool_count)
result.shouldnt.have.key("NextToken")
@mock_cognitoidp
def test_describe_user_pool():
conn = boto3.client("cognito-idp", "us-west-2")
@ -140,6 +194,67 @@ def test_list_user_pool_clients():
result["UserPoolClients"][0]["ClientName"].should.equal(client_name)
@mock_cognitoidp
def test_list_user_pool_clients_returns_max_items():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
# Given 10 user pool clients
client_count = 10
for i in range(client_count):
client_name = str(uuid.uuid4())
conn.create_user_pool_client(UserPoolId=user_pool_id,
ClientName=client_name)
max_results = 5
result = conn.list_user_pool_clients(UserPoolId=user_pool_id,
MaxResults=max_results)
result["UserPoolClients"].should.have.length_of(max_results)
result.should.have.key("NextToken")
@mock_cognitoidp
def test_list_user_pool_clients_returns_next_tokens():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
# Given 10 user pool clients
client_count = 10
for i in range(client_count):
client_name = str(uuid.uuid4())
conn.create_user_pool_client(UserPoolId=user_pool_id,
ClientName=client_name)
max_results = 5
result = conn.list_user_pool_clients(UserPoolId=user_pool_id,
MaxResults=max_results)
result["UserPoolClients"].should.have.length_of(max_results)
result.should.have.key("NextToken")
next_token = result["NextToken"]
result_2 = conn.list_user_pool_clients(UserPoolId=user_pool_id,
MaxResults=max_results,
NextToken=next_token)
result_2["UserPoolClients"].should.have.length_of(max_results)
result_2.shouldnt.have.key("NextToken")
@mock_cognitoidp
def test_list_user_pool_clients_when_max_items_more_than_total_items():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
# Given 10 user pool clients
client_count = 10
for i in range(client_count):
client_name = str(uuid.uuid4())
conn.create_user_pool_client(UserPoolId=user_pool_id,
ClientName=client_name)
max_results = client_count + 5
result = conn.list_user_pool_clients(UserPoolId=user_pool_id,
MaxResults=max_results)
result["UserPoolClients"].should.have.length_of(client_count)
result.shouldnt.have.key("NextToken")
@mock_cognitoidp
def test_describe_user_pool_client():
conn = boto3.client("cognito-idp", "us-west-2")
@ -264,6 +379,83 @@ def test_list_identity_providers():
result["Providers"][0]["ProviderType"].should.equal(provider_type)
@mock_cognitoidp
def test_list_identity_providers_returns_max_items():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
# Given 10 identity providers linked to a user pool
identity_provider_count = 10
for i in range(identity_provider_count):
provider_name = str(uuid.uuid4())
provider_type = "Facebook"
conn.create_identity_provider(
UserPoolId=user_pool_id,
ProviderName=provider_name,
ProviderType=provider_type,
ProviderDetails={},
)
max_results = 5
result = conn.list_identity_providers(UserPoolId=user_pool_id,
MaxResults=max_results)
result["Providers"].should.have.length_of(max_results)
result.should.have.key("NextToken")
@mock_cognitoidp
def test_list_identity_providers_returns_next_tokens():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
# Given 10 identity providers linked to a user pool
identity_provider_count = 10
for i in range(identity_provider_count):
provider_name = str(uuid.uuid4())
provider_type = "Facebook"
conn.create_identity_provider(
UserPoolId=user_pool_id,
ProviderName=provider_name,
ProviderType=provider_type,
ProviderDetails={},
)
max_results = 5
result = conn.list_identity_providers(UserPoolId=user_pool_id, MaxResults=max_results)
result["Providers"].should.have.length_of(max_results)
result.should.have.key("NextToken")
next_token = result["NextToken"]
result_2 = conn.list_identity_providers(UserPoolId=user_pool_id,
MaxResults=max_results,
NextToken=next_token)
result_2["Providers"].should.have.length_of(max_results)
result_2.shouldnt.have.key("NextToken")
@mock_cognitoidp
def test_list_identity_providers_when_max_items_more_than_total_items():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
# Given 10 identity providers linked to a user pool
identity_provider_count = 10
for i in range(identity_provider_count):
provider_name = str(uuid.uuid4())
provider_type = "Facebook"
conn.create_identity_provider(
UserPoolId=user_pool_id,
ProviderName=provider_name,
ProviderType=provider_type,
ProviderDetails={},
)
max_results = identity_provider_count + 5
result = conn.list_identity_providers(UserPoolId=user_pool_id, MaxResults=max_results)
result["Providers"].should.have.length_of(identity_provider_count)
result.shouldnt.have.key("NextToken")
@mock_cognitoidp
def test_describe_identity_providers():
conn = boto3.client("cognito-idp", "us-west-2")
@ -323,6 +515,245 @@ def test_delete_identity_providers():
caught.should.be.true
@mock_cognitoidp
def test_create_group():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
group_name = str(uuid.uuid4())
description = str(uuid.uuid4())
role_arn = "arn:aws:iam:::role/my-iam-role"
precedence = random.randint(0, 100000)
result = conn.create_group(
GroupName=group_name,
UserPoolId=user_pool_id,
Description=description,
RoleArn=role_arn,
Precedence=precedence,
)
result["Group"]["GroupName"].should.equal(group_name)
result["Group"]["UserPoolId"].should.equal(user_pool_id)
result["Group"]["Description"].should.equal(description)
result["Group"]["RoleArn"].should.equal(role_arn)
result["Group"]["Precedence"].should.equal(precedence)
result["Group"]["LastModifiedDate"].should.be.a("datetime.datetime")
result["Group"]["CreationDate"].should.be.a("datetime.datetime")
@mock_cognitoidp
def test_create_group_with_duplicate_name_raises_error():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
group_name = str(uuid.uuid4())
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
with assert_raises(ClientError) as cm:
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
cm.exception.operation_name.should.equal('CreateGroup')
cm.exception.response['Error']['Code'].should.equal('GroupExistsException')
cm.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400)
@mock_cognitoidp
def test_get_group():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
group_name = str(uuid.uuid4())
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
result = conn.get_group(GroupName=group_name, UserPoolId=user_pool_id)
result["Group"]["GroupName"].should.equal(group_name)
result["Group"]["UserPoolId"].should.equal(user_pool_id)
result["Group"]["LastModifiedDate"].should.be.a("datetime.datetime")
result["Group"]["CreationDate"].should.be.a("datetime.datetime")
@mock_cognitoidp
def test_list_groups():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
group_name = str(uuid.uuid4())
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
result = conn.list_groups(UserPoolId=user_pool_id)
result["Groups"].should.have.length_of(1)
result["Groups"][0]["GroupName"].should.equal(group_name)
@mock_cognitoidp
def test_delete_group():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
group_name = str(uuid.uuid4())
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
result = conn.delete_group(GroupName=group_name, UserPoolId=user_pool_id)
list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected
with assert_raises(ClientError) as cm:
conn.get_group(GroupName=group_name, UserPoolId=user_pool_id)
cm.exception.response['Error']['Code'].should.equal('ResourceNotFoundException')
@mock_cognitoidp
def test_admin_add_user_to_group():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
group_name = str(uuid.uuid4())
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
username = str(uuid.uuid4())
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
result = conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected
@mock_cognitoidp
def test_admin_add_user_to_group_again_is_noop():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
group_name = str(uuid.uuid4())
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
username = str(uuid.uuid4())
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
@mock_cognitoidp
def test_list_users_in_group():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
group_name = str(uuid.uuid4())
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
username = str(uuid.uuid4())
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
result = conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name)
result["Users"].should.have.length_of(1)
result["Users"][0]["Username"].should.equal(username)
@mock_cognitoidp
def test_list_users_in_group_ignores_deleted_user():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
group_name = str(uuid.uuid4())
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
username = str(uuid.uuid4())
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
username2 = str(uuid.uuid4())
conn.admin_create_user(UserPoolId=user_pool_id, Username=username2)
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username2, GroupName=group_name)
conn.admin_delete_user(UserPoolId=user_pool_id, Username=username)
result = conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name)
result["Users"].should.have.length_of(1)
result["Users"][0]["Username"].should.equal(username2)
@mock_cognitoidp
def test_admin_list_groups_for_user():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
group_name = str(uuid.uuid4())
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
username = str(uuid.uuid4())
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
result = conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id)
result["Groups"].should.have.length_of(1)
result["Groups"][0]["GroupName"].should.equal(group_name)
@mock_cognitoidp
def test_admin_list_groups_for_user_ignores_deleted_group():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
group_name = str(uuid.uuid4())
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
group_name2 = str(uuid.uuid4())
conn.create_group(GroupName=group_name2, UserPoolId=user_pool_id)
username = str(uuid.uuid4())
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name2)
conn.delete_group(GroupName=group_name, UserPoolId=user_pool_id)
result = conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id)
result["Groups"].should.have.length_of(1)
result["Groups"][0]["GroupName"].should.equal(group_name2)
@mock_cognitoidp
def test_admin_remove_user_from_group():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
group_name = str(uuid.uuid4())
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
username = str(uuid.uuid4())
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
result = conn.admin_remove_user_from_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected
conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name) \
["Users"].should.have.length_of(0)
conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id) \
["Groups"].should.have.length_of(0)
@mock_cognitoidp
def test_admin_remove_user_from_group_again_is_noop():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
group_name = str(uuid.uuid4())
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
username = str(uuid.uuid4())
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
conn.admin_remove_user_from_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
conn.admin_remove_user_from_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
@mock_cognitoidp
def test_admin_create_user():
conn = boto3.client("cognito-idp", "us-west-2")
@ -396,6 +827,62 @@ def test_list_users():
result["Users"][0]["Username"].should.equal(username)
@mock_cognitoidp
def test_list_users_returns_limit_items():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
# Given 10 users
user_count = 10
for i in range(user_count):
conn.admin_create_user(UserPoolId=user_pool_id,
Username=str(uuid.uuid4()))
max_results = 5
result = conn.list_users(UserPoolId=user_pool_id, Limit=max_results)
result["Users"].should.have.length_of(max_results)
result.should.have.key("PaginationToken")
@mock_cognitoidp
def test_list_users_returns_pagination_tokens():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
# Given 10 users
user_count = 10
for i in range(user_count):
conn.admin_create_user(UserPoolId=user_pool_id,
Username=str(uuid.uuid4()))
max_results = 5
result = conn.list_users(UserPoolId=user_pool_id, Limit=max_results)
result["Users"].should.have.length_of(max_results)
result.should.have.key("PaginationToken")
next_token = result["PaginationToken"]
result_2 = conn.list_users(UserPoolId=user_pool_id,
Limit=max_results, PaginationToken=next_token)
result_2["Users"].should.have.length_of(max_results)
result_2.shouldnt.have.key("PaginationToken")
@mock_cognitoidp
def test_list_users_when_limit_more_than_total_items():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
# Given 10 users
user_count = 10
for i in range(user_count):
conn.admin_create_user(UserPoolId=user_pool_id,
Username=str(uuid.uuid4()))
max_results = user_count + 5
result = conn.list_users(UserPoolId=user_pool_id, Limit=max_results)
result["Users"].should.have.length_of(user_count)
result.shouldnt.have.key("PaginationToken")
@mock_cognitoidp
def test_admin_disable_user():
conn = boto3.client("cognito-idp", "us-west-2")

View File

@ -1000,6 +1000,11 @@ def test_delete_item():
response = table.scan()
assert response['Count'] == 2
# Test ReturnValues validation
with assert_raises(ClientError) as ex:
table.delete_item(Key={'client': 'client1', 'app': 'app1'},
ReturnValues='ALL_NEW')
# Test deletion and returning old value
response = table.delete_item(Key={'client': 'client1', 'app': 'app1'}, ReturnValues='ALL_OLD')
response['Attributes'].should.contain('client')
@ -1246,6 +1251,81 @@ def test_update_if_not_exists():
assert resp['Items'][0]['created_at'] == 123
# https://github.com/spulec/moto/issues/1937
@mock_dynamodb2
def test_update_return_attributes():
dynamodb = boto3.client('dynamodb', region_name='us-east-1')
dynamodb.create_table(
TableName='moto-test',
KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}
)
def update(col, to, rv):
return dynamodb.update_item(
TableName='moto-test',
Key={'id': {'S': 'foo'}},
AttributeUpdates={col: {'Value': {'S': to}, 'Action': 'PUT'}},
ReturnValues=rv
)
r = update('col1', 'val1', 'ALL_NEW')
assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}}
r = update('col1', 'val2', 'ALL_OLD')
assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}}
r = update('col2', 'val3', 'UPDATED_NEW')
assert r['Attributes'] == {'col2': {'S': 'val3'}}
r = update('col2', 'val4', 'UPDATED_OLD')
assert r['Attributes'] == {'col2': {'S': 'val3'}}
r = update('col1', 'val5', 'NONE')
assert r['Attributes'] == {}
with assert_raises(ClientError) as ex:
r = update('col1', 'val6', 'WRONG')
@mock_dynamodb2
def test_put_return_attributes():
dynamodb = boto3.client('dynamodb', region_name='us-east-1')
dynamodb.create_table(
TableName='moto-test',
KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}
)
r = dynamodb.put_item(
TableName='moto-test',
Item={'id': {'S': 'foo'}, 'col1': {'S': 'val1'}},
ReturnValues='NONE'
)
assert 'Attributes' not in r
r = dynamodb.put_item(
TableName='moto-test',
Item={'id': {'S': 'foo'}, 'col1': {'S': 'val2'}},
ReturnValues='ALL_OLD'
)
assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}}
with assert_raises(ClientError) as ex:
dynamodb.put_item(
TableName='moto-test',
Item={'id': {'S': 'foo'}, 'col1': {'S': 'val3'}},
ReturnValues='ALL_NEW'
)
ex.exception.response['Error']['Code'].should.equal('ValidationException')
ex.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400)
ex.exception.response['Error']['Message'].should.equal('Return values set to invalid value')
@mock_dynamodb2
def test_query_global_secondary_index_when_created_via_update_table_resource():
dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
@ -1336,3 +1416,62 @@ def test_query_global_secondary_index_when_created_via_update_table_resource():
assert len(forum_and_subject_items) == 1
assert forum_and_subject_items[0] == {'user_id': Decimal('1'), 'forum_name': 'cats',
'subject': 'my pet is the cutest'}
@mock_dynamodb2
def test_dynamodb_streams_1():
conn = boto3.client('dynamodb', region_name='us-east-1')
resp = conn.create_table(
TableName='test-streams',
KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1},
StreamSpecification={
'StreamEnabled': True,
'StreamViewType': 'NEW_AND_OLD_IMAGES'
}
)
assert 'StreamSpecification' in resp['TableDescription']
assert resp['TableDescription']['StreamSpecification'] == {
'StreamEnabled': True,
'StreamViewType': 'NEW_AND_OLD_IMAGES'
}
assert 'LatestStreamLabel' in resp['TableDescription']
assert 'LatestStreamArn' in resp['TableDescription']
resp = conn.delete_table(TableName='test-streams')
assert 'StreamSpecification' in resp['TableDescription']
@mock_dynamodb2
def test_dynamodb_streams_2():
conn = boto3.client('dynamodb', region_name='us-east-1')
resp = conn.create_table(
TableName='test-stream-update',
KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1},
)
assert 'StreamSpecification' not in resp['TableDescription']
resp = conn.update_table(
TableName='test-stream-update',
StreamSpecification={
'StreamEnabled': True,
'StreamViewType': 'NEW_IMAGE'
}
)
assert 'StreamSpecification' in resp['TableDescription']
assert resp['TableDescription']['StreamSpecification'] == {
'StreamEnabled': True,
'StreamViewType': 'NEW_IMAGE'
}
assert 'LatestStreamLabel' in resp['TableDescription']
assert 'LatestStreamArn' in resp['TableDescription']

View File

@ -0,0 +1,234 @@
from __future__ import unicode_literals, print_function
from nose.tools import assert_raises
import boto3
from moto import mock_dynamodb2, mock_dynamodbstreams
class TestCore():
stream_arn = None
mocks = []
def setup(self):
self.mocks = [mock_dynamodb2(), mock_dynamodbstreams()]
for m in self.mocks:
m.start()
# create a table with a stream
conn = boto3.client('dynamodb', region_name='us-east-1')
resp = conn.create_table(
TableName='test-streams',
KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
AttributeDefinitions=[{'AttributeName': 'id',
'AttributeType': 'S'}],
ProvisionedThroughput={'ReadCapacityUnits': 1,
'WriteCapacityUnits': 1},
StreamSpecification={
'StreamEnabled': True,
'StreamViewType': 'NEW_AND_OLD_IMAGES'
}
)
self.stream_arn = resp['TableDescription']['LatestStreamArn']
def teardown(self):
conn = boto3.client('dynamodb', region_name='us-east-1')
conn.delete_table(TableName='test-streams')
self.stream_arn = None
for m in self.mocks:
m.stop()
def test_verify_stream(self):
conn = boto3.client('dynamodb', region_name='us-east-1')
resp = conn.describe_table(TableName='test-streams')
assert 'LatestStreamArn' in resp['Table']
def test_describe_stream(self):
conn = boto3.client('dynamodbstreams', region_name='us-east-1')
resp = conn.describe_stream(StreamArn=self.stream_arn)
assert 'StreamDescription' in resp
desc = resp['StreamDescription']
assert desc['StreamArn'] == self.stream_arn
assert desc['TableName'] == 'test-streams'
def test_list_streams(self):
conn = boto3.client('dynamodbstreams', region_name='us-east-1')
resp = conn.list_streams()
assert resp['Streams'][0]['StreamArn'] == self.stream_arn
resp = conn.list_streams(TableName='no-stream')
assert not resp['Streams']
def test_get_shard_iterator(self):
conn = boto3.client('dynamodbstreams', region_name='us-east-1')
resp = conn.describe_stream(StreamArn=self.stream_arn)
shard_id = resp['StreamDescription']['Shards'][0]['ShardId']
resp = conn.get_shard_iterator(
StreamArn=self.stream_arn,
ShardId=shard_id,
ShardIteratorType='TRIM_HORIZON'
)
assert 'ShardIterator' in resp
def test_get_records_empty(self):
conn = boto3.client('dynamodbstreams', region_name='us-east-1')
resp = conn.describe_stream(StreamArn=self.stream_arn)
shard_id = resp['StreamDescription']['Shards'][0]['ShardId']
resp = conn.get_shard_iterator(
StreamArn=self.stream_arn,
ShardId=shard_id,
ShardIteratorType='LATEST'
)
iterator_id = resp['ShardIterator']
resp = conn.get_records(ShardIterator=iterator_id)
assert 'Records' in resp
assert len(resp['Records']) == 0
def test_get_records_seq(self):
conn = boto3.client('dynamodb', region_name='us-east-1')
conn.put_item(
TableName='test-streams',
Item={
'id': {'S': 'entry1'},
'first_col': {'S': 'foo'}
}
)
conn.put_item(
TableName='test-streams',
Item={
'id': {'S': 'entry1'},
'first_col': {'S': 'bar'},
'second_col': {'S': 'baz'}
}
)
conn.delete_item(
TableName='test-streams',
Key={'id': {'S': 'entry1'}}
)
conn = boto3.client('dynamodbstreams', region_name='us-east-1')
resp = conn.describe_stream(StreamArn=self.stream_arn)
shard_id = resp['StreamDescription']['Shards'][0]['ShardId']
resp = conn.get_shard_iterator(
StreamArn=self.stream_arn,
ShardId=shard_id,
ShardIteratorType='TRIM_HORIZON'
)
iterator_id = resp['ShardIterator']
resp = conn.get_records(ShardIterator=iterator_id)
assert len(resp['Records']) == 3
assert resp['Records'][0]['eventName'] == 'INSERT'
assert resp['Records'][1]['eventName'] == 'MODIFY'
assert resp['Records'][2]['eventName'] == 'DELETE'
# now try fetching from the next shard iterator, it should be
# empty
resp = conn.get_records(ShardIterator=resp['NextShardIterator'])
assert len(resp['Records']) == 0
class TestEdges():
mocks = []
def setup(self):
self.mocks = [mock_dynamodb2(), mock_dynamodbstreams()]
for m in self.mocks:
m.start()
def teardown(self):
for m in self.mocks:
m.stop()
def test_enable_stream_on_table(self):
conn = boto3.client('dynamodb', region_name='us-east-1')
resp = conn.create_table(
TableName='test-streams',
KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
AttributeDefinitions=[{'AttributeName': 'id',
'AttributeType': 'S'}],
ProvisionedThroughput={'ReadCapacityUnits': 1,
'WriteCapacityUnits': 1}
)
assert 'StreamSpecification' not in resp['TableDescription']
resp = conn.update_table(
TableName='test-streams',
StreamSpecification={
'StreamViewType': 'KEYS_ONLY'
}
)
assert 'StreamSpecification' in resp['TableDescription']
assert resp['TableDescription']['StreamSpecification'] == {
'StreamEnabled': True,
'StreamViewType': 'KEYS_ONLY'
}
assert 'LatestStreamLabel' in resp['TableDescription']
# now try to enable it again
with assert_raises(conn.exceptions.ResourceInUseException):
resp = conn.update_table(
TableName='test-streams',
StreamSpecification={
'StreamViewType': 'OLD_IMAGES'
}
)
def test_stream_with_range_key(self):
dyn = boto3.client('dynamodb', region_name='us-east-1')
resp = dyn.create_table(
TableName='test-streams',
KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'},
{'AttributeName': 'color', 'KeyType': 'RANGE'}],
AttributeDefinitions=[{'AttributeName': 'id',
'AttributeType': 'S'},
{'AttributeName': 'color',
'AttributeType': 'S'}],
ProvisionedThroughput={'ReadCapacityUnits': 1,
'WriteCapacityUnits': 1},
StreamSpecification={
'StreamViewType': 'NEW_IMAGES'
}
)
stream_arn = resp['TableDescription']['LatestStreamArn']
streams = boto3.client('dynamodbstreams', region_name='us-east-1')
resp = streams.describe_stream(StreamArn=stream_arn)
shard_id = resp['StreamDescription']['Shards'][0]['ShardId']
resp = streams.get_shard_iterator(
StreamArn=stream_arn,
ShardId=shard_id,
ShardIteratorType='LATEST'
)
iterator_id = resp['ShardIterator']
dyn.put_item(
TableName='test-streams',
Item={'id': {'S': 'row1'}, 'color': {'S': 'blue'}}
)
dyn.put_item(
TableName='test-streams',
Item={'id': {'S': 'row2'}, 'color': {'S': 'green'}}
)
resp = streams.get_records(ShardIterator=iterator_id)
assert len(resp['Records']) == 2
assert resp['Records'][0]['eventName'] == 'INSERT'
assert resp['Records'][1]['eventName'] == 'INSERT'


@ -89,7 +89,8 @@ def test_vpc_peering_connections_delete():
verdict.should.equal(True)
all_vpc_pcxs = conn.get_all_vpc_peering_connections()
all_vpc_pcxs.should.have.length_of(0)
all_vpc_pcxs.should.have.length_of(1)
all_vpc_pcxs[0]._status.code.should.equal('deleted')
with assert_raises(EC2ResponseError) as cm:
conn.delete_vpc_peering_connection("pcx-1234abcd")


@ -925,6 +925,65 @@ def test_update_container_instances_state():
status='test_status').should.throw(Exception)
@mock_ec2
@mock_ecs
def test_update_container_instances_state_by_arn():
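# Register several container instances and flip their state to DRAINING and back to ACTIVE, addressing them by full ARN; an unknown status should raise.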
ecs_client = boto3.client('ecs', region_name='us-east-1')
ec2 = boto3.resource('ec2', region_name='us-east-1')
test_cluster_name = 'test_ecs_cluster'
_ = ecs_client.create_cluster(
clusterName=test_cluster_name
)
instance_to_create = 3
test_instance_arns = []
for i in range(0, instance_to_create):
test_instance = ec2.create_instances(
ImageId="ami-1234abcd",
MinCount=1,
MaxCount=1,
)[0]
instance_id_document = json.dumps(
ec2_utils.generate_instance_identity_document(test_instance)
)
response = ecs_client.register_container_instance(
cluster=test_cluster_name,
instanceIdentityDocument=instance_id_document)
test_instance_arns.append(response['containerInstance']['containerInstanceArn'])
response = ecs_client.update_container_instances_state(cluster=test_cluster_name,
containerInstances=test_instance_arns,
status='DRAINING')
len(response['failures']).should.equal(0)
len(response['containerInstances']).should.equal(instance_to_create)
response_statuses = [ci['status'] for ci in response['containerInstances']]
for status in response_statuses:
status.should.equal('DRAINING')
response = ecs_client.update_container_instances_state(cluster=test_cluster_name,
containerInstances=test_instance_arns,
status='DRAINING')
len(response['failures']).should.equal(0)
len(response['containerInstances']).should.equal(instance_to_create)
response_statuses = [ci['status'] for ci in response['containerInstances']]
for status in response_statuses:
status.should.equal('DRAINING')
response = ecs_client.update_container_instances_state(cluster=test_cluster_name,
containerInstances=test_instance_arns,
status='ACTIVE')
len(response['failures']).should.equal(0)
len(response['containerInstances']).should.equal(instance_to_create)
response_statuses = [ci['status'] for ci in response['containerInstances']]
for status in response_statuses:
status.should.equal('ACTIVE')
ecs_client.update_container_instances_state.when.called_with(cluster=test_cluster_name,
containerInstances=test_instance_arns,
status='test_status').should.throw(Exception)
@mock_ec2
@mock_ecs
def test_run_task():


@ -3,7 +3,9 @@ import base64
import boto
import boto3
import os
import sure # noqa
import sys
from boto.exception import BotoServerError
from botocore.exceptions import ClientError
from moto import mock_iam, mock_iam_deprecated
@ -14,6 +16,19 @@ from nose.tools import raises
from tests.helpers import requires_boto_gte
MOCK_CERT = """-----BEGIN CERTIFICATE-----
MIIBpzCCARACCQCY5yOdxCTrGjANBgkqhkiG9w0BAQsFADAXMRUwEwYDVQQKDAxt
b3RvIHRlc3RpbmcwIBcNMTgxMTA1MTkwNTIwWhgPMjI5MjA4MTkxOTA1MjBaMBcx
FTATBgNVBAoMDG1vdG8gdGVzdGluZzCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkC
gYEA1Jn3g2h7LD3FLqdpcYNbFXCS4V4eDpuTCje9vKFcC3pi/01147X3zdfPy8Mt
ZhKxcREOwm4NXykh23P9KW7fBovpNwnbYsbPqj8Hf1ZaClrgku1arTVhEnKjx8zO
vaR/bVLCss4uE0E0VM1tJn/QGQsfthFsjuHtwx8uIWz35tUCAwEAATANBgkqhkiG
9w0BAQsFAAOBgQBWdOQ7bDc2nWkUhFjZoNIZrqjyNdjlMUndpwREVD7FQ/DuxJMj
FyDHrtlrS80dPUQWNYHw++oACDpWO01LGLPPrGmuO/7cOdojPEd852q5gd+7W9xt
8vUH+pBa6IBLbvBp+szli51V3TLSWcoyy4ceJNQU2vCkTLoFdS0RLd/7tQ==
-----END CERTIFICATE-----"""
@mock_iam_deprecated()
def test_get_all_server_certs():
conn = boto.connect_iam()
@ -108,6 +123,10 @@ def test_create_role_and_instance_profile():
conn.list_roles().roles[0].role_name.should.equal('my-role')
# Test with an empty path:
profile = conn.create_instance_profile('my-other-profile')
profile.path.should.equal('/')
@mock_iam_deprecated()
def test_remove_role_from_instance_profile():
@ -536,6 +555,14 @@ def test_generate_credential_report():
result['generate_credential_report_response'][
'generate_credential_report_result']['state'].should.equal('COMPLETE')
@mock_iam
def test_boto3_generate_credential_report():
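# The first call reports the credential report as STARTED; a subsequent call reports COMPLETE.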
conn = boto3.client('iam', region_name='us-east-1')
result = conn.generate_credential_report()
result['State'].should.equal('STARTED')
result = conn.generate_credential_report()
result['State'].should.equal('COMPLETE')
@mock_iam_deprecated()
def test_get_credential_report():
@ -551,6 +578,19 @@ def test_get_credential_report():
'get_credential_report_result']['content'].encode('ascii')).decode('ascii')
report.should.match(r'.*my-user.*')
@mock_iam
def test_boto3_get_credential_report():
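# get_credential_report fails until a report exists, so poll generate_credential_report until it is COMPLETE first.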
conn = boto3.client('iam', region_name='us-east-1')
conn.create_user(UserName='my-user')
with assert_raises(ClientError):
conn.get_credential_report()
result = conn.generate_credential_report()
while result['State'] != 'COMPLETE':
result = conn.generate_credential_report()
result = conn.get_credential_report()
report = result['Content'].decode('utf-8')
report.should.match(r'.*my-user.*')
@requires_boto_gte('2.39')
@mock_iam_deprecated()
@ -700,10 +740,10 @@ def test_get_account_authorization_details():
import json
conn = boto3.client('iam', region_name='us-east-1')
conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
conn.create_user(Path='/', UserName='testCloudAuxUser')
conn.create_group(Path='/', GroupName='testCloudAuxGroup')
conn.create_user(Path='/', UserName='testUser')
conn.create_group(Path='/', GroupName='testGroup')
conn.create_policy(
PolicyName='testCloudAuxPolicy',
PolicyName='testPolicy',
Path='/',
PolicyDocument=json.dumps({
"Version": "2012-10-17",
@ -715,46 +755,157 @@ def test_get_account_authorization_details():
}
]
}),
Description='Test CloudAux Policy'
Description='Test Policy'
)
conn.create_instance_profile(InstanceProfileName='ipn')
conn.add_role_to_instance_profile(InstanceProfileName='ipn', RoleName='my-role')
result = conn.get_account_authorization_details(Filter=['Role'])
len(result['RoleDetailList']) == 1
len(result['UserDetailList']) == 0
len(result['GroupDetailList']) == 0
len(result['Policies']) == 0
assert len(result['RoleDetailList']) == 1
assert len(result['UserDetailList']) == 0
assert len(result['GroupDetailList']) == 0
assert len(result['Policies']) == 0
assert len(result['RoleDetailList'][0]['InstanceProfileList']) == 1
result = conn.get_account_authorization_details(Filter=['User'])
len(result['RoleDetailList']) == 0
len(result['UserDetailList']) == 1
len(result['GroupDetailList']) == 0
len(result['Policies']) == 0
assert len(result['RoleDetailList']) == 0
assert len(result['UserDetailList']) == 1
assert len(result['GroupDetailList']) == 0
assert len(result['Policies']) == 0
result = conn.get_account_authorization_details(Filter=['Group'])
len(result['RoleDetailList']) == 0
len(result['UserDetailList']) == 0
len(result['GroupDetailList']) == 1
len(result['Policies']) == 0
assert len(result['RoleDetailList']) == 0
assert len(result['UserDetailList']) == 0
assert len(result['GroupDetailList']) == 1
assert len(result['Policies']) == 0
result = conn.get_account_authorization_details(Filter=['LocalManagedPolicy'])
len(result['RoleDetailList']) == 0
len(result['UserDetailList']) == 0
len(result['GroupDetailList']) == 0
len(result['Policies']) == 1
assert len(result['RoleDetailList']) == 0
assert len(result['UserDetailList']) == 0
assert len(result['GroupDetailList']) == 0
assert len(result['Policies']) == 1
# Check for more than one policy: the AWS managed policy list should always contain several entries, though the exact count may change.
# See iam/aws_managed_policies.py
result = conn.get_account_authorization_details(Filter=['AWSManagedPolicy'])
len(result['RoleDetailList']) == 0
len(result['UserDetailList']) == 0
len(result['GroupDetailList']) == 0
len(result['Policies']) > 1
assert len(result['RoleDetailList']) == 0
assert len(result['UserDetailList']) == 0
assert len(result['GroupDetailList']) == 0
assert len(result['Policies']) > 1
result = conn.get_account_authorization_details()
len(result['RoleDetailList']) == 1
len(result['UserDetailList']) == 1
len(result['GroupDetailList']) == 1
len(result['Policies']) > 1
assert len(result['RoleDetailList']) == 1
assert len(result['UserDetailList']) == 1
assert len(result['GroupDetailList']) == 1
assert len(result['Policies']) > 1
@mock_iam
def test_signing_certs():
client = boto3.client('iam', region_name='us-east-1')
# Create the IAM user first:
client.create_user(UserName='testing')
# Upload the cert:
resp = client.upload_signing_certificate(UserName='testing', CertificateBody=MOCK_CERT)['Certificate']
cert_id = resp['CertificateId']
assert resp['UserName'] == 'testing'
assert resp['Status'] == 'Active'
assert resp['CertificateBody'] == MOCK_CERT
assert resp['CertificateId']
# Upload the cert with an invalid body:
with assert_raises(ClientError) as ce:
client.upload_signing_certificate(UserName='testing', CertificateBody='notacert')
assert ce.exception.response['Error']['Code'] == 'MalformedCertificate'
# Upload with an invalid user:
with assert_raises(ClientError):
client.upload_signing_certificate(UserName='notauser', CertificateBody=MOCK_CERT)
# Update:
client.update_signing_certificate(UserName='testing', CertificateId=cert_id, Status='Inactive')
with assert_raises(ClientError):
client.update_signing_certificate(UserName='notauser', CertificateId=cert_id, Status='Inactive')
with assert_raises(ClientError) as ce:
client.update_signing_certificate(UserName='testing', CertificateId='x' * 32, Status='Inactive')
assert ce.exception.response['Error']['Message'] == 'The Certificate with id {id} cannot be found.'.format(
id='x' * 32)
# List the certs:
resp = client.list_signing_certificates(UserName='testing')['Certificates']
assert len(resp) == 1
assert resp[0]['CertificateBody'] == MOCK_CERT
assert resp[0]['Status'] == 'Inactive' # Changed with the update call above.
with assert_raises(ClientError):
client.list_signing_certificates(UserName='notauser')
# Delete:
client.delete_signing_certificate(UserName='testing', CertificateId=cert_id)
with assert_raises(ClientError):
client.delete_signing_certificate(UserName='notauser', CertificateId=cert_id)
@mock_iam()
def test_create_saml_provider():
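# The returned ARN is built from moto's default account id (123456789012).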
conn = boto3.client('iam', region_name='us-east-1')
response = conn.create_saml_provider(
Name="TestSAMLProvider",
SAMLMetadataDocument='a' * 1024
)
response['SAMLProviderArn'].should.equal("arn:aws:iam::123456789012:saml-provider/TestSAMLProvider")
@mock_iam()
def test_get_saml_provider():
conn = boto3.client('iam', region_name='us-east-1')
saml_provider_create = conn.create_saml_provider(
Name="TestSAMLProvider",
SAMLMetadataDocument='a' * 1024
)
response = conn.get_saml_provider(
SAMLProviderArn=saml_provider_create['SAMLProviderArn']
)
response['SAMLMetadataDocument'].should.equal('a' * 1024)
@mock_iam()
def test_list_saml_providers():
conn = boto3.client('iam', region_name='us-east-1')
conn.create_saml_provider(
Name="TestSAMLProvider",
SAMLMetadataDocument='a' * 1024
)
response = conn.list_saml_providers()
response['SAMLProviderList'][0]['Arn'].should.equal("arn:aws:iam::123456789012:saml-provider/TestSAMLProvider")
@mock_iam()
def test_delete_saml_provider():
conn = boto3.client('iam', region_name='us-east-1')
saml_provider_create = conn.create_saml_provider(
Name="TestSAMLProvider",
SAMLMetadataDocument='a' * 1024
)
response = conn.list_saml_providers()
len(response['SAMLProviderList']).should.equal(1)
conn.delete_saml_provider(
SAMLProviderArn=saml_provider_create['SAMLProviderArn']
)
response = conn.list_saml_providers()
len(response['SAMLProviderList']).should.equal(0)
with assert_raises(ClientError) as ce:
client.delete_signing_certificate(UserName='testing', CertificateId=cert_id)
assert ce.exception.response['Error']['Message'] == 'The Certificate with id {id} cannot be found.'.format(
id=cert_id)
# Verify that it's not in the list:
resp = client.list_signing_certificates(UserName='testing')
assert not resp['Certificates']


@ -5,6 +5,8 @@ import sure # noqa
import boto3
from moto import mock_iot
from botocore.exceptions import ClientError
from nose.tools import assert_raises
@mock_iot
@ -261,6 +263,96 @@ def test_certs():
res.should.have.key('certificates').which.should.have.length_of(0)
@mock_iot
def test_delete_policy_validation():
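# A policy that is still attached to a principal cannot be deleted; it must be detached first.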
doc = """{
"Version": "2012-10-17",
"Statement":[
{
"Effect":"Allow",
"Action":[
"iot: *"
],
"Resource":"*"
}
]
}
"""
client = boto3.client('iot', region_name='ap-northeast-1')
cert = client.create_keys_and_certificate(setAsActive=True)
cert_arn = cert['certificateArn']
policy_name = 'my-policy'
client.create_policy(policyName=policy_name, policyDocument=doc)
client.attach_principal_policy(policyName=policy_name, principal=cert_arn)
with assert_raises(ClientError) as e:
client.delete_policy(policyName=policy_name)
e.exception.response['Error']['Message'].should.contain(
'The policy cannot be deleted as the policy is attached to one or more principals (name=%s)' % policy_name)
res = client.list_policies()
res.should.have.key('policies').which.should.have.length_of(1)
client.detach_principal_policy(policyName=policy_name, principal=cert_arn)
client.delete_policy(policyName=policy_name)
res = client.list_policies()
res.should.have.key('policies').which.should.have.length_of(0)
@mock_iot
def test_delete_certificate_validation():
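# A certificate must be deactivated and detached from its things and policies before it can be deleted.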
doc = """{
"Version": "2012-10-17",
"Statement":[
{
"Effect":"Allow",
"Action":[
"iot: *"
],
"Resource":"*"
}
]
}
"""
client = boto3.client('iot', region_name='ap-northeast-1')
cert = client.create_keys_and_certificate(setAsActive=True)
cert_id = cert['certificateId']
cert_arn = cert['certificateArn']
policy_name = 'my-policy'
thing_name = 'thing-1'
client.create_policy(policyName=policy_name, policyDocument=doc)
client.attach_principal_policy(policyName=policy_name, principal=cert_arn)
client.create_thing(thingName=thing_name)
client.attach_thing_principal(thingName=thing_name, principal=cert_arn)
with assert_raises(ClientError) as e:
client.delete_certificate(certificateId=cert_id)
e.exception.response['Error']['Message'].should.contain(
'Certificate must be deactivated (not ACTIVE) before deletion.')
res = client.list_certificates()
res.should.have.key('certificates').which.should.have.length_of(1)
client.update_certificate(certificateId=cert_id, newStatus='REVOKED')
with assert_raises(ClientError) as e:
client.delete_certificate(certificateId=cert_id)
e.exception.response['Error']['Message'].should.contain(
'Things must be detached before deletion (arn: %s)' % cert_arn)
res = client.list_certificates()
res.should.have.key('certificates').which.should.have.length_of(1)
client.detach_thing_principal(thingName=thing_name, principal=cert_arn)
with assert_raises(ClientError) as e:
client.delete_certificate(certificateId=cert_id)
e.exception.response['Error']['Message'].should.contain(
'Certificate policies must be detached before deletion (arn: %s)' % cert_arn)
res = client.list_certificates()
res.should.have.key('certificates').which.should.have.length_of(1)
client.detach_principal_policy(policyName=policy_name, principal=cert_arn)
client.delete_certificate(certificateId=cert_id)
res = client.list_certificates()
res.should.have.key('certificates').which.should.have.length_of(0)
@mock_iot
def test_certs_create_inactive():
client = boto3.client('iot', region_name='ap-northeast-1')
@ -309,6 +401,47 @@ def test_policy():
@mock_iot
def test_principal_policy():
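# attach_policy and detach_policy should be reflected by both list_principal_policies and list_policy_principals.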
client = boto3.client('iot', region_name='ap-northeast-1')
policy_name = 'my-policy'
doc = '{}'
client.create_policy(policyName=policy_name, policyDocument=doc)
cert = client.create_keys_and_certificate(setAsActive=True)
cert_arn = cert['certificateArn']
client.attach_policy(policyName=policy_name, target=cert_arn)
res = client.list_principal_policies(principal=cert_arn)
res.should.have.key('policies').which.should.have.length_of(1)
for policy in res['policies']:
policy.should.have.key('policyName').which.should_not.be.none
policy.should.have.key('policyArn').which.should_not.be.none
# attaching again is a no-op when the policy is already attached to the certificate
client.attach_policy(policyName=policy_name, target=cert_arn)
res = client.list_principal_policies(principal=cert_arn)
res.should.have.key('policies').which.should.have.length_of(1)
for policy in res['policies']:
policy.should.have.key('policyName').which.should_not.be.none
policy.should.have.key('policyArn').which.should_not.be.none
res = client.list_policy_principals(policyName=policy_name)
res.should.have.key('principals').which.should.have.length_of(1)
for principal in res['principals']:
principal.should_not.be.none
client.detach_policy(policyName=policy_name, target=cert_arn)
res = client.list_principal_policies(principal=cert_arn)
res.should.have.key('policies').which.should.have.length_of(0)
res = client.list_policy_principals(policyName=policy_name)
res.should.have.key('principals').which.should.have.length_of(0)
with assert_raises(ClientError) as e:
client.detach_policy(policyName=policy_name, target=cert_arn)
e.exception.response['Error']['Code'].should.equal('ResourceNotFoundException')
@mock_iot
def test_principal_policy_deprecated():
client = boto3.client('iot', region_name='ap-northeast-1')
policy_name = 'my-policy'
doc = '{}'


@ -164,8 +164,8 @@ def test_alias_rrset():
rrsets = conn.get_all_rrsets(zoneid, type="A")
rrset_records = [(rr_set.name, rr) for rr_set in rrsets for rr in rr_set.resource_records]
rrset_records.should.have.length_of(2)
rrset_records.should.contain(('foo.alias.testdns.aws.com', 'foo.testdns.aws.com'))
rrset_records.should.contain(('bar.alias.testdns.aws.com', 'bar.testdns.aws.com'))
rrset_records.should.contain(('foo.alias.testdns.aws.com.', 'foo.testdns.aws.com'))
rrset_records.should.contain(('bar.alias.testdns.aws.com.', 'bar.testdns.aws.com'))
rrsets[0].resource_records[0].should.equal('foo.testdns.aws.com')
rrsets = conn.get_all_rrsets(zoneid, type="CNAME")
rrsets.should.have.length_of(1)
@ -525,7 +525,7 @@ def test_change_resource_record_sets_crud_valid():
{
'Action': 'CREATE',
'ResourceRecordSet': {
'Name': 'prod.redis.db',
'Name': 'prod.redis.db.',
'Type': 'A',
'TTL': 10,
'ResourceRecords': [{
@ -540,7 +540,7 @@ def test_change_resource_record_sets_crud_valid():
response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id)
len(response['ResourceRecordSets']).should.equal(1)
a_record_detail = response['ResourceRecordSets'][0]
a_record_detail['Name'].should.equal('prod.redis.db')
a_record_detail['Name'].should.equal('prod.redis.db.')
a_record_detail['Type'].should.equal('A')
a_record_detail['TTL'].should.equal(10)
a_record_detail['ResourceRecords'].should.equal([{'Value': '127.0.0.1'}])
@ -552,7 +552,7 @@ def test_change_resource_record_sets_crud_valid():
{
'Action': 'UPSERT',
'ResourceRecordSet': {
'Name': 'prod.redis.db',
'Name': 'prod.redis.db.',
'Type': 'CNAME',
'TTL': 60,
'ResourceRecords': [{
@ -567,7 +567,7 @@ def test_change_resource_record_sets_crud_valid():
response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id)
len(response['ResourceRecordSets']).should.equal(1)
cname_record_detail = response['ResourceRecordSets'][0]
cname_record_detail['Name'].should.equal('prod.redis.db')
cname_record_detail['Name'].should.equal('prod.redis.db.')
cname_record_detail['Type'].should.equal('CNAME')
cname_record_detail['TTL'].should.equal(60)
cname_record_detail['ResourceRecords'].should.equal([{'Value': '192.168.1.1'}])
@ -688,12 +688,12 @@ def test_list_resource_record_sets_name_type_filters():
# record_type, record_name
all_records = [
('A', 'a.a.db'),
('A', 'a.b.db'),
('A', 'b.b.db'),
('CNAME', 'b.b.db'),
('CNAME', 'b.c.db'),
('CNAME', 'c.c.db')
('A', 'a.a.db.'),
('A', 'a.b.db.'),
('A', 'b.b.db.'),
('CNAME', 'b.b.db.'),
('CNAME', 'b.c.db.'),
('CNAME', 'c.c.db.')
]
for record_type, record_name in all_records:
create_resource_record_set(record_type, record_name)


@ -977,6 +977,15 @@ def test_bucket_location():
bucket.get_location().should.equal("us-west-2")
@mock_s3_deprecated
def test_bucket_location_us_east_1():
cli = boto3.client('s3')
bucket_name = 'mybucket'
# No LocationConstraint ==> us-east-1
cli.create_bucket(Bucket=bucket_name)
cli.get_bucket_location(Bucket=bucket_name)['LocationConstraint'].should.equal(None)
@mock_s3_deprecated
def test_ranged_get():
conn = boto.connect_s3()
@ -1300,6 +1309,16 @@ def test_bucket_create_duplicate():
exc.exception.response['Error']['Code'].should.equal('BucketAlreadyExists')
@mock_s3
def test_bucket_create_force_us_east_1():
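# Passing us-east-1 explicitly as a LocationConstraint is rejected with InvalidLocationConstraint.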
s3 = boto3.resource('s3', region_name='us-east-1')
with assert_raises(ClientError) as exc:
s3.create_bucket(Bucket="blah", CreateBucketConfiguration={
'LocationConstraint': 'us-east-1',
})
exc.exception.response['Error']['Code'].should.equal('InvalidLocationConstraint')
@mock_s3
def test_boto3_bucket_create_eu_central():
s3 = boto3.resource('s3', region_name='eu-central-1')
@ -1553,6 +1572,24 @@ def test_boto3_put_bucket_tagging():
})
resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
# With duplicate tag keys:
with assert_raises(ClientError) as err:
resp = s3.put_bucket_tagging(Bucket=bucket_name,
Tagging={
"TagSet": [
{
"Key": "TagOne",
"Value": "ValueOne"
},
{
"Key": "TagOne",
"Value": "ValueOneAgain"
}
]
})
e = err.exception
e.response["Error"]["Code"].should.equal("InvalidTag")
e.response["Error"]["Message"].should.equal("Cannot provide multiple Tags with the same key")
@mock_s3
def test_boto3_get_bucket_tagging():


@ -39,12 +39,28 @@ def test_create_secret():
conn = boto3.client('secretsmanager', region_name='us-east-1')
result = conn.create_secret(Name='test-secret', SecretString="foosecret")
assert result['ARN'] == (
'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad')
assert result['ARN']
assert result['Name'] == 'test-secret'
secret = conn.get_secret_value(SecretId='test-secret')
assert secret['SecretString'] == 'foosecret'
@mock_secretsmanager
def test_create_secret_with_tags():
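# Tags supplied to create_secret should be returned by describe_secret.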
conn = boto3.client('secretsmanager', region_name='us-east-1')
secret_name = 'test-secret-with-tags'
result = conn.create_secret(
Name=secret_name,
SecretString="foosecret",
Tags=[{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}]
)
assert result['ARN']
assert result['Name'] == secret_name
secret_value = conn.get_secret_value(SecretId=secret_name)
assert secret_value['SecretString'] == 'foosecret'
secret_details = conn.describe_secret(SecretId=secret_name)
assert secret_details['Tags'] == [{"Key": "Foo", "Value": "Bar"}, {"Key": "Mykey", "Value": "Myvalue"}]
@mock_secretsmanager
def test_get_random_password_default_length():
conn = boto3.client('secretsmanager', region_name='us-west-2')
@ -159,10 +175,17 @@ def test_describe_secret():
conn.create_secret(Name='test-secret',
SecretString='foosecret')
conn.create_secret(Name='test-secret-2',
SecretString='barsecret')
secret_description = conn.describe_secret(SecretId='test-secret')
secret_description_2 = conn.describe_secret(SecretId='test-secret-2')
assert secret_description # Returned dict is not empty
assert secret_description['ARN'] == (
'arn:aws:secretsmanager:us-west-2:1234567890:secret:test-secret-rIjad')
assert secret_description['Name'] == ('test-secret')
assert secret_description['ARN'] != '' # Test arn not empty
assert secret_description_2['Name'] == ('test-secret-2')
assert secret_description_2['ARN'] != '' # Test arn not empty
@mock_secretsmanager
def test_describe_secret_that_does_not_exist():
@ -190,9 +213,7 @@ def test_rotate_secret():
rotated_secret = conn.rotate_secret(SecretId=secret_name)
assert rotated_secret
assert rotated_secret['ARN'] == (
'arn:aws:secretsmanager:us-west-2:1234567890:secret:test-secret-rIjad'
)
assert rotated_secret['ARN'] != '' # Test arn not empty
assert rotated_secret['Name'] == secret_name
assert rotated_secret['VersionId'] != ''


@ -82,11 +82,20 @@ def test_create_secret():
headers={
"X-Amz-Target": "secretsmanager.CreateSecret"},
)
res_2 = test_client.post('/',
data={"Name": "test-secret-2",
"SecretString": "bar-secret"},
headers={
"X-Amz-Target": "secretsmanager.CreateSecret"},
)
json_data = json.loads(res.data.decode("utf-8"))
assert json_data['ARN'] == (
'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad')
assert json_data['ARN'] != ''
assert json_data['Name'] == 'test-secret'
json_data_2 = json.loads(res_2.data.decode("utf-8"))
assert json_data_2['ARN'] != ''
assert json_data_2['Name'] == 'test-secret-2'
@mock_secretsmanager
def test_describe_secret():
@ -107,12 +116,30 @@ def test_describe_secret():
"X-Amz-Target": "secretsmanager.DescribeSecret"
},
)
create_secret_2 = test_client.post('/',
data={"Name": "test-secret-2",
"SecretString": "barsecret"},
headers={
"X-Amz-Target": "secretsmanager.CreateSecret"
},
)
describe_secret_2 = test_client.post('/',
data={"SecretId": "test-secret-2"},
headers={
"X-Amz-Target": "secretsmanager.DescribeSecret"
},
)
json_data = json.loads(describe_secret.data.decode("utf-8"))
assert json_data # Returned dict is not empty
assert json_data['ARN'] == (
'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad'
)
assert json_data['ARN'] != ''
assert json_data['Name'] == 'test-secret'
json_data_2 = json.loads(describe_secret_2.data.decode("utf-8"))
assert json_data_2 # Returned dict is not empty
assert json_data_2['ARN'] != ''
assert json_data_2['Name'] == 'test-secret-2'
@mock_secretsmanager
def test_describe_secret_that_does_not_exist():
@ -179,9 +206,7 @@ def test_rotate_secret():
json_data = json.loads(rotate_secret.data.decode("utf-8"))
assert json_data # Returned dict is not empty
assert json_data['ARN'] == (
'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad'
)
assert json_data['ARN'] != ''
assert json_data['Name'] == 'test-secret'
assert json_data['VersionId'] == client_request_token


@ -319,13 +319,15 @@ def test_describe_parameters():
Name='test',
Description='A test parameter',
Value='value',
Type='String')
Type='String',
AllowedPattern=r'.*')
response = client.describe_parameters()
len(response['Parameters']).should.equal(1)
response['Parameters'][0]['Name'].should.equal('test')
response['Parameters'][0]['Type'].should.equal('String')
response['Parameters'][0]['AllowedPattern'].should.equal(r'.*')
@mock_ssm


@ -8,6 +8,10 @@ deps =
commands =
{envpython} setup.py test
nosetests {posargs}
setenv =
AWS_ACCESS_KEY_ID=dummy_key
AWS_SECRET_ACCESS_KEY=dummy_secret
AWS_DEFAULT_REGION=us-east-1
[flake8]
ignore = E128,E501