Merge remote-tracking branch 'original/master'

This commit is contained in:
Stephan Huber 2018-06-06 09:57:12 +02:00
commit e118a678a6
57 changed files with 2158 additions and 93 deletions

View File

@ -2,5 +2,6 @@ include README.md LICENSE AUTHORS.md
include requirements.txt requirements-dev.txt tox.ini
include moto/ec2/resources/instance_types.json
include moto/ec2/resources/amis.json
include moto/cognitoidp/resources/*.json
recursive-include moto/templates *
recursive-include tests *

View File

@ -72,6 +72,8 @@ It gets even better! Moto isn't just for Python code and it isn't just for S3. L
|------------------------------------------------------------------------------|
| Cognito Identity | @mock_cognitoidentity| basic endpoints done |
|------------------------------------------------------------------------------|
| Cognito Identity Provider | @mock_cognitoidp| basic endpoints done |
|------------------------------------------------------------------------------|
| Data Pipeline | @mock_datapipeline| basic endpoints done |
|------------------------------------------------------------------------------|
| DynamoDB | @mock_dynamodb | core endpoints done |
@ -138,6 +140,8 @@ It gets even better! Moto isn't just for Python code and it isn't just for S3. L
|------------------------------------------------------------------------------|
```
For a full list of endpoint coverage, see [implementation coverage](https://github.com/spulec/moto/blob/master/IMPLEMENTATION_COVERAGE.md).
### Another Example
Imagine you have a function that you use to launch new ec2 instances:

View File

@ -12,6 +12,7 @@ from .awslambda import mock_lambda, mock_lambda_deprecated # flake8: noqa
from .cloudformation import mock_cloudformation, mock_cloudformation_deprecated # flake8: noqa
from .cloudwatch import mock_cloudwatch, mock_cloudwatch_deprecated # flake8: noqa
from .cognitoidentity import mock_cognitoidentity, mock_cognitoidentity_deprecated # flake8: noqa
from .cognitoidp import mock_cognitoidp, mock_cognitoidp_deprecated # flake8: noqa
from .datapipeline import mock_datapipeline, mock_datapipeline_deprecated # flake8: noqa
from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated # flake8: noqa
from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated # flake8: noqa

View File

@ -328,6 +328,9 @@ class RestAPI(BaseModel):
self.resources = {}
self.add_child('/') # Add default child
def __repr__(self):
return str(self.id)
def to_dict(self):
return {
"id": self.id,

View File

@ -320,8 +320,7 @@ DESCRIBE_LAUNCH_CONFIGURATIONS_TEMPLATE = """<DescribeLaunchConfigurationsRespon
<UserData/>
{% endif %}
<InstanceType>{{ launch_configuration.instance_type }}</InstanceType>
<LaunchConfigurationARN>arn:aws:autoscaling:us-east-1:803981987763:launchConfiguration:9dbbbf87-6141-428a-a409-0752edbe6cad:launchConfigurationName/{{ launch_configuration.name }}</LaunchConfigurationARN>
{% if launch_configuration.block_device_mappings %}
<BlockDeviceMappings>
{% for mount_point, mapping in launch_configuration.block_device_mappings.items() %}
@ -517,8 +516,7 @@ DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE = """<DescribeAutoScalingGroupsResponse xml
{% endif %}
<HealthCheckGracePeriod>{{ group.health_check_period }}</HealthCheckGracePeriod>
<DefaultCooldown>{{ group.default_cooldown }}</DefaultCooldown>
<AutoScalingGroupARN>arn:aws:autoscaling:us-east-1:803981987763:autoScalingGroup:ca861182-c8f9-4ca7-b1eb-cd35505f5ebb:autoScalingGroupName/{{ group.name }}</AutoScalingGroupARN>
{% if group.termination_policies %}
<TerminationPolicies>
{% for policy in group.termination_policies %}

View File

@ -4,6 +4,7 @@ import base64
from collections import defaultdict
import copy
import datetime
import docker
import docker.errors
import hashlib
import io
@ -44,6 +45,7 @@ except ImportError:
_stderr_regex = re.compile(r'START|END|REPORT RequestId: .*')
_orig_adapter_send = requests.adapters.HTTPAdapter.send
docker_3 = docker.__version__.startswith("3")
def zip2tar(zip_bytes):
@ -104,7 +106,11 @@ class _DockerDataVolumeContext:
# It doesn't exist so we need to create it
self._vol_ref.volume = self._lambda_func.docker_client.volumes.create(self._lambda_func.code_sha_256)
if docker_3:
volumes = {self.name: {'bind': '/tmp/data', 'mode': 'rw'}}
else:
volumes = {self.name: '/tmp/data'}
container = self._lambda_func.docker_client.containers.run('alpine', 'sleep 100', volumes=volumes, detach=True)
try:
tar_bytes = zip2tar(self._lambda_func.code_bytes)
container.put_archive('/tmp/data', tar_bytes)
@ -309,11 +315,15 @@ class LambdaFunction(BaseModel):
finally:
if container:
try:
exit_code = container.wait(timeout=300)
except requests.exceptions.ReadTimeout:
exit_code = -1
container.stop()
container.kill()
else:
if docker_3:
exit_code = exit_code['StatusCode']
output = container.logs(stdout=False, stderr=True)
output += container.logs(stdout=True, stderr=False)
container.remove()
@ -445,6 +455,9 @@ class LambdaVersion(BaseModel):
def __init__(self, spec):
self.version = spec['Version']
def __repr__(self):
return str(self.logical_resource_id)
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json,
region_name):
@ -675,3 +688,4 @@ lambda_backends = {_region.name: LambdaBackend(_region.name)
for _region in boto.awslambda.regions()}
lambda_backends['ap-southeast-2'] = LambdaBackend('ap-southeast-2')
lambda_backends['us-gov-west-1'] = LambdaBackend('us-gov-west-1')
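The `docker_3` switch above exists because `Container.wait()` changed between docker SDK releases: the 2.x SDK returns the exit code as a plain int, while 3.x returns a dict with a `StatusCode` key. A minimal sketch of the same normalization on its own (helper name is an illustration, not part of the diff):

```python
import docker

docker_3 = docker.__version__.startswith("3")


def wait_for_exit_code(container, timeout=300):
    """Return an int exit code regardless of docker SDK major version."""
    result = container.wait(timeout=timeout)
    # docker SDK >= 3 returns e.g. {'Error': None, 'StatusCode': 0}; 2.x returns an int.
    return result["StatusCode"] if docker_3 else result
```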

View File

@ -7,6 +7,7 @@ from moto.awslambda import lambda_backends
from moto.cloudformation import cloudformation_backends
from moto.cloudwatch import cloudwatch_backends
from moto.cognitoidentity import cognitoidentity_backends
from moto.cognitoidp import cognitoidp_backends
from moto.core import moto_api_backends
from moto.datapipeline import datapipeline_backends
from moto.dynamodb import dynamodb_backends
@ -51,6 +52,7 @@ BACKENDS = {
'cloudformation': cloudformation_backends,
'cloudwatch': cloudwatch_backends,
'cognito-identity': cognitoidentity_backends,
'cognito-idp': cognitoidp_backends,
'datapipeline': datapipeline_backends,
'dynamodb': dynamodb_backends,
'dynamodb2': dynamodb_backends2,

View File

@ -295,6 +295,14 @@ class Job(threading.Thread, BaseModel):
}
if self.job_stopped:
result['stoppedAt'] = datetime2int(self.job_stopped_at)
result['container'] = {}
result['container']['command'] = ['/bin/sh -c "for a in `seq 1 10`; do echo Hello World; sleep 1; done"']
result['container']['privileged'] = False
result['container']['readonlyRootFilesystem'] = False
result['container']['ulimits'] = {}
result['container']['vcpus'] = 1
result['container']['volumes'] = ''
result['container']['logStreamName'] = self.log_stream_name
if self.job_stopped_reason is not None:
result['statusReason'] = self.job_stopped_reason
return result
@ -378,6 +386,7 @@ class Job(threading.Thread, BaseModel):
# Send to cloudwatch
log_group = '/aws/batch/job'
stream_name = '{0}/default/{1}'.format(self.job_definition.name, self.job_id)
self.log_stream_name = stream_name
self._log_backend.ensure_log_group(log_group, None)
self._log_backend.create_log_stream(log_group, stream_name)
self._log_backend.put_log_events(log_group, stream_name, logs, None)

View File

@ -33,6 +33,18 @@ class MissingParameterError(BadRequest):
)
class ExportNotFound(BadRequest):
"""Exception to raise if a template tries to import a non-existent export"""
def __init__(self, export_name):
template = Template(ERROR_RESPONSE)
super(ExportNotFound, self).__init__()
self.description = template.render(
code='ExportNotFound',
message="No export named {0} found.".format(export_name)
)
ERROR_RESPONSE = """<ErrorResponse xmlns="http://cloudformation.amazonaws.com/doc/2010-05-15/">
<Error>
<Type>Sender</Type>

View File

@ -38,7 +38,7 @@ class FakeStack(BaseModel):
resource_status_reason="User Initiated")
self.description = self.template_dict.get('Description')
self.cross_stack_resources = cross_stack_resources or {}
self.resource_map = self._create_resource_map()
self.output_map = self._create_output_map()
self._add_stack_event("CREATE_COMPLETE")

View File

@ -28,7 +28,7 @@ from moto.s3 import models as s3_models
from moto.sns import models as sns_models
from moto.sqs import models as sqs_models
from .utils import random_suffix
from .exceptions import ExportNotFound, MissingParameterError, UnformattedGetAttTemplateException, ValidationError
from boto.cloudformation.stack import Output
MODEL_MAP = {
@ -206,6 +206,8 @@ def clean_json(resource_json, resources_map):
values = [x.value for x in resources_map.cross_stack_resources.values() if x.name == cleaned_val]
if any(values):
return values[0]
else:
raise ExportNotFound(cleaned_val)
if 'Fn::GetAZs' in resource_json:
region = resource_json.get('Fn::GetAZs') or DEFAULT_REGION
@ -369,6 +371,7 @@ class ResourceMap(collections.Mapping):
"AWS::Region": self._region_name, "AWS::Region": self._region_name,
"AWS::StackId": stack_id, "AWS::StackId": stack_id,
"AWS::StackName": stack_name, "AWS::StackName": stack_name,
"AWS::URLSuffix": "amazonaws.com",
"AWS::NoValue": None, "AWS::NoValue": None,
} }

View File

@ -229,8 +229,13 @@ class CloudWatchBackend(BaseBackend):
def put_metric_data(self, namespace, metric_data):
for metric_member in metric_data:
# Preserve "datetime" for get_metric_statistics comparisons
timestamp = metric_member.get('Timestamp')
if timestamp is not None and type(timestamp) != datetime:
timestamp = datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%fZ')
timestamp = timestamp.replace(tzinfo=tzutc())
self.metric_data.append(MetricDatum(
namespace, metric_member['MetricName'], float(metric_member.get('Value', 0)), metric_member.get('Dimensions.member', _EMPTY_LIST), timestamp))
def get_metric_statistics(self, namespace, metric_name, start_time, end_time, period, stats):
period_delta = timedelta(seconds=period)
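With this change the backend accepts the ISO8601 timestamp string that boto3 puts on the wire and converts it back into a timezone-aware datetime, so later `get_metric_statistics` comparisons keep working. A rough usage sketch (namespace and metric values are made up):

```python
from datetime import datetime

import boto3
from moto import mock_cloudwatch


@mock_cloudwatch
def test_put_metric_data_with_timestamp():
    cw = boto3.client("cloudwatch", region_name="us-east-1")
    # boto3 serializes this datetime as an ISO8601 string on the wire;
    # the patched backend parses it back into a tz-aware datetime.
    cw.put_metric_data(
        Namespace="tester",
        MetricData=[{"MetricName": "latency", "Value": 12.5,
                     "Timestamp": datetime.utcnow()}],
    )
```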

View File

@ -272,7 +272,7 @@ GET_METRIC_STATISTICS_TEMPLATE = """<GetMetricStatisticsResponse xmlns="http://m
</ResponseMetadata>
<GetMetricStatisticsResult>
<Label>{{ label }}</Label>
<Datapoints>
{% for datapoint in datapoints %}
<Datapoint>

View File

@ -0,0 +1,6 @@
from __future__ import unicode_literals
from .models import cognitoidp_backends
from ..core.models import base_decorator, deprecated_base_decorator
mock_cognitoidp = base_decorator(cognitoidp_backends)
mock_cognitoidp_deprecated = deprecated_base_decorator(cognitoidp_backends)
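The decorator pair above mirrors the other service mocks, so `@mock_cognitoidp` can wrap a test directly. A minimal usage sketch (the region, pool name and assertions are illustrative assumptions, not part of the diff):

```python
import boto3
from moto import mock_cognitoidp


@mock_cognitoidp
def test_create_user_pool():
    # Any region works; the per-region backend handles the call.
    client = boto3.client("cognito-idp", region_name="us-west-2")

    pool = client.create_user_pool(PoolName="test-pool")["UserPool"]
    assert pool["Name"] == "test-pool"

    pools = client.list_user_pools(MaxResults=10)["UserPools"]
    assert len(pools) == 1
```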

View File

@ -0,0 +1,34 @@
from __future__ import unicode_literals
import json
from werkzeug.exceptions import BadRequest
class ResourceNotFoundError(BadRequest):
def __init__(self, message):
super(ResourceNotFoundError, self).__init__()
self.description = json.dumps({
"message": message,
'__type': 'ResourceNotFoundException',
})
class UserNotFoundError(BadRequest):
def __init__(self, message):
super(UserNotFoundError, self).__init__()
self.description = json.dumps({
"message": message,
'__type': 'UserNotFoundException',
})
class NotAuthorizedError(BadRequest):
def __init__(self, message):
super(NotAuthorizedError, self).__init__()
self.description = json.dumps({
"message": message,
'__type': 'NotAuthorizedException',
})

moto/cognitoidp/models.py (new file, 512 lines)
View File

@ -0,0 +1,512 @@
from __future__ import unicode_literals
import datetime
import json
import os
import time
import uuid
import boto.cognito.identity
from jose import jws
from moto.compat import OrderedDict
from moto.core import BaseBackend, BaseModel
from .exceptions import NotAuthorizedError, ResourceNotFoundError, UserNotFoundError
UserStatus = {
"FORCE_CHANGE_PASSWORD": "FORCE_CHANGE_PASSWORD",
"CONFIRMED": "CONFIRMED",
}
class CognitoIdpUserPool(BaseModel):
def __init__(self, region, name, extended_config):
self.region = region
self.id = str(uuid.uuid4())
self.name = name
self.status = None
self.extended_config = extended_config or {}
self.creation_date = datetime.datetime.utcnow()
self.last_modified_date = datetime.datetime.utcnow()
self.clients = OrderedDict()
self.identity_providers = OrderedDict()
self.users = OrderedDict()
self.refresh_tokens = {}
self.access_tokens = {}
self.id_tokens = {}
with open(os.path.join(os.path.dirname(__file__), "resources/jwks-private.json")) as f:
self.json_web_key = json.loads(f.read())
def _base_json(self):
return {
"Id": self.id,
"Name": self.name,
"Status": self.status,
"CreationDate": time.mktime(self.creation_date.timetuple()),
"LastModifiedDate": time.mktime(self.last_modified_date.timetuple()),
}
def to_json(self, extended=False):
user_pool_json = self._base_json()
if extended:
user_pool_json.update(self.extended_config)
else:
user_pool_json["LambdaConfig"] = self.extended_config.get("LambdaConfig") or {}
return user_pool_json
def create_jwt(self, client_id, username, expires_in=60 * 60, extra_data={}):
now = int(time.time())
payload = {
"iss": "https://cognito-idp.{}.amazonaws.com/{}".format(self.region, self.id),
"sub": self.users[username].id,
"aud": client_id,
"token_use": "id",
"auth_time": now,
"exp": now + expires_in,
}
payload.update(extra_data)
return jws.sign(payload, self.json_web_key, algorithm='RS256'), expires_in
def create_id_token(self, client_id, username):
id_token, expires_in = self.create_jwt(client_id, username)
self.id_tokens[id_token] = (client_id, username)
return id_token, expires_in
def create_refresh_token(self, client_id, username):
refresh_token = str(uuid.uuid4())
self.refresh_tokens[refresh_token] = (client_id, username)
return refresh_token
def create_access_token(self, client_id, username):
access_token, expires_in = self.create_jwt(client_id, username)
self.access_tokens[access_token] = (client_id, username)
return access_token, expires_in
def create_tokens_from_refresh_token(self, refresh_token):
client_id, username = self.refresh_tokens.get(refresh_token)
if not username:
raise NotAuthorizedError(refresh_token)
access_token, expires_in = self.create_access_token(client_id, username)
id_token, _ = self.create_id_token(client_id, username)
return access_token, id_token, expires_in
class CognitoIdpUserPoolDomain(BaseModel):
def __init__(self, user_pool_id, domain):
self.user_pool_id = user_pool_id
self.domain = domain
def to_json(self):
return {
"UserPoolId": self.user_pool_id,
"AWSAccountId": str(uuid.uuid4()),
"CloudFrontDistribution": None,
"Domain": self.domain,
"S3Bucket": None,
"Status": "ACTIVE",
"Version": None,
}
class CognitoIdpUserPoolClient(BaseModel):
def __init__(self, user_pool_id, extended_config):
self.user_pool_id = user_pool_id
self.id = str(uuid.uuid4())
self.secret = str(uuid.uuid4())
self.extended_config = extended_config or {}
def _base_json(self):
return {
"ClientId": self.id,
"ClientName": self.extended_config.get("ClientName"),
"UserPoolId": self.user_pool_id,
}
def to_json(self, extended=False):
user_pool_client_json = self._base_json()
if extended:
user_pool_client_json.update(self.extended_config)
return user_pool_client_json
class CognitoIdpIdentityProvider(BaseModel):
def __init__(self, name, extended_config):
self.name = name
self.extended_config = extended_config or {}
self.creation_date = datetime.datetime.utcnow()
self.last_modified_date = datetime.datetime.utcnow()
def _base_json(self):
return {
"ProviderName": self.name,
"ProviderType": self.extended_config.get("ProviderType"),
"CreationDate": time.mktime(self.creation_date.timetuple()),
"LastModifiedDate": time.mktime(self.last_modified_date.timetuple()),
}
def to_json(self, extended=False):
identity_provider_json = self._base_json()
if extended:
identity_provider_json.update(self.extended_config)
return identity_provider_json
class CognitoIdpUser(BaseModel):
def __init__(self, user_pool_id, username, password, status, attributes):
self.id = str(uuid.uuid4())
self.user_pool_id = user_pool_id
self.username = username
self.password = password
self.status = status
self.enabled = True
self.attributes = attributes
self.create_date = datetime.datetime.utcnow()
self.last_modified_date = datetime.datetime.utcnow()
def _base_json(self):
return {
"UserPoolId": self.user_pool_id,
"Username": self.username,
"UserStatus": self.status,
"UserCreateDate": time.mktime(self.create_date.timetuple()),
"UserLastModifiedDate": time.mktime(self.last_modified_date.timetuple()),
}
# list_users brings back "Attributes" while admin_get_user brings back "UserAttributes".
def to_json(self, extended=False, attributes_key="Attributes"):
user_json = self._base_json()
if extended:
user_json.update(
{
"Enabled": self.enabled,
attributes_key: self.attributes,
"MFAOptions": []
}
)
return user_json
class CognitoIdpBackend(BaseBackend):
def __init__(self, region):
super(CognitoIdpBackend, self).__init__()
self.region = region
self.user_pools = OrderedDict()
self.user_pool_domains = OrderedDict()
self.sessions = {}
def reset(self):
region = self.region
self.__dict__ = {}
self.__init__(region)
# User pool
def create_user_pool(self, name, extended_config):
user_pool = CognitoIdpUserPool(self.region, name, extended_config)
self.user_pools[user_pool.id] = user_pool
return user_pool
def list_user_pools(self):
return self.user_pools.values()
def describe_user_pool(self, user_pool_id):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
return user_pool
def delete_user_pool(self, user_pool_id):
if user_pool_id not in self.user_pools:
raise ResourceNotFoundError(user_pool_id)
del self.user_pools[user_pool_id]
# User pool domain
def create_user_pool_domain(self, user_pool_id, domain):
if user_pool_id not in self.user_pools:
raise ResourceNotFoundError(user_pool_id)
user_pool_domain = CognitoIdpUserPoolDomain(user_pool_id, domain)
self.user_pool_domains[domain] = user_pool_domain
return user_pool_domain
def describe_user_pool_domain(self, domain):
if domain not in self.user_pool_domains:
return None
return self.user_pool_domains[domain]
def delete_user_pool_domain(self, domain):
if domain not in self.user_pool_domains:
raise ResourceNotFoundError(domain)
del self.user_pool_domains[domain]
# User pool client
def create_user_pool_client(self, user_pool_id, extended_config):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
user_pool_client = CognitoIdpUserPoolClient(user_pool_id, extended_config)
user_pool.clients[user_pool_client.id] = user_pool_client
return user_pool_client
def list_user_pool_clients(self, user_pool_id):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
return user_pool.clients.values()
def describe_user_pool_client(self, user_pool_id, client_id):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
client = user_pool.clients.get(client_id)
if not client:
raise ResourceNotFoundError(client_id)
return client
def update_user_pool_client(self, user_pool_id, client_id, extended_config):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
client = user_pool.clients.get(client_id)
if not client:
raise ResourceNotFoundError(client_id)
client.extended_config.update(extended_config)
return client
def delete_user_pool_client(self, user_pool_id, client_id):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
if client_id not in user_pool.clients:
raise ResourceNotFoundError(client_id)
del user_pool.clients[client_id]
# Identity provider
def create_identity_provider(self, user_pool_id, name, extended_config):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
identity_provider = CognitoIdpIdentityProvider(name, extended_config)
user_pool.identity_providers[name] = identity_provider
return identity_provider
def list_identity_providers(self, user_pool_id):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
return user_pool.identity_providers.values()
def describe_identity_provider(self, user_pool_id, name):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
identity_provider = user_pool.identity_providers.get(name)
if not identity_provider:
raise ResourceNotFoundError(name)
return identity_provider
def delete_identity_provider(self, user_pool_id, name):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
if name not in user_pool.identity_providers:
raise ResourceNotFoundError(name)
del user_pool.identity_providers[name]
# User
def admin_create_user(self, user_pool_id, username, temporary_password, attributes):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
user = CognitoIdpUser(user_pool_id, username, temporary_password, UserStatus["FORCE_CHANGE_PASSWORD"], attributes)
user_pool.users[user.username] = user
return user
def admin_get_user(self, user_pool_id, username):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
if username not in user_pool.users:
raise ResourceNotFoundError(username)
return user_pool.users[username]
def list_users(self, user_pool_id):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
return user_pool.users.values()
def admin_delete_user(self, user_pool_id, username):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
if username not in user_pool.users:
raise ResourceNotFoundError(username)
del user_pool.users[username]
def _log_user_in(self, user_pool, client, username):
refresh_token = user_pool.create_refresh_token(client.id, username)
access_token, id_token, expires_in = user_pool.create_tokens_from_refresh_token(refresh_token)
return {
"AuthenticationResult": {
"IdToken": id_token,
"AccessToken": access_token,
"RefreshToken": refresh_token,
"ExpiresIn": expires_in,
}
}
def admin_initiate_auth(self, user_pool_id, client_id, auth_flow, auth_parameters):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
client = user_pool.clients.get(client_id)
if not client:
raise ResourceNotFoundError(client_id)
if auth_flow == "ADMIN_NO_SRP_AUTH":
username = auth_parameters.get("USERNAME")
password = auth_parameters.get("PASSWORD")
user = user_pool.users.get(username)
if not user:
raise UserNotFoundError(username)
if user.password != password:
raise NotAuthorizedError(username)
if user.status == UserStatus["FORCE_CHANGE_PASSWORD"]:
session = str(uuid.uuid4())
self.sessions[session] = user_pool
return {
"ChallengeName": "NEW_PASSWORD_REQUIRED",
"ChallengeParameters": {},
"Session": session,
}
return self._log_user_in(user_pool, client, username)
elif auth_flow == "REFRESH_TOKEN":
refresh_token = auth_parameters.get("REFRESH_TOKEN")
id_token, access_token, expires_in = user_pool.create_tokens_from_refresh_token(refresh_token)
return {
"AuthenticationResult": {
"IdToken": id_token,
"AccessToken": access_token,
"ExpiresIn": expires_in,
}
}
else:
return {}
def respond_to_auth_challenge(self, session, client_id, challenge_name, challenge_responses):
user_pool = self.sessions.get(session)
if not user_pool:
raise ResourceNotFoundError(session)
client = user_pool.clients.get(client_id)
if not client:
raise ResourceNotFoundError(client_id)
if challenge_name == "NEW_PASSWORD_REQUIRED":
username = challenge_responses.get("USERNAME")
new_password = challenge_responses.get("NEW_PASSWORD")
user = user_pool.users.get(username)
if not user:
raise UserNotFoundError(username)
user.password = new_password
user.status = UserStatus["CONFIRMED"]
del self.sessions[session]
return self._log_user_in(user_pool, client, username)
else:
return {}
def confirm_forgot_password(self, client_id, username, password):
for user_pool in self.user_pools.values():
if client_id in user_pool.clients and username in user_pool.users:
user_pool.users[username].password = password
break
else:
raise ResourceNotFoundError(client_id)
def change_password(self, access_token, previous_password, proposed_password):
for user_pool in self.user_pools.values():
if access_token in user_pool.access_tokens:
_, username = user_pool.access_tokens[access_token]
user = user_pool.users.get(username)
if not user:
raise UserNotFoundError(username)
if user.password != previous_password:
raise NotAuthorizedError(username)
user.password = proposed_password
if user.status == UserStatus["FORCE_CHANGE_PASSWORD"]:
user.status = UserStatus["CONFIRMED"]
break
else:
raise NotAuthorizedError(access_token)
cognitoidp_backends = {}
for region in boto.cognito.identity.regions():
cognitoidp_backends[region.name] = CognitoIdpBackend(region.name)
# Hack to help moto-server process requests on localhost, where the region isn't
# specified in the host header. Some endpoints (change password, confirm forgot
# password) have no authorization header from which to extract the region.
def find_region_by_value(key, value):
for region in cognitoidp_backends:
backend = cognitoidp_backends[region]
for user_pool in backend.user_pools.values():
if key == "client_id" and value in user_pool.clients:
return region
if key == "access_token" and value in user_pool.access_tokens:
return region
return cognitoidp_backends.keys()[0]
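The backend above models the admin auth flow end to end: `admin_create_user` leaves the user in `FORCE_CHANGE_PASSWORD`, `admin_initiate_auth` with `ADMIN_NO_SRP_AUTH` then answers with a `NEW_PASSWORD_REQUIRED` challenge, and `respond_to_auth_challenge` confirms the user and issues tokens. A hedged boto3 sketch of that sequence (names and passwords are made up):

```python
import boto3
from moto import mock_cognitoidp


@mock_cognitoidp
def test_admin_auth_flow():
    client = boto3.client("cognito-idp", region_name="us-west-2")

    pool_id = client.create_user_pool(PoolName="pool")["UserPool"]["Id"]
    client_id = client.create_user_pool_client(
        UserPoolId=pool_id, ClientName="app")["UserPoolClient"]["ClientId"]

    client.admin_create_user(UserPoolId=pool_id, Username="alice",
                             TemporaryPassword="Temp123!")

    # First login comes back as a NEW_PASSWORD_REQUIRED challenge.
    challenge = client.admin_initiate_auth(
        UserPoolId=pool_id, ClientId=client_id,
        AuthFlow="ADMIN_NO_SRP_AUTH",
        AuthParameters={"USERNAME": "alice", "PASSWORD": "Temp123!"},
    )
    assert challenge["ChallengeName"] == "NEW_PASSWORD_REQUIRED"

    # Answering the challenge confirms the user and returns tokens.
    result = client.respond_to_auth_challenge(
        Session=challenge["Session"], ClientId=client_id,
        ChallengeName="NEW_PASSWORD_REQUIRED",
        ChallengeResponses={"USERNAME": "alice", "NEW_PASSWORD": "Real123!"},
    )
    assert "AccessToken" in result["AuthenticationResult"]
```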

View File

@ -0,0 +1,9 @@
{
"alg": "RS256",
"d": "DrrLT2qMERN0Id-bNglOe6SVkUNF3MTIzrH-TVkMZhsHk8kyqiqt-8JbLQMh2gOgTIjpu93b2_UREGA0BGdWs34hv0v7Gx8uIngCY6e6XO8LDemOo-2VHZHl5Ew-lrRYhwq12c_c4mfavAdMzXHODrpXSnqLnbFK88S-3fu6Da4czc4Svo4v8MkGZk_fcTml3Y1jIFHxbbTWka37j4NLpAzdfvX--J086m-LbZ8CJL_lGMKbAKsWURMmzCFL9ZFH9JdzX79KeDOH0GrzGwS_cOsZHsCamF_CWrtG4asPt-SHyn_k0X4JJJgAWVA674VCqorMAPDVYIzKJOUMImmsEQ",
"e": "AQAB",
"kid": "dummy",
"kty": "RSA",
"n": "j1pT3xKbswmMySvCefmiD3mfDaRFpZ9Y3Jl4fF0hMaCRVAt_e0yR7BeueDfqmj_NhVSO0WB5ao5e8V-9RFQOtK8SrqKl3i01-CyWYPICwybaGKhbJJR0S_6cZ8n5kscF1MjpIlsJcCzm-yKgTc3Mxk6KtrLoNgRvMwGLeHUXPkhS9YHfDKRe864iMFOK4df69brIYEICG2VLduh0hXYa0i-J3drwm7vxNdX7pVpCDu34qJtYoWq6CXt3Tzfi3YfWp8cFjGNbaDa3WnCd2IXpp0TFsFS-cEsw5rJjSl5OllJGeZKBtLeyVTy9PYwnk7MW43WSYeYstbk9NluX4H8Iuw",
"use": "sig"
}

View File

@ -0,0 +1,12 @@
{
"keys": [
{
"alg": "RS256",
"e": "AQAB",
"kid": "dummy",
"kty": "RSA",
"n": "j1pT3xKbswmMySvCefmiD3mfDaRFpZ9Y3Jl4fF0hMaCRVAt_e0yR7BeueDfqmj_NhVSO0WB5ao5e8V-9RFQOtK8SrqKl3i01-CyWYPICwybaGKhbJJR0S_6cZ8n5kscF1MjpIlsJcCzm-yKgTc3Mxk6KtrLoNgRvMwGLeHUXPkhS9YHfDKRe864iMFOK4df69brIYEICG2VLduh0hXYa0i-J3drwm7vxNdX7pVpCDu34qJtYoWq6CXt3Tzfi3YfWp8cFjGNbaDa3WnCd2IXpp0TFsFS-cEsw5rJjSl5OllJGeZKBtLeyVTy9PYwnk7MW43WSYeYstbk9NluX4H8Iuw",
"use": "sig"
}
]
}

View File

@ -0,0 +1,235 @@
from __future__ import unicode_literals
import json
import os
from moto.core.responses import BaseResponse
from .models import cognitoidp_backends, find_region_by_value
class CognitoIdpResponse(BaseResponse):
@property
def parameters(self):
return json.loads(self.body)
# User pool
def create_user_pool(self):
name = self.parameters.pop("PoolName")
user_pool = cognitoidp_backends[self.region].create_user_pool(name, self.parameters)
return json.dumps({
"UserPool": user_pool.to_json(extended=True)
})
def list_user_pools(self):
user_pools = cognitoidp_backends[self.region].list_user_pools()
return json.dumps({
"UserPools": [user_pool.to_json() for user_pool in user_pools]
})
def describe_user_pool(self):
user_pool_id = self._get_param("UserPoolId")
user_pool = cognitoidp_backends[self.region].describe_user_pool(user_pool_id)
return json.dumps({
"UserPool": user_pool.to_json(extended=True)
})
def delete_user_pool(self):
user_pool_id = self._get_param("UserPoolId")
cognitoidp_backends[self.region].delete_user_pool(user_pool_id)
return ""
# User pool domain
def create_user_pool_domain(self):
domain = self._get_param("Domain")
user_pool_id = self._get_param("UserPoolId")
cognitoidp_backends[self.region].create_user_pool_domain(user_pool_id, domain)
return ""
def describe_user_pool_domain(self):
domain = self._get_param("Domain")
user_pool_domain = cognitoidp_backends[self.region].describe_user_pool_domain(domain)
domain_description = {}
if user_pool_domain:
domain_description = user_pool_domain.to_json()
return json.dumps({
"DomainDescription": domain_description
})
def delete_user_pool_domain(self):
domain = self._get_param("Domain")
cognitoidp_backends[self.region].delete_user_pool_domain(domain)
return ""
# User pool client
def create_user_pool_client(self):
user_pool_id = self.parameters.pop("UserPoolId")
user_pool_client = cognitoidp_backends[self.region].create_user_pool_client(user_pool_id, self.parameters)
return json.dumps({
"UserPoolClient": user_pool_client.to_json(extended=True)
})
def list_user_pool_clients(self):
user_pool_id = self._get_param("UserPoolId")
user_pool_clients = cognitoidp_backends[self.region].list_user_pool_clients(user_pool_id)
return json.dumps({
"UserPoolClients": [user_pool_client.to_json() for user_pool_client in user_pool_clients]
})
def describe_user_pool_client(self):
user_pool_id = self._get_param("UserPoolId")
client_id = self._get_param("ClientId")
user_pool_client = cognitoidp_backends[self.region].describe_user_pool_client(user_pool_id, client_id)
return json.dumps({
"UserPoolClient": user_pool_client.to_json(extended=True)
})
def update_user_pool_client(self):
user_pool_id = self.parameters.pop("UserPoolId")
client_id = self.parameters.pop("ClientId")
user_pool_client = cognitoidp_backends[self.region].update_user_pool_client(user_pool_id, client_id, self.parameters)
return json.dumps({
"UserPoolClient": user_pool_client.to_json(extended=True)
})
def delete_user_pool_client(self):
user_pool_id = self._get_param("UserPoolId")
client_id = self._get_param("ClientId")
cognitoidp_backends[self.region].delete_user_pool_client(user_pool_id, client_id)
return ""
# Identity provider
def create_identity_provider(self):
user_pool_id = self._get_param("UserPoolId")
name = self.parameters.pop("ProviderName")
identity_provider = cognitoidp_backends[self.region].create_identity_provider(user_pool_id, name, self.parameters)
return json.dumps({
"IdentityProvider": identity_provider.to_json(extended=True)
})
def list_identity_providers(self):
user_pool_id = self._get_param("UserPoolId")
identity_providers = cognitoidp_backends[self.region].list_identity_providers(user_pool_id)
return json.dumps({
"Providers": [identity_provider.to_json() for identity_provider in identity_providers]
})
def describe_identity_provider(self):
user_pool_id = self._get_param("UserPoolId")
name = self._get_param("ProviderName")
identity_provider = cognitoidp_backends[self.region].describe_identity_provider(user_pool_id, name)
return json.dumps({
"IdentityProvider": identity_provider.to_json(extended=True)
})
def delete_identity_provider(self):
user_pool_id = self._get_param("UserPoolId")
name = self._get_param("ProviderName")
cognitoidp_backends[self.region].delete_identity_provider(user_pool_id, name)
return ""
# User
def admin_create_user(self):
user_pool_id = self._get_param("UserPoolId")
username = self._get_param("Username")
temporary_password = self._get_param("TemporaryPassword")
user = cognitoidp_backends[self.region].admin_create_user(
user_pool_id,
username,
temporary_password,
self._get_param("UserAttributes", [])
)
return json.dumps({
"User": user.to_json(extended=True)
})
def admin_get_user(self):
user_pool_id = self._get_param("UserPoolId")
username = self._get_param("Username")
user = cognitoidp_backends[self.region].admin_get_user(user_pool_id, username)
return json.dumps(
user.to_json(extended=True, attributes_key="UserAttributes")
)
def list_users(self):
user_pool_id = self._get_param("UserPoolId")
users = cognitoidp_backends[self.region].list_users(user_pool_id)
return json.dumps({
"Users": [user.to_json(extended=True) for user in users]
})
def admin_delete_user(self):
user_pool_id = self._get_param("UserPoolId")
username = self._get_param("Username")
cognitoidp_backends[self.region].admin_delete_user(user_pool_id, username)
return ""
def admin_initiate_auth(self):
user_pool_id = self._get_param("UserPoolId")
client_id = self._get_param("ClientId")
auth_flow = self._get_param("AuthFlow")
auth_parameters = self._get_param("AuthParameters")
auth_result = cognitoidp_backends[self.region].admin_initiate_auth(
user_pool_id,
client_id,
auth_flow,
auth_parameters,
)
return json.dumps(auth_result)
def respond_to_auth_challenge(self):
session = self._get_param("Session")
client_id = self._get_param("ClientId")
challenge_name = self._get_param("ChallengeName")
challenge_responses = self._get_param("ChallengeResponses")
auth_result = cognitoidp_backends[self.region].respond_to_auth_challenge(
session,
client_id,
challenge_name,
challenge_responses,
)
return json.dumps(auth_result)
def forgot_password(self):
return json.dumps({
"CodeDeliveryDetails": {
"DeliveryMedium": "EMAIL",
"Destination": "...",
}
})
# This endpoint receives no authorization header, so if moto-server is listening
# on localhost (doesn't get a region in the host header), it doesn't know what
# region's backend should handle the traffic, and we use `find_region_by_value` to
# solve that problem.
def confirm_forgot_password(self):
client_id = self._get_param("ClientId")
username = self._get_param("Username")
password = self._get_param("Password")
region = find_region_by_value("client_id", client_id)
cognitoidp_backends[region].confirm_forgot_password(client_id, username, password)
return ""
# Ditto the comment on confirm_forgot_password.
def change_password(self):
access_token = self._get_param("AccessToken")
previous_password = self._get_param("PreviousPassword")
proposed_password = self._get_param("ProposedPassword")
region = find_region_by_value("access_token", access_token)
cognitoidp_backends[region].change_password(access_token, previous_password, proposed_password)
return ""
class CognitoIdpJsonWebKeyResponse(BaseResponse):
def __init__(self):
with open(os.path.join(os.path.dirname(__file__), "resources/jwks-public.json")) as f:
self.json_web_key = f.read()
def serve_json_web_key(self, request, full_url, headers):
return 200, {"Content-Type": "application/json"}, self.json_web_key

moto/cognitoidp/urls.py (new file, 11 lines)
View File

@ -0,0 +1,11 @@
from __future__ import unicode_literals
from .responses import CognitoIdpResponse, CognitoIdpJsonWebKeyResponse
url_bases = [
"https?://cognito-idp.(.+).amazonaws.com",
]
url_paths = {
'{0}/$': CognitoIdpResponse.dispatch,
'{0}/<user_pool_id>/.well-known/jwks.json$': CognitoIdpJsonWebKeyResponse().serve_json_web_key,
}

View File

@ -5,6 +5,7 @@ import datetime
import json
import logging
import re
import io
import pytz
from moto.core.exceptions import DryRunClientError
@ -622,7 +623,7 @@ class AWSServiceSpec(object):
def __init__(self, path):
self.path = resource_filename('botocore', path)
with io.open(self.path, 'r', encoding='utf-8') as f:
spec = json.load(f)
self.metadata = spec['metadata']
self.operations = spec['operations']

View File

@ -62,13 +62,13 @@ class DynamoHandler(BaseResponse):
name = body['TableName']
key_schema = body['KeySchema']
hash_key = key_schema['HashKeyElement']
hash_key_attr = hash_key['AttributeName']
hash_key_type = hash_key['AttributeType']
range_key = key_schema.get('RangeKeyElement', {})
range_key_attr = range_key.get('AttributeName')
range_key_type = range_key.get('AttributeType')
throughput = body["ProvisionedThroughput"]
read_units = throughput["ReadCapacityUnits"]

View File

@ -176,16 +176,17 @@ class Item(BaseModel):
key_parts = key.split('.')
attr = key_parts.pop(0)
if attr not in self.attrs:
raise ValueError
last_val = self.attrs[attr].value
for key_part in key_parts:
# Hack but it'll do, traverses into a dict
last_val_type = list(last_val.keys())
if last_val_type and last_val_type[0] == 'M':
last_val = last_val['M']
if key_part not in last_val:
last_val[key_part] = {'M': {}}
last_val = last_val[key_part]

View File

@ -1324,7 +1324,7 @@ class SecurityGroup(TaggedEC2Resource):
self.name = name
self.description = description
self.ingress_rules = []
self.egress_rules = [SecurityRule(-1, None, None, ['0.0.0.0/0'], [])]
self.enis = {}
self.vpc_id = vpc_id
self.owner_id = "123456789012"
@ -3159,7 +3159,10 @@ class SpotFleetBackend(object):
class ElasticAddress(object):
def __init__(self, domain, address=None):
if address:
self.public_ip = address
else:
self.public_ip = random_ip()
self.allocation_id = random_eip_allocation_id() if domain == "vpc" else None
self.domain = domain
@ -3222,10 +3225,12 @@ class ElasticAddressBackend(object):
self.addresses = []
super(ElasticAddressBackend, self).__init__()
def allocate_address(self, domain, address=None):
if domain not in ['standard', 'vpc']:
raise InvalidDomainError(domain)
if address:
address = ElasticAddress(domain, address)
else:
address = ElasticAddress(domain)
self.addresses.append(address)
return address

View File

@ -7,7 +7,12 @@ class ElasticIPAddresses(BaseResponse):
def allocate_address(self):
domain = self._get_param('Domain', if_none='standard')
reallocate_address = self._get_param('Address', if_none=None)
if self.is_not_dryrun('AllocateAddress'):
if reallocate_address:
address = self.ec2_backend.allocate_address(
domain, address=reallocate_address)
else:
address = self.ec2_backend.allocate_address(domain)
template = self.response_template(ALLOCATE_ADDRESS_RESPONSE)
return template.render(address=address)
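The new optional `Address` parameter lets a caller request a specific elastic IP instead of a random one. A hedged boto3 sketch (the address value is arbitrary):

```python
import boto3
from moto import mock_ec2


@mock_ec2
def test_allocate_specific_address():
    ec2 = boto3.client("ec2", region_name="us-east-1")
    # Without Address a random public IP is handed out; with it, the
    # requested IP is used as-is.
    addr = ec2.allocate_address(Domain="vpc", Address="54.0.0.1")
    assert addr["PublicIp"] == "54.0.0.1"
```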

View File

@ -179,8 +179,12 @@ DESCRIBE_SECURITY_GROUPS_RESPONSE = """<DescribeSecurityGroupsResponse xmlns="ht
{% for rule in group.egress_rules %}
<item>
<ipProtocol>{{ rule.ip_protocol }}</ipProtocol>
{% if rule.from_port %}
<fromPort>{{ rule.from_port }}</fromPort>
{% endif %}
{% if rule.to_port %}
<toPort>{{ rule.to_port }}</toPort>
{% endif %}
<groups>
{% for source_group in rule.source_groups %}
<item>

View File

@ -1,5 +1,6 @@
import os
import re
import json
from moto.core.exceptions import JsonRESTError
from moto.core import BaseBackend, BaseModel
@ -210,7 +211,7 @@ class EventsBackend(BaseBackend):
raise NotImplementedError()
def put_permission(self, action, principal, statement_id):
if action is None or action != 'events:PutEvents':
raise JsonRESTError('InvalidParameterValue', 'Action must be PutEvents')
if principal is None or self.ACCOUNT_ID.match(principal) is None:
@ -235,11 +236,13 @@ class EventsBackend(BaseBackend):
'Sid': statement_id,
'Effect': 'Allow',
'Principal': {'AWS': 'arn:aws:iam::{0}:root'.format(data['principal'])},
'Action': data['action'],
'Resource': arn
})
policy = {'Version': '2012-10-17', 'Statement': statements}
policy_json = json.dumps(policy)
return {
'Policy': policy_json,
'Name': 'default',
'Arn': arn
}
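After this change the policy is returned as a JSON-encoded string (matching the real API) and the action must be the fully qualified `events:PutEvents`. A hedged sketch, assuming the surrounding describe_event_bus plumbing passes this dict through unchanged:

```python
import json

import boto3
from moto import mock_events


@mock_events
def test_put_permission():
    events = boto3.client("events", region_name="us-east-1")
    events.put_permission(
        Action="events:PutEvents",
        Principal="111111111111",
        StatementId="allow-account",
    )
    # Policy now comes back as a JSON string rather than a nested dict.
    policy = json.loads(events.describe_event_bus()["Policy"])
    assert policy["Statement"][0]["Action"] == "events:PutEvents"
```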

View File

@ -407,7 +407,7 @@ class IamResponse(BaseResponse):
return template.render(
user_name=user_name,
policy_name=policy_name,
policy_document=policy_document.get('policy_document')
)
def list_user_policies(self):

View File

@ -132,6 +132,9 @@ class LogGroup:
def __init__(self, region, name, tags):
self.name = name
self.region = region
self.arn = "arn:aws:logs:{region}:1:log-group:{log_group}".format(
region=region, log_group=name)
self.creationTime = unix_time_millis()
self.tags = tags
self.streams = dict() # {name: LogStream}
@ -197,6 +200,16 @@ class LogGroup:
searched_streams = [{"logStreamName": stream.logStreamName, "searchedCompletely": True} for stream in streams]
return events_page, next_token, searched_streams
def to_describe_dict(self):
return {
"arn": self.arn,
"creationTime": self.creationTime,
"logGroupName": self.name,
"metricFilterCount": 0,
"retentionInDays": 30,
"storedBytes": sum(s.storedBytes for s in self.streams.values()),
}
class LogsBackend(BaseBackend):
def __init__(self, region_name):
@ -223,6 +236,21 @@ class LogsBackend(BaseBackend):
raise ResourceNotFoundException()
del self.groups[log_group_name]
def describe_log_groups(self, limit, log_group_name_prefix, next_token):
if log_group_name_prefix is None:
log_group_name_prefix = ''
if next_token is None:
next_token = 0
groups = sorted(group.to_describe_dict() for name, group in self.groups.items() if name.startswith(log_group_name_prefix))
groups_page = groups[next_token:next_token + limit]
next_token += limit
if next_token >= len(groups):
next_token = None
return groups_page, next_token
def create_log_stream(self, log_group_name, log_stream_name):
if log_group_name not in self.groups:
raise ResourceNotFoundException()

View File

@ -33,6 +33,18 @@ class LogsResponse(BaseResponse):
self.logs_backend.delete_log_group(log_group_name)
return ''
def describe_log_groups(self):
log_group_name_prefix = self._get_param('logGroupNamePrefix')
next_token = self._get_param('nextToken')
limit = self._get_param('limit', 50)
assert limit <= 50
groups, next_token = self.logs_backend.describe_log_groups(
limit, log_group_name_prefix, next_token)
return json.dumps({
"logGroups": groups,
"nextToken": next_token
})
def create_log_stream(self):
log_group_name = self._get_param('logGroupName')
log_stream_name = self._get_param('logStreamName')
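The new `describe_log_groups` handler pages through groups up to 50 at a time, using an integer offset as the `nextToken`. A hedged boto3 sketch (group name and assertion are illustrative):

```python
import boto3
from moto import mock_logs


@mock_logs
def test_describe_log_groups():
    logs = boto3.client("logs", region_name="us-east-1")
    logs.create_log_group(logGroupName="app/web")

    # Prefix filtering and paging are handled by the backend above.
    resp = logs.describe_log_groups(logGroupNamePrefix="app/")
    assert resp["logGroups"][0]["logGroupName"] == "app/web"
```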

View File

@ -726,10 +726,11 @@ class RDS2Backend(BaseBackend):
def describe_snapshots(self, db_instance_identifier, db_snapshot_identifier):
if db_instance_identifier:
db_instance_snapshots = []
for snapshot in self.snapshots.values():
if snapshot.database.db_instance_identifier == db_instance_identifier:
db_instance_snapshots.append(snapshot)
return db_instance_snapshots
if db_snapshot_identifier:
if db_snapshot_identifier in self.snapshots:

View File

@ -73,7 +73,8 @@ class Cluster(TaggableResourceMixin, BaseModel):
preferred_maintenance_window, cluster_parameter_group_name,
automated_snapshot_retention_period, port, cluster_version,
allow_version_upgrade, number_of_nodes, publicly_accessible,
encrypted, region_name, tags=None, iam_roles_arn=None,
restored_from_snapshot=False):
super(Cluster, self).__init__(region_name, tags)
self.redshift_backend = redshift_backend
self.cluster_identifier = cluster_identifier
@ -119,6 +120,7 @@ class Cluster(TaggableResourceMixin, BaseModel):
self.number_of_nodes = 1
self.iam_roles_arn = iam_roles_arn or []
self.restored_from_snapshot = restored_from_snapshot
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
@ -242,7 +244,15 @@ class Cluster(TaggableResourceMixin, BaseModel):
"IamRoleArn": iam_role_arn "IamRoleArn": iam_role_arn
} for iam_role_arn in self.iam_roles_arn] } for iam_role_arn in self.iam_roles_arn]
} }
if self.restored_from_snapshot:
json_response['RestoreStatus'] = {
'Status': 'completed',
'CurrentRestoreRateInMegaBytesPerSecond': 123.0,
'SnapshotSizeInMegaBytes': 123,
'ProgressInMegaBytes': 123,
'ElapsedTimeInSeconds': 123,
'EstimatedTimeToCompletionInSeconds': 123
}
try:
json_response['ClusterSnapshotCopyStatus'] = self.cluster_snapshot_copy_status
except AttributeError:
@ -639,7 +649,8 @@ class RedshiftBackend(BaseBackend):
"cluster_version": snapshot.cluster.cluster_version, "cluster_version": snapshot.cluster.cluster_version,
"number_of_nodes": snapshot.cluster.number_of_nodes, "number_of_nodes": snapshot.cluster.number_of_nodes,
"encrypted": snapshot.cluster.encrypted, "encrypted": snapshot.cluster.encrypted,
"tags": snapshot.cluster.tags "tags": snapshot.cluster.tags,
"restored_from_snapshot": True
}
create_kwargs.update(kwargs)
return self.create_cluster(**create_kwargs)
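With `restored_from_snapshot` threaded through, a cluster created via `restore_from_cluster_snapshot` now reports a `RestoreStatus` block. A hedged boto3 sketch (identifiers and credentials are made up):

```python
import boto3
from moto import mock_redshift


@mock_redshift
def test_restore_status():
    rs = boto3.client("redshift", region_name="us-east-1")
    rs.create_cluster(ClusterIdentifier="source", NodeType="ds2.xlarge",
                      MasterUsername="admin", MasterUserPassword="Password1")
    rs.create_cluster_snapshot(SnapshotIdentifier="snap",
                               ClusterIdentifier="source")

    rs.restore_from_cluster_snapshot(ClusterIdentifier="restored",
                                     SnapshotIdentifier="snap")
    cluster = rs.describe_clusters(ClusterIdentifier="restored")["Clusters"][0]
    # The restored cluster carries the hard-coded RestoreStatus added above.
    assert cluster["RestoreStatus"]["Status"] == "completed"
```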

View File

@ -859,6 +859,9 @@ class S3Backend(BaseBackend):
if str(key.version_id) != str(version_id)
]
)
if not bucket.keys.getlist(key_name):
bucket.keys.pop(key_name)
return True
except KeyError:
return False

View File

@ -706,8 +706,11 @@ class ResponseObject(_TemplateEnvironmentMixin):
if 'x-amz-copy-source' in request.headers:
# Copy key
# you can have a quoted ?version=abc with a version Id, so work on
# we need to parse the unquoted string first
src_key_parsed = urlparse(request.headers.get("x-amz-copy-source"))
src_bucket, src_key = unquote(src_key_parsed.path).\
lstrip("/").split("/", 1)
src_version_id = parse_qs(src_key_parsed.query).get(
'versionId', [None])[0]
self.backend.copy_key(src_bucket, src_key, bucket_name, key_name,
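Unquoting after parsing means a copy source such as `bucket/some%20key.txt?versionId=abc` keeps its query string even when the key itself is URL-encoded. A hedged sketch exercising an encoded key name (bucket and key names are made up):

```python
import boto3
from moto import mock_s3


@mock_s3
def test_copy_key_with_encoded_name():
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="src")
    s3.create_bucket(Bucket="dst")
    s3.put_object(Bucket="src", Key="some key.txt", Body=b"data")

    # boto3 URL-encodes the copy source ("some%20key.txt"); the handler
    # now unquotes the path only after splitting off the query string.
    s3.copy_object(Bucket="dst", Key="copy.txt",
                   CopySource={"Bucket": "src", "Key": "some key.txt"})
    assert s3.get_object(Bucket="dst", Key="copy.txt")["Body"].read() == b"data"
```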

View File

@ -69,8 +69,13 @@ class DomainDispatcherApplication(object):
_, _, region, service, _ = environ['HTTP_AUTHORIZATION'].split(",")[0].split()[
1].split("/")
except (KeyError, ValueError):
# Some cognito-idp endpoints (e.g. change password) do not receive an auth header.
if environ.get('HTTP_X_AMZ_TARGET', '').startswith('AWSCognitoIdentityProviderService'):
service = 'cognito-idp'
else:
service = 's3'
region = 'us-east-1'
if service == 'dynamodb':
dynamo_api_version = environ['HTTP_X_AMZ_TARGET'].split("_")[1].split(".")[0]
# If Newer API version, use dynamodb2
@ -186,9 +191,17 @@ def main(argv=sys.argv[1:]):
parser.add_argument(
'-s', '--ssl',
action='store_true',
help='Enable SSL encrypted connection with auto-generated certificate (use https://... URL)',
default=False
)
parser.add_argument(
'-c', '--ssl-cert', type=str,
help='Path to SSL certificate',
default=None)
parser.add_argument(
'-k', '--ssl-key', type=str,
help='Path to SSL private key',
default=None)
args = parser.parse_args(argv)
@ -197,9 +210,15 @@ def main(argv=sys.argv[1:]):
create_backend_app, service=args.service)
main_app.debug = True
ssl_context = None
if args.ssl_key and args.ssl_cert:
ssl_context = (args.ssl_cert, args.ssl_key)
elif args.ssl:
ssl_context = 'adhoc'
run_simple(args.host, args.port, main_app, run_simple(args.host, args.port, main_app,
threaded=True, use_reloader=args.reload, threaded=True, use_reloader=args.reload,
ssl_context='adhoc' if args.ssl else None) ssl_context=ssl_context)
if __name__ == '__main__': if __name__ == '__main__':
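A minimal usage sketch for the new certificate flags; the entry-point name, port and file paths are assumptions, not something this diff pins down. Start the standalone server with something like `moto_server s3 -p 5000 -c ./server.crt -k ./server.key`, then point a client at the HTTPS endpoint:

```python
import boto3

# Assumes the standalone server is listening on https://localhost:5000 with ./server.crt
s3 = boto3.client(
    's3',
    endpoint_url='https://localhost:5000',
    verify='./server.crt',              # trust the certificate passed via --ssl-cert
    aws_access_key_id='testing',
    aws_secret_access_key='testing',
    region_name='us-east-1',
)
s3.list_buckets()
```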


@@ -1,6 +1,7 @@
from __future__ import unicode_literals
import email
from email.utils import parseaddr
from moto.core import BaseBackend, BaseModel
from .exceptions import MessageRejectedError
@@ -84,13 +85,27 @@ class SESBackend(BaseBackend):
return message
def send_raw_email(self, source, destinations, raw_data):
-if source not in self.addresses:
if source is not None:
_, source_email_address = parseaddr(source)
if source_email_address not in self.addresses:
raise MessageRejectedError(
-"Did not have authority to send from email %s" % source
"Did not have authority to send from email %s" % source_email_address
)
recipient_count = len(destinations)
message = email.message_from_string(raw_data)
if source is None:
if message['from'] is None:
raise MessageRejectedError(
"Source not specified"
)
_, source_email_address = parseaddr(message['from'])
if source_email_address not in self.addresses:
raise MessageRejectedError(
"Did not have authority to send from email %s" % source_email_address
)
for header in 'TO', 'CC', 'BCC':
recipient_count += sum(
d.strip() and 1 or 0
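For context on the parseaddr calls above, a short sketch of what it returns (addresses are made up); only the bare address part is compared against the verified-identity list:

```python
from email.utils import parseaddr

parseaddr('Jane Doe <jane@example.com>')   # ('Jane Doe', 'jane@example.com')
parseaddr('jane@example.com')              # ('', 'jane@example.com')
```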


@@ -75,7 +75,10 @@ class EmailResponse(BaseResponse):
return template.render(message=message)
def send_raw_email(self):
-source = self.querystring.get('Source')[0]
source = self.querystring.get('Source')
if source is not None:
source, = source
raw_data = self.querystring.get('RawMessage.Data')[0]
raw_data = base64.b64decode(raw_data)
if six.PY3:


@@ -94,11 +94,14 @@ class Subscription(BaseModel):
if self.protocol == 'sqs':
queue_name = self.endpoint.split(":")[-1]
region = self.endpoint.split(":")[3]
if self.attributes.get('RawMessageDelivery') != 'true':
enveloped_message = json.dumps(self.get_post_data(message, message_id, subject, message_attributes=message_attributes), sort_keys=True, indent=2, separators=(',', ': '))
else:
enveloped_message = message
sqs_backends[region].send_message(queue_name, enveloped_message)
elif self.protocol in ['http', 'https']:
post_data = self.get_post_data(message, message_id, subject)
-requests.post(self.endpoint, json=post_data)
requests.post(self.endpoint, json=post_data, headers={'Content-Type': 'text/plain; charset=UTF-8'})
elif self.protocol == 'lambda':
# TODO: support bad function name
# http://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html
@@ -240,7 +243,7 @@ class SNSBackend(BaseBackend):
self.sms_attributes.update(attrs)
def create_topic(self, name):
-fails_constraints = not re.match(r'^[a-zA-Z0-9](?:[A-Za-z0-9_-]{0,253}[a-zA-Z0-9])?$', name)
fails_constraints = not re.match(r'^[a-zA-Z0-9_-]{1,256}$', name)
if fails_constraints:
raise InvalidParameterValue("Topic names must be made up of only uppercase and lowercase ASCII letters, numbers, underscores, and hyphens, and must be between 1 and 256 characters long.")
candidate_topic = Topic(name, self)


@@ -1,4 +1,5 @@
from __future__ import unicode_literals
from moto.core.exceptions import RESTError
class MessageNotInflight(Exception):
@@ -21,3 +22,11 @@ class MessageAttributesInvalid(Exception):
class QueueDoesNotExist(Exception):
status_code = 404
description = "The specified queue does not exist for this wsdl version."
class QueueAlreadyExists(RESTError):
code = 400
def __init__(self, message):
super(QueueAlreadyExists, self).__init__(
"QueueAlreadyExists", message)


@@ -18,6 +18,7 @@ from .exceptions import (
MessageAttributesInvalid,
MessageNotInflight,
QueueDoesNotExist,
QueueAlreadyExists,
ReceiptHandleIsInvalid,
)
@@ -180,6 +181,7 @@ class Queue(BaseModel):
self.permissions = {}
self._messages = []
self._pending_messages = set()
now = unix_time()
self.created_timestamp = now
@@ -209,6 +211,16 @@ class Queue(BaseModel):
if self.fifo_queue and not self.name.endswith('.fifo'):
raise MessageAttributesInvalid('Queue name must end in .fifo for FIFO queues')
@property
def pending_messages(self):
return self._pending_messages
@property
def pending_message_groups(self):
return set(message.group_id
for message in self._pending_messages
if message.group_id is not None)
def _set_attributes(self, attributes, now=None):
if not now:
now = unix_time()
@@ -234,11 +246,17 @@ class Queue(BaseModel):
self.last_modified_timestamp = now
-def _setup_dlq(self, policy_json):
def _setup_dlq(self, policy):
if isinstance(policy, six.text_type):
try:
-self.redrive_policy = json.loads(policy_json)
self.redrive_policy = json.loads(policy)
except ValueError:
-raise RESTError('InvalidParameterValue', 'Redrive policy does not contain valid json')
raise RESTError('InvalidParameterValue', 'Redrive policy is not a dict or valid json')
elif isinstance(policy, dict):
self.redrive_policy = policy
else:
raise RESTError('InvalidParameterValue', 'Redrive policy is not a dict or valid json')
if 'deadLetterTargetArn' not in self.redrive_policy:
raise RESTError('InvalidParameterValue', 'Redrive policy does not contain deadLetterTargetArn')
@@ -366,7 +384,12 @@ class SQSBackend(BaseBackend):
def create_queue(self, name, **kwargs):
queue = self.queues.get(name)
-if queue is None:
if queue:
# Queue already exist. If attributes don't match, throw error
for key, value in kwargs.items():
if getattr(queue, camelcase_to_underscores(key)) != value:
raise QueueAlreadyExists("The specified queue already exists.")
else:
try:
kwargs.pop('region')
except KeyError:
@@ -448,6 +471,7 @@ class SQSBackend(BaseBackend):
"""
queue = self.get_queue(queue_name)
result = []
previous_result_count = len(result)
polling_end = unix_time() + wait_seconds_timeout
@@ -457,19 +481,25 @@ class SQSBackend(BaseBackend):
if result or (wait_seconds_timeout and unix_time() > polling_end):
break
-if len(queue.messages) == 0:
-# we want to break here, otherwise it will be an infinite loop
-if wait_seconds_timeout == 0:
-break
-import time
-time.sleep(0.001)
-continue
messages_to_dlq = []
for message in queue.messages:
if not message.visible:
continue
if message in queue.pending_messages:
# The message is pending but is visible again, so the
# consumer must have timed out.
queue.pending_messages.remove(message)
if message.group_id and queue.fifo_queue:
if message.group_id in queue.pending_message_groups:
# There is already one active message with the same
# group, so we cannot deliver this one.
continue
queue.pending_messages.add(message)
if queue.dead_letter_queue is not None and message.approximate_receive_count >= queue.redrive_policy['maxReceiveCount']:
messages_to_dlq.append(message)
continue
@@ -485,6 +515,18 @@ class SQSBackend(BaseBackend):
queue._messages.remove(message)
queue.dead_letter_queue.add_message(message)
if previous_result_count == len(result):
if wait_seconds_timeout == 0:
# There is timeout and we have added no additional results,
# so break to avoid an infinite loop.
break
import time
time.sleep(0.001)
continue
previous_result_count = len(result)
return result
def delete_message(self, queue_name, receipt_handle):
@@ -494,6 +536,7 @@ class SQSBackend(BaseBackend):
# Only delete message if it is not visible and the reciept_handle
# matches.
if message.receipt_handle == receipt_handle:
queue.pending_messages.remove(message)
continue
new_messages.append(message)
queue._messages = new_messages
@@ -505,6 +548,10 @@ class SQSBackend(BaseBackend):
if message.visible:
raise MessageNotInflight
message.change_visibility(visibility_timeout)
if message.visible:
# If the message is visible again, remove it from pending
# messages.
queue.pending_messages.remove(message)
return
raise ReceiptHandleIsInvalid
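A short sketch of the two RedrivePolicy shapes `_setup_dlq` now normalises (the ARN is made up): CloudFormation hands the policy through as a dict, while the SQS API delivers it as a JSON string, and both should end up as the same `redrive_policy` dict.

```python
import json

as_dict = {"deadLetterTargetArn": "arn:aws:sqs:us-east-1:123456789012:my-dlq",
           "maxReceiveCount": 5}
as_json = json.dumps(as_dict)            # the form a SetQueueAttributes call delivers
assert json.loads(as_json) == as_dict    # both normalise to the same policy dict
```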


@@ -19,11 +19,12 @@ install_requires = [
"pyaml",
"pytz",
"python-dateutil<3.0.0,>=2.1",
"python-jose<3.0.0",
"mock",
"docker>=2.5.1",
"jsondiff==1.1.1",
"aws-xray-sdk<0.96,>=0.93",
-"responses",
"responses>=0.9.0",
]
extras_require = {


@@ -148,10 +148,41 @@ dummy_import_template = {
}
}
dummy_redrive_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"MainQueue": {
"Type": "AWS::SQS::Queue",
"Properties": {
"QueueName": "mainqueue.fifo",
"FifoQueue": True,
"ContentBasedDeduplication": False,
"RedrivePolicy": {
"deadLetterTargetArn": {
"Fn::GetAtt": [
"DeadLetterQueue",
"Arn"
]
},
"maxReceiveCount": 5
}
}
},
"DeadLetterQueue": {
"Type": "AWS::SQS::Queue",
"Properties": {
"FifoQueue": True
}
},
}
}
dummy_template_json = json.dumps(dummy_template)
dummy_update_template_json = json.dumps(dummy_update_template)
dummy_output_template_json = json.dumps(dummy_output_template)
dummy_import_template_json = json.dumps(dummy_import_template)
dummy_redrive_template_json = json.dumps(dummy_redrive_template)
@mock_cloudformation
@@ -746,3 +777,19 @@ def test_stack_with_imports():
output = output_stack.outputs[0]['OutputValue']
queue = ec2_resource.get_queue_by_name(QueueName=output)
queue.should_not.be.none
@mock_sqs
@mock_cloudformation
def test_non_json_redrive_policy():
cf = boto3.resource('cloudformation', region_name='us-east-1')
stack = cf.create_stack(
StackName="test_stack1",
TemplateBody=dummy_redrive_template_json
)
stack.Resource('MainQueue').resource_status\
.should.equal("CREATE_COMPLETE")
stack.Resource('DeadLetterQueue').resource_status\
.should.equal("CREATE_COMPLETE")


@@ -0,0 +1,87 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
# Standard library modules
import unittest
# Third-party modules
import boto3
from botocore.exceptions import ClientError
# Package modules
from moto import mock_cloudformation
AWS_REGION = 'us-west-1'
SG_STACK_NAME = 'simple-sg-stack'
SG_TEMPLATE = """
AWSTemplateFormatVersion: 2010-09-09
Description: Simple test CF template for moto_cloudformation
Resources:
SimpleSecurityGroup:
Type: AWS::EC2::SecurityGroup
Description: "A simple security group"
Properties:
GroupName: simple-security-group
GroupDescription: "A simple security group"
SecurityGroupEgress:
-
Description: "Egress to remote HTTPS servers"
CidrIp: 0.0.0.0/0
IpProtocol: tcp
FromPort: 443
ToPort: 443
Outputs:
SimpleSecurityGroupName:
Value: !GetAtt SimpleSecurityGroup.GroupId
Export:
Name: "SimpleSecurityGroup"
"""
EC2_STACK_NAME = 'simple-ec2-stack'
EC2_TEMPLATE = """
---
# The latest template format version is "2010-09-09" and as of 2018-04-09
# is currently the only valid value.
AWSTemplateFormatVersion: 2010-09-09
Description: Simple test CF template for moto_cloudformation
Resources:
SimpleInstance:
Type: AWS::EC2::Instance
Properties:
ImageId: ami-03cf127a
InstanceType: t2.micro
SecurityGroups: !Split [',', !ImportValue SimpleSecurityGroup]
"""
class TestSimpleInstance(unittest.TestCase):
def test_simple_instance(self):
"""Test that we can create a simple CloudFormation stack that imports values from an existing CloudFormation stack"""
with mock_cloudformation():
client = boto3.client('cloudformation', region_name=AWS_REGION)
client.create_stack(StackName=SG_STACK_NAME, TemplateBody=SG_TEMPLATE)
response = client.create_stack(StackName=EC2_STACK_NAME, TemplateBody=EC2_TEMPLATE)
self.assertIn('StackId', response)
response = client.describe_stacks(StackName=response['StackId'])
self.assertIn('Stacks', response)
stack_info = response['Stacks']
self.assertEqual(1, len(stack_info))
self.assertIn('StackName', stack_info[0])
self.assertEqual(EC2_STACK_NAME, stack_info[0]['StackName'])
def test_simple_instance_missing_export(self):
"""Test that we get an exception if a CloudFormation stack tries to imports a non-existent export value"""
with mock_cloudformation():
client = boto3.client('cloudformation', region_name=AWS_REGION)
with self.assertRaises(ClientError) as e:
client.create_stack(StackName=EC2_STACK_NAME, TemplateBody=EC2_TEMPLATE)
self.assertIn('Error', e.exception.response)
self.assertIn('Code', e.exception.response['Error'])
self.assertEqual('ExportNotFound', e.exception.response['Error']['Code'])

tests/test_cloudwatch/test_cloudwatch_boto3.py Normal file → Executable file

@@ -162,6 +162,37 @@ def test_put_metric_data_no_dimensions():
metric['MetricName'].should.equal('metric')
@mock_cloudwatch
def test_put_metric_data_with_statistics():
conn = boto3.client('cloudwatch', region_name='us-east-1')
conn.put_metric_data(
Namespace='tester',
MetricData=[
dict(
MetricName='statmetric',
Timestamp=datetime(2015, 1, 1),
# no Value to test https://github.com/spulec/moto/issues/1615
StatisticValues=dict(
SampleCount=123.0,
Sum=123.0,
Minimum=123.0,
Maximum=123.0
),
Unit='Milliseconds',
StorageResolution=123
)
]
)
metrics = conn.list_metrics()['Metrics']
metrics.should.have.length_of(1)
metric = metrics[0]
metric['Namespace'].should.equal('tester')
metric['MetricName'].should.equal('statmetric')
# TODO: test statistics - https://github.com/spulec/moto/issues/1615
@mock_cloudwatch
def test_get_metric_statistics():
conn = boto3.client('cloudwatch', region_name='us-east-1')
@@ -173,6 +204,7 @@ def test_get_metric_statistics():
dict(
MetricName='metric',
Value=1.5,
Timestamp=utc_now
)
]
)
@@ -180,7 +212,7 @@ def test_get_metric_statistics():
stats = conn.get_metric_statistics(
Namespace='tester',
MetricName='metric',
-StartTime=utc_now,
StartTime=utc_now - timedelta(seconds=60),
EndTime=utc_now + timedelta(seconds=60),
Period=60,
Statistics=['SampleCount', 'Sum']


@@ -0,0 +1,539 @@
from __future__ import unicode_literals
import boto3
import json
import os
import uuid
from jose import jws
from moto import mock_cognitoidp
import sure # noqa
@mock_cognitoidp
def test_create_user_pool():
conn = boto3.client("cognito-idp", "us-west-2")
name = str(uuid.uuid4())
value = str(uuid.uuid4())
result = conn.create_user_pool(
PoolName=name,
LambdaConfig={
"PreSignUp": value
}
)
result["UserPool"]["Id"].should_not.be.none
result["UserPool"]["Name"].should.equal(name)
result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value)
@mock_cognitoidp
def test_list_user_pools():
conn = boto3.client("cognito-idp", "us-west-2")
name = str(uuid.uuid4())
conn.create_user_pool(PoolName=name)
result = conn.list_user_pools(MaxResults=10)
result["UserPools"].should.have.length_of(1)
result["UserPools"][0]["Name"].should.equal(name)
@mock_cognitoidp
def test_describe_user_pool():
conn = boto3.client("cognito-idp", "us-west-2")
name = str(uuid.uuid4())
value = str(uuid.uuid4())
user_pool_details = conn.create_user_pool(
PoolName=name,
LambdaConfig={
"PreSignUp": value
}
)
result = conn.describe_user_pool(UserPoolId=user_pool_details["UserPool"]["Id"])
result["UserPool"]["Name"].should.equal(name)
result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value)
@mock_cognitoidp
def test_delete_user_pool():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
conn.list_user_pools(MaxResults=10)["UserPools"].should.have.length_of(1)
conn.delete_user_pool(UserPoolId=user_pool_id)
conn.list_user_pools(MaxResults=10)["UserPools"].should.have.length_of(0)
@mock_cognitoidp
def test_create_user_pool_domain():
conn = boto3.client("cognito-idp", "us-west-2")
domain = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
result = conn.create_user_pool_domain(UserPoolId=user_pool_id, Domain=domain)
result["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
@mock_cognitoidp
def test_describe_user_pool_domain():
conn = boto3.client("cognito-idp", "us-west-2")
domain = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
conn.create_user_pool_domain(UserPoolId=user_pool_id, Domain=domain)
result = conn.describe_user_pool_domain(Domain=domain)
result["DomainDescription"]["Domain"].should.equal(domain)
result["DomainDescription"]["UserPoolId"].should.equal(user_pool_id)
result["DomainDescription"]["AWSAccountId"].should_not.be.none
@mock_cognitoidp
def test_delete_user_pool_domain():
conn = boto3.client("cognito-idp", "us-west-2")
domain = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
conn.create_user_pool_domain(UserPoolId=user_pool_id, Domain=domain)
result = conn.delete_user_pool_domain(UserPoolId=user_pool_id, Domain=domain)
result["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
result = conn.describe_user_pool_domain(Domain=domain)
# This is a surprising behavior of the real service: describing a missing domain comes
# back with status 200 and a DomainDescription of {}
result["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
result["DomainDescription"].keys().should.have.length_of(0)
@mock_cognitoidp
def test_create_user_pool_client():
conn = boto3.client("cognito-idp", "us-west-2")
client_name = str(uuid.uuid4())
value = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
result = conn.create_user_pool_client(
UserPoolId=user_pool_id,
ClientName=client_name,
CallbackURLs=[value],
)
result["UserPoolClient"]["UserPoolId"].should.equal(user_pool_id)
result["UserPoolClient"]["ClientId"].should_not.be.none
result["UserPoolClient"]["ClientName"].should.equal(client_name)
result["UserPoolClient"]["CallbackURLs"].should.have.length_of(1)
result["UserPoolClient"]["CallbackURLs"][0].should.equal(value)
@mock_cognitoidp
def test_list_user_pool_clients():
conn = boto3.client("cognito-idp", "us-west-2")
client_name = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
conn.create_user_pool_client(UserPoolId=user_pool_id, ClientName=client_name)
result = conn.list_user_pool_clients(UserPoolId=user_pool_id, MaxResults=10)
result["UserPoolClients"].should.have.length_of(1)
result["UserPoolClients"][0]["ClientName"].should.equal(client_name)
@mock_cognitoidp
def test_describe_user_pool_client():
conn = boto3.client("cognito-idp", "us-west-2")
client_name = str(uuid.uuid4())
value = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
client_details = conn.create_user_pool_client(
UserPoolId=user_pool_id,
ClientName=client_name,
CallbackURLs=[value],
)
result = conn.describe_user_pool_client(
UserPoolId=user_pool_id,
ClientId=client_details["UserPoolClient"]["ClientId"],
)
result["UserPoolClient"]["ClientName"].should.equal(client_name)
result["UserPoolClient"]["CallbackURLs"].should.have.length_of(1)
result["UserPoolClient"]["CallbackURLs"][0].should.equal(value)
@mock_cognitoidp
def test_update_user_pool_client():
conn = boto3.client("cognito-idp", "us-west-2")
old_client_name = str(uuid.uuid4())
new_client_name = str(uuid.uuid4())
old_value = str(uuid.uuid4())
new_value = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
client_details = conn.create_user_pool_client(
UserPoolId=user_pool_id,
ClientName=old_client_name,
CallbackURLs=[old_value],
)
result = conn.update_user_pool_client(
UserPoolId=user_pool_id,
ClientId=client_details["UserPoolClient"]["ClientId"],
ClientName=new_client_name,
CallbackURLs=[new_value],
)
result["UserPoolClient"]["ClientName"].should.equal(new_client_name)
result["UserPoolClient"]["CallbackURLs"].should.have.length_of(1)
result["UserPoolClient"]["CallbackURLs"][0].should.equal(new_value)
@mock_cognitoidp
def test_delete_user_pool_client():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
client_details = conn.create_user_pool_client(
UserPoolId=user_pool_id,
ClientName=str(uuid.uuid4()),
)
conn.delete_user_pool_client(
UserPoolId=user_pool_id,
ClientId=client_details["UserPoolClient"]["ClientId"],
)
caught = False
try:
conn.describe_user_pool_client(
UserPoolId=user_pool_id,
ClientId=client_details["UserPoolClient"]["ClientId"],
)
except conn.exceptions.ResourceNotFoundException:
caught = True
caught.should.be.true
@mock_cognitoidp
def test_create_identity_provider():
conn = boto3.client("cognito-idp", "us-west-2")
provider_name = str(uuid.uuid4())
provider_type = "Facebook"
value = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
result = conn.create_identity_provider(
UserPoolId=user_pool_id,
ProviderName=provider_name,
ProviderType=provider_type,
ProviderDetails={
"thing": value
},
)
result["IdentityProvider"]["UserPoolId"].should.equal(user_pool_id)
result["IdentityProvider"]["ProviderName"].should.equal(provider_name)
result["IdentityProvider"]["ProviderType"].should.equal(provider_type)
result["IdentityProvider"]["ProviderDetails"]["thing"].should.equal(value)
@mock_cognitoidp
def test_list_identity_providers():
conn = boto3.client("cognito-idp", "us-west-2")
provider_name = str(uuid.uuid4())
provider_type = "Facebook"
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
conn.create_identity_provider(
UserPoolId=user_pool_id,
ProviderName=provider_name,
ProviderType=provider_type,
ProviderDetails={},
)
result = conn.list_identity_providers(
UserPoolId=user_pool_id,
MaxResults=10,
)
result["Providers"].should.have.length_of(1)
result["Providers"][0]["ProviderName"].should.equal(provider_name)
result["Providers"][0]["ProviderType"].should.equal(provider_type)
@mock_cognitoidp
def test_describe_identity_providers():
conn = boto3.client("cognito-idp", "us-west-2")
provider_name = str(uuid.uuid4())
provider_type = "Facebook"
value = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
conn.create_identity_provider(
UserPoolId=user_pool_id,
ProviderName=provider_name,
ProviderType=provider_type,
ProviderDetails={
"thing": value
},
)
result = conn.describe_identity_provider(
UserPoolId=user_pool_id,
ProviderName=provider_name,
)
result["IdentityProvider"]["UserPoolId"].should.equal(user_pool_id)
result["IdentityProvider"]["ProviderName"].should.equal(provider_name)
result["IdentityProvider"]["ProviderType"].should.equal(provider_type)
result["IdentityProvider"]["ProviderDetails"]["thing"].should.equal(value)
@mock_cognitoidp
def test_delete_identity_providers():
conn = boto3.client("cognito-idp", "us-west-2")
provider_name = str(uuid.uuid4())
provider_type = "Facebook"
value = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
conn.create_identity_provider(
UserPoolId=user_pool_id,
ProviderName=provider_name,
ProviderType=provider_type,
ProviderDetails={
"thing": value
},
)
conn.delete_identity_provider(UserPoolId=user_pool_id, ProviderName=provider_name)
caught = False
try:
conn.describe_identity_provider(
UserPoolId=user_pool_id,
ProviderName=provider_name,
)
except conn.exceptions.ResourceNotFoundException:
caught = True
caught.should.be.true
@mock_cognitoidp
def test_admin_create_user():
conn = boto3.client("cognito-idp", "us-west-2")
username = str(uuid.uuid4())
value = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
result = conn.admin_create_user(
UserPoolId=user_pool_id,
Username=username,
UserAttributes=[
{"Name": "thing", "Value": value}
],
)
result["User"]["Username"].should.equal(username)
result["User"]["UserStatus"].should.equal("FORCE_CHANGE_PASSWORD")
result["User"]["Attributes"].should.have.length_of(1)
result["User"]["Attributes"][0]["Name"].should.equal("thing")
result["User"]["Attributes"][0]["Value"].should.equal(value)
@mock_cognitoidp
def test_admin_get_user():
conn = boto3.client("cognito-idp", "us-west-2")
username = str(uuid.uuid4())
value = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
conn.admin_create_user(
UserPoolId=user_pool_id,
Username=username,
UserAttributes=[
{"Name": "thing", "Value": value}
],
)
result = conn.admin_get_user(UserPoolId=user_pool_id, Username=username)
result["Username"].should.equal(username)
result["UserAttributes"].should.have.length_of(1)
result["UserAttributes"][0]["Name"].should.equal("thing")
result["UserAttributes"][0]["Value"].should.equal(value)
@mock_cognitoidp
def test_list_users():
conn = boto3.client("cognito-idp", "us-west-2")
username = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
result = conn.list_users(UserPoolId=user_pool_id)
result["Users"].should.have.length_of(1)
result["Users"][0]["Username"].should.equal(username)
@mock_cognitoidp
def test_admin_delete_user():
conn = boto3.client("cognito-idp", "us-west-2")
username = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
conn.admin_delete_user(UserPoolId=user_pool_id, Username=username)
caught = False
try:
conn.admin_get_user(UserPoolId=user_pool_id, Username=username)
except conn.exceptions.ResourceNotFoundException:
caught = True
caught.should.be.true
def authentication_flow(conn):
username = str(uuid.uuid4())
temporary_password = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
client_id = conn.create_user_pool_client(
UserPoolId=user_pool_id,
ClientName=str(uuid.uuid4()),
)["UserPoolClient"]["ClientId"]
conn.admin_create_user(
UserPoolId=user_pool_id,
Username=username,
TemporaryPassword=temporary_password,
)
result = conn.admin_initiate_auth(
UserPoolId=user_pool_id,
ClientId=client_id,
AuthFlow="ADMIN_NO_SRP_AUTH",
AuthParameters={
"USERNAME": username,
"PASSWORD": temporary_password
},
)
# A newly created user is forced to set a new password
result["ChallengeName"].should.equal("NEW_PASSWORD_REQUIRED")
result["Session"].should_not.be.none
# This sets a new password and logs the user in (creates tokens)
new_password = str(uuid.uuid4())
result = conn.respond_to_auth_challenge(
Session=result["Session"],
ClientId=client_id,
ChallengeName="NEW_PASSWORD_REQUIRED",
ChallengeResponses={
"USERNAME": username,
"NEW_PASSWORD": new_password
}
)
result["AuthenticationResult"]["IdToken"].should_not.be.none
result["AuthenticationResult"]["AccessToken"].should_not.be.none
return {
"user_pool_id": user_pool_id,
"client_id": client_id,
"id_token": result["AuthenticationResult"]["IdToken"],
"access_token": result["AuthenticationResult"]["AccessToken"],
"username": username,
"password": new_password,
}
@mock_cognitoidp
def test_authentication_flow():
conn = boto3.client("cognito-idp", "us-west-2")
authentication_flow(conn)
@mock_cognitoidp
def test_token_legitimacy():
conn = boto3.client("cognito-idp", "us-west-2")
path = "../../moto/cognitoidp/resources/jwks-public.json"
with open(os.path.join(os.path.dirname(__file__), path)) as f:
json_web_key = json.loads(f.read())["keys"][0]
outputs = authentication_flow(conn)
id_token = outputs["id_token"]
access_token = outputs["access_token"]
client_id = outputs["client_id"]
issuer = "https://cognito-idp.us-west-2.amazonaws.com/{}".format(outputs["user_pool_id"])
id_claims = json.loads(jws.verify(id_token, json_web_key, "RS256"))
id_claims["iss"].should.equal(issuer)
id_claims["aud"].should.equal(client_id)
access_claims = json.loads(jws.verify(access_token, json_web_key, "RS256"))
access_claims["iss"].should.equal(issuer)
access_claims["aud"].should.equal(client_id)
@mock_cognitoidp
def test_change_password():
conn = boto3.client("cognito-idp", "us-west-2")
outputs = authentication_flow(conn)
# Take this opportunity to test change_password, which requires an access token.
newer_password = str(uuid.uuid4())
conn.change_password(
AccessToken=outputs["access_token"],
PreviousPassword=outputs["password"],
ProposedPassword=newer_password,
)
# Log in again, which should succeed without a challenge because the user is no
# longer in the force-new-password state.
result = conn.admin_initiate_auth(
UserPoolId=outputs["user_pool_id"],
ClientId=outputs["client_id"],
AuthFlow="ADMIN_NO_SRP_AUTH",
AuthParameters={
"USERNAME": outputs["username"],
"PASSWORD": newer_password,
},
)
result["AuthenticationResult"].should_not.be.none
@mock_cognitoidp
def test_forgot_password():
conn = boto3.client("cognito-idp", "us-west-2")
result = conn.forgot_password(ClientId=str(uuid.uuid4()), Username=str(uuid.uuid4()))
result["CodeDeliveryDetails"].should_not.be.none
@mock_cognitoidp
def test_confirm_forgot_password():
conn = boto3.client("cognito-idp", "us-west-2")
username = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
client_id = conn.create_user_pool_client(
UserPoolId=user_pool_id,
ClientName=str(uuid.uuid4()),
)["UserPoolClient"]["ClientId"]
conn.admin_create_user(
UserPoolId=user_pool_id,
Username=username,
TemporaryPassword=str(uuid.uuid4()),
)
conn.confirm_forgot_password(
ClientId=client_id,
Username=username,
ConfirmationCode=str(uuid.uuid4()),
Password=str(uuid.uuid4()),
)


@@ -6,6 +6,7 @@ import boto3
from boto3.dynamodb.conditions import Attr
import sure # noqa
import requests
from pytest import raises
from moto import mock_dynamodb2, mock_dynamodb2_deprecated
from moto.dynamodb2 import dynamodb_backend2
from boto.exception import JSONResponseError
@@ -1052,6 +1053,7 @@ def test_query_missing_expr_names():
@mock_dynamodb2
def test_update_item_on_map():
dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
client = boto3.client('dynamodb', region_name='us-east-1')
# Create the DynamoDB table.
dynamodb.create_table(
@@ -1092,21 +1094,44 @@ def test_update_item_on_map():
resp = table.scan()
resp['Items'][0]['body'].should.equal({'nested': {'data': 'test'}})
# Nonexistent nested attributes are supported for existing top-level attributes.
table.update_item(Key={
'forum_name': 'the-key',
'subject': '123'
},
-UpdateExpression='SET body.#nested.#data = :tb',
UpdateExpression='SET body.#nested.#data = :tb, body.nested.#nonexistentnested.#data = :tb2',
ExpressionAttributeNames={
'#nested': 'nested',
'#nonexistentnested': 'nonexistentnested',
'#data': 'data'
},
ExpressionAttributeValues={
':tb': 'new_value',
':tb2': 'other_value'
})
resp = table.scan()
resp['Items'][0]['body'].should.equal({
'nested': {
'data': 'new_value',
'nonexistentnested': {'data': 'other_value'}
}
})
# Test nested value for a nonexistent attribute.
with raises(client.exceptions.ConditionalCheckFailedException):
table.update_item(Key={
'forum_name': 'the-key',
'subject': '123'
},
UpdateExpression='SET nonexistent.#nested = :tb',
ExpressionAttributeNames={
'#nested': 'nested'
},
ExpressionAttributeValues={
':tb': 'new_value'
})
-resp = table.scan()
-resp['Items'][0]['body'].should.equal({'nested': {'data': 'new_value'}})
# https://github.com/spulec/moto/issues/1358


@@ -8,6 +8,7 @@ import boto
import boto3
from botocore.exceptions import ClientError
from boto.exception import EC2ResponseError
from freezegun import freeze_time
import sure # noqa
from moto import mock_ec2_deprecated, mock_ec2
@@ -588,6 +589,7 @@ def test_volume_tag_escaping():
dict(snaps[0].tags).should.equal({'key': '</closed>'})
@freeze_time
@mock_ec2
def test_copy_snapshot():
ec2_client = boto3.client('ec2', region_name='eu-west-1')


@@ -62,6 +62,17 @@ def test_eip_allocate_vpc():
logging.debug("vpc alloc_id:".format(vpc.allocation_id))
vpc.release()
@mock_ec2
def test_specific_eip_allocate_vpc():
"""Allocate VPC EIP with specific address"""
service = boto3.resource('ec2', region_name='us-west-1')
client = boto3.client('ec2', region_name='us-west-1')
vpc = client.allocate_address(Domain="vpc", Address="127.38.43.222")
vpc['Domain'].should.be.equal("vpc")
vpc['PublicIp'].should.be.equal("127.38.43.222")
logging.debug("vpc alloc_id:".format(vpc['AllocationId']))
@mock_ec2_deprecated
def test_eip_allocate_invalid_domain():


@@ -1,6 +1,7 @@
import random
import boto3
import json
from moto.events import mock_events
from botocore.exceptions import ClientError
@@ -177,17 +178,19 @@ def test_remove_targets():
def test_permissions():
client = boto3.client('events', 'eu-central-1')
-client.put_permission(Action='PutEvents', Principal='111111111111', StatementId='Account1')
client.put_permission(Action='events:PutEvents', Principal='111111111111', StatementId='Account1')
-client.put_permission(Action='PutEvents', Principal='222222222222', StatementId='Account2')
client.put_permission(Action='events:PutEvents', Principal='222222222222', StatementId='Account2')
resp = client.describe_event_bus()
-assert len(resp['Policy']['Statement']) == 2
resp_policy = json.loads(resp['Policy'])
assert len(resp_policy['Statement']) == 2
client.remove_permission(StatementId='Account2')
resp = client.describe_event_bus()
-assert len(resp['Policy']['Statement']) == 1
resp_policy = json.loads(resp['Policy'])
-assert resp['Policy']['Statement'][0]['Sid'] == 'Account1'
assert len(resp_policy['Statement']) == 1
assert resp_policy['Statement'][0]['Sid'] == 'Account1'
@mock_events


@@ -13,6 +13,10 @@ def test_log_group_create():
conn = boto3.client('logs', 'us-west-2')
log_group_name = 'dummy'
response = conn.create_log_group(logGroupName=log_group_name)
response = conn.describe_log_groups(logGroupNamePrefix=log_group_name)
assert len(response['logGroups']) == 1
response = conn.delete_log_group(logGroupName=log_group_name)


@@ -353,8 +353,6 @@ def test_describe_db_snapshots():
MasterUserPassword='hunter2',
Port=1234,
DBSecurityGroups=["my_sg"])
-conn.describe_db_snapshots.when.called_with(
-DBInstanceIdentifier="db-primary-1").should.throw(ClientError)
created = conn.create_db_snapshot(DBInstanceIdentifier='db-primary-1',
DBSnapshotIdentifier='snapshot-1').get('DBSnapshot')
@@ -369,6 +367,11 @@ def test_describe_db_snapshots():
snapshot.should.equal(created)
snapshot.get('Engine').should.equal('postgres')
conn.create_db_snapshot(DBInstanceIdentifier='db-primary-1',
DBSnapshotIdentifier='snapshot-2')
snapshots = conn.describe_db_snapshots(DBInstanceIdentifier='db-primary-1').get('DBSnapshots')
snapshots.should.have.length_of(2)
@mock_rds2
def test_delete_db_snapshot():


@@ -818,6 +818,48 @@ def test_create_cluster_from_snapshot():
new_cluster['Endpoint']['Port'].should.equal(1234)
@mock_redshift
def test_create_cluster_from_snapshot_with_waiter():
client = boto3.client('redshift', region_name='us-east-1')
original_cluster_identifier = 'original-cluster'
original_snapshot_identifier = 'original-snapshot'
new_cluster_identifier = 'new-cluster'
client.create_cluster(
ClusterIdentifier=original_cluster_identifier,
ClusterType='single-node',
NodeType='ds2.xlarge',
MasterUsername='username',
MasterUserPassword='password',
)
client.create_cluster_snapshot(
SnapshotIdentifier=original_snapshot_identifier,
ClusterIdentifier=original_cluster_identifier
)
response = client.restore_from_cluster_snapshot(
ClusterIdentifier=new_cluster_identifier,
SnapshotIdentifier=original_snapshot_identifier,
Port=1234
)
response['Cluster']['ClusterStatus'].should.equal('creating')
client.get_waiter('cluster_restored').wait(
ClusterIdentifier=new_cluster_identifier,
WaiterConfig={
'Delay': 1,
'MaxAttempts': 2,
}
)
response = client.describe_clusters(
ClusterIdentifier=new_cluster_identifier
)
new_cluster = response['Clusters'][0]
new_cluster['NodeType'].should.equal('ds2.xlarge')
new_cluster['MasterUsername'].should.equal('username')
new_cluster['Endpoint']['Port'].should.equal(1234)
@mock_redshift
def test_create_cluster_from_non_existent_snapshot():
client = boto3.client('redshift', region_name='us-east-1')


@@ -1405,6 +1405,19 @@ def test_boto3_deleted_versionings_list():
assert len(listed['Contents']) == 1
@mock_s3
def test_boto3_delete_versioned_bucket():
client = boto3.client('s3', region_name='us-east-1')
client.create_bucket(Bucket='blah')
client.put_bucket_versioning(Bucket='blah', VersioningConfiguration={'Status': 'Enabled'})
resp = client.put_object(Bucket='blah', Key='test1', Body=b'test1')
client.delete_object(Bucket='blah', Key='test1', VersionId=resp["VersionId"])
client.delete_bucket(Bucket='blah')
@mock_s3
def test_boto3_head_object_if_modified_since():
s3 = boto3.client('s3', region_name='us-east-1')


@@ -136,3 +136,59 @@ def test_send_raw_email():
send_quota = conn.get_send_quota()
sent_count = int(send_quota['SentLast24Hours'])
sent_count.should.equal(2)
@mock_ses
def test_send_raw_email_without_source():
conn = boto3.client('ses', region_name='us-east-1')
message = MIMEMultipart()
message['Subject'] = 'Test'
message['From'] = 'test@example.com'
message['To'] = 'to@example.com, foo@example.com'
# Message body
part = MIMEText('test file attached')
message.attach(part)
# Attachment
part = MIMEText('contents of test file here')
part.add_header('Content-Disposition', 'attachment; filename=test.txt')
message.attach(part)
kwargs = dict(
RawMessage={'Data': message.as_string()},
)
conn.send_raw_email.when.called_with(**kwargs).should.throw(ClientError)
conn.verify_email_identity(EmailAddress="test@example.com")
conn.send_raw_email(**kwargs)
send_quota = conn.get_send_quota()
sent_count = int(send_quota['SentLast24Hours'])
sent_count.should.equal(2)
@mock_ses
def test_send_raw_email_without_source_or_from():
conn = boto3.client('ses', region_name='us-east-1')
message = MIMEMultipart()
message['Subject'] = 'Test'
message['To'] = 'to@example.com, foo@example.com'
# Message body
part = MIMEText('test file attached')
message.attach(part)
# Attachment
part = MIMEText('contents of test file here')
part.add_header('Content-Disposition', 'attachment; filename=test.txt')
message.attach(part)
kwargs = dict(
RawMessage={'Data': message.as_string()},
)
conn.send_raw_email.when.called_with(**kwargs).should.throw(ClientError)


@@ -42,6 +42,29 @@ def test_publish_to_sqs():
acquired_message.should.equal(expected)
@mock_sqs
@mock_sns
def test_publish_to_sqs_raw():
sns = boto3.resource('sns', region_name='us-east-1')
topic = sns.create_topic(Name='some-topic')
sqs = boto3.resource('sqs', region_name='us-east-1')
queue = sqs.create_queue(QueueName='test-queue')
subscription = topic.subscribe(
Protocol='sqs', Endpoint=queue.attributes['QueueArn'])
subscription.set_attributes(
AttributeName='RawMessageDelivery', AttributeValue='true')
message = 'my message'
with freeze_time("2015-01-01 12:00:00"):
topic.publish(Message=message)
messages = queue.receive_messages(MaxNumberOfMessages=1)
messages[0].body.should.equal(message)
@mock_sqs
@mock_sns
def test_publish_to_sqs_bad():
@@ -230,7 +253,7 @@ def test_publish_to_sqs_in_different_region():
@mock_sns
def test_publish_to_http():
def callback(request):
-request.headers["Content-Type"].should.equal("application/json")
request.headers["Content-Type"].should.equal("text/plain; charset=UTF-8")
json.loads.when.called_with(
request.body.decode()
).should_not.throw(Exception)


@@ -13,14 +13,15 @@ from moto.sns.models import DEFAULT_TOPIC_POLICY, DEFAULT_EFFECTIVE_DELIVERY_POL
@mock_sns
def test_create_and_delete_topic():
conn = boto3.client("sns", region_name="us-east-1")
-conn.create_topic(Name="some-topic")
for topic_name in ('some-topic', '-some-topic-', '_some-topic_', 'a' * 256):
conn.create_topic(Name=topic_name)
topics_json = conn.list_topics()
topics = topics_json["Topics"]
topics.should.have.length_of(1)
topics[0]['TopicArn'].should.equal(
-"arn:aws:sns:{0}:123456789012:some-topic"
"arn:aws:sns:{0}:123456789012:{1}"
-.format(conn._client_config.region_name)
.format(conn._client_config.region_name, topic_name)
)
# Delete the topic


@@ -40,6 +40,29 @@ def test_create_fifo_queue_fail():
raise RuntimeError('Should of raised InvalidParameterValue Exception')
@mock_sqs
def test_create_queue_with_different_attributes_fail():
sqs = boto3.client('sqs', region_name='us-east-1')
sqs.create_queue(
QueueName='test-queue',
Attributes={
'VisibilityTimeout': '10',
}
)
try:
sqs.create_queue(
QueueName='test-queue',
Attributes={
'VisibilityTimeout': '60',
}
)
except botocore.exceptions.ClientError as err:
err.response['Error']['Code'].should.equal('QueueAlreadyExists')
else:
raise RuntimeError('Should of raised QueueAlreadyExists Exception')
@mock_sqs
def test_create_fifo_queue():
sqs = boto3.client('sqs', region_name='us-east-1')
@@ -1063,3 +1086,112 @@ def test_redrive_policy_set_attributes():
assert 'RedrivePolicy' in copy.attributes
copy_policy = json.loads(copy.attributes['RedrivePolicy'])
assert copy_policy == redrive_policy
@mock_sqs
def test_receive_messages_with_message_group_id():
sqs = boto3.resource('sqs', region_name='us-east-1')
queue = sqs.create_queue(QueueName="test-queue.fifo",
Attributes={
'FifoQueue': 'true',
})
queue.set_attributes(Attributes={"VisibilityTimeout": "3600"})
queue.send_message(
MessageBody="message-1",
MessageGroupId="group"
)
queue.send_message(
MessageBody="message-2",
MessageGroupId="group"
)
messages = queue.receive_messages()
messages.should.have.length_of(1)
message = messages[0]
# received message is not deleted!
messages = queue.receive_messages(WaitTimeSeconds=0)
messages.should.have.length_of(0)
# message is now processed, next one should be available
message.delete()
messages = queue.receive_messages()
messages.should.have.length_of(1)
@mock_sqs
def test_receive_messages_with_message_group_id_on_requeue():
sqs = boto3.resource('sqs', region_name='us-east-1')
queue = sqs.create_queue(QueueName="test-queue.fifo",
Attributes={
'FifoQueue': 'true',
})
queue.set_attributes(Attributes={"VisibilityTimeout": "3600"})
queue.send_message(
MessageBody="message-1",
MessageGroupId="group"
)
queue.send_message(
MessageBody="message-2",
MessageGroupId="group"
)
messages = queue.receive_messages()
messages.should.have.length_of(1)
message = messages[0]
# received message is not deleted!
messages = queue.receive_messages(WaitTimeSeconds=0)
messages.should.have.length_of(0)
# message is now available again, next one should be available
message.change_visibility(VisibilityTimeout=0)
messages = queue.receive_messages()
messages.should.have.length_of(1)
messages[0].message_id.should.equal(message.message_id)
@mock_sqs
def test_receive_messages_with_message_group_id_on_visibility_timeout():
if os.environ.get('TEST_SERVER_MODE', 'false').lower() == 'true':
raise SkipTest('Cant manipulate time in server mode')
with freeze_time("2015-01-01 12:00:00"):
sqs = boto3.resource('sqs', region_name='us-east-1')
queue = sqs.create_queue(QueueName="test-queue.fifo",
Attributes={
'FifoQueue': 'true',
})
queue.set_attributes(Attributes={"VisibilityTimeout": "3600"})
queue.send_message(
MessageBody="message-1",
MessageGroupId="group"
)
queue.send_message(
MessageBody="message-2",
MessageGroupId="group"
)
messages = queue.receive_messages()
messages.should.have.length_of(1)
message = messages[0]
# received message is not deleted!
messages = queue.receive_messages(WaitTimeSeconds=0)
messages.should.have.length_of(0)
message.change_visibility(VisibilityTimeout=10)
with freeze_time("2015-01-01 12:00:05"):
# no timeout yet
messages = queue.receive_messages(WaitTimeSeconds=0)
messages.should.have.length_of(0)
with freeze_time("2015-01-01 12:00:15"):
# message is now available again, next one should be available
messages = queue.receive_messages()
messages.should.have.length_of(1)
messages[0].message_id.should.equal(message.message_id)