diff --git a/MANIFEST.in b/MANIFEST.in
index 43e8120e4..bd7eb968a 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -2,5 +2,6 @@ include README.md LICENSE AUTHORS.md
include requirements.txt requirements-dev.txt tox.ini
include moto/ec2/resources/instance_types.json
include moto/ec2/resources/amis.json
+include moto/cognitoidp/resources/*.json
recursive-include moto/templates *
-recursive-include tests *
+recursive-include tests *
diff --git a/README.md b/README.md
index 9642a8db6..3fbee44f8 100644
--- a/README.md
+++ b/README.md
@@ -72,6 +72,8 @@ It gets even better! Moto isn't just for Python code and it isn't just for S3. L
|------------------------------------------------------------------------------|
| Cognito Identity | @mock_cognitoidentity| basic endpoints done |
|------------------------------------------------------------------------------|
+| Cognito Identity Provider | @mock_cognitoidp| basic endpoints done |
+|------------------------------------------------------------------------------|
| Data Pipeline | @mock_datapipeline| basic endpoints done |
|------------------------------------------------------------------------------|
| DynamoDB | @mock_dynamodb | core endpoints done |
@@ -138,6 +140,8 @@ It gets even better! Moto isn't just for Python code and it isn't just for S3. L
|------------------------------------------------------------------------------|
```
+For a full list of endpoint coverage, see the [implementation coverage](https://github.com/spulec/moto/blob/master/IMPLEMENTATION_COVERAGE.md) document.
+
### Another Example
Imagine you have a function that you use to launch new ec2 instances:
diff --git a/moto/__init__.py b/moto/__init__.py
index c6f24388b..5e6f71b7a 100644
--- a/moto/__init__.py
+++ b/moto/__init__.py
@@ -12,6 +12,7 @@ from .awslambda import mock_lambda, mock_lambda_deprecated # flake8: noqa
from .cloudformation import mock_cloudformation, mock_cloudformation_deprecated # flake8: noqa
from .cloudwatch import mock_cloudwatch, mock_cloudwatch_deprecated # flake8: noqa
from .cognitoidentity import mock_cognitoidentity, mock_cognitoidentity_deprecated # flake8: noqa
+from .cognitoidp import mock_cognitoidp, mock_cognitoidp_deprecated # flake8: noqa
from .datapipeline import mock_datapipeline, mock_datapipeline_deprecated # flake8: noqa
from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated # flake8: noqa
from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated # flake8: noqa
diff --git a/moto/apigateway/models.py b/moto/apigateway/models.py
index 160b443b0..868262ccc 100644
--- a/moto/apigateway/models.py
+++ b/moto/apigateway/models.py
@@ -328,6 +328,9 @@ class RestAPI(BaseModel):
self.resources = {}
self.add_child('/') # Add default child
+ def __repr__(self):
+ return str(self.id)
+
def to_dict(self):
return {
"id": self.id,
diff --git a/moto/autoscaling/responses.py b/moto/autoscaling/responses.py
index c7170e17e..5586c51dd 100644
--- a/moto/autoscaling/responses.py
+++ b/moto/autoscaling/responses.py
@@ -320,8 +320,7 @@ DESCRIBE_LAUNCH_CONFIGURATIONS_TEMPLATE = """
{% endif %}
{{ launch_configuration.instance_type }}
- arn:aws:autoscaling:us-east-1:803981987763:launchConfiguration:
- 9dbbbf87-6141-428a-a409-0752edbe6cad:launchConfigurationName/{{ launch_configuration.name }}
+ arn:aws:autoscaling:us-east-1:803981987763:launchConfiguration:9dbbbf87-6141-428a-a409-0752edbe6cad:launchConfigurationName/{{ launch_configuration.name }}
{% if launch_configuration.block_device_mappings %}
{% for mount_point, mapping in launch_configuration.block_device_mappings.items() %}
@@ -517,8 +516,7 @@ DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE = """{{ group.health_check_period }}
{{ group.default_cooldown }}
- arn:aws:autoscaling:us-east-1:803981987763:autoScalingGroup:ca861182-c8f9-4ca7-b1eb-cd35505f5ebb
- :autoScalingGroupName/{{ group.name }}
+ arn:aws:autoscaling:us-east-1:803981987763:autoScalingGroup:ca861182-c8f9-4ca7-b1eb-cd35505f5ebb:autoScalingGroupName/{{ group.name }}
{% if group.termination_policies %}
{% for policy in group.termination_policies %}
diff --git a/moto/awslambda/models.py b/moto/awslambda/models.py
index 80b4ffba3..b11bde042 100644
--- a/moto/awslambda/models.py
+++ b/moto/awslambda/models.py
@@ -4,6 +4,7 @@ import base64
from collections import defaultdict
import copy
import datetime
+import docker
import docker.errors
import hashlib
import io
@@ -44,6 +45,7 @@ except ImportError:
_stderr_regex = re.compile(r'START|END|REPORT RequestId: .*')
_orig_adapter_send = requests.adapters.HTTPAdapter.send
+docker_3 = docker.__version__.startswith("3")
def zip2tar(zip_bytes):
@@ -104,7 +106,11 @@ class _DockerDataVolumeContext:
# It doesn't exist so we need to create it
self._vol_ref.volume = self._lambda_func.docker_client.volumes.create(self._lambda_func.code_sha_256)
- container = self._lambda_func.docker_client.containers.run('alpine', 'sleep 100', volumes={self.name: {'bind': '/tmp/data', 'mode': 'rw'}}, detach=True)
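+            # Build the volumes argument in the format expected by the installed docker-py major version.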
+ if docker_3:
+ volumes = {self.name: {'bind': '/tmp/data', 'mode': 'rw'}}
+ else:
+ volumes = {self.name: '/tmp/data'}
+ container = self._lambda_func.docker_client.containers.run('alpine', 'sleep 100', volumes=volumes, detach=True)
try:
tar_bytes = zip2tar(self._lambda_func.code_bytes)
container.put_archive('/tmp/data', tar_bytes)
@@ -309,11 +315,15 @@ class LambdaFunction(BaseModel):
finally:
if container:
try:
- exit_code = container.wait(timeout=300)['StatusCode']
+ exit_code = container.wait(timeout=300)
except requests.exceptions.ReadTimeout:
exit_code = -1
container.stop()
container.kill()
+ else:
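+                # docker-py >= 3.0 returns a dict from wait(); extract the integer status code.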
+ if docker_3:
+ exit_code = exit_code['StatusCode']
+
output = container.logs(stdout=False, stderr=True)
output += container.logs(stdout=True, stderr=False)
container.remove()
@@ -445,6 +455,9 @@ class LambdaVersion(BaseModel):
def __init__(self, spec):
self.version = spec['Version']
+ def __repr__(self):
+ return str(self.logical_resource_id)
+
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json,
region_name):
@@ -675,3 +688,4 @@ lambda_backends = {_region.name: LambdaBackend(_region.name)
for _region in boto.awslambda.regions()}
lambda_backends['ap-southeast-2'] = LambdaBackend('ap-southeast-2')
+lambda_backends['us-gov-west-1'] = LambdaBackend('us-gov-west-1')
diff --git a/moto/backends.py b/moto/backends.py
index d8d317573..496af13e1 100644
--- a/moto/backends.py
+++ b/moto/backends.py
@@ -7,6 +7,7 @@ from moto.awslambda import lambda_backends
from moto.cloudformation import cloudformation_backends
from moto.cloudwatch import cloudwatch_backends
from moto.cognitoidentity import cognitoidentity_backends
+from moto.cognitoidp import cognitoidp_backends
from moto.core import moto_api_backends
from moto.datapipeline import datapipeline_backends
from moto.dynamodb import dynamodb_backends
@@ -51,6 +52,7 @@ BACKENDS = {
'cloudformation': cloudformation_backends,
'cloudwatch': cloudwatch_backends,
'cognito-identity': cognitoidentity_backends,
+ 'cognito-idp': cognitoidp_backends,
'datapipeline': datapipeline_backends,
'dynamodb': dynamodb_backends,
'dynamodb2': dynamodb_backends2,
diff --git a/moto/batch/models.py b/moto/batch/models.py
index 8b3b81ccb..c47ca6e97 100644
--- a/moto/batch/models.py
+++ b/moto/batch/models.py
@@ -295,6 +295,14 @@ class Job(threading.Thread, BaseModel):
}
if self.job_stopped:
result['stoppedAt'] = datetime2int(self.job_stopped_at)
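+        # Add a minimal container section; most fields are static defaults, while
+        # logStreamName points at the CloudWatch Logs stream created when the job ran.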
+ result['container'] = {}
+ result['container']['command'] = ['/bin/sh -c "for a in `seq 1 10`; do echo Hello World; sleep 1; done"']
+ result['container']['privileged'] = False
+ result['container']['readonlyRootFilesystem'] = False
+ result['container']['ulimits'] = {}
+ result['container']['vcpus'] = 1
+ result['container']['volumes'] = ''
+ result['container']['logStreamName'] = self.log_stream_name
if self.job_stopped_reason is not None:
result['statusReason'] = self.job_stopped_reason
return result
@@ -378,6 +386,7 @@ class Job(threading.Thread, BaseModel):
# Send to cloudwatch
log_group = '/aws/batch/job'
stream_name = '{0}/default/{1}'.format(self.job_definition.name, self.job_id)
+ self.log_stream_name = stream_name
self._log_backend.ensure_log_group(log_group, None)
self._log_backend.create_log_stream(log_group, stream_name)
self._log_backend.put_log_events(log_group, stream_name, logs, None)
diff --git a/moto/cloudformation/exceptions.py b/moto/cloudformation/exceptions.py
index 56a95382a..6ea15c5ca 100644
--- a/moto/cloudformation/exceptions.py
+++ b/moto/cloudformation/exceptions.py
@@ -33,6 +33,18 @@ class MissingParameterError(BadRequest):
)
+class ExportNotFound(BadRequest):
+ """Exception to raise if a template tries to import a non-existent export"""
+
+ def __init__(self, export_name):
+ template = Template(ERROR_RESPONSE)
+ super(ExportNotFound, self).__init__()
+ self.description = template.render(
+ code='ExportNotFound',
+ message="No export named {0} found.".format(export_name)
+ )
+
+
ERROR_RESPONSE = """
Sender
diff --git a/moto/cloudformation/models.py b/moto/cloudformation/models.py
index 57f42df56..e5ab7255d 100644
--- a/moto/cloudformation/models.py
+++ b/moto/cloudformation/models.py
@@ -38,7 +38,7 @@ class FakeStack(BaseModel):
resource_status_reason="User Initiated")
self.description = self.template_dict.get('Description')
- self.cross_stack_resources = cross_stack_resources or []
+ self.cross_stack_resources = cross_stack_resources or {}
self.resource_map = self._create_resource_map()
self.output_map = self._create_output_map()
self._add_stack_event("CREATE_COMPLETE")
diff --git a/moto/cloudformation/parsing.py b/moto/cloudformation/parsing.py
index 849d8c917..19018158d 100644
--- a/moto/cloudformation/parsing.py
+++ b/moto/cloudformation/parsing.py
@@ -28,7 +28,7 @@ from moto.s3 import models as s3_models
from moto.sns import models as sns_models
from moto.sqs import models as sqs_models
from .utils import random_suffix
-from .exceptions import MissingParameterError, UnformattedGetAttTemplateException, ValidationError
+from .exceptions import ExportNotFound, MissingParameterError, UnformattedGetAttTemplateException, ValidationError
from boto.cloudformation.stack import Output
MODEL_MAP = {
@@ -206,6 +206,8 @@ def clean_json(resource_json, resources_map):
values = [x.value for x in resources_map.cross_stack_resources.values() if x.name == cleaned_val]
if any(values):
return values[0]
+ else:
+ raise ExportNotFound(cleaned_val)
if 'Fn::GetAZs' in resource_json:
region = resource_json.get('Fn::GetAZs') or DEFAULT_REGION
@@ -369,6 +371,7 @@ class ResourceMap(collections.Mapping):
"AWS::Region": self._region_name,
"AWS::StackId": stack_id,
"AWS::StackName": stack_name,
+ "AWS::URLSuffix": "amazonaws.com",
"AWS::NoValue": None,
}
diff --git a/moto/cloudwatch/models.py b/moto/cloudwatch/models.py
index ba6569981..ed644f874 100644
--- a/moto/cloudwatch/models.py
+++ b/moto/cloudwatch/models.py
@@ -229,8 +229,13 @@ class CloudWatchBackend(BaseBackend):
def put_metric_data(self, namespace, metric_data):
for metric_member in metric_data:
+ # Preserve "datetime" for get_metric_statistics comparisons
+ timestamp = metric_member.get('Timestamp')
+ if timestamp is not None and type(timestamp) != datetime:
+ timestamp = datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%fZ')
+ timestamp = timestamp.replace(tzinfo=tzutc())
self.metric_data.append(MetricDatum(
- namespace, metric_member['MetricName'], float(metric_member['Value']), metric_member.get('Dimensions.member', _EMPTY_LIST), metric_member.get('Timestamp')))
+ namespace, metric_member['MetricName'], float(metric_member.get('Value', 0)), metric_member.get('Dimensions.member', _EMPTY_LIST), timestamp))
def get_metric_statistics(self, namespace, metric_name, start_time, end_time, period, stats):
period_delta = timedelta(seconds=period)
diff --git a/moto/cloudwatch/responses.py b/moto/cloudwatch/responses.py
index c080d4620..8118f35ba 100644
--- a/moto/cloudwatch/responses.py
+++ b/moto/cloudwatch/responses.py
@@ -272,7 +272,7 @@ GET_METRIC_STATISTICS_TEMPLATE = """/.well-known/jwks.json$': CognitoIdpJsonWebKeyResponse().serve_json_web_key,
+}
diff --git a/moto/core/responses.py b/moto/core/responses.py
index ed4792083..0f133e72c 100644
--- a/moto/core/responses.py
+++ b/moto/core/responses.py
@@ -5,6 +5,7 @@ import datetime
import json
import logging
import re
+import io
import pytz
from moto.core.exceptions import DryRunClientError
@@ -622,7 +623,7 @@ class AWSServiceSpec(object):
def __init__(self, path):
self.path = resource_filename('botocore', path)
- with open(self.path, "rb") as f:
+ with io.open(self.path, 'r', encoding='utf-8') as f:
spec = json.load(f)
self.metadata = spec['metadata']
self.operations = spec['operations']
diff --git a/moto/dynamodb/responses.py b/moto/dynamodb/responses.py
index d4f832be2..990069a46 100644
--- a/moto/dynamodb/responses.py
+++ b/moto/dynamodb/responses.py
@@ -62,13 +62,13 @@ class DynamoHandler(BaseResponse):
name = body['TableName']
key_schema = body['KeySchema']
- hash_hey = key_schema['HashKeyElement']
- hash_key_attr = hash_hey['AttributeName']
- hash_key_type = hash_hey['AttributeType']
+ hash_key = key_schema['HashKeyElement']
+ hash_key_attr = hash_key['AttributeName']
+ hash_key_type = hash_key['AttributeType']
- range_hey = key_schema.get('RangeKeyElement', {})
- range_key_attr = range_hey.get('AttributeName')
- range_key_type = range_hey.get('AttributeType')
+ range_key = key_schema.get('RangeKeyElement', {})
+ range_key_attr = range_key.get('AttributeName')
+ range_key_type = range_key.get('AttributeType')
throughput = body["ProvisionedThroughput"]
read_units = throughput["ReadCapacityUnits"]
diff --git a/moto/dynamodb2/models.py b/moto/dynamodb2/models.py
index 73b09d73c..c4aa10237 100644
--- a/moto/dynamodb2/models.py
+++ b/moto/dynamodb2/models.py
@@ -176,16 +176,17 @@ class Item(BaseModel):
key_parts = key.split('.')
attr = key_parts.pop(0)
if attr not in self.attrs:
- raise ValueError()
+ raise ValueError
last_val = self.attrs[attr].value
for key_part in key_parts:
# Hack but it'll do, traverses into a dict
- if list(last_val.keys())[0] == 'M':
- last_val = last_val['M']
+ last_val_type = list(last_val.keys())
+ if last_val_type and last_val_type[0] == 'M':
+ last_val = last_val['M']
if key_part not in last_val:
- raise ValueError()
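+                    # Create the missing intermediate map instead of raising, so nested SET paths can be built up.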
+ last_val[key_part] = {'M': {}}
last_val = last_val[key_part]
diff --git a/moto/ec2/models.py b/moto/ec2/models.py
index 31bfb4839..4e26f0f65 100755
--- a/moto/ec2/models.py
+++ b/moto/ec2/models.py
@@ -1324,7 +1324,7 @@ class SecurityGroup(TaggedEC2Resource):
self.name = name
self.description = description
self.ingress_rules = []
- self.egress_rules = [SecurityRule(-1, -1, -1, ['0.0.0.0/0'], [])]
+ self.egress_rules = [SecurityRule(-1, None, None, ['0.0.0.0/0'], [])]
self.enis = {}
self.vpc_id = vpc_id
self.owner_id = "123456789012"
@@ -3159,8 +3159,11 @@ class SpotFleetBackend(object):
class ElasticAddress(object):
- def __init__(self, domain):
- self.public_ip = random_ip()
+ def __init__(self, domain, address=None):
+ if address:
+ self.public_ip = address
+ else:
+ self.public_ip = random_ip()
self.allocation_id = random_eip_allocation_id() if domain == "vpc" else None
self.domain = domain
self.instance = None
@@ -3222,11 +3225,13 @@ class ElasticAddressBackend(object):
self.addresses = []
super(ElasticAddressBackend, self).__init__()
- def allocate_address(self, domain):
+ def allocate_address(self, domain, address=None):
if domain not in ['standard', 'vpc']:
raise InvalidDomainError(domain)
-
- address = ElasticAddress(domain)
+ if address:
+ address = ElasticAddress(domain, address)
+ else:
+ address = ElasticAddress(domain)
self.addresses.append(address)
return address
diff --git a/moto/ec2/responses/elastic_ip_addresses.py b/moto/ec2/responses/elastic_ip_addresses.py
index 11c1d9c1f..6e1c9fe38 100644
--- a/moto/ec2/responses/elastic_ip_addresses.py
+++ b/moto/ec2/responses/elastic_ip_addresses.py
@@ -7,8 +7,13 @@ class ElasticIPAddresses(BaseResponse):
def allocate_address(self):
domain = self._get_param('Domain', if_none='standard')
+ reallocate_address = self._get_param('Address', if_none=None)
if self.is_not_dryrun('AllocateAddress'):
- address = self.ec2_backend.allocate_address(domain)
+ if reallocate_address:
+ address = self.ec2_backend.allocate_address(
+ domain, address=reallocate_address)
+ else:
+ address = self.ec2_backend.allocate_address(domain)
template = self.response_template(ALLOCATE_ADDRESS_RESPONSE)
return template.render(address=address)
diff --git a/moto/ec2/responses/security_groups.py b/moto/ec2/responses/security_groups.py
index 9118c01b3..0009ff131 100644
--- a/moto/ec2/responses/security_groups.py
+++ b/moto/ec2/responses/security_groups.py
@@ -179,8 +179,12 @@ DESCRIBE_SECURITY_GROUPS_RESPONSE = """= len(groups):
+ next_token = None
+
+ return groups_page, next_token
+
def create_log_stream(self, log_group_name, log_stream_name):
if log_group_name not in self.groups:
raise ResourceNotFoundException()
diff --git a/moto/logs/responses.py b/moto/logs/responses.py
index 7bf481908..4bec86cb2 100644
--- a/moto/logs/responses.py
+++ b/moto/logs/responses.py
@@ -33,6 +33,18 @@ class LogsResponse(BaseResponse):
self.logs_backend.delete_log_group(log_group_name)
return ''
+ def describe_log_groups(self):
+ log_group_name_prefix = self._get_param('logGroupNamePrefix')
+ next_token = self._get_param('nextToken')
+ limit = self._get_param('limit', 50)
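+        # DescribeLogGroups returns at most 50 log groups per page.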
+ assert limit <= 50
+ groups, next_token = self.logs_backend.describe_log_groups(
+ limit, log_group_name_prefix, next_token)
+ return json.dumps({
+ "logGroups": groups,
+ "nextToken": next_token
+ })
+
def create_log_stream(self):
log_group_name = self._get_param('logGroupName')
log_stream_name = self._get_param('logStreamName')
diff --git a/moto/rds2/models.py b/moto/rds2/models.py
index 29fa95959..3fc4b6d65 100644
--- a/moto/rds2/models.py
+++ b/moto/rds2/models.py
@@ -726,10 +726,11 @@ class RDS2Backend(BaseBackend):
def describe_snapshots(self, db_instance_identifier, db_snapshot_identifier):
if db_instance_identifier:
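+            # Return every snapshot belonging to this instance; an empty list is a valid result.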
+ db_instance_snapshots = []
for snapshot in self.snapshots.values():
if snapshot.database.db_instance_identifier == db_instance_identifier:
- return [snapshot]
- raise DBSnapshotNotFoundError()
+ db_instance_snapshots.append(snapshot)
+ return db_instance_snapshots
if db_snapshot_identifier:
if db_snapshot_identifier in self.snapshots:
diff --git a/moto/redshift/models.py b/moto/redshift/models.py
index 4eb9d6b5c..4eafcfc79 100644
--- a/moto/redshift/models.py
+++ b/moto/redshift/models.py
@@ -73,7 +73,8 @@ class Cluster(TaggableResourceMixin, BaseModel):
preferred_maintenance_window, cluster_parameter_group_name,
automated_snapshot_retention_period, port, cluster_version,
allow_version_upgrade, number_of_nodes, publicly_accessible,
- encrypted, region_name, tags=None, iam_roles_arn=None):
+ encrypted, region_name, tags=None, iam_roles_arn=None,
+ restored_from_snapshot=False):
super(Cluster, self).__init__(region_name, tags)
self.redshift_backend = redshift_backend
self.cluster_identifier = cluster_identifier
@@ -119,6 +120,7 @@ class Cluster(TaggableResourceMixin, BaseModel):
self.number_of_nodes = 1
self.iam_roles_arn = iam_roles_arn or []
+ self.restored_from_snapshot = restored_from_snapshot
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
@@ -242,7 +244,15 @@ class Cluster(TaggableResourceMixin, BaseModel):
"IamRoleArn": iam_role_arn
} for iam_role_arn in self.iam_roles_arn]
}
-
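+        # A cluster restored from a snapshot reports a completed RestoreStatus with static placeholder metrics.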
+ if self.restored_from_snapshot:
+ json_response['RestoreStatus'] = {
+ 'Status': 'completed',
+ 'CurrentRestoreRateInMegaBytesPerSecond': 123.0,
+ 'SnapshotSizeInMegaBytes': 123,
+ 'ProgressInMegaBytes': 123,
+ 'ElapsedTimeInSeconds': 123,
+ 'EstimatedTimeToCompletionInSeconds': 123
+ }
try:
json_response['ClusterSnapshotCopyStatus'] = self.cluster_snapshot_copy_status
except AttributeError:
@@ -639,7 +649,8 @@ class RedshiftBackend(BaseBackend):
"cluster_version": snapshot.cluster.cluster_version,
"number_of_nodes": snapshot.cluster.number_of_nodes,
"encrypted": snapshot.cluster.encrypted,
- "tags": snapshot.cluster.tags
+ "tags": snapshot.cluster.tags,
+ "restored_from_snapshot": True
}
create_kwargs.update(kwargs)
return self.create_cluster(**create_kwargs)
diff --git a/moto/s3/models.py b/moto/s3/models.py
index 3b4623d61..9e58fdb47 100644
--- a/moto/s3/models.py
+++ b/moto/s3/models.py
@@ -859,6 +859,9 @@ class S3Backend(BaseBackend):
if str(key.version_id) != str(version_id)
]
)
+
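+            # Drop the key entry entirely once its last version has been deleted.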
+ if not bucket.keys.getlist(key_name):
+ bucket.keys.pop(key_name)
return True
except KeyError:
return False
diff --git a/moto/s3/responses.py b/moto/s3/responses.py
index 02a9ac40e..5e7cf0fe5 100755
--- a/moto/s3/responses.py
+++ b/moto/s3/responses.py
@@ -706,8 +706,11 @@ class ResponseObject(_TemplateEnvironmentMixin):
if 'x-amz-copy-source' in request.headers:
# Copy key
- src_key_parsed = urlparse(unquote(request.headers.get("x-amz-copy-source")))
- src_bucket, src_key = src_key_parsed.path.lstrip("/").split("/", 1)
+            # The copy source may carry a ?versionId=<id> query string while the key itself is
+            # percent-encoded, so parse the quoted value first and only unquote the path afterwards.
+ src_key_parsed = urlparse(request.headers.get("x-amz-copy-source"))
+ src_bucket, src_key = unquote(src_key_parsed.path).\
+ lstrip("/").split("/", 1)
src_version_id = parse_qs(src_key_parsed.query).get(
'versionId', [None])[0]
self.backend.copy_key(src_bucket, src_key, bucket_name, key_name,
diff --git a/moto/server.py b/moto/server.py
index e9f4c0904..aad47757a 100644
--- a/moto/server.py
+++ b/moto/server.py
@@ -69,8 +69,13 @@ class DomainDispatcherApplication(object):
_, _, region, service, _ = environ['HTTP_AUTHORIZATION'].split(",")[0].split()[
1].split("/")
except (KeyError, ValueError):
+ # Some cognito-idp endpoints (e.g. change password) do not receive an auth header.
+ if environ.get('HTTP_X_AMZ_TARGET', '').startswith('AWSCognitoIdentityProviderService'):
+ service = 'cognito-idp'
+ else:
+ service = 's3'
+
region = 'us-east-1'
- service = 's3'
if service == 'dynamodb':
dynamo_api_version = environ['HTTP_X_AMZ_TARGET'].split("_")[1].split(".")[0]
# If Newer API version, use dynamodb2
@@ -186,9 +191,17 @@ def main(argv=sys.argv[1:]):
parser.add_argument(
'-s', '--ssl',
action='store_true',
- help='Enable SSL encrypted connection (use https://... URL)',
+ help='Enable SSL encrypted connection with auto-generated certificate (use https://... URL)',
default=False
)
+ parser.add_argument(
+ '-c', '--ssl-cert', type=str,
+ help='Path to SSL certificate',
+ default=None)
+ parser.add_argument(
+ '-k', '--ssl-key', type=str,
+ help='Path to SSL private key',
+ default=None)
args = parser.parse_args(argv)
@@ -197,9 +210,15 @@ def main(argv=sys.argv[1:]):
create_backend_app, service=args.service)
main_app.debug = True
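+    # Prefer an explicit certificate/key pair; fall back to an ad-hoc certificate when --ssl is set.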
+ ssl_context = None
+ if args.ssl_key and args.ssl_cert:
+ ssl_context = (args.ssl_cert, args.ssl_key)
+ elif args.ssl:
+ ssl_context = 'adhoc'
+
run_simple(args.host, args.port, main_app,
threaded=True, use_reloader=args.reload,
- ssl_context='adhoc' if args.ssl else None)
+ ssl_context=ssl_context)
if __name__ == '__main__':
diff --git a/moto/ses/models.py b/moto/ses/models.py
index 179f4d8e0..b1135a406 100644
--- a/moto/ses/models.py
+++ b/moto/ses/models.py
@@ -1,6 +1,7 @@
from __future__ import unicode_literals
import email
+from email.utils import parseaddr
from moto.core import BaseBackend, BaseModel
from .exceptions import MessageRejectedError
@@ -84,13 +85,27 @@ class SESBackend(BaseBackend):
return message
def send_raw_email(self, source, destinations, raw_data):
- if source not in self.addresses:
- raise MessageRejectedError(
- "Did not have authority to send from email %s" % source
- )
+ if source is not None:
+ _, source_email_address = parseaddr(source)
+ if source_email_address not in self.addresses:
+ raise MessageRejectedError(
+ "Did not have authority to send from email %s" % source_email_address
+ )
recipient_count = len(destinations)
message = email.message_from_string(raw_data)
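+        # When Source is not provided, fall back to the From header of the raw message.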
+ if source is None:
+ if message['from'] is None:
+ raise MessageRejectedError(
+ "Source not specified"
+ )
+
+ _, source_email_address = parseaddr(message['from'])
+ if source_email_address not in self.addresses:
+ raise MessageRejectedError(
+ "Did not have authority to send from email %s" % source_email_address
+ )
+
for header in 'TO', 'CC', 'BCC':
recipient_count += sum(
d.strip() and 1 or 0
diff --git a/moto/ses/responses.py b/moto/ses/responses.py
index 6cd018aa6..bdf873836 100644
--- a/moto/ses/responses.py
+++ b/moto/ses/responses.py
@@ -75,7 +75,10 @@ class EmailResponse(BaseResponse):
return template.render(message=message)
def send_raw_email(self):
- source = self.querystring.get('Source')[0]
+ source = self.querystring.get('Source')
+ if source is not None:
+ source, = source
+
raw_data = self.querystring.get('RawMessage.Data')[0]
raw_data = base64.b64decode(raw_data)
if six.PY3:
diff --git a/moto/sns/models.py b/moto/sns/models.py
index 1c1be6680..ebdf5cd16 100644
--- a/moto/sns/models.py
+++ b/moto/sns/models.py
@@ -94,11 +94,14 @@ class Subscription(BaseModel):
if self.protocol == 'sqs':
queue_name = self.endpoint.split(":")[-1]
region = self.endpoint.split(":")[3]
- enveloped_message = json.dumps(self.get_post_data(message, message_id, subject, message_attributes=message_attributes), sort_keys=True, indent=2, separators=(',', ': '))
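+            # With RawMessageDelivery enabled, deliver the message body as-is instead of the JSON envelope.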
+ if self.attributes.get('RawMessageDelivery') != 'true':
+ enveloped_message = json.dumps(self.get_post_data(message, message_id, subject, message_attributes=message_attributes), sort_keys=True, indent=2, separators=(',', ': '))
+ else:
+ enveloped_message = message
sqs_backends[region].send_message(queue_name, enveloped_message)
elif self.protocol in ['http', 'https']:
post_data = self.get_post_data(message, message_id, subject)
- requests.post(self.endpoint, json=post_data)
+ requests.post(self.endpoint, json=post_data, headers={'Content-Type': 'text/plain; charset=UTF-8'})
elif self.protocol == 'lambda':
# TODO: support bad function name
# http://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html
@@ -240,7 +243,7 @@ class SNSBackend(BaseBackend):
self.sms_attributes.update(attrs)
def create_topic(self, name):
- fails_constraints = not re.match(r'^[a-zA-Z0-9](?:[A-Za-z0-9_-]{0,253}[a-zA-Z0-9])?$', name)
+ fails_constraints = not re.match(r'^[a-zA-Z0-9_-]{1,256}$', name)
if fails_constraints:
raise InvalidParameterValue("Topic names must be made up of only uppercase and lowercase ASCII letters, numbers, underscores, and hyphens, and must be between 1 and 256 characters long.")
candidate_topic = Topic(name, self)
diff --git a/moto/sqs/exceptions.py b/moto/sqs/exceptions.py
index baf721b53..5f1cc46b2 100644
--- a/moto/sqs/exceptions.py
+++ b/moto/sqs/exceptions.py
@@ -1,4 +1,5 @@
from __future__ import unicode_literals
+from moto.core.exceptions import RESTError
class MessageNotInflight(Exception):
@@ -21,3 +22,11 @@ class MessageAttributesInvalid(Exception):
class QueueDoesNotExist(Exception):
status_code = 404
description = "The specified queue does not exist for this wsdl version."
+
+
+class QueueAlreadyExists(RESTError):
+ code = 400
+
+ def __init__(self, message):
+ super(QueueAlreadyExists, self).__init__(
+ "QueueAlreadyExists", message)
diff --git a/moto/sqs/models.py b/moto/sqs/models.py
index 9c8858bc0..b8db356e9 100644
--- a/moto/sqs/models.py
+++ b/moto/sqs/models.py
@@ -18,6 +18,7 @@ from .exceptions import (
MessageAttributesInvalid,
MessageNotInflight,
QueueDoesNotExist,
+ QueueAlreadyExists,
ReceiptHandleIsInvalid,
)
@@ -180,6 +181,7 @@ class Queue(BaseModel):
self.permissions = {}
self._messages = []
+ self._pending_messages = set()
now = unix_time()
self.created_timestamp = now
@@ -209,6 +211,16 @@ class Queue(BaseModel):
if self.fifo_queue and not self.name.endswith('.fifo'):
raise MessageAttributesInvalid('Queue name must end in .fifo for FIFO queues')
+ @property
+ def pending_messages(self):
+ return self._pending_messages
+
+ @property
+ def pending_message_groups(self):
+ return set(message.group_id
+ for message in self._pending_messages
+ if message.group_id is not None)
+
def _set_attributes(self, attributes, now=None):
if not now:
now = unix_time()
@@ -234,11 +246,17 @@ class Queue(BaseModel):
self.last_modified_timestamp = now
- def _setup_dlq(self, policy_json):
- try:
- self.redrive_policy = json.loads(policy_json)
- except ValueError:
- raise RESTError('InvalidParameterValue', 'Redrive policy does not contain valid json')
+ def _setup_dlq(self, policy):
+
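+        # The redrive policy arrives as a JSON string via the SQS API but as a dict from CloudFormation, so accept both.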
+ if isinstance(policy, six.text_type):
+ try:
+ self.redrive_policy = json.loads(policy)
+ except ValueError:
+ raise RESTError('InvalidParameterValue', 'Redrive policy is not a dict or valid json')
+ elif isinstance(policy, dict):
+ self.redrive_policy = policy
+ else:
+ raise RESTError('InvalidParameterValue', 'Redrive policy is not a dict or valid json')
if 'deadLetterTargetArn' not in self.redrive_policy:
raise RESTError('InvalidParameterValue', 'Redrive policy does not contain deadLetterTargetArn')
@@ -366,7 +384,12 @@ class SQSBackend(BaseBackend):
def create_queue(self, name, **kwargs):
queue = self.queues.get(name)
- if queue is None:
+ if queue:
+            # Queue already exists. If the requested attributes don't match the existing ones, raise an error.
+ for key, value in kwargs.items():
+ if getattr(queue, camelcase_to_underscores(key)) != value:
+ raise QueueAlreadyExists("The specified queue already exists.")
+ else:
try:
kwargs.pop('region')
except KeyError:
@@ -448,6 +471,7 @@ class SQSBackend(BaseBackend):
"""
queue = self.get_queue(queue_name)
result = []
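+        # Track how many messages each polling pass adds so passes that make no progress can be detected.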
+ previous_result_count = len(result)
polling_end = unix_time() + wait_seconds_timeout
@@ -457,19 +481,25 @@ class SQSBackend(BaseBackend):
if result or (wait_seconds_timeout and unix_time() > polling_end):
break
- if len(queue.messages) == 0:
- # we want to break here, otherwise it will be an infinite loop
- if wait_seconds_timeout == 0:
- break
-
- import time
- time.sleep(0.001)
- continue
-
messages_to_dlq = []
+
for message in queue.messages:
if not message.visible:
continue
+
+ if message in queue.pending_messages:
+ # The message is pending but is visible again, so the
+ # consumer must have timed out.
+ queue.pending_messages.remove(message)
+
+ if message.group_id and queue.fifo_queue:
+ if message.group_id in queue.pending_message_groups:
+ # There is already one active message with the same
+ # group, so we cannot deliver this one.
+ continue
+
+ queue.pending_messages.add(message)
+
if queue.dead_letter_queue is not None and message.approximate_receive_count >= queue.redrive_policy['maxReceiveCount']:
messages_to_dlq.append(message)
continue
@@ -485,6 +515,18 @@ class SQSBackend(BaseBackend):
queue._messages.remove(message)
queue.dead_letter_queue.add_message(message)
+ if previous_result_count == len(result):
+ if wait_seconds_timeout == 0:
+                    # We are not long polling and this pass added no new messages,
+                    # so break to avoid an infinite loop.
+ break
+
+ import time
+ time.sleep(0.001)
+ continue
+
+ previous_result_count = len(result)
+
return result
def delete_message(self, queue_name, receipt_handle):
@@ -494,6 +536,7 @@ class SQSBackend(BaseBackend):
# Only delete message if it is not visible and the reciept_handle
# matches.
if message.receipt_handle == receipt_handle:
+ queue.pending_messages.remove(message)
continue
new_messages.append(message)
queue._messages = new_messages
@@ -505,6 +548,10 @@ class SQSBackend(BaseBackend):
if message.visible:
raise MessageNotInflight
message.change_visibility(visibility_timeout)
+ if message.visible:
+ # If the message is visible again, remove it from pending
+ # messages.
+ queue.pending_messages.remove(message)
return
raise ReceiptHandleIsInvalid
diff --git a/setup.py b/setup.py
index ebbf6f0cd..62f9026d7 100755
--- a/setup.py
+++ b/setup.py
@@ -19,11 +19,12 @@ install_requires = [
"pyaml",
"pytz",
"python-dateutil<3.0.0,>=2.1",
+ "python-jose<3.0.0",
"mock",
"docker>=2.5.1",
"jsondiff==1.1.1",
"aws-xray-sdk<0.96,>=0.93",
- "responses",
+ "responses>=0.9.0",
]
extras_require = {
diff --git a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py
index 1dbf80fb5..9bfae6174 100644
--- a/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py
+++ b/tests/test_cloudformation/test_cloudformation_stack_crud_boto3.py
@@ -148,10 +148,41 @@ dummy_import_template = {
}
}
+dummy_redrive_template = {
+ "AWSTemplateFormatVersion": "2010-09-09",
+ "Resources": {
+ "MainQueue": {
+ "Type": "AWS::SQS::Queue",
+ "Properties": {
+ "QueueName": "mainqueue.fifo",
+ "FifoQueue": True,
+ "ContentBasedDeduplication": False,
+ "RedrivePolicy": {
+ "deadLetterTargetArn": {
+ "Fn::GetAtt": [
+ "DeadLetterQueue",
+ "Arn"
+ ]
+ },
+ "maxReceiveCount": 5
+ }
+ }
+ },
+ "DeadLetterQueue": {
+ "Type": "AWS::SQS::Queue",
+ "Properties": {
+ "FifoQueue": True
+ }
+ },
+ }
+}
+
dummy_template_json = json.dumps(dummy_template)
dummy_update_template_json = json.dumps(dummy_update_template)
dummy_output_template_json = json.dumps(dummy_output_template)
dummy_import_template_json = json.dumps(dummy_import_template)
+dummy_redrive_template_json = json.dumps(dummy_redrive_template)
+
@mock_cloudformation
@@ -746,3 +777,19 @@ def test_stack_with_imports():
output = output_stack.outputs[0]['OutputValue']
queue = ec2_resource.get_queue_by_name(QueueName=output)
queue.should_not.be.none
+
+
+@mock_sqs
+@mock_cloudformation
+def test_non_json_redrive_policy():
+ cf = boto3.resource('cloudformation', region_name='us-east-1')
+
+ stack = cf.create_stack(
+ StackName="test_stack1",
+ TemplateBody=dummy_redrive_template_json
+ )
+
+ stack.Resource('MainQueue').resource_status\
+ .should.equal("CREATE_COMPLETE")
+ stack.Resource('DeadLetterQueue').resource_status\
+ .should.equal("CREATE_COMPLETE")
diff --git a/tests/test_cloudformation/test_import_value.py b/tests/test_cloudformation/test_import_value.py
new file mode 100644
index 000000000..04c2b5801
--- /dev/null
+++ b/tests/test_cloudformation/test_import_value.py
@@ -0,0 +1,87 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+# Standard library modules
+import unittest
+
+# Third-party modules
+import boto3
+from botocore.exceptions import ClientError
+
+# Package modules
+from moto import mock_cloudformation
+
+AWS_REGION = 'us-west-1'
+
+SG_STACK_NAME = 'simple-sg-stack'
+SG_TEMPLATE = """
+AWSTemplateFormatVersion: 2010-09-09
+Description: Simple test CF template for moto_cloudformation
+
+
+Resources:
+ SimpleSecurityGroup:
+ Type: AWS::EC2::SecurityGroup
+ Description: "A simple security group"
+ Properties:
+ GroupName: simple-security-group
+ GroupDescription: "A simple security group"
+ SecurityGroupEgress:
+ -
+ Description: "Egress to remote HTTPS servers"
+ CidrIp: 0.0.0.0/0
+ IpProtocol: tcp
+ FromPort: 443
+ ToPort: 443
+
+Outputs:
+ SimpleSecurityGroupName:
+ Value: !GetAtt SimpleSecurityGroup.GroupId
+ Export:
+ Name: "SimpleSecurityGroup"
+
+"""
+
+EC2_STACK_NAME = 'simple-ec2-stack'
+EC2_TEMPLATE = """
+---
+# The latest template format version is "2010-09-09" and as of 2018-04-09
+# is currently the only valid value.
+AWSTemplateFormatVersion: 2010-09-09
+Description: Simple test CF template for moto_cloudformation
+
+
+Resources:
+ SimpleInstance:
+ Type: AWS::EC2::Instance
+ Properties:
+ ImageId: ami-03cf127a
+ InstanceType: t2.micro
+ SecurityGroups: !Split [',', !ImportValue SimpleSecurityGroup]
+"""
+
+
+class TestSimpleInstance(unittest.TestCase):
+ def test_simple_instance(self):
+ """Test that we can create a simple CloudFormation stack that imports values from an existing CloudFormation stack"""
+ with mock_cloudformation():
+ client = boto3.client('cloudformation', region_name=AWS_REGION)
+ client.create_stack(StackName=SG_STACK_NAME, TemplateBody=SG_TEMPLATE)
+ response = client.create_stack(StackName=EC2_STACK_NAME, TemplateBody=EC2_TEMPLATE)
+ self.assertIn('StackId', response)
+ response = client.describe_stacks(StackName=response['StackId'])
+ self.assertIn('Stacks', response)
+ stack_info = response['Stacks']
+ self.assertEqual(1, len(stack_info))
+ self.assertIn('StackName', stack_info[0])
+ self.assertEqual(EC2_STACK_NAME, stack_info[0]['StackName'])
+
+ def test_simple_instance_missing_export(self):
+        """Test that we get an exception if a CloudFormation stack tries to import a non-existent export value"""
+ with mock_cloudformation():
+ client = boto3.client('cloudformation', region_name=AWS_REGION)
+ with self.assertRaises(ClientError) as e:
+ client.create_stack(StackName=EC2_STACK_NAME, TemplateBody=EC2_TEMPLATE)
+ self.assertIn('Error', e.exception.response)
+ self.assertIn('Code', e.exception.response['Error'])
+ self.assertEqual('ExportNotFound', e.exception.response['Error']['Code'])
diff --git a/tests/test_cloudwatch/test_cloudwatch_boto3.py b/tests/test_cloudwatch/test_cloudwatch_boto3.py
old mode 100644
new mode 100755
index 5fbf75749..40b5eee08
--- a/tests/test_cloudwatch/test_cloudwatch_boto3.py
+++ b/tests/test_cloudwatch/test_cloudwatch_boto3.py
@@ -162,6 +162,37 @@ def test_put_metric_data_no_dimensions():
metric['MetricName'].should.equal('metric')
+
+@mock_cloudwatch
+def test_put_metric_data_with_statistics():
+ conn = boto3.client('cloudwatch', region_name='us-east-1')
+
+ conn.put_metric_data(
+ Namespace='tester',
+ MetricData=[
+ dict(
+ MetricName='statmetric',
+ Timestamp=datetime(2015, 1, 1),
+ # no Value to test https://github.com/spulec/moto/issues/1615
+ StatisticValues=dict(
+ SampleCount=123.0,
+ Sum=123.0,
+ Minimum=123.0,
+ Maximum=123.0
+ ),
+ Unit='Milliseconds',
+ StorageResolution=123
+ )
+ ]
+ )
+
+ metrics = conn.list_metrics()['Metrics']
+ metrics.should.have.length_of(1)
+ metric = metrics[0]
+ metric['Namespace'].should.equal('tester')
+ metric['MetricName'].should.equal('statmetric')
+ # TODO: test statistics - https://github.com/spulec/moto/issues/1615
+
@mock_cloudwatch
def test_get_metric_statistics():
conn = boto3.client('cloudwatch', region_name='us-east-1')
@@ -173,6 +204,7 @@ def test_get_metric_statistics():
dict(
MetricName='metric',
Value=1.5,
+ Timestamp=utc_now
)
]
)
@@ -180,7 +212,7 @@ def test_get_metric_statistics():
stats = conn.get_metric_statistics(
Namespace='tester',
MetricName='metric',
- StartTime=utc_now,
+ StartTime=utc_now - timedelta(seconds=60),
EndTime=utc_now + timedelta(seconds=60),
Period=60,
Statistics=['SampleCount', 'Sum']
diff --git a/tests/test_cognitoidp/test_cognitoidp.py b/tests/test_cognitoidp/test_cognitoidp.py
new file mode 100644
index 000000000..b2bd469ce
--- /dev/null
+++ b/tests/test_cognitoidp/test_cognitoidp.py
@@ -0,0 +1,539 @@
+from __future__ import unicode_literals
+
+import boto3
+import json
+import os
+import uuid
+
+from jose import jws
+from moto import mock_cognitoidp
+import sure # noqa
+
+
+@mock_cognitoidp
+def test_create_user_pool():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ name = str(uuid.uuid4())
+ value = str(uuid.uuid4())
+ result = conn.create_user_pool(
+ PoolName=name,
+ LambdaConfig={
+ "PreSignUp": value
+ }
+ )
+
+ result["UserPool"]["Id"].should_not.be.none
+ result["UserPool"]["Name"].should.equal(name)
+ result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value)
+
+
+@mock_cognitoidp
+def test_list_user_pools():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ name = str(uuid.uuid4())
+ conn.create_user_pool(PoolName=name)
+ result = conn.list_user_pools(MaxResults=10)
+ result["UserPools"].should.have.length_of(1)
+ result["UserPools"][0]["Name"].should.equal(name)
+
+
+@mock_cognitoidp
+def test_describe_user_pool():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ name = str(uuid.uuid4())
+ value = str(uuid.uuid4())
+ user_pool_details = conn.create_user_pool(
+ PoolName=name,
+ LambdaConfig={
+ "PreSignUp": value
+ }
+ )
+
+ result = conn.describe_user_pool(UserPoolId=user_pool_details["UserPool"]["Id"])
+ result["UserPool"]["Name"].should.equal(name)
+ result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value)
+
+
+@mock_cognitoidp
+def test_delete_user_pool():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ conn.list_user_pools(MaxResults=10)["UserPools"].should.have.length_of(1)
+ conn.delete_user_pool(UserPoolId=user_pool_id)
+ conn.list_user_pools(MaxResults=10)["UserPools"].should.have.length_of(0)
+
+
+@mock_cognitoidp
+def test_create_user_pool_domain():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ domain = str(uuid.uuid4())
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ result = conn.create_user_pool_domain(UserPoolId=user_pool_id, Domain=domain)
+ result["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
+
+
+@mock_cognitoidp
+def test_describe_user_pool_domain():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ domain = str(uuid.uuid4())
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ conn.create_user_pool_domain(UserPoolId=user_pool_id, Domain=domain)
+ result = conn.describe_user_pool_domain(Domain=domain)
+ result["DomainDescription"]["Domain"].should.equal(domain)
+ result["DomainDescription"]["UserPoolId"].should.equal(user_pool_id)
+ result["DomainDescription"]["AWSAccountId"].should_not.be.none
+
+
+@mock_cognitoidp
+def test_delete_user_pool_domain():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ domain = str(uuid.uuid4())
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ conn.create_user_pool_domain(UserPoolId=user_pool_id, Domain=domain)
+ result = conn.delete_user_pool_domain(UserPoolId=user_pool_id, Domain=domain)
+ result["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
+ result = conn.describe_user_pool_domain(Domain=domain)
+ # This is a surprising behavior of the real service: describing a missing domain comes
+ # back with status 200 and a DomainDescription of {}
+ result["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
+ result["DomainDescription"].keys().should.have.length_of(0)
+
+
+@mock_cognitoidp
+def test_create_user_pool_client():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ client_name = str(uuid.uuid4())
+ value = str(uuid.uuid4())
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ result = conn.create_user_pool_client(
+ UserPoolId=user_pool_id,
+ ClientName=client_name,
+ CallbackURLs=[value],
+ )
+
+ result["UserPoolClient"]["UserPoolId"].should.equal(user_pool_id)
+ result["UserPoolClient"]["ClientId"].should_not.be.none
+ result["UserPoolClient"]["ClientName"].should.equal(client_name)
+ result["UserPoolClient"]["CallbackURLs"].should.have.length_of(1)
+ result["UserPoolClient"]["CallbackURLs"][0].should.equal(value)
+
+
+@mock_cognitoidp
+def test_list_user_pool_clients():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ client_name = str(uuid.uuid4())
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ conn.create_user_pool_client(UserPoolId=user_pool_id, ClientName=client_name)
+ result = conn.list_user_pool_clients(UserPoolId=user_pool_id, MaxResults=10)
+ result["UserPoolClients"].should.have.length_of(1)
+ result["UserPoolClients"][0]["ClientName"].should.equal(client_name)
+
+
+@mock_cognitoidp
+def test_describe_user_pool_client():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ client_name = str(uuid.uuid4())
+ value = str(uuid.uuid4())
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ client_details = conn.create_user_pool_client(
+ UserPoolId=user_pool_id,
+ ClientName=client_name,
+ CallbackURLs=[value],
+ )
+
+ result = conn.describe_user_pool_client(
+ UserPoolId=user_pool_id,
+ ClientId=client_details["UserPoolClient"]["ClientId"],
+ )
+
+ result["UserPoolClient"]["ClientName"].should.equal(client_name)
+ result["UserPoolClient"]["CallbackURLs"].should.have.length_of(1)
+ result["UserPoolClient"]["CallbackURLs"][0].should.equal(value)
+
+
+@mock_cognitoidp
+def test_update_user_pool_client():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ old_client_name = str(uuid.uuid4())
+ new_client_name = str(uuid.uuid4())
+ old_value = str(uuid.uuid4())
+ new_value = str(uuid.uuid4())
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ client_details = conn.create_user_pool_client(
+ UserPoolId=user_pool_id,
+ ClientName=old_client_name,
+ CallbackURLs=[old_value],
+ )
+
+ result = conn.update_user_pool_client(
+ UserPoolId=user_pool_id,
+ ClientId=client_details["UserPoolClient"]["ClientId"],
+ ClientName=new_client_name,
+ CallbackURLs=[new_value],
+ )
+
+ result["UserPoolClient"]["ClientName"].should.equal(new_client_name)
+ result["UserPoolClient"]["CallbackURLs"].should.have.length_of(1)
+ result["UserPoolClient"]["CallbackURLs"][0].should.equal(new_value)
+
+
+@mock_cognitoidp
+def test_delete_user_pool_client():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ client_details = conn.create_user_pool_client(
+ UserPoolId=user_pool_id,
+ ClientName=str(uuid.uuid4()),
+ )
+
+ conn.delete_user_pool_client(
+ UserPoolId=user_pool_id,
+ ClientId=client_details["UserPoolClient"]["ClientId"],
+ )
+
+ caught = False
+ try:
+ conn.describe_user_pool_client(
+ UserPoolId=user_pool_id,
+ ClientId=client_details["UserPoolClient"]["ClientId"],
+ )
+ except conn.exceptions.ResourceNotFoundException:
+ caught = True
+
+ caught.should.be.true
+
+
+@mock_cognitoidp
+def test_create_identity_provider():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ provider_name = str(uuid.uuid4())
+ provider_type = "Facebook"
+ value = str(uuid.uuid4())
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ result = conn.create_identity_provider(
+ UserPoolId=user_pool_id,
+ ProviderName=provider_name,
+ ProviderType=provider_type,
+ ProviderDetails={
+ "thing": value
+ },
+ )
+
+ result["IdentityProvider"]["UserPoolId"].should.equal(user_pool_id)
+ result["IdentityProvider"]["ProviderName"].should.equal(provider_name)
+ result["IdentityProvider"]["ProviderType"].should.equal(provider_type)
+ result["IdentityProvider"]["ProviderDetails"]["thing"].should.equal(value)
+
+
+@mock_cognitoidp
+def test_list_identity_providers():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ provider_name = str(uuid.uuid4())
+ provider_type = "Facebook"
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ conn.create_identity_provider(
+ UserPoolId=user_pool_id,
+ ProviderName=provider_name,
+ ProviderType=provider_type,
+ ProviderDetails={},
+ )
+
+ result = conn.list_identity_providers(
+ UserPoolId=user_pool_id,
+ MaxResults=10,
+ )
+
+ result["Providers"].should.have.length_of(1)
+ result["Providers"][0]["ProviderName"].should.equal(provider_name)
+ result["Providers"][0]["ProviderType"].should.equal(provider_type)
+
+
+@mock_cognitoidp
+def test_describe_identity_providers():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ provider_name = str(uuid.uuid4())
+ provider_type = "Facebook"
+ value = str(uuid.uuid4())
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ conn.create_identity_provider(
+ UserPoolId=user_pool_id,
+ ProviderName=provider_name,
+ ProviderType=provider_type,
+ ProviderDetails={
+ "thing": value
+ },
+ )
+
+ result = conn.describe_identity_provider(
+ UserPoolId=user_pool_id,
+ ProviderName=provider_name,
+ )
+
+ result["IdentityProvider"]["UserPoolId"].should.equal(user_pool_id)
+ result["IdentityProvider"]["ProviderName"].should.equal(provider_name)
+ result["IdentityProvider"]["ProviderType"].should.equal(provider_type)
+ result["IdentityProvider"]["ProviderDetails"]["thing"].should.equal(value)
+
+
+@mock_cognitoidp
+def test_delete_identity_providers():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ provider_name = str(uuid.uuid4())
+ provider_type = "Facebook"
+ value = str(uuid.uuid4())
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ conn.create_identity_provider(
+ UserPoolId=user_pool_id,
+ ProviderName=provider_name,
+ ProviderType=provider_type,
+ ProviderDetails={
+ "thing": value
+ },
+ )
+
+ conn.delete_identity_provider(UserPoolId=user_pool_id, ProviderName=provider_name)
+
+ caught = False
+ try:
+ conn.describe_identity_provider(
+ UserPoolId=user_pool_id,
+ ProviderName=provider_name,
+ )
+ except conn.exceptions.ResourceNotFoundException:
+ caught = True
+
+ caught.should.be.true
+
+
+@mock_cognitoidp
+def test_admin_create_user():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ username = str(uuid.uuid4())
+ value = str(uuid.uuid4())
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ result = conn.admin_create_user(
+ UserPoolId=user_pool_id,
+ Username=username,
+ UserAttributes=[
+ {"Name": "thing", "Value": value}
+ ],
+ )
+
+ result["User"]["Username"].should.equal(username)
+ result["User"]["UserStatus"].should.equal("FORCE_CHANGE_PASSWORD")
+ result["User"]["Attributes"].should.have.length_of(1)
+ result["User"]["Attributes"][0]["Name"].should.equal("thing")
+ result["User"]["Attributes"][0]["Value"].should.equal(value)
+
+
+@mock_cognitoidp
+def test_admin_get_user():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ username = str(uuid.uuid4())
+ value = str(uuid.uuid4())
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ conn.admin_create_user(
+ UserPoolId=user_pool_id,
+ Username=username,
+ UserAttributes=[
+ {"Name": "thing", "Value": value}
+ ],
+ )
+
+ result = conn.admin_get_user(UserPoolId=user_pool_id, Username=username)
+ result["Username"].should.equal(username)
+ result["UserAttributes"].should.have.length_of(1)
+ result["UserAttributes"][0]["Name"].should.equal("thing")
+ result["UserAttributes"][0]["Value"].should.equal(value)
+
+
+@mock_cognitoidp
+def test_list_users():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ username = str(uuid.uuid4())
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
+ result = conn.list_users(UserPoolId=user_pool_id)
+ result["Users"].should.have.length_of(1)
+ result["Users"][0]["Username"].should.equal(username)
+
+
+@mock_cognitoidp
+def test_admin_delete_user():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ username = str(uuid.uuid4())
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
+ conn.admin_delete_user(UserPoolId=user_pool_id, Username=username)
+
+ caught = False
+ try:
+ conn.admin_get_user(UserPoolId=user_pool_id, Username=username)
+ except conn.exceptions.ResourceNotFoundException:
+ caught = True
+
+ caught.should.be.true
+
+
+def authentication_flow(conn):
+ username = str(uuid.uuid4())
+ temporary_password = str(uuid.uuid4())
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ client_id = conn.create_user_pool_client(
+ UserPoolId=user_pool_id,
+ ClientName=str(uuid.uuid4()),
+ )["UserPoolClient"]["ClientId"]
+
+ conn.admin_create_user(
+ UserPoolId=user_pool_id,
+ Username=username,
+ TemporaryPassword=temporary_password,
+ )
+
+ result = conn.admin_initiate_auth(
+ UserPoolId=user_pool_id,
+ ClientId=client_id,
+ AuthFlow="ADMIN_NO_SRP_AUTH",
+ AuthParameters={
+ "USERNAME": username,
+ "PASSWORD": temporary_password
+ },
+ )
+
+ # A newly created user is forced to set a new password
+ result["ChallengeName"].should.equal("NEW_PASSWORD_REQUIRED")
+ result["Session"].should_not.be.none
+
+ # This sets a new password and logs the user in (creates tokens)
+ new_password = str(uuid.uuid4())
+ result = conn.respond_to_auth_challenge(
+ Session=result["Session"],
+ ClientId=client_id,
+ ChallengeName="NEW_PASSWORD_REQUIRED",
+ ChallengeResponses={
+ "USERNAME": username,
+ "NEW_PASSWORD": new_password
+ }
+ )
+
+ result["AuthenticationResult"]["IdToken"].should_not.be.none
+ result["AuthenticationResult"]["AccessToken"].should_not.be.none
+
+ return {
+ "user_pool_id": user_pool_id,
+ "client_id": client_id,
+ "id_token": result["AuthenticationResult"]["IdToken"],
+ "access_token": result["AuthenticationResult"]["AccessToken"],
+ "username": username,
+ "password": new_password,
+ }
+
+
+@mock_cognitoidp
+def test_authentication_flow():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ authentication_flow(conn)
+
+
+@mock_cognitoidp
+def test_token_legitimacy():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ path = "../../moto/cognitoidp/resources/jwks-public.json"
+ with open(os.path.join(os.path.dirname(__file__), path)) as f:
+ json_web_key = json.loads(f.read())["keys"][0]
+
+ outputs = authentication_flow(conn)
+ id_token = outputs["id_token"]
+ access_token = outputs["access_token"]
+ client_id = outputs["client_id"]
+ issuer = "https://cognito-idp.us-west-2.amazonaws.com/{}".format(outputs["user_pool_id"])
+ id_claims = json.loads(jws.verify(id_token, json_web_key, "RS256"))
+ id_claims["iss"].should.equal(issuer)
+ id_claims["aud"].should.equal(client_id)
+ access_claims = json.loads(jws.verify(access_token, json_web_key, "RS256"))
+ access_claims["iss"].should.equal(issuer)
+ access_claims["aud"].should.equal(client_id)
+
+
+@mock_cognitoidp
+def test_change_password():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ outputs = authentication_flow(conn)
+
+ # Take this opportunity to test change_password, which requires an access token.
+ newer_password = str(uuid.uuid4())
+ conn.change_password(
+ AccessToken=outputs["access_token"],
+ PreviousPassword=outputs["password"],
+ ProposedPassword=newer_password,
+ )
+
+ # Log in again, which should succeed without a challenge because the user is no
+ # longer in the force-new-password state.
+ result = conn.admin_initiate_auth(
+ UserPoolId=outputs["user_pool_id"],
+ ClientId=outputs["client_id"],
+ AuthFlow="ADMIN_NO_SRP_AUTH",
+ AuthParameters={
+ "USERNAME": outputs["username"],
+ "PASSWORD": newer_password,
+ },
+ )
+
+ result["AuthenticationResult"].should_not.be.none
+
+
+@mock_cognitoidp
+def test_forgot_password():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ result = conn.forgot_password(ClientId=str(uuid.uuid4()), Username=str(uuid.uuid4()))
+ result["CodeDeliveryDetails"].should_not.be.none
+
+
+@mock_cognitoidp
+def test_confirm_forgot_password():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ username = str(uuid.uuid4())
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+ client_id = conn.create_user_pool_client(
+ UserPoolId=user_pool_id,
+ ClientName=str(uuid.uuid4()),
+ )["UserPoolClient"]["ClientId"]
+
+ conn.admin_create_user(
+ UserPoolId=user_pool_id,
+ Username=username,
+ TemporaryPassword=str(uuid.uuid4()),
+ )
+
+ conn.confirm_forgot_password(
+ ClientId=client_id,
+ Username=username,
+ ConfirmationCode=str(uuid.uuid4()),
+ Password=str(uuid.uuid4()),
+ )
diff --git a/tests/test_dynamodb2/test_dynamodb.py b/tests/test_dynamodb2/test_dynamodb.py
index 20ff80167..93188001f 100644
--- a/tests/test_dynamodb2/test_dynamodb.py
+++ b/tests/test_dynamodb2/test_dynamodb.py
@@ -6,6 +6,7 @@ import boto3
from boto3.dynamodb.conditions import Attr
import sure # noqa
import requests
+from pytest import raises
from moto import mock_dynamodb2, mock_dynamodb2_deprecated
from moto.dynamodb2 import dynamodb_backend2
from boto.exception import JSONResponseError
@@ -1052,6 +1053,7 @@ def test_query_missing_expr_names():
@mock_dynamodb2
def test_update_item_on_map():
dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
+ client = boto3.client('dynamodb', region_name='us-east-1')
# Create the DynamoDB table.
dynamodb.create_table(
@@ -1092,21 +1094,44 @@ def test_update_item_on_map():
resp = table.scan()
resp['Items'][0]['body'].should.equal({'nested': {'data': 'test'}})
+ # Nonexistent nested attributes are supported for existing top-level attributes.
table.update_item(Key={
'forum_name': 'the-key',
'subject': '123'
},
- UpdateExpression='SET body.#nested.#data = :tb',
+ UpdateExpression='SET body.#nested.#data = :tb, body.nested.#nonexistentnested.#data = :tb2',
ExpressionAttributeNames={
'#nested': 'nested',
+ '#nonexistentnested': 'nonexistentnested',
'#data': 'data'
},
ExpressionAttributeValues={
- ':tb': 'new_value'
+ ':tb': 'new_value',
+ ':tb2': 'other_value'
})
resp = table.scan()
- resp['Items'][0]['body'].should.equal({'nested': {'data': 'new_value'}})
+ resp['Items'][0]['body'].should.equal({
+ 'nested': {
+ 'data': 'new_value',
+ 'nonexistentnested': {'data': 'other_value'}
+ }
+ })
+
+    # Setting a nested value on a nonexistent top-level attribute should fail.
+ with raises(client.exceptions.ConditionalCheckFailedException):
+ table.update_item(Key={
+ 'forum_name': 'the-key',
+ 'subject': '123'
+ },
+ UpdateExpression='SET nonexistent.#nested = :tb',
+ ExpressionAttributeNames={
+ '#nested': 'nested'
+ },
+ ExpressionAttributeValues={
+ ':tb': 'new_value'
+ })
+
# https://github.com/spulec/moto/issues/1358
diff --git a/tests/test_ec2/test_elastic_block_store.py b/tests/test_ec2/test_elastic_block_store.py
index 32ce1be22..8930838c6 100644
--- a/tests/test_ec2/test_elastic_block_store.py
+++ b/tests/test_ec2/test_elastic_block_store.py
@@ -8,6 +8,7 @@ import boto
import boto3
from botocore.exceptions import ClientError
from boto.exception import EC2ResponseError
+from freezegun import freeze_time
import sure # noqa
from moto import mock_ec2_deprecated, mock_ec2
@@ -588,6 +589,7 @@ def test_volume_tag_escaping():
dict(snaps[0].tags).should.equal({'key': ''})
+@freeze_time("2015-01-01")
@mock_ec2
def test_copy_snapshot():
ec2_client = boto3.client('ec2', region_name='eu-west-1')
diff --git a/tests/test_ec2/test_elastic_ip_addresses.py b/tests/test_ec2/test_elastic_ip_addresses.py
index 709bdc33b..ca6637b18 100644
--- a/tests/test_ec2/test_elastic_ip_addresses.py
+++ b/tests/test_ec2/test_elastic_ip_addresses.py
@@ -62,6 +62,17 @@ def test_eip_allocate_vpc():
logging.debug("vpc alloc_id:".format(vpc.allocation_id))
vpc.release()
+@mock_ec2
+def test_specific_eip_allocate_vpc():
+ """Allocate VPC EIP with specific address"""
+ service = boto3.resource('ec2', region_name='us-west-1')
+ client = boto3.client('ec2', region_name='us-west-1')
+
+ vpc = client.allocate_address(Domain="vpc", Address="127.38.43.222")
+ vpc['Domain'].should.be.equal("vpc")
+ vpc['PublicIp'].should.be.equal("127.38.43.222")
+    logging.debug("vpc alloc_id: {}".format(vpc['AllocationId']))
+
@mock_ec2_deprecated
def test_eip_allocate_invalid_domain():
diff --git a/tests/test_events/test_events.py b/tests/test_events/test_events.py
index e839bde5b..80630c5b8 100644
--- a/tests/test_events/test_events.py
+++ b/tests/test_events/test_events.py
@@ -1,6 +1,7 @@
import random
import boto3
+import json
from moto.events import mock_events
from botocore.exceptions import ClientError
@@ -177,17 +178,19 @@ def test_remove_targets():
def test_permissions():
client = boto3.client('events', 'eu-central-1')
- client.put_permission(Action='PutEvents', Principal='111111111111', StatementId='Account1')
- client.put_permission(Action='PutEvents', Principal='222222222222', StatementId='Account2')
+ client.put_permission(Action='events:PutEvents', Principal='111111111111', StatementId='Account1')
+ client.put_permission(Action='events:PutEvents', Principal='222222222222', StatementId='Account2')
resp = client.describe_event_bus()
- assert len(resp['Policy']['Statement']) == 2
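+    # The event bus policy is returned as a JSON-encoded string.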
+ resp_policy = json.loads(resp['Policy'])
+ assert len(resp_policy['Statement']) == 2
client.remove_permission(StatementId='Account2')
resp = client.describe_event_bus()
- assert len(resp['Policy']['Statement']) == 1
- assert resp['Policy']['Statement'][0]['Sid'] == 'Account1'
+ resp_policy = json.loads(resp['Policy'])
+ assert len(resp_policy['Statement']) == 1
+ assert resp_policy['Statement'][0]['Sid'] == 'Account1'
@mock_events
diff --git a/tests/test_logs/test_logs.py b/tests/test_logs/test_logs.py
index a9a7f5260..3f924cc55 100644
--- a/tests/test_logs/test_logs.py
+++ b/tests/test_logs/test_logs.py
@@ -13,6 +13,10 @@ def test_log_group_create():
conn = boto3.client('logs', 'us-west-2')
log_group_name = 'dummy'
response = conn.create_log_group(logGroupName=log_group_name)
+
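+    # The newly created group should be returned when described by prefix.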
+ response = conn.describe_log_groups(logGroupNamePrefix=log_group_name)
+ assert len(response['logGroups']) == 1
+
response = conn.delete_log_group(logGroupName=log_group_name)
diff --git a/tests/test_rds2/test_rds2.py b/tests/test_rds2/test_rds2.py
index d056049b5..80dcd4f53 100644
--- a/tests/test_rds2/test_rds2.py
+++ b/tests/test_rds2/test_rds2.py
@@ -353,8 +353,6 @@ def test_describe_db_snapshots():
MasterUserPassword='hunter2',
Port=1234,
DBSecurityGroups=["my_sg"])
- conn.describe_db_snapshots.when.called_with(
- DBInstanceIdentifier="db-primary-1").should.throw(ClientError)
created = conn.create_db_snapshot(DBInstanceIdentifier='db-primary-1',
DBSnapshotIdentifier='snapshot-1').get('DBSnapshot')
@@ -369,6 +367,11 @@ def test_describe_db_snapshots():
snapshot.should.equal(created)
snapshot.get('Engine').should.equal('postgres')
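+    # A second snapshot for the same instance should be listed alongside the first.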
+ conn.create_db_snapshot(DBInstanceIdentifier='db-primary-1',
+ DBSnapshotIdentifier='snapshot-2')
+ snapshots = conn.describe_db_snapshots(DBInstanceIdentifier='db-primary-1').get('DBSnapshots')
+ snapshots.should.have.length_of(2)
+
@mock_rds2
def test_delete_db_snapshot():
diff --git a/tests/test_redshift/test_redshift.py b/tests/test_redshift/test_redshift.py
index 96e3ee5b3..6e027b86c 100644
--- a/tests/test_redshift/test_redshift.py
+++ b/tests/test_redshift/test_redshift.py
@@ -818,6 +818,48 @@ def test_create_cluster_from_snapshot():
new_cluster['Endpoint']['Port'].should.equal(1234)
+@mock_redshift
+def test_create_cluster_from_snapshot_with_waiter():
+ client = boto3.client('redshift', region_name='us-east-1')
+ original_cluster_identifier = 'original-cluster'
+ original_snapshot_identifier = 'original-snapshot'
+ new_cluster_identifier = 'new-cluster'
+
+ client.create_cluster(
+ ClusterIdentifier=original_cluster_identifier,
+ ClusterType='single-node',
+ NodeType='ds2.xlarge',
+ MasterUsername='username',
+ MasterUserPassword='password',
+ )
+ client.create_cluster_snapshot(
+ SnapshotIdentifier=original_snapshot_identifier,
+ ClusterIdentifier=original_cluster_identifier
+ )
+ response = client.restore_from_cluster_snapshot(
+ ClusterIdentifier=new_cluster_identifier,
+ SnapshotIdentifier=original_snapshot_identifier,
+ Port=1234
+ )
+ response['Cluster']['ClusterStatus'].should.equal('creating')
+
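+    # Wait for the restored cluster to become available before describing it.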
+ client.get_waiter('cluster_restored').wait(
+ ClusterIdentifier=new_cluster_identifier,
+ WaiterConfig={
+ 'Delay': 1,
+ 'MaxAttempts': 2,
+ }
+ )
+
+ response = client.describe_clusters(
+ ClusterIdentifier=new_cluster_identifier
+ )
+ new_cluster = response['Clusters'][0]
+ new_cluster['NodeType'].should.equal('ds2.xlarge')
+ new_cluster['MasterUsername'].should.equal('username')
+ new_cluster['Endpoint']['Port'].should.equal(1234)
+
+
@mock_redshift
def test_create_cluster_from_non_existent_snapshot():
client = boto3.client('redshift', region_name='us-east-1')
diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py
index 369426758..9f37791cb 100644
--- a/tests/test_s3/test_s3.py
+++ b/tests/test_s3/test_s3.py
@@ -1405,6 +1405,19 @@ def test_boto3_deleted_versionings_list():
assert len(listed['Contents']) == 1
+@mock_s3
+def test_boto3_delete_versioned_bucket():
+ client = boto3.client('s3', region_name='us-east-1')
+
+ client.create_bucket(Bucket='blah')
+ client.put_bucket_versioning(Bucket='blah', VersioningConfiguration={'Status': 'Enabled'})
+
+ resp = client.put_object(Bucket='blah', Key='test1', Body=b'test1')
+ client.delete_object(Bucket='blah', Key='test1', VersionId=resp["VersionId"])
+
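+    # Removing the only version leaves the bucket empty, so the delete should succeed.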
+ client.delete_bucket(Bucket='blah')
+
+
@mock_s3
def test_boto3_head_object_if_modified_since():
s3 = boto3.client('s3', region_name='us-east-1')
diff --git a/tests/test_ses/test_ses_boto3.py b/tests/test_ses/test_ses_boto3.py
index 5d39f61d4..e800b8035 100644
--- a/tests/test_ses/test_ses_boto3.py
+++ b/tests/test_ses/test_ses_boto3.py
@@ -136,3 +136,59 @@ def test_send_raw_email():
send_quota = conn.get_send_quota()
sent_count = int(send_quota['SentLast24Hours'])
sent_count.should.equal(2)
+
+
+@mock_ses
+def test_send_raw_email_without_source():
+ conn = boto3.client('ses', region_name='us-east-1')
+
+ message = MIMEMultipart()
+ message['Subject'] = 'Test'
+ message['From'] = 'test@example.com'
+ message['To'] = 'to@example.com, foo@example.com'
+
+ # Message body
+ part = MIMEText('test file attached')
+ message.attach(part)
+
+ # Attachment
+ part = MIMEText('contents of test file here')
+ part.add_header('Content-Disposition', 'attachment; filename=test.txt')
+ message.attach(part)
+
+ kwargs = dict(
+ RawMessage={'Data': message.as_string()},
+ )
+
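+    # Without a Source parameter the From header is used as the sender, which must be verified first.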
+ conn.send_raw_email.when.called_with(**kwargs).should.throw(ClientError)
+
+ conn.verify_email_identity(EmailAddress="test@example.com")
+ conn.send_raw_email(**kwargs)
+
+ send_quota = conn.get_send_quota()
+ sent_count = int(send_quota['SentLast24Hours'])
+ sent_count.should.equal(2)
+
+
+@mock_ses
+def test_send_raw_email_without_source_or_from():
+ conn = boto3.client('ses', region_name='us-east-1')
+
+ message = MIMEMultipart()
+ message['Subject'] = 'Test'
+ message['To'] = 'to@example.com, foo@example.com'
+
+ # Message body
+ part = MIMEText('test file attached')
+ message.attach(part)
+ # Attachment
+ part = MIMEText('contents of test file here')
+ part.add_header('Content-Disposition', 'attachment; filename=test.txt')
+ message.attach(part)
+
+ kwargs = dict(
+ RawMessage={'Data': message.as_string()},
+ )
+
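+    # With neither a Source parameter nor a From header, there is no sender to fall back to.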
+ conn.send_raw_email.when.called_with(**kwargs).should.throw(ClientError)
+
diff --git a/tests/test_sns/test_publishing_boto3.py b/tests/test_sns/test_publishing_boto3.py
index 7db072287..65d2f25cc 100644
--- a/tests/test_sns/test_publishing_boto3.py
+++ b/tests/test_sns/test_publishing_boto3.py
@@ -42,6 +42,29 @@ def test_publish_to_sqs():
acquired_message.should.equal(expected)
+@mock_sqs
+@mock_sns
+def test_publish_to_sqs_raw():
+ sns = boto3.resource('sns', region_name='us-east-1')
+ topic = sns.create_topic(Name='some-topic')
+
+ sqs = boto3.resource('sqs', region_name='us-east-1')
+ queue = sqs.create_queue(QueueName='test-queue')
+
+ subscription = topic.subscribe(
+ Protocol='sqs', Endpoint=queue.attributes['QueueArn'])
+
+ subscription.set_attributes(
+ AttributeName='RawMessageDelivery', AttributeValue='true')
+
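+    # With raw delivery enabled, the queue receives the message body itself rather than the SNS JSON envelope.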
+ message = 'my message'
+ with freeze_time("2015-01-01 12:00:00"):
+ topic.publish(Message=message)
+
+ messages = queue.receive_messages(MaxNumberOfMessages=1)
+ messages[0].body.should.equal(message)
+
+
@mock_sqs
@mock_sns
def test_publish_to_sqs_bad():
@@ -230,7 +253,7 @@ def test_publish_to_sqs_in_different_region():
@mock_sns
def test_publish_to_http():
def callback(request):
- request.headers["Content-Type"].should.equal("application/json")
+ request.headers["Content-Type"].should.equal("text/plain; charset=UTF-8")
json.loads.when.called_with(
request.body.decode()
).should_not.throw(Exception)
diff --git a/tests/test_sns/test_topics_boto3.py b/tests/test_sns/test_topics_boto3.py
index 95dd41f89..7d9a27b18 100644
--- a/tests/test_sns/test_topics_boto3.py
+++ b/tests/test_sns/test_topics_boto3.py
@@ -13,23 +13,24 @@ from moto.sns.models import DEFAULT_TOPIC_POLICY, DEFAULT_EFFECTIVE_DELIVERY_POL
@mock_sns
def test_create_and_delete_topic():
conn = boto3.client("sns", region_name="us-east-1")
- conn.create_topic(Name="some-topic")
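+    # Topic names may include hyphens and underscores and may be up to 256 characters long.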
+ for topic_name in ('some-topic', '-some-topic-', '_some-topic_', 'a' * 256):
+ conn.create_topic(Name=topic_name)
- topics_json = conn.list_topics()
- topics = topics_json["Topics"]
- topics.should.have.length_of(1)
- topics[0]['TopicArn'].should.equal(
- "arn:aws:sns:{0}:123456789012:some-topic"
- .format(conn._client_config.region_name)
- )
+ topics_json = conn.list_topics()
+ topics = topics_json["Topics"]
+ topics.should.have.length_of(1)
+ topics[0]['TopicArn'].should.equal(
+ "arn:aws:sns:{0}:123456789012:{1}"
+ .format(conn._client_config.region_name, topic_name)
+ )
- # Delete the topic
- conn.delete_topic(TopicArn=topics[0]['TopicArn'])
+ # Delete the topic
+ conn.delete_topic(TopicArn=topics[0]['TopicArn'])
- # And there should now be 0 topics
- topics_json = conn.list_topics()
- topics = topics_json["Topics"]
- topics.should.have.length_of(0)
+ # And there should now be 0 topics
+ topics_json = conn.list_topics()
+ topics = topics_json["Topics"]
+ topics.should.have.length_of(0)
@mock_sns
def test_create_topic_should_be_indempodent():
diff --git a/tests/test_sqs/test_sqs.py b/tests/test_sqs/test_sqs.py
index 1280fed80..d3e4ca917 100644
--- a/tests/test_sqs/test_sqs.py
+++ b/tests/test_sqs/test_sqs.py
@@ -40,6 +40,29 @@ def test_create_fifo_queue_fail():
raise RuntimeError('Should of raised InvalidParameterValue Exception')
+@mock_sqs
+def test_create_queue_with_different_attributes_fail():
+ sqs = boto3.client('sqs', region_name='us-east-1')
+
+ sqs.create_queue(
+ QueueName='test-queue',
+ Attributes={
+ 'VisibilityTimeout': '10',
+ }
+ )
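+    # Creating the same queue again with different attributes should be rejected.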
+ try:
+ sqs.create_queue(
+ QueueName='test-queue',
+ Attributes={
+ 'VisibilityTimeout': '60',
+ }
+ )
+ except botocore.exceptions.ClientError as err:
+ err.response['Error']['Code'].should.equal('QueueAlreadyExists')
+ else:
+        raise RuntimeError('Should have raised QueueAlreadyExists Exception')
+
+
@mock_sqs
def test_create_fifo_queue():
sqs = boto3.client('sqs', region_name='us-east-1')
@@ -1063,3 +1086,112 @@ def test_redrive_policy_set_attributes():
assert 'RedrivePolicy' in copy.attributes
copy_policy = json.loads(copy.attributes['RedrivePolicy'])
assert copy_policy == redrive_policy
+
+
+@mock_sqs
+def test_receive_messages_with_message_group_id():
+ sqs = boto3.resource('sqs', region_name='us-east-1')
+ queue = sqs.create_queue(QueueName="test-queue.fifo",
+ Attributes={
+ 'FifoQueue': 'true',
+ })
+ queue.set_attributes(Attributes={"VisibilityTimeout": "3600"})
+ queue.send_message(
+ MessageBody="message-1",
+ MessageGroupId="group"
+ )
+ queue.send_message(
+ MessageBody="message-2",
+ MessageGroupId="group"
+ )
+
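+    # FIFO queues deliver only one in-flight message per message group at a time.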
+ messages = queue.receive_messages()
+ messages.should.have.length_of(1)
+ message = messages[0]
+
+    # The received message has not been deleted, so its message group is still blocked.
+
+ messages = queue.receive_messages(WaitTimeSeconds=0)
+ messages.should.have.length_of(0)
+
+ # message is now processed, next one should be available
+ message.delete()
+ messages = queue.receive_messages()
+ messages.should.have.length_of(1)
+
+
+@mock_sqs
+def test_receive_messages_with_message_group_id_on_requeue():
+ sqs = boto3.resource('sqs', region_name='us-east-1')
+ queue = sqs.create_queue(QueueName="test-queue.fifo",
+ Attributes={
+ 'FifoQueue': 'true',
+ })
+ queue.set_attributes(Attributes={"VisibilityTimeout": "3600"})
+ queue.send_message(
+ MessageBody="message-1",
+ MessageGroupId="group"
+ )
+ queue.send_message(
+ MessageBody="message-2",
+ MessageGroupId="group"
+ )
+
+ messages = queue.receive_messages()
+ messages.should.have.length_of(1)
+ message = messages[0]
+
+    # The received message has not been deleted, so its message group is still blocked.
+
+ messages = queue.receive_messages(WaitTimeSeconds=0)
+ messages.should.have.length_of(0)
+
+    # After resetting the visibility timeout, the same message becomes available again.
+ message.change_visibility(VisibilityTimeout=0)
+ messages = queue.receive_messages()
+ messages.should.have.length_of(1)
+ messages[0].message_id.should.equal(message.message_id)
+
+
+@mock_sqs
+def test_receive_messages_with_message_group_id_on_visibility_timeout():
+ if os.environ.get('TEST_SERVER_MODE', 'false').lower() == 'true':
+ raise SkipTest('Cant manipulate time in server mode')
+
+ with freeze_time("2015-01-01 12:00:00"):
+ sqs = boto3.resource('sqs', region_name='us-east-1')
+ queue = sqs.create_queue(QueueName="test-queue.fifo",
+ Attributes={
+ 'FifoQueue': 'true',
+ })
+ queue.set_attributes(Attributes={"VisibilityTimeout": "3600"})
+ queue.send_message(
+ MessageBody="message-1",
+ MessageGroupId="group"
+ )
+ queue.send_message(
+ MessageBody="message-2",
+ MessageGroupId="group"
+ )
+
+ messages = queue.receive_messages()
+ messages.should.have.length_of(1)
+ message = messages[0]
+
+        # The received message has not been deleted, so its message group is still blocked.
+
+ messages = queue.receive_messages(WaitTimeSeconds=0)
+ messages.should.have.length_of(0)
+
+ message.change_visibility(VisibilityTimeout=10)
+
+ with freeze_time("2015-01-01 12:00:05"):
+ # no timeout yet
+ messages = queue.receive_messages(WaitTimeSeconds=0)
+ messages.should.have.length_of(0)
+
+ with freeze_time("2015-01-01 12:00:15"):
+        # The visibility timeout has expired, so the same message becomes available again.
+ messages = queue.receive_messages()
+ messages.should.have.length_of(1)
+ messages[0].message_id.should.equal(message.message_id)