Merge pull request #4 from spulec/master

Updating
Daniele Stefano Ferru 2018-09-28 15:07:20 +02:00 committed by GitHub
commit 9c8ecf6ee7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
55 changed files with 1987 additions and 160 deletions

View File

@ -8,6 +8,19 @@ python:
env:
- TEST_SERVER_MODE=false
- TEST_SERVER_MODE=true
# Due to incomplete Python 3.7 support on Travis CI (
# https://github.com/travis-ci/travis-ci/issues/9815),
# using a matrix is necessary
matrix:
include:
- python: 3.7
env: TEST_SERVER_MODE=false
dist: xenial
sudo: true
- python: 3.7
env: TEST_SERVER_MODE=true
dist: xenial
sudo: true
before_install:
- export BOTO_CONFIG=/dev/null
install:

View File

@ -53,3 +53,4 @@ Moto is written by Steve Pulec with contributions from:
* [Jim Shields](https://github.com/jimjshields)
* [William Richard](https://github.com/william-richard)
* [Alex Casalboni](https://github.com/alexcasalboni)
* [Jon Beilke](https://github.com/jrbeilke)

View File

@ -1,6 +1,17 @@
Moto Changelog
===================
1.3.6
-----
* Fix boto3 pinning.
1.3.5
-----
* Pin down botocore issue as temporary fix for #1793.
* More features on secrets manager
1.3.4
------

View File

@ -3645,18 +3645,18 @@
- [ ] put_attributes
- [ ] select
## secretsmanager - 20% implemented
## secretsmanager - 33% implemented
- [ ] cancel_rotate_secret
- [X] create_secret
- [ ] delete_secret
- [ ] describe_secret
- [X] describe_secret
- [X] get_random_password
- [X] get_secret_value
- [ ] list_secret_version_ids
- [ ] list_secrets
- [ ] put_secret_value
- [ ] restore_secret
- [ ] rotate_secret
- [X] rotate_secret
- [ ] tag_resource
- [ ] untag_resource
- [ ] update_secret

View File

@ -175,7 +175,7 @@ def test_add_servers():
```
#### Using moto 1.0.X with boto2
moto 1.0.X mock docorators are defined for boto3 and do not work with boto2. Use the @mock_AWSSVC_deprecated to work with boto2.
moto 1.0.X mock decorators are defined for boto3 and do not work with boto2. Use the @mock_AWSSVC_deprecated to work with boto2.
Using moto with boto2
```python
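# A minimal sketch (not the README's original snippet) of the deprecated
# decorator in use with boto2; the AMI id and credentials are placeholders.
import boto
from moto import mock_ec2_deprecated


@mock_ec2_deprecated
def test_boto2_instances():
    conn = boto.connect_ec2('the_key', 'the_secret')  # any credentials work
    conn.run_instances('ami-12345678')
    reservations = conn.get_all_reservations()
    assert len(reservations) == 1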

View File

@ -3,7 +3,7 @@ import logging
# logging.getLogger('boto').setLevel(logging.CRITICAL)
__title__ = 'moto'
__version__ = '1.3.4'
__version__ = '1.3.6'
from .acm import mock_acm # flake8: noqa
from .apigateway import mock_apigateway, mock_apigateway_deprecated # flake8: noqa

View File

@ -387,6 +387,7 @@ class ResourceMap(collections.Mapping):
"AWS::StackName": stack_name,
"AWS::URLSuffix": "amazonaws.com",
"AWS::NoValue": None,
"AWS::Partition": "aws",
}
def __getitem__(self, key):

View File

@ -89,6 +89,17 @@ class BaseMockAWS(object):
if inspect.ismethod(attr_value) and attr_value.__self__ is klass:
continue
# Check if this is a staticmethod. If so, skip patching
for cls in inspect.getmro(klass):
if attr_value.__name__ not in cls.__dict__:
continue
bound_attr_value = cls.__dict__[attr_value.__name__]
if not isinstance(bound_attr_value, staticmethod):
break
else:
# It is a staticmethod, skip patching
continue
try:
setattr(klass, attr, self(attr_value, reset=False))
except TypeError:
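The MRO walk added above is what lets the decorator skip staticmethods when patching a test class: attribute access unwraps the staticmethod, but the raw class `__dict__` still shows the wrapper. A standalone sketch of the same check, using plain example classes rather than moto's code:

```python
import inspect


def is_staticmethod(klass, name):
    # Walk the MRO and look in each raw class __dict__, where the
    # staticmethod wrapper is still visible.
    for cls in inspect.getmro(klass):
        if name in cls.__dict__:
            return isinstance(cls.__dict__[name], staticmethod)
    return False


class Base(object):
    @staticmethod
    def helper():
        return 42


class Child(Base):
    def method(self):
        return self.helper()


print(is_staticmethod(Child, 'helper'))  # True
print(is_staticmethod(Child, 'method'))  # False
```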

View File

@ -20,6 +20,17 @@ def has_empty_keys_or_values(_dict):
)
def get_empty_str_error():
er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
return (400,
{'server': 'amazon.com'},
dynamo_json_dump({'__type': er,
'message': ('One or more parameter values were '
'invalid: An AttributeValue may not '
'contain an empty string')}
))
class DynamoHandler(BaseResponse):
def get_endpoint_name(self, headers):
@ -174,14 +185,7 @@ class DynamoHandler(BaseResponse):
item = self.body['Item']
if has_empty_keys_or_values(item):
er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
return (400,
{'server': 'amazon.com'},
dynamo_json_dump({'__type': er,
'message': ('One or more parameter values were '
'invalid: An AttributeValue may not '
'contain an empty string')}
))
return get_empty_str_error()
overwrite = 'Expected' not in self.body
if not overwrite:
@ -523,6 +527,7 @@ class DynamoHandler(BaseResponse):
return dynamo_json_dump(item_dict)
def update_item(self):
name = self.body['TableName']
key = self.body['Key']
update_expression = self.body.get('UpdateExpression')
@ -533,6 +538,9 @@ class DynamoHandler(BaseResponse):
'ExpressionAttributeValues', {})
existing_item = self.dynamodb_backend.get_item(name, key)
if has_empty_keys_or_values(expression_attribute_values):
return get_empty_str_error()
if 'Expected' in self.body:
expected = self.body['Expected']
else:

View File

@ -13,6 +13,7 @@ from pkg_resources import resource_filename
import boto.ec2
from collections import defaultdict
import weakref
from datetime import datetime
from boto.ec2.instance import Instance as BotoInstance, Reservation
from boto.ec2.blockdevicemapping import BlockDeviceMapping, BlockDeviceType
@ -2115,10 +2116,20 @@ class VPC(TaggedEC2Resource):
class VPCBackend(object):
__refs__ = defaultdict(list)
def __init__(self):
self.vpcs = {}
self.__refs__[self.__class__].append(weakref.ref(self))
super(VPCBackend, self).__init__()
@classmethod
def get_instances(cls):
for inst_ref in cls.__refs__[cls]:
inst = inst_ref()
if inst is not None:
yield inst
def create_vpc(self, cidr_block, instance_tenancy='default', amazon_provided_ipv6_cidr_block=False):
vpc_id = random_vpc_id()
vpc = VPC(self, vpc_id, cidr_block, len(self.vpcs) == 0, instance_tenancy, amazon_provided_ipv6_cidr_block)
@ -2142,6 +2153,13 @@ class VPCBackend(object):
raise InvalidVPCIdError(vpc_id)
return self.vpcs.get(vpc_id)
# get vpc by vpc id and aws region
def get_cross_vpc(self, vpc_id, peer_region):
for vpcs in self.get_instances():
if vpcs.region_name == peer_region:
match_vpc = vpcs.get_vpc(vpc_id)
return match_vpc
def get_all_vpcs(self, vpc_ids=None, filters=None):
matches = self.vpcs.values()
if vpc_ids:
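The cross-region lookup in `get_cross_vpc` works because every `VPCBackend` registers a weak reference to itself in the class-level `__refs__` map. A minimal standalone sketch of that registry pattern (illustrative class, not moto's):

```python
import weakref
from collections import defaultdict


class Registry(object):
    __refs__ = defaultdict(list)

    def __init__(self, region_name):
        self.region_name = region_name
        # Weak references let the class enumerate live instances
        # without keeping them alive.
        self.__refs__[self.__class__].append(weakref.ref(self))

    @classmethod
    def get_instances(cls):
        for ref in cls.__refs__[cls]:
            inst = ref()
            if inst is not None:
                yield inst


usw1, apn1 = Registry('us-west-1'), Registry('ap-northeast-1')
print([r.region_name for r in Registry.get_instances()])
# ['us-west-1', 'ap-northeast-1']
```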

View File

@ -5,8 +5,12 @@ from moto.core.responses import BaseResponse
class VPCPeeringConnections(BaseResponse):
def create_vpc_peering_connection(self):
peer_region = self._get_param('PeerRegion')
if peer_region == self.region or peer_region is None:
peer_vpc = self.ec2_backend.get_vpc(self._get_param('PeerVpcId'))
else:
peer_vpc = self.ec2_backend.get_cross_vpc(self._get_param('PeerVpcId'), peer_region)
vpc = self.ec2_backend.get_vpc(self._get_param('VpcId'))
peer_vpc = self.ec2_backend.get_vpc(self._get_param('PeerVpcId'))
vpc_pcx = self.ec2_backend.create_vpc_peering_connection(vpc, peer_vpc)
template = self.response_template(
CREATE_VPC_PEERING_CONNECTION_RESPONSE)
@ -41,26 +45,31 @@ class VPCPeeringConnections(BaseResponse):
CREATE_VPC_PEERING_CONNECTION_RESPONSE = """
<CreateVpcPeeringConnectionResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
<vpcPeeringConnection>
<vpcPeeringConnectionId>{{ vpc_pcx.id }}</vpcPeeringConnectionId>
<CreateVpcPeeringConnectionResponse xmlns="http://ec2.amazonaws.com/doc/2016-11-15/">
<requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
<vpcPeeringConnection>
<vpcPeeringConnectionId>{{ vpc_pcx.id }}</vpcPeeringConnectionId>
<requesterVpcInfo>
<ownerId>777788889999</ownerId>
<vpcId>{{ vpc_pcx.vpc.id }}</vpcId>
<cidrBlock>{{ vpc_pcx.vpc.cidr_block }}</cidrBlock>
<ownerId>777788889999</ownerId>
<vpcId>{{ vpc_pcx.vpc.id }}</vpcId>
<cidrBlock>{{ vpc_pcx.vpc.cidr_block }}</cidrBlock>
<peeringOptions>
<allowEgressFromLocalClassicLinkToRemoteVpc>false</allowEgressFromLocalClassicLinkToRemoteVpc>
<allowEgressFromLocalVpcToRemoteClassicLink>false</allowEgressFromLocalVpcToRemoteClassicLink>
<allowDnsResolutionFromRemoteVpc>false</allowDnsResolutionFromRemoteVpc>
</peeringOptions>
</requesterVpcInfo>
<accepterVpcInfo>
<ownerId>123456789012</ownerId>
<vpcId>{{ vpc_pcx.peer_vpc.id }}</vpcId>
</accepterVpcInfo>
<status>
<code>initiating-request</code>
<message>Initiating request to {accepter ID}.</message>
<code>initiating-request</code>
<message>Initiating Request to {accepter ID}</message>
</status>
<expirationTime>2014-02-18T14:37:25.000Z</expirationTime>
<tagSet/>
</vpcPeeringConnection>
</vpcPeeringConnection>
</CreateVpcPeeringConnectionResponse>
"""

View File

@ -4,12 +4,12 @@ import hashlib
from copy import copy
from random import random
from botocore.exceptions import ParamValidationError
from moto.core import BaseBackend, BaseModel
from moto.ec2 import ec2_backends
from moto.ecr.exceptions import ImageNotFoundException, RepositoryNotFoundException
from botocore.exceptions import ParamValidationError
DEFAULT_REGISTRY_ID = '012345678910'
@ -97,13 +97,14 @@ class Repository(BaseObject):
class Image(BaseObject):
def __init__(self, tag, manifest, repository, registry_id=DEFAULT_REGISTRY_ID):
def __init__(self, tag, manifest, repository, digest=None, registry_id=DEFAULT_REGISTRY_ID):
self.image_tag = tag
self.image_tags = [tag] if tag is not None else []
self.image_manifest = manifest
self.image_size_in_bytes = 50 * 1024 * 1024
self.repository = repository
self.registry_id = registry_id
self.image_digest = None
self.image_digest = digest
self.image_pushed_at = None
def _create_digest(self):
@ -115,6 +116,14 @@ class Image(BaseObject):
self._create_digest()
return self.image_digest
def get_image_manifest(self):
return self.image_manifest
def update_tag(self, tag):
self.image_tag = tag
if tag not in self.image_tags and tag is not None:
self.image_tags.append(tag)
@property
def response_object(self):
response_object = self.gen_response_object()
@ -124,26 +133,26 @@ class Image(BaseObject):
response_object['imageManifest'] = self.image_manifest
response_object['repositoryName'] = self.repository
response_object['registryId'] = self.registry_id
return response_object
return {k: v for k, v in response_object.items() if v is not None and v != [None]}
@property
def response_list_object(self):
response_object = self.gen_response_object()
response_object['imageTag'] = self.image_tag
response_object['imageDigest'] = "i don't know"
return response_object
return {k: v for k, v in response_object.items() if v is not None and v != [None]}
@property
def response_describe_object(self):
response_object = self.gen_response_object()
response_object['imageTags'] = [self.image_tag]
response_object['imageTags'] = self.image_tags
response_object['imageDigest'] = self.get_image_digest()
response_object['imageManifest'] = self.image_manifest
response_object['repositoryName'] = self.repository
response_object['registryId'] = self.registry_id
response_object['imageSizeInBytes'] = self.image_size_in_bytes
response_object['imagePushedAt'] = '2017-05-09'
return response_object
return {k: v for k, v in response_object.items() if v is not None and v != []}
@property
def response_batch_get_image(self):
@ -154,7 +163,7 @@ class Image(BaseObject):
response_object['imageManifest'] = self.image_manifest
response_object['repositoryName'] = self.repository
response_object['registryId'] = self.registry_id
return response_object
return {k: v for k, v in response_object.items() if v is not None and v != [None]}
class ECRBackend(BaseBackend):
@ -231,7 +240,7 @@ class ECRBackend(BaseBackend):
found = False
for image in repository.images:
if (('imageDigest' in image_id and image.get_image_digest() == image_id['imageDigest']) or
('imageTag' in image_id and image.image_tag == image_id['imageTag'])):
('imageTag' in image_id and image_id['imageTag'] in image.image_tags)):
found = True
response.add(image)
if not found:
@ -257,9 +266,16 @@ class ECRBackend(BaseBackend):
else:
raise Exception("{0} is not a repository".format(repository_name))
image = Image(image_tag, image_manifest, repository_name)
repository.images.append(image)
return image
existing_images = list(filter(lambda x: x.response_object['imageManifest'] == image_manifest, repository.images))
if not existing_images:
# this image is not in ECR yet
image = Image(image_tag, image_manifest, repository_name)
repository.images.append(image)
return image
else:
# update existing image
existing_images[0].update_tag(image_tag)
return existing_images[0]
def batch_get_image(self, repository_name, registry_id=None, image_ids=None, accepted_media_types=None):
if repository_name in self.repositories:

View File

@ -179,7 +179,7 @@ class Task(BaseObject):
class Service(BaseObject):
def __init__(self, cluster, service_name, task_definition, desired_count, load_balancers=None):
def __init__(self, cluster, service_name, task_definition, desired_count, load_balancers=None, scheduling_strategy=None):
self.cluster_arn = cluster.arn
self.arn = 'arn:aws:ecs:us-east-1:012345678910:service/{0}'.format(
service_name)
@ -202,6 +202,7 @@ class Service(BaseObject):
}
]
self.load_balancers = load_balancers if load_balancers is not None else []
self.scheduling_strategy = scheduling_strategy if scheduling_strategy is not None else 'REPLICA'
self.pending_count = 0
@property
@ -214,6 +215,7 @@ class Service(BaseObject):
del response_object['name'], response_object['arn']
response_object['serviceName'] = self.name
response_object['serviceArn'] = self.arn
response_object['schedulingStrategy'] = self.scheduling_strategy
for deployment in response_object['deployments']:
if isinstance(deployment['createdAt'], datetime):
@ -655,7 +657,7 @@ class EC2ContainerServiceBackend(BaseBackend):
raise Exception("Could not find task {} on cluster {}".format(
task_str, cluster_name))
def create_service(self, cluster_str, service_name, task_definition_str, desired_count, load_balancers=None):
def create_service(self, cluster_str, service_name, task_definition_str, desired_count, load_balancers=None, scheduling_strategy=None):
cluster_name = cluster_str.split('/')[-1]
if cluster_name in self.clusters:
cluster = self.clusters[cluster_name]
@ -665,7 +667,7 @@ class EC2ContainerServiceBackend(BaseBackend):
desired_count = desired_count if desired_count is not None else 0
service = Service(cluster, service_name,
task_definition, desired_count, load_balancers)
task_definition, desired_count, load_balancers, scheduling_strategy)
cluster_service_pair = '{0}:{1}'.format(cluster_name, service_name)
self.services[cluster_service_pair] = service

View File

@ -154,8 +154,9 @@ class EC2ContainerServiceResponse(BaseResponse):
task_definition_str = self._get_param('taskDefinition')
desired_count = self._get_int_param('desiredCount')
load_balancers = self._get_param('loadBalancers')
scheduling_strategy = self._get_param('schedulingStrategy')
service = self.ecs_backend.create_service(
cluster_str, service_name, task_definition_str, desired_count, load_balancers)
cluster_str, service_name, task_definition_str, desired_count, load_balancers, scheduling_strategy)
return json.dumps({
'service': service.response_object
})

View File

@ -259,12 +259,22 @@ class ELBResponse(BaseResponse):
def describe_instance_health(self):
load_balancer_name = self._get_param('LoadBalancerName')
instance_ids = [list(param.values())[0] for param in self._get_list_prefix('Instances.member')]
if len(instance_ids) == 0:
instance_ids = self.elb_backend.get_load_balancer(
load_balancer_name).instance_ids
provided_instance_ids = [
list(param.values())[0]
for param in self._get_list_prefix('Instances.member')
]
registered_instances_id = self.elb_backend.get_load_balancer(
load_balancer_name).instance_ids
if len(provided_instance_ids) == 0:
provided_instance_ids = registered_instances_id
template = self.response_template(DESCRIBE_INSTANCE_HEALTH_TEMPLATE)
return template.render(instance_ids=instance_ids)
instances = []
for instance_id in provided_instance_ids:
state = "InService" \
if instance_id in registered_instances_id\
else "Unknown"
instances.append({"InstanceId": instance_id, "State": state})
return template.render(instances=instances)
def add_tags(self):
@ -689,11 +699,11 @@ SET_LOAD_BALANCER_POLICIES_FOR_BACKEND_SERVER_TEMPLATE = """<SetLoadBalancerPoli
DESCRIBE_INSTANCE_HEALTH_TEMPLATE = """<DescribeInstanceHealthResponse xmlns="http://elasticloadbalancing.amazonaws.com/doc/2012-06-01/">
<DescribeInstanceHealthResult>
<InstanceStates>
{% for instance_id in instance_ids %}
{% for instance in instances %}
<member>
<Description>N/A</Description>
<InstanceId>{{ instance_id }}</InstanceId>
<State>InService</State>
<InstanceId>{{ instance['InstanceId'] }}</InstanceId>
<State>{{ instance['State'] }}</State>
<ReasonCode>N/A</ReasonCode>
</member>
{% endfor %}
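A sketch of how the reworked DescribeInstanceHealth behavior could be exercised with boto3 against the mocks; the AMI id and the second, unregistered instance id are placeholders:

```python
import boto3
from moto import mock_ec2, mock_elb


@mock_ec2
@mock_elb
def test_describe_instance_health_sketch():
    ec2 = boto3.resource('ec2', region_name='us-east-1')
    elb = boto3.client('elb', region_name='us-east-1')
    instance = ec2.create_instances(ImageId='ami-12345678', MinCount=1, MaxCount=1)[0]
    elb.create_load_balancer(
        LoadBalancerName='my-lb',
        Listeners=[{'Protocol': 'HTTP', 'LoadBalancerPort': 80, 'InstancePort': 80}],
        AvailabilityZones=['us-east-1a'],
    )
    elb.register_instances_with_load_balancer(
        LoadBalancerName='my-lb', Instances=[{'InstanceId': instance.id}])
    health = elb.describe_instance_health(
        LoadBalancerName='my-lb',
        Instances=[{'InstanceId': instance.id},
                   {'InstanceId': 'i-0123456789abcdef0'}])
    # The registered instance reports InService; the unknown id reports Unknown.
    print([state['State'] for state in health['InstanceStates']])
```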

View File

@ -37,7 +37,6 @@ class Policy(BaseModel):
description=None,
document=None,
path=None):
self.document = document or {}
self.name = name
self.attachment_count = 0
@ -45,7 +44,7 @@ class Policy(BaseModel):
self.id = random_policy_id()
self.path = path or '/'
self.default_version_id = default_version_id or 'v1'
self.versions = []
self.versions = [PolicyVersion(self.arn, document, True)]
self.create_datetime = datetime.now(pytz.utc)
self.update_datetime = datetime.now(pytz.utc)
@ -72,11 +71,11 @@ class ManagedPolicy(Policy):
def attach_to(self, obj):
self.attachment_count += 1
obj.managed_policies[self.name] = self
obj.managed_policies[self.arn] = self
def detach_from(self, obj):
self.attachment_count -= 1
del obj.managed_policies[self.name]
del obj.managed_policies[self.arn]
@property
def arn(self):
@ -477,11 +476,13 @@ class IAMBackend(BaseBackend):
document=policy_document,
path=path,
)
self.managed_policies[policy.name] = policy
self.managed_policies[policy.arn] = policy
return policy
def get_policy(self, policy_name):
return self.managed_policies.get(policy_name)
def get_policy(self, policy_arn):
if policy_arn not in self.managed_policies:
raise IAMNotFoundException("Policy {0} not found".format(policy_arn))
return self.managed_policies.get(policy_arn)
def list_attached_role_policies(self, role_name, marker=None, max_items=100, path_prefix='/'):
policies = self.get_role(role_name).managed_policies.values()
@ -575,21 +576,18 @@ class IAMBackend(BaseBackend):
return role.policies.keys()
def create_policy_version(self, policy_arn, policy_document, set_as_default):
policy_name = policy_arn.split(':')[-1]
policy_name = policy_name.split('/')[1]
policy = self.get_policy(policy_name)
policy = self.get_policy(policy_arn)
if not policy:
raise IAMNotFoundException("Policy not found")
version = PolicyVersion(policy_arn, policy_document, set_as_default)
policy.versions.append(version)
version.version_id = 'v{0}'.format(len(policy.versions))
if set_as_default:
policy.default_version_id = version.version_id
return version
def get_policy_version(self, policy_arn, version_id):
policy_name = policy_arn.split(':')[-1]
policy_name = policy_name.split('/')[1]
policy = self.get_policy(policy_name)
policy = self.get_policy(policy_arn)
if not policy:
raise IAMNotFoundException("Policy not found")
for version in policy.versions:
@ -598,19 +596,18 @@ class IAMBackend(BaseBackend):
raise IAMNotFoundException("Policy version not found")
def list_policy_versions(self, policy_arn):
policy_name = policy_arn.split(':')[-1]
policy_name = policy_name.split('/')[1]
policy = self.get_policy(policy_name)
policy = self.get_policy(policy_arn)
if not policy:
raise IAMNotFoundException("Policy not found")
return policy.versions
def delete_policy_version(self, policy_arn, version_id):
policy_name = policy_arn.split(':')[-1]
policy_name = policy_name.split('/')[1]
policy = self.get_policy(policy_name)
policy = self.get_policy(policy_arn)
if not policy:
raise IAMNotFoundException("Policy not found")
if version_id == policy.default_version_id:
raise IAMConflictException(
"Cannot delete the default version of a policy")
for i, v in enumerate(policy.versions):
if v.version_id == version_id:
del policy.versions[i]

View File

@ -58,6 +58,12 @@ class IamResponse(BaseResponse):
template = self.response_template(CREATE_POLICY_TEMPLATE)
return template.render(policy=policy)
def get_policy(self):
policy_arn = self._get_param('PolicyArn')
policy = iam_backend.get_policy(policy_arn)
template = self.response_template(GET_POLICY_TEMPLATE)
return template.render(policy=policy)
def list_attached_role_policies(self):
marker = self._get_param('Marker')
max_items = self._get_int_param('MaxItems', 100)
@ -601,6 +607,25 @@ CREATE_POLICY_TEMPLATE = """<CreatePolicyResponse>
</ResponseMetadata>
</CreatePolicyResponse>"""
GET_POLICY_TEMPLATE = """<GetPolicyResponse>
<GetPolicyResult>
<Policy>
<PolicyName>{{ policy.name }}</PolicyName>
<Description>{{ policy.description }}</Description>
<DefaultVersionId>{{ policy.default_version_id }}</DefaultVersionId>
<PolicyId>{{ policy.id }}</PolicyId>
<Path>{{ policy.path }}</Path>
<Arn>{{ policy.arn }}</Arn>
<AttachmentCount>{{ policy.attachment_count }}</AttachmentCount>
<CreateDate>{{ policy.create_datetime.isoformat() }}</CreateDate>
<UpdateDate>{{ policy.update_datetime.isoformat() }}</UpdateDate>
</Policy>
</GetPolicyResult>
<ResponseMetadata>
<RequestId>684f0917-3d22-11e4-a4a0-cffb9EXAMPLE</RequestId>
</ResponseMetadata>
</GetPolicyResponse>"""
LIST_ATTACHED_ROLE_POLICIES_TEMPLATE = """<ListAttachedRolePoliciesResponse>
<ListAttachedRolePoliciesResult>
{% if marker is none %}
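A sketch of the ARN-keyed policy handling with boto3 against `mock_iam`; the policy name and document are placeholders:

```python
import json

import boto3
from moto import mock_iam


@mock_iam
def test_policy_versions_sketch():
    iam = boto3.client('iam', region_name='us-east-1')
    doc = json.dumps({
        'Version': '2012-10-17',
        'Statement': [{'Effect': 'Allow', 'Action': 's3:ListBucket', 'Resource': '*'}],
    })
    arn = iam.create_policy(PolicyName='my-policy', PolicyDocument=doc)['Policy']['Arn']
    # Policies are now stored and fetched by ARN; the initial document is version v1.
    print(iam.get_policy(PolicyArn=arn)['Policy']['DefaultVersionId'])  # 'v1'
    iam.create_policy_version(PolicyArn=arn, PolicyDocument=doc, SetAsDefault=True)
    print(len(iam.list_policy_versions(PolicyArn=arn)['Versions']))  # 2
```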

View File

@ -1,14 +1,17 @@
from __future__ import unicode_literals
import time
import boto3
import string
import random
import hashlib
import uuid
import random
import re
from datetime import datetime
from moto.core import BaseBackend, BaseModel
import string
import time
import uuid
from collections import OrderedDict
from datetime import datetime
import boto3
from moto.core import BaseBackend, BaseModel
from .exceptions import (
ResourceNotFoundException,
InvalidRequestException,
@ -271,15 +274,37 @@ class IoTBackend(BaseBackend):
def list_thing_types(self, thing_type_name=None):
if thing_type_name:
# It's wierd but thing_type_name is filterd by forward match, not complete match
# It's weird but thing_type_name is filtered by forward match, not complete match
return [_ for _ in self.thing_types.values() if _.thing_type_name.startswith(thing_type_name)]
thing_types = self.thing_types.values()
return thing_types
return self.thing_types.values()
def list_things(self, attribute_name, attribute_value, thing_type_name):
# TODO: filter by attributess or thing_type
things = self.things.values()
return things
def list_things(self, attribute_name, attribute_value, thing_type_name, max_results, token):
all_things = [_.to_dict() for _ in self.things.values()]
if attribute_name is not None and thing_type_name is not None:
filtered_things = list(filter(lambda elem:
attribute_name in elem["attributes"] and
elem["attributes"][attribute_name] == attribute_value and
"thingTypeName" in elem and
elem["thingTypeName"] == thing_type_name, all_things))
elif attribute_name is not None and thing_type_name is None:
filtered_things = list(filter(lambda elem:
attribute_name in elem["attributes"] and
elem["attributes"][attribute_name] == attribute_value, all_things))
elif attribute_name is None and thing_type_name is not None:
filtered_things = list(
filter(lambda elem: "thingTypeName" in elem and elem["thingTypeName"] == thing_type_name, all_things))
else:
filtered_things = all_things
if token is None:
things = filtered_things[0:max_results]
next_token = str(max_results) if len(filtered_things) > max_results else None
else:
token = int(token)
things = filtered_things[token:token + max_results]
next_token = str(token + max_results) if len(filtered_things) > token + max_results else None
return things, next_token
def describe_thing(self, thing_name):
things = [_ for _ in self.things.values() if _.thing_name == thing_name]

View File

@ -1,7 +1,9 @@
from __future__ import unicode_literals
import json
from moto.core.responses import BaseResponse
from .models import iot_backends
import json
class IoTResponse(BaseResponse):
@ -32,30 +34,39 @@ class IoTResponse(BaseResponse):
return json.dumps(dict(thingTypeName=thing_type_name, thingTypeArn=thing_type_arn))
def list_thing_types(self):
# previous_next_token = self._get_param("nextToken")
# max_results = self._get_int_param("maxResults")
previous_next_token = self._get_param("nextToken")
max_results = self._get_int_param("maxResults", 50) # not the default, but makes testing easier
thing_type_name = self._get_param("thingTypeName")
thing_types = self.iot_backend.list_thing_types(
thing_type_name=thing_type_name
)
# TODO: implement pagination in the future
next_token = None
return json.dumps(dict(thingTypes=[_.to_dict() for _ in thing_types], nextToken=next_token))
thing_types = [_.to_dict() for _ in thing_types]
if previous_next_token is None:
result = thing_types[0:max_results]
next_token = str(max_results) if len(thing_types) > max_results else None
else:
token = int(previous_next_token)
result = thing_types[token:token + max_results]
next_token = str(token + max_results) if len(thing_types) > token + max_results else None
return json.dumps(dict(thingTypes=result, nextToken=next_token))
def list_things(self):
# previous_next_token = self._get_param("nextToken")
# max_results = self._get_int_param("maxResults")
previous_next_token = self._get_param("nextToken")
max_results = self._get_int_param("maxResults", 50) # not the default, but makes testing easier
attribute_name = self._get_param("attributeName")
attribute_value = self._get_param("attributeValue")
thing_type_name = self._get_param("thingTypeName")
things = self.iot_backend.list_things(
things, next_token = self.iot_backend.list_things(
attribute_name=attribute_name,
attribute_value=attribute_value,
thing_type_name=thing_type_name,
max_results=max_results,
token=previous_next_token
)
# TODO: implement pagination in the future
next_token = None
return json.dumps(dict(things=[_.to_dict() for _ in things], nextToken=next_token))
return json.dumps(dict(things=things, nextToken=next_token))
def describe_thing(self):
thing_name = self._get_param("thingName")
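A sketch of the new index-based `nextToken` pagination driven through boto3 against `mock_iot`; the thing names and page size are arbitrary:

```python
import boto3
from moto import mock_iot


@mock_iot
def test_list_things_pagination_sketch():
    iot = boto3.client('iot', region_name='ap-northeast-1')
    for i in range(5):
        iot.create_thing(thingName='thing-{}'.format(i))
    page1 = iot.list_things(maxResults=3)
    page2 = iot.list_things(maxResults=3, nextToken=page1['nextToken'])
    print(len(page1['things']), len(page2['things']))  # 3 2
    print(page2.get('nextToken'))  # None - no further pages
```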

View File

@ -29,5 +29,5 @@ class ResourceAlreadyExistsException(LogsClientError):
self.code = 400
super(ResourceAlreadyExistsException, self).__init__(
'ResourceAlreadyExistsException',
'The specified resource already exists.'
'The specified log group already exists'
)

View File

@ -86,7 +86,7 @@ class LogStream:
self.events += [LogEvent(self.lastIngestionTime, log_event) for log_event in log_events]
self.uploadSequenceToken += 1
return self.uploadSequenceToken
return '{:056d}'.format(self.uploadSequenceToken)
def get_log_events(self, log_group_name, log_stream_name, start_time, end_time, limit, next_token, start_from_head):
def filter_func(event):

View File

@ -85,6 +85,7 @@ old_socksocket = None
old_ssl_wrap_socket = None
old_sslwrap_simple = None
old_sslsocket = None
old_sslcontext_wrap_socket = None
if PY3: # pragma: no cover
basestring = (bytes, str)
@ -100,6 +101,10 @@ try: # pragma: no cover
if not PY3:
old_sslwrap_simple = ssl.sslwrap_simple
old_sslsocket = ssl.SSLSocket
try:
old_sslcontext_wrap_socket = ssl.SSLContext.wrap_socket
except AttributeError:
pass
except ImportError: # pragma: no cover
ssl = None
@ -281,7 +286,7 @@ class fakesock(object):
return {
'notAfter': shift.strftime('%b %d %H:%M:%S GMT'),
'subjectAltName': (
('DNS', '*%s' % self._host),
('DNS', '*.%s' % self._host),
('DNS', self._host),
('DNS', '*'),
),
@ -772,7 +777,7 @@ class URIMatcher(object):
def __init__(self, uri, entries, match_querystring=False):
self._match_querystring = match_querystring
if type(uri).__name__ == 'SRE_Pattern':
if type(uri).__name__ in ('SRE_Pattern', 'Pattern'):
self.regex = uri
result = urlsplit(uri.pattern)
if result.scheme == 'https':
@ -1012,6 +1017,10 @@ class httpretty(HttpBaseClass):
if ssl:
ssl.wrap_socket = old_ssl_wrap_socket
ssl.SSLSocket = old_sslsocket
try:
ssl.SSLContext.wrap_socket = old_sslcontext_wrap_socket
except AttributeError:
pass
ssl.__dict__['wrap_socket'] = old_ssl_wrap_socket
ssl.__dict__['SSLSocket'] = old_sslsocket
@ -1058,6 +1067,14 @@ class httpretty(HttpBaseClass):
ssl.wrap_socket = fake_wrap_socket
ssl.SSLSocket = FakeSSLSocket
try:
def fake_sslcontext_wrap_socket(cls, *args, **kwargs):
return fake_wrap_socket(*args, **kwargs)
ssl.SSLContext.wrap_socket = fake_sslcontext_wrap_socket
except AttributeError:
pass
ssl.__dict__['wrap_socket'] = fake_wrap_socket
ssl.__dict__['SSLSocket'] = FakeSSLSocket

View File

@ -48,6 +48,10 @@ class Database(BaseModel):
if self.publicly_accessible is None:
self.publicly_accessible = True
self.copy_tags_to_snapshot = kwargs.get("copy_tags_to_snapshot")
if self.copy_tags_to_snapshot is None:
self.copy_tags_to_snapshot = False
self.backup_retention_period = kwargs.get("backup_retention_period")
if self.backup_retention_period is None:
self.backup_retention_period = 1
@ -137,6 +141,7 @@ class Database(BaseModel):
"multi_az": properties.get("MultiAZ"),
"port": properties.get('Port', 3306),
"publicly_accessible": properties.get("PubliclyAccessible"),
"copy_tags_to_snapshot": properties.get("CopyTagsToSnapshot"),
"region": region_name,
"security_groups": security_groups,
"storage_encrypted": properties.get("StorageEncrypted"),
@ -217,6 +222,7 @@ class Database(BaseModel):
</DBSubnetGroup>
{% endif %}
<PubliclyAccessible>{{ database.publicly_accessible }}</PubliclyAccessible>
<CopyTagsToSnapshot>{{ database.copy_tags_to_snapshot }}</CopyTagsToSnapshot>
<AutoMinorVersionUpgrade>{{ database.auto_minor_version_upgrade }}</AutoMinorVersionUpgrade>
<AllocatedStorage>{{ database.allocated_storage }}</AllocatedStorage>
<StorageEncrypted>{{ database.storage_encrypted }}</StorageEncrypted>

View File

@ -73,6 +73,9 @@ class Database(BaseModel):
self.publicly_accessible = kwargs.get("publicly_accessible")
if self.publicly_accessible is None:
self.publicly_accessible = True
self.copy_tags_to_snapshot = kwargs.get("copy_tags_to_snapshot")
if self.copy_tags_to_snapshot is None:
self.copy_tags_to_snapshot = False
self.backup_retention_period = kwargs.get("backup_retention_period")
if self.backup_retention_period is None:
self.backup_retention_period = 1
@ -208,6 +211,7 @@ class Database(BaseModel):
</DBSubnetGroup>
{% endif %}
<PubliclyAccessible>{{ database.publicly_accessible }}</PubliclyAccessible>
<CopyTagsToSnapshot>{{ database.copy_tags_to_snapshot }}</CopyTagsToSnapshot>
<AutoMinorVersionUpgrade>{{ database.auto_minor_version_upgrade }}</AutoMinorVersionUpgrade>
<AllocatedStorage>{{ database.allocated_storage }}</AllocatedStorage>
<StorageEncrypted>{{ database.storage_encrypted }}</StorageEncrypted>
@ -304,6 +308,7 @@ class Database(BaseModel):
"db_parameter_group_name": properties.get('DBParameterGroupName'),
"port": properties.get('Port', 3306),
"publicly_accessible": properties.get("PubliclyAccessible"),
"copy_tags_to_snapshot": properties.get("CopyTagsToSnapshot"),
"region": region_name,
"security_groups": security_groups,
"storage_encrypted": properties.get("StorageEncrypted"),
@ -362,6 +367,7 @@ class Database(BaseModel):
"PreferredBackupWindow": "{{ database.preferred_backup_window }}",
"PreferredMaintenanceWindow": "{{ database.preferred_maintenance_window }}",
"PubliclyAccessible": "{{ database.publicly_accessible }}",
"CopyTagsToSnapshot": "{{ database.copy_tags_to_snapshot }}",
"AllocatedStorage": "{{ database.allocated_storage }}",
"Endpoint": {
"Address": "{{ database.address }}",
@ -411,10 +417,10 @@ class Database(BaseModel):
class Snapshot(BaseModel):
def __init__(self, database, snapshot_id, tags=None):
def __init__(self, database, snapshot_id, tags):
self.database = database
self.snapshot_id = snapshot_id
self.tags = tags or []
self.tags = tags
self.created_at = iso_8601_datetime_with_milliseconds(datetime.datetime.now())
@property
@ -456,6 +462,20 @@ class Snapshot(BaseModel):
</DBSnapshot>""")
return template.render(snapshot=self, database=self.database)
def get_tags(self):
return self.tags
def add_tags(self, tags):
new_keys = [tag_set['Key'] for tag_set in tags]
self.tags = [tag_set for tag_set in self.tags if tag_set[
'Key'] not in new_keys]
self.tags.extend(tags)
return self.tags
def remove_tags(self, tag_keys):
self.tags = [tag_set for tag_set in self.tags if tag_set[
'Key'] not in tag_keys]
class SecurityGroup(BaseModel):
@ -691,6 +711,10 @@ class RDS2Backend(BaseBackend):
raise DBSnapshotAlreadyExistsError(db_snapshot_identifier)
if len(self.snapshots) >= int(os.environ.get('MOTO_RDS_SNAPSHOT_LIMIT', '100')):
raise SnapshotQuotaExceededError()
if tags is None:
tags = list()
if database.copy_tags_to_snapshot and not tags:
tags = database.get_tags()
snapshot = Snapshot(database, db_snapshot_identifier, tags)
self.snapshots[db_snapshot_identifier] = snapshot
return snapshot
@ -787,13 +811,13 @@ class RDS2Backend(BaseBackend):
def delete_database(self, db_instance_identifier, db_snapshot_name=None):
if db_instance_identifier in self.databases:
if db_snapshot_name:
self.create_snapshot(db_instance_identifier, db_snapshot_name)
database = self.databases.pop(db_instance_identifier)
if database.is_replica:
primary = self.find_db_from_id(database.source_db_identifier)
primary.remove_replica(database)
database.status = 'deleting'
if db_snapshot_name:
self.snapshots[db_snapshot_name] = Snapshot(database, db_snapshot_name)
return database
else:
raise DBInstanceNotFoundError(db_instance_identifier)
@ -1028,8 +1052,8 @@ class RDS2Backend(BaseBackend):
if resource_name in self.security_groups:
return self.security_groups[resource_name].get_tags()
elif resource_type == 'snapshot': # DB Snapshot
# TODO: Complete call to tags on resource type DB Snapshot
return []
if resource_name in self.snapshots:
return self.snapshots[resource_name].get_tags()
elif resource_type == 'subgrp': # DB subnet group
if resource_name in self.subnet_groups:
return self.subnet_groups[resource_name].get_tags()
@ -1059,7 +1083,8 @@ class RDS2Backend(BaseBackend):
if resource_name in self.security_groups:
return self.security_groups[resource_name].remove_tags(tag_keys)
elif resource_type == 'snapshot': # DB Snapshot
return None
if resource_name in self.snapshots:
return self.snapshots[resource_name].remove_tags(tag_keys)
elif resource_type == 'subgrp': # DB subnet group
if resource_name in self.subnet_groups:
return self.subnet_groups[resource_name].remove_tags(tag_keys)
@ -1088,7 +1113,8 @@ class RDS2Backend(BaseBackend):
if resource_name in self.security_groups:
return self.security_groups[resource_name].add_tags(tags)
elif resource_type == 'snapshot': # DB Snapshot
return []
if resource_name in self.snapshots:
return self.snapshots[resource_name].add_tags(tags)
elif resource_type == 'subgrp': # DB subnet group
if resource_name in self.subnet_groups:
return self.subnet_groups[resource_name].add_tags(tags)

View File

@ -19,6 +19,7 @@ class RDS2Response(BaseResponse):
"allocated_storage": self._get_int_param('AllocatedStorage'),
"availability_zone": self._get_param("AvailabilityZone"),
"backup_retention_period": self._get_param("BackupRetentionPeriod"),
"copy_tags_to_snapshot": self._get_param("CopyTagsToSnapshot"),
"db_instance_class": self._get_param('DBInstanceClass'),
"db_instance_identifier": self._get_param('DBInstanceIdentifier'),
"db_name": self._get_param("DBName"),
@ -159,7 +160,7 @@ class RDS2Response(BaseResponse):
def create_db_snapshot(self):
db_instance_identifier = self._get_param('DBInstanceIdentifier')
db_snapshot_identifier = self._get_param('DBSnapshotIdentifier')
tags = self._get_param('Tags', [])
tags = self.unpack_complex_list_params('Tags.Tag', ('Key', 'Value'))
snapshot = self.backend.create_snapshot(db_instance_identifier, db_snapshot_identifier, tags)
template = self.response_template(CREATE_SNAPSHOT_TEMPLATE)
return template.render(snapshot=snapshot)
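A sketch of the new `CopyTagsToSnapshot` flag end to end with boto3 against `mock_rds2`; the instance settings and tag are placeholders:

```python
import boto3
from moto import mock_rds2


@mock_rds2
def test_copy_tags_to_snapshot_sketch():
    rds = boto3.client('rds', region_name='us-west-2')
    rds.create_db_instance(
        DBInstanceIdentifier='db-1',
        DBInstanceClass='db.t2.micro',
        Engine='postgres',
        MasterUsername='admin',
        MasterUserPassword='password',
        AllocatedStorage=10,
        CopyTagsToSnapshot=True,
        Tags=[{'Key': 'team', 'Value': 'data'}],
    )
    db = rds.describe_db_instances(DBInstanceIdentifier='db-1')['DBInstances'][0]
    print(db['CopyTagsToSnapshot'])  # expected: True
    # With the flag set, a snapshot created without explicit tags
    # inherits the instance tags.
    rds.create_db_snapshot(DBInstanceIdentifier='db-1',
                           DBSnapshotIdentifier='snap-1')
```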

View File

@ -78,6 +78,7 @@ class Cluster(TaggableResourceMixin, BaseModel):
super(Cluster, self).__init__(region_name, tags)
self.redshift_backend = redshift_backend
self.cluster_identifier = cluster_identifier
self.create_time = iso_8601_datetime_with_milliseconds(datetime.datetime.now())
self.status = 'available'
self.node_type = node_type
self.master_username = master_username
@ -237,6 +238,7 @@ class Cluster(TaggableResourceMixin, BaseModel):
"Address": self.endpoint,
"Port": self.port
},
'ClusterCreateTime': self.create_time,
"PendingModifiedValues": [],
"Tags": self.tags,
"IamRoles": [{

View File

@ -27,8 +27,14 @@ class FakeDeleteMarker(BaseModel):
def __init__(self, key):
self.key = key
self.name = key.name
self.last_modified = datetime.datetime.utcnow()
self._version_id = key.version_id + 1
@property
def last_modified_ISO8601(self):
return iso_8601_datetime_with_milliseconds(self.last_modified)
@property
def version_id(self):
return self._version_id
@ -630,10 +636,7 @@ class S3Backend(BaseBackend):
latest_versions = {}
for version in versions:
if isinstance(version, FakeDeleteMarker):
name = version.key.name
else:
name = version.name
name = version.name
version_id = version.version_id
maximum_version_per_key[name] = max(
version_id,

View File

@ -1273,10 +1273,10 @@ S3_BUCKET_GET_VERSIONS = """<?xml version="1.0" encoding="UTF-8"?>
{% endfor %}
{% for marker in delete_marker_list %}
<DeleteMarker>
<Key>{{ marker.key.name }}</Key>
<Key>{{ marker.name }}</Key>
<VersionId>{{ marker.version_id }}</VersionId>
<IsLatest>{% if latest_versions[marker.key.name] == marker.version_id %}true{% else %}false{% endif %}</IsLatest>
<LastModified>{{ marker.key.last_modified_ISO8601 }}</LastModified>
<IsLatest>{% if latest_versions[marker.name] == marker.version_id %}true{% else %}false{% endif %}</IsLatest>
<LastModified>{{ marker.last_modified_ISO8601 }}</LastModified>
<Owner>
<ID>75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a</ID>
<DisplayName>webfile</DisplayName>
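A sketch of the corrected delete-marker listing with boto3 against `mock_s3`; the bucket and key names are placeholders:

```python
import boto3
from moto import mock_s3


@mock_s3
def test_delete_marker_listing_sketch():
    s3 = boto3.client('s3', region_name='us-east-1')
    s3.create_bucket(Bucket='mybucket')
    s3.put_bucket_versioning(Bucket='mybucket',
                             VersioningConfiguration={'Status': 'Enabled'})
    s3.put_object(Bucket='mybucket', Key='my-key', Body=b'some value')
    s3.delete_object(Bucket='mybucket', Key='my-key')  # adds a delete marker
    marker = s3.list_object_versions(Bucket='mybucket')['DeleteMarkers'][0]
    # The marker now exposes its own Key, LastModified and IsLatest fields.
    print(marker['Key'], marker['IsLatest'])  # my-key True
```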

View File

@ -33,15 +33,22 @@ class SecretsManagerBackend(BaseBackend):
self.name = kwargs.get('name', '')
self.createdate = int(time.time())
self.secret_string = ''
self.rotation_enabled = False
self.rotation_lambda_arn = ''
self.auto_rotate_after_days = 0
self.version_id = ''
def reset(self):
region_name = self.region
self.__dict__ = {}
self.__init__(region_name)
def _is_valid_identifier(self, identifier):
return identifier in (self.name, self.secret_id)
def get_secret_value(self, secret_id, version_id, version_stage):
if secret_id not in (self.secret_id, self.name):
if not self._is_valid_identifier(secret_id):
raise ResourceNotFoundException()
response = json.dumps({
@ -71,6 +78,84 @@ class SecretsManagerBackend(BaseBackend):
return response
def describe_secret(self, secret_id):
if not self._is_valid_identifier(secret_id):
raise ResourceNotFoundException
response = json.dumps({
"ARN": secret_arn(self.region, self.secret_id),
"Name": self.name,
"Description": "",
"KmsKeyId": "",
"RotationEnabled": self.rotation_enabled,
"RotationLambdaARN": self.rotation_lambda_arn,
"RotationRules": {
"AutomaticallyAfterDays": self.auto_rotate_after_days
},
"LastRotatedDate": None,
"LastChangedDate": None,
"LastAccessedDate": None,
"DeletedDate": None,
"Tags": [
{
"Key": "",
"Value": ""
},
]
})
return response
def rotate_secret(self, secret_id, client_request_token=None,
rotation_lambda_arn=None, rotation_rules=None):
rotation_days = 'AutomaticallyAfterDays'
if not self._is_valid_identifier(secret_id):
raise ResourceNotFoundException
if client_request_token:
token_length = len(client_request_token)
if token_length < 32 or token_length > 64:
msg = (
'ClientRequestToken '
'must be 32-64 characters long.'
)
raise InvalidParameterException(msg)
if rotation_lambda_arn:
if len(rotation_lambda_arn) > 2048:
msg = (
'RotationLambdaARN '
'must <= 2048 characters long.'
)
raise InvalidParameterException(msg)
if rotation_rules:
if rotation_days in rotation_rules:
rotation_period = rotation_rules[rotation_days]
if rotation_period < 1 or rotation_period > 1000:
msg = (
'RotationRules.AutomaticallyAfterDays '
'must be within 1-1000.'
)
raise InvalidParameterException(msg)
self.version_id = client_request_token or ''
self.rotation_lambda_arn = rotation_lambda_arn or ''
if rotation_rules:
self.auto_rotate_after_days = rotation_rules.get(rotation_days, 0)
if self.auto_rotate_after_days > 0:
self.rotation_enabled = True
response = json.dumps({
"ARN": secret_arn(self.region, self.secret_id),
"Name": self.name,
"VersionId": self.version_id
})
return response
def get_random_password(self, password_length,
exclude_characters, exclude_numbers,
exclude_punctuation, exclude_uppercase,

View File

@ -44,3 +44,21 @@ class SecretsManagerResponse(BaseResponse):
include_space=include_space,
require_each_included_type=require_each_included_type
)
def describe_secret(self):
secret_id = self._get_param('SecretId')
return secretsmanager_backends[self.region].describe_secret(
secret_id=secret_id
)
def rotate_secret(self):
client_request_token = self._get_param('ClientRequestToken')
rotation_lambda_arn = self._get_param('RotationLambdaARN')
rotation_rules = self._get_param('RotationRules')
secret_id = self._get_param('SecretId')
return secretsmanager_backends[self.region].rotate_secret(
secret_id=secret_id,
client_request_token=client_request_token,
rotation_lambda_arn=rotation_lambda_arn,
rotation_rules=rotation_rules
)
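A sketch of the newly implemented `describe_secret` and `rotate_secret` calls with boto3 against `mock_secretsmanager`; the secret name and rotation interval are placeholders:

```python
import boto3
from moto import mock_secretsmanager


@mock_secretsmanager
def test_rotate_secret_sketch():
    sm = boto3.client('secretsmanager', region_name='us-west-2')
    sm.create_secret(Name='my-secret', SecretString='s3kr3t')
    print(sm.describe_secret(SecretId='my-secret')['RotationEnabled'])  # False
    rotated = sm.rotate_secret(
        SecretId='my-secret',
        RotationRules={'AutomaticallyAfterDays': 30},
    )
    print(rotated['Name'])  # 'my-secret'
    print(sm.describe_secret(SecretId='my-secret')['RotationEnabled'])  # True
```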

View File

@ -34,6 +34,9 @@ class DomainDispatcherApplication(object):
self.service = service
def get_backend_for_host(self, host):
if host == 'moto_api':
return host
if self.service:
return self.service

View File

@ -49,7 +49,8 @@ class SESBackend(BaseBackend):
self.sent_messages = []
self.sent_message_count = 0
def _is_verified_address(self, address):
def _is_verified_address(self, source):
_, address = parseaddr(source)
if address in self.addresses:
return True
user, host = address.split('@', 1)
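`parseaddr` comes from the standard library's `email.utils`; it splits an optional display name from the address, which is what lets sources like `Jane Doe <jane@example.com>` verify against the bare address:

```python
from email.utils import parseaddr

print(parseaddr('Jane Doe <jane@example.com>'))  # ('Jane Doe', 'jane@example.com')
print(parseaddr('jane@example.com'))             # ('', 'jane@example.com')
```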

View File

@ -385,10 +385,22 @@ class SQSBackend(BaseBackend):
def create_queue(self, name, **kwargs):
queue = self.queues.get(name)
if queue:
# Queue already exists. If attributes don't match, throw an error
for key, value in kwargs.items():
if getattr(queue, camelcase_to_underscores(key)) != value:
raise QueueAlreadyExists("The specified queue already exists.")
try:
kwargs.pop('region')
except KeyError:
pass
new_queue = Queue(name, region=self.region_name, **kwargs)
queue_attributes = queue.attributes
new_queue_attributes = new_queue.attributes
for key in ['CreatedTimestamp', 'LastModifiedTimestamp']:
queue_attributes.pop(key)
new_queue_attributes.pop(key)
if queue_attributes != new_queue_attributes:
raise QueueAlreadyExists("The specified queue already exists.")
else:
try:
kwargs.pop('region')
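A sketch of the attribute comparison above with boto3 against `mock_sqs`: re-creating a queue with identical attributes is treated as idempotent, while mismatched attributes raise the error (queue name and timeouts are placeholders):

```python
import boto3
from botocore.exceptions import ClientError
from moto import mock_sqs


@mock_sqs
def test_create_queue_twice_sketch():
    sqs = boto3.client('sqs', region_name='us-east-1')
    sqs.create_queue(QueueName='my-queue', Attributes={'VisibilityTimeout': '60'})
    # Same attributes: no error.
    sqs.create_queue(QueueName='my-queue', Attributes={'VisibilityTimeout': '60'})
    # Different attributes: rejected.
    try:
        sqs.create_queue(QueueName='my-queue', Attributes={'VisibilityTimeout': '120'})
    except ClientError as err:
        print(err.response['Error']['Code'])  # 'QueueAlreadyExists'
```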

View File

@ -336,7 +336,7 @@ class SQSResponse(BaseResponse):
try:
wait_time = int(self.querystring.get("WaitTimeSeconds")[0])
except TypeError:
wait_time = queue.receive_message_wait_time_seconds
wait_time = int(queue.receive_message_wait_time_seconds)
if wait_time < 0 or wait_time > 20:
return self._error(

View File

@ -5,10 +5,12 @@ from collections import defaultdict
from moto.core import BaseBackend, BaseModel
from moto.core.exceptions import RESTError
from moto.ec2 import ec2_backends
from moto.cloudformation import cloudformation_backends
import datetime
import time
import uuid
import itertools
class Parameter(BaseModel):
@ -67,7 +69,7 @@ class Command(BaseModel):
instance_ids=None, max_concurrency='', max_errors='',
notification_config=None, output_s3_bucket_name='',
output_s3_key_prefix='', output_s3_region='', parameters=None,
service_role_arn='', targets=None):
service_role_arn='', targets=None, backend_region='us-east-1'):
if instance_ids is None:
instance_ids = []
@ -88,9 +90,9 @@ class Command(BaseModel):
self.status = 'Success'
self.status_details = 'Details placeholder'
now = datetime.datetime.now()
self.requested_date_time = now.isoformat()
expires_after = now + datetime.timedelta(0, timeout_seconds)
self.requested_date_time = datetime.datetime.now()
self.requested_date_time_iso = self.requested_date_time.isoformat()
expires_after = self.requested_date_time + datetime.timedelta(0, timeout_seconds)
self.expires_after = expires_after.isoformat()
self.comment = comment
@ -105,6 +107,32 @@ class Command(BaseModel):
self.parameters = parameters
self.service_role_arn = service_role_arn
self.targets = targets
self.backend_region = backend_region
# Get instance ids from a cloud formation stack target.
stack_instance_ids = [self.get_instance_ids_by_stack_ids(target['Values']) for
target in self.targets if
target['Key'] == 'tag:aws:cloudformation:stack-name']
self.instance_ids += list(itertools.chain.from_iterable(stack_instance_ids))
# Create invocations with a single run command plugin.
self.invocations = []
for instance_id in self.instance_ids:
self.invocations.append(
self.invocation_response(instance_id, "aws:runShellScript"))
def get_instance_ids_by_stack_ids(self, stack_ids):
instance_ids = []
cloudformation_backend = cloudformation_backends[self.backend_region]
for stack_id in stack_ids:
stack_resources = cloudformation_backend.list_stack_resources(stack_id)
instance_resources = [
instance.id for instance in stack_resources
if instance.type == "AWS::EC2::Instance"]
instance_ids.extend(instance_resources)
return instance_ids
def response_object(self):
r = {
@ -122,7 +150,7 @@ class Command(BaseModel):
'OutputS3BucketName': self.output_s3_bucket_name,
'OutputS3KeyPrefix': self.output_s3_key_prefix,
'Parameters': self.parameters,
'RequestedDateTime': self.requested_date_time,
'RequestedDateTime': self.requested_date_time_iso,
'ServiceRole': self.service_role_arn,
'Status': self.status,
'StatusDetails': self.status_details,
@ -132,6 +160,50 @@ class Command(BaseModel):
return r
def invocation_response(self, instance_id, plugin_name):
# Calculate elapsed time from requested time and now. Use a hardcoded
# elapsed time since there is no easy way to convert a timedelta to
# an ISO 8601 duration string.
elapsed_time_iso = "PT5M"
elapsed_time_delta = datetime.timedelta(minutes=5)
end_time = self.requested_date_time + elapsed_time_delta
r = {
'CommandId': self.command_id,
'InstanceId': instance_id,
'Comment': self.comment,
'DocumentName': self.document_name,
'PluginName': plugin_name,
'ResponseCode': 0,
'ExecutionStartDateTime': self.requested_date_time_iso,
'ExecutionElapsedTime': elapsed_time_iso,
'ExecutionEndDateTime': end_time.isoformat(),
'Status': 'Success',
'StatusDetails': 'Success',
'StandardOutputContent': '',
'StandardOutputUrl': '',
'StandardErrorContent': '',
}
return r
def get_invocation(self, instance_id, plugin_name):
invocation = next(
(invocation for invocation in self.invocations
if invocation['InstanceId'] == instance_id), None)
if invocation is None:
raise RESTError(
'InvocationDoesNotExist',
'An error occurred (InvocationDoesNotExist) when calling the GetCommandInvocation operation')
if plugin_name is not None and invocation['PluginName'] != plugin_name:
raise RESTError(
'InvocationDoesNotExist',
'An error occurred (InvocationDoesNotExist) when calling the GetCommandInvocation operation')
return invocation
class SimpleSystemManagerBackend(BaseBackend):
@ -140,6 +212,11 @@ class SimpleSystemManagerBackend(BaseBackend):
self._resource_tags = defaultdict(lambda: defaultdict(dict))
self._commands = []
# figure out what region we're in
for region, backend in ssm_backends.items():
if backend == self:
self._region = region
def delete_parameter(self, name):
try:
del self._parameters[name]
@ -260,7 +337,8 @@ class SimpleSystemManagerBackend(BaseBackend):
output_s3_region=kwargs.get('OutputS3Region', ''),
parameters=kwargs.get('Parameters', {}),
service_role_arn=kwargs.get('ServiceRoleArn', ''),
targets=kwargs.get('Targets', []))
targets=kwargs.get('Targets', []),
backend_region=self._region)
self._commands.append(command)
return {
@ -298,6 +376,18 @@ class SimpleSystemManagerBackend(BaseBackend):
command for command in self._commands
if instance_id in command.instance_ids]
def get_command_invocation(self, **kwargs):
"""
https://docs.aws.amazon.com/systems-manager/latest/APIReference/API_GetCommandInvocation.html
"""
command_id = kwargs.get('CommandId')
instance_id = kwargs.get('InstanceId')
plugin_name = kwargs.get('PluginName', None)
command = self.get_command_by_id(command_id)
return command.get_invocation(instance_id, plugin_name)
ssm_backends = {}
for region, ec2_backend in ec2_backends.items():

View File

@ -210,3 +210,8 @@ class SimpleSystemManagerResponse(BaseResponse):
return json.dumps(
self.ssm_backend.list_commands(**self.request_params)
)
def get_command_invocation(self):
return json.dumps(
self.ssm_backend.get_command_invocation(**self.request_params)
)
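A sketch of the new `get_command_invocation` call with boto3 against `mock_ec2`/`mock_ssm`; the AMI id and the shell command are placeholders:

```python
import boto3
from moto import mock_ec2, mock_ssm


@mock_ec2
@mock_ssm
def test_get_command_invocation_sketch():
    ec2 = boto3.resource('ec2', region_name='us-east-1')
    instance = ec2.create_instances(ImageId='ami-12345678', MinCount=1, MaxCount=1)[0]
    ssm = boto3.client('ssm', region_name='us-east-1')
    command = ssm.send_command(
        InstanceIds=[instance.id],
        DocumentName='AWS-RunShellScript',
        Parameters={'commands': ['echo hello']},
    )['Command']
    invocation = ssm.get_command_invocation(
        CommandId=command['CommandId'],
        InstanceId=instance.id,
        PluginName='aws:runShellScript',
    )
    print(invocation['Status'])  # 'Success'
```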

View File

@ -1,7 +1,7 @@
-r requirements.txt
mock
nose
sure==1.2.24
sure==1.4.11
coverage
flake8==3.5.0
freezegun
@ -13,5 +13,5 @@ six>=1.9
prompt-toolkit==1.0.14
click==6.7
inflection==0.3.1
lxml==4.0.0
lxml==4.2.3
beautifulsoup4==4.6.0

View File

@ -10,12 +10,13 @@ script_dir = os.path.dirname(os.path.abspath(__file__))
def get_moto_implementation(service_name):
if not hasattr(moto, service_name):
service_name_standardized = service_name.replace("-", "") if "-" in service_name else service_name
if not hasattr(moto, service_name_standardized):
return None
module = getattr(moto, service_name)
module = getattr(moto, service_name_standardized)
if module is None:
return None
mock = getattr(module, "mock_{}".format(service_name))
mock = getattr(module, "mock_{}".format(service_name_standardized))
if mock is None:
return None
backends = list(mock().backends.values())

View File

@ -8,10 +8,9 @@ import sys
install_requires = [
"Jinja2>=2.7.3",
"boto>=2.36.0",
"boto3>=1.6.16",
"botocore>=1.9.16",
"cookies",
"cryptography>=2.0.0",
"boto3>=1.6.16,<1.8",
"botocore>=1.9.16,<1.11",
"cryptography>=2.3.0",
"requests>=2.5",
"xmltodict",
"six>1.9",
@ -41,7 +40,7 @@ else:
setup(
name='moto',
version='1.3.4',
version='1.3.6',
description='A library that allows your python tests to easily'
' mock out the boto library',
author='Steve Pulec',

View File

@ -85,3 +85,14 @@ class TesterWithSetup(unittest.TestCase):
def test_still_the_same(self):
bucket = self.conn.get_bucket('mybucket')
bucket.name.should.equal("mybucket")
@mock_s3_deprecated
class TesterWithStaticmethod(object):
@staticmethod
def static(*args):
assert not args or not isinstance(args[0], TesterWithStaticmethod)
def test_no_instance_sent_to_staticmethod(self):
self.static()

View File

@ -201,6 +201,48 @@ def test_item_add_empty_string_exception():
)
@requires_boto_gte("2.9")
@mock_dynamodb2
def test_update_item_with_empty_string_exception():
name = 'TestTable'
conn = boto3.client('dynamodb',
region_name='us-west-2',
aws_access_key_id="ak",
aws_secret_access_key="sk")
conn.create_table(TableName=name,
KeySchema=[{'AttributeName':'forum_name','KeyType':'HASH'}],
AttributeDefinitions=[{'AttributeName':'forum_name','AttributeType':'S'}],
ProvisionedThroughput={'ReadCapacityUnits':5,'WriteCapacityUnits':5})
conn.put_item(
TableName=name,
Item={
'forum_name': { 'S': 'LOLCat Forum' },
'subject': { 'S': 'Check this out!' },
'Body': { 'S': 'http://url_to_lolcat.gif'},
'SentBy': { 'S': "test" },
'ReceivedTime': { 'S': '12/9/2011 11:36:03 PM'},
}
)
with assert_raises(ClientError) as ex:
conn.update_item(
TableName=name,
Key={
'forum_name': { 'S': 'LOLCat Forum'},
},
UpdateExpression='set Body=:Body',
ExpressionAttributeValues={
':Body': {'S': ''}
})
ex.exception.response['Error']['Code'].should.equal('ValidationException')
ex.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400)
ex.exception.response['Error']['Message'].should.equal(
'One or more parameter values were invalid: An AttributeValue may not contain an empty string'
)
@requires_boto_gte("2.9")
@mock_dynamodb2
def test_query_invalid_table():

View File

@ -2,12 +2,15 @@ from __future__ import unicode_literals
# Ensure 'assert_raises' context manager support for Python 2.6
import tests.backport_assert_raises
from nose.tools import assert_raises
from moto.ec2.exceptions import EC2ClientError
from botocore.exceptions import ClientError
import boto3
import boto
from boto.exception import EC2ResponseError
import sure # noqa
from moto import mock_ec2_deprecated
from moto import mock_ec2, mock_ec2_deprecated
from tests.helpers import requires_boto_gte
@ -93,3 +96,37 @@ def test_vpc_peering_connections_delete():
cm.exception.code.should.equal('InvalidVpcPeeringConnectionId.NotFound')
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
@mock_ec2
def test_vpc_peering_connections_cross_region():
# create vpc in us-west-1 and ap-northeast-1
ec2_usw1 = boto3.resource('ec2', region_name='us-west-1')
vpc_usw1 = ec2_usw1.create_vpc(CidrBlock='10.90.0.0/16')
ec2_apn1 = boto3.resource('ec2', region_name='ap-northeast-1')
vpc_apn1 = ec2_apn1.create_vpc(CidrBlock='10.20.0.0/16')
# create peering
vpc_pcx = ec2_usw1.create_vpc_peering_connection(
VpcId=vpc_usw1.id,
PeerVpcId=vpc_apn1.id,
PeerRegion='ap-northeast-1',
)
vpc_pcx.status['Code'].should.equal('initiating-request')
vpc_pcx.requester_vpc.id.should.equal(vpc_usw1.id)
vpc_pcx.accepter_vpc.id.should.equal(vpc_apn1.id)
@mock_ec2
def test_vpc_peering_connections_cross_region_fail():
# create vpc in us-west-1 and ap-northeast-1
ec2_usw1 = boto3.resource('ec2', region_name='us-west-1')
vpc_usw1 = ec2_usw1.create_vpc(CidrBlock='10.90.0.0/16')
ec2_apn1 = boto3.resource('ec2', region_name='ap-northeast-1')
vpc_apn1 = ec2_apn1.create_vpc(CidrBlock='10.20.0.0/16')
# create peering wrong region with no vpc
with assert_raises(ClientError) as cm:
ec2_usw1.create_vpc_peering_connection(
VpcId=vpc_usw1.id,
PeerVpcId=vpc_apn1.id,
PeerRegion='ap-northeast-2')
cm.exception.response['Error']['Code'].should.equal('InvalidVpcID.NotFound')

View File

@ -45,7 +45,8 @@ def _create_image_manifest():
{
"mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
"size": 73109,
"digest": _create_image_digest("layer3")
# randomize image digest
"digest": _create_image_digest()
}
]
}
@ -197,6 +198,47 @@ def test_put_image():
response['image']['repositoryName'].should.equal('test_repository')
response['image']['registryId'].should.equal('012345678910')
@mock_ecr
def test_put_image_with_multiple_tags():
client = boto3.client('ecr', region_name='us-east-1')
_ = client.create_repository(
repositoryName='test_repository'
)
manifest = _create_image_manifest()
response = client.put_image(
repositoryName='test_repository',
imageManifest=json.dumps(manifest),
imageTag='v1'
)
response['image']['imageId']['imageTag'].should.equal('v1')
response['image']['imageId']['imageDigest'].should.contain("sha")
response['image']['repositoryName'].should.equal('test_repository')
response['image']['registryId'].should.equal('012345678910')
response1 = client.put_image(
repositoryName='test_repository',
imageManifest=json.dumps(manifest),
imageTag='latest'
)
response1['image']['imageId']['imageTag'].should.equal('latest')
response1['image']['imageId']['imageDigest'].should.contain("sha")
response1['image']['repositoryName'].should.equal('test_repository')
response1['image']['registryId'].should.equal('012345678910')
response2 = client.describe_images(repositoryName='test_repository')
type(response2['imageDetails']).should.be(list)
len(response2['imageDetails']).should.be(1)
response2['imageDetails'][0]['imageDigest'].should.contain("sha")
response2['imageDetails'][0]['registryId'].should.equal("012345678910")
response2['imageDetails'][0]['repositoryName'].should.equal("test_repository")
len(response2['imageDetails'][0]['imageTags']).should.be(2)
response2['imageDetails'][0]['imageTags'].should.be.equal(['v1', 'latest'])
@mock_ecr
def test_list_images():
@ -281,6 +323,11 @@ def test_describe_images():
repositoryName='test_repository'
)
_ = client.put_image(
repositoryName='test_repository',
imageManifest=json.dumps(_create_image_manifest())
)
_ = client.put_image(
repositoryName='test_repository',
imageManifest=json.dumps(_create_image_manifest()),
@ -301,32 +348,37 @@ def test_describe_images():
response = client.describe_images(repositoryName='test_repository')
type(response['imageDetails']).should.be(list)
len(response['imageDetails']).should.be(3)
len(response['imageDetails']).should.be(4)
response['imageDetails'][0]['imageDigest'].should.contain("sha")
response['imageDetails'][1]['imageDigest'].should.contain("sha")
response['imageDetails'][2]['imageDigest'].should.contain("sha")
response['imageDetails'][3]['imageDigest'].should.contain("sha")
response['imageDetails'][0]['registryId'].should.equal("012345678910")
response['imageDetails'][1]['registryId'].should.equal("012345678910")
response['imageDetails'][2]['registryId'].should.equal("012345678910")
response['imageDetails'][3]['registryId'].should.equal("012345678910")
response['imageDetails'][0]['repositoryName'].should.equal("test_repository")
response['imageDetails'][1]['repositoryName'].should.equal("test_repository")
response['imageDetails'][2]['repositoryName'].should.equal("test_repository")
response['imageDetails'][3]['repositoryName'].should.equal("test_repository")
len(response['imageDetails'][0]['imageTags']).should.be(1)
response['imageDetails'][0].should_not.have.key('imageTags')
len(response['imageDetails'][1]['imageTags']).should.be(1)
len(response['imageDetails'][2]['imageTags']).should.be(1)
len(response['imageDetails'][3]['imageTags']).should.be(1)
image_tags = ['latest', 'v1', 'v2']
set([response['imageDetails'][0]['imageTags'][0],
response['imageDetails'][1]['imageTags'][0],
response['imageDetails'][2]['imageTags'][0]]).should.equal(set(image_tags))
set([response['imageDetails'][1]['imageTags'][0],
response['imageDetails'][2]['imageTags'][0],
response['imageDetails'][3]['imageTags'][0]]).should.equal(set(image_tags))
response['imageDetails'][0]['imageSizeInBytes'].should.equal(52428800)
response['imageDetails'][1]['imageSizeInBytes'].should.equal(52428800)
response['imageDetails'][2]['imageSizeInBytes'].should.equal(52428800)
response['imageDetails'][3]['imageSizeInBytes'].should.equal(52428800)
@mock_ecr
@ -355,6 +407,68 @@ def test_describe_images_by_tag():
image_detail['imageDigest'].should.equal(put_response['imageId']['imageDigest'])
@mock_ecr
def test_describe_images_tags_should_not_contain_empty_tag1():
client = boto3.client('ecr', region_name='us-east-1')
_ = client.create_repository(
repositoryName='test_repository'
)
manifest = _create_image_manifest()
client.put_image(
repositoryName='test_repository',
imageManifest=json.dumps(manifest)
)
tags = ['v1', 'v2', 'latest']
for tag in tags:
client.put_image(
repositoryName='test_repository',
imageManifest=json.dumps(manifest),
imageTag=tag
)
response = client.describe_images(repositoryName='test_repository', imageIds=[{'imageTag': tag}])
len(response['imageDetails']).should.be(1)
image_detail = response['imageDetails'][0]
len(image_detail['imageTags']).should.equal(3)
image_detail['imageTags'].should.be.equal(tags)
@mock_ecr
def test_describe_images_tags_should_not_contain_empty_tag2():
client = boto3.client('ecr', region_name='us-east-1')
_ = client.create_repository(
repositoryName='test_repository'
)
manifest = _create_image_manifest()
tags = ['v1', 'v2']
for tag in tags:
client.put_image(
repositoryName='test_repository',
imageManifest=json.dumps(manifest),
imageTag=tag
)
client.put_image(
repositoryName='test_repository',
imageManifest=json.dumps(manifest)
)
client.put_image(
repositoryName='test_repository',
imageManifest=json.dumps(manifest),
imageTag='latest'
)
response = client.describe_images(repositoryName='test_repository', imageIds=[{'imageTag': tag}])
len(response['imageDetails']).should.be(1)
image_detail = response['imageDetails'][0]
len(image_detail['imageTags']).should.equal(3)
image_detail['imageTags'].should.be.equal(['v1', 'v2', 'latest'])
@mock_ecr
def test_describe_repository_that_doesnt_exist():
client = boto3.client('ecr', region_name='us-east-1')

View File

@ -304,6 +304,52 @@ def test_create_service():
response['service']['status'].should.equal('ACTIVE')
response['service']['taskDefinition'].should.equal(
'arn:aws:ecs:us-east-1:012345678910:task-definition/test_ecs_task:1')
response['service']['schedulingStrategy'].should.equal('REPLICA')
@mock_ecs
def test_create_service_scheduling_strategy():
client = boto3.client('ecs', region_name='us-east-1')
_ = client.create_cluster(
clusterName='test_ecs_cluster'
)
_ = client.register_task_definition(
family='test_ecs_task',
containerDefinitions=[
{
'name': 'hello_world',
'image': 'docker/hello-world:latest',
'cpu': 1024,
'memory': 400,
'essential': True,
'environment': [{
'name': 'AWS_ACCESS_KEY_ID',
'value': 'SOME_ACCESS_KEY'
}],
'logConfiguration': {'logDriver': 'json-file'}
}
]
)
response = client.create_service(
cluster='test_ecs_cluster',
serviceName='test_ecs_service',
taskDefinition='test_ecs_task',
desiredCount=2,
schedulingStrategy='DAEMON',
)
response['service']['clusterArn'].should.equal(
'arn:aws:ecs:us-east-1:012345678910:cluster/test_ecs_cluster')
response['service']['desiredCount'].should.equal(2)
len(response['service']['events']).should.equal(0)
len(response['service']['loadBalancers']).should.equal(0)
response['service']['pendingCount'].should.equal(0)
response['service']['runningCount'].should.equal(0)
response['service']['serviceArn'].should.equal(
'arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service')
response['service']['serviceName'].should.equal('test_ecs_service')
response['service']['status'].should.equal('ACTIVE')
response['service']['taskDefinition'].should.equal(
'arn:aws:ecs:us-east-1:012345678910:task-definition/test_ecs_task:1')
response['service']['schedulingStrategy'].should.equal('DAEMON')
@mock_ecs
@ -411,6 +457,72 @@ def test_describe_services():
response['services'][0]['deployments'][0]['status'].should.equal('PRIMARY')
@mock_ecs
def test_describe_services_scheduling_strategy():
client = boto3.client('ecs', region_name='us-east-1')
_ = client.create_cluster(
clusterName='test_ecs_cluster'
)
_ = client.register_task_definition(
family='test_ecs_task',
containerDefinitions=[
{
'name': 'hello_world',
'image': 'docker/hello-world:latest',
'cpu': 1024,
'memory': 400,
'essential': True,
'environment': [{
'name': 'AWS_ACCESS_KEY_ID',
'value': 'SOME_ACCESS_KEY'
}],
'logConfiguration': {'logDriver': 'json-file'}
}
]
)
_ = client.create_service(
cluster='test_ecs_cluster',
serviceName='test_ecs_service1',
taskDefinition='test_ecs_task',
desiredCount=2
)
_ = client.create_service(
cluster='test_ecs_cluster',
serviceName='test_ecs_service2',
taskDefinition='test_ecs_task',
desiredCount=2,
schedulingStrategy='DAEMON'
)
_ = client.create_service(
cluster='test_ecs_cluster',
serviceName='test_ecs_service3',
taskDefinition='test_ecs_task',
desiredCount=2
)
response = client.describe_services(
cluster='test_ecs_cluster',
services=['test_ecs_service1',
'arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service2',
'test_ecs_service3']
)
len(response['services']).should.equal(3)
response['services'][0]['serviceArn'].should.equal(
'arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service1')
response['services'][0]['serviceName'].should.equal('test_ecs_service1')
response['services'][1]['serviceArn'].should.equal(
'arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service2')
response['services'][1]['serviceName'].should.equal('test_ecs_service2')
response['services'][0]['deployments'][0]['desiredCount'].should.equal(2)
response['services'][0]['deployments'][0]['pendingCount'].should.equal(2)
response['services'][0]['deployments'][0]['runningCount'].should.equal(0)
response['services'][0]['deployments'][0]['status'].should.equal('PRIMARY')
response['services'][0]['schedulingStrategy'].should.equal('REPLICA')
response['services'][1]['schedulingStrategy'].should.equal('DAEMON')
response['services'][2]['schedulingStrategy'].should.equal('REPLICA')
@mock_ecs
def test_update_service():
client = boto3.client('ecs', region_name='us-east-1')
@ -449,6 +561,7 @@ def test_update_service():
desiredCount=0
)
response['service']['desiredCount'].should.equal(0)
response['service']['schedulingStrategy'].should.equal('REPLICA')
@mock_ecs
@ -515,8 +628,10 @@ def test_delete_service():
'arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service')
response['service']['serviceName'].should.equal('test_ecs_service')
response['service']['status'].should.equal('ACTIVE')
response['service']['schedulingStrategy'].should.equal('REPLICA')
response['service']['taskDefinition'].should.equal(
'arn:aws:ecs:us-east-1:012345678910:task-definition/test_ecs_task:1')
@mock_ec2

View File

@ -723,6 +723,40 @@ def test_describe_instance_health():
instances_health[0].state.should.equal('InService')
@mock_ec2
@mock_elb
def test_describe_instance_health_boto3():
elb = boto3.client('elb', region_name="us-east-1")
ec2 = boto3.client('ec2', region_name="us-east-1")
instances = ec2.run_instances(MinCount=2, MaxCount=2)['Instances']
lb_name = "my_load_balancer"
elb.create_load_balancer(
Listeners=[{
'InstancePort': 80,
'LoadBalancerPort': 8080,
'Protocol': 'HTTP'
}],
LoadBalancerName=lb_name,
)
elb.register_instances_with_load_balancer(
LoadBalancerName=lb_name,
Instances=[{'InstanceId': instances[0]['InstanceId']}]
)
instances_health = elb.describe_instance_health(
LoadBalancerName=lb_name,
Instances=[{'InstanceId': instance['InstanceId']} for instance in instances]
)
instances_health['InstanceStates'].should.have.length_of(2)
instances_health['InstanceStates'][0]['InstanceId'].\
should.equal(instances[0]['InstanceId'])
instances_health['InstanceStates'][0]['State'].\
should.equal('InService')
instances_health['InstanceStates'][1]['InstanceId'].\
should.equal(instances[1]['InstanceId'])
instances_health['InstanceStates'][1]['State'].\
should.equal('Unknown')
@mock_elb
def test_add_remove_tags():
client = boto3.client('elb', region_name='us-east-1')

View File

@ -286,6 +286,16 @@ def test_create_policy_versions():
PolicyDocument='{"some":"policy"}')
version.get('PolicyVersion').get('Document').should.equal({'some': 'policy'})
@mock_iam
def test_get_policy():
conn = boto3.client('iam', region_name='us-east-1')
response = conn.create_policy(
PolicyName="TestGetPolicy",
PolicyDocument='{"some":"policy"}')
policy = conn.get_policy(
PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicy")
response['Policy']['Arn'].should.equal("arn:aws:iam::123456789012:policy/TestGetPolicy")
@mock_iam
def test_get_policy_version():
@ -314,17 +324,22 @@ def test_list_policy_versions():
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions")
conn.create_policy(
PolicyName="TestListPolicyVersions",
PolicyDocument='{"some":"policy"}')
conn.create_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions",
PolicyDocument='{"first":"policy"}')
versions = conn.list_policy_versions(
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions")
versions.get('Versions')[0].get('VersionId').should.equal('v1')
conn.create_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions",
PolicyDocument='{"second":"policy"}')
conn.create_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions",
PolicyDocument='{"third":"policy"}')
versions = conn.list_policy_versions(
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions")
versions.get('Versions')[0].get('Document').should.equal({'first': 'policy'})
print(versions.get('Versions'))
versions.get('Versions')[1].get('Document').should.equal({'second': 'policy'})
versions.get('Versions')[2].get('Document').should.equal({'third': 'policy'})
@mock_iam
@ -332,20 +347,20 @@ def test_delete_policy_version():
conn = boto3.client('iam', region_name='us-east-1')
conn.create_policy(
PolicyName="TestDeletePolicyVersion",
PolicyDocument='{"some":"policy"}')
PolicyDocument='{"first":"policy"}')
conn.create_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion",
PolicyDocument='{"first":"policy"}')
PolicyDocument='{"second":"policy"}')
with assert_raises(ClientError):
conn.delete_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion",
VersionId='v2-nope-this-does-not-exist')
conn.delete_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion",
VersionId='v1')
VersionId='v2')
versions = conn.list_policy_versions(
PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion")
len(versions.get('Versions')).should.equal(0)
len(versions.get('Versions')).should.equal(1)
@mock_iam_deprecated()

View File

@ -1,8 +1,9 @@
from __future__ import unicode_literals
import boto3
import sure # noqa
import json
import sure # noqa
import boto3
from moto import mock_iot
@ -63,6 +64,166 @@ def test_things():
res.should.have.key('thingTypes').which.should.have.length_of(0)
@mock_iot
def test_list_thing_types():
client = boto3.client('iot', region_name='ap-northeast-1')
for i in range(0, 100):
client.create_thing_type(thingTypeName=str(i + 1))
thing_types = client.list_thing_types()
thing_types.should.have.key('nextToken')
thing_types.should.have.key('thingTypes').which.should.have.length_of(50)
thing_types['thingTypes'][0]['thingTypeName'].should.equal('1')
thing_types['thingTypes'][-1]['thingTypeName'].should.equal('50')
thing_types = client.list_thing_types(nextToken=thing_types['nextToken'])
thing_types.should.have.key('thingTypes').which.should.have.length_of(50)
thing_types.should_not.have.key('nextToken')
thing_types['thingTypes'][0]['thingTypeName'].should.equal('51')
thing_types['thingTypes'][-1]['thingTypeName'].should.equal('100')
@mock_iot
def test_list_thing_types_with_typename_filter():
client = boto3.client('iot', region_name='ap-northeast-1')
client.create_thing_type(thingTypeName='thing')
client.create_thing_type(thingTypeName='thingType')
client.create_thing_type(thingTypeName='thingTypeName')
client.create_thing_type(thingTypeName='thingTypeNameGroup')
client.create_thing_type(thingTypeName='shouldNotFind')
client.create_thing_type(thingTypeName='find me it shall not')
thing_types = client.list_thing_types(thingTypeName='thing')
thing_types.should_not.have.key('nextToken')
thing_types.should.have.key('thingTypes').which.should.have.length_of(4)
thing_types['thingTypes'][0]['thingTypeName'].should.equal('thing')
thing_types['thingTypes'][-1]['thingTypeName'].should.equal('thingTypeNameGroup')
thing_types = client.list_thing_types(thingTypeName='thingTypeName')
thing_types.should_not.have.key('nextToken')
thing_types.should.have.key('thingTypes').which.should.have.length_of(2)
thing_types['thingTypes'][0]['thingTypeName'].should.equal('thingTypeName')
thing_types['thingTypes'][-1]['thingTypeName'].should.equal('thingTypeNameGroup')
@mock_iot
def test_list_things_with_next_token():
client = boto3.client('iot', region_name='ap-northeast-1')
for i in range(0, 200):
client.create_thing(thingName=str(i + 1))
things = client.list_things()
things.should.have.key('nextToken')
things.should.have.key('things').which.should.have.length_of(50)
things['things'][0]['thingName'].should.equal('1')
things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/1')
things['things'][-1]['thingName'].should.equal('50')
things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/50')
things = client.list_things(nextToken=things['nextToken'])
things.should.have.key('nextToken')
things.should.have.key('things').which.should.have.length_of(50)
things['things'][0]['thingName'].should.equal('51')
things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/51')
things['things'][-1]['thingName'].should.equal('100')
things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/100')
things = client.list_things(nextToken=things['nextToken'])
things.should.have.key('nextToken')
things.should.have.key('things').which.should.have.length_of(50)
things['things'][0]['thingName'].should.equal('101')
things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/101')
things['things'][-1]['thingName'].should.equal('150')
things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/150')
things = client.list_things(nextToken=things['nextToken'])
things.should_not.have.key('nextToken')
things.should.have.key('things').which.should.have.length_of(50)
things['things'][0]['thingName'].should.equal('151')
things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/151')
things['things'][-1]['thingName'].should.equal('200')
things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/200')
@mock_iot
def test_list_things_with_attribute_and_thing_type_filter_and_next_token():
client = boto3.client('iot', region_name='ap-northeast-1')
client.create_thing_type(thingTypeName='my-thing-type')
for i in range(0, 200):
if not (i + 1) % 3:
attribute_payload = {
'attributes': {
'foo': 'bar'
}
}
elif not (i + 1) % 5:
attribute_payload = {
'attributes': {
'bar': 'foo'
}
}
else:
attribute_payload = {}
if not (i + 1) % 2:
thing_type_name = 'my-thing-type'
client.create_thing(thingName=str(i + 1), thingTypeName=thing_type_name, attributePayload=attribute_payload)
else:
client.create_thing(thingName=str(i + 1), attributePayload=attribute_payload)
# Test filter for thingTypeName
things = client.list_things(thingTypeName=thing_type_name)
things.should.have.key('nextToken')
things.should.have.key('things').which.should.have.length_of(50)
things['things'][0]['thingName'].should.equal('2')
things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/2')
things['things'][-1]['thingName'].should.equal('100')
things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/100')
assert all(item['thingTypeName'] == thing_type_name for item in things['things'])
things = client.list_things(nextToken=things['nextToken'], thingTypeName=thing_type_name)
things.should_not.have.key('nextToken')
things.should.have.key('things').which.should.have.length_of(50)
things['things'][0]['thingName'].should.equal('102')
things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/102')
things['things'][-1]['thingName'].should.equal('200')
things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/200')
assert all(item['thingTypeName'] == thing_type_name for item in things['things'])
# Test filter for attributes
things = client.list_things(attributeName='foo', attributeValue='bar')
things.should.have.key('nextToken')
things.should.have.key('things').which.should.have.length_of(50)
things['things'][0]['thingName'].should.equal('3')
things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/3')
things['things'][-1]['thingName'].should.equal('150')
things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/150')
assert all(item['attributes'] == {'foo': 'bar'} for item in things['things'])
things = client.list_things(nextToken=things['nextToken'], attributeName='foo', attributeValue='bar')
things.should_not.have.key('nextToken')
things.should.have.key('things').which.should.have.length_of(16)
things['things'][0]['thingName'].should.equal('153')
things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/153')
things['things'][-1]['thingName'].should.equal('198')
things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/198')
assert all(item['attributes'] == {'foo': 'bar'} for item in things['things'])
# Test filter for attributes and thingTypeName
things = client.list_things(thingTypeName=thing_type_name, attributeName='foo', attributeValue='bar')
things.should_not.have.key('nextToken')
things.should.have.key('things').which.should.have.length_of(33)
things['things'][0]['thingName'].should.equal('6')
things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/6')
things['things'][-1]['thingName'].should.equal('198')
things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/198')
assert all(item['attributes'] == {'foo': 'bar'} and item['thingTypeName'] == thing_type_name for item in things['things'])
@mock_iot
def test_certs():
client = boto3.client('iot', region_name='ap-northeast-1')
@ -204,7 +365,6 @@ def test_principal_thing():
@mock_iot
def test_thing_groups():
client = boto3.client('iot', region_name='ap-northeast-1')
name = 'my-thing'
group_name = 'my-group-name'
# thing group
@ -424,6 +584,7 @@ def test_create_job():
job.should.have.key('jobArn')
job.should.have.key('description')
@mock_iot
def test_describe_job():
client = boto3.client('iot', region_name='eu-west-1')

View File

@ -1,5 +1,6 @@
import boto3
import sure # noqa
import six
from botocore.exceptions import ClientError
from moto import mock_logs, settings
@ -47,7 +48,7 @@ def test_exceptions():
logEvents=[
{
'timestamp': 0,
'message': 'line'
'message': 'line'
},
],
)
@ -79,7 +80,7 @@ def test_put_logs():
{'timestamp': 0, 'message': 'hello'},
{'timestamp': 0, 'message': 'world'}
]
conn.put_log_events(
putRes = conn.put_log_events(
logGroupName=log_group_name,
logStreamName=log_stream_name,
logEvents=messages
@ -89,6 +90,9 @@ def test_put_logs():
logStreamName=log_stream_name
)
events = res['events']
nextSequenceToken = putRes['nextSequenceToken']
assert isinstance(nextSequenceToken, six.string_types)
assert len(nextSequenceToken) == 56
events.should.have.length_of(2)

View File

@ -33,6 +33,7 @@ def test_create_database():
db_instance['DBInstanceIdentifier'].should.equal("db-master-1")
db_instance['IAMDatabaseAuthenticationEnabled'].should.equal(False)
db_instance['DbiResourceId'].should.contain("db-")
db_instance['CopyTagsToSnapshot'].should.equal(False)
@mock_rds2
@ -339,6 +340,49 @@ def test_create_db_snapshots():
snapshot.get('Engine').should.equal('postgres')
snapshot.get('DBInstanceIdentifier').should.equal('db-primary-1')
snapshot.get('DBSnapshotIdentifier').should.equal('g-1')
result = conn.list_tags_for_resource(ResourceName=snapshot['DBSnapshotArn'])
result['TagList'].should.equal([])
@mock_rds2
def test_create_db_snapshots_copy_tags():
conn = boto3.client('rds', region_name='us-west-2')
conn.create_db_snapshot.when.called_with(
DBInstanceIdentifier='db-primary-1',
DBSnapshotIdentifier='snapshot-1').should.throw(ClientError)
conn.create_db_instance(DBInstanceIdentifier='db-primary-1',
AllocatedStorage=10,
Engine='postgres',
DBName='staging-postgres',
DBInstanceClass='db.m1.small',
MasterUsername='root',
MasterUserPassword='hunter2',
Port=1234,
DBSecurityGroups=["my_sg"],
CopyTagsToSnapshot=True,
Tags=[
{
'Key': 'foo',
'Value': 'bar',
},
{
'Key': 'foo1',
'Value': 'bar1',
},
])
snapshot = conn.create_db_snapshot(DBInstanceIdentifier='db-primary-1',
DBSnapshotIdentifier='g-1').get('DBSnapshot')
snapshot.get('Engine').should.equal('postgres')
snapshot.get('DBInstanceIdentifier').should.equal('db-primary-1')
snapshot.get('DBSnapshotIdentifier').should.equal('g-1')
result = conn.list_tags_for_resource(ResourceName=snapshot['DBSnapshotArn'])
result['TagList'].should.equal([{'Value': 'bar',
'Key': 'foo'},
{'Value': 'bar1',
'Key': 'foo1'}])
@mock_rds2
@ -656,6 +700,117 @@ def test_remove_tags_db():
len(result['TagList']).should.equal(1)
@mock_rds2
def test_list_tags_snapshot():
conn = boto3.client('rds', region_name='us-west-2')
result = conn.list_tags_for_resource(
ResourceName='arn:aws:rds:us-west-2:1234567890:snapshot:foo')
result['TagList'].should.equal([])
conn.create_db_instance(DBInstanceIdentifier='db-primary-1',
AllocatedStorage=10,
Engine='postgres',
DBName='staging-postgres',
DBInstanceClass='db.m1.small',
MasterUsername='root',
MasterUserPassword='hunter2',
Port=1234,
DBSecurityGroups=["my_sg"])
snapshot = conn.create_db_snapshot(DBInstanceIdentifier='db-primary-1',
DBSnapshotIdentifier='snapshot-with-tags',
Tags=[
{
'Key': 'foo',
'Value': 'bar',
},
{
'Key': 'foo1',
'Value': 'bar1',
},
])
result = conn.list_tags_for_resource(ResourceName=snapshot['DBSnapshot']['DBSnapshotArn'])
result['TagList'].should.equal([{'Value': 'bar',
'Key': 'foo'},
{'Value': 'bar1',
'Key': 'foo1'}])
@mock_rds2
def test_add_tags_snapshot():
conn = boto3.client('rds', region_name='us-west-2')
conn.create_db_instance(DBInstanceIdentifier='db-primary-1',
AllocatedStorage=10,
Engine='postgres',
DBName='staging-postgres',
DBInstanceClass='db.m1.small',
MasterUsername='root',
MasterUserPassword='hunter2',
Port=1234,
DBSecurityGroups=["my_sg"])
snapshot = conn.create_db_snapshot(DBInstanceIdentifier='db-primary-1',
DBSnapshotIdentifier='snapshot-without-tags',
Tags=[
{
'Key': 'foo',
'Value': 'bar',
},
{
'Key': 'foo1',
'Value': 'bar1',
},
])
result = conn.list_tags_for_resource(
ResourceName='arn:aws:rds:us-west-2:1234567890:snapshot:snapshot-without-tags')
list(result['TagList']).should.have.length_of(2)
conn.add_tags_to_resource(ResourceName='arn:aws:rds:us-west-2:1234567890:snapshot:snapshot-without-tags',
Tags=[
{
'Key': 'foo',
'Value': 'fish',
},
{
'Key': 'foo2',
'Value': 'bar2',
},
])
result = conn.list_tags_for_resource(
ResourceName='arn:aws:rds:us-west-2:1234567890:snapshot:snapshot-without-tags')
list(result['TagList']).should.have.length_of(3)
@mock_rds2
def test_remove_tags_snapshot():
conn = boto3.client('rds', region_name='us-west-2')
conn.create_db_instance(DBInstanceIdentifier='db-primary-1',
AllocatedStorage=10,
Engine='postgres',
DBName='staging-postgres',
DBInstanceClass='db.m1.small',
MasterUsername='root',
MasterUserPassword='hunter2',
Port=1234,
DBSecurityGroups=["my_sg"])
snapshot = conn.create_db_snapshot(DBInstanceIdentifier='db-primary-1',
DBSnapshotIdentifier='snapshot-with-tags',
Tags=[
{
'Key': 'foo',
'Value': 'bar',
},
{
'Key': 'foo1',
'Value': 'bar1',
},
])
result = conn.list_tags_for_resource(
ResourceName='arn:aws:rds:us-west-2:1234567890:snapshot:snapshot-with-tags')
list(result['TagList']).should.have.length_of(2)
conn.remove_tags_from_resource(
ResourceName='arn:aws:rds:us-west-2:1234567890:snapshot:snapshot-with-tags', TagKeys=['foo'])
result = conn.list_tags_for_resource(
ResourceName='arn:aws:rds:us-west-2:1234567890:snapshot:snapshot-with-tags')
len(result['TagList']).should.equal(1)
@mock_rds2
def test_add_tags_option_group():
conn = boto3.client('rds', region_name='us-west-2')

View File

@ -1,5 +1,7 @@
from __future__ import unicode_literals
import datetime
import boto
import boto3
from boto.redshift.exceptions import (
@ -32,6 +34,8 @@ def test_create_cluster_boto3():
MasterUserPassword='password',
)
response['Cluster']['NodeType'].should.equal('ds2.xlarge')
create_time = response['Cluster']['ClusterCreateTime']
create_time.should.be.lower_than(datetime.datetime.now(create_time.tzinfo))
@mock_redshift

View File

@ -2471,6 +2471,72 @@ def test_boto3_delete_markers():
oldest['Key'].should.equal('key-with-versions-and-unicode-ó')
@mock_s3
def test_boto3_multiple_delete_markers():
s3 = boto3.client('s3', region_name='us-east-1')
bucket_name = 'mybucket'
key = u'key-with-versions-and-unicode-ó'
s3.create_bucket(Bucket=bucket_name)
s3.put_bucket_versioning(
Bucket=bucket_name,
VersioningConfiguration={
'Status': 'Enabled'
}
)
items = (six.b('v1'), six.b('v2'))
for body in items:
s3.put_object(
Bucket=bucket_name,
Key=key,
Body=body
)
# Delete the object twice to add multiple delete markers
s3.delete_object(Bucket=bucket_name, Key=key)
s3.delete_object(Bucket=bucket_name, Key=key)
response = s3.list_object_versions(Bucket=bucket_name)
response['DeleteMarkers'].should.have.length_of(2)
with assert_raises(ClientError) as e:
s3.get_object(
Bucket=bucket_name,
Key=key
)
e.response['Error']['Code'].should.equal('404')
# Remove both delete markers to restore the object
s3.delete_object(
Bucket=bucket_name,
Key=key,
VersionId='2'
)
s3.delete_object(
Bucket=bucket_name,
Key=key,
VersionId='3'
)
response = s3.get_object(
Bucket=bucket_name,
Key=key
)
response['Body'].read().should.equal(items[-1])
response = s3.list_object_versions(Bucket=bucket_name)
response['Versions'].should.have.length_of(2)
# We've asserted there are only 2 records, so one is newest and one is oldest
latest = list(filter(lambda item: item['IsLatest'], response['Versions']))[0]
oldest = list(filter(lambda item: not item['IsLatest'], response['Versions']))[0]
# Double-check ordering of version IDs
latest['VersionId'].should.equal('1')
oldest['VersionId'].should.equal('0')
# Double check the name is still unicode
latest['Key'].should.equal('key-with-versions-and-unicode-ó')
oldest['Key'].should.equal('key-with-versions-and-unicode-ó')
@mock_s3
def test_get_stream_gzipped():
payload = b"this is some stuff here"

View File

@ -26,13 +26,13 @@ def test_get_secret_that_does_not_exist():
result = conn.get_secret_value(SecretId='i-dont-exist')
@mock_secretsmanager
def test_get_secret_with_mismatched_id():
def test_get_secret_that_does_not_match():
conn = boto3.client('secretsmanager', region_name='us-west-2')
create_secret = conn.create_secret(Name='java-util-test-password',
SecretString="foosecret")
with assert_raises(ClientError):
result = conn.get_secret_value(SecretId='i-dont-exist')
result = conn.get_secret_value(SecretId='i-dont-match')
@mock_secretsmanager
def test_create_secret():
@ -152,3 +152,135 @@ def test_get_random_too_long_password():
with assert_raises(Exception):
random_password = conn.get_random_password(PasswordLength=5555)
@mock_secretsmanager
def test_describe_secret():
conn = boto3.client('secretsmanager', region_name='us-west-2')
conn.create_secret(Name='test-secret',
SecretString='foosecret')
secret_description = conn.describe_secret(SecretId='test-secret')
assert secret_description # Returned dict is not empty
assert secret_description['ARN'] == (
'arn:aws:secretsmanager:us-west-2:1234567890:secret:test-secret-rIjad')
@mock_secretsmanager
def test_describe_secret_that_does_not_exist():
conn = boto3.client('secretsmanager', region_name='us-west-2')
with assert_raises(ClientError):
result = conn.get_secret_value(SecretId='i-dont-exist')
@mock_secretsmanager
def test_describe_secret_that_does_not_match():
conn = boto3.client('secretsmanager', region_name='us-west-2')
conn.create_secret(Name='test-secret',
SecretString='foosecret')
with assert_raises(ClientError):
result = conn.get_secret_value(SecretId='i-dont-match')
@mock_secretsmanager
def test_rotate_secret():
secret_name = 'test-secret'
conn = boto3.client('secretsmanager', region_name='us-west-2')
conn.create_secret(Name=secret_name,
SecretString='foosecret')
rotated_secret = conn.rotate_secret(SecretId=secret_name)
assert rotated_secret
assert rotated_secret['ARN'] == (
'arn:aws:secretsmanager:us-west-2:1234567890:secret:test-secret-rIjad'
)
assert rotated_secret['Name'] == secret_name
assert rotated_secret['VersionId'] != ''
@mock_secretsmanager
def test_rotate_secret_enable_rotation():
secret_name = 'test-secret'
conn = boto3.client('secretsmanager', region_name='us-west-2')
conn.create_secret(Name=secret_name,
SecretString='foosecret')
initial_description = conn.describe_secret(SecretId=secret_name)
assert initial_description
assert initial_description['RotationEnabled'] is False
assert initial_description['RotationRules']['AutomaticallyAfterDays'] == 0
conn.rotate_secret(SecretId=secret_name,
RotationRules={'AutomaticallyAfterDays': 42})
rotated_description = conn.describe_secret(SecretId=secret_name)
assert rotated_description
assert rotated_description['RotationEnabled'] is True
assert rotated_description['RotationRules']['AutomaticallyAfterDays'] == 42
@mock_secretsmanager
def test_rotate_secret_that_does_not_exist():
conn = boto3.client('secretsmanager', 'us-west-2')
with assert_raises(ClientError):
result = conn.rotate_secret(SecretId='i-dont-exist')
@mock_secretsmanager
def test_rotate_secret_that_does_not_match():
conn = boto3.client('secretsmanager', region_name='us-west-2')
conn.create_secret(Name='test-secret',
SecretString='foosecret')
with assert_raises(ClientError):
result = conn.rotate_secret(SecretId='i-dont-match')
@mock_secretsmanager
def test_rotate_secret_client_request_token_too_short():
# Test is intentionally empty. Boto3 catches a too-short ClientRequestToken
# and raises ParamValidationError before Moto can see it; test_server
# actually handles this error. (See the sketch after this test.)
assert True
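# A minimal sketch, not part of the suite, illustrating the comment above:
# boto3/botocore validates the ClientRequestToken length on the client side
# and raises ParamValidationError before the call ever reaches moto. The
# helper name is hypothetical; boto3 and mock_secretsmanager are this
# module's existing imports.
from botocore.exceptions import ParamValidationError


@mock_secretsmanager
def _sketch_short_client_request_token_rejected_client_side():
    conn = boto3.client('secretsmanager', region_name='us-west-2')
    conn.create_secret(Name='test-secret', SecretString='foosecret')
    try:
        conn.rotate_secret(SecretId='test-secret',
                           ClientRequestToken='too-short-token')
    except ParamValidationError:
        # Raised by the client itself; moto's own 32-64 character check is
        # exercised by the test_server suite instead.
        pass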
@mock_secretsmanager
def test_rotate_secret_client_request_token_too_long():
secret_name = 'test-secret'
conn = boto3.client('secretsmanager', region_name='us-west-2')
conn.create_secret(Name=secret_name,
SecretString='foosecret')
client_request_token = (
'ED9F8B6C-85B7-446A-B7E4-38F2A3BEB13C-'
'ED9F8B6C-85B7-446A-B7E4-38F2A3BEB13C'
)
with assert_raises(ClientError):
result = conn.rotate_secret(SecretId=secret_name,
ClientRequestToken=client_request_token)
@mock_secretsmanager
def test_rotate_secret_rotation_lambda_arn_too_long():
secret_name = 'test-secret'
conn = boto3.client('secretsmanager', region_name='us-west-2')
conn.create_secret(Name=secret_name,
SecretString='foosecret')
rotation_lambda_arn = '85B7-446A-B7E4' * 147 # == 2058 characters
with assert_raises(ClientError):
result = conn.rotate_secret(SecretId=secret_name,
RotationLambdaARN=rotation_lambda_arn)
@mock_secretsmanager
def test_rotate_secret_rotation_period_zero():
# Test is intentionally empty. Boto3 catches a zero-day rotation period
# and raises ParamValidationError before Moto can see it; test_server
# actually handles this error. (See the sketch after this test.)
assert True
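# The same client-side check applies to a zero-day rotation period
# (hypothetical sketch, mirroring the helper above): AutomaticallyAfterDays
# must be at least 1, so botocore raises ParamValidationError before moto
# is involved.
@mock_secretsmanager
def _sketch_zero_rotation_period_rejected_client_side():
    conn = boto3.client('secretsmanager', region_name='us-west-2')
    conn.create_secret(Name='test-secret', SecretString='foosecret')
    try:
        conn.rotate_secret(SecretId='test-secret',
                           RotationRules={'AutomaticallyAfterDays': 0})
    except ParamValidationError:
        pass  # raised client-side, never reaches moto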
@mock_secretsmanager
def test_rotate_secret_rotation_period_too_long():
secret_name = 'test-secret'
conn = boto3.client('secretsmanager', region_name='us-west-2')
conn.create_secret(Name=secret_name,
SecretString='foosecret')
rotation_rules = {'AutomaticallyAfterDays': 1001}
with assert_raises(ClientError):
result = conn.rotate_secret(SecretId=secret_name,
RotationRules=rotation_rules)

View File

@ -49,6 +49,27 @@ def test_get_secret_that_does_not_exist():
assert json_data['message'] == "Secrets Manager can't find the specified secret"
assert json_data['__type'] == 'ResourceNotFoundException'
@mock_secretsmanager
def test_get_secret_that_does_not_match():
backend = server.create_backend_app("secretsmanager")
test_client = backend.test_client()
create_secret = test_client.post('/',
data={"Name": "test-secret",
"SecretString": "foo-secret"},
headers={
"X-Amz-Target": "secretsmanager.CreateSecret"},
)
get_secret = test_client.post('/',
data={"SecretId": "i-dont-match",
"VersionStage": "AWSCURRENT"},
headers={
"X-Amz-Target": "secretsmanager.GetSecretValue"},
)
json_data = json.loads(get_secret.data.decode("utf-8"))
assert json_data['message'] == "Secrets Manager can't find the specified secret"
assert json_data['__type'] == 'ResourceNotFoundException'
@mock_secretsmanager
def test_create_secret():
@ -66,3 +87,335 @@ def test_create_secret():
assert json_data['ARN'] == (
'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad')
assert json_data['Name'] == 'test-secret'
@mock_secretsmanager
def test_describe_secret():
backend = server.create_backend_app('secretsmanager')
test_client = backend.test_client()
create_secret = test_client.post('/',
data={"Name": "test-secret",
"SecretString": "foosecret"},
headers={
"X-Amz-Target": "secretsmanager.CreateSecret"
},
)
describe_secret = test_client.post('/',
data={"SecretId": "test-secret"},
headers={
"X-Amz-Target": "secretsmanager.DescribeSecret"
},
)
json_data = json.loads(describe_secret.data.decode("utf-8"))
assert json_data # Returned dict is not empty
assert json_data['ARN'] == (
'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad'
)
@mock_secretsmanager
def test_describe_secret_that_does_not_exist():
backend = server.create_backend_app('secretsmanager')
test_client = backend.test_client()
describe_secret = test_client.post('/',
data={"SecretId": "i-dont-exist"},
headers={
"X-Amz-Target": "secretsmanager.DescribeSecret"
},
)
json_data = json.loads(describe_secret.data.decode("utf-8"))
assert json_data['message'] == "Secrets Manager can't find the specified secret"
assert json_data['__type'] == 'ResourceNotFoundException'
@mock_secretsmanager
def test_describe_secret_that_does_not_match():
backend = server.create_backend_app('secretsmanager')
test_client = backend.test_client()
create_secret = test_client.post('/',
data={"Name": "test-secret",
"SecretString": "foosecret"},
headers={
"X-Amz-Target": "secretsmanager.CreateSecret"
},
)
describe_secret = test_client.post('/',
data={"SecretId": "i-dont-match"},
headers={
"X-Amz-Target": "secretsmanager.DescribeSecret"
},
)
json_data = json.loads(describe_secret.data.decode("utf-8"))
assert json_data['message'] == "Secrets Manager can't find the specified secret"
assert json_data['__type'] == 'ResourceNotFoundException'
@mock_secretsmanager
def test_rotate_secret():
backend = server.create_backend_app('secretsmanager')
test_client = backend.test_client()
create_secret = test_client.post('/',
data={"Name": "test-secret",
"SecretString": "foosecret"},
headers={
"X-Amz-Target": "secretsmanager.CreateSecret"
},
)
client_request_token = "EXAMPLE2-90ab-cdef-fedc-ba987SECRET2"
rotate_secret = test_client.post('/',
data={"SecretId": "test-secret",
"ClientRequestToken": client_request_token},
headers={
"X-Amz-Target": "secretsmanager.RotateSecret"
},
)
json_data = json.loads(rotate_secret.data.decode("utf-8"))
assert json_data # Returned dict is not empty
assert json_data['ARN'] == (
'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad'
)
assert json_data['Name'] == 'test-secret'
assert json_data['VersionId'] == client_request_token
# @mock_secretsmanager
# def test_rotate_secret_enable_rotation():
# backend = server.create_backend_app('secretsmanager')
# test_client = backend.test_client()
# create_secret = test_client.post(
# '/',
# data={
# "Name": "test-secret",
# "SecretString": "foosecret"
# },
# headers={
# "X-Amz-Target": "secretsmanager.CreateSecret"
# },
# )
# initial_description = test_client.post(
# '/',
# data={
# "SecretId": "test-secret"
# },
# headers={
# "X-Amz-Target": "secretsmanager.DescribeSecret"
# },
# )
# json_data = json.loads(initial_description.data.decode("utf-8"))
# assert json_data # Returned dict is not empty
# assert json_data['RotationEnabled'] is False
# assert json_data['RotationRules']['AutomaticallyAfterDays'] == 0
# rotate_secret = test_client.post(
# '/',
# data={
# "SecretId": "test-secret",
# "RotationRules": {"AutomaticallyAfterDays": 42}
# },
# headers={
# "X-Amz-Target": "secretsmanager.RotateSecret"
# },
# )
# rotated_description = test_client.post(
# '/',
# data={
# "SecretId": "test-secret"
# },
# headers={
# "X-Amz-Target": "secretsmanager.DescribeSecret"
# },
# )
# json_data = json.loads(rotated_description.data.decode("utf-8"))
# assert json_data # Returned dict is not empty
# assert json_data['RotationEnabled'] is True
# assert json_data['RotationRules']['AutomaticallyAfterDays'] == 42
@mock_secretsmanager
def test_rotate_secret_that_does_not_exist():
backend = server.create_backend_app('secretsmanager')
test_client = backend.test_client()
rotate_secret = test_client.post('/',
data={"SecretId": "i-dont-exist"},
headers={
"X-Amz-Target": "secretsmanager.RotateSecret"
},
)
json_data = json.loads(rotate_secret.data.decode("utf-8"))
assert json_data['message'] == "Secrets Manager can't find the specified secret"
assert json_data['__type'] == 'ResourceNotFoundException'
@mock_secretsmanager
def test_rotate_secret_that_does_not_match():
backend = server.create_backend_app('secretsmanager')
test_client = backend.test_client()
create_secret = test_client.post('/',
data={"Name": "test-secret",
"SecretString": "foosecret"},
headers={
"X-Amz-Target": "secretsmanager.CreateSecret"
},
)
rotate_secret = test_client.post('/',
data={"SecretId": "i-dont-match"},
headers={
"X-Amz-Target": "secretsmanager.RotateSecret"
},
)
json_data = json.loads(rotate_secret.data.decode("utf-8"))
assert json_data['message'] == "Secrets Manager can't find the specified secret"
assert json_data['__type'] == 'ResourceNotFoundException'
@mock_secretsmanager
def test_rotate_secret_client_request_token_too_short():
backend = server.create_backend_app('secretsmanager')
test_client = backend.test_client()
create_secret = test_client.post('/',
data={"Name": "test-secret",
"SecretString": "foosecret"},
headers={
"X-Amz-Target": "secretsmanager.CreateSecret"
},
)
client_request_token = "ED9F8B6C-85B7-B7E4-38F2A3BEB13C"
rotate_secret = test_client.post('/',
data={"SecretId": "test-secret",
"ClientRequestToken": client_request_token},
headers={
"X-Amz-Target": "secretsmanager.RotateSecret"
},
)
json_data = json.loads(rotate_secret.data.decode("utf-8"))
assert json_data['message'] == "ClientRequestToken must be 32-64 characters long."
assert json_data['__type'] == 'InvalidParameterException'
@mock_secretsmanager
def test_rotate_secret_client_request_token_too_long():
backend = server.create_backend_app('secretsmanager')
test_client = backend.test_client()
create_secret = test_client.post('/',
data={"Name": "test-secret",
"SecretString": "foosecret"},
headers={
"X-Amz-Target": "secretsmanager.CreateSecret"
},
)
client_request_token = (
'ED9F8B6C-85B7-446A-B7E4-38F2A3BEB13C-'
'ED9F8B6C-85B7-446A-B7E4-38F2A3BEB13C'
)
rotate_secret = test_client.post('/',
data={"SecretId": "test-secret",
"ClientRequestToken": client_request_token},
headers={
"X-Amz-Target": "secretsmanager.RotateSecret"
},
)
json_data = json.loads(rotate_secret.data.decode("utf-8"))
assert json_data['message'] == "ClientRequestToken must be 32-64 characters long."
assert json_data['__type'] == 'InvalidParameterException'
@mock_secretsmanager
def test_rotate_secret_rotation_lambda_arn_too_long():
backend = server.create_backend_app('secretsmanager')
test_client = backend.test_client()
create_secret = test_client.post('/',
data={"Name": "test-secret",
"SecretString": "foosecret"},
headers={
"X-Amz-Target": "secretsmanager.CreateSecret"
},
)
rotation_lambda_arn = '85B7-446A-B7E4' * 147 # == 2058 characters
rotate_secret = test_client.post('/',
data={"SecretId": "test-secret",
"RotationLambdaARN": rotation_lambda_arn},
headers={
"X-Amz-Target": "secretsmanager.RotateSecret"
},
)
json_data = json.loads(rotate_secret.data.decode("utf-8"))
assert json_data['message'] == "RotationLambdaARN must <= 2048 characters long."
assert json_data['__type'] == 'InvalidParameterException'
#
# The following tests should work, but fail on the embedded dict in
# RotationRules. The error message suggests a problem deeper in the code,
# which needs further investigation (one possible direction is sketched
# after these tests).
#
# @mock_secretsmanager
# def test_rotate_secret_rotation_period_zero():
# backend = server.create_backend_app('secretsmanager')
# test_client = backend.test_client()
# create_secret = test_client.post('/',
# data={"Name": "test-secret",
# "SecretString": "foosecret"},
# headers={
# "X-Amz-Target": "secretsmanager.CreateSecret"
# },
# )
# rotate_secret = test_client.post('/',
# data={"SecretId": "test-secret",
# "RotationRules": {"AutomaticallyAfterDays": 0}},
# headers={
# "X-Amz-Target": "secretsmanager.RotateSecret"
# },
# )
# json_data = json.loads(rotate_secret.data.decode("utf-8"))
# assert json_data['message'] == "RotationRules.AutomaticallyAfterDays must be within 1-1000."
# assert json_data['__type'] == 'InvalidParameterException'
# @mock_secretsmanager
# def test_rotate_secret_rotation_period_too_long():
# backend = server.create_backend_app('secretsmanager')
# test_client = backend.test_client()
# create_secret = test_client.post('/',
# data={"Name": "test-secret",
# "SecretString": "foosecret"},
# headers={
# "X-Amz-Target": "secretsmanager.CreateSecret"
# },
# )
# rotate_secret = test_client.post('/',
# data={"SecretId": "test-secret",
# "RotationRules": {"AutomaticallyAfterDays": 1001}},
# headers={
# "X-Amz-Target": "secretsmanager.RotateSecret"
# },
# )
# json_data = json.loads(rotate_secret.data.decode("utf-8"))
# assert json_data['message'] == "RotationRules.AutomaticallyAfterDays must be within 1-1000."
# assert json_data['__type'] == 'InvalidParameterException'
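#
# A hedged note on the failures above (an assumption, not verified against
# this moto version): the Flask test client form-encodes the data dict, and a
# nested dict such as RotationRules cannot survive that encoding. One
# direction worth trying is to send the whole payload as a JSON body instead,
# for example:
#
#     rotate_secret = test_client.post(
#         '/',
#         data=json.dumps({"SecretId": "test-secret",
#                          "RotationRules": {"AutomaticallyAfterDays": 42}}),
#         content_type='application/x-amz-json-1.1',
#         headers={"X-Amz-Target": "secretsmanager.RotateSecret"},
#     )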

View File

@ -40,6 +40,33 @@ def test_create_fifo_queue_fail():
raise RuntimeError('Should have raised InvalidParameterValue Exception')
@mock_sqs
def test_create_queue_with_same_attributes():
sqs = boto3.client('sqs', region_name='us-east-1')
dlq_url = sqs.create_queue(QueueName='test-queue-dlq')['QueueUrl']
dlq_arn = sqs.get_queue_attributes(QueueUrl=dlq_url)['Attributes']['QueueArn']
attributes = {
'DelaySeconds': '900',
'MaximumMessageSize': '262144',
'MessageRetentionPeriod': '1209600',
'ReceiveMessageWaitTimeSeconds': '20',
'RedrivePolicy': '{"deadLetterTargetArn": "%s", "maxReceiveCount": 100}' % (dlq_arn),
'VisibilityTimeout': '43200'
}
sqs.create_queue(
QueueName='test-queue',
Attributes=attributes
)
sqs.create_queue(
QueueName='test-queue',
Attributes=attributes
)
@mock_sqs
def test_create_queue_with_different_attributes_fail():
sqs = boto3.client('sqs', region_name='us-east-1')
@ -1195,3 +1222,16 @@ def test_receive_messages_with_message_group_id_on_visibility_timeout():
messages = queue.receive_messages()
messages.should.have.length_of(1)
messages[0].message_id.should.equal(message.message_id)
@mock_sqs
def test_receive_message_for_queue_with_receive_message_wait_time_seconds_set():
sqs = boto3.resource('sqs', region_name='us-east-1')
queue = sqs.create_queue(
QueueName='test-queue',
Attributes={
'ReceiveMessageWaitTimeSeconds': '2',
}
)
queue.receive_messages()

View File

@ -5,11 +5,12 @@ import botocore.exceptions
import sure # noqa
import datetime
import uuid
import json
from botocore.exceptions import ClientError
from nose.tools import assert_raises
from moto import mock_ssm
from moto import mock_ssm, mock_cloudformation
@mock_ssm
@ -668,3 +669,118 @@ def test_list_commands():
with assert_raises(ClientError):
response = client.list_commands(
CommandId=str(uuid.uuid4()))
@mock_ssm
def test_get_command_invocation():
client = boto3.client('ssm', region_name='us-east-1')
ssm_document = 'AWS-RunShellScript'
params = {'commands': ['#!/bin/bash\necho \'hello world\'']}
response = client.send_command(
InstanceIds=['i-123456', 'i-234567', 'i-345678'],
DocumentName=ssm_document,
Parameters=params,
OutputS3Region='us-east-2',
OutputS3BucketName='the-bucket',
OutputS3KeyPrefix='pref')
cmd = response['Command']
cmd_id = cmd['CommandId']
instance_id = 'i-345678'
invocation_response = client.get_command_invocation(
CommandId=cmd_id,
InstanceId=instance_id,
PluginName='aws:runShellScript')
invocation_response['CommandId'].should.equal(cmd_id)
invocation_response['InstanceId'].should.equal(instance_id)
# test the error case for an invalid instance id
with assert_raises(ClientError):
invocation_response = client.get_command_invocation(
CommandId=cmd_id,
InstanceId='i-FAKE')
# test the error case for an invalid plugin name
with assert_raises(ClientError):
invocation_response = client.get_command_invocation(
CommandId=cmd_id,
InstanceId=instance_id,
PluginName='FAKE')
@mock_ssm
@mock_cloudformation
def test_get_command_invocations_from_stack():
stack_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Description": "Test Stack",
"Resources": {
"EC2Instance1": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId": "ami-test-image-id",
"KeyName": "test",
"InstanceType": "t2.micro",
"Tags": [
{
"Key": "Test Description",
"Value": "Test tag"
},
{
"Key": "Test Name",
"Value": "Name tag for tests"
}
]
}
}
},
"Outputs": {
"test": {
"Description": "Test Output",
"Value": "Test output value",
"Export": {
"Name": "Test value to export"
}
},
"PublicIP": {
"Value": "Test public ip"
}
}
}
cloudformation_client = boto3.client(
'cloudformation',
region_name='us-east-1')
stack_template_str = json.dumps(stack_template)
response = cloudformation_client.create_stack(
StackName='test_stack',
TemplateBody=stack_template_str,
Capabilities=('CAPABILITY_IAM', ))
client = boto3.client('ssm', region_name='us-east-1')
ssm_document = 'AWS-RunShellScript'
params = {'commands': ['#!/bin/bash\necho \'hello world\'']}
response = client.send_command(
Targets=[{
'Key': 'tag:aws:cloudformation:stack-name',
'Values': ('test_stack', )}],
DocumentName=ssm_document,
Parameters=params,
OutputS3Region='us-east-2',
OutputS3BucketName='the-bucket',
OutputS3KeyPrefix='pref')
cmd = response['Command']
cmd_id = cmd['CommandId']
instance_ids = cmd['InstanceIds']
invocation_response = client.get_command_invocation(
CommandId=cmd_id,
InstanceId=instance_ids[0],
PluginName='aws:runShellScript')