Merge pull request #31 from spulec/master

Merge upstream
This commit is contained in:
Bert Blommers 2020-02-24 08:15:40 +00:00 committed by GitHub
commit f009f7da8c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
76 changed files with 5733 additions and 2847 deletions

View File

@ -26,11 +26,12 @@ install:
fi fi
docker run --rm -t --name motoserver -e TEST_SERVER_MODE=true -e AWS_SECRET_ACCESS_KEY=server_secret -e AWS_ACCESS_KEY_ID=server_key -v `pwd`:/moto -p 5000:5000 -v /var/run/docker.sock:/var/run/docker.sock python:${PYTHON_DOCKER_TAG} /moto/travis_moto_server.sh & docker run --rm -t --name motoserver -e TEST_SERVER_MODE=true -e AWS_SECRET_ACCESS_KEY=server_secret -e AWS_ACCESS_KEY_ID=server_key -v `pwd`:/moto -p 5000:5000 -v /var/run/docker.sock:/var/run/docker.sock python:${PYTHON_DOCKER_TAG} /moto/travis_moto_server.sh &
fi fi
travis_retry pip install -r requirements-dev.txt
travis_retry pip install boto==2.45.0 travis_retry pip install boto==2.45.0
travis_retry pip install boto3 travis_retry pip install boto3
travis_retry pip install dist/moto*.gz travis_retry pip install dist/moto*.gz
travis_retry pip install coveralls==1.1 travis_retry pip install coveralls==1.1
travis_retry pip install -r requirements-dev.txt travis_retry pip install coverage==4.5.4
if [ "$TEST_SERVER_MODE" = "true" ]; then if [ "$TEST_SERVER_MODE" = "true" ]; then
python wait_for.py python wait_for.py

View File

@ -450,6 +450,16 @@ boto3.resource(
) )
``` ```
### Caveats
The standalone server has some caveats with some services. The following services
require that you update your hosts file for your code to work properly:
1. `s3-control`
For the above services, this is required because the hostname is in the form of `AWS_ACCOUNT_ID.localhost`.
As a result, you need to add that entry to your host file for your tests to function properly.
## Install ## Install

View File

@ -56,9 +56,10 @@ author = 'Steve Pulec'
# built documents. # built documents.
# #
# The short X.Y version. # The short X.Y version.
version = '0.4.10' import moto
version = moto.__version__
# The full version, including alpha/beta/rc tags. # The full version, including alpha/beta/rc tags.
release = '0.4.10' release = moto.__version__
# The language for content autogenerated by Sphinx. Refer to documentation # The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages. # for a list of supported languages.

View File

@ -24,8 +24,7 @@ For example, we have the following code we want to test:
.. sourcecode:: python .. sourcecode:: python
import boto import boto3
from boto.s3.key import Key
class MyModel(object): class MyModel(object):
def __init__(self, name, value): def __init__(self, name, value):
@ -33,11 +32,8 @@ For example, we have the following code we want to test:
self.value = value self.value = value
def save(self): def save(self):
conn = boto.connect_s3() s3 = boto3.client('s3', region_name='us-east-1')
bucket = conn.get_bucket('mybucket') s3.put_object(Bucket='mybucket', Key=self.name, Body=self.value)
k = Key(bucket)
k.key = self.name
k.set_contents_from_string(self.value)
There are several ways to do this, but you should keep in mind that Moto creates a full, blank environment. There are several ways to do this, but you should keep in mind that Moto creates a full, blank environment.
@ -48,20 +44,23 @@ With a decorator wrapping, all the calls to S3 are automatically mocked out.
.. sourcecode:: python .. sourcecode:: python
import boto import boto3
from moto import mock_s3 from moto import mock_s3
from mymodule import MyModel from mymodule import MyModel
@mock_s3 @mock_s3
def test_my_model_save(): def test_my_model_save():
conn = boto.connect_s3() conn = boto3.resource('s3', region_name='us-east-1')
# We need to create the bucket since this is all in Moto's 'virtual' AWS account # We need to create the bucket since this is all in Moto's 'virtual' AWS account
conn.create_bucket('mybucket') conn.create_bucket(Bucket='mybucket')
model_instance = MyModel('steve', 'is awesome') model_instance = MyModel('steve', 'is awesome')
model_instance.save() model_instance.save()
assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome' body = conn.Object('mybucket', 'steve').get()[
'Body'].read().decode("utf-8")
assert body == 'is awesome'
Context manager Context manager
~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~
@ -72,13 +71,16 @@ Same as the Decorator, every call inside the ``with`` statement is mocked out.
def test_my_model_save(): def test_my_model_save():
with mock_s3(): with mock_s3():
conn = boto.connect_s3() conn = boto3.resource('s3', region_name='us-east-1')
conn.create_bucket('mybucket') conn.create_bucket(Bucket='mybucket')
model_instance = MyModel('steve', 'is awesome') model_instance = MyModel('steve', 'is awesome')
model_instance.save() model_instance.save()
assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome' body = conn.Object('mybucket', 'steve').get()[
'Body'].read().decode("utf-8")
assert body == 'is awesome'
Raw Raw
~~~ ~~~
@ -91,13 +93,16 @@ You can also start and stop the mocking manually.
mock = mock_s3() mock = mock_s3()
mock.start() mock.start()
conn = boto.connect_s3() conn = boto3.resource('s3', region_name='us-east-1')
conn.create_bucket('mybucket') conn.create_bucket(Bucket='mybucket')
model_instance = MyModel('steve', 'is awesome') model_instance = MyModel('steve', 'is awesome')
model_instance.save() model_instance.save()
assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome' body = conn.Object('mybucket', 'steve').get()[
'Body'].read().decode("utf-8")
assert body == 'is awesome'
mock.stop() mock.stop()

View File

@ -85,6 +85,15 @@ class NoMethodDefined(BadRequestException):
) )
class AuthorizerNotFoundException(RESTError):
    """Raised when a request references an authorizer id that does not exist.

    Surfaces as the API Gateway ``NotFoundException`` error with HTTP 404.
    """

    code = 404

    def __init__(self):
        error_type = "NotFoundException"
        message = "Invalid Authorizer identifier specified"
        super(AuthorizerNotFoundException, self).__init__(error_type, message)
class StageNotFoundException(RESTError): class StageNotFoundException(RESTError):
code = 404 code = 404

View File

@ -28,6 +28,7 @@ from .exceptions import (
InvalidHttpEndpoint, InvalidHttpEndpoint,
InvalidResourcePathException, InvalidResourcePathException,
InvalidRequestInput, InvalidRequestInput,
AuthorizerNotFoundException,
StageNotFoundException, StageNotFoundException,
RoleNotSpecified, RoleNotSpecified,
NoIntegrationDefined, NoIntegrationDefined,
@ -117,14 +118,15 @@ class Resource(BaseModel):
self.api_id = api_id self.api_id = api_id
self.path_part = path_part self.path_part = path_part
self.parent_id = parent_id self.parent_id = parent_id
self.resource_methods = {"GET": {}} self.resource_methods = {}
def to_dict(self): def to_dict(self):
response = { response = {
"path": self.get_path(), "path": self.get_path(),
"id": self.id, "id": self.id,
"resourceMethods": self.resource_methods,
} }
if self.resource_methods:
response["resourceMethods"] = self.resource_methods
if self.parent_id: if self.parent_id:
response["parentId"] = self.parent_id response["parentId"] = self.parent_id
response["pathPart"] = self.path_part response["pathPart"] = self.path_part
@ -186,6 +188,54 @@ class Resource(BaseModel):
return self.resource_methods[method_type].pop("methodIntegration") return self.resource_methods[method_type].pop("methodIntegration")
class Authorizer(BaseModel, dict):
    """Dict-backed model of an API Gateway authorizer.

    Keys mirror the wire-format field names (``id``, ``name``, ``type`` plus
    any optional fields the caller supplied), so the instance itself can be
    serialized as a response body.
    """

    # (constructor kwarg, response field name) pairs that are only stored
    # when the caller passed a truthy value.
    _OPTIONAL_FIELDS = (
        ("provider_arns", "providerARNs"),
        ("auth_type", "authType"),
        ("authorizer_uri", "authorizerUri"),
        ("authorizer_credentials", "authorizerCredentials"),
        ("identity_source", "identitySource"),
        ("identity_validation_expression", "identityValidationExpression"),
    )

    def __init__(self, id, name, authorizer_type, **kwargs):
        super(Authorizer, self).__init__()
        self["id"] = id
        self["name"] = name
        self["type"] = authorizer_type
        for kwarg_name, field_name in self._OPTIONAL_FIELDS:
            value = kwargs.get(kwarg_name)
            if value:
                self[field_name] = value
        # Always present, even when no TTL was supplied (stored as None).
        self["authorizerResultTtlInSeconds"] = kwargs.get("authorizer_result_ttl")

    def apply_operations(self, patch_operations):
        """Apply patch operations in order; mutates and returns ``self``.

        Paths are matched by substring, in the same precedence order as the
        original implementation.
        """
        for operation in patch_operations:
            path = operation["path"]
            if "/authorizerUri" in path:
                self["authorizerUri"] = operation["value"]
            elif "/authorizerCredentials" in path:
                self["authorizerCredentials"] = operation["value"]
            elif "/authorizerResultTtlInSeconds" in path:
                # TTL arrives as a string; store it as an int.
                self["authorizerResultTtlInSeconds"] = int(operation["value"])
            elif "/authType" in path:
                self["authType"] = operation["value"]
            elif "/identitySource" in path:
                self["identitySource"] = operation["value"]
            elif "/identityValidationExpression" in path:
                self["identityValidationExpression"] = operation["value"]
            elif "/name" in path:
                self["name"] = operation["value"]
            elif "/providerARNs" in path:
                # TODO: add and remove
                raise Exception('Patch operation for "%s" not implemented' % path)
            elif "/type" in path:
                self["type"] = operation["value"]
            else:
                raise Exception('Patch operation "%s" not implemented' % operation["op"])
        return self
class Stage(BaseModel, dict): class Stage(BaseModel, dict):
def __init__( def __init__(
self, self,
@ -411,6 +461,7 @@ class RestAPI(BaseModel):
self.tags = kwargs.get("tags") or {} self.tags = kwargs.get("tags") or {}
self.deployments = {} self.deployments = {}
self.authorizers = {}
self.stages = {} self.stages = {}
self.resources = {} self.resources = {}
@ -478,6 +529,34 @@ class RestAPI(BaseModel):
), ),
) )
def create_authorizer(
    self,
    id,
    name,
    authorizer_type,
    provider_arns=None,
    auth_type=None,
    authorizer_uri=None,
    authorizer_credentials=None,
    identity_source=None,
    identiy_validation_expression=None,
    authorizer_result_ttl=None,
):
    """Create an Authorizer, register it under ``id`` and return it.

    ``identiy_validation_expression`` keeps its (misspelled) name for
    backward compatibility with existing callers, but is now forwarded to
    ``Authorizer`` under the correctly spelled keyword so the
    ``identityValidationExpression`` field is actually populated.
    """
    authorizer = Authorizer(
        id=id,
        name=name,
        authorizer_type=authorizer_type,
        provider_arns=provider_arns,
        auth_type=auth_type,
        authorizer_uri=authorizer_uri,
        authorizer_credentials=authorizer_credentials,
        identity_source=identity_source,
        # BUG FIX: Authorizer.__init__ reads "identity_validation_expression";
        # the previous code passed the misspelled key, which was ignored and
        # the expression was silently dropped.
        identity_validation_expression=identiy_validation_expression,
        authorizer_result_ttl=authorizer_result_ttl,
    )
    self.authorizers[id] = authorizer
    return authorizer
def create_stage( def create_stage(
self, self,
name, name,
@ -517,6 +596,9 @@ class RestAPI(BaseModel):
def get_deployment(self, deployment_id): def get_deployment(self, deployment_id):
return self.deployments[deployment_id] return self.deployments[deployment_id]
def get_authorizers(self):
    """Return all authorizers registered on this REST API as a list."""
    return [*self.authorizers.values()]
def get_stages(self): def get_stages(self):
return list(self.stages.values()) return list(self.stages.values())
@ -612,6 +694,46 @@ class APIGatewayBackend(BaseBackend):
) )
return method return method
def get_authorizer(self, restapi_id, authorizer_id):
    """Look up one authorizer; raise AuthorizerNotFoundException if absent."""
    rest_api = self.get_rest_api(restapi_id)
    found = rest_api.authorizers.get(authorizer_id)
    if found is None:
        raise AuthorizerNotFoundException()
    return found
def get_authorizers(self, restapi_id):
    """Return every authorizer attached to the given REST API."""
    return self.get_rest_api(restapi_id).get_authorizers()
def create_authorizer(self, restapi_id, name, authorizer_type, **kwargs):
    """Create an authorizer with a generated id on the given REST API."""
    api = self.get_rest_api(restapi_id)
    new_id = create_id()
    # Forward only the recognized optional parameters; absent ones become
    # None, matching RestAPI.create_authorizer's defaults.
    # NOTE: "identiy_validation_expression" spelling matches the
    # RestAPI.create_authorizer parameter name.
    optional_names = (
        "provider_arns",
        "auth_type",
        "authorizer_uri",
        "authorizer_credentials",
        "identity_source",
        "identiy_validation_expression",
        "authorizer_result_ttl",
    )
    forwarded = {key: kwargs.get(key) for key in optional_names}
    authorizer = api.create_authorizer(new_id, name, authorizer_type, **forwarded)
    return api.authorizers.get(authorizer["id"])
def update_authorizer(self, restapi_id, authorizer_id, patch_operations):
    """Apply patch operations to an existing authorizer and return it.

    Raises AuthorizerNotFoundException (via get_authorizer) when the id
    is unknown.
    """
    authorizer = self.get_authorizer(restapi_id, authorizer_id)
    # get_authorizer either raises or returns an Authorizer — a dict that
    # always carries "id"/"name"/"type" keys and is therefore truthy.  The
    # old "if not authorizer:" fallback re-creating one via Authorizer()
    # was unreachable and would itself fail (Authorizer requires
    # arguments), so it has been removed.
    return authorizer.apply_operations(patch_operations)
def delete_authorizer(self, restapi_id, authorizer_id):
    """Remove the authorizer from its REST API; KeyError if it is unknown."""
    self.get_rest_api(restapi_id).authorizers.pop(authorizer_id)
def get_stage(self, function_id, stage_name): def get_stage(self, function_id, stage_name):
api = self.get_rest_api(function_id) api = self.get_rest_api(function_id)
stage = api.stages.get(stage_name) stage = api.stages.get(stage_name)

View File

@ -8,11 +8,13 @@ from .exceptions import (
ApiKeyNotFoundException, ApiKeyNotFoundException,
BadRequestException, BadRequestException,
CrossAccountNotAllowed, CrossAccountNotAllowed,
AuthorizerNotFoundException,
StageNotFoundException, StageNotFoundException,
ApiKeyAlreadyExists, ApiKeyAlreadyExists,
) )
API_KEY_SOURCES = ["AUTHORIZER", "HEADER"] API_KEY_SOURCES = ["AUTHORIZER", "HEADER"]
AUTHORIZER_TYPES = ["TOKEN", "REQUEST", "COGNITO_USER_POOLS"]
ENDPOINT_CONFIGURATION_TYPES = ["PRIVATE", "EDGE", "REGIONAL"] ENDPOINT_CONFIGURATION_TYPES = ["PRIVATE", "EDGE", "REGIONAL"]
@ -177,6 +179,88 @@ class APIGatewayResponse(BaseResponse):
) )
return 200, {}, json.dumps(method_response) return 200, {}, json.dumps(method_response)
def restapis_authorizers(self, request, full_url, headers):
    """Handle /restapis/{restapi_id}/authorizers: POST creates, GET lists."""
    self.setup_class(request, full_url, headers)
    url_path_parts = self.path.split("/")
    # Path shape: "" / "restapis" / {restapi_id} / "authorizers"
    restapi_id = url_path_parts[2]
    if self.method == "POST":
        name = self._get_param("name")
        authorizer_type = self._get_param("type")
        # Optional creation parameters; all default to None except the TTL.
        provider_arns = self._get_param_with_default_value("providerARNs", None)
        auth_type = self._get_param_with_default_value("authType", None)
        authorizer_uri = self._get_param_with_default_value("authorizerUri", None)
        authorizer_credentials = self._get_param_with_default_value(
            "authorizerCredentials", None
        )
        identity_source = self._get_param_with_default_value("identitySource", None)
        identiy_validation_expression = self._get_param_with_default_value(
            "identityValidationExpression", None
        )
        # Result-cache TTL defaults to 300 seconds when not supplied.
        authorizer_result_ttl = self._get_param_with_default_value(
            "authorizerResultTtlInSeconds", 300
        )

        # Param validation
        if authorizer_type and authorizer_type not in AUTHORIZER_TYPES:
            return self.error(
                "ValidationException",
                (
                    "1 validation error detected: "
                    "Value '{authorizer_type}' at 'createAuthorizerInput.type' failed "
                    "to satisfy constraint: Member must satisfy enum value set: "
                    "[TOKEN, REQUEST, COGNITO_USER_POOLS]"
                ).format(authorizer_type=authorizer_type),
            )

        authorizer_response = self.backend.create_authorizer(
            restapi_id,
            name,
            authorizer_type,
            provider_arns=provider_arns,
            auth_type=auth_type,
            authorizer_uri=authorizer_uri,
            authorizer_credentials=authorizer_credentials,
            identity_source=identity_source,
            identiy_validation_expression=identiy_validation_expression,
            authorizer_result_ttl=authorizer_result_ttl,
        )
    elif self.method == "GET":
        authorizers = self.backend.get_authorizers(restapi_id)
        return 200, {}, json.dumps({"item": authorizers})

    # NOTE(review): reached for POST, but also for any other verb, where
    # authorizer_response would be unbound — presumably the URL routing only
    # sends POST/GET here; confirm against urls.py.
    return 200, {}, json.dumps(authorizer_response)
def authorizers(self, request, full_url, headers):
    """Handle /restapis/{restapi_id}/authorizers/{authorizer_id}: GET, PATCH, DELETE."""
    self.setup_class(request, full_url, headers)
    url_path_parts = self.path.split("/")
    # Path shape: "" / "restapis" / {restapi_id} / "authorizers" / {authorizer_id}
    restapi_id = url_path_parts[2]
    authorizer_id = url_path_parts[4]
    if self.method == "GET":
        try:
            authorizer_response = self.backend.get_authorizer(
                restapi_id, authorizer_id
            )
        except AuthorizerNotFoundException as error:
            # Translate the backend's not-found error into a JSON error body.
            return (
                error.code,
                {},
                '{{"message":"{0}","code":"{1}"}}'.format(
                    error.message, error.error_type
                ),
            )
    elif self.method == "PATCH":
        patch_operations = self._get_param("patchOperations")
        authorizer_response = self.backend.update_authorizer(
            restapi_id, authorizer_id, patch_operations
        )
    elif self.method == "DELETE":
        self.backend.delete_authorizer(restapi_id, authorizer_id)
        # 202 Accepted with an empty JSON body.
        return 202, {}, "{}"
    return 200, {}, json.dumps(authorizer_response)
def restapis_stages(self, request, full_url, headers): def restapis_stages(self, request, full_url, headers):
self.setup_class(request, full_url, headers) self.setup_class(request, full_url, headers)
url_path_parts = self.path.split("/") url_path_parts = self.path.split("/")

View File

@ -7,6 +7,8 @@ url_paths = {
"{0}/restapis$": APIGatewayResponse().restapis, "{0}/restapis$": APIGatewayResponse().restapis,
"{0}/restapis/(?P<function_id>[^/]+)/?$": APIGatewayResponse().restapis_individual, "{0}/restapis/(?P<function_id>[^/]+)/?$": APIGatewayResponse().restapis_individual,
"{0}/restapis/(?P<function_id>[^/]+)/resources$": APIGatewayResponse().resources, "{0}/restapis/(?P<function_id>[^/]+)/resources$": APIGatewayResponse().resources,
"{0}/restapis/(?P<function_id>[^/]+)/authorizers$": APIGatewayResponse().restapis_authorizers,
"{0}/restapis/(?P<function_id>[^/]+)/authorizers/(?P<authorizer_id>[^/]+)/?$": APIGatewayResponse().authorizers,
"{0}/restapis/(?P<function_id>[^/]+)/stages$": APIGatewayResponse().restapis_stages, "{0}/restapis/(?P<function_id>[^/]+)/stages$": APIGatewayResponse().restapis_stages,
"{0}/restapis/(?P<function_id>[^/]+)/stages/(?P<stage_name>[^/]+)/?$": APIGatewayResponse().stages, "{0}/restapis/(?P<function_id>[^/]+)/stages/(?P<stage_name>[^/]+)/?$": APIGatewayResponse().stages,
"{0}/restapis/(?P<function_id>[^/]+)/deployments$": APIGatewayResponse().deployments, "{0}/restapis/(?P<function_id>[^/]+)/deployments$": APIGatewayResponse().deployments,

View File

@ -14,6 +14,7 @@ from jose import jws
from moto.compat import OrderedDict from moto.compat import OrderedDict
from moto.core import BaseBackend, BaseModel from moto.core import BaseBackend, BaseModel
from moto.core import ACCOUNT_ID as DEFAULT_ACCOUNT_ID
from .exceptions import ( from .exceptions import (
GroupExistsException, GroupExistsException,
NotAuthorizedError, NotAuthorizedError,
@ -69,6 +70,9 @@ class CognitoIdpUserPool(BaseModel):
def __init__(self, region, name, extended_config): def __init__(self, region, name, extended_config):
self.region = region self.region = region
self.id = "{}_{}".format(self.region, str(uuid.uuid4().hex)) self.id = "{}_{}".format(self.region, str(uuid.uuid4().hex))
self.arn = "arn:aws:cognito-idp:{}:{}:userpool/{}".format(
self.region, DEFAULT_ACCOUNT_ID, self.id
)
self.name = name self.name = name
self.status = None self.status = None
self.extended_config = extended_config or {} self.extended_config = extended_config or {}
@ -91,6 +95,7 @@ class CognitoIdpUserPool(BaseModel):
def _base_json(self): def _base_json(self):
return { return {
"Id": self.id, "Id": self.id,
"Arn": self.arn,
"Name": self.name, "Name": self.name,
"Status": self.status, "Status": self.status,
"CreationDate": time.mktime(self.creation_date.timetuple()), "CreationDate": time.mktime(self.creation_date.timetuple()),
@ -564,12 +569,17 @@ class CognitoIdpBackend(BaseBackend):
user.groups.discard(group) user.groups.discard(group)
# User # User
def admin_create_user(self, user_pool_id, username, temporary_password, attributes): def admin_create_user(
self, user_pool_id, username, message_action, temporary_password, attributes
):
user_pool = self.user_pools.get(user_pool_id) user_pool = self.user_pools.get(user_pool_id)
if not user_pool: if not user_pool:
raise ResourceNotFoundError(user_pool_id) raise ResourceNotFoundError(user_pool_id)
if username in user_pool.users: if message_action and message_action == "RESEND":
if username not in user_pool.users:
raise UserNotFoundError(username)
elif username in user_pool.users:
raise UsernameExistsException(username) raise UsernameExistsException(username)
user = CognitoIdpUser( user = CognitoIdpUser(

View File

@ -259,10 +259,12 @@ class CognitoIdpResponse(BaseResponse):
def admin_create_user(self): def admin_create_user(self):
user_pool_id = self._get_param("UserPoolId") user_pool_id = self._get_param("UserPoolId")
username = self._get_param("Username") username = self._get_param("Username")
message_action = self._get_param("MessageAction")
temporary_password = self._get_param("TemporaryPassword") temporary_password = self._get_param("TemporaryPassword")
user = cognitoidp_backends[self.region].admin_create_user( user = cognitoidp_backends[self.region].admin_create_user(
user_pool_id, user_pool_id,
username, username,
message_action,
temporary_password, temporary_password,
self._get_param("UserAttributes", []), self._get_param("UserAttributes", []),
) )

View File

@ -43,7 +43,7 @@ from moto.config.exceptions import (
) )
from moto.core import BaseBackend, BaseModel from moto.core import BaseBackend, BaseModel
from moto.s3.config import s3_config_query from moto.s3.config import s3_account_public_access_block_query, s3_config_query
from moto.core import ACCOUNT_ID as DEFAULT_ACCOUNT_ID from moto.core import ACCOUNT_ID as DEFAULT_ACCOUNT_ID
@ -58,7 +58,10 @@ POP_STRINGS = [
DEFAULT_PAGE_SIZE = 100 DEFAULT_PAGE_SIZE = 100
# Map the Config resource type to a backend: # Map the Config resource type to a backend:
RESOURCE_MAP = {"AWS::S3::Bucket": s3_config_query} RESOURCE_MAP = {
"AWS::S3::Bucket": s3_config_query,
"AWS::S3::AccountPublicAccessBlock": s3_account_public_access_block_query,
}
def datetime2int(date): def datetime2int(date):
@ -867,16 +870,17 @@ class ConfigBackend(BaseBackend):
backend_region=backend_query_region, backend_region=backend_query_region,
) )
result = { resource_identifiers = []
"resourceIdentifiers": [ for identifier in identifiers:
{ item = {"resourceType": identifier["type"], "resourceId": identifier["id"]}
"resourceType": identifier["type"],
"resourceId": identifier["id"], # Some resource types lack names:
"resourceName": identifier["name"], if identifier.get("name"):
} item["resourceName"] = identifier["name"]
for identifier in identifiers
] resource_identifiers.append(item)
}
result = {"resourceIdentifiers": resource_identifiers}
if new_token: if new_token:
result["nextToken"] = new_token result["nextToken"] = new_token
@ -927,18 +931,21 @@ class ConfigBackend(BaseBackend):
resource_region=resource_region, resource_region=resource_region,
) )
result = { resource_identifiers = []
"ResourceIdentifiers": [ for identifier in identifiers:
{ item = {
"SourceAccountId": DEFAULT_ACCOUNT_ID, "SourceAccountId": DEFAULT_ACCOUNT_ID,
"SourceRegion": identifier["region"], "SourceRegion": identifier["region"],
"ResourceType": identifier["type"], "ResourceType": identifier["type"],
"ResourceId": identifier["id"], "ResourceId": identifier["id"],
"ResourceName": identifier["name"], }
}
for identifier in identifiers if identifier.get("name"):
] item["ResourceName"] = identifier["name"]
}
resource_identifiers.append(item)
result = {"ResourceIdentifiers": resource_identifiers}
if new_token: if new_token:
result["NextToken"] = new_token result["NextToken"] = new_token

View File

@ -606,12 +606,13 @@ class ConfigQueryModel(object):
As such, the proper way to implement is to first obtain a full list of results from all the region backends, and then filter As such, the proper way to implement is to first obtain a full list of results from all the region backends, and then filter
from there. It may be valuable to make this a concatenation of the region and resource name. from there. It may be valuable to make this a concatenation of the region and resource name.
:param resource_region: :param resource_ids: A list of resource IDs
:param resource_ids: :param resource_name: The individual name of a resource
:param resource_name: :param limit: How many per page
:param limit: :param next_token: The item that will page on
:param next_token:
:param backend_region: The region for the backend to pull results from. Set to `None` if this is an aggregated query. :param backend_region: The region for the backend to pull results from. Set to `None` if this is an aggregated query.
:param resource_region: The region for where the resources reside to pull results from. Set to `None` if this is a
non-aggregated query.
:return: This should return a list of Dicts that have the following fields: :return: This should return a list of Dicts that have the following fields:
[ [
{ {

View File

@ -1406,6 +1406,7 @@ class DynamoDBBackend(BaseBackend):
range_value = None range_value = None
item = table.get_item(hash_value, range_value) item = table.get_item(hash_value, range_value)
orig_item = copy.deepcopy(item)
if not expected: if not expected:
expected = {} expected = {}
@ -1439,6 +1440,8 @@ class DynamoDBBackend(BaseBackend):
) )
else: else:
item.update_with_attribute_updates(attribute_updates) item.update_with_attribute_updates(attribute_updates)
if table.stream_shard is not None:
table.stream_shard.add(orig_item, item)
return item return item
def delete_item( def delete_item(

View File

@ -86,6 +86,9 @@ class FakeStep(BaseModel):
self.start_datetime = None self.start_datetime = None
self.state = state self.state = state
def start(self):
    """Mark the step as started by stamping the current UTC time."""
    self.start_datetime = datetime.now(pytz.utc)
class FakeCluster(BaseModel): class FakeCluster(BaseModel):
def __init__( def __init__(
@ -204,6 +207,8 @@ class FakeCluster(BaseModel):
self.start_cluster() self.start_cluster()
self.run_bootstrap_actions() self.run_bootstrap_actions()
if self.steps:
self.steps[0].start()
@property @property
def instance_groups(self): def instance_groups(self):

View File

@ -835,7 +835,7 @@ LIST_STEPS_TEMPLATE = """<ListStepsResponse xmlns="http://elasticmapreduce.amazo
{% if step.end_datetime is not none %} {% if step.end_datetime is not none %}
<EndDateTime>{{ step.end_datetime.isoformat() }}</EndDateTime> <EndDateTime>{{ step.end_datetime.isoformat() }}</EndDateTime>
{% endif %} {% endif %}
{% if step.ready_datetime is not none %} {% if step.start_datetime is not none %}
<StartDateTime>{{ step.start_datetime.isoformat() }}</StartDateTime> <StartDateTime>{{ step.start_datetime.isoformat() }}</StartDateTime>
{% endif %} {% endif %}
</Timeline> </Timeline>

View File

@ -6,6 +6,7 @@ from boto3 import Session
from moto.core.exceptions import JsonRESTError from moto.core.exceptions import JsonRESTError
from moto.core import BaseBackend, BaseModel from moto.core import BaseBackend, BaseModel
from moto.sts.models import ACCOUNT_ID from moto.sts.models import ACCOUNT_ID
from moto.utilities.tagging_service import TaggingService
class Rule(BaseModel): class Rule(BaseModel):
@ -104,6 +105,7 @@ class EventsBackend(BaseBackend):
self.region_name = region_name self.region_name = region_name
self.event_buses = {} self.event_buses = {}
self.event_sources = {} self.event_sources = {}
self.tagger = TaggingService()
self._add_default_event_bus() self._add_default_event_bus()
@ -141,6 +143,9 @@ class EventsBackend(BaseBackend):
def delete_rule(self, name): def delete_rule(self, name):
self.rules_order.pop(self.rules_order.index(name)) self.rules_order.pop(self.rules_order.index(name))
arn = self.rules.get(name).arn
if self.tagger.has_tags(arn):
self.tagger.delete_all_tags_for_resource(arn)
return self.rules.pop(name) is not None return self.rules.pop(name) is not None
def describe_rule(self, name): def describe_rule(self, name):
@ -361,6 +366,32 @@ class EventsBackend(BaseBackend):
self.event_buses.pop(name, None) self.event_buses.pop(name, None)
def list_tags_for_resource(self, arn):
    """Return the tags of the rule named by the last ARN segment."""
    rule_name = arn.split("/")[-1]
    if rule_name not in self.rules:
        raise JsonRESTError(
            "ResourceNotFoundException", "An entity that you specified does not exist."
        )
    return self.tagger.list_tags_for_resource(self.rules[rule_name].arn)
def tag_resource(self, arn, tags):
    """Attach ``tags`` to the rule named by the last ARN segment."""
    rule_name = arn.split("/")[-1]
    if rule_name not in self.rules:
        raise JsonRESTError(
            "ResourceNotFoundException", "An entity that you specified does not exist."
        )
    self.tagger.tag_resource(self.rules[rule_name].arn, tags)
    return {}
def untag_resource(self, arn, tag_names):
    """Remove the named tags from the rule named by the last ARN segment."""
    rule_name = arn.split("/")[-1]
    if rule_name not in self.rules:
        raise JsonRESTError(
            "ResourceNotFoundException", "An entity that you specified does not exist."
        )
    self.tagger.untag_resource_using_names(self.rules[rule_name].arn, tag_names)
    return {}
events_backends = {} events_backends = {}
for region in Session().get_available_regions("events"): for region in Session().get_available_regions("events"):

View File

@ -297,3 +297,26 @@ class EventsHandler(BaseResponse):
self.events_backend.delete_event_bus(name) self.events_backend.delete_event_bus(name)
return "", self.response_headers return "", self.response_headers
def list_tags_for_resource(self):
    """Handle the ListTagsForResource action."""
    resource_arn = self._get_param("ResourceARN")
    tags = self.events_backend.list_tags_for_resource(resource_arn)
    return json.dumps(tags), self.response_headers
def tag_resource(self):
    """Handle the TagResource action."""
    resource_arn = self._get_param("ResourceARN")
    requested_tags = self._get_param("Tags")
    outcome = self.events_backend.tag_resource(resource_arn, requested_tags)
    return json.dumps(outcome), self.response_headers
def untag_resource(self):
    """Handle the UntagResource action."""
    resource_arn = self._get_param("ResourceARN")
    tag_keys = self._get_param("TagKeys")
    outcome = self.events_backend.untag_resource(resource_arn, tag_keys)
    return json.dumps(outcome), self.response_headers

View File

@ -22,6 +22,15 @@ class InvalidRequestException(IoTClientError):
) )
class InvalidStateTransitionException(IoTClientError):
    """409 error raised when a requested state change is not allowed."""

    def __init__(self, msg=None):
        self.code = 409
        default_message = "An attempt was made to change to an invalid state."
        super(InvalidStateTransitionException, self).__init__(
            "InvalidStateTransitionException", msg or default_message
        )
class VersionConflictException(IoTClientError): class VersionConflictException(IoTClientError):
def __init__(self, name): def __init__(self, name):
self.code = 409 self.code = 409

View File

@ -17,6 +17,7 @@ from .exceptions import (
DeleteConflictException, DeleteConflictException,
ResourceNotFoundException, ResourceNotFoundException,
InvalidRequestException, InvalidRequestException,
InvalidStateTransitionException,
VersionConflictException, VersionConflictException,
) )
@ -29,7 +30,7 @@ class FakeThing(BaseModel):
self.attributes = attributes self.attributes = attributes
self.arn = "arn:aws:iot:%s:1:thing/%s" % (self.region_name, thing_name) self.arn = "arn:aws:iot:%s:1:thing/%s" % (self.region_name, thing_name)
self.version = 1 self.version = 1
# TODO: we need to handle 'version'? # TODO: we need to handle "version"?
# for iot-data # for iot-data
self.thing_shadow = None self.thing_shadow = None
@ -174,18 +175,19 @@ class FakeCertificate(BaseModel):
class FakePolicy(BaseModel): class FakePolicy(BaseModel):
def __init__(self, name, document, region_name): def __init__(self, name, document, region_name, default_version_id="1"):
self.name = name self.name = name
self.document = document self.document = document
self.arn = "arn:aws:iot:%s:1:policy/%s" % (region_name, name) self.arn = "arn:aws:iot:%s:1:policy/%s" % (region_name, name)
self.version = "1" # TODO: handle version self.default_version_id = default_version_id
self.versions = [FakePolicyVersion(self.name, document, True, region_name)]
def to_get_dict(self): def to_get_dict(self):
return { return {
"policyName": self.name, "policyName": self.name,
"policyArn": self.arn, "policyArn": self.arn,
"policyDocument": self.document, "policyDocument": self.document,
"defaultVersionId": self.version, "defaultVersionId": self.default_version_id,
} }
def to_dict_at_creation(self): def to_dict_at_creation(self):
@ -193,13 +195,52 @@ class FakePolicy(BaseModel):
"policyName": self.name, "policyName": self.name,
"policyArn": self.arn, "policyArn": self.arn,
"policyDocument": self.document, "policyDocument": self.document,
"policyVersionId": self.version, "policyVersionId": self.default_version_id,
} }
def to_dict(self): def to_dict(self):
return {"policyName": self.name, "policyArn": self.arn} return {"policyName": self.name, "policyArn": self.arn}
class FakePolicyVersion(object):
    """A single version of an IoT policy document.

    Timestamps are fixed (2015-01-01 created, 2015-01-02 modified) so that
    responses are deterministic in tests.
    """

    def __init__(self, policy_name, document, is_default, region_name):
        self.name = policy_name
        self.arn = "arn:aws:iot:%s:1:policy/%s" % (region_name, policy_name)
        # A falsy document (None, empty) is normalized to an empty dict.
        self.document = document or {}
        self.is_default = is_default
        self.version_id = "1"
        self.create_datetime = time.mktime(datetime(2015, 1, 1).timetuple())
        self.last_modified_datetime = time.mktime(datetime(2015, 1, 2).timetuple())

    def to_get_dict(self):
        """Full representation, as returned by GetPolicyVersion."""
        payload = dict(
            policyName=self.name,
            policyArn=self.arn,
            policyDocument=self.document,
            policyVersionId=self.version_id,
            isDefaultVersion=self.is_default,
            creationDate=self.create_datetime,
            lastModifiedDate=self.last_modified_datetime,
        )
        payload["generationId"] = self.version_id
        return payload

    def to_dict_at_creation(self):
        """Representation returned by CreatePolicyVersion."""
        return dict(
            policyArn=self.arn,
            policyDocument=self.document,
            policyVersionId=self.version_id,
            isDefaultVersion=self.is_default,
        )

    def to_dict(self):
        """Summary entry used by ListPolicyVersions."""
        return dict(
            versionId=self.version_id,
            isDefaultVersion=self.is_default,
            createDate=self.create_datetime,
        )
class FakeJob(BaseModel): class FakeJob(BaseModel):
JOB_ID_REGEX_PATTERN = "[a-zA-Z0-9_-]" JOB_ID_REGEX_PATTERN = "[a-zA-Z0-9_-]"
JOB_ID_REGEX = re.compile(JOB_ID_REGEX_PATTERN) JOB_ID_REGEX = re.compile(JOB_ID_REGEX_PATTERN)
@ -226,12 +267,14 @@ class FakeJob(BaseModel):
self.targets = targets self.targets = targets
self.document_source = document_source self.document_source = document_source
self.document = document self.document = document
self.force = False
self.description = description self.description = description
self.presigned_url_config = presigned_url_config self.presigned_url_config = presigned_url_config
self.target_selection = target_selection self.target_selection = target_selection
self.job_executions_rollout_config = job_executions_rollout_config self.job_executions_rollout_config = job_executions_rollout_config
self.status = None # IN_PROGRESS | CANCELED | COMPLETED self.status = "QUEUED" # IN_PROGRESS | CANCELED | COMPLETED
self.comment = None self.comment = None
self.reason_code = None
self.created_at = time.mktime(datetime(2015, 1, 1).timetuple()) self.created_at = time.mktime(datetime(2015, 1, 1).timetuple())
self.last_updated_at = time.mktime(datetime(2015, 1, 1).timetuple()) self.last_updated_at = time.mktime(datetime(2015, 1, 1).timetuple())
self.completed_at = None self.completed_at = None
@ -258,9 +301,11 @@ class FakeJob(BaseModel):
"jobExecutionsRolloutConfig": self.job_executions_rollout_config, "jobExecutionsRolloutConfig": self.job_executions_rollout_config,
"status": self.status, "status": self.status,
"comment": self.comment, "comment": self.comment,
"forceCanceled": self.force,
"reasonCode": self.reason_code,
"createdAt": self.created_at, "createdAt": self.created_at,
"lastUpdatedAt": self.last_updated_at, "lastUpdatedAt": self.last_updated_at,
"completedAt": self.completedAt, "completedAt": self.completed_at,
"jobProcessDetails": self.job_process_details, "jobProcessDetails": self.job_process_details,
"documentParameters": self.document_parameters, "documentParameters": self.document_parameters,
"document": self.document, "document": self.document,
@ -275,12 +320,67 @@ class FakeJob(BaseModel):
return regex_match and length_match return regex_match and length_match
class FakeJobExecution(BaseModel):
def __init__(
self,
job_id,
thing_arn,
status="QUEUED",
force_canceled=False,
status_details_map={},
):
self.job_id = job_id
self.status = status # IN_PROGRESS | CANCELED | COMPLETED
self.force_canceled = force_canceled
self.status_details_map = status_details_map
self.thing_arn = thing_arn
self.queued_at = time.mktime(datetime(2015, 1, 1).timetuple())
self.started_at = time.mktime(datetime(2015, 1, 1).timetuple())
self.last_updated_at = time.mktime(datetime(2015, 1, 1).timetuple())
self.execution_number = 123
self.version_number = 123
self.approximate_seconds_before_time_out = 123
def to_get_dict(self):
obj = {
"jobId": self.job_id,
"status": self.status,
"forceCanceled": self.force_canceled,
"statusDetails": {"detailsMap": self.status_details_map},
"thingArn": self.thing_arn,
"queuedAt": self.queued_at,
"startedAt": self.started_at,
"lastUpdatedAt": self.last_updated_at,
"executionNumber": self.execution_number,
"versionNumber": self.version_number,
"approximateSecondsBeforeTimedOut": self.approximate_seconds_before_time_out,
}
return obj
def to_dict(self):
obj = {
"jobId": self.job_id,
"thingArn": self.thing_arn,
"jobExecutionSummary": {
"status": self.status,
"queuedAt": self.queued_at,
"startedAt": self.started_at,
"lastUpdatedAt": self.last_updated_at,
"executionNumber": self.execution_number,
},
}
return obj
class IoTBackend(BaseBackend): class IoTBackend(BaseBackend):
def __init__(self, region_name=None): def __init__(self, region_name=None):
super(IoTBackend, self).__init__() super(IoTBackend, self).__init__()
self.region_name = region_name self.region_name = region_name
self.things = OrderedDict() self.things = OrderedDict()
self.jobs = OrderedDict() self.jobs = OrderedDict()
self.job_executions = OrderedDict()
self.thing_types = OrderedDict() self.thing_types = OrderedDict()
self.thing_groups = OrderedDict() self.thing_groups = OrderedDict()
self.certificates = OrderedDict() self.certificates = OrderedDict()
@ -535,6 +635,28 @@ class IoTBackend(BaseBackend):
self.policies[policy.name] = policy self.policies[policy.name] = policy
return policy return policy
def attach_policy(self, policy_name, target):
principal = self._get_principal(target)
policy = self.get_policy(policy_name)
k = (target, policy_name)
if k in self.principal_policies:
return
self.principal_policies[k] = (principal, policy)
def detach_policy(self, policy_name, target):
# this may raises ResourceNotFoundException
self._get_principal(target)
self.get_policy(policy_name)
k = (target, policy_name)
if k not in self.principal_policies:
raise ResourceNotFoundException()
del self.principal_policies[k]
def list_attached_policies(self, target):
policies = [v[1] for k, v in self.principal_policies.items() if k[0] == target]
return policies
def list_policies(self): def list_policies(self):
policies = self.policies.values() policies = self.policies.values()
return policies return policies
@ -559,6 +681,60 @@ class IoTBackend(BaseBackend):
policy = self.get_policy(policy_name) policy = self.get_policy(policy_name)
del self.policies[policy.name] del self.policies[policy.name]
def create_policy_version(self, policy_name, policy_document, set_as_default):
policy = self.get_policy(policy_name)
if not policy:
raise ResourceNotFoundException()
version = FakePolicyVersion(
policy_name, policy_document, set_as_default, self.region_name
)
policy.versions.append(version)
version.version_id = "{0}".format(len(policy.versions))
if set_as_default:
self.set_default_policy_version(policy_name, version.version_id)
return version
def set_default_policy_version(self, policy_name, version_id):
policy = self.get_policy(policy_name)
if not policy:
raise ResourceNotFoundException()
for version in policy.versions:
if version.version_id == version_id:
version.is_default = True
policy.default_version_id = version.version_id
policy.document = version.document
else:
version.is_default = False
def get_policy_version(self, policy_name, version_id):
policy = self.get_policy(policy_name)
if not policy:
raise ResourceNotFoundException()
for version in policy.versions:
if version.version_id == version_id:
return version
raise ResourceNotFoundException()
def list_policy_versions(self, policy_name):
policy = self.get_policy(policy_name)
if not policy:
raise ResourceNotFoundException()
return policy.versions
def delete_policy_version(self, policy_name, version_id):
policy = self.get_policy(policy_name)
if not policy:
raise ResourceNotFoundException()
if version_id == policy.default_version_id:
raise InvalidRequestException(
"Cannot delete the default version of a policy"
)
for i, v in enumerate(policy.versions):
if v.version_id == version_id:
del policy.versions[i]
return
raise ResourceNotFoundException()
def _get_principal(self, principal_arn): def _get_principal(self, principal_arn):
""" """
raise ResourceNotFoundException raise ResourceNotFoundException
@ -574,14 +750,6 @@ class IoTBackend(BaseBackend):
pass pass
raise ResourceNotFoundException() raise ResourceNotFoundException()
def attach_policy(self, policy_name, target):
principal = self._get_principal(target)
policy = self.get_policy(policy_name)
k = (target, policy_name)
if k in self.principal_policies:
return
self.principal_policies[k] = (principal, policy)
def attach_principal_policy(self, policy_name, principal_arn): def attach_principal_policy(self, policy_name, principal_arn):
principal = self._get_principal(principal_arn) principal = self._get_principal(principal_arn)
policy = self.get_policy(policy_name) policy = self.get_policy(policy_name)
@ -590,15 +758,6 @@ class IoTBackend(BaseBackend):
return return
self.principal_policies[k] = (principal, policy) self.principal_policies[k] = (principal, policy)
def detach_policy(self, policy_name, target):
# this may raises ResourceNotFoundException
self._get_principal(target)
self.get_policy(policy_name)
k = (target, policy_name)
if k not in self.principal_policies:
raise ResourceNotFoundException()
del self.principal_policies[k]
def detach_principal_policy(self, policy_name, principal_arn): def detach_principal_policy(self, policy_name, principal_arn):
# this may raises ResourceNotFoundException # this may raises ResourceNotFoundException
self._get_principal(principal_arn) self._get_principal(principal_arn)
@ -819,11 +978,187 @@ class IoTBackend(BaseBackend):
self.region_name, self.region_name,
) )
self.jobs[job_id] = job self.jobs[job_id] = job
for thing_arn in targets:
thing_name = thing_arn.split(":")[-1].split("/")[-1]
job_execution = FakeJobExecution(job_id, thing_arn)
self.job_executions[(job_id, thing_name)] = job_execution
return job.job_arn, job_id, description return job.job_arn, job_id, description
def describe_job(self, job_id): def describe_job(self, job_id):
jobs = [_ for _ in self.jobs.values() if _.job_id == job_id]
if len(jobs) == 0:
raise ResourceNotFoundException()
return jobs[0]
def delete_job(self, job_id, force):
job = self.jobs[job_id]
if job.status == "IN_PROGRESS" and force:
del self.jobs[job_id]
elif job.status != "IN_PROGRESS":
del self.jobs[job_id]
else:
raise InvalidStateTransitionException()
def cancel_job(self, job_id, reason_code, comment, force):
job = self.jobs[job_id]
job.reason_code = reason_code if reason_code is not None else job.reason_code
job.comment = comment if comment is not None else job.comment
job.force = force if force is not None and force != job.force else job.force
job.status = "CANCELED"
if job.status == "IN_PROGRESS" and force:
self.jobs[job_id] = job
elif job.status != "IN_PROGRESS":
self.jobs[job_id] = job
else:
raise InvalidStateTransitionException()
return job
def get_job_document(self, job_id):
return self.jobs[job_id] return self.jobs[job_id]
def list_jobs(
self,
status,
target_selection,
max_results,
token,
thing_group_name,
thing_group_id,
):
# TODO: implement filters
all_jobs = [_.to_dict() for _ in self.jobs.values()]
filtered_jobs = all_jobs
if token is None:
jobs = filtered_jobs[0:max_results]
next_token = str(max_results) if len(filtered_jobs) > max_results else None
else:
token = int(token)
jobs = filtered_jobs[token : token + max_results]
next_token = (
str(token + max_results)
if len(filtered_jobs) > token + max_results
else None
)
return jobs, next_token
def describe_job_execution(self, job_id, thing_name, execution_number):
try:
job_execution = self.job_executions[(job_id, thing_name)]
except KeyError:
raise ResourceNotFoundException()
if job_execution is None or (
execution_number is not None
and job_execution.execution_number != execution_number
):
raise ResourceNotFoundException()
return job_execution
def cancel_job_execution(
self, job_id, thing_name, force, expected_version, status_details
):
job_execution = self.job_executions[(job_id, thing_name)]
if job_execution is None:
raise ResourceNotFoundException()
job_execution.force_canceled = (
force if force is not None else job_execution.force_canceled
)
# TODO: implement expected_version and status_details (at most 10 can be specified)
if job_execution.status == "IN_PROGRESS" and force:
job_execution.status = "CANCELED"
self.job_executions[(job_id, thing_name)] = job_execution
elif job_execution.status != "IN_PROGRESS":
job_execution.status = "CANCELED"
self.job_executions[(job_id, thing_name)] = job_execution
else:
raise InvalidStateTransitionException()
def delete_job_execution(self, job_id, thing_name, execution_number, force):
job_execution = self.job_executions[(job_id, thing_name)]
if job_execution.execution_number != execution_number:
raise ResourceNotFoundException()
if job_execution.status == "IN_PROGRESS" and force:
del self.job_executions[(job_id, thing_name)]
elif job_execution.status != "IN_PROGRESS":
del self.job_executions[(job_id, thing_name)]
else:
raise InvalidStateTransitionException()
def list_job_executions_for_job(self, job_id, status, max_results, next_token):
job_executions = [
self.job_executions[je].to_dict()
for je in self.job_executions
if je[0] == job_id
]
if status is not None:
job_executions = list(
filter(
lambda elem: status in elem["status"] and elem["status"] == status,
job_executions,
)
)
token = next_token
if token is None:
job_executions = job_executions[0:max_results]
next_token = str(max_results) if len(job_executions) > max_results else None
else:
token = int(token)
job_executions = job_executions[token : token + max_results]
next_token = (
str(token + max_results)
if len(job_executions) > token + max_results
else None
)
return job_executions, next_token
def list_job_executions_for_thing(
self, thing_name, status, max_results, next_token
):
job_executions = [
self.job_executions[je].to_dict()
for je in self.job_executions
if je[1] == thing_name
]
if status is not None:
job_executions = list(
filter(
lambda elem: status in elem["status"] and elem["status"] == status,
job_executions,
)
)
token = next_token
if token is None:
job_executions = job_executions[0:max_results]
next_token = str(max_results) if len(job_executions) > max_results else None
else:
token = int(token)
job_executions = job_executions[token : token + max_results]
next_token = (
str(token + max_results)
if len(job_executions) > token + max_results
else None
)
return job_executions, next_token
iot_backends = {} iot_backends = {}
for region in Session().get_available_regions("iot"): for region in Session().get_available_regions("iot"):

View File

@ -1,6 +1,7 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import json import json
from six.moves.urllib.parse import unquote
from moto.core.responses import BaseResponse from moto.core.responses import BaseResponse
from .models import iot_backends from .models import iot_backends
@ -141,6 +142,8 @@ class IoTResponse(BaseResponse):
createdAt=job.created_at, createdAt=job.created_at,
description=job.description, description=job.description,
documentParameters=job.document_parameters, documentParameters=job.document_parameters,
forceCanceled=job.force,
reasonCode=job.reason_code,
jobArn=job.job_arn, jobArn=job.job_arn,
jobExecutionsRolloutConfig=job.job_executions_rollout_config, jobExecutionsRolloutConfig=job.job_executions_rollout_config,
jobId=job.job_id, jobId=job.job_id,
@ -154,6 +157,127 @@ class IoTResponse(BaseResponse):
) )
) )
def delete_job(self):
job_id = self._get_param("jobId")
force = self._get_bool_param("force")
self.iot_backend.delete_job(job_id=job_id, force=force)
return json.dumps(dict())
def cancel_job(self):
job_id = self._get_param("jobId")
reason_code = self._get_param("reasonCode")
comment = self._get_param("comment")
force = self._get_bool_param("force")
job = self.iot_backend.cancel_job(
job_id=job_id, reason_code=reason_code, comment=comment, force=force
)
return json.dumps(job.to_dict())
def get_job_document(self):
job = self.iot_backend.get_job_document(job_id=self._get_param("jobId"))
if job.document is not None:
return json.dumps({"document": job.document})
else:
# job.document_source is not None:
# TODO: needs to be implemented to get document_source's content from S3
return json.dumps({"document": ""})
def list_jobs(self):
status = (self._get_param("status"),)
target_selection = (self._get_param("targetSelection"),)
max_results = self._get_int_param(
"maxResults", 50
) # not the default, but makes testing easier
previous_next_token = self._get_param("nextToken")
thing_group_name = (self._get_param("thingGroupName"),)
thing_group_id = self._get_param("thingGroupId")
jobs, next_token = self.iot_backend.list_jobs(
status=status,
target_selection=target_selection,
max_results=max_results,
token=previous_next_token,
thing_group_name=thing_group_name,
thing_group_id=thing_group_id,
)
return json.dumps(dict(jobs=jobs, nextToken=next_token))
def describe_job_execution(self):
job_id = self._get_param("jobId")
thing_name = self._get_param("thingName")
execution_number = self._get_int_param("executionNumber")
job_execution = self.iot_backend.describe_job_execution(
job_id=job_id, thing_name=thing_name, execution_number=execution_number
)
return json.dumps(dict(execution=job_execution.to_get_dict()))
def cancel_job_execution(self):
job_id = self._get_param("jobId")
thing_name = self._get_param("thingName")
force = self._get_bool_param("force")
expected_version = self._get_int_param("expectedVersion")
status_details = self._get_param("statusDetails")
self.iot_backend.cancel_job_execution(
job_id=job_id,
thing_name=thing_name,
force=force,
expected_version=expected_version,
status_details=status_details,
)
return json.dumps(dict())
def delete_job_execution(self):
job_id = self._get_param("jobId")
thing_name = self._get_param("thingName")
execution_number = self._get_int_param("executionNumber")
force = self._get_bool_param("force")
self.iot_backend.delete_job_execution(
job_id=job_id,
thing_name=thing_name,
execution_number=execution_number,
force=force,
)
return json.dumps(dict())
def list_job_executions_for_job(self):
job_id = self._get_param("jobId")
status = self._get_param("status")
max_results = self._get_int_param(
"maxResults", 50
) # not the default, but makes testing easier
next_token = self._get_param("nextToken")
job_executions, next_token = self.iot_backend.list_job_executions_for_job(
job_id=job_id, status=status, max_results=max_results, next_token=next_token
)
return json.dumps(dict(executionSummaries=job_executions, nextToken=next_token))
def list_job_executions_for_thing(self):
thing_name = self._get_param("thingName")
status = self._get_param("status")
max_results = self._get_int_param(
"maxResults", 50
) # not the default, but makes testing easier
next_token = self._get_param("nextToken")
job_executions, next_token = self.iot_backend.list_job_executions_for_thing(
thing_name=thing_name,
status=status,
max_results=max_results,
next_token=next_token,
)
return json.dumps(dict(executionSummaries=job_executions, nextToken=next_token))
def create_keys_and_certificate(self): def create_keys_and_certificate(self):
set_as_active = self._get_bool_param("setAsActive") set_as_active = self._get_bool_param("setAsActive")
cert, key_pair = self.iot_backend.create_keys_and_certificate( cert, key_pair = self.iot_backend.create_keys_and_certificate(
@ -241,12 +365,61 @@ class IoTResponse(BaseResponse):
self.iot_backend.delete_policy(policy_name=policy_name) self.iot_backend.delete_policy(policy_name=policy_name)
return json.dumps(dict()) return json.dumps(dict())
def create_policy_version(self):
policy_name = self._get_param("policyName")
policy_document = self._get_param("policyDocument")
set_as_default = self._get_bool_param("setAsDefault")
policy_version = self.iot_backend.create_policy_version(
policy_name, policy_document, set_as_default
)
return json.dumps(dict(policy_version.to_dict_at_creation()))
def set_default_policy_version(self):
policy_name = self._get_param("policyName")
version_id = self._get_param("policyVersionId")
self.iot_backend.set_default_policy_version(policy_name, version_id)
return json.dumps(dict())
def get_policy_version(self):
policy_name = self._get_param("policyName")
version_id = self._get_param("policyVersionId")
policy_version = self.iot_backend.get_policy_version(policy_name, version_id)
return json.dumps(dict(policy_version.to_get_dict()))
def list_policy_versions(self):
policy_name = self._get_param("policyName")
policiy_versions = self.iot_backend.list_policy_versions(
policy_name=policy_name
)
return json.dumps(dict(policyVersions=[_.to_dict() for _ in policiy_versions]))
def delete_policy_version(self):
policy_name = self._get_param("policyName")
version_id = self._get_param("policyVersionId")
self.iot_backend.delete_policy_version(policy_name, version_id)
return json.dumps(dict())
def attach_policy(self): def attach_policy(self):
policy_name = self._get_param("policyName") policy_name = self._get_param("policyName")
target = self._get_param("target") target = self._get_param("target")
self.iot_backend.attach_policy(policy_name=policy_name, target=target) self.iot_backend.attach_policy(policy_name=policy_name, target=target)
return json.dumps(dict()) return json.dumps(dict())
def list_attached_policies(self):
principal = unquote(self._get_param("target"))
# marker = self._get_param("marker")
# page_size = self._get_int_param("pageSize")
policies = self.iot_backend.list_attached_policies(target=principal)
# TODO: implement pagination in the future
next_marker = None
return json.dumps(
dict(policies=[_.to_dict() for _ in policies], nextMarker=next_marker)
)
def attach_principal_policy(self): def attach_principal_policy(self):
policy_name = self._get_param("policyName") policy_name = self._get_param("policyName")
principal = self.headers.get("x-amzn-iot-principal") principal = self.headers.get("x-amzn-iot-principal")

View File

@ -8,7 +8,8 @@ from boto3 import Session
from moto.core import BaseBackend, BaseModel from moto.core import BaseBackend, BaseModel
from moto.core.utils import unix_time from moto.core.utils import unix_time
from moto.utilities.tagging_service import TaggingService
from moto.core.exceptions import JsonRESTError
from moto.iam.models import ACCOUNT_ID from moto.iam.models import ACCOUNT_ID
from .utils import decrypt, encrypt, generate_key_id, generate_master_key from .utils import decrypt, encrypt, generate_key_id, generate_master_key
@ -16,7 +17,7 @@ from .utils import decrypt, encrypt, generate_key_id, generate_master_key
class Key(BaseModel): class Key(BaseModel):
def __init__( def __init__(
self, policy, key_usage, customer_master_key_spec, description, tags, region self, policy, key_usage, customer_master_key_spec, description, region
): ):
self.id = generate_key_id() self.id = generate_key_id()
self.creation_date = unix_time() self.creation_date = unix_time()
@ -29,7 +30,6 @@ class Key(BaseModel):
self.account_id = ACCOUNT_ID self.account_id = ACCOUNT_ID
self.key_rotation_status = False self.key_rotation_status = False
self.deletion_date = None self.deletion_date = None
self.tags = tags or {}
self.key_material = generate_master_key() self.key_material = generate_master_key()
self.origin = "AWS_KMS" self.origin = "AWS_KMS"
self.key_manager = "CUSTOMER" self.key_manager = "CUSTOMER"
@ -111,11 +111,12 @@ class Key(BaseModel):
key_usage="ENCRYPT_DECRYPT", key_usage="ENCRYPT_DECRYPT",
customer_master_key_spec="SYMMETRIC_DEFAULT", customer_master_key_spec="SYMMETRIC_DEFAULT",
description=properties["Description"], description=properties["Description"],
tags=properties.get("Tags"), tags=properties.get("Tags", []),
region=region_name, region=region_name,
) )
key.key_rotation_status = properties["EnableKeyRotation"] key.key_rotation_status = properties["EnableKeyRotation"]
key.enabled = properties["Enabled"] key.enabled = properties["Enabled"]
return key return key
def get_cfn_attribute(self, attribute_name): def get_cfn_attribute(self, attribute_name):
@ -130,32 +131,26 @@ class KmsBackend(BaseBackend):
def __init__(self): def __init__(self):
self.keys = {} self.keys = {}
self.key_to_aliases = defaultdict(set) self.key_to_aliases = defaultdict(set)
self.tagger = TaggingService(keyName="TagKey", valueName="TagValue")
def create_key( def create_key(
self, policy, key_usage, customer_master_key_spec, description, tags, region self, policy, key_usage, customer_master_key_spec, description, tags, region
): ):
key = Key( key = Key(policy, key_usage, customer_master_key_spec, description, region)
policy, key_usage, customer_master_key_spec, description, tags, region
)
self.keys[key.id] = key self.keys[key.id] = key
if tags is not None and len(tags) > 0:
self.tag_resource(key.id, tags)
return key return key
def update_key_description(self, key_id, description): def update_key_description(self, key_id, description):
key = self.keys[self.get_key_id(key_id)] key = self.keys[self.get_key_id(key_id)]
key.description = description key.description = description
def tag_resource(self, key_id, tags):
key = self.keys[self.get_key_id(key_id)]
key.tags = tags
def list_resource_tags(self, key_id):
key = self.keys[self.get_key_id(key_id)]
return key.tags
def delete_key(self, key_id): def delete_key(self, key_id):
if key_id in self.keys: if key_id in self.keys:
if key_id in self.key_to_aliases: if key_id in self.key_to_aliases:
self.key_to_aliases.pop(key_id) self.key_to_aliases.pop(key_id)
self.tagger.delete_all_tags_for_resource(key_id)
return self.keys.pop(key_id) return self.keys.pop(key_id)
@ -325,6 +320,32 @@ class KmsBackend(BaseBackend):
return plaintext, ciphertext_blob, arn return plaintext, ciphertext_blob, arn
def list_resource_tags(self, key_id):
if key_id in self.keys:
return self.tagger.list_tags_for_resource(key_id)
raise JsonRESTError(
"NotFoundException",
"The request was rejected because the specified entity or resource could not be found.",
)
def tag_resource(self, key_id, tags):
if key_id in self.keys:
self.tagger.tag_resource(key_id, tags)
return {}
raise JsonRESTError(
"NotFoundException",
"The request was rejected because the specified entity or resource could not be found.",
)
def untag_resource(self, key_id, tag_names):
if key_id in self.keys:
self.tagger.untag_resource_using_names(key_id, tag_names)
return {}
raise JsonRESTError(
"NotFoundException",
"The request was rejected because the specified entity or resource could not be found.",
)
kms_backends = {} kms_backends = {}
for region in Session().get_available_regions("kms"): for region in Session().get_available_regions("kms"):

View File

@ -144,17 +144,27 @@ class KmsResponse(BaseResponse):
self._validate_cmk_id(key_id) self._validate_cmk_id(key_id)
self.kms_backend.tag_resource(key_id, tags) result = self.kms_backend.tag_resource(key_id, tags)
return json.dumps({}) return json.dumps(result)
def untag_resource(self):
"""https://docs.aws.amazon.com/kms/latest/APIReference/API_UntagResource.html"""
key_id = self.parameters.get("KeyId")
tag_names = self.parameters.get("TagKeys")
self._validate_cmk_id(key_id)
result = self.kms_backend.untag_resource(key_id, tag_names)
return json.dumps(result)
def list_resource_tags(self): def list_resource_tags(self):
"""https://docs.aws.amazon.com/kms/latest/APIReference/API_ListResourceTags.html""" """https://docs.aws.amazon.com/kms/latest/APIReference/API_ListResourceTags.html"""
key_id = self.parameters.get("KeyId") key_id = self.parameters.get("KeyId")
self._validate_cmk_id(key_id) self._validate_cmk_id(key_id)
tags = self.kms_backend.list_resource_tags(key_id) tags = self.kms_backend.list_resource_tags(key_id)
return json.dumps({"Tags": tags, "NextMarker": None, "Truncated": False}) tags.update({"NextMarker": None, "Truncated": False})
return json.dumps(tags)
def describe_key(self): def describe_key(self):
"""https://docs.aws.amazon.com/kms/latest/APIReference/API_DescribeKey.html""" """https://docs.aws.amazon.com/kms/latest/APIReference/API_DescribeKey.html"""

View File

@ -318,7 +318,7 @@ class ResourceGroupsTaggingAPIBackend(BaseBackend):
# KMS # KMS
def get_kms_tags(kms_key_id): def get_kms_tags(kms_key_id):
result = [] result = []
for tag in self.kms_backend.list_resource_tags(kms_key_id): for tag in self.kms_backend.list_resource_tags(kms_key_id).get("Tags", []):
result.append({"Key": tag["TagKey"], "Value": tag["TagValue"]}) result.append({"Key": tag["TagKey"], "Value": tag["TagValue"]})
return result return result

View File

@ -1,8 +1,13 @@
import datetime
import json import json
import time
from boto3 import Session
from moto.core.exceptions import InvalidNextTokenException from moto.core.exceptions import InvalidNextTokenException
from moto.core.models import ConfigQueryModel from moto.core.models import ConfigQueryModel
from moto.s3 import s3_backends from moto.s3 import s3_backends
from moto.s3.models import get_moto_s3_account_id
class S3ConfigQuery(ConfigQueryModel): class S3ConfigQuery(ConfigQueryModel):
@ -118,4 +123,146 @@ class S3ConfigQuery(ConfigQueryModel):
return config_data return config_data
class S3AccountPublicAccessBlockConfigQuery(ConfigQueryModel):
def list_config_service_resources(
self,
resource_ids,
resource_name,
limit,
next_token,
backend_region=None,
resource_region=None,
):
# For the Account Public Access Block, they are the same for all regions. The resource ID is the AWS account ID
# There is no resource name -- it should be a blank string "" if provided.
# The resource name can only ever be None or an empty string:
if resource_name is not None and resource_name != "":
return [], None
pab = None
account_id = get_moto_s3_account_id()
regions = [region for region in Session().get_available_regions("config")]
# If a resource ID was passed in, then filter accordingly:
if resource_ids:
for id in resource_ids:
if account_id == id:
pab = self.backends["global"].account_public_access_block
break
# Otherwise, just grab the one from the backend:
if not resource_ids:
pab = self.backends["global"].account_public_access_block
# If it's not present, then return nothing
if not pab:
return [], None
# Filter on regions (and paginate on them as well):
if backend_region:
pab_list = [backend_region]
elif resource_region:
# Invalid region?
if resource_region not in regions:
return [], None
pab_list = [resource_region]
# Aggregated query where no regions were supplied so return them all:
else:
pab_list = regions
# Pagination logic:
sorted_regions = sorted(pab_list)
new_token = None
# Get the start:
if not next_token:
start = 0
else:
# Tokens for this moto feature is just the region-name:
# For OTHER non-global resource types, it's the region concatenated with the resource ID.
if next_token not in sorted_regions:
raise InvalidNextTokenException()
start = sorted_regions.index(next_token)
# Get the list of items to collect:
pab_list = sorted_regions[start : (start + limit)]
if len(sorted_regions) > (start + limit):
new_token = sorted_regions[start + limit]
return (
[
{
"type": "AWS::S3::AccountPublicAccessBlock",
"id": account_id,
"region": region,
}
for region in pab_list
],
new_token,
)
def get_config_resource(
self, resource_id, resource_name=None, backend_region=None, resource_region=None
):
# Do we even have this defined?
if not self.backends["global"].account_public_access_block:
return None
# Resource name can only ever be "" if it's supplied:
if resource_name is not None and resource_name != "":
return None
# Are we filtering based on region?
account_id = get_moto_s3_account_id()
regions = [region for region in Session().get_available_regions("config")]
# Is the resource ID correct?:
if account_id == resource_id:
if backend_region:
pab_region = backend_region
# Invalid region?
elif resource_region not in regions:
return None
else:
pab_region = resource_region
else:
return None
# Format the PAB to the AWS Config format:
creation_time = datetime.datetime.utcnow()
config_data = {
"version": "1.3",
"accountId": account_id,
"configurationItemCaptureTime": str(creation_time),
"configurationItemStatus": "OK",
"configurationStateId": str(
int(time.mktime(creation_time.timetuple()))
), # PY2 and 3 compatible
"resourceType": "AWS::S3::AccountPublicAccessBlock",
"resourceId": account_id,
"awsRegion": pab_region,
"availabilityZone": "Not Applicable",
"configuration": self.backends[
"global"
].account_public_access_block.to_config_dict(),
"supplementaryConfiguration": {},
}
# The 'configuration' field is also a JSON string:
config_data["configuration"] = json.dumps(config_data["configuration"])
return config_data
s3_config_query = S3ConfigQuery(s3_backends) s3_config_query = S3ConfigQuery(s3_backends)
s3_account_public_access_block_query = S3AccountPublicAccessBlockConfigQuery(
s3_backends
)

View File

@ -359,3 +359,12 @@ class InvalidPublicAccessBlockConfiguration(S3ClientError):
*args, *args,
**kwargs **kwargs
) )
class WrongPublicAccessBlockAccountIdError(S3ClientError):
    """Raised when the supplied account ID does not match moto's ACCOUNT_ID.

    Rendered as a generic 403 "Access Denied" with no detail about the
    mismatch.
    """

    code = 403

    def __init__(self):
        super(WrongPublicAccessBlockAccountIdError, self).__init__(
            "AccessDenied", "Access Denied"
        )

View File

@ -19,7 +19,7 @@ import uuid
import six import six
from bisect import insort from bisect import insort
from moto.core import BaseBackend, BaseModel from moto.core import ACCOUNT_ID, BaseBackend, BaseModel
from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime
from .exceptions import ( from .exceptions import (
BucketAlreadyExists, BucketAlreadyExists,
@ -37,6 +37,7 @@ from .exceptions import (
CrossLocationLoggingProhibitted, CrossLocationLoggingProhibitted,
NoSuchPublicAccessBlockConfiguration, NoSuchPublicAccessBlockConfiguration,
InvalidPublicAccessBlockConfiguration, InvalidPublicAccessBlockConfiguration,
WrongPublicAccessBlockAccountIdError,
) )
from .utils import clean_key_name, _VersionedKeyStore from .utils import clean_key_name, _VersionedKeyStore
@ -58,6 +59,13 @@ DEFAULT_TEXT_ENCODING = sys.getdefaultencoding()
OWNER = "75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a" OWNER = "75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a"
def get_moto_s3_account_id():
    """Return the AWS account ID moto uses for S3.

    Kept as a tiny indirection so tests can ``mock.patch`` ACCOUNT_ID in a
    single place and the AWS Config integration picks the change up for free.
    """
    return ACCOUNT_ID
class FakeDeleteMarker(BaseModel): class FakeDeleteMarker(BaseModel):
def __init__(self, key): def __init__(self, key):
self.key = key self.key = key
@ -1163,6 +1171,7 @@ class FakeBucket(BaseModel):
class S3Backend(BaseBackend): class S3Backend(BaseBackend):
def __init__(self): def __init__(self):
self.buckets = {} self.buckets = {}
self.account_public_access_block = None
def create_bucket(self, bucket_name, region_name): def create_bucket(self, bucket_name, region_name):
if bucket_name in self.buckets: if bucket_name in self.buckets:
@ -1264,6 +1273,16 @@ class S3Backend(BaseBackend):
return bucket.public_access_block return bucket.public_access_block
def get_account_public_access_block(self, account_id):
    """Return the account-wide PublicAccessBlock configuration.

    :param account_id: must equal moto's ACCOUNT_ID; any other value is
        rejected with WrongPublicAccessBlockAccountIdError (403).
    :raises NoSuchPublicAccessBlockConfiguration: if none has been set.
    """
    # The account ID should equal the account id that is set for Moto:
    if account_id != ACCOUNT_ID:
        raise WrongPublicAccessBlockAccountIdError()

    if not self.account_public_access_block:
        raise NoSuchPublicAccessBlockConfiguration()

    return self.account_public_access_block
def set_key( def set_key(
self, bucket_name, key_name, value, storage=None, etag=None, multipart=None self, bucket_name, key_name, value, storage=None, etag=None, multipart=None
): ):
@ -1356,6 +1375,13 @@ class S3Backend(BaseBackend):
bucket = self.get_bucket(bucket_name) bucket = self.get_bucket(bucket_name)
bucket.public_access_block = None bucket.public_access_block = None
def delete_account_public_access_block(self, account_id):
    """Clear the account-wide PublicAccessBlock configuration.

    Idempotent: deleting when nothing is configured simply leaves it None.
    :raises WrongPublicAccessBlockAccountIdError: if account_id is not moto's.
    """
    # The account ID should equal the account id that is set for Moto:
    if account_id != ACCOUNT_ID:
        raise WrongPublicAccessBlockAccountIdError()

    self.account_public_access_block = None
def put_bucket_notification_configuration(self, bucket_name, notification_config): def put_bucket_notification_configuration(self, bucket_name, notification_config):
bucket = self.get_bucket(bucket_name) bucket = self.get_bucket(bucket_name)
bucket.set_notification_configuration(notification_config) bucket.set_notification_configuration(notification_config)
@ -1384,6 +1410,21 @@ class S3Backend(BaseBackend):
pub_block_config.get("RestrictPublicBuckets"), pub_block_config.get("RestrictPublicBuckets"),
) )
def put_account_public_access_block(self, account_id, pub_block_config):
    """Store the account-wide PublicAccessBlock configuration.

    :param pub_block_config: dict holding the four Block*/Ignore*/Restrict*
        flags; missing flags default to None via .get().
    :raises WrongPublicAccessBlockAccountIdError: if account_id is not moto's.
    :raises InvalidPublicAccessBlockConfiguration: if the config is empty.
    """
    # The account ID should equal the account id that is set for Moto:
    if account_id != ACCOUNT_ID:
        raise WrongPublicAccessBlockAccountIdError()

    if not pub_block_config:
        raise InvalidPublicAccessBlockConfiguration()

    self.account_public_access_block = PublicAccessBlock(
        pub_block_config.get("BlockPublicAcls"),
        pub_block_config.get("IgnorePublicAcls"),
        pub_block_config.get("BlockPublicPolicy"),
        pub_block_config.get("RestrictPublicBuckets"),
    )
def initiate_multipart(self, bucket_name, key_name, metadata): def initiate_multipart(self, bucket_name, key_name, metadata):
bucket = self.get_bucket(bucket_name) bucket = self.get_bucket(bucket_name)
new_multipart = FakeMultipart(key_name, metadata) new_multipart = FakeMultipart(key_name, metadata)

View File

@ -4,6 +4,7 @@ import re
import sys import sys
import six import six
from botocore.awsrequest import AWSPreparedRequest
from moto.core.utils import str_to_rfc_1123_datetime, py2_strip_unicode_keys from moto.core.utils import str_to_rfc_1123_datetime, py2_strip_unicode_keys
from six.moves.urllib.parse import parse_qs, urlparse, unquote from six.moves.urllib.parse import parse_qs, urlparse, unquote
@ -123,6 +124,11 @@ ACTION_MAP = {
"uploadId": "PutObject", "uploadId": "PutObject",
}, },
}, },
"CONTROL": {
"GET": {"publicAccessBlock": "GetPublicAccessBlock"},
"PUT": {"publicAccessBlock": "PutPublicAccessBlock"},
"DELETE": {"publicAccessBlock": "DeletePublicAccessBlock"},
},
} }
@ -168,7 +174,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
or host.startswith("localhost") or host.startswith("localhost")
or host.startswith("localstack") or host.startswith("localstack")
or re.match(r"^[^.]+$", host) or re.match(r"^[^.]+$", host)
or re.match(r"^.*\.svc\.cluster\.local$", host) or re.match(r"^.*\.svc\.cluster\.local:?\d*$", host)
): ):
# Default to path-based buckets for (1) localhost, (2) localstack hosts (e.g. localstack.dev), # Default to path-based buckets for (1) localhost, (2) localstack hosts (e.g. localstack.dev),
# (3) local host names that do not contain a "." (e.g., Docker container host names), or # (3) local host names that do not contain a "." (e.g., Docker container host names), or
@ -220,7 +226,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
# Depending on which calling format the client is using, we don't know # Depending on which calling format the client is using, we don't know
# if this is a bucket or key request so we have to check # if this is a bucket or key request so we have to check
if self.subdomain_based_buckets(request): if self.subdomain_based_buckets(request):
return self.key_response(request, full_url, headers) return self.key_or_control_response(request, full_url, headers)
else: else:
# Using path-based buckets # Using path-based buckets
return self.bucket_response(request, full_url, headers) return self.bucket_response(request, full_url, headers)
@ -287,7 +293,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
return self._bucket_response_post(request, body, bucket_name) return self._bucket_response_post(request, body, bucket_name)
else: else:
raise NotImplementedError( raise NotImplementedError(
"Method {0} has not been impelemented in the S3 backend yet".format( "Method {0} has not been implemented in the S3 backend yet".format(
method method
) )
) )
@ -595,6 +601,20 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
pass pass
return False return False
def _parse_pab_config(self, body):
    """Parse a PublicAccessBlock XML request body into a plain dict.

    Drops the "@xmlns" attribute that xmltodict keeps, and on Python 2
    converts the unicode keys to str so the backend can consume them.
    """
    parsed_xml = xmltodict.parse(body)
    parsed_xml["PublicAccessBlockConfiguration"].pop("@xmlns", None)

    # If Python 2, fix the unicode strings:
    if sys.version_info[0] < 3:
        parsed_xml = {
            "PublicAccessBlockConfiguration": py2_strip_unicode_keys(
                dict(parsed_xml["PublicAccessBlockConfiguration"])
            )
        }

    return parsed_xml
def _bucket_response_put( def _bucket_response_put(
self, request, body, region_name, bucket_name, querystring self, request, body, region_name, bucket_name, querystring
): ):
@ -673,19 +693,9 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
raise e raise e
elif "publicAccessBlock" in querystring: elif "publicAccessBlock" in querystring:
parsed_xml = xmltodict.parse(body) pab_config = self._parse_pab_config(body)
parsed_xml["PublicAccessBlockConfiguration"].pop("@xmlns", None)
# If Python 2, fix the unicode strings:
if sys.version_info[0] < 3:
parsed_xml = {
"PublicAccessBlockConfiguration": py2_strip_unicode_keys(
dict(parsed_xml["PublicAccessBlockConfiguration"])
)
}
self.backend.put_bucket_public_access_block( self.backend.put_bucket_public_access_block(
bucket_name, parsed_xml["PublicAccessBlockConfiguration"] bucket_name, pab_config["PublicAccessBlockConfiguration"]
) )
return "" return ""
@ -870,15 +880,21 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
) )
return 206, response_headers, response_content[begin : end + 1] return 206, response_headers, response_content[begin : end + 1]
def key_response(self, request, full_url, headers): def key_or_control_response(self, request, full_url, headers):
# Key and Control are lumped in because splitting out the regex is too much of a pain :/
self.method = request.method self.method = request.method
self.path = self._get_path(request) self.path = self._get_path(request)
self.headers = request.headers self.headers = request.headers
if "host" not in self.headers: if "host" not in self.headers:
self.headers["host"] = urlparse(full_url).netloc self.headers["host"] = urlparse(full_url).netloc
response_headers = {} response_headers = {}
try: try:
response = self._key_response(request, full_url, headers) # Is this an S3 control response?
if isinstance(request, AWSPreparedRequest) and "s3-control" in request.url:
response = self._control_response(request, full_url, headers)
else:
response = self._key_response(request, full_url, headers)
except S3ClientError as s3error: except S3ClientError as s3error:
response = s3error.code, {}, s3error.description response = s3error.code, {}, s3error.description
@ -894,6 +910,94 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
) )
return status_code, response_headers, response_content return status_code, response_headers, response_content
def _control_response(self, request, full_url, headers):
    """Dispatch an s3-control request to the HTTP-method-specific handler.

    Handles both request flavors: botocore AWSPreparedRequests carry the
    payload on ``.body`` (possibly a readable stream), while the Flask
    server exposes it on ``.data``.

    :raises NotImplementedError: for HTTP methods other than GET/PUT/DELETE.
    """
    parsed_url = urlparse(full_url)
    query = parse_qs(parsed_url.query, keep_blank_values=True)
    method = request.method

    if hasattr(request, "body"):
        # Boto
        body = request.body
        if hasattr(body, "read"):
            body = body.read()
    else:
        # Flask server
        body = request.data
    if body is None:
        body = b""

    if method == "GET":
        return self._control_response_get(request, query, headers)
    elif method == "PUT":
        return self._control_response_put(request, body, query, headers)
    elif method == "DELETE":
        return self._control_response_delete(request, query, headers)
    else:
        raise NotImplementedError(
            "Method {0} has not been implemented in the S3 backend yet".format(
                method
            )
        )
def _control_response_get(self, request, query, headers):
    """Handle s3-control GET requests (currently only GetPublicAccessBlock).

    The account ID is taken from the "x-amz-account-id" header; the
    backend validates it against moto's account.
    """
    action = self.path.split("?")[0].split("/")[
        -1
    ]  # Gets the action out of the URL sans query params.
    self._set_action("CONTROL", "GET", action)
    self._authenticate_and_authorize_s3_action()

    response_headers = {}
    if "publicAccessBlock" in action:
        public_block_config = self.backend.get_account_public_access_block(
            headers["x-amz-account-id"]
        )
        template = self.response_template(S3_PUBLIC_ACCESS_BLOCK_CONFIGURATION)
        return (
            200,
            response_headers,
            template.render(public_block_config=public_block_config),
        )

    raise NotImplementedError(
        "Method {0} has not been implemented in the S3 backend yet".format(action)
    )
def _control_response_put(self, request, body, query, headers):
    """Handle s3-control PUT requests (currently only PutPublicAccessBlock).

    The XML body is parsed via _parse_pab_config and stored on the backend
    for the account given in the "x-amz-account-id" header.
    """
    action = self.path.split("?")[0].split("/")[
        -1
    ]  # Gets the action out of the URL sans query params.
    self._set_action("CONTROL", "PUT", action)
    self._authenticate_and_authorize_s3_action()

    response_headers = {}
    if "publicAccessBlock" in action:
        pab_config = self._parse_pab_config(body)
        self.backend.put_account_public_access_block(
            headers["x-amz-account-id"],
            pab_config["PublicAccessBlockConfiguration"],
        )
        return 200, response_headers, ""

    raise NotImplementedError(
        "Method {0} has not been implemented in the S3 backend yet".format(action)
    )
def _control_response_delete(self, request, query, headers):
    """Handle s3-control DELETE requests (currently only DeletePublicAccessBlock).

    Delegates account-ID validation and the actual removal to the backend.
    """
    action = self.path.split("?")[0].split("/")[
        -1
    ]  # Gets the action out of the URL sans query params.
    self._set_action("CONTROL", "DELETE", action)
    self._authenticate_and_authorize_s3_action()

    response_headers = {}
    if "publicAccessBlock" in action:
        self.backend.delete_account_public_access_block(headers["x-amz-account-id"])
        return 200, response_headers, ""

    raise NotImplementedError(
        "Method {0} has not been implemented in the S3 backend yet".format(action)
    )
def _key_response(self, request, full_url, headers): def _key_response(self, request, full_url, headers):
parsed_url = urlparse(full_url) parsed_url = urlparse(full_url)
query = parse_qs(parsed_url.query, keep_blank_values=True) query = parse_qs(parsed_url.query, keep_blank_values=True)
@ -1098,6 +1202,10 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
if mdirective is not None and mdirective == "REPLACE": if mdirective is not None and mdirective == "REPLACE":
metadata = metadata_from_headers(request.headers) metadata = metadata_from_headers(request.headers)
new_key.set_metadata(metadata, replace=True) new_key.set_metadata(metadata, replace=True)
tdirective = request.headers.get("x-amz-tagging-directive")
if tdirective == "REPLACE":
tagging = self._tagging_from_headers(request.headers)
new_key.set_tagging(tagging)
template = self.response_template(S3_OBJECT_COPY_RESPONSE) template = self.response_template(S3_OBJECT_COPY_RESPONSE)
response_headers.update(new_key.response_dict) response_headers.update(new_key.response_dict)
return 200, response_headers, template.render(key=new_key) return 200, response_headers, template.render(key=new_key)

View File

@ -13,7 +13,7 @@ url_paths = {
# subdomain key of path-based bucket # subdomain key of path-based bucket
"{0}/(?P<key_or_bucket_name>[^/]+)/?$": S3ResponseInstance.ambiguous_response, "{0}/(?P<key_or_bucket_name>[^/]+)/?$": S3ResponseInstance.ambiguous_response,
# path-based bucket + key # path-based bucket + key
"{0}/(?P<bucket_name_path>[^/]+)/(?P<key_name>.+)": S3ResponseInstance.key_response, "{0}/(?P<bucket_name_path>[^/]+)/(?P<key_name>.+)": S3ResponseInstance.key_or_control_response,
# subdomain bucket + key with empty first part of path # subdomain bucket + key with empty first part of path
"{0}//(?P<key_name>.*)$": S3ResponseInstance.key_response, "{0}//(?P<key_name>.*)$": S3ResponseInstance.key_or_control_response,
} }

View File

@ -127,6 +127,10 @@ class WorkflowExecution(BaseModel):
"executionInfo": self.to_medium_dict(), "executionInfo": self.to_medium_dict(),
"executionConfiguration": {"taskList": {"name": self.task_list}}, "executionConfiguration": {"taskList": {"name": self.task_list}},
} }
# info
if self.execution_status == "CLOSED":
hsh["executionInfo"]["closeStatus"] = self.close_status
hsh["executionInfo"]["closeTimestamp"] = self.close_timestamp
# configuration # configuration
for key in self._configuration_keys: for key in self._configuration_keys:
attr = camelcase_to_underscores(key) attr = camelcase_to_underscores(key)

View File

@ -8,6 +8,8 @@ class WorkflowType(GenericType):
"defaultChildPolicy", "defaultChildPolicy",
"defaultExecutionStartToCloseTimeout", "defaultExecutionStartToCloseTimeout",
"defaultTaskStartToCloseTimeout", "defaultTaskStartToCloseTimeout",
"defaultTaskPriority",
"defaultLambdaRole",
] ]
@property @property

View File

@ -300,6 +300,8 @@ class SWFResponse(BaseResponse):
default_execution_start_to_close_timeout = self._params.get( default_execution_start_to_close_timeout = self._params.get(
"defaultExecutionStartToCloseTimeout" "defaultExecutionStartToCloseTimeout"
) )
default_task_priority = self._params.get("defaultTaskPriority")
default_lambda_role = self._params.get("defaultLambdaRole")
description = self._params.get("description") description = self._params.get("description")
self._check_string(domain) self._check_string(domain)
@ -309,10 +311,10 @@ class SWFResponse(BaseResponse):
self._check_none_or_string(default_child_policy) self._check_none_or_string(default_child_policy)
self._check_none_or_string(default_task_start_to_close_timeout) self._check_none_or_string(default_task_start_to_close_timeout)
self._check_none_or_string(default_execution_start_to_close_timeout) self._check_none_or_string(default_execution_start_to_close_timeout)
self._check_none_or_string(default_task_priority)
self._check_none_or_string(default_lambda_role)
self._check_none_or_string(description) self._check_none_or_string(description)
# TODO: add defaultTaskPriority when boto gets to support it
# TODO: add defaultLambdaRole when boto gets to support it
self.swf_backend.register_type( self.swf_backend.register_type(
"workflow", "workflow",
domain, domain,
@ -322,6 +324,8 @@ class SWFResponse(BaseResponse):
default_child_policy=default_child_policy, default_child_policy=default_child_policy,
default_task_start_to_close_timeout=default_task_start_to_close_timeout, default_task_start_to_close_timeout=default_task_start_to_close_timeout,
default_execution_start_to_close_timeout=default_execution_start_to_close_timeout, default_execution_start_to_close_timeout=default_execution_start_to_close_timeout,
default_task_priority=default_task_priority,
default_lambda_role=default_lambda_role,
description=description, description=description,
) )
return "" return ""

View File

View File

@ -0,0 +1,62 @@
class TaggingService:
    """Generic in-memory store of resource tags, keyed by ARN.

    Different AWS services spell their tag dictionaries differently, so the
    field names used for the list wrapper, the key, and the value are
    configurable at construction time.
    """

    def __init__(self, tagName="Tags", keyName="Key", valueName="Value"):
        # Field names used when rendering/parsing tag dictionaries.
        self.tagName = tagName
        self.keyName = keyName
        self.valueName = valueName
        # Maps arn -> {tag key: tag value (or None when no value was given)}.
        self.tags = {}

    def list_tags_for_resource(self, arn):
        """Return *arn*'s tags as {tagName: [{keyName: k, valueName: v}, ...]}.

        Unknown ARNs yield an empty tag list rather than an error.
        """
        result = [
            {self.keyName: key, self.valueName: value}
            for key, value in self.tags.get(arn, {}).items()
        ]
        return {self.tagName: result}

    def delete_all_tags_for_resource(self, arn):
        """Remove every tag for *arn*.

        Uses pop() so deleting tags of a never-tagged resource is a no-op
        instead of raising KeyError (the previous ``del`` did).
        """
        self.tags.pop(arn, None)

    def has_tags(self, arn):
        """Return True if *arn* has a tag entry recorded."""
        return arn in self.tags

    def tag_resource(self, arn, tags):
        """Merge *tags* (a list of {keyName: ..., valueName: ...} dicts) into *arn*.

        A tag dict without a value field is stored with a value of None.
        """
        current = self.tags.setdefault(arn, {})
        for tag in tags:
            current[tag[self.keyName]] = tag.get(self.valueName)

    def untag_resource_using_names(self, arn, tag_names):
        """Delete the tags whose keys appear in *tag_names* from *arn*."""
        current = self.tags.get(arn, {})
        for name in tag_names:
            current.pop(name, None)

    def untag_resource_using_tags(self, arn, tags):
        """Delete the given tags from *arn*.

        If a tag dict supplies a value, the stored value must match exactly
        for the tag to be removed; a key-only tag dict always removes it.
        """
        current = self.tags.get(arn, {})
        for tag in tags:
            if self.keyName not in tag:
                continue
            key = tag[self.keyName]
            if key not in current:
                continue
            # If both key and value are provided, match both before deletion.
            if self.valueName in tag and current[key] != tag[self.valueName]:
                continue
            del current[key]

    def extract_tag_names(self, tags):
        """Return the tag keys found in *tags*, in order; dicts without a key are skipped."""
        return [tag[self.keyName] for tag in tags if self.keyName in tag]

    def flatten_tag_list(self, tags):
        """Collapse a list of tag dicts into a plain {key: value-or-None} dict."""
        return {tag[self.keyName]: tag.get(self.valueName) for tag in tags}

View File

@ -8,7 +8,7 @@ import sure # noqa
from botocore.exceptions import ClientError from botocore.exceptions import ClientError
import responses import responses
from moto import mock_apigateway, settings from moto import mock_apigateway, mock_cognitoidp, settings
from moto.core import ACCOUNT_ID from moto.core import ACCOUNT_ID
from nose.tools import assert_raises from nose.tools import assert_raises
@ -204,12 +204,7 @@ def test_create_resource():
root_resource["ResponseMetadata"].pop("HTTPHeaders", None) root_resource["ResponseMetadata"].pop("HTTPHeaders", None)
root_resource["ResponseMetadata"].pop("RetryAttempts", None) root_resource["ResponseMetadata"].pop("RetryAttempts", None)
root_resource.should.equal( root_resource.should.equal(
{ {"path": "/", "id": root_id, "ResponseMetadata": {"HTTPStatusCode": 200},}
"path": "/",
"id": root_id,
"ResponseMetadata": {"HTTPStatusCode": 200},
"resourceMethods": {"GET": {}},
}
) )
client.create_resource(restApiId=api_id, parentId=root_id, pathPart="users") client.create_resource(restApiId=api_id, parentId=root_id, pathPart="users")
@ -257,7 +252,6 @@ def test_child_resource():
"parentId": users_id, "parentId": users_id,
"id": tags_id, "id": tags_id,
"ResponseMetadata": {"HTTPStatusCode": 200}, "ResponseMetadata": {"HTTPStatusCode": 200},
"resourceMethods": {"GET": {}},
} }
) )
@ -582,6 +576,254 @@ def test_integration_response():
response["methodIntegration"]["integrationResponses"].should.equal({}) response["methodIntegration"]["integrationResponses"].should.equal({})
@mock_apigateway
@mock_cognitoidp
def test_update_authorizer_configuration():
    """update_authorizer patches supported paths and rejects unknown ones."""
    client = boto3.client("apigateway", region_name="us-west-2")
    authorizer_name = "my_authorizer"
    response = client.create_rest_api(name="my_api", description="this is my api")
    api_id = response["id"]

    cognito_client = boto3.client("cognito-idp", region_name="us-west-2")
    user_pool_arn = cognito_client.create_user_pool(PoolName="my_cognito_pool")[
        "UserPool"
    ]["Arn"]

    response = client.create_authorizer(
        restApiId=api_id,
        name=authorizer_name,
        type="COGNITO_USER_POOLS",
        providerARNs=[user_pool_arn],
        identitySource="method.request.header.Authorization",
    )
    authorizer_id = response["id"]

    response = client.get_authorizer(restApiId=api_id, authorizerId=authorizer_id)
    # createdDate is hard to match against, remove it
    response.pop("createdDate", None)
    # this is hard to match against, so remove it
    response["ResponseMetadata"].pop("HTTPHeaders", None)
    response["ResponseMetadata"].pop("RetryAttempts", None)
    response.should.equal(
        {
            "id": authorizer_id,
            "name": authorizer_name,
            "type": "COGNITO_USER_POOLS",
            "providerARNs": [user_pool_arn],
            "identitySource": "method.request.header.Authorization",
            "authorizerResultTtlInSeconds": 300,
            "ResponseMetadata": {"HTTPStatusCode": 200},
        }
    )

    client.update_authorizer(
        restApiId=api_id,
        authorizerId=authorizer_id,
        patchOperations=[{"op": "replace", "path": "/type", "value": "TOKEN"}],
    )

    authorizer = client.get_authorizer(restApiId=api_id, authorizerId=authorizer_id)

    authorizer.should.have.key("type").which.should.equal("TOKEN")

    client.update_authorizer(
        restApiId=api_id,
        authorizerId=authorizer_id,
        patchOperations=[{"op": "replace", "path": "/type", "value": "REQUEST"}],
    )

    authorizer = client.get_authorizer(restApiId=api_id, authorizerId=authorizer_id)

    authorizer.should.have.key("type").which.should.equal("REQUEST")

    # TODO: implement mult-update tests

    # Patching an unsupported path must raise. The previous try/except
    # swallowed its own failing assertion (the AssertionError was caught by
    # "except Exception"), so the test could never fail; assert_raises
    # actually requires an exception to be thrown.
    with assert_raises(Exception):
        client.update_authorizer(
            restApiId=api_id,
            authorizerId=authorizer_id,
            patchOperations=[
                {"op": "add", "path": "/notasetting", "value": "eu-west-1"}
            ],
        )
@mock_apigateway
def test_non_existent_authorizer():
    """get_authorizer for an unknown authorizer ID must raise ClientError."""
    client = boto3.client("apigateway", region_name="us-west-2")
    response = client.create_rest_api(name="my_api", description="this is my api")
    api_id = response["id"]

    client.get_authorizer.when.called_with(
        restApiId=api_id, authorizerId="xxx"
    ).should.throw(ClientError)
@mock_apigateway
@mock_cognitoidp
def test_create_authorizer():
    """Create several COGNITO_USER_POOLS authorizers and verify the responses
    from get_authorizer and get_authorizers (shape, defaults like the 300s
    result TTL, and that all created authorizers are listed)."""
    client = boto3.client("apigateway", region_name="us-west-2")
    authorizer_name = "my_authorizer"
    response = client.create_rest_api(name="my_api", description="this is my api")
    api_id = response["id"]

    cognito_client = boto3.client("cognito-idp", region_name="us-west-2")
    user_pool_arn = cognito_client.create_user_pool(PoolName="my_cognito_pool")[
        "UserPool"
    ]["Arn"]

    response = client.create_authorizer(
        restApiId=api_id,
        name=authorizer_name,
        type="COGNITO_USER_POOLS",
        providerARNs=[user_pool_arn],
        identitySource="method.request.header.Authorization",
    )
    authorizer_id = response["id"]

    response = client.get_authorizer(restApiId=api_id, authorizerId=authorizer_id)
    # createdDate is hard to match against, remove it
    response.pop("createdDate", None)
    # this is hard to match against, so remove it
    response["ResponseMetadata"].pop("HTTPHeaders", None)
    response["ResponseMetadata"].pop("RetryAttempts", None)
    response.should.equal(
        {
            "id": authorizer_id,
            "name": authorizer_name,
            "type": "COGNITO_USER_POOLS",
            "providerARNs": [user_pool_arn],
            "identitySource": "method.request.header.Authorization",
            "authorizerResultTtlInSeconds": 300,
            "ResponseMetadata": {"HTTPStatusCode": 200},
        }
    )

    authorizer_name2 = "my_authorizer2"
    response = client.create_authorizer(
        restApiId=api_id,
        name=authorizer_name2,
        type="COGNITO_USER_POOLS",
        providerARNs=[user_pool_arn],
        identitySource="method.request.header.Authorization",
    )
    authorizer_id2 = response["id"]

    response = client.get_authorizers(restApiId=api_id)

    # this is hard to match against, so remove it
    response["ResponseMetadata"].pop("HTTPHeaders", None)
    response["ResponseMetadata"].pop("RetryAttempts", None)

    # No ordering is asserted: either authorizer may appear first in the list.
    response["items"][0]["id"].should.match(
        r"{0}|{1}".format(authorizer_id2, authorizer_id)
    )
    response["items"][1]["id"].should.match(
        r"{0}|{1}".format(authorizer_id2, authorizer_id)
    )

    new_authorizer_name_with_vars = "authorizer_with_vars"
    response = client.create_authorizer(
        restApiId=api_id,
        name=new_authorizer_name_with_vars,
        type="COGNITO_USER_POOLS",
        providerARNs=[user_pool_arn],
        identitySource="method.request.header.Authorization",
    )
    authorizer_id3 = response["id"]

    # this is hard to match against, so remove it
    response["ResponseMetadata"].pop("HTTPHeaders", None)
    response["ResponseMetadata"].pop("RetryAttempts", None)

    response.should.equal(
        {
            "name": new_authorizer_name_with_vars,
            "id": authorizer_id3,
            "type": "COGNITO_USER_POOLS",
            "providerARNs": [user_pool_arn],
            "identitySource": "method.request.header.Authorization",
            "authorizerResultTtlInSeconds": 300,
            "ResponseMetadata": {"HTTPStatusCode": 200},
        }
    )

    stage = client.get_authorizer(restApiId=api_id, authorizerId=authorizer_id3)
    stage["name"].should.equal(new_authorizer_name_with_vars)
    stage["id"].should.equal(authorizer_id3)
    stage["type"].should.equal("COGNITO_USER_POOLS")
    stage["providerARNs"].should.equal([user_pool_arn])
    stage["identitySource"].should.equal("method.request.header.Authorization")
    stage["authorizerResultTtlInSeconds"].should.equal(300)
@mock_apigateway
@mock_cognitoidp
def test_delete_authorizer():
    """delete_authorizer removes only the targeted authorizer and answers 202."""
    client = boto3.client("apigateway", region_name="us-west-2")
    authorizer_name = "my_authorizer"
    response = client.create_rest_api(name="my_api", description="this is my api")
    api_id = response["id"]

    cognito_client = boto3.client("cognito-idp", region_name="us-west-2")
    user_pool_arn = cognito_client.create_user_pool(PoolName="my_cognito_pool")[
        "UserPool"
    ]["Arn"]

    response = client.create_authorizer(
        restApiId=api_id,
        name=authorizer_name,
        type="COGNITO_USER_POOLS",
        providerARNs=[user_pool_arn],
        identitySource="method.request.header.Authorization",
    )
    authorizer_id = response["id"]

    response = client.get_authorizer(restApiId=api_id, authorizerId=authorizer_id)
    # createdDate is hard to match against, remove it
    response.pop("createdDate", None)
    # this is hard to match against, so remove it
    response["ResponseMetadata"].pop("HTTPHeaders", None)
    response["ResponseMetadata"].pop("RetryAttempts", None)
    response.should.equal(
        {
            "id": authorizer_id,
            "name": authorizer_name,
            "type": "COGNITO_USER_POOLS",
            "providerARNs": [user_pool_arn],
            "identitySource": "method.request.header.Authorization",
            "authorizerResultTtlInSeconds": 300,
            "ResponseMetadata": {"HTTPStatusCode": 200},
        }
    )

    authorizer_name2 = "my_authorizer2"
    response = client.create_authorizer(
        restApiId=api_id,
        name=authorizer_name2,
        type="COGNITO_USER_POOLS",
        providerARNs=[user_pool_arn],
        identitySource="method.request.header.Authorization",
    )
    authorizer_id2 = response["id"]

    authorizers = client.get_authorizers(restApiId=api_id)["items"]
    sorted([authorizer["name"] for authorizer in authorizers]).should.equal(
        sorted([authorizer_name2, authorizer_name])
    )
    # delete the second authorizer
    response = client.delete_authorizer(restApiId=api_id, authorizerId=authorizer_id2)
    response["ResponseMetadata"]["HTTPStatusCode"].should.equal(202)
    # verify the first authorizer still exists
    authorizers = client.get_authorizers(restApiId=api_id)["items"]
    sorted([authorizer["name"] for authorizer in authorizers]).should.equal(
        sorted([authorizer_name])
    )
@mock_apigateway @mock_apigateway
def test_update_stage_configuration(): def test_update_stage_configuration():
client = boto3.client("apigateway", region_name="us-west-2") client = boto3.client("apigateway", region_name="us-west-2")

View File

@ -150,7 +150,7 @@ def test_invoke_requestresponse_function_with_arn():
Payload=json.dumps(in_data), Payload=json.dumps(in_data),
) )
success_result["StatusCode"].should.equal(202) success_result["StatusCode"].should.equal(200)
result_obj = json.loads( result_obj = json.loads(
base64.b64decode(success_result["LogResult"]).decode("utf-8") base64.b64decode(success_result["LogResult"]).decode("utf-8")
) )
@ -1161,7 +1161,7 @@ def test_invoke_function_from_sqs():
@mock_logs @mock_logs
@mock_lambda @mock_lambda
@mock_dynamodb2 @mock_dynamodb2
def test_invoke_function_from_dynamodb(): def test_invoke_function_from_dynamodb_put():
logs_conn = boto3.client("logs", region_name="us-east-1") logs_conn = boto3.client("logs", region_name="us-east-1")
dynamodb = boto3.client("dynamodb", region_name="us-east-1") dynamodb = boto3.client("dynamodb", region_name="us-east-1")
table_name = "table_with_stream" table_name = "table_with_stream"
@ -1218,6 +1218,72 @@ def test_invoke_function_from_dynamodb():
assert False, "Test Failed" assert False, "Test Failed"
@mock_logs
@mock_lambda
@mock_dynamodb2
def test_invoke_function_from_dynamodb_update():
    """Updating an item in a stream-enabled table must invoke the mapped Lambda."""
    logs_conn = boto3.client("logs", region_name="us-east-1")
    dynamodb = boto3.client("dynamodb", region_name="us-east-1")
    table_name = "table_with_stream"
    table = dynamodb.create_table(
        TableName=table_name,
        KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
        StreamSpecification={
            "StreamEnabled": True,
            "StreamViewType": "NEW_AND_OLD_IMAGES",
        },
    )

    # Item is inserted before the event source mapping exists, so only the
    # later update should trigger the function.
    dynamodb.put_item(TableName=table_name, Item={"id": {"S": "item 1"}})

    conn = boto3.client("lambda", region_name="us-east-1")
    func = conn.create_function(
        FunctionName="testFunction",
        Runtime="python2.7",
        Role=get_role_name(),
        Handler="lambda_function.lambda_handler",
        Code={"ZipFile": get_test_zip_file3()},
        Description="test lambda function executed after a DynamoDB table is updated",
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    response = conn.create_event_source_mapping(
        EventSourceArn=table["TableDescription"]["LatestStreamArn"],
        FunctionName=func["FunctionArn"],
    )

    assert response["EventSourceArn"] == table["TableDescription"]["LatestStreamArn"]
    assert response["State"] == "Enabled"

    dynamodb.update_item(
        TableName=table_name,
        Key={"id": {"S": "item 1"}},
        UpdateExpression="set #attr = :val",
        ExpressionAttributeNames={"#attr": "new_attr"},
        ExpressionAttributeValues={":val": {"S": "new_val"}},
    )

    # Poll CloudWatch Logs for up to ~30s until the marker the zipped
    # function prints shows up; return on success, fail after the timeout.
    start = time.time()
    while (time.time() - start) < 30:
        result = logs_conn.describe_log_streams(logGroupName="/aws/lambda/testFunction")
        log_streams = result.get("logStreams")
        if not log_streams:
            time.sleep(1)
            continue

        assert len(log_streams) == 1
        result = logs_conn.get_log_events(
            logGroupName="/aws/lambda/testFunction",
            logStreamName=log_streams[0]["logStreamName"],
        )
        for event in result.get("events"):
            if event["message"] == "get_test_zip_file3 success":
                return
        time.sleep(1)

    assert False, "Test Failed"
@mock_logs @mock_logs
@mock_lambda @mock_lambda
@mock_sqs @mock_sqs

View File

@ -1,5 +1,5 @@
from __future__ import unicode_literals from __future__ import unicode_literals
template = { template = {
"Resources": {"VPCEIP": {"Type": "AWS::EC2::EIP", "Properties": {"Domain": "vpc"}}} "Resources": {"VPCEIP": {"Type": "AWS::EC2::EIP", "Properties": {"Domain": "vpc"}}}
} }

View File

@ -1,276 +1,276 @@
from __future__ import unicode_literals from __future__ import unicode_literals
template = { template = {
"Description": "AWS CloudFormation Sample Template vpc_single_instance_in_subnet.template: Sample template showing how to create a VPC and add an EC2 instance with an Elastic IP address and a security group. **WARNING** This template creates an Amazon EC2 instance. You will be billed for the AWS resources used if you create a stack from this template.", "Description": "AWS CloudFormation Sample Template vpc_single_instance_in_subnet.template: Sample template showing how to create a VPC and add an EC2 instance with an Elastic IP address and a security group. **WARNING** This template creates an Amazon EC2 instance. You will be billed for the AWS resources used if you create a stack from this template.",
"Parameters": { "Parameters": {
"SSHLocation": { "SSHLocation": {
"ConstraintDescription": "must be a valid IP CIDR range of the form x.x.x.x/x.", "ConstraintDescription": "must be a valid IP CIDR range of the form x.x.x.x/x.",
"Description": " The IP address range that can be used to SSH to the EC2 instances", "Description": " The IP address range that can be used to SSH to the EC2 instances",
"Default": "0.0.0.0/0", "Default": "0.0.0.0/0",
"MinLength": "9", "MinLength": "9",
"AllowedPattern": "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,2})", "AllowedPattern": "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,2})",
"MaxLength": "18", "MaxLength": "18",
"Type": "String", "Type": "String",
}, },
"KeyName": { "KeyName": {
"Type": "String", "Type": "String",
"Description": "Name of an existing EC2 KeyPair to enable SSH access to the instance", "Description": "Name of an existing EC2 KeyPair to enable SSH access to the instance",
"MinLength": "1", "MinLength": "1",
"AllowedPattern": "[\\x20-\\x7E]*", "AllowedPattern": "[\\x20-\\x7E]*",
"MaxLength": "255", "MaxLength": "255",
"ConstraintDescription": "can contain only ASCII characters.", "ConstraintDescription": "can contain only ASCII characters.",
}, },
"InstanceType": { "InstanceType": {
"Default": "m1.small", "Default": "m1.small",
"ConstraintDescription": "must be a valid EC2 instance type.", "ConstraintDescription": "must be a valid EC2 instance type.",
"Type": "String", "Type": "String",
"Description": "WebServer EC2 instance type", "Description": "WebServer EC2 instance type",
"AllowedValues": [ "AllowedValues": [
"t1.micro", "t1.micro",
"m1.small", "m1.small",
"m1.medium", "m1.medium",
"m1.large", "m1.large",
"m1.xlarge", "m1.xlarge",
"m2.xlarge", "m2.xlarge",
"m2.2xlarge", "m2.2xlarge",
"m2.4xlarge", "m2.4xlarge",
"m3.xlarge", "m3.xlarge",
"m3.2xlarge", "m3.2xlarge",
"c1.medium", "c1.medium",
"c1.xlarge", "c1.xlarge",
"cc1.4xlarge", "cc1.4xlarge",
"cc2.8xlarge", "cc2.8xlarge",
"cg1.4xlarge", "cg1.4xlarge",
], ],
}, },
}, },
"AWSTemplateFormatVersion": "2010-09-09", "AWSTemplateFormatVersion": "2010-09-09",
"Outputs": { "Outputs": {
"URL": { "URL": {
"Description": "Newly created application URL", "Description": "Newly created application URL",
"Value": { "Value": {
"Fn::Join": [ "Fn::Join": [
"", "",
["http://", {"Fn::GetAtt": ["WebServerInstance", "PublicIp"]}], ["http://", {"Fn::GetAtt": ["WebServerInstance", "PublicIp"]}],
] ]
}, },
} }
}, },
"Resources": { "Resources": {
"Subnet": { "Subnet": {
"Type": "AWS::EC2::Subnet", "Type": "AWS::EC2::Subnet",
"Properties": { "Properties": {
"VpcId": {"Ref": "VPC"}, "VpcId": {"Ref": "VPC"},
"CidrBlock": "10.0.0.0/24", "CidrBlock": "10.0.0.0/24",
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}], "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
}, },
}, },
"WebServerWaitHandle": {"Type": "AWS::CloudFormation::WaitConditionHandle"}, "WebServerWaitHandle": {"Type": "AWS::CloudFormation::WaitConditionHandle"},
"Route": { "Route": {
"Type": "AWS::EC2::Route", "Type": "AWS::EC2::Route",
"Properties": { "Properties": {
"GatewayId": {"Ref": "InternetGateway"}, "GatewayId": {"Ref": "InternetGateway"},
"DestinationCidrBlock": "0.0.0.0/0", "DestinationCidrBlock": "0.0.0.0/0",
"RouteTableId": {"Ref": "RouteTable"}, "RouteTableId": {"Ref": "RouteTable"},
}, },
"DependsOn": "AttachGateway", "DependsOn": "AttachGateway",
}, },
"SubnetRouteTableAssociation": { "SubnetRouteTableAssociation": {
"Type": "AWS::EC2::SubnetRouteTableAssociation", "Type": "AWS::EC2::SubnetRouteTableAssociation",
"Properties": { "Properties": {
"SubnetId": {"Ref": "Subnet"}, "SubnetId": {"Ref": "Subnet"},
"RouteTableId": {"Ref": "RouteTable"}, "RouteTableId": {"Ref": "RouteTable"},
}, },
}, },
"InternetGateway": { "InternetGateway": {
"Type": "AWS::EC2::InternetGateway", "Type": "AWS::EC2::InternetGateway",
"Properties": { "Properties": {
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}] "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}]
}, },
}, },
"RouteTable": { "RouteTable": {
"Type": "AWS::EC2::RouteTable", "Type": "AWS::EC2::RouteTable",
"Properties": { "Properties": {
"VpcId": {"Ref": "VPC"}, "VpcId": {"Ref": "VPC"},
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}], "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
}, },
}, },
"WebServerWaitCondition": { "WebServerWaitCondition": {
"Type": "AWS::CloudFormation::WaitCondition", "Type": "AWS::CloudFormation::WaitCondition",
"Properties": {"Handle": {"Ref": "WebServerWaitHandle"}, "Timeout": "300"}, "Properties": {"Handle": {"Ref": "WebServerWaitHandle"}, "Timeout": "300"},
"DependsOn": "WebServerInstance", "DependsOn": "WebServerInstance",
}, },
"VPC": { "VPC": {
"Type": "AWS::EC2::VPC", "Type": "AWS::EC2::VPC",
"Properties": { "Properties": {
"CidrBlock": "10.0.0.0/16", "CidrBlock": "10.0.0.0/16",
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}], "Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
}, },
}, },
"InstanceSecurityGroup": { "InstanceSecurityGroup": {
"Type": "AWS::EC2::SecurityGroup", "Type": "AWS::EC2::SecurityGroup",
"Properties": { "Properties": {
"SecurityGroupIngress": [ "SecurityGroupIngress": [
{ {
"ToPort": "22", "ToPort": "22",
"IpProtocol": "tcp", "IpProtocol": "tcp",
"CidrIp": {"Ref": "SSHLocation"}, "CidrIp": {"Ref": "SSHLocation"},
"FromPort": "22", "FromPort": "22",
}, },
{ {
"ToPort": "80", "ToPort": "80",
"IpProtocol": "tcp", "IpProtocol": "tcp",
"CidrIp": "0.0.0.0/0", "CidrIp": "0.0.0.0/0",
"FromPort": "80", "FromPort": "80",
}, },
], ],
"VpcId": {"Ref": "VPC"}, "VpcId": {"Ref": "VPC"},
"GroupDescription": "Enable SSH access via port 22", "GroupDescription": "Enable SSH access via port 22",
}, },
}, },
"WebServerInstance": { "WebServerInstance": {
"Type": "AWS::EC2::Instance", "Type": "AWS::EC2::Instance",
"Properties": { "Properties": {
"UserData": { "UserData": {
"Fn::Base64": { "Fn::Base64": {
"Fn::Join": [ "Fn::Join": [
"", "",
[ [
"#!/bin/bash\n", "#!/bin/bash\n",
"yum update -y aws-cfn-bootstrap\n", "yum update -y aws-cfn-bootstrap\n",
"# Helper function\n", "# Helper function\n",
"function error_exit\n", "function error_exit\n",
"{\n", "{\n",
' /opt/aws/bin/cfn-signal -e 1 -r "$1" \'', ' /opt/aws/bin/cfn-signal -e 1 -r "$1" \'',
{"Ref": "WebServerWaitHandle"}, {"Ref": "WebServerWaitHandle"},
"'\n", "'\n",
" exit 1\n", " exit 1\n",
"}\n", "}\n",
"# Install the simple web page\n", "# Install the simple web page\n",
"/opt/aws/bin/cfn-init -s ", "/opt/aws/bin/cfn-init -s ",
{"Ref": "AWS::StackId"}, {"Ref": "AWS::StackId"},
" -r WebServerInstance ", " -r WebServerInstance ",
" --region ", " --region ",
{"Ref": "AWS::Region"}, {"Ref": "AWS::Region"},
" || error_exit 'Failed to run cfn-init'\n", " || error_exit 'Failed to run cfn-init'\n",
"# Start up the cfn-hup daemon to listen for changes to the Web Server metadata\n", "# Start up the cfn-hup daemon to listen for changes to the Web Server metadata\n",
"/opt/aws/bin/cfn-hup || error_exit 'Failed to start cfn-hup'\n", "/opt/aws/bin/cfn-hup || error_exit 'Failed to start cfn-hup'\n",
"# All done so signal success\n", "# All done so signal success\n",
'/opt/aws/bin/cfn-signal -e 0 -r "WebServer setup complete" \'', '/opt/aws/bin/cfn-signal -e 0 -r "WebServer setup complete" \'',
{"Ref": "WebServerWaitHandle"}, {"Ref": "WebServerWaitHandle"},
"'\n", "'\n",
], ],
] ]
} }
}, },
"Tags": [ "Tags": [
{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}, {"Value": {"Ref": "AWS::StackId"}, "Key": "Application"},
{"Value": "Bar", "Key": "Foo"}, {"Value": "Bar", "Key": "Foo"},
], ],
"SecurityGroupIds": [{"Ref": "InstanceSecurityGroup"}], "SecurityGroupIds": [{"Ref": "InstanceSecurityGroup"}],
"KeyName": {"Ref": "KeyName"}, "KeyName": {"Ref": "KeyName"},
"SubnetId": {"Ref": "Subnet"}, "SubnetId": {"Ref": "Subnet"},
"ImageId": { "ImageId": {
"Fn::FindInMap": ["RegionMap", {"Ref": "AWS::Region"}, "AMI"] "Fn::FindInMap": ["RegionMap", {"Ref": "AWS::Region"}, "AMI"]
}, },
"InstanceType": {"Ref": "InstanceType"}, "InstanceType": {"Ref": "InstanceType"},
}, },
"Metadata": { "Metadata": {
"Comment": "Install a simple PHP application", "Comment": "Install a simple PHP application",
"AWS::CloudFormation::Init": { "AWS::CloudFormation::Init": {
"config": { "config": {
"files": { "files": {
"/etc/cfn/cfn-hup.conf": { "/etc/cfn/cfn-hup.conf": {
"content": { "content": {
"Fn::Join": [ "Fn::Join": [
"", "",
[ [
"[main]\n", "[main]\n",
"stack=", "stack=",
{"Ref": "AWS::StackId"}, {"Ref": "AWS::StackId"},
"\n", "\n",
"region=", "region=",
{"Ref": "AWS::Region"}, {"Ref": "AWS::Region"},
"\n", "\n",
], ],
] ]
}, },
"owner": "root", "owner": "root",
"group": "root", "group": "root",
"mode": "000400", "mode": "000400",
}, },
"/etc/cfn/hooks.d/cfn-auto-reloader.conf": { "/etc/cfn/hooks.d/cfn-auto-reloader.conf": {
"content": { "content": {
"Fn::Join": [ "Fn::Join": [
"", "",
[ [
"[cfn-auto-reloader-hook]\n", "[cfn-auto-reloader-hook]\n",
"triggers=post.update\n", "triggers=post.update\n",
"path=Resources.WebServerInstance.Metadata.AWS::CloudFormation::Init\n", "path=Resources.WebServerInstance.Metadata.AWS::CloudFormation::Init\n",
"action=/opt/aws/bin/cfn-init -s ", "action=/opt/aws/bin/cfn-init -s ",
{"Ref": "AWS::StackId"}, {"Ref": "AWS::StackId"},
" -r WebServerInstance ", " -r WebServerInstance ",
" --region ", " --region ",
{"Ref": "AWS::Region"}, {"Ref": "AWS::Region"},
"\n", "\n",
"runas=root\n", "runas=root\n",
], ],
] ]
} }
}, },
"/var/www/html/index.php": { "/var/www/html/index.php": {
"content": { "content": {
"Fn::Join": [ "Fn::Join": [
"", "",
[ [
"<?php\n", "<?php\n",
"echo '<h1>AWS CloudFormation sample PHP application</h1>';\n", "echo '<h1>AWS CloudFormation sample PHP application</h1>';\n",
"?>\n", "?>\n",
], ],
] ]
}, },
"owner": "apache", "owner": "apache",
"group": "apache", "group": "apache",
"mode": "000644", "mode": "000644",
}, },
}, },
"services": { "services": {
"sysvinit": { "sysvinit": {
"httpd": {"ensureRunning": "true", "enabled": "true"}, "httpd": {"ensureRunning": "true", "enabled": "true"},
"sendmail": { "sendmail": {
"ensureRunning": "false", "ensureRunning": "false",
"enabled": "false", "enabled": "false",
}, },
} }
}, },
"packages": {"yum": {"httpd": [], "php": []}}, "packages": {"yum": {"httpd": [], "php": []}},
} }
}, },
}, },
}, },
"IPAddress": { "IPAddress": {
"Type": "AWS::EC2::EIP", "Type": "AWS::EC2::EIP",
"Properties": {"InstanceId": {"Ref": "WebServerInstance"}, "Domain": "vpc"}, "Properties": {"InstanceId": {"Ref": "WebServerInstance"}, "Domain": "vpc"},
"DependsOn": "AttachGateway", "DependsOn": "AttachGateway",
}, },
"AttachGateway": { "AttachGateway": {
"Type": "AWS::EC2::VPCGatewayAttachment", "Type": "AWS::EC2::VPCGatewayAttachment",
"Properties": { "Properties": {
"VpcId": {"Ref": "VPC"}, "VpcId": {"Ref": "VPC"},
"InternetGatewayId": {"Ref": "InternetGateway"}, "InternetGatewayId": {"Ref": "InternetGateway"},
}, },
}, },
}, },
"Mappings": { "Mappings": {
"RegionMap": { "RegionMap": {
"ap-southeast-1": {"AMI": "ami-74dda626"}, "ap-southeast-1": {"AMI": "ami-74dda626"},
"ap-southeast-2": {"AMI": "ami-b3990e89"}, "ap-southeast-2": {"AMI": "ami-b3990e89"},
"us-west-2": {"AMI": "ami-16fd7026"}, "us-west-2": {"AMI": "ami-16fd7026"},
"us-east-1": {"AMI": "ami-7f418316"}, "us-east-1": {"AMI": "ami-7f418316"},
"ap-northeast-1": {"AMI": "ami-dcfa4edd"}, "ap-northeast-1": {"AMI": "ami-dcfa4edd"},
"us-west-1": {"AMI": "ami-951945d0"}, "us-west-1": {"AMI": "ami-951945d0"},
"eu-west-1": {"AMI": "ami-24506250"}, "eu-west-1": {"AMI": "ami-24506250"},
"sa-east-1": {"AMI": "ami-3e3be423"}, "sa-east-1": {"AMI": "ami-3e3be423"},
} }
}, },
} }

View File

@ -1,117 +1,117 @@
import boto import boto
from boto.ec2.cloudwatch.alarm import MetricAlarm from boto.ec2.cloudwatch.alarm import MetricAlarm
import sure # noqa import sure # noqa
from moto import mock_cloudwatch_deprecated from moto import mock_cloudwatch_deprecated
def alarm_fixture(name="tester", action=None): def alarm_fixture(name="tester", action=None):
action = action or ["arn:alarm"] action = action or ["arn:alarm"]
return MetricAlarm( return MetricAlarm(
name=name, name=name,
namespace="{0}_namespace".format(name), namespace="{0}_namespace".format(name),
metric="{0}_metric".format(name), metric="{0}_metric".format(name),
comparison=">=", comparison=">=",
threshold=2.0, threshold=2.0,
period=60, period=60,
evaluation_periods=5, evaluation_periods=5,
statistic="Average", statistic="Average",
description="A test", description="A test",
dimensions={"InstanceId": ["i-0123456,i-0123457"]}, dimensions={"InstanceId": ["i-0123456,i-0123457"]},
alarm_actions=action, alarm_actions=action,
ok_actions=["arn:ok"], ok_actions=["arn:ok"],
insufficient_data_actions=["arn:insufficient"], insufficient_data_actions=["arn:insufficient"],
unit="Seconds", unit="Seconds",
) )
@mock_cloudwatch_deprecated @mock_cloudwatch_deprecated
def test_create_alarm(): def test_create_alarm():
conn = boto.connect_cloudwatch() conn = boto.connect_cloudwatch()
alarm = alarm_fixture() alarm = alarm_fixture()
conn.create_alarm(alarm) conn.create_alarm(alarm)
alarms = conn.describe_alarms() alarms = conn.describe_alarms()
alarms.should.have.length_of(1) alarms.should.have.length_of(1)
alarm = alarms[0] alarm = alarms[0]
alarm.name.should.equal("tester") alarm.name.should.equal("tester")
alarm.namespace.should.equal("tester_namespace") alarm.namespace.should.equal("tester_namespace")
alarm.metric.should.equal("tester_metric") alarm.metric.should.equal("tester_metric")
alarm.comparison.should.equal(">=") alarm.comparison.should.equal(">=")
alarm.threshold.should.equal(2.0) alarm.threshold.should.equal(2.0)
alarm.period.should.equal(60) alarm.period.should.equal(60)
alarm.evaluation_periods.should.equal(5) alarm.evaluation_periods.should.equal(5)
alarm.statistic.should.equal("Average") alarm.statistic.should.equal("Average")
alarm.description.should.equal("A test") alarm.description.should.equal("A test")
dict(alarm.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]}) dict(alarm.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]})
list(alarm.alarm_actions).should.equal(["arn:alarm"]) list(alarm.alarm_actions).should.equal(["arn:alarm"])
list(alarm.ok_actions).should.equal(["arn:ok"]) list(alarm.ok_actions).should.equal(["arn:ok"])
list(alarm.insufficient_data_actions).should.equal(["arn:insufficient"]) list(alarm.insufficient_data_actions).should.equal(["arn:insufficient"])
alarm.unit.should.equal("Seconds") alarm.unit.should.equal("Seconds")
@mock_cloudwatch_deprecated @mock_cloudwatch_deprecated
def test_delete_alarm(): def test_delete_alarm():
conn = boto.connect_cloudwatch() conn = boto.connect_cloudwatch()
alarms = conn.describe_alarms() alarms = conn.describe_alarms()
alarms.should.have.length_of(0) alarms.should.have.length_of(0)
alarm = alarm_fixture() alarm = alarm_fixture()
conn.create_alarm(alarm) conn.create_alarm(alarm)
alarms = conn.describe_alarms() alarms = conn.describe_alarms()
alarms.should.have.length_of(1) alarms.should.have.length_of(1)
alarms[0].delete() alarms[0].delete()
alarms = conn.describe_alarms() alarms = conn.describe_alarms()
alarms.should.have.length_of(0) alarms.should.have.length_of(0)
@mock_cloudwatch_deprecated @mock_cloudwatch_deprecated
def test_put_metric_data(): def test_put_metric_data():
conn = boto.connect_cloudwatch() conn = boto.connect_cloudwatch()
conn.put_metric_data( conn.put_metric_data(
namespace="tester", namespace="tester",
name="metric", name="metric",
value=1.5, value=1.5,
dimensions={"InstanceId": ["i-0123456,i-0123457"]}, dimensions={"InstanceId": ["i-0123456,i-0123457"]},
) )
metrics = conn.list_metrics() metrics = conn.list_metrics()
metrics.should.have.length_of(1) metrics.should.have.length_of(1)
metric = metrics[0] metric = metrics[0]
metric.namespace.should.equal("tester") metric.namespace.should.equal("tester")
metric.name.should.equal("metric") metric.name.should.equal("metric")
dict(metric.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]}) dict(metric.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]})
@mock_cloudwatch_deprecated @mock_cloudwatch_deprecated
def test_describe_alarms(): def test_describe_alarms():
conn = boto.connect_cloudwatch() conn = boto.connect_cloudwatch()
alarms = conn.describe_alarms() alarms = conn.describe_alarms()
alarms.should.have.length_of(0) alarms.should.have.length_of(0)
conn.create_alarm(alarm_fixture(name="nfoobar", action="afoobar")) conn.create_alarm(alarm_fixture(name="nfoobar", action="afoobar"))
conn.create_alarm(alarm_fixture(name="nfoobaz", action="afoobaz")) conn.create_alarm(alarm_fixture(name="nfoobaz", action="afoobaz"))
conn.create_alarm(alarm_fixture(name="nbarfoo", action="abarfoo")) conn.create_alarm(alarm_fixture(name="nbarfoo", action="abarfoo"))
conn.create_alarm(alarm_fixture(name="nbazfoo", action="abazfoo")) conn.create_alarm(alarm_fixture(name="nbazfoo", action="abazfoo"))
alarms = conn.describe_alarms() alarms = conn.describe_alarms()
alarms.should.have.length_of(4) alarms.should.have.length_of(4)
alarms = conn.describe_alarms(alarm_name_prefix="nfoo") alarms = conn.describe_alarms(alarm_name_prefix="nfoo")
alarms.should.have.length_of(2) alarms.should.have.length_of(2)
alarms = conn.describe_alarms(alarm_names=["nfoobar", "nbarfoo", "nbazfoo"]) alarms = conn.describe_alarms(alarm_names=["nfoobar", "nbarfoo", "nbazfoo"])
alarms.should.have.length_of(3) alarms.should.have.length_of(3)
alarms = conn.describe_alarms(action_prefix="afoo") alarms = conn.describe_alarms(action_prefix="afoo")
alarms.should.have.length_of(2) alarms.should.have.length_of(2)
for alarm in conn.describe_alarms(): for alarm in conn.describe_alarms():
alarm.delete() alarm.delete()
alarms = conn.describe_alarms() alarms = conn.describe_alarms()
alarms.should.have.length_of(0) alarms.should.have.length_of(0)

View File

@ -27,6 +27,11 @@ def test_create_user_pool():
result["UserPool"]["Id"].should_not.be.none result["UserPool"]["Id"].should_not.be.none
result["UserPool"]["Id"].should.match(r"[\w-]+_[0-9a-zA-Z]+") result["UserPool"]["Id"].should.match(r"[\w-]+_[0-9a-zA-Z]+")
result["UserPool"]["Arn"].should.equal(
"arn:aws:cognito-idp:us-west-2:{}:userpool/{}".format(
ACCOUNT_ID, result["UserPool"]["Id"]
)
)
result["UserPool"]["Name"].should.equal(name) result["UserPool"]["Name"].should.equal(name)
result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value) result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value)
@ -911,6 +916,55 @@ def test_admin_create_existing_user():
caught.should.be.true caught.should.be.true
@mock_cognitoidp
def test_admin_resend_invitation_existing_user():
conn = boto3.client("cognito-idp", "us-west-2")
username = str(uuid.uuid4())
value = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
conn.admin_create_user(
UserPoolId=user_pool_id,
Username=username,
UserAttributes=[{"Name": "thing", "Value": value}],
)
caught = False
try:
conn.admin_create_user(
UserPoolId=user_pool_id,
Username=username,
UserAttributes=[{"Name": "thing", "Value": value}],
MessageAction="RESEND",
)
except conn.exceptions.UsernameExistsException:
caught = True
caught.should.be.false
@mock_cognitoidp
def test_admin_resend_invitation_missing_user():
conn = boto3.client("cognito-idp", "us-west-2")
username = str(uuid.uuid4())
value = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
caught = False
try:
conn.admin_create_user(
UserPoolId=user_pool_id,
Username=username,
UserAttributes=[{"Name": "thing", "Value": value}],
MessageAction="RESEND",
)
except conn.exceptions.UserNotFoundException:
caught = True
caught.should.be.true
@mock_cognitoidp @mock_cognitoidp
def test_admin_get_user(): def test_admin_get_user():
conn = boto3.client("cognito-idp", "us-west-2") conn = boto3.client("cognito-idp", "us-west-2")

View File

@ -46,4 +46,4 @@ def test_domain_dispatched_with_service():
dispatcher = DomainDispatcherApplication(create_backend_app, service="s3") dispatcher = DomainDispatcherApplication(create_backend_app, service="s3")
backend_app = dispatcher.get_application({"HTTP_HOST": "s3.us-east1.amazonaws.com"}) backend_app = dispatcher.get_application({"HTTP_HOST": "s3.us-east1.amazonaws.com"})
keys = set(backend_app.view_functions.keys()) keys = set(backend_app.view_functions.keys())
keys.should.contain("ResponseObject.key_response") keys.should.contain("ResponseObject.key_or_control_response")

View File

@ -1,182 +1,182 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto.datapipeline import boto.datapipeline
import sure # noqa import sure # noqa
from moto import mock_datapipeline_deprecated from moto import mock_datapipeline_deprecated
from moto.datapipeline.utils import remove_capitalization_of_dict_keys from moto.datapipeline.utils import remove_capitalization_of_dict_keys
def get_value_from_fields(key, fields): def get_value_from_fields(key, fields):
for field in fields: for field in fields:
if field["key"] == key: if field["key"] == key:
return field["stringValue"] return field["stringValue"]
@mock_datapipeline_deprecated @mock_datapipeline_deprecated
def test_create_pipeline(): def test_create_pipeline():
conn = boto.datapipeline.connect_to_region("us-west-2") conn = boto.datapipeline.connect_to_region("us-west-2")
res = conn.create_pipeline("mypipeline", "some-unique-id") res = conn.create_pipeline("mypipeline", "some-unique-id")
pipeline_id = res["pipelineId"] pipeline_id = res["pipelineId"]
pipeline_descriptions = conn.describe_pipelines([pipeline_id])[ pipeline_descriptions = conn.describe_pipelines([pipeline_id])[
"pipelineDescriptionList" "pipelineDescriptionList"
] ]
pipeline_descriptions.should.have.length_of(1) pipeline_descriptions.should.have.length_of(1)
pipeline_description = pipeline_descriptions[0] pipeline_description = pipeline_descriptions[0]
pipeline_description["name"].should.equal("mypipeline") pipeline_description["name"].should.equal("mypipeline")
pipeline_description["pipelineId"].should.equal(pipeline_id) pipeline_description["pipelineId"].should.equal(pipeline_id)
fields = pipeline_description["fields"] fields = pipeline_description["fields"]
get_value_from_fields("@pipelineState", fields).should.equal("PENDING") get_value_from_fields("@pipelineState", fields).should.equal("PENDING")
get_value_from_fields("uniqueId", fields).should.equal("some-unique-id") get_value_from_fields("uniqueId", fields).should.equal("some-unique-id")
PIPELINE_OBJECTS = [ PIPELINE_OBJECTS = [
{ {
"id": "Default", "id": "Default",
"name": "Default", "name": "Default",
"fields": [{"key": "workerGroup", "stringValue": "workerGroup"}], "fields": [{"key": "workerGroup", "stringValue": "workerGroup"}],
}, },
{ {
"id": "Schedule", "id": "Schedule",
"name": "Schedule", "name": "Schedule",
"fields": [ "fields": [
{"key": "startDateTime", "stringValue": "2012-12-12T00:00:00"}, {"key": "startDateTime", "stringValue": "2012-12-12T00:00:00"},
{"key": "type", "stringValue": "Schedule"}, {"key": "type", "stringValue": "Schedule"},
{"key": "period", "stringValue": "1 hour"}, {"key": "period", "stringValue": "1 hour"},
{"key": "endDateTime", "stringValue": "2012-12-21T18:00:00"}, {"key": "endDateTime", "stringValue": "2012-12-21T18:00:00"},
], ],
}, },
{ {
"id": "SayHello", "id": "SayHello",
"name": "SayHello", "name": "SayHello",
"fields": [ "fields": [
{"key": "type", "stringValue": "ShellCommandActivity"}, {"key": "type", "stringValue": "ShellCommandActivity"},
{"key": "command", "stringValue": "echo hello"}, {"key": "command", "stringValue": "echo hello"},
{"key": "parent", "refValue": "Default"}, {"key": "parent", "refValue": "Default"},
{"key": "schedule", "refValue": "Schedule"}, {"key": "schedule", "refValue": "Schedule"},
], ],
}, },
] ]
@mock_datapipeline_deprecated @mock_datapipeline_deprecated
def test_creating_pipeline_definition(): def test_creating_pipeline_definition():
conn = boto.datapipeline.connect_to_region("us-west-2") conn = boto.datapipeline.connect_to_region("us-west-2")
res = conn.create_pipeline("mypipeline", "some-unique-id") res = conn.create_pipeline("mypipeline", "some-unique-id")
pipeline_id = res["pipelineId"] pipeline_id = res["pipelineId"]
conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id) conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)
pipeline_definition = conn.get_pipeline_definition(pipeline_id) pipeline_definition = conn.get_pipeline_definition(pipeline_id)
pipeline_definition["pipelineObjects"].should.have.length_of(3) pipeline_definition["pipelineObjects"].should.have.length_of(3)
default_object = pipeline_definition["pipelineObjects"][0] default_object = pipeline_definition["pipelineObjects"][0]
default_object["name"].should.equal("Default") default_object["name"].should.equal("Default")
default_object["id"].should.equal("Default") default_object["id"].should.equal("Default")
default_object["fields"].should.equal( default_object["fields"].should.equal(
[{"key": "workerGroup", "stringValue": "workerGroup"}] [{"key": "workerGroup", "stringValue": "workerGroup"}]
) )
@mock_datapipeline_deprecated @mock_datapipeline_deprecated
def test_describing_pipeline_objects(): def test_describing_pipeline_objects():
conn = boto.datapipeline.connect_to_region("us-west-2") conn = boto.datapipeline.connect_to_region("us-west-2")
res = conn.create_pipeline("mypipeline", "some-unique-id") res = conn.create_pipeline("mypipeline", "some-unique-id")
pipeline_id = res["pipelineId"] pipeline_id = res["pipelineId"]
conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id) conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)
objects = conn.describe_objects(["Schedule", "Default"], pipeline_id)[ objects = conn.describe_objects(["Schedule", "Default"], pipeline_id)[
"pipelineObjects" "pipelineObjects"
] ]
objects.should.have.length_of(2) objects.should.have.length_of(2)
default_object = [x for x in objects if x["id"] == "Default"][0] default_object = [x for x in objects if x["id"] == "Default"][0]
default_object["name"].should.equal("Default") default_object["name"].should.equal("Default")
default_object["fields"].should.equal( default_object["fields"].should.equal(
[{"key": "workerGroup", "stringValue": "workerGroup"}] [{"key": "workerGroup", "stringValue": "workerGroup"}]
) )
@mock_datapipeline_deprecated @mock_datapipeline_deprecated
def test_activate_pipeline(): def test_activate_pipeline():
conn = boto.datapipeline.connect_to_region("us-west-2") conn = boto.datapipeline.connect_to_region("us-west-2")
res = conn.create_pipeline("mypipeline", "some-unique-id") res = conn.create_pipeline("mypipeline", "some-unique-id")
pipeline_id = res["pipelineId"] pipeline_id = res["pipelineId"]
conn.activate_pipeline(pipeline_id) conn.activate_pipeline(pipeline_id)
pipeline_descriptions = conn.describe_pipelines([pipeline_id])[ pipeline_descriptions = conn.describe_pipelines([pipeline_id])[
"pipelineDescriptionList" "pipelineDescriptionList"
] ]
pipeline_descriptions.should.have.length_of(1) pipeline_descriptions.should.have.length_of(1)
pipeline_description = pipeline_descriptions[0] pipeline_description = pipeline_descriptions[0]
fields = pipeline_description["fields"] fields = pipeline_description["fields"]
get_value_from_fields("@pipelineState", fields).should.equal("SCHEDULED") get_value_from_fields("@pipelineState", fields).should.equal("SCHEDULED")
@mock_datapipeline_deprecated @mock_datapipeline_deprecated
def test_delete_pipeline(): def test_delete_pipeline():
conn = boto.datapipeline.connect_to_region("us-west-2") conn = boto.datapipeline.connect_to_region("us-west-2")
res = conn.create_pipeline("mypipeline", "some-unique-id") res = conn.create_pipeline("mypipeline", "some-unique-id")
pipeline_id = res["pipelineId"] pipeline_id = res["pipelineId"]
conn.delete_pipeline(pipeline_id) conn.delete_pipeline(pipeline_id)
response = conn.list_pipelines() response = conn.list_pipelines()
response["pipelineIdList"].should.have.length_of(0) response["pipelineIdList"].should.have.length_of(0)
@mock_datapipeline_deprecated @mock_datapipeline_deprecated
def test_listing_pipelines(): def test_listing_pipelines():
conn = boto.datapipeline.connect_to_region("us-west-2") conn = boto.datapipeline.connect_to_region("us-west-2")
res1 = conn.create_pipeline("mypipeline1", "some-unique-id1") res1 = conn.create_pipeline("mypipeline1", "some-unique-id1")
res2 = conn.create_pipeline("mypipeline2", "some-unique-id2") res2 = conn.create_pipeline("mypipeline2", "some-unique-id2")
response = conn.list_pipelines() response = conn.list_pipelines()
response["hasMoreResults"].should.be(False) response["hasMoreResults"].should.be(False)
response["marker"].should.be.none response["marker"].should.be.none
response["pipelineIdList"].should.have.length_of(2) response["pipelineIdList"].should.have.length_of(2)
response["pipelineIdList"].should.contain( response["pipelineIdList"].should.contain(
{"id": res1["pipelineId"], "name": "mypipeline1"} {"id": res1["pipelineId"], "name": "mypipeline1"}
) )
response["pipelineIdList"].should.contain( response["pipelineIdList"].should.contain(
{"id": res2["pipelineId"], "name": "mypipeline2"} {"id": res2["pipelineId"], "name": "mypipeline2"}
) )
@mock_datapipeline_deprecated @mock_datapipeline_deprecated
def test_listing_paginated_pipelines(): def test_listing_paginated_pipelines():
conn = boto.datapipeline.connect_to_region("us-west-2") conn = boto.datapipeline.connect_to_region("us-west-2")
for i in range(100): for i in range(100):
conn.create_pipeline("mypipeline%d" % i, "some-unique-id%d" % i) conn.create_pipeline("mypipeline%d" % i, "some-unique-id%d" % i)
response = conn.list_pipelines() response = conn.list_pipelines()
response["hasMoreResults"].should.be(True) response["hasMoreResults"].should.be(True)
response["marker"].should.equal(response["pipelineIdList"][-1]["id"]) response["marker"].should.equal(response["pipelineIdList"][-1]["id"])
response["pipelineIdList"].should.have.length_of(50) response["pipelineIdList"].should.have.length_of(50)
# testing a helper function # testing a helper function
def test_remove_capitalization_of_dict_keys(): def test_remove_capitalization_of_dict_keys():
result = remove_capitalization_of_dict_keys( result = remove_capitalization_of_dict_keys(
{ {
"Id": "IdValue", "Id": "IdValue",
"Fields": [{"Key": "KeyValue", "StringValue": "StringValueValue"}], "Fields": [{"Key": "KeyValue", "StringValue": "StringValueValue"}],
} }
) )
result.should.equal( result.should.equal(
{ {
"id": "IdValue", "id": "IdValue",
"fields": [{"key": "KeyValue", "stringValue": "StringValueValue"}], "fields": [{"key": "KeyValue", "stringValue": "StringValueValue"}],
} }
) )

View File

@ -1,470 +1,470 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto import boto
import sure # noqa import sure # noqa
from freezegun import freeze_time from freezegun import freeze_time
from moto import mock_dynamodb_deprecated from moto import mock_dynamodb_deprecated
from boto.dynamodb import condition from boto.dynamodb import condition
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError, DynamoDBValidationError from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError, DynamoDBValidationError
from boto.exception import DynamoDBResponseError from boto.exception import DynamoDBResponseError
def create_table(conn):
    """Create the hash+range-keyed 'messages' table shared by every test here."""
    schema = conn.create_schema(
        hash_key_name="forum_name",
        hash_key_proto_value=str,
        range_key_name="subject",
        range_key_proto_value=str,
    )
    return conn.create_table(
        name="messages", schema=schema, read_units=10, write_units=10
    )
@freeze_time("2012-01-14")
@mock_dynamodb_deprecated
def test_create_table():
    """describe_table reports schema, throughput and the frozen creation time."""
    conn = boto.connect_dynamodb()
    create_table(conn)

    # 1326499200.0 is 2012-01-14 00:00:00 UTC, pinned by @freeze_time above.
    conn.describe_table("messages").should.equal(
        {
            "Table": {
                "CreationDateTime": 1326499200.0,
                "ItemCount": 0,
                "KeySchema": {
                    "HashKeyElement": {
                        "AttributeName": "forum_name",
                        "AttributeType": "S",
                    },
                    "RangeKeyElement": {
                        "AttributeName": "subject",
                        "AttributeType": "S",
                    },
                },
                "ProvisionedThroughput": {
                    "ReadCapacityUnits": 10,
                    "WriteCapacityUnits": 10,
                },
                "TableName": "messages",
                "TableSizeBytes": 0,
                "TableStatus": "ACTIVE",
            }
        }
    )
@mock_dynamodb_deprecated
def test_delete_table():
    """Deleting the only table empties the listing; deleting it twice errors."""
    conn = boto.connect_dynamodb()
    create_table(conn)
    conn.list_tables().should.have.length_of(1)

    conn.layer1.delete_table("messages")
    conn.list_tables().should.have.length_of(0)

    conn.layer1.delete_table.when.called_with("messages").should.throw(
        DynamoDBResponseError
    )
@mock_dynamodb_deprecated
def test_update_table_throughput():
    """update_throughput changes the read/write units seen after a refresh."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    table.read_units.should.equal(10)
    table.write_units.should.equal(10)

    table.update_throughput(5, 6)
    table.refresh()

    table.read_units.should.equal(5)
    table.write_units.should.equal(6)
@mock_dynamodb_deprecated
def test_item_add_and_describe_and_update():
    """An item can be stored, fetched with a projection, updated and re-read."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item = table.new_item(
        hash_key="LOLCat Forum",
        range_key="Check this out!",
        attrs={
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User A",
            "ReceivedTime": "12/9/2011 11:36:03 PM",
        },
    )
    item.put()

    table.has_item("LOLCat Forum", "Check this out!").should.equal(True)

    def fetch():
        # Project only Body/SentBy; the key attributes come back regardless.
        return table.get_item(
            hash_key="LOLCat Forum",
            range_key="Check this out!",
            attributes_to_get=["Body", "SentBy"],
        )

    dict(fetch()).should.equal(
        {
            "forum_name": "LOLCat Forum",
            "subject": "Check this out!",
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User A",
        }
    )

    # Overwrite one attribute and confirm the change is visible on re-read.
    item["SentBy"] = "User B"
    item.put()

    dict(fetch()).should.equal(
        {
            "forum_name": "LOLCat Forum",
            "subject": "Check this out!",
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User B",
        }
    )
@mock_dynamodb_deprecated
def test_item_put_without_table():
    """Putting an item into a table that was never created raises an error."""
    conn = boto.connect_dynamodb()
    attempt = conn.layer1.put_item.when.called_with(
        table_name="undeclared-table",
        item=dict(hash_key="LOLCat Forum", range_key="Check this out!"),
    )
    attempt.should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_get_missing_item():
    """get_item on an absent key raises, while has_item merely returns False."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    table.get_item.when.called_with(hash_key="tester", range_key="other").should.throw(
        DynamoDBKeyNotFoundError
    )
    table.has_item("foobar", "more").should.equal(False)
@mock_dynamodb_deprecated
def test_get_item_with_undeclared_table():
    """Fetching from a table that does not exist raises a not-found error."""
    conn = boto.connect_dynamodb()
    lookup_key = {
        "HashKeyElement": {"S": "tester"},
        "RangeKeyElement": {"S": "test-range"},
    }
    conn.layer1.get_item.when.called_with(
        table_name="undeclared-table", key=lookup_key
    ).should.throw(DynamoDBKeyNotFoundError)
@mock_dynamodb_deprecated
def test_get_item_without_range_key():
    """Omitting the range key on a hash+range table is a validation error."""
    conn = boto.connect_dynamodb()
    schema = conn.create_schema(
        hash_key_name="test_hash",
        hash_key_proto_value=int,
        range_key_name="test_range",
        range_key_proto_value=int,
    )
    table = conn.create_table(
        name="messages", schema=schema, read_units=10, write_units=10
    )

    hash_key = 3241526475
    table.new_item(hash_key=hash_key, range_key=1234567890987).put()

    table.get_item.when.called_with(hash_key=hash_key).should.throw(
        DynamoDBValidationError
    )
@mock_dynamodb_deprecated
def test_delete_item():
    """delete removes the item; deleting the same item again raises."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item = table.new_item(
        hash_key="LOLCat Forum",
        range_key="Check this out!",
        attrs={
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User A",
            "ReceivedTime": "12/9/2011 11:36:03 PM",
        },
    )
    item.put()

    table.refresh()
    table.item_count.should.equal(1)

    item.delete().should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5})

    table.refresh()
    table.item_count.should.equal(0)
    item.delete.when.called_with().should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_delete_item_with_attribute_response():
    """delete(return_values='ALL_OLD') echoes back the removed attributes."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item = table.new_item(
        hash_key="LOLCat Forum",
        range_key="Check this out!",
        attrs={
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User A",
            "ReceivedTime": "12/9/2011 11:36:03 PM",
        },
    )
    item.put()

    table.refresh()
    table.item_count.should.equal(1)

    # The response includes the key attributes alongside the stored ones.
    item.delete(return_values="ALL_OLD").should.equal(
        {
            "Attributes": {
                "Body": "http://url_to_lolcat.gif",
                "forum_name": "LOLCat Forum",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
                "SentBy": "User A",
                "subject": "Check this out!",
            },
            "ConsumedCapacityUnits": 0.5,
        }
    )

    table.refresh()
    table.item_count.should.equal(0)
    item.delete.when.called_with().should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_delete_item_with_undeclared_table():
    """Deleting from a table that does not exist raises an error."""
    conn = boto.connect_dynamodb()
    lookup_key = {
        "HashKeyElement": {"S": "tester"},
        "RangeKeyElement": {"S": "test-range"},
    }
    conn.layer1.delete_item.when.called_with(
        table_name="undeclared-table", key=lookup_key
    ).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_query():
    """Range-key conditions (GT/CONTAINS/BEGINS_WITH/BETWEEN) filter results."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    attrs = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    for range_key in ("456", "123", "789"):
        table.new_item(hash_key="the-key", range_key=range_key, attrs=attrs).put()

    # (condition, expected match count) — string comparison is lexicographic.
    cases = [
        (condition.GT("1"), 3),
        (condition.GT("234"), 2),
        (condition.GT("9999"), 0),
        (condition.CONTAINS("12"), 1),
        (condition.BEGINS_WITH("7"), 1),
        (condition.BETWEEN("567", "890"), 1),
    ]
    for range_condition, expected_count in cases:
        results = table.query(hash_key="the-key", range_key_condition=range_condition)
        results.response["Items"].should.have.length_of(expected_count)
@mock_dynamodb_deprecated
def test_query_with_undeclared_table():
    """Querying a table that does not exist raises an error."""
    conn = boto.connect_dynamodb()
    conn.layer1.query.when.called_with(
        table_name="undeclared-table",
        hash_key_value={"S": "the-key"},
        range_key_conditions={
            "AttributeValueList": [{"S": "User B"}],
            "ComparisonOperator": "EQ",
        },
    ).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_scan():
    """Scan filters (EQ/BEGINS_WITH/CONTAINS/NULL/NOT_NULL/BETWEEN) work."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    common = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    table.new_item(hash_key="the-key", range_key="456", attrs=common).put()
    table.new_item(hash_key="the-key", range_key="123", attrs=common).put()
    # Third item carries extra attributes (a number set and a number).
    table.new_item(
        hash_key="the-key",
        range_key="789",
        attrs={
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User B",
            "ReceivedTime": "12/9/2011 11:36:03 PM",
            "Ids": set([1, 2, 3]),
            "PK": 7,
        },
    ).put()

    table.scan().response["Items"].should.have.length_of(3)

    # (scan filter, expected match count)
    filter_cases = [
        ({"SentBy": condition.EQ("User B")}, 1),
        ({"Body": condition.BEGINS_WITH("http")}, 3),
        ({"Ids": condition.CONTAINS(2)}, 1),
        ({"Ids": condition.NOT_NULL()}, 1),
        ({"Ids": condition.NULL()}, 2),
        ({"PK": condition.BETWEEN(8, 9)}, 0),
        ({"PK": condition.BETWEEN(5, 8)}, 1),
    ]
    for scan_filter, expected_count in filter_cases:
        results = table.scan(scan_filter=scan_filter)
        results.response["Items"].should.have.length_of(expected_count)
@mock_dynamodb_deprecated
def test_scan_with_undeclared_table():
    """Scanning a table that does not exist raises an error."""
    conn = boto.connect_dynamodb()
    conn.layer1.scan.when.called_with(
        table_name="undeclared-table",
        scan_filter={
            "SentBy": {
                "AttributeValueList": [{"S": "User B"}],
                "ComparisonOperator": "EQ",
            }
        },
    ).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_scan_after_has_item():
    """Regression: has_item on a missing key must not create a phantom row."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    list(table.scan()).should.equal([])

    table.has_item(hash_key="the-key", range_key="123")

    list(table.scan()).should.equal([])
@mock_dynamodb_deprecated
def test_write_batch():
    """batch_write_item supports bulk puts and bulk deletes."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    put_batch = conn.new_batch_write_list()
    put_batch.add_batch(
        table,
        puts=[
            table.new_item(
                hash_key="the-key",
                range_key="123",
                attrs={
                    "Body": "http://url_to_lolcat.gif",
                    "SentBy": "User A",
                    "ReceivedTime": "12/9/2011 11:36:03 PM",
                },
            ),
            table.new_item(
                hash_key="the-key",
                range_key="789",
                attrs={
                    "Body": "http://url_to_lolcat.gif",
                    "SentBy": "User B",
                    "ReceivedTime": "12/9/2011 11:36:03 PM",
                    "Ids": set([1, 2, 3]),
                    "PK": 7,
                },
            ),
        ],
    )
    conn.batch_write_item(put_batch)

    table.refresh()
    table.item_count.should.equal(2)

    delete_batch = conn.new_batch_write_list()
    delete_batch.add_batch(table, deletes=[("the-key", "789")])
    conn.batch_write_item(delete_batch)

    table.refresh()
    table.item_count.should.equal(1)
@mock_dynamodb_deprecated
def test_batch_read():
    """batch_get_item yields exactly the requested hash/range pairs."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    common = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    table.new_item(hash_key="the-key", range_key="456", attrs=common).put()
    table.new_item(hash_key="the-key", range_key="123", attrs=common).put()
    table.new_item(
        hash_key="another-key",
        range_key="789",
        attrs={
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User B",
            "ReceivedTime": "12/9/2011 11:36:03 PM",
            "Ids": set([1, 2, 3]),
            "PK": 7,
        },
    ).put()

    items = table.batch_get_item([("the-key", "123"), ("another-key", "789")])
    # Consuming the iterator is what actually issues the batch request.
    len(list(items)).should.equal(2)

View File

@ -1,390 +1,390 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto import boto
import sure # noqa import sure # noqa
from freezegun import freeze_time from freezegun import freeze_time
from moto import mock_dynamodb_deprecated from moto import mock_dynamodb_deprecated
from boto.dynamodb import condition from boto.dynamodb import condition
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError
from boto.exception import DynamoDBResponseError from boto.exception import DynamoDBResponseError
def create_table(conn):
    """Create the hash-key-only 'messages' table shared by every test here."""
    schema = conn.create_schema(
        hash_key_name="forum_name", hash_key_proto_value=str
    )
    return conn.create_table(
        name="messages", schema=schema, read_units=10, write_units=10
    )
@freeze_time("2012-01-14")
@mock_dynamodb_deprecated
def test_create_table():
    """describe_table reports the hash-only schema and frozen creation time."""
    conn = boto.connect_dynamodb()
    create_table(conn)

    # 1326499200.0 is 2012-01-14 00:00:00 UTC, pinned by @freeze_time above.
    conn.describe_table("messages").should.equal(
        {
            "Table": {
                "CreationDateTime": 1326499200.0,
                "ItemCount": 0,
                "KeySchema": {
                    "HashKeyElement": {
                        "AttributeName": "forum_name",
                        "AttributeType": "S",
                    }
                },
                "ProvisionedThroughput": {
                    "ReadCapacityUnits": 10,
                    "WriteCapacityUnits": 10,
                },
                "TableName": "messages",
                "TableSizeBytes": 0,
                "TableStatus": "ACTIVE",
            }
        }
    )
@mock_dynamodb_deprecated
def test_delete_table():
    """Deleting the only table empties the listing; a repeat delete errors."""
    conn = boto.connect_dynamodb()
    create_table(conn)
    conn.list_tables().should.have.length_of(1)

    conn.layer1.delete_table("messages")
    conn.list_tables().should.have.length_of(0)

    conn.layer1.delete_table.when.called_with("messages").should.throw(
        DynamoDBResponseError
    )
@mock_dynamodb_deprecated
def test_update_table_throughput():
    """update_throughput changes the units reported after a refresh."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    table.read_units.should.equal(10)
    table.write_units.should.equal(10)

    table.update_throughput(5, 6)
    table.refresh()

    table.read_units.should.equal(5)
    table.write_units.should.equal(6)
@mock_dynamodb_deprecated
def test_item_add_and_describe_and_update():
    """A hash-only item can be stored, fetched, updated and re-read."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item = table.new_item(
        hash_key="LOLCat Forum",
        attrs={
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User A",
            "ReceivedTime": "12/9/2011 11:36:03 PM",
        },
    )
    item.put()

    def fetch():
        # Project only Body/SentBy; the hash key comes back regardless.
        return table.get_item(
            hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"]
        )

    dict(fetch()).should.equal(
        {
            "forum_name": "LOLCat Forum",
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User A",
        }
    )

    # Overwrite one attribute and confirm the change is visible on re-read.
    item["SentBy"] = "User B"
    item.put()

    dict(fetch()).should.equal(
        {
            "forum_name": "LOLCat Forum",
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User B",
        }
    )
@mock_dynamodb_deprecated
def test_item_put_without_table():
    """Putting an item into a table that was never created raises an error."""
    conn = boto.connect_dynamodb()
    attempt = conn.layer1.put_item.when.called_with(
        table_name="undeclared-table", item=dict(hash_key="LOLCat Forum")
    )
    attempt.should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_get_missing_item():
    """get_item on an absent hash key raises a not-found error."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    table.get_item.when.called_with(hash_key="tester").should.throw(
        DynamoDBKeyNotFoundError
    )
@mock_dynamodb_deprecated
def test_get_item_with_undeclared_table():
    """Fetching from a table that does not exist raises a not-found error."""
    conn = boto.connect_dynamodb()
    conn.layer1.get_item.when.called_with(
        table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}}
    ).should.throw(DynamoDBKeyNotFoundError)
@mock_dynamodb_deprecated
def test_delete_item():
    """delete removes the item; deleting the same item again raises."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item = table.new_item(
        hash_key="LOLCat Forum",
        attrs={
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User A",
            "ReceivedTime": "12/9/2011 11:36:03 PM",
        },
    )
    item.put()

    table.refresh()
    table.item_count.should.equal(1)

    item.delete().should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5})

    table.refresh()
    table.item_count.should.equal(0)
    item.delete.when.called_with().should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_delete_item_with_attribute_response():
    """delete(return_values='ALL_OLD') echoes back the removed attributes."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item = table.new_item(
        hash_key="LOLCat Forum",
        attrs={
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User A",
            "ReceivedTime": "12/9/2011 11:36:03 PM",
        },
    )
    item.put()

    table.refresh()
    table.item_count.should.equal(1)

    # The response includes the hash key alongside the stored attributes.
    item.delete(return_values="ALL_OLD").should.equal(
        {
            "Attributes": {
                "Body": "http://url_to_lolcat.gif",
                "forum_name": "LOLCat Forum",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
                "SentBy": "User A",
            },
            "ConsumedCapacityUnits": 0.5,
        }
    )

    table.refresh()
    table.item_count.should.equal(0)
    item.delete.when.called_with().should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_delete_item_with_undeclared_table():
    """Deleting from a table that does not exist raises an error."""
    conn = boto.connect_dynamodb()
    conn.layer1.delete_item.when.called_with(
        table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}}
    ).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_query():
    """Querying by hash key alone returns the single stored item."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    table.new_item(
        hash_key="the-key",
        attrs={
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User A",
            "ReceivedTime": "12/9/2011 11:36:03 PM",
        },
    ).put()

    results = table.query(hash_key="the-key")
    results.response["Items"].should.have.length_of(1)
@mock_dynamodb_deprecated @mock_dynamodb_deprecated
def test_query_with_undeclared_table(): def test_query_with_undeclared_table():
conn = boto.connect_dynamodb() conn = boto.connect_dynamodb()
conn.layer1.query.when.called_with( conn.layer1.query.when.called_with(
table_name="undeclared-table", hash_key_value={"S": "the-key"} table_name="undeclared-table", hash_key_value={"S": "the-key"}
).should.throw(DynamoDBResponseError) ).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated @mock_dynamodb_deprecated
def test_scan(): def test_scan():
conn = boto.connect_dynamodb() conn = boto.connect_dynamodb()
table = create_table(conn) table = create_table(conn)
item_data = { item_data = {
"Body": "http://url_to_lolcat.gif", "Body": "http://url_to_lolcat.gif",
"SentBy": "User A", "SentBy": "User A",
"ReceivedTime": "12/9/2011 11:36:03 PM", "ReceivedTime": "12/9/2011 11:36:03 PM",
} }
item = table.new_item(hash_key="the-key", attrs=item_data) item = table.new_item(hash_key="the-key", attrs=item_data)
item.put() item.put()
item = table.new_item(hash_key="the-key2", attrs=item_data) item = table.new_item(hash_key="the-key2", attrs=item_data)
item.put() item.put()
item_data = { item_data = {
"Body": "http://url_to_lolcat.gif", "Body": "http://url_to_lolcat.gif",
"SentBy": "User B", "SentBy": "User B",
"ReceivedTime": "12/9/2011 11:36:03 PM", "ReceivedTime": "12/9/2011 11:36:03 PM",
"Ids": set([1, 2, 3]), "Ids": set([1, 2, 3]),
"PK": 7, "PK": 7,
} }
item = table.new_item(hash_key="the-key3", attrs=item_data) item = table.new_item(hash_key="the-key3", attrs=item_data)
item.put() item.put()
results = table.scan() results = table.scan()
results.response["Items"].should.have.length_of(3) results.response["Items"].should.have.length_of(3)
results = table.scan(scan_filter={"SentBy": condition.EQ("User B")}) results = table.scan(scan_filter={"SentBy": condition.EQ("User B")})
results.response["Items"].should.have.length_of(1) results.response["Items"].should.have.length_of(1)
results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")}) results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")})
results.response["Items"].should.have.length_of(3) results.response["Items"].should.have.length_of(3)
results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)}) results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)})
results.response["Items"].should.have.length_of(1) results.response["Items"].should.have.length_of(1)
results = table.scan(scan_filter={"Ids": condition.NOT_NULL()}) results = table.scan(scan_filter={"Ids": condition.NOT_NULL()})
results.response["Items"].should.have.length_of(1) results.response["Items"].should.have.length_of(1)
results = table.scan(scan_filter={"Ids": condition.NULL()}) results = table.scan(scan_filter={"Ids": condition.NULL()})
results.response["Items"].should.have.length_of(2) results.response["Items"].should.have.length_of(2)
results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)}) results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)})
results.response["Items"].should.have.length_of(0) results.response["Items"].should.have.length_of(0)
results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)}) results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)})
results.response["Items"].should.have.length_of(1) results.response["Items"].should.have.length_of(1)
@mock_dynamodb_deprecated @mock_dynamodb_deprecated
def test_scan_with_undeclared_table(): def test_scan_with_undeclared_table():
conn = boto.connect_dynamodb() conn = boto.connect_dynamodb()
conn.layer1.scan.when.called_with( conn.layer1.scan.when.called_with(
table_name="undeclared-table", table_name="undeclared-table",
scan_filter={ scan_filter={
"SentBy": { "SentBy": {
"AttributeValueList": [{"S": "User B"}], "AttributeValueList": [{"S": "User B"}],
"ComparisonOperator": "EQ", "ComparisonOperator": "EQ",
} }
}, },
).should.throw(DynamoDBResponseError) ).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated @mock_dynamodb_deprecated
def test_scan_after_has_item(): def test_scan_after_has_item():
conn = boto.connect_dynamodb() conn = boto.connect_dynamodb()
table = create_table(conn) table = create_table(conn)
list(table.scan()).should.equal([]) list(table.scan()).should.equal([])
table.has_item("the-key") table.has_item("the-key")
list(table.scan()).should.equal([]) list(table.scan()).should.equal([])
@mock_dynamodb_deprecated @mock_dynamodb_deprecated
def test_write_batch(): def test_write_batch():
conn = boto.connect_dynamodb() conn = boto.connect_dynamodb()
table = create_table(conn) table = create_table(conn)
batch_list = conn.new_batch_write_list() batch_list = conn.new_batch_write_list()
items = [] items = []
items.append( items.append(
table.new_item( table.new_item(
hash_key="the-key", hash_key="the-key",
attrs={ attrs={
"Body": "http://url_to_lolcat.gif", "Body": "http://url_to_lolcat.gif",
"SentBy": "User A", "SentBy": "User A",
"ReceivedTime": "12/9/2011 11:36:03 PM", "ReceivedTime": "12/9/2011 11:36:03 PM",
}, },
) )
) )
items.append( items.append(
table.new_item( table.new_item(
hash_key="the-key2", hash_key="the-key2",
attrs={ attrs={
"Body": "http://url_to_lolcat.gif", "Body": "http://url_to_lolcat.gif",
"SentBy": "User B", "SentBy": "User B",
"ReceivedTime": "12/9/2011 11:36:03 PM", "ReceivedTime": "12/9/2011 11:36:03 PM",
"Ids": set([1, 2, 3]), "Ids": set([1, 2, 3]),
"PK": 7, "PK": 7,
}, },
) )
) )
batch_list.add_batch(table, puts=items) batch_list.add_batch(table, puts=items)
conn.batch_write_item(batch_list) conn.batch_write_item(batch_list)
table.refresh() table.refresh()
table.item_count.should.equal(2) table.item_count.should.equal(2)
batch_list = conn.new_batch_write_list() batch_list = conn.new_batch_write_list()
batch_list.add_batch(table, deletes=[("the-key")]) batch_list.add_batch(table, deletes=[("the-key")])
conn.batch_write_item(batch_list) conn.batch_write_item(batch_list)
table.refresh() table.refresh()
table.item_count.should.equal(1) table.item_count.should.equal(1)
@mock_dynamodb_deprecated @mock_dynamodb_deprecated
def test_batch_read(): def test_batch_read():
conn = boto.connect_dynamodb() conn = boto.connect_dynamodb()
table = create_table(conn) table = create_table(conn)
item_data = { item_data = {
"Body": "http://url_to_lolcat.gif", "Body": "http://url_to_lolcat.gif",
"SentBy": "User A", "SentBy": "User A",
"ReceivedTime": "12/9/2011 11:36:03 PM", "ReceivedTime": "12/9/2011 11:36:03 PM",
} }
item = table.new_item(hash_key="the-key1", attrs=item_data) item = table.new_item(hash_key="the-key1", attrs=item_data)
item.put() item.put()
item = table.new_item(hash_key="the-key2", attrs=item_data) item = table.new_item(hash_key="the-key2", attrs=item_data)
item.put() item.put()
item_data = { item_data = {
"Body": "http://url_to_lolcat.gif", "Body": "http://url_to_lolcat.gif",
"SentBy": "User B", "SentBy": "User B",
"ReceivedTime": "12/9/2011 11:36:03 PM", "ReceivedTime": "12/9/2011 11:36:03 PM",
"Ids": set([1, 2, 3]), "Ids": set([1, 2, 3]),
"PK": 7, "PK": 7,
} }
item = table.new_item(hash_key="another-key", attrs=item_data) item = table.new_item(hash_key="another-key", attrs=item_data)
item.put() item.put()
items = table.batch_get_item([("the-key1"), ("another-key")]) items = table.batch_get_item([("the-key1"), ("another-key")])
# Iterate through so that batch_item gets called # Iterate through so that batch_item gets called
count = len([x for x in items]) count = len([x for x in items])
count.should.have.equal(2) count.should.have.equal(2)

View File

@ -1,37 +1,37 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto3 import boto3
from moto import mock_ec2 from moto import mock_ec2
import sure # noqa import sure # noqa
@mock_ec2 @mock_ec2
def test_describe_account_attributes(): def test_describe_account_attributes():
conn = boto3.client("ec2", region_name="us-east-1") conn = boto3.client("ec2", region_name="us-east-1")
response = conn.describe_account_attributes() response = conn.describe_account_attributes()
expected_attribute_values = [ expected_attribute_values = [
{ {
"AttributeValues": [{"AttributeValue": "5"}], "AttributeValues": [{"AttributeValue": "5"}],
"AttributeName": "vpc-max-security-groups-per-interface", "AttributeName": "vpc-max-security-groups-per-interface",
}, },
{ {
"AttributeValues": [{"AttributeValue": "20"}], "AttributeValues": [{"AttributeValue": "20"}],
"AttributeName": "max-instances", "AttributeName": "max-instances",
}, },
{ {
"AttributeValues": [{"AttributeValue": "EC2"}, {"AttributeValue": "VPC"}], "AttributeValues": [{"AttributeValue": "EC2"}, {"AttributeValue": "VPC"}],
"AttributeName": "supported-platforms", "AttributeName": "supported-platforms",
}, },
{ {
"AttributeValues": [{"AttributeValue": "none"}], "AttributeValues": [{"AttributeValue": "none"}],
"AttributeName": "default-vpc", "AttributeName": "default-vpc",
}, },
{ {
"AttributeValues": [{"AttributeValue": "5"}], "AttributeValues": [{"AttributeValue": "5"}],
"AttributeName": "max-elastic-ips", "AttributeName": "max-elastic-ips",
}, },
{ {
"AttributeValues": [{"AttributeValue": "5"}], "AttributeValues": [{"AttributeValue": "5"}],
"AttributeName": "vpc-max-elastic-ips", "AttributeName": "vpc-max-elastic-ips",
}, },
] ]
response["AccountAttributes"].should.equal(expected_attribute_values) response["AccountAttributes"].should.equal(expected_attribute_values)

View File

@ -1,10 +1,10 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto import boto
import sure # noqa import sure # noqa
from moto import mock_ec2 from moto import mock_ec2
@mock_ec2 @mock_ec2
def test_amazon_dev_pay(): def test_amazon_dev_pay():
pass pass

View File

@ -1 +1 @@
from __future__ import unicode_literals from __future__ import unicode_literals

View File

@ -1,10 +1,10 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto import boto
import sure # noqa import sure # noqa
from moto import mock_ec2 from moto import mock_ec2
@mock_ec2 @mock_ec2
def test_ip_addresses(): def test_ip_addresses():
pass pass

View File

@ -1,10 +1,10 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto import boto
import sure # noqa import sure # noqa
from moto import mock_ec2 from moto import mock_ec2
@mock_ec2 @mock_ec2
def test_monitoring(): def test_monitoring():
pass pass

View File

@ -1,10 +1,10 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto import boto
import sure # noqa import sure # noqa
from moto import mock_ec2 from moto import mock_ec2
@mock_ec2 @mock_ec2
def test_placement_groups(): def test_placement_groups():
pass pass

View File

@ -1,10 +1,10 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto import boto
import sure # noqa import sure # noqa
from moto import mock_ec2 from moto import mock_ec2
@mock_ec2 @mock_ec2
def test_reserved_instances(): def test_reserved_instances():
pass pass

View File

@ -1,96 +1,96 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto import boto
import sure # noqa import sure # noqa
from moto import mock_ec2_deprecated from moto import mock_ec2_deprecated
@mock_ec2_deprecated @mock_ec2_deprecated
def test_virtual_private_gateways(): def test_virtual_private_gateways():
conn = boto.connect_vpc("the_key", "the_secret") conn = boto.connect_vpc("the_key", "the_secret")
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
vpn_gateway.should_not.be.none vpn_gateway.should_not.be.none
vpn_gateway.id.should.match(r"vgw-\w+") vpn_gateway.id.should.match(r"vgw-\w+")
vpn_gateway.type.should.equal("ipsec.1") vpn_gateway.type.should.equal("ipsec.1")
vpn_gateway.state.should.equal("available") vpn_gateway.state.should.equal("available")
vpn_gateway.availability_zone.should.equal("us-east-1a") vpn_gateway.availability_zone.should.equal("us-east-1a")
@mock_ec2_deprecated @mock_ec2_deprecated
def test_describe_vpn_gateway(): def test_describe_vpn_gateway():
conn = boto.connect_vpc("the_key", "the_secret") conn = boto.connect_vpc("the_key", "the_secret")
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
vgws = conn.get_all_vpn_gateways() vgws = conn.get_all_vpn_gateways()
vgws.should.have.length_of(1) vgws.should.have.length_of(1)
gateway = vgws[0] gateway = vgws[0]
gateway.id.should.match(r"vgw-\w+") gateway.id.should.match(r"vgw-\w+")
gateway.id.should.equal(vpn_gateway.id) gateway.id.should.equal(vpn_gateway.id)
vpn_gateway.type.should.equal("ipsec.1") vpn_gateway.type.should.equal("ipsec.1")
vpn_gateway.state.should.equal("available") vpn_gateway.state.should.equal("available")
vpn_gateway.availability_zone.should.equal("us-east-1a") vpn_gateway.availability_zone.should.equal("us-east-1a")
@mock_ec2_deprecated @mock_ec2_deprecated
def test_vpn_gateway_vpc_attachment(): def test_vpn_gateway_vpc_attachment():
conn = boto.connect_vpc("the_key", "the_secret") conn = boto.connect_vpc("the_key", "the_secret")
vpc = conn.create_vpc("10.0.0.0/16") vpc = conn.create_vpc("10.0.0.0/16")
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id) conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id)
gateway = conn.get_all_vpn_gateways()[0] gateway = conn.get_all_vpn_gateways()[0]
attachments = gateway.attachments attachments = gateway.attachments
attachments.should.have.length_of(1) attachments.should.have.length_of(1)
attachments[0].vpc_id.should.equal(vpc.id) attachments[0].vpc_id.should.equal(vpc.id)
attachments[0].state.should.equal("attached") attachments[0].state.should.equal("attached")
@mock_ec2_deprecated @mock_ec2_deprecated
def test_delete_vpn_gateway(): def test_delete_vpn_gateway():
conn = boto.connect_vpc("the_key", "the_secret") conn = boto.connect_vpc("the_key", "the_secret")
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
conn.delete_vpn_gateway(vpn_gateway.id) conn.delete_vpn_gateway(vpn_gateway.id)
vgws = conn.get_all_vpn_gateways() vgws = conn.get_all_vpn_gateways()
vgws.should.have.length_of(0) vgws.should.have.length_of(0)
@mock_ec2_deprecated @mock_ec2_deprecated
def test_vpn_gateway_tagging(): def test_vpn_gateway_tagging():
conn = boto.connect_vpc("the_key", "the_secret") conn = boto.connect_vpc("the_key", "the_secret")
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
vpn_gateway.add_tag("a key", "some value") vpn_gateway.add_tag("a key", "some value")
tag = conn.get_all_tags()[0] tag = conn.get_all_tags()[0]
tag.name.should.equal("a key") tag.name.should.equal("a key")
tag.value.should.equal("some value") tag.value.should.equal("some value")
# Refresh the subnet # Refresh the subnet
vpn_gateway = conn.get_all_vpn_gateways()[0] vpn_gateway = conn.get_all_vpn_gateways()[0]
vpn_gateway.tags.should.have.length_of(1) vpn_gateway.tags.should.have.length_of(1)
vpn_gateway.tags["a key"].should.equal("some value") vpn_gateway.tags["a key"].should.equal("some value")
@mock_ec2_deprecated @mock_ec2_deprecated
def test_detach_vpn_gateway(): def test_detach_vpn_gateway():
conn = boto.connect_vpc("the_key", "the_secret") conn = boto.connect_vpc("the_key", "the_secret")
vpc = conn.create_vpc("10.0.0.0/16") vpc = conn.create_vpc("10.0.0.0/16")
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a") vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id) conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id)
gateway = conn.get_all_vpn_gateways()[0] gateway = conn.get_all_vpn_gateways()[0]
attachments = gateway.attachments attachments = gateway.attachments
attachments.should.have.length_of(1) attachments.should.have.length_of(1)
attachments[0].vpc_id.should.equal(vpc.id) attachments[0].vpc_id.should.equal(vpc.id)
attachments[0].state.should.equal("attached") attachments[0].state.should.equal("attached")
conn.detach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id) conn.detach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id)
gateway = conn.get_all_vpn_gateways()[0] gateway = conn.get_all_vpn_gateways()[0]
attachments = gateway.attachments attachments = gateway.attachments
attachments.should.have.length_of(0) attachments.should.have.length_of(0)

View File

@ -1,10 +1,10 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto import boto
import sure # noqa import sure # noqa
from moto import mock_ec2 from moto import mock_ec2
@mock_ec2 @mock_ec2
def test_vm_export(): def test_vm_export():
pass pass

View File

@ -1,10 +1,10 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto import boto
import sure # noqa import sure # noqa
from moto import mock_ec2 from moto import mock_ec2
@mock_ec2 @mock_ec2
def test_vm_import(): def test_vm_import():
pass pass

View File

@ -1,10 +1,10 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto import boto
import sure # noqa import sure # noqa
from moto import mock_ec2 from moto import mock_ec2
@mock_ec2 @mock_ec2
def test_windows(): def test_windows():
pass pass

View File

@ -752,7 +752,9 @@ def test_steps():
# StateChangeReason # StateChangeReason
x["Status"]["Timeline"]["CreationDateTime"].should.be.a("datetime.datetime") x["Status"]["Timeline"]["CreationDateTime"].should.be.a("datetime.datetime")
# x['Status']['Timeline']['EndDateTime'].should.be.a('datetime.datetime') # x['Status']['Timeline']['EndDateTime'].should.be.a('datetime.datetime')
# x['Status']['Timeline']['StartDateTime'].should.be.a('datetime.datetime') # Only the first step will have started - we don't know anything about when it finishes, so the second step never starts
if x["Name"] == "My wordcount example":
x["Status"]["Timeline"]["StartDateTime"].should.be.a("datetime.datetime")
x = client.describe_step(ClusterId=cluster_id, StepId=x["Id"])["Step"] x = client.describe_step(ClusterId=cluster_id, StepId=x["Id"])["Step"]
x["ActionOnFailure"].should.equal("TERMINATE_CLUSTER") x["ActionOnFailure"].should.equal("TERMINATE_CLUSTER")

View File

@ -1,11 +1,14 @@
import random from moto.events.models import EventsBackend
import boto3
import json
import sure # noqa
from moto.events import mock_events from moto.events import mock_events
import json
import random
import unittest
import boto3
from botocore.exceptions import ClientError from botocore.exceptions import ClientError
from moto.core.exceptions import JsonRESTError
from nose.tools import assert_raises from nose.tools import assert_raises
from moto.core import ACCOUNT_ID from moto.core import ACCOUNT_ID
RULES = [ RULES = [
@ -136,14 +139,6 @@ def test_list_rule_names_by_target():
assert rule in test_2_target["Rules"] assert rule in test_2_target["Rules"]
@mock_events
def test_list_rules():
client = generate_environment()
rules = client.list_rules()
assert len(rules["Rules"]) == len(RULES)
@mock_events @mock_events
def test_delete_rule(): def test_delete_rule():
client = generate_environment() client = generate_environment()
@ -461,3 +456,50 @@ def test_delete_event_bus_errors():
client.delete_event_bus.when.called_with(Name="default").should.throw( client.delete_event_bus.when.called_with(Name="default").should.throw(
ClientError, "Cannot delete event bus default." ClientError, "Cannot delete event bus default."
) )
@mock_events
def test_rule_tagging_happy():
client = generate_environment()
rule_name = get_random_rule()["Name"]
rule_arn = client.describe_rule(Name=rule_name).get("Arn")
tags = [{"Key": "key1", "Value": "value1"}, {"Key": "key2", "Value": "value2"}]
client.tag_resource(ResourceARN=rule_arn, Tags=tags)
actual = client.list_tags_for_resource(ResourceARN=rule_arn).get("Tags")
tc = unittest.TestCase("__init__")
expected = [{"Value": "value1", "Key": "key1"}, {"Value": "value2", "Key": "key2"}]
tc.assertTrue(
(expected[0] == actual[0] and expected[1] == actual[1])
or (expected[1] == actual[0] and expected[0] == actual[1])
)
client.untag_resource(ResourceARN=rule_arn, TagKeys=["key1"])
actual = client.list_tags_for_resource(ResourceARN=rule_arn).get("Tags")
expected = [{"Key": "key2", "Value": "value2"}]
assert expected == actual
@mock_events
def test_rule_tagging_sad():
back_end = EventsBackend("us-west-2")
try:
back_end.tag_resource("unknown", [])
raise "tag_resource should fail if ResourceARN is not known"
except JsonRESTError:
pass
try:
back_end.untag_resource("unknown", [])
raise "untag_resource should fail if ResourceARN is not known"
except JsonRESTError:
pass
try:
back_end.list_tags_for_resource("unknown")
raise "list_tags_for_resource should fail if ResourceARN is not known"
except JsonRESTError:
pass

View File

@ -1,21 +1,21 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from tempfile import NamedTemporaryFile from tempfile import NamedTemporaryFile
import boto.glacier import boto.glacier
import sure # noqa import sure # noqa
from moto import mock_glacier_deprecated from moto import mock_glacier_deprecated
@mock_glacier_deprecated @mock_glacier_deprecated
def test_create_and_delete_archive(): def test_create_and_delete_archive():
the_file = NamedTemporaryFile(delete=False) the_file = NamedTemporaryFile(delete=False)
the_file.write(b"some stuff") the_file.write(b"some stuff")
the_file.close() the_file.close()
conn = boto.glacier.connect_to_region("us-west-2") conn = boto.glacier.connect_to_region("us-west-2")
vault = conn.create_vault("my_vault") vault = conn.create_vault("my_vault")
archive_id = vault.upload_archive(the_file.name) archive_id = vault.upload_archive(the_file.name)
vault.delete_archive(archive_id) vault.delete_archive(archive_id)

View File

@ -1,31 +1,31 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto.glacier import boto.glacier
import sure # noqa import sure # noqa
from moto import mock_glacier_deprecated from moto import mock_glacier_deprecated
@mock_glacier_deprecated @mock_glacier_deprecated
def test_create_vault(): def test_create_vault():
conn = boto.glacier.connect_to_region("us-west-2") conn = boto.glacier.connect_to_region("us-west-2")
conn.create_vault("my_vault") conn.create_vault("my_vault")
vaults = conn.list_vaults() vaults = conn.list_vaults()
vaults.should.have.length_of(1) vaults.should.have.length_of(1)
vaults[0].name.should.equal("my_vault") vaults[0].name.should.equal("my_vault")
@mock_glacier_deprecated @mock_glacier_deprecated
def test_delete_vault(): def test_delete_vault():
conn = boto.glacier.connect_to_region("us-west-2") conn = boto.glacier.connect_to_region("us-west-2")
conn.create_vault("my_vault") conn.create_vault("my_vault")
vaults = conn.list_vaults() vaults = conn.list_vaults()
vaults.should.have.length_of(1) vaults.should.have.length_of(1)
conn.delete_vault("my_vault") conn.delete_vault("my_vault")
vaults = conn.list_vaults() vaults = conn.list_vaults()
vaults.should.have.length_of(0) vaults.should.have.length_of(0)

View File

@ -1 +1 @@
from __future__ import unicode_literals from __future__ import unicode_literals

View File

@ -1 +1 @@
from __future__ import unicode_literals from __future__ import unicode_literals

View File

@ -1,97 +1,97 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import copy import copy
from .fixtures.datacatalog import TABLE_INPUT, PARTITION_INPUT from .fixtures.datacatalog import TABLE_INPUT, PARTITION_INPUT
def create_database(client, database_name): def create_database(client, database_name):
return client.create_database(DatabaseInput={"Name": database_name}) return client.create_database(DatabaseInput={"Name": database_name})
def get_database(client, database_name): def get_database(client, database_name):
return client.get_database(Name=database_name) return client.get_database(Name=database_name)
def create_table_input(database_name, table_name, columns=[], partition_keys=[]): def create_table_input(database_name, table_name, columns=[], partition_keys=[]):
table_input = copy.deepcopy(TABLE_INPUT) table_input = copy.deepcopy(TABLE_INPUT)
table_input["Name"] = table_name table_input["Name"] = table_name
table_input["PartitionKeys"] = partition_keys table_input["PartitionKeys"] = partition_keys
table_input["StorageDescriptor"]["Columns"] = columns table_input["StorageDescriptor"]["Columns"] = columns
table_input["StorageDescriptor"][ table_input["StorageDescriptor"][
"Location" "Location"
] = "s3://my-bucket/{database_name}/{table_name}".format( ] = "s3://my-bucket/{database_name}/{table_name}".format(
database_name=database_name, table_name=table_name database_name=database_name, table_name=table_name
) )
return table_input return table_input
def create_table(client, database_name, table_name, table_input=None, **kwargs): def create_table(client, database_name, table_name, table_input=None, **kwargs):
if table_input is None: if table_input is None:
table_input = create_table_input(database_name, table_name, **kwargs) table_input = create_table_input(database_name, table_name, **kwargs)
return client.create_table(DatabaseName=database_name, TableInput=table_input) return client.create_table(DatabaseName=database_name, TableInput=table_input)
def update_table(client, database_name, table_name, table_input=None, **kwargs): def update_table(client, database_name, table_name, table_input=None, **kwargs):
if table_input is None: if table_input is None:
table_input = create_table_input(database_name, table_name, **kwargs) table_input = create_table_input(database_name, table_name, **kwargs)
return client.update_table(DatabaseName=database_name, TableInput=table_input) return client.update_table(DatabaseName=database_name, TableInput=table_input)
def get_table(client, database_name, table_name): def get_table(client, database_name, table_name):
return client.get_table(DatabaseName=database_name, Name=table_name) return client.get_table(DatabaseName=database_name, Name=table_name)
def get_tables(client, database_name): def get_tables(client, database_name):
return client.get_tables(DatabaseName=database_name) return client.get_tables(DatabaseName=database_name)
def get_table_versions(client, database_name, table_name): def get_table_versions(client, database_name, table_name):
return client.get_table_versions(DatabaseName=database_name, TableName=table_name) return client.get_table_versions(DatabaseName=database_name, TableName=table_name)
def get_table_version(client, database_name, table_name, version_id): def get_table_version(client, database_name, table_name, version_id):
return client.get_table_version( return client.get_table_version(
DatabaseName=database_name, TableName=table_name, VersionId=version_id DatabaseName=database_name, TableName=table_name, VersionId=version_id
) )
def create_partition_input(database_name, table_name, values=[], columns=[]): def create_partition_input(database_name, table_name, values=[], columns=[]):
root_path = "s3://my-bucket/{database_name}/{table_name}".format( root_path = "s3://my-bucket/{database_name}/{table_name}".format(
database_name=database_name, table_name=table_name database_name=database_name, table_name=table_name
) )
part_input = copy.deepcopy(PARTITION_INPUT) part_input = copy.deepcopy(PARTITION_INPUT)
part_input["Values"] = values part_input["Values"] = values
part_input["StorageDescriptor"]["Columns"] = columns part_input["StorageDescriptor"]["Columns"] = columns
part_input["StorageDescriptor"]["SerdeInfo"]["Parameters"]["path"] = root_path part_input["StorageDescriptor"]["SerdeInfo"]["Parameters"]["path"] = root_path
return part_input return part_input
def create_partition(client, database_name, table_name, partiton_input=None, **kwargs): def create_partition(client, database_name, table_name, partiton_input=None, **kwargs):
if partiton_input is None: if partiton_input is None:
partiton_input = create_partition_input(database_name, table_name, **kwargs) partiton_input = create_partition_input(database_name, table_name, **kwargs)
return client.create_partition( return client.create_partition(
DatabaseName=database_name, TableName=table_name, PartitionInput=partiton_input DatabaseName=database_name, TableName=table_name, PartitionInput=partiton_input
) )
def update_partition( def update_partition(
client, database_name, table_name, old_values=[], partiton_input=None, **kwargs client, database_name, table_name, old_values=[], partiton_input=None, **kwargs
): ):
if partiton_input is None: if partiton_input is None:
partiton_input = create_partition_input(database_name, table_name, **kwargs) partiton_input = create_partition_input(database_name, table_name, **kwargs)
return client.update_partition( return client.update_partition(
DatabaseName=database_name, DatabaseName=database_name,
TableName=table_name, TableName=table_name,
PartitionInput=partiton_input, PartitionInput=partiton_input,
PartitionValueList=old_values, PartitionValueList=old_values,
) )
def get_partition(client, database_name, table_name, values): def get_partition(client, database_name, table_name, values):
return client.get_partition( return client.get_partition(
DatabaseName=database_name, TableName=table_name, PartitionValues=values DatabaseName=database_name, TableName=table_name, PartitionValues=values
) )

View File

@ -9,6 +9,173 @@ from botocore.exceptions import ClientError
from nose.tools import assert_raises from nose.tools import assert_raises
@mock_iot
def test_attach_policy():
client = boto3.client("iot", region_name="ap-northeast-1")
policy_name = "my-policy"
doc = "{}"
cert = client.create_keys_and_certificate(setAsActive=True)
cert_arn = cert["certificateArn"]
client.create_policy(policyName=policy_name, policyDocument=doc)
client.attach_policy(policyName=policy_name, target=cert_arn)
res = client.list_attached_policies(target=cert_arn)
res.should.have.key("policies").which.should.have.length_of(1)
res["policies"][0]["policyName"].should.equal("my-policy")
@mock_iot
def test_detach_policy():
client = boto3.client("iot", region_name="ap-northeast-1")
policy_name = "my-policy"
doc = "{}"
cert = client.create_keys_and_certificate(setAsActive=True)
cert_arn = cert["certificateArn"]
client.create_policy(policyName=policy_name, policyDocument=doc)
client.attach_policy(policyName=policy_name, target=cert_arn)
res = client.list_attached_policies(target=cert_arn)
res.should.have.key("policies").which.should.have.length_of(1)
res["policies"][0]["policyName"].should.equal("my-policy")
client.detach_policy(policyName=policy_name, target=cert_arn)
res = client.list_attached_policies(target=cert_arn)
res.should.have.key("policies").which.should.be.empty
@mock_iot
def test_list_attached_policies():
client = boto3.client("iot", region_name="ap-northeast-1")
cert = client.create_keys_and_certificate(setAsActive=True)
policies = client.list_attached_policies(target=cert["certificateArn"])
policies["policies"].should.be.empty
@mock_iot
def test_policy_versions():
client = boto3.client("iot", region_name="ap-northeast-1")
policy_name = "my-policy"
doc = "{}"
policy = client.create_policy(policyName=policy_name, policyDocument=doc)
policy.should.have.key("policyName").which.should.equal(policy_name)
policy.should.have.key("policyArn").which.should_not.be.none
policy.should.have.key("policyDocument").which.should.equal(json.dumps({}))
policy.should.have.key("policyVersionId").which.should.equal("1")
policy = client.get_policy(policyName=policy_name)
policy.should.have.key("policyName").which.should.equal(policy_name)
policy.should.have.key("policyArn").which.should_not.be.none
policy.should.have.key("policyDocument").which.should.equal(json.dumps({}))
policy.should.have.key("defaultVersionId").which.should.equal(
policy["defaultVersionId"]
)
policy1 = client.create_policy_version(
policyName=policy_name,
policyDocument=json.dumps({"version": "version_1"}),
setAsDefault=True,
)
policy1.should.have.key("policyArn").which.should_not.be.none
policy1.should.have.key("policyDocument").which.should.equal(
json.dumps({"version": "version_1"})
)
policy1.should.have.key("policyVersionId").which.should.equal("2")
policy1.should.have.key("isDefaultVersion").which.should.equal(True)
policy2 = client.create_policy_version(
policyName=policy_name,
policyDocument=json.dumps({"version": "version_2"}),
setAsDefault=False,
)
policy2.should.have.key("policyArn").which.should_not.be.none
policy2.should.have.key("policyDocument").which.should.equal(
json.dumps({"version": "version_2"})
)
policy2.should.have.key("policyVersionId").which.should.equal("3")
policy2.should.have.key("isDefaultVersion").which.should.equal(False)
policy = client.get_policy(policyName=policy_name)
policy.should.have.key("policyName").which.should.equal(policy_name)
policy.should.have.key("policyArn").which.should_not.be.none
policy.should.have.key("policyDocument").which.should.equal(
json.dumps({"version": "version_1"})
)
policy.should.have.key("defaultVersionId").which.should.equal(
policy1["policyVersionId"]
)
policy_versions = client.list_policy_versions(policyName=policy_name)
policy_versions.should.have.key("policyVersions").which.should.have.length_of(3)
list(
map(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
).count(True).should.equal(1)
default_policy = list(
filter(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
)
default_policy[0].should.have.key("versionId").should.equal(
policy1["policyVersionId"]
)
policy = client.get_policy(policyName=policy_name)
policy.should.have.key("policyName").which.should.equal(policy_name)
policy.should.have.key("policyArn").which.should_not.be.none
policy.should.have.key("policyDocument").which.should.equal(
json.dumps({"version": "version_1"})
)
policy.should.have.key("defaultVersionId").which.should.equal(
policy1["policyVersionId"]
)
client.set_default_policy_version(
policyName=policy_name, policyVersionId=policy2["policyVersionId"]
)
policy_versions = client.list_policy_versions(policyName=policy_name)
policy_versions.should.have.key("policyVersions").which.should.have.length_of(3)
list(
map(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
).count(True).should.equal(1)
default_policy = list(
filter(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
)
default_policy[0].should.have.key("versionId").should.equal(
policy2["policyVersionId"]
)
policy = client.get_policy(policyName=policy_name)
policy.should.have.key("policyName").which.should.equal(policy_name)
policy.should.have.key("policyArn").which.should_not.be.none
policy.should.have.key("policyDocument").which.should.equal(
json.dumps({"version": "version_2"})
)
policy.should.have.key("defaultVersionId").which.should.equal(
policy2["policyVersionId"]
)
client.delete_policy_version(policyName=policy_name, policyVersionId="1")
policy_versions = client.list_policy_versions(policyName=policy_name)
policy_versions.should.have.key("policyVersions").which.should.have.length_of(2)
client.delete_policy_version(
policyName=policy_name, policyVersionId=policy1["policyVersionId"]
)
policy_versions = client.list_policy_versions(policyName=policy_name)
policy_versions.should.have.key("policyVersions").which.should.have.length_of(1)
# should fail as it"s the default policy. Should use delete_policy instead
try:
client.delete_policy_version(
policyName=policy_name, policyVersionId=policy2["policyVersionId"]
)
assert False, "Should have failed in previous call"
except Exception as exception:
exception.response["Error"]["Message"].should.equal(
"Cannot delete the default version of a policy"
)
@mock_iot @mock_iot
def test_things(): def test_things():
client = boto3.client("iot", region_name="ap-northeast-1") client = boto3.client("iot", region_name="ap-northeast-1")
@ -994,7 +1161,10 @@ def test_create_job():
client = boto3.client("iot", region_name="eu-west-1") client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing" name = "my-thing"
job_id = "TestJob" job_id = "TestJob"
# thing # thing# job document
# job_document = {
# "field": "value"
# }
thing = client.create_thing(thingName=name) thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name) thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn") thing.should.have.key("thingArn")
@ -1020,6 +1190,63 @@ def test_create_job():
job.should.have.key("description") job.should.have.key("description")
@mock_iot
def test_list_jobs():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing# job document
# job_document = {
# "field": "value"
# }
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
# job document
job_document = {"field": "value"}
job1 = client.create_job(
jobId=job_id,
targets=[thing["thingArn"]],
document=json.dumps(job_document),
description="Description",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job1.should.have.key("jobId").which.should.equal(job_id)
job1.should.have.key("jobArn")
job1.should.have.key("description")
job2 = client.create_job(
jobId=job_id + "1",
targets=[thing["thingArn"]],
document=json.dumps(job_document),
description="Description",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job2.should.have.key("jobId").which.should.equal(job_id + "1")
job2.should.have.key("jobArn")
job2.should.have.key("description")
jobs = client.list_jobs()
jobs.should.have.key("jobs")
jobs.should_not.have.key("nextToken")
jobs["jobs"][0].should.have.key("jobId").which.should.equal(job_id)
jobs["jobs"][1].should.have.key("jobId").which.should.equal(job_id + "1")
@mock_iot @mock_iot
def test_describe_job(): def test_describe_job():
client = boto3.client("iot", region_name="eu-west-1") client = boto3.client("iot", region_name="eu-west-1")
@ -1124,3 +1351,387 @@ def test_describe_job_1():
job.should.have.key("job").which.should.have.key( job.should.have.key("job").which.should.have.key(
"jobExecutionsRolloutConfig" "jobExecutionsRolloutConfig"
).which.should.have.key("maximumPerMinute").which.should.equal(10) ).which.should.have.key("maximumPerMinute").which.should.equal(10)
@mock_iot
def test_delete_job():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
job = client.create_job(
jobId=job_id,
targets=[thing["thingArn"]],
documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("jobArn")
job = client.describe_job(jobId=job_id)
job.should.have.key("job")
job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id)
client.delete_job(jobId=job_id)
client.list_jobs()["jobs"].should.have.length_of(0)
@mock_iot
def test_cancel_job():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
job = client.create_job(
jobId=job_id,
targets=[thing["thingArn"]],
documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("jobArn")
job = client.describe_job(jobId=job_id)
job.should.have.key("job")
job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id)
job = client.cancel_job(jobId=job_id, reasonCode="Because", comment="You are")
job.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("jobArn")
job = client.describe_job(jobId=job_id)
job.should.have.key("job")
job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("job").which.should.have.key("status").which.should.equal(
"CANCELED"
)
job.should.have.key("job").which.should.have.key(
"forceCanceled"
).which.should.equal(False)
job.should.have.key("job").which.should.have.key("reasonCode").which.should.equal(
"Because"
)
job.should.have.key("job").which.should.have.key("comment").which.should.equal(
"You are"
)
@mock_iot
def test_get_job_document_with_document_source():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
job = client.create_job(
jobId=job_id,
targets=[thing["thingArn"]],
documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("jobArn")
job_document = client.get_job_document(jobId=job_id)
job_document.should.have.key("document").which.should.equal("")
@mock_iot
def test_get_job_document_with_document():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
# job document
job_document = {"field": "value"}
job = client.create_job(
jobId=job_id,
targets=[thing["thingArn"]],
document=json.dumps(job_document),
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("jobArn")
job_document = client.get_job_document(jobId=job_id)
job_document.should.have.key("document").which.should.equal('{"field": "value"}')
@mock_iot
def test_describe_job_execution():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
# job document
job_document = {"field": "value"}
job = client.create_job(
jobId=job_id,
targets=[thing["thingArn"]],
document=json.dumps(job_document),
description="Description",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("jobArn")
job.should.have.key("description")
job_execution = client.describe_job_execution(jobId=job_id, thingName=name)
job_execution.should.have.key("execution")
job_execution["execution"].should.have.key("jobId").which.should.equal(job_id)
job_execution["execution"].should.have.key("status").which.should.equal("QUEUED")
job_execution["execution"].should.have.key("forceCanceled").which.should.equal(
False
)
job_execution["execution"].should.have.key("statusDetails").which.should.equal(
{"detailsMap": {}}
)
job_execution["execution"].should.have.key("thingArn").which.should.equal(
thing["thingArn"]
)
job_execution["execution"].should.have.key("queuedAt")
job_execution["execution"].should.have.key("startedAt")
job_execution["execution"].should.have.key("lastUpdatedAt")
job_execution["execution"].should.have.key("executionNumber").which.should.equal(
123
)
job_execution["execution"].should.have.key("versionNumber").which.should.equal(123)
job_execution["execution"].should.have.key(
"approximateSecondsBeforeTimedOut"
).which.should.equal(123)
job_execution = client.describe_job_execution(
jobId=job_id, thingName=name, executionNumber=123
)
job_execution.should.have.key("execution")
job_execution["execution"].should.have.key("jobId").which.should.equal(job_id)
job_execution["execution"].should.have.key("status").which.should.equal("QUEUED")
job_execution["execution"].should.have.key("forceCanceled").which.should.equal(
False
)
job_execution["execution"].should.have.key("statusDetails").which.should.equal(
{"detailsMap": {}}
)
job_execution["execution"].should.have.key("thingArn").which.should.equal(
thing["thingArn"]
)
job_execution["execution"].should.have.key("queuedAt")
job_execution["execution"].should.have.key("startedAt")
job_execution["execution"].should.have.key("lastUpdatedAt")
job_execution["execution"].should.have.key("executionNumber").which.should.equal(
123
)
job_execution["execution"].should.have.key("versionNumber").which.should.equal(123)
job_execution["execution"].should.have.key(
"approximateSecondsBeforeTimedOut"
).which.should.equal(123)
try:
client.describe_job_execution(jobId=job_id, thingName=name, executionNumber=456)
except ClientError as exc:
error_code = exc.response["Error"]["Code"]
error_code.should.equal("ResourceNotFoundException")
else:
raise Exception("Should have raised error")
@mock_iot
def test_cancel_job_execution():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
# job document
job_document = {"field": "value"}
job = client.create_job(
jobId=job_id,
targets=[thing["thingArn"]],
document=json.dumps(job_document),
description="Description",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("jobArn")
job.should.have.key("description")
client.cancel_job_execution(jobId=job_id, thingName=name)
job_execution = client.describe_job_execution(jobId=job_id, thingName=name)
job_execution.should.have.key("execution")
job_execution["execution"].should.have.key("status").which.should.equal("CANCELED")
@mock_iot
def test_delete_job_execution():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
# job document
job_document = {"field": "value"}
job = client.create_job(
jobId=job_id,
targets=[thing["thingArn"]],
document=json.dumps(job_document),
description="Description",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("jobArn")
job.should.have.key("description")
client.delete_job_execution(jobId=job_id, thingName=name, executionNumber=123)
try:
client.describe_job_execution(jobId=job_id, thingName=name, executionNumber=123)
except ClientError as exc:
error_code = exc.response["Error"]["Code"]
error_code.should.equal("ResourceNotFoundException")
else:
raise Exception("Should have raised error")
@mock_iot
def test_list_job_executions_for_job():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
# job document
job_document = {"field": "value"}
job = client.create_job(
jobId=job_id,
targets=[thing["thingArn"]],
document=json.dumps(job_document),
description="Description",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("jobArn")
job.should.have.key("description")
job_execution = client.list_job_executions_for_job(jobId=job_id)
job_execution.should.have.key("executionSummaries")
job_execution["executionSummaries"][0].should.have.key(
"thingArn"
).which.should.equal(thing["thingArn"])
@mock_iot
def test_list_job_executions_for_thing():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
# job document
job_document = {"field": "value"}
job = client.create_job(
jobId=job_id,
targets=[thing["thingArn"]],
document=json.dumps(job_document),
description="Description",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("jobArn")
job.should.have.key("description")
job_execution = client.list_job_executions_for_thing(thingName=name)
job_execution.should.have.key("executionSummaries")
job_execution["executionSummaries"][0].should.have.key("jobId").which.should.equal(
job_id
)

View File

@ -4,15 +4,17 @@ import base64
import re import re
import boto.kms import boto.kms
import boto3
import six import six
import sure # noqa import sure # noqa
from boto.exception import JSONResponseError from boto.exception import JSONResponseError
from boto.kms.exceptions import AlreadyExistsException, NotFoundException from boto.kms.exceptions import AlreadyExistsException, NotFoundException
from nose.tools import assert_raises from nose.tools import assert_raises
from parameterized import parameterized from parameterized import parameterized
from moto.core.exceptions import JsonRESTError
from moto.kms.models import KmsBackend
from moto.kms.exceptions import NotFoundException as MotoNotFoundException from moto.kms.exceptions import NotFoundException as MotoNotFoundException
from moto import mock_kms_deprecated from moto import mock_kms_deprecated, mock_kms
PLAINTEXT_VECTORS = ( PLAINTEXT_VECTORS = (
(b"some encodeable plaintext",), (b"some encodeable plaintext",),
@ -679,3 +681,77 @@ def test__assert_default_policy():
_assert_default_policy.when.called_with("default").should_not.throw( _assert_default_policy.when.called_with("default").should_not.throw(
MotoNotFoundException MotoNotFoundException
) )
if six.PY2:
sort = sorted
else:
sort = lambda l: sorted(l, key=lambda d: d.keys())
@mock_kms
def test_key_tag_on_create_key_happy():
client = boto3.client("kms", region_name="us-east-1")
tags = [
{"TagKey": "key1", "TagValue": "value1"},
{"TagKey": "key2", "TagValue": "value2"},
]
key = client.create_key(Description="test-key-tagging", Tags=tags)
key_id = key["KeyMetadata"]["KeyId"]
result = client.list_resource_tags(KeyId=key_id)
actual = result.get("Tags", [])
assert sort(tags) == sort(actual)
client.untag_resource(KeyId=key_id, TagKeys=["key1"])
actual = client.list_resource_tags(KeyId=key_id).get("Tags", [])
expected = [{"TagKey": "key2", "TagValue": "value2"}]
assert sort(expected) == sort(actual)
@mock_kms
def test_key_tag_added_happy():
client = boto3.client("kms", region_name="us-east-1")
key = client.create_key(Description="test-key-tagging")
key_id = key["KeyMetadata"]["KeyId"]
tags = [
{"TagKey": "key1", "TagValue": "value1"},
{"TagKey": "key2", "TagValue": "value2"},
]
client.tag_resource(KeyId=key_id, Tags=tags)
result = client.list_resource_tags(KeyId=key_id)
actual = result.get("Tags", [])
assert sort(tags) == sort(actual)
client.untag_resource(KeyId=key_id, TagKeys=["key1"])
actual = client.list_resource_tags(KeyId=key_id).get("Tags", [])
expected = [{"TagKey": "key2", "TagValue": "value2"}]
assert sort(expected) == sort(actual)
@mock_kms_deprecated
def test_key_tagging_sad():
b = KmsBackend()
try:
b.tag_resource("unknown", [])
raise "tag_resource should fail if KeyId is not known"
except JsonRESTError:
pass
try:
b.untag_resource("unknown", [])
raise "untag_resource should fail if KeyId is not known"
except JsonRESTError:
pass
try:
b.list_resource_tags("unknown")
raise "list_resource_tags should fail if KeyId is not known"
except JsonRESTError:
pass

View File

@ -102,7 +102,7 @@ def test_deserialize_ciphertext_blob(raw, serialized):
@parameterized(((ec[0],) for ec in ENCRYPTION_CONTEXT_VECTORS)) @parameterized(((ec[0],) for ec in ENCRYPTION_CONTEXT_VECTORS))
def test_encrypt_decrypt_cycle(encryption_context): def test_encrypt_decrypt_cycle(encryption_context):
plaintext = b"some secret plaintext" plaintext = b"some secret plaintext"
master_key = Key("nop", "nop", "nop", "nop", [], "nop") master_key = Key("nop", "nop", "nop", "nop", "nop")
master_key_map = {master_key.id: master_key} master_key_map = {master_key.id: master_key}
ciphertext_blob = encrypt( ciphertext_blob = encrypt(
@ -133,7 +133,7 @@ def test_encrypt_unknown_key_id():
def test_decrypt_invalid_ciphertext_format(): def test_decrypt_invalid_ciphertext_format():
master_key = Key("nop", "nop", "nop", "nop", [], "nop") master_key = Key("nop", "nop", "nop", "nop", "nop")
master_key_map = {master_key.id: master_key} master_key_map = {master_key.id: master_key}
with assert_raises(InvalidCiphertextException): with assert_raises(InvalidCiphertextException):
@ -153,7 +153,7 @@ def test_decrypt_unknwown_key_id():
def test_decrypt_invalid_ciphertext(): def test_decrypt_invalid_ciphertext():
master_key = Key("nop", "nop", "nop", "nop", [], "nop") master_key = Key("nop", "nop", "nop", "nop", "nop")
master_key_map = {master_key.id: master_key} master_key_map = {master_key.id: master_key}
ciphertext_blob = ( ciphertext_blob = (
master_key.id.encode("utf-8") + b"123456789012" master_key.id.encode("utf-8") + b"123456789012"
@ -171,7 +171,7 @@ def test_decrypt_invalid_ciphertext():
def test_decrypt_invalid_encryption_context(): def test_decrypt_invalid_encryption_context():
plaintext = b"some secret plaintext" plaintext = b"some secret plaintext"
master_key = Key("nop", "nop", "nop", "nop", [], "nop") master_key = Key("nop", "nop", "nop", "nop", "nop")
master_key_map = {master_key.id: master_key} master_key_map = {master_key.id: master_key}
ciphertext_blob = encrypt( ciphertext_blob = encrypt(

View File

@ -1,76 +1,76 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto3 import boto3
from freezegun import freeze_time from freezegun import freeze_time
import sure # noqa import sure # noqa
import re import re
from moto import mock_opsworks from moto import mock_opsworks
@freeze_time("2015-01-01") @freeze_time("2015-01-01")
@mock_opsworks @mock_opsworks
def test_create_app_response(): def test_create_app_response():
client = boto3.client("opsworks", region_name="us-east-1") client = boto3.client("opsworks", region_name="us-east-1")
stack_id = client.create_stack( stack_id = client.create_stack(
Name="test_stack_1", Name="test_stack_1",
Region="us-east-1", Region="us-east-1",
ServiceRoleArn="service_arn", ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn", DefaultInstanceProfileArn="profile_arn",
)["StackId"] )["StackId"]
response = client.create_app(StackId=stack_id, Type="other", Name="TestApp") response = client.create_app(StackId=stack_id, Type="other", Name="TestApp")
response.should.contain("AppId") response.should.contain("AppId")
second_stack_id = client.create_stack( second_stack_id = client.create_stack(
Name="test_stack_2", Name="test_stack_2",
Region="us-east-1", Region="us-east-1",
ServiceRoleArn="service_arn", ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn", DefaultInstanceProfileArn="profile_arn",
)["StackId"] )["StackId"]
response = client.create_app(StackId=second_stack_id, Type="other", Name="TestApp") response = client.create_app(StackId=second_stack_id, Type="other", Name="TestApp")
response.should.contain("AppId") response.should.contain("AppId")
# ClientError # ClientError
client.create_app.when.called_with( client.create_app.when.called_with(
StackId=stack_id, Type="other", Name="TestApp" StackId=stack_id, Type="other", Name="TestApp"
).should.throw(Exception, re.compile(r'already an app named "TestApp"')) ).should.throw(Exception, re.compile(r'already an app named "TestApp"'))
# ClientError # ClientError
client.create_app.when.called_with( client.create_app.when.called_with(
StackId="nothere", Type="other", Name="TestApp" StackId="nothere", Type="other", Name="TestApp"
).should.throw(Exception, "nothere") ).should.throw(Exception, "nothere")
@freeze_time("2015-01-01") @freeze_time("2015-01-01")
@mock_opsworks @mock_opsworks
def test_describe_apps(): def test_describe_apps():
client = boto3.client("opsworks", region_name="us-east-1") client = boto3.client("opsworks", region_name="us-east-1")
stack_id = client.create_stack( stack_id = client.create_stack(
Name="test_stack_1", Name="test_stack_1",
Region="us-east-1", Region="us-east-1",
ServiceRoleArn="service_arn", ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn", DefaultInstanceProfileArn="profile_arn",
)["StackId"] )["StackId"]
app_id = client.create_app(StackId=stack_id, Type="other", Name="TestApp")["AppId"] app_id = client.create_app(StackId=stack_id, Type="other", Name="TestApp")["AppId"]
rv1 = client.describe_apps(StackId=stack_id) rv1 = client.describe_apps(StackId=stack_id)
rv2 = client.describe_apps(AppIds=[app_id]) rv2 = client.describe_apps(AppIds=[app_id])
rv1["Apps"].should.equal(rv2["Apps"]) rv1["Apps"].should.equal(rv2["Apps"])
rv1["Apps"][0]["Name"].should.equal("TestApp") rv1["Apps"][0]["Name"].should.equal("TestApp")
# ClientError # ClientError
client.describe_apps.when.called_with( client.describe_apps.when.called_with(
StackId=stack_id, AppIds=[app_id] StackId=stack_id, AppIds=[app_id]
).should.throw(Exception, "Please provide one or more app IDs or a stack ID") ).should.throw(Exception, "Please provide one or more app IDs or a stack ID")
# ClientError # ClientError
client.describe_apps.when.called_with(StackId="nothere").should.throw( client.describe_apps.when.called_with(StackId="nothere").should.throw(
Exception, "Unable to find stack with ID nothere" Exception, "Unable to find stack with ID nothere"
) )
# ClientError # ClientError
client.describe_apps.when.called_with(AppIds=["nothere"]).should.throw( client.describe_apps.when.called_with(AppIds=["nothere"]).should.throw(
Exception, "nothere" Exception, "nothere"
) )

View File

@ -1,206 +1,206 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto3 import boto3
import sure # noqa import sure # noqa
from moto import mock_opsworks from moto import mock_opsworks
from moto import mock_ec2 from moto import mock_ec2
@mock_opsworks @mock_opsworks
def test_create_instance(): def test_create_instance():
client = boto3.client("opsworks", region_name="us-east-1") client = boto3.client("opsworks", region_name="us-east-1")
stack_id = client.create_stack( stack_id = client.create_stack(
Name="test_stack_1", Name="test_stack_1",
Region="us-east-1", Region="us-east-1",
ServiceRoleArn="service_arn", ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn", DefaultInstanceProfileArn="profile_arn",
)["StackId"] )["StackId"]
layer_id = client.create_layer( layer_id = client.create_layer(
StackId=stack_id, StackId=stack_id,
Type="custom", Type="custom",
Name="TestLayer", Name="TestLayer",
Shortname="TestLayerShortName", Shortname="TestLayerShortName",
)["LayerId"] )["LayerId"]
second_stack_id = client.create_stack( second_stack_id = client.create_stack(
Name="test_stack_2", Name="test_stack_2",
Region="us-east-1", Region="us-east-1",
ServiceRoleArn="service_arn", ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn", DefaultInstanceProfileArn="profile_arn",
)["StackId"] )["StackId"]
second_layer_id = client.create_layer( second_layer_id = client.create_layer(
StackId=second_stack_id, StackId=second_stack_id,
Type="custom", Type="custom",
Name="SecondTestLayer", Name="SecondTestLayer",
Shortname="SecondTestLayerShortName", Shortname="SecondTestLayerShortName",
)["LayerId"] )["LayerId"]
response = client.create_instance( response = client.create_instance(
StackId=stack_id, LayerIds=[layer_id], InstanceType="t2.micro" StackId=stack_id, LayerIds=[layer_id], InstanceType="t2.micro"
) )
response.should.contain("InstanceId") response.should.contain("InstanceId")
client.create_instance.when.called_with( client.create_instance.when.called_with(
StackId="nothere", LayerIds=[layer_id], InstanceType="t2.micro" StackId="nothere", LayerIds=[layer_id], InstanceType="t2.micro"
).should.throw(Exception, "Unable to find stack with ID nothere") ).should.throw(Exception, "Unable to find stack with ID nothere")
client.create_instance.when.called_with( client.create_instance.when.called_with(
StackId=stack_id, LayerIds=["nothere"], InstanceType="t2.micro" StackId=stack_id, LayerIds=["nothere"], InstanceType="t2.micro"
).should.throw(Exception, "nothere") ).should.throw(Exception, "nothere")
# ClientError # ClientError
client.create_instance.when.called_with( client.create_instance.when.called_with(
StackId=stack_id, LayerIds=[second_layer_id], InstanceType="t2.micro" StackId=stack_id, LayerIds=[second_layer_id], InstanceType="t2.micro"
).should.throw(Exception, "Please only provide layer IDs from the same stack") ).should.throw(Exception, "Please only provide layer IDs from the same stack")
# ClientError # ClientError
client.start_instance.when.called_with(InstanceId="nothere").should.throw( client.start_instance.when.called_with(InstanceId="nothere").should.throw(
Exception, "Unable to find instance with ID nothere" Exception, "Unable to find instance with ID nothere"
) )
@mock_opsworks
def test_describe_instances():
    """Exercise describe_instances filtering by stack, layer, and instance IDs.

    Layout: stack 1 has one layer holding two instances; stack 2 has two
    layers holding one and two instances respectively.
    """
    client = boto3.client("opsworks", region_name="us-east-1")

    def make_stack(name):
        # Helper: create a stack and return its ID.
        return client.create_stack(
            Name=name,
            Region="us-east-1",
            ServiceRoleArn="service_arn",
            DefaultInstanceProfileArn="profile_arn",
        )["StackId"]

    def make_layer(stack, name):
        # Helper: create a custom layer (shortname == name) and return its ID.
        return client.create_layer(
            StackId=stack, Type="custom", Name=name, Shortname=name
        )["LayerId"]

    def make_instance(stack, layer):
        # Helper: create a t2.micro instance in the layer and return its ID.
        return client.create_instance(
            StackId=stack, LayerIds=[layer], InstanceType="t2.micro"
        )["InstanceId"]

    def ids(instances):
        # Helper: extract the instance IDs from a describe_instances result.
        return [item["InstanceId"] for item in instances]

    by_id = lambda d: d["InstanceId"]

    stack1 = make_stack("S1")
    layer_s1l1 = make_layer(stack1, "S1L1")
    stack2 = make_stack("S2")
    layer_s2l1 = make_layer(stack2, "S2L1")
    layer_s2l2 = make_layer(stack2, "S2L2")

    s1_instances = [make_instance(stack1, layer_s1l1) for _ in range(2)]
    s2l1_instance = make_instance(stack2, layer_s2l1)
    s2l2_instances = [make_instance(stack2, layer_s2l2) for _ in range(2)]

    # Instances in stack 1: the stack, instance-ID, and layer views agree.
    by_stack = client.describe_instances(StackId=stack1)["Instances"]
    by_stack.should.have.length_of(2)
    for instance_id in s1_instances:
        instance_id.should.be.within(ids(by_stack))

    by_instance_ids = client.describe_instances(InstanceIds=s1_instances)["Instances"]
    sorted(by_instance_ids, key=by_id).should.equal(sorted(by_stack, key=by_id))

    by_layer = client.describe_instances(LayerId=layer_s1l1)["Instances"]
    sorted(by_layer, key=by_id).should.equal(sorted(by_stack, key=by_id))

    # Instances in stack 2.
    by_stack = client.describe_instances(StackId=stack2)["Instances"]
    by_stack.should.have.length_of(3)
    for instance_id in [s2l1_instance] + s2l2_instances:
        instance_id.should.be.within(ids(by_stack))

    by_layer = client.describe_instances(LayerId=layer_s2l1)["Instances"]
    by_layer.should.have.length_of(1)
    s2l1_instance.should.be.within(ids(by_layer))

    by_layer = client.describe_instances(LayerId=layer_s2l2)["Instances"]
    by_layer.should.have.length_of(2)
    s2l1_instance.should_not.be.within(ids(by_layer))

    # ClientError: StackId and LayerId may not be combined.
    client.describe_instances.when.called_with(
        StackId=stack1, LayerId=layer_s1l1
    ).should.throw(Exception, "Please provide either one or more")
    # ClientError: unknown stack ID.
    client.describe_instances.when.called_with(StackId="nothere").should.throw(
        Exception, "nothere"
    )
    # ClientError: unknown layer ID.
    client.describe_instances.when.called_with(LayerId="nothere").should.throw(
        Exception, "nothere"
    )
    # ClientError: unknown instance ID.
    client.describe_instances.when.called_with(InstanceIds=["nothere"]).should.throw(
        Exception, "nothere"
    )
@mock_opsworks
@mock_ec2
def test_ec2_integration():
    """Instances created through OpsWorks become visible via EC2 once started."""
    opsworks = boto3.client("opsworks", region_name="us-east-1")
    stack_id = opsworks.create_stack(
        Name="S1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]
    layer_id = opsworks.create_layer(
        StackId=stack_id, Type="custom", Name="S1L1", Shortname="S1L1"
    )["LayerId"]
    instance_id = opsworks.create_instance(
        StackId=stack_id,
        LayerIds=[layer_id],
        InstanceType="t2.micro",
        SshKeyName="testSSH",
    )["InstanceId"]

    ec2 = boto3.client("ec2", region_name="us-east-1")

    # Before the instance is started, EC2 knows nothing about it.
    ec2.describe_instances()["Reservations"].should.be.empty

    # After starting, EC2 reports one instance matching the OpsWorks view.
    opsworks.start_instance(InstanceId=instance_id)
    reservations = ec2.describe_instances()["Reservations"]
    reservations[0]["Instances"].should.have.length_of(1)
    ec2_instance = reservations[0]["Instances"][0]

    opsworks_view = opsworks.describe_instances(StackId=stack_id)["Instances"][0]
    ec2_instance["InstanceId"].should.equal(opsworks_view["Ec2InstanceId"])
    ec2_instance["PrivateIpAddress"].should.equal(opsworks_view["PrivateIp"])

View File

@ -1,96 +1,96 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto3 import boto3
from freezegun import freeze_time from freezegun import freeze_time
import sure # noqa import sure # noqa
import re import re
from moto import mock_opsworks from moto import mock_opsworks
@freeze_time("2015-01-01")
@mock_opsworks
def test_create_layer_response():
    """Layer names/shortnames may repeat across stacks but not within one."""
    client = boto3.client("opsworks", region_name="us-east-1")

    def make_stack(name):
        # Helper: create a stack and return its ID.
        return client.create_stack(
            Name=name,
            Region="us-east-1",
            ServiceRoleArn="service_arn",
            DefaultInstanceProfileArn="profile_arn",
        )["StackId"]

    first_stack = make_stack("test_stack_1")
    response = client.create_layer(
        StackId=first_stack,
        Type="custom",
        Name="TestLayer",
        Shortname="TestLayerShortName",
    )
    response.should.contain("LayerId")

    # The same layer name is acceptable inside a different stack.
    second_stack = make_stack("test_stack_2")
    response = client.create_layer(
        StackId=second_stack,
        Type="custom",
        Name="TestLayer",
        Shortname="TestLayerShortName",
    )
    response.should.contain("LayerId")

    # ClientError: duplicate layer name within the same stack.
    client.create_layer.when.called_with(
        StackId=first_stack, Type="custom", Name="TestLayer", Shortname="_"
    ).should.throw(Exception, re.compile(r'already a layer named "TestLayer"'))
    # ClientError: duplicate shortname within the same stack.
    client.create_layer.when.called_with(
        StackId=first_stack, Type="custom", Name="_", Shortname="TestLayerShortName"
    ).should.throw(
        Exception, re.compile(r'already a layer with shortname "TestLayerShortName"')
    )
    # ClientError: unknown stack ID.
    client.create_layer.when.called_with(
        StackId="nothere", Type="custom", Name="TestLayer", Shortname="_"
    ).should.throw(Exception, "nothere")
@freeze_time("2015-01-01")
@mock_opsworks
def test_describe_layers():
    """describe_layers accepts a stack ID or layer IDs, but not both."""
    client = boto3.client("opsworks", region_name="us-east-1")
    stack_id = client.create_stack(
        Name="test_stack_1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]
    layer_id = client.create_layer(
        StackId=stack_id,
        Type="custom",
        Name="TestLayer",
        Shortname="TestLayerShortName",
    )["LayerId"]

    # Querying by stack and by layer ID yields the same single layer.
    by_stack = client.describe_layers(StackId=stack_id)
    by_layer_ids = client.describe_layers(LayerIds=[layer_id])
    by_stack["Layers"].should.equal(by_layer_ids["Layers"])
    by_stack["Layers"][0]["Name"].should.equal("TestLayer")

    # ClientError: supplying both filters is rejected.
    client.describe_layers.when.called_with(
        StackId=stack_id, LayerIds=[layer_id]
    ).should.throw(Exception, "Please provide one or more layer IDs or a stack ID")
    # ClientError: unknown stack ID.
    client.describe_layers.when.called_with(StackId="nothere").should.throw(
        Exception, "Unable to find stack with ID nothere"
    )
    # ClientError: unknown layer ID.
    client.describe_layers.when.called_with(LayerIds=["nothere"]).should.throw(
        Exception, "nothere"
    )

View File

@ -1,263 +1,263 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from botocore.exceptions import ClientError from botocore.exceptions import ClientError
import boto3 import boto3
import sure # noqa import sure # noqa
from nose.tools import assert_raises from nose.tools import assert_raises
from moto import mock_polly from moto import mock_polly
# Polly is only available in a subset of AWS regions.
DEFAULT_REGION = "eu-west-1"

# Minimal valid PLS lexicon document (aliases "W3C") shared by the
# lexicon and speech-synthesis tests below.
LEXICON_XML = """<?xml version="1.0" encoding="UTF-8"?>
<lexicon version="1.0"
      xmlns="http://www.w3.org/2005/01/pronunciation-lexicon"
      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
      xsi:schemaLocation="http://www.w3.org/2005/01/pronunciation-lexicon
        http://www.w3.org/TR/2007/CR-pronunciation-lexicon-20071212/pls.xsd"
      alphabet="ipa"
      xml:lang="en-US">
  <lexeme>
    <grapheme>W3C</grapheme>
    <alias>World Wide Web Consortium</alias>
  </lexeme>
</lexicon>"""
@mock_polly
def test_describe_voices():
    """describe_voices lists voices and filters by a valid language code."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)

    resp = client.describe_voices()
    len(resp["Voices"]).should.be.greater_than(1)

    resp = client.describe_voices(LanguageCode="en-GB")
    len(resp["Voices"]).should.equal(3)

    # An unknown language code must be rejected with a 400 error.
    try:
        client.describe_voices(LanguageCode="SOME_LANGUAGE")
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("400")
    else:
        raise RuntimeError("Should have raised an exception")
@mock_polly
def test_put_list_lexicon():
    """A stored lexicon shows up in list_lexicons."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)

    # put_lexicon returns no payload of interest.
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    lexicons = client.list_lexicons()["Lexicons"]
    len(lexicons).should.equal(1)
@mock_polly
def test_put_get_lexicon():
    """A stored lexicon can be fetched back along with its attributes."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)

    # put_lexicon returns no payload of interest.
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    fetched = client.get_lexicon(Name="test")
    fetched.should.contain("Lexicon")
    fetched.should.contain("LexiconAttributes")
@mock_polly
def test_put_lexicon_bad_name():
    """Lexicon names containing invalid characters (here '-') are rejected."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)

    try:
        client.put_lexicon(Name="test-invalid", Content=LEXICON_XML)
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("InvalidParameterValue")
    else:
        raise RuntimeError("Should have raised InvalidParameterValue")
@mock_polly
def test_synthesize_speech():
    """Each supported output format maps to its expected content type."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)

    # put_lexicon returns no payload of interest.
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    expected_content_types = {
        "pcm": "audio/pcm",
        "mp3": "audio/mpeg",
        "ogg_vorbis": "audio/ogg",
    }
    for output_format, content_type in expected_content_types.items():
        resp = client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat=output_format,
            SampleRate="16000",
            Text="test1234",
            TextType="text",
            VoiceId="Astrid",
        )
        resp["ContentType"].should.equal(content_type)
@mock_polly
def test_synthesize_speech_bad_lexicon():
    """Referencing an unknown lexicon raises LexiconNotFoundException."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    try:
        client.synthesize_speech(
            LexiconNames=["test2"],  # deliberately not the stored lexicon
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234",
            TextType="text",
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("LexiconNotFoundException")
    else:
        raise RuntimeError("Should have raised LexiconNotFoundException")
@mock_polly
def test_synthesize_speech_bad_output_format():
    """An unsupported output format raises InvalidParameterValue."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="invalid",
            SampleRate="16000",
            Text="test1234",
            TextType="text",
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("InvalidParameterValue")
    else:
        raise RuntimeError("Should have raised InvalidParameterValue")
@mock_polly
def test_synthesize_speech_bad_sample_rate():
    """An unsupported sample rate raises InvalidSampleRateException."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="18000",  # not a valid Polly sample rate
            Text="test1234",
            TextType="text",
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("InvalidSampleRateException")
    else:
        raise RuntimeError("Should have raised InvalidSampleRateException")
@mock_polly
def test_synthesize_speech_bad_text_type():
    """A text type other than 'text'/'ssml' raises InvalidParameterValue."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234",
            TextType="invalid",
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("InvalidParameterValue")
    else:
        raise RuntimeError("Should have raised InvalidParameterValue")
@mock_polly
def test_synthesize_speech_bad_voice_id():
    """An unknown voice ID raises InvalidParameterValue."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234",
            TextType="text",
            VoiceId="Luke",  # not a real Polly voice
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("InvalidParameterValue")
    else:
        raise RuntimeError("Should have raised InvalidParameterValue")
@mock_polly
def test_synthesize_speech_text_too_long():
    """Text beyond the length limit raises TextLengthExceededException."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234" * 376,  # = 3008 characters, over the limit
            TextType="text",
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("TextLengthExceededException")
    else:
        raise RuntimeError("Should have raised TextLengthExceededException")
@mock_polly
def test_synthesize_speech_bad_speech_marks1():
    """Speech marks with plain-text input + pcm output are unsupported."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234",
            TextType="text",
            SpeechMarkTypes=["word"],
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal(
            "MarksNotSupportedForFormatException"
        )
    else:
        raise RuntimeError("Should have raised MarksNotSupportedForFormatException")
@mock_polly
def test_synthesize_speech_bad_speech_marks2():
    """Speech marks with SSML input + pcm output are unsupported."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234",
            TextType="ssml",
            SpeechMarkTypes=["word"],
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal(
            "MarksNotSupportedForFormatException"
        )
    else:
        raise RuntimeError("Should have raised MarksNotSupportedForFormatException")

View File

@ -5,6 +5,7 @@ import datetime
import os import os
import sys import sys
from boto3 import Session
from six.moves.urllib.request import urlopen from six.moves.urllib.request import urlopen
from six.moves.urllib.error import HTTPError from six.moves.urllib.error import HTTPError
from functools import wraps from functools import wraps
@ -1135,6 +1136,380 @@ if not settings.TEST_SERVER_MODE:
"The unspecified location constraint is incompatible for the region specific endpoint this request was sent to." "The unspecified location constraint is incompatible for the region specific endpoint this request was sent to."
) )
# All tests for s3-control cannot be run under the server without a modification of the
# hosts file on your system. This is due to the fact that the URL to the host is in the form of:
# ACCOUNT_ID.s3-control.amazonaws.com <-- That Account ID part is the problem. If you want to
# make use of the moto server, update your hosts file for `THE_ACCOUNT_ID_FOR_MOTO.localhost`
# and this will work fine.
@mock_s3
def test_get_public_access_block_for_account():
    """Round-trip the account-level Public Access Block through s3control."""
    from moto.s3.models import ACCOUNT_ID

    client = boto3.client("s3control", region_name="us-west-2")
    full_pab = {
        "BlockPublicAcls": True,
        "IgnorePublicAcls": True,
        "BlockPublicPolicy": True,
        "RestrictPublicBuckets": True,
    }

    def error_code(raised):
        # Helper: pull the AWS error code out of an assert_raises context.
        return raised.exception.response["Error"]["Code"]

    # Reads with a foreign account ID are denied.
    with assert_raises(ClientError) as raised:
        client.get_public_access_block(AccountId="111111111111")
    assert error_code(raised) == "AccessDenied"

    # Nothing is configured yet for moto's account.
    with assert_raises(ClientError) as raised:
        client.get_public_access_block(AccountId=ACCOUNT_ID)
    assert error_code(raised) == "NoSuchPublicAccessBlockConfiguration"

    # Writes with a foreign account ID are denied.
    with assert_raises(ClientError) as raised:
        client.put_public_access_block(
            AccountId="111111111111",
            PublicAccessBlockConfiguration={"BlockPublicAcls": True},
        )
    assert error_code(raised) == "AccessDenied"

    # An empty configuration is rejected.
    with assert_raises(ClientError) as raised:
        client.put_public_access_block(
            AccountId=ACCOUNT_ID, PublicAccessBlockConfiguration={}
        )
    assert error_code(raised) == "InvalidRequest"
    assert (
        "Must specify at least one configuration."
        in raised.exception.response["Error"]["Message"]
    )

    # Store a complete configuration...
    client.put_public_access_block(
        AccountId=ACCOUNT_ID, PublicAccessBlockConfiguration=full_pab
    )

    # ...and read it back from every s3control region.
    for region in Session().get_available_regions("s3control"):
        region_client = boto3.client("s3control", region_name=region)
        result = region_client.get_public_access_block(AccountId=ACCOUNT_ID)
        assert result["PublicAccessBlockConfiguration"] == full_pab

    # Deletes with a foreign account ID are denied.
    with assert_raises(ClientError) as raised:
        client.delete_public_access_block(AccountId="111111111111")
    assert error_code(raised) == "AccessDenied"

    # Delete for the real account and confirm it is gone.
    client.delete_public_access_block(AccountId=ACCOUNT_ID)
    with assert_raises(ClientError) as raised:
        client.get_public_access_block(AccountId=ACCOUNT_ID)
    assert error_code(raised) == "NoSuchPublicAccessBlockConfiguration"
@mock_s3
@mock_config
def test_config_list_account_pab():
    """The account-level Public Access Block is listable through AWS Config."""
    from moto.s3.models import ACCOUNT_ID

    s3control = boto3.client("s3control", region_name="us-west-2")
    config_client = boto3.client("config", region_name="us-west-2")

    # Set up an aggregator spanning every region of this account.
    config_client.put_configuration_aggregator(
        ConfigurationAggregatorName="testing",
        AccountAggregationSources=[
            {"AccountIds": [ACCOUNT_ID], "AllAwsRegions": True}
        ],
    )

    # Before a PAB exists, neither listing returns anything.
    assert not config_client.list_discovered_resources(
        resourceType="AWS::S3::AccountPublicAccessBlock"
    )["resourceIdentifiers"]
    assert not config_client.list_aggregate_discovered_resources(
        ResourceType="AWS::S3::AccountPublicAccessBlock",
        ConfigurationAggregatorName="testing",
    )["ResourceIdentifiers"]

    # Create the PAB.
    s3control.put_public_access_block(
        AccountId=ACCOUNT_ID,
        PublicAccessBlockConfiguration={
            "BlockPublicAcls": True,
            "IgnorePublicAcls": True,
            "BlockPublicPolicy": True,
            "RestrictPublicBuckets": True,
        },
    )

    # Non-aggregated listings (bare, with matching ids, with empty name filter)
    # all return the single account-level identifier.
    expected_identifier = {
        "resourceType": "AWS::S3::AccountPublicAccessBlock",
        "resourceId": ACCOUNT_ID,
    }
    for extra_kwargs in (
        {},
        {"resourceIds": [ACCOUNT_ID, "nope"]},
        {"resourceName": ""},
    ):
        listing = config_client.list_discovered_resources(
            resourceType="AWS::S3::AccountPublicAccessBlock", **extra_kwargs
        )
        assert listing["resourceIdentifiers"] == [expected_identifier]

    # The aggregated listing yields one identifier per available Config region.
    aggregated = config_client.list_aggregate_discovered_resources(
        ResourceType="AWS::S3::AccountPublicAccessBlock",
        ConfigurationAggregatorName="testing",
    )
    remaining_regions = set(Session().get_available_regions("config"))
    for identifier in aggregated["ResourceIdentifiers"]:
        # Each region must appear exactly once (remove() raises on repeats).
        remaining_regions.remove(identifier.pop("SourceRegion"))
        assert identifier == {
            "ResourceType": "AWS::S3::AccountPublicAccessBlock",
            "SourceAccountId": ACCOUNT_ID,
            "ResourceId": ACCOUNT_ID,
        }

    # Matching filters keep the full per-region result set (length check only --
    # this should be a reasonable proxy for full equality).
    regions = set(Session().get_available_regions("config"))
    for filters in (
        {"ResourceName": ""},
        {"ResourceName": "", "ResourceId": ACCOUNT_ID},
    ):
        listing = config_client.list_aggregate_discovered_resources(
            ResourceType="AWS::S3::AccountPublicAccessBlock",
            ConfigurationAggregatorName="testing",
            Filters=filters,
        )
        assert len(listing["ResourceIdentifiers"]) == len(regions)

    # A Region filter narrows the listing down to that single region.
    listing = config_client.list_aggregate_discovered_resources(
        ResourceType="AWS::S3::AccountPublicAccessBlock",
        ConfigurationAggregatorName="testing",
        Filters={
            "ResourceName": "",
            "ResourceId": ACCOUNT_ID,
            "Region": "us-west-2",
        },
    )
    assert len(listing["ResourceIdentifiers"]) == 1
    assert listing["ResourceIdentifiers"][0]["SourceRegion"] == "us-west-2"

    # Aggregator pagination walks the regions in sorted order, one per page.
    sorted_regions = sorted(Session().get_available_regions("config"))

    def expected_page_item(region):
        # Identifier expected on the page for the given source region.
        return {
            "ResourceType": "AWS::S3::AccountPublicAccessBlock",
            "SourceAccountId": ACCOUNT_ID,
            "ResourceId": ACCOUNT_ID,
            "SourceRegion": region,
        }

    page = config_client.list_aggregate_discovered_resources(
        ResourceType="AWS::S3::AccountPublicAccessBlock",
        ConfigurationAggregatorName="testing",
        Limit=1,
    )
    assert page["ResourceIdentifiers"][0] == expected_page_item(sorted_regions[0])
    assert page["NextToken"] == sorted_regions[1]

    # Fetch the next page with the returned token:
    page = config_client.list_aggregate_discovered_resources(
        ResourceType="AWS::S3::AccountPublicAccessBlock",
        ConfigurationAggregatorName="testing",
        Limit=1,
        NextToken=sorted_regions[1],
    )
    assert page["ResourceIdentifiers"][0] == expected_page_item(sorted_regions[1])

    # Non-aggregated listings with non-matching filters are empty.
    for extra_kwargs in ({"resourceName": "nope"}, {"resourceIds": ["nope"]}):
        listing = config_client.list_discovered_resources(
            resourceType="AWS::S3::AccountPublicAccessBlock", **extra_kwargs
        )
        assert not listing["resourceIdentifiers"]

    # ... as are aggregated listings with non-matching filters.
    for filters in (
        {"ResourceName": "nope"},
        {"ResourceId": "nope"},
        {"Region": "Nope"},
    ):
        listing = config_client.list_aggregate_discovered_resources(
            ResourceType="AWS::S3::AccountPublicAccessBlock",
            ConfigurationAggregatorName="testing",
            Filters=filters,
        )
        assert not listing["ResourceIdentifiers"]
@mock_s3
@mock_config
def test_config_get_account_pab():
    """The account Public Access Block is retrievable through AWS Config.

    Covers get_resource_config_history, batch_get_resource_config, and
    batch_get_aggregate_resource_config, both before and after the PAB exists.
    """
    from moto.s3.models import ACCOUNT_ID

    client = boto3.client("s3control", region_name="us-west-2")
    config_client = boto3.client("config", region_name="us-west-2")

    # Create an aggregator spanning every region of this account:
    account_aggregation_source = {
        "AccountIds": [ACCOUNT_ID],
        "AllAwsRegions": True,
    }
    config_client.put_configuration_aggregator(
        ConfigurationAggregatorName="testing",
        AccountAggregationSources=[account_aggregation_source],
    )

    # Without a PAB in place, history lookups fail ...
    with assert_raises(ClientError) as ce:
        config_client.get_resource_config_history(
            resourceType="AWS::S3::AccountPublicAccessBlock", resourceId=ACCOUNT_ID
        )
    assert (
        ce.exception.response["Error"]["Code"] == "ResourceNotDiscoveredException"
    )

    # ... and batch lookups come back empty.
    # Fix: query with the real account ID -- previously the literal string
    # "ACCOUNT_ID" was sent here, which would keep this assertion vacuously
    # true even after the PAB is created.
    result = config_client.batch_get_resource_config(
        resourceKeys=[
            {
                "resourceType": "AWS::S3::AccountPublicAccessBlock",
                "resourceId": ACCOUNT_ID,
            }
        ]
    )
    assert not result["baseConfigurationItems"]

    # Aggregated batch lookup is likewise empty before the PAB exists:
    result = config_client.batch_get_aggregate_resource_config(
        ConfigurationAggregatorName="testing",
        ResourceIdentifiers=[
            {
                "SourceAccountId": ACCOUNT_ID,
                "SourceRegion": "us-west-2",
                "ResourceId": ACCOUNT_ID,
                "ResourceType": "AWS::S3::AccountPublicAccessBlock",
                "ResourceName": "",
            }
        ],
    )
    assert not result["BaseConfigurationItems"]

    # Create a PAB:
    client.put_public_access_block(
        AccountId=ACCOUNT_ID,
        PublicAccessBlockConfiguration={
            "BlockPublicAcls": True,
            "IgnorePublicAcls": True,
            "BlockPublicPolicy": True,
            "RestrictPublicBuckets": True,
        },
    )

    # The configuration as Config renders it (note the camelCase keys):
    proper_config = {
        "blockPublicAcls": True,
        "ignorePublicAcls": True,
        "blockPublicPolicy": True,
        "restrictPublicBuckets": True,
    }

    # History lookup now returns the PAB, owned by this account:
    result = config_client.get_resource_config_history(
        resourceType="AWS::S3::AccountPublicAccessBlock", resourceId=ACCOUNT_ID
    )
    assert (
        json.loads(result["configurationItems"][0]["configuration"])
        == proper_config
    )
    assert (
        result["configurationItems"][0]["accountId"]
        == result["configurationItems"][0]["resourceId"]
        == ACCOUNT_ID
    )

    # Plain batch lookup returns exactly one matching item:
    result = config_client.batch_get_resource_config(
        resourceKeys=[
            {
                "resourceType": "AWS::S3::AccountPublicAccessBlock",
                "resourceId": ACCOUNT_ID,
            }
        ]
    )
    assert len(result["baseConfigurationItems"]) == 1
    assert (
        json.loads(result["baseConfigurationItems"][0]["configuration"])
        == proper_config
    )
    assert (
        result["baseConfigurationItems"][0]["accountId"]
        == result["baseConfigurationItems"][0]["resourceId"]
        == ACCOUNT_ID
    )

    # Aggregated batch lookup finds the PAB in every s3control region:
    for region in Session().get_available_regions("s3control"):
        result = config_client.batch_get_aggregate_resource_config(
            ConfigurationAggregatorName="testing",
            ResourceIdentifiers=[
                {
                    "SourceAccountId": ACCOUNT_ID,
                    "SourceRegion": region,
                    "ResourceId": ACCOUNT_ID,
                    "ResourceType": "AWS::S3::AccountPublicAccessBlock",
                    "ResourceName": "",
                }
            ],
        )
        assert len(result["BaseConfigurationItems"]) == 1
        assert (
            json.loads(result["BaseConfigurationItems"][0]["configuration"])
            == proper_config
        )
@mock_s3_deprecated @mock_s3_deprecated
def test_ranged_get(): def test_ranged_get():
@ -1768,6 +2143,34 @@ def test_boto3_copy_object_from_unversioned_to_versioned_bucket():
obj2_version_new.should_not.equal(None) obj2_version_new.should_not.equal(None)
@mock_s3
def test_boto3_copy_object_with_replacement_tagging():
    """copy_object honours TaggingDirective: REPLACE swaps tags, COPY keeps them."""
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")
    s3.put_object(
        Bucket="mybucket", Key="original", Body=b"test", Tagging="tag=old"
    )

    source = {"Bucket": "mybucket", "Key": "original"}
    # REPLACE installs the new tag set supplied on the copy request ...
    s3.copy_object(
        CopySource=source,
        Bucket="mybucket",
        Key="copy1",
        TaggingDirective="REPLACE",
        Tagging="tag=new",
    )
    # ... while COPY carries the source object's tags over unchanged.
    s3.copy_object(
        CopySource=source,
        Bucket="mybucket",
        Key="copy2",
        TaggingDirective="COPY",
    )

    replaced = s3.get_object_tagging(Bucket="mybucket", Key="copy1")["TagSet"]
    replaced.should.equal([{"Key": "tag", "Value": "new"}])
    copied = s3.get_object_tagging(Bucket="mybucket", Key="copy2")["TagSet"]
    copied.should.equal([{"Key": "tag", "Value": "old"}])
@mock_s3 @mock_s3
def test_boto3_deleted_versionings_list(): def test_boto3_deleted_versionings_list():
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME) client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)

View File

@ -1,107 +1,107 @@
from collections import namedtuple from collections import namedtuple
import sure # noqa import sure # noqa
from moto.swf.exceptions import SWFUnknownResourceFault from moto.swf.exceptions import SWFUnknownResourceFault
from moto.swf.models import Domain from moto.swf.models import Domain
# Ensure 'assert_raises' context manager support for Python 2.6 # Ensure 'assert_raises' context manager support for Python 2.6
import tests.backport_assert_raises # noqa import tests.backport_assert_raises # noqa
# Fake WorkflowExecution for tests purposes # Fake WorkflowExecution for tests purposes
WorkflowExecution = namedtuple( WorkflowExecution = namedtuple(
"WorkflowExecution", ["workflow_id", "run_id", "execution_status", "open"] "WorkflowExecution", ["workflow_id", "run_id", "execution_status", "open"]
) )
def test_domain_short_dict_representation(): def test_domain_short_dict_representation():
domain = Domain("foo", "52") domain = Domain("foo", "52")
domain.to_short_dict().should.equal({"name": "foo", "status": "REGISTERED"}) domain.to_short_dict().should.equal({"name": "foo", "status": "REGISTERED"})
domain.description = "foo bar" domain.description = "foo bar"
domain.to_short_dict()["description"].should.equal("foo bar") domain.to_short_dict()["description"].should.equal("foo bar")
def test_domain_full_dict_representation(): def test_domain_full_dict_representation():
domain = Domain("foo", "52") domain = Domain("foo", "52")
domain.to_full_dict()["domainInfo"].should.equal(domain.to_short_dict()) domain.to_full_dict()["domainInfo"].should.equal(domain.to_short_dict())
_config = domain.to_full_dict()["configuration"] _config = domain.to_full_dict()["configuration"]
_config["workflowExecutionRetentionPeriodInDays"].should.equal("52") _config["workflowExecutionRetentionPeriodInDays"].should.equal("52")
def test_domain_string_representation(): def test_domain_string_representation():
domain = Domain("my-domain", "60") domain = Domain("my-domain", "60")
str(domain).should.equal("Domain(name: my-domain, status: REGISTERED)") str(domain).should.equal("Domain(name: my-domain, status: REGISTERED)")
def test_domain_add_to_activity_task_list(): def test_domain_add_to_activity_task_list():
domain = Domain("my-domain", "60") domain = Domain("my-domain", "60")
domain.add_to_activity_task_list("foo", "bar") domain.add_to_activity_task_list("foo", "bar")
domain.activity_task_lists.should.equal({"foo": ["bar"]}) domain.activity_task_lists.should.equal({"foo": ["bar"]})
def test_domain_activity_tasks(): def test_domain_activity_tasks():
domain = Domain("my-domain", "60") domain = Domain("my-domain", "60")
domain.add_to_activity_task_list("foo", "bar") domain.add_to_activity_task_list("foo", "bar")
domain.add_to_activity_task_list("other", "baz") domain.add_to_activity_task_list("other", "baz")
sorted(domain.activity_tasks).should.equal(["bar", "baz"]) sorted(domain.activity_tasks).should.equal(["bar", "baz"])
def test_domain_add_to_decision_task_list(): def test_domain_add_to_decision_task_list():
domain = Domain("my-domain", "60") domain = Domain("my-domain", "60")
domain.add_to_decision_task_list("foo", "bar") domain.add_to_decision_task_list("foo", "bar")
domain.decision_task_lists.should.equal({"foo": ["bar"]}) domain.decision_task_lists.should.equal({"foo": ["bar"]})
def test_domain_decision_tasks(): def test_domain_decision_tasks():
domain = Domain("my-domain", "60") domain = Domain("my-domain", "60")
domain.add_to_decision_task_list("foo", "bar") domain.add_to_decision_task_list("foo", "bar")
domain.add_to_decision_task_list("other", "baz") domain.add_to_decision_task_list("other", "baz")
sorted(domain.decision_tasks).should.equal(["bar", "baz"]) sorted(domain.decision_tasks).should.equal(["bar", "baz"])
def test_domain_get_workflow_execution(): def test_domain_get_workflow_execution():
domain = Domain("my-domain", "60") domain = Domain("my-domain", "60")
wfe1 = WorkflowExecution( wfe1 = WorkflowExecution(
workflow_id="wf-id-1", run_id="run-id-1", execution_status="OPEN", open=True workflow_id="wf-id-1", run_id="run-id-1", execution_status="OPEN", open=True
) )
wfe2 = WorkflowExecution( wfe2 = WorkflowExecution(
workflow_id="wf-id-1", run_id="run-id-2", execution_status="CLOSED", open=False workflow_id="wf-id-1", run_id="run-id-2", execution_status="CLOSED", open=False
) )
wfe3 = WorkflowExecution( wfe3 = WorkflowExecution(
workflow_id="wf-id-2", run_id="run-id-3", execution_status="OPEN", open=True workflow_id="wf-id-2", run_id="run-id-3", execution_status="OPEN", open=True
) )
wfe4 = WorkflowExecution( wfe4 = WorkflowExecution(
workflow_id="wf-id-3", run_id="run-id-4", execution_status="CLOSED", open=False workflow_id="wf-id-3", run_id="run-id-4", execution_status="CLOSED", open=False
) )
domain.workflow_executions = [wfe1, wfe2, wfe3, wfe4] domain.workflow_executions = [wfe1, wfe2, wfe3, wfe4]
# get workflow execution through workflow_id and run_id # get workflow execution through workflow_id and run_id
domain.get_workflow_execution("wf-id-1", run_id="run-id-1").should.equal(wfe1) domain.get_workflow_execution("wf-id-1", run_id="run-id-1").should.equal(wfe1)
domain.get_workflow_execution("wf-id-1", run_id="run-id-2").should.equal(wfe2) domain.get_workflow_execution("wf-id-1", run_id="run-id-2").should.equal(wfe2)
domain.get_workflow_execution("wf-id-3", run_id="run-id-4").should.equal(wfe4) domain.get_workflow_execution("wf-id-3", run_id="run-id-4").should.equal(wfe4)
domain.get_workflow_execution.when.called_with( domain.get_workflow_execution.when.called_with(
"wf-id-1", run_id="non-existent" "wf-id-1", run_id="non-existent"
).should.throw(SWFUnknownResourceFault) ).should.throw(SWFUnknownResourceFault)
# get OPEN workflow execution by default if no run_id # get OPEN workflow execution by default if no run_id
domain.get_workflow_execution("wf-id-1").should.equal(wfe1) domain.get_workflow_execution("wf-id-1").should.equal(wfe1)
domain.get_workflow_execution.when.called_with("wf-id-3").should.throw( domain.get_workflow_execution.when.called_with("wf-id-3").should.throw(
SWFUnknownResourceFault SWFUnknownResourceFault
) )
domain.get_workflow_execution.when.called_with("wf-id-non-existent").should.throw( domain.get_workflow_execution.when.called_with("wf-id-non-existent").should.throw(
SWFUnknownResourceFault SWFUnknownResourceFault
) )
# raise_if_closed attribute # raise_if_closed attribute
domain.get_workflow_execution( domain.get_workflow_execution(
"wf-id-1", run_id="run-id-1", raise_if_closed=True "wf-id-1", run_id="run-id-1", raise_if_closed=True
).should.equal(wfe1) ).should.equal(wfe1)
domain.get_workflow_execution.when.called_with( domain.get_workflow_execution.when.called_with(
"wf-id-3", run_id="run-id-4", raise_if_closed=True "wf-id-3", run_id="run-id-4", raise_if_closed=True
).should.throw(SWFUnknownResourceFault) ).should.throw(SWFUnknownResourceFault)
# raise_if_none attribute # raise_if_none attribute
domain.get_workflow_execution("foo", raise_if_none=False).should.be.none domain.get_workflow_execution("foo", raise_if_none=False).should.be.none

View File

@ -1,19 +1,19 @@
from freezegun import freeze_time from freezegun import freeze_time
import sure # noqa import sure # noqa
from moto.swf.models import Timeout from moto.swf.models import Timeout
from ..utils import make_workflow_execution from ..utils import make_workflow_execution
def test_timeout_creation():
    """Timeout.reached flips to true once the clock hits the deadline."""
    execution = make_workflow_execution()
    # epoch 1420117200 == "2015-01-01 13:00:00" UTC
    timeout = Timeout(execution, 1420117200, "START_TO_CLOSE")

    # One hour before the deadline the timeout has not been reached ...
    with freeze_time("2015-01-01 12:00:00"):
        timeout.reached.should.be.falsy
    # ... and exactly at the deadline it has.
    with freeze_time("2015-01-01 13:00:00"):
        timeout.reached.should.be.truthy

View File

@ -148,6 +148,39 @@ def test_workflow_execution_full_dict_representation():
) )
def test_closed_workflow_execution_full_dict_representation():
    """A closed execution's full dict mirrors the medium dict plus close details."""
    domain = get_basic_domain()
    wf_type = WorkflowType(
        "test-workflow",
        "v1.0",
        task_list="queue",
        default_child_policy="ABANDON",
        default_execution_start_to_close_timeout="300",
        default_task_start_to_close_timeout="300",
    )
    execution = WorkflowExecution(domain, wf_type, "ab1234")
    execution.execution_status = "CLOSED"
    execution.close_status = "CANCELED"
    execution.close_timestamp = 1420066801.123

    full = execution.to_full_dict()

    # executionInfo is the medium dict augmented with the close fields.
    expected_info = execution.to_medium_dict()
    expected_info["closeStatus"] = "CANCELED"
    expected_info["closeTimestamp"] = 1420066801.123
    full["executionInfo"].should.equal(expected_info)

    # A closed execution has nothing open.
    for counter in ("openTimers", "openDecisionTasks", "openActivityTasks"):
        full["openCounts"][counter].should.equal(0)

    # The configuration reflects the workflow type's defaults.
    full["executionConfiguration"].should.equal(
        {
            "childPolicy": "ABANDON",
            "executionStartToCloseTimeout": "300",
            "taskList": {"name": "queue"},
            "taskStartToCloseTimeout": "300",
        }
    )
def test_workflow_execution_list_dict_representation(): def test_workflow_execution_list_dict_representation():
domain = get_basic_domain() domain = get_basic_domain()
wf_type = WorkflowType( wf_type = WorkflowType(

View File

@ -1,114 +1,114 @@
import boto import boto
from boto.swf.exceptions import SWFResponseError from boto.swf.exceptions import SWFResponseError
import sure # noqa import sure # noqa
from moto import mock_swf_deprecated from moto import mock_swf_deprecated
# RegisterDomain endpoint # RegisterDomain endpoint
@mock_swf_deprecated @mock_swf_deprecated
def test_register_domain(): def test_register_domain():
conn = boto.connect_swf("the_key", "the_secret") conn = boto.connect_swf("the_key", "the_secret")
conn.register_domain("test-domain", "60", description="A test domain") conn.register_domain("test-domain", "60", description="A test domain")
all_domains = conn.list_domains("REGISTERED") all_domains = conn.list_domains("REGISTERED")
domain = all_domains["domainInfos"][0] domain = all_domains["domainInfos"][0]
domain["name"].should.equal("test-domain") domain["name"].should.equal("test-domain")
domain["status"].should.equal("REGISTERED") domain["status"].should.equal("REGISTERED")
domain["description"].should.equal("A test domain") domain["description"].should.equal("A test domain")
@mock_swf_deprecated @mock_swf_deprecated
def test_register_already_existing_domain(): def test_register_already_existing_domain():
conn = boto.connect_swf("the_key", "the_secret") conn = boto.connect_swf("the_key", "the_secret")
conn.register_domain("test-domain", "60", description="A test domain") conn.register_domain("test-domain", "60", description="A test domain")
conn.register_domain.when.called_with( conn.register_domain.when.called_with(
"test-domain", "60", description="A test domain" "test-domain", "60", description="A test domain"
).should.throw(SWFResponseError) ).should.throw(SWFResponseError)
@mock_swf_deprecated @mock_swf_deprecated
def test_register_with_wrong_parameter_type(): def test_register_with_wrong_parameter_type():
conn = boto.connect_swf("the_key", "the_secret") conn = boto.connect_swf("the_key", "the_secret")
conn.register_domain.when.called_with( conn.register_domain.when.called_with(
"test-domain", 60, description="A test domain" "test-domain", 60, description="A test domain"
).should.throw(SWFResponseError) ).should.throw(SWFResponseError)
# ListDomains endpoint # ListDomains endpoint
@mock_swf_deprecated @mock_swf_deprecated
def test_list_domains_order(): def test_list_domains_order():
conn = boto.connect_swf("the_key", "the_secret") conn = boto.connect_swf("the_key", "the_secret")
conn.register_domain("b-test-domain", "60") conn.register_domain("b-test-domain", "60")
conn.register_domain("a-test-domain", "60") conn.register_domain("a-test-domain", "60")
conn.register_domain("c-test-domain", "60") conn.register_domain("c-test-domain", "60")
all_domains = conn.list_domains("REGISTERED") all_domains = conn.list_domains("REGISTERED")
names = [domain["name"] for domain in all_domains["domainInfos"]] names = [domain["name"] for domain in all_domains["domainInfos"]]
names.should.equal(["a-test-domain", "b-test-domain", "c-test-domain"]) names.should.equal(["a-test-domain", "b-test-domain", "c-test-domain"])
@mock_swf_deprecated @mock_swf_deprecated
def test_list_domains_reverse_order(): def test_list_domains_reverse_order():
conn = boto.connect_swf("the_key", "the_secret") conn = boto.connect_swf("the_key", "the_secret")
conn.register_domain("b-test-domain", "60") conn.register_domain("b-test-domain", "60")
conn.register_domain("a-test-domain", "60") conn.register_domain("a-test-domain", "60")
conn.register_domain("c-test-domain", "60") conn.register_domain("c-test-domain", "60")
all_domains = conn.list_domains("REGISTERED", reverse_order=True) all_domains = conn.list_domains("REGISTERED", reverse_order=True)
names = [domain["name"] for domain in all_domains["domainInfos"]] names = [domain["name"] for domain in all_domains["domainInfos"]]
names.should.equal(["c-test-domain", "b-test-domain", "a-test-domain"]) names.should.equal(["c-test-domain", "b-test-domain", "a-test-domain"])
# DeprecateDomain endpoint # DeprecateDomain endpoint
@mock_swf_deprecated @mock_swf_deprecated
def test_deprecate_domain(): def test_deprecate_domain():
conn = boto.connect_swf("the_key", "the_secret") conn = boto.connect_swf("the_key", "the_secret")
conn.register_domain("test-domain", "60", description="A test domain") conn.register_domain("test-domain", "60", description="A test domain")
conn.deprecate_domain("test-domain") conn.deprecate_domain("test-domain")
all_domains = conn.list_domains("DEPRECATED") all_domains = conn.list_domains("DEPRECATED")
domain = all_domains["domainInfos"][0] domain = all_domains["domainInfos"][0]
domain["name"].should.equal("test-domain") domain["name"].should.equal("test-domain")
@mock_swf_deprecated @mock_swf_deprecated
def test_deprecate_already_deprecated_domain(): def test_deprecate_already_deprecated_domain():
conn = boto.connect_swf("the_key", "the_secret") conn = boto.connect_swf("the_key", "the_secret")
conn.register_domain("test-domain", "60", description="A test domain") conn.register_domain("test-domain", "60", description="A test domain")
conn.deprecate_domain("test-domain") conn.deprecate_domain("test-domain")
conn.deprecate_domain.when.called_with("test-domain").should.throw(SWFResponseError) conn.deprecate_domain.when.called_with("test-domain").should.throw(SWFResponseError)
@mock_swf_deprecated @mock_swf_deprecated
def test_deprecate_non_existent_domain(): def test_deprecate_non_existent_domain():
conn = boto.connect_swf("the_key", "the_secret") conn = boto.connect_swf("the_key", "the_secret")
conn.deprecate_domain.when.called_with("non-existent").should.throw( conn.deprecate_domain.when.called_with("non-existent").should.throw(
SWFResponseError SWFResponseError
) )
# DescribeDomain endpoint # DescribeDomain endpoint
@mock_swf_deprecated @mock_swf_deprecated
def test_describe_domain(): def test_describe_domain():
conn = boto.connect_swf("the_key", "the_secret") conn = boto.connect_swf("the_key", "the_secret")
conn.register_domain("test-domain", "60", description="A test domain") conn.register_domain("test-domain", "60", description="A test domain")
domain = conn.describe_domain("test-domain") domain = conn.describe_domain("test-domain")
domain["configuration"]["workflowExecutionRetentionPeriodInDays"].should.equal("60") domain["configuration"]["workflowExecutionRetentionPeriodInDays"].should.equal("60")
domain["domainInfo"]["description"].should.equal("A test domain") domain["domainInfo"]["description"].should.equal("A test domain")
domain["domainInfo"]["name"].should.equal("test-domain") domain["domainInfo"]["name"].should.equal("test-domain")
domain["domainInfo"]["status"].should.equal("REGISTERED") domain["domainInfo"]["status"].should.equal("REGISTERED")
@mock_swf_deprecated @mock_swf_deprecated
def test_describe_non_existent_domain(): def test_describe_non_existent_domain():
conn = boto.connect_swf("the_key", "the_secret") conn = boto.connect_swf("the_key", "the_secret")
conn.describe_domain.when.called_with("non-existent").should.throw(SWFResponseError) conn.describe_domain.when.called_with("non-existent").should.throw(SWFResponseError)

View File

@ -1,7 +1,9 @@
import sure import sure
import boto import boto
import boto3
from moto import mock_swf_deprecated from moto import mock_swf_deprecated
from moto import mock_swf
from boto.swf.exceptions import SWFResponseError from boto.swf.exceptions import SWFResponseError
@ -133,6 +135,41 @@ def test_describe_workflow_type():
infos["status"].should.equal("REGISTERED") infos["status"].should.equal("REGISTERED")
@mock_swf
def test_describe_workflow_type_full_boto3():
    """describe_workflow_type echoes every registered default back.

    Uses boto3 because boto (v2) does not support all of these arguments.
    """
    client = boto3.client("swf", region_name="us-east-1")
    client.register_domain(
        name="test-domain", workflowExecutionRetentionPeriodInDays="2"
    )
    client.register_workflow_type(
        domain="test-domain",
        name="test-workflow",
        version="v1.0",
        description="Test workflow.",
        defaultTaskStartToCloseTimeout="20",
        defaultExecutionStartToCloseTimeout="60",
        defaultTaskList={"name": "foo"},
        defaultTaskPriority="-2",
        defaultChildPolicy="ABANDON",
        defaultLambdaRole="arn:bar",
    )

    resp = client.describe_workflow_type(
        domain="test-domain",
        workflowType={"name": "test-workflow", "version": "v1.0"},
    )

    type_info = resp["typeInfo"]
    type_info["workflowType"]["name"].should.equal("test-workflow")
    type_info["workflowType"]["version"].should.equal("v1.0")
    type_info["status"].should.equal("REGISTERED")
    type_info["description"].should.equal("Test workflow.")

    # Every registered default must round-trip through the describe call.
    configuration = resp["configuration"]
    expected_defaults = {
        "defaultTaskStartToCloseTimeout": "20",
        "defaultExecutionStartToCloseTimeout": "60",
        "defaultTaskPriority": "-2",
        "defaultChildPolicy": "ABANDON",
        "defaultLambdaRole": "arn:bar",
    }
    for key, value in expected_defaults.items():
        configuration[key].should.equal(value)
    configuration["defaultTaskList"]["name"].should.equal("foo")
@mock_swf_deprecated @mock_swf_deprecated
def test_describe_non_existent_workflow_type(): def test_describe_non_existent_workflow_type():
conn = boto.connect_swf("the_key", "the_secret") conn = boto.connect_swf("the_key", "the_secret")

View File

@ -1,9 +1,9 @@
import sure # noqa import sure # noqa
from moto.swf.utils import decapitalize from moto.swf.utils import decapitalize
def test_decapitalize():
    """decapitalize lower-cases only the first character of a string."""
    expectations = {"fooBar": "fooBar", "FooBar": "fooBar", "FOO BAR": "fOO BAR"}
    for raw, expected in expectations.items():
        decapitalize(raw).should.equal(expected)

View File

@ -0,0 +1,79 @@
import sure
from moto.utilities.tagging_service import TaggingService
def test_list_empty():
    """A resource that was never tagged lists an empty tag set."""
    svc = TaggingService()
    svc.list_tags_for_resource("test").should.equal({"Tags": []})
def test_create_tag():
    """Custom container/key/value names given to the constructor are honoured."""
    svc = TaggingService("TheTags", "TagKey", "TagValue")
    svc.tag_resource("arn", [{"TagKey": "key_key", "TagValue": "value_value"}])
    svc.list_tags_for_resource("arn").should.equal(
        {"TheTags": [{"TagKey": "key_key", "TagValue": "value_value"}]}
    )
def test_create_tag_without_value():
    """A tag supplied without a value is stored with Value=None."""
    svc = TaggingService()
    svc.tag_resource("arn", [{"Key": "key_key"}])
    svc.list_tags_for_resource("arn").should.equal(
        {"Tags": [{"Key": "key_key", "Value": None}]}
    )
def test_delete_tag_using_names():
    """Untagging by key name removes the tag from the resource."""
    svc = TaggingService()
    svc.tag_resource("arn", [{"Key": "key_key", "Value": "value_value"}])
    svc.untag_resource_using_names("arn", ["key_key"])
    svc.list_tags_for_resource("arn").should.equal({"Tags": []})
def test_delete_all_tags_for_resource():
    """delete_all_tags_for_resource drops every tag, however it was added."""
    svc = TaggingService()
    # Tag the resource twice so more than one tag is present.
    svc.tag_resource("arn", [{"Key": "key_key", "Value": "value_value"}])
    svc.tag_resource("arn", [{"Key": "key_key2", "Value": "value_value2"}])
    svc.delete_all_tags_for_resource("arn")
    svc.list_tags_for_resource("arn").should.equal({"Tags": []})
def test_list_empty_delete():
    """Untagging a resource that was never tagged is a harmless no-op."""
    svc = TaggingService()
    svc.untag_resource_using_names("arn", ["key_key"])
    svc.list_tags_for_resource("arn").should.equal({"Tags": []})
def test_delete_tag_using_tags():
    """Untagging with full tag dicts removes the matching tags."""
    svc = TaggingService()
    tags = [{"Key": "key_key", "Value": "value_value"}]
    svc.tag_resource("arn", tags)
    svc.untag_resource_using_tags("arn", tags)
    svc.list_tags_for_resource("arn").should.equal({"Tags": []})
def test_extract_tag_names():
    """extract_tag_names pulls the key of each tag dict, preserving order."""
    svc = TaggingService()
    tags = [
        {"Key": "key1", "Value": "value1"},
        {"Key": "key2", "Value": "value2"},
    ]
    svc.extract_tag_names(tags).should.equal(["key1", "key2"])