Merge branch 'master' into feature-apigw-authorizers

This commit is contained in:
Jon Beilke 2020-02-18 10:49:53 -06:00
commit 40208363be
72 changed files with 5444 additions and 2991 deletions

View File

@ -26,11 +26,12 @@ install:
fi fi
docker run --rm -t --name motoserver -e TEST_SERVER_MODE=true -e AWS_SECRET_ACCESS_KEY=server_secret -e AWS_ACCESS_KEY_ID=server_key -v `pwd`:/moto -p 5000:5000 -v /var/run/docker.sock:/var/run/docker.sock python:${PYTHON_DOCKER_TAG} /moto/travis_moto_server.sh & docker run --rm -t --name motoserver -e TEST_SERVER_MODE=true -e AWS_SECRET_ACCESS_KEY=server_secret -e AWS_ACCESS_KEY_ID=server_key -v `pwd`:/moto -p 5000:5000 -v /var/run/docker.sock:/var/run/docker.sock python:${PYTHON_DOCKER_TAG} /moto/travis_moto_server.sh &
fi fi
travis_retry pip install -r requirements-dev.txt
travis_retry pip install boto==2.45.0 travis_retry pip install boto==2.45.0
travis_retry pip install boto3 travis_retry pip install boto3
travis_retry pip install dist/moto*.gz travis_retry pip install dist/moto*.gz
travis_retry pip install coveralls==1.1 travis_retry pip install coveralls==1.1
travis_retry pip install -r requirements-dev.txt travis_retry pip install coverage==4.5.4
if [ "$TEST_SERVER_MODE" = "true" ]; then if [ "$TEST_SERVER_MODE" = "true" ]; then
python wait_for.py python wait_for.py

View File

@ -450,6 +450,16 @@ boto3.resource(
) )
``` ```
### Caveats
The standalone server has some caveats with some services. The following services
require that you update your hosts file for your code to work properly:
1. `s3-control`
For the above services, this is required because the hostname is in the form of `AWS_ACCOUNT_ID.localhost`.
As a result, you need to add that entry to your host file for your tests to function properly.
## Install ## Install

View File

@ -56,9 +56,10 @@ author = 'Steve Pulec'
# built documents. # built documents.
# #
# The short X.Y version. # The short X.Y version.
version = '0.4.10' import moto
version = moto.__version__
# The full version, including alpha/beta/rc tags. # The full version, including alpha/beta/rc tags.
release = '0.4.10' release = moto.__version__
# The language for content autogenerated by Sphinx. Refer to documentation # The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages. # for a list of supported languages.

View File

@ -24,8 +24,7 @@ For example, we have the following code we want to test:
.. sourcecode:: python .. sourcecode:: python
import boto import boto3
from boto.s3.key import Key
class MyModel(object): class MyModel(object):
def __init__(self, name, value): def __init__(self, name, value):
@ -33,11 +32,8 @@ For example, we have the following code we want to test:
self.value = value self.value = value
def save(self): def save(self):
conn = boto.connect_s3() s3 = boto3.client('s3', region_name='us-east-1')
bucket = conn.get_bucket('mybucket') s3.put_object(Bucket='mybucket', Key=self.name, Body=self.value)
k = Key(bucket)
k.key = self.name
k.set_contents_from_string(self.value)
There are several ways to do this, but you should keep in mind that Moto creates a full, blank environment. There are several ways to do this, but you should keep in mind that Moto creates a full, blank environment.
@ -48,20 +44,23 @@ With a decorator wrapping, all the calls to S3 are automatically mocked out.
.. sourcecode:: python .. sourcecode:: python
import boto import boto3
from moto import mock_s3 from moto import mock_s3
from mymodule import MyModel from mymodule import MyModel
@mock_s3 @mock_s3
def test_my_model_save(): def test_my_model_save():
conn = boto.connect_s3() conn = boto3.resource('s3', region_name='us-east-1')
# We need to create the bucket since this is all in Moto's 'virtual' AWS account # We need to create the bucket since this is all in Moto's 'virtual' AWS account
conn.create_bucket('mybucket') conn.create_bucket(Bucket='mybucket')
model_instance = MyModel('steve', 'is awesome') model_instance = MyModel('steve', 'is awesome')
model_instance.save() model_instance.save()
assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome' body = conn.Object('mybucket', 'steve').get()[
'Body'].read().decode("utf-8")
assert body == 'is awesome'
Context manager Context manager
~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~
@ -72,13 +71,16 @@ Same as the Decorator, every call inside the ``with`` statement is mocked out.
def test_my_model_save(): def test_my_model_save():
with mock_s3(): with mock_s3():
conn = boto.connect_s3() conn = boto3.resource('s3', region_name='us-east-1')
conn.create_bucket('mybucket') conn.create_bucket(Bucket='mybucket')
model_instance = MyModel('steve', 'is awesome') model_instance = MyModel('steve', 'is awesome')
model_instance.save() model_instance.save()
assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome' body = conn.Object('mybucket', 'steve').get()[
'Body'].read().decode("utf-8")
assert body == 'is awesome'
Raw Raw
~~~ ~~~
@ -91,13 +93,16 @@ You can also start and stop the mocking manually.
mock = mock_s3() mock = mock_s3()
mock.start() mock.start()
conn = boto.connect_s3() conn = boto3.resource('s3', region_name='us-east-1')
conn.create_bucket('mybucket') conn.create_bucket(Bucket='mybucket')
model_instance = MyModel('steve', 'is awesome') model_instance = MyModel('steve', 'is awesome')
model_instance.save() model_instance.save()
assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome' body = conn.Object('mybucket', 'steve').get()[
'Body'].read().decode("utf-8")
assert body == 'is awesome'
mock.stop() mock.stop()

View File

@ -84,14 +84,14 @@ class MethodResponse(BaseModel, dict):
class Method(BaseModel, dict): class Method(BaseModel, dict):
def __init__(self, method_type, authorization_type): def __init__(self, method_type, authorization_type, **kwargs):
super(Method, self).__init__() super(Method, self).__init__()
self.update( self.update(
dict( dict(
httpMethod=method_type, httpMethod=method_type,
authorizationType=authorization_type, authorizationType=authorization_type,
authorizerId=None, authorizerId=None,
apiKeyRequired=None, apiKeyRequired=kwargs.get("api_key_required") or False,
requestParameters=None, requestParameters=None,
requestModels=None, requestModels=None,
methodIntegration=None, methodIntegration=None,
@ -118,14 +118,15 @@ class Resource(BaseModel):
self.api_id = api_id self.api_id = api_id
self.path_part = path_part self.path_part = path_part
self.parent_id = parent_id self.parent_id = parent_id
self.resource_methods = {"GET": {}} self.resource_methods = {}
def to_dict(self): def to_dict(self):
response = { response = {
"path": self.get_path(), "path": self.get_path(),
"id": self.id, "id": self.id,
"resourceMethods": self.resource_methods,
} }
if self.resource_methods:
response["resourceMethods"] = self.resource_methods
if self.parent_id: if self.parent_id:
response["parentId"] = self.parent_id response["parentId"] = self.parent_id
response["pathPart"] = self.path_part response["pathPart"] = self.path_part
@ -159,8 +160,12 @@ class Resource(BaseModel):
) )
return response.status_code, response.text return response.status_code, response.text
def add_method(self, method_type, authorization_type): def add_method(self, method_type, authorization_type, api_key_required):
method = Method(method_type=method_type, authorization_type=authorization_type) method = Method(
method_type=method_type,
authorization_type=authorization_type,
api_key_required=api_key_required,
)
self.resource_methods[method_type] = method self.resource_methods[method_type] = method
return method return method
@ -675,9 +680,18 @@ class APIGatewayBackend(BaseBackend):
resource = self.get_resource(function_id, resource_id) resource = self.get_resource(function_id, resource_id)
return resource.get_method(method_type) return resource.get_method(method_type)
def create_method(self, function_id, resource_id, method_type, authorization_type): def create_method(
self,
function_id,
resource_id,
method_type,
authorization_type,
api_key_required=None,
):
resource = self.get_resource(function_id, resource_id) resource = self.get_resource(function_id, resource_id)
method = resource.add_method(method_type, authorization_type) method = resource.add_method(
method_type, authorization_type, api_key_required=api_key_required
)
return method return method
def get_authorizer(self, restapi_id, authorizer_id): def get_authorizer(self, restapi_id, authorizer_id):

View File

@ -147,8 +147,13 @@ class APIGatewayResponse(BaseResponse):
return 200, {}, json.dumps(method) return 200, {}, json.dumps(method)
elif self.method == "PUT": elif self.method == "PUT":
authorization_type = self._get_param("authorizationType") authorization_type = self._get_param("authorizationType")
api_key_required = self._get_param("apiKeyRequired")
method = self.backend.create_method( method = self.backend.create_method(
function_id, resource_id, method_type, authorization_type function_id,
resource_id,
method_type,
authorization_type,
api_key_required,
) )
return 200, {}, json.dumps(method) return 200, {}, json.dumps(method)

View File

@ -184,7 +184,13 @@ class LambdaResponse(BaseResponse):
function_name, qualifier, self.body, self.headers, response_headers function_name, qualifier, self.body, self.headers, response_headers
) )
if payload: if payload:
return 202, response_headers, payload if request.headers["X-Amz-Invocation-Type"] == "Event":
status_code = 202
elif request.headers["X-Amz-Invocation-Type"] == "DryRun":
status_code = 204
else:
status_code = 200
return status_code, response_headers, payload
else: else:
return 404, response_headers, "{}" return 404, response_headers, "{}"

View File

@ -14,6 +14,7 @@ from jose import jws
from moto.compat import OrderedDict from moto.compat import OrderedDict
from moto.core import BaseBackend, BaseModel from moto.core import BaseBackend, BaseModel
from moto.core import ACCOUNT_ID as DEFAULT_ACCOUNT_ID
from .exceptions import ( from .exceptions import (
GroupExistsException, GroupExistsException,
NotAuthorizedError, NotAuthorizedError,
@ -69,6 +70,9 @@ class CognitoIdpUserPool(BaseModel):
def __init__(self, region, name, extended_config): def __init__(self, region, name, extended_config):
self.region = region self.region = region
self.id = "{}_{}".format(self.region, str(uuid.uuid4().hex)) self.id = "{}_{}".format(self.region, str(uuid.uuid4().hex))
self.arn = "arn:aws:cognito-idp:{}:{}:userpool/{}".format(
self.region, DEFAULT_ACCOUNT_ID, self.id
)
self.name = name self.name = name
self.status = None self.status = None
self.extended_config = extended_config or {} self.extended_config = extended_config or {}
@ -91,6 +95,7 @@ class CognitoIdpUserPool(BaseModel):
def _base_json(self): def _base_json(self):
return { return {
"Id": self.id, "Id": self.id,
"Arn": self.arn,
"Name": self.name, "Name": self.name,
"Status": self.status, "Status": self.status,
"CreationDate": time.mktime(self.creation_date.timetuple()), "CreationDate": time.mktime(self.creation_date.timetuple()),
@ -564,12 +569,17 @@ class CognitoIdpBackend(BaseBackend):
user.groups.discard(group) user.groups.discard(group)
# User # User
def admin_create_user(self, user_pool_id, username, temporary_password, attributes): def admin_create_user(
self, user_pool_id, username, message_action, temporary_password, attributes
):
user_pool = self.user_pools.get(user_pool_id) user_pool = self.user_pools.get(user_pool_id)
if not user_pool: if not user_pool:
raise ResourceNotFoundError(user_pool_id) raise ResourceNotFoundError(user_pool_id)
if username in user_pool.users: if message_action and message_action == "RESEND":
if username not in user_pool.users:
raise UserNotFoundError(username)
elif username in user_pool.users:
raise UsernameExistsException(username) raise UsernameExistsException(username)
user = CognitoIdpUser( user = CognitoIdpUser(

View File

@ -259,10 +259,12 @@ class CognitoIdpResponse(BaseResponse):
def admin_create_user(self): def admin_create_user(self):
user_pool_id = self._get_param("UserPoolId") user_pool_id = self._get_param("UserPoolId")
username = self._get_param("Username") username = self._get_param("Username")
message_action = self._get_param("MessageAction")
temporary_password = self._get_param("TemporaryPassword") temporary_password = self._get_param("TemporaryPassword")
user = cognitoidp_backends[self.region].admin_create_user( user = cognitoidp_backends[self.region].admin_create_user(
user_pool_id, user_pool_id,
username, username,
message_action,
temporary_password, temporary_password,
self._get_param("UserAttributes", []), self._get_param("UserAttributes", []),
) )

View File

@ -43,7 +43,7 @@ from moto.config.exceptions import (
) )
from moto.core import BaseBackend, BaseModel from moto.core import BaseBackend, BaseModel
from moto.s3.config import s3_config_query from moto.s3.config import s3_account_public_access_block_query, s3_config_query
from moto.core import ACCOUNT_ID as DEFAULT_ACCOUNT_ID from moto.core import ACCOUNT_ID as DEFAULT_ACCOUNT_ID
@ -58,7 +58,10 @@ POP_STRINGS = [
DEFAULT_PAGE_SIZE = 100 DEFAULT_PAGE_SIZE = 100
# Map the Config resource type to a backend: # Map the Config resource type to a backend:
RESOURCE_MAP = {"AWS::S3::Bucket": s3_config_query} RESOURCE_MAP = {
"AWS::S3::Bucket": s3_config_query,
"AWS::S3::AccountPublicAccessBlock": s3_account_public_access_block_query,
}
def datetime2int(date): def datetime2int(date):
@ -867,16 +870,17 @@ class ConfigBackend(BaseBackend):
backend_region=backend_query_region, backend_region=backend_query_region,
) )
result = { resource_identifiers = []
"resourceIdentifiers": [ for identifier in identifiers:
{ item = {"resourceType": identifier["type"], "resourceId": identifier["id"]}
"resourceType": identifier["type"],
"resourceId": identifier["id"], # Some resource types lack names:
"resourceName": identifier["name"], if identifier.get("name"):
} item["resourceName"] = identifier["name"]
for identifier in identifiers
] resource_identifiers.append(item)
}
result = {"resourceIdentifiers": resource_identifiers}
if new_token: if new_token:
result["nextToken"] = new_token result["nextToken"] = new_token
@ -927,18 +931,21 @@ class ConfigBackend(BaseBackend):
resource_region=resource_region, resource_region=resource_region,
) )
result = { resource_identifiers = []
"ResourceIdentifiers": [ for identifier in identifiers:
{ item = {
"SourceAccountId": DEFAULT_ACCOUNT_ID, "SourceAccountId": DEFAULT_ACCOUNT_ID,
"SourceRegion": identifier["region"], "SourceRegion": identifier["region"],
"ResourceType": identifier["type"], "ResourceType": identifier["type"],
"ResourceId": identifier["id"], "ResourceId": identifier["id"],
"ResourceName": identifier["name"], }
}
for identifier in identifiers if identifier.get("name"):
] item["ResourceName"] = identifier["name"]
}
resource_identifiers.append(item)
result = {"ResourceIdentifiers": resource_identifiers}
if new_token: if new_token:
result["NextToken"] = new_token result["NextToken"] = new_token

View File

@ -606,12 +606,13 @@ class ConfigQueryModel(object):
As such, the proper way to implement is to first obtain a full list of results from all the region backends, and then filter As such, the proper way to implement is to first obtain a full list of results from all the region backends, and then filter
from there. It may be valuable to make this a concatenation of the region and resource name. from there. It may be valuable to make this a concatenation of the region and resource name.
:param resource_region: :param resource_ids: A list of resource IDs
:param resource_ids: :param resource_name: The individual name of a resource
:param resource_name: :param limit: How many per page
:param limit: :param next_token: The item that will page on
:param next_token:
:param backend_region: The region for the backend to pull results from. Set to `None` if this is an aggregated query. :param backend_region: The region for the backend to pull results from. Set to `None` if this is an aggregated query.
:param resource_region: The region for where the resources reside to pull results from. Set to `None` if this is a
non-aggregated query.
:return: This should return a list of Dicts that have the following fields: :return: This should return a list of Dicts that have the following fields:
[ [
{ {

View File

@ -448,13 +448,18 @@ class Item(BaseModel):
if list_append_re: if list_append_re:
new_value = expression_attribute_values[list_append_re.group(2).strip()] new_value = expression_attribute_values[list_append_re.group(2).strip()]
old_list_key = list_append_re.group(1) old_list_key = list_append_re.group(1)
# Get the existing value # old_key could be a function itself (if_not_exists)
old_list = self.attrs[old_list_key.split(".")[0]] if old_list_key.startswith("if_not_exists"):
if "." in old_list_key: old_list = DynamoType(
# Value is nested inside a map - find the appropriate child attr expression_attribute_values[self._get_default(old_list_key)]
old_list = old_list.child_attr(
".".join(old_list_key.split(".")[1:])
) )
else:
old_list = self.attrs[old_list_key.split(".")[0]]
if "." in old_list_key:
# Value is nested inside a map - find the appropriate child attr
old_list = old_list.child_attr(
".".join(old_list_key.split(".")[1:])
)
if not old_list.is_list(): if not old_list.is_list():
raise ParamValidationError raise ParamValidationError
old_list.value.extend([DynamoType(v) for v in new_value["L"]]) old_list.value.extend([DynamoType(v) for v in new_value["L"]])

View File

@ -27,6 +27,7 @@ from moto.core.utils import (
iso_8601_datetime_with_milliseconds, iso_8601_datetime_with_milliseconds,
camelcase_to_underscores, camelcase_to_underscores,
) )
from moto.iam.models import ACCOUNT_ID
from .exceptions import ( from .exceptions import (
CidrLimitExceeded, CidrLimitExceeded,
DependencyViolationError, DependencyViolationError,
@ -155,7 +156,7 @@ AMIS = _load_resource(
) )
OWNER_ID = "111122223333" OWNER_ID = ACCOUNT_ID
def utc_date_and_time(): def utc_date_and_time():
@ -1341,7 +1342,7 @@ class AmiBackend(object):
source_ami=None, source_ami=None,
name=name, name=name,
description=description, description=description,
owner_id=context.get_current_user() if context else OWNER_ID, owner_id=OWNER_ID,
) )
self.amis[ami_id] = ami self.amis[ami_id] = ami
return ami return ami
@ -1392,14 +1393,7 @@ class AmiBackend(object):
# Limit by owner ids # Limit by owner ids
if owners: if owners:
# support filtering by Owners=['self'] # support filtering by Owners=['self']
owners = list( owners = list(map(lambda o: OWNER_ID if o == "self" else o, owners,))
map(
lambda o: context.get_current_user()
if context and o == "self"
else o,
owners,
)
)
images = [ami for ami in images if ami.owner_id in owners] images = [ami for ami in images if ami.owner_id in owners]
# Generic filters # Generic filters

View File

@ -6,6 +6,7 @@ from boto3 import Session
from moto.core.exceptions import JsonRESTError from moto.core.exceptions import JsonRESTError
from moto.core import BaseBackend, BaseModel from moto.core import BaseBackend, BaseModel
from moto.sts.models import ACCOUNT_ID from moto.sts.models import ACCOUNT_ID
from moto.utilities.tagging_service import TaggingService
class Rule(BaseModel): class Rule(BaseModel):
@ -104,6 +105,7 @@ class EventsBackend(BaseBackend):
self.region_name = region_name self.region_name = region_name
self.event_buses = {} self.event_buses = {}
self.event_sources = {} self.event_sources = {}
self.tagger = TaggingService()
self._add_default_event_bus() self._add_default_event_bus()
@ -141,6 +143,9 @@ class EventsBackend(BaseBackend):
def delete_rule(self, name): def delete_rule(self, name):
self.rules_order.pop(self.rules_order.index(name)) self.rules_order.pop(self.rules_order.index(name))
arn = self.rules.get(name).arn
if self.tagger.has_tags(arn):
self.tagger.delete_all_tags_for_resource(arn)
return self.rules.pop(name) is not None return self.rules.pop(name) is not None
def describe_rule(self, name): def describe_rule(self, name):
@ -361,6 +366,32 @@ class EventsBackend(BaseBackend):
self.event_buses.pop(name, None) self.event_buses.pop(name, None)
def list_tags_for_resource(self, arn):
name = arn.split("/")[-1]
if name in self.rules:
return self.tagger.list_tags_for_resource(self.rules[name].arn)
raise JsonRESTError(
"ResourceNotFoundException", "An entity that you specified does not exist."
)
def tag_resource(self, arn, tags):
name = arn.split("/")[-1]
if name in self.rules:
self.tagger.tag_resource(self.rules[name].arn, tags)
return {}
raise JsonRESTError(
"ResourceNotFoundException", "An entity that you specified does not exist."
)
def untag_resource(self, arn, tag_names):
name = arn.split("/")[-1]
if name in self.rules:
self.tagger.untag_resource_using_names(self.rules[name].arn, tag_names)
return {}
raise JsonRESTError(
"ResourceNotFoundException", "An entity that you specified does not exist."
)
events_backends = {} events_backends = {}
for region in Session().get_available_regions("events"): for region in Session().get_available_regions("events"):

View File

@ -297,3 +297,26 @@ class EventsHandler(BaseResponse):
self.events_backend.delete_event_bus(name) self.events_backend.delete_event_bus(name)
return "", self.response_headers return "", self.response_headers
def list_tags_for_resource(self):
arn = self._get_param("ResourceARN")
result = self.events_backend.list_tags_for_resource(arn)
return json.dumps(result), self.response_headers
def tag_resource(self):
arn = self._get_param("ResourceARN")
tags = self._get_param("Tags")
result = self.events_backend.tag_resource(arn, tags)
return json.dumps(result), self.response_headers
def untag_resource(self):
arn = self._get_param("ResourceARN")
tags = self._get_param("TagKeys")
result = self.events_backend.untag_resource(arn, tags)
return json.dumps(result), self.response_headers

View File

@ -22,6 +22,15 @@ class InvalidRequestException(IoTClientError):
) )
class InvalidStateTransitionException(IoTClientError):
def __init__(self, msg=None):
self.code = 409
super(InvalidStateTransitionException, self).__init__(
"InvalidStateTransitionException",
msg or "An attempt was made to change to an invalid state.",
)
class VersionConflictException(IoTClientError): class VersionConflictException(IoTClientError):
def __init__(self, name): def __init__(self, name):
self.code = 409 self.code = 409

View File

@ -17,6 +17,7 @@ from .exceptions import (
DeleteConflictException, DeleteConflictException,
ResourceNotFoundException, ResourceNotFoundException,
InvalidRequestException, InvalidRequestException,
InvalidStateTransitionException,
VersionConflictException, VersionConflictException,
) )
@ -29,7 +30,7 @@ class FakeThing(BaseModel):
self.attributes = attributes self.attributes = attributes
self.arn = "arn:aws:iot:%s:1:thing/%s" % (self.region_name, thing_name) self.arn = "arn:aws:iot:%s:1:thing/%s" % (self.region_name, thing_name)
self.version = 1 self.version = 1
# TODO: we need to handle 'version'? # TODO: we need to handle "version"?
# for iot-data # for iot-data
self.thing_shadow = None self.thing_shadow = None
@ -174,18 +175,19 @@ class FakeCertificate(BaseModel):
class FakePolicy(BaseModel): class FakePolicy(BaseModel):
def __init__(self, name, document, region_name): def __init__(self, name, document, region_name, default_version_id="1"):
self.name = name self.name = name
self.document = document self.document = document
self.arn = "arn:aws:iot:%s:1:policy/%s" % (region_name, name) self.arn = "arn:aws:iot:%s:1:policy/%s" % (region_name, name)
self.version = "1" # TODO: handle version self.default_version_id = default_version_id
self.versions = [FakePolicyVersion(self.name, document, True, region_name)]
def to_get_dict(self): def to_get_dict(self):
return { return {
"policyName": self.name, "policyName": self.name,
"policyArn": self.arn, "policyArn": self.arn,
"policyDocument": self.document, "policyDocument": self.document,
"defaultVersionId": self.version, "defaultVersionId": self.default_version_id,
} }
def to_dict_at_creation(self): def to_dict_at_creation(self):
@ -193,13 +195,52 @@ class FakePolicy(BaseModel):
"policyName": self.name, "policyName": self.name,
"policyArn": self.arn, "policyArn": self.arn,
"policyDocument": self.document, "policyDocument": self.document,
"policyVersionId": self.version, "policyVersionId": self.default_version_id,
} }
def to_dict(self): def to_dict(self):
return {"policyName": self.name, "policyArn": self.arn} return {"policyName": self.name, "policyArn": self.arn}
class FakePolicyVersion(object):
def __init__(self, policy_name, document, is_default, region_name):
self.name = policy_name
self.arn = "arn:aws:iot:%s:1:policy/%s" % (region_name, policy_name)
self.document = document or {}
self.is_default = is_default
self.version_id = "1"
self.create_datetime = time.mktime(datetime(2015, 1, 1).timetuple())
self.last_modified_datetime = time.mktime(datetime(2015, 1, 2).timetuple())
def to_get_dict(self):
return {
"policyName": self.name,
"policyArn": self.arn,
"policyDocument": self.document,
"policyVersionId": self.version_id,
"isDefaultVersion": self.is_default,
"creationDate": self.create_datetime,
"lastModifiedDate": self.last_modified_datetime,
"generationId": self.version_id,
}
def to_dict_at_creation(self):
return {
"policyArn": self.arn,
"policyDocument": self.document,
"policyVersionId": self.version_id,
"isDefaultVersion": self.is_default,
}
def to_dict(self):
return {
"versionId": self.version_id,
"isDefaultVersion": self.is_default,
"createDate": self.create_datetime,
}
class FakeJob(BaseModel): class FakeJob(BaseModel):
JOB_ID_REGEX_PATTERN = "[a-zA-Z0-9_-]" JOB_ID_REGEX_PATTERN = "[a-zA-Z0-9_-]"
JOB_ID_REGEX = re.compile(JOB_ID_REGEX_PATTERN) JOB_ID_REGEX = re.compile(JOB_ID_REGEX_PATTERN)
@ -226,12 +267,14 @@ class FakeJob(BaseModel):
self.targets = targets self.targets = targets
self.document_source = document_source self.document_source = document_source
self.document = document self.document = document
self.force = False
self.description = description self.description = description
self.presigned_url_config = presigned_url_config self.presigned_url_config = presigned_url_config
self.target_selection = target_selection self.target_selection = target_selection
self.job_executions_rollout_config = job_executions_rollout_config self.job_executions_rollout_config = job_executions_rollout_config
self.status = None # IN_PROGRESS | CANCELED | COMPLETED self.status = "QUEUED" # IN_PROGRESS | CANCELED | COMPLETED
self.comment = None self.comment = None
self.reason_code = None
self.created_at = time.mktime(datetime(2015, 1, 1).timetuple()) self.created_at = time.mktime(datetime(2015, 1, 1).timetuple())
self.last_updated_at = time.mktime(datetime(2015, 1, 1).timetuple()) self.last_updated_at = time.mktime(datetime(2015, 1, 1).timetuple())
self.completed_at = None self.completed_at = None
@ -258,9 +301,11 @@ class FakeJob(BaseModel):
"jobExecutionsRolloutConfig": self.job_executions_rollout_config, "jobExecutionsRolloutConfig": self.job_executions_rollout_config,
"status": self.status, "status": self.status,
"comment": self.comment, "comment": self.comment,
"forceCanceled": self.force,
"reasonCode": self.reason_code,
"createdAt": self.created_at, "createdAt": self.created_at,
"lastUpdatedAt": self.last_updated_at, "lastUpdatedAt": self.last_updated_at,
"completedAt": self.completedAt, "completedAt": self.completed_at,
"jobProcessDetails": self.job_process_details, "jobProcessDetails": self.job_process_details,
"documentParameters": self.document_parameters, "documentParameters": self.document_parameters,
"document": self.document, "document": self.document,
@ -275,12 +320,67 @@ class FakeJob(BaseModel):
return regex_match and length_match return regex_match and length_match
class FakeJobExecution(BaseModel):
def __init__(
self,
job_id,
thing_arn,
status="QUEUED",
force_canceled=False,
status_details_map={},
):
self.job_id = job_id
self.status = status # IN_PROGRESS | CANCELED | COMPLETED
self.force_canceled = force_canceled
self.status_details_map = status_details_map
self.thing_arn = thing_arn
self.queued_at = time.mktime(datetime(2015, 1, 1).timetuple())
self.started_at = time.mktime(datetime(2015, 1, 1).timetuple())
self.last_updated_at = time.mktime(datetime(2015, 1, 1).timetuple())
self.execution_number = 123
self.version_number = 123
self.approximate_seconds_before_time_out = 123
def to_get_dict(self):
obj = {
"jobId": self.job_id,
"status": self.status,
"forceCanceled": self.force_canceled,
"statusDetails": {"detailsMap": self.status_details_map},
"thingArn": self.thing_arn,
"queuedAt": self.queued_at,
"startedAt": self.started_at,
"lastUpdatedAt": self.last_updated_at,
"executionNumber": self.execution_number,
"versionNumber": self.version_number,
"approximateSecondsBeforeTimedOut": self.approximate_seconds_before_time_out,
}
return obj
def to_dict(self):
obj = {
"jobId": self.job_id,
"thingArn": self.thing_arn,
"jobExecutionSummary": {
"status": self.status,
"queuedAt": self.queued_at,
"startedAt": self.started_at,
"lastUpdatedAt": self.last_updated_at,
"executionNumber": self.execution_number,
},
}
return obj
class IoTBackend(BaseBackend): class IoTBackend(BaseBackend):
def __init__(self, region_name=None): def __init__(self, region_name=None):
super(IoTBackend, self).__init__() super(IoTBackend, self).__init__()
self.region_name = region_name self.region_name = region_name
self.things = OrderedDict() self.things = OrderedDict()
self.jobs = OrderedDict() self.jobs = OrderedDict()
self.job_executions = OrderedDict()
self.thing_types = OrderedDict() self.thing_types = OrderedDict()
self.thing_groups = OrderedDict() self.thing_groups = OrderedDict()
self.certificates = OrderedDict() self.certificates = OrderedDict()
@ -535,6 +635,28 @@ class IoTBackend(BaseBackend):
self.policies[policy.name] = policy self.policies[policy.name] = policy
return policy return policy
def attach_policy(self, policy_name, target):
principal = self._get_principal(target)
policy = self.get_policy(policy_name)
k = (target, policy_name)
if k in self.principal_policies:
return
self.principal_policies[k] = (principal, policy)
def detach_policy(self, policy_name, target):
# this may raises ResourceNotFoundException
self._get_principal(target)
self.get_policy(policy_name)
k = (target, policy_name)
if k not in self.principal_policies:
raise ResourceNotFoundException()
del self.principal_policies[k]
def list_attached_policies(self, target):
policies = [v[1] for k, v in self.principal_policies.items() if k[0] == target]
return policies
def list_policies(self): def list_policies(self):
policies = self.policies.values() policies = self.policies.values()
return policies return policies
@ -559,6 +681,60 @@ class IoTBackend(BaseBackend):
policy = self.get_policy(policy_name) policy = self.get_policy(policy_name)
del self.policies[policy.name] del self.policies[policy.name]
def create_policy_version(self, policy_name, policy_document, set_as_default):
policy = self.get_policy(policy_name)
if not policy:
raise ResourceNotFoundException()
version = FakePolicyVersion(
policy_name, policy_document, set_as_default, self.region_name
)
policy.versions.append(version)
version.version_id = "{0}".format(len(policy.versions))
if set_as_default:
self.set_default_policy_version(policy_name, version.version_id)
return version
def set_default_policy_version(self, policy_name, version_id):
policy = self.get_policy(policy_name)
if not policy:
raise ResourceNotFoundException()
for version in policy.versions:
if version.version_id == version_id:
version.is_default = True
policy.default_version_id = version.version_id
policy.document = version.document
else:
version.is_default = False
def get_policy_version(self, policy_name, version_id):
policy = self.get_policy(policy_name)
if not policy:
raise ResourceNotFoundException()
for version in policy.versions:
if version.version_id == version_id:
return version
raise ResourceNotFoundException()
def list_policy_versions(self, policy_name):
    """Return all stored versions of the named policy."""
    policy = self.get_policy(policy_name)
    if policy:
        return policy.versions
    raise ResourceNotFoundException()
def delete_policy_version(self, policy_name, version_id):
    """Delete a non-default version of a policy.

    The default version cannot be deleted (InvalidRequestException);
    unknown policies/versions raise ResourceNotFoundException.
    """
    policy = self.get_policy(policy_name)
    if not policy:
        raise ResourceNotFoundException()
    if version_id == policy.default_version_id:
        raise InvalidRequestException(
            "Cannot delete the default version of a policy"
        )
    # Remove the first matching version in place (keeps list identity).
    match_index = next(
        (i for i, v in enumerate(policy.versions) if v.version_id == version_id),
        None,
    )
    if match_index is None:
        raise ResourceNotFoundException()
    del policy.versions[match_index]
def _get_principal(self, principal_arn): def _get_principal(self, principal_arn):
""" """
raise ResourceNotFoundException raise ResourceNotFoundException
@ -574,14 +750,6 @@ class IoTBackend(BaseBackend):
pass pass
raise ResourceNotFoundException() raise ResourceNotFoundException()
def attach_policy(self, policy_name, target):
    """Attach a policy to a principal target; no-op when already attached."""
    principal = self._get_principal(target)
    policy = self.get_policy(policy_name)
    attachment_key = (target, policy_name)
    if attachment_key not in self.principal_policies:
        self.principal_policies[attachment_key] = (principal, policy)
def attach_principal_policy(self, policy_name, principal_arn): def attach_principal_policy(self, policy_name, principal_arn):
principal = self._get_principal(principal_arn) principal = self._get_principal(principal_arn)
policy = self.get_policy(policy_name) policy = self.get_policy(policy_name)
@ -590,15 +758,6 @@ class IoTBackend(BaseBackend):
return return
self.principal_policies[k] = (principal, policy) self.principal_policies[k] = (principal, policy)
def detach_policy(self, policy_name, target):
    """Detach *policy_name* from the principal *target*.

    Raises ResourceNotFoundException when the principal or the policy
    does not exist, or when no such attachment is recorded.
    """
    # this may raises ResourceNotFoundException
    self._get_principal(target)
    self.get_policy(policy_name)
    k = (target, policy_name)
    if k not in self.principal_policies:
        raise ResourceNotFoundException()
    del self.principal_policies[k]
def detach_principal_policy(self, policy_name, principal_arn): def detach_principal_policy(self, policy_name, principal_arn):
# this may raises ResourceNotFoundException # this may raises ResourceNotFoundException
self._get_principal(principal_arn) self._get_principal(principal_arn)
@ -819,11 +978,187 @@ class IoTBackend(BaseBackend):
self.region_name, self.region_name,
) )
self.jobs[job_id] = job self.jobs[job_id] = job
for thing_arn in targets:
thing_name = thing_arn.split(":")[-1].split("/")[-1]
job_execution = FakeJobExecution(job_id, thing_arn)
self.job_executions[(job_id, thing_name)] = job_execution
return job.job_arn, job_id, description return job.job_arn, job_id, description
def describe_job(self, job_id): def describe_job(self, job_id):
jobs = [_ for _ in self.jobs.values() if _.job_id == job_id]
if len(jobs) == 0:
raise ResourceNotFoundException()
return jobs[0]
def delete_job(self, job_id, force):
    """Delete a job; an IN_PROGRESS job can only be deleted with *force*."""
    job = self.jobs[job_id]
    if job.status == "IN_PROGRESS" and not force:
        raise InvalidStateTransitionException()
    del self.jobs[job_id]
def cancel_job(self, job_id, reason_code, comment, force):
    """Cancel a job, recording an optional reason code and comment.

    An IN_PROGRESS job may only be canceled with *force*; otherwise
    InvalidStateTransitionException is raised.
    """
    job = self.jobs[job_id]
    # Check the state BEFORE mutating it: the previous implementation set
    # status to "CANCELED" first, which made both guard branches dead code
    # and allowed canceling IN_PROGRESS jobs without force.
    if job.status == "IN_PROGRESS" and not force:
        raise InvalidStateTransitionException()
    job.reason_code = reason_code if reason_code is not None else job.reason_code
    job.comment = comment if comment is not None else job.comment
    job.force = force if force is not None and force != job.force else job.force
    job.status = "CANCELED"
    self.jobs[job_id] = job
    return job
def get_job_document(self, job_id):
return self.jobs[job_id] return self.jobs[job_id]
def list_jobs(
    self,
    status,
    target_selection,
    max_results,
    token,
    thing_group_name,
    thing_group_id,
):
    """Paginated listing of jobs as dicts.

    Filtering by status / target selection / thing group is not
    implemented yet; every job is returned.  Tokens are stringified
    start offsets.
    """
    # TODO: implement filters
    filtered_jobs = [job.to_dict() for job in self.jobs.values()]
    start = 0 if token is None else int(token)
    page = filtered_jobs[start : start + max_results]
    has_more = len(filtered_jobs) > start + max_results
    next_token = str(start + max_results) if has_more else None
    return page, next_token
def describe_job_execution(self, job_id, thing_name, execution_number):
    """Look up one job execution; optionally match its execution number."""
    key = (job_id, thing_name)
    if key not in self.job_executions:
        raise ResourceNotFoundException()
    job_execution = self.job_executions[key]
    if job_execution is None:
        raise ResourceNotFoundException()
    if (
        execution_number is not None
        and job_execution.execution_number != execution_number
    ):
        raise ResourceNotFoundException()
    return job_execution
def cancel_job_execution(
    self, job_id, thing_name, force, expected_version, status_details
):
    """Cancel a job execution; IN_PROGRESS executions require *force*."""
    job_execution = self.job_executions[(job_id, thing_name)]
    if job_execution is None:
        raise ResourceNotFoundException()
    if force is not None:
        job_execution.force_canceled = force
    # TODO: implement expected_version and status_details (at most 10 can be specified)
    if job_execution.status == "IN_PROGRESS" and not force:
        raise InvalidStateTransitionException()
    job_execution.status = "CANCELED"
    self.job_executions[(job_id, thing_name)] = job_execution
def delete_job_execution(self, job_id, thing_name, execution_number, force):
    """Delete a job execution; IN_PROGRESS executions require *force*."""
    key = (job_id, thing_name)
    job_execution = self.job_executions[key]
    if job_execution.execution_number != execution_number:
        raise ResourceNotFoundException()
    if job_execution.status == "IN_PROGRESS" and not force:
        raise InvalidStateTransitionException()
    del self.job_executions[key]
def list_job_executions_for_job(self, job_id, status, max_results, next_token):
    """Paginated listing of the execution summaries belonging to one job.

    *status* filters on exact execution status; tokens are stringified
    start offsets.
    """
    executions = [
        execution.to_dict()
        for key, execution in self.job_executions.items()
        if key[0] == job_id
    ]
    if status is not None:
        executions = [e for e in executions if e["status"] == status]
    start = 0 if next_token is None else int(next_token)
    page = executions[start : start + max_results]
    token_out = (
        str(start + max_results) if len(executions) > start + max_results else None
    )
    return page, token_out
def list_job_executions_for_thing(
    self, thing_name, status, max_results, next_token
):
    """Paginated listing of the execution summaries targeting one thing.

    *status* filters on exact execution status; tokens are stringified
    start offsets.
    """
    executions = [
        execution.to_dict()
        for key, execution in self.job_executions.items()
        if key[1] == thing_name
    ]
    if status is not None:
        executions = [e for e in executions if e["status"] == status]
    start = 0 if next_token is None else int(next_token)
    page = executions[start : start + max_results]
    token_out = (
        str(start + max_results) if len(executions) > start + max_results else None
    )
    return page, token_out
iot_backends = {} iot_backends = {}
for region in Session().get_available_regions("iot"): for region in Session().get_available_regions("iot"):

View File

@ -1,6 +1,7 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import json import json
from six.moves.urllib.parse import unquote
from moto.core.responses import BaseResponse from moto.core.responses import BaseResponse
from .models import iot_backends from .models import iot_backends
@ -141,6 +142,8 @@ class IoTResponse(BaseResponse):
createdAt=job.created_at, createdAt=job.created_at,
description=job.description, description=job.description,
documentParameters=job.document_parameters, documentParameters=job.document_parameters,
forceCanceled=job.force,
reasonCode=job.reason_code,
jobArn=job.job_arn, jobArn=job.job_arn,
jobExecutionsRolloutConfig=job.job_executions_rollout_config, jobExecutionsRolloutConfig=job.job_executions_rollout_config,
jobId=job.job_id, jobId=job.job_id,
@ -154,6 +157,127 @@ class IoTResponse(BaseResponse):
) )
) )
def delete_job(self):
    """Handle DeleteJob; returns an empty JSON document."""
    self.iot_backend.delete_job(
        job_id=self._get_param("jobId"), force=self._get_bool_param("force")
    )
    return json.dumps(dict())
def cancel_job(self):
    """Handle CancelJob and return the updated job as JSON."""
    job = self.iot_backend.cancel_job(
        job_id=self._get_param("jobId"),
        reason_code=self._get_param("reasonCode"),
        comment=self._get_param("comment"),
        force=self._get_bool_param("force"),
    )
    return json.dumps(job.to_dict())
def get_job_document(self):
    """Handle GetJobDocument, returning the inline job document."""
    job = self.iot_backend.get_job_document(job_id=self._get_param("jobId"))
    if job.document is None:
        # job.document_source is not None:
        # TODO: needs to be implemented to get document_source's content from S3
        return json.dumps({"document": ""})
    return json.dumps({"document": job.document})
def list_jobs(self):
    """Handle ListJobs with simple token pagination.

    The backend does not implement filtering yet; the filter parameters
    are passed through unchanged.
    """
    # Bug fix: the previous version had trailing commas on the status,
    # targetSelection and thingGroupName assignments, turning each scalar
    # parameter into a one-element tuple.
    status = self._get_param("status")
    target_selection = self._get_param("targetSelection")
    max_results = self._get_int_param(
        "maxResults", 50
    )  # not the default, but makes testing easier
    previous_next_token = self._get_param("nextToken")
    thing_group_name = self._get_param("thingGroupName")
    thing_group_id = self._get_param("thingGroupId")
    jobs, next_token = self.iot_backend.list_jobs(
        status=status,
        target_selection=target_selection,
        max_results=max_results,
        token=previous_next_token,
        thing_group_name=thing_group_name,
        thing_group_id=thing_group_id,
    )
    return json.dumps(dict(jobs=jobs, nextToken=next_token))
def describe_job_execution(self):
    """Handle DescribeJobExecution."""
    execution = self.iot_backend.describe_job_execution(
        job_id=self._get_param("jobId"),
        thing_name=self._get_param("thingName"),
        execution_number=self._get_int_param("executionNumber"),
    )
    return json.dumps(dict(execution=execution.to_get_dict()))
def cancel_job_execution(self):
    """Handle CancelJobExecution; returns an empty JSON document."""
    self.iot_backend.cancel_job_execution(
        job_id=self._get_param("jobId"),
        thing_name=self._get_param("thingName"),
        force=self._get_bool_param("force"),
        expected_version=self._get_int_param("expectedVersion"),
        status_details=self._get_param("statusDetails"),
    )
    return json.dumps(dict())
def delete_job_execution(self):
    """Handle DeleteJobExecution; returns an empty JSON document."""
    self.iot_backend.delete_job_execution(
        job_id=self._get_param("jobId"),
        thing_name=self._get_param("thingName"),
        execution_number=self._get_int_param("executionNumber"),
        force=self._get_bool_param("force"),
    )
    return json.dumps(dict())
def list_job_executions_for_job(self):
    """Handle ListJobExecutionsForJob with simple token pagination."""
    summaries, next_token = self.iot_backend.list_job_executions_for_job(
        job_id=self._get_param("jobId"),
        status=self._get_param("status"),
        max_results=self._get_int_param(
            "maxResults", 50
        ),  # not the default, but makes testing easier
        next_token=self._get_param("nextToken"),
    )
    return json.dumps(dict(executionSummaries=summaries, nextToken=next_token))
def list_job_executions_for_thing(self):
    """Handle ListJobExecutionsForThing with simple token pagination."""
    summaries, next_token = self.iot_backend.list_job_executions_for_thing(
        thing_name=self._get_param("thingName"),
        status=self._get_param("status"),
        max_results=self._get_int_param(
            "maxResults", 50
        ),  # not the default, but makes testing easier
        next_token=self._get_param("nextToken"),
    )
    return json.dumps(dict(executionSummaries=summaries, nextToken=next_token))
def create_keys_and_certificate(self): def create_keys_and_certificate(self):
set_as_active = self._get_bool_param("setAsActive") set_as_active = self._get_bool_param("setAsActive")
cert, key_pair = self.iot_backend.create_keys_and_certificate( cert, key_pair = self.iot_backend.create_keys_and_certificate(
@ -241,12 +365,61 @@ class IoTResponse(BaseResponse):
self.iot_backend.delete_policy(policy_name=policy_name) self.iot_backend.delete_policy(policy_name=policy_name)
return json.dumps(dict()) return json.dumps(dict())
def create_policy_version(self):
    """Handle CreatePolicyVersion."""
    policy_version = self.iot_backend.create_policy_version(
        self._get_param("policyName"),
        self._get_param("policyDocument"),
        self._get_bool_param("setAsDefault"),
    )
    return json.dumps(dict(policy_version.to_dict_at_creation()))
def set_default_policy_version(self):
    """Handle SetDefaultPolicyVersion; returns an empty JSON document."""
    self.iot_backend.set_default_policy_version(
        self._get_param("policyName"), self._get_param("policyVersionId")
    )
    return json.dumps(dict())
def get_policy_version(self):
    """Handle GetPolicyVersion."""
    version = self.iot_backend.get_policy_version(
        self._get_param("policyName"), self._get_param("policyVersionId")
    )
    return json.dumps(dict(version.to_get_dict()))
def list_policy_versions(self):
    """Handle ListPolicyVersions."""
    policy_versions = self.iot_backend.list_policy_versions(
        policy_name=self._get_param("policyName")
    )
    return json.dumps(
        dict(policyVersions=[version.to_dict() for version in policy_versions])
    )
def delete_policy_version(self):
    """Handle DeletePolicyVersion; returns an empty JSON document."""
    self.iot_backend.delete_policy_version(
        self._get_param("policyName"), self._get_param("policyVersionId")
    )
    return json.dumps(dict())
def attach_policy(self): def attach_policy(self):
policy_name = self._get_param("policyName") policy_name = self._get_param("policyName")
target = self._get_param("target") target = self._get_param("target")
self.iot_backend.attach_policy(policy_name=policy_name, target=target) self.iot_backend.attach_policy(policy_name=policy_name, target=target)
return json.dumps(dict()) return json.dumps(dict())
def list_attached_policies(self):
    """Handle ListAttachedPolicies (pagination not yet implemented)."""
    principal = unquote(self._get_param("target"))
    # marker = self._get_param("marker")
    # page_size = self._get_int_param("pageSize")
    attached = self.iot_backend.list_attached_policies(target=principal)
    # TODO: implement pagination in the future
    return json.dumps(
        dict(policies=[policy.to_dict() for policy in attached], nextMarker=None)
    )
def attach_principal_policy(self): def attach_principal_policy(self):
policy_name = self._get_param("policyName") policy_name = self._get_param("policyName")
principal = self.headers.get("x-amzn-iot-principal") principal = self.headers.get("x-amzn-iot-principal")

View File

@ -1,8 +1,13 @@
import datetime
import json import json
import time
from boto3 import Session
from moto.core.exceptions import InvalidNextTokenException from moto.core.exceptions import InvalidNextTokenException
from moto.core.models import ConfigQueryModel from moto.core.models import ConfigQueryModel
from moto.s3 import s3_backends from moto.s3 import s3_backends
from moto.s3.models import get_moto_s3_account_id
class S3ConfigQuery(ConfigQueryModel): class S3ConfigQuery(ConfigQueryModel):
@ -118,4 +123,146 @@ class S3ConfigQuery(ConfigQueryModel):
return config_data return config_data
class S3AccountPublicAccessBlockConfigQuery(ConfigQueryModel):
    """AWS Config query adapter for the account-level S3 Public Access Block.

    The PAB is account-wide (region-less); Config still reports one
    configuration item per region, so region names double as pagination
    tokens for the aggregated listing.
    """

    def list_config_service_resources(
        self,
        resource_ids,
        resource_name,
        limit,
        next_token,
        backend_region=None,
        resource_region=None,
    ):
        # For the Account Public Access Block, they are the same for all regions. The resource ID is the AWS account ID
        # There is no resource name -- it should be a blank string "" if provided.

        # The resource name can only ever be None or an empty string:
        if resource_name is not None and resource_name != "":
            return [], None

        pab = None
        account_id = get_moto_s3_account_id()
        regions = list(Session().get_available_regions("config"))

        # If a resource ID was passed in, then filter accordingly:
        if resource_ids:
            # `resource_id` (not `id`) so the builtin is not shadowed.
            for resource_id in resource_ids:
                if account_id == resource_id:
                    pab = self.backends["global"].account_public_access_block
                    break

        # Otherwise, just grab the one from the backend:
        if not resource_ids:
            pab = self.backends["global"].account_public_access_block

        # If it's not present, then return nothing
        if not pab:
            return [], None

        # Filter on regions (and paginate on them as well):
        if backend_region:
            pab_list = [backend_region]
        elif resource_region:
            # Invalid region?
            if resource_region not in regions:
                return [], None
            pab_list = [resource_region]
        # Aggregated query where no regions were supplied so return them all:
        else:
            pab_list = regions

        # Pagination logic:
        sorted_regions = sorted(pab_list)
        new_token = None

        # Get the start:
        if not next_token:
            start = 0
        else:
            # Tokens for this moto feature is just the region-name:
            # For OTHER non-global resource types, it's the region concatenated with the resource ID.
            if next_token not in sorted_regions:
                raise InvalidNextTokenException()

            start = sorted_regions.index(next_token)

        # Get the list of items to collect:
        pab_list = sorted_regions[start : (start + limit)]

        if len(sorted_regions) > (start + limit):
            new_token = sorted_regions[start + limit]

        return (
            [
                {
                    "type": "AWS::S3::AccountPublicAccessBlock",
                    "id": account_id,
                    "region": region,
                }
                for region in pab_list
            ],
            new_token,
        )

    def get_config_resource(
        self, resource_id, resource_name=None, backend_region=None, resource_region=None
    ):
        """Return one PAB configuration item in AWS Config format, or None."""
        # Do we even have this defined?
        if not self.backends["global"].account_public_access_block:
            return None

        # Resource name can only ever be "" if it's supplied:
        if resource_name is not None and resource_name != "":
            return None

        # Are we filtering based on region?
        account_id = get_moto_s3_account_id()
        regions = list(Session().get_available_regions("config"))

        # Is the resource ID correct?:
        if account_id == resource_id:
            if backend_region:
                pab_region = backend_region

            # Invalid region?
            elif resource_region not in regions:
                return None

            else:
                pab_region = resource_region

        else:
            return None

        # Format the PAB to the AWS Config format:
        creation_time = datetime.datetime.utcnow()
        config_data = {
            "version": "1.3",
            "accountId": account_id,
            "configurationItemCaptureTime": str(creation_time),
            "configurationItemStatus": "OK",
            "configurationStateId": str(
                int(time.mktime(creation_time.timetuple()))
            ),  # PY2 and 3 compatible
            "resourceType": "AWS::S3::AccountPublicAccessBlock",
            "resourceId": account_id,
            "awsRegion": pab_region,
            "availabilityZone": "Not Applicable",
            "configuration": self.backends[
                "global"
            ].account_public_access_block.to_config_dict(),
            "supplementaryConfiguration": {},
        }

        # The 'configuration' field is also a JSON string:
        config_data["configuration"] = json.dumps(config_data["configuration"])

        return config_data
s3_config_query = S3ConfigQuery(s3_backends) s3_config_query = S3ConfigQuery(s3_backends)
s3_account_public_access_block_query = S3AccountPublicAccessBlockConfigQuery(
s3_backends
)

View File

@ -127,6 +127,18 @@ class InvalidRequest(S3ClientError):
) )
class IllegalLocationConstraintException(S3ClientError):
    """HTTP 400 raised when a CreateBucket request sent to a region-specific
    endpoint does not carry a compatible LocationConstraint."""

    code = 400

    def __init__(self, *args, **kwargs):
        super(IllegalLocationConstraintException, self).__init__(
            "IllegalLocationConstraintException",
            "The unspecified location constraint is incompatible for the region specific endpoint this request was sent to.",
            *args,
            **kwargs
        )
class MalformedXML(S3ClientError): class MalformedXML(S3ClientError):
code = 400 code = 400
@ -347,3 +359,12 @@ class InvalidPublicAccessBlockConfiguration(S3ClientError):
*args, *args,
**kwargs **kwargs
) )
class WrongPublicAccessBlockAccountIdError(S3ClientError):
    """HTTP 403 raised when the x-amz-account-id on an account-level
    PublicAccessBlock request does not match the Moto account id; surfaces
    as a plain AccessDenied, mirroring AWS."""

    code = 403

    def __init__(self):
        super(WrongPublicAccessBlockAccountIdError, self).__init__(
            "AccessDenied", "Access Denied"
        )

View File

@ -19,7 +19,7 @@ import uuid
import six import six
from bisect import insort from bisect import insort
from moto.core import BaseBackend, BaseModel from moto.core import ACCOUNT_ID, BaseBackend, BaseModel
from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime
from .exceptions import ( from .exceptions import (
BucketAlreadyExists, BucketAlreadyExists,
@ -37,6 +37,7 @@ from .exceptions import (
CrossLocationLoggingProhibitted, CrossLocationLoggingProhibitted,
NoSuchPublicAccessBlockConfiguration, NoSuchPublicAccessBlockConfiguration,
InvalidPublicAccessBlockConfiguration, InvalidPublicAccessBlockConfiguration,
WrongPublicAccessBlockAccountIdError,
) )
from .utils import clean_key_name, _VersionedKeyStore from .utils import clean_key_name, _VersionedKeyStore
@ -58,6 +59,13 @@ DEFAULT_TEXT_ENCODING = sys.getdefaultencoding()
OWNER = "75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a" OWNER = "75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a"
def get_moto_s3_account_id():
    """Return the fixed AWS account id used by Moto.

    This makes it easy for mocking AWS Account IDs when using AWS Config
    -- Simply mock.patch the ACCOUNT_ID here, and Config gets it for free.
    """
    return ACCOUNT_ID
class FakeDeleteMarker(BaseModel): class FakeDeleteMarker(BaseModel):
def __init__(self, key): def __init__(self, key):
self.key = key self.key = key
@ -1163,6 +1171,7 @@ class FakeBucket(BaseModel):
class S3Backend(BaseBackend): class S3Backend(BaseBackend):
def __init__(self): def __init__(self):
self.buckets = {} self.buckets = {}
self.account_public_access_block = None
def create_bucket(self, bucket_name, region_name): def create_bucket(self, bucket_name, region_name):
if bucket_name in self.buckets: if bucket_name in self.buckets:
@ -1264,6 +1273,16 @@ class S3Backend(BaseBackend):
return bucket.public_access_block return bucket.public_access_block
def get_account_public_access_block(self, account_id):
    """Return the account-level PublicAccessBlock configuration.

    Raises WrongPublicAccessBlockAccountIdError when *account_id* is not
    the Moto account id, and NoSuchPublicAccessBlockConfiguration when no
    configuration has been stored yet.
    """
    # The account ID should equal the account id that is set for Moto:
    if account_id != ACCOUNT_ID:
        raise WrongPublicAccessBlockAccountIdError()

    if not self.account_public_access_block:
        raise NoSuchPublicAccessBlockConfiguration()

    return self.account_public_access_block
def set_key( def set_key(
self, bucket_name, key_name, value, storage=None, etag=None, multipart=None self, bucket_name, key_name, value, storage=None, etag=None, multipart=None
): ):
@ -1356,6 +1375,13 @@ class S3Backend(BaseBackend):
bucket = self.get_bucket(bucket_name) bucket = self.get_bucket(bucket_name)
bucket.public_access_block = None bucket.public_access_block = None
def delete_account_public_access_block(self, account_id):
    """Remove the account-level PublicAccessBlock configuration.

    Raises WrongPublicAccessBlockAccountIdError when *account_id* does
    not match the Moto account id; deleting when none is set is a no-op.
    """
    # The account ID should equal the account id that is set for Moto:
    if account_id != ACCOUNT_ID:
        raise WrongPublicAccessBlockAccountIdError()

    self.account_public_access_block = None
def put_bucket_notification_configuration(self, bucket_name, notification_config): def put_bucket_notification_configuration(self, bucket_name, notification_config):
bucket = self.get_bucket(bucket_name) bucket = self.get_bucket(bucket_name)
bucket.set_notification_configuration(notification_config) bucket.set_notification_configuration(notification_config)
@ -1384,6 +1410,21 @@ class S3Backend(BaseBackend):
pub_block_config.get("RestrictPublicBuckets"), pub_block_config.get("RestrictPublicBuckets"),
) )
def put_account_public_access_block(self, account_id, pub_block_config):
    """Store the account-level PublicAccessBlock configuration.

    Raises WrongPublicAccessBlockAccountIdError for a foreign account id
    and InvalidPublicAccessBlockConfiguration when the config is empty.
    Missing flags are passed through as None to PublicAccessBlock.
    """
    # The account ID should equal the account id that is set for Moto:
    if account_id != ACCOUNT_ID:
        raise WrongPublicAccessBlockAccountIdError()

    if not pub_block_config:
        raise InvalidPublicAccessBlockConfiguration()

    self.account_public_access_block = PublicAccessBlock(
        pub_block_config.get("BlockPublicAcls"),
        pub_block_config.get("IgnorePublicAcls"),
        pub_block_config.get("BlockPublicPolicy"),
        pub_block_config.get("RestrictPublicBuckets"),
    )
def initiate_multipart(self, bucket_name, key_name, metadata): def initiate_multipart(self, bucket_name, key_name, metadata):
bucket = self.get_bucket(bucket_name) bucket = self.get_bucket(bucket_name)
new_multipart = FakeMultipart(key_name, metadata) new_multipart = FakeMultipart(key_name, metadata)

View File

@ -4,6 +4,7 @@ import re
import sys import sys
import six import six
from botocore.awsrequest import AWSPreparedRequest
from moto.core.utils import str_to_rfc_1123_datetime, py2_strip_unicode_keys from moto.core.utils import str_to_rfc_1123_datetime, py2_strip_unicode_keys
from six.moves.urllib.parse import parse_qs, urlparse, unquote from six.moves.urllib.parse import parse_qs, urlparse, unquote
@ -29,6 +30,7 @@ from .exceptions import (
InvalidPartOrder, InvalidPartOrder,
MalformedXML, MalformedXML,
MalformedACLError, MalformedACLError,
IllegalLocationConstraintException,
InvalidNotificationARN, InvalidNotificationARN,
InvalidNotificationEvent, InvalidNotificationEvent,
ObjectNotInActiveTierError, ObjectNotInActiveTierError,
@ -122,6 +124,11 @@ ACTION_MAP = {
"uploadId": "PutObject", "uploadId": "PutObject",
}, },
}, },
"CONTROL": {
"GET": {"publicAccessBlock": "GetPublicAccessBlock"},
"PUT": {"publicAccessBlock": "PutPublicAccessBlock"},
"DELETE": {"publicAccessBlock": "DeletePublicAccessBlock"},
},
} }
@ -167,7 +174,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
or host.startswith("localhost") or host.startswith("localhost")
or host.startswith("localstack") or host.startswith("localstack")
or re.match(r"^[^.]+$", host) or re.match(r"^[^.]+$", host)
or re.match(r"^.*\.svc\.cluster\.local$", host) or re.match(r"^.*\.svc\.cluster\.local:?\d*$", host)
): ):
# Default to path-based buckets for (1) localhost, (2) localstack hosts (e.g. localstack.dev), # Default to path-based buckets for (1) localhost, (2) localstack hosts (e.g. localstack.dev),
# (3) local host names that do not contain a "." (e.g., Docker container host names), or # (3) local host names that do not contain a "." (e.g., Docker container host names), or
@ -219,7 +226,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
# Depending on which calling format the client is using, we don't know # Depending on which calling format the client is using, we don't know
# if this is a bucket or key request so we have to check # if this is a bucket or key request so we have to check
if self.subdomain_based_buckets(request): if self.subdomain_based_buckets(request):
return self.key_response(request, full_url, headers) return self.key_or_control_response(request, full_url, headers)
else: else:
# Using path-based buckets # Using path-based buckets
return self.bucket_response(request, full_url, headers) return self.bucket_response(request, full_url, headers)
@ -286,7 +293,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
return self._bucket_response_post(request, body, bucket_name) return self._bucket_response_post(request, body, bucket_name)
else: else:
raise NotImplementedError( raise NotImplementedError(
"Method {0} has not been impelemented in the S3 backend yet".format( "Method {0} has not been implemented in the S3 backend yet".format(
method method
) )
) )
@ -585,6 +592,29 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
next_continuation_token = None next_continuation_token = None
return result_keys, is_truncated, next_continuation_token return result_keys, is_truncated, next_continuation_token
def _body_contains_location_constraint(self, body):
    """Return True when *body* is a CreateBucketConfiguration XML document
    that specifies a LocationConstraint element."""
    if not body:
        return False
    try:
        xmltodict.parse(body)["CreateBucketConfiguration"]["LocationConstraint"]
    except KeyError:
        return False
    return True
def _parse_pab_config(self, body):
    """Parse a PublicAccessBlockConfiguration XML body into a dict.

    Strips the XML namespace attribute and, on Python 2, converts the
    unicode keys to str so downstream code can use them uniformly.
    """
    parsed_xml = xmltodict.parse(body)
    parsed_xml["PublicAccessBlockConfiguration"].pop("@xmlns", None)

    # If Python 2, fix the unicode strings:
    if sys.version_info[0] < 3:
        parsed_xml = {
            "PublicAccessBlockConfiguration": py2_strip_unicode_keys(
                dict(parsed_xml["PublicAccessBlockConfiguration"])
            )
        }

    return parsed_xml
return parsed_xml
def _bucket_response_put( def _bucket_response_put(
self, request, body, region_name, bucket_name, querystring self, request, body, region_name, bucket_name, querystring
): ):
@ -663,27 +693,23 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
raise e raise e
elif "publicAccessBlock" in querystring: elif "publicAccessBlock" in querystring:
parsed_xml = xmltodict.parse(body) pab_config = self._parse_pab_config(body)
parsed_xml["PublicAccessBlockConfiguration"].pop("@xmlns", None)
# If Python 2, fix the unicode strings:
if sys.version_info[0] < 3:
parsed_xml = {
"PublicAccessBlockConfiguration": py2_strip_unicode_keys(
dict(parsed_xml["PublicAccessBlockConfiguration"])
)
}
self.backend.put_bucket_public_access_block( self.backend.put_bucket_public_access_block(
bucket_name, parsed_xml["PublicAccessBlockConfiguration"] bucket_name, pab_config["PublicAccessBlockConfiguration"]
) )
return "" return ""
else: else:
# us-east-1, the default AWS region behaves a bit differently
# - you should not use it as a location constraint --> it fails
# - querying the location constraint returns None
# - LocationConstraint has to be specified if outside us-east-1
if (
region_name != DEFAULT_REGION_NAME
and not self._body_contains_location_constraint(body)
):
raise IllegalLocationConstraintException()
if body: if body:
# us-east-1, the default AWS region behaves a bit differently
# - you should not use it as a location constraint --> it fails
# - querying the location constraint returns None
try: try:
forced_region = xmltodict.parse(body)["CreateBucketConfiguration"][ forced_region = xmltodict.parse(body)["CreateBucketConfiguration"][
"LocationConstraint" "LocationConstraint"
@ -854,15 +880,21 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
) )
return 206, response_headers, response_content[begin : end + 1] return 206, response_headers, response_content[begin : end + 1]
def key_response(self, request, full_url, headers): def key_or_control_response(self, request, full_url, headers):
# Key and Control are lumped in because splitting out the regex is too much of a pain :/
self.method = request.method self.method = request.method
self.path = self._get_path(request) self.path = self._get_path(request)
self.headers = request.headers self.headers = request.headers
if "host" not in self.headers: if "host" not in self.headers:
self.headers["host"] = urlparse(full_url).netloc self.headers["host"] = urlparse(full_url).netloc
response_headers = {} response_headers = {}
try: try:
response = self._key_response(request, full_url, headers) # Is this an S3 control response?
if isinstance(request, AWSPreparedRequest) and "s3-control" in request.url:
response = self._control_response(request, full_url, headers)
else:
response = self._key_response(request, full_url, headers)
except S3ClientError as s3error: except S3ClientError as s3error:
response = s3error.code, {}, s3error.description response = s3error.code, {}, s3error.description
@ -878,6 +910,94 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
) )
return status_code, response_headers, response_content return status_code, response_headers, response_content
def _control_response(self, request, full_url, headers):
parsed_url = urlparse(full_url)
query = parse_qs(parsed_url.query, keep_blank_values=True)
method = request.method
if hasattr(request, "body"):
# Boto
body = request.body
if hasattr(body, "read"):
body = body.read()
else:
# Flask server
body = request.data
if body is None:
body = b""
if method == "GET":
return self._control_response_get(request, query, headers)
elif method == "PUT":
return self._control_response_put(request, body, query, headers)
elif method == "DELETE":
return self._control_response_delete(request, query, headers)
else:
raise NotImplementedError(
"Method {0} has not been implemented in the S3 backend yet".format(
method
)
)
def _control_response_get(self, request, query, headers):
    """Handle a GET against the S3 Control API.

    Currently only the account public-access-block configuration is
    supported; any other action raises NotImplementedError.
    """
    # The action is the final path segment, with the query string removed.
    action = self.path.split("?")[0].split("/")[-1]
    self._set_action("CONTROL", "GET", action)
    self._authenticate_and_authorize_s3_action()

    response_headers = {}
    if "publicAccessBlock" not in action:
        raise NotImplementedError(
            "Method {0} has not been implemented in the S3 backend yet".format(action)
        )

    # The account id travels in a header, not in the URL.
    public_block_config = self.backend.get_account_public_access_block(
        headers["x-amz-account-id"]
    )
    template = self.response_template(S3_PUBLIC_ACCESS_BLOCK_CONFIGURATION)
    return (
        200,
        response_headers,
        template.render(public_block_config=public_block_config),
    )
def _control_response_put(self, request, body, query, headers):
    """Handle a PUT against the S3 Control API.

    Currently only storing the account public-access-block configuration
    is supported; any other action raises NotImplementedError.
    """
    # The action is the final path segment, with the query string removed.
    action = self.path.split("?")[0].split("/")[-1]
    self._set_action("CONTROL", "PUT", action)
    self._authenticate_and_authorize_s3_action()

    response_headers = {}
    if "publicAccessBlock" not in action:
        raise NotImplementedError(
            "Method {0} has not been implemented in the S3 backend yet".format(action)
        )

    # Parse the XML body into a config dict and persist it for the account.
    pab_config = self._parse_pab_config(body)
    self.backend.put_account_public_access_block(
        headers["x-amz-account-id"],
        pab_config["PublicAccessBlockConfiguration"],
    )
    return 200, response_headers, ""
def _control_response_delete(self, request, query, headers):
    """Handle a DELETE against the S3 Control API.

    Currently only deleting the account public-access-block configuration
    is supported; any other action raises NotImplementedError.
    """
    # The action is the final path segment, with the query string removed.
    action = self.path.split("?")[0].split("/")[-1]
    self._set_action("CONTROL", "DELETE", action)
    self._authenticate_and_authorize_s3_action()

    response_headers = {}
    if "publicAccessBlock" not in action:
        raise NotImplementedError(
            "Method {0} has not been implemented in the S3 backend yet".format(action)
        )

    self.backend.delete_account_public_access_block(headers["x-amz-account-id"])
    return 200, response_headers, ""
def _key_response(self, request, full_url, headers): def _key_response(self, request, full_url, headers):
parsed_url = urlparse(full_url) parsed_url = urlparse(full_url)
query = parse_qs(parsed_url.query, keep_blank_values=True) query = parse_qs(parsed_url.query, keep_blank_values=True)
@ -1082,6 +1202,10 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
if mdirective is not None and mdirective == "REPLACE": if mdirective is not None and mdirective == "REPLACE":
metadata = metadata_from_headers(request.headers) metadata = metadata_from_headers(request.headers)
new_key.set_metadata(metadata, replace=True) new_key.set_metadata(metadata, replace=True)
tdirective = request.headers.get("x-amz-tagging-directive")
if tdirective == "REPLACE":
tagging = self._tagging_from_headers(request.headers)
new_key.set_tagging(tagging)
template = self.response_template(S3_OBJECT_COPY_RESPONSE) template = self.response_template(S3_OBJECT_COPY_RESPONSE)
response_headers.update(new_key.response_dict) response_headers.update(new_key.response_dict)
return 200, response_headers, template.render(key=new_key) return 200, response_headers, template.render(key=new_key)

View File

@ -13,7 +13,7 @@ url_paths = {
# subdomain key of path-based bucket # subdomain key of path-based bucket
"{0}/(?P<key_or_bucket_name>[^/]+)/?$": S3ResponseInstance.ambiguous_response, "{0}/(?P<key_or_bucket_name>[^/]+)/?$": S3ResponseInstance.ambiguous_response,
# path-based bucket + key # path-based bucket + key
"{0}/(?P<bucket_name_path>[^/]+)/(?P<key_name>.+)": S3ResponseInstance.key_response, "{0}/(?P<bucket_name_path>[^/]+)/(?P<key_name>.+)": S3ResponseInstance.key_or_control_response,
# subdomain bucket + key with empty first part of path # subdomain bucket + key with empty first part of path
"{0}//(?P<key_name>.*)$": S3ResponseInstance.key_response, "{0}//(?P<key_name>.*)$": S3ResponseInstance.key_or_control_response,
} }

View File

@ -37,7 +37,7 @@ def bucket_name_from_url(url):
REGION_URL_REGEX = re.compile( REGION_URL_REGEX = re.compile(
r"^https?://(s3[-\.](?P<region1>.+)\.amazonaws\.com/(.+)|" r"^https?://(s3[-\.](?P<region1>.+)\.amazonaws\.com/(.+)|"
r"(.+)\.s3-(?P<region2>.+)\.amazonaws\.com)/?" r"(.+)\.s3[-\.](?P<region2>.+)\.amazonaws\.com)/?"
) )

View File

@ -127,6 +127,10 @@ class WorkflowExecution(BaseModel):
"executionInfo": self.to_medium_dict(), "executionInfo": self.to_medium_dict(),
"executionConfiguration": {"taskList": {"name": self.task_list}}, "executionConfiguration": {"taskList": {"name": self.task_list}},
} }
# info
if self.execution_status == "CLOSED":
hsh["executionInfo"]["closeStatus"] = self.close_status
hsh["executionInfo"]["closeTimestamp"] = self.close_timestamp
# configuration # configuration
for key in self._configuration_keys: for key in self._configuration_keys:
attr = camelcase_to_underscores(key) attr = camelcase_to_underscores(key)

View File

View File

@ -0,0 +1,62 @@
class TaggingService:
    """Generic, reusable store for AWS-style resource tags.

    Tags are kept per resource ARN as a plain ``{key: value}`` dict and
    rendered back into the ``[{Key: ..., Value: ...}]`` list shape that AWS
    APIs use.  The element names are configurable because AWS services
    disagree on casing (``Tags``/``tags``, ``Key``/``key``, ...).
    """

    def __init__(self, tagName="Tags", keyName="Key", valueName="Value"):
        self.tagName = tagName      # wrapper element name used in responses
        self.keyName = keyName      # name of the key field in a tag dict
        self.valueName = valueName  # name of the value field in a tag dict
        self.tags = {}              # arn -> {tag key: tag value}

    def list_tags_for_resource(self, arn):
        """Return *arn*'s tags in AWS list form; an empty list if untagged."""
        result = [
            {self.keyName: key, self.valueName: value}
            for key, value in self.tags.get(arn, {}).items()
        ]
        return {self.tagName: result}

    def delete_all_tags_for_resource(self, arn):
        """Remove every tag for *arn*; a no-op when the ARN has no tags.

        Fix: the original used ``del self.tags[arn]``, which raised
        ``KeyError`` for an untagged ARN — inconsistent with every other
        method here, which tolerates unknown ARNs.
        """
        self.tags.pop(arn, None)

    def has_tags(self, arn):
        """Return True if *arn* has ever been tagged and not fully deleted."""
        return arn in self.tags

    def tag_resource(self, arn, tags):
        """Add or overwrite tags on *arn*; a tag without a value stores None."""
        current = self.tags.setdefault(arn, {})
        for tag in tags:
            # .get() yields None when no value field is present, matching the
            # original if/else branches.
            current[tag[self.keyName]] = tag.get(self.valueName)

    def untag_resource_using_names(self, arn, tag_names):
        """Remove the named tags from *arn*, ignoring names that are absent."""
        current = self.tags.get(arn, {})
        for name in tag_names:
            current.pop(name, None)

    def untag_resource_using_tags(self, arn, tags):
        """Remove tags from *arn*; a tag that carries a value must match it."""
        current = self.tags.get(arn, {})
        for tag in tags:
            if self.keyName not in tag:
                continue
            key = tag[self.keyName]
            if key not in current:
                continue
            if self.valueName in tag and current[key] != tag[self.valueName]:
                # Both key and value supplied: only delete on an exact match.
                continue
            del current[key]

    def extract_tag_names(self, tags):
        """Return the key names present in *tags*, preserving order."""
        return [tag[self.keyName] for tag in tags if self.keyName in tag]

    def flatten_tag_list(self, tags):
        """Collapse an AWS tag list into a plain dict; missing values -> None."""
        return {tag[self.keyName]: tag.get(self.valueName) for tag in tags}

View File

@ -204,12 +204,7 @@ def test_create_resource():
root_resource["ResponseMetadata"].pop("HTTPHeaders", None) root_resource["ResponseMetadata"].pop("HTTPHeaders", None)
root_resource["ResponseMetadata"].pop("RetryAttempts", None) root_resource["ResponseMetadata"].pop("RetryAttempts", None)
root_resource.should.equal( root_resource.should.equal(
{ {"path": "/", "id": root_id, "ResponseMetadata": {"HTTPStatusCode": 200},}
"path": "/",
"id": root_id,
"ResponseMetadata": {"HTTPStatusCode": 200},
"resourceMethods": {"GET": {}},
}
) )
client.create_resource(restApiId=api_id, parentId=root_id, pathPart="users") client.create_resource(restApiId=api_id, parentId=root_id, pathPart="users")
@ -257,7 +252,6 @@ def test_child_resource():
"parentId": users_id, "parentId": users_id,
"id": tags_id, "id": tags_id,
"ResponseMetadata": {"HTTPStatusCode": 200}, "ResponseMetadata": {"HTTPStatusCode": 200},
"resourceMethods": {"GET": {}},
} }
) )
@ -286,6 +280,41 @@ def test_create_method():
{ {
"httpMethod": "GET", "httpMethod": "GET",
"authorizationType": "none", "authorizationType": "none",
"apiKeyRequired": False,
"ResponseMetadata": {"HTTPStatusCode": 200},
}
)
@mock_apigateway
def test_create_method_apikeyrequired():
    """PUT a method with apiKeyRequired=True and verify GET echoes it back."""
    client = boto3.client("apigateway", region_name="us-west-2")
    api_id = client.create_rest_api(name="my_api", description="this is my api")["id"]

    resources = client.get_resources(restApiId=api_id)
    root_id = next(
        resource["id"] for resource in resources["items"] if resource["path"] == "/"
    )

    client.put_method(
        restApiId=api_id,
        resourceId=root_id,
        httpMethod="GET",
        authorizationType="none",
        apiKeyRequired=True,
    )

    response = client.get_method(restApiId=api_id, resourceId=root_id, httpMethod="GET")
    # These vary per run, so drop them before comparing.
    response["ResponseMetadata"].pop("HTTPHeaders", None)
    response["ResponseMetadata"].pop("RetryAttempts", None)
    response.should.equal(
        {
            "httpMethod": "GET",
            "authorizationType": "none",
            "apiKeyRequired": True,
            "ResponseMetadata": {"HTTPStatusCode": 200},
        }
    )

View File

@ -86,14 +86,14 @@ def lambda_handler(event, context):
@mock_lambda @mock_lambda
def test_list_functions(): def test_list_functions():
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
result = conn.list_functions() result = conn.list_functions()
result["Functions"].should.have.length_of(0) result["Functions"].should.have.length_of(0)
@mock_lambda @mock_lambda
def test_invoke_requestresponse_function(): def test_invoke_requestresponse_function():
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
conn.create_function( conn.create_function(
FunctionName="testFunction", FunctionName="testFunction",
Runtime="python2.7", Runtime="python2.7",
@ -113,7 +113,7 @@ def test_invoke_requestresponse_function():
Payload=json.dumps(in_data), Payload=json.dumps(in_data),
) )
success_result["StatusCode"].should.equal(202) success_result["StatusCode"].should.equal(200)
result_obj = json.loads( result_obj = json.loads(
base64.b64decode(success_result["LogResult"]).decode("utf-8") base64.b64decode(success_result["LogResult"]).decode("utf-8")
) )
@ -150,7 +150,7 @@ def test_invoke_requestresponse_function_with_arn():
Payload=json.dumps(in_data), Payload=json.dumps(in_data),
) )
success_result["StatusCode"].should.equal(202) success_result["StatusCode"].should.equal(200)
result_obj = json.loads( result_obj = json.loads(
base64.b64decode(success_result["LogResult"]).decode("utf-8") base64.b64decode(success_result["LogResult"]).decode("utf-8")
) )
@ -163,7 +163,7 @@ def test_invoke_requestresponse_function_with_arn():
@mock_lambda @mock_lambda
def test_invoke_event_function(): def test_invoke_event_function():
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
conn.create_function( conn.create_function(
FunctionName="testFunction", FunctionName="testFunction",
Runtime="python2.7", Runtime="python2.7",
@ -188,16 +188,44 @@ def test_invoke_event_function():
json.loads(success_result["Payload"].read().decode("utf-8")).should.equal(in_data) json.loads(success_result["Payload"].read().decode("utf-8")).should.equal(in_data)
@mock_lambda
def test_invoke_dryrun_function():
    """A DryRun invocation returns 204 without executing the function."""
    conn = boto3.client("lambda", _lambda_region)
    conn.create_function(
        FunctionName="testFunction",
        Runtime="python2.7",
        Role=get_role_name(),
        Handler="lambda_function.lambda_handler",
        Code={"ZipFile": get_test_zip_file1()},
        Description="test lambda function",
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    # Invoking a nonexistent function is still an error, even as an Event.
    conn.invoke.when.called_with(
        FunctionName="notAFunction", InvocationType="Event", Payload="{}"
    ).should.throw(botocore.client.ClientError)

    payload = {"msg": "So long and thanks for all the fish"}
    dryrun_result = conn.invoke(
        FunctionName="testFunction",
        InvocationType="DryRun",
        Payload=json.dumps(payload),
    )
    dryrun_result["StatusCode"].should.equal(204)
if settings.TEST_SERVER_MODE: if settings.TEST_SERVER_MODE:
@mock_ec2 @mock_ec2
@mock_lambda @mock_lambda
def test_invoke_function_get_ec2_volume(): def test_invoke_function_get_ec2_volume():
conn = boto3.resource("ec2", "us-west-2") conn = boto3.resource("ec2", _lambda_region)
vol = conn.create_volume(Size=99, AvailabilityZone="us-west-2") vol = conn.create_volume(Size=99, AvailabilityZone=_lambda_region)
vol = conn.Volume(vol.id) vol = conn.Volume(vol.id)
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
conn.create_function( conn.create_function(
FunctionName="testFunction", FunctionName="testFunction",
Runtime="python3.7", Runtime="python3.7",
@ -216,7 +244,7 @@ if settings.TEST_SERVER_MODE:
InvocationType="RequestResponse", InvocationType="RequestResponse",
Payload=json.dumps(in_data), Payload=json.dumps(in_data),
) )
result["StatusCode"].should.equal(202) result["StatusCode"].should.equal(200)
actual_payload = json.loads(result["Payload"].read().decode("utf-8")) actual_payload = json.loads(result["Payload"].read().decode("utf-8"))
expected_payload = {"id": vol.id, "state": vol.state, "size": vol.size} expected_payload = {"id": vol.id, "state": vol.state, "size": vol.size}
actual_payload.should.equal(expected_payload) actual_payload.should.equal(expected_payload)
@ -227,14 +255,14 @@ if settings.TEST_SERVER_MODE:
@mock_ec2 @mock_ec2
@mock_lambda @mock_lambda
def test_invoke_function_from_sns(): def test_invoke_function_from_sns():
logs_conn = boto3.client("logs", region_name="us-west-2") logs_conn = boto3.client("logs", region_name=_lambda_region)
sns_conn = boto3.client("sns", region_name="us-west-2") sns_conn = boto3.client("sns", region_name=_lambda_region)
sns_conn.create_topic(Name="some-topic") sns_conn.create_topic(Name="some-topic")
topics_json = sns_conn.list_topics() topics_json = sns_conn.list_topics()
topics = topics_json["Topics"] topics = topics_json["Topics"]
topic_arn = topics[0]["TopicArn"] topic_arn = topics[0]["TopicArn"]
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
result = conn.create_function( result = conn.create_function(
FunctionName="testFunction", FunctionName="testFunction",
Runtime="python2.7", Runtime="python2.7",
@ -277,7 +305,7 @@ def test_invoke_function_from_sns():
@mock_lambda @mock_lambda
def test_create_based_on_s3_with_missing_bucket(): def test_create_based_on_s3_with_missing_bucket():
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
conn.create_function.when.called_with( conn.create_function.when.called_with(
FunctionName="testFunction", FunctionName="testFunction",
@ -297,12 +325,15 @@ def test_create_based_on_s3_with_missing_bucket():
@mock_s3 @mock_s3
@freeze_time("2015-01-01 00:00:00") @freeze_time("2015-01-01 00:00:00")
def test_create_function_from_aws_bucket(): def test_create_function_from_aws_bucket():
s3_conn = boto3.client("s3", "us-west-2") s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(Bucket="test-bucket") s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2() zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
result = conn.create_function( result = conn.create_function(
FunctionName="testFunction", FunctionName="testFunction",
@ -350,7 +381,7 @@ def test_create_function_from_aws_bucket():
@mock_lambda @mock_lambda
@freeze_time("2015-01-01 00:00:00") @freeze_time("2015-01-01 00:00:00")
def test_create_function_from_zipfile(): def test_create_function_from_zipfile():
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
zip_content = get_test_zip_file1() zip_content = get_test_zip_file1()
result = conn.create_function( result = conn.create_function(
FunctionName="testFunction", FunctionName="testFunction",
@ -395,12 +426,15 @@ def test_create_function_from_zipfile():
@mock_s3 @mock_s3
@freeze_time("2015-01-01 00:00:00") @freeze_time("2015-01-01 00:00:00")
def test_get_function(): def test_get_function():
s3_conn = boto3.client("s3", "us-west-2") s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(Bucket="test-bucket") s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file1() zip_content = get_test_zip_file1()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
conn.create_function( conn.create_function(
FunctionName="testFunction", FunctionName="testFunction",
@ -464,7 +498,10 @@ def test_get_function():
def test_get_function_by_arn(): def test_get_function_by_arn():
bucket_name = "test-bucket" bucket_name = "test-bucket"
s3_conn = boto3.client("s3", "us-east-1") s3_conn = boto3.client("s3", "us-east-1")
s3_conn.create_bucket(Bucket=bucket_name) s3_conn.create_bucket(
Bucket=bucket_name,
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2() zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket=bucket_name, Key="test.zip", Body=zip_content) s3_conn.put_object(Bucket=bucket_name, Key="test.zip", Body=zip_content)
@ -489,12 +526,15 @@ def test_get_function_by_arn():
@mock_lambda @mock_lambda
@mock_s3 @mock_s3
def test_delete_function(): def test_delete_function():
s3_conn = boto3.client("s3", "us-west-2") s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(Bucket="test-bucket") s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2() zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
conn.create_function( conn.create_function(
FunctionName="testFunction", FunctionName="testFunction",
@ -525,7 +565,10 @@ def test_delete_function():
def test_delete_function_by_arn(): def test_delete_function_by_arn():
bucket_name = "test-bucket" bucket_name = "test-bucket"
s3_conn = boto3.client("s3", "us-east-1") s3_conn = boto3.client("s3", "us-east-1")
s3_conn.create_bucket(Bucket=bucket_name) s3_conn.create_bucket(
Bucket=bucket_name,
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2() zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket=bucket_name, Key="test.zip", Body=zip_content) s3_conn.put_object(Bucket=bucket_name, Key="test.zip", Body=zip_content)
@ -550,7 +593,7 @@ def test_delete_function_by_arn():
@mock_lambda @mock_lambda
def test_delete_unknown_function(): def test_delete_unknown_function():
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
conn.delete_function.when.called_with( conn.delete_function.when.called_with(
FunctionName="testFunctionThatDoesntExist" FunctionName="testFunctionThatDoesntExist"
).should.throw(botocore.client.ClientError) ).should.throw(botocore.client.ClientError)
@ -559,12 +602,15 @@ def test_delete_unknown_function():
@mock_lambda @mock_lambda
@mock_s3 @mock_s3
def test_publish(): def test_publish():
s3_conn = boto3.client("s3", "us-west-2") s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(Bucket="test-bucket") s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2() zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
conn.create_function( conn.create_function(
FunctionName="testFunction", FunctionName="testFunction",
@ -609,12 +655,15 @@ def test_list_create_list_get_delete_list():
test `list -> create -> list -> get -> delete -> list` integration test `list -> create -> list -> get -> delete -> list` integration
""" """
s3_conn = boto3.client("s3", "us-west-2") s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(Bucket="test-bucket") s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2() zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
conn.list_functions()["Functions"].should.have.length_of(0) conn.list_functions()["Functions"].should.have.length_of(0)
@ -711,12 +760,15 @@ def test_tags():
""" """
test list_tags -> tag_resource -> list_tags -> tag_resource -> list_tags -> untag_resource -> list_tags integration test list_tags -> tag_resource -> list_tags -> tag_resource -> list_tags -> untag_resource -> list_tags integration
""" """
s3_conn = boto3.client("s3", "us-west-2") s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(Bucket="test-bucket") s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2() zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
function = conn.create_function( function = conn.create_function(
FunctionName="testFunction", FunctionName="testFunction",
@ -768,7 +820,7 @@ def test_tags_not_found():
""" """
Test list_tags and tag_resource when the lambda with the given arn does not exist Test list_tags and tag_resource when the lambda with the given arn does not exist
""" """
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
conn.list_tags.when.called_with( conn.list_tags.when.called_with(
Resource="arn:aws:lambda:{}:function:not-found".format(ACCOUNT_ID) Resource="arn:aws:lambda:{}:function:not-found".format(ACCOUNT_ID)
).should.throw(botocore.client.ClientError) ).should.throw(botocore.client.ClientError)
@ -786,7 +838,7 @@ def test_tags_not_found():
@mock_lambda @mock_lambda
def test_invoke_async_function(): def test_invoke_async_function():
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
conn.create_function( conn.create_function(
FunctionName="testFunction", FunctionName="testFunction",
Runtime="python2.7", Runtime="python2.7",
@ -809,7 +861,7 @@ def test_invoke_async_function():
@mock_lambda @mock_lambda
@freeze_time("2015-01-01 00:00:00") @freeze_time("2015-01-01 00:00:00")
def test_get_function_created_with_zipfile(): def test_get_function_created_with_zipfile():
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
zip_content = get_test_zip_file1() zip_content = get_test_zip_file1()
result = conn.create_function( result = conn.create_function(
FunctionName="testFunction", FunctionName="testFunction",
@ -855,7 +907,7 @@ def test_get_function_created_with_zipfile():
@mock_lambda @mock_lambda
def test_add_function_permission(): def test_add_function_permission():
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
zip_content = get_test_zip_file1() zip_content = get_test_zip_file1()
conn.create_function( conn.create_function(
FunctionName="testFunction", FunctionName="testFunction",
@ -886,7 +938,7 @@ def test_add_function_permission():
@mock_lambda @mock_lambda
def test_get_function_policy(): def test_get_function_policy():
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
zip_content = get_test_zip_file1() zip_content = get_test_zip_file1()
conn.create_function( conn.create_function(
FunctionName="testFunction", FunctionName="testFunction",
@ -921,12 +973,15 @@ def test_get_function_policy():
@mock_lambda @mock_lambda
@mock_s3 @mock_s3
def test_list_versions_by_function(): def test_list_versions_by_function():
s3_conn = boto3.client("s3", "us-west-2") s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(Bucket="test-bucket") s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2() zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
conn.create_function( conn.create_function(
FunctionName="testFunction", FunctionName="testFunction",
@ -977,12 +1032,15 @@ def test_list_versions_by_function():
@mock_lambda @mock_lambda
@mock_s3 @mock_s3
def test_create_function_with_already_exists(): def test_create_function_with_already_exists():
s3_conn = boto3.client("s3", "us-west-2") s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(Bucket="test-bucket") s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2() zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
conn.create_function( conn.create_function(
FunctionName="testFunction", FunctionName="testFunction",
@ -1014,7 +1072,7 @@ def test_create_function_with_already_exists():
@mock_lambda @mock_lambda
@mock_s3 @mock_s3
def test_list_versions_by_function_for_nonexistent_function(): def test_list_versions_by_function_for_nonexistent_function():
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
versions = conn.list_versions_by_function(FunctionName="testFunction") versions = conn.list_versions_by_function(FunctionName="testFunction")
assert len(versions["Versions"]) == 0 assert len(versions["Versions"]) == 0
@ -1363,12 +1421,15 @@ def test_delete_event_source_mapping():
@mock_lambda @mock_lambda
@mock_s3 @mock_s3
def test_update_configuration(): def test_update_configuration():
s3_conn = boto3.client("s3", "us-west-2") s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(Bucket="test-bucket") s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2() zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
fxn = conn.create_function( fxn = conn.create_function(
FunctionName="testFunction", FunctionName="testFunction",
@ -1411,7 +1472,7 @@ def test_update_configuration():
@mock_lambda @mock_lambda
def test_update_function_zip(): def test_update_function_zip():
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
zip_content_one = get_test_zip_file1() zip_content_one = get_test_zip_file1()
@ -1466,13 +1527,16 @@ def test_update_function_zip():
@mock_lambda @mock_lambda
@mock_s3 @mock_s3
def test_update_function_s3(): def test_update_function_s3():
s3_conn = boto3.client("s3", "us-west-2") s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(Bucket="test-bucket") s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file1() zip_content = get_test_zip_file1()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content) s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
fxn = conn.create_function( fxn = conn.create_function(
FunctionName="testFunctionS3", FunctionName="testFunctionS3",
@ -1553,7 +1617,7 @@ def test_create_function_with_unknown_arn():
def create_invalid_lambda(role): def create_invalid_lambda(role):
conn = boto3.client("lambda", "us-west-2") conn = boto3.client("lambda", _lambda_region)
zip_content = get_test_zip_file1() zip_content = get_test_zip_file1()
with assert_raises(ClientError) as err: with assert_raises(ClientError) as err:
conn.create_function( conn.create_function(
@ -1572,7 +1636,7 @@ def create_invalid_lambda(role):
def get_role_name(): def get_role_name():
with mock_iam(): with mock_iam():
iam = boto3.client("iam", region_name="us-west-2") iam = boto3.client("iam", region_name=_lambda_region)
try: try:
return iam.get_role(RoleName="my-role")["Role"]["Arn"] return iam.get_role(RoleName="my-role")["Role"]["Arn"]
except ClientError: except ClientError:

View File

@ -143,7 +143,7 @@ def test_create_stack_with_notification_arn():
@mock_s3_deprecated @mock_s3_deprecated
def test_create_stack_from_s3_url(): def test_create_stack_from_s3_url():
s3_conn = boto.s3.connect_to_region("us-west-1") s3_conn = boto.s3.connect_to_region("us-west-1")
bucket = s3_conn.create_bucket("foobar") bucket = s3_conn.create_bucket("foobar", location="us-west-1")
key = boto.s3.key.Key(bucket) key = boto.s3.key.Key(bucket)
key.key = "template-key" key.key = "template-key"
key.set_contents_from_string(dummy_template_json) key.set_contents_from_string(dummy_template_json)

View File

@ -27,6 +27,11 @@ def test_create_user_pool():
result["UserPool"]["Id"].should_not.be.none result["UserPool"]["Id"].should_not.be.none
result["UserPool"]["Id"].should.match(r"[\w-]+_[0-9a-zA-Z]+") result["UserPool"]["Id"].should.match(r"[\w-]+_[0-9a-zA-Z]+")
result["UserPool"]["Arn"].should.equal(
"arn:aws:cognito-idp:us-west-2:{}:userpool/{}".format(
ACCOUNT_ID, result["UserPool"]["Id"]
)
)
result["UserPool"]["Name"].should.equal(name) result["UserPool"]["Name"].should.equal(name)
result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value) result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value)
@ -911,6 +916,55 @@ def test_admin_create_existing_user():
caught.should.be.true caught.should.be.true
@mock_cognitoidp
def test_admin_resend_invitation_existing_user():
    """MessageAction=RESEND on an existing user must not raise UsernameExistsException."""
    conn = boto3.client("cognito-idp", "us-west-2")

    username = str(uuid.uuid4())
    value = str(uuid.uuid4())
    attrs = [{"Name": "thing", "Value": value}]
    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
    conn.admin_create_user(
        UserPoolId=user_pool_id, Username=username, UserAttributes=attrs
    )

    resend_failed = False
    try:
        conn.admin_create_user(
            UserPoolId=user_pool_id,
            Username=username,
            UserAttributes=attrs,
            MessageAction="RESEND",
        )
    except conn.exceptions.UsernameExistsException:
        resend_failed = True

    resend_failed.should.be.false
@mock_cognitoidp
def test_admin_resend_invitation_missing_user():
    """MessageAction=RESEND for an unknown user must raise UserNotFoundException."""
    conn = boto3.client("cognito-idp", "us-west-2")

    username = str(uuid.uuid4())
    value = str(uuid.uuid4())
    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]

    not_found = False
    try:
        conn.admin_create_user(
            UserPoolId=user_pool_id,
            Username=username,
            UserAttributes=[{"Name": "thing", "Value": value}],
            MessageAction="RESEND",
        )
    except conn.exceptions.UserNotFoundException:
        not_found = True

    not_found.should.be.true
@mock_cognitoidp @mock_cognitoidp
def test_admin_get_user(): def test_admin_get_user():
conn = boto3.client("cognito-idp", "us-west-2") conn = boto3.client("cognito-idp", "us-west-2")

View File

@ -46,4 +46,4 @@ def test_domain_dispatched_with_service():
dispatcher = DomainDispatcherApplication(create_backend_app, service="s3") dispatcher = DomainDispatcherApplication(create_backend_app, service="s3")
backend_app = dispatcher.get_application({"HTTP_HOST": "s3.us-east1.amazonaws.com"}) backend_app = dispatcher.get_application({"HTTP_HOST": "s3.us-east1.amazonaws.com"})
keys = set(backend_app.view_functions.keys()) keys = set(backend_app.view_functions.keys())
keys.should.contain("ResponseObject.key_response") keys.should.contain("ResponseObject.key_or_control_response")

View File

@ -3609,6 +3609,31 @@ def test_update_supports_list_append_maps():
) )
@mock_dynamodb2
def test_update_supports_list_append_with_nested_if_not_exists_operation():
    """list_append(if_not_exists(...)) must work on a nested map attribute."""
    resource = boto3.resource("dynamodb", region_name="us-west-1")
    table_name = "test"
    resource.create_table(
        TableName=table_name,
        AttributeDefinitions=[{"AttributeName": "Id", "AttributeType": "S"}],
        KeySchema=[{"AttributeName": "Id", "KeyType": "HASH"}],
        ProvisionedThroughput={"ReadCapacityUnits": 20, "WriteCapacityUnits": 20},
    )

    table = resource.Table(table_name)
    # Seed an item whose nested map does not yet contain event_history.
    table.put_item(Item={"Id": "item-id", "nest1": {"nest2": {}}})
    table.update_item(
        Key={"Id": "item-id"},
        UpdateExpression=(
            "SET nest1.nest2.event_history = list_append("
            "if_not_exists(nest1.nest2.event_history, :empty_list), :new_value)"
        ),
        ExpressionAttributeValues={":empty_list": [], ":new_value": ["some_value"]},
    )

    table.get_item(Key={"Id": "item-id"})["Item"].should.equal(
        {"Id": "item-id", "nest1": {"nest2": {"event_history": ["some_value"]}}}
    )
@mock_dynamodb2 @mock_dynamodb2
def test_update_catches_invalid_list_append_operation(): def test_update_catches_invalid_list_append_operation():
client = boto3.client("dynamodb", region_name="us-east-1") client = boto3.client("dynamodb", region_name="us-east-1")

View File

@ -12,6 +12,7 @@ import sure # noqa
from moto import mock_ec2_deprecated, mock_ec2 from moto import mock_ec2_deprecated, mock_ec2
from moto.ec2.models import AMIS, OWNER_ID from moto.ec2.models import AMIS, OWNER_ID
from moto.iam.models import ACCOUNT_ID
from tests.helpers import requires_boto_gte from tests.helpers import requires_boto_gte
@ -251,6 +252,19 @@ def test_ami_pulls_attributes_from_instance():
image.kernel_id.should.equal("test-kernel") image.kernel_id.should.equal("test-kernel")
@mock_ec2_deprecated
def test_ami_uses_account_id_if_valid_access_key_is_supplied():
    """An AMI created with a valid-looking access key is owned by ACCOUNT_ID.

    Uses the deprecated boto2 interface; the access key follows the real
    "AKIA..." prefix format so moto treats it as a valid credential.
    """
    access_key = "AKIAXXXXXXXXXXXXXXXX"
    conn = boto.connect_ec2(access_key, "the_secret")
    reservation = conn.run_instances("ami-1234abcd")
    instance = reservation.instances[0]
    instance.modify_attribute("kernel", "test-kernel")
    image_id = conn.create_image(instance.id, "test-ami", "this is a test ami")
    # owners=["self"] must resolve to the moto account and return our image.
    images = conn.get_all_images(owners=["self"])
    [(ami.id, ami.owner_id) for ami in images].should.equal([(image_id, ACCOUNT_ID)])
@mock_ec2_deprecated @mock_ec2_deprecated
def test_ami_filters(): def test_ami_filters():
conn = boto.connect_ec2("the_key", "the_secret") conn = boto.connect_ec2("the_key", "the_secret")
@ -773,7 +787,7 @@ def test_ami_filter_wildcard():
instance.create_image(Name="not-matching-image") instance.create_image(Name="not-matching-image")
my_images = ec2_client.describe_images( my_images = ec2_client.describe_images(
Owners=["111122223333"], Filters=[{"Name": "name", "Values": ["test*"]}] Owners=[ACCOUNT_ID], Filters=[{"Name": "name", "Values": ["test*"]}]
)["Images"] )["Images"]
my_images.should.have.length_of(1) my_images.should.have.length_of(1)

View File

@ -1,12 +1,15 @@
import random
import boto3
import json import json
import sure # noqa import random
import unittest
from moto.events import mock_events import boto3
from botocore.exceptions import ClientError from botocore.exceptions import ClientError
from nose.tools import assert_raises from nose.tools import assert_raises
from moto.core import ACCOUNT_ID from moto.core import ACCOUNT_ID
from moto.core.exceptions import JsonRESTError
from moto.events import mock_events
from moto.events.models import EventsBackend
RULES = [ RULES = [
{"Name": "test1", "ScheduleExpression": "rate(5 minutes)"}, {"Name": "test1", "ScheduleExpression": "rate(5 minutes)"},
@ -136,14 +139,6 @@ def test_list_rule_names_by_target():
assert rule in test_2_target["Rules"] assert rule in test_2_target["Rules"]
@mock_events
def test_list_rules():
client = generate_environment()
rules = client.list_rules()
assert len(rules["Rules"]) == len(RULES)
@mock_events @mock_events
def test_delete_rule(): def test_delete_rule():
client = generate_environment() client = generate_environment()
@ -461,3 +456,50 @@ def test_delete_event_bus_errors():
client.delete_event_bus.when.called_with(Name="default").should.throw( client.delete_event_bus.when.called_with(Name="default").should.throw(
ClientError, "Cannot delete event bus default." ClientError, "Cannot delete event bus default."
) )
@mock_events
def test_rule_tagging_happy():
    """Tags can be added to, listed on, and removed from an Events rule."""
    client = generate_environment()
    rule_name = get_random_rule()["Name"]
    rule_arn = client.describe_rule(Name=rule_name).get("Arn")

    tags = [{"Key": "key1", "Value": "value1"}, {"Key": "key2", "Value": "value2"}]
    client.tag_resource(ResourceARN=rule_arn, Tags=tags)
    actual = client.list_tags_for_resource(ResourceARN=rule_arn).get("Tags")

    # list_tags_for_resource gives no ordering guarantee.  Compare the two
    # tag lists order-independently via a sort on the tag key, instead of the
    # old `unittest.TestCase("__init__")` instantiation hack that enumerated
    # both permutations by hand.
    expected = [{"Key": "key1", "Value": "value1"}, {"Key": "key2", "Value": "value2"}]
    assert sorted(actual, key=lambda t: t["Key"]) == sorted(
        expected, key=lambda t: t["Key"]
    )

    # Untagging one key leaves exactly the other tag behind.
    client.untag_resource(ResourceARN=rule_arn, TagKeys=["key1"])
    actual = client.list_tags_for_resource(ResourceARN=rule_arn).get("Tags")
    assert actual == [{"Key": "key2", "Value": "value2"}]
@mock_events
def test_rule_tagging_sad():
    """Tag operations on an unknown ResourceARN must raise JsonRESTError.

    Bug fix: the original used ``raise "<message string>"``, which in
    Python 3 raises ``TypeError: exceptions must derive from BaseException``
    instead of failing with the intended message.  Raise AssertionError
    (not caught by ``except JsonRESTError``) so the test still fails loudly
    when the backend does not reject the unknown ARN.
    """
    back_end = EventsBackend("us-west-2")

    try:
        back_end.tag_resource("unknown", [])
        raise AssertionError("tag_resource should fail if ResourceARN is not known")
    except JsonRESTError:
        pass

    try:
        back_end.untag_resource("unknown", [])
        raise AssertionError("untag_resource should fail if ResourceARN is not known")
    except JsonRESTError:
        pass

    try:
        back_end.list_tags_for_resource("unknown")
        raise AssertionError(
            "list_tags_for_resource should fail if ResourceARN is not known"
        )
    except JsonRESTError:
        pass

View File

@ -9,6 +9,173 @@ from botocore.exceptions import ClientError
from nose.tools import assert_raises from nose.tools import assert_raises
@mock_iot
def test_attach_policy():
    """attach_policy binds a policy to a certificate target and it shows up
    in list_attached_policies for that target."""
    client = boto3.client("iot", region_name="ap-northeast-1")
    policy_name = "my-policy"
    doc = "{}"  # minimal (empty) policy document
    cert = client.create_keys_and_certificate(setAsActive=True)
    cert_arn = cert["certificateArn"]
    client.create_policy(policyName=policy_name, policyDocument=doc)
    client.attach_policy(policyName=policy_name, target=cert_arn)
    res = client.list_attached_policies(target=cert_arn)
    res.should.have.key("policies").which.should.have.length_of(1)
    res["policies"][0]["policyName"].should.equal("my-policy")
@mock_iot
def test_detach_policy():
    """detach_policy removes a previously attached policy from its target."""
    client = boto3.client("iot", region_name="ap-northeast-1")
    policy_name = "my-policy"
    doc = "{}"  # minimal (empty) policy document
    cert = client.create_keys_and_certificate(setAsActive=True)
    cert_arn = cert["certificateArn"]
    client.create_policy(policyName=policy_name, policyDocument=doc)
    client.attach_policy(policyName=policy_name, target=cert_arn)
    # Sanity check: the attachment is visible before detaching.
    res = client.list_attached_policies(target=cert_arn)
    res.should.have.key("policies").which.should.have.length_of(1)
    res["policies"][0]["policyName"].should.equal("my-policy")
    client.detach_policy(policyName=policy_name, target=cert_arn)
    # After detaching, the target has no attached policies left.
    res = client.list_attached_policies(target=cert_arn)
    res.should.have.key("policies").which.should.be.empty
@mock_iot
def test_list_attached_policies():
    """A freshly created certificate has no attached policies."""
    iot = boto3.client("iot", region_name="ap-northeast-1")
    new_cert = iot.create_keys_and_certificate(setAsActive=True)
    attached = iot.list_attached_policies(target=new_cert["certificateArn"])
    attached["policies"].should.be.empty
@mock_iot
def test_policy_versions():
    """End-to-end lifecycle of IoT policy versions.

    Exercises: create_policy (version "1"), create_policy_version with and
    without setAsDefault, get_policy / list_policy_versions reflecting the
    default version, set_default_policy_version switching it, and
    delete_policy_version — including the error when deleting the default.
    """
    client = boto3.client("iot", region_name="ap-northeast-1")
    policy_name = "my-policy"
    doc = "{}"
    # Initial creation yields version "1".
    policy = client.create_policy(policyName=policy_name, policyDocument=doc)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(json.dumps({}))
    policy.should.have.key("policyVersionId").which.should.equal("1")
    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(json.dumps({}))
    # NOTE(review): this compares the field to itself and can never fail;
    # it presumably meant to assert the id equals "1" — confirm intent.
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy["defaultVersionId"]
    )
    # Version "2", created as the new default.
    policy1 = client.create_policy_version(
        policyName=policy_name,
        policyDocument=json.dumps({"version": "version_1"}),
        setAsDefault=True,
    )
    policy1.should.have.key("policyArn").which.should_not.be.none
    policy1.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_1"})
    )
    policy1.should.have.key("policyVersionId").which.should.equal("2")
    policy1.should.have.key("isDefaultVersion").which.should.equal(True)
    # Version "3", explicitly NOT the default.
    policy2 = client.create_policy_version(
        policyName=policy_name,
        policyDocument=json.dumps({"version": "version_2"}),
        setAsDefault=False,
    )
    policy2.should.have.key("policyArn").which.should_not.be.none
    policy2.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_2"})
    )
    policy2.should.have.key("policyVersionId").which.should.equal("3")
    policy2.should.have.key("isDefaultVersion").which.should.equal(False)
    # get_policy must return the default version's document (version "2").
    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_1"})
    )
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy1["policyVersionId"]
    )
    # Exactly one of the three versions may be flagged as default.
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(3)
    list(
        map(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    ).count(True).should.equal(1)
    default_policy = list(
        filter(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    )
    default_policy[0].should.have.key("versionId").should.equal(
        policy1["policyVersionId"]
    )
    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_1"})
    )
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy1["policyVersionId"]
    )
    # Switch the default to version "3" and re-verify the invariants.
    client.set_default_policy_version(
        policyName=policy_name, policyVersionId=policy2["policyVersionId"]
    )
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(3)
    list(
        map(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    ).count(True).should.equal(1)
    default_policy = list(
        filter(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    )
    default_policy[0].should.have.key("versionId").should.equal(
        policy2["policyVersionId"]
    )
    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_2"})
    )
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy2["policyVersionId"]
    )
    # Non-default versions can be deleted one by one.
    client.delete_policy_version(policyName=policy_name, policyVersionId="1")
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(2)
    client.delete_policy_version(
        policyName=policy_name, policyVersionId=policy1["policyVersionId"]
    )
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(1)
    # Deleting the default version must fail; callers should use
    # delete_policy instead.
    try:
        client.delete_policy_version(
            policyName=policy_name, policyVersionId=policy2["policyVersionId"]
        )
        assert False, "Should have failed in previous call"
    except Exception as exception:
        exception.response["Error"]["Message"].should.equal(
            "Cannot delete the default version of a policy"
        )
@mock_iot @mock_iot
def test_things(): def test_things():
client = boto3.client("iot", region_name="ap-northeast-1") client = boto3.client("iot", region_name="ap-northeast-1")
@ -994,7 +1161,10 @@ def test_create_job():
client = boto3.client("iot", region_name="eu-west-1") client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing" name = "my-thing"
job_id = "TestJob" job_id = "TestJob"
# thing # thing# job document
# job_document = {
# "field": "value"
# }
thing = client.create_thing(thingName=name) thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name) thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn") thing.should.have.key("thingArn")
@ -1020,6 +1190,63 @@ def test_create_job():
job.should.have.key("description") job.should.have.key("description")
@mock_iot
def test_list_jobs():
    """list_jobs returns every created job, without pagination for two jobs."""
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")
    # job document
    job_document = {"field": "value"}
    job1 = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )
    job1.should.have.key("jobId").which.should.equal(job_id)
    job1.should.have.key("jobArn")
    job1.should.have.key("description")
    # A second job with a distinct id ("TestJob1").
    job2 = client.create_job(
        jobId=job_id + "1",
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )
    job2.should.have.key("jobId").which.should.equal(job_id + "1")
    job2.should.have.key("jobArn")
    job2.should.have.key("description")
    # Both jobs are listed in creation order and no nextToken is present.
    jobs = client.list_jobs()
    jobs.should.have.key("jobs")
    jobs.should_not.have.key("nextToken")
    jobs["jobs"][0].should.have.key("jobId").which.should.equal(job_id)
    jobs["jobs"][1].should.have.key("jobId").which.should.equal(job_id + "1")
@mock_iot @mock_iot
def test_describe_job(): def test_describe_job():
client = boto3.client("iot", region_name="eu-west-1") client = boto3.client("iot", region_name="eu-west-1")
@ -1124,3 +1351,387 @@ def test_describe_job_1():
job.should.have.key("job").which.should.have.key( job.should.have.key("job").which.should.have.key(
"jobExecutionsRolloutConfig" "jobExecutionsRolloutConfig"
).which.should.have.key("maximumPerMinute").which.should.equal(10) ).which.should.have.key("maximumPerMinute").which.should.equal(10)
@mock_iot
def test_delete_job():
    """delete_job removes the job so list_jobs comes back empty."""
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")
    # Job defined via a documentSource URL rather than an inline document.
    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )
    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    # The job is describable before deletion...
    job = client.describe_job(jobId=job_id)
    job.should.have.key("job")
    job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id)
    # ...and gone afterwards.
    client.delete_job(jobId=job_id)
    client.list_jobs()["jobs"].should.have.length_of(0)
@mock_iot
def test_cancel_job():
    """cancel_job marks the job CANCELED and records reasonCode/comment."""
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")
    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )
    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job = client.describe_job(jobId=job_id)
    job.should.have.key("job")
    job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id)
    # Cancel with an explicit reason code and comment.
    job = client.cancel_job(jobId=job_id, reasonCode="Because", comment="You are")
    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    # describe_job must now report the CANCELED state and the metadata
    # supplied to cancel_job (forceCanceled defaults to False).
    job = client.describe_job(jobId=job_id)
    job.should.have.key("job")
    job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("job").which.should.have.key("status").which.should.equal(
        "CANCELED"
    )
    job.should.have.key("job").which.should.have.key(
        "forceCanceled"
    ).which.should.equal(False)
    job.should.have.key("job").which.should.have.key("reasonCode").which.should.equal(
        "Because"
    )
    job.should.have.key("job").which.should.have.key("comment").which.should.equal(
        "You are"
    )
@mock_iot
def test_get_job_document_with_document_source():
    """For a job defined only by documentSource, get_job_document returns
    an empty document string (moto does not fetch the URL)."""
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")
    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )
    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job_document = client.get_job_document(jobId=job_id)
    job_document.should.have.key("document").which.should.equal("")
@mock_iot
def test_get_job_document_with_document():
    """For a job created with an inline document, get_job_document returns
    that document verbatim as a JSON string."""
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")
    # job document
    job_document = {"field": "value"}
    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )
    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job_document = client.get_job_document(jobId=job_id)
    job_document.should.have.key("document").which.should.equal('{"field": "value"}')
@mock_iot
def test_describe_job_execution():
    """describe_job_execution returns the execution both without and with an
    explicit executionNumber, and raises ResourceNotFoundException for an
    unknown executionNumber."""
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")
    # job document
    job_document = {"field": "value"}
    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )
    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job.should.have.key("description")
    # Lookup without executionNumber: the (single) execution is returned.
    # NOTE(review): 123 appears to be moto's fixed default for
    # executionNumber/versionNumber/approximateSecondsBeforeTimedOut.
    job_execution = client.describe_job_execution(jobId=job_id, thingName=name)
    job_execution.should.have.key("execution")
    job_execution["execution"].should.have.key("jobId").which.should.equal(job_id)
    job_execution["execution"].should.have.key("status").which.should.equal("QUEUED")
    job_execution["execution"].should.have.key("forceCanceled").which.should.equal(
        False
    )
    job_execution["execution"].should.have.key("statusDetails").which.should.equal(
        {"detailsMap": {}}
    )
    job_execution["execution"].should.have.key("thingArn").which.should.equal(
        thing["thingArn"]
    )
    job_execution["execution"].should.have.key("queuedAt")
    job_execution["execution"].should.have.key("startedAt")
    job_execution["execution"].should.have.key("lastUpdatedAt")
    job_execution["execution"].should.have.key("executionNumber").which.should.equal(
        123
    )
    job_execution["execution"].should.have.key("versionNumber").which.should.equal(123)
    job_execution["execution"].should.have.key(
        "approximateSecondsBeforeTimedOut"
    ).which.should.equal(123)
    # Lookup with the matching executionNumber yields the same execution.
    job_execution = client.describe_job_execution(
        jobId=job_id, thingName=name, executionNumber=123
    )
    job_execution.should.have.key("execution")
    job_execution["execution"].should.have.key("jobId").which.should.equal(job_id)
    job_execution["execution"].should.have.key("status").which.should.equal("QUEUED")
    job_execution["execution"].should.have.key("forceCanceled").which.should.equal(
        False
    )
    job_execution["execution"].should.have.key("statusDetails").which.should.equal(
        {"detailsMap": {}}
    )
    job_execution["execution"].should.have.key("thingArn").which.should.equal(
        thing["thingArn"]
    )
    job_execution["execution"].should.have.key("queuedAt")
    job_execution["execution"].should.have.key("startedAt")
    job_execution["execution"].should.have.key("lastUpdatedAt")
    job_execution["execution"].should.have.key("executionNumber").which.should.equal(
        123
    )
    job_execution["execution"].should.have.key("versionNumber").which.should.equal(123)
    job_execution["execution"].should.have.key(
        "approximateSecondsBeforeTimedOut"
    ).which.should.equal(123)
    # An unknown executionNumber must raise ResourceNotFoundException.
    try:
        client.describe_job_execution(jobId=job_id, thingName=name, executionNumber=456)
    except ClientError as exc:
        error_code = exc.response["Error"]["Code"]
        error_code.should.equal("ResourceNotFoundException")
    else:
        raise Exception("Should have raised error")
@mock_iot
def test_cancel_job_execution():
    """cancel_job_execution moves a thing's job execution to CANCELED."""
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")
    # job document
    job_document = {"field": "value"}
    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )
    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job.should.have.key("description")
    client.cancel_job_execution(jobId=job_id, thingName=name)
    # The execution status must reflect the cancellation.
    job_execution = client.describe_job_execution(jobId=job_id, thingName=name)
    job_execution.should.have.key("execution")
    job_execution["execution"].should.have.key("status").which.should.equal("CANCELED")
@mock_iot
def test_delete_job_execution():
    """delete_job_execution removes the execution; a subsequent describe
    must raise ResourceNotFoundException."""
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")
    # job document
    job_document = {"field": "value"}
    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )
    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job.should.have.key("description")
    # 123 matches the execution's default executionNumber in moto.
    client.delete_job_execution(jobId=job_id, thingName=name, executionNumber=123)
    try:
        client.describe_job_execution(jobId=job_id, thingName=name, executionNumber=123)
    except ClientError as exc:
        error_code = exc.response["Error"]["Code"]
        error_code.should.equal("ResourceNotFoundException")
    else:
        raise Exception("Should have raised error")
@mock_iot
def test_list_job_executions_for_job():
    """list_job_executions_for_job returns a summary per targeted thing."""
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")
    # job document
    job_document = {"field": "value"}
    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )
    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job.should.have.key("description")
    # The single targeted thing appears in the execution summaries.
    job_execution = client.list_job_executions_for_job(jobId=job_id)
    job_execution.should.have.key("executionSummaries")
    job_execution["executionSummaries"][0].should.have.key(
        "thingArn"
    ).which.should.equal(thing["thingArn"])
@mock_iot
def test_list_job_executions_for_thing():
    """list_job_executions_for_thing returns a summary per job targeting
    the thing."""
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")
    # job document
    job_document = {"field": "value"}
    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )
    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job.should.have.key("description")
    # The job targeting this thing appears in the execution summaries.
    job_execution = client.list_job_executions_for_thing(thingName=name)
    job_execution.should.have.key("executionSummaries")
    job_execution["executionSummaries"][0].should.have.key("jobId").which.should.equal(
        job_id
    )

View File

@ -21,7 +21,10 @@ def test_get_resources_s3():
# Create 4 buckets # Create 4 buckets
for i in range(1, 5): for i in range(1, 5):
i_str = str(i) i_str = str(i)
s3_client.create_bucket(Bucket="test_bucket" + i_str) s3_client.create_bucket(
Bucket="test_bucket" + i_str,
CreateBucketConfiguration={"LocationConstraint": "eu-central-1"},
)
s3_client.put_bucket_tagging( s3_client.put_bucket_tagging(
Bucket="test_bucket" + i_str, Bucket="test_bucket" + i_str,
Tagging={"TagSet": [{"Key": "key" + i_str, "Value": "value" + i_str}]}, Tagging={"TagSet": [{"Key": "key" + i_str, "Value": "value" + i_str}]},

File diff suppressed because it is too large Load Diff

View File

@ -16,7 +16,7 @@ from moto import mock_s3_deprecated, mock_s3
@mock_s3_deprecated @mock_s3_deprecated
def test_lifecycle_create(): def test_lifecycle_create():
conn = boto.s3.connect_to_region("us-west-1") conn = boto.s3.connect_to_region("us-west-1")
bucket = conn.create_bucket("foobar") bucket = conn.create_bucket("foobar", location="us-west-1")
lifecycle = Lifecycle() lifecycle = Lifecycle()
lifecycle.add_rule("myid", "", "Enabled", 30) lifecycle.add_rule("myid", "", "Enabled", 30)
@ -33,7 +33,9 @@ def test_lifecycle_create():
@mock_s3 @mock_s3
def test_lifecycle_with_filters(): def test_lifecycle_with_filters():
client = boto3.client("s3") client = boto3.client("s3")
client.create_bucket(Bucket="bucket") client.create_bucket(
Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
# Create a lifecycle rule with a Filter (no tags): # Create a lifecycle rule with a Filter (no tags):
lfc = { lfc = {
@ -245,7 +247,9 @@ def test_lifecycle_with_filters():
@mock_s3 @mock_s3
def test_lifecycle_with_eodm(): def test_lifecycle_with_eodm():
client = boto3.client("s3") client = boto3.client("s3")
client.create_bucket(Bucket="bucket") client.create_bucket(
Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
lfc = { lfc = {
"Rules": [ "Rules": [
@ -293,7 +297,9 @@ def test_lifecycle_with_eodm():
@mock_s3 @mock_s3
def test_lifecycle_with_nve(): def test_lifecycle_with_nve():
client = boto3.client("s3") client = boto3.client("s3")
client.create_bucket(Bucket="bucket") client.create_bucket(
Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
lfc = { lfc = {
"Rules": [ "Rules": [
@ -327,7 +333,9 @@ def test_lifecycle_with_nve():
@mock_s3 @mock_s3
def test_lifecycle_with_nvt(): def test_lifecycle_with_nvt():
client = boto3.client("s3") client = boto3.client("s3")
client.create_bucket(Bucket="bucket") client.create_bucket(
Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
lfc = { lfc = {
"Rules": [ "Rules": [
@ -393,7 +401,9 @@ def test_lifecycle_with_nvt():
@mock_s3 @mock_s3
def test_lifecycle_with_aimu(): def test_lifecycle_with_aimu():
client = boto3.client("s3") client = boto3.client("s3")
client.create_bucket(Bucket="bucket") client.create_bucket(
Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
lfc = { lfc = {
"Rules": [ "Rules": [
@ -432,7 +442,7 @@ def test_lifecycle_with_aimu():
@mock_s3_deprecated @mock_s3_deprecated
def test_lifecycle_with_glacier_transition(): def test_lifecycle_with_glacier_transition():
conn = boto.s3.connect_to_region("us-west-1") conn = boto.s3.connect_to_region("us-west-1")
bucket = conn.create_bucket("foobar") bucket = conn.create_bucket("foobar", location="us-west-1")
lifecycle = Lifecycle() lifecycle = Lifecycle()
transition = Transition(days=30, storage_class="GLACIER") transition = Transition(days=30, storage_class="GLACIER")
@ -451,7 +461,7 @@ def test_lifecycle_with_glacier_transition():
@mock_s3_deprecated @mock_s3_deprecated
def test_lifecycle_multi(): def test_lifecycle_multi():
conn = boto.s3.connect_to_region("us-west-1") conn = boto.s3.connect_to_region("us-west-1")
bucket = conn.create_bucket("foobar") bucket = conn.create_bucket("foobar", location="us-west-1")
date = "2022-10-12T00:00:00.000Z" date = "2022-10-12T00:00:00.000Z"
sc = "GLACIER" sc = "GLACIER"
@ -493,7 +503,7 @@ def test_lifecycle_multi():
@mock_s3_deprecated @mock_s3_deprecated
def test_lifecycle_delete(): def test_lifecycle_delete():
conn = boto.s3.connect_to_region("us-west-1") conn = boto.s3.connect_to_region("us-west-1")
bucket = conn.create_bucket("foobar") bucket = conn.create_bucket("foobar", location="us-west-1")
lifecycle = Lifecycle() lifecycle = Lifecycle()
lifecycle.add_rule(expiration=30) lifecycle.add_rule(expiration=30)

View File

@ -11,7 +11,7 @@ from moto import mock_s3
@mock_s3 @mock_s3
def test_s3_storage_class_standard(): def test_s3_storage_class_standard():
s3 = boto3.client("s3") s3 = boto3.client("s3", region_name="us-east-1")
s3.create_bucket(Bucket="Bucket") s3.create_bucket(Bucket="Bucket")
# add an object to the bucket with standard storage # add an object to the bucket with standard storage
@ -26,7 +26,9 @@ def test_s3_storage_class_standard():
@mock_s3 @mock_s3
def test_s3_storage_class_infrequent_access(): def test_s3_storage_class_infrequent_access():
s3 = boto3.client("s3") s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket") s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-2"}
)
# add an object to the bucket with standard storage # add an object to the bucket with standard storage
@ -46,7 +48,9 @@ def test_s3_storage_class_infrequent_access():
def test_s3_storage_class_intelligent_tiering(): def test_s3_storage_class_intelligent_tiering():
s3 = boto3.client("s3") s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket") s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-east-2"}
)
s3.put_object( s3.put_object(
Bucket="Bucket", Bucket="Bucket",
Key="my_key_infrequent", Key="my_key_infrequent",
@ -61,7 +65,7 @@ def test_s3_storage_class_intelligent_tiering():
@mock_s3 @mock_s3
def test_s3_storage_class_copy(): def test_s3_storage_class_copy():
s3 = boto3.client("s3") s3 = boto3.client("s3", region_name="us-east-1")
s3.create_bucket(Bucket="Bucket") s3.create_bucket(Bucket="Bucket")
s3.put_object( s3.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD" Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD"
@ -86,7 +90,7 @@ def test_s3_storage_class_copy():
@mock_s3 @mock_s3
def test_s3_invalid_copied_storage_class(): def test_s3_invalid_copied_storage_class():
s3 = boto3.client("s3") s3 = boto3.client("s3", region_name="us-east-1")
s3.create_bucket(Bucket="Bucket") s3.create_bucket(Bucket="Bucket")
s3.put_object( s3.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD" Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD"
@ -119,7 +123,9 @@ def test_s3_invalid_copied_storage_class():
@mock_s3 @mock_s3
def test_s3_invalid_storage_class(): def test_s3_invalid_storage_class():
s3 = boto3.client("s3") s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket") s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
# Try to add an object with an invalid storage class # Try to add an object with an invalid storage class
with assert_raises(ClientError) as err: with assert_raises(ClientError) as err:
@ -137,7 +143,9 @@ def test_s3_invalid_storage_class():
@mock_s3 @mock_s3
def test_s3_default_storage_class(): def test_s3_default_storage_class():
s3 = boto3.client("s3") s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket") s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body") s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body")
@ -150,7 +158,9 @@ def test_s3_default_storage_class():
@mock_s3 @mock_s3
def test_s3_copy_object_error_for_glacier_storage_class(): def test_s3_copy_object_error_for_glacier_storage_class():
s3 = boto3.client("s3") s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket") s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
s3.put_object( s3.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="GLACIER" Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="GLACIER"
@ -169,7 +179,9 @@ def test_s3_copy_object_error_for_glacier_storage_class():
@mock_s3 @mock_s3
def test_s3_copy_object_error_for_deep_archive_storage_class(): def test_s3_copy_object_error_for_deep_archive_storage_class():
s3 = boto3.client("s3") s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket") s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
s3.put_object( s3.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="DEEP_ARCHIVE" Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="DEEP_ARCHIVE"

View File

@ -148,6 +148,39 @@ def test_workflow_execution_full_dict_representation():
) )
def test_closed_workflow_execution_full_dict_representation():
    """A CLOSED execution's full dict must carry close status/timestamp and config.

    Builds a workflow execution, marks it closed/canceled, then checks that
    to_full_dict() mirrors to_medium_dict() (augmented with the close fields),
    reports zero open counts, and echoes the type's execution configuration.
    """
    workflow_type = WorkflowType(
        "test-workflow",
        "v1.0",
        task_list="queue",
        default_child_policy="ABANDON",
        default_execution_start_to_close_timeout="300",
        default_task_start_to_close_timeout="300",
    )
    execution = WorkflowExecution(get_basic_domain(), workflow_type, "ab1234")
    execution.execution_status = "CLOSED"
    execution.close_status = "CANCELED"
    execution.close_timestamp = 1420066801.123

    full = execution.to_full_dict()

    # executionInfo is the medium dict plus the close-related fields.
    expected_info = execution.to_medium_dict()
    expected_info["closeStatus"] = "CANCELED"
    expected_info["closeTimestamp"] = 1420066801.123
    full["executionInfo"].should.equal(expected_info)

    # A closed execution has nothing left open.
    for counter in ("openTimers", "openDecisionTasks", "openActivityTasks"):
        full["openCounts"][counter].should.equal(0)

    full["executionConfiguration"].should.equal(
        {
            "childPolicy": "ABANDON",
            "executionStartToCloseTimeout": "300",
            "taskList": {"name": "queue"},
            "taskStartToCloseTimeout": "300",
        }
    )
def test_workflow_execution_list_dict_representation(): def test_workflow_execution_list_dict_representation():
domain = get_basic_domain() domain = get_basic_domain()
wf_type = WorkflowType( wf_type = WorkflowType(

View File

@ -0,0 +1,79 @@
import sure
from moto.utilities.tagging_service import TaggingService
def test_list_empty():
    """Listing tags for a resource that was never tagged returns an empty list."""
    service = TaggingService()
    service.list_tags_for_resource("test").should.equal({"Tags": []})
def test_create_tag():
    """Custom container/key/value names passed to the constructor are honored."""
    service = TaggingService("TheTags", "TagKey", "TagValue")
    service.tag_resource("arn", [{"TagKey": "key_key", "TagValue": "value_value"}])
    listed = service.list_tags_for_resource("arn")
    listed.should.equal({"TheTags": [{"TagKey": "key_key", "TagValue": "value_value"}]})
def test_create_tag_without_value():
    """A tag supplied without a value is stored with Value set to None."""
    service = TaggingService()
    service.tag_resource("arn", [{"Key": "key_key"}])
    listed = service.list_tags_for_resource("arn")
    listed.should.equal({"Tags": [{"Key": "key_key", "Value": None}]})
def test_delete_tag_using_names():
    """Untagging by bare key name removes the matching tag."""
    service = TaggingService()
    service.tag_resource("arn", [{"Key": "key_key", "Value": "value_value"}])
    service.untag_resource_using_names("arn", ["key_key"])
    service.list_tags_for_resource("arn").should.equal({"Tags": []})
def test_delete_all_tags_for_resource():
    """delete_all_tags_for_resource wipes every tag, even from multiple tag calls."""
    service = TaggingService()
    service.tag_resource("arn", [{"Key": "key_key", "Value": "value_value"}])
    service.tag_resource("arn", [{"Key": "key_key2", "Value": "value_value2"}])
    service.delete_all_tags_for_resource("arn")
    service.list_tags_for_resource("arn").should.equal({"Tags": []})
def test_list_empty_delete():
    """Untagging a resource that was never tagged is a harmless no-op."""
    service = TaggingService()
    service.untag_resource_using_names("arn", ["key_key"])
    service.list_tags_for_resource("arn").should.equal({"Tags": []})
def test_delete_tag_using_tags():
    """Untagging with full key/value tag dicts removes the matching tag."""
    service = TaggingService()
    tags = [{"Key": "key_key", "Value": "value_value"}]
    service.tag_resource("arn", tags)
    service.untag_resource_using_tags("arn", tags)
    service.list_tags_for_resource("arn").should.equal({"Tags": []})
def test_extract_tag_names():
    """extract_tag_names pulls just the Key fields, preserving input order."""
    service = TaggingService()
    tags = [{"Key": "key1", "Value": "value1"}, {"Key": "key2", "Value": "value2"}]
    service.extract_tag_names(tags).should.equal(["key1", "key2"])