Merge branch 'master' into feature-apigw-authorizers

This commit is contained in:
Jon Beilke 2020-02-18 10:49:53 -06:00
commit 40208363be
72 changed files with 5444 additions and 2991 deletions

View File

@ -26,11 +26,12 @@ install:
fi
docker run --rm -t --name motoserver -e TEST_SERVER_MODE=true -e AWS_SECRET_ACCESS_KEY=server_secret -e AWS_ACCESS_KEY_ID=server_key -v `pwd`:/moto -p 5000:5000 -v /var/run/docker.sock:/var/run/docker.sock python:${PYTHON_DOCKER_TAG} /moto/travis_moto_server.sh &
fi
travis_retry pip install -r requirements-dev.txt
travis_retry pip install boto==2.45.0
travis_retry pip install boto3
travis_retry pip install dist/moto*.gz
travis_retry pip install coveralls==1.1
travis_retry pip install -r requirements-dev.txt
travis_retry pip install coverage==4.5.4
if [ "$TEST_SERVER_MODE" = "true" ]; then
python wait_for.py

View File

@ -450,6 +450,16 @@ boto3.resource(
)
```
### Caveats
The standalone server has some caveats with some services. The following services
require that you update your hosts file for your code to work properly:
1. `s3-control`
For the above services, this is required because the hostname is in the form of `AWS_ACCOUNT_ID.localhost`.
As a result, you need to add that entry to your hosts file for your tests to function properly.
## Install

View File

@ -56,9 +56,10 @@ author = 'Steve Pulec'
# built documents.
#
# The short X.Y version.
version = '0.4.10'
import moto
version = moto.__version__
# The full version, including alpha/beta/rc tags.
release = '0.4.10'
release = moto.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.

View File

@ -24,8 +24,7 @@ For example, we have the following code we want to test:
.. sourcecode:: python
import boto
from boto.s3.key import Key
import boto3
class MyModel(object):
def __init__(self, name, value):
@ -33,11 +32,8 @@ For example, we have the following code we want to test:
self.value = value
def save(self):
conn = boto.connect_s3()
bucket = conn.get_bucket('mybucket')
k = Key(bucket)
k.key = self.name
k.set_contents_from_string(self.value)
s3 = boto3.client('s3', region_name='us-east-1')
s3.put_object(Bucket='mybucket', Key=self.name, Body=self.value)
There are several ways to do this, but you should keep in mind that Moto creates a full, blank environment.
@ -48,20 +44,23 @@ With a decorator wrapping, all the calls to S3 are automatically mocked out.
.. sourcecode:: python
import boto
import boto3
from moto import mock_s3
from mymodule import MyModel
@mock_s3
def test_my_model_save():
conn = boto.connect_s3()
conn = boto3.resource('s3', region_name='us-east-1')
# We need to create the bucket since this is all in Moto's 'virtual' AWS account
conn.create_bucket('mybucket')
conn.create_bucket(Bucket='mybucket')
model_instance = MyModel('steve', 'is awesome')
model_instance.save()
assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome'
body = conn.Object('mybucket', 'steve').get()[
'Body'].read().decode("utf-8")
assert body == 'is awesome'
Context manager
~~~~~~~~~~~~~~~
@ -72,13 +71,16 @@ Same as the Decorator, every call inside the ``with`` statement is mocked out.
def test_my_model_save():
with mock_s3():
conn = boto.connect_s3()
conn.create_bucket('mybucket')
conn = boto3.resource('s3', region_name='us-east-1')
conn.create_bucket(Bucket='mybucket')
model_instance = MyModel('steve', 'is awesome')
model_instance.save()
assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome'
body = conn.Object('mybucket', 'steve').get()[
'Body'].read().decode("utf-8")
assert body == 'is awesome'
Raw
~~~
@ -91,13 +93,16 @@ You can also start and stop the mocking manually.
mock = mock_s3()
mock.start()
conn = boto.connect_s3()
conn.create_bucket('mybucket')
conn = boto3.resource('s3', region_name='us-east-1')
conn.create_bucket(Bucket='mybucket')
model_instance = MyModel('steve', 'is awesome')
model_instance.save()
assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome'
body = conn.Object('mybucket', 'steve').get()[
'Body'].read().decode("utf-8")
assert body == 'is awesome'
mock.stop()

View File

@ -84,14 +84,14 @@ class MethodResponse(BaseModel, dict):
class Method(BaseModel, dict):
def __init__(self, method_type, authorization_type):
def __init__(self, method_type, authorization_type, **kwargs):
super(Method, self).__init__()
self.update(
dict(
httpMethod=method_type,
authorizationType=authorization_type,
authorizerId=None,
apiKeyRequired=None,
apiKeyRequired=kwargs.get("api_key_required") or False,
requestParameters=None,
requestModels=None,
methodIntegration=None,
@ -118,14 +118,15 @@ class Resource(BaseModel):
self.api_id = api_id
self.path_part = path_part
self.parent_id = parent_id
self.resource_methods = {"GET": {}}
self.resource_methods = {}
def to_dict(self):
response = {
"path": self.get_path(),
"id": self.id,
"resourceMethods": self.resource_methods,
}
if self.resource_methods:
response["resourceMethods"] = self.resource_methods
if self.parent_id:
response["parentId"] = self.parent_id
response["pathPart"] = self.path_part
@ -159,8 +160,12 @@ class Resource(BaseModel):
)
return response.status_code, response.text
def add_method(self, method_type, authorization_type):
method = Method(method_type=method_type, authorization_type=authorization_type)
def add_method(self, method_type, authorization_type, api_key_required):
method = Method(
method_type=method_type,
authorization_type=authorization_type,
api_key_required=api_key_required,
)
self.resource_methods[method_type] = method
return method
@ -675,9 +680,18 @@ class APIGatewayBackend(BaseBackend):
resource = self.get_resource(function_id, resource_id)
return resource.get_method(method_type)
def create_method(self, function_id, resource_id, method_type, authorization_type):
def create_method(
self,
function_id,
resource_id,
method_type,
authorization_type,
api_key_required=None,
):
resource = self.get_resource(function_id, resource_id)
method = resource.add_method(method_type, authorization_type)
method = resource.add_method(
method_type, authorization_type, api_key_required=api_key_required
)
return method
def get_authorizer(self, restapi_id, authorizer_id):

View File

@ -147,8 +147,13 @@ class APIGatewayResponse(BaseResponse):
return 200, {}, json.dumps(method)
elif self.method == "PUT":
authorization_type = self._get_param("authorizationType")
api_key_required = self._get_param("apiKeyRequired")
method = self.backend.create_method(
function_id, resource_id, method_type, authorization_type
function_id,
resource_id,
method_type,
authorization_type,
api_key_required,
)
return 200, {}, json.dumps(method)

View File

@ -184,7 +184,13 @@ class LambdaResponse(BaseResponse):
function_name, qualifier, self.body, self.headers, response_headers
)
if payload:
return 202, response_headers, payload
if request.headers["X-Amz-Invocation-Type"] == "Event":
status_code = 202
elif request.headers["X-Amz-Invocation-Type"] == "DryRun":
status_code = 204
else:
status_code = 200
return status_code, response_headers, payload
else:
return 404, response_headers, "{}"

View File

@ -14,6 +14,7 @@ from jose import jws
from moto.compat import OrderedDict
from moto.core import BaseBackend, BaseModel
from moto.core import ACCOUNT_ID as DEFAULT_ACCOUNT_ID
from .exceptions import (
GroupExistsException,
NotAuthorizedError,
@ -69,6 +70,9 @@ class CognitoIdpUserPool(BaseModel):
def __init__(self, region, name, extended_config):
self.region = region
self.id = "{}_{}".format(self.region, str(uuid.uuid4().hex))
self.arn = "arn:aws:cognito-idp:{}:{}:userpool/{}".format(
self.region, DEFAULT_ACCOUNT_ID, self.id
)
self.name = name
self.status = None
self.extended_config = extended_config or {}
@ -91,6 +95,7 @@ class CognitoIdpUserPool(BaseModel):
def _base_json(self):
return {
"Id": self.id,
"Arn": self.arn,
"Name": self.name,
"Status": self.status,
"CreationDate": time.mktime(self.creation_date.timetuple()),
@ -564,12 +569,17 @@ class CognitoIdpBackend(BaseBackend):
user.groups.discard(group)
# User
def admin_create_user(self, user_pool_id, username, temporary_password, attributes):
def admin_create_user(
self, user_pool_id, username, message_action, temporary_password, attributes
):
user_pool = self.user_pools.get(user_pool_id)
if not user_pool:
raise ResourceNotFoundError(user_pool_id)
if username in user_pool.users:
if message_action and message_action == "RESEND":
if username not in user_pool.users:
raise UserNotFoundError(username)
elif username in user_pool.users:
raise UsernameExistsException(username)
user = CognitoIdpUser(

View File

@ -259,10 +259,12 @@ class CognitoIdpResponse(BaseResponse):
def admin_create_user(self):
user_pool_id = self._get_param("UserPoolId")
username = self._get_param("Username")
message_action = self._get_param("MessageAction")
temporary_password = self._get_param("TemporaryPassword")
user = cognitoidp_backends[self.region].admin_create_user(
user_pool_id,
username,
message_action,
temporary_password,
self._get_param("UserAttributes", []),
)

View File

@ -43,7 +43,7 @@ from moto.config.exceptions import (
)
from moto.core import BaseBackend, BaseModel
from moto.s3.config import s3_config_query
from moto.s3.config import s3_account_public_access_block_query, s3_config_query
from moto.core import ACCOUNT_ID as DEFAULT_ACCOUNT_ID
@ -58,7 +58,10 @@ POP_STRINGS = [
DEFAULT_PAGE_SIZE = 100
# Map the Config resource type to a backend:
RESOURCE_MAP = {"AWS::S3::Bucket": s3_config_query}
RESOURCE_MAP = {
"AWS::S3::Bucket": s3_config_query,
"AWS::S3::AccountPublicAccessBlock": s3_account_public_access_block_query,
}
def datetime2int(date):
@ -867,16 +870,17 @@ class ConfigBackend(BaseBackend):
backend_region=backend_query_region,
)
result = {
"resourceIdentifiers": [
{
"resourceType": identifier["type"],
"resourceId": identifier["id"],
"resourceName": identifier["name"],
}
for identifier in identifiers
]
}
resource_identifiers = []
for identifier in identifiers:
item = {"resourceType": identifier["type"], "resourceId": identifier["id"]}
# Some resource types lack names:
if identifier.get("name"):
item["resourceName"] = identifier["name"]
resource_identifiers.append(item)
result = {"resourceIdentifiers": resource_identifiers}
if new_token:
result["nextToken"] = new_token
@ -927,18 +931,21 @@ class ConfigBackend(BaseBackend):
resource_region=resource_region,
)
result = {
"ResourceIdentifiers": [
{
"SourceAccountId": DEFAULT_ACCOUNT_ID,
"SourceRegion": identifier["region"],
"ResourceType": identifier["type"],
"ResourceId": identifier["id"],
"ResourceName": identifier["name"],
}
for identifier in identifiers
]
}
resource_identifiers = []
for identifier in identifiers:
item = {
"SourceAccountId": DEFAULT_ACCOUNT_ID,
"SourceRegion": identifier["region"],
"ResourceType": identifier["type"],
"ResourceId": identifier["id"],
}
if identifier.get("name"):
item["ResourceName"] = identifier["name"]
resource_identifiers.append(item)
result = {"ResourceIdentifiers": resource_identifiers}
if new_token:
result["NextToken"] = new_token

View File

@ -606,12 +606,13 @@ class ConfigQueryModel(object):
As such, the proper way to implement is to first obtain a full list of results from all the region backends, and then filter
from there. It may be valuable to make this a concatenation of the region and resource name.
:param resource_region:
:param resource_ids:
:param resource_name:
:param limit:
:param next_token:
:param resource_ids: A list of resource IDs
:param resource_name: The individual name of a resource
:param limit: How many per page
:param next_token: The pagination token marking where the next page of results starts
:param backend_region: The region for the backend to pull results from. Set to `None` if this is an aggregated query.
:param resource_region: The region for where the resources reside to pull results from. Set to `None` if this is a
non-aggregated query.
:return: This should return a list of Dicts that have the following fields:
[
{

View File

@ -448,13 +448,18 @@ class Item(BaseModel):
if list_append_re:
new_value = expression_attribute_values[list_append_re.group(2).strip()]
old_list_key = list_append_re.group(1)
# Get the existing value
old_list = self.attrs[old_list_key.split(".")[0]]
if "." in old_list_key:
# Value is nested inside a map - find the appropriate child attr
old_list = old_list.child_attr(
".".join(old_list_key.split(".")[1:])
# old_key could be a function itself (if_not_exists)
if old_list_key.startswith("if_not_exists"):
old_list = DynamoType(
expression_attribute_values[self._get_default(old_list_key)]
)
else:
old_list = self.attrs[old_list_key.split(".")[0]]
if "." in old_list_key:
# Value is nested inside a map - find the appropriate child attr
old_list = old_list.child_attr(
".".join(old_list_key.split(".")[1:])
)
if not old_list.is_list():
raise ParamValidationError
old_list.value.extend([DynamoType(v) for v in new_value["L"]])

View File

@ -27,6 +27,7 @@ from moto.core.utils import (
iso_8601_datetime_with_milliseconds,
camelcase_to_underscores,
)
from moto.iam.models import ACCOUNT_ID
from .exceptions import (
CidrLimitExceeded,
DependencyViolationError,
@ -155,7 +156,7 @@ AMIS = _load_resource(
)
OWNER_ID = "111122223333"
OWNER_ID = ACCOUNT_ID
def utc_date_and_time():
@ -1341,7 +1342,7 @@ class AmiBackend(object):
source_ami=None,
name=name,
description=description,
owner_id=context.get_current_user() if context else OWNER_ID,
owner_id=OWNER_ID,
)
self.amis[ami_id] = ami
return ami
@ -1392,14 +1393,7 @@ class AmiBackend(object):
# Limit by owner ids
if owners:
# support filtering by Owners=['self']
owners = list(
map(
lambda o: context.get_current_user()
if context and o == "self"
else o,
owners,
)
)
owners = list(map(lambda o: OWNER_ID if o == "self" else o, owners,))
images = [ami for ami in images if ami.owner_id in owners]
# Generic filters

View File

@ -6,6 +6,7 @@ from boto3 import Session
from moto.core.exceptions import JsonRESTError
from moto.core import BaseBackend, BaseModel
from moto.sts.models import ACCOUNT_ID
from moto.utilities.tagging_service import TaggingService
class Rule(BaseModel):
@ -104,6 +105,7 @@ class EventsBackend(BaseBackend):
self.region_name = region_name
self.event_buses = {}
self.event_sources = {}
self.tagger = TaggingService()
self._add_default_event_bus()
@ -141,6 +143,9 @@ class EventsBackend(BaseBackend):
def delete_rule(self, name):
    """Remove the named rule and clean up any tags stored against its ARN.

    NOTE(review): an unknown *name* raises ValueError (from .index) or
    AttributeError (from .get(name).arn) rather than a not-found error —
    confirm callers always validate the name first.
    """
    self.rules_order.pop(self.rules_order.index(name))
    arn = self.rules.get(name).arn
    # Delete tags first so the tagger does not keep entries for a dead ARN.
    if self.tagger.has_tags(arn):
        self.tagger.delete_all_tags_for_resource(arn)
    return self.rules.pop(name) is not None
def describe_rule(self, name):
@ -361,6 +366,32 @@ class EventsBackend(BaseBackend):
self.event_buses.pop(name, None)
def list_tags_for_resource(self, arn):
    """Return the tags attached to a rule ARN.

    Only rules are taggable in this backend; the rule name is taken from
    the last path segment of *arn*.  Raises a ResourceNotFoundException
    JSON error for unknown rules.
    """
    name = arn.split("/")[-1]
    if name in self.rules:
        return self.tagger.list_tags_for_resource(self.rules[name].arn)
    raise JsonRESTError(
        "ResourceNotFoundException", "An entity that you specified does not exist."
    )
def tag_resource(self, arn, tags):
    """Attach *tags* (list of {Key, Value} dicts) to a rule ARN.

    Returns an empty dict on success; raises a ResourceNotFoundException
    JSON error when the rule does not exist.
    """
    name = arn.split("/")[-1]
    if name in self.rules:
        self.tagger.tag_resource(self.rules[name].arn, tags)
        return {}
    raise JsonRESTError(
        "ResourceNotFoundException", "An entity that you specified does not exist."
    )
def untag_resource(self, arn, tag_names):
    """Remove the tags named in *tag_names* from a rule ARN.

    Returns an empty dict on success; raises a ResourceNotFoundException
    JSON error when the rule does not exist.
    """
    name = arn.split("/")[-1]
    if name in self.rules:
        self.tagger.untag_resource_using_names(self.rules[name].arn, tag_names)
        return {}
    raise JsonRESTError(
        "ResourceNotFoundException", "An entity that you specified does not exist."
    )
events_backends = {}
for region in Session().get_available_regions("events"):

View File

@ -297,3 +297,26 @@ class EventsHandler(BaseResponse):
self.events_backend.delete_event_bus(name)
return "", self.response_headers
def list_tags_for_resource(self):
    """Handle the ListTagsForResource API for EventBridge rules."""
    arn = self._get_param("ResourceARN")
    result = self.events_backend.list_tags_for_resource(arn)
    return json.dumps(result), self.response_headers
def tag_resource(self):
    """Handle the TagResource API for EventBridge rules."""
    arn = self._get_param("ResourceARN")
    tags = self._get_param("Tags")
    result = self.events_backend.tag_resource(arn, tags)
    return json.dumps(result), self.response_headers
def untag_resource(self):
    """Handle the UntagResource API for EventBridge rules."""
    arn = self._get_param("ResourceARN")
    # The request field is "TagKeys": a list of tag key names to remove.
    tags = self._get_param("TagKeys")
    result = self.events_backend.untag_resource(arn, tags)
    return json.dumps(result), self.response_headers

View File

@ -22,6 +22,15 @@ class InvalidRequestException(IoTClientError):
)
class InvalidStateTransitionException(IoTClientError):
    """HTTP 409 — a job or job execution cannot move to the requested state."""

    def __init__(self, msg=None):
        self.code = 409
        super(InvalidStateTransitionException, self).__init__(
            "InvalidStateTransitionException",
            msg or "An attempt was made to change to an invalid state.",
        )
class VersionConflictException(IoTClientError):
def __init__(self, name):
self.code = 409

View File

@ -17,6 +17,7 @@ from .exceptions import (
DeleteConflictException,
ResourceNotFoundException,
InvalidRequestException,
InvalidStateTransitionException,
VersionConflictException,
)
@ -29,7 +30,7 @@ class FakeThing(BaseModel):
self.attributes = attributes
self.arn = "arn:aws:iot:%s:1:thing/%s" % (self.region_name, thing_name)
self.version = 1
# TODO: we need to handle 'version'?
# TODO: we need to handle "version"?
# for iot-data
self.thing_shadow = None
@ -174,18 +175,19 @@ class FakeCertificate(BaseModel):
class FakePolicy(BaseModel):
def __init__(self, name, document, region_name):
def __init__(self, name, document, region_name, default_version_id="1"):
self.name = name
self.document = document
self.arn = "arn:aws:iot:%s:1:policy/%s" % (region_name, name)
self.version = "1" # TODO: handle version
self.default_version_id = default_version_id
self.versions = [FakePolicyVersion(self.name, document, True, region_name)]
def to_get_dict(self):
return {
"policyName": self.name,
"policyArn": self.arn,
"policyDocument": self.document,
"defaultVersionId": self.version,
"defaultVersionId": self.default_version_id,
}
def to_dict_at_creation(self):
@ -193,13 +195,52 @@ class FakePolicy(BaseModel):
"policyName": self.name,
"policyArn": self.arn,
"policyDocument": self.document,
"policyVersionId": self.version,
"policyVersionId": self.default_version_id,
}
def to_dict(self):
return {"policyName": self.name, "policyArn": self.arn}
class FakePolicyVersion(object):
    """In-memory model of one version of an IoT policy.

    The three to_*_dict helpers mirror the wire shapes of the IoT
    policy-version APIs (GetPolicyVersion, CreatePolicyVersion,
    ListPolicyVersions respectively).
    """

    def __init__(self, policy_name, document, is_default, region_name):
        # A version shares its parent policy's name and ARN.
        self.name = policy_name
        self.arn = "arn:aws:iot:%s:1:policy/%s" % (region_name, policy_name)
        self.document = document or {}
        self.is_default = is_default
        # NOTE(review): always starts at "1"; the backend overwrites this with
        # the real 1-based sequence number after appending to policy.versions.
        self.version_id = "1"
        # Fixed timestamps keep mocked responses deterministic for tests.
        self.create_datetime = time.mktime(datetime(2015, 1, 1).timetuple())
        self.last_modified_datetime = time.mktime(datetime(2015, 1, 2).timetuple())

    def to_get_dict(self):
        """Full GetPolicyVersion response shape."""
        return {
            "policyName": self.name,
            "policyArn": self.arn,
            "policyDocument": self.document,
            "policyVersionId": self.version_id,
            "isDefaultVersion": self.is_default,
            "creationDate": self.create_datetime,
            "lastModifiedDate": self.last_modified_datetime,
            # generationId is faked as the version id.
            "generationId": self.version_id,
        }

    def to_dict_at_creation(self):
        """Shape returned by CreatePolicyVersion."""
        return {
            "policyArn": self.arn,
            "policyDocument": self.document,
            "policyVersionId": self.version_id,
            "isDefaultVersion": self.is_default,
        }

    def to_dict(self):
        """Summary shape used by ListPolicyVersions."""
        return {
            "versionId": self.version_id,
            "isDefaultVersion": self.is_default,
            "createDate": self.create_datetime,
        }
class FakeJob(BaseModel):
JOB_ID_REGEX_PATTERN = "[a-zA-Z0-9_-]"
JOB_ID_REGEX = re.compile(JOB_ID_REGEX_PATTERN)
@ -226,12 +267,14 @@ class FakeJob(BaseModel):
self.targets = targets
self.document_source = document_source
self.document = document
self.force = False
self.description = description
self.presigned_url_config = presigned_url_config
self.target_selection = target_selection
self.job_executions_rollout_config = job_executions_rollout_config
self.status = None # IN_PROGRESS | CANCELED | COMPLETED
self.status = "QUEUED" # IN_PROGRESS | CANCELED | COMPLETED
self.comment = None
self.reason_code = None
self.created_at = time.mktime(datetime(2015, 1, 1).timetuple())
self.last_updated_at = time.mktime(datetime(2015, 1, 1).timetuple())
self.completed_at = None
@ -258,9 +301,11 @@ class FakeJob(BaseModel):
"jobExecutionsRolloutConfig": self.job_executions_rollout_config,
"status": self.status,
"comment": self.comment,
"forceCanceled": self.force,
"reasonCode": self.reason_code,
"createdAt": self.created_at,
"lastUpdatedAt": self.last_updated_at,
"completedAt": self.completedAt,
"completedAt": self.completed_at,
"jobProcessDetails": self.job_process_details,
"documentParameters": self.document_parameters,
"document": self.document,
@ -275,12 +320,67 @@ class FakeJob(BaseModel):
return regex_match and length_match
class FakeJobExecution(BaseModel):
    """In-memory model of one execution of an IoT job on a single thing.

    to_get_dict() is the DescribeJobExecution shape; to_dict() is the
    summary shape consumed by ListJobExecutionsForJob/-Thing.
    """

    def __init__(
        self,
        job_id,
        thing_arn,
        status="QUEUED",
        force_canceled=False,
        status_details_map=None,
    ):
        self.job_id = job_id
        self.status = status  # QUEUED | IN_PROGRESS | CANCELED | COMPLETED
        self.force_canceled = force_canceled
        # BUG FIX: the default used to be a shared mutable dict ({}), so every
        # execution created without an explicit map aliased the same object —
        # mutating one execution's details mutated them all.
        self.status_details_map = (
            {} if status_details_map is None else status_details_map
        )
        self.thing_arn = thing_arn
        # Fixed timestamps/counters keep the mocked responses deterministic.
        self.queued_at = time.mktime(datetime(2015, 1, 1).timetuple())
        self.started_at = time.mktime(datetime(2015, 1, 1).timetuple())
        self.last_updated_at = time.mktime(datetime(2015, 1, 1).timetuple())
        self.execution_number = 123
        self.version_number = 123
        self.approximate_seconds_before_time_out = 123

    def to_get_dict(self):
        """Full DescribeJobExecution response shape."""
        return {
            "jobId": self.job_id,
            "status": self.status,
            "forceCanceled": self.force_canceled,
            "statusDetails": {"detailsMap": self.status_details_map},
            "thingArn": self.thing_arn,
            "queuedAt": self.queued_at,
            "startedAt": self.started_at,
            "lastUpdatedAt": self.last_updated_at,
            "executionNumber": self.execution_number,
            "versionNumber": self.version_number,
            "approximateSecondsBeforeTimedOut": self.approximate_seconds_before_time_out,
        }

    def to_dict(self):
        """Summary shape for the List* APIs; status is nested in
        "jobExecutionSummary", not at the top level."""
        return {
            "jobId": self.job_id,
            "thingArn": self.thing_arn,
            "jobExecutionSummary": {
                "status": self.status,
                "queuedAt": self.queued_at,
                "startedAt": self.started_at,
                "lastUpdatedAt": self.last_updated_at,
                "executionNumber": self.execution_number,
            },
        }
class IoTBackend(BaseBackend):
def __init__(self, region_name=None):
super(IoTBackend, self).__init__()
self.region_name = region_name
self.things = OrderedDict()
self.jobs = OrderedDict()
self.job_executions = OrderedDict()
self.thing_types = OrderedDict()
self.thing_groups = OrderedDict()
self.certificates = OrderedDict()
@ -535,6 +635,28 @@ class IoTBackend(BaseBackend):
self.policies[policy.name] = policy
return policy
def attach_policy(self, policy_name, target):
    """Attach the named policy to *target* (a principal ARN).

    Both lookups may raise ResourceNotFoundException.  Attaching an
    already-attached pair is a silent no-op.
    """
    principal = self._get_principal(target)
    policy = self.get_policy(policy_name)
    k = (target, policy_name)
    if k in self.principal_policies:
        return
    self.principal_policies[k] = (principal, policy)
def detach_policy(self, policy_name, target):
    """Detach the named policy from *target* (a principal ARN).

    Raises ResourceNotFoundException when the principal, the policy, or
    the attachment itself does not exist.
    """
    # Both of these may raise ResourceNotFoundException.
    self._get_principal(target)
    self.get_policy(policy_name)
    k = (target, policy_name)
    if k not in self.principal_policies:
        raise ResourceNotFoundException()
    del self.principal_policies[k]
def list_attached_policies(self, target):
    """Return every FakePolicy currently attached to *target*.

    principal_policies is keyed by (target, policy_name) with
    (principal, policy) values; collect the policy of each matching key.
    """
    attached = []
    for (principal_arn, _), (_, policy) in self.principal_policies.items():
        if principal_arn == target:
            attached.append(policy)
    return attached
def list_policies(self):
policies = self.policies.values()
return policies
@ -559,6 +681,60 @@ class IoTBackend(BaseBackend):
policy = self.get_policy(policy_name)
del self.policies[policy.name]
def create_policy_version(self, policy_name, policy_document, set_as_default):
    """Append a new version to an existing policy.

    Raises ResourceNotFoundException when the policy is unknown.  The new
    version id is the 1-based position in the policy's versions list.
    """
    policy = self.get_policy(policy_name)
    if not policy:
        raise ResourceNotFoundException()
    version = FakePolicyVersion(
        policy_name, policy_document, set_as_default, self.region_name
    )
    policy.versions.append(version)
    # Overwrite the placeholder "1" with the real sequence number.
    version.version_id = "{0}".format(len(policy.versions))
    if set_as_default:
        self.set_default_policy_version(policy_name, version.version_id)
    return version
def set_default_policy_version(self, policy_name, version_id):
    """Mark *version_id* as the policy's default and sync the policy document.

    NOTE(review): when *version_id* matches no stored version, this silently
    clears the default flag on every version instead of raising — confirm
    whether that is intended before relying on it.
    """
    policy = self.get_policy(policy_name)
    if not policy:
        raise ResourceNotFoundException()
    for version in policy.versions:
        if version.version_id == version_id:
            version.is_default = True
            policy.default_version_id = version.version_id
            # The policy-level document mirrors the default version's document.
            policy.document = version.document
        else:
            version.is_default = False
def get_policy_version(self, policy_name, version_id):
    """Return the FakePolicyVersion matching *version_id*.

    Raises ResourceNotFoundException when either the policy or the
    requested version does not exist.
    """
    policy = self.get_policy(policy_name)
    if not policy:
        raise ResourceNotFoundException()
    matching = [v for v in policy.versions if v.version_id == version_id]
    if not matching:
        raise ResourceNotFoundException()
    return matching[0]
def list_policy_versions(self, policy_name):
    """Return the live list of FakePolicyVersion objects for a policy.

    Raises ResourceNotFoundException for an unknown policy.  Note the
    actual list is returned (not a copy), so callers share its identity.
    """
    policy = self.get_policy(policy_name)
    if not policy:
        raise ResourceNotFoundException()
    return policy.versions
def delete_policy_version(self, policy_name, version_id):
    """Delete a non-default version of a policy.

    Raises InvalidRequestException when asked to delete the default
    version, and ResourceNotFoundException when the policy or the version
    does not exist.
    """
    policy = self.get_policy(policy_name)
    if not policy:
        raise ResourceNotFoundException()
    if version_id == policy.default_version_id:
        raise InvalidRequestException(
            "Cannot delete the default version of a policy"
        )
    for i, v in enumerate(policy.versions):
        if v.version_id == version_id:
            del policy.versions[i]
            return
    raise ResourceNotFoundException()
def _get_principal(self, principal_arn):
"""
raise ResourceNotFoundException
@ -574,14 +750,6 @@ class IoTBackend(BaseBackend):
pass
raise ResourceNotFoundException()
def attach_policy(self, policy_name, target):
principal = self._get_principal(target)
policy = self.get_policy(policy_name)
k = (target, policy_name)
if k in self.principal_policies:
return
self.principal_policies[k] = (principal, policy)
def attach_principal_policy(self, policy_name, principal_arn):
principal = self._get_principal(principal_arn)
policy = self.get_policy(policy_name)
@ -590,15 +758,6 @@ class IoTBackend(BaseBackend):
return
self.principal_policies[k] = (principal, policy)
def detach_policy(self, policy_name, target):
# this may raises ResourceNotFoundException
self._get_principal(target)
self.get_policy(policy_name)
k = (target, policy_name)
if k not in self.principal_policies:
raise ResourceNotFoundException()
del self.principal_policies[k]
def detach_principal_policy(self, policy_name, principal_arn):
# this may raises ResourceNotFoundException
self._get_principal(principal_arn)
@ -819,11 +978,187 @@ class IoTBackend(BaseBackend):
self.region_name,
)
self.jobs[job_id] = job
for thing_arn in targets:
thing_name = thing_arn.split(":")[-1].split("/")[-1]
job_execution = FakeJobExecution(job_id, thing_arn)
self.job_executions[(job_id, thing_name)] = job_execution
return job.job_arn, job_id, description
def describe_job(self, job_id):
jobs = [_ for _ in self.jobs.values() if _.job_id == job_id]
if len(jobs) == 0:
raise ResourceNotFoundException()
return jobs[0]
def delete_job(self, job_id, force):
    """Delete a job; an IN_PROGRESS job can only be deleted with force=True.

    NOTE(review): an unknown *job_id* raises KeyError rather than
    ResourceNotFoundException, and associated entries in
    self.job_executions are not cleaned up here — confirm both are intended.
    """
    job = self.jobs[job_id]
    if job.status == "IN_PROGRESS" and force:
        del self.jobs[job_id]
    elif job.status != "IN_PROGRESS":
        del self.jobs[job_id]
    else:
        raise InvalidStateTransitionException()
def cancel_job(self, job_id, reason_code, comment, force):
    """Cancel a job and return the updated FakeJob.

    An IN_PROGRESS job may only be canceled with force=True; otherwise
    InvalidStateTransitionException is raised.  reason_code, comment and
    force only overwrite the stored values when explicitly supplied.

    BUG FIX: the previous implementation assigned job.status = "CANCELED"
    *before* inspecting the status, so the IN_PROGRESS guard always saw
    "CANCELED", the exception was unreachable, and the two assignment
    branches were redundant no-ops (self.jobs[job_id] already held job).
    """
    job = self.jobs[job_id]
    # Validate the state transition before mutating anything.
    if job.status == "IN_PROGRESS" and not force:
        raise InvalidStateTransitionException()
    job.reason_code = reason_code if reason_code is not None else job.reason_code
    job.comment = comment if comment is not None else job.comment
    job.force = force if force is not None and force != job.force else job.force
    job.status = "CANCELED"
    return job
def get_job_document(self, job_id):
    """Return the whole FakeJob for *job_id* (despite the name — the
    response layer extracts .document).  Unknown ids raise KeyError."""
    return self.jobs[job_id]
def list_jobs(
    self,
    status,
    target_selection,
    max_results,
    token,
    thing_group_name,
    thing_group_id,
):
    """Return (jobs, next_token): a page of job dicts plus a pagination token.

    *token* is a numeric string offset into the full job list; next_token
    is None when no further page exists.  The status/target/thing-group
    filters are accepted but not applied yet.
    """
    # TODO: implement filters
    all_jobs = [_.to_dict() for _ in self.jobs.values()]
    filtered_jobs = all_jobs
    if token is None:
        jobs = filtered_jobs[0:max_results]
        next_token = str(max_results) if len(filtered_jobs) > max_results else None
    else:
        # NOTE(review): a non-numeric token raises ValueError here.
        token = int(token)
        jobs = filtered_jobs[token : token + max_results]
        next_token = (
            str(token + max_results)
            if len(filtered_jobs) > token + max_results
            else None
        )
    return jobs, next_token
def describe_job_execution(self, job_id, thing_name, execution_number):
    """Look up one job execution by its (job_id, thing_name) key.

    When *execution_number* is supplied it must match the stored
    execution's number; any miss raises ResourceNotFoundException.
    """
    try:
        job_execution = self.job_executions[(job_id, thing_name)]
    except KeyError:
        raise ResourceNotFoundException()
    if job_execution is None or (
        execution_number is not None
        and job_execution.execution_number != execution_number
    ):
        raise ResourceNotFoundException()
    return job_execution
def cancel_job_execution(
    self, job_id, thing_name, force, expected_version, status_details
):
    """Cancel one job execution; IN_PROGRESS requires force=True.

    NOTE(review): an unknown (job_id, thing_name) key raises KeyError here,
    not ResourceNotFoundException — the None check below never fires.
    """
    job_execution = self.job_executions[(job_id, thing_name)]
    if job_execution is None:
        raise ResourceNotFoundException()
    # Only overwrite the stored flag when force was explicitly supplied.
    job_execution.force_canceled = (
        force if force is not None else job_execution.force_canceled
    )
    # TODO: implement expected_version and status_details (at most 10 can be specified)
    if job_execution.status == "IN_PROGRESS" and force:
        job_execution.status = "CANCELED"
        self.job_executions[(job_id, thing_name)] = job_execution
    elif job_execution.status != "IN_PROGRESS":
        job_execution.status = "CANCELED"
        self.job_executions[(job_id, thing_name)] = job_execution
    else:
        raise InvalidStateTransitionException()
def delete_job_execution(self, job_id, thing_name, execution_number, force):
    """Delete one job execution; IN_PROGRESS requires force=True.

    *execution_number* must exactly match the stored execution's number or
    ResourceNotFoundException is raised.  Unknown keys raise KeyError.
    """
    job_execution = self.job_executions[(job_id, thing_name)]
    if job_execution.execution_number != execution_number:
        raise ResourceNotFoundException()
    if job_execution.status == "IN_PROGRESS" and force:
        del self.job_executions[(job_id, thing_name)]
    elif job_execution.status != "IN_PROGRESS":
        del self.job_executions[(job_id, thing_name)]
    else:
        raise InvalidStateTransitionException()
def list_job_executions_for_job(self, job_id, status, max_results, next_token):
    """Return (summaries, next_token) for executions of *job_id*.

    BUG FIX (filter): the status filter used to read elem["status"] from the
    to_dict() summaries, but to_dict() nests status under
    "jobExecutionSummary", so any status filter raised KeyError.  Filter on
    the execution objects before serializing instead.
    BUG FIX (paging): on the first page the old code sliced the list before
    comparing its length to max_results, so next_token was always None even
    when more results existed.
    """
    executions = [
        exe
        for key, exe in self.job_executions.items()
        if key[0] == job_id and (status is None or exe.status == status)
    ]
    summaries = [exe.to_dict() for exe in executions]
    start = 0 if next_token is None else int(next_token)
    page = summaries[start : start + max_results]
    new_token = (
        str(start + max_results) if len(summaries) > start + max_results else None
    )
    return page, new_token
def list_job_executions_for_thing(
    self, thing_name, status, max_results, next_token
):
    """Return (summaries, next_token) for executions on *thing_name*.

    BUG FIX (filter): the status filter used to read elem["status"] from the
    to_dict() summaries, but to_dict() nests status under
    "jobExecutionSummary", so any status filter raised KeyError.  Filter on
    the execution objects before serializing instead.
    BUG FIX (paging): on the first page the old code sliced the list before
    comparing its length to max_results, so next_token was always None even
    when more results existed.
    """
    executions = [
        exe
        for key, exe in self.job_executions.items()
        if key[1] == thing_name and (status is None or exe.status == status)
    ]
    summaries = [exe.to_dict() for exe in executions]
    start = 0 if next_token is None else int(next_token)
    page = summaries[start : start + max_results]
    new_token = (
        str(start + max_results) if len(summaries) > start + max_results else None
    )
    return page, new_token
iot_backends = {}
for region in Session().get_available_regions("iot"):

View File

@ -1,6 +1,7 @@
from __future__ import unicode_literals
import json
from six.moves.urllib.parse import unquote
from moto.core.responses import BaseResponse
from .models import iot_backends
@ -141,6 +142,8 @@ class IoTResponse(BaseResponse):
createdAt=job.created_at,
description=job.description,
documentParameters=job.document_parameters,
forceCanceled=job.force,
reasonCode=job.reason_code,
jobArn=job.job_arn,
jobExecutionsRolloutConfig=job.job_executions_rollout_config,
jobId=job.job_id,
@ -154,6 +157,127 @@ class IoTResponse(BaseResponse):
)
)
def delete_job(self):
    """Handle DeleteJob: delete by jobId, honouring the force flag."""
    job_id = self._get_param("jobId")
    force = self._get_bool_param("force")
    self.iot_backend.delete_job(job_id=job_id, force=force)
    return json.dumps(dict())
def cancel_job(self):
    """Handle CancelJob and return the updated job as JSON."""
    job_id = self._get_param("jobId")
    reason_code = self._get_param("reasonCode")
    comment = self._get_param("comment")
    force = self._get_bool_param("force")
    job = self.iot_backend.cancel_job(
        job_id=job_id, reason_code=reason_code, comment=comment, force=force
    )
    return json.dumps(job.to_dict())
def get_job_document(self):
    """Handle GetJobDocument; falls back to an empty document when the job
    only has an S3 document_source."""
    job = self.iot_backend.get_job_document(job_id=self._get_param("jobId"))
    if job.document is not None:
        return json.dumps({"document": job.document})
    else:
        # job.document_source is not None:
        # TODO: needs to be implemented to get document_source's content from S3
        return json.dumps({"document": ""})
def list_jobs(self):
    """Handle ListJobs: return a paginated list of jobs.

    Bug fix: the original assignments carried trailing commas
    (``status = (self._get_param("status"),)`` etc.), so the backend was
    handed one-element tuples such as ``("IN_PROGRESS",)`` instead of the
    plain string values the filters expect. The tuple wrapping is removed.
    """
    jobs, next_token = self.iot_backend.list_jobs(
        status=self._get_param("status"),
        target_selection=self._get_param("targetSelection"),
        # 50 is not the AWS default, but makes testing easier.
        max_results=self._get_int_param("maxResults", 50),
        token=self._get_param("nextToken"),
        thing_group_name=self._get_param("thingGroupName"),
        thing_group_id=self._get_param("thingGroupId"),
    )
    return json.dumps(dict(jobs=jobs, nextToken=next_token))
def describe_job_execution(self):
    """Handle DescribeJobExecution: return the execution for (job, thing)."""
    execution = self.iot_backend.describe_job_execution(
        job_id=self._get_param("jobId"),
        thing_name=self._get_param("thingName"),
        execution_number=self._get_int_param("executionNumber"),
    )
    return json.dumps({"execution": execution.to_get_dict()})
def cancel_job_execution(self):
    """Handle CancelJobExecution for one thing's execution of a job."""
    self.iot_backend.cancel_job_execution(
        job_id=self._get_param("jobId"),
        thing_name=self._get_param("thingName"),
        force=self._get_bool_param("force"),
        expected_version=self._get_int_param("expectedVersion"),
        status_details=self._get_param("statusDetails"),
    )
    return json.dumps({})
def delete_job_execution(self):
    """Handle DeleteJobExecution: remove one thing's execution record."""
    self.iot_backend.delete_job_execution(
        job_id=self._get_param("jobId"),
        thing_name=self._get_param("thingName"),
        execution_number=self._get_int_param("executionNumber"),
        force=self._get_bool_param("force"),
    )
    return json.dumps({})
def list_job_executions_for_job(self):
    """Handle ListJobExecutionsForJob: paginated executions of one job."""
    summaries, next_token = self.iot_backend.list_job_executions_for_job(
        job_id=self._get_param("jobId"),
        status=self._get_param("status"),
        # 50 is not the AWS default, but makes testing easier.
        max_results=self._get_int_param("maxResults", 50),
        next_token=self._get_param("nextToken"),
    )
    return json.dumps({"executionSummaries": summaries, "nextToken": next_token})
def list_job_executions_for_thing(self):
    """Handle ListJobExecutionsForThing: paginated executions for a thing."""
    summaries, next_token = self.iot_backend.list_job_executions_for_thing(
        thing_name=self._get_param("thingName"),
        status=self._get_param("status"),
        # 50 is not the AWS default, but makes testing easier.
        max_results=self._get_int_param("maxResults", 50),
        next_token=self._get_param("nextToken"),
    )
    return json.dumps({"executionSummaries": summaries, "nextToken": next_token})
def create_keys_and_certificate(self):
set_as_active = self._get_bool_param("setAsActive")
cert, key_pair = self.iot_backend.create_keys_and_certificate(
@ -241,12 +365,61 @@ class IoTResponse(BaseResponse):
self.iot_backend.delete_policy(policy_name=policy_name)
return json.dumps(dict())
def create_policy_version(self):
    """Handle CreatePolicyVersion; returns the new version's creation dict."""
    version = self.iot_backend.create_policy_version(
        self._get_param("policyName"),
        self._get_param("policyDocument"),
        self._get_bool_param("setAsDefault"),
    )
    return json.dumps(dict(version.to_dict_at_creation()))
def set_default_policy_version(self):
    """Handle SetDefaultPolicyVersion: mark a version as the policy default."""
    self.iot_backend.set_default_policy_version(
        self._get_param("policyName"), self._get_param("policyVersionId")
    )
    return json.dumps({})
def get_policy_version(self):
    """Handle GetPolicyVersion: fetch one version of a policy."""
    version = self.iot_backend.get_policy_version(
        self._get_param("policyName"), self._get_param("policyVersionId")
    )
    return json.dumps(dict(version.to_get_dict()))
def list_policy_versions(self):
    """Handle ListPolicyVersions: return every version of the named policy."""
    versions = self.iot_backend.list_policy_versions(
        policy_name=self._get_param("policyName")
    )
    return json.dumps({"policyVersions": [v.to_dict() for v in versions]})
def delete_policy_version(self):
    """Handle DeletePolicyVersion: remove one non-default policy version."""
    self.iot_backend.delete_policy_version(
        self._get_param("policyName"), self._get_param("policyVersionId")
    )
    return json.dumps({})
def attach_policy(self):
    """Handle AttachPolicy: attach a policy to a principal/target."""
    self.iot_backend.attach_policy(
        policy_name=self._get_param("policyName"),
        target=self._get_param("target"),
    )
    return json.dumps({})
def list_attached_policies(self):
    """Handle ListAttachedPolicies for a (URL-encoded) principal target.

    Pagination (marker/pageSize) is not implemented yet, so nextMarker is
    always None.
    """
    principal = unquote(self._get_param("target"))
    attached = self.iot_backend.list_attached_policies(target=principal)
    return json.dumps(
        {"policies": [p.to_dict() for p in attached], "nextMarker": None}
    )
def attach_principal_policy(self):
policy_name = self._get_param("policyName")
principal = self.headers.get("x-amzn-iot-principal")

View File

@ -1,8 +1,13 @@
import datetime
import json
import time
from boto3 import Session
from moto.core.exceptions import InvalidNextTokenException
from moto.core.models import ConfigQueryModel
from moto.s3 import s3_backends
from moto.s3.models import get_moto_s3_account_id
class S3ConfigQuery(ConfigQueryModel):
@ -118,4 +123,146 @@ class S3ConfigQuery(ConfigQueryModel):
return config_data
class S3AccountPublicAccessBlockConfigQuery(ConfigQueryModel):
    """AWS Config query adapter for the account-level S3 Public Access Block.

    The PAB is a single global setting, so every region reports the same
    resource; the resource ID is always the (mocked) AWS account ID and the
    resource name is empty.
    """

    def list_config_service_resources(
        self,
        resource_ids,
        resource_name,
        limit,
        next_token,
        backend_region=None,
        resource_region=None,
    ):
        """List the PAB as a Config resource, paginated by region name.

        Returns a tuple of (resource summary dicts, next pagination token).
        Raises InvalidNextTokenException for a token that is not a known
        region name.
        """
        # For the Account Public Access Block, they are the same for all regions. The resource ID is the AWS account ID
        # There is no resource name -- it should be a blank string "" if provided.

        # The resource name can only ever be None or an empty string:
        if resource_name is not None and resource_name != "":
            return [], None

        pab = None
        account_id = get_moto_s3_account_id()
        regions = [region for region in Session().get_available_regions("config")]

        # If a resource ID was passed in, then filter accordingly:
        if resource_ids:
            for id in resource_ids:
                if account_id == id:
                    pab = self.backends["global"].account_public_access_block
                    break

        # Otherwise, just grab the one from the backend:
        if not resource_ids:
            pab = self.backends["global"].account_public_access_block

        # If it's not present, then return nothing
        if not pab:
            return [], None

        # Filter on regions (and paginate on them as well):
        if backend_region:
            pab_list = [backend_region]
        elif resource_region:
            # Invalid region?
            if resource_region not in regions:
                return [], None

            pab_list = [resource_region]

        # Aggregated query where no regions were supplied so return them all:
        else:
            pab_list = regions

        # Pagination logic:
        sorted_regions = sorted(pab_list)
        new_token = None

        # Get the start:
        if not next_token:
            start = 0
        else:
            # Tokens for this moto feature is just the region-name:
            # For OTHER non-global resource types, it's the region concatenated with the resource ID.
            if next_token not in sorted_regions:
                raise InvalidNextTokenException()

            start = sorted_regions.index(next_token)

        # Get the list of items to collect:
        pab_list = sorted_regions[start : (start + limit)]

        # If more regions remain after this page, the next token is the first
        # region of the following page:
        if len(sorted_regions) > (start + limit):
            new_token = sorted_regions[start + limit]

        return (
            [
                {
                    "type": "AWS::S3::AccountPublicAccessBlock",
                    "id": account_id,
                    "region": region,
                }
                for region in pab_list
            ],
            new_token,
        )

    def get_config_resource(
        self, resource_id, resource_name=None, backend_region=None, resource_region=None
    ):
        """Return one PAB in AWS Config's configurationItem format, or None
        when the PAB is unset, the ID/name/region filters do not match, or
        the requested region is invalid."""
        # Do we even have this defined?
        if not self.backends["global"].account_public_access_block:
            return None

        # Resource name can only ever be "" if it's supplied:
        if resource_name is not None and resource_name != "":
            return None

        # Are we filtering based on region?
        account_id = get_moto_s3_account_id()
        regions = [region for region in Session().get_available_regions("config")]

        # Is the resource ID correct?:
        if account_id == resource_id:
            if backend_region:
                pab_region = backend_region

            # Invalid region?
            elif resource_region not in regions:
                return None

            else:
                pab_region = resource_region

        else:
            return None

        # Format the PAB to the AWS Config format:
        creation_time = datetime.datetime.utcnow()
        config_data = {
            "version": "1.3",
            "accountId": account_id,
            "configurationItemCaptureTime": str(creation_time),
            "configurationItemStatus": "OK",
            "configurationStateId": str(
                int(time.mktime(creation_time.timetuple()))
            ),  # PY2 and 3 compatible
            "resourceType": "AWS::S3::AccountPublicAccessBlock",
            "resourceId": account_id,
            "awsRegion": pab_region,
            "availabilityZone": "Not Applicable",
            "configuration": self.backends[
                "global"
            ].account_public_access_block.to_config_dict(),
            "supplementaryConfiguration": {},
        }

        # The 'configuration' field is also a JSON string:
        config_data["configuration"] = json.dumps(config_data["configuration"])

        return config_data
# Singleton query adapters wired to the S3 backend collection; the AWS Config
# service implementation looks these up to describe S3 resources.
s3_config_query = S3ConfigQuery(s3_backends)
s3_account_public_access_block_query = S3AccountPublicAccessBlockConfigQuery(
    s3_backends
)

View File

@ -127,6 +127,18 @@ class InvalidRequest(S3ClientError):
)
class IllegalLocationConstraintException(S3ClientError):
    """HTTP 400: a CreateBucket request sent to a region-specific endpoint
    did not carry the LocationConstraint that endpoint requires."""

    code = 400

    def __init__(self, *args, **kwargs):
        super(IllegalLocationConstraintException, self).__init__(
            "IllegalLocationConstraintException",
            "The unspecified location constraint is incompatible for the region specific endpoint this request was sent to.",
            *args,
            **kwargs
        )
class MalformedXML(S3ClientError):
code = 400
@ -347,3 +359,12 @@ class InvalidPublicAccessBlockConfiguration(S3ClientError):
*args,
**kwargs
)
class WrongPublicAccessBlockAccountIdError(S3ClientError):
    """HTTP 403 (surfaced as AccessDenied): an account-level public-access-block
    call supplied an account ID other than the one moto is configured with."""

    code = 403

    def __init__(self):
        super(WrongPublicAccessBlockAccountIdError, self).__init__(
            "AccessDenied", "Access Denied"
        )

View File

@ -19,7 +19,7 @@ import uuid
import six
from bisect import insort
from moto.core import BaseBackend, BaseModel
from moto.core import ACCOUNT_ID, BaseBackend, BaseModel
from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime
from .exceptions import (
BucketAlreadyExists,
@ -37,6 +37,7 @@ from .exceptions import (
CrossLocationLoggingProhibitted,
NoSuchPublicAccessBlockConfiguration,
InvalidPublicAccessBlockConfiguration,
WrongPublicAccessBlockAccountIdError,
)
from .utils import clean_key_name, _VersionedKeyStore
@ -58,6 +59,13 @@ DEFAULT_TEXT_ENCODING = sys.getdefaultencoding()
OWNER = "75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a"
def get_moto_s3_account_id():
"""This makes it easy for mocking AWS Account IDs when using AWS Config
-- Simply mock.patch the ACCOUNT_ID here, and Config gets it for free.
"""
return ACCOUNT_ID
class FakeDeleteMarker(BaseModel):
def __init__(self, key):
self.key = key
@ -1163,6 +1171,7 @@ class FakeBucket(BaseModel):
class S3Backend(BaseBackend):
def __init__(self):
self.buckets = {}
self.account_public_access_block = None
def create_bucket(self, bucket_name, region_name):
if bucket_name in self.buckets:
@ -1264,6 +1273,16 @@ class S3Backend(BaseBackend):
return bucket.public_access_block
def get_account_public_access_block(self, account_id):
    """Return the account-level PublicAccessBlock.

    Raises WrongPublicAccessBlockAccountIdError when account_id is not
    moto's configured ACCOUNT_ID, and NoSuchPublicAccessBlockConfiguration
    when no block has been configured.
    """
    # The account ID should equal the account id that is set for Moto:
    if account_id != ACCOUNT_ID:
        raise WrongPublicAccessBlockAccountIdError()

    pab = self.account_public_access_block
    if not pab:
        raise NoSuchPublicAccessBlockConfiguration()
    return pab
def set_key(
self, bucket_name, key_name, value, storage=None, etag=None, multipart=None
):
@ -1356,6 +1375,13 @@ class S3Backend(BaseBackend):
bucket = self.get_bucket(bucket_name)
bucket.public_access_block = None
def delete_account_public_access_block(self, account_id):
    """Remove the account-level PublicAccessBlock (idempotent).

    Raises WrongPublicAccessBlockAccountIdError when account_id is not
    moto's configured ACCOUNT_ID.
    """
    # The account ID should equal the account id that is set for Moto:
    if account_id != ACCOUNT_ID:
        raise WrongPublicAccessBlockAccountIdError()

    self.account_public_access_block = None
def put_bucket_notification_configuration(self, bucket_name, notification_config):
bucket = self.get_bucket(bucket_name)
bucket.set_notification_configuration(notification_config)
@ -1384,6 +1410,21 @@ class S3Backend(BaseBackend):
pub_block_config.get("RestrictPublicBuckets"),
)
def put_account_public_access_block(self, account_id, pub_block_config):
    """Set the account-level PublicAccessBlock from a parsed config dict.

    :param account_id: must equal moto's configured ACCOUNT_ID, otherwise
        WrongPublicAccessBlockAccountIdError is raised.
    :param pub_block_config: mapping with the four Block*/Ignore*/Restrict*
        flags; an empty/falsy mapping raises
        InvalidPublicAccessBlockConfiguration.
    """
    # The account ID should equal the account id that is set for Moto:
    if account_id != ACCOUNT_ID:
        raise WrongPublicAccessBlockAccountIdError()

    if not pub_block_config:
        raise InvalidPublicAccessBlockConfiguration()

    self.account_public_access_block = PublicAccessBlock(
        pub_block_config.get("BlockPublicAcls"),
        pub_block_config.get("IgnorePublicAcls"),
        pub_block_config.get("BlockPublicPolicy"),
        pub_block_config.get("RestrictPublicBuckets"),
    )
def initiate_multipart(self, bucket_name, key_name, metadata):
bucket = self.get_bucket(bucket_name)
new_multipart = FakeMultipart(key_name, metadata)

View File

@ -4,6 +4,7 @@ import re
import sys
import six
from botocore.awsrequest import AWSPreparedRequest
from moto.core.utils import str_to_rfc_1123_datetime, py2_strip_unicode_keys
from six.moves.urllib.parse import parse_qs, urlparse, unquote
@ -29,6 +30,7 @@ from .exceptions import (
InvalidPartOrder,
MalformedXML,
MalformedACLError,
IllegalLocationConstraintException,
InvalidNotificationARN,
InvalidNotificationEvent,
ObjectNotInActiveTierError,
@ -122,6 +124,11 @@ ACTION_MAP = {
"uploadId": "PutObject",
},
},
"CONTROL": {
"GET": {"publicAccessBlock": "GetPublicAccessBlock"},
"PUT": {"publicAccessBlock": "PutPublicAccessBlock"},
"DELETE": {"publicAccessBlock": "DeletePublicAccessBlock"},
},
}
@ -167,7 +174,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
or host.startswith("localhost")
or host.startswith("localstack")
or re.match(r"^[^.]+$", host)
or re.match(r"^.*\.svc\.cluster\.local$", host)
or re.match(r"^.*\.svc\.cluster\.local:?\d*$", host)
):
# Default to path-based buckets for (1) localhost, (2) localstack hosts (e.g. localstack.dev),
# (3) local host names that do not contain a "." (e.g., Docker container host names), or
@ -219,7 +226,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
# Depending on which calling format the client is using, we don't know
# if this is a bucket or key request so we have to check
if self.subdomain_based_buckets(request):
return self.key_response(request, full_url, headers)
return self.key_or_control_response(request, full_url, headers)
else:
# Using path-based buckets
return self.bucket_response(request, full_url, headers)
@ -286,7 +293,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
return self._bucket_response_post(request, body, bucket_name)
else:
raise NotImplementedError(
"Method {0} has not been impelemented in the S3 backend yet".format(
"Method {0} has not been implemented in the S3 backend yet".format(
method
)
)
@ -585,6 +592,29 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
next_continuation_token = None
return result_keys, is_truncated, next_continuation_token
def _body_contains_location_constraint(self, body):
    """Return True if the request body carries a
    CreateBucketConfiguration/LocationConstraint element."""
    if not body:
        return False
    try:
        xmltodict.parse(body)["CreateBucketConfiguration"]["LocationConstraint"]
    except KeyError:
        return False
    return True
def _parse_pab_config(self, body):
    """Parse a PublicAccessBlockConfiguration XML body into a plain dict.

    Strips the @xmlns attribute and, on Python 2, converts the unicode keys
    to str so the rest of the backend can treat the mapping uniformly.
    """
    parsed_xml = xmltodict.parse(body)
    parsed_xml["PublicAccessBlockConfiguration"].pop("@xmlns", None)

    # If Python 2, fix the unicode strings:
    if sys.version_info[0] < 3:
        parsed_xml = {
            "PublicAccessBlockConfiguration": py2_strip_unicode_keys(
                dict(parsed_xml["PublicAccessBlockConfiguration"])
            )
        }

    return parsed_xml
def _bucket_response_put(
self, request, body, region_name, bucket_name, querystring
):
@ -663,27 +693,23 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
raise e
elif "publicAccessBlock" in querystring:
parsed_xml = xmltodict.parse(body)
parsed_xml["PublicAccessBlockConfiguration"].pop("@xmlns", None)
# If Python 2, fix the unicode strings:
if sys.version_info[0] < 3:
parsed_xml = {
"PublicAccessBlockConfiguration": py2_strip_unicode_keys(
dict(parsed_xml["PublicAccessBlockConfiguration"])
)
}
pab_config = self._parse_pab_config(body)
self.backend.put_bucket_public_access_block(
bucket_name, parsed_xml["PublicAccessBlockConfiguration"]
bucket_name, pab_config["PublicAccessBlockConfiguration"]
)
return ""
else:
# us-east-1, the default AWS region behaves a bit differently
# - you should not use it as a location constraint --> it fails
# - querying the location constraint returns None
# - LocationConstraint has to be specified if outside us-east-1
if (
region_name != DEFAULT_REGION_NAME
and not self._body_contains_location_constraint(body)
):
raise IllegalLocationConstraintException()
if body:
# us-east-1, the default AWS region behaves a bit differently
# - you should not use it as a location constraint --> it fails
# - querying the location constraint returns None
try:
forced_region = xmltodict.parse(body)["CreateBucketConfiguration"][
"LocationConstraint"
@ -854,15 +880,21 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
)
return 206, response_headers, response_content[begin : end + 1]
def key_response(self, request, full_url, headers):
def key_or_control_response(self, request, full_url, headers):
# Key and Control are lumped in because splitting out the regex is too much of a pain :/
self.method = request.method
self.path = self._get_path(request)
self.headers = request.headers
if "host" not in self.headers:
self.headers["host"] = urlparse(full_url).netloc
response_headers = {}
try:
response = self._key_response(request, full_url, headers)
# Is this an S3 control response?
if isinstance(request, AWSPreparedRequest) and "s3-control" in request.url:
response = self._control_response(request, full_url, headers)
else:
response = self._key_response(request, full_url, headers)
except S3ClientError as s3error:
response = s3error.code, {}, s3error.description
@ -878,6 +910,94 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
)
return status_code, response_headers, response_content
def _control_response(self, request, full_url, headers):
parsed_url = urlparse(full_url)
query = parse_qs(parsed_url.query, keep_blank_values=True)
method = request.method
if hasattr(request, "body"):
# Boto
body = request.body
if hasattr(body, "read"):
body = body.read()
else:
# Flask server
body = request.data
if body is None:
body = b""
if method == "GET":
return self._control_response_get(request, query, headers)
elif method == "PUT":
return self._control_response_put(request, body, query, headers)
elif method == "DELETE":
return self._control_response_delete(request, query, headers)
else:
raise NotImplementedError(
"Method {0} has not been implemented in the S3 backend yet".format(
method
)
)
def _control_response_get(self, request, query, headers):
    """Handle GET on the S3 Control endpoint (publicAccessBlock only)."""
    # The action is the last path segment, with any query string removed.
    action = self.path.split("?")[0].split("/")[-1]
    self._set_action("CONTROL", "GET", action)
    self._authenticate_and_authorize_s3_action()

    if "publicAccessBlock" in action:
        pab = self.backend.get_account_public_access_block(
            headers["x-amz-account-id"]
        )
        template = self.response_template(S3_PUBLIC_ACCESS_BLOCK_CONFIGURATION)
        return 200, {}, template.render(public_block_config=pab)

    raise NotImplementedError(
        "Method {0} has not been implemented in the S3 backend yet".format(action)
    )
def _control_response_put(self, request, body, query, headers):
    """Handle PUT on the S3 Control endpoint (publicAccessBlock only)."""
    # The action is the last path segment, with any query string removed.
    action = self.path.split("?")[0].split("/")[-1]
    self._set_action("CONTROL", "PUT", action)
    self._authenticate_and_authorize_s3_action()

    if "publicAccessBlock" in action:
        pab_config = self._parse_pab_config(body)
        self.backend.put_account_public_access_block(
            headers["x-amz-account-id"],
            pab_config["PublicAccessBlockConfiguration"],
        )
        return 200, {}, ""

    raise NotImplementedError(
        "Method {0} has not been implemented in the S3 backend yet".format(action)
    )
def _control_response_delete(self, request, query, headers):
    """Handle DELETE on the S3 Control endpoint (publicAccessBlock only)."""
    # The action is the last path segment, with any query string removed.
    action = self.path.split("?")[0].split("/")[-1]
    self._set_action("CONTROL", "DELETE", action)
    self._authenticate_and_authorize_s3_action()

    if "publicAccessBlock" in action:
        self.backend.delete_account_public_access_block(headers["x-amz-account-id"])
        return 200, {}, ""

    raise NotImplementedError(
        "Method {0} has not been implemented in the S3 backend yet".format(action)
    )
def _key_response(self, request, full_url, headers):
parsed_url = urlparse(full_url)
query = parse_qs(parsed_url.query, keep_blank_values=True)
@ -1082,6 +1202,10 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
if mdirective is not None and mdirective == "REPLACE":
metadata = metadata_from_headers(request.headers)
new_key.set_metadata(metadata, replace=True)
tdirective = request.headers.get("x-amz-tagging-directive")
if tdirective == "REPLACE":
tagging = self._tagging_from_headers(request.headers)
new_key.set_tagging(tagging)
template = self.response_template(S3_OBJECT_COPY_RESPONSE)
response_headers.update(new_key.response_dict)
return 200, response_headers, template.render(key=new_key)

View File

@ -13,7 +13,7 @@ url_paths = {
# subdomain key of path-based bucket
"{0}/(?P<key_or_bucket_name>[^/]+)/?$": S3ResponseInstance.ambiguous_response,
# path-based bucket + key
"{0}/(?P<bucket_name_path>[^/]+)/(?P<key_name>.+)": S3ResponseInstance.key_response,
"{0}/(?P<bucket_name_path>[^/]+)/(?P<key_name>.+)": S3ResponseInstance.key_or_control_response,
# subdomain bucket + key with empty first part of path
"{0}//(?P<key_name>.*)$": S3ResponseInstance.key_response,
"{0}//(?P<key_name>.*)$": S3ResponseInstance.key_or_control_response,
}

View File

@ -37,7 +37,7 @@ def bucket_name_from_url(url):
REGION_URL_REGEX = re.compile(
r"^https?://(s3[-\.](?P<region1>.+)\.amazonaws\.com/(.+)|"
r"(.+)\.s3-(?P<region2>.+)\.amazonaws\.com)/?"
r"(.+)\.s3[-\.](?P<region2>.+)\.amazonaws\.com)/?"
)

View File

@ -127,6 +127,10 @@ class WorkflowExecution(BaseModel):
"executionInfo": self.to_medium_dict(),
"executionConfiguration": {"taskList": {"name": self.task_list}},
}
# info
if self.execution_status == "CLOSED":
hsh["executionInfo"]["closeStatus"] = self.close_status
hsh["executionInfo"]["closeTimestamp"] = self.close_timestamp
# configuration
for key in self._configuration_keys:
attr = camelcase_to_underscores(key)

View File

View File

@ -0,0 +1,62 @@
class TaggingService:
    """In-memory store of resource tags, keyed by ARN.

    The key names used inside tag dicts ("Tags"/"Key"/"Value" by default)
    are configurable so services with different wire formats can reuse it.
    """

    def __init__(self, tagName="Tags", keyName="Key", valueName="Value"):
        self.tagName = tagName
        self.keyName = keyName
        self.valueName = valueName
        # Maps arn -> {tag key -> tag value (value may be None)}
        self.tags = {}

    def list_tags_for_resource(self, arn):
        """Return the tags of *arn* as {tagName: [{keyName: k, valueName: v}, ...]}."""
        stored = self.tags.get(arn, {})
        return {
            self.tagName: [
                {self.keyName: key, self.valueName: value}
                for key, value in stored.items()
            ]
        }

    def delete_all_tags_for_resource(self, arn):
        """Forget every tag for *arn* (KeyError if it was never tagged)."""
        del self.tags[arn]

    def has_tags(self, arn):
        """True if *arn* has an entry in the store."""
        return arn in self.tags

    def tag_resource(self, arn, tags):
        """Merge a list of {keyName: ..., valueName: ...} dicts into arn's tags."""
        stored = self.tags.setdefault(arn, {})
        for tag in tags:
            stored[tag[self.keyName]] = tag.get(self.valueName)

    def untag_resource_using_names(self, arn, tag_names):
        """Remove tags by key name; unknown names are ignored."""
        stored = self.tags.get(arn, {})
        for name in tag_names:
            stored.pop(name, None)

    def untag_resource_using_tags(self, arn, tags):
        """Remove tags matching the given tag dicts.

        When a dict carries a value, the stored value must match exactly for
        the tag to be removed; key-only dicts remove unconditionally.
        """
        stored = self.tags.get(arn, {})
        for tag in tags:
            if self.keyName not in tag:
                continue
            key = tag[self.keyName]
            if key not in stored:
                continue
            if self.valueName in tag and stored[key] != tag[self.valueName]:
                # Both key and value were given but the value differs: keep it.
                continue
            del stored[key]

    def extract_tag_names(self, tags):
        """Return the key names present in a list of tag dicts."""
        return [tag[self.keyName] for tag in tags if self.keyName in tag]

    def flatten_tag_list(self, tags):
        """Collapse a list of tag dicts into a {key: value-or-None} mapping."""
        return {tag[self.keyName]: tag.get(self.valueName) for tag in tags}

View File

@ -204,12 +204,7 @@ def test_create_resource():
root_resource["ResponseMetadata"].pop("HTTPHeaders", None)
root_resource["ResponseMetadata"].pop("RetryAttempts", None)
root_resource.should.equal(
{
"path": "/",
"id": root_id,
"ResponseMetadata": {"HTTPStatusCode": 200},
"resourceMethods": {"GET": {}},
}
{"path": "/", "id": root_id, "ResponseMetadata": {"HTTPStatusCode": 200},}
)
client.create_resource(restApiId=api_id, parentId=root_id, pathPart="users")
@ -257,7 +252,6 @@ def test_child_resource():
"parentId": users_id,
"id": tags_id,
"ResponseMetadata": {"HTTPStatusCode": 200},
"resourceMethods": {"GET": {}},
}
)
@ -286,6 +280,41 @@ def test_create_method():
{
"httpMethod": "GET",
"authorizationType": "none",
"apiKeyRequired": False,
"ResponseMetadata": {"HTTPStatusCode": 200},
}
)
@mock_apigateway
def test_create_method_apikeyrequired():
    """put_method with apiKeyRequired=True must round-trip through get_method."""
    client = boto3.client("apigateway", region_name="us-west-2")
    response = client.create_rest_api(name="my_api", description="this is my api")
    api_id = response["id"]
    resources = client.get_resources(restApiId=api_id)
    # The root ("/") resource is created automatically with the REST API.
    root_id = [resource for resource in resources["items"] if resource["path"] == "/"][
        0
    ]["id"]

    client.put_method(
        restApiId=api_id,
        resourceId=root_id,
        httpMethod="GET",
        authorizationType="none",
        apiKeyRequired=True,
    )

    response = client.get_method(restApiId=api_id, resourceId=root_id, httpMethod="GET")

    # this is hard to match against, so remove it
    response["ResponseMetadata"].pop("HTTPHeaders", None)
    response["ResponseMetadata"].pop("RetryAttempts", None)
    response.should.equal(
        {
            "httpMethod": "GET",
            "authorizationType": "none",
            "apiKeyRequired": True,
            "ResponseMetadata": {"HTTPStatusCode": 200},
        }
    )

View File

@ -86,14 +86,14 @@ def lambda_handler(event, context):
@mock_lambda
def test_list_functions():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
result = conn.list_functions()
result["Functions"].should.have.length_of(0)
@mock_lambda
def test_invoke_requestresponse_function():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
Runtime="python2.7",
@ -113,7 +113,7 @@ def test_invoke_requestresponse_function():
Payload=json.dumps(in_data),
)
success_result["StatusCode"].should.equal(202)
success_result["StatusCode"].should.equal(200)
result_obj = json.loads(
base64.b64decode(success_result["LogResult"]).decode("utf-8")
)
@ -150,7 +150,7 @@ def test_invoke_requestresponse_function_with_arn():
Payload=json.dumps(in_data),
)
success_result["StatusCode"].should.equal(202)
success_result["StatusCode"].should.equal(200)
result_obj = json.loads(
base64.b64decode(success_result["LogResult"]).decode("utf-8")
)
@ -163,7 +163,7 @@ def test_invoke_requestresponse_function_with_arn():
@mock_lambda
def test_invoke_event_function():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
Runtime="python2.7",
@ -188,16 +188,44 @@ def test_invoke_event_function():
json.loads(success_result["Payload"].read().decode("utf-8")).should.equal(in_data)
@mock_lambda
def test_invoke_dryrun_function():
    """DryRun invocations validate without executing and return HTTP 204."""
    conn = boto3.client("lambda", _lambda_region)
    conn.create_function(
        FunctionName="testFunction",
        Runtime="python2.7",
        Role=get_role_name(),
        Handler="lambda_function.lambda_handler",
        Code={"ZipFile": get_test_zip_file1(),},
        Description="test lambda function",
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    # Invoking a function that does not exist must raise a ClientError.
    conn.invoke.when.called_with(
        FunctionName="notAFunction", InvocationType="Event", Payload="{}"
    ).should.throw(botocore.client.ClientError)

    in_data = {"msg": "So long and thanks for all the fish"}
    success_result = conn.invoke(
        FunctionName="testFunction",
        InvocationType="DryRun",
        Payload=json.dumps(in_data),
    )
    # DryRun never runs the handler; the service responds with 204 No Content.
    success_result["StatusCode"].should.equal(204)
if settings.TEST_SERVER_MODE:
@mock_ec2
@mock_lambda
def test_invoke_function_get_ec2_volume():
conn = boto3.resource("ec2", "us-west-2")
vol = conn.create_volume(Size=99, AvailabilityZone="us-west-2")
conn = boto3.resource("ec2", _lambda_region)
vol = conn.create_volume(Size=99, AvailabilityZone=_lambda_region)
vol = conn.Volume(vol.id)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
Runtime="python3.7",
@ -216,7 +244,7 @@ if settings.TEST_SERVER_MODE:
InvocationType="RequestResponse",
Payload=json.dumps(in_data),
)
result["StatusCode"].should.equal(202)
result["StatusCode"].should.equal(200)
actual_payload = json.loads(result["Payload"].read().decode("utf-8"))
expected_payload = {"id": vol.id, "state": vol.state, "size": vol.size}
actual_payload.should.equal(expected_payload)
@ -227,14 +255,14 @@ if settings.TEST_SERVER_MODE:
@mock_ec2
@mock_lambda
def test_invoke_function_from_sns():
logs_conn = boto3.client("logs", region_name="us-west-2")
sns_conn = boto3.client("sns", region_name="us-west-2")
logs_conn = boto3.client("logs", region_name=_lambda_region)
sns_conn = boto3.client("sns", region_name=_lambda_region)
sns_conn.create_topic(Name="some-topic")
topics_json = sns_conn.list_topics()
topics = topics_json["Topics"]
topic_arn = topics[0]["TopicArn"]
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
result = conn.create_function(
FunctionName="testFunction",
Runtime="python2.7",
@ -277,7 +305,7 @@ def test_invoke_function_from_sns():
@mock_lambda
def test_create_based_on_s3_with_missing_bucket():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function.when.called_with(
FunctionName="testFunction",
@ -297,12 +325,15 @@ def test_create_based_on_s3_with_missing_bucket():
@mock_s3
@freeze_time("2015-01-01 00:00:00")
def test_create_function_from_aws_bucket():
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
result = conn.create_function(
FunctionName="testFunction",
@ -350,7 +381,7 @@ def test_create_function_from_aws_bucket():
@mock_lambda
@freeze_time("2015-01-01 00:00:00")
def test_create_function_from_zipfile():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
zip_content = get_test_zip_file1()
result = conn.create_function(
FunctionName="testFunction",
@ -395,12 +426,15 @@ def test_create_function_from_zipfile():
@mock_s3
@freeze_time("2015-01-01 00:00:00")
def test_get_function():
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file1()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
@ -464,7 +498,10 @@ def test_get_function():
def test_get_function_by_arn():
bucket_name = "test-bucket"
s3_conn = boto3.client("s3", "us-east-1")
s3_conn.create_bucket(Bucket=bucket_name)
s3_conn.create_bucket(
Bucket=bucket_name,
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket=bucket_name, Key="test.zip", Body=zip_content)
@ -489,12 +526,15 @@ def test_get_function_by_arn():
@mock_lambda
@mock_s3
def test_delete_function():
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
@ -525,7 +565,10 @@ def test_delete_function():
def test_delete_function_by_arn():
bucket_name = "test-bucket"
s3_conn = boto3.client("s3", "us-east-1")
s3_conn.create_bucket(Bucket=bucket_name)
s3_conn.create_bucket(
Bucket=bucket_name,
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket=bucket_name, Key="test.zip", Body=zip_content)
@ -550,7 +593,7 @@ def test_delete_function_by_arn():
@mock_lambda
def test_delete_unknown_function():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.delete_function.when.called_with(
FunctionName="testFunctionThatDoesntExist"
).should.throw(botocore.client.ClientError)
@ -559,12 +602,15 @@ def test_delete_unknown_function():
@mock_lambda
@mock_s3
def test_publish():
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
@ -609,12 +655,15 @@ def test_list_create_list_get_delete_list():
test `list -> create -> list -> get -> delete -> list` integration
"""
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.list_functions()["Functions"].should.have.length_of(0)
@ -711,12 +760,15 @@ def test_tags():
"""
test list_tags -> tag_resource -> list_tags -> tag_resource -> list_tags -> untag_resource -> list_tags integration
"""
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
function = conn.create_function(
FunctionName="testFunction",
@ -768,7 +820,7 @@ def test_tags_not_found():
"""
Test list_tags and tag_resource when the lambda with the given arn does not exist
"""
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.list_tags.when.called_with(
Resource="arn:aws:lambda:{}:function:not-found".format(ACCOUNT_ID)
).should.throw(botocore.client.ClientError)
@ -786,7 +838,7 @@ def test_tags_not_found():
@mock_lambda
def test_invoke_async_function():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
Runtime="python2.7",
@ -809,7 +861,7 @@ def test_invoke_async_function():
@mock_lambda
@freeze_time("2015-01-01 00:00:00")
def test_get_function_created_with_zipfile():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
zip_content = get_test_zip_file1()
result = conn.create_function(
FunctionName="testFunction",
@ -855,7 +907,7 @@ def test_get_function_created_with_zipfile():
@mock_lambda
def test_add_function_permission():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
zip_content = get_test_zip_file1()
conn.create_function(
FunctionName="testFunction",
@ -886,7 +938,7 @@ def test_add_function_permission():
@mock_lambda
def test_get_function_policy():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
zip_content = get_test_zip_file1()
conn.create_function(
FunctionName="testFunction",
@ -921,12 +973,15 @@ def test_get_function_policy():
@mock_lambda
@mock_s3
def test_list_versions_by_function():
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
@ -977,12 +1032,15 @@ def test_list_versions_by_function():
@mock_lambda
@mock_s3
def test_create_function_with_already_exists():
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
@ -1014,7 +1072,7 @@ def test_create_function_with_already_exists():
@mock_lambda
@mock_s3
def test_list_versions_by_function_for_nonexistent_function():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
versions = conn.list_versions_by_function(FunctionName="testFunction")
assert len(versions["Versions"]) == 0
@ -1363,12 +1421,15 @@ def test_delete_event_source_mapping():
@mock_lambda
@mock_s3
def test_update_configuration():
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
fxn = conn.create_function(
FunctionName="testFunction",
@ -1411,7 +1472,7 @@ def test_update_configuration():
@mock_lambda
def test_update_function_zip():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
zip_content_one = get_test_zip_file1()
@ -1466,13 +1527,16 @@ def test_update_function_zip():
@mock_lambda
@mock_s3
def test_update_function_s3():
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file1()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
fxn = conn.create_function(
FunctionName="testFunctionS3",
@ -1553,7 +1617,7 @@ def test_create_function_with_unknown_arn():
def create_invalid_lambda(role):
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
zip_content = get_test_zip_file1()
with assert_raises(ClientError) as err:
conn.create_function(
@ -1572,7 +1636,7 @@ def create_invalid_lambda(role):
def get_role_name():
with mock_iam():
iam = boto3.client("iam", region_name="us-west-2")
iam = boto3.client("iam", region_name=_lambda_region)
try:
return iam.get_role(RoleName="my-role")["Role"]["Arn"]
except ClientError:

View File

@ -1,5 +1,5 @@
from __future__ import unicode_literals
template = {
"Resources": {"VPCEIP": {"Type": "AWS::EC2::EIP", "Properties": {"Domain": "vpc"}}}
}
from __future__ import unicode_literals
template = {
"Resources": {"VPCEIP": {"Type": "AWS::EC2::EIP", "Properties": {"Domain": "vpc"}}}
}

View File

@ -1,276 +1,276 @@
from __future__ import unicode_literals
template = {
"Description": "AWS CloudFormation Sample Template vpc_single_instance_in_subnet.template: Sample template showing how to create a VPC and add an EC2 instance with an Elastic IP address and a security group. **WARNING** This template creates an Amazon EC2 instance. You will be billed for the AWS resources used if you create a stack from this template.",
"Parameters": {
"SSHLocation": {
"ConstraintDescription": "must be a valid IP CIDR range of the form x.x.x.x/x.",
"Description": " The IP address range that can be used to SSH to the EC2 instances",
"Default": "0.0.0.0/0",
"MinLength": "9",
"AllowedPattern": "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,2})",
"MaxLength": "18",
"Type": "String",
},
"KeyName": {
"Type": "String",
"Description": "Name of an existing EC2 KeyPair to enable SSH access to the instance",
"MinLength": "1",
"AllowedPattern": "[\\x20-\\x7E]*",
"MaxLength": "255",
"ConstraintDescription": "can contain only ASCII characters.",
},
"InstanceType": {
"Default": "m1.small",
"ConstraintDescription": "must be a valid EC2 instance type.",
"Type": "String",
"Description": "WebServer EC2 instance type",
"AllowedValues": [
"t1.micro",
"m1.small",
"m1.medium",
"m1.large",
"m1.xlarge",
"m2.xlarge",
"m2.2xlarge",
"m2.4xlarge",
"m3.xlarge",
"m3.2xlarge",
"c1.medium",
"c1.xlarge",
"cc1.4xlarge",
"cc2.8xlarge",
"cg1.4xlarge",
],
},
},
"AWSTemplateFormatVersion": "2010-09-09",
"Outputs": {
"URL": {
"Description": "Newly created application URL",
"Value": {
"Fn::Join": [
"",
["http://", {"Fn::GetAtt": ["WebServerInstance", "PublicIp"]}],
]
},
}
},
"Resources": {
"Subnet": {
"Type": "AWS::EC2::Subnet",
"Properties": {
"VpcId": {"Ref": "VPC"},
"CidrBlock": "10.0.0.0/24",
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
},
},
"WebServerWaitHandle": {"Type": "AWS::CloudFormation::WaitConditionHandle"},
"Route": {
"Type": "AWS::EC2::Route",
"Properties": {
"GatewayId": {"Ref": "InternetGateway"},
"DestinationCidrBlock": "0.0.0.0/0",
"RouteTableId": {"Ref": "RouteTable"},
},
"DependsOn": "AttachGateway",
},
"SubnetRouteTableAssociation": {
"Type": "AWS::EC2::SubnetRouteTableAssociation",
"Properties": {
"SubnetId": {"Ref": "Subnet"},
"RouteTableId": {"Ref": "RouteTable"},
},
},
"InternetGateway": {
"Type": "AWS::EC2::InternetGateway",
"Properties": {
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}]
},
},
"RouteTable": {
"Type": "AWS::EC2::RouteTable",
"Properties": {
"VpcId": {"Ref": "VPC"},
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
},
},
"WebServerWaitCondition": {
"Type": "AWS::CloudFormation::WaitCondition",
"Properties": {"Handle": {"Ref": "WebServerWaitHandle"}, "Timeout": "300"},
"DependsOn": "WebServerInstance",
},
"VPC": {
"Type": "AWS::EC2::VPC",
"Properties": {
"CidrBlock": "10.0.0.0/16",
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
},
},
"InstanceSecurityGroup": {
"Type": "AWS::EC2::SecurityGroup",
"Properties": {
"SecurityGroupIngress": [
{
"ToPort": "22",
"IpProtocol": "tcp",
"CidrIp": {"Ref": "SSHLocation"},
"FromPort": "22",
},
{
"ToPort": "80",
"IpProtocol": "tcp",
"CidrIp": "0.0.0.0/0",
"FromPort": "80",
},
],
"VpcId": {"Ref": "VPC"},
"GroupDescription": "Enable SSH access via port 22",
},
},
"WebServerInstance": {
"Type": "AWS::EC2::Instance",
"Properties": {
"UserData": {
"Fn::Base64": {
"Fn::Join": [
"",
[
"#!/bin/bash\n",
"yum update -y aws-cfn-bootstrap\n",
"# Helper function\n",
"function error_exit\n",
"{\n",
' /opt/aws/bin/cfn-signal -e 1 -r "$1" \'',
{"Ref": "WebServerWaitHandle"},
"'\n",
" exit 1\n",
"}\n",
"# Install the simple web page\n",
"/opt/aws/bin/cfn-init -s ",
{"Ref": "AWS::StackId"},
" -r WebServerInstance ",
" --region ",
{"Ref": "AWS::Region"},
" || error_exit 'Failed to run cfn-init'\n",
"# Start up the cfn-hup daemon to listen for changes to the Web Server metadata\n",
"/opt/aws/bin/cfn-hup || error_exit 'Failed to start cfn-hup'\n",
"# All done so signal success\n",
'/opt/aws/bin/cfn-signal -e 0 -r "WebServer setup complete" \'',
{"Ref": "WebServerWaitHandle"},
"'\n",
],
]
}
},
"Tags": [
{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"},
{"Value": "Bar", "Key": "Foo"},
],
"SecurityGroupIds": [{"Ref": "InstanceSecurityGroup"}],
"KeyName": {"Ref": "KeyName"},
"SubnetId": {"Ref": "Subnet"},
"ImageId": {
"Fn::FindInMap": ["RegionMap", {"Ref": "AWS::Region"}, "AMI"]
},
"InstanceType": {"Ref": "InstanceType"},
},
"Metadata": {
"Comment": "Install a simple PHP application",
"AWS::CloudFormation::Init": {
"config": {
"files": {
"/etc/cfn/cfn-hup.conf": {
"content": {
"Fn::Join": [
"",
[
"[main]\n",
"stack=",
{"Ref": "AWS::StackId"},
"\n",
"region=",
{"Ref": "AWS::Region"},
"\n",
],
]
},
"owner": "root",
"group": "root",
"mode": "000400",
},
"/etc/cfn/hooks.d/cfn-auto-reloader.conf": {
"content": {
"Fn::Join": [
"",
[
"[cfn-auto-reloader-hook]\n",
"triggers=post.update\n",
"path=Resources.WebServerInstance.Metadata.AWS::CloudFormation::Init\n",
"action=/opt/aws/bin/cfn-init -s ",
{"Ref": "AWS::StackId"},
" -r WebServerInstance ",
" --region ",
{"Ref": "AWS::Region"},
"\n",
"runas=root\n",
],
]
}
},
"/var/www/html/index.php": {
"content": {
"Fn::Join": [
"",
[
"<?php\n",
"echo '<h1>AWS CloudFormation sample PHP application</h1>';\n",
"?>\n",
],
]
},
"owner": "apache",
"group": "apache",
"mode": "000644",
},
},
"services": {
"sysvinit": {
"httpd": {"ensureRunning": "true", "enabled": "true"},
"sendmail": {
"ensureRunning": "false",
"enabled": "false",
},
}
},
"packages": {"yum": {"httpd": [], "php": []}},
}
},
},
},
"IPAddress": {
"Type": "AWS::EC2::EIP",
"Properties": {"InstanceId": {"Ref": "WebServerInstance"}, "Domain": "vpc"},
"DependsOn": "AttachGateway",
},
"AttachGateway": {
"Type": "AWS::EC2::VPCGatewayAttachment",
"Properties": {
"VpcId": {"Ref": "VPC"},
"InternetGatewayId": {"Ref": "InternetGateway"},
},
},
},
"Mappings": {
"RegionMap": {
"ap-southeast-1": {"AMI": "ami-74dda626"},
"ap-southeast-2": {"AMI": "ami-b3990e89"},
"us-west-2": {"AMI": "ami-16fd7026"},
"us-east-1": {"AMI": "ami-7f418316"},
"ap-northeast-1": {"AMI": "ami-dcfa4edd"},
"us-west-1": {"AMI": "ami-951945d0"},
"eu-west-1": {"AMI": "ami-24506250"},
"sa-east-1": {"AMI": "ami-3e3be423"},
}
},
}
from __future__ import unicode_literals
template = {
"Description": "AWS CloudFormation Sample Template vpc_single_instance_in_subnet.template: Sample template showing how to create a VPC and add an EC2 instance with an Elastic IP address and a security group. **WARNING** This template creates an Amazon EC2 instance. You will be billed for the AWS resources used if you create a stack from this template.",
"Parameters": {
"SSHLocation": {
"ConstraintDescription": "must be a valid IP CIDR range of the form x.x.x.x/x.",
"Description": " The IP address range that can be used to SSH to the EC2 instances",
"Default": "0.0.0.0/0",
"MinLength": "9",
"AllowedPattern": "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,2})",
"MaxLength": "18",
"Type": "String",
},
"KeyName": {
"Type": "String",
"Description": "Name of an existing EC2 KeyPair to enable SSH access to the instance",
"MinLength": "1",
"AllowedPattern": "[\\x20-\\x7E]*",
"MaxLength": "255",
"ConstraintDescription": "can contain only ASCII characters.",
},
"InstanceType": {
"Default": "m1.small",
"ConstraintDescription": "must be a valid EC2 instance type.",
"Type": "String",
"Description": "WebServer EC2 instance type",
"AllowedValues": [
"t1.micro",
"m1.small",
"m1.medium",
"m1.large",
"m1.xlarge",
"m2.xlarge",
"m2.2xlarge",
"m2.4xlarge",
"m3.xlarge",
"m3.2xlarge",
"c1.medium",
"c1.xlarge",
"cc1.4xlarge",
"cc2.8xlarge",
"cg1.4xlarge",
],
},
},
"AWSTemplateFormatVersion": "2010-09-09",
"Outputs": {
"URL": {
"Description": "Newly created application URL",
"Value": {
"Fn::Join": [
"",
["http://", {"Fn::GetAtt": ["WebServerInstance", "PublicIp"]}],
]
},
}
},
"Resources": {
"Subnet": {
"Type": "AWS::EC2::Subnet",
"Properties": {
"VpcId": {"Ref": "VPC"},
"CidrBlock": "10.0.0.0/24",
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
},
},
"WebServerWaitHandle": {"Type": "AWS::CloudFormation::WaitConditionHandle"},
"Route": {
"Type": "AWS::EC2::Route",
"Properties": {
"GatewayId": {"Ref": "InternetGateway"},
"DestinationCidrBlock": "0.0.0.0/0",
"RouteTableId": {"Ref": "RouteTable"},
},
"DependsOn": "AttachGateway",
},
"SubnetRouteTableAssociation": {
"Type": "AWS::EC2::SubnetRouteTableAssociation",
"Properties": {
"SubnetId": {"Ref": "Subnet"},
"RouteTableId": {"Ref": "RouteTable"},
},
},
"InternetGateway": {
"Type": "AWS::EC2::InternetGateway",
"Properties": {
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}]
},
},
"RouteTable": {
"Type": "AWS::EC2::RouteTable",
"Properties": {
"VpcId": {"Ref": "VPC"},
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
},
},
"WebServerWaitCondition": {
"Type": "AWS::CloudFormation::WaitCondition",
"Properties": {"Handle": {"Ref": "WebServerWaitHandle"}, "Timeout": "300"},
"DependsOn": "WebServerInstance",
},
"VPC": {
"Type": "AWS::EC2::VPC",
"Properties": {
"CidrBlock": "10.0.0.0/16",
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
},
},
"InstanceSecurityGroup": {
"Type": "AWS::EC2::SecurityGroup",
"Properties": {
"SecurityGroupIngress": [
{
"ToPort": "22",
"IpProtocol": "tcp",
"CidrIp": {"Ref": "SSHLocation"},
"FromPort": "22",
},
{
"ToPort": "80",
"IpProtocol": "tcp",
"CidrIp": "0.0.0.0/0",
"FromPort": "80",
},
],
"VpcId": {"Ref": "VPC"},
"GroupDescription": "Enable SSH access via port 22",
},
},
"WebServerInstance": {
"Type": "AWS::EC2::Instance",
"Properties": {
"UserData": {
"Fn::Base64": {
"Fn::Join": [
"",
[
"#!/bin/bash\n",
"yum update -y aws-cfn-bootstrap\n",
"# Helper function\n",
"function error_exit\n",
"{\n",
' /opt/aws/bin/cfn-signal -e 1 -r "$1" \'',
{"Ref": "WebServerWaitHandle"},
"'\n",
" exit 1\n",
"}\n",
"# Install the simple web page\n",
"/opt/aws/bin/cfn-init -s ",
{"Ref": "AWS::StackId"},
" -r WebServerInstance ",
" --region ",
{"Ref": "AWS::Region"},
" || error_exit 'Failed to run cfn-init'\n",
"# Start up the cfn-hup daemon to listen for changes to the Web Server metadata\n",
"/opt/aws/bin/cfn-hup || error_exit 'Failed to start cfn-hup'\n",
"# All done so signal success\n",
'/opt/aws/bin/cfn-signal -e 0 -r "WebServer setup complete" \'',
{"Ref": "WebServerWaitHandle"},
"'\n",
],
]
}
},
"Tags": [
{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"},
{"Value": "Bar", "Key": "Foo"},
],
"SecurityGroupIds": [{"Ref": "InstanceSecurityGroup"}],
"KeyName": {"Ref": "KeyName"},
"SubnetId": {"Ref": "Subnet"},
"ImageId": {
"Fn::FindInMap": ["RegionMap", {"Ref": "AWS::Region"}, "AMI"]
},
"InstanceType": {"Ref": "InstanceType"},
},
"Metadata": {
"Comment": "Install a simple PHP application",
"AWS::CloudFormation::Init": {
"config": {
"files": {
"/etc/cfn/cfn-hup.conf": {
"content": {
"Fn::Join": [
"",
[
"[main]\n",
"stack=",
{"Ref": "AWS::StackId"},
"\n",
"region=",
{"Ref": "AWS::Region"},
"\n",
],
]
},
"owner": "root",
"group": "root",
"mode": "000400",
},
"/etc/cfn/hooks.d/cfn-auto-reloader.conf": {
"content": {
"Fn::Join": [
"",
[
"[cfn-auto-reloader-hook]\n",
"triggers=post.update\n",
"path=Resources.WebServerInstance.Metadata.AWS::CloudFormation::Init\n",
"action=/opt/aws/bin/cfn-init -s ",
{"Ref": "AWS::StackId"},
" -r WebServerInstance ",
" --region ",
{"Ref": "AWS::Region"},
"\n",
"runas=root\n",
],
]
}
},
"/var/www/html/index.php": {
"content": {
"Fn::Join": [
"",
[
"<?php\n",
"echo '<h1>AWS CloudFormation sample PHP application</h1>';\n",
"?>\n",
],
]
},
"owner": "apache",
"group": "apache",
"mode": "000644",
},
},
"services": {
"sysvinit": {
"httpd": {"ensureRunning": "true", "enabled": "true"},
"sendmail": {
"ensureRunning": "false",
"enabled": "false",
},
}
},
"packages": {"yum": {"httpd": [], "php": []}},
}
},
},
},
"IPAddress": {
"Type": "AWS::EC2::EIP",
"Properties": {"InstanceId": {"Ref": "WebServerInstance"}, "Domain": "vpc"},
"DependsOn": "AttachGateway",
},
"AttachGateway": {
"Type": "AWS::EC2::VPCGatewayAttachment",
"Properties": {
"VpcId": {"Ref": "VPC"},
"InternetGatewayId": {"Ref": "InternetGateway"},
},
},
},
"Mappings": {
"RegionMap": {
"ap-southeast-1": {"AMI": "ami-74dda626"},
"ap-southeast-2": {"AMI": "ami-b3990e89"},
"us-west-2": {"AMI": "ami-16fd7026"},
"us-east-1": {"AMI": "ami-7f418316"},
"ap-northeast-1": {"AMI": "ami-dcfa4edd"},
"us-west-1": {"AMI": "ami-951945d0"},
"eu-west-1": {"AMI": "ami-24506250"},
"sa-east-1": {"AMI": "ami-3e3be423"},
}
},
}

View File

@ -143,7 +143,7 @@ def test_create_stack_with_notification_arn():
@mock_s3_deprecated
def test_create_stack_from_s3_url():
s3_conn = boto.s3.connect_to_region("us-west-1")
bucket = s3_conn.create_bucket("foobar")
bucket = s3_conn.create_bucket("foobar", location="us-west-1")
key = boto.s3.key.Key(bucket)
key.key = "template-key"
key.set_contents_from_string(dummy_template_json)

View File

@ -1,117 +1,117 @@
import boto
from boto.ec2.cloudwatch.alarm import MetricAlarm
import sure # noqa
from moto import mock_cloudwatch_deprecated
def alarm_fixture(name="tester", action=None):
action = action or ["arn:alarm"]
return MetricAlarm(
name=name,
namespace="{0}_namespace".format(name),
metric="{0}_metric".format(name),
comparison=">=",
threshold=2.0,
period=60,
evaluation_periods=5,
statistic="Average",
description="A test",
dimensions={"InstanceId": ["i-0123456,i-0123457"]},
alarm_actions=action,
ok_actions=["arn:ok"],
insufficient_data_actions=["arn:insufficient"],
unit="Seconds",
)
@mock_cloudwatch_deprecated
def test_create_alarm():
conn = boto.connect_cloudwatch()
alarm = alarm_fixture()
conn.create_alarm(alarm)
alarms = conn.describe_alarms()
alarms.should.have.length_of(1)
alarm = alarms[0]
alarm.name.should.equal("tester")
alarm.namespace.should.equal("tester_namespace")
alarm.metric.should.equal("tester_metric")
alarm.comparison.should.equal(">=")
alarm.threshold.should.equal(2.0)
alarm.period.should.equal(60)
alarm.evaluation_periods.should.equal(5)
alarm.statistic.should.equal("Average")
alarm.description.should.equal("A test")
dict(alarm.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]})
list(alarm.alarm_actions).should.equal(["arn:alarm"])
list(alarm.ok_actions).should.equal(["arn:ok"])
list(alarm.insufficient_data_actions).should.equal(["arn:insufficient"])
alarm.unit.should.equal("Seconds")
@mock_cloudwatch_deprecated
def test_delete_alarm():
conn = boto.connect_cloudwatch()
alarms = conn.describe_alarms()
alarms.should.have.length_of(0)
alarm = alarm_fixture()
conn.create_alarm(alarm)
alarms = conn.describe_alarms()
alarms.should.have.length_of(1)
alarms[0].delete()
alarms = conn.describe_alarms()
alarms.should.have.length_of(0)
@mock_cloudwatch_deprecated
def test_put_metric_data():
conn = boto.connect_cloudwatch()
conn.put_metric_data(
namespace="tester",
name="metric",
value=1.5,
dimensions={"InstanceId": ["i-0123456,i-0123457"]},
)
metrics = conn.list_metrics()
metrics.should.have.length_of(1)
metric = metrics[0]
metric.namespace.should.equal("tester")
metric.name.should.equal("metric")
dict(metric.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]})
@mock_cloudwatch_deprecated
def test_describe_alarms():
conn = boto.connect_cloudwatch()
alarms = conn.describe_alarms()
alarms.should.have.length_of(0)
conn.create_alarm(alarm_fixture(name="nfoobar", action="afoobar"))
conn.create_alarm(alarm_fixture(name="nfoobaz", action="afoobaz"))
conn.create_alarm(alarm_fixture(name="nbarfoo", action="abarfoo"))
conn.create_alarm(alarm_fixture(name="nbazfoo", action="abazfoo"))
alarms = conn.describe_alarms()
alarms.should.have.length_of(4)
alarms = conn.describe_alarms(alarm_name_prefix="nfoo")
alarms.should.have.length_of(2)
alarms = conn.describe_alarms(alarm_names=["nfoobar", "nbarfoo", "nbazfoo"])
alarms.should.have.length_of(3)
alarms = conn.describe_alarms(action_prefix="afoo")
alarms.should.have.length_of(2)
for alarm in conn.describe_alarms():
alarm.delete()
alarms = conn.describe_alarms()
alarms.should.have.length_of(0)
import boto
from boto.ec2.cloudwatch.alarm import MetricAlarm
import sure # noqa
from moto import mock_cloudwatch_deprecated
def alarm_fixture(name="tester", action=None):
action = action or ["arn:alarm"]
return MetricAlarm(
name=name,
namespace="{0}_namespace".format(name),
metric="{0}_metric".format(name),
comparison=">=",
threshold=2.0,
period=60,
evaluation_periods=5,
statistic="Average",
description="A test",
dimensions={"InstanceId": ["i-0123456,i-0123457"]},
alarm_actions=action,
ok_actions=["arn:ok"],
insufficient_data_actions=["arn:insufficient"],
unit="Seconds",
)
@mock_cloudwatch_deprecated
def test_create_alarm():
conn = boto.connect_cloudwatch()
alarm = alarm_fixture()
conn.create_alarm(alarm)
alarms = conn.describe_alarms()
alarms.should.have.length_of(1)
alarm = alarms[0]
alarm.name.should.equal("tester")
alarm.namespace.should.equal("tester_namespace")
alarm.metric.should.equal("tester_metric")
alarm.comparison.should.equal(">=")
alarm.threshold.should.equal(2.0)
alarm.period.should.equal(60)
alarm.evaluation_periods.should.equal(5)
alarm.statistic.should.equal("Average")
alarm.description.should.equal("A test")
dict(alarm.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]})
list(alarm.alarm_actions).should.equal(["arn:alarm"])
list(alarm.ok_actions).should.equal(["arn:ok"])
list(alarm.insufficient_data_actions).should.equal(["arn:insufficient"])
alarm.unit.should.equal("Seconds")
@mock_cloudwatch_deprecated
def test_delete_alarm():
conn = boto.connect_cloudwatch()
alarms = conn.describe_alarms()
alarms.should.have.length_of(0)
alarm = alarm_fixture()
conn.create_alarm(alarm)
alarms = conn.describe_alarms()
alarms.should.have.length_of(1)
alarms[0].delete()
alarms = conn.describe_alarms()
alarms.should.have.length_of(0)
@mock_cloudwatch_deprecated
def test_put_metric_data():
conn = boto.connect_cloudwatch()
conn.put_metric_data(
namespace="tester",
name="metric",
value=1.5,
dimensions={"InstanceId": ["i-0123456,i-0123457"]},
)
metrics = conn.list_metrics()
metrics.should.have.length_of(1)
metric = metrics[0]
metric.namespace.should.equal("tester")
metric.name.should.equal("metric")
dict(metric.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]})
@mock_cloudwatch_deprecated
def test_describe_alarms():
conn = boto.connect_cloudwatch()
alarms = conn.describe_alarms()
alarms.should.have.length_of(0)
conn.create_alarm(alarm_fixture(name="nfoobar", action="afoobar"))
conn.create_alarm(alarm_fixture(name="nfoobaz", action="afoobaz"))
conn.create_alarm(alarm_fixture(name="nbarfoo", action="abarfoo"))
conn.create_alarm(alarm_fixture(name="nbazfoo", action="abazfoo"))
alarms = conn.describe_alarms()
alarms.should.have.length_of(4)
alarms = conn.describe_alarms(alarm_name_prefix="nfoo")
alarms.should.have.length_of(2)
alarms = conn.describe_alarms(alarm_names=["nfoobar", "nbarfoo", "nbazfoo"])
alarms.should.have.length_of(3)
alarms = conn.describe_alarms(action_prefix="afoo")
alarms.should.have.length_of(2)
for alarm in conn.describe_alarms():
alarm.delete()
alarms = conn.describe_alarms()
alarms.should.have.length_of(0)

View File

@ -27,6 +27,11 @@ def test_create_user_pool():
result["UserPool"]["Id"].should_not.be.none
result["UserPool"]["Id"].should.match(r"[\w-]+_[0-9a-zA-Z]+")
result["UserPool"]["Arn"].should.equal(
"arn:aws:cognito-idp:us-west-2:{}:userpool/{}".format(
ACCOUNT_ID, result["UserPool"]["Id"]
)
)
result["UserPool"]["Name"].should.equal(name)
result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value)
@ -911,6 +916,55 @@ def test_admin_create_existing_user():
caught.should.be.true
@mock_cognitoidp
def test_admin_resend_invitation_existing_user():
conn = boto3.client("cognito-idp", "us-west-2")
username = str(uuid.uuid4())
value = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
conn.admin_create_user(
UserPoolId=user_pool_id,
Username=username,
UserAttributes=[{"Name": "thing", "Value": value}],
)
caught = False
try:
conn.admin_create_user(
UserPoolId=user_pool_id,
Username=username,
UserAttributes=[{"Name": "thing", "Value": value}],
MessageAction="RESEND",
)
except conn.exceptions.UsernameExistsException:
caught = True
caught.should.be.false
@mock_cognitoidp
def test_admin_resend_invitation_missing_user():
conn = boto3.client("cognito-idp", "us-west-2")
username = str(uuid.uuid4())
value = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
caught = False
try:
conn.admin_create_user(
UserPoolId=user_pool_id,
Username=username,
UserAttributes=[{"Name": "thing", "Value": value}],
MessageAction="RESEND",
)
except conn.exceptions.UserNotFoundException:
caught = True
caught.should.be.true
@mock_cognitoidp
def test_admin_get_user():
conn = boto3.client("cognito-idp", "us-west-2")

View File

@ -46,4 +46,4 @@ def test_domain_dispatched_with_service():
dispatcher = DomainDispatcherApplication(create_backend_app, service="s3")
backend_app = dispatcher.get_application({"HTTP_HOST": "s3.us-east1.amazonaws.com"})
keys = set(backend_app.view_functions.keys())
keys.should.contain("ResponseObject.key_response")
keys.should.contain("ResponseObject.key_or_control_response")

View File

@ -1,182 +1,182 @@
from __future__ import unicode_literals
import boto.datapipeline
import sure # noqa
from moto import mock_datapipeline_deprecated
from moto.datapipeline.utils import remove_capitalization_of_dict_keys
def get_value_from_fields(key, fields):
for field in fields:
if field["key"] == key:
return field["stringValue"]
@mock_datapipeline_deprecated
def test_create_pipeline():
conn = boto.datapipeline.connect_to_region("us-west-2")
res = conn.create_pipeline("mypipeline", "some-unique-id")
pipeline_id = res["pipelineId"]
pipeline_descriptions = conn.describe_pipelines([pipeline_id])[
"pipelineDescriptionList"
]
pipeline_descriptions.should.have.length_of(1)
pipeline_description = pipeline_descriptions[0]
pipeline_description["name"].should.equal("mypipeline")
pipeline_description["pipelineId"].should.equal(pipeline_id)
fields = pipeline_description["fields"]
get_value_from_fields("@pipelineState", fields).should.equal("PENDING")
get_value_from_fields("uniqueId", fields).should.equal("some-unique-id")
PIPELINE_OBJECTS = [
{
"id": "Default",
"name": "Default",
"fields": [{"key": "workerGroup", "stringValue": "workerGroup"}],
},
{
"id": "Schedule",
"name": "Schedule",
"fields": [
{"key": "startDateTime", "stringValue": "2012-12-12T00:00:00"},
{"key": "type", "stringValue": "Schedule"},
{"key": "period", "stringValue": "1 hour"},
{"key": "endDateTime", "stringValue": "2012-12-21T18:00:00"},
],
},
{
"id": "SayHello",
"name": "SayHello",
"fields": [
{"key": "type", "stringValue": "ShellCommandActivity"},
{"key": "command", "stringValue": "echo hello"},
{"key": "parent", "refValue": "Default"},
{"key": "schedule", "refValue": "Schedule"},
],
},
]
@mock_datapipeline_deprecated
def test_creating_pipeline_definition():
conn = boto.datapipeline.connect_to_region("us-west-2")
res = conn.create_pipeline("mypipeline", "some-unique-id")
pipeline_id = res["pipelineId"]
conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)
pipeline_definition = conn.get_pipeline_definition(pipeline_id)
pipeline_definition["pipelineObjects"].should.have.length_of(3)
default_object = pipeline_definition["pipelineObjects"][0]
default_object["name"].should.equal("Default")
default_object["id"].should.equal("Default")
default_object["fields"].should.equal(
[{"key": "workerGroup", "stringValue": "workerGroup"}]
)
@mock_datapipeline_deprecated
def test_describing_pipeline_objects():
    """describe_objects returns exactly the requested objects from the stored definition."""
    conn = boto.datapipeline.connect_to_region("us-west-2")
    res = conn.create_pipeline("mypipeline", "some-unique-id")
    pipeline_id = res["pipelineId"]
    conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)
    objects = conn.describe_objects(["Schedule", "Default"], pipeline_id)[
        "pipelineObjects"
    ]
    objects.should.have.length_of(2)
    default_object = [x for x in objects if x["id"] == "Default"][0]
    default_object["name"].should.equal("Default")
    default_object["fields"].should.equal(
        [{"key": "workerGroup", "stringValue": "workerGroup"}]
    )
@mock_datapipeline_deprecated
def test_activate_pipeline():
    """Activating a pipeline flips its @pipelineState to SCHEDULED."""
    client = boto.datapipeline.connect_to_region("us-west-2")
    pipeline_id = client.create_pipeline("mypipeline", "some-unique-id")["pipelineId"]
    client.activate_pipeline(pipeline_id)
    descriptions = client.describe_pipelines([pipeline_id])["pipelineDescriptionList"]
    descriptions.should.have.length_of(1)
    state = get_value_from_fields("@pipelineState", descriptions[0]["fields"])
    state.should.equal("SCHEDULED")
@mock_datapipeline_deprecated
def test_delete_pipeline():
    """A deleted pipeline no longer appears in list_pipelines."""
    client = boto.datapipeline.connect_to_region("us-west-2")
    pipeline_id = client.create_pipeline("mypipeline", "some-unique-id")["pipelineId"]
    client.delete_pipeline(pipeline_id)
    remaining = client.list_pipelines()["pipelineIdList"]
    remaining.should.have.length_of(0)
@mock_datapipeline_deprecated
def test_listing_pipelines():
    """With few pipelines, list_pipelines returns a single, unpaginated page."""
    conn = boto.datapipeline.connect_to_region("us-west-2")
    res1 = conn.create_pipeline("mypipeline1", "some-unique-id1")
    res2 = conn.create_pipeline("mypipeline2", "some-unique-id2")
    response = conn.list_pipelines()
    response["hasMoreResults"].should.be(False)
    response["marker"].should.be.none
    response["pipelineIdList"].should.have.length_of(2)
    response["pipelineIdList"].should.contain(
        {"id": res1["pipelineId"], "name": "mypipeline1"}
    )
    response["pipelineIdList"].should.contain(
        {"id": res2["pipelineId"], "name": "mypipeline2"}
    )
@mock_datapipeline_deprecated
def test_listing_paginated_pipelines():
    """list_pipelines caps a page at 50 entries and sets marker when more remain."""
    client = boto.datapipeline.connect_to_region("us-west-2")
    for index in range(100):
        client.create_pipeline("mypipeline%d" % index, "some-unique-id%d" % index)
    page = client.list_pipelines()
    page["hasMoreResults"].should.be(True)
    # The marker points at the last entry of the returned page.
    page["marker"].should.equal(page["pipelineIdList"][-1]["id"])
    page["pipelineIdList"].should.have.length_of(50)
# Unit test for the moto helper (no AWS mock needed).
def test_remove_capitalization_of_dict_keys():
    """Keys are lower-cased at every nesting level; values are left untouched."""
    original = {
        "Id": "IdValue",
        "Fields": [{"Key": "KeyValue", "StringValue": "StringValueValue"}],
    }
    expected = {
        "id": "IdValue",
        "fields": [{"key": "KeyValue", "stringValue": "StringValueValue"}],
    }
    remove_capitalization_of_dict_keys(original).should.equal(expected)
from __future__ import unicode_literals
import boto.datapipeline
import sure # noqa
from moto import mock_datapipeline_deprecated
from moto.datapipeline.utils import remove_capitalization_of_dict_keys
def get_value_from_fields(key, fields):
    """Return the stringValue of the first field whose key matches, else None."""
    matches = (field["stringValue"] for field in fields if field["key"] == key)
    return next(matches, None)
@mock_datapipeline_deprecated
def test_create_pipeline():
    """Creating a pipeline leaves it PENDING with the supplied unique id."""
    conn = boto.datapipeline.connect_to_region("us-west-2")
    res = conn.create_pipeline("mypipeline", "some-unique-id")
    pipeline_id = res["pipelineId"]
    pipeline_descriptions = conn.describe_pipelines([pipeline_id])[
        "pipelineDescriptionList"
    ]
    pipeline_descriptions.should.have.length_of(1)
    pipeline_description = pipeline_descriptions[0]
    pipeline_description["name"].should.equal("mypipeline")
    pipeline_description["pipelineId"].should.equal(pipeline_id)
    fields = pipeline_description["fields"]
    get_value_from_fields("@pipelineState", fields).should.equal("PENDING")
    get_value_from_fields("uniqueId", fields).should.equal("some-unique-id")
# Shared pipeline definition: Default object, a one-hour Schedule, and a
# ShellCommandActivity that points back at both of them.
PIPELINE_OBJECTS = [
    {
        "id": "Default",
        "name": "Default",
        "fields": [{"key": "workerGroup", "stringValue": "workerGroup"}],
    },
    {
        "id": "Schedule",
        "name": "Schedule",
        "fields": [
            {"key": "startDateTime", "stringValue": "2012-12-12T00:00:00"},
            {"key": "type", "stringValue": "Schedule"},
            {"key": "period", "stringValue": "1 hour"},
            {"key": "endDateTime", "stringValue": "2012-12-21T18:00:00"},
        ],
    },
    {
        "id": "SayHello",
        "name": "SayHello",
        "fields": [
            {"key": "type", "stringValue": "ShellCommandActivity"},
            {"key": "command", "stringValue": "echo hello"},
            {"key": "parent", "refValue": "Default"},
            {"key": "schedule", "refValue": "Schedule"},
        ],
    },
]
@mock_datapipeline_deprecated
def test_creating_pipeline_definition():
    """A stored definition is returned unchanged by get_pipeline_definition."""
    conn = boto.datapipeline.connect_to_region("us-west-2")
    res = conn.create_pipeline("mypipeline", "some-unique-id")
    pipeline_id = res["pipelineId"]
    conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)
    pipeline_definition = conn.get_pipeline_definition(pipeline_id)
    pipeline_definition["pipelineObjects"].should.have.length_of(3)
    default_object = pipeline_definition["pipelineObjects"][0]
    default_object["name"].should.equal("Default")
    default_object["id"].should.equal("Default")
    default_object["fields"].should.equal(
        [{"key": "workerGroup", "stringValue": "workerGroup"}]
    )
@mock_datapipeline_deprecated
def test_describing_pipeline_objects():
    """describe_objects returns only the requested ids from the definition."""
    client = boto.datapipeline.connect_to_region("us-west-2")
    pipeline_id = client.create_pipeline("mypipeline", "some-unique-id")["pipelineId"]
    client.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)
    described = client.describe_objects(["Schedule", "Default"], pipeline_id)
    objects = described["pipelineObjects"]
    objects.should.have.length_of(2)
    default_object = [obj for obj in objects if obj["id"] == "Default"][0]
    default_object["name"].should.equal("Default")
    default_object["fields"].should.equal(
        [{"key": "workerGroup", "stringValue": "workerGroup"}]
    )
@mock_datapipeline_deprecated
def test_activate_pipeline():
    """activate_pipeline moves @pipelineState from PENDING to SCHEDULED."""
    conn = boto.datapipeline.connect_to_region("us-west-2")
    res = conn.create_pipeline("mypipeline", "some-unique-id")
    pipeline_id = res["pipelineId"]
    conn.activate_pipeline(pipeline_id)
    pipeline_descriptions = conn.describe_pipelines([pipeline_id])[
        "pipelineDescriptionList"
    ]
    pipeline_descriptions.should.have.length_of(1)
    pipeline_description = pipeline_descriptions[0]
    fields = pipeline_description["fields"]
    get_value_from_fields("@pipelineState", fields).should.equal("SCHEDULED")
@mock_datapipeline_deprecated
def test_delete_pipeline():
    """After delete_pipeline, the pipeline id list is empty again."""
    client = boto.datapipeline.connect_to_region("us-west-2")
    pipeline_id = client.create_pipeline("mypipeline", "some-unique-id")["pipelineId"]
    client.delete_pipeline(pipeline_id)
    client.list_pipelines()["pipelineIdList"].should.have.length_of(0)
@mock_datapipeline_deprecated
def test_listing_pipelines():
    """Two pipelines fit in one page: no marker, no further results."""
    conn = boto.datapipeline.connect_to_region("us-west-2")
    res1 = conn.create_pipeline("mypipeline1", "some-unique-id1")
    res2 = conn.create_pipeline("mypipeline2", "some-unique-id2")
    response = conn.list_pipelines()
    response["hasMoreResults"].should.be(False)
    response["marker"].should.be.none
    response["pipelineIdList"].should.have.length_of(2)
    response["pipelineIdList"].should.contain(
        {"id": res1["pipelineId"], "name": "mypipeline1"}
    )
    response["pipelineIdList"].should.contain(
        {"id": res2["pipelineId"], "name": "mypipeline2"}
    )
@mock_datapipeline_deprecated
def test_listing_paginated_pipelines():
    """With 100 pipelines, a page holds 50 and the marker is the last entry's id."""
    conn = boto.datapipeline.connect_to_region("us-west-2")
    for i in range(100):
        conn.create_pipeline("mypipeline%d" % i, "some-unique-id%d" % i)
    response = conn.list_pipelines()
    response["hasMoreResults"].should.be(True)
    response["marker"].should.equal(response["pipelineIdList"][-1]["id"])
    response["pipelineIdList"].should.have.length_of(50)
# testing a helper function
def test_remove_capitalization_of_dict_keys():
    """Keys are lower-cased recursively (including inside lists); values are untouched."""
    result = remove_capitalization_of_dict_keys(
        {
            "Id": "IdValue",
            "Fields": [{"Key": "KeyValue", "StringValue": "StringValueValue"}],
        }
    )
    result.should.equal(
        {
            "id": "IdValue",
            "fields": [{"key": "KeyValue", "stringValue": "StringValueValue"}],
        }
    )

View File

@ -1,470 +1,470 @@
from __future__ import unicode_literals
import boto
import sure # noqa
from freezegun import freeze_time
from moto import mock_dynamodb_deprecated
from boto.dynamodb import condition
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError, DynamoDBValidationError
from boto.exception import DynamoDBResponseError
def create_table(conn):
    """Create the hash+range 'messages' table shared by the tests below."""
    schema = conn.create_schema(
        hash_key_name="forum_name",
        hash_key_proto_value=str,
        range_key_name="subject",
        range_key_proto_value=str,
    )
    return conn.create_table(
        name="messages", schema=schema, read_units=10, write_units=10
    )
@freeze_time("2012-01-14")
@mock_dynamodb_deprecated
def test_create_table():
    """describe_table reports schema, throughput and the frozen creation time."""
    conn = boto.connect_dynamodb()
    create_table(conn)
    expected = {
        "Table": {
            "CreationDateTime": 1326499200.0,  # epoch for 2012-01-14 (freeze_time)
            "ItemCount": 0,
            "KeySchema": {
                "HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"},
                "RangeKeyElement": {"AttributeName": "subject", "AttributeType": "S"},
            },
            "ProvisionedThroughput": {
                "ReadCapacityUnits": 10,
                "WriteCapacityUnits": 10,
            },
            "TableName": "messages",
            "TableSizeBytes": 0,
            "TableStatus": "ACTIVE",
        }
    }
    conn.describe_table("messages").should.equal(expected)
@mock_dynamodb_deprecated
def test_delete_table():
    """Deleting a table removes it; deleting it twice raises an error."""
    conn = boto.connect_dynamodb()
    create_table(conn)
    conn.list_tables().should.have.length_of(1)
    conn.layer1.delete_table("messages")
    conn.list_tables().should.have.length_of(0)
    conn.layer1.delete_table.when.called_with("messages").should.throw(
        DynamoDBResponseError
    )
@mock_dynamodb_deprecated
def test_update_table_throughput():
    """update_throughput changes read/write units, visible after a refresh."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    (table.read_units, table.write_units).should.equal((10, 10))
    table.update_throughput(5, 6)
    table.refresh()
    (table.read_units, table.write_units).should.equal((5, 6))
@mock_dynamodb_deprecated
def test_item_add_and_describe_and_update():
    """Put an item, read back a projection, then overwrite an attribute in place."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(
        hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data
    )
    item.put()
    table.has_item("LOLCat Forum", "Check this out!").should.equal(True)
    # Projected reads still include the key attributes.
    returned_item = table.get_item(
        hash_key="LOLCat Forum",
        range_key="Check this out!",
        attributes_to_get=["Body", "SentBy"],
    )
    dict(returned_item).should.equal(
        {
            "forum_name": "LOLCat Forum",
            "subject": "Check this out!",
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User A",
        }
    )
    # A second put() on the same keys replaces the stored item.
    item["SentBy"] = "User B"
    item.put()
    returned_item = table.get_item(
        hash_key="LOLCat Forum",
        range_key="Check this out!",
        attributes_to_get=["Body", "SentBy"],
    )
    dict(returned_item).should.equal(
        {
            "forum_name": "LOLCat Forum",
            "subject": "Check this out!",
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User B",
        }
    )
@mock_dynamodb_deprecated
def test_item_put_without_table():
    """Putting an item into an undeclared table is a response error."""
    conn = boto.connect_dynamodb()
    conn.layer1.put_item.when.called_with(
        table_name="undeclared-table",
        item=dict(hash_key="LOLCat Forum", range_key="Check this out!"),
    ).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_get_missing_item():
    """get_item on a missing key raises; has_item simply returns False."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    table.get_item.when.called_with(hash_key="tester", range_key="other").should.throw(
        DynamoDBKeyNotFoundError
    )
    table.has_item("foobar", "more").should.equal(False)
@mock_dynamodb_deprecated
def test_get_item_with_undeclared_table():
    """get_item against a table that was never created raises not-found."""
    conn = boto.connect_dynamodb()
    conn.layer1.get_item.when.called_with(
        table_name="undeclared-table",
        key={"HashKeyElement": {"S": "tester"}, "RangeKeyElement": {"S": "test-range"}},
    ).should.throw(DynamoDBKeyNotFoundError)
@mock_dynamodb_deprecated
def test_get_item_without_range_key():
    """Omitting the range key on a hash+range table is a validation error."""
    conn = boto.connect_dynamodb()
    message_table_schema = conn.create_schema(
        hash_key_name="test_hash",
        hash_key_proto_value=int,
        range_key_name="test_range",
        range_key_proto_value=int,
    )
    table = conn.create_table(
        name="messages", schema=message_table_schema, read_units=10, write_units=10
    )
    hash_key = 3241526475
    range_key = 1234567890987
    new_item = table.new_item(hash_key=hash_key, range_key=range_key)
    new_item.put()
    table.get_item.when.called_with(hash_key=hash_key).should.throw(
        DynamoDBValidationError
    )
@mock_dynamodb_deprecated
def test_delete_item():
    """Deleting an item empties the table; a second delete raises."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(
        hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data
    )
    item.put()
    table.refresh()
    table.item_count.should.equal(1)
    response = item.delete()
    response.should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5})
    table.refresh()
    table.item_count.should.equal(0)
    item.delete.when.called_with().should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_delete_item_with_attribute_response():
    """delete(return_values='ALL_OLD') echoes the full item that was removed."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(
        hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data
    )
    item.put()
    table.refresh()
    table.item_count.should.equal(1)
    response = item.delete(return_values="ALL_OLD")
    response.should.equal(
        {
            "Attributes": {
                "Body": "http://url_to_lolcat.gif",
                "forum_name": "LOLCat Forum",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
                "SentBy": "User A",
                "subject": "Check this out!",
            },
            "ConsumedCapacityUnits": 0.5,
        }
    )
    table.refresh()
    table.item_count.should.equal(0)
    item.delete.when.called_with().should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_delete_item_with_undeclared_table():
    """delete_item against a table that was never created raises an error."""
    conn = boto.connect_dynamodb()
    conn.layer1.delete_item.when.called_with(
        table_name="undeclared-table",
        key={"HashKeyElement": {"S": "tester"}, "RangeKeyElement": {"S": "test-range"}},
    ).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_query():
    """Range-key conditions (GT/CONTAINS/BEGINS_WITH/BETWEEN) filter query results."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data)
    item.put()
    item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data)
    item.put()
    item = table.new_item(hash_key="the-key", range_key="789", attrs=item_data)
    item.put()
    # NOTE: range keys are strings here, so comparisons are lexicographic.
    results = table.query(hash_key="the-key", range_key_condition=condition.GT("1"))
    results.response["Items"].should.have.length_of(3)
    results = table.query(hash_key="the-key", range_key_condition=condition.GT("234"))
    results.response["Items"].should.have.length_of(2)
    results = table.query(hash_key="the-key", range_key_condition=condition.GT("9999"))
    results.response["Items"].should.have.length_of(0)
    results = table.query(
        hash_key="the-key", range_key_condition=condition.CONTAINS("12")
    )
    results.response["Items"].should.have.length_of(1)
    results = table.query(
        hash_key="the-key", range_key_condition=condition.BEGINS_WITH("7")
    )
    results.response["Items"].should.have.length_of(1)
    results = table.query(
        hash_key="the-key", range_key_condition=condition.BETWEEN("567", "890")
    )
    results.response["Items"].should.have.length_of(1)
@mock_dynamodb_deprecated
def test_query_with_undeclared_table():
    """Querying a table that was never created raises a response error."""
    conn = boto.connect_dynamodb()
    conn.layer1.query.when.called_with(
        table_name="undeclared-table",
        hash_key_value={"S": "the-key"},
        range_key_conditions={
            "AttributeValueList": [{"S": "User B"}],
            "ComparisonOperator": "EQ",
        },
    ).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_scan():
    """Scan filters (EQ/BEGINS_WITH/CONTAINS/NOT_NULL/NULL/BETWEEN) select the right items."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data)
    item.put()
    item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data)
    item.put()
    # Third item carries extra attributes (a number set and a number).
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User B",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
        "Ids": set([1, 2, 3]),
        "PK": 7,
    }
    item = table.new_item(hash_key="the-key", range_key="789", attrs=item_data)
    item.put()
    results = table.scan()
    results.response["Items"].should.have.length_of(3)
    results = table.scan(scan_filter={"SentBy": condition.EQ("User B")})
    results.response["Items"].should.have.length_of(1)
    results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")})
    results.response["Items"].should.have.length_of(3)
    results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)})
    results.response["Items"].should.have.length_of(1)
    results = table.scan(scan_filter={"Ids": condition.NOT_NULL()})
    results.response["Items"].should.have.length_of(1)
    results = table.scan(scan_filter={"Ids": condition.NULL()})
    results.response["Items"].should.have.length_of(2)
    results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)})
    results.response["Items"].should.have.length_of(0)
    results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)})
    results.response["Items"].should.have.length_of(1)
@mock_dynamodb_deprecated
def test_scan_with_undeclared_table():
    """Scanning a table that was never created raises a response error."""
    conn = boto.connect_dynamodb()
    conn.layer1.scan.when.called_with(
        table_name="undeclared-table",
        scan_filter={
            "SentBy": {
                "AttributeValueList": [{"S": "User B"}],
                "ComparisonOperator": "EQ",
            }
        },
    ).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_scan_after_has_item():
    """has_item on a missing key must not create phantom rows visible to scan."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    list(table.scan()).should.equal([])
    table.has_item(hash_key="the-key", range_key="123")
    # Still empty: the existence check left no trace behind.
    list(table.scan()).should.equal([])
@mock_dynamodb_deprecated
def test_write_batch():
    """batch_write_item applies batched puts, then batched deletes."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    batch_list = conn.new_batch_write_list()
    items = []
    items.append(
        table.new_item(
            hash_key="the-key",
            range_key="123",
            attrs={
                "Body": "http://url_to_lolcat.gif",
                "SentBy": "User A",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
            },
        )
    )
    items.append(
        table.new_item(
            hash_key="the-key",
            range_key="789",
            attrs={
                "Body": "http://url_to_lolcat.gif",
                "SentBy": "User B",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
                "Ids": set([1, 2, 3]),
                "PK": 7,
            },
        )
    )
    batch_list.add_batch(table, puts=items)
    conn.batch_write_item(batch_list)
    table.refresh()
    table.item_count.should.equal(2)
    # A fresh batch list is needed for the delete pass.
    batch_list = conn.new_batch_write_list()
    batch_list.add_batch(table, deletes=[("the-key", "789")])
    conn.batch_write_item(batch_list)
    table.refresh()
    table.item_count.should.equal(1)
@mock_dynamodb_deprecated
def test_batch_read():
    """batch_get_item fetches the requested (hash, range) pairs across hash keys."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data)
    item.put()
    item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data)
    item.put()
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User B",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
        "Ids": set([1, 2, 3]),
        "PK": 7,
    }
    item = table.new_item(hash_key="another-key", range_key="789", attrs=item_data)
    item.put()
    items = table.batch_get_item([("the-key", "123"), ("another-key", "789")])
    # Consume the iterator so that the underlying batch call actually runs,
    # without materializing a throwaway list just to take its length.
    count = sum(1 for _ in items)
    count.should.equal(2)
from __future__ import unicode_literals
import boto
import sure # noqa
from freezegun import freeze_time
from moto import mock_dynamodb_deprecated
from boto.dynamodb import condition
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError, DynamoDBValidationError
from boto.exception import DynamoDBResponseError
def create_table(conn):
    """Create the hash+range 'messages' table used by these tests."""
    schema = conn.create_schema(
        hash_key_name="forum_name",
        hash_key_proto_value=str,
        range_key_name="subject",
        range_key_proto_value=str,
    )
    return conn.create_table(
        name="messages", schema=schema, read_units=10, write_units=10
    )
@freeze_time("2012-01-14")
@mock_dynamodb_deprecated
def test_create_table():
    """The full describe_table payload matches the created schema and throughput."""
    conn = boto.connect_dynamodb()
    create_table(conn)
    expected = {
        "Table": {
            "CreationDateTime": 1326499200.0,  # epoch for 2012-01-14 (freeze_time)
            "ItemCount": 0,
            "KeySchema": {
                "HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"},
                "RangeKeyElement": {"AttributeName": "subject", "AttributeType": "S"},
            },
            "ProvisionedThroughput": {
                "ReadCapacityUnits": 10,
                "WriteCapacityUnits": 10,
            },
            "TableName": "messages",
            "TableSizeBytes": 0,
            "TableStatus": "ACTIVE",
        }
    }
    conn.describe_table("messages").should.equal(expected)
@mock_dynamodb_deprecated
def test_delete_table():
    """delete_table removes the table; a repeat delete is a response error."""
    conn = boto.connect_dynamodb()
    create_table(conn)
    conn.list_tables().should.have.length_of(1)
    conn.layer1.delete_table("messages")
    conn.list_tables().should.have.length_of(0)
    conn.layer1.delete_table.when.called_with("messages").should.throw(
        DynamoDBResponseError
    )
@mock_dynamodb_deprecated
def test_update_table_throughput():
    """update_throughput changes read/write units, visible after refresh()."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    table.read_units.should.equal(10)
    table.write_units.should.equal(10)
    table.update_throughput(5, 6)
    table.refresh()
    table.read_units.should.equal(5)
    table.write_units.should.equal(6)
@mock_dynamodb_deprecated
def test_item_add_and_describe_and_update():
    """Round-trip a put, a projected get, and an in-place attribute overwrite."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(
        hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data
    )
    item.put()
    table.has_item("LOLCat Forum", "Check this out!").should.equal(True)
    # Key attributes come back even with a restricted projection.
    returned_item = table.get_item(
        hash_key="LOLCat Forum",
        range_key="Check this out!",
        attributes_to_get=["Body", "SentBy"],
    )
    dict(returned_item).should.equal(
        {
            "forum_name": "LOLCat Forum",
            "subject": "Check this out!",
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User A",
        }
    )
    # Re-putting the same keys overwrites the stored item.
    item["SentBy"] = "User B"
    item.put()
    returned_item = table.get_item(
        hash_key="LOLCat Forum",
        range_key="Check this out!",
        attributes_to_get=["Body", "SentBy"],
    )
    dict(returned_item).should.equal(
        {
            "forum_name": "LOLCat Forum",
            "subject": "Check this out!",
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User B",
        }
    )
@mock_dynamodb_deprecated
def test_item_put_without_table():
    """put_item against an undeclared table raises a response error."""
    conn = boto.connect_dynamodb()
    conn.layer1.put_item.when.called_with(
        table_name="undeclared-table",
        item=dict(hash_key="LOLCat Forum", range_key="Check this out!"),
    ).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_get_missing_item():
    """Missing keys: get_item raises, has_item returns False."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    table.get_item.when.called_with(hash_key="tester", range_key="other").should.throw(
        DynamoDBKeyNotFoundError
    )
    table.has_item("foobar", "more").should.equal(False)
@mock_dynamodb_deprecated
def test_get_item_with_undeclared_table():
    """get_item against an undeclared table raises not-found."""
    conn = boto.connect_dynamodb()
    conn.layer1.get_item.when.called_with(
        table_name="undeclared-table",
        key={"HashKeyElement": {"S": "tester"}, "RangeKeyElement": {"S": "test-range"}},
    ).should.throw(DynamoDBKeyNotFoundError)
@mock_dynamodb_deprecated
def test_get_item_without_range_key():
    """get_item without the range key on a hash+range table is a validation error."""
    conn = boto.connect_dynamodb()
    message_table_schema = conn.create_schema(
        hash_key_name="test_hash",
        hash_key_proto_value=int,
        range_key_name="test_range",
        range_key_proto_value=int,
    )
    table = conn.create_table(
        name="messages", schema=message_table_schema, read_units=10, write_units=10
    )
    hash_key = 3241526475
    range_key = 1234567890987
    new_item = table.new_item(hash_key=hash_key, range_key=range_key)
    new_item.put()
    table.get_item.when.called_with(hash_key=hash_key).should.throw(
        DynamoDBValidationError
    )
@mock_dynamodb_deprecated
def test_delete_item():
    """delete() empties the table and returns no attributes; repeating it raises."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(
        hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data
    )
    item.put()
    table.refresh()
    table.item_count.should.equal(1)
    response = item.delete()
    response.should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5})
    table.refresh()
    table.item_count.should.equal(0)
    item.delete.when.called_with().should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_delete_item_with_attribute_response():
    """delete(return_values='ALL_OLD') returns the removed item's full attributes."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(
        hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data
    )
    item.put()
    table.refresh()
    table.item_count.should.equal(1)
    response = item.delete(return_values="ALL_OLD")
    response.should.equal(
        {
            "Attributes": {
                "Body": "http://url_to_lolcat.gif",
                "forum_name": "LOLCat Forum",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
                "SentBy": "User A",
                "subject": "Check this out!",
            },
            "ConsumedCapacityUnits": 0.5,
        }
    )
    table.refresh()
    table.item_count.should.equal(0)
    item.delete.when.called_with().should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_delete_item_with_undeclared_table():
    """delete_item against an undeclared table raises a response error."""
    conn = boto.connect_dynamodb()
    conn.layer1.delete_item.when.called_with(
        table_name="undeclared-table",
        key={"HashKeyElement": {"S": "tester"}, "RangeKeyElement": {"S": "test-range"}},
    ).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_query():
    """Query with GT/CONTAINS/BEGINS_WITH/BETWEEN range-key conditions."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data)
    item.put()
    item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data)
    item.put()
    item = table.new_item(hash_key="the-key", range_key="789", attrs=item_data)
    item.put()
    # Range keys are strings, so these comparisons are lexicographic.
    results = table.query(hash_key="the-key", range_key_condition=condition.GT("1"))
    results.response["Items"].should.have.length_of(3)
    results = table.query(hash_key="the-key", range_key_condition=condition.GT("234"))
    results.response["Items"].should.have.length_of(2)
    results = table.query(hash_key="the-key", range_key_condition=condition.GT("9999"))
    results.response["Items"].should.have.length_of(0)
    results = table.query(
        hash_key="the-key", range_key_condition=condition.CONTAINS("12")
    )
    results.response["Items"].should.have.length_of(1)
    results = table.query(
        hash_key="the-key", range_key_condition=condition.BEGINS_WITH("7")
    )
    results.response["Items"].should.have.length_of(1)
    results = table.query(
        hash_key="the-key", range_key_condition=condition.BETWEEN("567", "890")
    )
    results.response["Items"].should.have.length_of(1)
@mock_dynamodb_deprecated
def test_query_with_undeclared_table():
    """query against an undeclared table raises a response error."""
    conn = boto.connect_dynamodb()
    conn.layer1.query.when.called_with(
        table_name="undeclared-table",
        hash_key_value={"S": "the-key"},
        range_key_conditions={
            "AttributeValueList": [{"S": "User B"}],
            "ComparisonOperator": "EQ",
        },
    ).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_scan():
    """Scan filters over plain attributes, sets (CONTAINS/NULL) and numbers (BETWEEN)."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data)
    item.put()
    item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data)
    item.put()
    # Third item has extra attributes: a number set (Ids) and a number (PK).
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User B",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
        "Ids": set([1, 2, 3]),
        "PK": 7,
    }
    item = table.new_item(hash_key="the-key", range_key="789", attrs=item_data)
    item.put()
    results = table.scan()
    results.response["Items"].should.have.length_of(3)
    results = table.scan(scan_filter={"SentBy": condition.EQ("User B")})
    results.response["Items"].should.have.length_of(1)
    results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")})
    results.response["Items"].should.have.length_of(3)
    results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)})
    results.response["Items"].should.have.length_of(1)
    results = table.scan(scan_filter={"Ids": condition.NOT_NULL()})
    results.response["Items"].should.have.length_of(1)
    results = table.scan(scan_filter={"Ids": condition.NULL()})
    results.response["Items"].should.have.length_of(2)
    results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)})
    results.response["Items"].should.have.length_of(0)
    results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)})
    results.response["Items"].should.have.length_of(1)
@mock_dynamodb_deprecated
def test_scan_with_undeclared_table():
    """scan against an undeclared table raises a response error."""
    conn = boto.connect_dynamodb()
    conn.layer1.scan.when.called_with(
        table_name="undeclared-table",
        scan_filter={
            "SentBy": {
                "AttributeValueList": [{"S": "User B"}],
                "ComparisonOperator": "EQ",
            }
        },
    ).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_scan_after_has_item():
    """Checking has_item for a missing key must leave the table empty."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    list(table.scan()).should.equal([])
    table.has_item(hash_key="the-key", range_key="123")
    # The existence probe must not have created a phantom item.
    list(table.scan()).should.equal([])
@mock_dynamodb_deprecated
def test_write_batch():
    """batch_write_item handles both batched puts and batched deletes."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    batch_list = conn.new_batch_write_list()
    items = []
    items.append(
        table.new_item(
            hash_key="the-key",
            range_key="123",
            attrs={
                "Body": "http://url_to_lolcat.gif",
                "SentBy": "User A",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
            },
        )
    )
    items.append(
        table.new_item(
            hash_key="the-key",
            range_key="789",
            attrs={
                "Body": "http://url_to_lolcat.gif",
                "SentBy": "User B",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
                "Ids": set([1, 2, 3]),
                "PK": 7,
            },
        )
    )
    batch_list.add_batch(table, puts=items)
    conn.batch_write_item(batch_list)
    table.refresh()
    table.item_count.should.equal(2)
    # Fresh batch list for the delete pass.
    batch_list = conn.new_batch_write_list()
    batch_list.add_batch(table, deletes=[("the-key", "789")])
    conn.batch_write_item(batch_list)
    table.refresh()
    table.item_count.should.equal(1)
@mock_dynamodb_deprecated
def test_batch_read():
    """batch_get_item fetches the requested (hash, range) pairs across hash keys."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data)
    item.put()
    item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data)
    item.put()
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User B",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
        "Ids": set([1, 2, 3]),
        "PK": 7,
    }
    item = table.new_item(hash_key="another-key", range_key="789", attrs=item_data)
    item.put()
    items = table.batch_get_item([("the-key", "123"), ("another-key", "789")])
    # Consume the iterator so that the underlying batch call actually runs,
    # without materializing a throwaway list just to take its length.
    count = sum(1 for _ in items)
    count.should.equal(2)

View File

@ -1,390 +1,390 @@
from __future__ import unicode_literals
import boto
import sure # noqa
from freezegun import freeze_time
from moto import mock_dynamodb_deprecated
from boto.dynamodb import condition
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError
from boto.exception import DynamoDBResponseError
def create_table(conn):
    """Create the hash-key-only 'messages' table used by these tests."""
    schema = conn.create_schema(
        hash_key_name="forum_name", hash_key_proto_value=str
    )
    return conn.create_table(
        name="messages", schema=schema, read_units=10, write_units=10
    )
@freeze_time("2012-01-14")
@mock_dynamodb_deprecated
def test_create_table():
    """describe_table for a hash-only table has no RangeKeyElement in its schema."""
    conn = boto.connect_dynamodb()
    create_table(conn)
    expected = {
        "Table": {
            "CreationDateTime": 1326499200.0,  # epoch for 2012-01-14 (freeze_time)
            "ItemCount": 0,
            "KeySchema": {
                "HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"}
            },
            "ProvisionedThroughput": {
                "ReadCapacityUnits": 10,
                "WriteCapacityUnits": 10,
            },
            "TableName": "messages",
            "TableSizeBytes": 0,
            "TableStatus": "ACTIVE",
        }
    }
    conn.describe_table("messages").should.equal(expected)
@mock_dynamodb_deprecated
def test_delete_table():
    """delete_table removes the table; repeating the delete raises."""
    conn = boto.connect_dynamodb()
    create_table(conn)
    conn.list_tables().should.have.length_of(1)
    conn.layer1.delete_table("messages")
    conn.list_tables().should.have.length_of(0)
    conn.layer1.delete_table.when.called_with("messages").should.throw(
        DynamoDBResponseError
    )
@mock_dynamodb_deprecated
def test_update_table_throughput():
    """update_throughput changes read/write units after a refresh."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    (table.read_units, table.write_units).should.equal((10, 10))
    table.update_throughput(5, 6)
    table.refresh()
    (table.read_units, table.write_units).should.equal((5, 6))
@mock_dynamodb_deprecated
def test_item_add_and_describe_and_update():
conn = boto.connect_dynamodb()
table = create_table(conn)
item_data = {
"Body": "http://url_to_lolcat.gif",
"SentBy": "User A",
"ReceivedTime": "12/9/2011 11:36:03 PM",
}
item = table.new_item(hash_key="LOLCat Forum", attrs=item_data)
item.put()
returned_item = table.get_item(
hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"]
)
dict(returned_item).should.equal(
{
"forum_name": "LOLCat Forum",
"Body": "http://url_to_lolcat.gif",
"SentBy": "User A",
}
)
item["SentBy"] = "User B"
item.put()
returned_item = table.get_item(
hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"]
)
dict(returned_item).should.equal(
{
"forum_name": "LOLCat Forum",
"Body": "http://url_to_lolcat.gif",
"SentBy": "User B",
}
)
@mock_dynamodb_deprecated
def test_item_put_without_table():
conn = boto.connect_dynamodb()
conn.layer1.put_item.when.called_with(
table_name="undeclared-table", item=dict(hash_key="LOLCat Forum")
).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_get_missing_item():
conn = boto.connect_dynamodb()
table = create_table(conn)
table.get_item.when.called_with(hash_key="tester").should.throw(
DynamoDBKeyNotFoundError
)
@mock_dynamodb_deprecated
def test_get_item_with_undeclared_table():
conn = boto.connect_dynamodb()
conn.layer1.get_item.when.called_with(
table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}}
).should.throw(DynamoDBKeyNotFoundError)
@mock_dynamodb_deprecated
def test_delete_item():
    """Deleting an item removes it; a second delete raises an error."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="LOLCat Forum", attrs=item_data)
    item.put()
    table.refresh()
    table.item_count.should.equal(1)
    # With the default return_values, no attributes come back.
    response = item.delete()
    response.should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5})
    table.refresh()
    table.item_count.should.equal(0)
    # Deleting the already-deleted item is an error.
    item.delete.when.called_with().should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_delete_item_with_attribute_response():
conn = boto.connect_dynamodb()
table = create_table(conn)
item_data = {
"Body": "http://url_to_lolcat.gif",
"SentBy": "User A",
"ReceivedTime": "12/9/2011 11:36:03 PM",
}
item = table.new_item(hash_key="LOLCat Forum", attrs=item_data)
item.put()
table.refresh()
table.item_count.should.equal(1)
response = item.delete(return_values="ALL_OLD")
response.should.equal(
{
"Attributes": {
"Body": "http://url_to_lolcat.gif",
"forum_name": "LOLCat Forum",
"ReceivedTime": "12/9/2011 11:36:03 PM",
"SentBy": "User A",
},
"ConsumedCapacityUnits": 0.5,
}
)
table.refresh()
table.item_count.should.equal(0)
item.delete.when.called_with().should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_delete_item_with_undeclared_table():
conn = boto.connect_dynamodb()
conn.layer1.delete_item.when.called_with(
table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}}
).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_query():
    """Querying by hash key returns the single stored item."""
    connection = boto.connect_dynamodb()
    table = create_table(connection)

    attributes = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    table.new_item(hash_key="the-key", attrs=attributes).put()

    results = table.query(hash_key="the-key")
    results.response["Items"].should.have.length_of(1)
@mock_dynamodb_deprecated
def test_query_with_undeclared_table():
conn = boto.connect_dynamodb()
conn.layer1.query.when.called_with(
table_name="undeclared-table", hash_key_value={"S": "the-key"}
).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_scan():
    """scan() honours EQ/BEGINS_WITH/CONTAINS/NOT_NULL/NULL/BETWEEN filters."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", attrs=item_data)
    item.put()
    item = table.new_item(hash_key="the-key2", attrs=item_data)
    item.put()
    # The third item carries the extra "Ids" set and numeric "PK"
    # attributes that the CONTAINS/NOT_NULL/NULL/BETWEEN filters rely on.
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User B",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
        "Ids": set([1, 2, 3]),
        "PK": 7,
    }
    item = table.new_item(hash_key="the-key3", attrs=item_data)
    item.put()
    # Unfiltered scan sees all three items.
    results = table.scan()
    results.response["Items"].should.have.length_of(3)
    results = table.scan(scan_filter={"SentBy": condition.EQ("User B")})
    results.response["Items"].should.have.length_of(1)
    results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")})
    results.response["Items"].should.have.length_of(3)
    results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)})
    results.response["Items"].should.have.length_of(1)
    # Only the third item has an "Ids" attribute at all.
    results = table.scan(scan_filter={"Ids": condition.NOT_NULL()})
    results.response["Items"].should.have.length_of(1)
    results = table.scan(scan_filter={"Ids": condition.NULL()})
    results.response["Items"].should.have.length_of(2)
    # PK == 7 falls inside 5..8 but not 8..9.
    results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)})
    results.response["Items"].should.have.length_of(0)
    results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)})
    results.response["Items"].should.have.length_of(1)
@mock_dynamodb_deprecated
def test_scan_with_undeclared_table():
conn = boto.connect_dynamodb()
conn.layer1.scan.when.called_with(
table_name="undeclared-table",
scan_filter={
"SentBy": {
"AttributeValueList": [{"S": "User B"}],
"ComparisonOperator": "EQ",
}
},
).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_scan_after_has_item():
conn = boto.connect_dynamodb()
table = create_table(conn)
list(table.scan()).should.equal([])
table.has_item("the-key")
list(table.scan()).should.equal([])
@mock_dynamodb_deprecated
def test_write_batch():
    """batch_write_item() applies batched puts and batched deletes."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    # Batch-put two items.
    batch_list = conn.new_batch_write_list()
    items = []
    items.append(
        table.new_item(
            hash_key="the-key",
            attrs={
                "Body": "http://url_to_lolcat.gif",
                "SentBy": "User A",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
            },
        )
    )
    items.append(
        table.new_item(
            hash_key="the-key2",
            attrs={
                "Body": "http://url_to_lolcat.gif",
                "SentBy": "User B",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
                "Ids": set([1, 2, 3]),
                "PK": 7,
            },
        )
    )
    batch_list.add_batch(table, puts=items)
    conn.batch_write_item(batch_list)
    table.refresh()
    table.item_count.should.equal(2)
    # Batch-delete one of them by hash key.
    batch_list = conn.new_batch_write_list()
    batch_list.add_batch(table, deletes=[("the-key")])
    conn.batch_write_item(batch_list)
    table.refresh()
    table.item_count.should.equal(1)
@mock_dynamodb_deprecated
def test_batch_read():
    """batch_get_item() by hash key returns each requested item once."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    # Two items sharing the same attribute payload.
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key1", attrs=item_data)
    item.put()
    item = table.new_item(hash_key="the-key2", attrs=item_data)
    item.put()
    # A third item with extra set/number attributes.
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User B",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
        "Ids": set([1, 2, 3]),
        "PK": 7,
    }
    item = table.new_item(hash_key="another-key", attrs=item_data)
    item.put()
    # Request two of the three items by hash key.
    items = table.batch_get_item(["the-key1", "another-key"])
    # Iterate through so that batch_item gets called.
    count = len(list(items))
    # Was `count.should.have.equal(2)`; use plain `should.equal`,
    # consistent with the other batch-read test in this suite.
    count.should.equal(2)
from __future__ import unicode_literals
import boto
import sure # noqa
from freezegun import freeze_time
from moto import mock_dynamodb_deprecated
from boto.dynamodb import condition
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError
from boto.exception import DynamoDBResponseError
def create_table(conn):
message_table_schema = conn.create_schema(
hash_key_name="forum_name", hash_key_proto_value=str
)
table = conn.create_table(
name="messages", schema=message_table_schema, read_units=10, write_units=10
)
return table
@freeze_time("2012-01-14")
@mock_dynamodb_deprecated
def test_create_table():
conn = boto.connect_dynamodb()
create_table(conn)
expected = {
"Table": {
"CreationDateTime": 1326499200.0,
"ItemCount": 0,
"KeySchema": {
"HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"}
},
"ProvisionedThroughput": {
"ReadCapacityUnits": 10,
"WriteCapacityUnits": 10,
},
"TableName": "messages",
"TableSizeBytes": 0,
"TableStatus": "ACTIVE",
}
}
conn.describe_table("messages").should.equal(expected)
@mock_dynamodb_deprecated
def test_delete_table():
conn = boto.connect_dynamodb()
create_table(conn)
conn.list_tables().should.have.length_of(1)
conn.layer1.delete_table("messages")
conn.list_tables().should.have.length_of(0)
conn.layer1.delete_table.when.called_with("messages").should.throw(
DynamoDBResponseError
)
@mock_dynamodb_deprecated
def test_update_table_throughput():
conn = boto.connect_dynamodb()
table = create_table(conn)
table.read_units.should.equal(10)
table.write_units.should.equal(10)
table.update_throughput(5, 6)
table.refresh()
table.read_units.should.equal(5)
table.write_units.should.equal(6)
@mock_dynamodb_deprecated
def test_item_add_and_describe_and_update():
conn = boto.connect_dynamodb()
table = create_table(conn)
item_data = {
"Body": "http://url_to_lolcat.gif",
"SentBy": "User A",
"ReceivedTime": "12/9/2011 11:36:03 PM",
}
item = table.new_item(hash_key="LOLCat Forum", attrs=item_data)
item.put()
returned_item = table.get_item(
hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"]
)
dict(returned_item).should.equal(
{
"forum_name": "LOLCat Forum",
"Body": "http://url_to_lolcat.gif",
"SentBy": "User A",
}
)
item["SentBy"] = "User B"
item.put()
returned_item = table.get_item(
hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"]
)
dict(returned_item).should.equal(
{
"forum_name": "LOLCat Forum",
"Body": "http://url_to_lolcat.gif",
"SentBy": "User B",
}
)
@mock_dynamodb_deprecated
def test_item_put_without_table():
conn = boto.connect_dynamodb()
conn.layer1.put_item.when.called_with(
table_name="undeclared-table", item=dict(hash_key="LOLCat Forum")
).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_get_missing_item():
conn = boto.connect_dynamodb()
table = create_table(conn)
table.get_item.when.called_with(hash_key="tester").should.throw(
DynamoDBKeyNotFoundError
)
@mock_dynamodb_deprecated
def test_get_item_with_undeclared_table():
conn = boto.connect_dynamodb()
conn.layer1.get_item.when.called_with(
table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}}
).should.throw(DynamoDBKeyNotFoundError)
@mock_dynamodb_deprecated
def test_delete_item():
conn = boto.connect_dynamodb()
table = create_table(conn)
item_data = {
"Body": "http://url_to_lolcat.gif",
"SentBy": "User A",
"ReceivedTime": "12/9/2011 11:36:03 PM",
}
item = table.new_item(hash_key="LOLCat Forum", attrs=item_data)
item.put()
table.refresh()
table.item_count.should.equal(1)
response = item.delete()
response.should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5})
table.refresh()
table.item_count.should.equal(0)
item.delete.when.called_with().should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_delete_item_with_attribute_response():
conn = boto.connect_dynamodb()
table = create_table(conn)
item_data = {
"Body": "http://url_to_lolcat.gif",
"SentBy": "User A",
"ReceivedTime": "12/9/2011 11:36:03 PM",
}
item = table.new_item(hash_key="LOLCat Forum", attrs=item_data)
item.put()
table.refresh()
table.item_count.should.equal(1)
response = item.delete(return_values="ALL_OLD")
response.should.equal(
{
"Attributes": {
"Body": "http://url_to_lolcat.gif",
"forum_name": "LOLCat Forum",
"ReceivedTime": "12/9/2011 11:36:03 PM",
"SentBy": "User A",
},
"ConsumedCapacityUnits": 0.5,
}
)
table.refresh()
table.item_count.should.equal(0)
item.delete.when.called_with().should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_delete_item_with_undeclared_table():
conn = boto.connect_dynamodb()
conn.layer1.delete_item.when.called_with(
table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}}
).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_query():
conn = boto.connect_dynamodb()
table = create_table(conn)
item_data = {
"Body": "http://url_to_lolcat.gif",
"SentBy": "User A",
"ReceivedTime": "12/9/2011 11:36:03 PM",
}
item = table.new_item(hash_key="the-key", attrs=item_data)
item.put()
results = table.query(hash_key="the-key")
results.response["Items"].should.have.length_of(1)
@mock_dynamodb_deprecated
def test_query_with_undeclared_table():
conn = boto.connect_dynamodb()
conn.layer1.query.when.called_with(
table_name="undeclared-table", hash_key_value={"S": "the-key"}
).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_scan():
conn = boto.connect_dynamodb()
table = create_table(conn)
item_data = {
"Body": "http://url_to_lolcat.gif",
"SentBy": "User A",
"ReceivedTime": "12/9/2011 11:36:03 PM",
}
item = table.new_item(hash_key="the-key", attrs=item_data)
item.put()
item = table.new_item(hash_key="the-key2", attrs=item_data)
item.put()
item_data = {
"Body": "http://url_to_lolcat.gif",
"SentBy": "User B",
"ReceivedTime": "12/9/2011 11:36:03 PM",
"Ids": set([1, 2, 3]),
"PK": 7,
}
item = table.new_item(hash_key="the-key3", attrs=item_data)
item.put()
results = table.scan()
results.response["Items"].should.have.length_of(3)
results = table.scan(scan_filter={"SentBy": condition.EQ("User B")})
results.response["Items"].should.have.length_of(1)
results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")})
results.response["Items"].should.have.length_of(3)
results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)})
results.response["Items"].should.have.length_of(1)
results = table.scan(scan_filter={"Ids": condition.NOT_NULL()})
results.response["Items"].should.have.length_of(1)
results = table.scan(scan_filter={"Ids": condition.NULL()})
results.response["Items"].should.have.length_of(2)
results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)})
results.response["Items"].should.have.length_of(0)
results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)})
results.response["Items"].should.have.length_of(1)
@mock_dynamodb_deprecated
def test_scan_with_undeclared_table():
conn = boto.connect_dynamodb()
conn.layer1.scan.when.called_with(
table_name="undeclared-table",
scan_filter={
"SentBy": {
"AttributeValueList": [{"S": "User B"}],
"ComparisonOperator": "EQ",
}
},
).should.throw(DynamoDBResponseError)
@mock_dynamodb_deprecated
def test_scan_after_has_item():
conn = boto.connect_dynamodb()
table = create_table(conn)
list(table.scan()).should.equal([])
table.has_item("the-key")
list(table.scan()).should.equal([])
@mock_dynamodb_deprecated
def test_write_batch():
conn = boto.connect_dynamodb()
table = create_table(conn)
batch_list = conn.new_batch_write_list()
items = []
items.append(
table.new_item(
hash_key="the-key",
attrs={
"Body": "http://url_to_lolcat.gif",
"SentBy": "User A",
"ReceivedTime": "12/9/2011 11:36:03 PM",
},
)
)
items.append(
table.new_item(
hash_key="the-key2",
attrs={
"Body": "http://url_to_lolcat.gif",
"SentBy": "User B",
"ReceivedTime": "12/9/2011 11:36:03 PM",
"Ids": set([1, 2, 3]),
"PK": 7,
},
)
)
batch_list.add_batch(table, puts=items)
conn.batch_write_item(batch_list)
table.refresh()
table.item_count.should.equal(2)
batch_list = conn.new_batch_write_list()
batch_list.add_batch(table, deletes=[("the-key")])
conn.batch_write_item(batch_list)
table.refresh()
table.item_count.should.equal(1)
@mock_dynamodb_deprecated
def test_batch_read():
    """batch_get_item() by hash key returns each requested item once."""
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    # Two items sharing the same attribute payload.
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key1", attrs=item_data)
    item.put()
    item = table.new_item(hash_key="the-key2", attrs=item_data)
    item.put()
    # A third item with extra set/number attributes.
    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User B",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
        "Ids": set([1, 2, 3]),
        "PK": 7,
    }
    item = table.new_item(hash_key="another-key", attrs=item_data)
    item.put()
    # Request two of the three items by hash key.
    items = table.batch_get_item(["the-key1", "another-key"])
    # Iterate through so that batch_item gets called.
    count = len(list(items))
    # Was `count.should.have.equal(2)`; use plain `should.equal`,
    # consistent with the other batch-read test in this suite.
    count.should.equal(2)

View File

@ -3609,6 +3609,31 @@ def test_update_supports_list_append_maps():
)
@mock_dynamodb2
def test_update_supports_list_append_with_nested_if_not_exists_operation():
    """list_append(if_not_exists(...)) works on a nested (map) attribute path."""
    dynamo = boto3.resource("dynamodb", region_name="us-west-1")
    table_name = "test"
    dynamo.create_table(
        TableName=table_name,
        AttributeDefinitions=[{"AttributeName": "Id", "AttributeType": "S"}],
        KeySchema=[{"AttributeName": "Id", "KeyType": "HASH"}],
        ProvisionedThroughput={"ReadCapacityUnits": 20, "WriteCapacityUnits": 20},
    )
    table = dynamo.Table(table_name)
    # The nested map exists but has no "event_history" yet, so
    # if_not_exists must fall back to the supplied :empty_list.
    table.put_item(Item={"Id": "item-id", "nest1": {"nest2": {}}})
    table.update_item(
        Key={"Id": "item-id"},
        UpdateExpression="SET nest1.nest2.event_history = list_append(if_not_exists(nest1.nest2.event_history, :empty_list), :new_value)",
        ExpressionAttributeValues={":empty_list": [], ":new_value": ["some_value"]},
    )
    table.get_item(Key={"Id": "item-id"})["Item"].should.equal(
        {"Id": "item-id", "nest1": {"nest2": {"event_history": ["some_value"]}}}
    )
@mock_dynamodb2
def test_update_catches_invalid_list_append_operation():
client = boto3.client("dynamodb", region_name="us-east-1")

View File

@ -1,37 +1,37 @@
from __future__ import unicode_literals
import boto3
from moto import mock_ec2
import sure # noqa
@mock_ec2
def test_describe_account_attributes():
    """describe_account_attributes() returns the fixed per-account limits."""
    conn = boto3.client("ec2", region_name="us-east-1")
    response = conn.describe_account_attributes()
    # Values the mocked backend is expected to report for a fresh account.
    expected_attribute_values = [
        {
            "AttributeValues": [{"AttributeValue": "5"}],
            "AttributeName": "vpc-max-security-groups-per-interface",
        },
        {
            "AttributeValues": [{"AttributeValue": "20"}],
            "AttributeName": "max-instances",
        },
        {
            "AttributeValues": [{"AttributeValue": "EC2"}, {"AttributeValue": "VPC"}],
            "AttributeName": "supported-platforms",
        },
        {
            "AttributeValues": [{"AttributeValue": "none"}],
            "AttributeName": "default-vpc",
        },
        {
            "AttributeValues": [{"AttributeValue": "5"}],
            "AttributeName": "max-elastic-ips",
        },
        {
            "AttributeValues": [{"AttributeValue": "5"}],
            "AttributeName": "vpc-max-elastic-ips",
        },
    ]
    response["AccountAttributes"].should.equal(expected_attribute_values)
from __future__ import unicode_literals
import boto3
from moto import mock_ec2
import sure # noqa
@mock_ec2
def test_describe_account_attributes():
conn = boto3.client("ec2", region_name="us-east-1")
response = conn.describe_account_attributes()
expected_attribute_values = [
{
"AttributeValues": [{"AttributeValue": "5"}],
"AttributeName": "vpc-max-security-groups-per-interface",
},
{
"AttributeValues": [{"AttributeValue": "20"}],
"AttributeName": "max-instances",
},
{
"AttributeValues": [{"AttributeValue": "EC2"}, {"AttributeValue": "VPC"}],
"AttributeName": "supported-platforms",
},
{
"AttributeValues": [{"AttributeValue": "none"}],
"AttributeName": "default-vpc",
},
{
"AttributeValues": [{"AttributeValue": "5"}],
"AttributeName": "max-elastic-ips",
},
{
"AttributeValues": [{"AttributeValue": "5"}],
"AttributeName": "vpc-max-elastic-ips",
},
]
response["AccountAttributes"].should.equal(expected_attribute_values)

View File

@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_amazon_dev_pay():
    """Placeholder: no DevPay behavior is exercised (body is intentionally empty)."""
    pass
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_amazon_dev_pay():
pass

View File

@ -12,6 +12,7 @@ import sure # noqa
from moto import mock_ec2_deprecated, mock_ec2
from moto.ec2.models import AMIS, OWNER_ID
from moto.iam.models import ACCOUNT_ID
from tests.helpers import requires_boto_gte
@ -251,6 +252,19 @@ def test_ami_pulls_attributes_from_instance():
image.kernel_id.should.equal("test-kernel")
@mock_ec2_deprecated
def test_ami_uses_account_id_if_valid_access_key_is_supplied():
access_key = "AKIAXXXXXXXXXXXXXXXX"
conn = boto.connect_ec2(access_key, "the_secret")
reservation = conn.run_instances("ami-1234abcd")
instance = reservation.instances[0]
instance.modify_attribute("kernel", "test-kernel")
image_id = conn.create_image(instance.id, "test-ami", "this is a test ami")
images = conn.get_all_images(owners=["self"])
[(ami.id, ami.owner_id) for ami in images].should.equal([(image_id, ACCOUNT_ID)])
@mock_ec2_deprecated
def test_ami_filters():
conn = boto.connect_ec2("the_key", "the_secret")
@ -773,7 +787,7 @@ def test_ami_filter_wildcard():
instance.create_image(Name="not-matching-image")
my_images = ec2_client.describe_images(
Owners=["111122223333"], Filters=[{"Name": "name", "Values": ["test*"]}]
Owners=[ACCOUNT_ID], Filters=[{"Name": "name", "Values": ["test*"]}]
)["Images"]
my_images.should.have.length_of(1)

View File

@ -1 +1 @@
from __future__ import unicode_literals
from __future__ import unicode_literals

View File

@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_ip_addresses():
pass
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_ip_addresses():
pass

View File

@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_monitoring():
pass
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_monitoring():
pass

View File

@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_placement_groups():
pass
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_placement_groups():
pass

View File

@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_reserved_instances():
pass
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_reserved_instances():
pass

View File

@ -1,96 +1,96 @@
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2_deprecated
@mock_ec2_deprecated
def test_virtual_private_gateways():
conn = boto.connect_vpc("the_key", "the_secret")
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
vpn_gateway.should_not.be.none
vpn_gateway.id.should.match(r"vgw-\w+")
vpn_gateway.type.should.equal("ipsec.1")
vpn_gateway.state.should.equal("available")
vpn_gateway.availability_zone.should.equal("us-east-1a")
@mock_ec2_deprecated
def test_describe_vpn_gateway():
conn = boto.connect_vpc("the_key", "the_secret")
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
vgws = conn.get_all_vpn_gateways()
vgws.should.have.length_of(1)
gateway = vgws[0]
gateway.id.should.match(r"vgw-\w+")
gateway.id.should.equal(vpn_gateway.id)
vpn_gateway.type.should.equal("ipsec.1")
vpn_gateway.state.should.equal("available")
vpn_gateway.availability_zone.should.equal("us-east-1a")
@mock_ec2_deprecated
def test_vpn_gateway_vpc_attachment():
conn = boto.connect_vpc("the_key", "the_secret")
vpc = conn.create_vpc("10.0.0.0/16")
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id)
gateway = conn.get_all_vpn_gateways()[0]
attachments = gateway.attachments
attachments.should.have.length_of(1)
attachments[0].vpc_id.should.equal(vpc.id)
attachments[0].state.should.equal("attached")
@mock_ec2_deprecated
def test_delete_vpn_gateway():
conn = boto.connect_vpc("the_key", "the_secret")
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
conn.delete_vpn_gateway(vpn_gateway.id)
vgws = conn.get_all_vpn_gateways()
vgws.should.have.length_of(0)
@mock_ec2_deprecated
def test_vpn_gateway_tagging():
    """Tags added to a VPN gateway are visible both globally and on the gateway."""
    conn = boto.connect_vpc("the_key", "the_secret")
    vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
    vpn_gateway.add_tag("a key", "some value")
    tag = conn.get_all_tags()[0]
    tag.name.should.equal("a key")
    tag.value.should.equal("some value")
    # Re-fetch the gateway so its tags attribute is up to date.
    vpn_gateway = conn.get_all_vpn_gateways()[0]
    vpn_gateway.tags.should.have.length_of(1)
    vpn_gateway.tags["a key"].should.equal("some value")
@mock_ec2_deprecated
def test_detach_vpn_gateway():
conn = boto.connect_vpc("the_key", "the_secret")
vpc = conn.create_vpc("10.0.0.0/16")
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id)
gateway = conn.get_all_vpn_gateways()[0]
attachments = gateway.attachments
attachments.should.have.length_of(1)
attachments[0].vpc_id.should.equal(vpc.id)
attachments[0].state.should.equal("attached")
conn.detach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id)
gateway = conn.get_all_vpn_gateways()[0]
attachments = gateway.attachments
attachments.should.have.length_of(0)
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2_deprecated
@mock_ec2_deprecated
def test_virtual_private_gateways():
conn = boto.connect_vpc("the_key", "the_secret")
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
vpn_gateway.should_not.be.none
vpn_gateway.id.should.match(r"vgw-\w+")
vpn_gateway.type.should.equal("ipsec.1")
vpn_gateway.state.should.equal("available")
vpn_gateway.availability_zone.should.equal("us-east-1a")
@mock_ec2_deprecated
def test_describe_vpn_gateway():
conn = boto.connect_vpc("the_key", "the_secret")
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
vgws = conn.get_all_vpn_gateways()
vgws.should.have.length_of(1)
gateway = vgws[0]
gateway.id.should.match(r"vgw-\w+")
gateway.id.should.equal(vpn_gateway.id)
vpn_gateway.type.should.equal("ipsec.1")
vpn_gateway.state.should.equal("available")
vpn_gateway.availability_zone.should.equal("us-east-1a")
@mock_ec2_deprecated
def test_vpn_gateway_vpc_attachment():
conn = boto.connect_vpc("the_key", "the_secret")
vpc = conn.create_vpc("10.0.0.0/16")
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id)
gateway = conn.get_all_vpn_gateways()[0]
attachments = gateway.attachments
attachments.should.have.length_of(1)
attachments[0].vpc_id.should.equal(vpc.id)
attachments[0].state.should.equal("attached")
@mock_ec2_deprecated
def test_delete_vpn_gateway():
conn = boto.connect_vpc("the_key", "the_secret")
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
conn.delete_vpn_gateway(vpn_gateway.id)
vgws = conn.get_all_vpn_gateways()
vgws.should.have.length_of(0)
@mock_ec2_deprecated
def test_vpn_gateway_tagging():
conn = boto.connect_vpc("the_key", "the_secret")
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
vpn_gateway.add_tag("a key", "some value")
tag = conn.get_all_tags()[0]
tag.name.should.equal("a key")
tag.value.should.equal("some value")
# Refresh the subnet
vpn_gateway = conn.get_all_vpn_gateways()[0]
vpn_gateway.tags.should.have.length_of(1)
vpn_gateway.tags["a key"].should.equal("some value")
@mock_ec2_deprecated
def test_detach_vpn_gateway():
conn = boto.connect_vpc("the_key", "the_secret")
vpc = conn.create_vpc("10.0.0.0/16")
vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id)
gateway = conn.get_all_vpn_gateways()[0]
attachments = gateway.attachments
attachments.should.have.length_of(1)
attachments[0].vpc_id.should.equal(vpc.id)
attachments[0].state.should.equal("attached")
conn.detach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id)
gateway = conn.get_all_vpn_gateways()[0]
attachments = gateway.attachments
attachments.should.have.length_of(0)

View File

@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_vm_export():
pass
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_vm_export():
pass

View File

@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_vm_import():
pass
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_vm_import():
pass

View File

@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_windows():
pass
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_windows():
pass

View File

@ -1,12 +1,15 @@
import random
import boto3
import json
import sure # noqa
import random
import unittest
from moto.events import mock_events
import boto3
from botocore.exceptions import ClientError
from nose.tools import assert_raises
from moto.core import ACCOUNT_ID
from moto.core.exceptions import JsonRESTError
from moto.events import mock_events
from moto.events.models import EventsBackend
RULES = [
{"Name": "test1", "ScheduleExpression": "rate(5 minutes)"},
@ -136,14 +139,6 @@ def test_list_rule_names_by_target():
assert rule in test_2_target["Rules"]
@mock_events
def test_list_rules():
client = generate_environment()
rules = client.list_rules()
assert len(rules["Rules"]) == len(RULES)
@mock_events
def test_delete_rule():
client = generate_environment()
@ -461,3 +456,50 @@ def test_delete_event_bus_errors():
client.delete_event_bus.when.called_with(Name="default").should.throw(
ClientError, "Cannot delete event bus default."
)
@mock_events
def test_rule_tagging_happy():
    """Tags can be added to, listed from, and removed from a rule."""
    client = generate_environment()
    rule_name = get_random_rule()["Name"]
    rule_arn = client.describe_rule(Name=rule_name).get("Arn")

    tags = [{"Key": "key1", "Value": "value1"}, {"Key": "key2", "Value": "value2"}]
    client.tag_resource(ResourceARN=rule_arn, Tags=tags)

    # list_tags_for_resource gives no ordering guarantee; compare
    # order-insensitively (replaces the old unittest.TestCase("__init__")
    # hack that hand-checked both permutations).
    actual = client.list_tags_for_resource(ResourceARN=rule_arn).get("Tags")
    expected = [{"Value": "value1", "Key": "key1"}, {"Value": "value2", "Key": "key2"}]
    by_key = lambda tag: tag["Key"]
    assert sorted(actual, key=by_key) == sorted(expected, key=by_key)

    # Untagging one key leaves only the other.
    client.untag_resource(ResourceARN=rule_arn, TagKeys=["key1"])
    actual = client.list_tags_for_resource(ResourceARN=rule_arn).get("Tags")
    expected = [{"Key": "key2", "Value": "value2"}]
    assert expected == actual
@mock_events
def test_rule_tagging_sad():
    """Tag operations on an unknown ResourceARN raise JsonRESTError."""
    back_end = EventsBackend("us-west-2")

    # The old code did `raise "<message>"`, which in Python 3 is a
    # TypeError (exceptions must derive from BaseException), not a test
    # failure; raise AssertionError so a missing exception is reported
    # as what it is.
    try:
        back_end.tag_resource("unknown", [])
        raise AssertionError("tag_resource should fail if ResourceARN is not known")
    except JsonRESTError:
        pass
    try:
        back_end.untag_resource("unknown", [])
        raise AssertionError("untag_resource should fail if ResourceARN is not known")
    except JsonRESTError:
        pass
    try:
        back_end.list_tags_for_resource("unknown")
        raise AssertionError(
            "list_tags_for_resource should fail if ResourceARN is not known"
        )
    except JsonRESTError:
        pass

View File

@ -1,21 +1,21 @@
from __future__ import unicode_literals
from tempfile import NamedTemporaryFile
import boto.glacier
import sure # noqa
from moto import mock_glacier_deprecated
@mock_glacier_deprecated
def test_create_and_delete_archive():
    """An uploaded archive can be deleted by the id upload_archive returns."""
    # delete=False so the file survives close() and can be re-read by
    # upload_archive. NOTE(review): the temp file is never removed afterwards.
    the_file = NamedTemporaryFile(delete=False)
    the_file.write(b"some stuff")
    the_file.close()
    conn = boto.glacier.connect_to_region("us-west-2")
    vault = conn.create_vault("my_vault")
    archive_id = vault.upload_archive(the_file.name)
    vault.delete_archive(archive_id)
from __future__ import unicode_literals
from tempfile import NamedTemporaryFile
import boto.glacier
import sure # noqa
from moto import mock_glacier_deprecated
@mock_glacier_deprecated
def test_create_and_delete_archive():
the_file = NamedTemporaryFile(delete=False)
the_file.write(b"some stuff")
the_file.close()
conn = boto.glacier.connect_to_region("us-west-2")
vault = conn.create_vault("my_vault")
archive_id = vault.upload_archive(the_file.name)
vault.delete_archive(archive_id)

View File

@ -1,31 +1,31 @@
from __future__ import unicode_literals
import boto.glacier
import sure # noqa
from moto import mock_glacier_deprecated
@mock_glacier_deprecated
def test_create_vault():
    """A newly created vault is returned by list_vaults with its name intact."""
    glacier = boto.glacier.connect_to_region("us-west-2")
    glacier.create_vault("my_vault")

    listed = glacier.list_vaults()
    listed.should.have.length_of(1)
    listed[0].name.should.equal("my_vault")
@mock_glacier_deprecated
def test_delete_vault():
    """Deleting the only vault leaves list_vaults empty."""
    glacier = boto.glacier.connect_to_region("us-west-2")
    glacier.create_vault("my_vault")

    # Sanity check: vault exists before deletion.
    glacier.list_vaults().should.have.length_of(1)

    glacier.delete_vault("my_vault")
    glacier.list_vaults().should.have.length_of(0)
from __future__ import unicode_literals
import boto.glacier
import sure # noqa
from moto import mock_glacier_deprecated
@mock_glacier_deprecated
def test_create_vault():
conn = boto.glacier.connect_to_region("us-west-2")
conn.create_vault("my_vault")
vaults = conn.list_vaults()
vaults.should.have.length_of(1)
vaults[0].name.should.equal("my_vault")
@mock_glacier_deprecated
def test_delete_vault():
conn = boto.glacier.connect_to_region("us-west-2")
conn.create_vault("my_vault")
vaults = conn.list_vaults()
vaults.should.have.length_of(1)
conn.delete_vault("my_vault")
vaults = conn.list_vaults()
vaults.should.have.length_of(0)

View File

@ -1 +1 @@
from __future__ import unicode_literals
from __future__ import unicode_literals

View File

@ -1 +1 @@
from __future__ import unicode_literals
from __future__ import unicode_literals

View File

@ -1,97 +1,97 @@
from __future__ import unicode_literals
import copy
from .fixtures.datacatalog import TABLE_INPUT, PARTITION_INPUT
def create_database(client, database_name):
    """Create a Glue database named *database_name*."""
    return client.create_database(DatabaseInput={"Name": database_name})


def get_database(client, database_name):
    """Fetch a Glue database by name."""
    return client.get_database(Name=database_name)


def create_table_input(database_name, table_name, columns=None, partition_keys=None):
    """Build a TableInput dict based on the TABLE_INPUT fixture.

    Fixed: ``columns``/``partition_keys`` previously defaulted to mutable
    lists (``[]``), which are shared across calls in Python; ``None``
    sentinels give identical behaviour without that foot-gun.
    """
    table_input = copy.deepcopy(TABLE_INPUT)
    table_input["Name"] = table_name
    table_input["PartitionKeys"] = [] if partition_keys is None else partition_keys
    table_input["StorageDescriptor"]["Columns"] = [] if columns is None else columns
    table_input["StorageDescriptor"][
        "Location"
    ] = "s3://my-bucket/{database_name}/{table_name}".format(
        database_name=database_name, table_name=table_name
    )
    return table_input


def create_table(client, database_name, table_name, table_input=None, **kwargs):
    """Create a table, building a default TableInput when none is given."""
    if table_input is None:
        table_input = create_table_input(database_name, table_name, **kwargs)
    return client.create_table(DatabaseName=database_name, TableInput=table_input)


def update_table(client, database_name, table_name, table_input=None, **kwargs):
    """Update a table, building a default TableInput when none is given."""
    if table_input is None:
        table_input = create_table_input(database_name, table_name, **kwargs)
    return client.update_table(DatabaseName=database_name, TableInput=table_input)


def get_table(client, database_name, table_name):
    """Fetch a single table."""
    return client.get_table(DatabaseName=database_name, Name=table_name)


def get_tables(client, database_name):
    """List all tables of a database."""
    return client.get_tables(DatabaseName=database_name)


def get_table_versions(client, database_name, table_name):
    """List all versions of a table."""
    return client.get_table_versions(DatabaseName=database_name, TableName=table_name)


def get_table_version(client, database_name, table_name, version_id):
    """Fetch one specific version of a table."""
    return client.get_table_version(
        DatabaseName=database_name, TableName=table_name, VersionId=version_id
    )


def create_partition_input(database_name, table_name, values=None, columns=None):
    """Build a PartitionInput dict based on the PARTITION_INPUT fixture.

    Fixed: ``None`` sentinels replace the former mutable list defaults.
    """
    root_path = "s3://my-bucket/{database_name}/{table_name}".format(
        database_name=database_name, table_name=table_name
    )
    part_input = copy.deepcopy(PARTITION_INPUT)
    part_input["Values"] = [] if values is None else values
    part_input["StorageDescriptor"]["Columns"] = [] if columns is None else columns
    part_input["StorageDescriptor"]["SerdeInfo"]["Parameters"]["path"] = root_path
    return part_input


def create_partition(client, database_name, table_name, partiton_input=None, **kwargs):
    """Create a partition, building a default input when none is given.

    NOTE: the parameter name ``partiton_input`` (sic) is kept for backward
    compatibility with callers that pass it by keyword.
    """
    if partiton_input is None:
        partiton_input = create_partition_input(database_name, table_name, **kwargs)
    return client.create_partition(
        DatabaseName=database_name, TableName=table_name, PartitionInput=partiton_input
    )


def update_partition(
    client, database_name, table_name, old_values=None, partiton_input=None, **kwargs
):
    """Update the partition identified by *old_values*.

    Fixed: ``old_values`` previously defaulted to a shared mutable list;
    ``None`` now maps to an empty PartitionValueList with identical behaviour.
    """
    if partiton_input is None:
        partiton_input = create_partition_input(database_name, table_name, **kwargs)
    return client.update_partition(
        DatabaseName=database_name,
        TableName=table_name,
        PartitionInput=partiton_input,
        PartitionValueList=[] if old_values is None else old_values,
    )


def get_partition(client, database_name, table_name, values):
    """Fetch a single partition by its value list."""
    return client.get_partition(
        DatabaseName=database_name, TableName=table_name, PartitionValues=values
    )
from __future__ import unicode_literals
import copy
from .fixtures.datacatalog import TABLE_INPUT, PARTITION_INPUT
def create_database(client, database_name):
return client.create_database(DatabaseInput={"Name": database_name})
def get_database(client, database_name):
return client.get_database(Name=database_name)
def create_table_input(database_name, table_name, columns=[], partition_keys=[]):
table_input = copy.deepcopy(TABLE_INPUT)
table_input["Name"] = table_name
table_input["PartitionKeys"] = partition_keys
table_input["StorageDescriptor"]["Columns"] = columns
table_input["StorageDescriptor"][
"Location"
] = "s3://my-bucket/{database_name}/{table_name}".format(
database_name=database_name, table_name=table_name
)
return table_input
def create_table(client, database_name, table_name, table_input=None, **kwargs):
if table_input is None:
table_input = create_table_input(database_name, table_name, **kwargs)
return client.create_table(DatabaseName=database_name, TableInput=table_input)
def update_table(client, database_name, table_name, table_input=None, **kwargs):
if table_input is None:
table_input = create_table_input(database_name, table_name, **kwargs)
return client.update_table(DatabaseName=database_name, TableInput=table_input)
def get_table(client, database_name, table_name):
return client.get_table(DatabaseName=database_name, Name=table_name)
def get_tables(client, database_name):
return client.get_tables(DatabaseName=database_name)
def get_table_versions(client, database_name, table_name):
return client.get_table_versions(DatabaseName=database_name, TableName=table_name)
def get_table_version(client, database_name, table_name, version_id):
return client.get_table_version(
DatabaseName=database_name, TableName=table_name, VersionId=version_id
)
def create_partition_input(database_name, table_name, values=[], columns=[]):
root_path = "s3://my-bucket/{database_name}/{table_name}".format(
database_name=database_name, table_name=table_name
)
part_input = copy.deepcopy(PARTITION_INPUT)
part_input["Values"] = values
part_input["StorageDescriptor"]["Columns"] = columns
part_input["StorageDescriptor"]["SerdeInfo"]["Parameters"]["path"] = root_path
return part_input
def create_partition(client, database_name, table_name, partiton_input=None, **kwargs):
if partiton_input is None:
partiton_input = create_partition_input(database_name, table_name, **kwargs)
return client.create_partition(
DatabaseName=database_name, TableName=table_name, PartitionInput=partiton_input
)
def update_partition(
client, database_name, table_name, old_values=[], partiton_input=None, **kwargs
):
if partiton_input is None:
partiton_input = create_partition_input(database_name, table_name, **kwargs)
return client.update_partition(
DatabaseName=database_name,
TableName=table_name,
PartitionInput=partiton_input,
PartitionValueList=old_values,
)
def get_partition(client, database_name, table_name, values):
return client.get_partition(
DatabaseName=database_name, TableName=table_name, PartitionValues=values
)

View File

@ -9,6 +9,173 @@ from botocore.exceptions import ClientError
from nose.tools import assert_raises
@mock_iot
def test_attach_policy():
    """Attaching a policy to a certificate makes it visible via list_attached_policies."""
    iot = boto3.client("iot", region_name="ap-northeast-1")
    policy_name = "my-policy"

    keys_and_cert = iot.create_keys_and_certificate(setAsActive=True)
    target_arn = keys_and_cert["certificateArn"]

    iot.create_policy(policyName=policy_name, policyDocument="{}")
    iot.attach_policy(policyName=policy_name, target=target_arn)

    attached = iot.list_attached_policies(target=target_arn)
    attached.should.have.key("policies").which.should.have.length_of(1)
    attached["policies"][0]["policyName"].should.equal("my-policy")
@mock_iot
def test_detach_policy():
    """Detaching a previously attached policy empties list_attached_policies."""
    iot = boto3.client("iot", region_name="ap-northeast-1")
    policy_name = "my-policy"

    keys_and_cert = iot.create_keys_and_certificate(setAsActive=True)
    target_arn = keys_and_cert["certificateArn"]

    iot.create_policy(policyName=policy_name, policyDocument="{}")
    iot.attach_policy(policyName=policy_name, target=target_arn)

    # Sanity check: the policy is attached before we detach it.
    attached = iot.list_attached_policies(target=target_arn)
    attached.should.have.key("policies").which.should.have.length_of(1)
    attached["policies"][0]["policyName"].should.equal("my-policy")

    iot.detach_policy(policyName=policy_name, target=target_arn)
    detached = iot.list_attached_policies(target=target_arn)
    detached.should.have.key("policies").which.should.be.empty
@mock_iot
def test_list_attached_policies():
    """A freshly created certificate has no attached policies."""
    iot = boto3.client("iot", region_name="ap-northeast-1")
    new_cert = iot.create_keys_and_certificate(setAsActive=True)

    result = iot.list_attached_policies(target=new_cert["certificateArn"])
    result["policies"].should.be.empty
@mock_iot
def test_policy_versions():
    """Walk the policy-version lifecycle: create versions, switch the default, delete.

    Fixes over the original:
    * The first ``get_policy`` check compared ``policy["defaultVersionId"]`` to
      itself — a tautology that can never fail; the only version is "1".
    * The final negative test put ``assert False`` inside the ``try`` where the
      AssertionError was swallowed by a bare ``except Exception`` and then
      crashed on ``.response``; it now catches ``ClientError`` with an ``else``.
    """
    client = boto3.client("iot", region_name="ap-northeast-1")
    policy_name = "my-policy"
    doc = "{}"

    policy = client.create_policy(policyName=policy_name, policyDocument=doc)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(json.dumps({}))
    policy.should.have.key("policyVersionId").which.should.equal("1")

    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(json.dumps({}))
    # The single existing version must be the default.
    policy.should.have.key("defaultVersionId").which.should.equal("1")

    policy1 = client.create_policy_version(
        policyName=policy_name,
        policyDocument=json.dumps({"version": "version_1"}),
        setAsDefault=True,
    )
    policy1.should.have.key("policyArn").which.should_not.be.none
    policy1.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_1"})
    )
    policy1.should.have.key("policyVersionId").which.should.equal("2")
    policy1.should.have.key("isDefaultVersion").which.should.equal(True)

    policy2 = client.create_policy_version(
        policyName=policy_name,
        policyDocument=json.dumps({"version": "version_2"}),
        setAsDefault=False,
    )
    policy2.should.have.key("policyArn").which.should_not.be.none
    policy2.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_2"})
    )
    policy2.should.have.key("policyVersionId").which.should.equal("3")
    policy2.should.have.key("isDefaultVersion").which.should.equal(False)

    # Version 1 (document "version_1") was set as default above.
    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_1"})
    )
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy1["policyVersionId"]
    )

    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(3)
    # Exactly one version may be the default at any time.
    list(
        map(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    ).count(True).should.equal(1)
    default_policy = list(
        filter(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    )
    default_policy[0].should.have.key("versionId").should.equal(
        policy1["policyVersionId"]
    )

    # Re-check the same state via get_policy before switching the default.
    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_1"})
    )
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy1["policyVersionId"]
    )

    client.set_default_policy_version(
        policyName=policy_name, policyVersionId=policy2["policyVersionId"]
    )
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(3)
    list(
        map(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    ).count(True).should.equal(1)
    default_policy = list(
        filter(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    )
    default_policy[0].should.have.key("versionId").should.equal(
        policy2["policyVersionId"]
    )

    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_2"})
    )
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy2["policyVersionId"]
    )

    client.delete_policy_version(policyName=policy_name, policyVersionId="1")
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(2)

    client.delete_policy_version(
        policyName=policy_name, policyVersionId=policy1["policyVersionId"]
    )
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(1)

    # Deleting the default version must fail; delete_policy is the right call.
    try:
        client.delete_policy_version(
            policyName=policy_name, policyVersionId=policy2["policyVersionId"]
        )
    except ClientError as exc:
        exc.response["Error"]["Message"].should.equal(
            "Cannot delete the default version of a policy"
        )
    else:
        raise AssertionError("Should have failed in previous call")
@mock_iot
def test_things():
client = boto3.client("iot", region_name="ap-northeast-1")
@ -994,7 +1161,10 @@ def test_create_job():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing
# thing
# job document
# job_document = {
# "field": "value"
# }
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
@ -1020,6 +1190,63 @@ def test_create_job():
job.should.have.key("description")
@mock_iot
def test_list_jobs():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing
# job document
# job_document = {
# "field": "value"
# }
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
# job document
job_document = {"field": "value"}
job1 = client.create_job(
jobId=job_id,
targets=[thing["thingArn"]],
document=json.dumps(job_document),
description="Description",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job1.should.have.key("jobId").which.should.equal(job_id)
job1.should.have.key("jobArn")
job1.should.have.key("description")
job2 = client.create_job(
jobId=job_id + "1",
targets=[thing["thingArn"]],
document=json.dumps(job_document),
description="Description",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job2.should.have.key("jobId").which.should.equal(job_id + "1")
job2.should.have.key("jobArn")
job2.should.have.key("description")
jobs = client.list_jobs()
jobs.should.have.key("jobs")
jobs.should_not.have.key("nextToken")
jobs["jobs"][0].should.have.key("jobId").which.should.equal(job_id)
jobs["jobs"][1].should.have.key("jobId").which.should.equal(job_id + "1")
@mock_iot
def test_describe_job():
client = boto3.client("iot", region_name="eu-west-1")
@ -1124,3 +1351,387 @@ def test_describe_job_1():
job.should.have.key("job").which.should.have.key(
"jobExecutionsRolloutConfig"
).which.should.have.key("maximumPerMinute").which.should.equal(10)
@mock_iot
def test_delete_job():
    """delete_job removes a job so that list_jobs no longer returns it."""
    iot = boto3.client("iot", region_name="eu-west-1")
    thing_name = "my-thing"
    job_id = "TestJob"

    created_thing = iot.create_thing(thingName=thing_name)
    created_thing.should.have.key("thingName").which.should.equal(thing_name)
    created_thing.should.have.key("thingArn")

    created_job = iot.create_job(
        jobId=job_id,
        targets=[created_thing["thingArn"]],
        documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )
    created_job.should.have.key("jobId").which.should.equal(job_id)
    created_job.should.have.key("jobArn")

    # The job is describable before deletion.
    described = iot.describe_job(jobId=job_id)
    described.should.have.key("job")
    described.should.have.key("job").which.should.have.key("jobId").which.should.equal(
        job_id
    )

    iot.delete_job(jobId=job_id)
    iot.list_jobs()["jobs"].should.have.length_of(0)
@mock_iot
def test_cancel_job():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
job = client.create_job(
jobId=job_id,
targets=[thing["thingArn"]],
documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("jobArn")
job = client.describe_job(jobId=job_id)
job.should.have.key("job")
job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id)
job = client.cancel_job(jobId=job_id, reasonCode="Because", comment="You are")
job.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("jobArn")
job = client.describe_job(jobId=job_id)
job.should.have.key("job")
job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("job").which.should.have.key("status").which.should.equal(
"CANCELED"
)
job.should.have.key("job").which.should.have.key(
"forceCanceled"
).which.should.equal(False)
job.should.have.key("job").which.should.have.key("reasonCode").which.should.equal(
"Because"
)
job.should.have.key("job").which.should.have.key("comment").which.should.equal(
"You are"
)
@mock_iot
def test_get_job_document_with_document_source():
    """A job created from a documentSource URL returns an empty inline document."""
    iot = boto3.client("iot", region_name="eu-west-1")
    thing_name = "my-thing"
    job_id = "TestJob"

    created_thing = iot.create_thing(thingName=thing_name)
    created_thing.should.have.key("thingName").which.should.equal(thing_name)
    created_thing.should.have.key("thingArn")

    created_job = iot.create_job(
        jobId=job_id,
        targets=[created_thing["thingArn"]],
        documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )
    created_job.should.have.key("jobId").which.should.equal(job_id)
    created_job.should.have.key("jobArn")

    fetched = iot.get_job_document(jobId=job_id)
    fetched.should.have.key("document").which.should.equal("")
@mock_iot
def test_get_job_document_with_document():
    """A job created with an inline document returns that document verbatim."""
    iot = boto3.client("iot", region_name="eu-west-1")
    thing_name = "my-thing"
    job_id = "TestJob"

    created_thing = iot.create_thing(thingName=thing_name)
    created_thing.should.have.key("thingName").which.should.equal(thing_name)
    created_thing.should.have.key("thingArn")

    created_job = iot.create_job(
        jobId=job_id,
        targets=[created_thing["thingArn"]],
        document=json.dumps({"field": "value"}),
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )
    created_job.should.have.key("jobId").which.should.equal(job_id)
    created_job.should.have.key("jobArn")

    fetched = iot.get_job_document(jobId=job_id)
    fetched.should.have.key("document").which.should.equal('{"field": "value"}')
@mock_iot
def test_describe_job_execution():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
# job document
job_document = {"field": "value"}
job = client.create_job(
jobId=job_id,
targets=[thing["thingArn"]],
document=json.dumps(job_document),
description="Description",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("jobArn")
job.should.have.key("description")
job_execution = client.describe_job_execution(jobId=job_id, thingName=name)
job_execution.should.have.key("execution")
job_execution["execution"].should.have.key("jobId").which.should.equal(job_id)
job_execution["execution"].should.have.key("status").which.should.equal("QUEUED")
job_execution["execution"].should.have.key("forceCanceled").which.should.equal(
False
)
job_execution["execution"].should.have.key("statusDetails").which.should.equal(
{"detailsMap": {}}
)
job_execution["execution"].should.have.key("thingArn").which.should.equal(
thing["thingArn"]
)
job_execution["execution"].should.have.key("queuedAt")
job_execution["execution"].should.have.key("startedAt")
job_execution["execution"].should.have.key("lastUpdatedAt")
job_execution["execution"].should.have.key("executionNumber").which.should.equal(
123
)
job_execution["execution"].should.have.key("versionNumber").which.should.equal(123)
job_execution["execution"].should.have.key(
"approximateSecondsBeforeTimedOut"
).which.should.equal(123)
job_execution = client.describe_job_execution(
jobId=job_id, thingName=name, executionNumber=123
)
job_execution.should.have.key("execution")
job_execution["execution"].should.have.key("jobId").which.should.equal(job_id)
job_execution["execution"].should.have.key("status").which.should.equal("QUEUED")
job_execution["execution"].should.have.key("forceCanceled").which.should.equal(
False
)
job_execution["execution"].should.have.key("statusDetails").which.should.equal(
{"detailsMap": {}}
)
job_execution["execution"].should.have.key("thingArn").which.should.equal(
thing["thingArn"]
)
job_execution["execution"].should.have.key("queuedAt")
job_execution["execution"].should.have.key("startedAt")
job_execution["execution"].should.have.key("lastUpdatedAt")
job_execution["execution"].should.have.key("executionNumber").which.should.equal(
123
)
job_execution["execution"].should.have.key("versionNumber").which.should.equal(123)
job_execution["execution"].should.have.key(
"approximateSecondsBeforeTimedOut"
).which.should.equal(123)
try:
client.describe_job_execution(jobId=job_id, thingName=name, executionNumber=456)
except ClientError as exc:
error_code = exc.response["Error"]["Code"]
error_code.should.equal("ResourceNotFoundException")
else:
raise Exception("Should have raised error")
@mock_iot
def test_cancel_job_execution():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
# job document
job_document = {"field": "value"}
job = client.create_job(
jobId=job_id,
targets=[thing["thingArn"]],
document=json.dumps(job_document),
description="Description",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("jobArn")
job.should.have.key("description")
client.cancel_job_execution(jobId=job_id, thingName=name)
job_execution = client.describe_job_execution(jobId=job_id, thingName=name)
job_execution.should.have.key("execution")
job_execution["execution"].should.have.key("status").which.should.equal("CANCELED")
@mock_iot
def test_delete_job_execution():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
# job document
job_document = {"field": "value"}
job = client.create_job(
jobId=job_id,
targets=[thing["thingArn"]],
document=json.dumps(job_document),
description="Description",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("jobArn")
job.should.have.key("description")
client.delete_job_execution(jobId=job_id, thingName=name, executionNumber=123)
try:
client.describe_job_execution(jobId=job_id, thingName=name, executionNumber=123)
except ClientError as exc:
error_code = exc.response["Error"]["Code"]
error_code.should.equal("ResourceNotFoundException")
else:
raise Exception("Should have raised error")
@mock_iot
def test_list_job_executions_for_job():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
# job document
job_document = {"field": "value"}
job = client.create_job(
jobId=job_id,
targets=[thing["thingArn"]],
document=json.dumps(job_document),
description="Description",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("jobArn")
job.should.have.key("description")
job_execution = client.list_job_executions_for_job(jobId=job_id)
job_execution.should.have.key("executionSummaries")
job_execution["executionSummaries"][0].should.have.key(
"thingArn"
).which.should.equal(thing["thingArn"])
@mock_iot
def test_list_job_executions_for_thing():
client = boto3.client("iot", region_name="eu-west-1")
name = "my-thing"
job_id = "TestJob"
# thing
thing = client.create_thing(thingName=name)
thing.should.have.key("thingName").which.should.equal(name)
thing.should.have.key("thingArn")
# job document
job_document = {"field": "value"}
job = client.create_job(
jobId=job_id,
targets=[thing["thingArn"]],
document=json.dumps(job_document),
description="Description",
presignedUrlConfig={
"roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
"expiresInSec": 123,
},
targetSelection="CONTINUOUS",
jobExecutionsRolloutConfig={"maximumPerMinute": 10},
)
job.should.have.key("jobId").which.should.equal(job_id)
job.should.have.key("jobArn")
job.should.have.key("description")
job_execution = client.list_job_executions_for_thing(thingName=name)
job_execution.should.have.key("executionSummaries")
job_execution["executionSummaries"][0].should.have.key("jobId").which.should.equal(
job_id
)

View File

@ -1,76 +1,76 @@
from __future__ import unicode_literals
import boto3
from freezegun import freeze_time
import sure # noqa
import re
from moto import mock_opsworks
@freeze_time("2015-01-01")
@mock_opsworks
def test_create_app_response():
    """create_app succeeds once per (stack, name) pair and validates its stack ID."""
    opsworks = boto3.client("opsworks", region_name="us-east-1")

    def make_stack(stack_name):
        # Helper: create a stack and return its ID.
        return opsworks.create_stack(
            Name=stack_name,
            Region="us-east-1",
            ServiceRoleArn="service_arn",
            DefaultInstanceProfileArn="profile_arn",
        )["StackId"]

    first_stack = make_stack("test_stack_1")
    created = opsworks.create_app(StackId=first_stack, Type="other", Name="TestApp")
    created.should.contain("AppId")

    # The same app name is allowed on a different stack.
    second_stack = make_stack("test_stack_2")
    created = opsworks.create_app(StackId=second_stack, Type="other", Name="TestApp")
    created.should.contain("AppId")

    # Duplicate name within one stack -> ClientError
    opsworks.create_app.when.called_with(
        StackId=first_stack, Type="other", Name="TestApp"
    ).should.throw(Exception, re.compile(r'already an app named "TestApp"'))

    # Unknown stack -> ClientError
    opsworks.create_app.when.called_with(
        StackId="nothere", Type="other", Name="TestApp"
    ).should.throw(Exception, "nothere")
@freeze_time("2015-01-01")
@mock_opsworks
def test_describe_apps():
    """describe_apps returns the same app whether filtered by stack or by app ID."""
    opsworks = boto3.client("opsworks", region_name="us-east-1")
    stack_id = opsworks.create_stack(
        Name="test_stack_1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]
    app_id = opsworks.create_app(StackId=stack_id, Type="other", Name="TestApp")[
        "AppId"
    ]

    by_stack = opsworks.describe_apps(StackId=stack_id)
    by_app_id = opsworks.describe_apps(AppIds=[app_id])
    by_stack["Apps"].should.equal(by_app_id["Apps"])
    by_stack["Apps"][0]["Name"].should.equal("TestApp")

    # Supplying both filters at once is rejected -> ClientError
    opsworks.describe_apps.when.called_with(
        StackId=stack_id, AppIds=[app_id]
    ).should.throw(Exception, "Please provide one or more app IDs or a stack ID")

    # Unknown stack -> ClientError
    opsworks.describe_apps.when.called_with(StackId="nothere").should.throw(
        Exception, "Unable to find stack with ID nothere"
    )

    # Unknown app ID -> ClientError
    opsworks.describe_apps.when.called_with(AppIds=["nothere"]).should.throw(
        Exception, "nothere"
    )
from __future__ import unicode_literals
import boto3
from freezegun import freeze_time
import sure # noqa
import re
from moto import mock_opsworks
@freeze_time("2015-01-01")
@mock_opsworks
def test_create_app_response():
client = boto3.client("opsworks", region_name="us-east-1")
stack_id = client.create_stack(
Name="test_stack_1",
Region="us-east-1",
ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn",
)["StackId"]
response = client.create_app(StackId=stack_id, Type="other", Name="TestApp")
response.should.contain("AppId")
second_stack_id = client.create_stack(
Name="test_stack_2",
Region="us-east-1",
ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn",
)["StackId"]
response = client.create_app(StackId=second_stack_id, Type="other", Name="TestApp")
response.should.contain("AppId")
# ClientError
client.create_app.when.called_with(
StackId=stack_id, Type="other", Name="TestApp"
).should.throw(Exception, re.compile(r'already an app named "TestApp"'))
# ClientError
client.create_app.when.called_with(
StackId="nothere", Type="other", Name="TestApp"
).should.throw(Exception, "nothere")
@freeze_time("2015-01-01")
@mock_opsworks
def test_describe_apps():
client = boto3.client("opsworks", region_name="us-east-1")
stack_id = client.create_stack(
Name="test_stack_1",
Region="us-east-1",
ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn",
)["StackId"]
app_id = client.create_app(StackId=stack_id, Type="other", Name="TestApp")["AppId"]
rv1 = client.describe_apps(StackId=stack_id)
rv2 = client.describe_apps(AppIds=[app_id])
rv1["Apps"].should.equal(rv2["Apps"])
rv1["Apps"][0]["Name"].should.equal("TestApp")
# ClientError
client.describe_apps.when.called_with(
StackId=stack_id, AppIds=[app_id]
).should.throw(Exception, "Please provide one or more app IDs or a stack ID")
# ClientError
client.describe_apps.when.called_with(StackId="nothere").should.throw(
Exception, "Unable to find stack with ID nothere"
)
# ClientError
client.describe_apps.when.called_with(AppIds=["nothere"]).should.throw(
Exception, "nothere"
)

View File

@ -1,206 +1,206 @@
from __future__ import unicode_literals
import boto3
import sure # noqa
from moto import mock_opsworks
from moto import mock_ec2
@mock_opsworks
def test_create_instance():
# Verifies create_instance happy path and its error cases: unknown stack,
# unknown layer, and layers drawn from two different stacks.
# NOTE(review): this diff rendering has lost all indentation; code tokens
# are kept byte-identical here.
client = boto3.client("opsworks", region_name="us-east-1")
# First stack with one custom layer.
stack_id = client.create_stack(
Name="test_stack_1",
Region="us-east-1",
ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn",
)["StackId"]
layer_id = client.create_layer(
StackId=stack_id,
Type="custom",
Name="TestLayer",
Shortname="TestLayerShortName",
)["LayerId"]
# Second stack with its own layer, used to test cross-stack rejection below.
second_stack_id = client.create_stack(
Name="test_stack_2",
Region="us-east-1",
ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn",
)["StackId"]
second_layer_id = client.create_layer(
StackId=second_stack_id,
Type="custom",
Name="SecondTestLayer",
Shortname="SecondTestLayerShortName",
)["LayerId"]
# Happy path: instance created in stack 1 / layer 1.
response = client.create_instance(
StackId=stack_id, LayerIds=[layer_id], InstanceType="t2.micro"
)
response.should.contain("InstanceId")
# Unknown stack -> ClientError
client.create_instance.when.called_with(
StackId="nothere", LayerIds=[layer_id], InstanceType="t2.micro"
).should.throw(Exception, "Unable to find stack with ID nothere")
# Unknown layer -> ClientError
client.create_instance.when.called_with(
StackId=stack_id, LayerIds=["nothere"], InstanceType="t2.micro"
).should.throw(Exception, "nothere")
# ClientError
# Layer belongs to a different stack than StackId -> rejected.
client.create_instance.when.called_with(
StackId=stack_id, LayerIds=[second_layer_id], InstanceType="t2.micro"
).should.throw(Exception, "Please only provide layer IDs from the same stack")
# ClientError
# Starting an unknown instance -> rejected.
client.start_instance.when.called_with(InstanceId="nothere").should.throw(
Exception, "Unable to find instance with ID nothere"
)
@mock_opsworks
def test_describe_instances():
    """
    create two stacks, with 1 layer and 2 layers (S1L1, S2L1, S2L2)
    populate S1L1 with 2 instances (S1L1_i1, S1L1_i2)
    populate S2L1 with 1 instance (S2L1_i1)
    populate S2L2 with 3 instances (S2L2_i1..2)
    """
    client = boto3.client("opsworks", region_name="us-east-1")

    def make_stack(name):
        # One stack per call; all share the same service role / profile ARNs.
        return client.create_stack(
            Name=name,
            Region="us-east-1",
            ServiceRoleArn="service_arn",
            DefaultInstanceProfileArn="profile_arn",
        )["StackId"]

    def make_layer(stack, name):
        return client.create_layer(
            StackId=stack, Type="custom", Name=name, Shortname=name
        )["LayerId"]

    def make_instance(stack, layer):
        return client.create_instance(
            StackId=stack, LayerIds=[layer], InstanceType="t2.micro"
        )["InstanceId"]

    stack1 = make_stack("S1")
    stack1_layer1 = make_layer(stack1, "S1L1")
    stack2 = make_stack("S2")
    stack2_layer1 = make_layer(stack2, "S2L1")
    stack2_layer2 = make_layer(stack2, "S2L2")

    s1l1_i1 = make_instance(stack1, stack1_layer1)
    s1l1_i2 = make_instance(stack1, stack1_layer1)
    s2l1_i1 = make_instance(stack2, stack2_layer1)
    s2l2_i1 = make_instance(stack2, stack2_layer2)
    s2l2_i2 = make_instance(stack2, stack2_layer2)

    # instances in Stack 1
    stack1_instances = client.describe_instances(StackId=stack1)["Instances"]
    stack1_instances.should.have.length_of(2)
    s1l1_i1.should.be.within([i["InstanceId"] for i in stack1_instances])
    s1l1_i2.should.be.within([i["InstanceId"] for i in stack1_instances])

    # Looking up by explicit instance IDs or by layer yields the same set.
    by_ids = client.describe_instances(InstanceIds=[s1l1_i1, s1l1_i2])["Instances"]
    sorted(by_ids, key=lambda d: d["InstanceId"]).should.equal(
        sorted(stack1_instances, key=lambda d: d["InstanceId"])
    )
    by_layer = client.describe_instances(LayerId=stack1_layer1)["Instances"]
    sorted(by_layer, key=lambda d: d["InstanceId"]).should.equal(
        sorted(stack1_instances, key=lambda d: d["InstanceId"])
    )

    stack1_instances = client.describe_instances(StackId=stack1)["Instances"]
    stack1_instances.should.have.length_of(2)
    s1l1_i1.should.be.within([i["InstanceId"] for i in stack1_instances])
    s1l1_i2.should.be.within([i["InstanceId"] for i in stack1_instances])

    # instances in Stack 2
    stack2_instances = client.describe_instances(StackId=stack2)["Instances"]
    stack2_instances.should.have.length_of(3)
    s2l1_i1.should.be.within([i["InstanceId"] for i in stack2_instances])
    s2l2_i1.should.be.within([i["InstanceId"] for i in stack2_instances])
    s2l2_i2.should.be.within([i["InstanceId"] for i in stack2_instances])

    layer1_instances = client.describe_instances(LayerId=stack2_layer1)["Instances"]
    layer1_instances.should.have.length_of(1)
    s2l1_i1.should.be.within([i["InstanceId"] for i in layer1_instances])

    layer2_instances = client.describe_instances(LayerId=stack2_layer2)["Instances"]
    layer2_instances.should.have.length_of(2)
    s2l1_i1.should_not.be.within([i["InstanceId"] for i in layer2_instances])

    # ClientError: StackId and LayerId are mutually exclusive
    client.describe_instances.when.called_with(
        StackId=stack1, LayerId=stack1_layer1
    ).should.throw(Exception, "Please provide either one or more")
    # ClientError: unknown stack
    client.describe_instances.when.called_with(StackId="nothere").should.throw(
        Exception, "nothere"
    )
    # ClientError: unknown layer
    client.describe_instances.when.called_with(LayerId="nothere").should.throw(
        Exception, "nothere"
    )
    # ClientError: unknown instance
    client.describe_instances.when.called_with(InstanceIds=["nothere"]).should.throw(
        Exception, "nothere"
    )
@mock_opsworks
@mock_ec2
def test_ec2_integration():
    """
    instances created via OpsWorks should be discoverable via ec2
    """
    opsworks = boto3.client("opsworks", region_name="us-east-1")
    stack_id = opsworks.create_stack(
        Name="S1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]
    layer_id = opsworks.create_layer(
        StackId=stack_id, Type="custom", Name="S1L1", Shortname="S1L1"
    )["LayerId"]
    instance_id = opsworks.create_instance(
        StackId=stack_id,
        LayerIds=[layer_id],
        InstanceType="t2.micro",
        SshKeyName="testSSH",
    )["InstanceId"]

    ec2 = boto3.client("ec2", region_name="us-east-1")

    # Before starting the instance, it shouldn't be discoverable via ec2
    ec2.describe_instances()["Reservations"].should.be.empty

    # After starting the instance, it should be discoverable via ec2
    opsworks.start_instance(InstanceId=instance_id)
    reservations = ec2.describe_instances()["Reservations"]
    reservations[0]["Instances"].should.have.length_of(1)
    ec2_instance = reservations[0]["Instances"][0]

    # The EC2-side record must match what OpsWorks reports.
    opsworks_instance = opsworks.describe_instances(StackId=stack_id)["Instances"][0]
    ec2_instance["InstanceId"].should.equal(opsworks_instance["Ec2InstanceId"])
    ec2_instance["PrivateIpAddress"].should.equal(opsworks_instance["PrivateIp"])
from __future__ import unicode_literals
import boto3
import sure # noqa
from moto import mock_opsworks
from moto import mock_ec2
@mock_opsworks
def test_create_instance():
client = boto3.client("opsworks", region_name="us-east-1")
stack_id = client.create_stack(
Name="test_stack_1",
Region="us-east-1",
ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn",
)["StackId"]
layer_id = client.create_layer(
StackId=stack_id,
Type="custom",
Name="TestLayer",
Shortname="TestLayerShortName",
)["LayerId"]
second_stack_id = client.create_stack(
Name="test_stack_2",
Region="us-east-1",
ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn",
)["StackId"]
second_layer_id = client.create_layer(
StackId=second_stack_id,
Type="custom",
Name="SecondTestLayer",
Shortname="SecondTestLayerShortName",
)["LayerId"]
response = client.create_instance(
StackId=stack_id, LayerIds=[layer_id], InstanceType="t2.micro"
)
response.should.contain("InstanceId")
client.create_instance.when.called_with(
StackId="nothere", LayerIds=[layer_id], InstanceType="t2.micro"
).should.throw(Exception, "Unable to find stack with ID nothere")
client.create_instance.when.called_with(
StackId=stack_id, LayerIds=["nothere"], InstanceType="t2.micro"
).should.throw(Exception, "nothere")
# ClientError
client.create_instance.when.called_with(
StackId=stack_id, LayerIds=[second_layer_id], InstanceType="t2.micro"
).should.throw(Exception, "Please only provide layer IDs from the same stack")
# ClientError
client.start_instance.when.called_with(InstanceId="nothere").should.throw(
Exception, "Unable to find instance with ID nothere"
)
@mock_opsworks
def test_describe_instances():
"""
create two stacks, with 1 layer and 2 layers (S1L1, S2L1, S2L2)
populate S1L1 with 2 instances (S1L1_i1, S1L1_i2)
populate S2L1 with 1 instance (S2L1_i1)
populate S2L2 with 3 instances (S2L2_i1..2)
"""
client = boto3.client("opsworks", region_name="us-east-1")
S1 = client.create_stack(
Name="S1",
Region="us-east-1",
ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn",
)["StackId"]
S1L1 = client.create_layer(
StackId=S1, Type="custom", Name="S1L1", Shortname="S1L1"
)["LayerId"]
S2 = client.create_stack(
Name="S2",
Region="us-east-1",
ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn",
)["StackId"]
S2L1 = client.create_layer(
StackId=S2, Type="custom", Name="S2L1", Shortname="S2L1"
)["LayerId"]
S2L2 = client.create_layer(
StackId=S2, Type="custom", Name="S2L2", Shortname="S2L2"
)["LayerId"]
S1L1_i1 = client.create_instance(
StackId=S1, LayerIds=[S1L1], InstanceType="t2.micro"
)["InstanceId"]
S1L1_i2 = client.create_instance(
StackId=S1, LayerIds=[S1L1], InstanceType="t2.micro"
)["InstanceId"]
S2L1_i1 = client.create_instance(
StackId=S2, LayerIds=[S2L1], InstanceType="t2.micro"
)["InstanceId"]
S2L2_i1 = client.create_instance(
StackId=S2, LayerIds=[S2L2], InstanceType="t2.micro"
)["InstanceId"]
S2L2_i2 = client.create_instance(
StackId=S2, LayerIds=[S2L2], InstanceType="t2.micro"
)["InstanceId"]
# instances in Stack 1
response = client.describe_instances(StackId=S1)["Instances"]
response.should.have.length_of(2)
S1L1_i1.should.be.within([i["InstanceId"] for i in response])
S1L1_i2.should.be.within([i["InstanceId"] for i in response])
response2 = client.describe_instances(InstanceIds=[S1L1_i1, S1L1_i2])["Instances"]
sorted(response2, key=lambda d: d["InstanceId"]).should.equal(
sorted(response, key=lambda d: d["InstanceId"])
)
response3 = client.describe_instances(LayerId=S1L1)["Instances"]
sorted(response3, key=lambda d: d["InstanceId"]).should.equal(
sorted(response, key=lambda d: d["InstanceId"])
)
response = client.describe_instances(StackId=S1)["Instances"]
response.should.have.length_of(2)
S1L1_i1.should.be.within([i["InstanceId"] for i in response])
S1L1_i2.should.be.within([i["InstanceId"] for i in response])
# instances in Stack 2
response = client.describe_instances(StackId=S2)["Instances"]
response.should.have.length_of(3)
S2L1_i1.should.be.within([i["InstanceId"] for i in response])
S2L2_i1.should.be.within([i["InstanceId"] for i in response])
S2L2_i2.should.be.within([i["InstanceId"] for i in response])
response = client.describe_instances(LayerId=S2L1)["Instances"]
response.should.have.length_of(1)
S2L1_i1.should.be.within([i["InstanceId"] for i in response])
response = client.describe_instances(LayerId=S2L2)["Instances"]
response.should.have.length_of(2)
S2L1_i1.should_not.be.within([i["InstanceId"] for i in response])
# ClientError
client.describe_instances.when.called_with(StackId=S1, LayerId=S1L1).should.throw(
Exception, "Please provide either one or more"
)
# ClientError
client.describe_instances.when.called_with(StackId="nothere").should.throw(
Exception, "nothere"
)
# ClientError
client.describe_instances.when.called_with(LayerId="nothere").should.throw(
Exception, "nothere"
)
# ClientError
client.describe_instances.when.called_with(InstanceIds=["nothere"]).should.throw(
Exception, "nothere"
)
@mock_opsworks
@mock_ec2
def test_ec2_integration():
"""
instances created via OpsWorks should be discoverable via ec2
"""
opsworks = boto3.client("opsworks", region_name="us-east-1")
stack_id = opsworks.create_stack(
Name="S1",
Region="us-east-1",
ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn",
)["StackId"]
layer_id = opsworks.create_layer(
StackId=stack_id, Type="custom", Name="S1L1", Shortname="S1L1"
)["LayerId"]
instance_id = opsworks.create_instance(
StackId=stack_id,
LayerIds=[layer_id],
InstanceType="t2.micro",
SshKeyName="testSSH",
)["InstanceId"]
ec2 = boto3.client("ec2", region_name="us-east-1")
# Before starting the instance, it shouldn't be discoverable via ec2
reservations = ec2.describe_instances()["Reservations"]
assert reservations.should.be.empty
# After starting the instance, it should be discoverable via ec2
opsworks.start_instance(InstanceId=instance_id)
reservations = ec2.describe_instances()["Reservations"]
reservations[0]["Instances"].should.have.length_of(1)
instance = reservations[0]["Instances"][0]
opsworks_instance = opsworks.describe_instances(StackId=stack_id)["Instances"][0]
instance["InstanceId"].should.equal(opsworks_instance["Ec2InstanceId"])
instance["PrivateIpAddress"].should.equal(opsworks_instance["PrivateIp"])

View File

@ -1,96 +1,96 @@
from __future__ import unicode_literals
import boto3
from freezegun import freeze_time
import sure # noqa
import re
from moto import mock_opsworks
@freeze_time("2015-01-01")
@mock_opsworks
def test_create_layer_response():
    """Layer names/shortnames must be unique per stack, not globally."""
    client = boto3.client("opsworks", region_name="us-east-1")
    stack_id = client.create_stack(
        Name="test_stack_1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]
    layer_resp = client.create_layer(
        StackId=stack_id,
        Type="custom",
        Name="TestLayer",
        Shortname="TestLayerShortName",
    )
    layer_resp.should.contain("LayerId")

    # The same name/shortname is fine in a different stack.
    other_stack_id = client.create_stack(
        Name="test_stack_2",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]
    layer_resp = client.create_layer(
        StackId=other_stack_id,
        Type="custom",
        Name="TestLayer",
        Shortname="TestLayerShortName",
    )
    layer_resp.should.contain("LayerId")

    # ClientError: duplicate layer name within the same stack
    client.create_layer.when.called_with(
        StackId=stack_id, Type="custom", Name="TestLayer", Shortname="_"
    ).should.throw(Exception, re.compile(r'already a layer named "TestLayer"'))
    # ClientError: duplicate shortname within the same stack
    client.create_layer.when.called_with(
        StackId=stack_id, Type="custom", Name="_", Shortname="TestLayerShortName"
    ).should.throw(
        Exception, re.compile(r'already a layer with shortname "TestLayerShortName"')
    )
    # ClientError: unknown stack
    client.create_layer.when.called_with(
        StackId="nothere", Type="custom", Name="TestLayer", Shortname="_"
    ).should.throw(Exception, "nothere")
@freeze_time("2015-01-01")
@mock_opsworks
def test_describe_layers():
    """Layers are retrievable by stack ID or layer ID, and bad lookups raise."""
    client = boto3.client("opsworks", region_name="us-east-1")
    stack_id = client.create_stack(
        Name="test_stack_1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]
    layer_id = client.create_layer(
        StackId=stack_id,
        Type="custom",
        Name="TestLayer",
        Shortname="TestLayerShortName",
    )["LayerId"]

    # Both lookup styles must return the same single layer.
    by_stack = client.describe_layers(StackId=stack_id)
    by_layer = client.describe_layers(LayerIds=[layer_id])
    by_stack["Layers"].should.equal(by_layer["Layers"])
    by_stack["Layers"][0]["Name"].should.equal("TestLayer")

    # ClientError: the two filters are mutually exclusive
    client.describe_layers.when.called_with(
        StackId=stack_id, LayerIds=[layer_id]
    ).should.throw(Exception, "Please provide one or more layer IDs or a stack ID")
    # ClientError: unknown stack
    client.describe_layers.when.called_with(StackId="nothere").should.throw(
        Exception, "Unable to find stack with ID nothere"
    )
    # ClientError: unknown layer
    client.describe_layers.when.called_with(LayerIds=["nothere"]).should.throw(
        Exception, "nothere"
    )
from __future__ import unicode_literals
import boto3
from freezegun import freeze_time
import sure # noqa
import re
from moto import mock_opsworks
@freeze_time("2015-01-01")
@mock_opsworks
def test_create_layer_response():
client = boto3.client("opsworks", region_name="us-east-1")
stack_id = client.create_stack(
Name="test_stack_1",
Region="us-east-1",
ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn",
)["StackId"]
response = client.create_layer(
StackId=stack_id,
Type="custom",
Name="TestLayer",
Shortname="TestLayerShortName",
)
response.should.contain("LayerId")
second_stack_id = client.create_stack(
Name="test_stack_2",
Region="us-east-1",
ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn",
)["StackId"]
response = client.create_layer(
StackId=second_stack_id,
Type="custom",
Name="TestLayer",
Shortname="TestLayerShortName",
)
response.should.contain("LayerId")
# ClientError
client.create_layer.when.called_with(
StackId=stack_id, Type="custom", Name="TestLayer", Shortname="_"
).should.throw(Exception, re.compile(r'already a layer named "TestLayer"'))
# ClientError
client.create_layer.when.called_with(
StackId=stack_id, Type="custom", Name="_", Shortname="TestLayerShortName"
).should.throw(
Exception, re.compile(r'already a layer with shortname "TestLayerShortName"')
)
# ClientError
client.create_layer.when.called_with(
StackId="nothere", Type="custom", Name="TestLayer", Shortname="_"
).should.throw(Exception, "nothere")
@freeze_time("2015-01-01")
@mock_opsworks
def test_describe_layers():
client = boto3.client("opsworks", region_name="us-east-1")
stack_id = client.create_stack(
Name="test_stack_1",
Region="us-east-1",
ServiceRoleArn="service_arn",
DefaultInstanceProfileArn="profile_arn",
)["StackId"]
layer_id = client.create_layer(
StackId=stack_id,
Type="custom",
Name="TestLayer",
Shortname="TestLayerShortName",
)["LayerId"]
rv1 = client.describe_layers(StackId=stack_id)
rv2 = client.describe_layers(LayerIds=[layer_id])
rv1["Layers"].should.equal(rv2["Layers"])
rv1["Layers"][0]["Name"].should.equal("TestLayer")
# ClientError
client.describe_layers.when.called_with(
StackId=stack_id, LayerIds=[layer_id]
).should.throw(Exception, "Please provide one or more layer IDs or a stack ID")
# ClientError
client.describe_layers.when.called_with(StackId="nothere").should.throw(
Exception, "Unable to find stack with ID nothere"
)
# ClientError
client.describe_layers.when.called_with(LayerIds=["nothere"]).should.throw(
Exception, "nothere"
)

View File

@ -1,263 +1,263 @@
from __future__ import unicode_literals
from botocore.exceptions import ClientError
import boto3
import sure # noqa
from nose.tools import assert_raises
from moto import mock_polly
# Polly only available in a few regions
DEFAULT_REGION = "eu-west-1"

# Minimal valid PLS (Pronunciation Lexicon Specification) document, used as a
# shared fixture by the lexicon/synthesis tests below.
LEXICON_XML = """<?xml version="1.0" encoding="UTF-8"?>
<lexicon version="1.0"
xmlns="http://www.w3.org/2005/01/pronunciation-lexicon"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.w3.org/2005/01/pronunciation-lexicon
http://www.w3.org/TR/2007/CR-pronunciation-lexicon-20071212/pls.xsd"
alphabet="ipa"
xml:lang="en-US">
<lexeme>
<grapheme>W3C</grapheme>
<alias>World Wide Web Consortium</alias>
</lexeme>
</lexicon>"""
@mock_polly
def test_describe_voices():
    """describe_voices lists voices, filters by LanguageCode, and 400s on
    an unknown language code."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)

    resp = client.describe_voices()
    len(resp["Voices"]).should.be.greater_than(1)

    resp = client.describe_voices(LanguageCode="en-GB")
    len(resp["Voices"]).should.equal(3)

    try:
        client.describe_voices(LanguageCode="SOME_LANGUAGE")
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("400")
    else:
        # message fixed: was the ungrammatical "Should of raised"
        raise RuntimeError("Should have raised an exception")
@mock_polly
def test_put_list_lexicon():
    """A stored lexicon shows up in list_lexicons."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)

    # put_lexicon returns nothing on success
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    lexicons = client.list_lexicons()["Lexicons"]
    len(lexicons).should.equal(1)
@mock_polly
def test_put_get_lexicon():
    """get_lexicon returns both the content and its attributes."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)

    # put_lexicon returns nothing on success
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    result = client.get_lexicon(Name="test")
    result.should.contain("Lexicon")
    result.should.contain("LexiconAttributes")
@mock_polly
def test_put_lexicon_bad_name():
    """Lexicon names with invalid characters (here '-') are rejected."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    try:
        client.put_lexicon(Name="test-invalid", Content=LEXICON_XML)
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("InvalidParameterValue")
    else:
        # message fixed: was the ungrammatical "Should of raised"
        raise RuntimeError("Should have raised an exception")
@mock_polly
def test_synthesize_speech():
    """Each output format maps to the matching audio Content-Type."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)

    # put_lexicon returns nothing on success
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    format_to_content_type = [
        ("pcm", "audio/pcm"),
        ("mp3", "audio/mpeg"),
        ("ogg_vorbis", "audio/ogg"),
    ]
    for output_format, expected_type in format_to_content_type:
        resp = client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat=output_format,
            SampleRate="16000",
            Text="test1234",
            TextType="text",
            VoiceId="Astrid",
        )
        resp["ContentType"].should.equal(expected_type)
@mock_polly
def test_synthesize_speech_bad_lexicon():
    """Referencing a lexicon that was never stored raises LexiconNotFoundException."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)
    try:
        client.synthesize_speech(
            LexiconNames=["test2"],  # not the stored name
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234",
            TextType="text",
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("LexiconNotFoundException")
    else:
        # message fixed: was the ungrammatical "Should of raised"
        raise RuntimeError("Should have raised LexiconNotFoundException")
@mock_polly
def test_synthesize_speech_bad_output_format():
    """An unsupported OutputFormat raises InvalidParameterValue."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)
    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="invalid",
            SampleRate="16000",
            Text="test1234",
            TextType="text",
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("InvalidParameterValue")
    else:
        # message fixed: was the incomplete, ungrammatical "Should of raised "
        raise RuntimeError("Should have raised InvalidParameterValue")
@mock_polly
def test_synthesize_speech_bad_sample_rate():
    """An unsupported SampleRate raises InvalidSampleRateException."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)
    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="18000",  # not a valid Polly sample rate
            Text="test1234",
            TextType="text",
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("InvalidSampleRateException")
    else:
        # message fixed: was the incomplete, ungrammatical "Should of raised "
        raise RuntimeError("Should have raised InvalidSampleRateException")
@mock_polly
def test_synthesize_speech_bad_text_type():
    """A TextType other than 'text'/'ssml' raises InvalidParameterValue."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)
    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234",
            TextType="invalid",
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("InvalidParameterValue")
    else:
        # message fixed: was the incomplete, ungrammatical "Should of raised "
        raise RuntimeError("Should have raised InvalidParameterValue")
@mock_polly
def test_synthesize_speech_bad_voice_id():
    """An unknown VoiceId raises InvalidParameterValue."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)
    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234",
            TextType="text",
            VoiceId="Luke",  # not a real Polly voice
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("InvalidParameterValue")
    else:
        # message fixed: was the incomplete, ungrammatical "Should of raised "
        raise RuntimeError("Should have raised InvalidParameterValue")
@mock_polly
def test_synthesize_speech_text_too_long():
    """Text beyond the 3000-character limit raises TextLengthExceededException."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)
    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234" * 376,  # = 3008 characters
            TextType="text",
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("TextLengthExceededException")
    else:
        # message fixed: was the incomplete, ungrammatical "Should of raised "
        raise RuntimeError("Should have raised TextLengthExceededException")
@mock_polly
def test_synthesize_speech_bad_speech_marks1():
    """SpeechMarkTypes with a non-JSON output format (TextType='text')
    raises MarksNotSupportedForFormatException."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)
    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234",
            TextType="text",
            SpeechMarkTypes=["word"],
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal(
            "MarksNotSupportedForFormatException"
        )
    else:
        # message fixed: was the incomplete, ungrammatical "Should of raised "
        raise RuntimeError("Should have raised MarksNotSupportedForFormatException")
@mock_polly
def test_synthesize_speech_bad_speech_marks2():
    """SpeechMarkTypes with a non-JSON output format (TextType='ssml')
    raises MarksNotSupportedForFormatException."""
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)
    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234",
            TextType="ssml",
            SpeechMarkTypes=["word"],
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal(
            "MarksNotSupportedForFormatException"
        )
    else:
        # message fixed: was the incomplete, ungrammatical "Should of raised "
        raise RuntimeError("Should have raised MarksNotSupportedForFormatException")
from __future__ import unicode_literals
from botocore.exceptions import ClientError
import boto3
import sure # noqa
from nose.tools import assert_raises
from moto import mock_polly
# Polly only available in a few regions
DEFAULT_REGION = "eu-west-1"
LEXICON_XML = """<?xml version="1.0" encoding="UTF-8"?>
<lexicon version="1.0"
xmlns="http://www.w3.org/2005/01/pronunciation-lexicon"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.w3.org/2005/01/pronunciation-lexicon
http://www.w3.org/TR/2007/CR-pronunciation-lexicon-20071212/pls.xsd"
alphabet="ipa"
xml:lang="en-US">
<lexeme>
<grapheme>W3C</grapheme>
<alias>World Wide Web Consortium</alias>
</lexeme>
</lexicon>"""
@mock_polly
def test_describe_voices():
client = boto3.client("polly", region_name=DEFAULT_REGION)
resp = client.describe_voices()
len(resp["Voices"]).should.be.greater_than(1)
resp = client.describe_voices(LanguageCode="en-GB")
len(resp["Voices"]).should.equal(3)
try:
client.describe_voices(LanguageCode="SOME_LANGUAGE")
except ClientError as err:
err.response["Error"]["Code"].should.equal("400")
else:
raise RuntimeError("Should of raised an exception")
@mock_polly
def test_put_list_lexicon():
client = boto3.client("polly", region_name=DEFAULT_REGION)
# Return nothing
client.put_lexicon(Name="test", Content=LEXICON_XML)
resp = client.list_lexicons()
len(resp["Lexicons"]).should.equal(1)
@mock_polly
def test_put_get_lexicon():
client = boto3.client("polly", region_name=DEFAULT_REGION)
# Return nothing
client.put_lexicon(Name="test", Content=LEXICON_XML)
resp = client.get_lexicon(Name="test")
resp.should.contain("Lexicon")
resp.should.contain("LexiconAttributes")
@mock_polly
def test_put_lexicon_bad_name():
client = boto3.client("polly", region_name=DEFAULT_REGION)
try:
client.put_lexicon(Name="test-invalid", Content=LEXICON_XML)
except ClientError as err:
err.response["Error"]["Code"].should.equal("InvalidParameterValue")
else:
raise RuntimeError("Should of raised an exception")
@mock_polly
def test_synthesize_speech():
client = boto3.client("polly", region_name=DEFAULT_REGION)
# Return nothing
client.put_lexicon(Name="test", Content=LEXICON_XML)
tests = (("pcm", "audio/pcm"), ("mp3", "audio/mpeg"), ("ogg_vorbis", "audio/ogg"))
for output_format, content_type in tests:
resp = client.synthesize_speech(
LexiconNames=["test"],
OutputFormat=output_format,
SampleRate="16000",
Text="test1234",
TextType="text",
VoiceId="Astrid",
)
resp["ContentType"].should.equal(content_type)
@mock_polly
def test_synthesize_speech_bad_lexicon():
client = boto3.client("polly", region_name=DEFAULT_REGION)
client.put_lexicon(Name="test", Content=LEXICON_XML)
try:
client.synthesize_speech(
LexiconNames=["test2"],
OutputFormat="pcm",
SampleRate="16000",
Text="test1234",
TextType="text",
VoiceId="Astrid",
)
except ClientError as err:
err.response["Error"]["Code"].should.equal("LexiconNotFoundException")
else:
raise RuntimeError("Should of raised LexiconNotFoundException")
@mock_polly
def test_synthesize_speech_bad_output_format():
client = boto3.client("polly", region_name=DEFAULT_REGION)
client.put_lexicon(Name="test", Content=LEXICON_XML)
try:
client.synthesize_speech(
LexiconNames=["test"],
OutputFormat="invalid",
SampleRate="16000",
Text="test1234",
TextType="text",
VoiceId="Astrid",
)
except ClientError as err:
err.response["Error"]["Code"].should.equal("InvalidParameterValue")
else:
raise RuntimeError("Should of raised ")
@mock_polly
def test_synthesize_speech_bad_sample_rate():
client = boto3.client("polly", region_name=DEFAULT_REGION)
client.put_lexicon(Name="test", Content=LEXICON_XML)
try:
client.synthesize_speech(
LexiconNames=["test"],
OutputFormat="pcm",
SampleRate="18000",
Text="test1234",
TextType="text",
VoiceId="Astrid",
)
except ClientError as err:
err.response["Error"]["Code"].should.equal("InvalidSampleRateException")
else:
raise RuntimeError("Should of raised ")
@mock_polly
def test_synthesize_speech_bad_text_type():
client = boto3.client("polly", region_name=DEFAULT_REGION)
client.put_lexicon(Name="test", Content=LEXICON_XML)
try:
client.synthesize_speech(
LexiconNames=["test"],
OutputFormat="pcm",
SampleRate="16000",
Text="test1234",
TextType="invalid",
VoiceId="Astrid",
)
except ClientError as err:
err.response["Error"]["Code"].should.equal("InvalidParameterValue")
else:
raise RuntimeError("Should of raised ")
@mock_polly
def test_synthesize_speech_bad_voice_id():
client = boto3.client("polly", region_name=DEFAULT_REGION)
client.put_lexicon(Name="test", Content=LEXICON_XML)
try:
client.synthesize_speech(
LexiconNames=["test"],
OutputFormat="pcm",
SampleRate="16000",
Text="test1234",
TextType="text",
VoiceId="Luke",
)
except ClientError as err:
err.response["Error"]["Code"].should.equal("InvalidParameterValue")
else:
raise RuntimeError("Should of raised ")
@mock_polly
def test_synthesize_speech_text_too_long():
client = boto3.client("polly", region_name=DEFAULT_REGION)
client.put_lexicon(Name="test", Content=LEXICON_XML)
try:
client.synthesize_speech(
LexiconNames=["test"],
OutputFormat="pcm",
SampleRate="16000",
Text="test1234" * 376, # = 3008 characters
TextType="text",
VoiceId="Astrid",
)
except ClientError as err:
err.response["Error"]["Code"].should.equal("TextLengthExceededException")
else:
raise RuntimeError("Should of raised ")
@mock_polly
def test_synthesize_speech_bad_speech_marks1():
client = boto3.client("polly", region_name=DEFAULT_REGION)
client.put_lexicon(Name="test", Content=LEXICON_XML)
try:
client.synthesize_speech(
LexiconNames=["test"],
OutputFormat="pcm",
SampleRate="16000",
Text="test1234",
TextType="text",
SpeechMarkTypes=["word"],
VoiceId="Astrid",
)
except ClientError as err:
err.response["Error"]["Code"].should.equal(
"MarksNotSupportedForFormatException"
)
else:
raise RuntimeError("Should of raised ")
@mock_polly
def test_synthesize_speech_bad_speech_marks2():
client = boto3.client("polly", region_name=DEFAULT_REGION)
client.put_lexicon(Name="test", Content=LEXICON_XML)
try:
client.synthesize_speech(
LexiconNames=["test"],
OutputFormat="pcm",
SampleRate="16000",
Text="test1234",
TextType="ssml",
SpeechMarkTypes=["word"],
VoiceId="Astrid",
)
except ClientError as err:
err.response["Error"]["Code"].should.equal(
"MarksNotSupportedForFormatException"
)
else:
raise RuntimeError("Should of raised ")

View File

@ -21,7 +21,10 @@ def test_get_resources_s3():
# Create 4 buckets
for i in range(1, 5):
i_str = str(i)
s3_client.create_bucket(Bucket="test_bucket" + i_str)
s3_client.create_bucket(
Bucket="test_bucket" + i_str,
CreateBucketConfiguration={"LocationConstraint": "eu-central-1"},
)
s3_client.put_bucket_tagging(
Bucket="test_bucket" + i_str,
Tagging={"TagSet": [{"Key": "key" + i_str, "Value": "value" + i_str}]},

File diff suppressed because it is too large Load Diff

View File

@ -16,7 +16,7 @@ from moto import mock_s3_deprecated, mock_s3
@mock_s3_deprecated
def test_lifecycle_create():
conn = boto.s3.connect_to_region("us-west-1")
bucket = conn.create_bucket("foobar")
bucket = conn.create_bucket("foobar", location="us-west-1")
lifecycle = Lifecycle()
lifecycle.add_rule("myid", "", "Enabled", 30)
@ -33,7 +33,9 @@ def test_lifecycle_create():
@mock_s3
def test_lifecycle_with_filters():
client = boto3.client("s3")
client.create_bucket(Bucket="bucket")
client.create_bucket(
Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
# Create a lifecycle rule with a Filter (no tags):
lfc = {
@ -245,7 +247,9 @@ def test_lifecycle_with_filters():
@mock_s3
def test_lifecycle_with_eodm():
client = boto3.client("s3")
client.create_bucket(Bucket="bucket")
client.create_bucket(
Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
lfc = {
"Rules": [
@ -293,7 +297,9 @@ def test_lifecycle_with_eodm():
@mock_s3
def test_lifecycle_with_nve():
client = boto3.client("s3")
client.create_bucket(Bucket="bucket")
client.create_bucket(
Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
lfc = {
"Rules": [
@ -327,7 +333,9 @@ def test_lifecycle_with_nve():
@mock_s3
def test_lifecycle_with_nvt():
client = boto3.client("s3")
client.create_bucket(Bucket="bucket")
client.create_bucket(
Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
lfc = {
"Rules": [
@ -393,7 +401,9 @@ def test_lifecycle_with_nvt():
@mock_s3
def test_lifecycle_with_aimu():
client = boto3.client("s3")
client.create_bucket(Bucket="bucket")
client.create_bucket(
Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
lfc = {
"Rules": [
@ -432,7 +442,7 @@ def test_lifecycle_with_aimu():
@mock_s3_deprecated
def test_lifecycle_with_glacier_transition():
conn = boto.s3.connect_to_region("us-west-1")
bucket = conn.create_bucket("foobar")
bucket = conn.create_bucket("foobar", location="us-west-1")
lifecycle = Lifecycle()
transition = Transition(days=30, storage_class="GLACIER")
@ -451,7 +461,7 @@ def test_lifecycle_with_glacier_transition():
@mock_s3_deprecated
def test_lifecycle_multi():
conn = boto.s3.connect_to_region("us-west-1")
bucket = conn.create_bucket("foobar")
bucket = conn.create_bucket("foobar", location="us-west-1")
date = "2022-10-12T00:00:00.000Z"
sc = "GLACIER"
@ -493,7 +503,7 @@ def test_lifecycle_multi():
@mock_s3_deprecated
def test_lifecycle_delete():
conn = boto.s3.connect_to_region("us-west-1")
bucket = conn.create_bucket("foobar")
bucket = conn.create_bucket("foobar", location="us-west-1")
lifecycle = Lifecycle()
lifecycle.add_rule(expiration=30)

View File

@ -11,7 +11,7 @@ from moto import mock_s3
@mock_s3
def test_s3_storage_class_standard():
s3 = boto3.client("s3")
s3 = boto3.client("s3", region_name="us-east-1")
s3.create_bucket(Bucket="Bucket")
# add an object to the bucket with standard storage
@ -26,7 +26,9 @@ def test_s3_storage_class_standard():
@mock_s3
def test_s3_storage_class_infrequent_access():
s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket")
s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-2"}
)
# add an object to the bucket with standard storage
@ -46,7 +48,9 @@ def test_s3_storage_class_infrequent_access():
def test_s3_storage_class_intelligent_tiering():
s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket")
s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-east-2"}
)
s3.put_object(
Bucket="Bucket",
Key="my_key_infrequent",
@ -61,7 +65,7 @@ def test_s3_storage_class_intelligent_tiering():
@mock_s3
def test_s3_storage_class_copy():
s3 = boto3.client("s3")
s3 = boto3.client("s3", region_name="us-east-1")
s3.create_bucket(Bucket="Bucket")
s3.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD"
@ -86,7 +90,7 @@ def test_s3_storage_class_copy():
@mock_s3
def test_s3_invalid_copied_storage_class():
s3 = boto3.client("s3")
s3 = boto3.client("s3", region_name="us-east-1")
s3.create_bucket(Bucket="Bucket")
s3.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD"
@ -119,7 +123,9 @@ def test_s3_invalid_copied_storage_class():
@mock_s3
def test_s3_invalid_storage_class():
s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket")
s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
# Try to add an object with an invalid storage class
with assert_raises(ClientError) as err:
@ -137,7 +143,9 @@ def test_s3_invalid_storage_class():
@mock_s3
def test_s3_default_storage_class():
s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket")
s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body")
@ -150,7 +158,9 @@ def test_s3_default_storage_class():
@mock_s3
def test_s3_copy_object_error_for_glacier_storage_class():
s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket")
s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
s3.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="GLACIER"
@ -169,7 +179,9 @@ def test_s3_copy_object_error_for_glacier_storage_class():
@mock_s3
def test_s3_copy_object_error_for_deep_archive_storage_class():
s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket")
s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
s3.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="DEEP_ARCHIVE"

View File

@ -1,107 +1,107 @@
from collections import namedtuple
import sure # noqa
from moto.swf.exceptions import SWFUnknownResourceFault
from moto.swf.models import Domain
# Ensure 'assert_raises' context manager support for Python 2.6
import tests.backport_assert_raises # noqa
# Fake WorkflowExecution for tests purposes
WorkflowExecution = namedtuple(
"WorkflowExecution", ["workflow_id", "run_id", "execution_status", "open"]
)
def test_domain_short_dict_representation():
domain = Domain("foo", "52")
domain.to_short_dict().should.equal({"name": "foo", "status": "REGISTERED"})
domain.description = "foo bar"
domain.to_short_dict()["description"].should.equal("foo bar")
def test_domain_full_dict_representation():
domain = Domain("foo", "52")
domain.to_full_dict()["domainInfo"].should.equal(domain.to_short_dict())
_config = domain.to_full_dict()["configuration"]
_config["workflowExecutionRetentionPeriodInDays"].should.equal("52")
def test_domain_string_representation():
domain = Domain("my-domain", "60")
str(domain).should.equal("Domain(name: my-domain, status: REGISTERED)")
def test_domain_add_to_activity_task_list():
domain = Domain("my-domain", "60")
domain.add_to_activity_task_list("foo", "bar")
domain.activity_task_lists.should.equal({"foo": ["bar"]})
def test_domain_activity_tasks():
domain = Domain("my-domain", "60")
domain.add_to_activity_task_list("foo", "bar")
domain.add_to_activity_task_list("other", "baz")
sorted(domain.activity_tasks).should.equal(["bar", "baz"])
def test_domain_add_to_decision_task_list():
domain = Domain("my-domain", "60")
domain.add_to_decision_task_list("foo", "bar")
domain.decision_task_lists.should.equal({"foo": ["bar"]})
def test_domain_decision_tasks():
domain = Domain("my-domain", "60")
domain.add_to_decision_task_list("foo", "bar")
domain.add_to_decision_task_list("other", "baz")
sorted(domain.decision_tasks).should.equal(["bar", "baz"])
def test_domain_get_workflow_execution():
domain = Domain("my-domain", "60")
wfe1 = WorkflowExecution(
workflow_id="wf-id-1", run_id="run-id-1", execution_status="OPEN", open=True
)
wfe2 = WorkflowExecution(
workflow_id="wf-id-1", run_id="run-id-2", execution_status="CLOSED", open=False
)
wfe3 = WorkflowExecution(
workflow_id="wf-id-2", run_id="run-id-3", execution_status="OPEN", open=True
)
wfe4 = WorkflowExecution(
workflow_id="wf-id-3", run_id="run-id-4", execution_status="CLOSED", open=False
)
domain.workflow_executions = [wfe1, wfe2, wfe3, wfe4]
# get workflow execution through workflow_id and run_id
domain.get_workflow_execution("wf-id-1", run_id="run-id-1").should.equal(wfe1)
domain.get_workflow_execution("wf-id-1", run_id="run-id-2").should.equal(wfe2)
domain.get_workflow_execution("wf-id-3", run_id="run-id-4").should.equal(wfe4)
domain.get_workflow_execution.when.called_with(
"wf-id-1", run_id="non-existent"
).should.throw(SWFUnknownResourceFault)
# get OPEN workflow execution by default if no run_id
domain.get_workflow_execution("wf-id-1").should.equal(wfe1)
domain.get_workflow_execution.when.called_with("wf-id-3").should.throw(
SWFUnknownResourceFault
)
domain.get_workflow_execution.when.called_with("wf-id-non-existent").should.throw(
SWFUnknownResourceFault
)
# raise_if_closed attribute
domain.get_workflow_execution(
"wf-id-1", run_id="run-id-1", raise_if_closed=True
).should.equal(wfe1)
domain.get_workflow_execution.when.called_with(
"wf-id-3", run_id="run-id-4", raise_if_closed=True
).should.throw(SWFUnknownResourceFault)
# raise_if_none attribute
domain.get_workflow_execution("foo", raise_if_none=False).should.be.none
from collections import namedtuple
import sure # noqa
from moto.swf.exceptions import SWFUnknownResourceFault
from moto.swf.models import Domain
# Ensure 'assert_raises' context manager support for Python 2.6
import tests.backport_assert_raises # noqa
# Fake WorkflowExecution for tests purposes
# Lightweight stand-in exposing only the attributes Domain inspects
# (workflow_id / run_id for lookup, execution_status / open for filtering).
WorkflowExecution = namedtuple(
    "WorkflowExecution", ["workflow_id", "run_id", "execution_status", "open"]
)
def test_domain_short_dict_representation():
    """to_short_dict exposes name/status and, once set, the description."""
    d = Domain("foo", "52")
    short = d.to_short_dict()
    short.should.equal({"name": "foo", "status": "REGISTERED"})
    d.description = "foo bar"
    d.to_short_dict()["description"].should.equal("foo bar")
def test_domain_full_dict_representation():
    """The full dict embeds the short dict under domainInfo and exposes the
    retention period under configuration."""
    d = Domain("foo", "52")
    full = d.to_full_dict()
    full["domainInfo"].should.equal(d.to_short_dict())
    full["configuration"]["workflowExecutionRetentionPeriodInDays"].should.equal("52")
def test_domain_string_representation():
    """str() of a Domain mentions its name and current status."""
    str(Domain("my-domain", "60")).should.equal(
        "Domain(name: my-domain, status: REGISTERED)"
    )
def test_domain_add_to_activity_task_list():
    """add_to_activity_task_list files the task under its task-list name."""
    d = Domain("my-domain", "60")
    d.add_to_activity_task_list("foo", "bar")
    d.activity_task_lists.should.equal({"foo": ["bar"]})
def test_domain_activity_tasks():
    """activity_tasks aggregates tasks across every task list."""
    d = Domain("my-domain", "60")
    for task_list, task in (("foo", "bar"), ("other", "baz")):
        d.add_to_activity_task_list(task_list, task)
    sorted(d.activity_tasks).should.equal(["bar", "baz"])
def test_domain_add_to_decision_task_list():
    """add_to_decision_task_list files the task under its task-list name."""
    d = Domain("my-domain", "60")
    d.add_to_decision_task_list("foo", "bar")
    d.decision_task_lists.should.equal({"foo": ["bar"]})
def test_domain_decision_tasks():
    """decision_tasks aggregates tasks across every task list."""
    d = Domain("my-domain", "60")
    for task_list, task in (("foo", "bar"), ("other", "baz")):
        d.add_to_decision_task_list(task_list, task)
    sorted(d.decision_tasks).should.equal(["bar", "baz"])
def test_domain_get_workflow_execution():
    """Exercise Domain.get_workflow_execution lookup semantics:

    - lookup by (workflow_id, run_id) matches regardless of status
    - without run_id only an OPEN execution is returned
    - unknown ids raise SWFUnknownResourceFault
    - raise_if_closed / raise_if_none toggle the error behaviour
    """
    domain = Domain("my-domain", "60")
    # Two executions share "wf-id-1": one OPEN, one CLOSED (distinct run_ids).
    wfe1 = WorkflowExecution(
        workflow_id="wf-id-1", run_id="run-id-1", execution_status="OPEN", open=True
    )
    wfe2 = WorkflowExecution(
        workflow_id="wf-id-1", run_id="run-id-2", execution_status="CLOSED", open=False
    )
    wfe3 = WorkflowExecution(
        workflow_id="wf-id-2", run_id="run-id-3", execution_status="OPEN", open=True
    )
    wfe4 = WorkflowExecution(
        workflow_id="wf-id-3", run_id="run-id-4", execution_status="CLOSED", open=False
    )
    domain.workflow_executions = [wfe1, wfe2, wfe3, wfe4]
    # get workflow execution through workflow_id and run_id
    domain.get_workflow_execution("wf-id-1", run_id="run-id-1").should.equal(wfe1)
    domain.get_workflow_execution("wf-id-1", run_id="run-id-2").should.equal(wfe2)
    domain.get_workflow_execution("wf-id-3", run_id="run-id-4").should.equal(wfe4)
    domain.get_workflow_execution.when.called_with(
        "wf-id-1", run_id="non-existent"
    ).should.throw(SWFUnknownResourceFault)
    # get OPEN workflow execution by default if no run_id
    domain.get_workflow_execution("wf-id-1").should.equal(wfe1)
    # "wf-id-3" exists but only CLOSED, so the default lookup fails
    domain.get_workflow_execution.when.called_with("wf-id-3").should.throw(
        SWFUnknownResourceFault
    )
    domain.get_workflow_execution.when.called_with("wf-id-non-existent").should.throw(
        SWFUnknownResourceFault
    )
    # raise_if_closed attribute
    domain.get_workflow_execution(
        "wf-id-1", run_id="run-id-1", raise_if_closed=True
    ).should.equal(wfe1)
    domain.get_workflow_execution.when.called_with(
        "wf-id-3", run_id="run-id-4", raise_if_closed=True
    ).should.throw(SWFUnknownResourceFault)
    # raise_if_none attribute
    domain.get_workflow_execution("foo", raise_if_none=False).should.be.none

View File

@ -1,19 +1,19 @@
from freezegun import freeze_time
import sure # noqa
from moto.swf.models import Timeout
from ..utils import make_workflow_execution
def test_timeout_creation():
wfe = make_workflow_execution()
# epoch 1420113600 == "2015-01-01 13:00:00"
timeout = Timeout(wfe, 1420117200, "START_TO_CLOSE")
with freeze_time("2015-01-01 12:00:00"):
timeout.reached.should.be.falsy
with freeze_time("2015-01-01 13:00:00"):
timeout.reached.should.be.truthy
from freezegun import freeze_time
import sure # noqa
from moto.swf.models import Timeout
from ..utils import make_workflow_execution
def test_timeout_creation():
    """Timeout.reached flips from falsy to truthy once the clock passes it."""
    wfe = make_workflow_execution()
    # epoch 1420117200 == "2015-01-01 13:00:00" UTC
    start_to_close = Timeout(wfe, 1420117200, "START_TO_CLOSE")
    with freeze_time("2015-01-01 12:00:00"):
        start_to_close.reached.should.be.falsy
    with freeze_time("2015-01-01 13:00:00"):
        start_to_close.reached.should.be.truthy

View File

@ -148,6 +148,39 @@ def test_workflow_execution_full_dict_representation():
)
def test_closed_workflow_execution_full_dict_representation():
    """A CLOSED execution's full dict carries closeStatus/closeTimestamp inside
    executionInfo, zeroed open counts, and the type's execution configuration."""
    domain = get_basic_domain()
    wf_type = WorkflowType(
        "test-workflow",
        "v1.0",
        task_list="queue",
        default_child_policy="ABANDON",
        default_execution_start_to_close_timeout="300",
        default_task_start_to_close_timeout="300",
    )
    wfe = WorkflowExecution(domain, wf_type, "ab1234")
    wfe.execution_status = "CLOSED"
    wfe.close_status = "CANCELED"
    wfe.close_timestamp = 1420066801.123
    fd = wfe.to_full_dict()
    # executionInfo is the medium dict augmented with the close details
    medium_dict = wfe.to_medium_dict()
    medium_dict["closeStatus"] = "CANCELED"
    medium_dict["closeTimestamp"] = 1420066801.123
    fd["executionInfo"].should.equal(medium_dict)
    fd["openCounts"]["openTimers"].should.equal(0)
    fd["openCounts"]["openDecisionTasks"].should.equal(0)
    fd["openCounts"]["openActivityTasks"].should.equal(0)
    # configuration values come from the WorkflowType defaults above
    fd["executionConfiguration"].should.equal(
        {
            "childPolicy": "ABANDON",
            "executionStartToCloseTimeout": "300",
            "taskList": {"name": "queue"},
            "taskStartToCloseTimeout": "300",
        }
    )
def test_workflow_execution_list_dict_representation():
domain = get_basic_domain()
wf_type = WorkflowType(

View File

@ -1,114 +1,114 @@
import boto
from boto.swf.exceptions import SWFResponseError
import sure # noqa
from moto import mock_swf_deprecated
# RegisterDomain endpoint
@mock_swf_deprecated
def test_register_domain():
conn = boto.connect_swf("the_key", "the_secret")
conn.register_domain("test-domain", "60", description="A test domain")
all_domains = conn.list_domains("REGISTERED")
domain = all_domains["domainInfos"][0]
domain["name"].should.equal("test-domain")
domain["status"].should.equal("REGISTERED")
domain["description"].should.equal("A test domain")
@mock_swf_deprecated
def test_register_already_existing_domain():
conn = boto.connect_swf("the_key", "the_secret")
conn.register_domain("test-domain", "60", description="A test domain")
conn.register_domain.when.called_with(
"test-domain", "60", description="A test domain"
).should.throw(SWFResponseError)
@mock_swf_deprecated
def test_register_with_wrong_parameter_type():
conn = boto.connect_swf("the_key", "the_secret")
conn.register_domain.when.called_with(
"test-domain", 60, description="A test domain"
).should.throw(SWFResponseError)
# ListDomains endpoint
@mock_swf_deprecated
def test_list_domains_order():
conn = boto.connect_swf("the_key", "the_secret")
conn.register_domain("b-test-domain", "60")
conn.register_domain("a-test-domain", "60")
conn.register_domain("c-test-domain", "60")
all_domains = conn.list_domains("REGISTERED")
names = [domain["name"] for domain in all_domains["domainInfos"]]
names.should.equal(["a-test-domain", "b-test-domain", "c-test-domain"])
@mock_swf_deprecated
def test_list_domains_reverse_order():
conn = boto.connect_swf("the_key", "the_secret")
conn.register_domain("b-test-domain", "60")
conn.register_domain("a-test-domain", "60")
conn.register_domain("c-test-domain", "60")
all_domains = conn.list_domains("REGISTERED", reverse_order=True)
names = [domain["name"] for domain in all_domains["domainInfos"]]
names.should.equal(["c-test-domain", "b-test-domain", "a-test-domain"])
# DeprecateDomain endpoint
@mock_swf_deprecated
def test_deprecate_domain():
conn = boto.connect_swf("the_key", "the_secret")
conn.register_domain("test-domain", "60", description="A test domain")
conn.deprecate_domain("test-domain")
all_domains = conn.list_domains("DEPRECATED")
domain = all_domains["domainInfos"][0]
domain["name"].should.equal("test-domain")
@mock_swf_deprecated
def test_deprecate_already_deprecated_domain():
conn = boto.connect_swf("the_key", "the_secret")
conn.register_domain("test-domain", "60", description="A test domain")
conn.deprecate_domain("test-domain")
conn.deprecate_domain.when.called_with("test-domain").should.throw(SWFResponseError)
@mock_swf_deprecated
def test_deprecate_non_existent_domain():
conn = boto.connect_swf("the_key", "the_secret")
conn.deprecate_domain.when.called_with("non-existent").should.throw(
SWFResponseError
)
# DescribeDomain endpoint
@mock_swf_deprecated
def test_describe_domain():
conn = boto.connect_swf("the_key", "the_secret")
conn.register_domain("test-domain", "60", description="A test domain")
domain = conn.describe_domain("test-domain")
domain["configuration"]["workflowExecutionRetentionPeriodInDays"].should.equal("60")
domain["domainInfo"]["description"].should.equal("A test domain")
domain["domainInfo"]["name"].should.equal("test-domain")
domain["domainInfo"]["status"].should.equal("REGISTERED")
@mock_swf_deprecated
def test_describe_non_existent_domain():
conn = boto.connect_swf("the_key", "the_secret")
conn.describe_domain.when.called_with("non-existent").should.throw(SWFResponseError)
import boto
from boto.swf.exceptions import SWFResponseError
import sure # noqa
from moto import mock_swf_deprecated
# RegisterDomain endpoint
@mock_swf_deprecated
def test_register_domain():
    """A registered domain appears in the REGISTERED listing with its metadata."""
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("test-domain", "60", description="A test domain")

    all_domains = conn.list_domains("REGISTERED")
    domain = all_domains["domainInfos"][0]

    domain["name"].should.equal("test-domain")
    domain["status"].should.equal("REGISTERED")
    domain["description"].should.equal("A test domain")


@mock_swf_deprecated
def test_register_already_existing_domain():
    """Registering the same domain name twice is rejected."""
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("test-domain", "60", description="A test domain")

    conn.register_domain.when.called_with(
        "test-domain", "60", description="A test domain"
    ).should.throw(SWFResponseError)


@mock_swf_deprecated
def test_register_with_wrong_parameter_type():
    """The retention period must be passed as a string; an int is rejected."""
    conn = boto.connect_swf("the_key", "the_secret")

    conn.register_domain.when.called_with(
        "test-domain", 60, description="A test domain"
    ).should.throw(SWFResponseError)


# ListDomains endpoint
@mock_swf_deprecated
def test_list_domains_order():
    """list_domains returns domains sorted by name, ascending."""
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("b-test-domain", "60")
    conn.register_domain("a-test-domain", "60")
    conn.register_domain("c-test-domain", "60")

    all_domains = conn.list_domains("REGISTERED")
    names = [domain["name"] for domain in all_domains["domainInfos"]]
    names.should.equal(["a-test-domain", "b-test-domain", "c-test-domain"])


@mock_swf_deprecated
def test_list_domains_reverse_order():
    """reverse_order=True flips the listing to name descending."""
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("b-test-domain", "60")
    conn.register_domain("a-test-domain", "60")
    conn.register_domain("c-test-domain", "60")

    all_domains = conn.list_domains("REGISTERED", reverse_order=True)
    names = [domain["name"] for domain in all_domains["domainInfos"]]
    names.should.equal(["c-test-domain", "b-test-domain", "a-test-domain"])


# DeprecateDomain endpoint
@mock_swf_deprecated
def test_deprecate_domain():
    """A deprecated domain moves to the DEPRECATED listing."""
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("test-domain", "60", description="A test domain")
    conn.deprecate_domain("test-domain")

    all_domains = conn.list_domains("DEPRECATED")
    domain = all_domains["domainInfos"][0]
    domain["name"].should.equal("test-domain")


@mock_swf_deprecated
def test_deprecate_already_deprecated_domain():
    """Deprecating an already-deprecated domain is rejected."""
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("test-domain", "60", description="A test domain")
    conn.deprecate_domain("test-domain")

    conn.deprecate_domain.when.called_with("test-domain").should.throw(SWFResponseError)


@mock_swf_deprecated
def test_deprecate_non_existent_domain():
    """Deprecating an unknown domain is rejected."""
    conn = boto.connect_swf("the_key", "the_secret")

    conn.deprecate_domain.when.called_with("non-existent").should.throw(
        SWFResponseError
    )


# DescribeDomain endpoint
@mock_swf_deprecated
def test_describe_domain():
    """describe_domain returns both the configuration and domainInfo sections."""
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("test-domain", "60", description="A test domain")

    domain = conn.describe_domain("test-domain")
    domain["configuration"]["workflowExecutionRetentionPeriodInDays"].should.equal("60")
    domain["domainInfo"]["description"].should.equal("A test domain")
    domain["domainInfo"]["name"].should.equal("test-domain")
    domain["domainInfo"]["status"].should.equal("REGISTERED")


@mock_swf_deprecated
def test_describe_non_existent_domain():
    """Describing an unknown domain is rejected."""
    conn = boto.connect_swf("the_key", "the_secret")

    conn.describe_domain.when.called_with("non-existent").should.throw(SWFResponseError)

View File

@ -1,9 +1,9 @@
import sure # noqa
from moto.swf.utils import decapitalize
def test_decapitalize():
cases = {"fooBar": "fooBar", "FooBar": "fooBar", "FOO BAR": "fOO BAR"}
for before, after in cases.items():
decapitalize(before).should.equal(after)
import sure # noqa
from moto.swf.utils import decapitalize
def test_decapitalize():
    """decapitalize lowercases only the first character of its input."""
    for raw, expected in (
        ("fooBar", "fooBar"),
        ("FooBar", "fooBar"),
        ("FOO BAR", "fOO BAR"),
    ):
        decapitalize(raw).should.equal(expected)

View File

@ -0,0 +1,79 @@
import sure
from moto.utilities.tagging_service import TaggingService
def test_list_empty():
    """An untagged resource lists as an empty tag collection."""
    service = TaggingService()
    service.list_tags_for_resource("test").should.equal({"Tags": []})
def test_create_tag():
    """A stored tag comes back keyed under the custom container/key/value names."""
    service = TaggingService("TheTags", "TagKey", "TagValue")
    service.tag_resource("arn", [{"TagKey": "key_key", "TagValue": "value_value"}])
    listed = service.list_tags_for_resource("arn")
    listed.should.equal({"TheTags": [{"TagKey": "key_key", "TagValue": "value_value"}]})
def test_create_tag_without_value():
    """A tag supplied without a value is stored with Value=None."""
    service = TaggingService()
    service.tag_resource("arn", [{"Key": "key_key"}])
    listed = service.list_tags_for_resource("arn")
    listed.should.equal({"Tags": [{"Key": "key_key", "Value": None}]})
def test_delete_tag_using_names():
    """untag_resource_using_names removes a tag identified by its key name."""
    service = TaggingService()
    service.tag_resource("arn", [{"Key": "key_key", "Value": "value_value"}])
    service.untag_resource_using_names("arn", ["key_key"])
    service.list_tags_for_resource("arn").should.equal({"Tags": []})
def test_delete_all_tags_for_resource():
    """delete_all_tags_for_resource clears every tag on the resource at once."""
    service = TaggingService()
    service.tag_resource("arn", [{"Key": "key_key", "Value": "value_value"}])
    service.tag_resource("arn", [{"Key": "key_key2", "Value": "value_value2"}])
    service.delete_all_tags_for_resource("arn")
    service.list_tags_for_resource("arn").should.equal({"Tags": []})
def test_list_empty_delete():
    """Untagging a resource that was never tagged is a harmless no-op."""
    service = TaggingService()
    service.untag_resource_using_names("arn", ["key_key"])
    service.list_tags_for_resource("arn").should.equal({"Tags": []})
def test_delete_tag_using_tags():
    """untag_resource_using_tags removes tags given as full key/value dicts."""
    service = TaggingService()
    tags = [{"Key": "key_key", "Value": "value_value"}]
    service.tag_resource("arn", tags)
    service.untag_resource_using_tags("arn", tags)
    service.list_tags_for_resource("arn").should.equal({"Tags": []})
def test_extract_tag_names():
    """extract_tag_names pulls just the key field from each tag dict, in order."""
    service = TaggingService()
    tags = [{"Key": "key1", "Value": "value1"}, {"Key": "key2", "Value": "value2"}]
    service.extract_tag_names(tags).should.equal(["key1", "key2"])