commit 40208363be: Merge branch 'master' into feature-apigw-authorizers
@@ -26,11 +26,12 @@ install:
   fi
   docker run --rm -t --name motoserver -e TEST_SERVER_MODE=true -e AWS_SECRET_ACCESS_KEY=server_secret -e AWS_ACCESS_KEY_ID=server_key -v `pwd`:/moto -p 5000:5000 -v /var/run/docker.sock:/var/run/docker.sock python:${PYTHON_DOCKER_TAG} /moto/travis_moto_server.sh &
   fi
+  travis_retry pip install -r requirements-dev.txt
   travis_retry pip install boto==2.45.0
   travis_retry pip install boto3
   travis_retry pip install dist/moto*.gz
   travis_retry pip install coveralls==1.1
-  travis_retry pip install -r requirements-dev.txt
+  travis_retry pip install coverage==4.5.4

   if [ "$TEST_SERVER_MODE" = "true" ]; then
     python wait_for.py
README.md (+10 lines)
@@ -450,6 +450,16 @@ boto3.resource(
 )
 ```

+### Caveats
+The standalone server has some caveats with some services. The following services
+require that you update your hosts file for your code to work properly:
+
+1. `s3-control`
+
+For the above services, this is required because the hostname is in the form of `AWS_ACCOUNT_ID.localhost`.
+As a result, you need to add that entry to your host file for your tests to function properly.
+
+
 ## Install


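A quick sketch (not part of the diff) of what the hosts-file requirement means in practice for `s3-control` against a standalone moto server. The port, dummy credentials, and the default moto account ID of 123456789012 below are assumptions.

```python
# Sketch only: point an s3-control client at a standalone moto server.
# botocore prefixes the account ID onto the hostname, so /etc/hosts needs a
# line such as:
#   127.0.0.1   123456789012.localhost
import boto3

s3control = boto3.client(
    "s3control",
    region_name="us-east-1",
    endpoint_url="http://localhost:5000",  # standalone moto server (assumed port)
    aws_access_key_id="testing",           # dummy credentials for the mock
    aws_secret_access_key="testing",
)
# Requests are now addressed to 123456789012.localhost:5000, which the hosts
# entry above resolves back to the local server.
```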
@@ -56,9 +56,10 @@ author = 'Steve Pulec'
 # built documents.
 #
 # The short X.Y version.
-version = '0.4.10'
+import moto
+version = moto.__version__
 # The full version, including alpha/beta/rc tags.
-release = '0.4.10'
+release = moto.__version__

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -24,8 +24,7 @@ For example, we have the following code we want to test:

 .. sourcecode:: python

-    import boto
-    from boto.s3.key import Key
+    import boto3

     class MyModel(object):
         def __init__(self, name, value):
@@ -33,11 +32,8 @@ For example, we have the following code we want to test:
             self.value = value

         def save(self):
-            conn = boto.connect_s3()
-            bucket = conn.get_bucket('mybucket')
-            k = Key(bucket)
-            k.key = self.name
-            k.set_contents_from_string(self.value)
+            s3 = boto3.client('s3', region_name='us-east-1')
+            s3.put_object(Bucket='mybucket', Key=self.name, Body=self.value)

 There are several ways to do this, but you should keep in mind that Moto creates a full, blank environment.

@@ -48,20 +44,23 @@ With a decorator wrapping, all the calls to S3 are automatically mocked out.

 .. sourcecode:: python

-    import boto
+    import boto3
     from moto import mock_s3
     from mymodule import MyModel

     @mock_s3
     def test_my_model_save():
-        conn = boto.connect_s3()
+        conn = boto3.resource('s3', region_name='us-east-1')
         # We need to create the bucket since this is all in Moto's 'virtual' AWS account
-        conn.create_bucket('mybucket')
+        conn.create_bucket(Bucket='mybucket')

         model_instance = MyModel('steve', 'is awesome')
         model_instance.save()

-        assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome'
+        body = conn.Object('mybucket', 'steve').get()[
+            'Body'].read().decode("utf-8")
+
+        assert body == 'is awesome'

 Context manager
 ~~~~~~~~~~~~~~~
@@ -72,13 +71,16 @@ Same as the Decorator, every call inside the ``with`` statement is mocked out.

     def test_my_model_save():
         with mock_s3():
-            conn = boto.connect_s3()
-            conn.create_bucket('mybucket')
+            conn = boto3.resource('s3', region_name='us-east-1')
+            conn.create_bucket(Bucket='mybucket')

             model_instance = MyModel('steve', 'is awesome')
             model_instance.save()

-            assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome'
+            body = conn.Object('mybucket', 'steve').get()[
+                'Body'].read().decode("utf-8")
+
+            assert body == 'is awesome'

 Raw
 ~~~
@@ -91,13 +93,16 @@ You can also start and stop the mocking manually.
     mock = mock_s3()
     mock.start()

-    conn = boto.connect_s3()
-    conn.create_bucket('mybucket')
+    conn = boto3.resource('s3', region_name='us-east-1')
+    conn.create_bucket(Bucket='mybucket')

     model_instance = MyModel('steve', 'is awesome')
     model_instance.save()

-    assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome'
+    body = conn.Object('mybucket', 'steve').get()[
+        'Body'].read().decode("utf-8")
+
+    assert body == 'is awesome'

     mock.stop()

@@ -84,14 +84,14 @@ class MethodResponse(BaseModel, dict):


 class Method(BaseModel, dict):
-    def __init__(self, method_type, authorization_type):
+    def __init__(self, method_type, authorization_type, **kwargs):
         super(Method, self).__init__()
         self.update(
             dict(
                 httpMethod=method_type,
                 authorizationType=authorization_type,
                 authorizerId=None,
-                apiKeyRequired=None,
+                apiKeyRequired=kwargs.get("api_key_required") or False,
                 requestParameters=None,
                 requestModels=None,
                 methodIntegration=None,
@@ -118,14 +118,15 @@ class Resource(BaseModel):
         self.api_id = api_id
         self.path_part = path_part
         self.parent_id = parent_id
-        self.resource_methods = {"GET": {}}
+        self.resource_methods = {}

     def to_dict(self):
         response = {
             "path": self.get_path(),
             "id": self.id,
-            "resourceMethods": self.resource_methods,
         }
+        if self.resource_methods:
+            response["resourceMethods"] = self.resource_methods
         if self.parent_id:
             response["parentId"] = self.parent_id
             response["pathPart"] = self.path_part
@@ -159,8 +160,12 @@ class Resource(BaseModel):
         )
         return response.status_code, response.text

-    def add_method(self, method_type, authorization_type):
-        method = Method(method_type=method_type, authorization_type=authorization_type)
+    def add_method(self, method_type, authorization_type, api_key_required):
+        method = Method(
+            method_type=method_type,
+            authorization_type=authorization_type,
+            api_key_required=api_key_required,
+        )
         self.resource_methods[method_type] = method
         return method

@@ -675,9 +680,18 @@ class APIGatewayBackend(BaseBackend):
         resource = self.get_resource(function_id, resource_id)
         return resource.get_method(method_type)

-    def create_method(self, function_id, resource_id, method_type, authorization_type):
+    def create_method(
+        self,
+        function_id,
+        resource_id,
+        method_type,
+        authorization_type,
+        api_key_required=None,
+    ):
         resource = self.get_resource(function_id, resource_id)
-        method = resource.add_method(method_type, authorization_type)
+        method = resource.add_method(
+            method_type, authorization_type, api_key_required=api_key_required
+        )
         return method

     def get_authorizer(self, restapi_id, authorizer_id):
@@ -147,8 +147,13 @@ class APIGatewayResponse(BaseResponse):
             return 200, {}, json.dumps(method)
         elif self.method == "PUT":
             authorization_type = self._get_param("authorizationType")
+            api_key_required = self._get_param("apiKeyRequired")
             method = self.backend.create_method(
-                function_id, resource_id, method_type, authorization_type
+                function_id,
+                resource_id,
+                method_type,
+                authorization_type,
+                api_key_required,
             )
             return 200, {}, json.dumps(method)

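A quick sketch (not part of the diff) of how the new `apiKeyRequired` flag surfaces through boto3 against moto's API Gateway mock; the API name and region are arbitrary.

```python
import boto3
from moto import mock_apigateway


@mock_apigateway
def test_put_method_with_api_key_required():
    client = boto3.client("apigateway", region_name="us-west-2")
    api_id = client.create_rest_api(name="my_api")["id"]
    root_id = client.get_resources(restApiId=api_id)["items"][0]["id"]

    # The PUT handler above now forwards apiKeyRequired to the backend.
    client.put_method(
        restApiId=api_id,
        resourceId=root_id,
        httpMethod="GET",
        authorizationType="NONE",
        apiKeyRequired=True,
    )
    method = client.get_method(restApiId=api_id, resourceId=root_id, httpMethod="GET")
    assert method["apiKeyRequired"] is True
```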
@@ -184,7 +184,13 @@ class LambdaResponse(BaseResponse):
                 function_name, qualifier, self.body, self.headers, response_headers
             )
             if payload:
-                return 202, response_headers, payload
+                if request.headers["X-Amz-Invocation-Type"] == "Event":
+                    status_code = 202
+                elif request.headers["X-Amz-Invocation-Type"] == "DryRun":
+                    status_code = 204
+                else:
+                    status_code = 200
+                return status_code, response_headers, payload
             else:
                 return 404, response_headers, "{}"

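A quick sketch (not part of the diff) of the status-code mapping introduced above, seen from the caller's side. The function name is hypothetical and must already exist in the mocked or real account.

```python
import boto3

client = boto3.client("lambda", region_name="us-east-1")

sync_resp = client.invoke(FunctionName="my-func", InvocationType="RequestResponse")
event_resp = client.invoke(FunctionName="my-func", InvocationType="Event")
dryrun_resp = client.invoke(FunctionName="my-func", InvocationType="DryRun")

assert sync_resp["StatusCode"] == 200    # synchronous invocation
assert event_resp["StatusCode"] == 202   # accepted for asynchronous execution
assert dryrun_resp["StatusCode"] == 204  # parameters and permissions validated only
```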
@@ -14,6 +14,7 @@ from jose import jws

 from moto.compat import OrderedDict
 from moto.core import BaseBackend, BaseModel
+from moto.core import ACCOUNT_ID as DEFAULT_ACCOUNT_ID
 from .exceptions import (
     GroupExistsException,
     NotAuthorizedError,
@@ -69,6 +70,9 @@ class CognitoIdpUserPool(BaseModel):
     def __init__(self, region, name, extended_config):
         self.region = region
         self.id = "{}_{}".format(self.region, str(uuid.uuid4().hex))
+        self.arn = "arn:aws:cognito-idp:{}:{}:userpool/{}".format(
+            self.region, DEFAULT_ACCOUNT_ID, self.id
+        )
         self.name = name
         self.status = None
         self.extended_config = extended_config or {}
@@ -91,6 +95,7 @@ class CognitoIdpUserPool(BaseModel):
     def _base_json(self):
         return {
             "Id": self.id,
+            "Arn": self.arn,
             "Name": self.name,
             "Status": self.status,
             "CreationDate": time.mktime(self.creation_date.timetuple()),
@@ -564,12 +569,17 @@ class CognitoIdpBackend(BaseBackend):
         user.groups.discard(group)

     # User
-    def admin_create_user(self, user_pool_id, username, temporary_password, attributes):
+    def admin_create_user(
+        self, user_pool_id, username, message_action, temporary_password, attributes
+    ):
         user_pool = self.user_pools.get(user_pool_id)
         if not user_pool:
             raise ResourceNotFoundError(user_pool_id)

-        if username in user_pool.users:
+        if message_action and message_action == "RESEND":
+            if username not in user_pool.users:
+                raise UserNotFoundError(username)
+        elif username in user_pool.users:
             raise UsernameExistsException(username)

         user = CognitoIdpUser(
@@ -259,10 +259,12 @@ class CognitoIdpResponse(BaseResponse):
     def admin_create_user(self):
         user_pool_id = self._get_param("UserPoolId")
         username = self._get_param("Username")
+        message_action = self._get_param("MessageAction")
        temporary_password = self._get_param("TemporaryPassword")
        user = cognitoidp_backends[self.region].admin_create_user(
            user_pool_id,
            username,
+            message_action,
            temporary_password,
            self._get_param("UserAttributes", []),
        )
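A quick sketch (not part of the diff) of the RESEND path this change enables, assuming moto's Cognito IDP mock; the pool name and username are arbitrary.

```python
import boto3
from moto import mock_cognitoidp


@mock_cognitoidp
def test_admin_create_user_resend():
    client = boto3.client("cognito-idp", region_name="us-west-2")
    pool_id = client.create_user_pool(PoolName="test-pool")["UserPool"]["Id"]

    client.admin_create_user(UserPoolId=pool_id, Username="alice")
    # With MessageAction="RESEND" the backend now re-targets the existing user
    # instead of raising UsernameExistsException.
    client.admin_create_user(
        UserPoolId=pool_id, Username="alice", MessageAction="RESEND"
    )
```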
@@ -43,7 +43,7 @@ from moto.config.exceptions import (
 )

 from moto.core import BaseBackend, BaseModel
-from moto.s3.config import s3_config_query
+from moto.s3.config import s3_account_public_access_block_query, s3_config_query

 from moto.core import ACCOUNT_ID as DEFAULT_ACCOUNT_ID

@@ -58,7 +58,10 @@ POP_STRINGS = [
 DEFAULT_PAGE_SIZE = 100

 # Map the Config resource type to a backend:
-RESOURCE_MAP = {"AWS::S3::Bucket": s3_config_query}
+RESOURCE_MAP = {
+    "AWS::S3::Bucket": s3_config_query,
+    "AWS::S3::AccountPublicAccessBlock": s3_account_public_access_block_query,
+}


 def datetime2int(date):
|
|||||||
backend_region=backend_query_region,
|
backend_region=backend_query_region,
|
||||||
)
|
)
|
||||||
|
|
||||||
result = {
|
resource_identifiers = []
|
||||||
"resourceIdentifiers": [
|
for identifier in identifiers:
|
||||||
{
|
item = {"resourceType": identifier["type"], "resourceId": identifier["id"]}
|
||||||
"resourceType": identifier["type"],
|
|
||||||
"resourceId": identifier["id"],
|
# Some resource types lack names:
|
||||||
"resourceName": identifier["name"],
|
if identifier.get("name"):
|
||||||
}
|
item["resourceName"] = identifier["name"]
|
||||||
for identifier in identifiers
|
|
||||||
]
|
resource_identifiers.append(item)
|
||||||
}
|
|
||||||
|
result = {"resourceIdentifiers": resource_identifiers}
|
||||||
|
|
||||||
if new_token:
|
if new_token:
|
||||||
result["nextToken"] = new_token
|
result["nextToken"] = new_token
|
||||||
@ -927,18 +931,21 @@ class ConfigBackend(BaseBackend):
|
|||||||
resource_region=resource_region,
|
resource_region=resource_region,
|
||||||
)
|
)
|
||||||
|
|
||||||
result = {
|
resource_identifiers = []
|
||||||
"ResourceIdentifiers": [
|
for identifier in identifiers:
|
||||||
{
|
item = {
|
||||||
"SourceAccountId": DEFAULT_ACCOUNT_ID,
|
"SourceAccountId": DEFAULT_ACCOUNT_ID,
|
||||||
"SourceRegion": identifier["region"],
|
"SourceRegion": identifier["region"],
|
||||||
"ResourceType": identifier["type"],
|
"ResourceType": identifier["type"],
|
||||||
"ResourceId": identifier["id"],
|
"ResourceId": identifier["id"],
|
||||||
"ResourceName": identifier["name"],
|
}
|
||||||
}
|
|
||||||
for identifier in identifiers
|
if identifier.get("name"):
|
||||||
]
|
item["ResourceName"] = identifier["name"]
|
||||||
}
|
|
||||||
|
resource_identifiers.append(item)
|
||||||
|
|
||||||
|
result = {"ResourceIdentifiers": resource_identifiers}
|
||||||
|
|
||||||
if new_token:
|
if new_token:
|
||||||
result["NextToken"] = new_token
|
result["NextToken"] = new_token
|
||||||
|
@ -606,12 +606,13 @@ class ConfigQueryModel(object):
|
|||||||
As such, the proper way to implement is to first obtain a full list of results from all the region backends, and then filter
|
As such, the proper way to implement is to first obtain a full list of results from all the region backends, and then filter
|
||||||
from there. It may be valuable to make this a concatenation of the region and resource name.
|
from there. It may be valuable to make this a concatenation of the region and resource name.
|
||||||
|
|
||||||
:param resource_region:
|
:param resource_ids: A list of resource IDs
|
||||||
:param resource_ids:
|
:param resource_name: The individual name of a resource
|
||||||
:param resource_name:
|
:param limit: How many per page
|
||||||
:param limit:
|
:param next_token: The item that will page on
|
||||||
:param next_token:
|
|
||||||
:param backend_region: The region for the backend to pull results from. Set to `None` if this is an aggregated query.
|
:param backend_region: The region for the backend to pull results from. Set to `None` if this is an aggregated query.
|
||||||
|
:param resource_region: The region for where the resources reside to pull results from. Set to `None` if this is a
|
||||||
|
non-aggregated query.
|
||||||
:return: This should return a list of Dicts that have the following fields:
|
:return: This should return a list of Dicts that have the following fields:
|
||||||
[
|
[
|
||||||
{
|
{
|
||||||
|
@@ -448,13 +448,18 @@ class Item(BaseModel):
         if list_append_re:
             new_value = expression_attribute_values[list_append_re.group(2).strip()]
             old_list_key = list_append_re.group(1)
-            # Get the existing value
-            old_list = self.attrs[old_list_key.split(".")[0]]
-            if "." in old_list_key:
-                # Value is nested inside a map - find the appropriate child attr
-                old_list = old_list.child_attr(
-                    ".".join(old_list_key.split(".")[1:])
-                )
+            # old_key could be a function itself (if_not_exists)
+            if old_list_key.startswith("if_not_exists"):
+                old_list = DynamoType(
+                    expression_attribute_values[self._get_default(old_list_key)]
+                )
+            else:
+                old_list = self.attrs[old_list_key.split(".")[0]]
+                if "." in old_list_key:
+                    # Value is nested inside a map - find the appropriate child attr
+                    old_list = old_list.child_attr(
+                        ".".join(old_list_key.split(".")[1:])
+                    )
             if not old_list.is_list():
                 raise ParamValidationError
             old_list.value.extend([DynamoType(v) for v in new_value["L"]])
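A quick sketch (not part of the diff) of the UpdateExpression shape the new branch handles: appending to a list that may not exist yet. The table and attribute names are hypothetical.

```python
import boto3

dynamodb = boto3.client("dynamodb", region_name="us-east-1")

dynamodb.update_item(
    TableName="my-table",
    Key={"id": {"S": "item1"}},
    # If "events" does not exist yet, fall back to the empty list before
    # appending instead of failing the whole update.
    UpdateExpression="SET events = list_append(if_not_exists(events, :empty), :new)",
    ExpressionAttributeValues={
        ":empty": {"L": []},
        ":new": {"L": [{"S": "created"}]},
    },
)
```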
@@ -27,6 +27,7 @@ from moto.core.utils import (
     iso_8601_datetime_with_milliseconds,
     camelcase_to_underscores,
 )
+from moto.iam.models import ACCOUNT_ID
 from .exceptions import (
     CidrLimitExceeded,
     DependencyViolationError,
@@ -155,7 +156,7 @@ AMIS = _load_resource(
 )


-OWNER_ID = "111122223333"
+OWNER_ID = ACCOUNT_ID


 def utc_date_and_time():
@@ -1341,7 +1342,7 @@ class AmiBackend(object):
             source_ami=None,
             name=name,
             description=description,
-            owner_id=context.get_current_user() if context else OWNER_ID,
+            owner_id=OWNER_ID,
         )
         self.amis[ami_id] = ami
         return ami
@@ -1392,14 +1393,7 @@ class AmiBackend(object):
         # Limit by owner ids
         if owners:
             # support filtering by Owners=['self']
-            owners = list(
-                map(
-                    lambda o: context.get_current_user()
-                    if context and o == "self"
-                    else o,
-                    owners,
-                )
-            )
+            owners = list(map(lambda o: OWNER_ID if o == "self" else o, owners,))
             images = [ami for ami in images if ami.owner_id in owners]

         # Generic filters
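A quick sketch (not part of the diff) of the `Owners=["self"]` filter that now maps onto the shared account ID, under moto's EC2 mock; the image ID and name are arbitrary.

```python
import boto3
from moto import mock_ec2


@mock_ec2
def test_describe_images_owned_by_self():
    client = boto3.client("ec2", region_name="us-east-1")
    reservation = client.run_instances(ImageId="ami-12345678", MinCount=1, MaxCount=1)
    instance_id = reservation["Instances"][0]["InstanceId"]
    client.create_image(InstanceId=instance_id, Name="my-image")

    images = client.describe_images(Owners=["self"])["Images"]
    assert images and all(image["OwnerId"] == "123456789012" for image in images)
```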
@@ -6,6 +6,7 @@ from boto3 import Session
 from moto.core.exceptions import JsonRESTError
 from moto.core import BaseBackend, BaseModel
 from moto.sts.models import ACCOUNT_ID
+from moto.utilities.tagging_service import TaggingService


 class Rule(BaseModel):
@@ -104,6 +105,7 @@ class EventsBackend(BaseBackend):
         self.region_name = region_name
         self.event_buses = {}
         self.event_sources = {}
+        self.tagger = TaggingService()

         self._add_default_event_bus()

@@ -141,6 +143,9 @@ class EventsBackend(BaseBackend):

     def delete_rule(self, name):
         self.rules_order.pop(self.rules_order.index(name))
+        arn = self.rules.get(name).arn
+        if self.tagger.has_tags(arn):
+            self.tagger.delete_all_tags_for_resource(arn)
         return self.rules.pop(name) is not None

     def describe_rule(self, name):
@@ -361,6 +366,32 @@ class EventsBackend(BaseBackend):

         self.event_buses.pop(name, None)

+    def list_tags_for_resource(self, arn):
+        name = arn.split("/")[-1]
+        if name in self.rules:
+            return self.tagger.list_tags_for_resource(self.rules[name].arn)
+        raise JsonRESTError(
+            "ResourceNotFoundException", "An entity that you specified does not exist."
+        )
+
+    def tag_resource(self, arn, tags):
+        name = arn.split("/")[-1]
+        if name in self.rules:
+            self.tagger.tag_resource(self.rules[name].arn, tags)
+            return {}
+        raise JsonRESTError(
+            "ResourceNotFoundException", "An entity that you specified does not exist."
+        )
+
+    def untag_resource(self, arn, tag_names):
+        name = arn.split("/")[-1]
+        if name in self.rules:
+            self.tagger.untag_resource_using_names(self.rules[name].arn, tag_names)
+            return {}
+        raise JsonRESTError(
+            "ResourceNotFoundException", "An entity that you specified does not exist."
+        )
+

 events_backends = {}
 for region in Session().get_available_regions("events"):
@@ -297,3 +297,26 @@ class EventsHandler(BaseResponse):
         self.events_backend.delete_event_bus(name)

         return "", self.response_headers
+
+    def list_tags_for_resource(self):
+        arn = self._get_param("ResourceARN")
+
+        result = self.events_backend.list_tags_for_resource(arn)
+
+        return json.dumps(result), self.response_headers
+
+    def tag_resource(self):
+        arn = self._get_param("ResourceARN")
+        tags = self._get_param("Tags")
+
+        result = self.events_backend.tag_resource(arn, tags)
+
+        return json.dumps(result), self.response_headers
+
+    def untag_resource(self):
+        arn = self._get_param("ResourceARN")
+        tags = self._get_param("TagKeys")
+
+        result = self.events_backend.untag_resource(arn, tags)
+
+        return json.dumps(result), self.response_headers
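A quick sketch (not part of the diff) of the tagging round-trip these handlers enable, under moto's EventBridge mock; the rule name is arbitrary.

```python
import boto3
from moto import mock_events


@mock_events
def test_rule_tagging_round_trip():
    client = boto3.client("events", region_name="us-east-1")
    arn = client.put_rule(Name="my-rule", ScheduleExpression="rate(5 minutes)")["RuleArn"]

    client.tag_resource(ResourceARN=arn, Tags=[{"Key": "env", "Value": "test"}])
    assert client.list_tags_for_resource(ResourceARN=arn)["Tags"] == [
        {"Key": "env", "Value": "test"}
    ]

    client.untag_resource(ResourceARN=arn, TagKeys=["env"])
    assert client.list_tags_for_resource(ResourceARN=arn)["Tags"] == []
```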
@@ -22,6 +22,15 @@ class InvalidRequestException(IoTClientError):
         )


+class InvalidStateTransitionException(IoTClientError):
+    def __init__(self, msg=None):
+        self.code = 409
+        super(InvalidStateTransitionException, self).__init__(
+            "InvalidStateTransitionException",
+            msg or "An attempt was made to change to an invalid state.",
+        )
+
+
 class VersionConflictException(IoTClientError):
     def __init__(self, name):
         self.code = 409
@@ -17,6 +17,7 @@ from .exceptions import (
     DeleteConflictException,
     ResourceNotFoundException,
     InvalidRequestException,
+    InvalidStateTransitionException,
     VersionConflictException,
 )

@@ -29,7 +30,7 @@ class FakeThing(BaseModel):
         self.attributes = attributes
         self.arn = "arn:aws:iot:%s:1:thing/%s" % (self.region_name, thing_name)
         self.version = 1
-        # TODO: we need to handle 'version'?
+        # TODO: we need to handle "version"?

         # for iot-data
         self.thing_shadow = None
@@ -174,18 +175,19 @@ class FakeCertificate(BaseModel):


 class FakePolicy(BaseModel):
-    def __init__(self, name, document, region_name):
+    def __init__(self, name, document, region_name, default_version_id="1"):
         self.name = name
         self.document = document
         self.arn = "arn:aws:iot:%s:1:policy/%s" % (region_name, name)
-        self.version = "1"  # TODO: handle version
+        self.default_version_id = default_version_id
+        self.versions = [FakePolicyVersion(self.name, document, True, region_name)]

     def to_get_dict(self):
         return {
             "policyName": self.name,
             "policyArn": self.arn,
             "policyDocument": self.document,
-            "defaultVersionId": self.version,
+            "defaultVersionId": self.default_version_id,
         }

     def to_dict_at_creation(self):
@@ -193,13 +195,52 @@ class FakePolicy(BaseModel):
             "policyName": self.name,
             "policyArn": self.arn,
             "policyDocument": self.document,
-            "policyVersionId": self.version,
+            "policyVersionId": self.default_version_id,
         }

     def to_dict(self):
         return {"policyName": self.name, "policyArn": self.arn}


+class FakePolicyVersion(object):
+    def __init__(self, policy_name, document, is_default, region_name):
+        self.name = policy_name
+        self.arn = "arn:aws:iot:%s:1:policy/%s" % (region_name, policy_name)
+        self.document = document or {}
+        self.is_default = is_default
+        self.version_id = "1"
+
+        self.create_datetime = time.mktime(datetime(2015, 1, 1).timetuple())
+        self.last_modified_datetime = time.mktime(datetime(2015, 1, 2).timetuple())
+
+    def to_get_dict(self):
+        return {
+            "policyName": self.name,
+            "policyArn": self.arn,
+            "policyDocument": self.document,
+            "policyVersionId": self.version_id,
+            "isDefaultVersion": self.is_default,
+            "creationDate": self.create_datetime,
+            "lastModifiedDate": self.last_modified_datetime,
+            "generationId": self.version_id,
+        }
+
+    def to_dict_at_creation(self):
+        return {
+            "policyArn": self.arn,
+            "policyDocument": self.document,
+            "policyVersionId": self.version_id,
+            "isDefaultVersion": self.is_default,
+        }
+
+    def to_dict(self):
+        return {
+            "versionId": self.version_id,
+            "isDefaultVersion": self.is_default,
+            "createDate": self.create_datetime,
+        }
+
+
 class FakeJob(BaseModel):
     JOB_ID_REGEX_PATTERN = "[a-zA-Z0-9_-]"
     JOB_ID_REGEX = re.compile(JOB_ID_REGEX_PATTERN)
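A quick sketch (not part of the diff) of the policy-version lifecycle the new model supports, through boto3's IoT client under moto's mock; the policy name and documents are arbitrary.

```python
import json

import boto3
from moto import mock_iot


@mock_iot
def test_policy_version_lifecycle():
    client = boto3.client("iot", region_name="ap-northeast-1")
    v1 = json.dumps({"Version": "2012-10-17", "Statement": []})
    client.create_policy(policyName="my-policy", policyDocument=v1)

    v2 = json.dumps(
        {
            "Version": "2012-10-17",
            "Statement": [{"Effect": "Allow", "Action": "iot:*", "Resource": "*"}],
        }
    )
    new_version = client.create_policy_version(
        policyName="my-policy", policyDocument=v2, setAsDefault=True
    )

    policy = client.get_policy(policyName="my-policy")
    assert policy["defaultVersionId"] == new_version["policyVersionId"]
```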
@@ -226,12 +267,14 @@ class FakeJob(BaseModel):
         self.targets = targets
         self.document_source = document_source
         self.document = document
+        self.force = False
         self.description = description
         self.presigned_url_config = presigned_url_config
         self.target_selection = target_selection
         self.job_executions_rollout_config = job_executions_rollout_config
-        self.status = None  # IN_PROGRESS | CANCELED | COMPLETED
+        self.status = "QUEUED"  # IN_PROGRESS | CANCELED | COMPLETED
         self.comment = None
+        self.reason_code = None
         self.created_at = time.mktime(datetime(2015, 1, 1).timetuple())
         self.last_updated_at = time.mktime(datetime(2015, 1, 1).timetuple())
         self.completed_at = None
@@ -258,9 +301,11 @@ class FakeJob(BaseModel):
             "jobExecutionsRolloutConfig": self.job_executions_rollout_config,
             "status": self.status,
             "comment": self.comment,
+            "forceCanceled": self.force,
+            "reasonCode": self.reason_code,
             "createdAt": self.created_at,
             "lastUpdatedAt": self.last_updated_at,
-            "completedAt": self.completedAt,
+            "completedAt": self.completed_at,
             "jobProcessDetails": self.job_process_details,
             "documentParameters": self.document_parameters,
             "document": self.document,
@ -275,12 +320,67 @@ class FakeJob(BaseModel):
|
|||||||
return regex_match and length_match
|
return regex_match and length_match
|
||||||
|
|
||||||
|
|
||||||
|
class FakeJobExecution(BaseModel):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
job_id,
|
||||||
|
thing_arn,
|
||||||
|
status="QUEUED",
|
||||||
|
force_canceled=False,
|
||||||
|
status_details_map={},
|
||||||
|
):
|
||||||
|
self.job_id = job_id
|
||||||
|
self.status = status # IN_PROGRESS | CANCELED | COMPLETED
|
||||||
|
self.force_canceled = force_canceled
|
||||||
|
self.status_details_map = status_details_map
|
||||||
|
self.thing_arn = thing_arn
|
||||||
|
self.queued_at = time.mktime(datetime(2015, 1, 1).timetuple())
|
||||||
|
self.started_at = time.mktime(datetime(2015, 1, 1).timetuple())
|
||||||
|
self.last_updated_at = time.mktime(datetime(2015, 1, 1).timetuple())
|
||||||
|
self.execution_number = 123
|
||||||
|
self.version_number = 123
|
||||||
|
self.approximate_seconds_before_time_out = 123
|
||||||
|
|
||||||
|
def to_get_dict(self):
|
||||||
|
obj = {
|
||||||
|
"jobId": self.job_id,
|
||||||
|
"status": self.status,
|
||||||
|
"forceCanceled": self.force_canceled,
|
||||||
|
"statusDetails": {"detailsMap": self.status_details_map},
|
||||||
|
"thingArn": self.thing_arn,
|
||||||
|
"queuedAt": self.queued_at,
|
||||||
|
"startedAt": self.started_at,
|
||||||
|
"lastUpdatedAt": self.last_updated_at,
|
||||||
|
"executionNumber": self.execution_number,
|
||||||
|
"versionNumber": self.version_number,
|
||||||
|
"approximateSecondsBeforeTimedOut": self.approximate_seconds_before_time_out,
|
||||||
|
}
|
||||||
|
|
||||||
|
return obj
|
||||||
|
|
||||||
|
def to_dict(self):
|
||||||
|
obj = {
|
||||||
|
"jobId": self.job_id,
|
||||||
|
"thingArn": self.thing_arn,
|
||||||
|
"jobExecutionSummary": {
|
||||||
|
"status": self.status,
|
||||||
|
"queuedAt": self.queued_at,
|
||||||
|
"startedAt": self.started_at,
|
||||||
|
"lastUpdatedAt": self.last_updated_at,
|
||||||
|
"executionNumber": self.execution_number,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
return obj
|
||||||
|
|
||||||
|
|
||||||
class IoTBackend(BaseBackend):
|
class IoTBackend(BaseBackend):
|
||||||
def __init__(self, region_name=None):
|
def __init__(self, region_name=None):
|
||||||
super(IoTBackend, self).__init__()
|
super(IoTBackend, self).__init__()
|
||||||
self.region_name = region_name
|
self.region_name = region_name
|
||||||
self.things = OrderedDict()
|
self.things = OrderedDict()
|
||||||
self.jobs = OrderedDict()
|
self.jobs = OrderedDict()
|
||||||
|
self.job_executions = OrderedDict()
|
||||||
self.thing_types = OrderedDict()
|
self.thing_types = OrderedDict()
|
||||||
self.thing_groups = OrderedDict()
|
self.thing_groups = OrderedDict()
|
||||||
self.certificates = OrderedDict()
|
self.certificates = OrderedDict()
|
||||||
@ -535,6 +635,28 @@ class IoTBackend(BaseBackend):
|
|||||||
self.policies[policy.name] = policy
|
self.policies[policy.name] = policy
|
||||||
return policy
|
return policy
|
||||||
|
|
||||||
|
def attach_policy(self, policy_name, target):
|
||||||
|
principal = self._get_principal(target)
|
||||||
|
policy = self.get_policy(policy_name)
|
||||||
|
k = (target, policy_name)
|
||||||
|
if k in self.principal_policies:
|
||||||
|
return
|
||||||
|
self.principal_policies[k] = (principal, policy)
|
||||||
|
|
||||||
|
def detach_policy(self, policy_name, target):
|
||||||
|
# this may raises ResourceNotFoundException
|
||||||
|
self._get_principal(target)
|
||||||
|
self.get_policy(policy_name)
|
||||||
|
|
||||||
|
k = (target, policy_name)
|
||||||
|
if k not in self.principal_policies:
|
||||||
|
raise ResourceNotFoundException()
|
||||||
|
del self.principal_policies[k]
|
||||||
|
|
||||||
|
def list_attached_policies(self, target):
|
||||||
|
policies = [v[1] for k, v in self.principal_policies.items() if k[0] == target]
|
||||||
|
return policies
|
||||||
|
|
||||||
def list_policies(self):
|
def list_policies(self):
|
||||||
policies = self.policies.values()
|
policies = self.policies.values()
|
||||||
return policies
|
return policies
|
||||||
@ -559,6 +681,60 @@ class IoTBackend(BaseBackend):
|
|||||||
policy = self.get_policy(policy_name)
|
policy = self.get_policy(policy_name)
|
||||||
del self.policies[policy.name]
|
del self.policies[policy.name]
|
||||||
|
|
||||||
|
def create_policy_version(self, policy_name, policy_document, set_as_default):
|
||||||
|
policy = self.get_policy(policy_name)
|
||||||
|
if not policy:
|
||||||
|
raise ResourceNotFoundException()
|
||||||
|
version = FakePolicyVersion(
|
||||||
|
policy_name, policy_document, set_as_default, self.region_name
|
||||||
|
)
|
||||||
|
policy.versions.append(version)
|
||||||
|
version.version_id = "{0}".format(len(policy.versions))
|
||||||
|
if set_as_default:
|
||||||
|
self.set_default_policy_version(policy_name, version.version_id)
|
||||||
|
return version
|
||||||
|
|
||||||
|
def set_default_policy_version(self, policy_name, version_id):
|
||||||
|
policy = self.get_policy(policy_name)
|
||||||
|
if not policy:
|
||||||
|
raise ResourceNotFoundException()
|
||||||
|
for version in policy.versions:
|
||||||
|
if version.version_id == version_id:
|
||||||
|
version.is_default = True
|
||||||
|
policy.default_version_id = version.version_id
|
||||||
|
policy.document = version.document
|
||||||
|
else:
|
||||||
|
version.is_default = False
|
||||||
|
|
||||||
|
def get_policy_version(self, policy_name, version_id):
|
||||||
|
policy = self.get_policy(policy_name)
|
||||||
|
if not policy:
|
||||||
|
raise ResourceNotFoundException()
|
||||||
|
for version in policy.versions:
|
||||||
|
if version.version_id == version_id:
|
||||||
|
return version
|
||||||
|
raise ResourceNotFoundException()
|
||||||
|
|
||||||
|
def list_policy_versions(self, policy_name):
|
||||||
|
policy = self.get_policy(policy_name)
|
||||||
|
if not policy:
|
||||||
|
raise ResourceNotFoundException()
|
||||||
|
return policy.versions
|
||||||
|
|
||||||
|
def delete_policy_version(self, policy_name, version_id):
|
||||||
|
policy = self.get_policy(policy_name)
|
||||||
|
if not policy:
|
||||||
|
raise ResourceNotFoundException()
|
||||||
|
if version_id == policy.default_version_id:
|
||||||
|
raise InvalidRequestException(
|
||||||
|
"Cannot delete the default version of a policy"
|
||||||
|
)
|
||||||
|
for i, v in enumerate(policy.versions):
|
||||||
|
if v.version_id == version_id:
|
||||||
|
del policy.versions[i]
|
||||||
|
return
|
||||||
|
raise ResourceNotFoundException()
|
||||||
|
|
||||||
def _get_principal(self, principal_arn):
|
def _get_principal(self, principal_arn):
|
||||||
"""
|
"""
|
||||||
raise ResourceNotFoundException
|
raise ResourceNotFoundException
|
||||||
@ -574,14 +750,6 @@ class IoTBackend(BaseBackend):
|
|||||||
pass
|
pass
|
||||||
raise ResourceNotFoundException()
|
raise ResourceNotFoundException()
|
||||||
|
|
||||||
def attach_policy(self, policy_name, target):
|
|
||||||
principal = self._get_principal(target)
|
|
||||||
policy = self.get_policy(policy_name)
|
|
||||||
k = (target, policy_name)
|
|
||||||
if k in self.principal_policies:
|
|
||||||
return
|
|
||||||
self.principal_policies[k] = (principal, policy)
|
|
||||||
|
|
||||||
def attach_principal_policy(self, policy_name, principal_arn):
|
def attach_principal_policy(self, policy_name, principal_arn):
|
||||||
principal = self._get_principal(principal_arn)
|
principal = self._get_principal(principal_arn)
|
||||||
policy = self.get_policy(policy_name)
|
policy = self.get_policy(policy_name)
|
||||||
@ -590,15 +758,6 @@ class IoTBackend(BaseBackend):
|
|||||||
return
|
return
|
||||||
self.principal_policies[k] = (principal, policy)
|
self.principal_policies[k] = (principal, policy)
|
||||||
|
|
||||||
def detach_policy(self, policy_name, target):
|
|
||||||
# this may raises ResourceNotFoundException
|
|
||||||
self._get_principal(target)
|
|
||||||
self.get_policy(policy_name)
|
|
||||||
k = (target, policy_name)
|
|
||||||
if k not in self.principal_policies:
|
|
||||||
raise ResourceNotFoundException()
|
|
||||||
del self.principal_policies[k]
|
|
||||||
|
|
||||||
def detach_principal_policy(self, policy_name, principal_arn):
|
def detach_principal_policy(self, policy_name, principal_arn):
|
||||||
# this may raises ResourceNotFoundException
|
# this may raises ResourceNotFoundException
|
||||||
self._get_principal(principal_arn)
|
self._get_principal(principal_arn)
|
||||||
@ -819,11 +978,187 @@ class IoTBackend(BaseBackend):
|
|||||||
self.region_name,
|
self.region_name,
|
||||||
)
|
)
|
||||||
self.jobs[job_id] = job
|
self.jobs[job_id] = job
|
||||||
|
|
||||||
|
for thing_arn in targets:
|
||||||
|
thing_name = thing_arn.split(":")[-1].split("/")[-1]
|
||||||
|
job_execution = FakeJobExecution(job_id, thing_arn)
|
||||||
|
self.job_executions[(job_id, thing_name)] = job_execution
|
||||||
return job.job_arn, job_id, description
|
return job.job_arn, job_id, description
|
||||||
|
|
||||||
def describe_job(self, job_id):
|
def describe_job(self, job_id):
|
||||||
|
jobs = [_ for _ in self.jobs.values() if _.job_id == job_id]
|
||||||
|
if len(jobs) == 0:
|
||||||
|
raise ResourceNotFoundException()
|
||||||
|
return jobs[0]
|
||||||
|
|
||||||
|
def delete_job(self, job_id, force):
|
||||||
|
job = self.jobs[job_id]
|
||||||
|
|
||||||
|
if job.status == "IN_PROGRESS" and force:
|
||||||
|
del self.jobs[job_id]
|
||||||
|
elif job.status != "IN_PROGRESS":
|
||||||
|
del self.jobs[job_id]
|
||||||
|
else:
|
||||||
|
raise InvalidStateTransitionException()
|
||||||
|
|
||||||
|
def cancel_job(self, job_id, reason_code, comment, force):
|
||||||
|
job = self.jobs[job_id]
|
||||||
|
|
||||||
|
job.reason_code = reason_code if reason_code is not None else job.reason_code
|
||||||
|
job.comment = comment if comment is not None else job.comment
|
||||||
|
job.force = force if force is not None and force != job.force else job.force
|
||||||
|
job.status = "CANCELED"
|
||||||
|
|
||||||
|
if job.status == "IN_PROGRESS" and force:
|
||||||
|
self.jobs[job_id] = job
|
||||||
|
elif job.status != "IN_PROGRESS":
|
||||||
|
self.jobs[job_id] = job
|
||||||
|
else:
|
||||||
|
raise InvalidStateTransitionException()
|
||||||
|
|
||||||
|
return job
|
||||||
|
|
||||||
|
def get_job_document(self, job_id):
|
||||||
return self.jobs[job_id]
|
return self.jobs[job_id]
|
||||||
|
|
||||||
|
def list_jobs(
|
||||||
|
self,
|
||||||
|
status,
|
||||||
|
target_selection,
|
||||||
|
max_results,
|
||||||
|
token,
|
||||||
|
thing_group_name,
|
||||||
|
thing_group_id,
|
||||||
|
):
|
||||||
|
# TODO: implement filters
|
||||||
|
all_jobs = [_.to_dict() for _ in self.jobs.values()]
|
||||||
|
filtered_jobs = all_jobs
|
||||||
|
|
||||||
|
if token is None:
|
||||||
|
jobs = filtered_jobs[0:max_results]
|
||||||
|
next_token = str(max_results) if len(filtered_jobs) > max_results else None
|
||||||
|
else:
|
||||||
|
token = int(token)
|
||||||
|
jobs = filtered_jobs[token : token + max_results]
|
||||||
|
next_token = (
|
||||||
|
str(token + max_results)
|
||||||
|
if len(filtered_jobs) > token + max_results
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
|
||||||
|
return jobs, next_token
|
||||||
|
|
||||||
|
def describe_job_execution(self, job_id, thing_name, execution_number):
|
||||||
|
try:
|
||||||
|
job_execution = self.job_executions[(job_id, thing_name)]
|
||||||
|
except KeyError:
|
||||||
|
raise ResourceNotFoundException()
|
||||||
|
|
||||||
|
if job_execution is None or (
|
||||||
|
execution_number is not None
|
||||||
|
and job_execution.execution_number != execution_number
|
||||||
|
):
|
||||||
|
raise ResourceNotFoundException()
|
||||||
|
|
||||||
|
return job_execution
|
||||||
|
|
||||||
|
def cancel_job_execution(
|
||||||
|
self, job_id, thing_name, force, expected_version, status_details
|
||||||
|
):
|
||||||
|
job_execution = self.job_executions[(job_id, thing_name)]
|
||||||
|
|
||||||
|
if job_execution is None:
|
||||||
|
raise ResourceNotFoundException()
|
||||||
|
|
||||||
|
job_execution.force_canceled = (
|
||||||
|
force if force is not None else job_execution.force_canceled
|
||||||
|
)
|
||||||
|
# TODO: implement expected_version and status_details (at most 10 can be specified)
|
||||||
|
|
||||||
|
if job_execution.status == "IN_PROGRESS" and force:
|
||||||
|
job_execution.status = "CANCELED"
|
||||||
|
self.job_executions[(job_id, thing_name)] = job_execution
|
||||||
|
elif job_execution.status != "IN_PROGRESS":
|
||||||
|
job_execution.status = "CANCELED"
|
||||||
|
self.job_executions[(job_id, thing_name)] = job_execution
|
||||||
|
else:
|
||||||
|
raise InvalidStateTransitionException()
|
||||||
|
|
||||||
|
def delete_job_execution(self, job_id, thing_name, execution_number, force):
|
||||||
|
job_execution = self.job_executions[(job_id, thing_name)]
|
||||||
|
|
||||||
|
if job_execution.execution_number != execution_number:
|
||||||
|
raise ResourceNotFoundException()
|
||||||
|
|
||||||
|
if job_execution.status == "IN_PROGRESS" and force:
|
||||||
|
del self.job_executions[(job_id, thing_name)]
|
||||||
|
elif job_execution.status != "IN_PROGRESS":
|
||||||
|
del self.job_executions[(job_id, thing_name)]
|
||||||
|
else:
|
||||||
|
raise InvalidStateTransitionException()
|
||||||
|
|
||||||
|
def list_job_executions_for_job(self, job_id, status, max_results, next_token):
|
||||||
|
job_executions = [
|
||||||
|
self.job_executions[je].to_dict()
|
||||||
|
for je in self.job_executions
|
||||||
|
if je[0] == job_id
|
||||||
|
]
|
||||||
|
|
||||||
|
if status is not None:
|
||||||
|
job_executions = list(
|
||||||
|
filter(
|
||||||
|
lambda elem: status in elem["status"] and elem["status"] == status,
|
||||||
|
job_executions,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
token = next_token
|
||||||
|
if token is None:
|
||||||
|
job_executions = job_executions[0:max_results]
|
||||||
|
next_token = str(max_results) if len(job_executions) > max_results else None
|
||||||
|
else:
|
||||||
|
token = int(token)
|
||||||
|
job_executions = job_executions[token : token + max_results]
|
||||||
|
next_token = (
|
||||||
|
str(token + max_results)
|
||||||
|
if len(job_executions) > token + max_results
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
|
||||||
|
return job_executions, next_token
|
||||||
|
|
||||||
|
def list_job_executions_for_thing(
|
||||||
|
self, thing_name, status, max_results, next_token
|
||||||
|
):
|
||||||
|
job_executions = [
|
||||||
|
self.job_executions[je].to_dict()
|
||||||
|
for je in self.job_executions
|
||||||
|
if je[1] == thing_name
|
||||||
|
]
|
||||||
|
|
||||||
|
if status is not None:
|
||||||
|
job_executions = list(
|
||||||
|
filter(
|
||||||
|
lambda elem: status in elem["status"] and elem["status"] == status,
|
||||||
|
job_executions,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
token = next_token
|
||||||
|
if token is None:
|
||||||
|
job_executions = job_executions[0:max_results]
|
||||||
|
next_token = str(max_results) if len(job_executions) > max_results else None
|
||||||
|
else:
|
||||||
|
token = int(token)
|
||||||
|
job_executions = job_executions[token : token + max_results]
|
||||||
|
next_token = (
|
||||||
|
str(token + max_results)
|
||||||
|
if len(job_executions) > token + max_results
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
|
||||||
|
return job_executions, next_token
|
||||||
|
|
||||||
|
|
||||||
iot_backends = {}
|
iot_backends = {}
|
||||||
for region in Session().get_available_regions("iot"):
|
for region in Session().get_available_regions("iot"):
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
import json
|
import json
|
||||||
|
from six.moves.urllib.parse import unquote
|
||||||
|
|
||||||
from moto.core.responses import BaseResponse
|
from moto.core.responses import BaseResponse
|
||||||
from .models import iot_backends
|
from .models import iot_backends
|
||||||
@ -141,6 +142,8 @@ class IoTResponse(BaseResponse):
|
|||||||
createdAt=job.created_at,
|
createdAt=job.created_at,
|
||||||
description=job.description,
|
description=job.description,
|
||||||
documentParameters=job.document_parameters,
|
documentParameters=job.document_parameters,
|
||||||
|
forceCanceled=job.force,
|
||||||
|
reasonCode=job.reason_code,
|
||||||
jobArn=job.job_arn,
|
jobArn=job.job_arn,
|
||||||
jobExecutionsRolloutConfig=job.job_executions_rollout_config,
|
jobExecutionsRolloutConfig=job.job_executions_rollout_config,
|
||||||
jobId=job.job_id,
|
jobId=job.job_id,
|
||||||
@ -154,6 +157,127 @@ class IoTResponse(BaseResponse):
|
|||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def delete_job(self):
|
||||||
|
job_id = self._get_param("jobId")
|
||||||
|
force = self._get_bool_param("force")
|
||||||
|
|
||||||
|
self.iot_backend.delete_job(job_id=job_id, force=force)
|
||||||
|
|
||||||
|
return json.dumps(dict())
|
||||||
|
|
||||||
|
def cancel_job(self):
|
||||||
|
job_id = self._get_param("jobId")
|
||||||
|
reason_code = self._get_param("reasonCode")
|
||||||
|
comment = self._get_param("comment")
|
||||||
|
force = self._get_bool_param("force")
|
||||||
|
|
||||||
|
job = self.iot_backend.cancel_job(
|
||||||
|
job_id=job_id, reason_code=reason_code, comment=comment, force=force
|
||||||
|
)
|
||||||
|
|
||||||
|
return json.dumps(job.to_dict())
|
||||||
|
|
||||||
|
def get_job_document(self):
|
||||||
|
job = self.iot_backend.get_job_document(job_id=self._get_param("jobId"))
|
||||||
|
|
||||||
|
if job.document is not None:
|
||||||
|
return json.dumps({"document": job.document})
|
||||||
|
else:
|
||||||
|
# job.document_source is not None:
|
||||||
|
# TODO: needs to be implemented to get document_source's content from S3
|
||||||
|
return json.dumps({"document": ""})
|
||||||
|
|
||||||
|
def list_jobs(self):
|
||||||
|
status = (self._get_param("status"),)
|
||||||
|
target_selection = (self._get_param("targetSelection"),)
|
||||||
|
max_results = self._get_int_param(
|
||||||
|
"maxResults", 50
|
||||||
|
) # not the default, but makes testing easier
|
||||||
|
previous_next_token = self._get_param("nextToken")
|
||||||
|
thing_group_name = (self._get_param("thingGroupName"),)
|
||||||
|
thing_group_id = self._get_param("thingGroupId")
|
||||||
|
jobs, next_token = self.iot_backend.list_jobs(
|
||||||
|
status=status,
|
||||||
|
target_selection=target_selection,
|
||||||
|
max_results=max_results,
|
||||||
|
token=previous_next_token,
|
||||||
|
thing_group_name=thing_group_name,
|
||||||
|
thing_group_id=thing_group_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
return json.dumps(dict(jobs=jobs, nextToken=next_token))
|
||||||
|
|
||||||
|
def describe_job_execution(self):
|
||||||
|
job_id = self._get_param("jobId")
|
||||||
|
thing_name = self._get_param("thingName")
|
||||||
|
execution_number = self._get_int_param("executionNumber")
|
||||||
|
job_execution = self.iot_backend.describe_job_execution(
|
||||||
|
job_id=job_id, thing_name=thing_name, execution_number=execution_number
|
||||||
|
)
|
||||||
|
|
||||||
|
return json.dumps(dict(execution=job_execution.to_get_dict()))
|
||||||
|
|
||||||
|
def cancel_job_execution(self):
|
||||||
|
job_id = self._get_param("jobId")
|
||||||
|
thing_name = self._get_param("thingName")
|
||||||
|
force = self._get_bool_param("force")
|
||||||
|
expected_version = self._get_int_param("expectedVersion")
|
||||||
|
status_details = self._get_param("statusDetails")
|
||||||
|
|
||||||
|
self.iot_backend.cancel_job_execution(
|
||||||
|
job_id=job_id,
|
||||||
|
thing_name=thing_name,
|
||||||
|
force=force,
|
||||||
|
expected_version=expected_version,
|
||||||
|
status_details=status_details,
|
||||||
|
)
|
||||||
|
|
||||||
|
return json.dumps(dict())
|
||||||
|
|
||||||
|
def delete_job_execution(self):
|
||||||
|
job_id = self._get_param("jobId")
|
||||||
|
thing_name = self._get_param("thingName")
|
||||||
|
execution_number = self._get_int_param("executionNumber")
|
||||||
|
force = self._get_bool_param("force")
|
||||||
|
|
||||||
|
self.iot_backend.delete_job_execution(
|
||||||
|
job_id=job_id,
|
||||||
|
thing_name=thing_name,
|
||||||
|
execution_number=execution_number,
|
||||||
|
force=force,
|
||||||
|
)
|
||||||
|
|
||||||
|
return json.dumps(dict())
|
||||||
|
|
||||||
|
def list_job_executions_for_job(self):
|
||||||
|
job_id = self._get_param("jobId")
|
||||||
|
status = self._get_param("status")
|
||||||
|
max_results = self._get_int_param(
|
||||||
|
"maxResults", 50
|
||||||
|
) # not the default, but makes testing easier
|
||||||
|
next_token = self._get_param("nextToken")
|
||||||
|
job_executions, next_token = self.iot_backend.list_job_executions_for_job(
|
||||||
|
job_id=job_id, status=status, max_results=max_results, next_token=next_token
|
||||||
|
)
|
||||||
|
|
||||||
|
return json.dumps(dict(executionSummaries=job_executions, nextToken=next_token))
|
||||||
|
|
||||||
|
def list_job_executions_for_thing(self):
|
||||||
|
thing_name = self._get_param("thingName")
|
||||||
|
status = self._get_param("status")
|
||||||
|
max_results = self._get_int_param(
|
||||||
|
"maxResults", 50
|
||||||
|
) # not the default, but makes testing easier
|
||||||
|
next_token = self._get_param("nextToken")
|
||||||
|
job_executions, next_token = self.iot_backend.list_job_executions_for_thing(
|
||||||
|
thing_name=thing_name,
|
||||||
|
status=status,
|
||||||
|
max_results=max_results,
|
||||||
|
next_token=next_token,
|
||||||
|
)
|
||||||
|
|
||||||
|
return json.dumps(dict(executionSummaries=job_executions, nextToken=next_token))
|
||||||
|
|
||||||
    def create_keys_and_certificate(self):
        set_as_active = self._get_bool_param("setAsActive")
        cert, key_pair = self.iot_backend.create_keys_and_certificate(

@@ -241,12 +365,61 @@ class IoTResponse(BaseResponse):
        self.iot_backend.delete_policy(policy_name=policy_name)
        return json.dumps(dict())

    def create_policy_version(self):
        policy_name = self._get_param("policyName")
        policy_document = self._get_param("policyDocument")
        set_as_default = self._get_bool_param("setAsDefault")
        policy_version = self.iot_backend.create_policy_version(
            policy_name, policy_document, set_as_default
        )

        return json.dumps(dict(policy_version.to_dict_at_creation()))

    def set_default_policy_version(self):
        policy_name = self._get_param("policyName")
        version_id = self._get_param("policyVersionId")
        self.iot_backend.set_default_policy_version(policy_name, version_id)

        return json.dumps(dict())

    def get_policy_version(self):
        policy_name = self._get_param("policyName")
        version_id = self._get_param("policyVersionId")
        policy_version = self.iot_backend.get_policy_version(policy_name, version_id)
        return json.dumps(dict(policy_version.to_get_dict()))

    def list_policy_versions(self):
        policy_name = self._get_param("policyName")
        policiy_versions = self.iot_backend.list_policy_versions(
            policy_name=policy_name
        )

        return json.dumps(dict(policyVersions=[_.to_dict() for _ in policiy_versions]))

    def delete_policy_version(self):
        policy_name = self._get_param("policyName")
        version_id = self._get_param("policyVersionId")
        self.iot_backend.delete_policy_version(policy_name, version_id)

        return json.dumps(dict())

    def attach_policy(self):
        policy_name = self._get_param("policyName")
        target = self._get_param("target")
        self.iot_backend.attach_policy(policy_name=policy_name, target=target)
        return json.dumps(dict())

    def list_attached_policies(self):
        principal = unquote(self._get_param("target"))
        # marker = self._get_param("marker")
        # page_size = self._get_int_param("pageSize")
        policies = self.iot_backend.list_attached_policies(target=principal)
        # TODO: implement pagination in the future
        next_marker = None
        return json.dumps(
            dict(policies=[_.to_dict() for _ in policies], nextMarker=next_marker)
        )

    def attach_principal_policy(self):
        policy_name = self._get_param("policyName")
        principal = self.headers.get("x-amzn-iot-principal")
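For the policy-version handlers, a plausible boto3 round trip looks like the following minimal sketch; the policy name and document are illustrative only and not part of the diff.

import json

import boto3
from moto import mock_iot


@mock_iot
def exercise_policy_version_handlers():
    client = boto3.client("iot", region_name="eu-west-1")
    document = json.dumps(
        {
            "Version": "2012-10-17",
            "Statement": [{"Effect": "Allow", "Action": "iot:*", "Resource": "*"}],
        }
    )

    client.create_policy(policyName="example-policy", policyDocument=document)

    # Creates version 2 and makes it the default.
    version = client.create_policy_version(
        policyName="example-policy", policyDocument=document, setAsDefault=True
    )
    print(version["policyVersionId"])

    print(client.list_policy_versions(policyName="example-policy")["policyVersions"])

    # Version 1 is no longer the default, so it can be deleted.
    client.delete_policy_version(policyName="example-policy", policyVersionId="1")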
@@ -1,8 +1,13 @@
import datetime
import json
import time

from boto3 import Session

from moto.core.exceptions import InvalidNextTokenException
from moto.core.models import ConfigQueryModel
from moto.s3 import s3_backends
from moto.s3.models import get_moto_s3_account_id


class S3ConfigQuery(ConfigQueryModel):
@@ -118,4 +123,146 @@ class S3ConfigQuery(ConfigQueryModel):
        return config_data


class S3AccountPublicAccessBlockConfigQuery(ConfigQueryModel):
    def list_config_service_resources(
        self,
        resource_ids,
        resource_name,
        limit,
        next_token,
        backend_region=None,
        resource_region=None,
    ):
        # For the Account Public Access Block, they are the same for all regions. The resource ID is the AWS account ID
        # There is no resource name -- it should be a blank string "" if provided.

        # The resource name can only ever be None or an empty string:
        if resource_name is not None and resource_name != "":
            return [], None

        pab = None
        account_id = get_moto_s3_account_id()
        regions = [region for region in Session().get_available_regions("config")]

        # If a resource ID was passed in, then filter accordingly:
        if resource_ids:
            for id in resource_ids:
                if account_id == id:
                    pab = self.backends["global"].account_public_access_block
                    break

        # Otherwise, just grab the one from the backend:
        if not resource_ids:
            pab = self.backends["global"].account_public_access_block

        # If it's not present, then return nothing
        if not pab:
            return [], None

        # Filter on regions (and paginate on them as well):
        if backend_region:
            pab_list = [backend_region]
        elif resource_region:
            # Invalid region?
            if resource_region not in regions:
                return [], None

            pab_list = [resource_region]

        # Aggregated query where no regions were supplied so return them all:
        else:
            pab_list = regions

        # Pagination logic:
        sorted_regions = sorted(pab_list)
        new_token = None

        # Get the start:
        if not next_token:
            start = 0
        else:
            # Tokens for this moto feature is just the region-name:
            # For OTHER non-global resource types, it's the region concatenated with the resource ID.
            if next_token not in sorted_regions:
                raise InvalidNextTokenException()

            start = sorted_regions.index(next_token)

        # Get the list of items to collect:
        pab_list = sorted_regions[start : (start + limit)]

        if len(sorted_regions) > (start + limit):
            new_token = sorted_regions[start + limit]

        return (
            [
                {
                    "type": "AWS::S3::AccountPublicAccessBlock",
                    "id": account_id,
                    "region": region,
                }
                for region in pab_list
            ],
            new_token,
        )

    def get_config_resource(
        self, resource_id, resource_name=None, backend_region=None, resource_region=None
    ):
        # Do we even have this defined?
        if not self.backends["global"].account_public_access_block:
            return None

        # Resource name can only ever be "" if it's supplied:
        if resource_name is not None and resource_name != "":
            return None

        # Are we filtering based on region?
        account_id = get_moto_s3_account_id()
        regions = [region for region in Session().get_available_regions("config")]

        # Is the resource ID correct?:
        if account_id == resource_id:
            if backend_region:
                pab_region = backend_region

            # Invalid region?
            elif resource_region not in regions:
                return None

            else:
                pab_region = resource_region

        else:
            return None

        # Format the PAB to the AWS Config format:
        creation_time = datetime.datetime.utcnow()
        config_data = {
            "version": "1.3",
            "accountId": account_id,
            "configurationItemCaptureTime": str(creation_time),
            "configurationItemStatus": "OK",
            "configurationStateId": str(
                int(time.mktime(creation_time.timetuple()))
            ),  # PY2 and 3 compatible
            "resourceType": "AWS::S3::AccountPublicAccessBlock",
            "resourceId": account_id,
            "awsRegion": pab_region,
            "availabilityZone": "Not Applicable",
            "configuration": self.backends[
                "global"
            ].account_public_access_block.to_config_dict(),
            "supplementaryConfiguration": {},
        }

        # The 'configuration' field is also a JSON string:
        config_data["configuration"] = json.dumps(config_data["configuration"])

        return config_data


s3_config_query = S3ConfigQuery(s3_backends)
s3_account_public_access_block_query = S3AccountPublicAccessBlockConfigQuery(
    s3_backends
)
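Because the new query model plugs into moto's AWS Config support, the account-level resource would be surfaced through the regular Config APIs. A minimal sketch, assuming the config mock exposes the resource exactly as listed above; the account id shown is moto's default and an assumption here, and nothing is returned until an account-level PublicAccessBlock has actually been put (see the s3control sketch further down).

import boto3
from moto import mock_config, mock_s3


@mock_s3
@mock_config
def sketch_config_lookup():
    config = boto3.client("config", region_name="us-east-1")

    # Lists the AWS::S3::AccountPublicAccessBlock resource, if one exists.
    listed = config.list_discovered_resources(
        resourceType="AWS::S3::AccountPublicAccessBlock"
    )
    print(listed["resourceIdentifiers"])

    # Fetches the Config-formatted item built by get_config_resource() above.
    batch = config.batch_get_resource_config(
        resourceKeys=[
            {
                "resourceType": "AWS::S3::AccountPublicAccessBlock",
                "resourceId": "123456789012",
            }
        ]
    )
    print(batch["baseConfigurationItems"])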
@@ -127,6 +127,18 @@ class InvalidRequest(S3ClientError):
        )


class IllegalLocationConstraintException(S3ClientError):
    code = 400

    def __init__(self, *args, **kwargs):
        super(IllegalLocationConstraintException, self).__init__(
            "IllegalLocationConstraintException",
            "The unspecified location constraint is incompatible for the region specific endpoint this request was sent to.",
            *args,
            **kwargs
        )


class MalformedXML(S3ClientError):
    code = 400

@@ -347,3 +359,12 @@ class InvalidPublicAccessBlockConfiguration(S3ClientError):
            *args,
            **kwargs
        )


class WrongPublicAccessBlockAccountIdError(S3ClientError):
    code = 403

    def __init__(self):
        super(WrongPublicAccessBlockAccountIdError, self).__init__(
            "AccessDenied", "Access Denied"
        )
@@ -19,7 +19,7 @@ import uuid
import six

from bisect import insort
from moto.core import BaseBackend, BaseModel
from moto.core import ACCOUNT_ID, BaseBackend, BaseModel
from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime
from .exceptions import (
    BucketAlreadyExists,
@@ -37,6 +37,7 @@ from .exceptions import (
    CrossLocationLoggingProhibitted,
    NoSuchPublicAccessBlockConfiguration,
    InvalidPublicAccessBlockConfiguration,
    WrongPublicAccessBlockAccountIdError,
)
from .utils import clean_key_name, _VersionedKeyStore

@@ -58,6 +59,13 @@ DEFAULT_TEXT_ENCODING = sys.getdefaultencoding()
OWNER = "75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a"


def get_moto_s3_account_id():
    """This makes it easy for mocking AWS Account IDs when using AWS Config
    -- Simply mock.patch the ACCOUNT_ID here, and Config gets it for free.
    """
    return ACCOUNT_ID


class FakeDeleteMarker(BaseModel):
    def __init__(self, key):
        self.key = key
@@ -1163,6 +1171,7 @@ class FakeBucket(BaseModel):
class S3Backend(BaseBackend):
    def __init__(self):
        self.buckets = {}
        self.account_public_access_block = None

    def create_bucket(self, bucket_name, region_name):
        if bucket_name in self.buckets:
@@ -1264,6 +1273,16 @@ class S3Backend(BaseBackend):

        return bucket.public_access_block

    def get_account_public_access_block(self, account_id):
        # The account ID should equal the account id that is set for Moto:
        if account_id != ACCOUNT_ID:
            raise WrongPublicAccessBlockAccountIdError()

        if not self.account_public_access_block:
            raise NoSuchPublicAccessBlockConfiguration()

        return self.account_public_access_block

    def set_key(
        self, bucket_name, key_name, value, storage=None, etag=None, multipart=None
    ):
@@ -1356,6 +1375,13 @@ class S3Backend(BaseBackend):
        bucket = self.get_bucket(bucket_name)
        bucket.public_access_block = None

    def delete_account_public_access_block(self, account_id):
        # The account ID should equal the account id that is set for Moto:
        if account_id != ACCOUNT_ID:
            raise WrongPublicAccessBlockAccountIdError()

        self.account_public_access_block = None

    def put_bucket_notification_configuration(self, bucket_name, notification_config):
        bucket = self.get_bucket(bucket_name)
        bucket.set_notification_configuration(notification_config)
@@ -1384,6 +1410,21 @@ class S3Backend(BaseBackend):
            pub_block_config.get("RestrictPublicBuckets"),
        )

    def put_account_public_access_block(self, account_id, pub_block_config):
        # The account ID should equal the account id that is set for Moto:
        if account_id != ACCOUNT_ID:
            raise WrongPublicAccessBlockAccountIdError()

        if not pub_block_config:
            raise InvalidPublicAccessBlockConfiguration()

        self.account_public_access_block = PublicAccessBlock(
            pub_block_config.get("BlockPublicAcls"),
            pub_block_config.get("IgnorePublicAcls"),
            pub_block_config.get("BlockPublicPolicy"),
            pub_block_config.get("RestrictPublicBuckets"),
        )

    def initiate_multipart(self, bucket_name, key_name, metadata):
        bucket = self.get_bucket(bucket_name)
        new_multipart = FakeMultipart(key_name, metadata)
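The three new backend methods above back the account-level PublicAccessBlock APIs of the s3control service. A minimal usage sketch, assuming the s3control calls are routed to this backend as wired up in the URL and response changes below, and using moto's default account id:

import boto3
from moto import mock_s3
from moto.core import ACCOUNT_ID


@mock_s3
def sketch_account_public_access_block():
    client = boto3.client("s3control", region_name="us-east-1")

    client.put_public_access_block(
        AccountId=ACCOUNT_ID,
        PublicAccessBlockConfiguration={
            "BlockPublicAcls": True,
            "IgnorePublicAcls": True,
            "BlockPublicPolicy": True,
            "RestrictPublicBuckets": True,
        },
    )

    # Any other AccountId raises the new AccessDenied error.
    print(client.get_public_access_block(AccountId=ACCOUNT_ID))

    client.delete_public_access_block(AccountId=ACCOUNT_ID)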
@@ -4,6 +4,7 @@ import re
import sys

import six
from botocore.awsrequest import AWSPreparedRequest

from moto.core.utils import str_to_rfc_1123_datetime, py2_strip_unicode_keys
from six.moves.urllib.parse import parse_qs, urlparse, unquote
@@ -29,6 +30,7 @@ from .exceptions import (
    InvalidPartOrder,
    MalformedXML,
    MalformedACLError,
    IllegalLocationConstraintException,
    InvalidNotificationARN,
    InvalidNotificationEvent,
    ObjectNotInActiveTierError,
@@ -122,6 +124,11 @@ ACTION_MAP = {
            "uploadId": "PutObject",
        },
    },
    "CONTROL": {
        "GET": {"publicAccessBlock": "GetPublicAccessBlock"},
        "PUT": {"publicAccessBlock": "PutPublicAccessBlock"},
        "DELETE": {"publicAccessBlock": "DeletePublicAccessBlock"},
    },
}

@@ -167,7 +174,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
            or host.startswith("localhost")
            or host.startswith("localstack")
            or re.match(r"^[^.]+$", host)
            or re.match(r"^.*\.svc\.cluster\.local$", host)
            or re.match(r"^.*\.svc\.cluster\.local:?\d*$", host)
        ):
            # Default to path-based buckets for (1) localhost, (2) localstack hosts (e.g. localstack.dev),
            # (3) local host names that do not contain a "." (e.g., Docker container host names), or
@@ -219,7 +226,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
        # Depending on which calling format the client is using, we don't know
        # if this is a bucket or key request so we have to check
        if self.subdomain_based_buckets(request):
            return self.key_response(request, full_url, headers)
            return self.key_or_control_response(request, full_url, headers)
        else:
            # Using path-based buckets
            return self.bucket_response(request, full_url, headers)
@@ -286,7 +293,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
            return self._bucket_response_post(request, body, bucket_name)
        else:
            raise NotImplementedError(
                "Method {0} has not been impelemented in the S3 backend yet".format(
                "Method {0} has not been implemented in the S3 backend yet".format(
                    method
                )
            )
@@ -585,6 +592,29 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
            next_continuation_token = None
        return result_keys, is_truncated, next_continuation_token

    def _body_contains_location_constraint(self, body):
        if body:
            try:
                xmltodict.parse(body)["CreateBucketConfiguration"]["LocationConstraint"]
                return True
            except KeyError:
                pass
        return False

    def _parse_pab_config(self, body):
        parsed_xml = xmltodict.parse(body)
        parsed_xml["PublicAccessBlockConfiguration"].pop("@xmlns", None)

        # If Python 2, fix the unicode strings:
        if sys.version_info[0] < 3:
            parsed_xml = {
                "PublicAccessBlockConfiguration": py2_strip_unicode_keys(
                    dict(parsed_xml["PublicAccessBlockConfiguration"])
                )
            }

        return parsed_xml
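_parse_pab_config only strips the namespace attribute (and, on Python 2, unicode keys) from whatever xmltodict produces. The body below is a hand-written example in the shape a PutPublicAccessBlock request sends; it is illustrative, not taken from the diff, and note that the parsed values remain strings:

import xmltodict

body = """
<PublicAccessBlockConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
  <BlockPublicAcls>true</BlockPublicAcls>
  <IgnorePublicAcls>true</IgnorePublicAcls>
  <BlockPublicPolicy>false</BlockPublicPolicy>
  <RestrictPublicBuckets>false</RestrictPublicBuckets>
</PublicAccessBlockConfiguration>
"""

parsed = xmltodict.parse(body)
parsed["PublicAccessBlockConfiguration"].pop("@xmlns", None)
print(parsed["PublicAccessBlockConfiguration"])
# OrderedDict([('BlockPublicAcls', 'true'), ('IgnorePublicAcls', 'true'), ...])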
    def _bucket_response_put(
        self, request, body, region_name, bucket_name, querystring
    ):
@@ -663,27 +693,23 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
                raise e

        elif "publicAccessBlock" in querystring:
            parsed_xml = xmltodict.parse(body)
            pab_config = self._parse_pab_config(body)
            parsed_xml["PublicAccessBlockConfiguration"].pop("@xmlns", None)

            # If Python 2, fix the unicode strings:
            if sys.version_info[0] < 3:
                parsed_xml = {
                    "PublicAccessBlockConfiguration": py2_strip_unicode_keys(
                        dict(parsed_xml["PublicAccessBlockConfiguration"])
                    )
                }

            self.backend.put_bucket_public_access_block(
                bucket_name, parsed_xml["PublicAccessBlockConfiguration"]
                bucket_name, pab_config["PublicAccessBlockConfiguration"]
            )
            return ""

        else:
            # us-east-1, the default AWS region behaves a bit differently
            # - you should not use it as a location constraint --> it fails
            # - querying the location constraint returns None
            # - LocationConstraint has to be specified if outside us-east-1
            if (
                region_name != DEFAULT_REGION_NAME
                and not self._body_contains_location_constraint(body)
            ):
                raise IllegalLocationConstraintException()
            if body:
                # us-east-1, the default AWS region behaves a bit differently
                # - you should not use it as a location constraint --> it fails
                # - querying the location constraint returns None
                try:
                    forced_region = xmltodict.parse(body)["CreateBucketConfiguration"][
                        "LocationConstraint"
@@ -854,15 +880,21 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
        )
        return 206, response_headers, response_content[begin : end + 1]

    def key_response(self, request, full_url, headers):
    def key_or_control_response(self, request, full_url, headers):
        # Key and Control are lumped in because splitting out the regex is too much of a pain :/
        self.method = request.method
        self.path = self._get_path(request)
        self.headers = request.headers
        if "host" not in self.headers:
            self.headers["host"] = urlparse(full_url).netloc
        response_headers = {}

        try:
            response = self._key_response(request, full_url, headers)
            # Is this an S3 control response?
            if isinstance(request, AWSPreparedRequest) and "s3-control" in request.url:
                response = self._control_response(request, full_url, headers)
            else:
                response = self._key_response(request, full_url, headers)
        except S3ClientError as s3error:
            response = s3error.code, {}, s3error.description

@@ -878,6 +910,94 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
            )
        return status_code, response_headers, response_content

    def _control_response(self, request, full_url, headers):
        parsed_url = urlparse(full_url)
        query = parse_qs(parsed_url.query, keep_blank_values=True)
        method = request.method

        if hasattr(request, "body"):
            # Boto
            body = request.body
            if hasattr(body, "read"):
                body = body.read()
        else:
            # Flask server
            body = request.data
        if body is None:
            body = b""

        if method == "GET":
            return self._control_response_get(request, query, headers)
        elif method == "PUT":
            return self._control_response_put(request, body, query, headers)
        elif method == "DELETE":
            return self._control_response_delete(request, query, headers)
        else:
            raise NotImplementedError(
                "Method {0} has not been implemented in the S3 backend yet".format(
                    method
                )
            )

    def _control_response_get(self, request, query, headers):
        action = self.path.split("?")[0].split("/")[
            -1
        ]  # Gets the action out of the URL sans query params.
        self._set_action("CONTROL", "GET", action)
        self._authenticate_and_authorize_s3_action()

        response_headers = {}
        if "publicAccessBlock" in action:
            public_block_config = self.backend.get_account_public_access_block(
                headers["x-amz-account-id"]
            )
            template = self.response_template(S3_PUBLIC_ACCESS_BLOCK_CONFIGURATION)
            return (
                200,
                response_headers,
                template.render(public_block_config=public_block_config),
            )

        raise NotImplementedError(
            "Method {0} has not been implemented in the S3 backend yet".format(action)
        )

    def _control_response_put(self, request, body, query, headers):
        action = self.path.split("?")[0].split("/")[
            -1
        ]  # Gets the action out of the URL sans query params.
        self._set_action("CONTROL", "PUT", action)
        self._authenticate_and_authorize_s3_action()

        response_headers = {}
        if "publicAccessBlock" in action:
            pab_config = self._parse_pab_config(body)
            self.backend.put_account_public_access_block(
                headers["x-amz-account-id"],
                pab_config["PublicAccessBlockConfiguration"],
            )
            return 200, response_headers, ""

        raise NotImplementedError(
            "Method {0} has not been implemented in the S3 backend yet".format(action)
        )

    def _control_response_delete(self, request, query, headers):
        action = self.path.split("?")[0].split("/")[
            -1
        ]  # Gets the action out of the URL sans query params.
        self._set_action("CONTROL", "DELETE", action)
        self._authenticate_and_authorize_s3_action()

        response_headers = {}
        if "publicAccessBlock" in action:
            self.backend.delete_account_public_access_block(headers["x-amz-account-id"])
            return 200, response_headers, ""

        raise NotImplementedError(
            "Method {0} has not been implemented in the S3 backend yet".format(action)
        )

    def _key_response(self, request, full_url, headers):
        parsed_url = urlparse(full_url)
        query = parse_qs(parsed_url.query, keep_blank_values=True)
@@ -1082,6 +1202,10 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
        if mdirective is not None and mdirective == "REPLACE":
            metadata = metadata_from_headers(request.headers)
            new_key.set_metadata(metadata, replace=True)
        tdirective = request.headers.get("x-amz-tagging-directive")
        if tdirective == "REPLACE":
            tagging = self._tagging_from_headers(request.headers)
            new_key.set_tagging(tagging)
        template = self.response_template(S3_OBJECT_COPY_RESPONSE)
        response_headers.update(new_key.response_dict)
        return 200, response_headers, template.render(key=new_key)
@@ -13,7 +13,7 @@ url_paths = {
    # subdomain key of path-based bucket
    "{0}/(?P<key_or_bucket_name>[^/]+)/?$": S3ResponseInstance.ambiguous_response,
    # path-based bucket + key
    "{0}/(?P<bucket_name_path>[^/]+)/(?P<key_name>.+)": S3ResponseInstance.key_response,
    "{0}/(?P<bucket_name_path>[^/]+)/(?P<key_name>.+)": S3ResponseInstance.key_or_control_response,
    # subdomain bucket + key with empty first part of path
    "{0}//(?P<key_name>.*)$": S3ResponseInstance.key_response,
    "{0}//(?P<key_name>.*)$": S3ResponseInstance.key_or_control_response,
}
@@ -37,7 +37,7 @@ def bucket_name_from_url(url):

REGION_URL_REGEX = re.compile(
    r"^https?://(s3[-\.](?P<region1>.+)\.amazonaws\.com/(.+)|"
    r"(.+)\.s3-(?P<region2>.+)\.amazonaws\.com)/?"
    r"(.+)\.s3[-\.](?P<region2>.+)\.amazonaws\.com)/?"
)
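The only change here is s3- becoming s3[-\.] in the second alternative, so virtual-host URLs using the newer dot-style regional endpoint also yield a region. A quick check (the bucket name is arbitrary):

import re

REGION_URL_REGEX = re.compile(
    r"^https?://(s3[-\.](?P<region1>.+)\.amazonaws\.com/(.+)|"
    r"(.+)\.s3[-\.](?P<region2>.+)\.amazonaws\.com)/?"
)

for url in (
    "https://mybucket.s3-eu-west-1.amazonaws.com/",  # legacy dash style
    "https://mybucket.s3.eu-west-1.amazonaws.com/",  # dot style, now matched too
):
    print(REGION_URL_REGEX.match(url).group("region2"))  # eu-west-1 both times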
@@ -127,6 +127,10 @@ class WorkflowExecution(BaseModel):
            "executionInfo": self.to_medium_dict(),
            "executionConfiguration": {"taskList": {"name": self.task_list}},
        }
        # info
        if self.execution_status == "CLOSED":
            hsh["executionInfo"]["closeStatus"] = self.close_status
            hsh["executionInfo"]["closeTimestamp"] = self.close_timestamp
        # configuration
        for key in self._configuration_keys:
            attr = camelcase_to_underscores(key)
0 moto/utilities/__init__.py (new file)
62 moto/utilities/tagging_service.py (new file)
@@ -0,0 +1,62 @@
class TaggingService:
    def __init__(self, tagName="Tags", keyName="Key", valueName="Value"):
        self.tagName = tagName
        self.keyName = keyName
        self.valueName = valueName
        self.tags = {}

    def list_tags_for_resource(self, arn):
        result = []
        if arn in self.tags:
            for k, v in self.tags[arn].items():
                result.append({self.keyName: k, self.valueName: v})
        return {self.tagName: result}

    def delete_all_tags_for_resource(self, arn):
        del self.tags[arn]

    def has_tags(self, arn):
        return arn in self.tags

    def tag_resource(self, arn, tags):
        if arn not in self.tags:
            self.tags[arn] = {}
        for t in tags:
            if self.valueName in t:
                self.tags[arn][t[self.keyName]] = t[self.valueName]
            else:
                self.tags[arn][t[self.keyName]] = None

    def untag_resource_using_names(self, arn, tag_names):
        for name in tag_names:
            if name in self.tags.get(arn, {}):
                del self.tags[arn][name]

    def untag_resource_using_tags(self, arn, tags):
        m = self.tags.get(arn, {})
        for t in tags:
            if self.keyName in t:
                if t[self.keyName] in m:
                    if self.valueName in t:
                        if m[t[self.keyName]] != t[self.valueName]:
                            continue
                    # If both key and value are provided, match both before deletion
                    del m[t[self.keyName]]

    def extract_tag_names(self, tags):
        results = []
        if len(tags) == 0:
            return results
        for tag in tags:
            if self.keyName in tag:
                results.append(tag[self.keyName])
        return results

    def flatten_tag_list(self, tags):
        result = {}
        for t in tags:
            if self.valueName in t:
                result[t[self.keyName]] = t[self.valueName]
            else:
                result[t[self.keyName]] = None
        return result
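A small usage sketch of the new helper; the ARN and tag values are made up, and the default Tags/Key/Value names are used:

from moto.utilities.tagging_service import TaggingService

tagger = TaggingService()
arn = "arn:aws:lambda:us-west-2:123456789012:function:example"

tagger.tag_resource(
    arn, [{"Key": "env", "Value": "test"}, {"Key": "team", "Value": "platform"}]
)
print(tagger.list_tags_for_resource(arn))
# {'Tags': [{'Key': 'env', 'Value': 'test'}, {'Key': 'team', 'Value': 'platform'}]}

tagger.untag_resource_using_names(arn, ["team"])
print(tagger.has_tags(arn))  # True -- 'env' is still attached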
@@ -204,12 +204,7 @@ def test_create_resource():
    root_resource["ResponseMetadata"].pop("HTTPHeaders", None)
    root_resource["ResponseMetadata"].pop("RetryAttempts", None)
    root_resource.should.equal(
        {
        {"path": "/", "id": root_id, "ResponseMetadata": {"HTTPStatusCode": 200},}
            "path": "/",
            "id": root_id,
            "ResponseMetadata": {"HTTPStatusCode": 200},
            "resourceMethods": {"GET": {}},
        }
    )

    client.create_resource(restApiId=api_id, parentId=root_id, pathPart="users")
@@ -257,7 +252,6 @@ def test_child_resource():
            "parentId": users_id,
            "id": tags_id,
            "ResponseMetadata": {"HTTPStatusCode": 200},
            "resourceMethods": {"GET": {}},
        }
    )
@@ -286,6 +280,41 @@ def test_create_method():
        {
            "httpMethod": "GET",
            "authorizationType": "none",
            "apiKeyRequired": False,
            "ResponseMetadata": {"HTTPStatusCode": 200},
        }
    )


@mock_apigateway
def test_create_method_apikeyrequired():
    client = boto3.client("apigateway", region_name="us-west-2")
    response = client.create_rest_api(name="my_api", description="this is my api")
    api_id = response["id"]

    resources = client.get_resources(restApiId=api_id)
    root_id = [resource for resource in resources["items"] if resource["path"] == "/"][
        0
    ]["id"]

    client.put_method(
        restApiId=api_id,
        resourceId=root_id,
        httpMethod="GET",
        authorizationType="none",
        apiKeyRequired=True,
    )

    response = client.get_method(restApiId=api_id, resourceId=root_id, httpMethod="GET")

    # this is hard to match against, so remove it
    response["ResponseMetadata"].pop("HTTPHeaders", None)
    response["ResponseMetadata"].pop("RetryAttempts", None)
    response.should.equal(
        {
            "httpMethod": "GET",
            "authorizationType": "none",
            "apiKeyRequired": True,
            "ResponseMetadata": {"HTTPStatusCode": 200},
        }
    )
@@ -86,14 +86,14 @@ def lambda_handler(event, context):

@mock_lambda
def test_list_functions():
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)
    result = conn.list_functions()
    result["Functions"].should.have.length_of(0)


@mock_lambda
def test_invoke_requestresponse_function():
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)
    conn.create_function(
        FunctionName="testFunction",
        Runtime="python2.7",
@@ -113,7 +113,7 @@ def test_invoke_requestresponse_function():
        Payload=json.dumps(in_data),
    )

    success_result["StatusCode"].should.equal(202)
    success_result["StatusCode"].should.equal(200)
    result_obj = json.loads(
        base64.b64decode(success_result["LogResult"]).decode("utf-8")
    )
@@ -150,7 +150,7 @@ def test_invoke_requestresponse_function_with_arn():
        Payload=json.dumps(in_data),
    )

    success_result["StatusCode"].should.equal(202)
    success_result["StatusCode"].should.equal(200)
    result_obj = json.loads(
        base64.b64decode(success_result["LogResult"]).decode("utf-8")
    )
@@ -163,7 +163,7 @@ def test_invoke_requestresponse_function_with_arn():

@mock_lambda
def test_invoke_event_function():
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)
    conn.create_function(
        FunctionName="testFunction",
        Runtime="python2.7",
@@ -188,16 +188,44 @@ def test_invoke_event_function():
    json.loads(success_result["Payload"].read().decode("utf-8")).should.equal(in_data)


@mock_lambda
def test_invoke_dryrun_function():
    conn = boto3.client("lambda", _lambda_region)
    conn.create_function(
        FunctionName="testFunction",
        Runtime="python2.7",
        Role=get_role_name(),
        Handler="lambda_function.lambda_handler",
        Code={"ZipFile": get_test_zip_file1(),},
        Description="test lambda function",
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    conn.invoke.when.called_with(
        FunctionName="notAFunction", InvocationType="Event", Payload="{}"
    ).should.throw(botocore.client.ClientError)

    in_data = {"msg": "So long and thanks for all the fish"}
    success_result = conn.invoke(
        FunctionName="testFunction",
        InvocationType="DryRun",
        Payload=json.dumps(in_data),
    )
    success_result["StatusCode"].should.equal(204)


if settings.TEST_SERVER_MODE:

    @mock_ec2
    @mock_lambda
    def test_invoke_function_get_ec2_volume():
        conn = boto3.resource("ec2", "us-west-2")
        conn = boto3.resource("ec2", _lambda_region)
        vol = conn.create_volume(Size=99, AvailabilityZone="us-west-2")
        vol = conn.create_volume(Size=99, AvailabilityZone=_lambda_region)
        vol = conn.Volume(vol.id)

        conn = boto3.client("lambda", "us-west-2")
        conn = boto3.client("lambda", _lambda_region)
        conn.create_function(
            FunctionName="testFunction",
            Runtime="python3.7",
@@ -216,7 +244,7 @@ if settings.TEST_SERVER_MODE:
            InvocationType="RequestResponse",
            Payload=json.dumps(in_data),
        )
        result["StatusCode"].should.equal(202)
        result["StatusCode"].should.equal(200)
        actual_payload = json.loads(result["Payload"].read().decode("utf-8"))
        expected_payload = {"id": vol.id, "state": vol.state, "size": vol.size}
        actual_payload.should.equal(expected_payload)
@@ -227,14 +255,14 @@ if settings.TEST_SERVER_MODE:
@mock_ec2
@mock_lambda
def test_invoke_function_from_sns():
    logs_conn = boto3.client("logs", region_name="us-west-2")
    logs_conn = boto3.client("logs", region_name=_lambda_region)
    sns_conn = boto3.client("sns", region_name="us-west-2")
    sns_conn = boto3.client("sns", region_name=_lambda_region)
    sns_conn.create_topic(Name="some-topic")
    topics_json = sns_conn.list_topics()
    topics = topics_json["Topics"]
    topic_arn = topics[0]["TopicArn"]

    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)
    result = conn.create_function(
        FunctionName="testFunction",
        Runtime="python2.7",
@@ -277,7 +305,7 @@ def test_invoke_function_from_sns():

@mock_lambda
def test_create_based_on_s3_with_missing_bucket():
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)

    conn.create_function.when.called_with(
        FunctionName="testFunction",
@@ -297,12 +325,15 @@ def test_create_based_on_s3_with_missing_bucket():
@mock_s3
@freeze_time("2015-01-01 00:00:00")
def test_create_function_from_aws_bucket():
    s3_conn = boto3.client("s3", "us-west-2")
    s3_conn = boto3.client("s3", _lambda_region)
    s3_conn.create_bucket(Bucket="test-bucket")
    s3_conn.create_bucket(
        Bucket="test-bucket",
        CreateBucketConfiguration={"LocationConstraint": _lambda_region},
    )
    zip_content = get_test_zip_file2()

    s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)

    result = conn.create_function(
        FunctionName="testFunction",
@@ -350,7 +381,7 @@ def test_create_function_from_aws_bucket():
@mock_lambda
@freeze_time("2015-01-01 00:00:00")
def test_create_function_from_zipfile():
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)
    zip_content = get_test_zip_file1()
    result = conn.create_function(
        FunctionName="testFunction",
@@ -395,12 +426,15 @@ def test_create_function_from_zipfile():
@mock_s3
@freeze_time("2015-01-01 00:00:00")
def test_get_function():
    s3_conn = boto3.client("s3", "us-west-2")
    s3_conn = boto3.client("s3", _lambda_region)
    s3_conn.create_bucket(Bucket="test-bucket")
    s3_conn.create_bucket(
        Bucket="test-bucket",
        CreateBucketConfiguration={"LocationConstraint": _lambda_region},
    )

    zip_content = get_test_zip_file1()
    s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)

    conn.create_function(
        FunctionName="testFunction",
@@ -464,7 +498,10 @@ def test_get_function():
def test_get_function_by_arn():
    bucket_name = "test-bucket"
    s3_conn = boto3.client("s3", "us-east-1")
    s3_conn.create_bucket(Bucket=bucket_name)
    s3_conn.create_bucket(
        Bucket=bucket_name,
        CreateBucketConfiguration={"LocationConstraint": _lambda_region},
    )

    zip_content = get_test_zip_file2()
    s3_conn.put_object(Bucket=bucket_name, Key="test.zip", Body=zip_content)
@@ -489,12 +526,15 @@ def test_get_function_by_arn():
@mock_lambda
@mock_s3
def test_delete_function():
    s3_conn = boto3.client("s3", "us-west-2")
    s3_conn = boto3.client("s3", _lambda_region)
    s3_conn.create_bucket(Bucket="test-bucket")
    s3_conn.create_bucket(
        Bucket="test-bucket",
        CreateBucketConfiguration={"LocationConstraint": _lambda_region},
    )

    zip_content = get_test_zip_file2()
    s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)

    conn.create_function(
        FunctionName="testFunction",
@@ -525,7 +565,10 @@ def test_delete_function():
def test_delete_function_by_arn():
    bucket_name = "test-bucket"
    s3_conn = boto3.client("s3", "us-east-1")
    s3_conn.create_bucket(Bucket=bucket_name)
    s3_conn.create_bucket(
        Bucket=bucket_name,
        CreateBucketConfiguration={"LocationConstraint": _lambda_region},
    )

    zip_content = get_test_zip_file2()
    s3_conn.put_object(Bucket=bucket_name, Key="test.zip", Body=zip_content)
@@ -550,7 +593,7 @@ def test_delete_function_by_arn():

@mock_lambda
def test_delete_unknown_function():
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)
    conn.delete_function.when.called_with(
        FunctionName="testFunctionThatDoesntExist"
    ).should.throw(botocore.client.ClientError)
@@ -559,12 +602,15 @@ def test_delete_unknown_function():
@mock_lambda
@mock_s3
def test_publish():
    s3_conn = boto3.client("s3", "us-west-2")
    s3_conn = boto3.client("s3", _lambda_region)
    s3_conn.create_bucket(Bucket="test-bucket")
    s3_conn.create_bucket(
        Bucket="test-bucket",
        CreateBucketConfiguration={"LocationConstraint": _lambda_region},
    )

    zip_content = get_test_zip_file2()
    s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)

    conn.create_function(
        FunctionName="testFunction",
@@ -609,12 +655,15 @@ def test_list_create_list_get_delete_list():
    test `list -> create -> list -> get -> delete -> list` integration

    """
    s3_conn = boto3.client("s3", "us-west-2")
    s3_conn = boto3.client("s3", _lambda_region)
    s3_conn.create_bucket(Bucket="test-bucket")
    s3_conn.create_bucket(
        Bucket="test-bucket",
        CreateBucketConfiguration={"LocationConstraint": _lambda_region},
    )

    zip_content = get_test_zip_file2()
    s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)

    conn.list_functions()["Functions"].should.have.length_of(0)

@@ -711,12 +760,15 @@ def test_tags():
    """
    test list_tags -> tag_resource -> list_tags -> tag_resource -> list_tags -> untag_resource -> list_tags integration
    """
    s3_conn = boto3.client("s3", "us-west-2")
    s3_conn = boto3.client("s3", _lambda_region)
    s3_conn.create_bucket(Bucket="test-bucket")
    s3_conn.create_bucket(
        Bucket="test-bucket",
        CreateBucketConfiguration={"LocationConstraint": _lambda_region},
    )

    zip_content = get_test_zip_file2()
    s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)

    function = conn.create_function(
        FunctionName="testFunction",
@@ -768,7 +820,7 @@ def test_tags_not_found():
    """
    Test list_tags and tag_resource when the lambda with the given arn does not exist
    """
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)
    conn.list_tags.when.called_with(
        Resource="arn:aws:lambda:{}:function:not-found".format(ACCOUNT_ID)
    ).should.throw(botocore.client.ClientError)
@@ -786,7 +838,7 @@ def test_tags_not_found():

@mock_lambda
def test_invoke_async_function():
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)
    conn.create_function(
        FunctionName="testFunction",
        Runtime="python2.7",
@@ -809,7 +861,7 @@ def test_invoke_async_function():
@mock_lambda
@freeze_time("2015-01-01 00:00:00")
def test_get_function_created_with_zipfile():
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)
    zip_content = get_test_zip_file1()
    result = conn.create_function(
        FunctionName="testFunction",
@@ -855,7 +907,7 @@ def test_get_function_created_with_zipfile():

@mock_lambda
def test_add_function_permission():
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)
    zip_content = get_test_zip_file1()
    conn.create_function(
        FunctionName="testFunction",
@@ -886,7 +938,7 @@ def test_add_function_permission():

@mock_lambda
def test_get_function_policy():
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)
    zip_content = get_test_zip_file1()
    conn.create_function(
        FunctionName="testFunction",
@@ -921,12 +973,15 @@ def test_get_function_policy():
@mock_lambda
@mock_s3
def test_list_versions_by_function():
    s3_conn = boto3.client("s3", "us-west-2")
    s3_conn = boto3.client("s3", _lambda_region)
    s3_conn.create_bucket(Bucket="test-bucket")
    s3_conn.create_bucket(
        Bucket="test-bucket",
        CreateBucketConfiguration={"LocationConstraint": _lambda_region},
    )

    zip_content = get_test_zip_file2()
    s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)

    conn.create_function(
        FunctionName="testFunction",
@@ -977,12 +1032,15 @@ def test_list_versions_by_function():
@mock_lambda
@mock_s3
def test_create_function_with_already_exists():
    s3_conn = boto3.client("s3", "us-west-2")
    s3_conn = boto3.client("s3", _lambda_region)
    s3_conn.create_bucket(Bucket="test-bucket")
    s3_conn.create_bucket(
        Bucket="test-bucket",
        CreateBucketConfiguration={"LocationConstraint": _lambda_region},
    )

    zip_content = get_test_zip_file2()
    s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)

    conn.create_function(
        FunctionName="testFunction",
@@ -1014,7 +1072,7 @@ def test_create_function_with_already_exists():
@mock_lambda
@mock_s3
def test_list_versions_by_function_for_nonexistent_function():
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)
    versions = conn.list_versions_by_function(FunctionName="testFunction")

    assert len(versions["Versions"]) == 0
@@ -1363,12 +1421,15 @@ def test_delete_event_source_mapping():
@mock_lambda
@mock_s3
def test_update_configuration():
    s3_conn = boto3.client("s3", "us-west-2")
    s3_conn = boto3.client("s3", _lambda_region)
    s3_conn.create_bucket(Bucket="test-bucket")
    s3_conn.create_bucket(
        Bucket="test-bucket",
        CreateBucketConfiguration={"LocationConstraint": _lambda_region},
    )

    zip_content = get_test_zip_file2()
    s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)

    fxn = conn.create_function(
        FunctionName="testFunction",
@@ -1411,7 +1472,7 @@ def test_update_configuration():

@mock_lambda
def test_update_function_zip():
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)

    zip_content_one = get_test_zip_file1()

@@ -1466,13 +1527,16 @@ def test_update_function_zip():
@mock_lambda
@mock_s3
def test_update_function_s3():
    s3_conn = boto3.client("s3", "us-west-2")
    s3_conn = boto3.client("s3", _lambda_region)
    s3_conn.create_bucket(Bucket="test-bucket")
    s3_conn.create_bucket(
        Bucket="test-bucket",
        CreateBucketConfiguration={"LocationConstraint": _lambda_region},
    )

    zip_content = get_test_zip_file1()
    s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)

    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)

    fxn = conn.create_function(
        FunctionName="testFunctionS3",
@@ -1553,7 +1617,7 @@ def test_create_function_with_unknown_arn():


def create_invalid_lambda(role):
    conn = boto3.client("lambda", "us-west-2")
    conn = boto3.client("lambda", _lambda_region)
    zip_content = get_test_zip_file1()
    with assert_raises(ClientError) as err:
        conn.create_function(
@@ -1572,7 +1636,7 @@ def create_invalid_lambda(role):

def get_role_name():
    with mock_iam():
        iam = boto3.client("iam", region_name="us-west-2")
        iam = boto3.client("iam", region_name=_lambda_region)
        try:
            return iam.get_role(RoleName="my-role")["Role"]["Arn"]
        except ClientError:
@ -1,5 +1,5 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
template = {
|
template = {
|
||||||
"Resources": {"VPCEIP": {"Type": "AWS::EC2::EIP", "Properties": {"Domain": "vpc"}}}
|
"Resources": {"VPCEIP": {"Type": "AWS::EC2::EIP", "Properties": {"Domain": "vpc"}}}
|
||||||
}
|
}
|
||||||
|
@ -1,276 +1,276 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
template = {
|
template = {
|
||||||
"Description": "AWS CloudFormation Sample Template vpc_single_instance_in_subnet.template: Sample template showing how to create a VPC and add an EC2 instance with an Elastic IP address and a security group. **WARNING** This template creates an Amazon EC2 instance. You will be billed for the AWS resources used if you create a stack from this template.",
|
"Description": "AWS CloudFormation Sample Template vpc_single_instance_in_subnet.template: Sample template showing how to create a VPC and add an EC2 instance with an Elastic IP address and a security group. **WARNING** This template creates an Amazon EC2 instance. You will be billed for the AWS resources used if you create a stack from this template.",
|
||||||
"Parameters": {
|
"Parameters": {
|
||||||
"SSHLocation": {
|
"SSHLocation": {
|
||||||
"ConstraintDescription": "must be a valid IP CIDR range of the form x.x.x.x/x.",
|
"ConstraintDescription": "must be a valid IP CIDR range of the form x.x.x.x/x.",
|
||||||
"Description": " The IP address range that can be used to SSH to the EC2 instances",
|
"Description": " The IP address range that can be used to SSH to the EC2 instances",
|
||||||
"Default": "0.0.0.0/0",
|
"Default": "0.0.0.0/0",
|
||||||
"MinLength": "9",
|
"MinLength": "9",
|
||||||
"AllowedPattern": "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,2})",
|
"AllowedPattern": "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,2})",
|
||||||
"MaxLength": "18",
|
"MaxLength": "18",
|
||||||
"Type": "String",
|
"Type": "String",
|
||||||
},
|
},
|
||||||
"KeyName": {
|
"KeyName": {
|
||||||
"Type": "String",
|
"Type": "String",
|
||||||
"Description": "Name of an existing EC2 KeyPair to enable SSH access to the instance",
|
"Description": "Name of an existing EC2 KeyPair to enable SSH access to the instance",
|
||||||
"MinLength": "1",
|
"MinLength": "1",
|
||||||
"AllowedPattern": "[\\x20-\\x7E]*",
|
"AllowedPattern": "[\\x20-\\x7E]*",
|
||||||
"MaxLength": "255",
|
"MaxLength": "255",
|
||||||
"ConstraintDescription": "can contain only ASCII characters.",
|
"ConstraintDescription": "can contain only ASCII characters.",
|
||||||
},
|
},
|
||||||
"InstanceType": {
|
"InstanceType": {
|
||||||
"Default": "m1.small",
|
"Default": "m1.small",
|
||||||
"ConstraintDescription": "must be a valid EC2 instance type.",
|
"ConstraintDescription": "must be a valid EC2 instance type.",
|
||||||
"Type": "String",
|
"Type": "String",
|
||||||
"Description": "WebServer EC2 instance type",
|
"Description": "WebServer EC2 instance type",
|
||||||
"AllowedValues": [
|
"AllowedValues": [
|
||||||
"t1.micro",
|
"t1.micro",
|
||||||
"m1.small",
|
"m1.small",
|
||||||
"m1.medium",
|
"m1.medium",
|
||||||
"m1.large",
|
"m1.large",
|
||||||
"m1.xlarge",
|
"m1.xlarge",
|
||||||
"m2.xlarge",
|
"m2.xlarge",
|
||||||
"m2.2xlarge",
|
"m2.2xlarge",
|
||||||
"m2.4xlarge",
|
"m2.4xlarge",
|
||||||
"m3.xlarge",
|
"m3.xlarge",
|
||||||
"m3.2xlarge",
|
"m3.2xlarge",
|
||||||
"c1.medium",
|
"c1.medium",
|
||||||
"c1.xlarge",
|
"c1.xlarge",
|
||||||
"cc1.4xlarge",
|
"cc1.4xlarge",
|
||||||
"cc2.8xlarge",
|
"cc2.8xlarge",
|
||||||
"cg1.4xlarge",
|
"cg1.4xlarge",
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"AWSTemplateFormatVersion": "2010-09-09",
|
"AWSTemplateFormatVersion": "2010-09-09",
|
||||||
"Outputs": {
|
"Outputs": {
|
||||||
"URL": {
|
"URL": {
|
||||||
"Description": "Newly created application URL",
|
"Description": "Newly created application URL",
|
||||||
"Value": {
|
"Value": {
|
||||||
"Fn::Join": [
|
"Fn::Join": [
|
||||||
"",
|
"",
|
||||||
["http://", {"Fn::GetAtt": ["WebServerInstance", "PublicIp"]}],
|
["http://", {"Fn::GetAtt": ["WebServerInstance", "PublicIp"]}],
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"Resources": {
|
"Resources": {
|
||||||
"Subnet": {
|
"Subnet": {
|
||||||
"Type": "AWS::EC2::Subnet",
|
"Type": "AWS::EC2::Subnet",
|
||||||
"Properties": {
|
"Properties": {
|
||||||
"VpcId": {"Ref": "VPC"},
|
"VpcId": {"Ref": "VPC"},
|
||||||
"CidrBlock": "10.0.0.0/24",
|
"CidrBlock": "10.0.0.0/24",
|
||||||
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
|
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"WebServerWaitHandle": {"Type": "AWS::CloudFormation::WaitConditionHandle"},
|
"WebServerWaitHandle": {"Type": "AWS::CloudFormation::WaitConditionHandle"},
|
||||||
"Route": {
|
"Route": {
|
||||||
"Type": "AWS::EC2::Route",
|
"Type": "AWS::EC2::Route",
|
||||||
"Properties": {
|
"Properties": {
|
||||||
"GatewayId": {"Ref": "InternetGateway"},
|
"GatewayId": {"Ref": "InternetGateway"},
|
||||||
"DestinationCidrBlock": "0.0.0.0/0",
|
"DestinationCidrBlock": "0.0.0.0/0",
|
||||||
"RouteTableId": {"Ref": "RouteTable"},
|
"RouteTableId": {"Ref": "RouteTable"},
|
||||||
},
|
},
|
||||||
"DependsOn": "AttachGateway",
|
"DependsOn": "AttachGateway",
|
||||||
},
|
},
|
||||||
"SubnetRouteTableAssociation": {
|
"SubnetRouteTableAssociation": {
|
||||||
"Type": "AWS::EC2::SubnetRouteTableAssociation",
|
"Type": "AWS::EC2::SubnetRouteTableAssociation",
|
||||||
"Properties": {
|
"Properties": {
|
||||||
"SubnetId": {"Ref": "Subnet"},
|
"SubnetId": {"Ref": "Subnet"},
|
||||||
"RouteTableId": {"Ref": "RouteTable"},
|
"RouteTableId": {"Ref": "RouteTable"},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"InternetGateway": {
|
"InternetGateway": {
|
||||||
"Type": "AWS::EC2::InternetGateway",
|
"Type": "AWS::EC2::InternetGateway",
|
||||||
"Properties": {
|
"Properties": {
|
||||||
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}]
|
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}]
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"RouteTable": {
|
"RouteTable": {
|
||||||
"Type": "AWS::EC2::RouteTable",
|
"Type": "AWS::EC2::RouteTable",
|
||||||
"Properties": {
|
"Properties": {
|
||||||
"VpcId": {"Ref": "VPC"},
|
"VpcId": {"Ref": "VPC"},
|
||||||
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
|
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"WebServerWaitCondition": {
|
"WebServerWaitCondition": {
|
||||||
"Type": "AWS::CloudFormation::WaitCondition",
|
"Type": "AWS::CloudFormation::WaitCondition",
|
||||||
"Properties": {"Handle": {"Ref": "WebServerWaitHandle"}, "Timeout": "300"},
|
"Properties": {"Handle": {"Ref": "WebServerWaitHandle"}, "Timeout": "300"},
|
||||||
"DependsOn": "WebServerInstance",
|
"DependsOn": "WebServerInstance",
|
||||||
},
|
},
|
||||||
"VPC": {
|
"VPC": {
|
||||||
"Type": "AWS::EC2::VPC",
|
"Type": "AWS::EC2::VPC",
|
||||||
"Properties": {
|
"Properties": {
|
||||||
"CidrBlock": "10.0.0.0/16",
|
"CidrBlock": "10.0.0.0/16",
|
||||||
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
|
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"InstanceSecurityGroup": {
|
"InstanceSecurityGroup": {
|
||||||
"Type": "AWS::EC2::SecurityGroup",
|
"Type": "AWS::EC2::SecurityGroup",
|
||||||
"Properties": {
|
"Properties": {
|
||||||
"SecurityGroupIngress": [
|
"SecurityGroupIngress": [
|
||||||
{
|
{
|
||||||
"ToPort": "22",
|
"ToPort": "22",
|
||||||
"IpProtocol": "tcp",
|
"IpProtocol": "tcp",
|
||||||
"CidrIp": {"Ref": "SSHLocation"},
|
"CidrIp": {"Ref": "SSHLocation"},
|
||||||
"FromPort": "22",
|
"FromPort": "22",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"ToPort": "80",
|
"ToPort": "80",
|
||||||
"IpProtocol": "tcp",
|
"IpProtocol": "tcp",
|
||||||
"CidrIp": "0.0.0.0/0",
|
"CidrIp": "0.0.0.0/0",
|
||||||
"FromPort": "80",
|
"FromPort": "80",
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
"VpcId": {"Ref": "VPC"},
|
"VpcId": {"Ref": "VPC"},
|
||||||
"GroupDescription": "Enable SSH access via port 22",
|
"GroupDescription": "Enable SSH access via port 22",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"WebServerInstance": {
|
"WebServerInstance": {
|
||||||
"Type": "AWS::EC2::Instance",
|
"Type": "AWS::EC2::Instance",
|
||||||
"Properties": {
|
"Properties": {
|
||||||
"UserData": {
|
"UserData": {
|
||||||
"Fn::Base64": {
|
"Fn::Base64": {
|
||||||
"Fn::Join": [
|
"Fn::Join": [
|
||||||
"",
|
"",
|
||||||
[
|
[
|
||||||
"#!/bin/bash\n",
|
"#!/bin/bash\n",
|
||||||
"yum update -y aws-cfn-bootstrap\n",
|
"yum update -y aws-cfn-bootstrap\n",
|
||||||
"# Helper function\n",
|
"# Helper function\n",
|
||||||
"function error_exit\n",
|
"function error_exit\n",
|
||||||
"{\n",
|
"{\n",
|
||||||
' /opt/aws/bin/cfn-signal -e 1 -r "$1" \'',
|
' /opt/aws/bin/cfn-signal -e 1 -r "$1" \'',
|
||||||
{"Ref": "WebServerWaitHandle"},
|
{"Ref": "WebServerWaitHandle"},
|
||||||
"'\n",
|
"'\n",
|
||||||
" exit 1\n",
|
" exit 1\n",
|
||||||
"}\n",
|
"}\n",
|
||||||
"# Install the simple web page\n",
|
"# Install the simple web page\n",
|
||||||
"/opt/aws/bin/cfn-init -s ",
|
"/opt/aws/bin/cfn-init -s ",
|
||||||
{"Ref": "AWS::StackId"},
|
{"Ref": "AWS::StackId"},
|
||||||
" -r WebServerInstance ",
|
" -r WebServerInstance ",
|
||||||
" --region ",
|
" --region ",
|
||||||
{"Ref": "AWS::Region"},
|
{"Ref": "AWS::Region"},
|
||||||
" || error_exit 'Failed to run cfn-init'\n",
|
" || error_exit 'Failed to run cfn-init'\n",
|
||||||
"# Start up the cfn-hup daemon to listen for changes to the Web Server metadata\n",
|
"# Start up the cfn-hup daemon to listen for changes to the Web Server metadata\n",
|
||||||
"/opt/aws/bin/cfn-hup || error_exit 'Failed to start cfn-hup'\n",
|
"/opt/aws/bin/cfn-hup || error_exit 'Failed to start cfn-hup'\n",
|
||||||
"# All done so signal success\n",
|
"# All done so signal success\n",
|
||||||
'/opt/aws/bin/cfn-signal -e 0 -r "WebServer setup complete" \'',
|
'/opt/aws/bin/cfn-signal -e 0 -r "WebServer setup complete" \'',
|
||||||
{"Ref": "WebServerWaitHandle"},
|
{"Ref": "WebServerWaitHandle"},
|
||||||
"'\n",
|
"'\n",
|
||||||
],
|
],
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"Tags": [
|
"Tags": [
|
||||||
{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"},
|
{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"},
|
||||||
{"Value": "Bar", "Key": "Foo"},
|
{"Value": "Bar", "Key": "Foo"},
|
||||||
],
|
],
|
||||||
"SecurityGroupIds": [{"Ref": "InstanceSecurityGroup"}],
|
"SecurityGroupIds": [{"Ref": "InstanceSecurityGroup"}],
|
||||||
"KeyName": {"Ref": "KeyName"},
|
"KeyName": {"Ref": "KeyName"},
|
||||||
"SubnetId": {"Ref": "Subnet"},
|
"SubnetId": {"Ref": "Subnet"},
|
||||||
"ImageId": {
|
"ImageId": {
|
||||||
"Fn::FindInMap": ["RegionMap", {"Ref": "AWS::Region"}, "AMI"]
|
"Fn::FindInMap": ["RegionMap", {"Ref": "AWS::Region"}, "AMI"]
|
||||||
},
|
},
|
||||||
"InstanceType": {"Ref": "InstanceType"},
|
"InstanceType": {"Ref": "InstanceType"},
|
||||||
},
|
},
|
||||||
"Metadata": {
|
"Metadata": {
|
||||||
"Comment": "Install a simple PHP application",
|
"Comment": "Install a simple PHP application",
|
||||||
"AWS::CloudFormation::Init": {
|
"AWS::CloudFormation::Init": {
|
||||||
"config": {
|
"config": {
|
||||||
"files": {
|
"files": {
|
||||||
"/etc/cfn/cfn-hup.conf": {
|
"/etc/cfn/cfn-hup.conf": {
|
||||||
"content": {
|
"content": {
|
||||||
"Fn::Join": [
|
"Fn::Join": [
|
||||||
"",
|
"",
|
||||||
[
|
[
|
||||||
"[main]\n",
|
"[main]\n",
|
||||||
"stack=",
|
"stack=",
|
||||||
{"Ref": "AWS::StackId"},
|
{"Ref": "AWS::StackId"},
|
||||||
"\n",
|
"\n",
|
||||||
"region=",
|
"region=",
|
||||||
{"Ref": "AWS::Region"},
|
{"Ref": "AWS::Region"},
|
||||||
"\n",
|
"\n",
|
||||||
],
|
],
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"owner": "root",
|
"owner": "root",
|
||||||
"group": "root",
|
"group": "root",
|
||||||
"mode": "000400",
|
"mode": "000400",
|
||||||
},
|
},
|
||||||
"/etc/cfn/hooks.d/cfn-auto-reloader.conf": {
|
"/etc/cfn/hooks.d/cfn-auto-reloader.conf": {
|
||||||
"content": {
|
"content": {
|
||||||
"Fn::Join": [
|
"Fn::Join": [
|
||||||
"",
|
"",
|
||||||
[
|
[
|
||||||
"[cfn-auto-reloader-hook]\n",
|
"[cfn-auto-reloader-hook]\n",
|
||||||
"triggers=post.update\n",
|
"triggers=post.update\n",
|
||||||
"path=Resources.WebServerInstance.Metadata.AWS::CloudFormation::Init\n",
|
"path=Resources.WebServerInstance.Metadata.AWS::CloudFormation::Init\n",
|
||||||
"action=/opt/aws/bin/cfn-init -s ",
|
"action=/opt/aws/bin/cfn-init -s ",
|
||||||
{"Ref": "AWS::StackId"},
|
{"Ref": "AWS::StackId"},
|
||||||
" -r WebServerInstance ",
|
" -r WebServerInstance ",
|
||||||
" --region ",
|
" --region ",
|
||||||
{"Ref": "AWS::Region"},
|
{"Ref": "AWS::Region"},
|
||||||
"\n",
|
"\n",
|
||||||
"runas=root\n",
|
"runas=root\n",
|
||||||
],
|
],
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"/var/www/html/index.php": {
|
"/var/www/html/index.php": {
|
||||||
"content": {
|
"content": {
|
||||||
"Fn::Join": [
|
"Fn::Join": [
|
||||||
"",
|
"",
|
||||||
[
|
[
|
||||||
"<?php\n",
|
"<?php\n",
|
||||||
"echo '<h1>AWS CloudFormation sample PHP application</h1>';\n",
|
"echo '<h1>AWS CloudFormation sample PHP application</h1>';\n",
|
||||||
"?>\n",
|
"?>\n",
|
||||||
],
|
],
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"owner": "apache",
|
"owner": "apache",
|
||||||
"group": "apache",
|
"group": "apache",
|
||||||
"mode": "000644",
|
"mode": "000644",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"services": {
|
"services": {
|
||||||
"sysvinit": {
|
"sysvinit": {
|
||||||
"httpd": {"ensureRunning": "true", "enabled": "true"},
|
"httpd": {"ensureRunning": "true", "enabled": "true"},
|
||||||
"sendmail": {
|
"sendmail": {
|
||||||
"ensureRunning": "false",
|
"ensureRunning": "false",
|
||||||
"enabled": "false",
|
"enabled": "false",
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"packages": {"yum": {"httpd": [], "php": []}},
|
"packages": {"yum": {"httpd": [], "php": []}},
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"IPAddress": {
|
"IPAddress": {
|
||||||
"Type": "AWS::EC2::EIP",
|
"Type": "AWS::EC2::EIP",
|
||||||
"Properties": {"InstanceId": {"Ref": "WebServerInstance"}, "Domain": "vpc"},
|
"Properties": {"InstanceId": {"Ref": "WebServerInstance"}, "Domain": "vpc"},
|
||||||
"DependsOn": "AttachGateway",
|
"DependsOn": "AttachGateway",
|
||||||
},
|
},
|
||||||
"AttachGateway": {
|
"AttachGateway": {
|
||||||
"Type": "AWS::EC2::VPCGatewayAttachment",
|
"Type": "AWS::EC2::VPCGatewayAttachment",
|
||||||
"Properties": {
|
"Properties": {
|
||||||
"VpcId": {"Ref": "VPC"},
|
"VpcId": {"Ref": "VPC"},
|
||||||
"InternetGatewayId": {"Ref": "InternetGateway"},
|
"InternetGatewayId": {"Ref": "InternetGateway"},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"Mappings": {
|
"Mappings": {
|
||||||
"RegionMap": {
|
"RegionMap": {
|
||||||
"ap-southeast-1": {"AMI": "ami-74dda626"},
|
"ap-southeast-1": {"AMI": "ami-74dda626"},
|
||||||
"ap-southeast-2": {"AMI": "ami-b3990e89"},
|
"ap-southeast-2": {"AMI": "ami-b3990e89"},
|
||||||
"us-west-2": {"AMI": "ami-16fd7026"},
|
"us-west-2": {"AMI": "ami-16fd7026"},
|
||||||
"us-east-1": {"AMI": "ami-7f418316"},
|
"us-east-1": {"AMI": "ami-7f418316"},
|
||||||
"ap-northeast-1": {"AMI": "ami-dcfa4edd"},
|
"ap-northeast-1": {"AMI": "ami-dcfa4edd"},
|
||||||
"us-west-1": {"AMI": "ami-951945d0"},
|
"us-west-1": {"AMI": "ami-951945d0"},
|
||||||
"eu-west-1": {"AMI": "ami-24506250"},
|
"eu-west-1": {"AMI": "ami-24506250"},
|
||||||
"sa-east-1": {"AMI": "ami-3e3be423"},
|
"sa-east-1": {"AMI": "ami-3e3be423"},
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
@ -143,7 +143,7 @@ def test_create_stack_with_notification_arn():
|
|||||||
@mock_s3_deprecated
|
@mock_s3_deprecated
|
||||||
def test_create_stack_from_s3_url():
|
def test_create_stack_from_s3_url():
|
||||||
s3_conn = boto.s3.connect_to_region("us-west-1")
|
s3_conn = boto.s3.connect_to_region("us-west-1")
|
||||||
bucket = s3_conn.create_bucket("foobar")
|
bucket = s3_conn.create_bucket("foobar", location="us-west-1")
|
||||||
key = boto.s3.key.Key(bucket)
|
key = boto.s3.key.Key(bucket)
|
||||||
key.key = "template-key"
|
key.key = "template-key"
|
||||||
key.set_contents_from_string(dummy_template_json)
|
key.set_contents_from_string(dummy_template_json)
|
||||||
|
@ -1,117 +1,117 @@
|
|||||||
import boto
|
import boto
|
||||||
from boto.ec2.cloudwatch.alarm import MetricAlarm
|
from boto.ec2.cloudwatch.alarm import MetricAlarm
|
||||||
import sure # noqa
|
import sure # noqa
|
||||||
|
|
||||||
from moto import mock_cloudwatch_deprecated
|
from moto import mock_cloudwatch_deprecated
|
||||||
|
|
||||||
|
|
||||||
def alarm_fixture(name="tester", action=None):
|
def alarm_fixture(name="tester", action=None):
|
||||||
action = action or ["arn:alarm"]
|
action = action or ["arn:alarm"]
|
||||||
return MetricAlarm(
|
return MetricAlarm(
|
||||||
name=name,
|
name=name,
|
||||||
namespace="{0}_namespace".format(name),
|
namespace="{0}_namespace".format(name),
|
||||||
metric="{0}_metric".format(name),
|
metric="{0}_metric".format(name),
|
||||||
comparison=">=",
|
comparison=">=",
|
||||||
threshold=2.0,
|
threshold=2.0,
|
||||||
period=60,
|
period=60,
|
||||||
evaluation_periods=5,
|
evaluation_periods=5,
|
||||||
statistic="Average",
|
statistic="Average",
|
||||||
description="A test",
|
description="A test",
|
||||||
dimensions={"InstanceId": ["i-0123456,i-0123457"]},
|
dimensions={"InstanceId": ["i-0123456,i-0123457"]},
|
||||||
alarm_actions=action,
|
alarm_actions=action,
|
||||||
ok_actions=["arn:ok"],
|
ok_actions=["arn:ok"],
|
||||||
insufficient_data_actions=["arn:insufficient"],
|
insufficient_data_actions=["arn:insufficient"],
|
||||||
unit="Seconds",
|
unit="Seconds",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@mock_cloudwatch_deprecated
|
@mock_cloudwatch_deprecated
|
||||||
def test_create_alarm():
|
def test_create_alarm():
|
||||||
conn = boto.connect_cloudwatch()
|
conn = boto.connect_cloudwatch()
|
||||||
|
|
||||||
alarm = alarm_fixture()
|
alarm = alarm_fixture()
|
||||||
conn.create_alarm(alarm)
|
conn.create_alarm(alarm)
|
||||||
|
|
||||||
alarms = conn.describe_alarms()
|
alarms = conn.describe_alarms()
|
||||||
alarms.should.have.length_of(1)
|
alarms.should.have.length_of(1)
|
||||||
alarm = alarms[0]
|
alarm = alarms[0]
|
||||||
alarm.name.should.equal("tester")
|
alarm.name.should.equal("tester")
|
||||||
alarm.namespace.should.equal("tester_namespace")
|
alarm.namespace.should.equal("tester_namespace")
|
||||||
alarm.metric.should.equal("tester_metric")
|
alarm.metric.should.equal("tester_metric")
|
||||||
alarm.comparison.should.equal(">=")
|
alarm.comparison.should.equal(">=")
|
||||||
alarm.threshold.should.equal(2.0)
|
alarm.threshold.should.equal(2.0)
|
||||||
alarm.period.should.equal(60)
|
alarm.period.should.equal(60)
|
||||||
alarm.evaluation_periods.should.equal(5)
|
alarm.evaluation_periods.should.equal(5)
|
||||||
alarm.statistic.should.equal("Average")
|
alarm.statistic.should.equal("Average")
|
||||||
alarm.description.should.equal("A test")
|
alarm.description.should.equal("A test")
|
||||||
dict(alarm.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]})
|
dict(alarm.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]})
|
||||||
list(alarm.alarm_actions).should.equal(["arn:alarm"])
|
list(alarm.alarm_actions).should.equal(["arn:alarm"])
|
||||||
list(alarm.ok_actions).should.equal(["arn:ok"])
|
list(alarm.ok_actions).should.equal(["arn:ok"])
|
||||||
list(alarm.insufficient_data_actions).should.equal(["arn:insufficient"])
|
list(alarm.insufficient_data_actions).should.equal(["arn:insufficient"])
|
||||||
alarm.unit.should.equal("Seconds")
|
alarm.unit.should.equal("Seconds")
|
||||||
|
|
||||||
|
|
||||||
@mock_cloudwatch_deprecated
|
@mock_cloudwatch_deprecated
|
||||||
def test_delete_alarm():
|
def test_delete_alarm():
|
||||||
conn = boto.connect_cloudwatch()
|
conn = boto.connect_cloudwatch()
|
||||||
|
|
||||||
alarms = conn.describe_alarms()
|
alarms = conn.describe_alarms()
|
||||||
alarms.should.have.length_of(0)
|
alarms.should.have.length_of(0)
|
||||||
|
|
||||||
alarm = alarm_fixture()
|
alarm = alarm_fixture()
|
||||||
conn.create_alarm(alarm)
|
conn.create_alarm(alarm)
|
||||||
|
|
||||||
alarms = conn.describe_alarms()
|
alarms = conn.describe_alarms()
|
||||||
alarms.should.have.length_of(1)
|
alarms.should.have.length_of(1)
|
||||||
|
|
||||||
alarms[0].delete()
|
alarms[0].delete()
|
||||||
|
|
||||||
alarms = conn.describe_alarms()
|
alarms = conn.describe_alarms()
|
||||||
alarms.should.have.length_of(0)
|
alarms.should.have.length_of(0)
|
||||||
|
|
||||||
|
|
||||||
@mock_cloudwatch_deprecated
|
@mock_cloudwatch_deprecated
|
||||||
def test_put_metric_data():
|
def test_put_metric_data():
|
||||||
conn = boto.connect_cloudwatch()
|
conn = boto.connect_cloudwatch()
|
||||||
|
|
||||||
conn.put_metric_data(
|
conn.put_metric_data(
|
||||||
namespace="tester",
|
namespace="tester",
|
||||||
name="metric",
|
name="metric",
|
||||||
value=1.5,
|
value=1.5,
|
||||||
dimensions={"InstanceId": ["i-0123456,i-0123457"]},
|
dimensions={"InstanceId": ["i-0123456,i-0123457"]},
|
||||||
)
|
)
|
||||||
|
|
||||||
metrics = conn.list_metrics()
|
metrics = conn.list_metrics()
|
||||||
metrics.should.have.length_of(1)
|
metrics.should.have.length_of(1)
|
||||||
metric = metrics[0]
|
metric = metrics[0]
|
||||||
metric.namespace.should.equal("tester")
|
metric.namespace.should.equal("tester")
|
||||||
metric.name.should.equal("metric")
|
metric.name.should.equal("metric")
|
||||||
dict(metric.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]})
|
dict(metric.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]})
|
||||||
|
|
||||||
|
|
||||||
@mock_cloudwatch_deprecated
|
@mock_cloudwatch_deprecated
|
||||||
def test_describe_alarms():
|
def test_describe_alarms():
|
||||||
conn = boto.connect_cloudwatch()
|
conn = boto.connect_cloudwatch()
|
||||||
|
|
||||||
alarms = conn.describe_alarms()
|
alarms = conn.describe_alarms()
|
||||||
alarms.should.have.length_of(0)
|
alarms.should.have.length_of(0)
|
||||||
|
|
||||||
conn.create_alarm(alarm_fixture(name="nfoobar", action="afoobar"))
|
conn.create_alarm(alarm_fixture(name="nfoobar", action="afoobar"))
|
||||||
conn.create_alarm(alarm_fixture(name="nfoobaz", action="afoobaz"))
|
conn.create_alarm(alarm_fixture(name="nfoobaz", action="afoobaz"))
|
||||||
conn.create_alarm(alarm_fixture(name="nbarfoo", action="abarfoo"))
|
conn.create_alarm(alarm_fixture(name="nbarfoo", action="abarfoo"))
|
||||||
conn.create_alarm(alarm_fixture(name="nbazfoo", action="abazfoo"))
|
conn.create_alarm(alarm_fixture(name="nbazfoo", action="abazfoo"))
|
||||||
|
|
||||||
alarms = conn.describe_alarms()
|
alarms = conn.describe_alarms()
|
||||||
alarms.should.have.length_of(4)
|
alarms.should.have.length_of(4)
|
||||||
alarms = conn.describe_alarms(alarm_name_prefix="nfoo")
|
alarms = conn.describe_alarms(alarm_name_prefix="nfoo")
|
||||||
alarms.should.have.length_of(2)
|
alarms.should.have.length_of(2)
|
||||||
alarms = conn.describe_alarms(alarm_names=["nfoobar", "nbarfoo", "nbazfoo"])
|
alarms = conn.describe_alarms(alarm_names=["nfoobar", "nbarfoo", "nbazfoo"])
|
||||||
alarms.should.have.length_of(3)
|
alarms.should.have.length_of(3)
|
||||||
alarms = conn.describe_alarms(action_prefix="afoo")
|
alarms = conn.describe_alarms(action_prefix="afoo")
|
||||||
alarms.should.have.length_of(2)
|
alarms.should.have.length_of(2)
|
||||||
|
|
||||||
for alarm in conn.describe_alarms():
|
for alarm in conn.describe_alarms():
|
||||||
alarm.delete()
|
alarm.delete()
|
||||||
|
|
||||||
alarms = conn.describe_alarms()
|
alarms = conn.describe_alarms()
|
||||||
alarms.should.have.length_of(0)
|
alarms.should.have.length_of(0)
|
||||||
|
@ -27,6 +27,11 @@ def test_create_user_pool():
|
|||||||
|
|
||||||
result["UserPool"]["Id"].should_not.be.none
|
result["UserPool"]["Id"].should_not.be.none
|
||||||
result["UserPool"]["Id"].should.match(r"[\w-]+_[0-9a-zA-Z]+")
|
result["UserPool"]["Id"].should.match(r"[\w-]+_[0-9a-zA-Z]+")
|
||||||
|
result["UserPool"]["Arn"].should.equal(
|
||||||
|
"arn:aws:cognito-idp:us-west-2:{}:userpool/{}".format(
|
||||||
|
ACCOUNT_ID, result["UserPool"]["Id"]
|
||||||
|
)
|
||||||
|
)
|
||||||
result["UserPool"]["Name"].should.equal(name)
|
result["UserPool"]["Name"].should.equal(name)
|
||||||
result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value)
|
result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value)
|
||||||
|
|
||||||
@ -911,6 +916,55 @@ def test_admin_create_existing_user():
|
|||||||
caught.should.be.true
|
caught.should.be.true
|
||||||
|
|
||||||
|
|
||||||
|
@mock_cognitoidp
|
||||||
|
def test_admin_resend_invitation_existing_user():
|
||||||
|
conn = boto3.client("cognito-idp", "us-west-2")
|
||||||
|
|
||||||
|
username = str(uuid.uuid4())
|
||||||
|
value = str(uuid.uuid4())
|
||||||
|
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||||
|
conn.admin_create_user(
|
||||||
|
UserPoolId=user_pool_id,
|
||||||
|
Username=username,
|
||||||
|
UserAttributes=[{"Name": "thing", "Value": value}],
|
||||||
|
)
|
||||||
|
|
||||||
|
caught = False
|
||||||
|
try:
|
||||||
|
conn.admin_create_user(
|
||||||
|
UserPoolId=user_pool_id,
|
||||||
|
Username=username,
|
||||||
|
UserAttributes=[{"Name": "thing", "Value": value}],
|
||||||
|
MessageAction="RESEND",
|
||||||
|
)
|
||||||
|
except conn.exceptions.UsernameExistsException:
|
||||||
|
caught = True
|
||||||
|
|
||||||
|
caught.should.be.false
|
||||||
|
|
||||||
|
|
||||||
|
@mock_cognitoidp
|
||||||
|
def test_admin_resend_invitation_missing_user():
|
||||||
|
conn = boto3.client("cognito-idp", "us-west-2")
|
||||||
|
|
||||||
|
username = str(uuid.uuid4())
|
||||||
|
value = str(uuid.uuid4())
|
||||||
|
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||||
|
|
||||||
|
caught = False
|
||||||
|
try:
|
||||||
|
conn.admin_create_user(
|
||||||
|
UserPoolId=user_pool_id,
|
||||||
|
Username=username,
|
||||||
|
UserAttributes=[{"Name": "thing", "Value": value}],
|
||||||
|
MessageAction="RESEND",
|
||||||
|
)
|
||||||
|
except conn.exceptions.UserNotFoundException:
|
||||||
|
caught = True
|
||||||
|
|
||||||
|
caught.should.be.true
|
||||||
|
|
||||||
|
|
||||||
@mock_cognitoidp
|
@mock_cognitoidp
|
||||||
def test_admin_get_user():
|
def test_admin_get_user():
|
||||||
conn = boto3.client("cognito-idp", "us-west-2")
|
conn = boto3.client("cognito-idp", "us-west-2")
|
||||||
|
@ -46,4 +46,4 @@ def test_domain_dispatched_with_service():
|
|||||||
dispatcher = DomainDispatcherApplication(create_backend_app, service="s3")
|
dispatcher = DomainDispatcherApplication(create_backend_app, service="s3")
|
||||||
backend_app = dispatcher.get_application({"HTTP_HOST": "s3.us-east1.amazonaws.com"})
|
backend_app = dispatcher.get_application({"HTTP_HOST": "s3.us-east1.amazonaws.com"})
|
||||||
keys = set(backend_app.view_functions.keys())
|
keys = set(backend_app.view_functions.keys())
|
||||||
keys.should.contain("ResponseObject.key_response")
|
keys.should.contain("ResponseObject.key_or_control_response")
|
||||||
|
@ -1,182 +1,182 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
import boto.datapipeline
|
import boto.datapipeline
|
||||||
import sure # noqa
|
import sure # noqa
|
||||||
|
|
||||||
from moto import mock_datapipeline_deprecated
|
from moto import mock_datapipeline_deprecated
|
||||||
from moto.datapipeline.utils import remove_capitalization_of_dict_keys
|
from moto.datapipeline.utils import remove_capitalization_of_dict_keys
|
||||||
|
|
||||||
|
|
||||||
def get_value_from_fields(key, fields):
|
def get_value_from_fields(key, fields):
|
||||||
for field in fields:
|
for field in fields:
|
||||||
if field["key"] == key:
|
if field["key"] == key:
|
||||||
return field["stringValue"]
|
return field["stringValue"]
|
||||||
|
|
||||||
|
|
||||||
@mock_datapipeline_deprecated
|
@mock_datapipeline_deprecated
|
||||||
def test_create_pipeline():
|
def test_create_pipeline():
|
||||||
conn = boto.datapipeline.connect_to_region("us-west-2")
|
conn = boto.datapipeline.connect_to_region("us-west-2")
|
||||||
|
|
||||||
res = conn.create_pipeline("mypipeline", "some-unique-id")
|
res = conn.create_pipeline("mypipeline", "some-unique-id")
|
||||||
|
|
||||||
pipeline_id = res["pipelineId"]
|
pipeline_id = res["pipelineId"]
|
||||||
pipeline_descriptions = conn.describe_pipelines([pipeline_id])[
|
pipeline_descriptions = conn.describe_pipelines([pipeline_id])[
|
||||||
"pipelineDescriptionList"
|
"pipelineDescriptionList"
|
||||||
]
|
]
|
||||||
pipeline_descriptions.should.have.length_of(1)
|
pipeline_descriptions.should.have.length_of(1)
|
||||||
|
|
||||||
pipeline_description = pipeline_descriptions[0]
|
pipeline_description = pipeline_descriptions[0]
|
||||||
pipeline_description["name"].should.equal("mypipeline")
|
pipeline_description["name"].should.equal("mypipeline")
|
||||||
pipeline_description["pipelineId"].should.equal(pipeline_id)
|
pipeline_description["pipelineId"].should.equal(pipeline_id)
|
||||||
fields = pipeline_description["fields"]
|
fields = pipeline_description["fields"]
|
||||||
|
|
||||||
get_value_from_fields("@pipelineState", fields).should.equal("PENDING")
|
get_value_from_fields("@pipelineState", fields).should.equal("PENDING")
|
||||||
get_value_from_fields("uniqueId", fields).should.equal("some-unique-id")
|
get_value_from_fields("uniqueId", fields).should.equal("some-unique-id")
|
||||||
|
|
||||||
|
|
||||||
PIPELINE_OBJECTS = [
|
PIPELINE_OBJECTS = [
|
||||||
{
|
{
|
||||||
"id": "Default",
|
"id": "Default",
|
||||||
"name": "Default",
|
"name": "Default",
|
||||||
"fields": [{"key": "workerGroup", "stringValue": "workerGroup"}],
|
"fields": [{"key": "workerGroup", "stringValue": "workerGroup"}],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Schedule",
|
"id": "Schedule",
|
||||||
"name": "Schedule",
|
"name": "Schedule",
|
||||||
"fields": [
|
"fields": [
|
||||||
{"key": "startDateTime", "stringValue": "2012-12-12T00:00:00"},
|
{"key": "startDateTime", "stringValue": "2012-12-12T00:00:00"},
|
||||||
{"key": "type", "stringValue": "Schedule"},
|
{"key": "type", "stringValue": "Schedule"},
|
||||||
{"key": "period", "stringValue": "1 hour"},
|
{"key": "period", "stringValue": "1 hour"},
|
||||||
{"key": "endDateTime", "stringValue": "2012-12-21T18:00:00"},
|
{"key": "endDateTime", "stringValue": "2012-12-21T18:00:00"},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "SayHello",
|
"id": "SayHello",
|
||||||
"name": "SayHello",
|
"name": "SayHello",
|
||||||
"fields": [
|
"fields": [
|
||||||
{"key": "type", "stringValue": "ShellCommandActivity"},
|
{"key": "type", "stringValue": "ShellCommandActivity"},
|
||||||
{"key": "command", "stringValue": "echo hello"},
|
{"key": "command", "stringValue": "echo hello"},
|
||||||
{"key": "parent", "refValue": "Default"},
|
{"key": "parent", "refValue": "Default"},
|
||||||
{"key": "schedule", "refValue": "Schedule"},
|
{"key": "schedule", "refValue": "Schedule"},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
@mock_datapipeline_deprecated
|
@mock_datapipeline_deprecated
|
||||||
def test_creating_pipeline_definition():
|
def test_creating_pipeline_definition():
|
||||||
conn = boto.datapipeline.connect_to_region("us-west-2")
|
conn = boto.datapipeline.connect_to_region("us-west-2")
|
||||||
res = conn.create_pipeline("mypipeline", "some-unique-id")
|
res = conn.create_pipeline("mypipeline", "some-unique-id")
|
||||||
pipeline_id = res["pipelineId"]
|
pipeline_id = res["pipelineId"]
|
||||||
|
|
||||||
conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)
|
conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)
|
||||||
|
|
||||||
pipeline_definition = conn.get_pipeline_definition(pipeline_id)
|
pipeline_definition = conn.get_pipeline_definition(pipeline_id)
|
||||||
pipeline_definition["pipelineObjects"].should.have.length_of(3)
|
pipeline_definition["pipelineObjects"].should.have.length_of(3)
|
||||||
default_object = pipeline_definition["pipelineObjects"][0]
|
default_object = pipeline_definition["pipelineObjects"][0]
|
||||||
default_object["name"].should.equal("Default")
|
default_object["name"].should.equal("Default")
|
||||||
default_object["id"].should.equal("Default")
|
default_object["id"].should.equal("Default")
|
||||||
default_object["fields"].should.equal(
|
default_object["fields"].should.equal(
|
||||||
[{"key": "workerGroup", "stringValue": "workerGroup"}]
|
[{"key": "workerGroup", "stringValue": "workerGroup"}]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@mock_datapipeline_deprecated
|
@mock_datapipeline_deprecated
|
||||||
def test_describing_pipeline_objects():
|
def test_describing_pipeline_objects():
|
||||||
conn = boto.datapipeline.connect_to_region("us-west-2")
|
conn = boto.datapipeline.connect_to_region("us-west-2")
|
||||||
res = conn.create_pipeline("mypipeline", "some-unique-id")
|
res = conn.create_pipeline("mypipeline", "some-unique-id")
|
||||||
pipeline_id = res["pipelineId"]
|
pipeline_id = res["pipelineId"]
|
||||||
|
|
||||||
conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)
|
conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)
|
||||||
|
|
||||||
objects = conn.describe_objects(["Schedule", "Default"], pipeline_id)[
|
objects = conn.describe_objects(["Schedule", "Default"], pipeline_id)[
|
||||||
"pipelineObjects"
|
"pipelineObjects"
|
||||||
]
|
]
|
||||||
|
|
||||||
objects.should.have.length_of(2)
|
objects.should.have.length_of(2)
|
||||||
default_object = [x for x in objects if x["id"] == "Default"][0]
|
default_object = [x for x in objects if x["id"] == "Default"][0]
|
||||||
default_object["name"].should.equal("Default")
|
default_object["name"].should.equal("Default")
|
||||||
default_object["fields"].should.equal(
|
default_object["fields"].should.equal(
|
||||||
[{"key": "workerGroup", "stringValue": "workerGroup"}]
|
[{"key": "workerGroup", "stringValue": "workerGroup"}]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@mock_datapipeline_deprecated
|
@mock_datapipeline_deprecated
|
||||||
def test_activate_pipeline():
|
def test_activate_pipeline():
|
||||||
conn = boto.datapipeline.connect_to_region("us-west-2")
|
conn = boto.datapipeline.connect_to_region("us-west-2")
|
||||||
|
|
||||||
res = conn.create_pipeline("mypipeline", "some-unique-id")
|
res = conn.create_pipeline("mypipeline", "some-unique-id")
|
||||||
|
|
||||||
pipeline_id = res["pipelineId"]
|
pipeline_id = res["pipelineId"]
|
||||||
conn.activate_pipeline(pipeline_id)
|
conn.activate_pipeline(pipeline_id)
|
||||||
|
|
||||||
pipeline_descriptions = conn.describe_pipelines([pipeline_id])[
|
pipeline_descriptions = conn.describe_pipelines([pipeline_id])[
|
||||||
"pipelineDescriptionList"
|
"pipelineDescriptionList"
|
||||||
]
|
]
|
||||||
pipeline_descriptions.should.have.length_of(1)
|
pipeline_descriptions.should.have.length_of(1)
|
||||||
pipeline_description = pipeline_descriptions[0]
|
pipeline_description = pipeline_descriptions[0]
|
||||||
fields = pipeline_description["fields"]
|
fields = pipeline_description["fields"]
|
||||||
|
|
||||||
get_value_from_fields("@pipelineState", fields).should.equal("SCHEDULED")
|
get_value_from_fields("@pipelineState", fields).should.equal("SCHEDULED")
|
||||||
|
|
||||||
|
|
||||||
@mock_datapipeline_deprecated
|
@mock_datapipeline_deprecated
|
||||||
def test_delete_pipeline():
|
def test_delete_pipeline():
|
||||||
conn = boto.datapipeline.connect_to_region("us-west-2")
|
conn = boto.datapipeline.connect_to_region("us-west-2")
|
||||||
res = conn.create_pipeline("mypipeline", "some-unique-id")
|
res = conn.create_pipeline("mypipeline", "some-unique-id")
|
||||||
pipeline_id = res["pipelineId"]
|
pipeline_id = res["pipelineId"]
|
||||||
|
|
||||||
conn.delete_pipeline(pipeline_id)
|
conn.delete_pipeline(pipeline_id)
|
||||||
|
|
||||||
response = conn.list_pipelines()
|
response = conn.list_pipelines()
|
||||||
|
|
||||||
response["pipelineIdList"].should.have.length_of(0)
|
response["pipelineIdList"].should.have.length_of(0)
|
||||||
|
|
||||||
|
|
||||||
@mock_datapipeline_deprecated
|
@mock_datapipeline_deprecated
|
||||||
def test_listing_pipelines():
|
def test_listing_pipelines():
|
||||||
conn = boto.datapipeline.connect_to_region("us-west-2")
|
conn = boto.datapipeline.connect_to_region("us-west-2")
|
||||||
res1 = conn.create_pipeline("mypipeline1", "some-unique-id1")
|
res1 = conn.create_pipeline("mypipeline1", "some-unique-id1")
|
||||||
res2 = conn.create_pipeline("mypipeline2", "some-unique-id2")
|
res2 = conn.create_pipeline("mypipeline2", "some-unique-id2")
|
||||||
|
|
||||||
response = conn.list_pipelines()
|
response = conn.list_pipelines()
|
||||||
|
|
||||||
response["hasMoreResults"].should.be(False)
|
response["hasMoreResults"].should.be(False)
|
||||||
response["marker"].should.be.none
|
response["marker"].should.be.none
|
||||||
response["pipelineIdList"].should.have.length_of(2)
|
response["pipelineIdList"].should.have.length_of(2)
|
||||||
response["pipelineIdList"].should.contain(
|
response["pipelineIdList"].should.contain(
|
||||||
{"id": res1["pipelineId"], "name": "mypipeline1"}
|
{"id": res1["pipelineId"], "name": "mypipeline1"}
|
||||||
)
|
)
|
||||||
response["pipelineIdList"].should.contain(
|
response["pipelineIdList"].should.contain(
|
||||||
{"id": res2["pipelineId"], "name": "mypipeline2"}
|
{"id": res2["pipelineId"], "name": "mypipeline2"}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@mock_datapipeline_deprecated
|
@mock_datapipeline_deprecated
|
||||||
def test_listing_paginated_pipelines():
|
def test_listing_paginated_pipelines():
|
||||||
conn = boto.datapipeline.connect_to_region("us-west-2")
|
conn = boto.datapipeline.connect_to_region("us-west-2")
|
||||||
for i in range(100):
|
for i in range(100):
|
||||||
conn.create_pipeline("mypipeline%d" % i, "some-unique-id%d" % i)
|
conn.create_pipeline("mypipeline%d" % i, "some-unique-id%d" % i)
|
||||||
|
|
||||||
response = conn.list_pipelines()
|
response = conn.list_pipelines()
|
||||||
|
|
||||||
response["hasMoreResults"].should.be(True)
|
response["hasMoreResults"].should.be(True)
|
||||||
response["marker"].should.equal(response["pipelineIdList"][-1]["id"])
|
response["marker"].should.equal(response["pipelineIdList"][-1]["id"])
|
||||||
response["pipelineIdList"].should.have.length_of(50)
|
response["pipelineIdList"].should.have.length_of(50)
|
||||||
|
|
||||||
|
|
||||||
# testing a helper function
|
# testing a helper function
|
||||||
def test_remove_capitalization_of_dict_keys():
|
def test_remove_capitalization_of_dict_keys():
|
||||||
result = remove_capitalization_of_dict_keys(
|
result = remove_capitalization_of_dict_keys(
|
||||||
{
|
{
|
||||||
"Id": "IdValue",
|
"Id": "IdValue",
|
||||||
"Fields": [{"Key": "KeyValue", "StringValue": "StringValueValue"}],
|
"Fields": [{"Key": "KeyValue", "StringValue": "StringValueValue"}],
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
result.should.equal(
|
result.should.equal(
|
||||||
{
|
{
|
||||||
"id": "IdValue",
|
"id": "IdValue",
|
||||||
"fields": [{"key": "KeyValue", "stringValue": "StringValueValue"}],
|
"fields": [{"key": "KeyValue", "stringValue": "StringValueValue"}],
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
@ -1,470 +1,470 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
import boto
|
import boto
|
||||||
import sure # noqa
|
import sure # noqa
|
||||||
from freezegun import freeze_time
|
from freezegun import freeze_time
|
||||||
|
|
||||||
from moto import mock_dynamodb_deprecated
|
from moto import mock_dynamodb_deprecated
|
||||||
|
|
||||||
from boto.dynamodb import condition
|
from boto.dynamodb import condition
|
||||||
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError, DynamoDBValidationError
|
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError, DynamoDBValidationError
|
||||||
from boto.exception import DynamoDBResponseError
|
from boto.exception import DynamoDBResponseError
|
||||||
|
|
||||||
|
|
||||||
def create_table(conn):
|
def create_table(conn):
|
||||||
message_table_schema = conn.create_schema(
|
message_table_schema = conn.create_schema(
|
||||||
hash_key_name="forum_name",
|
hash_key_name="forum_name",
|
||||||
hash_key_proto_value=str,
|
hash_key_proto_value=str,
|
||||||
range_key_name="subject",
|
range_key_name="subject",
|
||||||
range_key_proto_value=str,
|
range_key_proto_value=str,
|
||||||
)
|
)
|
||||||
|
|
||||||
table = conn.create_table(
|
table = conn.create_table(
|
||||||
name="messages", schema=message_table_schema, read_units=10, write_units=10
|
name="messages", schema=message_table_schema, read_units=10, write_units=10
|
||||||
)
|
)
|
||||||
return table
|
return table
|
||||||
|
|
||||||
|
|
||||||
@freeze_time("2012-01-14")
|
@freeze_time("2012-01-14")
|
||||||
@mock_dynamodb_deprecated
|
@mock_dynamodb_deprecated
|
||||||
def test_create_table():
|
def test_create_table():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
create_table(conn)
|
create_table(conn)
|
||||||
|
|
||||||
expected = {
|
expected = {
|
||||||
"Table": {
|
"Table": {
|
||||||
"CreationDateTime": 1326499200.0,
|
"CreationDateTime": 1326499200.0,
|
||||||
"ItemCount": 0,
|
"ItemCount": 0,
|
||||||
"KeySchema": {
|
"KeySchema": {
|
||||||
"HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"},
|
"HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"},
|
||||||
"RangeKeyElement": {"AttributeName": "subject", "AttributeType": "S"},
|
"RangeKeyElement": {"AttributeName": "subject", "AttributeType": "S"},
|
||||||
},
|
},
|
||||||
"ProvisionedThroughput": {
|
"ProvisionedThroughput": {
|
||||||
"ReadCapacityUnits": 10,
|
"ReadCapacityUnits": 10,
|
||||||
"WriteCapacityUnits": 10,
|
"WriteCapacityUnits": 10,
|
||||||
},
|
},
|
||||||
"TableName": "messages",
|
"TableName": "messages",
|
||||||
"TableSizeBytes": 0,
|
"TableSizeBytes": 0,
|
||||||
"TableStatus": "ACTIVE",
|
"TableStatus": "ACTIVE",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
conn.describe_table("messages").should.equal(expected)
|
conn.describe_table("messages").should.equal(expected)
|
||||||
|
|
||||||
|
|
||||||
@mock_dynamodb_deprecated
|
@mock_dynamodb_deprecated
|
||||||
def test_delete_table():
|
def test_delete_table():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
create_table(conn)
|
create_table(conn)
|
||||||
conn.list_tables().should.have.length_of(1)
|
conn.list_tables().should.have.length_of(1)
|
||||||
|
|
||||||
conn.layer1.delete_table("messages")
|
conn.layer1.delete_table("messages")
|
||||||
conn.list_tables().should.have.length_of(0)
|
conn.list_tables().should.have.length_of(0)
|
||||||
|
|
||||||
conn.layer1.delete_table.when.called_with("messages").should.throw(
|
conn.layer1.delete_table.when.called_with("messages").should.throw(
|
||||||
DynamoDBResponseError
|
DynamoDBResponseError
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@mock_dynamodb_deprecated
|
@mock_dynamodb_deprecated
|
||||||
def test_update_table_throughput():
|
def test_update_table_throughput():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
table = create_table(conn)
|
table = create_table(conn)
|
||||||
table.read_units.should.equal(10)
|
table.read_units.should.equal(10)
|
||||||
table.write_units.should.equal(10)
|
table.write_units.should.equal(10)
|
||||||
|
|
||||||
table.update_throughput(5, 6)
|
table.update_throughput(5, 6)
|
||||||
table.refresh()
|
table.refresh()
|
||||||
|
|
||||||
table.read_units.should.equal(5)
|
table.read_units.should.equal(5)
|
||||||
table.write_units.should.equal(6)
|
table.write_units.should.equal(6)
|
||||||
|
|
||||||
|
|
||||||
@mock_dynamodb_deprecated
|
@mock_dynamodb_deprecated
|
||||||
def test_item_add_and_describe_and_update():
|
def test_item_add_and_describe_and_update():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
table = create_table(conn)
|
table = create_table(conn)
|
||||||
|
|
||||||
item_data = {
|
item_data = {
|
||||||
"Body": "http://url_to_lolcat.gif",
|
"Body": "http://url_to_lolcat.gif",
|
||||||
"SentBy": "User A",
|
"SentBy": "User A",
|
||||||
"ReceivedTime": "12/9/2011 11:36:03 PM",
|
"ReceivedTime": "12/9/2011 11:36:03 PM",
|
||||||
}
|
}
|
||||||
item = table.new_item(
|
item = table.new_item(
|
||||||
hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data
|
hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data
|
||||||
)
|
)
|
||||||
item.put()
|
item.put()
|
||||||
|
|
||||||
table.has_item("LOLCat Forum", "Check this out!").should.equal(True)
|
table.has_item("LOLCat Forum", "Check this out!").should.equal(True)
|
||||||
|
|
||||||
returned_item = table.get_item(
|
returned_item = table.get_item(
|
||||||
hash_key="LOLCat Forum",
|
hash_key="LOLCat Forum",
|
||||||
range_key="Check this out!",
|
range_key="Check this out!",
|
||||||
attributes_to_get=["Body", "SentBy"],
|
attributes_to_get=["Body", "SentBy"],
|
||||||
)
|
)
|
||||||
dict(returned_item).should.equal(
|
dict(returned_item).should.equal(
|
||||||
{
|
{
|
||||||
"forum_name": "LOLCat Forum",
|
"forum_name": "LOLCat Forum",
|
||||||
"subject": "Check this out!",
|
"subject": "Check this out!",
|
||||||
"Body": "http://url_to_lolcat.gif",
|
"Body": "http://url_to_lolcat.gif",
|
||||||
"SentBy": "User A",
|
"SentBy": "User A",
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
item["SentBy"] = "User B"
|
item["SentBy"] = "User B"
|
||||||
item.put()
|
item.put()
|
||||||
|
|
||||||
returned_item = table.get_item(
|
returned_item = table.get_item(
|
||||||
hash_key="LOLCat Forum",
|
hash_key="LOLCat Forum",
|
||||||
range_key="Check this out!",
|
range_key="Check this out!",
|
||||||
attributes_to_get=["Body", "SentBy"],
|
attributes_to_get=["Body", "SentBy"],
|
||||||
)
|
)
|
||||||
dict(returned_item).should.equal(
|
dict(returned_item).should.equal(
|
||||||
{
|
{
|
||||||
"forum_name": "LOLCat Forum",
|
"forum_name": "LOLCat Forum",
|
||||||
"subject": "Check this out!",
|
"subject": "Check this out!",
|
||||||
"Body": "http://url_to_lolcat.gif",
|
"Body": "http://url_to_lolcat.gif",
|
||||||
"SentBy": "User B",
|
"SentBy": "User B",
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@mock_dynamodb_deprecated
|
@mock_dynamodb_deprecated
|
||||||
def test_item_put_without_table():
|
def test_item_put_without_table():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
|
|
||||||
conn.layer1.put_item.when.called_with(
|
conn.layer1.put_item.when.called_with(
|
||||||
table_name="undeclared-table",
|
table_name="undeclared-table",
|
||||||
item=dict(hash_key="LOLCat Forum", range_key="Check this out!"),
|
item=dict(hash_key="LOLCat Forum", range_key="Check this out!"),
|
||||||
).should.throw(DynamoDBResponseError)
|
).should.throw(DynamoDBResponseError)
|
||||||
|
|
||||||
|
|
||||||
@mock_dynamodb_deprecated
|
@mock_dynamodb_deprecated
|
||||||
def test_get_missing_item():
|
def test_get_missing_item():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
table = create_table(conn)
|
table = create_table(conn)
|
||||||
|
|
||||||
table.get_item.when.called_with(hash_key="tester", range_key="other").should.throw(
|
table.get_item.when.called_with(hash_key="tester", range_key="other").should.throw(
|
||||||
DynamoDBKeyNotFoundError
|
DynamoDBKeyNotFoundError
|
||||||
)
|
)
|
||||||
table.has_item("foobar", "more").should.equal(False)
|
table.has_item("foobar", "more").should.equal(False)
|
||||||
|
|
||||||
|
|
||||||
@mock_dynamodb_deprecated
|
@mock_dynamodb_deprecated
|
||||||
def test_get_item_with_undeclared_table():
|
def test_get_item_with_undeclared_table():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
|
|
||||||
conn.layer1.get_item.when.called_with(
|
conn.layer1.get_item.when.called_with(
|
||||||
table_name="undeclared-table",
|
table_name="undeclared-table",
|
||||||
key={"HashKeyElement": {"S": "tester"}, "RangeKeyElement": {"S": "test-range"}},
|
key={"HashKeyElement": {"S": "tester"}, "RangeKeyElement": {"S": "test-range"}},
|
||||||
).should.throw(DynamoDBKeyNotFoundError)
|
).should.throw(DynamoDBKeyNotFoundError)
|
||||||
|
|
||||||
|
|
||||||
@mock_dynamodb_deprecated
|
@mock_dynamodb_deprecated
|
||||||
def test_get_item_without_range_key():
|
def test_get_item_without_range_key():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
message_table_schema = conn.create_schema(
|
message_table_schema = conn.create_schema(
|
||||||
hash_key_name="test_hash",
|
hash_key_name="test_hash",
|
||||||
hash_key_proto_value=int,
|
hash_key_proto_value=int,
|
||||||
range_key_name="test_range",
|
range_key_name="test_range",
|
||||||
range_key_proto_value=int,
|
range_key_proto_value=int,
|
||||||
)
|
)
|
||||||
table = conn.create_table(
|
table = conn.create_table(
|
||||||
name="messages", schema=message_table_schema, read_units=10, write_units=10
|
name="messages", schema=message_table_schema, read_units=10, write_units=10
|
||||||
)
|
)
|
||||||
|
|
||||||
hash_key = 3241526475
|
hash_key = 3241526475
|
||||||
range_key = 1234567890987
|
range_key = 1234567890987
|
||||||
new_item = table.new_item(hash_key=hash_key, range_key=range_key)
|
new_item = table.new_item(hash_key=hash_key, range_key=range_key)
|
||||||
new_item.put()

    table.get_item.when.called_with(hash_key=hash_key).should.throw(
        DynamoDBValidationError
    )


@mock_dynamodb_deprecated
def test_delete_item():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(
        hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data
    )
    item.put()

    table.refresh()
    table.item_count.should.equal(1)

    response = item.delete()
    response.should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5})
    table.refresh()
    table.item_count.should.equal(0)

    item.delete.when.called_with().should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_delete_item_with_attribute_response():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(
        hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data
    )
    item.put()

    table.refresh()
    table.item_count.should.equal(1)

    response = item.delete(return_values="ALL_OLD")
    response.should.equal(
        {
            "Attributes": {
                "Body": "http://url_to_lolcat.gif",
                "forum_name": "LOLCat Forum",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
                "SentBy": "User A",
                "subject": "Check this out!",
            },
            "ConsumedCapacityUnits": 0.5,
        }
    )
    table.refresh()
    table.item_count.should.equal(0)

    item.delete.when.called_with().should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_delete_item_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.delete_item.when.called_with(
        table_name="undeclared-table",
        key={"HashKeyElement": {"S": "tester"}, "RangeKeyElement": {"S": "test-range"}},
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_query():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data)
    item.put()

    item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data)
    item.put()

    item = table.new_item(hash_key="the-key", range_key="789", attrs=item_data)
    item.put()

    results = table.query(hash_key="the-key", range_key_condition=condition.GT("1"))
    results.response["Items"].should.have.length_of(3)

    results = table.query(hash_key="the-key", range_key_condition=condition.GT("234"))
    results.response["Items"].should.have.length_of(2)

    results = table.query(hash_key="the-key", range_key_condition=condition.GT("9999"))
    results.response["Items"].should.have.length_of(0)

    results = table.query(
        hash_key="the-key", range_key_condition=condition.CONTAINS("12")
    )
    results.response["Items"].should.have.length_of(1)

    results = table.query(
        hash_key="the-key", range_key_condition=condition.BEGINS_WITH("7")
    )
    results.response["Items"].should.have.length_of(1)

    results = table.query(
        hash_key="the-key", range_key_condition=condition.BETWEEN("567", "890")
    )
    results.response["Items"].should.have.length_of(1)


@mock_dynamodb_deprecated
def test_query_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.query.when.called_with(
        table_name="undeclared-table",
        hash_key_value={"S": "the-key"},
        range_key_conditions={
            "AttributeValueList": [{"S": "User B"}],
            "ComparisonOperator": "EQ",
        },
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_scan():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data)
    item.put()

    item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data)
    item.put()

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User B",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
        "Ids": set([1, 2, 3]),
        "PK": 7,
    }
    item = table.new_item(hash_key="the-key", range_key="789", attrs=item_data)
    item.put()

    results = table.scan()
    results.response["Items"].should.have.length_of(3)

    results = table.scan(scan_filter={"SentBy": condition.EQ("User B")})
    results.response["Items"].should.have.length_of(1)

    results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")})
    results.response["Items"].should.have.length_of(3)

    results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)})
    results.response["Items"].should.have.length_of(1)

    results = table.scan(scan_filter={"Ids": condition.NOT_NULL()})
    results.response["Items"].should.have.length_of(1)

    results = table.scan(scan_filter={"Ids": condition.NULL()})
    results.response["Items"].should.have.length_of(2)

    results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)})
    results.response["Items"].should.have.length_of(0)

    results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)})
    results.response["Items"].should.have.length_of(1)


@mock_dynamodb_deprecated
def test_scan_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.scan.when.called_with(
        table_name="undeclared-table",
        scan_filter={
            "SentBy": {
                "AttributeValueList": [{"S": "User B"}],
                "ComparisonOperator": "EQ",
            }
        },
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_scan_after_has_item():
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    list(table.scan()).should.equal([])

    table.has_item(hash_key="the-key", range_key="123")

    list(table.scan()).should.equal([])


@mock_dynamodb_deprecated
def test_write_batch():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    batch_list = conn.new_batch_write_list()

    items = []
    items.append(
        table.new_item(
            hash_key="the-key",
            range_key="123",
            attrs={
                "Body": "http://url_to_lolcat.gif",
                "SentBy": "User A",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
            },
        )
    )

    items.append(
        table.new_item(
            hash_key="the-key",
            range_key="789",
            attrs={
                "Body": "http://url_to_lolcat.gif",
                "SentBy": "User B",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
                "Ids": set([1, 2, 3]),
                "PK": 7,
            },
        )
    )

    batch_list.add_batch(table, puts=items)
    conn.batch_write_item(batch_list)

    table.refresh()
    table.item_count.should.equal(2)

    batch_list = conn.new_batch_write_list()
    batch_list.add_batch(table, deletes=[("the-key", "789")])
    conn.batch_write_item(batch_list)

    table.refresh()
    table.item_count.should.equal(1)


@mock_dynamodb_deprecated
def test_batch_read():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data)
    item.put()

    item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data)
    item.put()

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User B",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
        "Ids": set([1, 2, 3]),
        "PK": 7,
    }
    item = table.new_item(hash_key="another-key", range_key="789", attrs=item_data)
    item.put()

    items = table.batch_get_item([("the-key", "123"), ("another-key", "789")])
    # Iterate through so that batch_item gets called
    count = len([x for x in items])
    count.should.equal(2)

@@ -1,390 +1,390 @@
from __future__ import unicode_literals

import boto
import sure  # noqa
from freezegun import freeze_time

from moto import mock_dynamodb_deprecated

from boto.dynamodb import condition
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError
from boto.exception import DynamoDBResponseError


def create_table(conn):
    message_table_schema = conn.create_schema(
        hash_key_name="forum_name", hash_key_proto_value=str
    )

    table = conn.create_table(
        name="messages", schema=message_table_schema, read_units=10, write_units=10
    )
    return table


@freeze_time("2012-01-14")
@mock_dynamodb_deprecated
def test_create_table():
    conn = boto.connect_dynamodb()
    create_table(conn)

    expected = {
        "Table": {
            "CreationDateTime": 1326499200.0,
            "ItemCount": 0,
            "KeySchema": {
                "HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"}
            },
            "ProvisionedThroughput": {
                "ReadCapacityUnits": 10,
                "WriteCapacityUnits": 10,
            },
            "TableName": "messages",
            "TableSizeBytes": 0,
            "TableStatus": "ACTIVE",
        }
    }
    conn.describe_table("messages").should.equal(expected)


@mock_dynamodb_deprecated
def test_delete_table():
    conn = boto.connect_dynamodb()
    create_table(conn)
    conn.list_tables().should.have.length_of(1)

    conn.layer1.delete_table("messages")
    conn.list_tables().should.have.length_of(0)

    conn.layer1.delete_table.when.called_with("messages").should.throw(
        DynamoDBResponseError
    )


@mock_dynamodb_deprecated
def test_update_table_throughput():
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    table.read_units.should.equal(10)
    table.write_units.should.equal(10)

    table.update_throughput(5, 6)
    table.refresh()

    table.read_units.should.equal(5)
    table.write_units.should.equal(6)


@mock_dynamodb_deprecated
def test_item_add_and_describe_and_update():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="LOLCat Forum", attrs=item_data)
    item.put()

    returned_item = table.get_item(
        hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"]
    )
    dict(returned_item).should.equal(
        {
            "forum_name": "LOLCat Forum",
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User A",
        }
    )

    item["SentBy"] = "User B"
    item.put()

    returned_item = table.get_item(
        hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"]
    )
    dict(returned_item).should.equal(
        {
            "forum_name": "LOLCat Forum",
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User B",
        }
    )


@mock_dynamodb_deprecated
def test_item_put_without_table():
    conn = boto.connect_dynamodb()

    conn.layer1.put_item.when.called_with(
        table_name="undeclared-table", item=dict(hash_key="LOLCat Forum")
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_get_missing_item():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    table.get_item.when.called_with(hash_key="tester").should.throw(
        DynamoDBKeyNotFoundError
    )


@mock_dynamodb_deprecated
def test_get_item_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.get_item.when.called_with(
        table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}}
    ).should.throw(DynamoDBKeyNotFoundError)


@mock_dynamodb_deprecated
def test_delete_item():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="LOLCat Forum", attrs=item_data)
    item.put()

    table.refresh()
    table.item_count.should.equal(1)

    response = item.delete()
    response.should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5})
    table.refresh()
    table.item_count.should.equal(0)

    item.delete.when.called_with().should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_delete_item_with_attribute_response():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="LOLCat Forum", attrs=item_data)
    item.put()

    table.refresh()
    table.item_count.should.equal(1)

    response = item.delete(return_values="ALL_OLD")
    response.should.equal(
        {
            "Attributes": {
                "Body": "http://url_to_lolcat.gif",
                "forum_name": "LOLCat Forum",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
                "SentBy": "User A",
            },
            "ConsumedCapacityUnits": 0.5,
        }
    )
    table.refresh()
    table.item_count.should.equal(0)

    item.delete.when.called_with().should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_delete_item_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.delete_item.when.called_with(
        table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}}
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_query():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", attrs=item_data)
    item.put()

    results = table.query(hash_key="the-key")
    results.response["Items"].should.have.length_of(1)


@mock_dynamodb_deprecated
def test_query_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.query.when.called_with(
        table_name="undeclared-table", hash_key_value={"S": "the-key"}
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_scan():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", attrs=item_data)
    item.put()

    item = table.new_item(hash_key="the-key2", attrs=item_data)
    item.put()

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User B",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
        "Ids": set([1, 2, 3]),
        "PK": 7,
    }
    item = table.new_item(hash_key="the-key3", attrs=item_data)
    item.put()

    results = table.scan()
    results.response["Items"].should.have.length_of(3)

    results = table.scan(scan_filter={"SentBy": condition.EQ("User B")})
    results.response["Items"].should.have.length_of(1)

    results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")})
    results.response["Items"].should.have.length_of(3)

    results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)})
    results.response["Items"].should.have.length_of(1)

    results = table.scan(scan_filter={"Ids": condition.NOT_NULL()})
    results.response["Items"].should.have.length_of(1)

    results = table.scan(scan_filter={"Ids": condition.NULL()})
    results.response["Items"].should.have.length_of(2)

    results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)})
    results.response["Items"].should.have.length_of(0)

    results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)})
    results.response["Items"].should.have.length_of(1)


@mock_dynamodb_deprecated
def test_scan_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.scan.when.called_with(
        table_name="undeclared-table",
        scan_filter={
            "SentBy": {
                "AttributeValueList": [{"S": "User B"}],
                "ComparisonOperator": "EQ",
            }
        },
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_scan_after_has_item():
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    list(table.scan()).should.equal([])

    table.has_item("the-key")

    list(table.scan()).should.equal([])


@mock_dynamodb_deprecated
def test_write_batch():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    batch_list = conn.new_batch_write_list()

    items = []
    items.append(
        table.new_item(
            hash_key="the-key",
            attrs={
                "Body": "http://url_to_lolcat.gif",
                "SentBy": "User A",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
            },
        )
    )

    items.append(
        table.new_item(
            hash_key="the-key2",
            attrs={
                "Body": "http://url_to_lolcat.gif",
                "SentBy": "User B",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
                "Ids": set([1, 2, 3]),
                "PK": 7,
            },
        )
    )

    batch_list.add_batch(table, puts=items)
    conn.batch_write_item(batch_list)

    table.refresh()
    table.item_count.should.equal(2)

    batch_list = conn.new_batch_write_list()
    batch_list.add_batch(table, deletes=[("the-key")])
    conn.batch_write_item(batch_list)

    table.refresh()
    table.item_count.should.equal(1)


@mock_dynamodb_deprecated
def test_batch_read():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key1", attrs=item_data)
    item.put()

    item = table.new_item(hash_key="the-key2", attrs=item_data)
    item.put()

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User B",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
        "Ids": set([1, 2, 3]),
        "PK": 7,
    }
    item = table.new_item(hash_key="another-key", attrs=item_data)
    item.put()

    items = table.batch_get_item([("the-key1"), ("another-key")])
    # Iterate through so that batch_item gets called
    count = len([x for x in items])
    count.should.have.equal(2)

@@ -3609,6 +3609,31 @@ def test_update_supports_list_append_maps():
    )


@mock_dynamodb2
def test_update_supports_list_append_with_nested_if_not_exists_operation():
    dynamo = boto3.resource("dynamodb", region_name="us-west-1")
    table_name = "test"

    dynamo.create_table(
        TableName=table_name,
        AttributeDefinitions=[{"AttributeName": "Id", "AttributeType": "S"}],
        KeySchema=[{"AttributeName": "Id", "KeyType": "HASH"}],
        ProvisionedThroughput={"ReadCapacityUnits": 20, "WriteCapacityUnits": 20},
    )

    table = dynamo.Table(table_name)

    table.put_item(Item={"Id": "item-id", "nest1": {"nest2": {}}})
    table.update_item(
        Key={"Id": "item-id"},
        UpdateExpression="SET nest1.nest2.event_history = list_append(if_not_exists(nest1.nest2.event_history, :empty_list), :new_value)",
        ExpressionAttributeValues={":empty_list": [], ":new_value": ["some_value"]},
    )
    table.get_item(Key={"Id": "item-id"})["Item"].should.equal(
        {"Id": "item-id", "nest1": {"nest2": {"event_history": ["some_value"]}}}
    )


@mock_dynamodb2
def test_update_catches_invalid_list_append_operation():
    client = boto3.client("dynamodb", region_name="us-east-1")

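The new test above exercises DynamoDB's list_append(if_not_exists(...)) update idiom, which appends to a nested list attribute even when that attribute does not exist yet. A minimal sketch of the same pattern as a reusable helper, assuming a boto3 Table resource such as the one created in the test; the helper name, item key, and document path used here are illustrative, not part of the change:

def append_event(table, item_id, event, path="nest1.nest2.event_history"):
    # if_not_exists() falls back to :empty when the list attribute is absent,
    # so the very first append also creates the attribute.
    table.update_item(
        Key={"Id": item_id},
        UpdateExpression=(
            "SET {path} = list_append(if_not_exists({path}, :empty), :new)".format(path=path)
        ),
        ExpressionAttributeValues={":empty": [], ":new": [event]},
    )

# Illustrative usage against the moto-backed table from the test above:
# table = boto3.resource("dynamodb", region_name="us-west-1").Table("test")
# append_event(table, "item-id", {"type": "created"})
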
@@ -1,37 +1,37 @@
from __future__ import unicode_literals
import boto3
from moto import mock_ec2
import sure  # noqa


@mock_ec2
def test_describe_account_attributes():
    conn = boto3.client("ec2", region_name="us-east-1")
    response = conn.describe_account_attributes()
    expected_attribute_values = [
        {
            "AttributeValues": [{"AttributeValue": "5"}],
            "AttributeName": "vpc-max-security-groups-per-interface",
        },
        {
            "AttributeValues": [{"AttributeValue": "20"}],
            "AttributeName": "max-instances",
        },
        {
            "AttributeValues": [{"AttributeValue": "EC2"}, {"AttributeValue": "VPC"}],
            "AttributeName": "supported-platforms",
        },
        {
            "AttributeValues": [{"AttributeValue": "none"}],
            "AttributeName": "default-vpc",
        },
        {
            "AttributeValues": [{"AttributeValue": "5"}],
            "AttributeName": "max-elastic-ips",
        },
        {
            "AttributeValues": [{"AttributeValue": "5"}],
            "AttributeName": "vpc-max-elastic-ips",
        },
    ]
    response["AccountAttributes"].should.equal(expected_attribute_values)

@@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2


@mock_ec2
def test_amazon_dev_pay():
    pass

@@ -12,6 +12,7 @@ import sure  # noqa
 
 from moto import mock_ec2_deprecated, mock_ec2
 from moto.ec2.models import AMIS, OWNER_ID
+from moto.iam.models import ACCOUNT_ID
 from tests.helpers import requires_boto_gte
 
 
@@ -251,6 +252,19 @@ def test_ami_pulls_attributes_from_instance():
     image.kernel_id.should.equal("test-kernel")
 
 
+@mock_ec2_deprecated
+def test_ami_uses_account_id_if_valid_access_key_is_supplied():
+    access_key = "AKIAXXXXXXXXXXXXXXXX"
+    conn = boto.connect_ec2(access_key, "the_secret")
+    reservation = conn.run_instances("ami-1234abcd")
+    instance = reservation.instances[0]
+    instance.modify_attribute("kernel", "test-kernel")
+
+    image_id = conn.create_image(instance.id, "test-ami", "this is a test ami")
+    images = conn.get_all_images(owners=["self"])
+    [(ami.id, ami.owner_id) for ami in images].should.equal([(image_id, ACCOUNT_ID)])
+
+
 @mock_ec2_deprecated
 def test_ami_filters():
     conn = boto.connect_ec2("the_key", "the_secret")

@@ -773,7 +787,7 @@ def test_ami_filter_wildcard():
     instance.create_image(Name="not-matching-image")
 
     my_images = ec2_client.describe_images(
-        Owners=["111122223333"], Filters=[{"Name": "name", "Values": ["test*"]}]
+        Owners=[ACCOUNT_ID], Filters=[{"Name": "name", "Values": ["test*"]}]
     )["Images"]
     my_images.should.have.length_of(1)
 

@@ -1 +1 @@
from __future__ import unicode_literals

@@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2


@mock_ec2
def test_ip_addresses():
    pass

@@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2


@mock_ec2
def test_monitoring():
    pass

@@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2


@mock_ec2
def test_placement_groups():
    pass

@@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2


@mock_ec2
def test_reserved_instances():
    pass

@@ -1,96 +1,96 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2_deprecated


@mock_ec2_deprecated
def test_virtual_private_gateways():
    conn = boto.connect_vpc("the_key", "the_secret")

    vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
    vpn_gateway.should_not.be.none
    vpn_gateway.id.should.match(r"vgw-\w+")
    vpn_gateway.type.should.equal("ipsec.1")
    vpn_gateway.state.should.equal("available")
    vpn_gateway.availability_zone.should.equal("us-east-1a")


@mock_ec2_deprecated
def test_describe_vpn_gateway():
    conn = boto.connect_vpc("the_key", "the_secret")
    vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")

    vgws = conn.get_all_vpn_gateways()
    vgws.should.have.length_of(1)

    gateway = vgws[0]
    gateway.id.should.match(r"vgw-\w+")
    gateway.id.should.equal(vpn_gateway.id)
    vpn_gateway.type.should.equal("ipsec.1")
    vpn_gateway.state.should.equal("available")
    vpn_gateway.availability_zone.should.equal("us-east-1a")


@mock_ec2_deprecated
def test_vpn_gateway_vpc_attachment():
    conn = boto.connect_vpc("the_key", "the_secret")
    vpc = conn.create_vpc("10.0.0.0/16")
    vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")

    conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id)

    gateway = conn.get_all_vpn_gateways()[0]
    attachments = gateway.attachments
    attachments.should.have.length_of(1)
    attachments[0].vpc_id.should.equal(vpc.id)
    attachments[0].state.should.equal("attached")


@mock_ec2_deprecated
def test_delete_vpn_gateway():
    conn = boto.connect_vpc("the_key", "the_secret")
    vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")

    conn.delete_vpn_gateway(vpn_gateway.id)
    vgws = conn.get_all_vpn_gateways()
    vgws.should.have.length_of(0)


@mock_ec2_deprecated
def test_vpn_gateway_tagging():
    conn = boto.connect_vpc("the_key", "the_secret")
    vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
    vpn_gateway.add_tag("a key", "some value")

    tag = conn.get_all_tags()[0]
    tag.name.should.equal("a key")
    tag.value.should.equal("some value")

    # Refresh the subnet
    vpn_gateway = conn.get_all_vpn_gateways()[0]
    vpn_gateway.tags.should.have.length_of(1)
    vpn_gateway.tags["a key"].should.equal("some value")


@mock_ec2_deprecated
def test_detach_vpn_gateway():

    conn = boto.connect_vpc("the_key", "the_secret")
    vpc = conn.create_vpc("10.0.0.0/16")
    vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")

    conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id)

    gateway = conn.get_all_vpn_gateways()[0]
    attachments = gateway.attachments
    attachments.should.have.length_of(1)
    attachments[0].vpc_id.should.equal(vpc.id)
    attachments[0].state.should.equal("attached")

    conn.detach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id)

    gateway = conn.get_all_vpn_gateways()[0]
    attachments = gateway.attachments
    attachments.should.have.length_of(0)

@@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2


@mock_ec2
def test_vm_export():
    pass

@@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2


@mock_ec2
def test_vm_import():
    pass

@@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2


@mock_ec2
def test_windows():
    pass

@@ -1,12 +1,15 @@
-import random
-import boto3
 import json
-import sure  # noqa
+import random
+import unittest
 
-from moto.events import mock_events
+import boto3
 from botocore.exceptions import ClientError
 from nose.tools import assert_raises
 
 from moto.core import ACCOUNT_ID
+from moto.core.exceptions import JsonRESTError
+from moto.events import mock_events
+from moto.events.models import EventsBackend
 
 RULES = [
     {"Name": "test1", "ScheduleExpression": "rate(5 minutes)"},

@@ -136,14 +139,6 @@ def test_list_rule_names_by_target():
     assert rule in test_2_target["Rules"]
 
 
-@mock_events
-def test_list_rules():
-    client = generate_environment()
-
-    rules = client.list_rules()
-    assert len(rules["Rules"]) == len(RULES)
-
-
 @mock_events
 def test_delete_rule():
     client = generate_environment()

@@ -461,3 +456,50 @@ def test_delete_event_bus_errors():
     client.delete_event_bus.when.called_with(Name="default").should.throw(
         ClientError, "Cannot delete event bus default."
     )
+
+
+@mock_events
+def test_rule_tagging_happy():
+    client = generate_environment()
+    rule_name = get_random_rule()["Name"]
+    rule_arn = client.describe_rule(Name=rule_name).get("Arn")
+
+    tags = [{"Key": "key1", "Value": "value1"}, {"Key": "key2", "Value": "value2"}]
+    client.tag_resource(ResourceARN=rule_arn, Tags=tags)
+
+    actual = client.list_tags_for_resource(ResourceARN=rule_arn).get("Tags")
+    tc = unittest.TestCase("__init__")
+    expected = [{"Value": "value1", "Key": "key1"}, {"Value": "value2", "Key": "key2"}]
+    tc.assertTrue(
+        (expected[0] == actual[0] and expected[1] == actual[1])
+        or (expected[1] == actual[0] and expected[0] == actual[1])
+    )
+
+    client.untag_resource(ResourceARN=rule_arn, TagKeys=["key1"])
+
+    actual = client.list_tags_for_resource(ResourceARN=rule_arn).get("Tags")
+    expected = [{"Key": "key2", "Value": "value2"}]
+    assert expected == actual
+
+
+@mock_events
+def test_rule_tagging_sad():
+    back_end = EventsBackend("us-west-2")
+
+    try:
+        back_end.tag_resource("unknown", [])
+        raise "tag_resource should fail if ResourceARN is not known"
+    except JsonRESTError:
+        pass
+
+    try:
+        back_end.untag_resource("unknown", [])
+        raise "untag_resource should fail if ResourceARN is not known"
+    except JsonRESTError:
+        pass
+
+    try:
+        back_end.list_tags_for_resource("unknown")
+        raise "list_tags_for_resource should fail if ResourceARN is not known"
+    except JsonRESTError:
+        pass

@ -1,21 +1,21 @@
from __future__ import unicode_literals

from tempfile import NamedTemporaryFile
import boto.glacier
import sure  # noqa

from moto import mock_glacier_deprecated


@mock_glacier_deprecated
def test_create_and_delete_archive():
    the_file = NamedTemporaryFile(delete=False)
    the_file.write(b"some stuff")
    the_file.close()

    conn = boto.glacier.connect_to_region("us-west-2")
    vault = conn.create_vault("my_vault")

    archive_id = vault.upload_archive(the_file.name)

    vault.delete_archive(archive_id)
@ -1,31 +1,31 @@
from __future__ import unicode_literals

import boto.glacier
import sure  # noqa

from moto import mock_glacier_deprecated


@mock_glacier_deprecated
def test_create_vault():
    conn = boto.glacier.connect_to_region("us-west-2")

    conn.create_vault("my_vault")

    vaults = conn.list_vaults()
    vaults.should.have.length_of(1)
    vaults[0].name.should.equal("my_vault")


@mock_glacier_deprecated
def test_delete_vault():
    conn = boto.glacier.connect_to_region("us-west-2")

    conn.create_vault("my_vault")

    vaults = conn.list_vaults()
    vaults.should.have.length_of(1)

    conn.delete_vault("my_vault")
    vaults = conn.list_vaults()
    vaults.should.have.length_of(0)
@ -1 +1 @@
from __future__ import unicode_literals
@ -1 +1 @@
from __future__ import unicode_literals
@ -1,97 +1,97 @@
from __future__ import unicode_literals

import copy

from .fixtures.datacatalog import TABLE_INPUT, PARTITION_INPUT


def create_database(client, database_name):
    return client.create_database(DatabaseInput={"Name": database_name})


def get_database(client, database_name):
    return client.get_database(Name=database_name)


def create_table_input(database_name, table_name, columns=[], partition_keys=[]):
    table_input = copy.deepcopy(TABLE_INPUT)
    table_input["Name"] = table_name
    table_input["PartitionKeys"] = partition_keys
    table_input["StorageDescriptor"]["Columns"] = columns
    table_input["StorageDescriptor"][
        "Location"
    ] = "s3://my-bucket/{database_name}/{table_name}".format(
        database_name=database_name, table_name=table_name
    )
    return table_input


def create_table(client, database_name, table_name, table_input=None, **kwargs):
    if table_input is None:
        table_input = create_table_input(database_name, table_name, **kwargs)

    return client.create_table(DatabaseName=database_name, TableInput=table_input)


def update_table(client, database_name, table_name, table_input=None, **kwargs):
    if table_input is None:
        table_input = create_table_input(database_name, table_name, **kwargs)

    return client.update_table(DatabaseName=database_name, TableInput=table_input)


def get_table(client, database_name, table_name):
    return client.get_table(DatabaseName=database_name, Name=table_name)


def get_tables(client, database_name):
    return client.get_tables(DatabaseName=database_name)


def get_table_versions(client, database_name, table_name):
    return client.get_table_versions(DatabaseName=database_name, TableName=table_name)


def get_table_version(client, database_name, table_name, version_id):
    return client.get_table_version(
        DatabaseName=database_name, TableName=table_name, VersionId=version_id
    )


def create_partition_input(database_name, table_name, values=[], columns=[]):
    root_path = "s3://my-bucket/{database_name}/{table_name}".format(
        database_name=database_name, table_name=table_name
    )

    part_input = copy.deepcopy(PARTITION_INPUT)
    part_input["Values"] = values
    part_input["StorageDescriptor"]["Columns"] = columns
    part_input["StorageDescriptor"]["SerdeInfo"]["Parameters"]["path"] = root_path
    return part_input


def create_partition(client, database_name, table_name, partiton_input=None, **kwargs):
    if partiton_input is None:
        partiton_input = create_partition_input(database_name, table_name, **kwargs)
    return client.create_partition(
        DatabaseName=database_name, TableName=table_name, PartitionInput=partiton_input
    )


def update_partition(
    client, database_name, table_name, old_values=[], partiton_input=None, **kwargs
):
    if partiton_input is None:
        partiton_input = create_partition_input(database_name, table_name, **kwargs)
    return client.update_partition(
        DatabaseName=database_name,
        TableName=table_name,
        PartitionInput=partiton_input,
        PartitionValueList=old_values,
    )


def get_partition(client, database_name, table_name, values):
    return client.get_partition(
        DatabaseName=database_name, TableName=table_name, PartitionValues=values
    )

@ -9,6 +9,173 @@ from botocore.exceptions import ClientError
from nose.tools import assert_raises


@mock_iot
def test_attach_policy():
    client = boto3.client("iot", region_name="ap-northeast-1")
    policy_name = "my-policy"
    doc = "{}"

    cert = client.create_keys_and_certificate(setAsActive=True)
    cert_arn = cert["certificateArn"]
    client.create_policy(policyName=policy_name, policyDocument=doc)
    client.attach_policy(policyName=policy_name, target=cert_arn)

    res = client.list_attached_policies(target=cert_arn)
    res.should.have.key("policies").which.should.have.length_of(1)
    res["policies"][0]["policyName"].should.equal("my-policy")


@mock_iot
def test_detach_policy():
    client = boto3.client("iot", region_name="ap-northeast-1")
    policy_name = "my-policy"
    doc = "{}"

    cert = client.create_keys_and_certificate(setAsActive=True)
    cert_arn = cert["certificateArn"]
    client.create_policy(policyName=policy_name, policyDocument=doc)
    client.attach_policy(policyName=policy_name, target=cert_arn)

    res = client.list_attached_policies(target=cert_arn)
    res.should.have.key("policies").which.should.have.length_of(1)
    res["policies"][0]["policyName"].should.equal("my-policy")

    client.detach_policy(policyName=policy_name, target=cert_arn)
    res = client.list_attached_policies(target=cert_arn)
    res.should.have.key("policies").which.should.be.empty


@mock_iot
def test_list_attached_policies():
    client = boto3.client("iot", region_name="ap-northeast-1")
    cert = client.create_keys_and_certificate(setAsActive=True)
    policies = client.list_attached_policies(target=cert["certificateArn"])
    policies["policies"].should.be.empty


@mock_iot
def test_policy_versions():
    client = boto3.client("iot", region_name="ap-northeast-1")
    policy_name = "my-policy"
    doc = "{}"

    policy = client.create_policy(policyName=policy_name, policyDocument=doc)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(json.dumps({}))
    policy.should.have.key("policyVersionId").which.should.equal("1")

    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(json.dumps({}))
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy["defaultVersionId"]
    )

    policy1 = client.create_policy_version(
        policyName=policy_name,
        policyDocument=json.dumps({"version": "version_1"}),
        setAsDefault=True,
    )
    policy1.should.have.key("policyArn").which.should_not.be.none
    policy1.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_1"})
    )
    policy1.should.have.key("policyVersionId").which.should.equal("2")
    policy1.should.have.key("isDefaultVersion").which.should.equal(True)

    policy2 = client.create_policy_version(
        policyName=policy_name,
        policyDocument=json.dumps({"version": "version_2"}),
        setAsDefault=False,
    )
    policy2.should.have.key("policyArn").which.should_not.be.none
    policy2.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_2"})
    )
    policy2.should.have.key("policyVersionId").which.should.equal("3")
    policy2.should.have.key("isDefaultVersion").which.should.equal(False)

    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_1"})
    )
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy1["policyVersionId"]
    )

    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(3)
    list(
        map(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    ).count(True).should.equal(1)
    default_policy = list(
        filter(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    )
    default_policy[0].should.have.key("versionId").should.equal(
        policy1["policyVersionId"]
    )

    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_1"})
    )
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy1["policyVersionId"]
    )

    client.set_default_policy_version(
        policyName=policy_name, policyVersionId=policy2["policyVersionId"]
    )
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(3)
    list(
        map(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    ).count(True).should.equal(1)
    default_policy = list(
        filter(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    )
    default_policy[0].should.have.key("versionId").should.equal(
        policy2["policyVersionId"]
    )

    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_2"})
    )
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy2["policyVersionId"]
    )

    client.delete_policy_version(policyName=policy_name, policyVersionId="1")
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(2)

    client.delete_policy_version(
        policyName=policy_name, policyVersionId=policy1["policyVersionId"]
    )
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(1)

    # should fail as it's the default policy. Should use delete_policy instead
    try:
        client.delete_policy_version(
            policyName=policy_name, policyVersionId=policy2["policyVersionId"]
        )
        assert False, "Should have failed in previous call"
    except Exception as exception:
        exception.response["Error"]["Message"].should.equal(
            "Cannot delete the default version of a policy"
        )


@mock_iot
def test_things():
    client = boto3.client("iot", region_name="ap-northeast-1")
@ -994,7 +1161,10 @@ def test_create_job():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    # job document
    # job_document = {
    #     "field": "value"
    # }
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")
@ -1020,6 +1190,63 @@ def test_create_job():
    job.should.have.key("description")


@mock_iot
def test_list_jobs():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    # job document
    # job_document = {
    #     "field": "value"
    # }
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    # job document
    job_document = {"field": "value"}

    job1 = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job1.should.have.key("jobId").which.should.equal(job_id)
    job1.should.have.key("jobArn")
    job1.should.have.key("description")

    job2 = client.create_job(
        jobId=job_id + "1",
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job2.should.have.key("jobId").which.should.equal(job_id + "1")
    job2.should.have.key("jobArn")
    job2.should.have.key("description")

    jobs = client.list_jobs()
    jobs.should.have.key("jobs")
    jobs.should_not.have.key("nextToken")
    jobs["jobs"][0].should.have.key("jobId").which.should.equal(job_id)
    jobs["jobs"][1].should.have.key("jobId").which.should.equal(job_id + "1")


@mock_iot
def test_describe_job():
    client = boto3.client("iot", region_name="eu-west-1")
@ -1124,3 +1351,387 @@ def test_describe_job_1():
    job.should.have.key("job").which.should.have.key(
        "jobExecutionsRolloutConfig"
    ).which.should.have.key("maximumPerMinute").which.should.equal(10)


@mock_iot
def test_delete_job():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")

    job = client.describe_job(jobId=job_id)
    job.should.have.key("job")
    job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id)

    client.delete_job(jobId=job_id)

    client.list_jobs()["jobs"].should.have.length_of(0)


@mock_iot
def test_cancel_job():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")

    job = client.describe_job(jobId=job_id)
    job.should.have.key("job")
    job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id)

    job = client.cancel_job(jobId=job_id, reasonCode="Because", comment="You are")
    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")

    job = client.describe_job(jobId=job_id)
    job.should.have.key("job")
    job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("job").which.should.have.key("status").which.should.equal(
        "CANCELED"
    )
    job.should.have.key("job").which.should.have.key(
        "forceCanceled"
    ).which.should.equal(False)
    job.should.have.key("job").which.should.have.key("reasonCode").which.should.equal(
        "Because"
    )
    job.should.have.key("job").which.should.have.key("comment").which.should.equal(
        "You are"
    )


@mock_iot
def test_get_job_document_with_document_source():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")

    job_document = client.get_job_document(jobId=job_id)
    job_document.should.have.key("document").which.should.equal("")


@mock_iot
def test_get_job_document_with_document():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    # job document
    job_document = {"field": "value"}

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")

    job_document = client.get_job_document(jobId=job_id)
    job_document.should.have.key("document").which.should.equal('{"field": "value"}')


@mock_iot
def test_describe_job_execution():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    # job document
    job_document = {"field": "value"}

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job.should.have.key("description")

    job_execution = client.describe_job_execution(jobId=job_id, thingName=name)
    job_execution.should.have.key("execution")
    job_execution["execution"].should.have.key("jobId").which.should.equal(job_id)
    job_execution["execution"].should.have.key("status").which.should.equal("QUEUED")
    job_execution["execution"].should.have.key("forceCanceled").which.should.equal(
        False
    )
    job_execution["execution"].should.have.key("statusDetails").which.should.equal(
        {"detailsMap": {}}
    )
    job_execution["execution"].should.have.key("thingArn").which.should.equal(
        thing["thingArn"]
    )
    job_execution["execution"].should.have.key("queuedAt")
    job_execution["execution"].should.have.key("startedAt")
    job_execution["execution"].should.have.key("lastUpdatedAt")
    job_execution["execution"].should.have.key("executionNumber").which.should.equal(
        123
    )
    job_execution["execution"].should.have.key("versionNumber").which.should.equal(123)
    job_execution["execution"].should.have.key(
        "approximateSecondsBeforeTimedOut"
    ).which.should.equal(123)

    job_execution = client.describe_job_execution(
        jobId=job_id, thingName=name, executionNumber=123
    )
    job_execution.should.have.key("execution")
    job_execution["execution"].should.have.key("jobId").which.should.equal(job_id)
    job_execution["execution"].should.have.key("status").which.should.equal("QUEUED")
    job_execution["execution"].should.have.key("forceCanceled").which.should.equal(
        False
    )
    job_execution["execution"].should.have.key("statusDetails").which.should.equal(
        {"detailsMap": {}}
    )
    job_execution["execution"].should.have.key("thingArn").which.should.equal(
        thing["thingArn"]
    )
    job_execution["execution"].should.have.key("queuedAt")
    job_execution["execution"].should.have.key("startedAt")
    job_execution["execution"].should.have.key("lastUpdatedAt")
    job_execution["execution"].should.have.key("executionNumber").which.should.equal(
        123
    )
    job_execution["execution"].should.have.key("versionNumber").which.should.equal(123)
    job_execution["execution"].should.have.key(
        "approximateSecondsBeforeTimedOut"
    ).which.should.equal(123)

    try:
        client.describe_job_execution(jobId=job_id, thingName=name, executionNumber=456)
    except ClientError as exc:
        error_code = exc.response["Error"]["Code"]
        error_code.should.equal("ResourceNotFoundException")
    else:
        raise Exception("Should have raised error")


@mock_iot
def test_cancel_job_execution():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    # job document
    job_document = {"field": "value"}

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job.should.have.key("description")

    client.cancel_job_execution(jobId=job_id, thingName=name)
    job_execution = client.describe_job_execution(jobId=job_id, thingName=name)
    job_execution.should.have.key("execution")
    job_execution["execution"].should.have.key("status").which.should.equal("CANCELED")


@mock_iot
def test_delete_job_execution():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    # job document
    job_document = {"field": "value"}

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job.should.have.key("description")

    client.delete_job_execution(jobId=job_id, thingName=name, executionNumber=123)
    try:
        client.describe_job_execution(jobId=job_id, thingName=name, executionNumber=123)
    except ClientError as exc:
        error_code = exc.response["Error"]["Code"]
        error_code.should.equal("ResourceNotFoundException")
    else:
        raise Exception("Should have raised error")


@mock_iot
def test_list_job_executions_for_job():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    # job document
    job_document = {"field": "value"}

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job.should.have.key("description")

    job_execution = client.list_job_executions_for_job(jobId=job_id)
    job_execution.should.have.key("executionSummaries")
    job_execution["executionSummaries"][0].should.have.key(
        "thingArn"
    ).which.should.equal(thing["thingArn"])


@mock_iot
def test_list_job_executions_for_thing():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    # job document
    job_document = {"field": "value"}

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job.should.have.key("description")

    job_execution = client.list_job_executions_for_thing(thingName=name)
    job_execution.should.have.key("executionSummaries")
    job_execution["executionSummaries"][0].should.have.key("jobId").which.should.equal(
        job_id
    )
@ -1,76 +1,76 @@
from __future__ import unicode_literals
import boto3
from freezegun import freeze_time
import sure  # noqa
import re

from moto import mock_opsworks


@freeze_time("2015-01-01")
@mock_opsworks
def test_create_app_response():
    client = boto3.client("opsworks", region_name="us-east-1")
    stack_id = client.create_stack(
        Name="test_stack_1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]

    response = client.create_app(StackId=stack_id, Type="other", Name="TestApp")

    response.should.contain("AppId")

    second_stack_id = client.create_stack(
        Name="test_stack_2",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]

    response = client.create_app(StackId=second_stack_id, Type="other", Name="TestApp")

    response.should.contain("AppId")

    # ClientError
    client.create_app.when.called_with(
        StackId=stack_id, Type="other", Name="TestApp"
    ).should.throw(Exception, re.compile(r'already an app named "TestApp"'))

    # ClientError
    client.create_app.when.called_with(
        StackId="nothere", Type="other", Name="TestApp"
    ).should.throw(Exception, "nothere")


@freeze_time("2015-01-01")
@mock_opsworks
def test_describe_apps():
    client = boto3.client("opsworks", region_name="us-east-1")
    stack_id = client.create_stack(
        Name="test_stack_1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]
    app_id = client.create_app(StackId=stack_id, Type="other", Name="TestApp")["AppId"]

    rv1 = client.describe_apps(StackId=stack_id)
    rv2 = client.describe_apps(AppIds=[app_id])
    rv1["Apps"].should.equal(rv2["Apps"])

    rv1["Apps"][0]["Name"].should.equal("TestApp")

    # ClientError
    client.describe_apps.when.called_with(
        StackId=stack_id, AppIds=[app_id]
    ).should.throw(Exception, "Please provide one or more app IDs or a stack ID")
    # ClientError
    client.describe_apps.when.called_with(StackId="nothere").should.throw(
        Exception, "Unable to find stack with ID nothere"
    )
    # ClientError
    client.describe_apps.when.called_with(AppIds=["nothere"]).should.throw(
        Exception, "nothere"
    )
@ -1,206 +1,206 @@
from __future__ import unicode_literals
import boto3
import sure  # noqa

from moto import mock_opsworks
from moto import mock_ec2


@mock_opsworks
def test_create_instance():
    client = boto3.client("opsworks", region_name="us-east-1")
    stack_id = client.create_stack(
        Name="test_stack_1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]

    layer_id = client.create_layer(
        StackId=stack_id,
        Type="custom",
        Name="TestLayer",
        Shortname="TestLayerShortName",
    )["LayerId"]

    second_stack_id = client.create_stack(
        Name="test_stack_2",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]

    second_layer_id = client.create_layer(
        StackId=second_stack_id,
        Type="custom",
        Name="SecondTestLayer",
        Shortname="SecondTestLayerShortName",
    )["LayerId"]

    response = client.create_instance(
        StackId=stack_id, LayerIds=[layer_id], InstanceType="t2.micro"
    )

    response.should.contain("InstanceId")

    client.create_instance.when.called_with(
        StackId="nothere", LayerIds=[layer_id], InstanceType="t2.micro"
    ).should.throw(Exception, "Unable to find stack with ID nothere")

    client.create_instance.when.called_with(
        StackId=stack_id, LayerIds=["nothere"], InstanceType="t2.micro"
    ).should.throw(Exception, "nothere")
    # ClientError
    client.create_instance.when.called_with(
        StackId=stack_id, LayerIds=[second_layer_id], InstanceType="t2.micro"
    ).should.throw(Exception, "Please only provide layer IDs from the same stack")
    # ClientError
    client.start_instance.when.called_with(InstanceId="nothere").should.throw(
        Exception, "Unable to find instance with ID nothere"
    )


@mock_opsworks
def test_describe_instances():
    """
    create two stacks, with 1 layer and 2 layers (S1L1, S2L1, S2L2)

    populate S1L1 with 2 instances (S1L1_i1, S1L1_i2)
    populate S2L1 with 1 instance (S2L1_i1)
    populate S2L2 with 2 instances (S2L2_i1, S2L2_i2)
    """

    client = boto3.client("opsworks", region_name="us-east-1")
    S1 = client.create_stack(
        Name="S1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]
    S1L1 = client.create_layer(
        StackId=S1, Type="custom", Name="S1L1", Shortname="S1L1"
    )["LayerId"]
    S2 = client.create_stack(
        Name="S2",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]
    S2L1 = client.create_layer(
        StackId=S2, Type="custom", Name="S2L1", Shortname="S2L1"
    )["LayerId"]
    S2L2 = client.create_layer(
        StackId=S2, Type="custom", Name="S2L2", Shortname="S2L2"
    )["LayerId"]

    S1L1_i1 = client.create_instance(
        StackId=S1, LayerIds=[S1L1], InstanceType="t2.micro"
    )["InstanceId"]
    S1L1_i2 = client.create_instance(
        StackId=S1, LayerIds=[S1L1], InstanceType="t2.micro"
    )["InstanceId"]
    S2L1_i1 = client.create_instance(
        StackId=S2, LayerIds=[S2L1], InstanceType="t2.micro"
    )["InstanceId"]
    S2L2_i1 = client.create_instance(
        StackId=S2, LayerIds=[S2L2], InstanceType="t2.micro"
    )["InstanceId"]
    S2L2_i2 = client.create_instance(
        StackId=S2, LayerIds=[S2L2], InstanceType="t2.micro"
    )["InstanceId"]

    # instances in Stack 1
    response = client.describe_instances(StackId=S1)["Instances"]
    response.should.have.length_of(2)
    S1L1_i1.should.be.within([i["InstanceId"] for i in response])
    S1L1_i2.should.be.within([i["InstanceId"] for i in response])

    response2 = client.describe_instances(InstanceIds=[S1L1_i1, S1L1_i2])["Instances"]
    sorted(response2, key=lambda d: d["InstanceId"]).should.equal(
        sorted(response, key=lambda d: d["InstanceId"])
    )

    response3 = client.describe_instances(LayerId=S1L1)["Instances"]
    sorted(response3, key=lambda d: d["InstanceId"]).should.equal(
        sorted(response, key=lambda d: d["InstanceId"])
    )

    response = client.describe_instances(StackId=S1)["Instances"]
    response.should.have.length_of(2)
    S1L1_i1.should.be.within([i["InstanceId"] for i in response])
    S1L1_i2.should.be.within([i["InstanceId"] for i in response])

    # instances in Stack 2
    response = client.describe_instances(StackId=S2)["Instances"]
    response.should.have.length_of(3)
    S2L1_i1.should.be.within([i["InstanceId"] for i in response])
    S2L2_i1.should.be.within([i["InstanceId"] for i in response])
    S2L2_i2.should.be.within([i["InstanceId"] for i in response])

    response = client.describe_instances(LayerId=S2L1)["Instances"]
    response.should.have.length_of(1)
    S2L1_i1.should.be.within([i["InstanceId"] for i in response])

    response = client.describe_instances(LayerId=S2L2)["Instances"]
    response.should.have.length_of(2)
    S2L1_i1.should_not.be.within([i["InstanceId"] for i in response])

    # ClientError
    client.describe_instances.when.called_with(StackId=S1, LayerId=S1L1).should.throw(
        Exception, "Please provide either one or more"
    )
    # ClientError
    client.describe_instances.when.called_with(StackId="nothere").should.throw(
        Exception, "nothere"
    )
    # ClientError
    client.describe_instances.when.called_with(LayerId="nothere").should.throw(
        Exception, "nothere"
    )
    # ClientError
    client.describe_instances.when.called_with(InstanceIds=["nothere"]).should.throw(
        Exception, "nothere"
    )


@mock_opsworks
@mock_ec2
def test_ec2_integration():
    """
    instances created via OpsWorks should be discoverable via ec2
    """

    opsworks = boto3.client("opsworks", region_name="us-east-1")
    stack_id = opsworks.create_stack(
        Name="S1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]

    layer_id = opsworks.create_layer(
        StackId=stack_id, Type="custom", Name="S1L1", Shortname="S1L1"
    )["LayerId"]

    instance_id = opsworks.create_instance(
        StackId=stack_id,
        LayerIds=[layer_id],
        InstanceType="t2.micro",
        SshKeyName="testSSH",
    )["InstanceId"]

    ec2 = boto3.client("ec2", region_name="us-east-1")

    # Before starting the instance, it shouldn't be discoverable via ec2
    reservations = ec2.describe_instances()["Reservations"]
    assert reservations.should.be.empty

    # After starting the instance, it should be discoverable via ec2
    opsworks.start_instance(InstanceId=instance_id)
    reservations = ec2.describe_instances()["Reservations"]
    reservations[0]["Instances"].should.have.length_of(1)
    instance = reservations[0]["Instances"][0]
    opsworks_instance = opsworks.describe_instances(StackId=stack_id)["Instances"][0]

    instance["InstanceId"].should.equal(opsworks_instance["Ec2InstanceId"])
    instance["PrivateIpAddress"].should.equal(opsworks_instance["PrivateIp"])
@ -1,96 +1,96 @@
from __future__ import unicode_literals
import boto3
from freezegun import freeze_time
import sure  # noqa
import re

from moto import mock_opsworks


@freeze_time("2015-01-01")
@mock_opsworks
def test_create_layer_response():
    client = boto3.client("opsworks", region_name="us-east-1")
    stack_id = client.create_stack(
        Name="test_stack_1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]

    response = client.create_layer(
        StackId=stack_id,
        Type="custom",
        Name="TestLayer",
        Shortname="TestLayerShortName",
    )

    response.should.contain("LayerId")

    second_stack_id = client.create_stack(
        Name="test_stack_2",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]

    response = client.create_layer(
        StackId=second_stack_id,
        Type="custom",
        Name="TestLayer",
        Shortname="TestLayerShortName",
    )

    response.should.contain("LayerId")

    # ClientError
    client.create_layer.when.called_with(
        StackId=stack_id, Type="custom", Name="TestLayer", Shortname="_"
    ).should.throw(Exception, re.compile(r'already a layer named "TestLayer"'))
    # ClientError
    client.create_layer.when.called_with(
        StackId=stack_id, Type="custom", Name="_", Shortname="TestLayerShortName"
    ).should.throw(
        Exception, re.compile(r'already a layer with shortname "TestLayerShortName"')
    )
    # ClientError
    client.create_layer.when.called_with(
        StackId="nothere", Type="custom", Name="TestLayer", Shortname="_"
    ).should.throw(Exception, "nothere")


@freeze_time("2015-01-01")
@mock_opsworks
def test_describe_layers():
    client = boto3.client("opsworks", region_name="us-east-1")
    stack_id = client.create_stack(
        Name="test_stack_1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]
    layer_id = client.create_layer(
        StackId=stack_id,
        Type="custom",
        Name="TestLayer",
        Shortname="TestLayerShortName",
    )["LayerId"]

    rv1 = client.describe_layers(StackId=stack_id)
    rv2 = client.describe_layers(LayerIds=[layer_id])
    rv1["Layers"].should.equal(rv2["Layers"])

    rv1["Layers"][0]["Name"].should.equal("TestLayer")

    # ClientError
    client.describe_layers.when.called_with(
        StackId=stack_id, LayerIds=[layer_id]
    ).should.throw(Exception, "Please provide one or more layer IDs or a stack ID")
    # ClientError
    client.describe_layers.when.called_with(StackId="nothere").should.throw(
        Exception, "Unable to find stack with ID nothere"
    )
    # ClientError
    client.describe_layers.when.called_with(LayerIds=["nothere"]).should.throw(
        Exception, "nothere"
    )

@ -1,263 +1,263 @@
from __future__ import unicode_literals

from botocore.exceptions import ClientError
import boto3
import sure  # noqa
from nose.tools import assert_raises
from moto import mock_polly

# Polly only available in a few regions
DEFAULT_REGION = "eu-west-1"

LEXICON_XML = """<?xml version="1.0" encoding="UTF-8"?>
<lexicon version="1.0"
      xmlns="http://www.w3.org/2005/01/pronunciation-lexicon"
      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
      xsi:schemaLocation="http://www.w3.org/2005/01/pronunciation-lexicon
        http://www.w3.org/TR/2007/CR-pronunciation-lexicon-20071212/pls.xsd"
      alphabet="ipa"
      xml:lang="en-US">
  <lexeme>
    <grapheme>W3C</grapheme>
    <alias>World Wide Web Consortium</alias>
  </lexeme>
</lexicon>"""


@mock_polly
def test_describe_voices():
    client = boto3.client("polly", region_name=DEFAULT_REGION)

    resp = client.describe_voices()
    len(resp["Voices"]).should.be.greater_than(1)

    resp = client.describe_voices(LanguageCode="en-GB")
    len(resp["Voices"]).should.equal(3)

    try:
        client.describe_voices(LanguageCode="SOME_LANGUAGE")
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("400")
    else:
        raise RuntimeError("Should have raised an exception")


@mock_polly
def test_put_list_lexicon():
    client = boto3.client("polly", region_name=DEFAULT_REGION)

    # Return nothing
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    resp = client.list_lexicons()
    len(resp["Lexicons"]).should.equal(1)


@mock_polly
def test_put_get_lexicon():
    client = boto3.client("polly", region_name=DEFAULT_REGION)

    # Return nothing
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    resp = client.get_lexicon(Name="test")
    resp.should.contain("Lexicon")
    resp.should.contain("LexiconAttributes")


@mock_polly
def test_put_lexicon_bad_name():
    client = boto3.client("polly", region_name=DEFAULT_REGION)

    try:
        client.put_lexicon(Name="test-invalid", Content=LEXICON_XML)
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("InvalidParameterValue")
    else:
        raise RuntimeError("Should have raised an exception")


@mock_polly
def test_synthesize_speech():
    client = boto3.client("polly", region_name=DEFAULT_REGION)

    # Return nothing
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    tests = (("pcm", "audio/pcm"), ("mp3", "audio/mpeg"), ("ogg_vorbis", "audio/ogg"))
    for output_format, content_type in tests:
        resp = client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat=output_format,
            SampleRate="16000",
            Text="test1234",
            TextType="text",
            VoiceId="Astrid",
        )
        resp["ContentType"].should.equal(content_type)


@mock_polly
def test_synthesize_speech_bad_lexicon():
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    try:
        client.synthesize_speech(
            LexiconNames=["test2"],
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234",
            TextType="text",
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("LexiconNotFoundException")
    else:
        raise RuntimeError("Should have raised LexiconNotFoundException")


@mock_polly
def test_synthesize_speech_bad_output_format():
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="invalid",
            SampleRate="16000",
            Text="test1234",
            TextType="text",
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("InvalidParameterValue")
    else:
        raise RuntimeError("Should have raised ")


@mock_polly
def test_synthesize_speech_bad_sample_rate():
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="18000",
            Text="test1234",
            TextType="text",
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("InvalidSampleRateException")
    else:
        raise RuntimeError("Should have raised ")


@mock_polly
def test_synthesize_speech_bad_text_type():
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234",
            TextType="invalid",
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("InvalidParameterValue")
    else:
        raise RuntimeError("Should have raised ")


@mock_polly
def test_synthesize_speech_bad_voice_id():
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234",
            TextType="text",
            VoiceId="Luke",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("InvalidParameterValue")
    else:
        raise RuntimeError("Should have raised ")


@mock_polly
def test_synthesize_speech_text_too_long():
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234" * 376,  # = 3008 characters
            TextType="text",
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal("TextLengthExceededException")
    else:
        raise RuntimeError("Should have raised ")


@mock_polly
def test_synthesize_speech_bad_speech_marks1():
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234",
            TextType="text",
            SpeechMarkTypes=["word"],
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal(
            "MarksNotSupportedForFormatException"
        )
    else:
        raise RuntimeError("Should have raised ")


@mock_polly
def test_synthesize_speech_bad_speech_marks2():
    client = boto3.client("polly", region_name=DEFAULT_REGION)
    client.put_lexicon(Name="test", Content=LEXICON_XML)

    try:
        client.synthesize_speech(
            LexiconNames=["test"],
            OutputFormat="pcm",
            SampleRate="16000",
            Text="test1234",
            TextType="ssml",
            SpeechMarkTypes=["word"],
            VoiceId="Astrid",
        )
    except ClientError as err:
        err.response["Error"]["Code"].should.equal(
            "MarksNotSupportedForFormatException"
        )
    else:
        raise RuntimeError("Should have raised ")

@ -21,7 +21,10 @@ def test_get_resources_s3():
    # Create 4 buckets
    for i in range(1, 5):
        i_str = str(i)
        s3_client.create_bucket(Bucket="test_bucket" + i_str)
        s3_client.create_bucket(
            Bucket="test_bucket" + i_str,
            CreateBucketConfiguration={"LocationConstraint": "eu-central-1"},
        )
        s3_client.put_bucket_tagging(
            Bucket="test_bucket" + i_str,
            Tagging={"TagSet": [{"Key": "key" + i_str, "Value": "value" + i_str}]},
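Aside: this hunk (and the S3 hunks below) replaces the bare create_bucket call with one that passes an explicit `CreateBucketConfiguration`. With boto3, a bucket created in any region other than us-east-1 needs a `LocationConstraint` matching that region; a short sketch under that assumption (bucket names are placeholders):

```python
import boto3
from moto import mock_s3


@mock_s3
def create_buckets_in_two_regions():
    # us-east-1 accepts the bare form
    boto3.client("s3", region_name="us-east-1").create_bucket(Bucket="example-us-east-1")

    # any other region wants the constraint spelled out
    boto3.client("s3", region_name="eu-central-1").create_bucket(
        Bucket="example-eu-central-1",
        CreateBucketConfiguration={"LocationConstraint": "eu-central-1"},
    )
```
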
File diff suppressed because it is too large
@ -16,7 +16,7 @@ from moto import mock_s3_deprecated, mock_s3
@mock_s3_deprecated
def test_lifecycle_create():
    conn = boto.s3.connect_to_region("us-west-1")
    bucket = conn.create_bucket("foobar")
    bucket = conn.create_bucket("foobar", location="us-west-1")

    lifecycle = Lifecycle()
    lifecycle.add_rule("myid", "", "Enabled", 30)
@ -33,7 +33,9 @@ def test_lifecycle_create():
@mock_s3
def test_lifecycle_with_filters():
    client = boto3.client("s3")
    client.create_bucket(Bucket="bucket")
    client.create_bucket(
        Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
    )

    # Create a lifecycle rule with a Filter (no tags):
    lfc = {
@ -245,7 +247,9 @@ def test_lifecycle_with_filters():
@mock_s3
def test_lifecycle_with_eodm():
    client = boto3.client("s3")
    client.create_bucket(Bucket="bucket")
    client.create_bucket(
        Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
    )

    lfc = {
        "Rules": [
@ -293,7 +297,9 @@ def test_lifecycle_with_eodm():
@mock_s3
def test_lifecycle_with_nve():
    client = boto3.client("s3")
    client.create_bucket(Bucket="bucket")
    client.create_bucket(
        Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
    )

    lfc = {
        "Rules": [
@ -327,7 +333,9 @@ def test_lifecycle_with_nve():
@mock_s3
def test_lifecycle_with_nvt():
    client = boto3.client("s3")
    client.create_bucket(Bucket="bucket")
    client.create_bucket(
        Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
    )

    lfc = {
        "Rules": [
@ -393,7 +401,9 @@ def test_lifecycle_with_nvt():
@mock_s3
def test_lifecycle_with_aimu():
    client = boto3.client("s3")
    client.create_bucket(Bucket="bucket")
    client.create_bucket(
        Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
    )

    lfc = {
        "Rules": [
@ -432,7 +442,7 @@ def test_lifecycle_with_aimu():
@mock_s3_deprecated
def test_lifecycle_with_glacier_transition():
    conn = boto.s3.connect_to_region("us-west-1")
    bucket = conn.create_bucket("foobar")
    bucket = conn.create_bucket("foobar", location="us-west-1")

    lifecycle = Lifecycle()
    transition = Transition(days=30, storage_class="GLACIER")
@ -451,7 +461,7 @@ def test_lifecycle_with_glacier_transition():
@mock_s3_deprecated
def test_lifecycle_multi():
    conn = boto.s3.connect_to_region("us-west-1")
    bucket = conn.create_bucket("foobar")
    bucket = conn.create_bucket("foobar", location="us-west-1")

    date = "2022-10-12T00:00:00.000Z"
    sc = "GLACIER"
@ -493,7 +503,7 @@ def test_lifecycle_multi():
@mock_s3_deprecated
def test_lifecycle_delete():
    conn = boto.s3.connect_to_region("us-west-1")
    bucket = conn.create_bucket("foobar")
    bucket = conn.create_bucket("foobar", location="us-west-1")

    lifecycle = Lifecycle()
    lifecycle.add_rule(expiration=30)

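Aside: the deprecated-boto (boto2) lifecycle tests make the matching change by passing `location=` to `create_bucket`, so the bucket region agrees with the `connect_to_region` endpoint. A small sketch of that form (the bucket name is a placeholder):

```python
import boto.s3
from moto import mock_s3_deprecated


@mock_s3_deprecated
def create_us_west_1_bucket():
    conn = boto.s3.connect_to_region("us-west-1")
    # boto2's create_bucket takes the region as a plain `location` string
    return conn.create_bucket("example-bucket", location="us-west-1")
```
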
@ -11,7 +11,7 @@ from moto import mock_s3

@mock_s3
def test_s3_storage_class_standard():
    s3 = boto3.client("s3")
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="Bucket")

    # add an object to the bucket with standard storage
@ -26,7 +26,9 @@ def test_s3_storage_class_standard():
@mock_s3
def test_s3_storage_class_infrequent_access():
    s3 = boto3.client("s3")
    s3.create_bucket(Bucket="Bucket")
    s3.create_bucket(
        Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-2"}
    )

    # add an object to the bucket with standard storage

@ -46,7 +48,9 @@ def test_s3_storage_class_infrequent_access():
def test_s3_storage_class_intelligent_tiering():
    s3 = boto3.client("s3")

    s3.create_bucket(Bucket="Bucket")
    s3.create_bucket(
        Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-east-2"}
    )
    s3.put_object(
        Bucket="Bucket",
        Key="my_key_infrequent",
@ -61,7 +65,7 @@ def test_s3_storage_class_intelligent_tiering():

@mock_s3
def test_s3_storage_class_copy():
    s3 = boto3.client("s3")
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="Bucket")
    s3.put_object(
        Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD"
@ -86,7 +90,7 @@ def test_s3_storage_class_copy():

@mock_s3
def test_s3_invalid_copied_storage_class():
    s3 = boto3.client("s3")
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="Bucket")
    s3.put_object(
        Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD"
@ -119,7 +123,9 @@ def test_s3_invalid_copied_storage_class():
@mock_s3
def test_s3_invalid_storage_class():
    s3 = boto3.client("s3")
    s3.create_bucket(Bucket="Bucket")
    s3.create_bucket(
        Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
    )

    # Try to add an object with an invalid storage class
    with assert_raises(ClientError) as err:
@ -137,7 +143,9 @@ def test_s3_invalid_storage_class():
@mock_s3
def test_s3_default_storage_class():
    s3 = boto3.client("s3")
    s3.create_bucket(Bucket="Bucket")
    s3.create_bucket(
        Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
    )

    s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body")

@ -150,7 +158,9 @@ def test_s3_default_storage_class():
@mock_s3
def test_s3_copy_object_error_for_glacier_storage_class():
    s3 = boto3.client("s3")
    s3.create_bucket(Bucket="Bucket")
    s3.create_bucket(
        Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
    )

    s3.put_object(
        Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="GLACIER"
@ -169,7 +179,9 @@ def test_s3_copy_object_error_for_glacier_storage_class():
@mock_s3
def test_s3_copy_object_error_for_deep_archive_storage_class():
    s3 = boto3.client("s3")
    s3.create_bucket(Bucket="Bucket")
    s3.create_bucket(
        Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
    )

    s3.put_object(
        Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="DEEP_ARCHIVE"
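Aside: the storage-class tests above now pass `region_name` when building the client. Without it, boto3 resolves the region from the environment (`AWS_DEFAULT_REGION` or the shared config file) and raises `NoRegionError` when nothing is configured, so pinning the region keeps the tests independent of the machine they run on. A quick sketch of that failure mode (assuming no region is configured anywhere):

```python
import boto3
from botocore.exceptions import NoRegionError

try:
    boto3.client("s3")  # falls back to AWS_DEFAULT_REGION / shared config
except NoRegionError:
    s3 = boto3.client("s3", region_name="us-east-1")  # explicit region always works
```
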
@ -1,107 +1,107 @@
from collections import namedtuple
import sure  # noqa

from moto.swf.exceptions import SWFUnknownResourceFault
from moto.swf.models import Domain

# Ensure 'assert_raises' context manager support for Python 2.6
import tests.backport_assert_raises  # noqa

# Fake WorkflowExecution for tests purposes
WorkflowExecution = namedtuple(
    "WorkflowExecution", ["workflow_id", "run_id", "execution_status", "open"]
)


def test_domain_short_dict_representation():
    domain = Domain("foo", "52")
    domain.to_short_dict().should.equal({"name": "foo", "status": "REGISTERED"})

    domain.description = "foo bar"
    domain.to_short_dict()["description"].should.equal("foo bar")


def test_domain_full_dict_representation():
    domain = Domain("foo", "52")

    domain.to_full_dict()["domainInfo"].should.equal(domain.to_short_dict())
    _config = domain.to_full_dict()["configuration"]
    _config["workflowExecutionRetentionPeriodInDays"].should.equal("52")


def test_domain_string_representation():
    domain = Domain("my-domain", "60")
    str(domain).should.equal("Domain(name: my-domain, status: REGISTERED)")


def test_domain_add_to_activity_task_list():
    domain = Domain("my-domain", "60")
    domain.add_to_activity_task_list("foo", "bar")
    domain.activity_task_lists.should.equal({"foo": ["bar"]})


def test_domain_activity_tasks():
    domain = Domain("my-domain", "60")
    domain.add_to_activity_task_list("foo", "bar")
    domain.add_to_activity_task_list("other", "baz")
    sorted(domain.activity_tasks).should.equal(["bar", "baz"])


def test_domain_add_to_decision_task_list():
    domain = Domain("my-domain", "60")
    domain.add_to_decision_task_list("foo", "bar")
    domain.decision_task_lists.should.equal({"foo": ["bar"]})


def test_domain_decision_tasks():
    domain = Domain("my-domain", "60")
    domain.add_to_decision_task_list("foo", "bar")
    domain.add_to_decision_task_list("other", "baz")
    sorted(domain.decision_tasks).should.equal(["bar", "baz"])


def test_domain_get_workflow_execution():
    domain = Domain("my-domain", "60")

    wfe1 = WorkflowExecution(
        workflow_id="wf-id-1", run_id="run-id-1", execution_status="OPEN", open=True
    )
    wfe2 = WorkflowExecution(
        workflow_id="wf-id-1", run_id="run-id-2", execution_status="CLOSED", open=False
    )
    wfe3 = WorkflowExecution(
        workflow_id="wf-id-2", run_id="run-id-3", execution_status="OPEN", open=True
    )
    wfe4 = WorkflowExecution(
        workflow_id="wf-id-3", run_id="run-id-4", execution_status="CLOSED", open=False
    )
    domain.workflow_executions = [wfe1, wfe2, wfe3, wfe4]

    # get workflow execution through workflow_id and run_id
    domain.get_workflow_execution("wf-id-1", run_id="run-id-1").should.equal(wfe1)
    domain.get_workflow_execution("wf-id-1", run_id="run-id-2").should.equal(wfe2)
    domain.get_workflow_execution("wf-id-3", run_id="run-id-4").should.equal(wfe4)

    domain.get_workflow_execution.when.called_with(
        "wf-id-1", run_id="non-existent"
    ).should.throw(SWFUnknownResourceFault)

    # get OPEN workflow execution by default if no run_id
    domain.get_workflow_execution("wf-id-1").should.equal(wfe1)
    domain.get_workflow_execution.when.called_with("wf-id-3").should.throw(
        SWFUnknownResourceFault
    )
    domain.get_workflow_execution.when.called_with("wf-id-non-existent").should.throw(
        SWFUnknownResourceFault
    )

    # raise_if_closed attribute
    domain.get_workflow_execution(
        "wf-id-1", run_id="run-id-1", raise_if_closed=True
    ).should.equal(wfe1)
    domain.get_workflow_execution.when.called_with(
        "wf-id-3", run_id="run-id-4", raise_if_closed=True
    ).should.throw(SWFUnknownResourceFault)

    # raise_if_none attribute
    domain.get_workflow_execution("foo", raise_if_none=False).should.be.none

@ -1,19 +1,19 @@
from freezegun import freeze_time
import sure  # noqa

from moto.swf.models import Timeout

from ..utils import make_workflow_execution


def test_timeout_creation():
    wfe = make_workflow_execution()

    # epoch 1420117200 == "2015-01-01 13:00:00"
    timeout = Timeout(wfe, 1420117200, "START_TO_CLOSE")

    with freeze_time("2015-01-01 12:00:00"):
        timeout.reached.should.be.falsy

    with freeze_time("2015-01-01 13:00:00"):
        timeout.reached.should.be.truthy

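Aside: a quick check of the epoch used in the timeout test (UTC), which is why the timeout is not reached at 12:00 but is reached at 13:00:

```python
from datetime import datetime, timezone

datetime.fromtimestamp(1420113600, tz=timezone.utc)  # 2015-01-01 12:00:00+00:00
datetime.fromtimestamp(1420117200, tz=timezone.utc)  # 2015-01-01 13:00:00+00:00 (the Timeout above)
```
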
@ -148,6 +148,39 @@ def test_workflow_execution_full_dict_representation():
    )


def test_closed_workflow_execution_full_dict_representation():
    domain = get_basic_domain()
    wf_type = WorkflowType(
        "test-workflow",
        "v1.0",
        task_list="queue",
        default_child_policy="ABANDON",
        default_execution_start_to_close_timeout="300",
        default_task_start_to_close_timeout="300",
    )
    wfe = WorkflowExecution(domain, wf_type, "ab1234")
    wfe.execution_status = "CLOSED"
    wfe.close_status = "CANCELED"
    wfe.close_timestamp = 1420066801.123

    fd = wfe.to_full_dict()
    medium_dict = wfe.to_medium_dict()
    medium_dict["closeStatus"] = "CANCELED"
    medium_dict["closeTimestamp"] = 1420066801.123
    fd["executionInfo"].should.equal(medium_dict)
    fd["openCounts"]["openTimers"].should.equal(0)
    fd["openCounts"]["openDecisionTasks"].should.equal(0)
    fd["openCounts"]["openActivityTasks"].should.equal(0)
    fd["executionConfiguration"].should.equal(
        {
            "childPolicy": "ABANDON",
            "executionStartToCloseTimeout": "300",
            "taskList": {"name": "queue"},
            "taskStartToCloseTimeout": "300",
        }
    )


def test_workflow_execution_list_dict_representation():
    domain = get_basic_domain()
    wf_type = WorkflowType(
@ -1,114 +1,114 @@
import boto
from boto.swf.exceptions import SWFResponseError
import sure  # noqa

from moto import mock_swf_deprecated


# RegisterDomain endpoint
@mock_swf_deprecated
def test_register_domain():
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("test-domain", "60", description="A test domain")

    all_domains = conn.list_domains("REGISTERED")
    domain = all_domains["domainInfos"][0]

    domain["name"].should.equal("test-domain")
    domain["status"].should.equal("REGISTERED")
    domain["description"].should.equal("A test domain")


@mock_swf_deprecated
def test_register_already_existing_domain():
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("test-domain", "60", description="A test domain")

    conn.register_domain.when.called_with(
        "test-domain", "60", description="A test domain"
    ).should.throw(SWFResponseError)


@mock_swf_deprecated
def test_register_with_wrong_parameter_type():
    conn = boto.connect_swf("the_key", "the_secret")

    conn.register_domain.when.called_with(
        "test-domain", 60, description="A test domain"
    ).should.throw(SWFResponseError)


# ListDomains endpoint
@mock_swf_deprecated
def test_list_domains_order():
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("b-test-domain", "60")
    conn.register_domain("a-test-domain", "60")
    conn.register_domain("c-test-domain", "60")

    all_domains = conn.list_domains("REGISTERED")
    names = [domain["name"] for domain in all_domains["domainInfos"]]
    names.should.equal(["a-test-domain", "b-test-domain", "c-test-domain"])


@mock_swf_deprecated
def test_list_domains_reverse_order():
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("b-test-domain", "60")
    conn.register_domain("a-test-domain", "60")
    conn.register_domain("c-test-domain", "60")

    all_domains = conn.list_domains("REGISTERED", reverse_order=True)
    names = [domain["name"] for domain in all_domains["domainInfos"]]
    names.should.equal(["c-test-domain", "b-test-domain", "a-test-domain"])


# DeprecateDomain endpoint
@mock_swf_deprecated
def test_deprecate_domain():
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("test-domain", "60", description="A test domain")
    conn.deprecate_domain("test-domain")

    all_domains = conn.list_domains("DEPRECATED")
    domain = all_domains["domainInfos"][0]

    domain["name"].should.equal("test-domain")


@mock_swf_deprecated
def test_deprecate_already_deprecated_domain():
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("test-domain", "60", description="A test domain")
    conn.deprecate_domain("test-domain")

    conn.deprecate_domain.when.called_with("test-domain").should.throw(SWFResponseError)


@mock_swf_deprecated
def test_deprecate_non_existent_domain():
    conn = boto.connect_swf("the_key", "the_secret")

    conn.deprecate_domain.when.called_with("non-existent").should.throw(
        SWFResponseError
    )


# DescribeDomain endpoint
@mock_swf_deprecated
def test_describe_domain():
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("test-domain", "60", description="A test domain")

    domain = conn.describe_domain("test-domain")
    domain["configuration"]["workflowExecutionRetentionPeriodInDays"].should.equal("60")
    domain["domainInfo"]["description"].should.equal("A test domain")
    domain["domainInfo"]["name"].should.equal("test-domain")
    domain["domainInfo"]["status"].should.equal("REGISTERED")


@mock_swf_deprecated
def test_describe_non_existent_domain():
    conn = boto.connect_swf("the_key", "the_secret")

    conn.describe_domain.when.called_with("non-existent").should.throw(SWFResponseError)

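Aside: these SWF response tests connect with placeholder credentials ("the_key" / "the_secret"). Under a moto decorator the boto2 connection never reaches AWS, so any strings will do; a minimal sketch (the domain name is illustrative):

```python
import boto
from moto import mock_swf_deprecated


@mock_swf_deprecated
def register_example_domain():
    conn = boto.connect_swf("any_key", "any_secret")  # never sent to AWS
    conn.register_domain("example-domain", "60", description="kept in moto's in-memory backend")
    return conn.list_domains("REGISTERED")
```
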
@ -1,9 +1,9 @@
import sure  # noqa

from moto.swf.utils import decapitalize


def test_decapitalize():
    cases = {"fooBar": "fooBar", "FooBar": "fooBar", "FOO BAR": "fOO BAR"}
    for before, after in cases.items():
        decapitalize(before).should.equal(after)

79 tests/test_utilities/test_tagging_service.py Normal file
@ -0,0 +1,79 @@
import sure

from moto.utilities.tagging_service import TaggingService


def test_list_empty():
    svc = TaggingService()
    result = svc.list_tags_for_resource("test")

    {"Tags": []}.should.be.equal(result)


def test_create_tag():
    svc = TaggingService("TheTags", "TagKey", "TagValue")
    tags = [{"TagKey": "key_key", "TagValue": "value_value"}]
    svc.tag_resource("arn", tags)
    actual = svc.list_tags_for_resource("arn")
    expected = {"TheTags": [{"TagKey": "key_key", "TagValue": "value_value"}]}

    expected.should.be.equal(actual)


def test_create_tag_without_value():
    svc = TaggingService()
    tags = [{"Key": "key_key"}]
    svc.tag_resource("arn", tags)
    actual = svc.list_tags_for_resource("arn")
    expected = {"Tags": [{"Key": "key_key", "Value": None}]}

    expected.should.be.equal(actual)


def test_delete_tag_using_names():
    svc = TaggingService()
    tags = [{"Key": "key_key", "Value": "value_value"}]
    svc.tag_resource("arn", tags)
    svc.untag_resource_using_names("arn", ["key_key"])
    result = svc.list_tags_for_resource("arn")

    {"Tags": []}.should.be.equal(result)


def test_delete_all_tags_for_resource():
    svc = TaggingService()
    tags = [{"Key": "key_key", "Value": "value_value"}]
    tags2 = [{"Key": "key_key2", "Value": "value_value2"}]
    svc.tag_resource("arn", tags)
    svc.tag_resource("arn", tags2)
    svc.delete_all_tags_for_resource("arn")
    result = svc.list_tags_for_resource("arn")

    {"Tags": []}.should.be.equal(result)


def test_list_empty_delete():
    svc = TaggingService()
    svc.untag_resource_using_names("arn", ["key_key"])
    result = svc.list_tags_for_resource("arn")

    {"Tags": []}.should.be.equal(result)


def test_delete_tag_using_tags():
    svc = TaggingService()
    tags = [{"Key": "key_key", "Value": "value_value"}]
    svc.tag_resource("arn", tags)
    svc.untag_resource_using_tags("arn", tags)
    result = svc.list_tags_for_resource("arn")

    {"Tags": []}.should.be.equal(result)


def test_extract_tag_names():
    svc = TaggingService()
    tags = [{"Key": "key1", "Value": "value1"}, {"Key": "key2", "Value": "value2"}]
    actual = svc.extract_tag_names(tags)
    expected = ["key1", "key2"]

    expected.should.be.equal(actual)
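Aside: the new test file above exercises a generic `TaggingService` helper. As a rough sketch of the behaviour those tests expect (an illustration written against the tests, not moto's actual `moto/utilities/tagging_service.py`), an in-memory implementation could look like this:

```python
class TaggingService(object):
    """Per-ARN tag store; the list/key/value field names are configurable."""

    def __init__(self, tagName="Tags", keyName="Key", valueName="Value"):
        self.tagName = tagName
        self.keyName = keyName
        self.valueName = valueName
        self.tags = {}  # resource arn -> {tag key: tag value}

    def list_tags_for_resource(self, arn):
        result = [
            {self.keyName: key, self.valueName: value}
            for key, value in self.tags.get(arn, {}).items()
        ]
        return {self.tagName: result}

    def delete_all_tags_for_resource(self, arn):
        self.tags.pop(arn, None)

    def tag_resource(self, arn, tags):
        store = self.tags.setdefault(arn, {})
        for tag in tags:
            # a tag without a value is stored with value None
            store[tag[self.keyName]] = tag.get(self.valueName)

    def untag_resource_using_names(self, arn, tag_names):
        for name in tag_names:
            self.tags.get(arn, {}).pop(name, None)

    def untag_resource_using_tags(self, arn, tags):
        self.untag_resource_using_names(arn, [t[self.keyName] for t in tags])

    def extract_tag_names(self, tags):
        return [tag[self.keyName] for tag in tags if self.keyName in tag]
```
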