commit
66947d84f3
@ -4850,12 +4850,12 @@
|
||||
- [X] list_policies
|
||||
- [X] list_policies_for_target
|
||||
- [X] list_roots
|
||||
- [ ] list_tags_for_resource
|
||||
- [x] list_tags_for_resource
|
||||
- [X] list_targets_for_policy
|
||||
- [X] move_account
|
||||
- [ ] remove_account_from_organization
|
||||
- [ ] tag_resource
|
||||
- [ ] untag_resource
|
||||
- [x] tag_resource
|
||||
- [x] untag_resource
|
||||
- [ ] update_organizational_unit
|
||||
- [ ] update_policy
|
||||
|
||||
|
31
moto/awslambda/exceptions.py
Normal file
31
moto/awslambda/exceptions.py
Normal file
@ -0,0 +1,31 @@
|
||||
from botocore.client import ClientError
|
||||
|
||||
|
||||
class LambdaClientError(ClientError):
|
||||
def __init__(self, error, message):
|
||||
error_response = {"Error": {"Code": error, "Message": message}}
|
||||
super(LambdaClientError, self).__init__(error_response, None)
|
||||
|
||||
|
||||
class CrossAccountNotAllowed(LambdaClientError):
|
||||
def __init__(self):
|
||||
super(CrossAccountNotAllowed, self).__init__(
|
||||
"AccessDeniedException", "Cross-account pass role is not allowed."
|
||||
)
|
||||
|
||||
|
||||
class InvalidParameterValueException(LambdaClientError):
|
||||
def __init__(self, message):
|
||||
super(InvalidParameterValueException, self).__init__(
|
||||
"InvalidParameterValueException", message
|
||||
)
|
||||
|
||||
|
||||
class InvalidRoleFormat(LambdaClientError):
|
||||
pattern = r"arn:(aws[a-zA-Z-]*)?:iam::(\d{12}):role/?[a-zA-Z_0-9+=,.@\-_/]+"
|
||||
|
||||
def __init__(self, role):
|
||||
message = "1 validation error detected: Value '{0}' at 'role' failed to satisfy constraint: Member must satisfy regular expression pattern: {1}".format(
|
||||
role, InvalidRoleFormat.pattern
|
||||
)
|
||||
super(InvalidRoleFormat, self).__init__("ValidationException", message)
|
@ -26,11 +26,18 @@ import requests.adapters
|
||||
import boto.awslambda
|
||||
from moto.core import BaseBackend, BaseModel
|
||||
from moto.core.exceptions import RESTError
|
||||
from moto.iam.models import iam_backend
|
||||
from moto.iam.exceptions import IAMNotFoundException
|
||||
from moto.core.utils import unix_time_millis
|
||||
from moto.s3.models import s3_backend
|
||||
from moto.logs.models import logs_backends
|
||||
from moto.s3.exceptions import MissingBucket, MissingKey
|
||||
from moto import settings
|
||||
from .exceptions import (
|
||||
CrossAccountNotAllowed,
|
||||
InvalidRoleFormat,
|
||||
InvalidParameterValueException,
|
||||
)
|
||||
from .utils import make_function_arn, make_function_ver_arn
|
||||
from moto.sqs import sqs_backends
|
||||
from moto.dynamodb2 import dynamodb_backends2
|
||||
@ -214,9 +221,8 @@ class LambdaFunction(BaseModel):
|
||||
key = s3_backend.get_key(self.code["S3Bucket"], self.code["S3Key"])
|
||||
except MissingBucket:
|
||||
if do_validate_s3():
|
||||
raise ValueError(
|
||||
"InvalidParameterValueException",
|
||||
"Error occurred while GetObject. S3 Error Code: NoSuchBucket. S3 Error Message: The specified bucket does not exist",
|
||||
raise InvalidParameterValueException(
|
||||
"Error occurred while GetObject. S3 Error Code: NoSuchBucket. S3 Error Message: The specified bucket does not exist"
|
||||
)
|
||||
except MissingKey:
|
||||
if do_validate_s3():
|
||||
@ -357,6 +363,8 @@ class LambdaFunction(BaseModel):
|
||||
self.code_bytes = key.value
|
||||
self.code_size = key.size
|
||||
self.code_sha_256 = hashlib.sha256(key.value).hexdigest()
|
||||
self.code["S3Bucket"] = updated_spec["S3Bucket"]
|
||||
self.code["S3Key"] = updated_spec["S3Key"]
|
||||
|
||||
return self.get_configuration()
|
||||
|
||||
@ -520,6 +528,15 @@ class LambdaFunction(BaseModel):
|
||||
return make_function_arn(self.region, ACCOUNT_ID, self.function_name)
|
||||
raise UnformattedGetAttTemplateException()
|
||||
|
||||
@classmethod
|
||||
def update_from_cloudformation_json(
|
||||
cls, new_resource_name, cloudformation_json, original_resource, region_name
|
||||
):
|
||||
updated_props = cloudformation_json["Properties"]
|
||||
original_resource.update_configuration(updated_props)
|
||||
original_resource.update_function_code(updated_props["Code"])
|
||||
return original_resource
|
||||
|
||||
@staticmethod
|
||||
def _create_zipfile_from_plaintext_code(code):
|
||||
zip_output = io.BytesIO()
|
||||
@ -529,6 +546,9 @@ class LambdaFunction(BaseModel):
|
||||
zip_output.seek(0)
|
||||
return zip_output.read()
|
||||
|
||||
def delete(self, region):
|
||||
lambda_backends[region].delete_function(self.function_name)
|
||||
|
||||
|
||||
class EventSourceMapping(BaseModel):
|
||||
def __init__(self, spec):
|
||||
@ -668,6 +688,19 @@ class LambdaStorage(object):
|
||||
:param fn: Function
|
||||
:type fn: LambdaFunction
|
||||
"""
|
||||
valid_role = re.match(InvalidRoleFormat.pattern, fn.role)
|
||||
if valid_role:
|
||||
account = valid_role.group(2)
|
||||
if account != ACCOUNT_ID:
|
||||
raise CrossAccountNotAllowed()
|
||||
try:
|
||||
iam_backend.get_role_by_arn(fn.role)
|
||||
except IAMNotFoundException:
|
||||
raise InvalidParameterValueException(
|
||||
"The role defined for the function cannot be assumed by Lambda."
|
||||
)
|
||||
else:
|
||||
raise InvalidRoleFormat(fn.role)
|
||||
if fn.function_name in self._functions:
|
||||
self._functions[fn.function_name]["latest"] = fn
|
||||
else:
|
||||
|
@ -211,30 +211,14 @@ class LambdaResponse(BaseResponse):
|
||||
return 200, {}, json.dumps(result)
|
||||
|
||||
def _create_function(self, request, full_url, headers):
|
||||
try:
|
||||
fn = self.lambda_backend.create_function(self.json_body)
|
||||
except ValueError as e:
|
||||
return (
|
||||
400,
|
||||
{},
|
||||
json.dumps({"Error": {"Code": e.args[0], "Message": e.args[1]}}),
|
||||
)
|
||||
else:
|
||||
config = fn.get_configuration()
|
||||
return 201, {}, json.dumps(config)
|
||||
fn = self.lambda_backend.create_function(self.json_body)
|
||||
config = fn.get_configuration()
|
||||
return 201, {}, json.dumps(config)
|
||||
|
||||
def _create_event_source_mapping(self, request, full_url, headers):
|
||||
try:
|
||||
fn = self.lambda_backend.create_event_source_mapping(self.json_body)
|
||||
except ValueError as e:
|
||||
return (
|
||||
400,
|
||||
{},
|
||||
json.dumps({"Error": {"Code": e.args[0], "Message": e.args[1]}}),
|
||||
)
|
||||
else:
|
||||
config = fn.get_configuration()
|
||||
return 201, {}, json.dumps(config)
|
||||
fn = self.lambda_backend.create_event_source_mapping(self.json_body)
|
||||
config = fn.get_configuration()
|
||||
return 201, {}, json.dumps(config)
|
||||
|
||||
def _list_event_source_mappings(self, event_source_arn, function_name):
|
||||
esms = self.lambda_backend.list_event_source_mappings(
|
||||
|
@ -5,6 +5,7 @@ from moto.core.exceptions import RESTError
|
||||
import boto.ec2.cloudwatch
|
||||
from datetime import datetime, timedelta
|
||||
from dateutil.tz import tzutc
|
||||
from uuid import uuid4
|
||||
from .utils import make_arn_for_dashboard
|
||||
|
||||
DEFAULT_ACCOUNT_ID = 123456789012
|
||||
@ -193,6 +194,7 @@ class CloudWatchBackend(BaseBackend):
|
||||
self.alarms = {}
|
||||
self.dashboards = {}
|
||||
self.metric_data = []
|
||||
self.paged_metric_data = {}
|
||||
|
||||
def put_metric_alarm(
|
||||
self,
|
||||
@ -377,6 +379,36 @@ class CloudWatchBackend(BaseBackend):
|
||||
|
||||
self.alarms[alarm_name].update_state(reason, reason_data, state_value)
|
||||
|
||||
def list_metrics(self, next_token, namespace, metric_name):
|
||||
if next_token:
|
||||
if next_token not in self.paged_metric_data:
|
||||
raise RESTError(
|
||||
"PaginationException", "Request parameter NextToken is invalid"
|
||||
)
|
||||
else:
|
||||
metrics = self.paged_metric_data[next_token]
|
||||
del self.paged_metric_data[next_token] # Cant reuse same token twice
|
||||
return self._get_paginated(metrics)
|
||||
else:
|
||||
metrics = self.get_filtered_metrics(metric_name, namespace)
|
||||
return self._get_paginated(metrics)
|
||||
|
||||
def get_filtered_metrics(self, metric_name, namespace):
|
||||
metrics = self.get_all_metrics()
|
||||
if namespace:
|
||||
metrics = [md for md in metrics if md.namespace == namespace]
|
||||
if metric_name:
|
||||
metrics = [md for md in metrics if md.name == metric_name]
|
||||
return metrics
|
||||
|
||||
def _get_paginated(self, metrics):
|
||||
if len(metrics) > 500:
|
||||
next_token = str(uuid4())
|
||||
self.paged_metric_data[next_token] = metrics[500:]
|
||||
return next_token, metrics[0:500]
|
||||
else:
|
||||
return None, metrics
|
||||
|
||||
|
||||
class LogGroup(BaseModel):
|
||||
def __init__(self, spec):
|
||||
|
@ -120,9 +120,14 @@ class CloudWatchResponse(BaseResponse):
|
||||
|
||||
@amzn_request_id
|
||||
def list_metrics(self):
|
||||
metrics = self.cloudwatch_backend.get_all_metrics()
|
||||
namespace = self._get_param("Namespace")
|
||||
metric_name = self._get_param("MetricName")
|
||||
next_token = self._get_param("NextToken")
|
||||
next_token, metrics = self.cloudwatch_backend.list_metrics(
|
||||
next_token, namespace, metric_name
|
||||
)
|
||||
template = self.response_template(LIST_METRICS_TEMPLATE)
|
||||
return template.render(metrics=metrics)
|
||||
return template.render(metrics=metrics, next_token=next_token)
|
||||
|
||||
@amzn_request_id
|
||||
def delete_dashboards(self):
|
||||
@ -340,9 +345,11 @@ LIST_METRICS_TEMPLATE = """<ListMetricsResponse xmlns="http://monitoring.amazona
|
||||
</member>
|
||||
{% endfor %}
|
||||
</Metrics>
|
||||
{% if next_token is not none %}
|
||||
<NextToken>
|
||||
96e88479-4662-450b-8a13-239ded6ce9fe
|
||||
{{ next_token }}
|
||||
</NextToken>
|
||||
{% endif %}
|
||||
</ListMetricsResult>
|
||||
</ListMetricsResponse>"""
|
||||
|
||||
|
@ -8,6 +8,7 @@ import random
|
||||
import re
|
||||
import six
|
||||
import string
|
||||
from botocore.exceptions import ClientError
|
||||
from six.moves.urllib.parse import urlparse
|
||||
|
||||
|
||||
@ -141,7 +142,10 @@ class convert_flask_to_httpretty_response(object):
|
||||
def __call__(self, args=None, **kwargs):
|
||||
from flask import request, Response
|
||||
|
||||
result = self.callback(request, request.url, {})
|
||||
try:
|
||||
result = self.callback(request, request.url, {})
|
||||
except ClientError as exc:
|
||||
result = 400, {}, exc.response["Error"]["Message"]
|
||||
# result is a status, headers, response tuple
|
||||
if len(result) == 3:
|
||||
status, headers, content = result
|
||||
|
@ -63,6 +63,16 @@ class DynamoType(object):
|
||||
elif self.is_map():
|
||||
self.value = dict((k, DynamoType(v)) for k, v in self.value.items())
|
||||
|
||||
def get(self, key):
|
||||
if not key:
|
||||
return self
|
||||
else:
|
||||
key_head = key.split(".")[0]
|
||||
key_tail = ".".join(key.split(".")[1:])
|
||||
if key_head not in self.value:
|
||||
self.value[key_head] = DynamoType({"NONE": None})
|
||||
return self.value[key_head].get(key_tail)
|
||||
|
||||
def set(self, key, new_value, index=None):
|
||||
if index:
|
||||
index = int(index)
|
||||
@ -174,8 +184,13 @@ class DynamoType(object):
|
||||
|
||||
Returns DynamoType or None.
|
||||
"""
|
||||
if isinstance(key, six.string_types) and self.is_map() and key in self.value:
|
||||
return DynamoType(self.value[key])
|
||||
if isinstance(key, six.string_types) and self.is_map():
|
||||
if "." in key and key.split(".")[0] in self.value:
|
||||
return self.value[key.split(".")[0]].child_attr(
|
||||
".".join(key.split(".")[1:])
|
||||
)
|
||||
elif "." not in key and key in self.value:
|
||||
return DynamoType(self.value[key])
|
||||
|
||||
if isinstance(key, int) and self.is_list():
|
||||
idx = key
|
||||
@ -383,11 +398,19 @@ class Item(BaseModel):
|
||||
# created with only this value if it doesn't exist yet
|
||||
# New value must be of same set type as previous value
|
||||
elif dyn_value.is_set():
|
||||
existing = self.attrs.get(key, DynamoType({dyn_value.type: {}}))
|
||||
if not existing.same_type(dyn_value):
|
||||
key_head = key.split(".")[0]
|
||||
key_tail = ".".join(key.split(".")[1:])
|
||||
if key_head not in self.attrs:
|
||||
self.attrs[key_head] = DynamoType({dyn_value.type: {}})
|
||||
existing = self.attrs.get(key_head)
|
||||
existing = existing.get(key_tail)
|
||||
if existing.value and not existing.same_type(dyn_value):
|
||||
raise TypeError()
|
||||
new_set = set(existing.value).union(dyn_value.value)
|
||||
self.attrs[key] = DynamoType({existing.type: list(new_set)})
|
||||
new_set = set(existing.value or []).union(dyn_value.value)
|
||||
existing.set(
|
||||
key=None,
|
||||
new_value=DynamoType({dyn_value.type: list(new_set)}),
|
||||
)
|
||||
else: # Number and Sets are the only supported types for ADD
|
||||
raise TypeError
|
||||
|
||||
@ -402,12 +425,18 @@ class Item(BaseModel):
|
||||
|
||||
if not dyn_value.is_set():
|
||||
raise TypeError
|
||||
existing = self.attrs.get(key, None)
|
||||
key_head = key.split(".")[0]
|
||||
key_tail = ".".join(key.split(".")[1:])
|
||||
existing = self.attrs.get(key_head)
|
||||
existing = existing.get(key_tail)
|
||||
if existing:
|
||||
if not existing.same_type(dyn_value):
|
||||
raise TypeError
|
||||
new_set = set(existing.value).difference(dyn_value.value)
|
||||
self.attrs[key] = DynamoType({existing.type: list(new_set)})
|
||||
existing.set(
|
||||
key=None,
|
||||
new_value=DynamoType({existing.type: list(new_set)}),
|
||||
)
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
"{} update action not yet supported".format(action)
|
||||
@ -418,7 +447,14 @@ class Item(BaseModel):
|
||||
list_append_re = re.match("list_append\\((.+),(.+)\\)", value)
|
||||
if list_append_re:
|
||||
new_value = expression_attribute_values[list_append_re.group(2).strip()]
|
||||
old_list = self.attrs[list_append_re.group(1)]
|
||||
old_list_key = list_append_re.group(1)
|
||||
# Get the existing value
|
||||
old_list = self.attrs[old_list_key.split(".")[0]]
|
||||
if "." in old_list_key:
|
||||
# Value is nested inside a map - find the appropriate child attr
|
||||
old_list = old_list.child_attr(
|
||||
".".join(old_list_key.split(".")[1:])
|
||||
)
|
||||
if not old_list.is_list():
|
||||
raise ParamValidationError
|
||||
old_list.value.extend(new_value["L"])
|
||||
|
@ -1644,23 +1644,27 @@ class RegionsAndZonesBackend(object):
|
||||
class SecurityRule(object):
|
||||
def __init__(self, ip_protocol, from_port, to_port, ip_ranges, source_groups):
|
||||
self.ip_protocol = ip_protocol
|
||||
self.from_port = from_port
|
||||
self.to_port = to_port
|
||||
self.ip_ranges = ip_ranges or []
|
||||
self.source_groups = source_groups
|
||||
|
||||
@property
|
||||
def unique_representation(self):
|
||||
return "{0}-{1}-{2}-{3}-{4}".format(
|
||||
self.ip_protocol,
|
||||
self.from_port,
|
||||
self.to_port,
|
||||
self.ip_ranges,
|
||||
self.source_groups,
|
||||
)
|
||||
if ip_protocol != "-1":
|
||||
self.from_port = from_port
|
||||
self.to_port = to_port
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.unique_representation == other.unique_representation
|
||||
if self.ip_protocol != other.ip_protocol:
|
||||
return False
|
||||
if self.ip_ranges != other.ip_ranges:
|
||||
return False
|
||||
if self.source_groups != other.source_groups:
|
||||
return False
|
||||
if self.ip_protocol != "-1":
|
||||
if self.from_port != other.from_port:
|
||||
return False
|
||||
if self.to_port != other.to_port:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class SecurityGroup(TaggedEC2Resource):
|
||||
@ -1670,7 +1674,7 @@ class SecurityGroup(TaggedEC2Resource):
|
||||
self.name = name
|
||||
self.description = description
|
||||
self.ingress_rules = []
|
||||
self.egress_rules = [SecurityRule(-1, None, None, ["0.0.0.0/0"], [])]
|
||||
self.egress_rules = [SecurityRule("-1", None, None, ["0.0.0.0/0"], [])]
|
||||
self.enis = {}
|
||||
self.vpc_id = vpc_id
|
||||
self.owner_id = OWNER_ID
|
||||
|
@ -567,16 +567,14 @@ class EC2ContainerServiceBackend(BaseBackend):
|
||||
|
||||
return task_definition
|
||||
|
||||
def list_task_definitions(self):
|
||||
"""
|
||||
Filtering not implemented
|
||||
"""
|
||||
def list_task_definitions(self, family_prefix):
|
||||
task_arns = []
|
||||
for task_definition_list in self.task_definitions.values():
|
||||
task_arns.extend(
|
||||
[
|
||||
task_definition.arn
|
||||
for task_definition in task_definition_list.values()
|
||||
if family_prefix is None or task_definition.family == family_prefix
|
||||
]
|
||||
)
|
||||
return task_arns
|
||||
|
@ -68,7 +68,8 @@ class EC2ContainerServiceResponse(BaseResponse):
|
||||
return json.dumps({"taskDefinition": task_definition.response_object})
|
||||
|
||||
def list_task_definitions(self):
|
||||
task_definition_arns = self.ecs_backend.list_task_definitions()
|
||||
family_prefix = self._get_param("familyPrefix")
|
||||
task_definition_arns = self.ecs_backend.list_task_definitions(family_prefix)
|
||||
return json.dumps(
|
||||
{
|
||||
"taskDefinitionArns": task_definition_arns
|
||||
|
@ -371,7 +371,7 @@ class Role(BaseModel):
|
||||
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
|
||||
|
||||
if attribute_name == "Arn":
|
||||
raise NotImplementedError('"Fn::GetAtt" : [ "{0}" , "Arn" ]"')
|
||||
return self.arn
|
||||
raise UnformattedGetAttTemplateException()
|
||||
|
||||
def get_tags(self):
|
||||
|
@ -55,7 +55,7 @@ class FakeThingType(BaseModel):
|
||||
self.thing_type_properties = thing_type_properties
|
||||
self.thing_type_id = str(uuid.uuid4()) # I don't know the rule of id
|
||||
t = time.time()
|
||||
self.metadata = {"deprecated": False, "creationData": int(t * 1000) / 1000.0}
|
||||
self.metadata = {"deprecated": False, "creationDate": int(t * 1000) / 1000.0}
|
||||
self.arn = "arn:aws:iot:%s:1:thingtype/%s" % (self.region_name, thing_type_name)
|
||||
|
||||
def to_dict(self):
|
||||
@ -69,7 +69,12 @@ class FakeThingType(BaseModel):
|
||||
|
||||
class FakeThingGroup(BaseModel):
|
||||
def __init__(
|
||||
self, thing_group_name, parent_group_name, thing_group_properties, region_name
|
||||
self,
|
||||
thing_group_name,
|
||||
parent_group_name,
|
||||
thing_group_properties,
|
||||
region_name,
|
||||
thing_groups,
|
||||
):
|
||||
self.region_name = region_name
|
||||
self.thing_group_name = thing_group_name
|
||||
@ -78,7 +83,32 @@ class FakeThingGroup(BaseModel):
|
||||
self.parent_group_name = parent_group_name
|
||||
self.thing_group_properties = thing_group_properties or {}
|
||||
t = time.time()
|
||||
self.metadata = {"creationData": int(t * 1000) / 1000.0}
|
||||
self.metadata = {"creationDate": int(t * 1000) / 1000.0}
|
||||
if parent_group_name:
|
||||
self.metadata["parentGroupName"] = parent_group_name
|
||||
# initilize rootToParentThingGroups
|
||||
if "rootToParentThingGroups" not in self.metadata:
|
||||
self.metadata["rootToParentThingGroups"] = []
|
||||
# search for parent arn
|
||||
for thing_group_arn, thing_group in thing_groups.items():
|
||||
if thing_group.thing_group_name == parent_group_name:
|
||||
parent_thing_group_structure = thing_group
|
||||
break
|
||||
# if parent arn found (should always be found)
|
||||
if parent_thing_group_structure:
|
||||
# copy parent's rootToParentThingGroups
|
||||
if "rootToParentThingGroups" in parent_thing_group_structure.metadata:
|
||||
self.metadata["rootToParentThingGroups"].extend(
|
||||
parent_thing_group_structure.metadata["rootToParentThingGroups"]
|
||||
)
|
||||
self.metadata["rootToParentThingGroups"].extend(
|
||||
[
|
||||
{
|
||||
"groupName": parent_group_name,
|
||||
"groupArn": parent_thing_group_structure.arn,
|
||||
}
|
||||
]
|
||||
)
|
||||
self.arn = "arn:aws:iot:%s:1:thinggroup/%s" % (
|
||||
self.region_name,
|
||||
thing_group_name,
|
||||
@ -639,6 +669,7 @@ class IoTBackend(BaseBackend):
|
||||
parent_group_name,
|
||||
thing_group_properties,
|
||||
self.region_name,
|
||||
self.thing_groups,
|
||||
)
|
||||
self.thing_groups[thing_group.arn] = thing_group
|
||||
return thing_group.thing_group_name, thing_group.arn, thing_group.thing_group_id
|
||||
|
@ -1,7 +1,11 @@
|
||||
from moto.core import BaseBackend
|
||||
import boto.logs
|
||||
from moto.core.utils import unix_time_millis
|
||||
from .exceptions import ResourceNotFoundException, ResourceAlreadyExistsException
|
||||
from .exceptions import (
|
||||
ResourceNotFoundException,
|
||||
ResourceAlreadyExistsException,
|
||||
InvalidParameterException,
|
||||
)
|
||||
|
||||
|
||||
class LogEvent:
|
||||
@ -118,41 +122,66 @@ class LogStream:
|
||||
|
||||
return True
|
||||
|
||||
def get_paging_token_from_index(index, back=False):
|
||||
if index is not None:
|
||||
return "b/{:056d}".format(index) if back else "f/{:056d}".format(index)
|
||||
return 0
|
||||
|
||||
def get_index_from_paging_token(token):
|
||||
def get_index_and_direction_from_token(token):
|
||||
if token is not None:
|
||||
return int(token[2:])
|
||||
return 0
|
||||
try:
|
||||
return token[0], int(token[2:])
|
||||
except Exception:
|
||||
raise InvalidParameterException(
|
||||
"The specified nextToken is invalid."
|
||||
)
|
||||
return None, 0
|
||||
|
||||
events = sorted(
|
||||
filter(filter_func, self.events),
|
||||
key=lambda event: event.timestamp,
|
||||
reverse=start_from_head,
|
||||
filter(filter_func, self.events), key=lambda event: event.timestamp,
|
||||
)
|
||||
next_index = get_index_from_paging_token(next_token)
|
||||
back_index = next_index
|
||||
|
||||
direction, index = get_index_and_direction_from_token(next_token)
|
||||
limit_index = limit - 1
|
||||
final_index = len(events) - 1
|
||||
|
||||
if direction is None:
|
||||
if start_from_head:
|
||||
start_index = 0
|
||||
end_index = start_index + limit_index
|
||||
else:
|
||||
end_index = final_index
|
||||
start_index = end_index - limit_index
|
||||
elif direction == "f":
|
||||
start_index = index + 1
|
||||
end_index = start_index + limit_index
|
||||
elif direction == "b":
|
||||
end_index = index - 1
|
||||
start_index = end_index - limit_index
|
||||
else:
|
||||
raise InvalidParameterException("The specified nextToken is invalid.")
|
||||
|
||||
if start_index < 0:
|
||||
start_index = 0
|
||||
elif start_index > final_index:
|
||||
return (
|
||||
[],
|
||||
"b/{:056d}".format(final_index),
|
||||
"f/{:056d}".format(final_index),
|
||||
)
|
||||
|
||||
if end_index > final_index:
|
||||
end_index = final_index
|
||||
elif end_index < 0:
|
||||
return (
|
||||
[],
|
||||
"b/{:056d}".format(0),
|
||||
"f/{:056d}".format(0),
|
||||
)
|
||||
|
||||
events_page = [
|
||||
event.to_response_dict()
|
||||
for event in events[next_index : next_index + limit]
|
||||
event.to_response_dict() for event in events[start_index : end_index + 1]
|
||||
]
|
||||
if next_index + limit < len(self.events):
|
||||
next_index += limit
|
||||
else:
|
||||
next_index = len(self.events)
|
||||
|
||||
back_index -= limit
|
||||
if back_index <= 0:
|
||||
back_index = 0
|
||||
|
||||
return (
|
||||
events_page,
|
||||
get_paging_token_from_index(back_index, True),
|
||||
get_paging_token_from_index(next_index),
|
||||
"b/{:056d}".format(start_index),
|
||||
"f/{:056d}".format(end_index),
|
||||
)
|
||||
|
||||
def filter_log_events(
|
||||
|
12
moto/organizations/exceptions.py
Normal file
12
moto/organizations/exceptions.py
Normal file
@ -0,0 +1,12 @@
|
||||
from __future__ import unicode_literals
|
||||
from moto.core.exceptions import JsonRESTError
|
||||
|
||||
|
||||
class InvalidInputException(JsonRESTError):
|
||||
code = 400
|
||||
|
||||
def __init__(self):
|
||||
super(InvalidInputException, self).__init__(
|
||||
"InvalidInputException",
|
||||
"You provided a value that does not match the required pattern.",
|
||||
)
|
@ -8,6 +8,7 @@ from moto.core import BaseBackend, BaseModel
|
||||
from moto.core.exceptions import RESTError
|
||||
from moto.core.utils import unix_time
|
||||
from moto.organizations import utils
|
||||
from moto.organizations.exceptions import InvalidInputException
|
||||
|
||||
|
||||
class FakeOrganization(BaseModel):
|
||||
@ -57,6 +58,7 @@ class FakeAccount(BaseModel):
|
||||
self.joined_method = "CREATED"
|
||||
self.parent_id = organization.root_id
|
||||
self.attached_policies = []
|
||||
self.tags = {}
|
||||
|
||||
@property
|
||||
def arn(self):
|
||||
@ -442,5 +444,32 @@ class OrganizationsBackend(BaseBackend):
|
||||
]
|
||||
return dict(Targets=objects)
|
||||
|
||||
def tag_resource(self, **kwargs):
|
||||
account = next((a for a in self.accounts if a.id == kwargs["ResourceId"]), None)
|
||||
|
||||
if account is None:
|
||||
raise InvalidInputException
|
||||
|
||||
new_tags = {tag["Key"]: tag["Value"] for tag in kwargs["Tags"]}
|
||||
account.tags.update(new_tags)
|
||||
|
||||
def list_tags_for_resource(self, **kwargs):
|
||||
account = next((a for a in self.accounts if a.id == kwargs["ResourceId"]), None)
|
||||
|
||||
if account is None:
|
||||
raise InvalidInputException
|
||||
|
||||
tags = [{"Key": key, "Value": value} for key, value in account.tags.items()]
|
||||
return dict(Tags=tags)
|
||||
|
||||
def untag_resource(self, **kwargs):
|
||||
account = next((a for a in self.accounts if a.id == kwargs["ResourceId"]), None)
|
||||
|
||||
if account is None:
|
||||
raise InvalidInputException
|
||||
|
||||
for key in kwargs["TagKeys"]:
|
||||
account.tags.pop(key, None)
|
||||
|
||||
|
||||
organizations_backend = OrganizationsBackend()
|
||||
|
@ -119,3 +119,18 @@ class OrganizationsResponse(BaseResponse):
|
||||
return json.dumps(
|
||||
self.organizations_backend.list_targets_for_policy(**self.request_params)
|
||||
)
|
||||
|
||||
def tag_resource(self):
|
||||
return json.dumps(
|
||||
self.organizations_backend.tag_resource(**self.request_params)
|
||||
)
|
||||
|
||||
def list_tags_for_resource(self):
|
||||
return json.dumps(
|
||||
self.organizations_backend.list_tags_for_resource(**self.request_params)
|
||||
)
|
||||
|
||||
def untag_resource(self):
|
||||
return json.dumps(
|
||||
self.organizations_backend.untag_resource(**self.request_params)
|
||||
)
|
||||
|
@ -17,7 +17,7 @@ from .exceptions import (
|
||||
InvalidRequestException,
|
||||
ClientError,
|
||||
)
|
||||
from .utils import random_password, secret_arn
|
||||
from .utils import random_password, secret_arn, get_secret_name_from_arn
|
||||
|
||||
|
||||
class SecretsManager(BaseModel):
|
||||
@ -25,11 +25,25 @@ class SecretsManager(BaseModel):
|
||||
self.region = region_name
|
||||
|
||||
|
||||
class SecretsStore(dict):
|
||||
def __setitem__(self, key, value):
|
||||
new_key = get_secret_name_from_arn(key)
|
||||
super(SecretsStore, self).__setitem__(new_key, value)
|
||||
|
||||
def __getitem__(self, key):
|
||||
new_key = get_secret_name_from_arn(key)
|
||||
return super(SecretsStore, self).__getitem__(new_key)
|
||||
|
||||
def __contains__(self, key):
|
||||
new_key = get_secret_name_from_arn(key)
|
||||
return dict.__contains__(self, new_key)
|
||||
|
||||
|
||||
class SecretsManagerBackend(BaseBackend):
|
||||
def __init__(self, region_name=None, **kwargs):
|
||||
super(SecretsManagerBackend, self).__init__()
|
||||
self.region = region_name
|
||||
self.secrets = {}
|
||||
self.secrets = SecretsStore()
|
||||
|
||||
def reset(self):
|
||||
region_name = self.region
|
||||
@ -44,7 +58,6 @@ class SecretsManagerBackend(BaseBackend):
|
||||
return (dt - epoch).total_seconds()
|
||||
|
||||
def get_secret_value(self, secret_id, version_id, version_stage):
|
||||
|
||||
if not self._is_valid_identifier(secret_id):
|
||||
raise SecretNotFoundException()
|
||||
|
||||
@ -453,6 +466,30 @@ class SecretsManagerBackend(BaseBackend):
|
||||
|
||||
return arn, name
|
||||
|
||||
@staticmethod
|
||||
def get_resource_policy(secret_id):
|
||||
resource_policy = {
|
||||
"Version": "2012-10-17",
|
||||
"Statement": {
|
||||
"Effect": "Allow",
|
||||
"Principal": {
|
||||
"AWS": [
|
||||
"arn:aws:iam::111122223333:root",
|
||||
"arn:aws:iam::444455556666:root",
|
||||
]
|
||||
},
|
||||
"Action": ["secretsmanager:GetSecretValue"],
|
||||
"Resource": "*",
|
||||
},
|
||||
}
|
||||
return json.dumps(
|
||||
{
|
||||
"ARN": secret_id,
|
||||
"Name": secret_id,
|
||||
"ResourcePolicy": json.dumps(resource_policy),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
available_regions = boto3.session.Session().get_available_regions("secretsmanager")
|
||||
secretsmanager_backends = {
|
||||
|
@ -114,3 +114,9 @@ class SecretsManagerResponse(BaseResponse):
|
||||
secret_id=secret_id
|
||||
)
|
||||
return json.dumps(dict(ARN=arn, Name=name))
|
||||
|
||||
def get_resource_policy(self):
|
||||
secret_id = self._get_param("SecretId")
|
||||
return secretsmanager_backends[self.region].get_resource_policy(
|
||||
secret_id=secret_id
|
||||
)
|
||||
|
@ -72,6 +72,19 @@ def secret_arn(region, secret_id):
|
||||
)
|
||||
|
||||
|
||||
def get_secret_name_from_arn(secret_id):
|
||||
# can fetch by both arn and by name
|
||||
# but we are storing via name
|
||||
# so we need to change the arn to name
|
||||
# if it starts with arn then the secret id is arn
|
||||
if secret_id.startswith("arn:aws:secretsmanager:"):
|
||||
# split the arn by colon
|
||||
# then get the last value which is the name appended with a random string
|
||||
# then remove the random string
|
||||
secret_id = "-".join(secret_id.split(":")[-1].split("-")[:-1])
|
||||
return secret_id
|
||||
|
||||
|
||||
def _exclude_characters(password, exclude_characters):
|
||||
for c in exclude_characters:
|
||||
if c in string.punctuation:
|
||||
|
@ -15,6 +15,7 @@ from freezegun import freeze_time
|
||||
from moto import (
|
||||
mock_dynamodb2,
|
||||
mock_lambda,
|
||||
mock_iam,
|
||||
mock_s3,
|
||||
mock_ec2,
|
||||
mock_sns,
|
||||
@ -22,6 +23,7 @@ from moto import (
|
||||
settings,
|
||||
mock_sqs,
|
||||
)
|
||||
from moto.sts.models import ACCOUNT_ID
|
||||
from nose.tools import assert_raises
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
@ -96,7 +98,7 @@ def test_invoke_requestresponse_function():
|
||||
conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file1()},
|
||||
Description="test lambda function",
|
||||
@ -129,7 +131,7 @@ def test_invoke_event_function():
|
||||
conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file1()},
|
||||
Description="test lambda function",
|
||||
@ -163,7 +165,7 @@ if settings.TEST_SERVER_MODE:
|
||||
conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python3.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file2()},
|
||||
Description="test lambda function",
|
||||
@ -218,7 +220,7 @@ def test_invoke_function_from_sns():
|
||||
result = conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function",
|
||||
@ -262,7 +264,7 @@ def test_create_based_on_s3_with_missing_bucket():
|
||||
conn.create_function.when.called_with(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"S3Bucket": "this-bucket-does-not-exist", "S3Key": "test.zip"},
|
||||
Description="test lambda function",
|
||||
@ -287,7 +289,7 @@ def test_create_function_from_aws_bucket():
|
||||
result = conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"S3Bucket": "test-bucket", "S3Key": "test.zip"},
|
||||
Description="test lambda function",
|
||||
@ -308,7 +310,7 @@ def test_create_function_from_aws_bucket():
|
||||
_lambda_region
|
||||
),
|
||||
"Runtime": "python2.7",
|
||||
"Role": "test-iam-role",
|
||||
"Role": result["Role"],
|
||||
"Handler": "lambda_function.lambda_handler",
|
||||
"CodeSha256": hashlib.sha256(zip_content).hexdigest(),
|
||||
"CodeSize": len(zip_content),
|
||||
@ -334,7 +336,7 @@ def test_create_function_from_zipfile():
|
||||
result = conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": zip_content},
|
||||
Description="test lambda function",
|
||||
@ -355,7 +357,7 @@ def test_create_function_from_zipfile():
|
||||
_lambda_region
|
||||
),
|
||||
"Runtime": "python2.7",
|
||||
"Role": "test-iam-role",
|
||||
"Role": result["Role"],
|
||||
"Handler": "lambda_function.lambda_handler",
|
||||
"CodeSize": len(zip_content),
|
||||
"Description": "test lambda function",
|
||||
@ -383,7 +385,7 @@ def test_get_function():
|
||||
conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"S3Bucket": "test-bucket", "S3Key": "test.zip"},
|
||||
Description="test lambda function",
|
||||
@ -414,7 +416,7 @@ def test_get_function():
|
||||
result["Configuration"]["FunctionName"].should.equal("testFunction")
|
||||
result["Configuration"]["Handler"].should.equal("lambda_function.lambda_handler")
|
||||
result["Configuration"]["MemorySize"].should.equal(128)
|
||||
result["Configuration"]["Role"].should.equal("test-iam-role")
|
||||
result["Configuration"]["Role"].should.equal(get_role_name())
|
||||
result["Configuration"]["Runtime"].should.equal("python2.7")
|
||||
result["Configuration"]["Timeout"].should.equal(3)
|
||||
result["Configuration"]["Version"].should.equal("$LATEST")
|
||||
@ -451,7 +453,7 @@ def test_get_function_by_arn():
|
||||
fnc = conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"S3Bucket": bucket_name, "S3Key": "test.zip"},
|
||||
Description="test lambda function",
|
||||
@ -477,7 +479,7 @@ def test_delete_function():
|
||||
conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"S3Bucket": "test-bucket", "S3Key": "test.zip"},
|
||||
Description="test lambda function",
|
||||
@ -512,7 +514,7 @@ def test_delete_function_by_arn():
|
||||
fnc = conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"S3Bucket": bucket_name, "S3Key": "test.zip"},
|
||||
Description="test lambda function",
|
||||
@ -547,7 +549,7 @@ def test_publish():
|
||||
conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"S3Bucket": "test-bucket", "S3Key": "test.zip"},
|
||||
Description="test lambda function",
|
||||
@ -599,7 +601,7 @@ def test_list_create_list_get_delete_list():
|
||||
conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"S3Bucket": "test-bucket", "S3Key": "test.zip"},
|
||||
Description="test lambda function",
|
||||
@ -624,7 +626,7 @@ def test_list_create_list_get_delete_list():
|
||||
"FunctionName": "testFunction",
|
||||
"Handler": "lambda_function.lambda_handler",
|
||||
"MemorySize": 128,
|
||||
"Role": "test-iam-role",
|
||||
"Role": get_role_name(),
|
||||
"Runtime": "python2.7",
|
||||
"Timeout": 3,
|
||||
"Version": "$LATEST",
|
||||
@ -665,7 +667,7 @@ def lambda_handler(event, context):
|
||||
client.create_function(
|
||||
FunctionName="test-lambda-fx",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Description="test lambda function",
|
||||
Timeout=3,
|
||||
@ -698,7 +700,7 @@ def test_tags():
|
||||
function = conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.handler",
|
||||
Code={"S3Bucket": "test-bucket", "S3Key": "test.zip"},
|
||||
Description="test lambda function",
|
||||
@ -766,7 +768,7 @@ def test_invoke_async_function():
|
||||
conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file1()},
|
||||
Description="test lambda function",
|
||||
@ -790,7 +792,7 @@ def test_get_function_created_with_zipfile():
|
||||
result = conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.handler",
|
||||
Code={"ZipFile": zip_content},
|
||||
Description="test lambda function",
|
||||
@ -819,7 +821,7 @@ def test_get_function_created_with_zipfile():
|
||||
"FunctionName": "testFunction",
|
||||
"Handler": "lambda_function.handler",
|
||||
"MemorySize": 128,
|
||||
"Role": "test-iam-role",
|
||||
"Role": get_role_name(),
|
||||
"Runtime": "python2.7",
|
||||
"Timeout": 3,
|
||||
"Version": "$LATEST",
|
||||
@ -835,7 +837,7 @@ def test_add_function_permission():
|
||||
conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=(get_role_name()),
|
||||
Handler="lambda_function.handler",
|
||||
Code={"ZipFile": zip_content},
|
||||
Description="test lambda function",
|
||||
@ -866,7 +868,7 @@ def test_get_function_policy():
|
||||
conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.handler",
|
||||
Code={"ZipFile": zip_content},
|
||||
Description="test lambda function",
|
||||
@ -906,7 +908,7 @@ def test_list_versions_by_function():
|
||||
conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="arn:aws:iam::123456789012:role/test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"S3Bucket": "test-bucket", "S3Key": "test.zip"},
|
||||
Description="test lambda function",
|
||||
@ -935,7 +937,7 @@ def test_list_versions_by_function():
|
||||
conn.create_function(
|
||||
FunctionName="testFunction_2",
|
||||
Runtime="python2.7",
|
||||
Role="arn:aws:iam::123456789012:role/test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"S3Bucket": "test-bucket", "S3Key": "test.zip"},
|
||||
Description="test lambda function",
|
||||
@ -964,7 +966,7 @@ def test_create_function_with_already_exists():
|
||||
conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"S3Bucket": "test-bucket", "S3Key": "test.zip"},
|
||||
Description="test lambda function",
|
||||
@ -976,7 +978,7 @@ def test_create_function_with_already_exists():
|
||||
response = conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"S3Bucket": "test-bucket", "S3Key": "test.zip"},
|
||||
Description="test lambda function",
|
||||
@ -1008,7 +1010,7 @@ def test_create_event_source_mapping():
|
||||
func = conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function",
|
||||
@ -1038,7 +1040,7 @@ def test_invoke_function_from_sqs():
|
||||
func = conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function",
|
||||
@ -1098,7 +1100,7 @@ def test_invoke_function_from_dynamodb():
|
||||
func = conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function executed after a DynamoDB table is updated",
|
||||
@ -1149,7 +1151,7 @@ def test_invoke_function_from_sqs_exception():
|
||||
func = conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file4()},
|
||||
Description="test lambda function",
|
||||
@ -1208,7 +1210,7 @@ def test_list_event_source_mappings():
|
||||
func = conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function",
|
||||
@ -1240,7 +1242,7 @@ def test_get_event_source_mapping():
|
||||
func = conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function",
|
||||
@ -1270,7 +1272,7 @@ def test_update_event_source_mapping():
|
||||
func1 = conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function",
|
||||
@ -1281,7 +1283,7 @@ def test_update_event_source_mapping():
|
||||
func2 = conn.create_function(
|
||||
FunctionName="testFunction2",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function",
|
||||
@ -1314,7 +1316,7 @@ def test_delete_event_source_mapping():
|
||||
func1 = conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function",
|
||||
@ -1350,7 +1352,7 @@ def test_update_configuration():
|
||||
fxn = conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"S3Bucket": "test-bucket", "S3Key": "test.zip"},
|
||||
Description="test lambda function",
|
||||
@ -1395,7 +1397,7 @@ def test_update_function_zip():
|
||||
fxn = conn.create_function(
|
||||
FunctionName="testFunctionZip",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": zip_content_one},
|
||||
Description="test lambda function",
|
||||
@ -1430,7 +1432,7 @@ def test_update_function_zip():
|
||||
"FunctionName": "testFunctionZip",
|
||||
"Handler": "lambda_function.lambda_handler",
|
||||
"MemorySize": 128,
|
||||
"Role": "test-iam-role",
|
||||
"Role": fxn["Role"],
|
||||
"Runtime": "python2.7",
|
||||
"Timeout": 3,
|
||||
"Version": "2",
|
||||
@ -1453,7 +1455,7 @@ def test_update_function_s3():
|
||||
fxn = conn.create_function(
|
||||
FunctionName="testFunctionS3",
|
||||
Runtime="python2.7",
|
||||
Role="test-iam-role",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"S3Bucket": "test-bucket", "S3Key": "test.zip"},
|
||||
Description="test lambda function",
|
||||
@ -1492,10 +1494,67 @@ def test_update_function_s3():
|
||||
"FunctionName": "testFunctionS3",
|
||||
"Handler": "lambda_function.lambda_handler",
|
||||
"MemorySize": 128,
|
||||
"Role": "test-iam-role",
|
||||
"Role": fxn["Role"],
|
||||
"Runtime": "python2.7",
|
||||
"Timeout": 3,
|
||||
"Version": "2",
|
||||
"VpcConfig": {"SecurityGroupIds": [], "SubnetIds": []},
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@mock_lambda
|
||||
def test_create_function_with_invalid_arn():
|
||||
err = create_invalid_lambda("test-iam-role")
|
||||
err.exception.response["Error"]["Message"].should.equal(
|
||||
"1 validation error detected: Value 'test-iam-role' at 'role' failed to satisfy constraint: Member must satisfy regular expression pattern: arn:(aws[a-zA-Z-]*)?:iam::(\d{12}):role/?[a-zA-Z_0-9+=,.@\-_/]+"
|
||||
)
|
||||
|
||||
|
||||
@mock_lambda
|
||||
def test_create_function_with_arn_from_different_account():
|
||||
err = create_invalid_lambda("arn:aws:iam::000000000000:role/example_role")
|
||||
err.exception.response["Error"]["Message"].should.equal(
|
||||
"Cross-account pass role is not allowed."
|
||||
)
|
||||
|
||||
|
||||
@mock_lambda
|
||||
def test_create_function_with_unknown_arn():
|
||||
err = create_invalid_lambda(
|
||||
"arn:aws:iam::" + str(ACCOUNT_ID) + ":role/service-role/unknown_role"
|
||||
)
|
||||
err.exception.response["Error"]["Message"].should.equal(
|
||||
"The role defined for the function cannot be assumed by Lambda."
|
||||
)
|
||||
|
||||
|
||||
def create_invalid_lambda(role):
|
||||
conn = boto3.client("lambda", "us-west-2")
|
||||
zip_content = get_test_zip_file1()
|
||||
with assert_raises(ClientError) as err:
|
||||
conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
Role=role,
|
||||
Handler="lambda_function.handler",
|
||||
Code={"ZipFile": zip_content},
|
||||
Description="test lambda function",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
Publish=True,
|
||||
)
|
||||
return err
|
||||
|
||||
|
||||
def get_role_name():
|
||||
with mock_iam():
|
||||
iam = boto3.client("iam", region_name="us-west-2")
|
||||
try:
|
||||
return iam.get_role(RoleName="my-role")["Role"]["Arn"]
|
||||
except ClientError:
|
||||
return iam.create_role(
|
||||
RoleName="my-role",
|
||||
AssumeRolePolicyDocument="some policy",
|
||||
Path="/my-path/",
|
||||
)["Role"]["Arn"]
|
||||
|
138
tests/test_awslambda/test_lambda_cloudformation.py
Normal file
138
tests/test_awslambda/test_lambda_cloudformation.py
Normal file
@ -0,0 +1,138 @@
|
||||
import boto3
|
||||
import io
|
||||
import sure # noqa
|
||||
import zipfile
|
||||
from botocore.exceptions import ClientError
|
||||
from moto import mock_cloudformation, mock_iam, mock_lambda, mock_s3
|
||||
from nose.tools import assert_raises
|
||||
from string import Template
|
||||
from uuid import uuid4
|
||||
|
||||
|
||||
def _process_lambda(func_str):
|
||||
zip_output = io.BytesIO()
|
||||
zip_file = zipfile.ZipFile(zip_output, "w", zipfile.ZIP_DEFLATED)
|
||||
zip_file.writestr("lambda_function.py", func_str)
|
||||
zip_file.close()
|
||||
zip_output.seek(0)
|
||||
return zip_output.read()
|
||||
|
||||
|
||||
def get_zip_file():
|
||||
pfunc = """
|
||||
def lambda_handler1(event, context):
|
||||
return event
|
||||
def lambda_handler2(event, context):
|
||||
return event
|
||||
"""
|
||||
return _process_lambda(pfunc)
|
||||
|
||||
|
||||
template = Template(
|
||||
"""{
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Resources": {
|
||||
"LF3ABOV": {
|
||||
"Type": "AWS::Lambda::Function",
|
||||
"Properties": {
|
||||
"Handler": "$handler",
|
||||
"Role": "$role_arn",
|
||||
"Runtime": "$runtime",
|
||||
"Code": {
|
||||
"S3Bucket": "$bucket_name",
|
||||
"S3Key": "$key"
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}"""
|
||||
)
|
||||
|
||||
|
||||
@mock_cloudformation
|
||||
@mock_lambda
|
||||
@mock_s3
|
||||
def test_lambda_can_be_updated_by_cloudformation():
|
||||
s3 = boto3.client("s3", "us-east-1")
|
||||
cf = boto3.client("cloudformation", region_name="us-east-1")
|
||||
lmbda = boto3.client("lambda", region_name="us-east-1")
|
||||
body2, stack = create_stack(cf, s3)
|
||||
created_fn_name = get_created_function_name(cf, stack)
|
||||
# Verify function has been created
|
||||
created_fn = lmbda.get_function(FunctionName=created_fn_name)
|
||||
created_fn["Configuration"]["Handler"].should.equal(
|
||||
"lambda_function.lambda_handler1"
|
||||
)
|
||||
created_fn["Configuration"]["Runtime"].should.equal("python3.7")
|
||||
created_fn["Code"]["Location"].should.match("/test1.zip")
|
||||
# Update CF stack
|
||||
cf.update_stack(StackName="teststack", TemplateBody=body2)
|
||||
updated_fn_name = get_created_function_name(cf, stack)
|
||||
# Verify function has been updated
|
||||
updated_fn = lmbda.get_function(FunctionName=updated_fn_name)
|
||||
updated_fn["Configuration"]["FunctionArn"].should.equal(
|
||||
created_fn["Configuration"]["FunctionArn"]
|
||||
)
|
||||
updated_fn["Configuration"]["Handler"].should.equal(
|
||||
"lambda_function.lambda_handler2"
|
||||
)
|
||||
updated_fn["Configuration"]["Runtime"].should.equal("python3.8")
|
||||
updated_fn["Code"]["Location"].should.match("/test2.zip")
|
||||
|
||||
|
||||
@mock_cloudformation
|
||||
@mock_lambda
|
||||
@mock_s3
|
||||
def test_lambda_can_be_deleted_by_cloudformation():
|
||||
s3 = boto3.client("s3", "us-east-1")
|
||||
cf = boto3.client("cloudformation", region_name="us-east-1")
|
||||
lmbda = boto3.client("lambda", region_name="us-east-1")
|
||||
_, stack = create_stack(cf, s3)
|
||||
created_fn_name = get_created_function_name(cf, stack)
|
||||
# Delete Stack
|
||||
cf.delete_stack(StackName=stack["StackId"])
|
||||
# Verify function was deleted
|
||||
with assert_raises(ClientError) as e:
|
||||
lmbda.get_function(FunctionName=created_fn_name)
|
||||
e.exception.response["Error"]["Code"].should.equal("404")
|
||||
|
||||
|
||||
def create_stack(cf, s3):
|
||||
bucket_name = str(uuid4())
|
||||
s3.create_bucket(Bucket=bucket_name)
|
||||
s3.put_object(Bucket=bucket_name, Key="test1.zip", Body=get_zip_file())
|
||||
s3.put_object(Bucket=bucket_name, Key="test2.zip", Body=get_zip_file())
|
||||
body1 = get_template(bucket_name, "1", "python3.7")
|
||||
body2 = get_template(bucket_name, "2", "python3.8")
|
||||
stack = cf.create_stack(StackName="teststack", TemplateBody=body1)
|
||||
return body2, stack
|
||||
|
||||
|
||||
def get_created_function_name(cf, stack):
|
||||
res = cf.list_stack_resources(StackName=stack["StackId"])
|
||||
return res["StackResourceSummaries"][0]["PhysicalResourceId"]
|
||||
|
||||
|
||||
def get_template(bucket_name, version, runtime):
|
||||
key = "test" + version + ".zip"
|
||||
handler = "lambda_function.lambda_handler" + version
|
||||
return template.substitute(
|
||||
bucket_name=bucket_name,
|
||||
key=key,
|
||||
handler=handler,
|
||||
role_arn=get_role_arn(),
|
||||
runtime=runtime,
|
||||
)
|
||||
|
||||
|
||||
def get_role_arn():
|
||||
with mock_iam():
|
||||
iam = boto3.client("iam", region_name="us-west-2")
|
||||
try:
|
||||
return iam.get_role(RoleName="my-role")["Role"]["Arn"]
|
||||
except ClientError:
|
||||
return iam.create_role(
|
||||
RoleName="my-role",
|
||||
AssumeRolePolicyDocument="some policy",
|
||||
Path="/my-path/",
|
||||
)["Role"]["Arn"]
|
@ -4,6 +4,7 @@ import os
|
||||
import json
|
||||
|
||||
import boto
|
||||
import boto.iam
|
||||
import boto.s3
|
||||
import boto.s3.key
|
||||
import boto.cloudformation
|
||||
@ -18,6 +19,7 @@ from moto import (
|
||||
mock_cloudformation_deprecated,
|
||||
mock_s3_deprecated,
|
||||
mock_route53_deprecated,
|
||||
mock_iam_deprecated,
|
||||
)
|
||||
from moto.cloudformation import cloudformation_backends
|
||||
|
||||
@ -516,7 +518,7 @@ def test_create_stack_lambda_and_dynamodb():
|
||||
"Code": {"S3Bucket": "bucket_123", "S3Key": "key_123"},
|
||||
"FunctionName": "func1",
|
||||
"Handler": "handler.handler",
|
||||
"Role": "role1",
|
||||
"Role": get_role_name(),
|
||||
"Runtime": "python2.7",
|
||||
"Description": "descr",
|
||||
"MemorySize": 12345,
|
||||
@ -591,3 +593,12 @@ def test_create_stack_kinesis():
|
||||
stack = conn.describe_stacks()[0]
|
||||
resources = stack.list_resources()
|
||||
assert len(resources) == 1
|
||||
|
||||
|
||||
def get_role_name():
|
||||
with mock_iam_deprecated():
|
||||
iam = boto.connect_iam()
|
||||
role = iam.create_role("my-role")["create_role_response"]["create_role_result"][
|
||||
"role"
|
||||
]["arn"]
|
||||
return role
|
||||
|
@ -1773,11 +1773,25 @@ def lambda_handler(event, context):
|
||||
"Handler": "lambda_function.handler",
|
||||
"Description": "Test function",
|
||||
"MemorySize": 128,
|
||||
"Role": "test-role",
|
||||
"Role": {"Fn::GetAtt": ["MyRole", "Arn"]},
|
||||
"Runtime": "python2.7",
|
||||
"Environment": {"Variables": {"TEST_ENV_KEY": "test-env-val"}},
|
||||
},
|
||||
}
|
||||
},
|
||||
"MyRole": {
|
||||
"Type": "AWS::IAM::Role",
|
||||
"Properties": {
|
||||
"AssumeRolePolicyDocument": {
|
||||
"Statement": [
|
||||
{
|
||||
"Action": ["sts:AssumeRole"],
|
||||
"Effect": "Allow",
|
||||
"Principal": {"Service": ["ec2.amazonaws.com"]},
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
@ -1791,7 +1805,6 @@ def lambda_handler(event, context):
|
||||
result["Functions"][0]["Description"].should.equal("Test function")
|
||||
result["Functions"][0]["Handler"].should.equal("lambda_function.handler")
|
||||
result["Functions"][0]["MemorySize"].should.equal(128)
|
||||
result["Functions"][0]["Role"].should.equal("test-role")
|
||||
result["Functions"][0]["Runtime"].should.equal("python2.7")
|
||||
result["Functions"][0]["Environment"].should.equal(
|
||||
{"Variables": {"TEST_ENV_KEY": "test-env-val"}}
|
||||
|
@ -1,8 +1,5 @@
|
||||
import boto
|
||||
from boto.ec2.cloudwatch.alarm import MetricAlarm
|
||||
import boto3
|
||||
from datetime import datetime, timedelta
|
||||
import pytz
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_cloudwatch_deprecated
|
||||
|
90
tests/test_cloudwatch/test_cloudwatch_boto3.py
Executable file → Normal file
90
tests/test_cloudwatch/test_cloudwatch_boto3.py
Executable file → Normal file
@ -1,8 +1,10 @@
|
||||
from __future__ import unicode_literals
|
||||
# from __future__ import unicode_literals
|
||||
|
||||
import boto3
|
||||
from botocore.exceptions import ClientError
|
||||
from datetime import datetime, timedelta
|
||||
from nose.tools import assert_raises
|
||||
from uuid import uuid4
|
||||
import pytz
|
||||
import sure # noqa
|
||||
|
||||
@ -155,13 +157,14 @@ def test_put_metric_data_no_dimensions():
|
||||
@mock_cloudwatch
|
||||
def test_put_metric_data_with_statistics():
|
||||
conn = boto3.client("cloudwatch", region_name="us-east-1")
|
||||
utc_now = datetime.now(tz=pytz.utc)
|
||||
|
||||
conn.put_metric_data(
|
||||
Namespace="tester",
|
||||
MetricData=[
|
||||
dict(
|
||||
MetricName="statmetric",
|
||||
Timestamp=datetime(2015, 1, 1),
|
||||
Timestamp=utc_now,
|
||||
# no Value to test https://github.com/spulec/moto/issues/1615
|
||||
StatisticValues=dict(
|
||||
SampleCount=123.0, Sum=123.0, Minimum=123.0, Maximum=123.0
|
||||
@ -203,3 +206,86 @@ def test_get_metric_statistics():
|
||||
datapoint = stats["Datapoints"][0]
|
||||
datapoint["SampleCount"].should.equal(1.0)
|
||||
datapoint["Sum"].should.equal(1.5)
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_list_metrics():
|
||||
cloudwatch = boto3.client("cloudwatch", "eu-west-1")
|
||||
# Verify namespace has to exist
|
||||
res = cloudwatch.list_metrics(Namespace="unknown/")["Metrics"]
|
||||
res.should.be.empty
|
||||
# Create some metrics to filter on
|
||||
create_metrics(cloudwatch, namespace="list_test_1/", metrics=4, data_points=2)
|
||||
create_metrics(cloudwatch, namespace="list_test_2/", metrics=4, data_points=2)
|
||||
# Verify we can retrieve everything
|
||||
res = cloudwatch.list_metrics()["Metrics"]
|
||||
len(res).should.equal(16) # 2 namespaces * 4 metrics * 2 data points
|
||||
# Verify we can filter by namespace/metric name
|
||||
res = cloudwatch.list_metrics(Namespace="list_test_1/")["Metrics"]
|
||||
len(res).should.equal(8) # 1 namespace * 4 metrics * 2 data points
|
||||
res = cloudwatch.list_metrics(Namespace="list_test_1/", MetricName="metric1")[
|
||||
"Metrics"
|
||||
]
|
||||
len(res).should.equal(2) # 1 namespace * 1 metrics * 2 data points
|
||||
# Verify format
|
||||
res.should.equal(
|
||||
[
|
||||
{u"Namespace": "list_test_1/", u"Dimensions": [], u"MetricName": "metric1"},
|
||||
{u"Namespace": "list_test_1/", u"Dimensions": [], u"MetricName": "metric1"},
|
||||
]
|
||||
)
|
||||
# Verify unknown namespace still has no results
|
||||
res = cloudwatch.list_metrics(Namespace="unknown/")["Metrics"]
|
||||
res.should.be.empty
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_list_metrics_paginated():
|
||||
cloudwatch = boto3.client("cloudwatch", "eu-west-1")
|
||||
# Verify that only a single page of metrics is returned
|
||||
cloudwatch.list_metrics()["Metrics"].should.be.empty
|
||||
# Verify we can't pass a random NextToken
|
||||
with assert_raises(ClientError) as e:
|
||||
cloudwatch.list_metrics(NextToken=str(uuid4()))
|
||||
e.exception.response["Error"]["Message"].should.equal(
|
||||
"Request parameter NextToken is invalid"
|
||||
)
|
||||
# Add a boatload of metrics
|
||||
create_metrics(cloudwatch, namespace="test", metrics=100, data_points=1)
|
||||
# Verify that a single page is returned until we've reached 500
|
||||
first_page = cloudwatch.list_metrics()
|
||||
first_page["Metrics"].shouldnt.be.empty
|
||||
len(first_page["Metrics"]).should.equal(100)
|
||||
create_metrics(cloudwatch, namespace="test", metrics=200, data_points=2)
|
||||
first_page = cloudwatch.list_metrics()
|
||||
len(first_page["Metrics"]).should.equal(500)
|
||||
first_page.shouldnt.contain("NextToken")
|
||||
# Verify that adding more data points results in pagination
|
||||
create_metrics(cloudwatch, namespace="test", metrics=60, data_points=10)
|
||||
first_page = cloudwatch.list_metrics()
|
||||
len(first_page["Metrics"]).should.equal(500)
|
||||
first_page["NextToken"].shouldnt.be.empty
|
||||
# Retrieve second page - and verify there's more where that came from
|
||||
second_page = cloudwatch.list_metrics(NextToken=first_page["NextToken"])
|
||||
len(second_page["Metrics"]).should.equal(500)
|
||||
second_page.should.contain("NextToken")
|
||||
# Last page should only have the last 100 results, and no NextToken (indicating that pagination is finished)
|
||||
third_page = cloudwatch.list_metrics(NextToken=second_page["NextToken"])
|
||||
len(third_page["Metrics"]).should.equal(100)
|
||||
third_page.shouldnt.contain("NextToken")
|
||||
# Verify that we can't reuse an existing token
|
||||
with assert_raises(ClientError) as e:
|
||||
cloudwatch.list_metrics(NextToken=first_page["NextToken"])
|
||||
e.exception.response["Error"]["Message"].should.equal(
|
||||
"Request parameter NextToken is invalid"
|
||||
)
|
||||
|
||||
|
||||
def create_metrics(cloudwatch, namespace, metrics=5, data_points=5):
|
||||
for i in range(0, metrics):
|
||||
metric_name = "metric" + str(i)
|
||||
for j in range(0, data_points):
|
||||
cloudwatch.put_metric_data(
|
||||
Namespace=namespace,
|
||||
MetricData=[{"MetricName": metric_name, "Value": j, "Unit": "Seconds"}],
|
||||
)
|
||||
|
@ -3237,6 +3237,7 @@ def test_update_supports_complex_expression_attribute_values():
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_update_supports_list_append():
|
||||
# Verify whether the list_append operation works as expected
|
||||
client = boto3.client("dynamodb", region_name="us-east-1")
|
||||
|
||||
client.create_table(
|
||||
@ -3270,6 +3271,132 @@ def test_update_supports_list_append():
|
||||
)
|
||||
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_update_supports_nested_list_append():
|
||||
# Verify whether we can append a list that's inside a map
|
||||
client = boto3.client("dynamodb", region_name="us-east-1")
|
||||
|
||||
client.create_table(
|
||||
AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
|
||||
TableName="TestTable",
|
||||
KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
|
||||
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
|
||||
)
|
||||
client.put_item(
|
||||
TableName="TestTable",
|
||||
Item={
|
||||
"id": {"S": "nested_list_append"},
|
||||
"a": {"M": {"b": {"L": [{"S": "bar1"}]}}},
|
||||
},
|
||||
)
|
||||
|
||||
# Update item using list_append expression
|
||||
client.update_item(
|
||||
TableName="TestTable",
|
||||
Key={"id": {"S": "nested_list_append"}},
|
||||
UpdateExpression="SET a.#b = list_append(a.#b, :i)",
|
||||
ExpressionAttributeValues={":i": {"L": [{"S": "bar2"}]}},
|
||||
ExpressionAttributeNames={"#b": "b"},
|
||||
)
|
||||
|
||||
# Verify item is appended to the existing list
|
||||
result = client.get_item(
|
||||
TableName="TestTable", Key={"id": {"S": "nested_list_append"}}
|
||||
)["Item"]
|
||||
result.should.equal(
|
||||
{
|
||||
"id": {"S": "nested_list_append"},
|
||||
"a": {"M": {"b": {"L": [{"S": "bar1"}, {"S": "bar2"}]}}},
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_update_supports_multiple_levels_nested_list_append():
|
||||
# Verify whether we can append a list that's inside a map that's inside a map (Inception!)
|
||||
client = boto3.client("dynamodb", region_name="us-east-1")
|
||||
|
||||
client.create_table(
|
||||
AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
|
||||
TableName="TestTable",
|
||||
KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
|
||||
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
|
||||
)
|
||||
client.put_item(
|
||||
TableName="TestTable",
|
||||
Item={
|
||||
"id": {"S": "nested_list_append"},
|
||||
"a": {"M": {"b": {"M": {"c": {"L": [{"S": "bar1"}]}}}}},
|
||||
},
|
||||
)
|
||||
|
||||
# Update item using list_append expression
|
||||
client.update_item(
|
||||
TableName="TestTable",
|
||||
Key={"id": {"S": "nested_list_append"}},
|
||||
UpdateExpression="SET a.#b.c = list_append(a.#b.#c, :i)",
|
||||
ExpressionAttributeValues={":i": {"L": [{"S": "bar2"}]}},
|
||||
ExpressionAttributeNames={"#b": "b", "#c": "c"},
|
||||
)
|
||||
|
||||
# Verify item is appended to the existing list
|
||||
result = client.get_item(
|
||||
TableName="TestTable", Key={"id": {"S": "nested_list_append"}}
|
||||
)["Item"]
|
||||
result.should.equal(
|
||||
{
|
||||
"id": {"S": "nested_list_append"},
|
||||
"a": {"M": {"b": {"M": {"c": {"L": [{"S": "bar1"}, {"S": "bar2"}]}}}}},
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@mock_dynamodb2
def test_update_supports_nested_list_append_onto_another_list():
    # list_append may read one nested list and assign the result to a sibling
    # attribute; whatever the destination held before is overwritten.
    dynamodb = boto3.client("dynamodb", region_name="us-east-1")
    table_name = "TestTable"
    item_key = {"id": {"S": "list_append_another"}}

    dynamodb.create_table(
        AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
        TableName=table_name,
        KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )
    dynamodb.put_item(
        TableName=table_name,
        Item={
            "id": {"S": "list_append_another"},
            "a": {"M": {"b": {"L": [{"S": "bar1"}]}, "c": {"L": [{"S": "car1"}]}}},
        },
    )

    # Write "a.b plus one extra element" into a.c, discarding its old value.
    dynamodb.update_item(
        TableName=table_name,
        Key=item_key,
        UpdateExpression="SET a.#c = list_append(a.#b, :i)",
        ExpressionAttributeValues={":i": {"L": [{"S": "bar2"}]}},
        ExpressionAttributeNames={"#b": "b", "#c": "c"},
    )

    # Source list a.b is untouched; a.c now holds the appended copy.
    stored = dynamodb.get_item(TableName=table_name, Key=item_key)["Item"]
    stored.should.equal(
        {
            "id": {"S": "list_append_another"},
            "a": {
                "M": {
                    "b": {"L": [{"S": "bar1"}]},
                    "c": {"L": [{"S": "bar1"}, {"S": "bar2"}]},
                }
            },
        }
    )
|
||||
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_update_catches_invalid_list_append_operation():
|
||||
client = boto3.client("dynamodb", region_name="us-east-1")
|
||||
|
@ -1289,6 +1289,16 @@ def test_update_item_add_with_expression():
|
||||
current_item["str_set"] = current_item["str_set"].union({"item4"})
|
||||
dict(table.get_item(Key=item_key)["Item"]).should.equal(current_item)
|
||||
|
||||
# Update item to add a string value to a non-existing set
|
||||
# Should just create the set in the background
|
||||
table.update_item(
|
||||
Key=item_key,
|
||||
UpdateExpression="ADD non_existing_str_set :v",
|
||||
ExpressionAttributeValues={":v": {"item4"}},
|
||||
)
|
||||
current_item["non_existing_str_set"] = {"item4"}
|
||||
dict(table.get_item(Key=item_key)["Item"]).should.equal(current_item)
|
||||
|
||||
# Update item to add a num value to a num set
|
||||
table.update_item(
|
||||
Key=item_key,
|
||||
@ -1336,6 +1346,69 @@ def test_update_item_add_with_expression():
|
||||
).should.have.raised(ClientError)
|
||||
|
||||
|
||||
@mock_dynamodb2
def test_update_item_add_with_nested_sets():
    table = _create_table_with_range_key()

    item_key = {"forum_name": "the-key", "subject": "123"}
    expected = {
        "forum_name": "the-key",
        "subject": "123",
        "nested": {"str_set": {"item1", "item2", "item3"}},
    }

    # Seed the table with an item carrying a string set inside a map.
    table.put_item(Item=expected)

    # ADD on an existing nested string set extends it in place.
    table.update_item(
        Key=item_key,
        UpdateExpression="ADD nested.str_set :v",
        ExpressionAttributeValues={":v": {"item4"}},
    )
    expected["nested"]["str_set"] = expected["nested"]["str_set"] | {"item4"}
    dict(table.get_item(Key=item_key)["Item"]).should.equal(expected)

    # ADD on a nested attribute that does not exist yet just creates the set.
    table.update_item(
        Key=item_key,
        UpdateExpression="ADD #ns.#ne :v",
        ExpressionAttributeNames={"#ns": "nested", "#ne": "non_existing_str_set"},
        ExpressionAttributeValues={":v": {"new_item"}},
    )
    expected["nested"]["non_existing_str_set"] = {"new_item"}
    dict(table.get_item(Key=item_key)["Item"]).should.equal(expected)
|
||||
|
||||
|
||||
@mock_dynamodb2
def test_update_item_delete_with_nested_sets():
    table = _create_table_with_range_key()

    item_key = {"forum_name": "the-key", "subject": "123"}
    expected = {
        "forum_name": "the-key",
        "subject": "123",
        "nested": {"str_set": {"item1", "item2", "item3"}},
    }

    # Seed the table with an item carrying a string set inside a map.
    table.put_item(Item=expected)

    # DELETE removes the supplied element from the nested string set.
    table.update_item(
        Key=item_key,
        UpdateExpression="DELETE nested.str_set :v",
        ExpressionAttributeValues={":v": {"item3"}},
    )
    expected["nested"]["str_set"] = expected["nested"]["str_set"] - {"item3"}
    dict(table.get_item(Key=item_key)["Item"]).should.equal(expected)
|
||||
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_update_item_delete_with_expression():
|
||||
table = _create_table_with_range_key()
|
||||
|
@ -833,3 +833,33 @@ def test_get_all_security_groups_filter_with_same_vpc_id():
|
||||
cm.exception.code.should.equal("InvalidGroup.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2
def test_revoke_security_group_egress():
    ec2 = boto3.resource("ec2", "us-east-1")
    security_group = ec2.create_security_group(
        Description="Test SG", GroupName="test-sg"
    )

    # A freshly created group starts with the default allow-all egress rule.
    default_egress_rule = {
        "IpProtocol": "-1",
        "IpRanges": [{"CidrIp": "0.0.0.0/0"}],
        "UserIdGroupPairs": [],
    }
    security_group.ip_permissions_egress.should.equal([default_egress_rule])

    # Revoke that rule.
    # NOTE(review): the request includes FromPort/ToPort although the stored
    # rule carries none — presumably ignored for IpProtocol "-1"; confirm.
    security_group.revoke_egress(
        IpPermissions=[
            {
                "FromPort": 0,
                "IpProtocol": "-1",
                "IpRanges": [{"CidrIp": "0.0.0.0/0"}],
                "ToPort": 123,
            },
        ]
    )

    # After revocation no egress permissions remain on the group.
    security_group.reload()
    security_group.ip_permissions_egress.should.have.length_of(0)
|
||||
|
@ -171,6 +171,69 @@ def test_list_task_definitions():
|
||||
)
|
||||
|
||||
|
||||
@mock_ecs
def test_list_task_definitions_with_family_prefix():
    client = boto3.client("ecs", region_name="us-east-1")

    def container_definitions(name, image, access_key):
        # Single-container definition shared by every registration below.
        return [
            {
                "name": name,
                "image": image,
                "cpu": 1024,
                "memory": 400,
                "essential": True,
                "environment": [{"name": "AWS_ACCESS_KEY_ID", "value": access_key}],
                "logConfiguration": {"logDriver": "json-file"},
            }
        ]

    # Two revisions under family "test_ecs_task_a" ...
    client.register_task_definition(
        family="test_ecs_task_a",
        containerDefinitions=container_definitions(
            "hello_world", "docker/hello-world:latest", "SOME_ACCESS_KEY"
        ),
    )
    client.register_task_definition(
        family="test_ecs_task_a",
        containerDefinitions=container_definitions(
            "hello_world", "docker/hello-world:latest", "SOME_ACCESS_KEY"
        ),
    )
    # ... and a single revision under family "test_ecs_task_b".
    client.register_task_definition(
        family="test_ecs_task_b",
        containerDefinitions=container_definitions(
            "hello_world2", "docker/hello-world2:latest", "SOME_ACCESS_KEY2"
        ),
    )

    # A prefix matching no family name exactly yields nothing here.
    empty_response = client.list_task_definitions(familyPrefix="test_ecs_task")
    len(empty_response["taskDefinitionArns"]).should.equal(0)

    # Filtering on the full family name returns both revisions, in order.
    filtered_response = client.list_task_definitions(familyPrefix="test_ecs_task_a")
    len(filtered_response["taskDefinitionArns"]).should.equal(2)
    filtered_response["taskDefinitionArns"][0].should.equal(
        "arn:aws:ecs:us-east-1:012345678910:task-definition/test_ecs_task_a:1"
    )
    filtered_response["taskDefinitionArns"][1].should.equal(
        "arn:aws:ecs:us-east-1:012345678910:task-definition/test_ecs_task_a:2"
    )
|
||||
|
||||
|
||||
@mock_ecs
|
||||
def test_describe_task_definition():
|
||||
client = boto3.client("ecs", region_name="us-east-1")
|
||||
@ -1756,7 +1819,7 @@ def test_update_task_definition_family_through_cloudformation_should_trigger_a_r
|
||||
cfn_conn.update_stack(StackName="test_stack", TemplateBody=template2_json)
|
||||
|
||||
ecs_conn = boto3.client("ecs", region_name="us-west-1")
|
||||
resp = ecs_conn.list_task_definitions(familyPrefix="testTaskDefinition")
|
||||
resp = ecs_conn.list_task_definitions(familyPrefix="testTaskDefinition2")
|
||||
len(resp["taskDefinitionArns"]).should.equal(1)
|
||||
resp["taskDefinitionArns"][0].endswith("testTaskDefinition2:1").should.be.true
|
||||
|
||||
|
@ -581,6 +581,278 @@ def test_delete_principal_thing():
|
||||
client.delete_certificate(certificateId=cert_id)
|
||||
|
||||
|
||||
@mock_iot
def test_describe_thing_group_metadata_hierarchy():
    client = boto3.client("iot", region_name="ap-northeast-1")
    group_name_1a = "my-group-name-1a"
    group_name_1b = "my-group-name-1b"
    group_name_2a = "my-group-name-2a"
    group_name_2b = "my-group-name-2b"
    group_name_3a = "my-group-name-3a"
    group_name_3b = "my-group-name-3b"
    group_name_3c = "my-group-name-3c"
    group_name_3d = "my-group-name-3d"

    # Hierarchy under test:
    # --1a
    #   |--2a
    #   |  |--3a
    #   |  |--3b
    #   |
    #   |--2b
    #      |--3c
    #      |--3d
    # --1b

    def create_group(name, parent=None):
        # Create one thing group (optionally under `parent`) and sanity-check
        # the creation response before returning it.
        kwargs = {"thingGroupName": name}
        if parent is not None:
            kwargs["parentGroupName"] = parent
        created = client.create_thing_group(**kwargs)
        created.should.have.key("thingGroupName").which.should.equal(name)
        created.should.have.key("thingGroupArn")
        return created

    # Build the tree level by level.
    thing_group1a = create_group(group_name_1a)
    create_group(group_name_1b)
    thing_group2a = create_group(group_name_2a, group_name_1a)
    thing_group2b = create_group(group_name_2b, group_name_1a)
    create_group(group_name_3a, group_name_2a)
    create_group(group_name_3b, group_name_2a)
    create_group(group_name_3c, group_name_2b)
    create_group(group_name_3d, group_name_2b)

    def check_child_group(name, parent_name, ancestry):
        # Describe a non-root group and verify its parent pointer plus the
        # root-to-parent chain (ordered root first, direct parent last).
        description = client.describe_thing_group(thingGroupName=name)
        description.should.have.key("thingGroupName").which.should.equal(name)
        description.should.have.key("thingGroupProperties")
        description.should.have.key("thingGroupMetadata")
        metadata = description["thingGroupMetadata"]
        metadata.should.have.length_of(3)
        metadata.should.have.key("parentGroupName").being.equal(parent_name)
        metadata.should.have.key("rootToParentThingGroups")
        chain = metadata["rootToParentThingGroups"]
        chain.should.have.length_of(len(ancestry))
        for index, (ancestor_name, ancestor_group) in enumerate(ancestry):
            chain[index]["groupName"].should.match(ancestor_name)
            chain[index]["groupArn"].should.match(ancestor_group["thingGroupArn"])
        description.should.have.key("version")

    # Root groups: metadata carries only a creationDate.
    thing_group_description1a = client.describe_thing_group(
        thingGroupName=group_name_1a
    )
    thing_group_description1a.should.have.key("thingGroupName").which.should.equal(
        group_name_1a
    )
    thing_group_description1a.should.have.key("thingGroupProperties")
    thing_group_description1a.should.have.key("thingGroupMetadata")
    thing_group_description1a["thingGroupMetadata"].should.have.key("creationDate")
    thing_group_description1a.should.have.key("version")

    thing_group_description1b = client.describe_thing_group(
        thingGroupName=group_name_1b
    )
    thing_group_description1b.should.have.key("thingGroupName").which.should.equal(
        group_name_1b
    )
    thing_group_description1b.should.have.key("thingGroupProperties")
    thing_group_description1b.should.have.key("thingGroupMetadata")
    thing_group_description1b["thingGroupMetadata"].should.have.length_of(1)
    thing_group_description1b["thingGroupMetadata"].should.have.key("creationDate")
    thing_group_description1b.should.have.key("version")

    # Level 2: both are children of 1a, with a single-entry ancestor chain.
    check_child_group(group_name_2a, group_name_1a, [(group_name_1a, thing_group1a)])
    check_child_group(group_name_2b, group_name_1a, [(group_name_1a, thing_group1a)])

    # Level 3: two-entry chains running root -> direct parent.
    check_child_group(
        group_name_3a,
        group_name_2a,
        [(group_name_1a, thing_group1a), (group_name_2a, thing_group2a)],
    )
    check_child_group(
        group_name_3b,
        group_name_2a,
        [(group_name_1a, thing_group1a), (group_name_2a, thing_group2a)],
    )
    check_child_group(
        group_name_3c,
        group_name_2b,
        [(group_name_1a, thing_group1a), (group_name_2b, thing_group2b)],
    )
    check_child_group(
        group_name_3d,
        group_name_2b,
        [(group_name_1a, thing_group1a), (group_name_2b, thing_group2b)],
    )
|
||||
|
||||
|
||||
@mock_iot
|
||||
def test_thing_groups():
|
||||
client = boto3.client("iot", region_name="ap-northeast-1")
|
||||
|
@ -166,70 +166,202 @@ def test_delete_retention_policy():
|
||||
|
||||
@mock_logs
|
||||
def test_get_log_events():
|
||||
conn = boto3.client("logs", "us-west-2")
|
||||
client = boto3.client("logs", "us-west-2")
|
||||
log_group_name = "test"
|
||||
log_stream_name = "stream"
|
||||
conn.create_log_group(logGroupName=log_group_name)
|
||||
conn.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name)
|
||||
client.create_log_group(logGroupName=log_group_name)
|
||||
client.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name)
|
||||
|
||||
events = [{"timestamp": x, "message": str(x)} for x in range(20)]
|
||||
|
||||
conn.put_log_events(
|
||||
client.put_log_events(
|
||||
logGroupName=log_group_name, logStreamName=log_stream_name, logEvents=events
|
||||
)
|
||||
|
||||
resp = conn.get_log_events(
|
||||
resp = client.get_log_events(
|
||||
logGroupName=log_group_name, logStreamName=log_stream_name, limit=10
|
||||
)
|
||||
|
||||
resp["events"].should.have.length_of(10)
|
||||
resp.should.have.key("nextForwardToken")
|
||||
resp.should.have.key("nextBackwardToken")
|
||||
resp["nextForwardToken"].should.equal(
|
||||
"f/00000000000000000000000000000000000000000000000000000010"
|
||||
)
|
||||
resp["nextBackwardToken"].should.equal(
|
||||
"b/00000000000000000000000000000000000000000000000000000000"
|
||||
)
|
||||
for i in range(10):
|
||||
resp["events"][i]["timestamp"].should.equal(i)
|
||||
resp["events"][i]["message"].should.equal(str(i))
|
||||
|
||||
next_token = resp["nextForwardToken"]
|
||||
|
||||
resp = conn.get_log_events(
|
||||
logGroupName=log_group_name,
|
||||
logStreamName=log_stream_name,
|
||||
nextToken=next_token,
|
||||
limit=10,
|
||||
)
|
||||
|
||||
resp["events"].should.have.length_of(10)
|
||||
resp.should.have.key("nextForwardToken")
|
||||
resp.should.have.key("nextBackwardToken")
|
||||
resp["nextForwardToken"].should.equal(
|
||||
"f/00000000000000000000000000000000000000000000000000000020"
|
||||
)
|
||||
resp["nextBackwardToken"].should.equal(
|
||||
"b/00000000000000000000000000000000000000000000000000000000"
|
||||
)
|
||||
for i in range(10):
|
||||
resp["events"][i]["timestamp"].should.equal(i + 10)
|
||||
resp["events"][i]["message"].should.equal(str(i + 10))
|
||||
resp["nextForwardToken"].should.equal(
|
||||
"f/00000000000000000000000000000000000000000000000000000019"
|
||||
)
|
||||
resp["nextBackwardToken"].should.equal(
|
||||
"b/00000000000000000000000000000000000000000000000000000010"
|
||||
)
|
||||
|
||||
resp = conn.get_log_events(
|
||||
resp = client.get_log_events(
|
||||
logGroupName=log_group_name,
|
||||
logStreamName=log_stream_name,
|
||||
nextToken=resp["nextBackwardToken"],
|
||||
limit=20,
|
||||
)
|
||||
|
||||
resp["events"].should.have.length_of(10)
|
||||
for i in range(10):
|
||||
resp["events"][i]["timestamp"].should.equal(i)
|
||||
resp["events"][i]["message"].should.equal(str(i))
|
||||
resp["nextForwardToken"].should.equal(
|
||||
"f/00000000000000000000000000000000000000000000000000000009"
|
||||
)
|
||||
resp["nextBackwardToken"].should.equal(
|
||||
"b/00000000000000000000000000000000000000000000000000000000"
|
||||
)
|
||||
|
||||
resp = client.get_log_events(
|
||||
logGroupName=log_group_name,
|
||||
logStreamName=log_stream_name,
|
||||
nextToken=resp["nextBackwardToken"],
|
||||
limit=10,
|
||||
)
|
||||
|
||||
resp["events"].should.have.length_of(0)
|
||||
resp["nextForwardToken"].should.equal(
|
||||
"f/00000000000000000000000000000000000000000000000000000000"
|
||||
)
|
||||
resp["nextBackwardToken"].should.equal(
|
||||
"b/00000000000000000000000000000000000000000000000000000000"
|
||||
)
|
||||
|
||||
resp = client.get_log_events(
|
||||
logGroupName=log_group_name,
|
||||
logStreamName=log_stream_name,
|
||||
nextToken=resp["nextForwardToken"],
|
||||
limit=1,
|
||||
)
|
||||
|
||||
resp["events"].should.have.length_of(1)
|
||||
resp["events"][0]["timestamp"].should.equal(1)
|
||||
resp["events"][0]["message"].should.equal(str(1))
|
||||
resp["nextForwardToken"].should.equal(
|
||||
"f/00000000000000000000000000000000000000000000000000000001"
|
||||
)
|
||||
resp["nextBackwardToken"].should.equal(
|
||||
"b/00000000000000000000000000000000000000000000000000000001"
|
||||
)
|
||||
|
||||
|
||||
@mock_logs
def test_get_log_events_with_start_from_head():
    client = boto3.client("logs", "us-west-2")
    log_group_name = "test"
    log_stream_name = "stream"
    client.create_log_group(logGroupName=log_group_name)
    client.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name)

    # Twenty events whose message mirrors the timestamp (0..19).
    events = [{"timestamp": ts, "message": str(ts)} for ts in range(20)]

    client.put_log_events(
        logGroupName=log_group_name, logStreamName=log_stream_name, logEvents=events
    )

    # With startFromHead the first page is the OLDEST ten events.
    # (startFromHead is only relevant when no nextToken is supplied.)
    page = client.get_log_events(
        logGroupName=log_group_name,
        logStreamName=log_stream_name,
        limit=10,
        startFromHead=True,
    )

    page["events"].should.have.length_of(10)
    page.should.have.key("nextForwardToken")
    page.should.have.key("nextBackwardToken")
    for position, event in enumerate(page["events"]):
        event["timestamp"].should.equal(position)
        event["message"].should.equal(str(position))
    page["nextForwardToken"].should.equal(
        "f/00000000000000000000000000000000000000000000000000000009"
    )
    page["nextBackwardToken"].should.equal(
        "b/00000000000000000000000000000000000000000000000000000000"
    )

    # Following the forward token yields the newest ten events.
    page = client.get_log_events(
        logGroupName=log_group_name,
        logStreamName=log_stream_name,
        nextToken=page["nextForwardToken"],
        limit=20,
    )

    page["events"].should.have.length_of(10)
    for position, event in enumerate(page["events"]):
        event["timestamp"].should.equal(position + 10)
        event["message"].should.equal(str(position + 10))
    page["nextForwardToken"].should.equal(
        "f/00000000000000000000000000000000000000000000000000000019"
    )
    page["nextBackwardToken"].should.equal(
        "b/00000000000000000000000000000000000000000000000000000010"
    )

    # Past the end: no events, and both tokens stay pinned at the last event.
    page = client.get_log_events(
        logGroupName=log_group_name,
        logStreamName=log_stream_name,
        nextToken=page["nextForwardToken"],
        limit=10,
    )

    page["events"].should.have.length_of(0)
    page["nextForwardToken"].should.equal(
        "f/00000000000000000000000000000000000000000000000000000019"
    )
    page["nextBackwardToken"].should.equal(
        "b/00000000000000000000000000000000000000000000000000000019"
    )

    # Stepping backward by one entry returns event 18.
    page = client.get_log_events(
        logGroupName=log_group_name,
        logStreamName=log_stream_name,
        nextToken=page["nextBackwardToken"],
        limit=1,
    )

    page["events"].should.have.length_of(1)
    page["events"][0]["timestamp"].should.equal(18)
    page["events"][0]["message"].should.equal(str(18))
    page["nextForwardToken"].should.equal(
        "f/00000000000000000000000000000000000000000000000000000018"
    )
    page["nextBackwardToken"].should.equal(
        "b/00000000000000000000000000000000000000000000000000000018"
    )
|
||||
|
||||
|
||||
@mock_logs
def test_get_log_events_errors():
    client = boto3.client("logs", "us-west-2")
    log_group_name = "test"
    log_stream_name = "stream"
    client.create_log_group(logGroupName=log_group_name)
    client.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name)

    # Both a structurally plausible token with a bad prefix and an entirely
    # arbitrary string must be rejected with the same validation error.
    bad_tokens = [
        "n/00000000000000000000000000000000000000000000000000000000",
        "not-existing-token",
    ]
    for bad_token in bad_tokens:
        with assert_raises(ClientError) as e:
            client.get_log_events(
                logGroupName=log_group_name,
                logStreamName=log_stream_name,
                nextToken=bad_token,
            )
        ex = e.exception
        ex.operation_name.should.equal("GetLogEvents")
        ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
        ex.response["Error"]["Code"].should.equal("InvalidParameterException")
        ex.response["Error"]["Message"].should.contain(
            "The specified nextToken is invalid."
        )
|
||||
|
||||
|
||||
@mock_logs
|
||||
|
@ -3,6 +3,7 @@ from __future__ import unicode_literals
|
||||
import boto3
|
||||
import json
|
||||
import six
|
||||
import sure # noqa
|
||||
from botocore.exceptions import ClientError
|
||||
from nose.tools import assert_raises
|
||||
|
||||
@ -605,3 +606,110 @@ def test_list_targets_for_policy_exception():
|
||||
ex.operation_name.should.equal("ListTargetsForPolicy")
|
||||
ex.response["Error"]["Code"].should.equal("400")
|
||||
ex.response["Error"]["Message"].should.contain("InvalidInputException")
|
||||
|
||||
|
||||
@mock_organizations
def test_tag_resource():
    client = boto3.client("organizations", region_name="us-east-1")
    client.create_organization(FeatureSet="ALL")
    create_status = client.create_account(AccountName=mockname, Email=mockemail)[
        "CreateAccountStatus"
    ]
    account_id = create_status["AccountId"]

    # First call attaches a brand-new tag to the account.
    client.tag_resource(ResourceId=account_id, Tags=[{"Key": "key", "Value": "value"}])

    listed = client.list_tags_for_resource(ResourceId=account_id)
    listed["Tags"].should.equal([{"Key": "key", "Value": "value"}])

    # Tagging again with an existing key overwrites that key's value.
    client.tag_resource(
        ResourceId=account_id, Tags=[{"Key": "key", "Value": "new-value"}]
    )

    listed = client.list_tags_for_resource(ResourceId=account_id)
    listed["Tags"].should.equal([{"Key": "key", "Value": "new-value"}])
|
||||
|
||||
|
||||
@mock_organizations
def test_tag_resource_errors():
    client = boto3.client("organizations", region_name="us-east-1")
    client.create_organization(FeatureSet="ALL")

    # "000000000000" matches no resource in the organization, so the call
    # must fail input validation.
    with assert_raises(ClientError) as e:
        client.tag_resource(
            ResourceId="000000000000", Tags=[{"Key": "key", "Value": "value"}]
        )
    ex = e.exception
    ex.operation_name.should.equal("TagResource")
    ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
    ex.response["Error"]["Code"].should.contain("InvalidInputException")
    ex.response["Error"]["Message"].should.equal(
        "You provided a value that does not match the required pattern."
    )
|
||||
|
||||
|
||||
@mock_organizations
def test_list_tags_for_resource():
    """list_tags_for_resource returns exactly the tags previously attached to an account."""
    org_client = boto3.client("organizations", region_name="us-east-1")
    org_client.create_organization(FeatureSet="ALL")
    status = org_client.create_account(AccountName=mockname, Email=mockemail)[
        "CreateAccountStatus"
    ]
    account_id = status["AccountId"]
    org_client.tag_resource(
        ResourceId=account_id, Tags=[{"Key": "key", "Value": "value"}]
    )

    tags = org_client.list_tags_for_resource(ResourceId=account_id)["Tags"]

    tags.should.equal([{"Key": "key", "Value": "value"}])
|
||||
|
||||
|
||||
@mock_organizations
def test_list_tags_for_resource_errors():
    """list_tags_for_resource rejects a ResourceId that does not name a known account."""
    org_client = boto3.client("organizations", region_name="us-east-1")
    org_client.create_organization(FeatureSet="ALL")

    with assert_raises(ClientError) as caught:
        org_client.list_tags_for_resource(ResourceId="000000000000")
    err = caught.exception
    err.operation_name.should.equal("ListTagsForResource")
    err.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
    err.response["Error"]["Code"].should.contain("InvalidInputException")
    err.response["Error"]["Message"].should.equal(
        "You provided a value that does not match the required pattern."
    )
|
||||
|
||||
|
||||
@mock_organizations
def test_untag_resource():
    """untag_resource removes matching keys and silently ignores unknown keys."""
    org_client = boto3.client("organizations", region_name="us-east-1")
    org_client.create_organization(FeatureSet="ALL")
    account_id = org_client.create_account(AccountName=mockname, Email=mockemail)[
        "CreateAccountStatus"
    ]["AccountId"]
    org_client.tag_resource(
        ResourceId=account_id, Tags=[{"Key": "key", "Value": "value"}]
    )
    org_client.list_tags_for_resource(ResourceId=account_id)["Tags"].should.equal(
        [{"Key": "key", "Value": "value"}]
    )

    # Untagging a key that was never attached is a no-op, not an error.
    org_client.untag_resource(ResourceId=account_id, TagKeys=["not-existing"])
    org_client.list_tags_for_resource(ResourceId=account_id)["Tags"].should.equal(
        [{"Key": "key", "Value": "value"}]
    )

    # Untagging the real key leaves the account with no tags.
    org_client.untag_resource(ResourceId=account_id, TagKeys=["key"])
    org_client.list_tags_for_resource(ResourceId=account_id)[
        "Tags"
    ].should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_organizations
def test_untag_resource_errors():
    """untag_resource rejects a ResourceId that does not name a known account."""
    org_client = boto3.client("organizations", region_name="us-east-1")
    org_client.create_organization(FeatureSet="ALL")

    with assert_raises(ClientError) as caught:
        org_client.untag_resource(ResourceId="000000000000", TagKeys=["key"])
    err = caught.exception
    err.operation_name.should.equal("UntagResource")
    err.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
    err.response["Error"]["Code"].should.contain("InvalidInputException")
    err.response["Error"]["Message"].should.equal(
        "You provided a value that does not match the required pattern."
    )
|
||||
|
@ -26,6 +26,18 @@ def test_get_secret_value():
|
||||
assert result["SecretString"] == "foosecret"
|
||||
|
||||
|
||||
@mock_secretsmanager
def test_get_secret_value_by_arn():
    """A secret can be fetched via the ARN returned by create_secret, not just its name."""
    sm_client = boto3.client("secretsmanager", region_name="us-west-2")

    plaintext = "test_get_secret_value_by_arn"
    created = sm_client.create_secret(
        Name="java-util-test-password", SecretString=plaintext
    )

    fetched = sm_client.get_secret_value(SecretId=created["ARN"])
    assert fetched["SecretString"] == plaintext
|
||||
|
||||
|
||||
@mock_secretsmanager
|
||||
def test_get_secret_value_binary():
|
||||
conn = boto3.client("secretsmanager", region_name="us-west-2")
|
||||
@ -361,6 +373,18 @@ def test_describe_secret():
|
||||
assert secret_description_2["ARN"] != "" # Test arn not empty
|
||||
|
||||
|
||||
@mock_secretsmanager
def test_describe_secret_with_arn():
    """describe_secret accepts the ARN returned by create_secret as SecretId."""
    sm_client = boto3.client("secretsmanager", region_name="us-west-2")
    created = sm_client.create_secret(Name="test-secret", SecretString="foosecret")

    description = sm_client.describe_secret(SecretId=created["ARN"])

    assert description  # Returned dict is not empty
    assert description["Name"] == ("test-secret")
    # NOTE(review): the original test asserts the described ARN DIFFERS from the
    # created one — presumably the mock regenerates ARNs; confirm against the
    # backend before "fixing" this to an equality check.
    assert description["ARN"] != created["ARN"]
|
||||
|
||||
|
||||
@mock_secretsmanager
|
||||
def test_describe_secret_that_does_not_exist():
|
||||
conn = boto3.client("secretsmanager", region_name="us-west-2")
|
||||
|
@ -586,6 +586,29 @@ def test_can_list_secret_version_ids():
|
||||
].sort() == returned_version_ids.sort()
|
||||
|
||||
|
||||
@mock_secretsmanager
def test_get_resource_policy_secret():
    """A GetResourcePolicy request for an existing secret returns its ARN and name."""
    app = server.create_backend_app("secretsmanager")
    http = app.test_client()

    # Create the secret first so the policy lookup has something to resolve.
    http.post(
        "/",
        data={"Name": "test-secret", "SecretString": "foosecret"},
        headers={"X-Amz-Target": "secretsmanager.CreateSecret"},
    )
    policy_response = http.post(
        "/",
        data={"SecretId": "test-secret"},
        headers={"X-Amz-Target": "secretsmanager.GetResourcePolicy"},
    )

    payload = json.loads(policy_response.data.decode("utf-8"))
    assert payload  # Returned dict is not empty
    assert payload["ARN"] != ""
    assert payload["Name"] == "test-secret"
|
||||
|
||||
|
||||
#
|
||||
# The following tests should work, but fail on the embedded dict in
|
||||
# RotationRules. The error message suggests a problem deeper in the code, which
|
||||
|
Loading…
Reference in New Issue
Block a user