commit 66b26cd7b0
@@ -8,6 +8,7 @@ from boto3 import Session
 
 from moto.compat import OrderedDict
 from moto.core import BaseBackend, BaseModel
+from moto.core.utils import iso_8601_datetime_without_milliseconds
 
 from .parsing import ResourceMap, OutputMap
 from .utils import (
@@ -240,6 +241,7 @@ class FakeStack(BaseModel):
         self.output_map = self._create_output_map()
         self._add_stack_event("CREATE_COMPLETE")
         self.status = "CREATE_COMPLETE"
+        self.creation_time = datetime.utcnow()
 
     def _create_resource_map(self):
         resource_map = ResourceMap(
@@ -259,6 +261,10 @@ class FakeStack(BaseModel):
         output_map.create()
         return output_map
 
+    @property
+    def creation_time_iso_8601(self):
+        return iso_8601_datetime_without_milliseconds(self.creation_time)
+
     def _add_stack_event(
         self, resource_status, resource_status_reason=None, resource_properties=None
     ):
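
Note: the three hunks above record a real creation timestamp on the stack model and expose it through a property; the response templates later in this commit consume it. A rough sketch of the helper's expected output, assuming iso_8601_datetime_without_milliseconds simply formats to whole seconds with a trailing Z (not the committed implementation):

    from datetime import datetime

    def iso_8601_datetime_without_milliseconds(dt):
        # Assumed behaviour of moto's helper, for illustration only.
        return dt.strftime("%Y-%m-%dT%H:%M:%S") + "Z"

    print(iso_8601_datetime_without_milliseconds(datetime(2010, 7, 27, 22, 28, 28)))
    # -> 2010-07-27T22:28:28Z
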
@@ -1,5 +1,6 @@
 from __future__ import unicode_literals
 import functools
+import json
 import logging
 import copy
 import warnings
@@ -24,7 +25,8 @@ from moto.rds import models as rds_models
 from moto.rds2 import models as rds2_models
 from moto.redshift import models as redshift_models
 from moto.route53 import models as route53_models
-from moto.s3 import models as s3_models
+from moto.s3 import models as s3_models, s3_backend
+from moto.s3.utils import bucket_and_name_from_url
 from moto.sns import models as sns_models
 from moto.sqs import models as sqs_models
 from moto.core import ACCOUNT_ID
@@ -150,7 +152,10 @@ def clean_json(resource_json, resources_map):
             map_path = resource_json["Fn::FindInMap"][1:]
             result = resources_map[map_name]
             for path in map_path:
-                result = result[clean_json(path, resources_map)]
+                if "Fn::Transform" in result:
+                    result = resources_map[clean_json(path, resources_map)]
+                else:
+                    result = result[clean_json(path, resources_map)]
             return result
 
         if "Fn::GetAtt" in resource_json:
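
Note: Fn::FindInMap walks the mapping one path segment at a time; the new branch re-resolves through the resources map when the current node still carries an Fn::Transform. A minimal sketch of the plain walk, using an ordinary dict in place of resources_map:

    mapping = {"lgname": {"name": "some-log-group"}}
    result = mapping
    for path in ["lgname", "name"]:
        result = result[path]
    assert result == "some-log-group"
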
@@ -470,6 +475,17 @@ class ResourceMap(collections_abc.Mapping):
     def load_mapping(self):
         self._parsed_resources.update(self._template.get("Mappings", {}))
 
+    def transform_mapping(self):
+        for k, v in self._template.get("Mappings", {}).items():
+            if "Fn::Transform" in v:
+                name = v["Fn::Transform"]["Name"]
+                params = v["Fn::Transform"]["Parameters"]
+                if name == "AWS::Include":
+                    location = params["Location"]
+                    bucket_name, name = bucket_and_name_from_url(location)
+                    key = s3_backend.get_key(bucket_name, name)
+                    self._parsed_resources.update(json.loads(key.value))
+
     def load_parameters(self):
         parameter_slots = self._template.get("Parameters", {})
         for parameter_name, parameter in parameter_slots.items():
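
Note: transform_mapping implements the AWS::Include transform for Mappings by fetching a JSON snippet from the in-memory S3 backend and merging it into the parsed resources. A hypothetical snippet and its effect:

    # Stored at s3://owi-common-cf/snippets/test.json (hypothetical object):
    #   {"lgname": {"name": "some-log-group"}}
    # After transform_mapping runs, Fn::FindInMap ["EnvironmentMapping",
    # "lgname", "name"] resolves to "some-log-group".
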
@@ -515,6 +531,7 @@ class ResourceMap(collections_abc.Mapping):
 
     def create(self):
         self.load_mapping()
+        self.transform_mapping()
         self.load_parameters()
         self.load_conditions()
@@ -662,7 +662,7 @@ DESCRIBE_STACKS_TEMPLATE = """<DescribeStacksResponse>
       <member>
         <StackName>{{ stack.name }}</StackName>
         <StackId>{{ stack.stack_id }}</StackId>
-        <CreationTime>2010-07-27T22:28:28Z</CreationTime>
+        <CreationTime>{{ stack.creation_time_iso_8601 }}</CreationTime>
         <StackStatus>{{ stack.status }}</StackStatus>
         {% if stack.notification_arns %}
         <NotificationARNs>
@@ -803,7 +803,7 @@ LIST_STACKS_RESPONSE = """<ListStacksResponse>
         <StackId>{{ stack.stack_id }}</StackId>
         <StackStatus>{{ stack.status }}</StackStatus>
         <StackName>{{ stack.name }}</StackName>
-        <CreationTime>2011-05-23T15:47:44Z</CreationTime>
+        <CreationTime>{{ stack.creation_time_iso_8601 }}</CreationTime>
         <TemplateDescription>{{ stack.description }}</TemplateDescription>
       </member>
     {% endfor %}
@@ -5,6 +5,7 @@ from boto3 import Session
 from moto.core.utils import iso_8601_datetime_without_milliseconds
 from moto.core import BaseBackend, BaseModel
 from moto.core.exceptions import RESTError
+from moto.logs import logs_backends
 from datetime import datetime, timedelta
 from dateutil.tz import tzutc
 from uuid import uuid4
@@ -428,12 +429,9 @@ class LogGroup(BaseModel):
         cls, resource_name, cloudformation_json, region_name
     ):
         properties = cloudformation_json["Properties"]
-        spec = {"LogGroupName": properties["LogGroupName"]}
-        optional_properties = "Tags".split()
-        for prop in optional_properties:
-            if prop in properties:
-                spec[prop] = properties[prop]
-        return LogGroup(spec)
+        log_group_name = properties["LogGroupName"]
+        tags = properties.get("Tags", {})
+        return logs_backends[region_name].create_log_group(log_group_name, tags)
 
 
 cloudwatch_backends = {}
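
Note: the CloudFormation wrapper now delegates to the logs backend instead of building a detached LogGroup, so a stack-created group is the same object the logs API sees. A sketch, assuming a region key of "us-west-2":

    group = logs_backends["us-west-2"].create_log_group("some-log-group", {})
    assert logs_backends["us-west-2"].groups["some-log-group"] is group

This relies on create_log_group returning the new group, which the logs backend hunk below adds.
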
@@ -146,6 +146,9 @@ class DynamoType(object):
     def __eq__(self, other):
         return self.type == other.type and self.value == other.value
 
+    def __ne__(self, other):
+        return self.type != other.type or self.value != other.value
+
     def __lt__(self, other):
         return self.cast_value < other.cast_value
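
Note: Python 2 does not derive __ne__ from __eq__, so without the explicit method `a != b` falls back to identity comparison. A minimal illustration of the pattern:

    class Cell(object):
        def __init__(self, v):
            self.v = v

        def __eq__(self, other):
            return self.v == other.v

        def __ne__(self, other):
            return not self.__eq__(other)

    assert Cell(1) == Cell(1)
    assert not (Cell(1) != Cell(1))
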
@@ -679,6 +682,10 @@ class Table(BaseModel):
         self.throughput["NumberOfDecreasesToday"] = 0
         self.indexes = indexes
         self.global_indexes = global_indexes if global_indexes else []
+        for index in self.global_indexes:
+            index[
+                "IndexStatus"
+            ] = "ACTIVE"  # One of 'CREATING'|'UPDATING'|'DELETING'|'ACTIVE'
         self.created_at = datetime.datetime.utcnow()
         self.items = defaultdict(dict)
         self.table_arn = self._generate_arn(table_name)
@@ -981,8 +988,13 @@ class Table(BaseModel):
         if index_name:
 
             if index_range_key:
+                # Convert to float if necessary to ensure proper ordering
+                def conv(x):
+                    return float(x.value) if x.type == "N" else x.value
+
                 results.sort(
-                    key=lambda item: item.attrs[index_range_key["AttributeName"]].value
+                    key=lambda item: conv(item.attrs[index_range_key["AttributeName"]])
                     if item.attrs.get(index_range_key["AttributeName"])
                     else None
                 )
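
Note: DynamoDB "N" values are stored as strings, and sorting strings gives lexicographic rather than numeric order; conv restores numeric ordering for range keys. A quick demonstration:

    values = ["-0.6", "-0.7", "0.7"]
    assert sorted(values) == ["-0.6", "-0.7", "0.7"]             # lexicographic, wrong
    assert sorted(values, key=float) == ["-0.7", "-0.6", "0.7"]  # numeric, correct
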
@@ -1,9 +1,12 @@
 from __future__ import unicode_literals
-import itertools
 
+import copy
 import json
-import six
 import re
 
+import itertools
+import six
+
 from moto.core.responses import BaseResponse
 from moto.core.utils import camelcase_to_underscores, amzn_request_id
 from .exceptions import InvalidIndexNameError, InvalidUpdateExpression, ItemSizeTooLarge
@@ -711,7 +714,8 @@ class DynamoHandler(BaseResponse):
         attribute_updates = self.body.get("AttributeUpdates")
         expression_attribute_names = self.body.get("ExpressionAttributeNames", {})
         expression_attribute_values = self.body.get("ExpressionAttributeValues", {})
-        existing_item = self.dynamodb_backend.get_item(name, key)
+        # We need to copy the item in order to avoid it being modified by the update_item operation
+        existing_item = copy.deepcopy(self.dynamodb_backend.get_item(name, key))
         if existing_item:
             existing_attributes = existing_item.to_json()["Attributes"]
         else:
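
Note: without the deepcopy, existing_item aliases the stored item, so the in-place update would also mutate the "before" snapshot used to compute changed attributes. A minimal reproduction of the aliasing:

    import copy

    stored = {"crontab": ["bar1"]}
    alias = stored                    # changes show through
    snapshot = copy.deepcopy(stored)  # independent copy
    stored["crontab"].append("bar2")
    assert alias["crontab"] == ["bar1", "bar2"]
    assert snapshot["crontab"] == ["bar1"]
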
@@ -797,14 +801,39 @@ class DynamoHandler(BaseResponse):
                 k: v for k, v in existing_attributes.items() if k in changed_attributes
             }
         elif return_values == "UPDATED_NEW":
-            item_dict["Attributes"] = {
-                k: v
-                for k, v in item_dict["Attributes"].items()
-                if k in changed_attributes
-            }
+            item_dict["Attributes"] = self._build_updated_new_attributes(
+                existing_attributes, item_dict["Attributes"]
+            )
 
         return dynamo_json_dump(item_dict)
 
+    def _build_updated_new_attributes(self, original, changed):
+        if type(changed) != type(original):
+            return changed
+        else:
+            if type(changed) is dict:
+                return {
+                    key: self._build_updated_new_attributes(
+                        original.get(key, None), changed[key]
+                    )
+                    for key in changed.keys()
+                    if changed[key] != original.get(key, None)
+                }
+            elif type(changed) in (set, list):
+                if len(changed) != len(original):
+                    return changed
+                else:
+                    return [
+                        self._build_updated_new_attributes(
+                            original[index], changed[index]
+                        )
+                        for index in range(len(changed))
+                    ]
+            elif changed != original:
+                return changed
+            else:
+                return None
+
     def describe_limits(self):
         return json.dumps(
             {
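
Note: _build_updated_new_attributes recursively diffs the new attributes against the originals so UPDATED_NEW returns whole changed values at any nesting depth, not just changed top-level keys. A standalone rendering of the same logic on plain dicts, for illustration:

    def build_updated_new(original, changed):
        # Recursive diff: keep only keys whose values changed; a list whose
        # length changed is returned whole.
        if type(changed) != type(original):
            return changed
        if isinstance(changed, dict):
            return {
                k: build_updated_new(original.get(k), changed[k])
                for k in changed
                if changed[k] != original.get(k)
            }
        if isinstance(changed, (set, list)):
            if len(changed) != len(original):
                return changed
            return [build_updated_new(o, c) for o, c in zip(original, changed)]
        return changed if changed != original else None

    original = {"a": {"M": {"b": {"L": [{"S": "bar1"}]}}}}
    changed = {"a": {"M": {"b": {"L": [{"S": "bar1"}, {"S": "bar2"}]}}}}
    assert build_updated_new(original, changed) == changed
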
@@ -405,6 +405,7 @@ class LogsBackend(BaseBackend):
         if log_group_name in self.groups:
             raise ResourceAlreadyExistsException()
         self.groups[log_group_name] = LogGroup(self.region_name, log_group_name, tags)
+        return self.groups[log_group_name]
 
     def ensure_log_group(self, log_group_name, tags):
         if log_group_name in self.groups:
@@ -12,6 +12,7 @@ import codecs
 import random
 import string
 import tempfile
+import threading
 import sys
 import time
 import uuid
@@ -110,6 +111,7 @@ class FakeKey(BaseModel):
         self._value_buffer = tempfile.SpooledTemporaryFile(max_size=max_buffer_size)
         self._max_buffer_size = max_buffer_size
         self.value = value
+        self.lock = threading.Lock()
 
     @property
     def version_id(self):
@@ -117,8 +119,12 @@ class FakeKey(BaseModel):
 
     @property
     def value(self):
+        self.lock.acquire()
         self._value_buffer.seek(0)
-        return self._value_buffer.read()
+        r = self._value_buffer.read()
+        r = copy.copy(r)
+        self.lock.release()
+        return r
 
     @value.setter
     def value(self, new_value):
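
Note: reads of the spooled buffer are now serialized; the lock plus copy.copy guards against a concurrent writer rewinding or truncating the buffer mid-read. A `with` form would release the lock even if read() raised; a sketch of that alternative, not the committed code:

    # with self.lock:
    #     self._value_buffer.seek(0)
    #     r = copy.copy(self._value_buffer.read())
    # return r
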
@@ -130,6 +136,7 @@ class FakeKey(BaseModel):
         if isinstance(new_value, six.text_type):
             new_value = new_value.encode(DEFAULT_TEXT_ENCODING)
         self._value_buffer.write(new_value)
+        self.contentsize = len(new_value)
 
     def copy(self, new_name=None, new_is_versioned=None):
         r = copy.deepcopy(self)
@@ -157,6 +164,7 @@ class FakeKey(BaseModel):
         self.acl = acl
 
     def append_to_value(self, value):
+        self.contentsize += len(value)
         self._value_buffer.seek(0, os.SEEK_END)
         self._value_buffer.write(value)
@@ -229,8 +237,7 @@ class FakeKey(BaseModel):
 
     @property
     def size(self):
-        self._value_buffer.seek(0, os.SEEK_END)
-        return self._value_buffer.tell()
+        return self.contentsize
 
     @property
     def storage_class(self):
@@ -249,6 +256,7 @@ class FakeKey(BaseModel):
         state = self.__dict__.copy()
         state["value"] = self.value
         del state["_value_buffer"]
+        del state["lock"]
         return state
 
     def __setstate__(self, state):
@@ -258,6 +266,7 @@ class FakeKey(BaseModel):
             max_size=self._max_buffer_size
         )
         self.value = state["value"]
+        self.lock = threading.Lock()
 
 
 class FakeMultipart(BaseModel):
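
Note: lock objects cannot be pickled, which is why __getstate__ drops the lock (alongside the buffer) and __setstate__ recreates it. A quick check of the constraint:

    import pickle
    import threading

    try:
        pickle.dumps(threading.Lock())
    except TypeError as exc:
        print("locks are not picklable:", exc)
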
@@ -284,7 +293,7 @@ class FakeMultipart(BaseModel):
             etag = etag.replace('"', "")
             if part is None or part_etag != etag:
                 raise InvalidPart()
-            if last is not None and len(last.value) < UPLOAD_PART_MIN_SIZE:
+            if last is not None and last.contentsize < UPLOAD_PART_MIN_SIZE:
                 raise EntityTooSmall()
             md5s.extend(decode_hex(part_etag)[0])
             total.extend(part.value)
@@ -35,6 +35,17 @@ def bucket_name_from_url(url):
     return None
 
 
+# 'owi-common-cf', 'snippets/test.json' = bucket_and_name_from_url('s3://owi-common-cf/snippets/test.json')
+def bucket_and_name_from_url(url):
+    prefix = "s3://"
+    if url.startswith(prefix):
+        bucket_name = url[len(prefix) : url.index("/", len(prefix))]
+        key = url[url.index("/", len(prefix)) + 1 :]
+        return bucket_name, key
+    else:
+        return None, None
+
+
 REGION_URL_REGEX = re.compile(
     r"^https?://(s3[-\.](?P<region1>.+)\.amazonaws\.com/(.+)|"
     r"(.+)\.s3[-\.](?P<region2>.+)\.amazonaws\.com)/?"
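
Note: expected behaviour of the new helper, matching the comment above it:

    bucket, key = bucket_and_name_from_url("s3://owi-common-cf/snippets/test.json")
    assert (bucket, key) == ("owi-common-cf", "snippets/test.json")
    assert bucket_and_name_from_url("https://example.com/x") == (None, None)
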
|
@ -91,9 +91,11 @@ class SESBackend(BaseBackend):
|
||||
return host in self.domains
|
||||
|
||||
def verify_email_identity(self, address):
|
||||
_, address = parseaddr(address)
|
||||
self.addresses.append(address)
|
||||
|
||||
def verify_email_address(self, address):
|
||||
_, address = parseaddr(address)
|
||||
self.email_addresses.append(address)
|
||||
|
||||
def verify_domain(self, domain):
|
||||
|
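
Note: parseaddr splits a display name from the address, so an encoded sender such as "Foo <foo@bar.baz>" is stored as the bare address. For reference:

    from email.utils import parseaddr

    assert parseaddr("Foo <foo@bar.baz>") == ("Foo", "foo@bar.baz")
    assert parseaddr("foo@bar.baz") == ("", "foo@bar.baz")
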
@@ -2,6 +2,8 @@ from __future__ import unicode_literals
 
 import json
 from collections import OrderedDict
+from datetime import datetime, timedelta
+import pytz
 
 import boto3
 from botocore.exceptions import ClientError
@@ -911,6 +913,10 @@ def test_describe_stack_by_name():
 
     stack = cf_conn.describe_stacks(StackName="test_stack")["Stacks"][0]
     stack["StackName"].should.equal("test_stack")
+    two_secs_ago = datetime.now(tz=pytz.UTC) - timedelta(seconds=2)
+    assert (
+        two_secs_ago < stack["CreationTime"] < datetime.now(tz=pytz.UTC)
+    ), "Stack should have been created recently"
 
 
 @mock_cloudformation
@@ -32,12 +32,14 @@ from moto import (
     mock_iam_deprecated,
     mock_kms,
     mock_lambda,
+    mock_logs,
     mock_rds_deprecated,
     mock_rds2,
     mock_rds2_deprecated,
     mock_redshift,
     mock_redshift_deprecated,
     mock_route53_deprecated,
+    mock_s3,
     mock_sns_deprecated,
     mock_sqs,
     mock_sqs_deprecated,
@@ -2332,3 +2334,48 @@ def test_stack_dynamodb_resources_integration():
     response["Item"]["Sales"].should.equal(Decimal("10"))
     response["Item"]["NumberOfSongs"].should.equal(Decimal("5"))
     response["Item"]["Album"].should.equal("myAlbum")
+
+
+@mock_cloudformation
+@mock_logs
+@mock_s3
+def test_create_log_group_using_fntransform():
+    s3_resource = boto3.resource("s3")
+    s3_resource.create_bucket(
+        Bucket="owi-common-cf",
+        CreateBucketConfiguration={"LocationConstraint": "us-west-2"},
+    )
+    s3_resource.Object("owi-common-cf", "snippets/test.json").put(
+        Body=json.dumps({"lgname": {"name": "some-log-group"}})
+    )
+    template = {
+        "AWSTemplateFormatVersion": "2010-09-09",
+        "Mappings": {
+            "EnvironmentMapping": {
+                "Fn::Transform": {
+                    "Name": "AWS::Include",
+                    "Parameters": {"Location": "s3://owi-common-cf/snippets/test.json"},
+                }
+            }
+        },
+        "Resources": {
+            "LogGroup": {
+                "Properties": {
+                    "LogGroupName": {
+                        "Fn::FindInMap": ["EnvironmentMapping", "lgname", "name"]
+                    },
+                    "RetentionInDays": 90,
+                },
+                "Type": "AWS::Logs::LogGroup",
+            }
+        },
+    }
+
+    cf_conn = boto3.client("cloudformation", "us-west-2")
+    cf_conn.create_stack(
+        StackName="test_stack", TemplateBody=json.dumps(template),
+    )
+
+    logs_conn = boto3.client("logs", region_name="us-west-2")
+    log_group = logs_conn.describe_log_groups()["logGroups"][0]
+    log_group["logGroupName"].should.equal("some-log-group")
@@ -3431,13 +3431,18 @@ def test_update_supports_list_append():
     )
 
     # Update item using list_append expression
-    client.update_item(
+    updated_item = client.update_item(
         TableName="TestTable",
         Key={"SHA256": {"S": "sha-of-file"}},
         UpdateExpression="SET crontab = list_append(crontab, :i)",
         ExpressionAttributeValues={":i": {"L": [{"S": "bar2"}]}},
+        ReturnValues="UPDATED_NEW",
     )
 
+    # Verify updated item is correct
+    updated_item["Attributes"].should.equal(
+        {"crontab": {"L": [{"S": "bar1"}, {"S": "bar2"}]}}
+    )
     # Verify item is appended to the existing list
     result = client.get_item(
         TableName="TestTable", Key={"SHA256": {"S": "sha-of-file"}}
@@ -3470,15 +3475,19 @@ def test_update_supports_nested_list_append():
     )
 
     # Update item using list_append expression
-    client.update_item(
+    updated_item = client.update_item(
         TableName="TestTable",
         Key={"id": {"S": "nested_list_append"}},
         UpdateExpression="SET a.#b = list_append(a.#b, :i)",
         ExpressionAttributeValues={":i": {"L": [{"S": "bar2"}]}},
         ExpressionAttributeNames={"#b": "b"},
+        ReturnValues="UPDATED_NEW",
     )
 
-    # Verify item is appended to the existing list
+    # Verify updated item is correct
+    updated_item["Attributes"].should.equal(
+        {"a": {"M": {"b": {"L": [{"S": "bar1"}, {"S": "bar2"}]}}}}
+    )
     result = client.get_item(
         TableName="TestTable", Key={"id": {"S": "nested_list_append"}}
     )["Item"]
@@ -3510,14 +3519,19 @@ def test_update_supports_multiple_levels_nested_list_append():
     )
 
     # Update item using list_append expression
-    client.update_item(
+    updated_item = client.update_item(
         TableName="TestTable",
         Key={"id": {"S": "nested_list_append"}},
         UpdateExpression="SET a.#b.c = list_append(a.#b.#c, :i)",
         ExpressionAttributeValues={":i": {"L": [{"S": "bar2"}]}},
         ExpressionAttributeNames={"#b": "b", "#c": "c"},
+        ReturnValues="UPDATED_NEW",
     )
 
+    # Verify updated item is correct
+    updated_item["Attributes"].should.equal(
+        {"a": {"M": {"b": {"M": {"c": {"L": [{"S": "bar1"}, {"S": "bar2"}]}}}}}}
+    )
     # Verify item is appended to the existing list
     result = client.get_item(
         TableName="TestTable", Key={"id": {"S": "nested_list_append"}}
@@ -3551,14 +3565,19 @@ def test_update_supports_nested_list_append_onto_another_list():
     )
 
     # Update item using list_append expression
-    client.update_item(
+    updated_item = client.update_item(
         TableName="TestTable",
         Key={"id": {"S": "list_append_another"}},
         UpdateExpression="SET a.#c = list_append(a.#b, :i)",
         ExpressionAttributeValues={":i": {"L": [{"S": "bar2"}]}},
         ExpressionAttributeNames={"#b": "b", "#c": "c"},
+        ReturnValues="UPDATED_NEW",
     )
 
+    # Verify updated item is correct
+    updated_item["Attributes"].should.equal(
+        {"a": {"M": {"c": {"L": [{"S": "bar1"}, {"S": "bar2"}]}}}}
+    )
     # Verify item is appended to the existing list
     result = client.get_item(
         TableName="TestTable", Key={"id": {"S": "list_append_another"}}
@@ -3601,13 +3620,18 @@ def test_update_supports_list_append_maps():
     )
 
     # Update item using list_append expression
-    client.update_item(
+    updated_item = client.update_item(
         TableName="TestTable",
         Key={"id": {"S": "nested_list_append"}, "rid": {"S": "range_key"}},
         UpdateExpression="SET a = list_append(a, :i)",
         ExpressionAttributeValues={":i": {"L": [{"M": {"b": {"S": "bar2"}}}]}},
+        ReturnValues="UPDATED_NEW",
    )
 
+    # Verify updated item is correct
+    updated_item["Attributes"].should.equal(
+        {"a": {"L": [{"M": {"b": {"S": "bar1"}}}, {"M": {"b": {"S": "bar2"}}}]}}
+    )
     # Verify item is appended to the existing list
     result = client.query(
         TableName="TestTable",
@@ -3643,11 +3667,18 @@ def test_update_supports_list_append_with_nested_if_not_exists_operation():
     table = dynamo.Table(table_name)
 
     table.put_item(Item={"Id": "item-id", "nest1": {"nest2": {}}})
-    table.update_item(
+    updated_item = table.update_item(
         Key={"Id": "item-id"},
         UpdateExpression="SET nest1.nest2.event_history = list_append(if_not_exists(nest1.nest2.event_history, :empty_list), :new_value)",
         ExpressionAttributeValues={":empty_list": [], ":new_value": ["some_value"]},
+        ReturnValues="UPDATED_NEW",
     )
 
+    # Verify updated item is correct
+    updated_item["Attributes"].should.equal(
+        {"nest1": {"nest2": {"event_history": ["some_value"]}}}
+    )
+
     table.get_item(Key={"Id": "item-id"})["Item"].should.equal(
         {"Id": "item-id", "nest1": {"nest2": {"event_history": ["some_value"]}}}
     )
@@ -3668,11 +3699,18 @@ def test_update_supports_list_append_with_nested_if_not_exists_operation_and_pro
     table = dynamo.Table(table_name)
 
     table.put_item(Item={"Id": "item-id", "event_history": ["other_value"]})
-    table.update_item(
+    updated_item = table.update_item(
         Key={"Id": "item-id"},
         UpdateExpression="SET event_history = list_append(if_not_exists(event_history, :empty_list), :new_value)",
         ExpressionAttributeValues={":empty_list": [], ":new_value": ["some_value"]},
+        ReturnValues="UPDATED_NEW",
     )
 
+    # Verify updated item is correct
+    updated_item["Attributes"].should.equal(
+        {"event_history": ["other_value", "some_value"]}
+    )
+
     table.get_item(Key={"Id": "item-id"})["Item"].should.equal(
         {"Id": "item-id", "event_history": ["other_value", "some_value"]}
     )
@@ -3759,11 +3797,16 @@ def test_update_nested_item_if_original_value_is_none():
     )
     table = dynamo.Table("origin-rbu-dev")
     table.put_item(Item={"job_id": "a", "job_details": {"job_name": None}})
-    table.update_item(
+    updated_item = table.update_item(
         Key={"job_id": "a"},
         UpdateExpression="SET job_details.job_name = :output",
         ExpressionAttributeValues={":output": "updated"},
+        ReturnValues="UPDATED_NEW",
     )
 
+    # Verify updated item is correct
+    updated_item["Attributes"].should.equal({"job_details": {"job_name": "updated"}})
+
     table.scan()["Items"][0]["job_details"]["job_name"].should.equal("updated")
@@ -3779,11 +3822,16 @@ def test_allow_update_to_item_with_different_type():
     table = dynamo.Table("origin-rbu-dev")
     table.put_item(Item={"job_id": "a", "job_details": {"job_name": {"nested": "yes"}}})
     table.put_item(Item={"job_id": "b", "job_details": {"job_name": {"nested": "yes"}}})
-    table.update_item(
+    updated_item = table.update_item(
         Key={"job_id": "a"},
         UpdateExpression="SET job_details.job_name = :output",
         ExpressionAttributeValues={":output": "updated"},
+        ReturnValues="UPDATED_NEW",
     )
 
+    # Verify updated item is correct
+    updated_item["Attributes"].should.equal({"job_details": {"job_name": "updated"}})
+
     table.get_item(Key={"job_id": "a"})["Item"]["job_details"][
         "job_name"
     ].should.be.equal("updated")
@@ -4026,3 +4074,61 @@ def test_valid_transact_get_items():
             "Table": {"CapacityUnits": 2.0, "ReadCapacityUnits": 2.0,},
         }
     )
+
+
+@mock_dynamodb2
+def test_gsi_verify_negative_number_order():
+    table_schema = {
+        "KeySchema": [{"AttributeName": "partitionKey", "KeyType": "HASH"}],
+        "GlobalSecondaryIndexes": [
+            {
+                "IndexName": "GSI-K1",
+                "KeySchema": [
+                    {"AttributeName": "gsiK1PartitionKey", "KeyType": "HASH"},
+                    {"AttributeName": "gsiK1SortKey", "KeyType": "RANGE"},
+                ],
+                "Projection": {"ProjectionType": "KEYS_ONLY",},
+            }
+        ],
+        "AttributeDefinitions": [
+            {"AttributeName": "partitionKey", "AttributeType": "S"},
+            {"AttributeName": "gsiK1PartitionKey", "AttributeType": "S"},
+            {"AttributeName": "gsiK1SortKey", "AttributeType": "N"},
+        ],
+    }
+
+    item1 = {
+        "partitionKey": "pk-1",
+        "gsiK1PartitionKey": "gsi-k1",
+        "gsiK1SortKey": Decimal("-0.6"),
+    }
+
+    item2 = {
+        "partitionKey": "pk-2",
+        "gsiK1PartitionKey": "gsi-k1",
+        "gsiK1SortKey": Decimal("-0.7"),
+    }
+
+    item3 = {
+        "partitionKey": "pk-3",
+        "gsiK1PartitionKey": "gsi-k1",
+        "gsiK1SortKey": Decimal("0.7"),
+    }
+
+    dynamodb = boto3.resource("dynamodb", region_name="us-east-1")
+    dynamodb.create_table(
+        TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema
+    )
+    table = dynamodb.Table("test-table")
+    table.put_item(Item=item3)
+    table.put_item(Item=item1)
+    table.put_item(Item=item2)
+
+    resp = table.query(
+        KeyConditionExpression=Key("gsiK1PartitionKey").eq("gsi-k1"),
+        IndexName="GSI-K1",
+    )
+    # Items should be ordered with the lowest number first
+    [float(item["gsiK1SortKey"]) for item in resp["Items"]].should.equal(
+        [-0.7, -0.6, 0.7]
+    )
@@ -574,6 +574,7 @@ def test_create_with_global_indexes():
                 "ReadCapacityUnits": 6,
                 "WriteCapacityUnits": 1,
             },
+            "IndexStatus": "ACTIVE",
         }
     ]
 )
@@ -214,3 +214,16 @@ def test_send_raw_email_without_source_or_from():
     kwargs = dict(RawMessage={"Data": message.as_string()})
 
     conn.send_raw_email.when.called_with(**kwargs).should.throw(ClientError)
+
+
+@mock_ses
+def test_send_email_notification_with_encoded_sender():
+    sender = "Foo <foo@bar.baz>"
+    conn = boto3.client("ses", region_name="us-east-1")
+    conn.verify_email_identity(EmailAddress=sender)
+    response = conn.send_email(
+        Source=sender,
+        Destination={"ToAddresses": ["your.friend@hotmail.com"]},
+        Message={"Subject": {"Data": "hi",}, "Body": {"Text": {"Data": "there",}}},
+    )
+    response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)