from __future__ import unicode_literals
from collections import defaultdict
import copy
import datetime
import decimal
import json
import re
import uuid
import six

import boto3
from botocore.exceptions import ParamValidationError
from moto.compat import OrderedDict
from moto.core import BaseBackend, BaseModel
from moto.core.utils import unix_time
from moto.core.exceptions import JsonRESTError
from .comparisons import get_comparison_func
from .comparisons import get_filter_expression
from .comparisons import get_expected
from .exceptions import InvalidIndexNameError, InvalidUpdateExpression, ItemSizeTooLarge


class DynamoJsonEncoder(json.JSONEncoder):
    def default(self, obj):
        if hasattr(obj, "to_json"):
            return obj.to_json()


def dynamo_json_dump(dynamo_object):
    return json.dumps(dynamo_object, cls=DynamoJsonEncoder)


def bytesize(val):
    return len(str(val).encode("utf-8"))


def attribute_is_list(attr):
    """
    Checks if attribute denotes a list, and returns the name of the list and the given list index if so
    :param attr: attr or attr[index]
    :return: attr, index or None
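    Example (illustrative): attribute_is_list("RelatedItems[1]") returns
    ("RelatedItems", "1"), while attribute_is_list("Title") returns ("Title", None).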
    """
    list_index_update = re.match("(.+)\\[([0-9]+)\\]", attr)
    if list_index_update:
        attr = list_index_update.group(1)
    return attr, list_index_update.group(2) if list_index_update else None


class DynamoType(object):
    """
    http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html#DataModelDataTypes
    """

    def __init__(self, type_as_dict):
        if type(type_as_dict) == DynamoType:
            self.type = type_as_dict.type
            self.value = type_as_dict.value
        else:
            self.type = list(type_as_dict)[0]
            self.value = list(type_as_dict.values())[0]
        if self.is_list():
            self.value = [DynamoType(val) for val in self.value]
        elif self.is_map():
            self.value = dict((k, DynamoType(v)) for k, v in self.value.items())

    def get(self, key):
        if not key:
            return self
        else:
            key_head = key.split(".")[0]
            key_tail = ".".join(key.split(".")[1:])
            if key_head not in self.value:
                self.value[key_head] = DynamoType({"NONE": None})
            return self.value[key_head].get(key_tail)

    def set(self, key, new_value, index=None):
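        # Dotted-path sketch (attribute names illustrative): on a map value,
        # set("foo.bar", v) recurses into "foo" and stores v under "bar",
        # creating intermediate maps as needed; a non-None index instead
        # targets a position inside a list value.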
        if index:
            index = int(index)
            if type(self.value) is not list:
                raise InvalidUpdateExpression
            if index >= len(self.value):
                self.value.append(new_value)
            # {'L': [DynamoType, ..]} ==> DynamoType.set()
            self.value[min(index, len(self.value) - 1)].set(key, new_value)
        else:
            attr = (key or "").split(".").pop(0)
            attr, list_index = attribute_is_list(attr)
            if not key:
                # {'S': value} ==> {'S': new_value}
                self.type = new_value.type
                self.value = new_value.value
            else:
                if attr not in self.value:  # nonexistent attribute
                    type_of_new_attr = "M" if "." in key else new_value.type
                    self.value[attr] = DynamoType({type_of_new_attr: {}})
                # {'M': {'foo': DynamoType}} ==> DynamoType.set(new_value)
                self.value[attr].set(
                    ".".join(key.split(".")[1:]), new_value, list_index
                )

    def delete(self, key, index=None):
        if index:
            if not key:
                if int(index) < len(self.value):
                    del self.value[int(index)]
            elif "." in key:
                self.value[int(index)].delete(".".join(key.split(".")[1:]))
            else:
                self.value[int(index)].delete(key)
        else:
            attr = key.split(".")[0]
            attr, list_index = attribute_is_list(attr)

            if list_index:
                self.value[attr].delete(".".join(key.split(".")[1:]), list_index)
            elif "." in key:
                self.value[attr].delete(".".join(key.split(".")[1:]))
            else:
                self.value.pop(key)

    def filter(self, projection_expressions):
        nested_projections = [
            expr[0 : expr.index(".")] for expr in projection_expressions if "." in expr
        ]
        if self.is_map():
            expressions_to_delete = []
            for attr in self.value:
                if (
                    attr not in projection_expressions
                    and attr not in nested_projections
                ):
                    expressions_to_delete.append(attr)
                elif attr in nested_projections:
                    relevant_expressions = [
                        expr[len(attr + ".") :]
                        for expr in projection_expressions
                        if expr.startswith(attr + ".")
                    ]
                    self.value[attr].filter(relevant_expressions)
            for expr in expressions_to_delete:
                self.value.pop(expr)

    def __hash__(self):
        return hash((self.type, self.value))

    def __eq__(self, other):
        return self.type == other.type and self.value == other.value

    def __lt__(self, other):
        return self.cast_value < other.cast_value

    def __le__(self, other):
        return self.cast_value <= other.cast_value

    def __gt__(self, other):
        return self.cast_value > other.cast_value

    def __ge__(self, other):
        return self.cast_value >= other.cast_value

    def __repr__(self):
        return "DynamoType: {0}".format(self.to_json())

    @property
    def cast_value(self):
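        # e.g. {"N": "1"} casts to the int 1, {"NS": ["1", "2"]} to the Python
        # set {1, 2}, and {"M": {...}} recurses into a plain dict of cast values.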
        if self.is_number():
            try:
                return int(self.value)
            except ValueError:
                return float(self.value)
        elif self.is_set():
            sub_type = self.type[0]
            return set([DynamoType({sub_type: v}).cast_value for v in self.value])
        elif self.is_list():
            return [DynamoType(v).cast_value for v in self.value]
        elif self.is_map():
            return dict([(k, DynamoType(v).cast_value) for k, v in self.value.items()])
        else:
            return self.value

    def child_attr(self, key):
        """
        Get Map or List children by key. str for Map, int for List.

        Returns DynamoType or None.
        """
        if isinstance(key, six.string_types) and self.is_map():
            if "." in key and key.split(".")[0] in self.value:
                return self.value[key.split(".")[0]].child_attr(
                    ".".join(key.split(".")[1:])
                )
            elif "." not in key and key in self.value:
                return DynamoType(self.value[key])

        if isinstance(key, int) and self.is_list():
            idx = key
            if 0 <= idx < len(self.value):
                return DynamoType(self.value[idx])

        return None

    def size(self):
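        # Approximates DynamoDB's item-size accounting: e.g. {"S": "abc"} is
        # 3 bytes, {"N": "123"} counts the length of its string form, and a
        # map adds the byte size of each key to the size of each nested value.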
        if self.is_number():
            value_size = len(str(self.value))
        elif self.is_set():
            sub_type = self.type[0]
            value_size = sum([DynamoType({sub_type: v}).size() for v in self.value])
        elif self.is_list():
            value_size = sum([v.size() for v in self.value])
        elif self.is_map():
            value_size = sum(
                [bytesize(k) + DynamoType(v).size() for k, v in self.value.items()]
            )
        elif type(self.value) == bool:
            value_size = 1
        else:
            value_size = bytesize(self.value)
        return value_size

    def to_json(self):
        return {self.type: self.value}

    def compare(self, range_comparison, range_objs):
        """
        Compares this type against comparison filters
        """
        range_values = [obj.cast_value for obj in range_objs]
        comparison_func = get_comparison_func(range_comparison)
        return comparison_func(self.cast_value, *range_values)

    def is_number(self):
        return self.type == "N"

    def is_set(self):
        return self.type == "SS" or self.type == "NS" or self.type == "BS"

    def is_list(self):
        return self.type == "L"

    def is_map(self):
        return self.type == "M"

    def same_type(self, other):
        return self.type == other.type


# https://github.com/spulec/moto/issues/1874
# Ensure that the total size of an item does not exceed 400kb
class LimitedSizeDict(dict):
    def __init__(self, *args, **kwargs):
        self.update(*args, **kwargs)

    def __setitem__(self, key, value):
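        # e.g. repeatedly assigning large string attributes eventually pushes
        # the summed size of all keys and values past the threshold below, at
        # which point this raises ItemSizeTooLarge instead of storing the value.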
        current_item_size = sum(
            [
                item.size() if type(item) == DynamoType else bytesize(str(item))
                for item in (list(self.keys()) + list(self.values()))
            ]
        )
        new_item_size = bytesize(key) + (
            value.size() if type(value) == DynamoType else bytesize(str(value))
        )
        # Official limit is set to 400000 (400KB)
        # Manual testing confirms that the actual limit is between 409 and 410KB
        # We'll set the limit to something in between to be safe
        if (current_item_size + new_item_size) > 405000:
            raise ItemSizeTooLarge
        super(LimitedSizeDict, self).__setitem__(key, value)


class Item(BaseModel):
    def __init__(self, hash_key, hash_key_type, range_key, range_key_type, attrs):
        self.hash_key = hash_key
        self.hash_key_type = hash_key_type
        self.range_key = range_key
        self.range_key_type = range_key_type

        self.attrs = LimitedSizeDict()
        for key, value in attrs.items():
            self.attrs[key] = DynamoType(value)

    def __repr__(self):
        return "Item: {0}".format(self.to_json())

    def to_json(self):
        attributes = {}
        for attribute_key, attribute in self.attrs.items():
            attributes[attribute_key] = {attribute.type: attribute.value}

        return {"Attributes": attributes}

    def describe_attrs(self, attributes):
        if attributes:
            included = {}
            for key, value in self.attrs.items():
                if key in attributes:
                    included[key] = value
        else:
            included = self.attrs
        return {"Item": included}

    def update(
        self, update_expression, expression_attribute_names, expression_attribute_values
    ):
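        # Worked example (names illustrative):
        #   item.update("SET foo = :bar", {}, {":bar": {"S": "baz"}})
        # stores DynamoType({"S": "baz"}) under self.attrs["foo"].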
        # Update subexpressions are identifiable by the operator keyword, so split on that and
        # get rid of the empty leading string.
        parts = [
            p
            for p in re.split(
                r"\b(SET|REMOVE|ADD|DELETE)\b", update_expression, flags=re.I
            )
            if p
        ]
        # make sure that we correctly found only operator/value pairs
        assert (
            len(parts) % 2 == 0
        ), "Mismatched operators and values in update expression: '{}'".format(
            update_expression
        )
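        # e.g. "SET a = :v REMOVE b" splits into
        # ["SET", " a = :v ", "REMOVE", " b"] - alternating action/value pairs.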
        for action, valstr in zip(parts[:-1:2], parts[1::2]):
            action = action.upper()

            # "Should" retain arguments inside (...)
            values = re.split(r",(?![^(]*\))", valstr)
            for value in values:
                # A real value
                value = value.lstrip(":").rstrip(",").strip()
                for k, v in expression_attribute_names.items():
                    value = re.sub(r"{0}\b".format(k), v, value)

                if action == "REMOVE":
                    key = value
                    attr, list_index = attribute_is_list(key.split(".")[0])
                    if "." not in key:
                        if list_index:
                            new_list = DynamoType(self.attrs[attr])
                            new_list.delete(None, list_index)
                            self.attrs[attr] = new_list
                        else:
                            self.attrs.pop(value, None)
                    else:
                        # Handle nested dict updates
                        self.attrs[attr].delete(".".join(key.split(".")[1:]))
                elif action == "SET":
                    key, value = value.split("=", 1)
                    key = key.strip()
                    value = value.strip()

                    # check whether key is a list
                    attr, list_index = attribute_is_list(key.split(".")[0])
                    # If the value does not exist, substitute a default where one is given; otherwise leave it unchanged
                    value = self._get_default(value)
                    # If operation == list_append, get the original value and append to it
                    value = self._get_appended_list(value, expression_attribute_values)

                    if type(value) != DynamoType:
                        if value in expression_attribute_values:
                            dyn_value = DynamoType(expression_attribute_values[value])
                        else:
                            dyn_value = DynamoType({"S": value})
                    else:
                        dyn_value = value

                    if "." in key and attr not in self.attrs:
                        raise ValueError  # Setting a nested attr is not allowed if the first attr does not exist yet
                    elif attr not in self.attrs:
                        self.attrs[attr] = dyn_value  # set new top-level attribute
                    else:
                        self.attrs[attr].set(
                            ".".join(key.split(".")[1:]), dyn_value, list_index
                        )  # set value recursively

                elif action == "ADD":
                    key, value = value.split(" ", 1)
                    key = key.strip()
                    value_str = value.strip()
                    if value_str in expression_attribute_values:
                        dyn_value = DynamoType(expression_attribute_values[value])
                    else:
                        raise TypeError

                    # Handle adding numbers - value gets added to existing value,
                    # or added to 0 if it doesn't exist yet
                    if dyn_value.is_number():
                        existing = self.attrs.get(key, DynamoType({"N": "0"}))
                        if not existing.same_type(dyn_value):
                            raise TypeError()
                        self.attrs[key] = DynamoType(
                            {
                                "N": str(
                                    decimal.Decimal(existing.value)
                                    + decimal.Decimal(dyn_value.value)
                                )
                            }
                        )

                    # Handle adding sets - value is added to the set, or set is
                    # created with only this value if it doesn't exist yet
                    # New value must be of same set type as previous value
                    elif dyn_value.is_set():
                        key_head = key.split(".")[0]
                        key_tail = ".".join(key.split(".")[1:])
                        if key_head not in self.attrs:
                            self.attrs[key_head] = DynamoType({dyn_value.type: {}})
                        existing = self.attrs.get(key_head)
                        existing = existing.get(key_tail)
                        if existing.value and not existing.same_type(dyn_value):
                            raise TypeError()
                        new_set = set(existing.value or []).union(dyn_value.value)
                        existing.set(
                            key=None,
                            new_value=DynamoType({dyn_value.type: list(new_set)}),
                        )
                    else:  # Numbers and Sets are the only supported types for ADD
                        raise TypeError

                elif action == "DELETE":
                    key, value = value.split(" ", 1)
                    key = key.strip()
                    value_str = value.strip()
                    if value_str in expression_attribute_values:
                        dyn_value = DynamoType(expression_attribute_values[value])
                    else:
                        raise TypeError

                    if not dyn_value.is_set():
                        raise TypeError
                    key_head = key.split(".")[0]
                    key_tail = ".".join(key.split(".")[1:])
                    existing = self.attrs.get(key_head)
                    existing = existing.get(key_tail)
                    if existing:
                        if not existing.same_type(dyn_value):
                            raise TypeError
                        new_set = set(existing.value).difference(dyn_value.value)
                        existing.set(
                            key=None,
                            new_value=DynamoType({existing.type: list(new_set)}),
                        )
                else:
                    raise NotImplementedError(
                        "{} update action not yet supported".format(action)
                    )

    def _get_appended_list(self, value, expression_attribute_values):
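        # Supported form (placeholder names illustrative): for the value string
        # "list_append(nums, :vals)" with expression_attribute_values of
        # {":vals": {"L": [...]}}, the existing "nums" list is extended and
        # returned; any other value passes through unchanged.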
        if type(value) != DynamoType:
            list_append_re = re.match("list_append\\((.+),(.+)\\)", value)
            if list_append_re:
                new_value = expression_attribute_values[list_append_re.group(2).strip()]
                old_list_key = list_append_re.group(1)
                # Get the existing value
                old_list = self.attrs[old_list_key.split(".")[0]]
                if "." in old_list_key:
                    # Value is nested inside a map - find the appropriate child attr
                    old_list = old_list.child_attr(
                        ".".join(old_list_key.split(".")[1:])
                    )
                if not old_list.is_list():
                    raise ParamValidationError
                old_list.value.extend(new_value["L"])
                value = old_list
        return value

    def _get_default(self, value):
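        # e.g. "if_not_exists(petname, :default)" (names illustrative) returns
        # self.attrs["petname"] when that attribute already exists, and the
        # literal default token ":default" otherwise.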
        if value.startswith("if_not_exists"):
            # Function signature
            match = re.match(
                r".*if_not_exists\s*\((?P<path>.+),\s*(?P<default>.+)\).*", value
            )
            if not match:
                raise TypeError

            path, value = match.groups()

            # If it already exists, get its value so we don't overwrite it
            if path in self.attrs:
                value = self.attrs[path]
        return value

    def update_with_attribute_updates(self, attribute_updates):
        for attribute_name, update_action in attribute_updates.items():
            action = update_action["Action"]
            if action == "DELETE" and "Value" not in update_action:
                if attribute_name in self.attrs:
                    del self.attrs[attribute_name]
                continue
            new_value = list(update_action["Value"].values())[0]
            if action == "PUT":
                # TODO deal with other types
                if isinstance(new_value, list):
                    self.attrs[attribute_name] = DynamoType({"L": new_value})
                elif isinstance(new_value, set):
                    self.attrs[attribute_name] = DynamoType({"SS": new_value})
                elif isinstance(new_value, dict):
                    self.attrs[attribute_name] = DynamoType({"M": new_value})
                elif set(update_action["Value"].keys()) == set(["N"]):
                    self.attrs[attribute_name] = DynamoType({"N": new_value})
                elif set(update_action["Value"].keys()) == set(["NULL"]):
                    if attribute_name in self.attrs:
                        del self.attrs[attribute_name]
                else:
                    self.attrs[attribute_name] = DynamoType({"S": new_value})
            elif action == "ADD":
                if set(update_action["Value"].keys()) == set(["N"]):
                    existing = self.attrs.get(attribute_name, DynamoType({"N": "0"}))
                    self.attrs[attribute_name] = DynamoType(
                        {
                            "N": str(
                                decimal.Decimal(existing.value)
                                + decimal.Decimal(new_value)
                            )
                        }
                    )
                elif set(update_action["Value"].keys()) == set(["SS"]):
                    existing = self.attrs.get(attribute_name, DynamoType({"SS": {}}))
                    new_set = set(existing.value).union(set(new_value))
                    self.attrs[attribute_name] = DynamoType({"SS": list(new_set)})
                else:
                    # TODO: implement other data types
                    raise NotImplementedError(
                        "ADD not supported for %s"
                        % ", ".join(update_action["Value"].keys())
                    )
            elif action == "DELETE":
                if set(update_action["Value"].keys()) == set(["SS"]):
                    existing = self.attrs.get(attribute_name, DynamoType({"SS": {}}))
                    new_set = set(existing.value).difference(set(new_value))
                    self.attrs[attribute_name] = DynamoType({"SS": list(new_set)})
                else:
                    raise NotImplementedError(
                        "DELETE not supported for %s"
                        % ", ".join(update_action["Value"].keys())
                    )
            else:
                raise NotImplementedError(
                    "%s action not supported for update_with_attribute_updates" % action
                )

    # Filter using projection_expression
    # Ensure a deep copy is used to filter, otherwise actual data will be removed
    def filter(self, projection_expression):
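        # e.g. filter("foo, bar.baz") keeps attribute "foo" in full and, inside
        # the map attribute "bar", only its "baz" child; every other attribute
        # is removed from self.attrs (attribute names illustrative).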
        expressions = [x.strip() for x in projection_expression.split(",")]
        top_level_expressions = [
            expr[0 : expr.index(".")] for expr in expressions if "." in expr
        ]
        for attr in list(self.attrs):
            if attr not in expressions and attr not in top_level_expressions:
                self.attrs.pop(attr)
            if attr in top_level_expressions:
                relevant_expressions = [
                    expr[len(attr + ".") :]
                    for expr in expressions
                    if expr.startswith(attr + ".")
                ]
                self.attrs[attr].filter(relevant_expressions)


class StreamRecord(BaseModel):
    def __init__(self, table, stream_type, event_name, old, new, seq):
        old_a = old.to_json()["Attributes"] if old is not None else {}
        new_a = new.to_json()["Attributes"] if new is not None else {}

        rec = old if old is not None else new
        keys = {table.hash_key_attr: rec.hash_key.to_json()}
        if table.range_key_attr is not None:
            keys[table.range_key_attr] = rec.range_key.to_json()

        self.record = {
            "eventID": uuid.uuid4().hex,
            "eventName": event_name,
            "eventSource": "aws:dynamodb",
            "eventVersion": "1.0",
            "awsRegion": "us-east-1",
            "dynamodb": {
                "StreamViewType": stream_type,
                "ApproximateCreationDateTime": datetime.datetime.utcnow().isoformat(),
                "SequenceNumber": str(seq),
                "SizeBytes": 1,
                "Keys": keys,
            },
        }

        if stream_type in ("NEW_IMAGE", "NEW_AND_OLD_IMAGES"):
            self.record["dynamodb"]["NewImage"] = new_a
        if stream_type in ("OLD_IMAGE", "NEW_AND_OLD_IMAGES"):
            self.record["dynamodb"]["OldImage"] = old_a

        # This is a substantial overestimate but it's the easiest to do now
        self.record["dynamodb"]["SizeBytes"] = len(json.dumps(self.record["dynamodb"]))

    def to_json(self):
        return self.record


class StreamShard(BaseModel):
    def __init__(self, table):
        self.table = table
        self.id = "shardId-00000001541626099285-f35f62ef"
        self.starting_sequence_number = 1100000000017454423009
        self.items = []
        self.created_on = datetime.datetime.utcnow()

    def to_json(self):
        return {
            "ShardId": self.id,
            "SequenceNumberRange": {
                "StartingSequenceNumber": str(self.starting_sequence_number)
            },
        }

    def add(self, old, new):
        t = self.table.stream_specification["StreamViewType"]
        if old is None:
            event_name = "INSERT"
        elif new is None:
            event_name = "DELETE"
        else:
            event_name = "MODIFY"
        seq = len(self.items) + self.starting_sequence_number
        self.items.append(StreamRecord(self.table, t, event_name, old, new, seq))
        result = None
        from moto.awslambda import lambda_backends

        for arn, esm in self.table.lambda_event_source_mappings.items():
            region = arn[
                len("arn:aws:lambda:") : arn.index(":", len("arn:aws:lambda:"))
            ]

            result = lambda_backends[region].send_dynamodb_items(
                arn, self.items, esm.event_source_arn
            )

        if result:
            self.items = []

    def get(self, start, quantity):
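        # start is an absolute sequence number: with starting_sequence_number
        # S, get(S + 2, 3) returns the records at offsets 2, 3 and 4 of
        # self.items, serialized via to_json().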
        start -= self.starting_sequence_number
        assert start >= 0
        end = start + quantity
        return [i.to_json() for i in self.items[start:end]]


class Table(BaseModel):
    def __init__(
        self,
        table_name,
        schema=None,
        attr=None,
        throughput=None,
        indexes=None,
        global_indexes=None,
        streams=None,
    ):
        self.name = table_name
        self.attr = attr
        self.schema = schema
        self.range_key_attr = None
        self.hash_key_attr = None
        self.range_key_type = None
        self.hash_key_type = None
        for elem in schema:
            if elem["KeyType"] == "HASH":
                self.hash_key_attr = elem["AttributeName"]
                self.hash_key_type = elem["KeyType"]
            else:
                self.range_key_attr = elem["AttributeName"]
                self.range_key_type = elem["KeyType"]
        if throughput is None:
            self.throughput = {"WriteCapacityUnits": 10, "ReadCapacityUnits": 10}
        else:
            self.throughput = throughput
        self.throughput["NumberOfDecreasesToday"] = 0
        self.indexes = indexes
        self.global_indexes = global_indexes if global_indexes else []
        self.created_at = datetime.datetime.utcnow()
        self.items = defaultdict(dict)
        self.table_arn = self._generate_arn(table_name)
        self.tags = []
        self.ttl = {
            "TimeToLiveStatus": "DISABLED"  # One of 'ENABLING'|'DISABLING'|'ENABLED'|'DISABLED',
            # 'AttributeName': 'string'  # Can contain this
        }
        self.set_stream_specification(streams)
        self.lambda_event_source_mappings = {}

    @classmethod
    def create_from_cloudformation_json(
        cls, resource_name, cloudformation_json, region_name
    ):
        properties = cloudformation_json["Properties"]
        params = {}

        if "KeySchema" in properties:
            params["schema"] = properties["KeySchema"]
        if "AttributeDefinitions" in properties:
            params["attr"] = properties["AttributeDefinitions"]
        if "GlobalSecondaryIndexes" in properties:
            params["global_indexes"] = properties["GlobalSecondaryIndexes"]
        if "ProvisionedThroughput" in properties:
            params["throughput"] = properties["ProvisionedThroughput"]
        if "LocalSecondaryIndexes" in properties:
            params["indexes"] = properties["LocalSecondaryIndexes"]

        table = dynamodb_backends[region_name].create_table(
            name=properties["TableName"], **params
        )
        return table

    def _generate_arn(self, name):
        return "arn:aws:dynamodb:us-east-1:123456789011:table/" + name

    def set_stream_specification(self, streams):
        self.stream_specification = streams
        if streams and (streams.get("StreamEnabled") or streams.get("StreamViewType")):
            self.stream_specification["StreamEnabled"] = True
            self.latest_stream_label = datetime.datetime.utcnow().isoformat()
            self.stream_shard = StreamShard(self)
        else:
            self.stream_specification = {"StreamEnabled": False}
            self.latest_stream_label = None
            self.stream_shard = None

    def describe(self, base_key="TableDescription"):
        results = {
            base_key: {
                "AttributeDefinitions": self.attr,
                "ProvisionedThroughput": self.throughput,
                "TableSizeBytes": 0,
                "TableName": self.name,
                "TableStatus": "ACTIVE",
                "TableArn": self.table_arn,
                "KeySchema": self.schema,
                "ItemCount": len(self),
                "CreationDateTime": unix_time(self.created_at),
                "GlobalSecondaryIndexes": [index for index in self.global_indexes],
                "LocalSecondaryIndexes": [index for index in self.indexes],
            }
        }
        if self.stream_specification and self.stream_specification["StreamEnabled"]:
            results[base_key]["StreamSpecification"] = self.stream_specification
        if self.latest_stream_label:
            results[base_key]["LatestStreamLabel"] = self.latest_stream_label
            results[base_key]["LatestStreamArn"] = (
                self.table_arn + "/stream/" + self.latest_stream_label
            )
        return results

    def __len__(self):
        count = 0
        for key, value in self.items.items():
            if self.has_range_key:
                count += len(value)
            else:
                count += 1
        return count

    @property
    def hash_key_names(self):
        keys = [self.hash_key_attr]
        for index in self.global_indexes:
            hash_key = None
            for key in index["KeySchema"]:
                if key["KeyType"] == "HASH":
                    hash_key = key["AttributeName"]
            keys.append(hash_key)
        return keys

    @property
    def range_key_names(self):
        keys = [self.range_key_attr]
        for index in self.global_indexes:
            range_key = None
            for key in index["KeySchema"]:
                if key["KeyType"] == "RANGE":
                    range_key = key["AttributeName"]
            keys.append(range_key)
        return keys

    def put_item(
        self,
        item_attrs,
        expected=None,
        condition_expression=None,
        expression_attribute_names=None,
        expression_attribute_values=None,
        overwrite=False,
    ):
        hash_value = DynamoType(item_attrs.get(self.hash_key_attr))
        if self.has_range_key:
            range_value = DynamoType(item_attrs.get(self.range_key_attr))
        else:
            range_value = None

        if expected is None:
            expected = {}
            lookup_range_value = range_value
        else:
            expected_range_value = expected.get(self.range_key_attr, {}).get("Value")
            if expected_range_value is None:
                lookup_range_value = range_value
            else:
                lookup_range_value = DynamoType(expected_range_value)
        current = self.get_item(hash_value, lookup_range_value)

        item = Item(
            hash_value, self.hash_key_type, range_value, self.range_key_type, item_attrs
        )

        if not overwrite:
            if not get_expected(expected).expr(current):
                raise ValueError("The conditional request failed")
            condition_op = get_filter_expression(
                condition_expression,
                expression_attribute_names,
                expression_attribute_values,
            )
            if not condition_op.expr(current):
                raise ValueError("The conditional request failed")

        if range_value:
            self.items[hash_value][range_value] = item
        else:
            self.items[hash_value] = item

        if self.stream_shard is not None:
            self.stream_shard.add(current, item)

        return item

    def __nonzero__(self):
        return True

    def __bool__(self):
        return self.__nonzero__()

    @property
    def has_range_key(self):
        return self.range_key_attr is not None

    def get_item(self, hash_key, range_key=None, projection_expression=None):
        if self.has_range_key and not range_key:
            raise ValueError(
                "Table has a range key, but no range key was passed into get_item"
            )
        try:
            result = None

            if range_key:
                result = self.items[hash_key][range_key]
            elif hash_key in self.items:
                result = self.items[hash_key]

            if projection_expression and result:
                result = copy.deepcopy(result)
                result.filter(projection_expression)

            if not result:
                raise KeyError

            return result
        except KeyError:
            return None

    def delete_item(self, hash_key, range_key):
        try:
            if range_key:
                item = self.items[hash_key].pop(range_key)
            else:
                item = self.items.pop(hash_key)

            if self.stream_shard is not None:
                self.stream_shard.add(item, None)

            return item
        except KeyError:
            return None

    def query(
        self,
        hash_key,
        range_comparison,
        range_objs,
        limit,
        exclusive_start_key,
        scan_index_forward,
        projection_expression,
        index_name=None,
        filter_expression=None,
        **filter_kwargs
    ):
        results = []

        if index_name:
            all_indexes = self.all_indexes()
            indexes_by_name = dict((i["IndexName"], i) for i in all_indexes)
            if index_name not in indexes_by_name:
                raise ValueError(
                    "Invalid index: %s for table: %s. Available indexes are: %s"
                    % (index_name, self.name, ", ".join(indexes_by_name.keys()))
                )

            index = indexes_by_name[index_name]
            try:
                index_hash_key = [
                    key for key in index["KeySchema"] if key["KeyType"] == "HASH"
                ][0]
            except IndexError:
                raise ValueError("Missing Hash Key. KeySchema: %s" % index["KeySchema"])

            try:
                index_range_key = [
                    key for key in index["KeySchema"] if key["KeyType"] == "RANGE"
                ][0]
            except IndexError:
                index_range_key = None

            possible_results = []
            for item in self.all_items():
                if not isinstance(item, Item):
                    continue
                item_hash_key = item.attrs.get(index_hash_key["AttributeName"])
                if index_range_key is None:
                    if item_hash_key and item_hash_key == hash_key:
                        possible_results.append(item)
                else:
                    item_range_key = item.attrs.get(index_range_key["AttributeName"])
                    if item_hash_key and item_hash_key == hash_key and item_range_key:
                        possible_results.append(item)
        else:
            possible_results = [
                item
                for item in list(self.all_items())
                if isinstance(item, Item) and item.hash_key == hash_key
            ]
        if range_comparison:
            if index_name and not index_range_key:
                raise ValueError(
                    "Range Key comparison but no range key found for index: %s"
                    % index_name
                )

            elif index_name:
                for result in possible_results:
                    if result.attrs.get(index_range_key["AttributeName"]).compare(
                        range_comparison, range_objs
                    ):
                        results.append(result)
            else:
                for result in possible_results:
                    if result.range_key.compare(range_comparison, range_objs):
                        results.append(result)

        if filter_kwargs:
            for result in possible_results:
                for field, value in filter_kwargs.items():
                    dynamo_types = [
                        DynamoType(ele) for ele in value["AttributeValueList"]
                    ]
                    if result.attrs.get(field).compare(
                        value["ComparisonOperator"], dynamo_types
                    ):
                        results.append(result)

        if not range_comparison and not filter_kwargs:
            # If we're not filtering on range key or on an index return all
            # values
            results = possible_results

        if index_name:

            if index_range_key:
                results.sort(
                    key=lambda item: item.attrs[index_range_key["AttributeName"]].value
                    if item.attrs.get(index_range_key["AttributeName"])
                    else None
                )
            else:
                results.sort(key=lambda item: item.range_key)

        if scan_index_forward is False:
            results.reverse()

        scanned_count = len(list(self.all_items()))

        if filter_expression is not None:
            results = [item for item in results if filter_expression.expr(item)]

        results = copy.deepcopy(results)
        if projection_expression:
            for result in results:
                result.filter(projection_expression)

        results, last_evaluated_key = self._trim_results(
            results, limit, exclusive_start_key
        )
        return results, scanned_count, last_evaluated_key

    def all_items(self):
        for hash_set in self.items.values():
            if self.range_key_attr:
                for item in hash_set.values():
                    yield item
            else:
                yield hash_set

    def all_indexes(self):
        return (self.global_indexes or []) + (self.indexes or [])

    def has_idx_items(self, index_name):

        all_indexes = self.all_indexes()
        indexes_by_name = dict((i["IndexName"], i) for i in all_indexes)
        idx = indexes_by_name[index_name]
        idx_col_set = set([i["AttributeName"] for i in idx["KeySchema"]])

        for hash_set in self.items.values():
            if self.range_key_attr:
                for item in hash_set.values():
                    if idx_col_set.issubset(set(item.attrs)):
                        yield item
            else:
                if idx_col_set.issubset(set(hash_set.attrs)):
                    yield hash_set

    def scan(
        self,
        filters,
        limit,
        exclusive_start_key,
        filter_expression=None,
        index_name=None,
        projection_expression=None,
    ):
        results = []
        scanned_count = 0
        all_indexes = self.all_indexes()
        indexes_by_name = dict((i["IndexName"], i) for i in all_indexes)

        if index_name:
            if index_name not in indexes_by_name:
                raise InvalidIndexNameError(
                    "The table does not have the specified index: %s" % index_name
                )
            items = self.has_idx_items(index_name)
        else:
            items = self.all_items()

        for item in items:
            scanned_count += 1
            passes_all_conditions = True
            for (
                attribute_name,
                (comparison_operator, comparison_objs),
            ) in filters.items():
                attribute = item.attrs.get(attribute_name)

                if attribute:
                    # Attribute found
                    if not attribute.compare(comparison_operator, comparison_objs):
                        passes_all_conditions = False
                        break
                elif comparison_operator == "NULL":
                    # Comparison is NULL and we don't have the attribute
                    continue
                else:
                    # No attribute found and comparison is not NULL. This item
                    # fails
                    passes_all_conditions = False
                    break

            if filter_expression is not None:
                passes_all_conditions &= filter_expression.expr(item)

            if passes_all_conditions:
                results.append(item)

        if projection_expression:
            results = copy.deepcopy(results)
            for result in results:
                result.filter(projection_expression)

        results, last_evaluated_key = self._trim_results(
            results, limit, exclusive_start_key, index_name
        )
        return results, scanned_count, last_evaluated_key

    def _trim_results(self, results, limit, exclusive_start_key, scanned_index=None):
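        # Pagination sketch: drop everything up to and including the item that
        # matches exclusive_start_key, then cap what remains at `limit`; when
        # the cap applies, the last kept item's key(s) are returned as
        # last_evaluated_key so a caller can resume from that point.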
        if exclusive_start_key is not None:
            hash_key = DynamoType(exclusive_start_key.get(self.hash_key_attr))
            range_key = exclusive_start_key.get(self.range_key_attr)
            if range_key is not None:
                range_key = DynamoType(range_key)
            for i in range(len(results)):
                if (
                    results[i].hash_key == hash_key
                    and results[i].range_key == range_key
                ):
                    results = results[i + 1 :]
                    break

        last_evaluated_key = None
        if limit and len(results) > limit:
            results = results[:limit]
            last_evaluated_key = {self.hash_key_attr: results[-1].hash_key}
            if results[-1].range_key is not None:
                last_evaluated_key[self.range_key_attr] = results[-1].range_key

            if scanned_index:
                all_indexes = self.all_indexes()
                indexes_by_name = dict((i["IndexName"], i) for i in all_indexes)
                idx = indexes_by_name[scanned_index]
                idx_col_list = [i["AttributeName"] for i in idx["KeySchema"]]
                for col in idx_col_list:
                    last_evaluated_key[col] = results[-1].attrs[col]

        return results, last_evaluated_key

    def lookup(self, *args, **kwargs):
        if not self.schema:
            self.describe()
        for x, arg in enumerate(args):
            kwargs[self.schema[x].name] = arg
        ret = self.get_item(**kwargs)
        if not ret.keys():
            return None
        return ret


class DynamoDBBackend(BaseBackend):
    def __init__(self, region_name=None):
        self.region_name = region_name
        self.tables = OrderedDict()

    def reset(self):
        region_name = self.region_name

        self.__dict__ = {}
        self.__init__(region_name)

    def create_table(self, name, **params):
        if name in self.tables:
            return None
        table = Table(name, **params)
        self.tables[name] = table
        return table

    def delete_table(self, name):
        return self.tables.pop(name, None)

    def tag_resource(self, table_arn, tags):
        for table in self.tables:
            if self.tables[table].table_arn == table_arn:
                self.tables[table].tags.extend(tags)

    def untag_resource(self, table_arn, tag_keys):
        for table in self.tables:
            if self.tables[table].table_arn == table_arn:
                self.tables[table].tags = [
                    tag for tag in self.tables[table].tags if tag["Key"] not in tag_keys
                ]

    def list_tags_of_resource(self, table_arn):
        required_table = None
        for table in self.tables:
            if self.tables[table].table_arn == table_arn:
                required_table = self.tables[table]
        return required_table.tags

    def update_table_throughput(self, name, throughput):
        table = self.tables[name]
        table.throughput = throughput
        return table

    def update_table_streams(self, name, stream_specification):
        table = self.tables[name]
        if (
            stream_specification.get("StreamEnabled")
            or stream_specification.get("StreamViewType")
        ) and table.latest_stream_label:
            raise ValueError("Table already has stream enabled")
        table.set_stream_specification(stream_specification)
        return table

    def update_table_global_indexes(self, name, global_index_updates):
        table = self.tables[name]
        gsis_by_name = dict((i["IndexName"], i) for i in table.global_indexes)
        for gsi_update in global_index_updates:
            gsi_to_create = gsi_update.get("Create")
            gsi_to_update = gsi_update.get("Update")
            gsi_to_delete = gsi_update.get("Delete")

            if gsi_to_delete:
                index_name = gsi_to_delete["IndexName"]
                if index_name not in gsis_by_name:
                    raise ValueError(
                        "Global Secondary Index does not exist, but tried to delete: %s"
                        % gsi_to_delete["IndexName"]
                    )

                del gsis_by_name[index_name]

            if gsi_to_update:
                index_name = gsi_to_update["IndexName"]
                if index_name not in gsis_by_name:
                    raise ValueError(
                        "Global Secondary Index does not exist, but tried to update: %s"
                        % gsi_to_update["IndexName"]
                    )
                gsis_by_name[index_name].update(gsi_to_update)

            if gsi_to_create:
                if gsi_to_create["IndexName"] in gsis_by_name:
                    raise ValueError(
                        "Global Secondary Index already exists: %s"
                        % gsi_to_create["IndexName"]
                    )

                gsis_by_name[gsi_to_create["IndexName"]] = gsi_to_create

        # in python 3.6, dict.values() returns a dict_values object, but we expect it to be a list in other
        # parts of the codebase
        table.global_indexes = list(gsis_by_name.values())
        return table
|
|
|
|
|

    def put_item(
        self,
        table_name,
        item_attrs,
        expected=None,
        condition_expression=None,
        expression_attribute_names=None,
        expression_attribute_values=None,
        overwrite=False,
    ):
        table = self.tables.get(table_name)
        if not table:
            return None
        return table.put_item(
            item_attrs,
            expected,
            condition_expression,
            expression_attribute_names,
            expression_attribute_values,
            overwrite,
        )
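
    # Illustrative sketch (not part of the original source): ``item_attrs`` uses
    # the DynamoDB wire format, with every attribute wrapped in a type
    # descriptor:
    #
    #   backend.put_item(
    #       "messages",  # hypothetical table name
    #       {"id": {"S": "msg-1"}, "body": {"S": "hello"}, "views": {"N": "0"}},
    #   )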

    def get_table_keys_name(self, table_name, keys):
        """
        Given a set of keys, extracts the hash and range key names
        """
        table = self.tables.get(table_name)
        if not table:
            return None, None
        else:
            if len(keys) == 1:
                for key in keys:
                    if key in table.hash_key_names:
                        return key, None
            # for potential_hash, potential_range in zip(table.hash_key_names, table.range_key_names):
            #     if set([potential_hash, potential_range]) == set(keys):
            #         return potential_hash, potential_range
            potential_hash, potential_range = None, None
            for key in set(keys):
                if key in table.hash_key_names:
                    potential_hash = key
                elif key in table.range_key_names:
                    potential_range = key
            return potential_hash, potential_range
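
    # Illustrative sketch (not part of the original source): for a table keyed
    # on ``id``/``ts``, ``get_table_keys_name("messages", {"id", "ts"})`` would
    # return ("id", "ts"), while a hash-only table returns (hash_key_name, None).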

    def get_keys_value(self, table, keys):
        if table.hash_key_attr not in keys or (
            table.has_range_key and table.range_key_attr not in keys
        ):
            # Also raised when the hash key itself is missing, even though the
            # message only mentions the range key.
            raise ValueError(
                "Table has a range key, but no range key was passed into get_item"
            )
        hash_key = DynamoType(keys[table.hash_key_attr])
        range_key = (
            DynamoType(keys[table.range_key_attr]) if table.has_range_key else None
        )
        return hash_key, range_key
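
    # Illustrative sketch (not part of the original source): for a table with
    # hash key ``id`` and range key ``ts``, ``keys`` would look like
    #
    #   {"id": {"S": "msg-1"}, "ts": {"N": "1572531866"}}
    #
    # and both entries must be present before the DynamoType wrapping succeeds.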

    def get_table(self, table_name):
        return self.tables.get(table_name)

    def get_item(self, table_name, keys, projection_expression=None):
        table = self.get_table(table_name)
        if not table:
            raise ValueError("No table found")
        hash_key, range_key = self.get_keys_value(table, keys)
        return table.get_item(hash_key, range_key, projection_expression)

    def query(
        self,
        table_name,
        hash_key_dict,
        range_comparison,
        range_value_dicts,
        limit,
        exclusive_start_key,
        scan_index_forward,
        projection_expression,
        index_name=None,
        expr_names=None,
        expr_values=None,
        filter_expression=None,
        **filter_kwargs
    ):
        table = self.tables.get(table_name)
        if not table:
            return None, None

        hash_key = DynamoType(hash_key_dict)
        range_values = [DynamoType(range_value) for range_value in range_value_dicts]

        filter_expression = get_filter_expression(
            filter_expression, expr_names, expr_values
        )

        return table.query(
            hash_key,
            range_comparison,
            range_values,
            limit,
            exclusive_start_key,
            scan_index_forward,
            projection_expression,
            index_name,
            filter_expression,
            **filter_kwargs
        )
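
    # Illustrative sketch (not part of the original source): a key-condition
    # query for one hash key value, newest items first, might be dispatched as
    #
    #   results = backend.query(
    #       "messages",      # hypothetical table name
    #       {"S": "msg-1"},  # hash_key_dict
    #       None,            # range_comparison (no range-key condition)
    #       [],              # range_value_dicts
    #       25,              # limit
    #       None,            # exclusive_start_key
    #       False,           # scan_index_forward
    #       None,            # projection_expression
    #   )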

    def scan(
        self,
        table_name,
        filters,
        limit,
        exclusive_start_key,
        filter_expression,
        expr_names,
        expr_values,
        index_name,
        projection_expression,
    ):
        table = self.tables.get(table_name)
        if not table:
            return None, None, None

        scan_filters = {}
        for key, (comparison_operator, comparison_values) in filters.items():
            dynamo_types = [DynamoType(value) for value in comparison_values]
            scan_filters[key] = (comparison_operator, dynamo_types)

        filter_expression = get_filter_expression(
            filter_expression, expr_names, expr_values
        )

        # ``projection_expression`` may be absent; only resolve attribute-name
        # aliases (e.g. ``#n`` -> real name) when one was actually supplied.
        if projection_expression:
            expr_names = expr_names or {}
            projection_expression = ",".join(
                [
                    expr_names.get(attr, attr)
                    for attr in projection_expression.replace(" ", "").split(",")
                ]
            )

        return table.scan(
            scan_filters,
            limit,
            exclusive_start_key,
            filter_expression,
            index_name,
            projection_expression,
        )
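
    # Illustrative sketch (not part of the original source): legacy ScanFilter
    # conditions arrive as {attribute: (operator, [values])} pairs, e.g.
    #
    #   results = backend.scan(
    #       "messages",                         # hypothetical table name
    #       {"views": ("GT", [{"N": "100"}])},  # filters
    #       None,                               # limit
    #       None,                               # exclusive_start_key
    #       None,                               # filter_expression
    #       {},                                 # expr_names
    #       {},                                 # expr_values
    #       None,                               # index_name
    #       None,                               # projection_expression
    #   )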

    def update_item(
        self,
        table_name,
        key,
        update_expression,
        attribute_updates,
        expression_attribute_names,
        expression_attribute_values,
        expected=None,
        condition_expression=None,
    ):
        table = self.get_table(table_name)

        if all([table.hash_key_attr in key, table.range_key_attr in key]):
            # Covers cases where table has hash and range keys, ``key`` param
            # will be a dict
            hash_value = DynamoType(key[table.hash_key_attr])
            range_value = DynamoType(key[table.range_key_attr])
        elif table.hash_key_attr in key:
            # Covers cases where ``key`` is a dict containing only the hash
            # key attribute
            hash_value = DynamoType(key[table.hash_key_attr])
            range_value = None
        else:
            # Covers other cases
            hash_value = DynamoType(key)
            range_value = None

        item = table.get_item(hash_value, range_value)

        if not expected:
            expected = {}

        if not get_expected(expected).expr(item):
            raise ValueError("The conditional request failed")
        condition_op = get_filter_expression(
            condition_expression,
            expression_attribute_names,
            expression_attribute_values,
        )
        if not condition_op.expr(item):
            raise ValueError("The conditional request failed")

        # Update does not fail on new items, so create one
        if item is None:
            data = {table.hash_key_attr: {hash_value.type: hash_value.value}}
            if range_value:
                data.update(
                    {table.range_key_attr: {range_value.type: range_value.value}}
                )

            table.put_item(data)
            item = table.get_item(hash_value, range_value)

        if update_expression:
            item.update(
                update_expression,
                expression_attribute_names,
                expression_attribute_values,
            )
        else:
            item.update_with_attribute_updates(attribute_updates)
        return item
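
    # Illustrative sketch (not part of the original source): an
    # UpdateExpression call might look like
    #
    #   backend.update_item(
    #       "messages",                  # hypothetical table name
    #       {"id": {"S": "msg-1"}},      # key
    #       "SET #b = :val",             # update_expression
    #       None,                        # attribute_updates
    #       {"#b": "body"},              # expression_attribute_names
    #       {":val": {"S": "updated"}},  # expression_attribute_values
    #   )
    #
    # Because updates are upserts, a missing item is first created from the key
    # before the expression is applied.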

    def delete_item(
        self,
        table_name,
        key,
        expression_attribute_names=None,
        expression_attribute_values=None,
        condition_expression=None,
    ):
        table = self.get_table(table_name)
        if not table:
            return None

        hash_value, range_value = self.get_keys_value(table, key)
        item = table.get_item(hash_value, range_value)

        condition_op = get_filter_expression(
            condition_expression,
            expression_attribute_names,
            expression_attribute_values,
        )
        if not condition_op.expr(item):
            raise ValueError("The conditional request failed")

        return table.delete_item(hash_value, range_value)
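
    # Illustrative sketch (not part of the original source): a conditional
    # delete that only removes unread messages might be expressed as
    #
    #   backend.delete_item(
    #       "messages",  # hypothetical table name
    #       {"id": {"S": "msg-1"}},
    #       expression_attribute_values={":read": {"BOOL": False}},
    #       condition_expression="is_read = :read",
    #   )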

    def update_ttl(self, table_name, ttl_spec):
        table = self.tables.get(table_name)
        if table is None:
            raise JsonRESTError("ResourceNotFound", "Table not found")

        if "Enabled" not in ttl_spec or "AttributeName" not in ttl_spec:
            raise JsonRESTError(
                "InvalidParameterValue",
                "TimeToLiveSpecification does not contain Enabled and AttributeName",
            )

        if ttl_spec["Enabled"]:
            table.ttl["TimeToLiveStatus"] = "ENABLED"
        else:
            table.ttl["TimeToLiveStatus"] = "DISABLED"
        table.ttl["AttributeName"] = ttl_spec["AttributeName"]
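
    # Illustrative sketch (not part of the original source): ``ttl_spec`` is the
    # TimeToLiveSpecification document, e.g.
    #
    #   backend.update_ttl(
    #       "messages",  # hypothetical table name
    #       {"Enabled": True, "AttributeName": "expires_at"},
    #   )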

    def describe_ttl(self, table_name):
        table = self.tables.get(table_name)
        if table is None:
            raise JsonRESTError("ResourceNotFound", "Table not found")

        return table.ttl


available_regions = boto3.session.Session().get_available_regions("dynamodb")
dynamodb_backends = {
    region: DynamoDBBackend(region_name=region) for region in available_regions
}
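
# Illustrative sketch (not part of the original source): with the region map
# above, test code can reach a per-region backend directly, e.g.
#
#   backend = dynamodb_backends["us-east-1"]
#
# though typical moto usage goes through the mock decorator and boto3 instead.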