DynamoDB - Add 1MB item size check
parent 66b26cd7b0
commit e82e1e3f39
@@ -285,6 +285,9 @@ class Item(BaseModel):
     def __repr__(self):
         return "Item: {0}".format(self.to_json())
 
+    def size(self):
+        return sum([bytesize(key) + value.size() for key, value in self.attrs.items()])
+
     def to_json(self):
         attributes = {}
         for attribute_key, attribute in self.attrs.items():
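For context (not part of the diff): a minimal sketch of what the new Item.size() computes, assuming bytesize() returns the UTF-8 byte length of a string and attribute values expose a size() of their own. FakeAttributeValue is an invented stand-in for moto's attribute value type, used only for illustration.

def bytesize(val):
    # Assumption: bytesize() returns the UTF-8 byte length of a string;
    # its real definition is not shown in this diff.
    return len(val.encode("utf-8"))


class FakeAttributeValue:
    # Invented stand-in for a DynamoDB attribute value with its own size().
    def __init__(self, value):
        self.value = value

    def size(self):
        return bytesize(self.value)


attrs = {"partition_key": FakeAttributeValue("partition_key_val")}
item_size = sum(bytesize(key) + value.size() for key, value in attrs.items())
print(item_size)  # 13 + 17 = 30 bytes, matching the "size=30" notes in the test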
@@ -1123,6 +1126,14 @@ class Table(BaseModel):
                 break
 
         last_evaluated_key = None
+        size_limit = 1000000  # DynamoDB has a 1MB size limit
+        item_size = sum([res.size() for res in results])
+        if item_size > size_limit:
+            item_size = idx = 0
+            while item_size + results[idx].size() < size_limit:
+                item_size += results[idx].size()
+                idx += 1
+            limit = min(limit, idx) if limit else idx
         if limit and len(results) > limit:
             results = results[:limit]
             last_evaluated_key = {self.hash_key_attr: results[-1].hash_key}
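For context (not part of the diff): a standalone sketch of the truncation logic above, assuming each result exposes a size() method. It accumulates result sizes until the next item would push the page past the 1MB cap, then lowers the effective limit so the existing slicing code sets LastEvaluatedKey. FakeResult and apply_size_limit are invented names for illustration only.

class FakeResult:
    # Invented stand-in for an Item: just reports a fixed byte size.
    def __init__(self, nbytes):
        self.nbytes = nbytes

    def size(self):
        return self.nbytes


def apply_size_limit(results, limit, size_limit=1000000):
    # Mirrors the logic added above: once the running total crosses the cap,
    # derive an effective limit from the index where it would be exceeded.
    item_size = sum([res.size() for res in results])
    if item_size > size_limit:
        item_size = idx = 0
        while item_size + results[idx].size() < size_limit:
            item_size += results[idx].size()
            idx += 1
        limit = min(limit, idx) if limit else idx
    return limit


results = [FakeResult(60) for _ in range(20000)]  # ~1.2MB in total
print(apply_size_limit(results, limit=None))      # 16666 - well below 20000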
@@ -4132,3 +4132,41 @@ def test_gsi_verify_negative_number_order():
     [float(item["gsiK1SortKey"]) for item in resp["Items"]].should.equal(
         [-0.7, -0.6, 0.7]
     )
+
+
+@mock_dynamodb2
+def test_dynamodb_max_1mb_limit():
+    ddb = boto3.resource("dynamodb", region_name="eu-west-1")
+
+    table_name = "populated-mock-table"
+    table = ddb.create_table(
+        TableName=table_name,
+        KeySchema=[
+            {"AttributeName": "partition_key", "KeyType": "HASH"},
+            {"AttributeName": "sort_key", "KeyType": "SORT"},
+        ],
+        AttributeDefinitions=[
+            {"AttributeName": "partition_key", "AttributeType": "S"},
+            {"AttributeName": "sort_key", "AttributeType": "S"},
+        ],
+        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
+    )
+
+    # Populate the table
+    items = [
+        {
+            "partition_key": "partition_key_val",  # size=30
+            "sort_key": "sort_key_value____" + str(i),  # size=30
+        }
+        for i in range(10000, 29999)
+    ]
+    with table.batch_writer() as batch:
+        for item in items:
+            batch.put_item(Item=item)
+
+    response = table.query(
+        KeyConditionExpression=Key("partition_key").eq("partition_key_val")
+    )
+    # We shouldn't get everything back - the total result set is well over 1MB
+    assert response["Count"] < len(items)
+    response["LastEvaluatedKey"].shouldnt.be(None)
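Not part of the commit: a minimal sketch of how a caller could page past the 1MB cap that this test asserts, by feeding LastEvaluatedKey back into the next query as ExclusiveStartKey. query_all and partition_value are made-up names for illustration.

from boto3.dynamodb.conditions import Key


def query_all(table, partition_value):
    # Collect every item for one partition, following LastEvaluatedKey
    # until the server stops returning it.
    items = []
    kwargs = {"KeyConditionExpression": Key("partition_key").eq(partition_value)}
    while True:
        response = table.query(**kwargs)
        items.extend(response["Items"])
        if "LastEvaluatedKey" not in response:
            break
        kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
    return items


# e.g. all_items = query_all(table, "partition_key_val")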