Merge pull request #1935 from kgutwin/i-1875-dynamodb-streams

Support DynamoDB Streams
Steve Pulec 2018-12-28 20:30:58 -05:00 committed by GitHub
commit 48ee867245
11 changed files with 616 additions and 21 deletions
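
The change wires a new dynamodbstreams backend and a mock_dynamodbstreams decorator into moto alongside the existing DynamoDB mocks. As a quick orientation before the per-file diffs, here is a minimal usage sketch modelled on the tests added in this commit (the table and attribute names are illustrative):

import boto3
from moto import mock_dynamodb2, mock_dynamodbstreams

@mock_dynamodb2
@mock_dynamodbstreams
def create_streamed_table():
    # Creating a table with a StreamSpecification enables the mocked stream.
    dyn = boto3.client('dynamodb', region_name='us-east-1')
    resp = dyn.create_table(
        TableName='example-table',
        KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
        ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1},
        StreamSpecification={'StreamEnabled': True,
                             'StreamViewType': 'NEW_AND_OLD_IMAGES'})
    # The table description now carries a stream ARN that the
    # dynamodbstreams client can consume.
    return resp['TableDescription']['LatestStreamArn']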

moto/__init__.py

@@ -16,6 +16,7 @@ from .cognitoidp import mock_cognitoidp, mock_cognitoidp_deprecated # flake8: noqa
from .datapipeline import mock_datapipeline, mock_datapipeline_deprecated # flake8: noqa
from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated # flake8: noqa
from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated # flake8: noqa
from .dynamodbstreams import mock_dynamodbstreams # flake8: noqa
from .ec2 import mock_ec2, mock_ec2_deprecated # flake8: noqa
from .ecr import mock_ecr, mock_ecr_deprecated # flake8: noqa
from .ecs import mock_ecs, mock_ecs_deprecated # flake8: noqa

moto/backends.py

@@ -12,6 +12,7 @@ from moto.core import moto_api_backends
from moto.datapipeline import datapipeline_backends
from moto.dynamodb import dynamodb_backends
from moto.dynamodb2 import dynamodb_backends2
from moto.dynamodbstreams import dynamodbstreams_backends
from moto.ec2 import ec2_backends
from moto.ecr import ecr_backends
from moto.ecs import ecs_backends
@@ -59,6 +60,7 @@ BACKENDS = {
'datapipeline': datapipeline_backends,
'dynamodb': dynamodb_backends,
'dynamodb2': dynamodb_backends2,
'dynamodbstreams': dynamodbstreams_backends,
'ec2': ec2_backends,
'ecr': ecr_backends,
'ecs': ecs_backends,

moto/dynamodb2/models.py

@@ -5,6 +5,7 @@ import datetime
import decimal
import json
import re
import uuid
import boto3
from moto.compat import OrderedDict
@@ -292,9 +293,82 @@ class Item(BaseModel):
'ADD not supported for %s' % ', '.join(update_action['Value'].keys()))
class StreamRecord(BaseModel):
def __init__(self, table, stream_type, event_name, old, new, seq):
old_a = old.to_json()['Attributes'] if old is not None else {}
new_a = new.to_json()['Attributes'] if new is not None else {}
rec = old if old is not None else new
keys = {table.hash_key_attr: rec.hash_key.to_json()}
if table.range_key_attr is not None:
keys[table.range_key_attr] = rec.range_key.to_json()
self.record = {
'eventID': uuid.uuid4().hex,
'eventName': event_name,
'eventSource': 'aws:dynamodb',
'eventVersion': '1.0',
'awsRegion': 'us-east-1',
'dynamodb': {
'StreamViewType': stream_type,
'ApproximateCreationDateTime': datetime.datetime.utcnow().isoformat(),
'SequenceNumber': seq,
'SizeBytes': 1,
'Keys': keys
}
}
if stream_type in ('NEW_IMAGE', 'NEW_AND_OLD_IMAGES'):
self.record['dynamodb']['NewImage'] = new_a
if stream_type in ('OLD_IMAGE', 'NEW_AND_OLD_IMAGES'):
self.record['dynamodb']['OldImage'] = old_a
# This is a substantial overestimate but it's the easiest to do now
self.record['dynamodb']['SizeBytes'] = len(
json.dumps(self.record['dynamodb']))
def to_json(self):
return self.record
class StreamShard(BaseModel):
def __init__(self, table):
self.table = table
self.id = 'shardId-00000001541626099285-f35f62ef'
self.starting_sequence_number = 1100000000017454423009
self.items = []
self.created_on = datetime.datetime.utcnow()
def to_json(self):
return {
'ShardId': self.id,
'SequenceNumberRange': {
'StartingSequenceNumber': str(self.starting_sequence_number)
}
}
def add(self, old, new):
t = self.table.stream_specification['StreamViewType']
if old is None:
event_name = 'INSERT'
elif new is None:
event_name = 'DELETE'
else:
event_name = 'MODIFY'
seq = len(self.items) + self.starting_sequence_number
self.items.append(
StreamRecord(self.table, t, event_name, old, new, seq))
def get(self, start, quantity):
start -= self.starting_sequence_number
assert start >= 0
end = start + quantity
return [i.to_json() for i in self.items[start:end]]
class Table(BaseModel):
def __init__(self, table_name, schema=None, attr=None, throughput=None, indexes=None, global_indexes=None):
def __init__(self, table_name, schema=None, attr=None, throughput=None, indexes=None, global_indexes=None, streams=None):
self.name = table_name
self.attr = attr
self.schema = schema
@@ -325,10 +399,22 @@ class Table(BaseModel):
'TimeToLiveStatus': 'DISABLED' # One of 'ENABLING'|'DISABLING'|'ENABLED'|'DISABLED',
# 'AttributeName': 'string' # Can contain this
}
self.set_stream_specification(streams)
def _generate_arn(self, name):
return 'arn:aws:dynamodb:us-east-1:123456789011:table/' + name
def set_stream_specification(self, streams):
self.stream_specification = streams
if streams and (streams.get('StreamEnabled') or streams.get('StreamViewType')):
self.stream_specification['StreamEnabled'] = True
self.latest_stream_label = datetime.datetime.utcnow().isoformat()
self.stream_shard = StreamShard(self)
else:
self.stream_specification = {'StreamEnabled': False}
self.latest_stream_label = None
self.stream_shard = None
def describe(self, base_key='TableDescription'):
results = {
base_key: {
@@ -345,6 +431,11 @@ class Table(BaseModel):
'LocalSecondaryIndexes': [index for index in self.indexes],
}
}
if self.stream_specification and self.stream_specification['StreamEnabled']:
results[base_key]['StreamSpecification'] = self.stream_specification
if self.latest_stream_label:
results[base_key]['LatestStreamLabel'] = self.latest_stream_label
results[base_key]['LatestStreamArn'] = self.table_arn + '/stream/' + self.latest_stream_label
return results
def __len__(self):
@@ -385,10 +476,6 @@ class Table(BaseModel):
else:
range_value = None
item = Item(hash_value, self.hash_key_type, range_value,
self.range_key_type, item_attrs)
if not overwrite:
if expected is None:
expected = {}
lookup_range_value = range_value
@@ -399,9 +486,12 @@ class Table(BaseModel):
lookup_range_value = range_value
else:
lookup_range_value = DynamoType(expected_range_value)
current = self.get_item(hash_value, lookup_range_value)
item = Item(hash_value, self.hash_key_type, range_value,
self.range_key_type, item_attrs)
if not overwrite:
if current is None:
current_attr = {}
elif hasattr(current, 'attrs'):
@@ -432,6 +522,10 @@ class Table(BaseModel):
self.items[hash_value][range_value] = item
else:
self.items[hash_value] = item
if self.stream_shard is not None:
self.stream_shard.add(current, item)
return item
def __nonzero__(self):
@@ -462,9 +556,14 @@ class Table(BaseModel):
def delete_item(self, hash_key, range_key):
try:
if range_key:
return self.items[hash_key].pop(range_key)
item = self.items[hash_key].pop(range_key)
else:
return self.items.pop(hash_key)
item = self.items.pop(hash_key)
if self.stream_shard is not None:
self.stream_shard.add(item, None)
return item
except KeyError:
return None
@@ -680,6 +779,13 @@ class DynamoDBBackend(BaseBackend):
table.throughput = throughput
return table
def update_table_streams(self, name, stream_specification):
table = self.tables[name]
if (stream_specification.get('StreamEnabled') or stream_specification.get('StreamViewType')) and table.latest_stream_label:
raise ValueError('Table already has stream enabled')
table.set_stream_specification(stream_specification)
return table
def update_table_global_indexes(self, name, global_index_updates):
table = self.tables[name]
gsis_by_name = dict((i['IndexName'], i) for i in table.global_indexes)
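
To make the new stream model concrete: for a table created with StreamViewType NEW_AND_OLD_IMAGES, each entry appended by StreamShard.add is a StreamRecord whose payload looks roughly like the sketch below (assembled from the StreamRecord constructor above; the key and attribute values are illustrative):

# Shape of StreamRecord.to_json() for the first INSERT on a streamed table.
{
    'eventID': '<uuid4 hex>',
    'eventName': 'INSERT',            # INSERT, MODIFY or DELETE, chosen in StreamShard.add
    'eventSource': 'aws:dynamodb',
    'eventVersion': '1.0',
    'awsRegion': 'us-east-1',
    'dynamodb': {
        'StreamViewType': 'NEW_AND_OLD_IMAGES',
        'ApproximateCreationDateTime': '<UTC ISO-8601 timestamp>',
        'SequenceNumber': 1100000000017454423009,   # shard start + position in the shard
        'SizeBytes': 123,                           # len(json.dumps(...)) of this dict
        'Keys': {'id': {'S': 'entry1'}},
        'NewImage': {'id': {'S': 'entry1'}, 'first_col': {'S': 'foo'}},
        'OldImage': {}                              # empty because the item did not exist before
    }
}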

moto/dynamodb2/responses.py

@@ -104,13 +104,16 @@ class DynamoHandler(BaseResponse):
# getting the indexes
global_indexes = body.get("GlobalSecondaryIndexes", [])
local_secondary_indexes = body.get("LocalSecondaryIndexes", [])
# get the stream specification
streams = body.get("StreamSpecification")
table = self.dynamodb_backend.create_table(table_name,
schema=key_schema,
throughput=throughput,
attr=attr,
global_indexes=global_indexes,
indexes=local_secondary_indexes)
indexes=local_secondary_indexes,
streams=streams)
if table is not None:
return dynamo_json_dump(table.describe())
else:
@@ -163,12 +166,20 @@ class DynamoHandler(BaseResponse):
def update_table(self):
name = self.body['TableName']
table = self.dynamodb_backend.get_table(name)
if 'GlobalSecondaryIndexUpdates' in self.body:
table = self.dynamodb_backend.update_table_global_indexes(
name, self.body['GlobalSecondaryIndexUpdates'])
if 'ProvisionedThroughput' in self.body:
throughput = self.body["ProvisionedThroughput"]
table = self.dynamodb_backend.update_table_throughput(name, throughput)
if 'StreamSpecification' in self.body:
try:
table = self.dynamodb_backend.update_table_streams(name, self.body['StreamSpecification'])
except ValueError:
er = 'com.amazonaws.dynamodb.v20111205#ResourceInUseException'
return self.error(er, 'Cannot enable stream')
return dynamo_json_dump(table.describe())
def describe_table(self):

moto/dynamodbstreams/__init__.py

@@ -0,0 +1,6 @@
from __future__ import unicode_literals
from .models import dynamodbstreams_backends
from ..core.models import base_decorator
dynamodbstreams_backend = dynamodbstreams_backends['us-east-1']
mock_dynamodbstreams = base_decorator(dynamodbstreams_backends)

moto/dynamodbstreams/models.py

@@ -0,0 +1,129 @@
from __future__ import unicode_literals
import os
import json
import boto3
import base64
from moto.core import BaseBackend, BaseModel
from moto.dynamodb2.models import dynamodb_backends
class ShardIterator(BaseModel):
def __init__(self, streams_backend, stream_shard, shard_iterator_type, sequence_number=None):
self.id = base64.b64encode(os.urandom(472)).decode('utf-8')
self.streams_backend = streams_backend
self.stream_shard = stream_shard
self.shard_iterator_type = shard_iterator_type
if shard_iterator_type == 'TRIM_HORIZON':
self.sequence_number = stream_shard.starting_sequence_number
elif shard_iterator_type == 'LATEST':
self.sequence_number = stream_shard.starting_sequence_number + len(stream_shard.items)
elif shard_iterator_type == 'AT_SEQUENCE_NUMBER':
self.sequence_number = sequence_number
elif shard_iterator_type == 'AFTER_SEQUENCE_NUMBER':
self.sequence_number = sequence_number + 1
@property
def arn(self):
return '{}/stream/{}|1|{}'.format(
self.stream_shard.table.table_arn,
self.stream_shard.table.latest_stream_label,
self.id)
def to_json(self):
return {
'ShardIterator': self.arn
}
def get(self, limit=1000):
items = self.stream_shard.get(self.sequence_number, limit)
try:
last_sequence_number = max(i['dynamodb']['SequenceNumber'] for i in items)
new_shard_iterator = ShardIterator(self.streams_backend,
self.stream_shard,
'AFTER_SEQUENCE_NUMBER',
last_sequence_number)
except ValueError:
new_shard_iterator = ShardIterator(self.streams_backend,
self.stream_shard,
'AT_SEQUENCE_NUMBER',
self.sequence_number)
self.streams_backend.shard_iterators[new_shard_iterator.arn] = new_shard_iterator
return {
'NextShardIterator': new_shard_iterator.arn,
'Records': items
}
class DynamoDBStreamsBackend(BaseBackend):
def __init__(self, region):
self.region = region
self.shard_iterators = {}
def reset(self):
region = self.region
self.__dict__ = {}
self.__init__(region)
@property
def dynamodb(self):
return dynamodb_backends[self.region]
def _get_table_from_arn(self, arn):
table_name = arn.split(':', 6)[5].split('/')[1]
return self.dynamodb.get_table(table_name)
def describe_stream(self, arn):
table = self._get_table_from_arn(arn)
resp = {'StreamDescription': {
'StreamArn': arn,
'StreamLabel': table.latest_stream_label,
'StreamStatus': ('ENABLED' if table.latest_stream_label
else 'DISABLED'),
'StreamViewType': table.stream_specification['StreamViewType'],
'CreationRequestDateTime': table.stream_shard.created_on.isoformat(),
'TableName': table.name,
'KeySchema': table.schema,
'Shards': ([table.stream_shard.to_json()] if table.stream_shard
else [])
}}
return json.dumps(resp)
def list_streams(self, table_name=None):
streams = []
for table in self.dynamodb.tables.values():
if table_name is not None and table.name != table_name:
continue
if table.latest_stream_label:
d = table.describe(base_key='Table')
streams.append({
'StreamArn': d['Table']['LatestStreamArn'],
'TableName': d['Table']['TableName'],
'StreamLabel': d['Table']['LatestStreamLabel']
})
return json.dumps({'Streams': streams})
def get_shard_iterator(self, arn, shard_id, shard_iterator_type, sequence_number=None):
table = self._get_table_from_arn(arn)
assert table.stream_shard.id == shard_id
shard_iterator = ShardIterator(self, table.stream_shard,
shard_iterator_type,
sequence_number)
self.shard_iterators[shard_iterator.arn] = shard_iterator
return json.dumps(shard_iterator.to_json())
def get_records(self, iterator_arn, limit):
shard_iterator = self.shard_iterators[iterator_arn]
return json.dumps(shard_iterator.get(limit))
available_regions = boto3.session.Session().get_available_regions(
'dynamodbstreams')
dynamodbstreams_backends = {region: DynamoDBStreamsBackend(region=region)
for region in available_regions}
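
Each ShardIterator.get() call registers a follow-up iterator (AFTER_SEQUENCE_NUMBER of the last record it returned, or AT_SEQUENCE_NUMBER of the current position when nothing was returned) and hands back its ARN as NextShardIterator, so a consumer pages through the shard by always passing the iterator it was last given. A minimal sketch of draining a stream this way, modelled on the tests added later in this commit (table and item names are illustrative):

import boto3
from moto import mock_dynamodb2, mock_dynamodbstreams

@mock_dynamodb2
@mock_dynamodbstreams
def drain_stream():
    # Set up a streamed table and write one item.
    dyn = boto3.client('dynamodb', region_name='us-east-1')
    dyn.create_table(
        TableName='test-streams',
        KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
        ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1},
        StreamSpecification={'StreamEnabled': True,
                             'StreamViewType': 'NEW_AND_OLD_IMAGES'})
    dyn.put_item(TableName='test-streams', Item={'id': {'S': 'entry1'}})

    # Walk the single mocked shard from TRIM_HORIZON until a page comes back empty.
    streams = boto3.client('dynamodbstreams', region_name='us-east-1')
    arn = streams.list_streams(TableName='test-streams')['Streams'][0]['StreamArn']
    shard_id = streams.describe_stream(
        StreamArn=arn)['StreamDescription']['Shards'][0]['ShardId']
    iterator = streams.get_shard_iterator(
        StreamArn=arn, ShardId=shard_id,
        ShardIteratorType='TRIM_HORIZON')['ShardIterator']
    while True:
        page = streams.get_records(ShardIterator=iterator)
        for record in page['Records']:
            print(record['eventName'], record['dynamodb']['Keys'])
        if not page['Records']:
            break
        iterator = page['NextShardIterator']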

moto/dynamodbstreams/responses.py

@@ -0,0 +1,34 @@
from __future__ import unicode_literals
from moto.core.responses import BaseResponse
from .models import dynamodbstreams_backends
class DynamoDBStreamsHandler(BaseResponse):
@property
def backend(self):
return dynamodbstreams_backends[self.region]
def describe_stream(self):
arn = self._get_param('StreamArn')
return self.backend.describe_stream(arn)
def list_streams(self):
table_name = self._get_param('TableName')
return self.backend.list_streams(table_name)
def get_shard_iterator(self):
arn = self._get_param('StreamArn')
shard_id = self._get_param('ShardId')
shard_iterator_type = self._get_param('ShardIteratorType')
return self.backend.get_shard_iterator(arn, shard_id,
shard_iterator_type)
def get_records(self):
arn = self._get_param('ShardIterator')
limit = self._get_param('Limit')
if limit is None:
limit = 1000
return self.backend.get_records(arn, limit)

moto/dynamodbstreams/urls.py

@@ -0,0 +1,10 @@
from __future__ import unicode_literals
from .responses import DynamoDBStreamsHandler
url_bases = [
"https?://streams.dynamodb.(.+).amazonaws.com"
]
url_paths = {
"{0}/$": DynamoDBStreamsHandler.dispatch,
}

moto/server.py

@@ -80,6 +80,9 @@ class DomainDispatcherApplication(object):
region = 'us-east-1'
if service == 'dynamodb':
if environ['HTTP_X_AMZ_TARGET'].startswith('DynamoDBStreams'):
host = 'dynamodbstreams'
else:
dynamo_api_version = environ['HTTP_X_AMZ_TARGET'].split("_")[1].split(".")[0]
# If Newer API version, use dynamodb2
if dynamo_api_version > "20111205":
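
The X-Amz-Target header is what distinguishes a DynamoDB Streams request from a plain DynamoDB one here, so in standalone server mode both clients can point at the same endpoint. A rough sketch of exercising that against a locally running moto server (the port and dummy credentials are assumptions for illustration, not part of this diff):

import boto3

# Assumes a standalone moto server is already listening on localhost:5000.
streams = boto3.client(
    'dynamodbstreams',
    region_name='us-east-1',
    endpoint_url='http://localhost:5000',
    aws_access_key_id='testing',
    aws_secret_access_key='testing')
print(streams.list_streams())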

tests/test_dynamodb2/test_dynamodb.py

@@ -1336,3 +1336,62 @@ def test_query_global_secondary_index_when_created_via_update_table_resource():
assert len(forum_and_subject_items) == 1
assert forum_and_subject_items[0] == {'user_id': Decimal('1'), 'forum_name': 'cats',
'subject': 'my pet is the cutest'}
@mock_dynamodb2
def test_dynamodb_streams_1():
conn = boto3.client('dynamodb', region_name='us-east-1')
resp = conn.create_table(
TableName='test-streams',
KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1},
StreamSpecification={
'StreamEnabled': True,
'StreamViewType': 'NEW_AND_OLD_IMAGES'
}
)
assert 'StreamSpecification' in resp['TableDescription']
assert resp['TableDescription']['StreamSpecification'] == {
'StreamEnabled': True,
'StreamViewType': 'NEW_AND_OLD_IMAGES'
}
assert 'LatestStreamLabel' in resp['TableDescription']
assert 'LatestStreamArn' in resp['TableDescription']
resp = conn.delete_table(TableName='test-streams')
assert 'StreamSpecification' in resp['TableDescription']
@mock_dynamodb2
def test_dynamodb_streams_2():
conn = boto3.client('dynamodb', region_name='us-east-1')
resp = conn.create_table(
TableName='test-stream-update',
KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1},
)
assert 'StreamSpecification' not in resp['TableDescription']
resp = conn.update_table(
TableName='test-stream-update',
StreamSpecification={
'StreamEnabled': True,
'StreamViewType': 'NEW_IMAGE'
}
)
assert 'StreamSpecification' in resp['TableDescription']
assert resp['TableDescription']['StreamSpecification'] == {
'StreamEnabled': True,
'StreamViewType': 'NEW_IMAGE'
}
assert 'LatestStreamLabel' in resp['TableDescription']
assert 'LatestStreamArn' in resp['TableDescription']

tests/test_dynamodbstreams/test_dynamodbstreams.py

@@ -0,0 +1,234 @@
from __future__ import unicode_literals, print_function
from nose.tools import assert_raises
import boto3
from moto import mock_dynamodb2, mock_dynamodbstreams
class TestCore():
stream_arn = None
mocks = []
def setup(self):
self.mocks = [mock_dynamodb2(), mock_dynamodbstreams()]
for m in self.mocks:
m.start()
# create a table with a stream
conn = boto3.client('dynamodb', region_name='us-east-1')
resp = conn.create_table(
TableName='test-streams',
KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
AttributeDefinitions=[{'AttributeName': 'id',
'AttributeType': 'S'}],
ProvisionedThroughput={'ReadCapacityUnits': 1,
'WriteCapacityUnits': 1},
StreamSpecification={
'StreamEnabled': True,
'StreamViewType': 'NEW_AND_OLD_IMAGES'
}
)
self.stream_arn = resp['TableDescription']['LatestStreamArn']
def teardown(self):
conn = boto3.client('dynamodb', region_name='us-east-1')
conn.delete_table(TableName='test-streams')
self.stream_arn = None
for m in self.mocks:
m.stop()
def test_verify_stream(self):
conn = boto3.client('dynamodb', region_name='us-east-1')
resp = conn.describe_table(TableName='test-streams')
assert 'LatestStreamArn' in resp['Table']
def test_describe_stream(self):
conn = boto3.client('dynamodbstreams', region_name='us-east-1')
resp = conn.describe_stream(StreamArn=self.stream_arn)
assert 'StreamDescription' in resp
desc = resp['StreamDescription']
assert desc['StreamArn'] == self.stream_arn
assert desc['TableName'] == 'test-streams'
def test_list_streams(self):
conn = boto3.client('dynamodbstreams', region_name='us-east-1')
resp = conn.list_streams()
assert resp['Streams'][0]['StreamArn'] == self.stream_arn
resp = conn.list_streams(TableName='no-stream')
assert not resp['Streams']
def test_get_shard_iterator(self):
conn = boto3.client('dynamodbstreams', region_name='us-east-1')
resp = conn.describe_stream(StreamArn=self.stream_arn)
shard_id = resp['StreamDescription']['Shards'][0]['ShardId']
resp = conn.get_shard_iterator(
StreamArn=self.stream_arn,
ShardId=shard_id,
ShardIteratorType='TRIM_HORIZON'
)
assert 'ShardIterator' in resp
def test_get_records_empty(self):
conn = boto3.client('dynamodbstreams', region_name='us-east-1')
resp = conn.describe_stream(StreamArn=self.stream_arn)
shard_id = resp['StreamDescription']['Shards'][0]['ShardId']
resp = conn.get_shard_iterator(
StreamArn=self.stream_arn,
ShardId=shard_id,
ShardIteratorType='LATEST'
)
iterator_id = resp['ShardIterator']
resp = conn.get_records(ShardIterator=iterator_id)
assert 'Records' in resp
assert len(resp['Records']) == 0
def test_get_records_seq(self):
conn = boto3.client('dynamodb', region_name='us-east-1')
conn.put_item(
TableName='test-streams',
Item={
'id': {'S': 'entry1'},
'first_col': {'S': 'foo'}
}
)
conn.put_item(
TableName='test-streams',
Item={
'id': {'S': 'entry1'},
'first_col': {'S': 'bar'},
'second_col': {'S': 'baz'}
}
)
conn.delete_item(
TableName='test-streams',
Key={'id': {'S': 'entry1'}}
)
conn = boto3.client('dynamodbstreams', region_name='us-east-1')
resp = conn.describe_stream(StreamArn=self.stream_arn)
shard_id = resp['StreamDescription']['Shards'][0]['ShardId']
resp = conn.get_shard_iterator(
StreamArn=self.stream_arn,
ShardId=shard_id,
ShardIteratorType='TRIM_HORIZON'
)
iterator_id = resp['ShardIterator']
resp = conn.get_records(ShardIterator=iterator_id)
assert len(resp['Records']) == 3
assert resp['Records'][0]['eventName'] == 'INSERT'
assert resp['Records'][1]['eventName'] == 'MODIFY'
assert resp['Records'][2]['eventName'] == 'DELETE'
# now try fetching from the next shard iterator, it should be
# empty
resp = conn.get_records(ShardIterator=resp['NextShardIterator'])
assert len(resp['Records']) == 0
class TestEdges():
mocks = []
def setup(self):
self.mocks = [mock_dynamodb2(), mock_dynamodbstreams()]
for m in self.mocks:
m.start()
def teardown(self):
for m in self.mocks:
m.stop()
def test_enable_stream_on_table(self):
conn = boto3.client('dynamodb', region_name='us-east-1')
resp = conn.create_table(
TableName='test-streams',
KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
AttributeDefinitions=[{'AttributeName': 'id',
'AttributeType': 'S'}],
ProvisionedThroughput={'ReadCapacityUnits': 1,
'WriteCapacityUnits': 1}
)
assert 'StreamSpecification' not in resp['TableDescription']
resp = conn.update_table(
TableName='test-streams',
StreamSpecification={
'StreamViewType': 'KEYS_ONLY'
}
)
assert 'StreamSpecification' in resp['TableDescription']
assert resp['TableDescription']['StreamSpecification'] == {
'StreamEnabled': True,
'StreamViewType': 'KEYS_ONLY'
}
assert 'LatestStreamLabel' in resp['TableDescription']
# now try to enable it again
with assert_raises(conn.exceptions.ResourceInUseException):
resp = conn.update_table(
TableName='test-streams',
StreamSpecification={
'StreamViewType': 'OLD_IMAGES'
}
)
def test_stream_with_range_key(self):
dyn = boto3.client('dynamodb', region_name='us-east-1')
resp = dyn.create_table(
TableName='test-streams',
KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'},
{'AttributeName': 'color', 'KeyType': 'RANGE'}],
AttributeDefinitions=[{'AttributeName': 'id',
'AttributeType': 'S'},
{'AttributeName': 'color',
'AttributeType': 'S'}],
ProvisionedThroughput={'ReadCapacityUnits': 1,
'WriteCapacityUnits': 1},
StreamSpecification={
'StreamViewType': 'NEW_IMAGES'
}
)
stream_arn = resp['TableDescription']['LatestStreamArn']
streams = boto3.client('dynamodbstreams', region_name='us-east-1')
resp = streams.describe_stream(StreamArn=stream_arn)
shard_id = resp['StreamDescription']['Shards'][0]['ShardId']
resp = streams.get_shard_iterator(
StreamArn=stream_arn,
ShardId=shard_id,
ShardIteratorType='LATEST'
)
iterator_id = resp['ShardIterator']
dyn.put_item(
TableName='test-streams',
Item={'id': {'S': 'row1'}, 'color': {'S': 'blue'}}
)
dyn.put_item(
TableName='test-streams',
Item={'id': {'S': 'row2'}, 'color': {'S': 'green'}}
)
resp = streams.get_records(ShardIterator=iterator_id)
assert len(resp['Records']) == 2
assert resp['Records'][0]['eventName'] == 'INSERT'
assert resp['Records'][1]['eventName'] == 'INSERT'