Add prefix and numeric filtering logic for Archive EventPattern (#3835)
* Add prefix and numeric filtering logic for Archive EventPattern
* Pull EventPattern logic out into class and test logic more directly
* Apply black formatting

Co-authored-by: Tom Noble <tom.noble@bjss.com>
parent 6ea6445f60
commit b138d9956b
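For orientation before the diff: the change teaches moto's EventBridge archive filtering to handle the pattern shapes below. This is a minimal sketch mirroring the `exists`, `prefix`, and `numeric` filters exercised in the new tests further down; it is illustrative, not part of the commit itself.

import json

# Value-list match: event["source"] must be one of the listed strings.
allowed_values_pattern = {"source": ["foo", "bar"]}

# Named filters attached to a nested field.
exists_pattern = {"detail": {"foo": [{"exists": True}]}}
prefix_pattern = {"detail": {"foo": [{"prefix": "bar"}]}}
numeric_pattern = {"detail": {"foo": [{"numeric": [">=", 1, "<", 3]}]}}

# An archive receives its pattern as a JSON string:
pattern_json = json.dumps(numeric_pattern)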
@@ -7,6 +7,7 @@ import warnings
 from collections import namedtuple
 from datetime import datetime
 from enum import Enum, unique
+from operator import lt, le, eq, ge, gt

 from boto3 import Session
 from six import string_types
@@ -94,47 +95,6 @@ class Rule(CloudFormationModel):
             if index is not None:
                 self.targets.pop(index)

-    def _does_event_match_filter(self, event, filter):
-        if not filter:
-            return True
-        items_and_filters = [(event.get(k), v) for k, v in filter.items()]
-        nested_filter_matches = [
-            self._does_event_match_filter(item, nested_filter)
-            for item, nested_filter in items_and_filters
-            if isinstance(nested_filter, dict)
-        ]
-        filter_list_matches = [
-            self._does_item_match_filters(item, filter_list)
-            for item, filter_list in items_and_filters
-            if isinstance(filter_list, list)
-        ]
-        return all(nested_filter_matches + filter_list_matches)
-
-    def _does_item_match_filters(self, item, filters):
-        allowed_values = [value for value in filters if isinstance(value, string_types)]
-        allowed_values_match = item in allowed_values if allowed_values else True
-        print(item, filters, allowed_values)
-        named_filter_matches = [
-            self._does_item_match_named_filter(item, filter)
-            for filter in filters
-            if isinstance(filter, dict)
-        ]
-        return allowed_values_match and all(named_filter_matches)
-
-    def _does_item_match_named_filter(self, item, filter):
-        filter_name, filter_value = list(filter.items())[0]
-        if filter_name == "exists":
-            item_exists = item is not None
-            should_exist = filter_value
-            return item_exists if should_exist else not item_exists
-        else:
-            warnings.warn(
-                "'{}' filter logic unimplemented. defaulting to True".format(
-                    filter_name
-                )
-            )
-            return False
-
     def send_to_targets(self, event_bus_name, event):
         event_bus_name = event_bus_name.split("/")[-1]
         if event_bus_name != self.event_bus_name:
@@ -230,11 +190,8 @@ class Rule(CloudFormationModel):
     def _send_to_events_archive(self, resource_id, event):
         archive_name, archive_uuid = resource_id.split(":")
         archive = events_backends[self.region_name].archives.get(archive_name)
-        pattern = archive.event_pattern
         if archive.uuid == archive_uuid:
-            event = json.loads(json.dumps(event))
-            pattern = json.loads(pattern) if pattern else None
-            if self._does_event_match_filter(event, pattern):
+            if archive.event_pattern.matches_event(event):
                 archive.events.append(event)

     def _send_to_sqs_queue(self, resource_id, event, group_id=None):
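With this hunk, `_send_to_events_archive` no longer decodes and matches the pattern inline; it delegates to the archive's `EventPattern` (the class added later in this diff). A minimal sketch of the new call path, importing the class the same way the updated tests do; the pattern and event values are illustrative:

from moto.events.models import EventPattern

pattern = EventPattern('{"source": ["my-app"]}')  # the raw JSON string stored on the Archive
event = {"source": "my-app", "detail": {"foo": 1}}

if pattern.matches_event(event):
    # this is the point where the Rule appends the event to archive.events
    pass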
@@ -415,7 +372,7 @@ class Archive(CloudFormationModel):
         self.name = name
         self.source_arn = source_arn
         self.description = description
-        self.event_pattern = event_pattern
+        self.event_pattern = EventPattern(event_pattern)
         self.retention = retention if retention else 0

         self.creation_time = unix_time(datetime.utcnow())
@@ -446,7 +403,7 @@ class Archive(CloudFormationModel):
         result = {
             "ArchiveArn": self.arn,
             "Description": self.description,
-            "EventPattern": self.event_pattern,
+            "EventPattern": str(self.event_pattern),
         }
         result.update(self.describe_short())

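The two `Archive` hunks wrap the raw pattern string in `EventPattern` at construction time and serialize it back with `str()` when describing the archive. Two properties worth noting, sketched against the class added below (behaviour inferred from the diff, not a documented API): a missing pattern still matches every event, and the stored JSON round-trips through `__str__`.

from moto.events.models import EventPattern

catch_all = EventPattern(None)            # Archive created without an EventPattern
assert catch_all.matches_event({"source": "anything"})

pattern = EventPattern('{"source": ["foo"]}')
assert str(pattern) == '{"source": ["foo"]}'  # what describe() now returns as "EventPattern"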
@@ -603,6 +560,69 @@ class Replay(BaseModel):
         self.end_time = unix_time(datetime.utcnow())


+class EventPattern:
+    def __init__(self, filter):
+        self._filter = json.loads(filter) if filter else None
+
+    def __str__(self):
+        return json.dumps(self._filter)
+
+    def matches_event(self, event):
+        if not self._filter:
+            return True
+        event = json.loads(json.dumps(event))
+        return self._does_event_match(event, self._filter)
+
+    def _does_event_match(self, event, filter):
+        items_and_filters = [(event.get(k), v) for k, v in filter.items()]
+        nested_filter_matches = [
+            self._does_event_match(item, nested_filter)
+            for item, nested_filter in items_and_filters
+            if isinstance(nested_filter, dict)
+        ]
+        filter_list_matches = [
+            self._does_item_match_filters(item, filter_list)
+            for item, filter_list in items_and_filters
+            if isinstance(filter_list, list)
+        ]
+        return all(nested_filter_matches + filter_list_matches)
+
+    def _does_item_match_filters(self, item, filters):
+        allowed_values = [value for value in filters if isinstance(value, string_types)]
+        allowed_values_match = item in allowed_values if allowed_values else True
+        named_filter_matches = [
+            self._does_item_match_named_filter(item, filter)
+            for filter in filters
+            if isinstance(filter, dict)
+        ]
+        return allowed_values_match and all(named_filter_matches)
+
+    def _does_item_match_named_filter(self, item, filter):
+        filter_name, filter_value = list(filter.items())[0]
+        if filter_name == "exists":
+            item_exists = item is not None
+            should_exist = filter_value
+            return item_exists if should_exist else not item_exists
+        if filter_name == "prefix":
+            prefix = filter_value
+            return item.startswith(prefix)
+        if filter_name == "numeric":
+            as_function = {"<": lt, "<=": le, "=": eq, ">=": ge, ">": gt}
+            operators_and_values = zip(filter_value[::2], filter_value[1::2])
+            numeric_matches = [
+                as_function[operator](item, value)
+                for operator, value in operators_and_values
+            ]
+            return all(numeric_matches)
+        else:
+            warnings.warn(
+                "'{}' filter logic unimplemented. defaulting to True".format(
+                    filter_name
+                )
+            )
+            return True
+
+
 class EventsBackend(BaseBackend):
     ACCOUNT_ID = re.compile(r"^(\d{1,12}|\*)$")
     STATEMENT_ID = re.compile(r"^[a-zA-Z0-9-_]{1,64}$")
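In the new `EventPattern` class, the `numeric` branch pairs comparison operators with their operands by slicing the filter value: even indices are operators, odd indices are the numbers to compare against. The remaining hunks update the events tests accordingly. A standalone illustration of that pairing (plain Python, independent of moto):

from operator import lt, le, eq, ge, gt

as_function = {"<": lt, "<=": le, "=": eq, ">=": ge, ">": gt}
filter_value = [">", 1, "<=", 3]  # as in {"numeric": [">", 1, "<=", 3]}

# Even indices are operators, odd indices are operands.
operators_and_values = list(zip(filter_value[::2], filter_value[1::2]))
assert operators_and_values == [(">", 1), ("<=", 3)]

# 2 satisfies both "> 1" and "<= 3"; 1 fails the first check.
assert all(as_function[op](2, value) for op, value in operators_and_values)
assert not all(as_function[op](1, value) for op, value in operators_and_values)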
@@ -14,6 +14,7 @@ from moto import mock_logs
 from moto.core import ACCOUNT_ID
 from moto.core.utils import iso_8601_datetime_without_milliseconds
 from moto.events import mock_events
+from moto.events.models import EventPattern

 RULES = [
     {"Name": "test1", "ScheduleExpression": "rate(5 minutes)"},
@@ -1517,85 +1518,86 @@ def test_archive_event_with_bus_arn():
     response["SizeBytes"].should.be.greater_than(0)


-@mock_events
 def test_archive_with_allowed_values_event_filter():
-    client = boto3.client("events", "eu-central-1")
-    event_bus_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(
-        ACCOUNT_ID
-    )
-    client.create_archive(
-        ArchiveName="with-allowed-values-filter",
-        EventSourceArn=event_bus_arn,
-        EventPattern=json.dumps({"source": ["foo", "bar"]}),
-    )
-    matching_foo_event = {"Source": "foo", "DetailType": "", "Detail": "{}"}
-    matching_bar_event = {"Source": "bar", "DetailType": "", "Detail": "{}"}
-    non_matching_event = {"Source": "baz", "DetailType": "", "Detail": "{}"}
-    response = client.put_events(
-        Entries=[matching_foo_event, matching_bar_event, non_matching_event]
-    )
-    response["FailedEntryCount"].should.equal(0)
-    response = client.describe_archive(ArchiveName="with-allowed-values-filter")
-    response["EventCount"].should.equal(2)
+    pattern = EventPattern(json.dumps({"source": ["foo", "bar"]}))
+    assert pattern.matches_event({"source": "foo"})
+    assert pattern.matches_event({"source": "bar"})
+    assert not pattern.matches_event({"source": "baz"})


-@mock_events
 def test_archive_with_nested_event_filter():
-    client = boto3.client("events", "eu-central-1")
-    event_bus_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(
-        ACCOUNT_ID
-    )
-    client.create_archive(
-        ArchiveName="with-nested-filter",
-        EventSourceArn=event_bus_arn,
-        EventPattern=json.dumps({"detail": {"foo": ["bar"]}}),
-    )
-    matching_event = {"Source": "", "DetailType": "", "Detail": '{"foo": "bar"}'}
-    not_matching_event = {"Source": "", "DetailType": "", "Detail": '{"foo": "baz"}'}
-    response = client.put_events(Entries=[matching_event, not_matching_event])
-    response["FailedEntryCount"].should.equal(0)
-    response = client.describe_archive(ArchiveName="with-nested-filter")
-    response["EventCount"].should.equal(1)
+    pattern = EventPattern(json.dumps({"detail": {"foo": ["bar"]}}))
+    assert pattern.matches_event({"detail": {"foo": "bar"}})
+    assert not pattern.matches_event({"detail": {"foo": "baz"}})


-@mock_events
 def test_archive_with_exists_event_filter():
-    client = boto3.client("events", "eu-central-1")
-    event_bus_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(
-        ACCOUNT_ID
-    )
-    client.create_archive(
-        ArchiveName="foo-exists-filter",
-        EventSourceArn=event_bus_arn,
-        EventPattern=json.dumps({"detail": {"foo": [{"exists": True}]}}),
-    )
-    client.create_archive(
-        ArchiveName="foo-not-exists-filter",
-        EventSourceArn=event_bus_arn,
-        EventPattern=json.dumps({"detail": {"foo": [{"exists": False}]}}),
-    )
-    client.create_archive(
-        ArchiveName="bar-exists-filter",
-        EventSourceArn=event_bus_arn,
-        EventPattern=json.dumps({"detail": {"bar": [{"exists": True}]}}),
-    )
-    client.create_archive(
-        ArchiveName="bar-not-exists-filter",
-        EventSourceArn=event_bus_arn,
-        EventPattern=json.dumps({"detail": {"bar": [{"exists": False}]}}),
-    )
-    foo_exists_event = {"Source": "", "DetailType": "", "Detail": '{"foo": "bar"}'}
-    foo_not_exists_event = {"Source": "", "DetailType": "", "Detail": "{}"}
-    response = client.put_events(Entries=[foo_exists_event, foo_not_exists_event])
-    response["FailedEntryCount"].should.equal(0)
-    response = client.describe_archive(ArchiveName="foo-exists-filter")
-    response["EventCount"].should.equal(1)
-    response = client.describe_archive(ArchiveName="foo-not-exists-filter")
-    response["EventCount"].should.equal(1)
-    response = client.describe_archive(ArchiveName="bar-exists-filter")
-    response["EventCount"].should.equal(0)
-    response = client.describe_archive(ArchiveName="bar-not-exists-filter")
-    response["EventCount"].should.equal(2)
+    foo_exists = EventPattern(json.dumps({"detail": {"foo": [{"exists": True}]}}))
+    assert foo_exists.matches_event({"detail": {"foo": "bar"}})
+    assert not foo_exists.matches_event({"detail": {}})
+
+    foo_not_exists = EventPattern(json.dumps({"detail": {"foo": [{"exists": False}]}}))
+    assert not foo_not_exists.matches_event({"detail": {"foo": "bar"}})
+    assert foo_not_exists.matches_event({"detail": {}})
+
+    bar_exists = EventPattern(json.dumps({"detail": {"bar": [{"exists": True}]}}))
+    assert not bar_exists.matches_event({"detail": {"foo": "bar"}})
+    assert not bar_exists.matches_event({"detail": {}})
+
+    bar_not_exists = EventPattern(json.dumps({"detail": {"bar": [{"exists": False}]}}))
+    assert bar_not_exists.matches_event({"detail": {"foo": "bar"}})
+    assert bar_not_exists.matches_event({"detail": {}})
+
+
+def test_archive_with_prefix_event_filter():
+    pattern = EventPattern(json.dumps({"detail": {"foo": [{"prefix": "bar"}]}}))
+    assert pattern.matches_event({"detail": {"foo": "bar"}})
+    assert pattern.matches_event({"detail": {"foo": "bar!"}})
+    assert not pattern.matches_event({"detail": {"foo": "ba"}})
+
+
+@pytest.mark.parametrize(
+    "operator, compare_to, should_match, should_not_match",
+    [
+        ("<", 1, [0], [1, 2]),
+        ("<=", 1, [0, 1], [2]),
+        ("=", 1, [1], [0, 2]),
+        (">", 1, [2], [0, 1]),
+        (">=", 1, [1, 2], [0]),
+    ],
+)
+def test_archive_with_single_numeric_event_filter(
+    operator, compare_to, should_match, should_not_match
+):
+    pattern = EventPattern(
+        json.dumps({"detail": {"foo": [{"numeric": [operator, compare_to]}]}})
+    )
+    for number in should_match:
+        assert pattern.matches_event({"detail": {"foo": number}})
+    for number in should_not_match:
+        assert not pattern.matches_event({"detail": {"foo": number}})
+
+
+def test_archive_with_multi_numeric_event_filter():
+    events = [{"detail": {"foo": number}} for number in range(5)]
+
+    one_or_two = EventPattern(
+        json.dumps({"detail": {"foo": [{"numeric": [">=", 1, "<", 3]}]}})
+    )
+    assert not one_or_two.matches_event(events[0])
+    assert one_or_two.matches_event(events[1])
+    assert one_or_two.matches_event(events[2])
+    assert not one_or_two.matches_event(events[3])
+    assert not one_or_two.matches_event(events[4])
+
+    two_or_three = EventPattern(
+        json.dumps({"detail": {"foo": [{"numeric": [">", 1, "<=", 3]}]}})
+    )
+    assert not two_or_three.matches_event(events[0])
+    assert not two_or_three.matches_event(events[1])
+    assert two_or_three.matches_event(events[2])
+    assert two_or_three.matches_event(events[3])
+    assert not two_or_three.matches_event(events[4])


 @mock_events
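The rewritten tests construct `EventPattern` directly instead of pushing events through a mocked client, so they no longer need `@mock_events` or an archive at all. A hedged example of adding another case in the same style (the combined prefix/numeric filter and the event values are illustrative, not part of the commit):

def test_archive_with_prefix_and_numeric_event_filter():
    pattern = EventPattern(
        json.dumps(
            {"detail": {"foo": [{"prefix": "ba"}], "bar": [{"numeric": ["<", 3]}]}}
        )
    )
    assert pattern.matches_event({"detail": {"foo": "bar", "bar": 2}})
    assert not pattern.matches_event({"detail": {"foo": "qux", "bar": 2}})
    assert not pattern.matches_event({"detail": {"foo": "bar", "bar": 5}})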