Events - send events to (subset of) targets (#5169)

This commit is contained in:
Bert Blommers 2022-05-25 23:03:39 +00:00 committed by GitHub
parent e8ea958444
commit 5a34a6599d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 431 additions and 0 deletions

View File

@ -12,6 +12,8 @@
events
======
.. autoclass:: moto.events.models.EventsBackend
|start-h3| Example usage |end-h3|
.. sourcecode:: python

View File

@ -824,6 +824,9 @@ class EventPattern:
self._raw_pattern = raw_pattern
self._pattern = pattern
def get_pattern(self):
return self._pattern
def matches_event(self, event):
if not self._pattern:
return True
@ -921,6 +924,14 @@ class EventPatternParser:
class EventsBackend(BaseBackend):
    """
    When an event occurs, the appropriate targets are triggered for a subset of use cases.

    Supported events: S3:CreateBucket

    Supported targets: AWSLambda functions
    """

    # A valid AWS account id (1-12 digits) or the "*" wildcard.
    ACCOUNT_ID = re.compile(r"^(\d{1,12}|\*)$")
    # Permission statement ids: 1-64 chars of letters, digits, "-" or "_".
    STATEMENT_ID = re.compile(r"^[a-zA-Z0-9-_]{1,64}$")
    # Schedule expressions of the form "cron(...)".
    _CRON_REGEX = re.compile(r"^cron\(.*\)")

View File

@ -0,0 +1,65 @@
import json
# Template for the EventBridge event emitted when an S3 bucket is created.
# "region", "resources" and "detail" are placeholders filled in per
# notification by _send_safe_notification; id/time/account are fixed dummies.
_EVENT_S3_OBJECT_CREATED = {
    "version": "0",
    "id": "17793124-05d4-b198-2fde-7ededc63b103",
    "detail-type": "Object Created",
    "source": "aws.s3",
    "account": "123456789012",
    "time": "2021-11-12T00:00:00Z",
    "region": None,
    "resources": [],
    "detail": None,
}
def send_notification(source, event_name, region, resources, detail):
    """Send an internal event to EventBridge targets, best-effort.

    Delegates to ``_send_safe_notification`` and deliberately swallows any
    error: a notification failure must never break the operation (e.g.
    CreateBucket) that triggered it.
    """
    try:
        _send_safe_notification(source, event_name, region, resources, detail)
    except Exception:  # noqa
        # If anything goes wrong, we should never fail.
        # Catch Exception (not a bare ``except:``) so that SystemExit and
        # KeyboardInterrupt still propagate.
        pass
def _send_safe_notification(source, event_name, region, resources, detail):
    """Build the event for ``(source, event_name)`` and deliver it to the
    targets of every enabled, matching EventBridge rule in every backend.

    Only the S3 CreateBucket event is currently understood; any other
    source/event combination is silently ignored.  Only targets whose ARN
    is a Lambda function are invoked.
    """
    from .models import events_backends

    event = None
    if source == "aws.s3" and event_name == "CreateBucket":
        # Shallow copy is fine: the keys we overwrite are all top-level.
        event = _EVENT_S3_OBJECT_CREATED.copy()
        event["region"] = region
        event["resources"] = resources
        event["detail"] = detail

    if event is None:
        # Unsupported source/event combination - nothing to deliver.
        return

    for backend in events_backends.values():
        applicable_targets = []
        for rule in backend.rules.values():
            if rule.state != "ENABLED":
                continue
            # A rule may have no event pattern at all (get_pattern() returns
            # None); guard against that so a single pattern-less rule does not
            # raise and abort delivery for every remaining rule and backend
            # (the caller swallows exceptions wholesale).
            pattern = rule.event_pattern.get_pattern() or {}
            if source in pattern.get("source", []):
                if event_name in (pattern.get("detail") or {}).get("eventName", []):
                    applicable_targets.extend(rule.targets)

        for target in applicable_targets:
            if target.get("Arn", "").startswith("arn:aws:lambda"):
                _invoke_lambda(target.get("Arn"), event=event)
def _invoke_lambda(fn_arn, event):
    """Invoke the Lambda function behind ``fn_arn``, passing ``event`` as the payload."""
    from moto.awslambda import lambda_backends

    # The region is the fourth component of the ARN:
    #   arn:aws:lambda:<region>:<account>:function:<name>
    region = fn_arn.split(":")[3]
    payload = json.dumps(event)
    backend = lambda_backends[region]
    backend.invoke(
        function_name=fn_arn,
        qualifier=None,
        body=payload,
        headers=dict(),
        response_headers=dict(),
    )

View File

@ -58,6 +58,7 @@ from moto.s3.exceptions import (
from .cloud_formation import cfn_to_api_encryption, is_replacement_update
from . import notifications
from .utils import clean_key_name, _VersionedKeyStore, undo_clean_key_name
from ..events.notifications import send_notification as events_send_notification
from ..settings import get_s3_default_key_buffer_size, S3_UPLOAD_PART_MIN_SIZE
MAX_BUCKET_NAME_LENGTH = 63
@ -1463,6 +1464,23 @@ class S3Backend(BaseBackend, CloudWatchMetricProvider):
new_bucket = FakeBucket(name=bucket_name, region_name=region_name)
self.buckets[bucket_name] = new_bucket
notification_detail = {
"version": "0",
"bucket": {"name": bucket_name},
"request-id": "N4N7GDK58NMKJ12R",
"requester": get_account_id(),
"source-ip-address": "1.2.3.4",
"reason": "PutObject",
}
events_send_notification(
source="aws.s3",
event_name="CreateBucket",
region=region_name,
resources=[f"arn:aws:s3:::{bucket_name}"],
detail=notification_detail,
)
return new_bucket
def list_buckets(self):

View File

@ -0,0 +1,335 @@
import boto3
import json
from moto import mock_events, mock_iam, mock_lambda, mock_logs, mock_s3
from moto.core import ACCOUNT_ID
from ..test_awslambda.utilities import get_test_zip_file1, wait_for_log_msg
@mock_events
@mock_iam
@mock_lambda
@mock_logs
@mock_s3
def test_creating_bucket__invokes_lambda():
    """Creating an S3 bucket fires an EventBridge event that invokes the
    Lambda function targeted by a matching, enabled rule."""
    iam = boto3.client("iam", "us-east-1")
    awslambda = boto3.client("lambda", "us-east-1")
    events = boto3.client("events", "us-east-1")
    s3 = boto3.client("s3", "us-east-1")

    # Role shared by the Lambda function and the rule target
    role_arn = iam.create_role(
        RoleName="foobar",
        AssumeRolePolicyDocument="{}",
    )["Role"]["Arn"]

    function_arn = awslambda.create_function(
        FunctionName="foobar",
        Runtime="python3.8",
        Role=role_arn,
        Handler="lambda_function.lambda_handler",
        Code={"ZipFile": get_test_zip_file1()},
    )["FunctionArn"]

    # Enabled rule that matches S3 CreateBucket calls
    events.put_rule(
        Name="foobarrule",
        EventPattern="""{
            "source": [
                "aws.s3"
            ],
            "detail-type": [
                "AWS API Call via CloudTrail"
            ],
            "detail": {
                "eventSource": [
                    "s3.amazonaws.com"
                ],
                "eventName": [
                    "CreateBucket"
                ]
            }
        }""",
        State="ENABLED",
        RoleArn=role_arn,
    )
    events.put_targets(
        Rule="foobarrule",
        Targets=[{"Id": "n/a", "Arn": function_arn, "RoleArn": role_arn}],
    )

    bucket_name = "foobar"
    s3.create_bucket(
        ACL="public-read-write",
        Bucket=bucket_name,
    )

    # The test Lambda logs the event it receives - wait for it to show up
    expected_msg = '"detail-type":"Object Created"'
    log_group = f"/aws/lambda/{bucket_name}"
    msg_showed_up, all_logs = wait_for_log_msg(expected_msg, log_group, wait_time=5)
    assert msg_showed_up, (
        "Lambda was not invoked after creating an S3 bucket. All logs: "
        + str(all_logs)
    )

    # Verify the shape of the event the Lambda received
    event = json.loads([line for line in all_logs if expected_msg in line][-1])
    assert event["detail-type"] == "Object Created"
    assert event["source"] == "aws.s3"
    assert event["account"] == ACCOUNT_ID
    assert "time" in event
    assert event["region"] == "us-east-1"
    assert event["resources"] == [f"arn:aws:s3:::{bucket_name}"]
@mock_events
@mock_iam
@mock_lambda
@mock_logs
@mock_s3
def test_create_disabled_rule():
    """A DISABLED rule must not trigger its Lambda target on CreateBucket."""
    iam = boto3.client("iam", "us-east-1")
    awslambda = boto3.client("lambda", "us-east-1")
    events = boto3.client("events", "us-east-1")
    s3 = boto3.client("s3", "us-east-1")

    role_arn = iam.create_role(
        RoleName="foobar",
        AssumeRolePolicyDocument="{}",
    )["Role"]["Arn"]

    function_arn = awslambda.create_function(
        FunctionName="foobar",
        Runtime="python3.8",
        Role=role_arn,
        Handler="lambda_function.lambda_handler",
        Code={"ZipFile": get_test_zip_file1()},
    )["FunctionArn"]

    # Identical pattern to the enabled-rule test, but State=DISABLED
    events.put_rule(
        Name="foobarrule",
        EventPattern="""{
            "source": [
                "aws.s3"
            ],
            "detail-type": [
                "AWS API Call via CloudTrail"
            ],
            "detail": {
                "eventSource": [
                    "s3.amazonaws.com"
                ],
                "eventName": [
                    "CreateBucket"
                ]
            }
        }""",
        State="DISABLED",
        RoleArn=role_arn,
    )
    events.put_targets(
        Rule="foobarrule",
        Targets=[{"Id": "n/a", "Arn": function_arn, "RoleArn": role_arn}],
    )

    bucket_name = "foobar"
    s3.create_bucket(
        ACL="public-read-write",
        Bucket=bucket_name,
    )

    # The Lambda should never have been invoked, so no log message appears
    expected_msg = '"detail-type":"Object Created"'
    log_group = f"/aws/lambda/{bucket_name}"
    msg_showed_up, _ = wait_for_log_msg(expected_msg, log_group, wait_time=5)
    assert msg_showed_up is False
@mock_events
@mock_iam
@mock_logs
@mock_s3
def test_create_rule_for_unsupported_target_arn():
    """A matching rule whose target is not a Lambda ARN is silently skipped."""
    iam = boto3.client("iam", "us-east-1")
    events = boto3.client("events", "us-east-1")
    s3 = boto3.client("s3", "us-east-1")

    role_arn = iam.create_role(
        RoleName="foobar",
        AssumeRolePolicyDocument="{}",
    )["Role"]["Arn"]

    events.put_rule(
        Name="foobarrule",
        EventPattern="""{
            "source": [
                "aws.s3"
            ],
            "detail-type": [
                "AWS API Call via CloudTrail"
            ],
            "detail": {
                "eventSource": [
                    "s3.amazonaws.com"
                ],
                "eventName": [
                    "CreateBucket"
                ]
            }
        }""",
        State="ENABLED",
        RoleArn=role_arn,
    )
    # Target ARN is not an arn:aws:lambda ARN, so nothing should be invoked
    events.put_targets(
        Rule="foobarrule",
        Targets=[{"Id": "n/a", "Arn": "arn:aws:unknown", "RoleArn": role_arn}],
    )

    bucket_name = "foobar"
    s3.create_bucket(
        ACL="public-read-write",
        Bucket=bucket_name,
    )

    expected_msg = '"detail-type":"Object Created"'
    log_group = f"/aws/lambda/{bucket_name}"
    msg_showed_up, _ = wait_for_log_msg(expected_msg, log_group, wait_time=5)
    assert msg_showed_up is False
@mock_events
@mock_iam
@mock_lambda
@mock_logs
@mock_s3
def test_creating_bucket__but_invoke_lambda_on_create_object():
    """A rule matching a different eventName (CreateObject) must not fire
    when a bucket is created."""
    iam = boto3.client("iam", "us-east-1")
    awslambda = boto3.client("lambda", "us-east-1")
    events = boto3.client("events", "us-east-1")
    s3 = boto3.client("s3", "us-east-1")

    role_arn = iam.create_role(
        RoleName="foobar",
        AssumeRolePolicyDocument="{}",
    )["Role"]["Arn"]

    function_arn = awslambda.create_function(
        FunctionName="foobar",
        Runtime="python3.8",
        Role=role_arn,
        Handler="lambda_function.lambda_handler",
        Code={"ZipFile": get_test_zip_file1()},
    )["FunctionArn"]

    # Pattern listens for CreateObject, not CreateBucket
    events.put_rule(
        Name="foobarrule",
        EventPattern="""{
            "source": [
                "aws.s3"
            ],
            "detail": {
                "eventSource": [
                    "s3.amazonaws.com"
                ],
                "eventName": [
                    "CreateObject"
                ]
            }
        }""",
        State="ENABLED",
        RoleArn=role_arn,
    )
    events.put_targets(
        Rule="foobarrule",
        Targets=[{"Id": "n/a", "Arn": function_arn, "RoleArn": role_arn}],
    )

    bucket_name = "foobar"
    s3.create_bucket(
        ACL="public-read-write",
        Bucket=bucket_name,
    )

    expected_msg = '"detail-type":"Object Created"'
    log_group = f"/aws/lambda/{bucket_name}"
    msg_showed_up, _ = wait_for_log_msg(expected_msg, log_group, wait_time=5)
    assert msg_showed_up is False
@mock_events
@mock_iam
@mock_s3
def test_creating_bucket__succeeds_despite_unknown_lambda():
    """CreateBucket must succeed even when a matching rule points at a
    Lambda ARN that cannot be resolved - notification errors are swallowed
    by the events notification layer."""
    iam_client = boto3.client("iam", "us-east-1")
    events_client = boto3.client("events", "us-east-1")
    s3_client = boto3.client("s3", "us-east-1")

    role = iam_client.create_role(
        RoleName="foobar",
        AssumeRolePolicyDocument="{}",
    )["Role"]

    events_client.put_rule(
        Name="foobarrule",
        EventPattern="""{
            "source": [
                "aws.s3"
            ],
            "detail-type": [
                "AWS API Call via CloudTrail"
            ],
            "detail": {
                "eventSource": [
                    "s3.amazonaws.com"
                ],
                "eventName": [
                    "CreateBucket"
                ]
            }
        }""",
        State="ENABLED",
        RoleArn=role["Arn"],
    )
    # Lambda-like ARN that resolves to no real function
    events_client.put_targets(
        Rule="foobarrule",
        Targets=[
            {
                "Id": "n/a",
                "Arn": "arn:aws:lambda:unknown",
                "RoleArn": role["Arn"],
            }
        ],
    )

    bucket_name = "foobar"
    bucket = s3_client.create_bucket(
        ACL="public-read-write",
        Bucket=bucket_name,
    )
    # The original assertion (response is not None) was vacuous - boto3
    # always returns a dict.  Verify the bucket actually exists instead.
    assert bucket is not None
    s3_client.head_bucket(Bucket=bucket_name)  # raises if the bucket is missing
    all_names = [b["Name"] for b in s3_client.list_buckets()["Buckets"]]
    assert bucket_name in all_names