diff --git a/moto/s3/models.py b/moto/s3/models.py
index 0983631eb..99ca233ed 100644
--- a/moto/s3/models.py
+++ b/moto/s3/models.py
@@ -2048,6 +2048,7 @@ class S3Backend(BaseBackend, CloudWatchMetricProvider):
         lock_legal_status: Optional[str] = None,
         lock_until: Optional[str] = None,
         checksum_value: Optional[str] = None,
+        request_method: Optional[str] = "PUT",
     ) -> FakeKey:
         if storage is not None and storage not in STORAGE_CLASS:
             raise InvalidStorageClass(storage=storage)
@@ -2096,9 +2097,18 @@ class S3Backend(BaseBackend, CloudWatchMetricProvider):
         keys = [new_key]
         bucket.keys.setlist(key_name, keys)
 
+        # Send event notification
+        if request_method == "POST":
+            notify_event_name = (
+                notifications.S3NotificationEvent.OBJECT_CREATED_POST_EVENT
+            )
+        else:  # PUT request
+            notify_event_name = (
+                notifications.S3NotificationEvent.OBJECT_CREATED_PUT_EVENT
+            )
         notifications.send_event(
             self.account_id,
-            notifications.S3NotificationEvent.OBJECT_CREATED_PUT_EVENT,
+            notify_event_name,
             bucket,
             new_key,
         )
diff --git a/moto/s3/responses.py b/moto/s3/responses.py
index 575b5bcc7..65c310814 100644
--- a/moto/s3/responses.py
+++ b/moto/s3/responses.py
@@ -1116,7 +1116,9 @@ class S3Response(BaseResponse):
         else:
             status_code = 204
 
-        new_key = self.backend.put_object(bucket_name, key, f)
+        new_key = self.backend.put_object(
+            bucket_name, key, f, request_method=request.method
+        )
 
         if self.querystring.get("acl"):
             acl = get_canned_acl(self.querystring["acl"][0])
diff --git a/tests/test_s3/test_s3_eventbridge_integration.py b/tests/test_s3/test_s3_eventbridge_integration.py
index 40854e6a5..374631ff4 100644
--- a/tests/test_s3/test_s3_eventbridge_integration.py
+++ b/tests/test_s3/test_s3_eventbridge_integration.py
@@ -1,16 +1,21 @@
 import json
+from io import BytesIO
+from unittest import SkipTest
 from uuid import uuid4
 
 import boto3
+import requests
 
-from moto import mock_aws
+from moto import mock_aws, settings
 from moto.core import DEFAULT_ACCOUNT_ID as ACCOUNT_ID
+from tests.test_s3.test_s3_multipart import reduced_min_part_size
 
 REGION_NAME = "us-east-1"
+REDUCED_PART_SIZE = 256
 
 
 @mock_aws
-def test_pub_object_notification():
+def test_put_object_notification_ObjectCreated_PUT():
     s3_res = boto3.resource("s3", region_name=REGION_NAME)
     s3_client = boto3.client("s3", region_name=REGION_NAME)
     events_client = boto3.client("events", region_name=REGION_NAME)
@@ -57,3 +62,67 @@
     assert event_message["region"] == REGION_NAME
     assert event_message["detail"]["bucket"]["name"] == bucket_name
     assert event_message["detail"]["reason"] == "ObjectCreated"
+
+
+@mock_aws
+@reduced_min_part_size
+def test_put_object_notification_ObjectCreated_POST():
+    if not settings.TEST_DECORATOR_MODE:
+        raise SkipTest("Doesn't quite work right with the Proxy or Server")
+
+    s3_res = boto3.resource("s3", region_name=REGION_NAME)
+    s3_client = boto3.client("s3", region_name=REGION_NAME)
+    events_client = boto3.client("events", region_name=REGION_NAME)
+    logs_client = boto3.client("logs", region_name=REGION_NAME)
+
+    rule_name = "test-rule"
+    events_client.put_rule(
+        Name=rule_name, EventPattern=json.dumps({"account": [ACCOUNT_ID]})
+    )
+    log_group_name = "/test-group"
+    logs_client.create_log_group(logGroupName=log_group_name)
+    events_client.put_targets(
+        Rule=rule_name,
+        Targets=[
+            {
+                "Id": "test",
+                "Arn": f"arn:aws:logs:{REGION_NAME}:{ACCOUNT_ID}:log-group:{log_group_name}",
+            }
+        ],
+    )
+
+    # Create S3 bucket
+    bucket_name = str(uuid4())
+    s3_res.create_bucket(Bucket=bucket_name)
+
+    # Put bucket notification event bridge
+    s3_client.put_bucket_notification_configuration(
+        Bucket=bucket_name,
+        NotificationConfiguration={"EventBridgeConfiguration": {}},
+    )
+
+    ###
+    # multipart/formdata POST request (this request is processed in S3Response._bucket_response_post)
+    ###
+    content = b"Hello, this is a sample content for the multipart upload."
+    object_key = "test-key"
+
+    _ = requests.post(
+        f"https://{bucket_name}.s3.amazonaws.com/",
+        data={"key": object_key},
+        files={"file": ("tmp.txt", BytesIO(content))},
+    )
+
+    events = sorted(
+        logs_client.filter_log_events(logGroupName=log_group_name)["events"],
+        key=lambda item: item["eventId"],
+    )
+    assert len(events) == 1
+    event_message = json.loads(events[0]["message"])
+    assert event_message["detail-type"] == "Object Created"
+    assert event_message["source"] == "aws.s3"
+    assert event_message["account"] == ACCOUNT_ID
+    assert event_message["region"] == REGION_NAME
+    assert event_message["detail"]["bucket"]["name"] == bucket_name
+    assert event_message["detail"]["object"]["key"] == object_key
+    assert event_message["detail"]["reason"] == "ObjectCreated"
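
The EventBridge test above only confirms that an "Object Created" event is emitted; the PUT/POST distinction introduced in put_object mainly matters for bucket notification configurations that filter on a specific event type. The sketch below is not part of this PR and is only a minimal illustration of that idea: it subscribes an SQS queue to s3:ObjectCreated:Post only, performs the same browser-style form POST as the new test, and looks for a matching record in the queue. The helper name, the queue name, the exact shape of the delivered message body ({"Records": [{"eventName": ...}]}), and the assumption that moto's SQS notification path filters on the configured event types are all mine, not taken from the diff. Like the PR's test, it also assumes decorator mode, where moto intercepts plain requests calls to the S3 endpoint.

import json
from io import BytesIO
from uuid import uuid4

import boto3
import requests

from moto import mock_aws

REGION_NAME = "us-east-1"


@mock_aws
def check_post_only_queue_notification():
    """Hypothetical helper (not part of this PR) sketching a POST-only notification filter."""
    s3_client = boto3.client("s3", region_name=REGION_NAME)
    sqs_client = boto3.client("sqs", region_name=REGION_NAME)

    bucket_name = str(uuid4())
    s3_client.create_bucket(Bucket=bucket_name)

    # Assumed queue name; any name works under moto.
    queue_url = sqs_client.create_queue(QueueName="post-upload-events")["QueueUrl"]
    queue_arn = sqs_client.get_queue_attributes(
        QueueUrl=queue_url, AttributeNames=["QueueArn"]
    )["Attributes"]["QueueArn"]

    # Subscribe the queue to POST uploads only; plain PUT uploads should not match.
    s3_client.put_bucket_notification_configuration(
        Bucket=bucket_name,
        NotificationConfiguration={
            "QueueConfigurations": [
                {"QueueArn": queue_arn, "Events": ["s3:ObjectCreated:Post"]}
            ]
        },
    )

    # Browser-style multipart/form-data upload, as in the PR's new test. With this
    # change it is dispatched as OBJECT_CREATED_POST_EVENT instead of the PUT event.
    requests.post(
        f"https://{bucket_name}.s3.amazonaws.com/",
        data={"key": "posted-key"},
        files={"file": ("tmp.txt", BytesIO(b"posted via form"))},
    )

    # Drain the queue and look for an ObjectCreated:Post record. The body schema is
    # an assumption; unrelated messages (e.g. a test event) are ignored.
    messages = sqs_client.receive_message(
        QueueUrl=queue_url, MaxNumberOfMessages=10
    ).get("Messages", [])
    event_names = [
        record.get("eventName", "")
        for msg in messages
        for record in json.loads(msg["Body"]).get("Records", [])
    ]
    assert any(name.endswith("ObjectCreated:Post") for name in event_names), event_names


if __name__ == "__main__":
    check_post_only_queue_notification()

Before this change, a form POST handled by S3Response._bucket_response_post was always reported as s3:ObjectCreated:Put, so a configuration subscribed only to s3:ObjectCreated:Post would never fire; the request_method dispatch in put_object is what makes the filter above meaningful.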