diff --git a/moto/s3/models.py b/moto/s3/models.py
index a843d3692..f61d1fb37 100644
--- a/moto/s3/models.py
+++ b/moto/s3/models.py
@@ -1456,6 +1456,37 @@ class S3Backend(BaseBackend, CloudWatchMetricProvider):
                     key.dispose()
         super().reset()
 
+    def log_incoming_request(self, request, bucket_name):
+        """
+        Process incoming requests
+        If the request is made to a bucket with logging enabled, logs will be persisted in the appropriate bucket
+        """
+        try:
+            bucket = self.get_bucket(bucket_name)
+            target_bucket = bucket.logging["TargetBucket"]
+            prefix = bucket.logging.get("TargetPrefix", "")
+
+            now = datetime.datetime.now()
+            file_name = now.strftime(
+                f"%Y-%m-%d-%H-%M-%S-{random.get_random_hex(16).upper()}"
+            )
+            date = now.strftime("%d/%b/%Y:%H:%M:%S +0000")
+            source_ip = "0.0.0.0"
+            source_iam = "-"  # Can be the user ARN, or empty
+            unknown_hex = random.get_random_hex(16)
+            source = f"REST.{request.method}.BUCKET"  # REST/CLI/CONSOLE
+            key_name = "-"
+            path = urllib.parse.urlparse(request.url).path or "-"
+            http_line = f"{request.method} {path} HTTP/1.1"
+            response = '200 - - 1 2 "-"'
+            user_agent = f"{request.headers.get('User-Agent')} prompt/off command/s3api.put-object"
+            content = f"{random.get_random_hex(64)} originbucket [{date}] {source_ip} {source_iam} {unknown_hex} {source} {key_name} {http_line} {response} {user_agent} - c29tZSB1bmtub3duIGRhdGE= SigV4 ECDHE-RSA-AES128-GCM-SHA256 AuthHeader {request.url.split('amazonaws.com')[0]}amazonaws.com TLSv1.2 - -"
+            self.put_object(target_bucket, prefix + file_name, value=content)
+        except:  # noqa: E722 Do not use bare except
+            # log delivery is not guaranteed in AWS, so if anything goes wrong, it's 'safe' to just ignore it
+            # Realistically, we should only get here when the bucket does not exist, or logging is not enabled
+            pass
+
     @property
     def _url_module(self):
         # The urls-property can be different depending on env variables
@@ -1909,9 +1940,6 @@ class S3Backend(BaseBackend, CloudWatchMetricProvider):
         bucket.set_cors(cors_rules)
 
     def put_bucket_logging(self, bucket_name, logging_config):
-        """
-        The logging functionality itself is not yet implemented - we only store the configuration for now.
-        """
         bucket = self.get_bucket(bucket_name)
         bucket.set_logging(logging_config, self)
 
diff --git a/moto/s3/responses.py b/moto/s3/responses.py
index 1a71f8382..bb281418c 100644
--- a/moto/s3/responses.py
+++ b/moto/s3/responses.py
@@ -247,6 +247,8 @@ class S3Response(BaseResponse):
     @amzn_request_id
     def bucket_response(self, request, full_url, headers):
         self.setup_class(request, full_url, headers, use_raw_body=True)
+        bucket_name = self.parse_bucket_name_from_url(request, full_url)
+        self.backend.log_incoming_request(request, bucket_name)
         try:
             response = self._bucket_response(request, full_url)
         except S3ClientError as s3error:
@@ -1112,6 +1114,8 @@ class S3Response(BaseResponse):
     def key_response(self, request, full_url, headers):
         # Key and Control are lumped in because splitting out the regex is too much of a pain :/
         self.setup_class(request, full_url, headers, use_raw_body=True)
+        bucket_name = self.parse_bucket_name_from_url(request, full_url)
+        self.backend.log_incoming_request(request, bucket_name)
         response_headers = {}
 
         try:
diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py
index dac1d2d4a..aec8c8b8e 100644
--- a/tests/test_s3/test_s3.py
+++ b/tests/test_s3/test_s3.py
@@ -2219,185 +2219,6 @@ def test_put_bucket_notification_errors():
     )
 
 
-@mock_s3
-def test_put_bucket_logging():
-    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
-    bucket_name = "mybucket"
-    log_bucket = "logbucket"
-    wrong_region_bucket = "wrongregionlogbucket"
-    s3.create_bucket(Bucket=bucket_name)
-    s3.create_bucket(Bucket=log_bucket)  # Adding the ACL for log-delivery later...
-    s3.create_bucket(
-        Bucket=wrong_region_bucket,
-        CreateBucketConfiguration={"LocationConstraint": "us-west-2"},
-    )
-
-    # No logging config:
-    result = s3.get_bucket_logging(Bucket=bucket_name)
-    assert not result.get("LoggingEnabled")
-
-    # A log-bucket that doesn't exist:
-    with pytest.raises(ClientError) as err:
-        s3.put_bucket_logging(
-            Bucket=bucket_name,
-            BucketLoggingStatus={
-                "LoggingEnabled": {"TargetBucket": "IAMNOTREAL", "TargetPrefix": ""}
-            },
-        )
-    assert err.value.response["Error"]["Code"] == "InvalidTargetBucketForLogging"
-
-    # A log-bucket that's missing the proper ACLs for LogDelivery:
-    with pytest.raises(ClientError) as err:
-        s3.put_bucket_logging(
-            Bucket=bucket_name,
-            BucketLoggingStatus={
-                "LoggingEnabled": {"TargetBucket": log_bucket, "TargetPrefix": ""}
-            },
-        )
-    assert err.value.response["Error"]["Code"] == "InvalidTargetBucketForLogging"
-    assert "log-delivery" in err.value.response["Error"]["Message"]
-
-    # Add the proper "log-delivery" ACL to the log buckets:
-    bucket_owner = s3.get_bucket_acl(Bucket=log_bucket)["Owner"]
-    for bucket in [log_bucket, wrong_region_bucket]:
-        s3.put_bucket_acl(
-            Bucket=bucket,
-            AccessControlPolicy={
-                "Grants": [
-                    {
-                        "Grantee": {
-                            "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery",
-                            "Type": "Group",
-                        },
-                        "Permission": "WRITE",
-                    },
-                    {
-                        "Grantee": {
-                            "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery",
-                            "Type": "Group",
-                        },
-                        "Permission": "READ_ACP",
-                    },
-                    {
-                        "Grantee": {"Type": "CanonicalUser", "ID": bucket_owner["ID"]},
-                        "Permission": "FULL_CONTROL",
-                    },
-                ],
-                "Owner": bucket_owner,
-            },
-        )
-
-    # A log-bucket that's in the wrong region:
-    with pytest.raises(ClientError) as err:
-        s3.put_bucket_logging(
-            Bucket=bucket_name,
-            BucketLoggingStatus={
-                "LoggingEnabled": {
-                    "TargetBucket": wrong_region_bucket,
-                    "TargetPrefix": "",
-                }
-            },
-        )
-    assert err.value.response["Error"]["Code"] == "CrossLocationLoggingProhibitted"
-
-    # Correct logging:
-    s3.put_bucket_logging(
-        Bucket=bucket_name,
-        BucketLoggingStatus={
-            "LoggingEnabled": {
-                "TargetBucket": log_bucket,
-                "TargetPrefix": f"{bucket_name}/",
-            }
-        },
-    )
-    result = s3.get_bucket_logging(Bucket=bucket_name)
-    assert result["LoggingEnabled"]["TargetBucket"] == log_bucket
-    assert result["LoggingEnabled"]["TargetPrefix"] == f"{bucket_name}/"
-    assert not result["LoggingEnabled"].get("TargetGrants")
-
-    # And disabling:
-    s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={})
-    assert not s3.get_bucket_logging(Bucket=bucket_name).get("LoggingEnabled")
-
-    # And enabling with multiple target grants:
-    s3.put_bucket_logging(
-        Bucket=bucket_name,
-        BucketLoggingStatus={
-            "LoggingEnabled": {
-                "TargetBucket": log_bucket,
-                "TargetPrefix": f"{bucket_name}/",
-                "TargetGrants": [
-                    {
-                        "Grantee": {
-                            "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274",
-                            "Type": "CanonicalUser",
-                        },
-                        "Permission": "READ",
-                    },
-                    {
-                        "Grantee": {
-                            "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274",
-                            "Type": "CanonicalUser",
-                        },
-                        "Permission": "WRITE",
-                    },
-                ],
-            }
-        },
-    )
-
-    result = s3.get_bucket_logging(Bucket=bucket_name)
-    assert len(result["LoggingEnabled"]["TargetGrants"]) == 2
-    assert (
-        result["LoggingEnabled"]["TargetGrants"][0]["Grantee"]["ID"]
-        == "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274"
-    )
-
-    # Test with just 1 grant:
-    s3.put_bucket_logging(
-        Bucket=bucket_name,
-        BucketLoggingStatus={
-            "LoggingEnabled": {
-                "TargetBucket": log_bucket,
-                "TargetPrefix": f"{bucket_name}/",
-                "TargetGrants": [
-                    {
-                        "Grantee": {
-                            "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274",
-                            "Type": "CanonicalUser",
-                        },
-                        "Permission": "READ",
-                    }
-                ],
-            }
-        },
-    )
-    result = s3.get_bucket_logging(Bucket=bucket_name)
-    assert len(result["LoggingEnabled"]["TargetGrants"]) == 1
-
-    # With an invalid grant:
-    with pytest.raises(ClientError) as err:
-        s3.put_bucket_logging(
-            Bucket=bucket_name,
-            BucketLoggingStatus={
-                "LoggingEnabled": {
-                    "TargetBucket": log_bucket,
-                    "TargetPrefix": f"{bucket_name}/",
-                    "TargetGrants": [
-                        {
-                            "Grantee": {
-                                "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274",
-                                "Type": "CanonicalUser",
-                            },
-                            "Permission": "NOTAREALPERM",
-                        }
-                    ],
-                }
-            },
-        )
-    assert err.value.response["Error"]["Code"] == "MalformedXML"
-
-
 @mock_s3
 def test_list_object_versions():
     s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
diff --git a/tests/test_s3/test_s3_logging.py b/tests/test_s3/test_s3_logging.py
new file mode 100644
index 000000000..3289b59a5
--- /dev/null
+++ b/tests/test_s3/test_s3_logging.py
@@ -0,0 +1,262 @@
+import boto3
+from botocore.client import ClientError
+
+from moto.s3.responses import DEFAULT_REGION_NAME
+import pytest
+
+import sure  # noqa # pylint: disable=unused-import
+
+from moto import mock_s3
+
+
+@mock_s3
+def test_put_bucket_logging():
+    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+    bucket_name = "mybucket"
+    log_bucket = "logbucket"
+    wrong_region_bucket = "wrongregionlogbucket"
+    s3.create_bucket(Bucket=bucket_name)
+    s3.create_bucket(Bucket=log_bucket)  # Adding the ACL for log-delivery later...
+    s3.create_bucket(
+        Bucket=wrong_region_bucket,
+        CreateBucketConfiguration={"LocationConstraint": "us-west-2"},
+    )
+
+    # No logging config:
+    result = s3.get_bucket_logging(Bucket=bucket_name)
+    assert not result.get("LoggingEnabled")
+
+    # A log-bucket that doesn't exist:
+    with pytest.raises(ClientError) as err:
+        s3.put_bucket_logging(
+            Bucket=bucket_name,
+            BucketLoggingStatus={
+                "LoggingEnabled": {"TargetBucket": "IAMNOTREAL", "TargetPrefix": ""}
+            },
+        )
+    assert err.value.response["Error"]["Code"] == "InvalidTargetBucketForLogging"
+
+    # A log-bucket that's missing the proper ACLs for LogDelivery:
+    with pytest.raises(ClientError) as err:
+        s3.put_bucket_logging(
+            Bucket=bucket_name,
+            BucketLoggingStatus={
+                "LoggingEnabled": {"TargetBucket": log_bucket, "TargetPrefix": ""}
+            },
+        )
+    assert err.value.response["Error"]["Code"] == "InvalidTargetBucketForLogging"
+    assert "log-delivery" in err.value.response["Error"]["Message"]
+
+    # Add the proper "log-delivery" ACL to the log buckets:
+    bucket_owner = s3.get_bucket_acl(Bucket=log_bucket)["Owner"]
+    for bucket in [log_bucket, wrong_region_bucket]:
+        s3.put_bucket_acl(
+            Bucket=bucket,
+            AccessControlPolicy={
+                "Grants": [
+                    {
+                        "Grantee": {
+                            "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery",
+                            "Type": "Group",
+                        },
+                        "Permission": "WRITE",
+                    },
+                    {
+                        "Grantee": {
+                            "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery",
+                            "Type": "Group",
+                        },
+                        "Permission": "READ_ACP",
+                    },
+                    {
+                        "Grantee": {"Type": "CanonicalUser", "ID": bucket_owner["ID"]},
+                        "Permission": "FULL_CONTROL",
+                    },
+                ],
+                "Owner": bucket_owner,
+            },
+        )
+
+    # A log-bucket that's in the wrong region:
+    with pytest.raises(ClientError) as err:
+        s3.put_bucket_logging(
+            Bucket=bucket_name,
+            BucketLoggingStatus={
+                "LoggingEnabled": {
+                    "TargetBucket": wrong_region_bucket,
+                    "TargetPrefix": "",
+                }
+            },
+        )
+    assert err.value.response["Error"]["Code"] == "CrossLocationLoggingProhibitted"
+
+    # Correct logging:
+    s3.put_bucket_logging(
+        Bucket=bucket_name,
+        BucketLoggingStatus={
+            "LoggingEnabled": {
+                "TargetBucket": log_bucket,
+                "TargetPrefix": f"{bucket_name}/",
+            }
+        },
+    )
+    result = s3.get_bucket_logging(Bucket=bucket_name)
+    assert result["LoggingEnabled"]["TargetBucket"] == log_bucket
+    assert result["LoggingEnabled"]["TargetPrefix"] == f"{bucket_name}/"
+    assert not result["LoggingEnabled"].get("TargetGrants")
+
+    # And disabling:
+    s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={})
+    assert not s3.get_bucket_logging(Bucket=bucket_name).get("LoggingEnabled")
+
+    # And enabling with multiple target grants:
+    s3.put_bucket_logging(
+        Bucket=bucket_name,
+        BucketLoggingStatus={
+            "LoggingEnabled": {
+                "TargetBucket": log_bucket,
+                "TargetPrefix": f"{bucket_name}/",
+                "TargetGrants": [
+                    {
+                        "Grantee": {
+                            "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274",
+                            "Type": "CanonicalUser",
+                        },
+                        "Permission": "READ",
+                    },
+                    {
+                        "Grantee": {
+                            "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274",
+                            "Type": "CanonicalUser",
+                        },
+                        "Permission": "WRITE",
+                    },
+                ],
+            }
+        },
+    )
+
+    result = s3.get_bucket_logging(Bucket=bucket_name)
+    assert len(result["LoggingEnabled"]["TargetGrants"]) == 2
+    assert (
+        result["LoggingEnabled"]["TargetGrants"][0]["Grantee"]["ID"]
+        == "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274"
+    )
+
+    # Test with just 1 grant:
+    s3.put_bucket_logging(
+        Bucket=bucket_name,
+        BucketLoggingStatus={
+            "LoggingEnabled": {
+                "TargetBucket": log_bucket,
+                "TargetPrefix": f"{bucket_name}/",
+                "TargetGrants": [
+                    {
+                        "Grantee": {
+                            "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274",
+                            "Type": "CanonicalUser",
+                        },
+                        "Permission": "READ",
+                    }
+                ],
+            }
+        },
+    )
+    result = s3.get_bucket_logging(Bucket=bucket_name)
+    assert len(result["LoggingEnabled"]["TargetGrants"]) == 1
+
+    # With an invalid grant:
+    with pytest.raises(ClientError) as err:
+        s3.put_bucket_logging(
+            Bucket=bucket_name,
+            BucketLoggingStatus={
+                "LoggingEnabled": {
+                    "TargetBucket": log_bucket,
+                    "TargetPrefix": f"{bucket_name}/",
+                    "TargetGrants": [
+                        {
+                            "Grantee": {
+                                "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274",
+                                "Type": "CanonicalUser",
+                            },
+                            "Permission": "NOTAREALPERM",
+                        }
+                    ],
+                }
+            },
+        )
+    assert err.value.response["Error"]["Code"] == "MalformedXML"
+
+
+@mock_s3
+def test_log_file_is_created():
+    # Create necessary buckets
+    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+    bucket_name = "mybucket"
+    log_bucket = "logbucket"
+    s3.create_bucket(Bucket=bucket_name)
+    s3.create_bucket(Bucket=log_bucket)
+
+    # Enable logging
+    bucket_owner = s3.get_bucket_acl(Bucket=log_bucket)["Owner"]
+    s3.put_bucket_acl(
+        Bucket=log_bucket,
+        AccessControlPolicy={
+            "Grants": [
+                {
+                    "Grantee": {
+                        "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery",
+                        "Type": "Group",
+                    },
+                    "Permission": "WRITE",
+                },
+                {
+                    "Grantee": {
+                        "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery",
+                        "Type": "Group",
+                    },
+                    "Permission": "READ_ACP",
+                },
+                {
+                    "Grantee": {"Type": "CanonicalUser", "ID": bucket_owner["ID"]},
+                    "Permission": "FULL_CONTROL",
+                },
+            ],
+            "Owner": bucket_owner,
+        },
+    )
+    s3.put_bucket_logging(
+        Bucket=bucket_name,
+        BucketLoggingStatus={
+            "LoggingEnabled": {
+                "TargetBucket": log_bucket,
+                "TargetPrefix": f"{bucket_name}/",
+            }
+        },
+    )
+
+    # Make some requests against the source bucket
+    s3.put_object(Bucket=bucket_name, Key="key1", Body=b"")
+    s3.put_object(Bucket=bucket_name, Key="key2", Body=b"data")
+
+    s3.put_bucket_logging(
+        Bucket=bucket_name,
+        BucketLoggingStatus={
+            "LoggingEnabled": {"TargetBucket": log_bucket, "TargetPrefix": ""}
+        },
+    )
+    s3.list_objects_v2(Bucket=bucket_name)
+
+    # Verify files are created in the target (logging) bucket
+    keys = [k["Key"] for k in s3.list_objects_v2(Bucket=log_bucket)["Contents"]]
+    [k for k in keys if k.startswith("mybucket/")].should.have.length_of(3)
+    [k for k in keys if not k.startswith("mybucket/")].should.have.length_of(1)
+
+    # Verify (roughly) files have the correct content
+    contents = [
+        s3.get_object(Bucket=log_bucket, Key=key)["Body"].read().decode("utf-8")
+        for key in keys
+    ]
+    assert any([c for c in contents if bucket_name in c])
+    assert any([c for c in contents if "REST.GET.BUCKET" in c])
+    assert any([c for c in contents if "REST.PUT.BUCKET" in c])