S3: Add Notifications for Restore-actions (#7455)

Bert Blommers 2024-03-10 14:51:22 +00:00 committed by GitHub
parent d5042facb6
commit 3c71f81155
7 changed files with 97 additions and 15 deletions
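The change wires S3 restore actions into moto's EventBridge notifications: `restore_object` now fires `ObjectRestore:Post` when a restore is requested and `ObjectRestore:Completed` when it finishes, and the response handler returns HTTP 202 for a first restore and HTTP 200 for a repeated one. A minimal sketch of the externally visible behaviour, based on the tests in this commit (bucket and key names are illustrative):

```python
import boto3
from moto import mock_aws


@mock_aws
def demo_restore_status_codes() -> None:
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="demo-bucket")
    s3.put_object(
        Bucket="demo-bucket", Key="the-key", Body=b"somedata", StorageClass="GLACIER"
    )

    # First restore: the key has no expiry date yet, so S3 answers 202 Accepted
    resp = s3.restore_object(
        Bucket="demo-bucket", Key="the-key", RestoreRequest={"Days": 1}
    )
    assert resp["ResponseMetadata"]["HTTPStatusCode"] == 202

    # Restoring an already-restored key: S3 answers 200 OK
    resp = s3.restore_object(
        Bucket="demo-bucket", Key="the-key", RestoreRequest={"Days": 2}
    )
    assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200


demo_restore_status_codes()
```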

View File

@@ -6509,7 +6509,7 @@
 ## s3
 <details>
-<summary>67% implemented</summary>
+<summary>68% implemented</summary>
 - [X] abort_multipart_upload
 - [X] complete_multipart_upload
@@ -6605,7 +6605,7 @@
 - [X] put_object_retention
 - [X] put_object_tagging
 - [X] put_public_access_block
-- [ ] restore_object
+- [X] restore_object
 - [X] select_object_content
 - [X] upload_file
 - [X] upload_fileobj

View File

@@ -138,7 +138,7 @@ s3
 - [X] put_object_retention
 - [X] put_object_tagging
 - [X] put_public_access_block
-- [ ] restore_object
+- [X] restore_object
 - [X] select_object_content
 Highly experimental. Please raise an issue if you find any inconsistencies/bugs.

View File

@@ -42,6 +42,7 @@ from moto.s3.exceptions import (
     InvalidBucketName,
     InvalidNotificationDestination,
     InvalidNotificationEvent,
+    InvalidObjectState,
     InvalidPart,
     InvalidPublicAccessBlockConfiguration,
     InvalidRequest,
@@ -197,6 +198,25 @@ class FakeKey(BaseModel, ManagedState):
         self._value_buffer.write(new_value)
         self.contentsize = len(new_value)

+    @property
+    def status(self) -> Optional[str]:
+        previous = self._status
+        new_status = super().status
+        if previous != "RESTORED" and new_status == "RESTORED":
+            s3_backend = s3_backends[self.account_id]["global"]
+            bucket = s3_backend.get_bucket(self.bucket_name)  # type: ignore
+            notifications.send_event(
+                self.account_id,
+                notifications.S3NotificationEvent.OBJECT_RESTORE_COMPLETED_EVENT,
+                bucket,
+                key=self,
+            )
+        return new_status
+
+    @status.setter
+    def status(self, value: str) -> None:
+        self._status = value
+
     def set_metadata(self, metadata: Any, replace: bool = False) -> None:
         if replace:
             self._metadata = {}  # type: ignore
@@ -215,6 +235,14 @@ class FakeKey(BaseModel, ManagedState):
     def restore(self, days: int) -> None:
         self._expiry = utcnow() + datetime.timedelta(days)
+        s3_backend = s3_backends[self.account_id]["global"]
+        bucket = s3_backend.get_bucket(self.bucket_name)  # type: ignore
+        notifications.send_event(
+            self.account_id,
+            notifications.S3NotificationEvent.OBJECT_RESTORE_POST_EVENT,
+            bucket,
+            key=self,
+        )

     @property
     def etag(self) -> str:
@@ -2854,6 +2882,16 @@ class S3Backend(BaseBackend, CloudWatchMetricProvider):
             for x in query_result
         ]

+    def restore_object(self, bucket_name: str, key_name: str, days: str) -> bool:
+        key = self.get_object(bucket_name, key_name)
+        if not key:
+            raise MissingKey
+        if key.storage_class not in ARCHIVE_STORAGE_CLASSES:
+            raise InvalidObjectState(storage_class=key.storage_class)
+        had_expiry_date = key.expiry_date is not None
+        key.restore(int(days))
+        return had_expiry_date
+
     def upload_file(self) -> None:
         # Listed for the implementation coverage
         # Implementation part of responses.py
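The notification hook rides on moto's ManagedState: a key's restore status progresses to `RESTORED` via the state manager, and the overridden `status` property above fires `ObjectRestore:Completed` exactly once, on the first read that observes that transition. A sketch of how a test can slow down the (by default immediate) transition; the `s3::keyrestore` model name is taken from the tests below, while the exact transition settings follow moto's state-manager conventions and are an assumption here:

```python
from moto.moto_api import state_manager

# Assumption: require one status read before IN_PROGRESS advances to
# RESTORED, instead of completing the restore immediately.
state_manager.set_transition(
    model_name="s3::keyrestore",
    transition={"progression": "manual", "times": 1},
)
try:
    pass  # ... issue restore_object / head_object against the mocked bucket ...
finally:
    # Reinstate the default (immediate) progression for other tests
    state_manager.unset_transition(model_name="s3::keyrestore")
```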

View File

@@ -2274,14 +2274,11 @@ class S3Response(BaseResponse):
         elif "restore" in query:
             es = minidom.parseString(body).getElementsByTagName("Days")
             days = es[0].childNodes[0].wholeText
-            key = self.backend.get_object(bucket_name, key_name)  # type: ignore
-            if key.storage_class not in ARCHIVE_STORAGE_CLASSES:  # type: ignore
-                raise InvalidObjectState(storage_class=key.storage_class)  # type: ignore
-            r = 202
-            if key.expiry_date is not None:  # type: ignore
-                r = 200
-            key.restore(int(days))  # type: ignore
-            return r, {}, ""
+            previously_restored = self.backend.restore_object(
+                bucket_name, key_name, days
+            )
+            status_code = 200 if previously_restored else 202
+            return status_code, {}, ""
         elif "select" in query:
             request = xmltodict.parse(body)["SelectObjectContentRequest"]
             select_query = request["Expression"]
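The `body` parsed here is the RestoreObject XML that boto3 produces from `RestoreRequest={"Days": 1}`. A self-contained sketch of that parsing step; the body string is an assumption modelled on the S3 API shape, not taken from this diff:

```python
from xml.dom import minidom

# Assumption: roughly what boto3 sends for RestoreRequest={"Days": 1}
body = (
    '<RestoreRequest xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
    "<Days>1</Days>"
    "</RestoreRequest>"
)

# Mirrors the handler above: pull the text of the first <Days> element
es = minidom.parseString(body).getElementsByTagName("Days")
days = es[0].childNodes[0].wholeText
assert days == "1"
```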

View File

@@ -544,7 +544,8 @@ def test_restore_key():
     key = bucket.put_object(Key="the-key", Body=b"somedata", StorageClass="GLACIER")
     assert key.restore is None
-    key.restore_object(RestoreRequest={"Days": 1})
+    resp = key.restore_object(RestoreRequest={"Days": 1})
+    assert resp["ResponseMetadata"]["HTTPStatusCode"] == 202
     if settings.TEST_SERVER_MODE:
         assert 'ongoing-request="false"' in key.restore
     elif settings.TEST_DECORATOR_MODE:
@@ -552,7 +553,8 @@ def test_restore_key():
             'ongoing-request="false", expiry-date="Mon, 02 Jan 2012 12:00:00 GMT"'
         )
-    key.restore_object(RestoreRequest={"Days": 2})
+    resp = key.restore_object(RestoreRequest={"Days": 2})
+    assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
     if settings.TEST_SERVER_MODE:
         assert 'ongoing-request="false"' in key.restore
@@ -594,6 +596,19 @@ def test_restore_key_transition():
     state_manager.unset_transition(model_name="s3::keyrestore")


+@mock_aws
+def test_restore_unknown_key():
+    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+    client.create_bucket(Bucket="mybucket")
+    with pytest.raises(ClientError) as exc:
+        client.restore_object(
+            Bucket="mybucket", Key="unknown", RestoreRequest={"Days": 1}
+        )
+    err = exc.value.response["Error"]
+    assert err["Code"] == "NoSuchKey"
+
+
 @mock_aws
 def test_cannot_restore_standard_class_object():
     s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)

View File

@@ -220,3 +220,33 @@ def test_delete_object_notification():
     assert event_message["region"] == REGION_NAME
     assert event_message["detail"]["bucket"]["name"] == bucket_name
     assert event_message["detail"]["reason"] == "ObjectRemoved"
+
+
+@mock_aws
+def test_restore_key_notifications():
+    resource_names = _seteup_bucket_notification_eventbridge()
+    bucket_name = resource_names["bucket_name"]
+
+    s3_resource = boto3.resource("s3", region_name=REGION_NAME)
+    bucket = s3_resource.Bucket(bucket_name)
+    key = bucket.put_object(Key="the-key", Body=b"somedata", StorageClass="GLACIER")
+    key.restore_object(RestoreRequest={"Days": 1})
+
+    events = _get_send_events()
+    event_names = [json.loads(e["message"])["detail"]["reason"] for e in events]
+    assert event_names == ["ObjectCreated", "ObjectRestore"]
+
+    # Finish the Object Restoration - the restore completes immediately by default
+    key.load()
+
+    events = _get_send_events()
+    event_names = [json.loads(e["message"])["detail"]["reason"] for e in events]
+    assert event_names == ["ObjectCreated", "ObjectRestore", "ObjectRestore"]
+
+    # Sanity check - loading the Key does not mean the Restore-event is fired every time
+    key.load()
+
+    events = _get_send_events()
+    event_names = [json.loads(e["message"])["detail"]["reason"] for e in events]
+    assert event_names == ["ObjectCreated", "ObjectRestore", "ObjectRestore"]
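Putting the assertions from these tests together, the slice of each EventBridge message they inspect looks roughly like the dict below; any fields beyond `region`, `detail.bucket.name`, and `detail.reason` are elided because the tests do not pin them down, and the concrete values are illustrative:

```python
# Hypothetical message slice for a restore event, reconstructed from the
# assertions in this test file (values are illustrative):
event_message = {
    "region": "us-east-1",
    "detail": {
        "bucket": {"name": "my-bucket"},
        "reason": "ObjectRestore",
    },
}

assert event_message["detail"]["reason"] == "ObjectRestore"
```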

View File

@@ -203,9 +203,10 @@ def test_s3_copy_object_for_glacier_storage_class_restored():
     )
     s3_client.create_bucket(Bucket="Bucket2")

-    s3_client.restore_object(
+    resp = s3_client.restore_object(
         Bucket="Bucket", Key="First_Object", RestoreRequest={"Days": 123}
     )
+    assert resp["ResponseMetadata"]["HTTPStatusCode"] == 202

     s3_client.copy_object(
         CopySource={"Bucket": "Bucket", "Key": "First_Object"},
@@ -241,9 +242,10 @@ def test_s3_copy_object_for_deep_archive_storage_class_restored():
     assert err["StorageClass"] == "DEEP_ARCHIVE"

     s3_client.create_bucket(Bucket="Bucket2")

-    s3_client.restore_object(
+    resp = s3_client.restore_object(
         Bucket="Bucket", Key="First_Object", RestoreRequest={"Days": 123}
     )
+    assert resp["ResponseMetadata"]["HTTPStatusCode"] == 202

     s3_client.get_object(Bucket="Bucket", Key="First_Object")
     s3_client.copy_object(