diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md
index a36e45472..3e91aaee6 100644
--- a/IMPLEMENTATION_COVERAGE.md
+++ b/IMPLEMENTATION_COVERAGE.md
@@ -6509,7 +6509,7 @@
 ## s3
 <br/>

-67% implemented
+68% implemented

 - [X] abort_multipart_upload
 - [X] complete_multipart_upload
@@ -6605,7 +6605,7 @@
 - [X] put_object_retention
 - [X] put_object_tagging
 - [X] put_public_access_block
-- [ ] restore_object
+- [X] restore_object
 - [X] select_object_content
 - [X] upload_file
 - [X] upload_fileobj
diff --git a/docs/docs/services/s3.rst b/docs/docs/services/s3.rst
index bcabe4bbc..d7973040d 100644
--- a/docs/docs/services/s3.rst
+++ b/docs/docs/services/s3.rst
@@ -138,7 +138,7 @@ s3
 - [X] put_object_retention
 - [X] put_object_tagging
 - [X] put_public_access_block
-- [ ] restore_object
+- [X] restore_object
 - [X] select_object_content

   Highly experimental. Please raise an issue if you find any inconsistencies/bugs.
diff --git a/moto/s3/models.py b/moto/s3/models.py
index 407b60e2a..b96a1581c 100644
--- a/moto/s3/models.py
+++ b/moto/s3/models.py
@@ -42,6 +42,7 @@ from moto.s3.exceptions import (
     InvalidBucketName,
     InvalidNotificationDestination,
     InvalidNotificationEvent,
+    InvalidObjectState,
     InvalidPart,
     InvalidPublicAccessBlockConfiguration,
     InvalidRequest,
@@ -197,6 +198,25 @@ class FakeKey(BaseModel, ManagedState):
         self._value_buffer.write(new_value)
         self.contentsize = len(new_value)

+    @property
+    def status(self) -> Optional[str]:
+        previous = self._status
+        new_status = super().status
+        if previous != "RESTORED" and new_status == "RESTORED":
+            s3_backend = s3_backends[self.account_id]["global"]
+            bucket = s3_backend.get_bucket(self.bucket_name)  # type: ignore
+            notifications.send_event(
+                self.account_id,
+                notifications.S3NotificationEvent.OBJECT_RESTORE_COMPLETED_EVENT,
+                bucket,
+                key=self,
+            )
+        return new_status
+
+    @status.setter
+    def status(self, value: str) -> None:
+        self._status = value
+
     def set_metadata(self, metadata: Any, replace: bool = False) -> None:
         if replace:
             self._metadata = {}  # type: ignore
@@ -215,6 +235,14 @@

     def restore(self, days: int) -> None:
         self._expiry = utcnow() + datetime.timedelta(days)
+        s3_backend = s3_backends[self.account_id]["global"]
+        bucket = s3_backend.get_bucket(self.bucket_name)  # type: ignore
+        notifications.send_event(
+            self.account_id,
+            notifications.S3NotificationEvent.OBJECT_RESTORE_POST_EVENT,
+            bucket,
+            key=self,
+        )

     @property
     def etag(self) -> str:
@@ -2854,6 +2882,16 @@ class S3Backend(BaseBackend, CloudWatchMetricProvider):
             for x in query_result
         ]

+    def restore_object(self, bucket_name: str, key_name: str, days: str) -> bool:
+        key = self.get_object(bucket_name, key_name)
+        if not key:
+            raise MissingKey
+        if key.storage_class not in ARCHIVE_STORAGE_CLASSES:
+            raise InvalidObjectState(storage_class=key.storage_class)
+        had_expiry_date = key.expiry_date is not None
+        key.restore(int(days))
+        return had_expiry_date
+
     def upload_file(self) -> None:
         # Listed for the implementation coverage
         # Implementation part of responses.py
diff --git a/moto/s3/responses.py b/moto/s3/responses.py
index 629df1c4e..a737f74df 100644
--- a/moto/s3/responses.py
+++ b/moto/s3/responses.py
@@ -2274,14 +2274,11 @@ class S3Response(BaseResponse):
         elif "restore" in query:
             es = minidom.parseString(body).getElementsByTagName("Days")
             days = es[0].childNodes[0].wholeText
-            key = self.backend.get_object(bucket_name, key_name)  # type: ignore
-            if key.storage_class not in ARCHIVE_STORAGE_CLASSES:  # type: ignore
-                raise InvalidObjectState(storage_class=key.storage_class)  # type: ignore
-            r = 202
-            if key.expiry_date is not None:  # type: ignore
-                r = 200
-            key.restore(int(days))  # type: ignore
-            return r, {}, ""
+            previously_restored = self.backend.restore_object(
+                bucket_name, key_name, days
+            )
+            status_code = 200 if previously_restored else 202
+            return status_code, {}, ""
         elif "select" in query:
             request = xmltodict.parse(body)["SelectObjectContentRequest"]
             select_query = request["Expression"]
diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py
index 40a9425b3..cbb5c1d16 100644
--- a/tests/test_s3/test_s3.py
+++ b/tests/test_s3/test_s3.py
@@ -544,7 +544,8 @@ def test_restore_key():
     key = bucket.put_object(Key="the-key", Body=b"somedata", StorageClass="GLACIER")
     assert key.restore is None

-    key.restore_object(RestoreRequest={"Days": 1})
+    resp = key.restore_object(RestoreRequest={"Days": 1})
+    assert resp["ResponseMetadata"]["HTTPStatusCode"] == 202
     if settings.TEST_SERVER_MODE:
         assert 'ongoing-request="false"' in key.restore
     elif settings.TEST_DECORATOR_MODE:
@@ -552,7 +553,8 @@ def test_restore_key():
             'ongoing-request="false", expiry-date="Mon, 02 Jan 2012 12:00:00 GMT"'
         )

-    key.restore_object(RestoreRequest={"Days": 2})
+    resp = key.restore_object(RestoreRequest={"Days": 2})
+    assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200

     if settings.TEST_SERVER_MODE:
         assert 'ongoing-request="false"' in key.restore
@@ -594,6 +596,19 @@ def test_restore_key_transition():
     state_manager.unset_transition(model_name="s3::keyrestore")


+@mock_aws
+def test_restore_unknown_key():
+    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+    client.create_bucket(Bucket="mybucket")
+
+    with pytest.raises(ClientError) as exc:
+        client.restore_object(
+            Bucket="mybucket", Key="unknown", RestoreRequest={"Days": 1}
+        )
+    err = exc.value.response["Error"]
+    assert err["Code"] == "NoSuchKey"
+
+
 @mock_aws
 def test_cannot_restore_standard_class_object():
     s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
diff --git a/tests/test_s3/test_s3_eventbridge_integration.py b/tests/test_s3/test_s3_eventbridge_integration.py
index 9ccb25cbb..8bd9dc815 100644
--- a/tests/test_s3/test_s3_eventbridge_integration.py
+++ b/tests/test_s3/test_s3_eventbridge_integration.py
@@ -220,3 +220,33 @@ def test_delete_object_notification():
     assert event_message["region"] == REGION_NAME
     assert event_message["detail"]["bucket"]["name"] == bucket_name
     assert event_message["detail"]["reason"] == "ObjectRemoved"
+
+
+@mock_aws
+def test_restore_key_notifications():
+    resource_names = _seteup_bucket_notification_eventbridge()
+    bucket_name = resource_names["bucket_name"]
+
+    s3_resource = boto3.resource("s3", region_name=REGION_NAME)
+
+    bucket = s3_resource.Bucket(bucket_name)
+    key = bucket.put_object(Key="the-key", Body=b"somedata", StorageClass="GLACIER")
+    key.restore_object(RestoreRequest={"Days": 1})
+
+    events = _get_send_events()
+    event_names = [json.loads(e["message"])["detail"]["reason"] for e in events]
+    assert event_names == ["ObjectCreated", "ObjectRestore"]
+
+    # Finish the Object Restoration - restore Completes immediately by default
+    key.load()
+
+    events = _get_send_events()
+    event_names = [json.loads(e["message"])["detail"]["reason"] for e in events]
+    assert event_names == ["ObjectCreated", "ObjectRestore", "ObjectRestore"]
+
+    # Sanity check - loading the Key does not mean the Restore-event is fired every time
+    key.load()
+
+    events = _get_send_events()
+    event_names = [json.loads(e["message"])["detail"]["reason"] for e in events]
+    assert event_names == ["ObjectCreated", "ObjectRestore", "ObjectRestore"]
diff --git a/tests/test_s3/test_s3_storageclass.py b/tests/test_s3/test_s3_storageclass.py
index b680c713d..f2c202373 100644
--- a/tests/test_s3/test_s3_storageclass.py
+++ b/tests/test_s3/test_s3_storageclass.py
@@ -203,9 +203,10 @@ def test_s3_copy_object_for_glacier_storage_class_restored():
     )
     s3_client.create_bucket(Bucket="Bucket2")

-    s3_client.restore_object(
+    resp = s3_client.restore_object(
         Bucket="Bucket", Key="First_Object", RestoreRequest={"Days": 123}
     )
+    assert resp["ResponseMetadata"]["HTTPStatusCode"] == 202

     s3_client.copy_object(
         CopySource={"Bucket": "Bucket", "Key": "First_Object"},
@@ -241,9 +242,10 @@ def test_s3_copy_object_for_deep_archive_storage_class_restored():
     assert err["StorageClass"] == "DEEP_ARCHIVE"

     s3_client.create_bucket(Bucket="Bucket2")
-    s3_client.restore_object(
+    resp = s3_client.restore_object(
         Bucket="Bucket", Key="First_Object", RestoreRequest={"Days": 123}
     )
+    assert resp["ResponseMetadata"]["HTTPStatusCode"] == 202

     s3_client.get_object(Bucket="Bucket", Key="First_Object")
     s3_client.copy_object(
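
Reviewer note (not part of the patch): a minimal end-to-end sketch of the status-code behaviour this change introduces, assuming boto3 and a moto build containing this patch are installed; the bucket/key names and the demo function are illustrative only, not taken from the repository.

import boto3
from moto import mock_aws


@mock_aws
def demo_restore_status_codes() -> None:
    # All S3 calls below are intercepted by moto; nothing touches real AWS.
    client = boto3.client("s3", region_name="us-east-1")
    client.create_bucket(Bucket="demo-bucket")
    client.put_object(
        Bucket="demo-bucket", Key="archived", Body=b"data", StorageClass="GLACIER"
    )

    # First restore request: the key has no expiry date yet, so the new
    # S3Backend.restore_object() returns False and the response is a 202.
    resp = client.restore_object(
        Bucket="demo-bucket", Key="archived", RestoreRequest={"Days": 1}
    )
    assert resp["ResponseMetadata"]["HTTPStatusCode"] == 202

    # Repeating the request finds the existing expiry date, so it returns 200.
    resp = client.restore_object(
        Bucket="demo-bucket", Key="archived", RestoreRequest={"Days": 2}
    )
    assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200


demo_restore_status_codes()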