diff --git a/moto/s3/models.py b/moto/s3/models.py index 98229539e..5fa115d69 100644 --- a/moto/s3/models.py +++ b/moto/s3/models.py @@ -169,6 +169,9 @@ class FakeKey(BaseModel): raise InvalidStorageClass(storage=storage) self._storage_class = storage + def set_expiry(self, expiry): + self._expiry = expiry + def set_acl(self, acl): self.acl = acl @@ -1689,6 +1692,9 @@ class S3Backend(BaseBackend): new_key.set_storage_class(storage) if acl is not None: new_key.set_acl(acl) + if key.storage_class in ["GLACIER", "DEEP_ARCHIVE"]: + # Objects copied from an archive tier should not keep the restore expiry + new_key.set_expiry(None) dest_bucket.keys[dest_key_name] = new_key diff --git a/moto/s3/responses.py b/moto/s3/responses.py index 530365a6e..a7fb82210 100644 --- a/moto/s3/responses.py +++ b/moto/s3/responses.py @@ -1276,7 +1276,13 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): if key is not None: if key.storage_class in ["GLACIER", "DEEP_ARCHIVE"]: - raise ObjectNotInActiveTierError(key) + if key.response_dict.get( + "x-amz-restore" + ) is None or 'ongoing-request="true"' in key.response_dict.get( + "x-amz-restore" + ): + raise ObjectNotInActiveTierError(key) + self.backend.copy_key( src_bucket, src_key, diff --git a/tests/test_s3/test_s3_storageclass.py b/tests/test_s3/test_s3_storageclass.py index f1a0479b2..a89b4a896 100644 --- a/tests/test_s3/test_s3_storageclass.py +++ b/tests/test_s3/test_s3_storageclass.py @@ -156,7 +156,7 @@ def test_s3_default_storage_class(): @mock_s3 -def test_s3_copy_object_error_for_glacier_storage_class(): +def test_s3_copy_object_error_for_glacier_storage_class_not_restored(): s3 = boto3.client("s3") s3.create_bucket( Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"} @@ -177,7 +177,7 @@ def test_s3_copy_object_error_for_glacier_storage_class(): @mock_s3 -def test_s3_copy_object_error_for_deep_archive_storage_class(): +def test_s3_copy_object_error_for_deep_archive_storage_class_not_restored(): s3 = 
boto3.client("s3") s3.create_bucket( Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"} @@ -195,3 +195,57 @@ def test_s3_copy_object_error_for_deep_archive_storage_class(): ) exc.exception.response["Error"]["Code"].should.equal("ObjectNotInActiveTierError") + + +@mock_s3 +def test_s3_copy_object_for_glacier_storage_class_restored(): + s3 = boto3.client("s3", region_name="us-east-1") + s3.create_bucket(Bucket="Bucket") + + s3.put_object( + Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="GLACIER" + ) + + s3.create_bucket(Bucket="Bucket2") + s3.restore_object(Bucket="Bucket", Key="First_Object", RestoreRequest={"Days": 123}) + + s3.copy_object( + CopySource={"Bucket": "Bucket", "Key": "First_Object"}, + Bucket="Bucket2", + Key="Second_Object", + ) + + list_of_copied_objects = s3.list_objects(Bucket="Bucket2") + # checks that copy of restored Glacier object has STANDARD storage class + list_of_copied_objects["Contents"][0]["StorageClass"].should.equal("STANDARD") + # checks that metadata of copy has no Restore property + s3.head_object(Bucket="Bucket2", Key="Second_Object").should.not_have.property( + "Restore" + ) + + +@mock_s3 +def test_s3_copy_object_for_deep_archive_storage_class_restored(): + s3 = boto3.client("s3", region_name="us-east-1") + s3.create_bucket(Bucket="Bucket") + + s3.put_object( + Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="DEEP_ARCHIVE" + ) + + s3.create_bucket(Bucket="Bucket2") + s3.restore_object(Bucket="Bucket", Key="First_Object", RestoreRequest={"Days": 123}) + + s3.copy_object( + CopySource={"Bucket": "Bucket", "Key": "First_Object"}, + Bucket="Bucket2", + Key="Second_Object", + ) + + list_of_copied_objects = s3.list_objects(Bucket="Bucket2") + # checks that copy of restored Deep Archive object has STANDARD storage class + list_of_copied_objects["Contents"][0]["StorageClass"].should.equal("STANDARD") + # checks that metadata of copy has no Restore property + 
s3.head_object(Bucket="Bucket2", Key="Second_Object").should.not_have.property( + "Restore" + )