2018-06-07 07:09:36 +00:00
|
|
|
import boto3
|
|
|
|
|
2022-03-11 21:28:45 +00:00
|
|
|
import sure # noqa # pylint: disable=unused-import
|
2018-06-07 07:09:36 +00:00
|
|
|
from botocore.exceptions import ClientError
|
2020-10-06 05:54:49 +00:00
|
|
|
import pytest
|
2018-06-07 07:09:36 +00:00
|
|
|
|
2019-08-05 15:34:39 +00:00
|
|
|
from moto import mock_s3
|
2018-06-07 07:09:36 +00:00
|
|
|
|
|
|
|
|
|
|
|
@mock_s3
def test_s3_storage_class_standard():
    """An object uploaded without an explicit StorageClass is listed as STANDARD."""
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="Bucket")

    # Add an object to the bucket with default (standard) storage.
    s3.put_object(Bucket="Bucket", Key="my_key", Body="my_value")

    list_of_objects = s3.list_objects(Bucket="Bucket")

    assert list_of_objects["Contents"][0]["StorageClass"] == "STANDARD"
@mock_s3
def test_s3_storage_class_infrequent_access():
    """An object uploaded with StorageClass=STANDARD_IA keeps that class in listings."""
    s3 = boto3.client("s3")
    s3.create_bucket(
        Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-2"}
    )

    # Add an object to the bucket with infrequent-access storage.
    s3.put_object(
        Bucket="Bucket",
        Key="my_key_infrequent",
        Body="my_value_infrequent",
        StorageClass="STANDARD_IA",
    )

    listing = s3.list_objects(Bucket="Bucket")

    assert listing["Contents"][0]["StorageClass"] == "STANDARD_IA"
@mock_s3
def test_s3_storage_class_intelligent_tiering():
    """An object uploaded with StorageClass=INTELLIGENT_TIERING keeps that class."""
    s3 = boto3.client("s3")

    s3.create_bucket(
        Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-east-2"}
    )
    s3.put_object(
        Bucket="Bucket",
        Key="my_key_infrequent",
        Body="my_value_infrequent",
        StorageClass="INTELLIGENT_TIERING",
    )

    objects = s3.list_objects(Bucket="Bucket")

    assert objects["Contents"][0]["StorageClass"] == "INTELLIGENT_TIERING"
@mock_s3
def test_s3_storage_class_copy():
    """CopyObject may assign a new StorageClass to the destination object."""
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="Bucket")
    s3.put_object(
        Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD"
    )

    s3.create_bucket(Bucket="Bucket2")
    # The destination object initially has the default storage class.
    s3.put_object(Bucket="Bucket2", Key="Second_Object", Body="Body2")

    # Overwrite the destination via copy, requesting ONEZONE_IA.
    s3.copy_object(
        CopySource={"Bucket": "Bucket", "Key": "First_Object"},
        Bucket="Bucket2",
        Key="Second_Object",
        StorageClass="ONEZONE_IA",
    )

    list_of_copied_objects = s3.list_objects(Bucket="Bucket2")

    # Checks that the copied object carries the storage class requested on copy.
    assert list_of_copied_objects["Contents"][0]["StorageClass"] == "ONEZONE_IA"
@mock_s3
def test_s3_invalid_copied_storage_class():
    """CopyObject with an unknown StorageClass raises InvalidStorageClass."""
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="Bucket")
    s3.put_object(
        Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD"
    )

    s3.create_bucket(Bucket="Bucket2")
    s3.put_object(
        Bucket="Bucket2",
        Key="Second_Object",
        Body="Body2",
        StorageClass="REDUCED_REDUNDANCY",
    )

    # Try to copy an object with an invalid storage class.
    with pytest.raises(ClientError) as err:
        s3.copy_object(
            CopySource={"Bucket": "Bucket", "Key": "First_Object"},
            Bucket="Bucket2",
            Key="Second_Object",
            StorageClass="STANDARD2",
        )

    e = err.value
    assert e.response["Error"]["Code"] == "InvalidStorageClass"
    assert (
        e.response["Error"]["Message"]
        == "The storage class you specified is not valid"
    )
@mock_s3
def test_s3_invalid_storage_class():
    """PutObject with an unknown StorageClass raises InvalidStorageClass."""
    s3 = boto3.client("s3")
    s3.create_bucket(
        Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
    )

    # Try to add an object with an invalid storage class.
    with pytest.raises(ClientError) as err:
        s3.put_object(
            Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARDD"
        )

    e = err.value
    assert e.response["Error"]["Code"] == "InvalidStorageClass"
    assert (
        e.response["Error"]["Message"]
        == "The storage class you specified is not valid"
    )
@mock_s3
def test_s3_default_storage_class():
    """When no StorageClass is supplied, listings report STANDARD."""
    s3 = boto3.client("s3")
    s3.create_bucket(
        Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
    )

    s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body")

    list_of_objects = s3.list_objects(Bucket="Bucket")

    # Tests that the default storage class is still STANDARD.
    assert list_of_objects["Contents"][0]["StorageClass"] == "STANDARD"
@mock_s3
def test_s3_copy_object_error_for_glacier_storage_class_not_restored():
    """Copying an unrestored GLACIER object fails with ObjectNotInActiveTierError."""
    s3 = boto3.client("s3")
    s3.create_bucket(
        Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
    )

    s3.put_object(
        Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="GLACIER"
    )

    # The object was never restored, so the copy must be rejected.
    with pytest.raises(ClientError) as ex:
        s3.copy_object(
            CopySource={"Bucket": "Bucket", "Key": "First_Object"},
            Bucket="Bucket",
            Key="Second_Object",
        )

    assert ex.value.response["Error"]["Code"] == "ObjectNotInActiveTierError"
@mock_s3
def test_s3_copy_object_error_for_deep_archive_storage_class_not_restored():
    """Copying an unrestored DEEP_ARCHIVE object fails with ObjectNotInActiveTierError."""
    s3 = boto3.client("s3")
    s3.create_bucket(
        Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
    )

    s3.put_object(
        Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="DEEP_ARCHIVE"
    )

    # The object was never restored, so the copy must be rejected.
    with pytest.raises(ClientError) as exc:
        s3.copy_object(
            CopySource={"Bucket": "Bucket", "Key": "First_Object"},
            Bucket="Bucket",
            Key="Second_Object",
        )

    assert exc.value.response["Error"]["Code"] == "ObjectNotInActiveTierError"
@mock_s3
def test_s3_copy_object_for_glacier_storage_class_restored():
    """A restored GLACIER object can be copied; the copy is STANDARD with no Restore."""
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="Bucket")

    s3.put_object(
        Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="GLACIER"
    )

    s3.create_bucket(Bucket="Bucket2")
    s3.restore_object(Bucket="Bucket", Key="First_Object", RestoreRequest={"Days": 123})

    s3.copy_object(
        CopySource={"Bucket": "Bucket", "Key": "First_Object"},
        Bucket="Bucket2",
        Key="Second_Object",
    )

    list_of_copied_objects = s3.list_objects(Bucket="Bucket2")
    # Checks that the copy of a restored Glacier object has STANDARD storage class.
    assert list_of_copied_objects["Contents"][0]["StorageClass"] == "STANDARD"

    # Checks that the metadata of the copy has no Restore property.
    assert "Restore" not in s3.head_object(Bucket="Bucket2", Key="Second_Object")
@mock_s3
def test_s3_copy_object_for_deep_archive_storage_class_restored():
    """A restored DEEP_ARCHIVE object becomes readable and copyable.

    Before restore, GetObject fails with InvalidObjectState; after restore the
    object can be read and copied, and the copy is STANDARD with no Restore
    metadata.
    """
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="Bucket")

    s3.put_object(
        Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="DEEP_ARCHIVE"
    )

    # Reading an archived, unrestored object is rejected.
    with pytest.raises(ClientError) as exc:
        s3.get_object(Bucket="Bucket", Key="First_Object")
    err = exc.value.response["Error"]
    assert err["Code"] == "InvalidObjectState"
    assert err["Message"] == "The operation is not valid for the object's storage class"
    assert err["StorageClass"] == "DEEP_ARCHIVE"

    s3.create_bucket(Bucket="Bucket2")
    s3.restore_object(Bucket="Bucket", Key="First_Object", RestoreRequest={"Days": 123})
    # After restore, reading the object succeeds.
    s3.get_object(Bucket="Bucket", Key="First_Object")

    s3.copy_object(
        CopySource={"Bucket": "Bucket", "Key": "First_Object"},
        Bucket="Bucket2",
        Key="Second_Object",
    )

    list_of_copied_objects = s3.list_objects(Bucket="Bucket2")
    # Checks that the copy of a restored archived object has STANDARD storage class.
    assert list_of_copied_objects["Contents"][0]["StorageClass"] == "STANDARD"

    # Checks that the metadata of the copy has no Restore property.
    assert "Restore" not in s3.head_object(Bucket="Bucket2", Key="Second_Object")
@mock_s3
def test_s3_get_object_from_glacier():
    """GetObject on an unrestored GLACIER object raises InvalidObjectState."""
    s3 = boto3.client("s3", region_name="us-east-1")
    bucket_name = "tests3getobjectfromglacier"
    s3.create_bucket(Bucket=bucket_name)

    s3.put_object(
        Bucket=bucket_name, Key="test.txt", Body="contents", StorageClass="GLACIER"
    )
    with pytest.raises(ClientError) as exc:
        s3.get_object(Bucket=bucket_name, Key="test.txt")
    err = exc.value.response["Error"]
    assert err["Code"] == "InvalidObjectState"
    assert err["Message"] == "The operation is not valid for the object's storage class"
    assert err["StorageClass"] == "GLACIER"