Merge pull request #2353 from bkovacki/add_new_s3_storage_classes

Add S3 support for INTELLIGENT_TIERING, GLACIER and DEEP_ARCHIVE storage
Steve Pulec 2019-08-05 21:32:21 -05:00 committed by GitHub
commit ff5ea29c3d
4 changed files with 56 additions and 8 deletions

moto/s3/exceptions.py

@@ -60,6 +60,17 @@ class MissingKey(S3ClientError):
         )


+class ObjectNotInActiveTierError(S3ClientError):
+    code = 403
+
+    def __init__(self, key_name):
+        super(ObjectNotInActiveTierError, self).__init__(
+            "ObjectNotInActiveTierError",
+            "The source object of the COPY operation is not in the active tier and is only stored in Amazon Glacier.",
+            Key=key_name,
+        )
+
+
 class InvalidPartOrder(S3ClientError):
     code = 400
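For reference, a minimal sketch of how this new exception surfaces to a boto3 caller under mock_s3 once the copy handler raises it. The bucket and key names are illustrative and not part of this change, and it assumes a moto build that includes this PR:

import boto3
from botocore.exceptions import ClientError
from moto import mock_s3


@mock_s3
def inspect_copy_error():
    # Illustrative names only; assumes the ObjectNotInActiveTierError change is present.
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="archive-bucket")
    s3.put_object(Bucket="archive-bucket", Key="cold.bin", Body=b"data", StorageClass="GLACIER")

    try:
        s3.copy_object(CopySource={"Bucket": "archive-bucket", "Key": "cold.bin"},
                       Bucket="archive-bucket", Key="warm.bin")
    except ClientError as err:
        # `code = 403` above becomes the HTTP status; the first string passed to
        # S3ClientError becomes the Code element of the XML error body.
        assert err.response["ResponseMetadata"]["HTTPStatusCode"] == 403
        assert err.response["Error"]["Code"] == "ObjectNotInActiveTierError"


inspect_copy_error()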

moto/s3/models.py

@@ -28,7 +28,8 @@ MAX_BUCKET_NAME_LENGTH = 63
 MIN_BUCKET_NAME_LENGTH = 3
 UPLOAD_ID_BYTES = 43
 UPLOAD_PART_MIN_SIZE = 5242880
-STORAGE_CLASS = ["STANDARD", "REDUCED_REDUNDANCY", "STANDARD_IA", "ONEZONE_IA"]
+STORAGE_CLASS = ["STANDARD", "REDUCED_REDUNDANCY", "STANDARD_IA", "ONEZONE_IA",
+                 "INTELLIGENT_TIERING", "GLACIER", "DEEP_ARCHIVE"]
 DEFAULT_KEY_BUFFER_SIZE = 16 * 1024 * 1024
 DEFAULT_TEXT_ENCODING = sys.getdefaultencoding()
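The request handler validates the x-amz-storage-class header against this STORAGE_CLASS list, so widening it is what lets the three new classes through on PUT while anything else still fails with InvalidStorageClass (the error code asserted by the existing test further down). A hedged sketch of that behaviour, with made-up bucket and key names:

import boto3
from botocore.exceptions import ClientError
from moto import mock_s3


@mock_s3
def exercise_storage_class_validation():
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="demo-bucket")

    # The three newly whitelisted classes are now accepted on PUT.
    for storage_class in ("INTELLIGENT_TIERING", "GLACIER", "DEEP_ARCHIVE"):
        s3.put_object(Bucket="demo-bucket", Key=storage_class.lower(), Body=b"x",
                      StorageClass=storage_class)

    # Anything outside STORAGE_CLASS is still rejected by the mock backend.
    try:
        s3.put_object(Bucket="demo-bucket", Key="bad", Body=b"x", StorageClass="STANDARDD")
    except ClientError as err:
        assert err.response["Error"]["Code"] == "InvalidStorageClass"


exercise_storage_class_validation()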

moto/s3/responses.py

@@ -17,7 +17,7 @@ from moto.s3bucket_path.utils import bucket_name_from_url as bucketpath_bucket_n
     parse_key_name as bucketpath_parse_key_name, is_delete_keys as bucketpath_is_delete_keys
 from .exceptions import BucketAlreadyExists, S3ClientError, MissingBucket, MissingKey, InvalidPartOrder, MalformedXML, \
-    MalformedACLError, InvalidNotificationARN, InvalidNotificationEvent
+    MalformedACLError, InvalidNotificationARN, InvalidNotificationEvent, ObjectNotInActiveTierError
 from .models import s3_backend, get_canned_acl, FakeGrantee, FakeGrant, FakeAcl, FakeKey, FakeTagging, FakeTagSet, \
     FakeTag
 from .utils import bucket_name_from_url, clean_key_name, metadata_from_headers, parse_region_from_url
@@ -902,7 +902,11 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             src_version_id = parse_qs(src_key_parsed.query).get(
                 'versionId', [None])[0]

-            if self.backend.get_key(src_bucket, src_key, version_id=src_version_id):
+            key = self.backend.get_key(src_bucket, src_key, version_id=src_version_id)
+
+            if key is not None:
+                if key.storage_class in ["GLACIER", "DEEP_ARCHIVE"]:
+                    raise ObjectNotInActiveTierError(key)
                 self.backend.copy_key(src_bucket, src_key, bucket_name, key_name,
                                       storage=storage_class, acl=acl, src_version_id=src_version_id)
             else:
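The guard only applies to the two archive tiers; a COPY whose source is in any other class, including the new INTELLIGENT_TIERING, still reaches backend.copy_key as before. A small illustrative sketch of that contrast under mock_s3 (bucket and key names are made up, not from this PR):

import boto3
from botocore.exceptions import ClientError
from moto import mock_s3


@mock_s3
def copy_behaviour_by_tier():
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="tier-demo")

    # INTELLIGENT_TIERING is not an archive tier, so the copy goes through copy_key.
    s3.put_object(Bucket="tier-demo", Key="hot", Body=b"x", StorageClass="INTELLIGENT_TIERING")
    s3.copy_object(CopySource={"Bucket": "tier-demo", "Key": "hot"},
                   Bucket="tier-demo", Key="hot-copy")

    # DEEP_ARCHIVE is an archive tier, so the same call is rejected with a 403.
    s3.put_object(Bucket="tier-demo", Key="frozen", Body=b"x", StorageClass="DEEP_ARCHIVE")
    try:
        s3.copy_object(CopySource={"Bucket": "tier-demo", "Key": "frozen"},
                       Bucket="tier-demo", Key="frozen-copy")
    except ClientError as err:
        assert err.response["Error"]["Code"] == "ObjectNotInActiveTierError"


copy_behaviour_by_tier()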

tests/test_s3/test_s3_storage_class.py

@ -1,16 +1,12 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import boto
import boto3 import boto3
from boto.exception import S3CreateError, S3ResponseError
from boto.s3.lifecycle import Lifecycle, Transition, Expiration, Rule
import sure # noqa import sure # noqa
from botocore.exceptions import ClientError from botocore.exceptions import ClientError
from datetime import datetime
from nose.tools import assert_raises from nose.tools import assert_raises
from moto import mock_s3_deprecated, mock_s3 from moto import mock_s3
@mock_s3 @mock_s3
@@ -41,6 +37,18 @@ def test_s3_storage_class_infrequent_access():
     D['Contents'][0]["StorageClass"].should.equal("STANDARD_IA")


+@mock_s3
+def test_s3_storage_class_intelligent_tiering():
+    s3 = boto3.client("s3")
+
+    s3.create_bucket(Bucket="Bucket")
+    s3.put_object(Bucket="Bucket", Key="my_key_infrequent", Body="my_value_infrequent", StorageClass="INTELLIGENT_TIERING")
+
+    objects = s3.list_objects(Bucket="Bucket")
+
+    objects['Contents'][0]["StorageClass"].should.equal("INTELLIGENT_TIERING")
+
+
 @mock_s3
 def test_s3_storage_class_copy():
     s3 = boto3.client("s3")
@@ -90,6 +98,7 @@ def test_s3_invalid_storage_class():
     e.response["Error"]["Code"].should.equal("InvalidStorageClass")
     e.response["Error"]["Message"].should.equal("The storage class you specified is not valid")

+
 @mock_s3
 def test_s3_default_storage_class():
     s3 = boto3.client("s3")
@@ -103,4 +112,27 @@ def test_s3_default_storage_class():

     list_of_objects["Contents"][0]["StorageClass"].should.equal("STANDARD")

+
+@mock_s3
+def test_s3_copy_object_error_for_glacier_storage_class():
+    s3 = boto3.client("s3")
+    s3.create_bucket(Bucket="Bucket")
+    s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="GLACIER")
+
+    with assert_raises(ClientError) as exc:
+        s3.copy_object(CopySource={"Bucket": "Bucket", "Key": "First_Object"}, Bucket="Bucket", Key="Second_Object")
+
+    exc.exception.response["Error"]["Code"].should.equal("ObjectNotInActiveTierError")
+
+
+@mock_s3
+def test_s3_copy_object_error_for_deep_archive_storage_class():
+    s3 = boto3.client("s3")
+    s3.create_bucket(Bucket="Bucket")
+    s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="DEEP_ARCHIVE")
+
+    with assert_raises(ClientError) as exc:
+        s3.copy_object(CopySource={"Bucket": "Bucket", "Key": "First_Object"}, Bucket="Bucket", Key="Second_Object")
+
+    exc.exception.response["Error"]["Code"].should.equal("ObjectNotInActiveTierError")