Merge pull request #2353 from bkovacki/add_new_s3_storage_classes
Add S3 support for INTELLIGENT_TIERING, GLACIER and DEEP_ARCHIVE storage
commit ff5ea29c3d
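Taken together, the hunks below touch moto's S3 exceptions, the storage-class whitelist in the models module, the copy handler in the responses module, and the storage-class tests. A minimal sketch of what the change enables (the function name, bucket, and key names here are illustrative, not from the diff):

import boto3
from botocore.exceptions import ClientError
from moto import mock_s3


@mock_s3
def demo_new_storage_classes():
    s3 = boto3.client("s3")
    s3.create_bucket(Bucket="demo-bucket")

    # All three new classes are accepted on upload after this change.
    for storage_class in ("INTELLIGENT_TIERING", "GLACIER", "DEEP_ARCHIVE"):
        s3.put_object(Bucket="demo-bucket", Key=storage_class.lower(),
                      Body=b"data", StorageClass=storage_class)

    # Server-side copy of an archived object now fails, mirroring real S3,
    # where a GLACIER/DEEP_ARCHIVE object must be restored before copying.
    try:
        s3.copy_object(CopySource={"Bucket": "demo-bucket", "Key": "glacier"},
                       Bucket="demo-bucket", Key="copy-of-glacier")
    except ClientError as err:
        assert err.response["Error"]["Code"] == "ObjectNotInActiveTierError"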
@@ -60,6 +60,17 @@ class MissingKey(S3ClientError):
         )


+class ObjectNotInActiveTierError(S3ClientError):
+    code = 403
+
+    def __init__(self, key_name):
+        super(ObjectNotInActiveTierError, self).__init__(
+            "ObjectNotInActiveTierError",
+            "The source object of the COPY operation is not in the active tier and is only stored in Amazon Glacier.",
+            Key=key_name,
+        )
+
+
 class InvalidPartOrder(S3ClientError):
     code = 400

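This hunk sits in moto's S3 exceptions module. S3ClientError ultimately renders the error type, the message, and any extra keyword arguments such as Key into the XML body of the error response, and code = 403 matches the status real S3 returns for this condition. A quick check of the attributes defined above (the import path is assumed from moto's layout):

from moto.s3.exceptions import ObjectNotInActiveTierError

err = ObjectNotInActiveTierError("my-archived-key")
assert err.code == 403  # class attribute from the hunk above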
@@ -28,7 +28,8 @@ MAX_BUCKET_NAME_LENGTH = 63
 MIN_BUCKET_NAME_LENGTH = 3
 UPLOAD_ID_BYTES = 43
 UPLOAD_PART_MIN_SIZE = 5242880
-STORAGE_CLASS = ["STANDARD", "REDUCED_REDUNDANCY", "STANDARD_IA", "ONEZONE_IA"]
+STORAGE_CLASS = ["STANDARD", "REDUCED_REDUNDANCY", "STANDARD_IA", "ONEZONE_IA",
+                 "INTELLIGENT_TIERING", "GLACIER", "DEEP_ARCHIVE"]
 DEFAULT_KEY_BUFFER_SIZE = 16 * 1024 * 1024
 DEFAULT_TEXT_ENCODING = sys.getdefaultencoding()

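The STORAGE_CLASS whitelist lives in moto's S3 models module. Elsewhere in that file (not shown in this diff) moto checks uploads against it and raises InvalidStorageClass otherwise, which is what test_s3_invalid_storage_class below exercises. A sketch of that validation with approximate names (the real check sits on the key model; the function name and exception signature here are assumptions):

def validate_storage_class(storage):
    # Approximate sketch: reject anything outside the whitelist, as moto's
    # key model does; InvalidStorageClass maps to S3's 400 response.
    if storage is not None and storage not in STORAGE_CLASS:
        raise InvalidStorageClass(storage)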
@@ -17,7 +17,7 @@ from moto.s3bucket_path.utils import bucket_name_from_url as bucketpath_bucket_n
     parse_key_name as bucketpath_parse_key_name, is_delete_keys as bucketpath_is_delete_keys

 from .exceptions import BucketAlreadyExists, S3ClientError, MissingBucket, MissingKey, InvalidPartOrder, MalformedXML, \
-    MalformedACLError, InvalidNotificationARN, InvalidNotificationEvent
+    MalformedACLError, InvalidNotificationARN, InvalidNotificationEvent, ObjectNotInActiveTierError
 from .models import s3_backend, get_canned_acl, FakeGrantee, FakeGrant, FakeAcl, FakeKey, FakeTagging, FakeTagSet, \
     FakeTag
 from .utils import bucket_name_from_url, clean_key_name, metadata_from_headers, parse_region_from_url
@@ -902,7 +902,11 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             src_version_id = parse_qs(src_key_parsed.query).get(
                 'versionId', [None])[0]

-            if self.backend.get_key(src_bucket, src_key, version_id=src_version_id):
+            key = self.backend.get_key(src_bucket, src_key, version_id=src_version_id)
+
+            if key is not None:
+                if key.storage_class in ["GLACIER", "DEEP_ARCHIVE"]:
+                    raise ObjectNotInActiveTierError(key)
                 self.backend.copy_key(src_bucket, src_key, bucket_name, key_name,
                                       storage=storage_class, acl=acl, src_version_id=src_version_id)
             else:
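The copy handler now fetches the key once and refuses the copy when the object sits in an archive tier, raising the new 403 error. Against real S3 the caller would first restore the object before copying; for reference, that boto3 call looks like this (bucket and key names are illustrative; moto's mock does not simulate the asynchronous restore for these tests):

import boto3

s3 = boto3.client("s3")
# Real S3: stage a temporary copy of the archived object before COPY.
# Restores are asynchronous and can take hours for DEEP_ARCHIVE.
s3.restore_object(
    Bucket="my-bucket",
    Key="my-archived-key",
    RestoreRequest={"Days": 7, "GlacierJobParameters": {"Tier": "Standard"}},
)

The remaining hunks update the storage-class test module.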
@@ -1,16 +1,12 @@
 from __future__ import unicode_literals

-import boto
 import boto3
-from boto.exception import S3CreateError, S3ResponseError
-from boto.s3.lifecycle import Lifecycle, Transition, Expiration, Rule

 import sure  # noqa
 from botocore.exceptions import ClientError
-from datetime import datetime
 from nose.tools import assert_raises

-from moto import mock_s3_deprecated, mock_s3
+from moto import mock_s3


 @mock_s3
@@ -41,6 +37,18 @@ def test_s3_storage_class_infrequent_access():
     D['Contents'][0]["StorageClass"].should.equal("STANDARD_IA")


+@mock_s3
+def test_s3_storage_class_intelligent_tiering():
+    s3 = boto3.client("s3")
+
+    s3.create_bucket(Bucket="Bucket")
+    s3.put_object(Bucket="Bucket", Key="my_key_infrequent", Body="my_value_infrequent", StorageClass="INTELLIGENT_TIERING")
+
+    objects = s3.list_objects(Bucket="Bucket")
+
+    objects['Contents'][0]["StorageClass"].should.equal("INTELLIGENT_TIERING")
+
+
 @mock_s3
 def test_s3_storage_class_copy():
     s3 = boto3.client("s3")
@@ -90,6 +98,7 @@ def test_s3_invalid_storage_class():
     e.response["Error"]["Code"].should.equal("InvalidStorageClass")
     e.response["Error"]["Message"].should.equal("The storage class you specified is not valid")

+
 @mock_s3
 def test_s3_default_storage_class():
     s3 = boto3.client("s3")
@@ -103,4 +112,27 @@ def test_s3_default_storage_class():
     list_of_objects["Contents"][0]["StorageClass"].should.equal("STANDARD")


+@mock_s3
+def test_s3_copy_object_error_for_glacier_storage_class():
+    s3 = boto3.client("s3")
+    s3.create_bucket(Bucket="Bucket")
+
+    s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="GLACIER")
+
+    with assert_raises(ClientError) as exc:
+        s3.copy_object(CopySource={"Bucket": "Bucket", "Key": "First_Object"}, Bucket="Bucket", Key="Second_Object")
+
+    exc.exception.response["Error"]["Code"].should.equal("ObjectNotInActiveTierError")
+
+
+@mock_s3
+def test_s3_copy_object_error_for_deep_archive_storage_class():
+    s3 = boto3.client("s3")
+    s3.create_bucket(Bucket="Bucket")
+
+    s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="DEEP_ARCHIVE")
+
+    with assert_raises(ClientError) as exc:
+        s3.copy_object(CopySource={"Bucket": "Bucket", "Key": "First_Object"}, Bucket="Bucket", Key="Second_Object")
+
+    exc.exception.response["Error"]["Code"].should.equal("ObjectNotInActiveTierError")