Fix Storage Class error handling (#1653)
Added a check for valid storage classes in set_key and copy_key; added unit tests for standard storage and infrequent access.
parent 8092929258
commit 6c10dc0403
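With this change, moto rejects an unrecognized StorageClass on a put or copy with a 400 InvalidStorageClass error. A minimal client-side sketch of the behavior the commit enables (bucket and key names are illustrative; it uses the same boto3 + mock_s3 pattern as the new tests below):

    import boto3
    from botocore.exceptions import ClientError
    from moto import mock_s3

    @mock_s3
    def demo_invalid_storage_class():
        s3 = boto3.client("s3")
        s3.create_bucket(Bucket="demo-bucket")
        try:
            # "NOT_A_CLASS" is not in the whitelist this commit introduces,
            # so the mocked backend now answers with a 400 error.
            s3.put_object(Bucket="demo-bucket", Key="k", Body="v",
                          StorageClass="NOT_A_CLASS")
        except ClientError as e:
            print(e.response["Error"]["Code"])  # InvalidStorageClass

    demo_invalid_storage_class()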
moto/s3/exceptions.py
@@ -168,3 +168,13 @@ class InvalidNotificationEvent(S3ClientError):
             "InvalidArgument",
             "The event is not supported for notifications",
             *args, **kwargs)
+
+
+class InvalidStorageClass(S3ClientError):
+    code = 400
+
+    def __init__(self, *args, **kwargs):
+        super(InvalidStorageClass, self).__init__(
+            "InvalidStorageClass",
+            "The storage class you specified is not valid",
+            *args, **kwargs)
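The new class follows the same shape as the other S3ClientError subclasses in this file: the first two arguments to super() become the client-facing error Code and Message, and extra keyword arguments, like the storage= used in the hunks below, are passed through. A rough standalone sketch of the guard the backend builds on top of it (the STORAGE_CLASS list is copied here from the models.py hunk that follows, for illustration only):

    from moto.s3.exceptions import InvalidStorageClass

    # Copied from the models.py hunk below, for illustration only.
    STORAGE_CLASS = ["STANDARD", "REDUCED_REDUNDANCY", "STANDARD_IA", "ONEZONE_IA"]

    def validate_storage_class(storage):
        # Mirrors the guard added to set_key/set_storage_class in this commit.
        if storage is not None and storage not in STORAGE_CLASS:
            raise InvalidStorageClass(storage=storage)

    validate_storage_class(None)           # OK: no storage class requested
    validate_storage_class("STANDARD_IA")  # OK: whitelisted
    validate_storage_class("GLACIER")      # raises: real S3 class, but not
                                           # in the whitelist at this commit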
moto/s3/models.py
@@ -15,11 +15,12 @@ from bisect import insort
 from moto.core import BaseBackend, BaseModel
 from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime
 from .exceptions import BucketAlreadyExists, MissingBucket, InvalidPart, EntityTooSmall, MissingKey, \
-    InvalidNotificationDestination, MalformedXML
+    InvalidNotificationDestination, MalformedXML, InvalidStorageClass
 from .utils import clean_key_name, _VersionedKeyStore

 UPLOAD_ID_BYTES = 43
 UPLOAD_PART_MIN_SIZE = 5242880
+STORAGE_CLASS = ["STANDARD", "REDUCED_REDUNDANCY", "STANDARD_IA", "ONEZONE_IA"]


 class FakeDeleteMarker(BaseModel):
@@ -67,8 +68,10 @@ class FakeKey(BaseModel):
     def set_tagging(self, tagging):
         self._tagging = tagging

-    def set_storage_class(self, storage_class):
-        self._storage_class = storage_class
+    def set_storage_class(self, storage):
+        if storage is not None and storage not in STORAGE_CLASS:
+            raise InvalidStorageClass(storage=storage)
+        self._storage_class = storage

     def set_acl(self, acl):
         self.acl = acl
@@ -676,6 +679,8 @@ class S3Backend(BaseBackend):

     def set_key(self, bucket_name, key_name, value, storage=None, etag=None):
         key_name = clean_key_name(key_name)
+        if storage is not None and storage not in STORAGE_CLASS:
+            raise InvalidStorageClass(storage=storage)

         bucket = self.get_bucket(bucket_name)

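The commit message also mentions copy_key, but that hunk is not part of this excerpt. Presumably it adds the same two-line guard at the top of S3Backend.copy_key; a hypothetical sketch (the signature below is abbreviated and assumed, not copied from the diff):

    # Hypothetical: only the two guard lines are taken from this commit;
    # the signature and the elided body are assumptions.
    def copy_key(self, src_bucket_name, src_key_name,
                 dest_bucket_name, dest_key_name, storage=None, **kwargs):
        if storage is not None and storage not in STORAGE_CLASS:
            raise InvalidStorageClass(storage=storage)
        ...  # existing copy logic unchanged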
tests/test_s3/test_s3_storageclass.py (new file, 106 lines)
@@ -0,0 +1,106 @@
+from __future__ import unicode_literals
+
+import boto
+import boto3
+from boto.exception import S3CreateError, S3ResponseError
+from boto.s3.lifecycle import Lifecycle, Transition, Expiration, Rule
+
+import sure  # noqa
+from botocore.exceptions import ClientError
+from datetime import datetime
+from nose.tools import assert_raises
+
+from moto import mock_s3_deprecated, mock_s3
+
+
+@mock_s3
+def test_s3_storage_class_standard():
+    s3 = boto3.client("s3")
+    s3.create_bucket(Bucket="Bucket")
+
+    # add an object to the bucket with standard storage
+    s3.put_object(Bucket="Bucket", Key="my_key", Body="my_value")
+
+    list_of_objects = s3.list_objects(Bucket="Bucket")
+
+    list_of_objects['Contents'][0]["StorageClass"].should.equal("STANDARD")
+
+
+@mock_s3
+def test_s3_storage_class_infrequent_access():
+    s3 = boto3.client("s3")
+    s3.create_bucket(Bucket="Bucket")
+
+    # add an object to the bucket with infrequent-access storage
+    s3.put_object(Bucket="Bucket", Key="my_key_infrequent", Body="my_value_infrequent", StorageClass="STANDARD_IA")
+
+    list_of_objects = s3.list_objects(Bucket="Bucket")
+
+    list_of_objects['Contents'][0]["StorageClass"].should.equal("STANDARD_IA")
+
+
+@mock_s3
+def test_s3_storage_class_copy():
+    s3 = boto3.client("s3")
+    s3.create_bucket(Bucket="Bucket")
+    s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD")
+
+    s3.create_bucket(Bucket="Bucket2")
+    # second object starts out with the default storage class
+    s3.put_object(Bucket="Bucket2", Key="Second_Object", Body="Body2")
+
+    s3.copy_object(CopySource={"Bucket": "Bucket", "Key": "First_Object"}, Bucket="Bucket2", Key="Second_Object", StorageClass="ONEZONE_IA")
+
+    list_of_copied_objects = s3.list_objects(Bucket="Bucket2")
+
+    # checks that the storage class specified on copy was applied
+    list_of_copied_objects["Contents"][0]["StorageClass"].should.equal("ONEZONE_IA")
+
+
+@mock_s3
+def test_s3_invalid_copied_storage_class():
+    s3 = boto3.client("s3")
+    s3.create_bucket(Bucket="Bucket")
+    s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD")
+
+    s3.create_bucket(Bucket="Bucket2")
+    s3.put_object(Bucket="Bucket2", Key="Second_Object", Body="Body2", StorageClass="REDUCED_REDUNDANCY")
+
+    # Try to copy an object with an invalid storage class
+    with assert_raises(ClientError) as err:
+        s3.copy_object(CopySource={"Bucket": "Bucket", "Key": "First_Object"}, Bucket="Bucket2", Key="Second_Object", StorageClass="STANDARD2")
+
+    e = err.exception
+    e.response["Error"]["Code"].should.equal("InvalidStorageClass")
+    e.response["Error"]["Message"].should.equal("The storage class you specified is not valid")
+
+
+@mock_s3
+def test_s3_invalid_storage_class():
+    s3 = boto3.client("s3")
+    s3.create_bucket(Bucket="Bucket")
+
+    # Try to add an object with an invalid storage class
+    with assert_raises(ClientError) as err:
+        s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARDD")
+
+    e = err.exception
+    e.response["Error"]["Code"].should.equal("InvalidStorageClass")
+    e.response["Error"]["Message"].should.equal("The storage class you specified is not valid")
+
+
+@mock_s3
+def test_s3_default_storage_class():
+    s3 = boto3.client("s3")
+    s3.create_bucket(Bucket="Bucket")
+
+    s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body")
+
+    list_of_objects = s3.list_objects(Bucket="Bucket")
+
+    # tests that the default storage class is still STANDARD
+    list_of_objects["Contents"][0]["StorageClass"].should.equal("STANDARD")
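To run just the new file locally, one option (assuming nose, which the tests already import from, and sure are installed) is nose's programmatic entry point:

    # Equivalent to running `nosetests tests/test_s3/test_s3_storageclass.py -v`
    # from the repository root.
    import nose

    nose.run(argv=["nosetests", "tests/test_s3/test_s3_storageclass.py", "-v"])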