S3 - Add Glacier IR (#5989)
parent 267509413e
commit 0f93cd24c8
@@ -53,21 +53,13 @@ from moto.s3.exceptions import (
 from .cloud_formation import cfn_to_api_encryption, is_replacement_update
 from . import notifications
 from .utils import clean_key_name, _VersionedKeyStore, undo_clean_key_name
+from .utils import ARCHIVE_STORAGE_CLASSES, STORAGE_CLASS
 from ..events.notifications import send_notification as events_send_notification
 from ..settings import get_s3_default_key_buffer_size, S3_UPLOAD_PART_MIN_SIZE

 MAX_BUCKET_NAME_LENGTH = 63
 MIN_BUCKET_NAME_LENGTH = 3
 UPLOAD_ID_BYTES = 43
-STORAGE_CLASS = [
-    "STANDARD",
-    "REDUCED_REDUNDANCY",
-    "STANDARD_IA",
-    "ONEZONE_IA",
-    "INTELLIGENT_TIERING",
-    "GLACIER",
-    "DEEP_ARCHIVE",
-]
 DEFAULT_TEXT_ENCODING = sys.getdefaultencoding()
 OWNER = "75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a"

@@ -2249,7 +2241,7 @@ class S3Backend(BaseBackend, CloudWatchMetricProvider):

         if acl is not None:
             new_key.set_acl(acl)
-        if src_key.storage_class in "GLACIER":
+        if src_key.storage_class in ARCHIVE_STORAGE_CLASSES:
             # Object copied from Glacier object should not have expiry
             new_key.set_expiry(None)

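A minimal sketch of how the copy branch above could be exercised through moto's mock_s3 decorator (presumably this hunk is in moto/s3/models.py). The bucket/key names are illustrative, and the source is restored first because the copy-source check later in this commit refuses un-restored archive-class sources.

# Sketch only: assumes moto's mock_s3 decorator and boto3; names are illustrative.
import boto3
from moto import mock_s3


@mock_s3
def test_copy_of_restored_archive_object_has_no_expiry():
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="bucket")
    s3.put_object(Bucket="bucket", Key="src", Body=b"data", StorageClass="GLACIER_IR")
    s3.restore_object(Bucket="bucket", Key="src", RestoreRequest={"Days": 1})

    s3.copy_object(
        Bucket="bucket", Key="dst", CopySource={"Bucket": "bucket", "Key": "src"}
    )

    # The copy should not inherit the source's restore expiry.
    assert "Restore" not in s3.head_object(Bucket="bucket", Key="dst")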
@@ -58,6 +58,7 @@ from .utils import (
     metadata_from_headers,
     parse_region_from_url,
     compute_checksum,
+    ARCHIVE_STORAGE_CLASSES,
 )
 from xml.dom import minidom

@@ -1311,8 +1312,8 @@ class S3Response(BaseResponse):
         if key.version_id:
             response_headers["x-amz-version-id"] = key.version_id

-        if key.storage_class == "GLACIER":
-            raise InvalidObjectState(storage_class="GLACIER")
+        if key.storage_class in ARCHIVE_STORAGE_CLASSES:
+            raise InvalidObjectState(storage_class=key.storage_class)
         if if_unmodified_since:
            if_unmodified_since = str_to_rfc_1123_datetime(if_unmodified_since)
             if key.last_modified.replace(microsecond=0) > if_unmodified_since:
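After this hunk, GetObject on any archive-class key (now including GLACIER_IR) is rejected with InvalidObjectState, and the error reports the key's own storage class instead of a hard-coded "GLACIER". A hedged client-side sketch, assuming moto's mock_s3, boto3 and pytest; names are illustrative.

# Sketch only: exercises the changed GetObject branch through moto's mock.
import boto3
import pytest
from botocore.exceptions import ClientError
from moto import mock_s3


@mock_s3
def test_get_archived_object_reports_its_storage_class():
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="bucket")
    s3.put_object(Bucket="bucket", Key="k", Body=b"data", StorageClass="DEEP_ARCHIVE")

    with pytest.raises(ClientError) as exc:
        s3.get_object(Bucket="bucket", Key="k")

    err = exc.value.response["Error"]
    assert err["Code"] == "InvalidObjectState"
    # The error now carries the key's own storage class, not a hard-coded "GLACIER".
    assert err["StorageClass"] == "DEEP_ARCHIVE"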
@@ -1515,7 +1516,7 @@ class S3Response(BaseResponse):
             )

         if key is not None:
-            if key.storage_class in ["GLACIER", "DEEP_ARCHIVE"]:
+            if key.storage_class in ARCHIVE_STORAGE_CLASSES:
                 if key.response_dict.get(
                     "x-amz-restore"
                 ) is None or 'ongoing-request="true"' in key.response_dict.get(
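This appears to be the copy-source check in the PUT path; with the change, an un-restored GLACIER_IR source is treated like GLACIER or DEEP_ARCHIVE. A sketch under that assumption: the error actually raised is below the visible context, so only a generic ClientError is asserted.

# Sketch only: assumes this branch rejects copies from un-restored archive sources.
import boto3
import pytest
from botocore.exceptions import ClientError
from moto import mock_s3


@mock_s3
def test_copy_of_unrestored_glacier_ir_source_is_refused():
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="bucket")
    s3.put_object(Bucket="bucket", Key="src", Body=b"x", StorageClass="GLACIER_IR")

    # No restore_object() call, so x-amz-restore is absent on the source and the
    # branch above applies.
    with pytest.raises(ClientError):
        s3.copy_object(
            Bucket="bucket", Key="dst", CopySource={"Bucket": "bucket", "Key": "src"}
        )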
@@ -2090,7 +2091,7 @@ class S3Response(BaseResponse):
             es = minidom.parseString(body).getElementsByTagName("Days")
             days = es[0].childNodes[0].wholeText
             key = self.backend.get_object(bucket_name, key_name)
-            if key.storage_class not in ["GLACIER", "DEEP_ARCHIVE"]:
+            if key.storage_class not in ARCHIVE_STORAGE_CLASSES:
                 raise InvalidObjectState(storage_class=key.storage_class)
             r = 202
             if key.expiry_date is not None:
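With GLACIER_IR in ARCHIVE_STORAGE_CLASSES, moto's RestoreObject now accepts GLACIER_IR keys, while non-archive keys are still rejected with InvalidObjectState carrying their storage class. A hedged sketch, assuming mock_s3; names are illustrative.

# Sketch only: exercises the changed RestoreObject branch through moto's mock.
import boto3
import pytest
from botocore.exceptions import ClientError
from moto import mock_s3


@mock_s3
def test_restore_accepts_glacier_ir_and_rejects_standard():
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="bucket")
    s3.put_object(Bucket="bucket", Key="cold", Body=b"x", StorageClass="GLACIER_IR")
    s3.put_object(Bucket="bucket", Key="hot", Body=b"x")  # STANDARD

    # Accepted: GLACIER_IR is now part of ARCHIVE_STORAGE_CLASSES.
    s3.restore_object(Bucket="bucket", Key="cold", RestoreRequest={"Days": 1})

    # Still rejected: a non-archive key cannot be restored.
    with pytest.raises(ClientError) as exc:
        s3.restore_object(Bucket="bucket", Key="hot", RestoreRequest={"Days": 1})
    assert exc.value.response["Error"]["Code"] == "InvalidObjectState"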
@@ -24,6 +24,18 @@ user_settable_fields = {
     "content-disposition",
     "x-robots-tag",
 }
+ARCHIVE_STORAGE_CLASSES = [
+    "GLACIER",
+    "DEEP_ARCHIVE",
+    "GLACIER_IR",
+]
+STORAGE_CLASS = [
+    "STANDARD",
+    "REDUCED_REDUNDANCY",
+    "STANDARD_IA",
+    "ONEZONE_IA",
+    "INTELLIGENT_TIERING",
+] + ARCHIVE_STORAGE_CLASSES


 def bucket_name_from_url(url):
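The storage-class constants now live in the utils module, and STORAGE_CLASS is built on top of ARCHIVE_STORAGE_CLASSES, so GLACIER_IR becomes a valid storage class throughout moto's S3 backend. A hedged round-trip sketch, assuming mock_s3 and that HeadObject echoes the storage class; names are illustrative.

# Sketch only: uploading with the new storage class should be accepted.
import boto3
from moto import mock_s3


@mock_s3
def test_glacier_ir_is_a_valid_storage_class():
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="bucket")
    s3.put_object(Bucket="bucket", Key="k", Body=b"x", StorageClass="GLACIER_IR")

    # HeadObject should echo the new storage class back.
    assert s3.head_object(Bucket="bucket", Key="k")["StorageClass"] == "GLACIER_IR"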