Feature S3 Object Lock (#4174)

Cristopher Pinzón 2021-08-17 00:16:59 -05:00 committed by GitHub
parent 796ca1a814
commit 1d90946072
4 changed files with 449 additions and 2 deletions


@ -491,6 +491,50 @@ class InvalidContinuationToken(S3ClientError):
)
class LockNotEnabled(S3ClientError):
    code = 400

    def __init__(self):
        super(LockNotEnabled, self).__init__(
            "InvalidRequest", "Bucket is missing ObjectLockConfiguration"
        )


class AccessDeniedByLock(S3ClientError):
    code = 400

    def __init__(self):
        super(AccessDeniedByLock, self).__init__("AccessDenied", "Access Denied")


class InvalidContentMD5(S3ClientError):
    code = 400

    def __init__(self):
        super(InvalidContentMD5, self).__init__(
            "InvalidContentMD5", "Content MD5 header is invalid"
        )


class BucketNeedsToBeNew(S3ClientError):
    code = 400

    def __init__(self):
        super(BucketNeedsToBeNew, self).__init__(
            "InvalidBucket", "Bucket needs to be empty"
        )


class BucketMustHaveLockeEnabled(S3ClientError):
    code = 400

    def __init__(self):
        super(BucketMustHaveLockeEnabled, self).__init__(
            "InvalidBucketState",
            "Object Lock configuration cannot be enabled on existing buckets",
        )


class InvalidFilterRuleName(InvalidArgumentError):
    code = 400
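For orientation, a minimal sketch (not part of the commit) of how one of these error codes is expected to reach a boto3 caller once the response handlers further down are wired up; the bucket name and region are illustrative.

import boto3
import botocore
from moto import mock_s3

with mock_s3():
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="plain-bucket")  # created without ObjectLockEnabledForBucket
    try:
        s3.put_object_lock_configuration(
            Bucket="plain-bucket",
            ObjectLockConfiguration={
                "ObjectLockEnabled": "Enabled",
                "Rule": {"DefaultRetention": {"Mode": "COMPLIANCE", "Days": 1}},
            },
        )
    except botocore.exceptions.ClientError as err:
        # BucketMustHaveLockeEnabled maps to this error code
        print(err.response["Error"]["Code"])  # "InvalidBucketState"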


@ -25,7 +25,9 @@ from moto.core.utils import iso_8601_datetime_without_milliseconds_s3, rfc_1123_
from moto.cloudwatch.models import MetricDatum
from moto.utilities.tagging_service import TaggingService
from .exceptions import (
AccessDeniedByLock,
BucketAlreadyExists,
BucketNeedsToBeNew,
MissingBucket,
InvalidBucketName,
InvalidPart,
@ -100,6 +102,9 @@ class FakeKey(BaseModel):
encryption=None,
kms_key_id=None,
bucket_key_enabled=None,
lock_mode=None,
lock_legal_status="OFF",
lock_until=None,
):
self.name = name
self.last_modified = datetime.datetime.utcnow()
@ -125,6 +130,10 @@ class FakeKey(BaseModel):
self.kms_key_id = kms_key_id
self.bucket_key_enabled = bucket_key_enabled
self.lock_mode = lock_mode
self.lock_legal_status = lock_legal_status
self.lock_until = lock_until
@property
def version_id(self):
return self._version_id
@ -291,6 +300,27 @@ class FakeKey(BaseModel):
self.value = state["value"]
self.lock = threading.Lock()
    @property
    def is_locked(self):
        if self.lock_legal_status == "ON":
            return True

        if self.lock_mode == "COMPLIANCE":
            now = datetime.datetime.utcnow()
            try:
                until = datetime.datetime.strptime(
                    self.lock_until, "%Y-%m-%dT%H:%M:%SZ"
                )
            except ValueError:
                until = datetime.datetime.strptime(
                    self.lock_until, "%Y-%m-%dT%H:%M:%S.%fZ"
                )

            if until > now:
                return True

        return False
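A side note on the two strptime calls: the retain-until date can arrive with or without fractional seconds, so the property tries the plain ISO-8601 form first and falls back to the %f variant. A tiny standalone illustration, with made-up timestamps:

import datetime

for stamp in ("2022-01-01T00:00:00Z", "2022-01-01T00:00:00.123456Z"):
    try:
        until = datetime.datetime.strptime(stamp, "%Y-%m-%dT%H:%M:%SZ")
    except ValueError:
        until = datetime.datetime.strptime(stamp, "%Y-%m-%dT%H:%M:%S.%fZ")
    # the key stays locked while the parsed date is still in the future
    print(until > datetime.datetime.utcnow())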
class FakeMultipart(BaseModel):
def __init__(self, key_name, metadata):
@ -534,7 +564,7 @@ class LifecycleAndFilter(BaseModel):
for key, value in self.tags.items():
data.append(
{"type": "LifecycleTagPredicate", "tag": {"key": key, "value": value}}
{"type": "LifecycleTagPredicate", "tag": {"key": key, "value": value},}
)
return data
@ -789,6 +819,10 @@ class FakeBucket(CloudFormationModel):
self.creation_date = datetime.datetime.now(tz=pytz.utc)
self.public_access_block = None
self.encryption = None
self.object_lock_enabled = False
self.default_lock_mode = ""
self.default_lock_days = 0
self.default_lock_years = 0
@property
def location(self):
@ -1242,6 +1276,22 @@ class FakeBucket(CloudFormationModel):
return config_dict
    @property
    def has_default_lock(self):
        if not self.object_lock_enabled:
            return False

        if self.default_lock_mode:
            return True

        return False

    def default_retention(self):
        now = datetime.datetime.utcnow()
        now += datetime.timedelta(self.default_lock_days)
        now += datetime.timedelta(self.default_lock_years * 365)
        return now.strftime("%Y-%m-%dT%H:%M:%SZ")
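Worth noting: datetime.timedelta takes days as its first positional argument, so default_lock_years is approximated as 365-day blocks. A small sketch of the arithmetic, with the bucket attributes stubbed in by hand:

import datetime

default_lock_days = 1
default_lock_years = 0

retain_until = datetime.datetime.utcnow()
retain_until += datetime.timedelta(default_lock_days)         # timedelta(days=1)
retain_until += datetime.timedelta(default_lock_years * 365)  # years as 365-day blocks
print(retain_until.strftime("%Y-%m-%dT%H:%M:%SZ"))            # roughly one day from now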
class S3Backend(BaseBackend):
def __init__(self):
@ -1290,6 +1340,7 @@ class S3Backend(BaseBackend):
if not MIN_BUCKET_NAME_LENGTH <= len(bucket_name) <= MAX_BUCKET_NAME_LENGTH:
raise InvalidBucketName()
new_bucket = FakeBucket(name=bucket_name, region_name=region_name)
self.buckets[bucket_name] = new_bucket
return new_bucket
@ -1418,6 +1469,9 @@ class S3Backend(BaseBackend):
encryption=None,
kms_key_id=None,
bucket_key_enabled=None,
lock_mode=None,
lock_legal_status="OFF",
lock_until=None,
):
key_name = clean_key_name(key_name)
if storage is not None and storage not in STORAGE_CLASS:
@ -1436,6 +1490,9 @@ class S3Backend(BaseBackend):
encryption=encryption,
kms_key_id=kms_key_id,
bucket_key_enabled=bucket_key_enabled,
lock_mode=lock_mode,
lock_legal_status=lock_legal_status,
lock_until=lock_until,
)
keys = [
@ -1500,6 +1557,20 @@ class S3Backend(BaseBackend):
bucket.arn, [{"Key": key, "Value": value} for key, value in tags.items()],
)
    def put_bucket_lock(self, bucket_name, lock_enabled, mode, days, years):
        bucket = self.get_bucket(bucket_name)

        if bucket.keys.item_size() > 0:
            raise BucketNeedsToBeNew

        if lock_enabled:
            bucket.object_lock_enabled = True
            bucket.versioning_status = "Enabled"

        bucket.default_lock_mode = mode
        bucket.default_lock_days = days
        bucket.default_lock_years = years
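A sketch of the round trip this backend method enables once the response handlers further down are in place. It uses Years for the default retention (Days and Years are alternatives; supplying both is rejected as MalformedXML by the parser below), and the bucket name and mode are illustrative.

import boto3
from moto import mock_s3

with mock_s3():
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="lock-years-bucket", ObjectLockEnabledForBucket=True)
    s3.put_object_lock_configuration(
        Bucket="lock-years-bucket",
        ObjectLockConfiguration={
            "ObjectLockEnabled": "Enabled",
            "Rule": {"DefaultRetention": {"Mode": "GOVERNANCE", "Years": 1}},
        },
    )
    config = s3.get_object_lock_configuration(Bucket="lock-years-bucket")
    # default_lock_years is stored on the bucket and echoed back by the GET handler
    print(config["ObjectLockConfiguration"]["Rule"]["DefaultRetention"]["Years"])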
def delete_bucket_tagging(self, bucket_name):
bucket = self.get_bucket(bucket_name)
self.tagger.delete_all_tags_for_resource(bucket.arn)
@ -1697,6 +1768,10 @@ class S3Backend(BaseBackend):
            response_meta["delete-marker"] = "false"

            for key in bucket.keys.getlist(key_name):
                if str(key.version_id) == str(version_id):
                    if hasattr(key, "is_locked") and key.is_locked:
                        raise AccessDeniedByLock

                    if type(key) is FakeDeleteMarker:
                        response_meta["delete-marker"] = "true"
                    break


@ -36,7 +36,9 @@ from moto.s3bucket_path.utils import (
from .exceptions import (
BucketAlreadyExists,
BucketMustHaveLockeEnabled,
DuplicateTagKeys,
InvalidContentMD5,
InvalidContinuationToken,
S3ClientError,
MissingBucket,
@ -52,6 +54,7 @@ from .exceptions import (
NoSystemTags,
PreconditionFailed,
InvalidRange,
LockNotEnabled,
)
from .models import (
s3_backend,
@ -344,6 +347,17 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
self._set_action("BUCKET", "GET", querystring)
self._authenticate_and_authorize_s3_action()
if "object-lock" in querystring:
bucket = self.backend.get_bucket(bucket_name)
template = self.response_template(S3_BUCKET_LOCK_CONFIGURATION)
return template.render(
lock_enabled=bucket.object_lock_enabled,
mode=bucket.default_lock_mode,
days=bucket.default_lock_days,
years=bucket.default_lock_years,
)
if "uploads" in querystring:
for unsup in ("delimiter", "max-uploads"):
if unsup in querystring:
@ -676,6 +690,22 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
self._set_action("BUCKET", "PUT", querystring)
self._authenticate_and_authorize_s3_action()
if "object-lock" in querystring:
body_decoded = body.decode()
config = self._lock_config_from_xml(body_decoded)
if not self.backend.get_bucket(bucket_name).object_lock_enabled:
raise BucketMustHaveLockeEnabled
self.backend.put_bucket_lock(
bucket_name,
config["enabled"],
config["mode"],
config["days"],
config["years"],
)
return ""
if "versioning" in querystring:
ver = re.search("<Status>([A-Za-z]+)</Status>", body.decode())
if ver:
@ -804,6 +834,10 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
bucket_name, self._acl_from_headers(request.headers)
)
        if request.headers.get("x-amz-bucket-object-lock-enabled", "") == "True":
            new_bucket.object_lock_enabled = True
            new_bucket.versioning_status = "Enabled"
template = self.response_template(S3_BUCKET_CREATE_RESPONSE)
return 200, {}, template.render(bucket=new_bucket)
@ -1002,7 +1036,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
if isinstance(request, AWSPreparedRequest) and "s3-control" in request.url:
response = self._control_response(request, full_url, headers)
else:
response = self._key_response(request, full_url, headers)
response = self._key_response(request, full_url, self.headers)
except S3ClientError as s3error:
response = s3error.code, {}, s3error.description
@ -1301,11 +1335,49 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
if bucket_key_enabled is not None:
bucket_key_enabled = str(bucket_key_enabled).lower()
        bucket = self.backend.get_bucket(bucket_name)
        lock_enabled = bucket.object_lock_enabled

        lock_mode = request.headers.get("x-amz-object-lock-mode", None)
        lock_until = request.headers.get("x-amz-object-lock-retain-until-date", None)
        legal_hold = request.headers.get("x-amz-object-lock-legal-hold", "OFF")

        if lock_mode or lock_until or legal_hold == "ON":
            if not request.headers.get("Content-Md5"):
                raise InvalidContentMD5
            if not lock_enabled:
                raise LockNotEnabled

        elif lock_enabled and bucket.has_default_lock:
            if not request.headers.get("Content-Md5"):
                raise InvalidContentMD5
            lock_until = bucket.default_retention()
            lock_mode = bucket.default_lock_mode
acl = self._acl_from_headers(request.headers)
if acl is None:
acl = self.backend.get_bucket(bucket_name).acl
tagging = self._tagging_from_headers(request.headers)
if "retention" in query:
if not lock_enabled:
raise LockNotEnabled
version_id = query.get("VersionId")
key = self.backend.get_object(bucket_name, key_name, version_id=version_id)
retention = self._mode_until_from_xml(body)
key.lock_mode = retention[0]
key.lock_until = retention[1]
return 200, response_headers, ""
if "legal-hold" in query:
if not lock_enabled:
raise LockNotEnabled
version_id = query.get("VersionId")
key = self.backend.get_object(bucket_name, key_name, version_id=version_id)
legal_hold_status = self._legal_hold_status_from_xml(body)
key.lock_legal_status = legal_hold_status
return 200, response_headers, ""
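The two branches above also give LockNotEnabled a concrete shape: asking moto to set retention on a bucket that never had Object Lock enabled comes back as InvalidRequest. A rough sketch, with illustrative bucket and key names:

import datetime

import boto3
import botocore
from moto import mock_s3

with mock_s3():
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="no-lock-bucket")  # Object Lock never enabled
    s3.put_object(Bucket="no-lock-bucket", Key="file.txt", Body=b"test")
    try:
        s3.put_object_retention(
            Bucket="no-lock-bucket",
            Key="file.txt",
            Retention={
                "Mode": "COMPLIANCE",
                "RetainUntilDate": datetime.datetime.utcnow()
                + datetime.timedelta(hours=1),
            },
        )
    except botocore.exceptions.ClientError as err:
        # LockNotEnabled -> "InvalidRequest" / "Bucket is missing ObjectLockConfiguration"
        print(err.response["Error"]["Message"])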
if "acl" in query:
key = self.backend.get_object(bucket_name, key_name)
# TODO: Support the XML-based ACL format
@ -1384,6 +1456,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
# Streaming request, more data
new_key = self.backend.append_to_key(bucket_name, key_name, body)
else:
# Initial data
new_key = self.backend.set_object(
bucket_name,
@ -1393,7 +1466,11 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
encryption=encryption,
kms_key_id=kms_key_id,
bucket_key_enabled=bucket_key_enabled,
lock_mode=lock_mode,
lock_legal_status=legal_hold,
lock_until=lock_until,
)
request.streaming = True
metadata = metadata_from_headers(request.headers)
metadata.update(metadata_from_headers(query))
@ -1444,6 +1521,25 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
else:
return 404, response_headers, ""
    def _lock_config_from_xml(self, xml):
        parsed_xml = xmltodict.parse(xml)
        enabled = (
            parsed_xml["ObjectLockConfiguration"]["ObjectLockEnabled"] == "Enabled"
        )

        default_retention = parsed_xml["ObjectLockConfiguration"]["Rule"][
            "DefaultRetention"
        ]

        mode = default_retention["Mode"]
        days = int(default_retention["Days"]) if "Days" in default_retention else 0
        years = int(default_retention["Years"]) if "Years" in default_retention else 0

        if days and years:
            raise MalformedXML

        return {"enabled": enabled, "mode": mode, "days": days, "years": years}
def _acl_from_xml(self, xml):
parsed_xml = xmltodict.parse(xml)
if not parsed_xml.get("AccessControlPolicy"):
@ -1591,6 +1687,17 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
return [parsed_xml["CORSConfiguration"]["CORSRule"]]
    def _mode_until_from_xml(self, xml):
        parsed_xml = xmltodict.parse(xml)
        return (
            parsed_xml["Retention"]["Mode"],
            parsed_xml["Retention"]["RetainUntilDate"],
        )

    def _legal_hold_status_from_xml(self, xml):
        parsed_xml = xmltodict.parse(xml)
        return parsed_xml["LegalHold"]["Status"]
def _encryption_config_from_xml(self, xml):
parsed_xml = xmltodict.parse(xml)
@ -2505,3 +2612,20 @@ S3_PUBLIC_ACCESS_BLOCK_CONFIGURATION = """
<RestrictPublicBuckets>{{public_block_config.restrict_public_buckets}}</RestrictPublicBuckets>
</PublicAccessBlockConfiguration>
"""
S3_BUCKET_LOCK_CONFIGURATION = """
<ObjectLockConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
    {% if lock_enabled %}
    <ObjectLockEnabled>Enabled</ObjectLockEnabled>
    {% else %}
    <ObjectLockEnabled>Disabled</ObjectLockEnabled>
    {% endif %}
    <Rule>
        <DefaultRetention>
            <Mode>{{mode}}</Mode>
            <Days>{{days}}</Days>
            <Years>{{years}}</Years>
        </DefaultRetention>
    </Rule>
</ObjectLockConfiguration>
"""


@ -0,0 +1,204 @@
import time
import datetime

import boto3
import botocore
import sure  # noqa
from botocore.config import Config

from moto import mock_s3
from moto.s3.responses import DEFAULT_REGION_NAME
@mock_s3
def test_locked_object():
    s3 = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))

    bucket_name = "locked-bucket-test"
    key_name = "file.txt"
    seconds_lock = 2

    s3.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)

    until = datetime.datetime.utcnow() + datetime.timedelta(0, seconds_lock)
    s3.put_object(
        Bucket=bucket_name,
        Body=b"test",
        Key=key_name,
        ObjectLockMode="COMPLIANCE",
        ObjectLockRetainUntilDate=until,
    )

    versions_response = s3.list_object_versions(Bucket=bucket_name)
    version_id = versions_response["Versions"][0]["VersionId"]

    deleted = False
    try:
        s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
        deleted = True
    except botocore.client.ClientError as e:
        e.response["Error"]["Code"].should.equal("AccessDenied")

    deleted.should.equal(False)

    # cleaning
    time.sleep(seconds_lock)
    s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
    s3.delete_bucket(Bucket=bucket_name)


@mock_s3
def test_fail_locked_object():
    bucket_name = "locked-bucket2"
    key_name = "file.txt"
    seconds_lock = 2

    s3 = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))
    s3.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=False)

    until = datetime.datetime.utcnow() + datetime.timedelta(0, seconds_lock)
    failed = False
    try:
        s3.put_object(
            Bucket=bucket_name,
            Body=b"test",
            Key=key_name,
            ObjectLockMode="COMPLIANCE",
            ObjectLockRetainUntilDate=until,
        )
    except botocore.client.ClientError as e:
        e.response["Error"]["Code"].should.equal("InvalidRequest")
        failed = True

    failed.should.equal(True)
    s3.delete_bucket(Bucket=bucket_name)


@mock_s3
def test_put_object_lock():
    s3 = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))

    bucket_name = "put-lock-bucket-test"
    key_name = "file.txt"
    seconds_lock = 2

    s3.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)

    s3.put_object(
        Bucket=bucket_name, Body=b"test", Key=key_name,
    )

    versions_response = s3.list_object_versions(Bucket=bucket_name)
    version_id = versions_response["Versions"][0]["VersionId"]
    until = datetime.datetime.utcnow() + datetime.timedelta(0, seconds_lock)

    s3.put_object_retention(
        Bucket=bucket_name,
        Key=key_name,
        VersionId=version_id,
        Retention={"Mode": "COMPLIANCE", "RetainUntilDate": until},
    )

    deleted = False
    try:
        s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
        deleted = True
    except botocore.client.ClientError as e:
        e.response["Error"]["Code"].should.equal("AccessDenied")

    deleted.should.equal(False)

    # cleaning
    time.sleep(seconds_lock)
    s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
    s3.delete_bucket(Bucket=bucket_name)


@mock_s3
def test_put_object_legal_hold():
    s3 = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))

    bucket_name = "put-legal-bucket"
    key_name = "file.txt"

    s3.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)

    s3.put_object(
        Bucket=bucket_name, Body=b"test", Key=key_name,
    )

    versions_response = s3.list_object_versions(Bucket=bucket_name)
    version_id = versions_response["Versions"][0]["VersionId"]

    s3.put_object_legal_hold(
        Bucket=bucket_name,
        Key=key_name,
        VersionId=version_id,
        LegalHold={"Status": "ON"},
    )

    deleted = False
    try:
        s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
        deleted = True
    except botocore.client.ClientError as e:
        e.response["Error"]["Code"].should.equal("AccessDenied")

    deleted.should.equal(False)

    # cleaning
    s3.put_object_legal_hold(
        Bucket=bucket_name,
        Key=key_name,
        VersionId=version_id,
        LegalHold={"Status": "OFF"},
    )
    s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
    s3.delete_bucket(Bucket=bucket_name)


@mock_s3
def test_put_default_lock():
    # do not run this test in aws, it will block the deletion for a whole day
    s3 = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))

    bucket_name = "put-default-lock-bucket"
    key_name = "file.txt"

    days = 1
    mode = "COMPLIANCE"
    enabled = "Enabled"

    s3.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)

    s3.put_object_lock_configuration(
        Bucket=bucket_name,
        ObjectLockConfiguration={
            "ObjectLockEnabled": enabled,
            "Rule": {"DefaultRetention": {"Mode": mode, "Days": days}},
        },
    )

    s3.put_object(
        Bucket=bucket_name, Body=b"test", Key=key_name,
    )

    deleted = False
    versions_response = s3.list_object_versions(Bucket=bucket_name)
    version_id = versions_response["Versions"][0]["VersionId"]

    try:
        s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
        deleted = True
    except botocore.client.ClientError as e:
        e.response["Error"]["Code"].should.equal("AccessDenied")

    deleted.should.equal(False)

    response = s3.get_object_lock_configuration(Bucket=bucket_name)
    response["ObjectLockConfiguration"]["ObjectLockEnabled"].should.equal(enabled)
    response["ObjectLockConfiguration"]["Rule"]["DefaultRetention"][
        "Mode"
    ].should.equal(mode)
    response["ObjectLockConfiguration"]["Rule"]["DefaultRetention"][
        "Days"
    ].should.equal(days)