diff --git a/moto/s3/models.py b/moto/s3/models.py
index ec3e69f1b..3cd50050d 100644
--- a/moto/s3/models.py
+++ b/moto/s3/models.py
@@ -18,6 +18,17 @@ UPLOAD_ID_BYTES = 43
UPLOAD_PART_MIN_SIZE = 5242880
+class FakeDeleteMarker(BaseModel):
+
+ def __init__(self, key):
+ self.key = key
+ self._version_id = key.version_id + 1
+
+ @property
+ def version_id(self):
+ return self._version_id
+
+
class FakeKey(BaseModel):
def __init__(self, name, value, storage="STANDARD", etag=None, is_versioned=False, version_id=0):
@@ -33,6 +44,10 @@ class FakeKey(BaseModel):
self._version_id = version_id
self._is_versioned = is_versioned
+ @property
+ def version_id(self):
+ return self._version_id
+
def copy(self, new_name=None):
r = copy.deepcopy(self)
if new_name is not None:
@@ -102,7 +117,7 @@ class FakeKey(BaseModel):
res['x-amz-restore'] = rhdr.format(self.expiry_date)
if self._is_versioned:
- res['x-amz-version-id'] = str(self._version_id)
+ res['x-amz-version-id'] = str(self.version_id)
if self.website_redirect_location:
res['x-amz-website-redirect-location'] = self.website_redirect_location
@@ -356,6 +371,26 @@ class S3Backend(BaseBackend):
def get_bucket_versioning(self, bucket_name):
return self.get_bucket(bucket_name).versioning_status
+ def get_bucket_latest_versions(self, bucket_name):
+ versions = self.get_bucket_versions(bucket_name)
+ maximum_version_per_key = {}
+ latest_versions = {}
+
+ for version in versions:
+ if isinstance(version, FakeDeleteMarker):
+ name = version.key.name
+ else:
+ name = version.name
+ version_id = version.version_id
+ maximum_version_per_key[name] = max(
+ version_id,
+ maximum_version_per_key.get(name, -1)
+ )
+ if version_id == maximum_version_per_key[name]:
+ latest_versions[name] = version_id
+
+ return latest_versions
+
def get_bucket_versions(self, bucket_name, delimiter=None,
encoding_type=None,
key_marker=None,
@@ -423,15 +458,22 @@ class S3Backend(BaseBackend):
def get_key(self, bucket_name, key_name, version_id=None):
key_name = clean_key_name(key_name)
bucket = self.get_bucket(bucket_name)
+ key = None
+
if bucket:
if version_id is None:
if key_name in bucket.keys:
- return bucket.keys[key_name]
+ key = bucket.keys[key_name]
else:
- for key in bucket.keys.getlist(key_name):
- if str(key._version_id) == str(version_id):
- return key
- raise MissingKey(key_name=key_name)
+ for key_version in bucket.keys.getlist(key_name):
+ if str(key_version.version_id) == str(version_id):
+ key = key_version
+ break
+
+ if isinstance(key, FakeKey):
+ return key
+ else:
+ raise MissingKey(key_name=key_name)
def initiate_multipart(self, bucket_name, key_name, metadata):
bucket = self.get_bucket(bucket_name)
@@ -510,12 +552,33 @@ class S3Backend(BaseBackend):
return key_results, folder_results
- def delete_key(self, bucket_name, key_name):
+ def _set_delete_marker(self, bucket_name, key_name):
+ bucket = self.get_bucket(bucket_name)
+ bucket.keys[key_name] = FakeDeleteMarker(
+ key=bucket.keys[key_name]
+ )
+
+ def delete_key(self, bucket_name, key_name, version_id=None):
key_name = clean_key_name(key_name)
bucket = self.get_bucket(bucket_name)
try:
- bucket.keys.pop(key_name)
+ if not bucket.is_versioned:
+ bucket.keys.pop(key_name)
+ else:
+ if version_id is None:
+ self._set_delete_marker(bucket_name, key_name)
+ else:
+ if key_name not in bucket.keys:
+ raise KeyError
+ bucket.keys.setlist(
+ key_name,
+ [
+ key
+ for key in bucket.keys.getlist(key_name)
+ if str(key.version_id) != str(version_id)
+ ]
+ )
return True
except KeyError:
return False
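
Taken together, the models.py changes above mean that a delete on a versioned bucket no longer drops the key: it wraps it in a FakeDeleteMarker whose version id is one past the key's, and get_bucket_latest_versions later keeps the highest version id per key name. A minimal sketch of that interaction (not part of the patch), assuming the patched moto.s3.models is importable and using the constructor signatures shown in the diff:

from moto.s3.models import FakeKey, FakeDeleteMarker

# A versioned key, built with the signature shown in the diff above.
key = FakeKey('key-with-versions', 'v2', is_versioned=True, version_id=1)

# Deleting without a version id on a versioned bucket stores a marker in
# place of the key; its version id is key.version_id + 1.
marker = FakeDeleteMarker(key=key)
assert marker.version_id == 2
assert marker.key is key  # the shadowed key stays reachable via the marker

# get_bucket_latest_versions keeps the highest version id seen per key name,
# so here the marker (version 2), not the key (version 1), counts as latest.
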
diff --git a/moto/s3/responses.py b/moto/s3/responses.py
index 68530c190..fd33c5ead 100644
--- a/moto/s3/responses.py
+++ b/moto/s3/responses.py
@@ -13,7 +13,7 @@ from moto.s3bucket_path.utils import bucket_name_from_url as bucketpath_bucket_n
from .exceptions import BucketAlreadyExists, S3ClientError, InvalidPartOrder
-from .models import s3_backend, get_canned_acl, FakeGrantee, FakeGrant, FakeAcl
+from .models import s3_backend, get_canned_acl, FakeGrantee, FakeGrant, FakeAcl, FakeKey
from .utils import bucket_name_from_url, metadata_from_headers
from xml.dom import minidom
@@ -219,9 +219,21 @@ class ResponseObject(_TemplateEnvironmentMixin):
max_keys=max_keys,
version_id_marker=version_id_marker
)
+ latest_versions = self.backend.get_bucket_latest_versions(
+ bucket_name=bucket_name
+ )
+ key_list = []
+ delete_marker_list = []
+ for version in versions:
+ if isinstance(version, FakeKey):
+ key_list.append(version)
+ else:
+ delete_marker_list.append(version)
template = self.response_template(S3_BUCKET_GET_VERSIONS)
return 200, {}, template.render(
- key_list=versions,
+ key_list=key_list,
+ delete_marker_list=delete_marker_list,
+ latest_versions=latest_versions,
bucket=bucket,
prefix='',
max_keys=1000,
@@ -478,7 +490,7 @@ class ResponseObject(_TemplateEnvironmentMixin):
return self._key_response_post(request, body, bucket_name, query, key_name, headers)
else:
raise NotImplementedError(
- "Method {0} has not been impelemented in the S3 backend yet".format(method))
+ "Method {0} has not been implemented in the S3 backend yet".format(method))
def _key_response_get(self, bucket_name, query, key_name, headers):
response_headers = {}
@@ -630,7 +642,8 @@ class ResponseObject(_TemplateEnvironmentMixin):
upload_id = query['uploadId'][0]
self.backend.cancel_multipart(bucket_name, upload_id)
return 204, {}, ""
- self.backend.delete_key(bucket_name, key_name)
+ version_id = query.get('versionId', [None])[0]
+ self.backend.delete_key(bucket_name, key_name, version_id=version_id)
template = self.response_template(S3_DELETE_OBJECT_SUCCESS)
return 204, {}, template.render()
@@ -851,8 +864,8 @@ S3_BUCKET_GET_VERSIONS = """
{% for key in key_list %}
<Version>
<Key>{{ key.name }}</Key>
- <VersionId>{{ key._version_id }}</VersionId>
- <IsLatest>false</IsLatest>
+ <VersionId>{{ key.version_id }}</VersionId>
+ <IsLatest>{% if latest_versions[key.name] == key.version_id %}true{% else %}false{% endif %}</IsLatest>
<LastModified>{{ key.last_modified_ISO8601 }}</LastModified>
<ETag>{{ key.etag }}</ETag>
<Size>{{ key.size }}</Size>
@@ -863,6 +876,18 @@ S3_BUCKET_GET_VERSIONS = """
</Owner>
</Version>
{% endfor %}
+ {% for marker in delete_marker_list %}
+ <DeleteMarker>
+ <Key>{{ marker.key.name }}</Key>
+ <VersionId>{{ marker.version_id }}</VersionId>
+ <IsLatest>{% if latest_versions[marker.key.name] == marker.version_id %}true{% else %}false{% endif %}</IsLatest>
+ <LastModified>{{ marker.key.last_modified_ISO8601 }}</LastModified>
+ <Owner>
+ <ID>75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a</ID>
+ <DisplayName>webfile</DisplayName>
+ </Owner>
+ </DeleteMarker>
+ {% endfor %}
</ListVersionsResult>
"""
diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py
index 09ef235a8..de9c6a7de 100644
--- a/tests/test_s3/test_s3.py
+++ b/tests/test_s3/test_s3.py
@@ -1300,6 +1300,12 @@ def test_boto3_list_object_versions():
bucket_name = 'mybucket'
key = 'key-with-versions'
s3.create_bucket(Bucket=bucket_name)
+ s3.put_bucket_versioning(
+ Bucket=bucket_name,
+ VersioningConfiguration={
+ 'Status': 'Enabled'
+ }
+ )
items = (six.b('v1'), six.b('v2'))
for body in items:
s3.put_object(
@@ -1319,6 +1325,58 @@ def test_boto3_list_object_versions():
response['Body'].read().should.equal(items[-1])
+@mock_s3
+def test_boto3_delete_markers():
+ s3 = boto3.client('s3', region_name='us-east-1')
+ bucket_name = 'mybucket'
+ key = 'key-with-versions'
+ s3.create_bucket(Bucket=bucket_name)
+ s3.put_bucket_versioning(
+ Bucket=bucket_name,
+ VersioningConfiguration={
+ 'Status': 'Enabled'
+ }
+ )
+ items = (six.b('v1'), six.b('v2'))
+ for body in items:
+ s3.put_object(
+ Bucket=bucket_name,
+ Key=key,
+ Body=body
+ )
+ s3.delete_object(
+ Bucket=bucket_name,
+ Key=key
+ )
+ with assert_raises(ClientError) as e:
+ s3.get_object(
+ Bucket=bucket_name,
+ Key=key
+ )
+ e.exception.response['Error']['Code'].should.equal('NoSuchKey')
+
+ s3.delete_object(
+ Bucket=bucket_name,
+ Key=key,
+ VersionId='2'
+ )
+ response = s3.get_object(
+ Bucket=bucket_name,
+ Key=key
+ )
+ response['Body'].read().should.equal(items[-1])
+ response = s3.list_object_versions(
+ Bucket=bucket_name
+ )
+ response['Versions'].should.have.length_of(2)
+ response['Versions'][-1]['IsLatest'].should.be.true
+ response['Versions'][0]['IsLatest'].should.be.false
+ [(key_metadata['Key'], key_metadata['VersionId'])
+ for key_metadata in response['Versions']].should.equal(
+ [('key-with-versions', '0'), ('key-with-versions', '1')]
+ )
+
+
TEST_XML = """\
@@ -1337,3 +1395,4 @@ TEST_XML = """\
"""
+