S3 - Don't error when deleting unknown keys (#4036)

* #1581 - S3 - Don't error when deleting unknown keys

* Update test_s3bucket_path.py
This commit is contained in:
Bert Blommers 2021-06-25 10:26:03 +01:00 committed by GitHub
parent 21a77510b3
commit 0f4ab4da1c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 21 additions and 9 deletions

View File

@@ -943,13 +943,10 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
key_name = object_["Key"]
version_id = object_.get("VersionId", None)
success, _ = self.backend.delete_object(
self.backend.delete_object(
bucket_name, undo_clean_key_name(key_name), version_id=version_id
)
if success:
deleted_objects.append((key_name, version_id))
else:
error_names.append(key_name)
return (
200,

View File

@@ -689,8 +689,7 @@ def test_delete_keys_invalid():
# non-existing key case
result = bucket.delete_keys(["abc", "file3"])
result.deleted.should.have.length_of(1)
result.errors.should.have.length_of(1)
result.deleted.should.have.length_of(2)
keys = bucket.get_all_keys()
keys.should.have.length_of(3)
keys[0].name.should.equal("file1")
@@ -4215,6 +4214,22 @@ def test_delete_objects_with_url_encoded_key(key):
assert_deleted()
@mock_s3
def test_delete_objects_unknown_key():
    """Bulk-deleting a mix of existing and missing keys reports every key as Deleted.

    Regression test for moto issue #1581: S3's DeleteObjects must not raise an
    error for keys that do not exist in the bucket.
    """
    bucket = "test-moto-issue-1581"
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    client.create_bucket(Bucket=bucket)
    client.put_object(Bucket=bucket, Key="file1", Body="body")
    # "file1" exists, "file2" does not — both should come back as Deleted.
    response = client.delete_objects(
        Bucket=bucket, Delete={"Objects": [{"Key": "file1"}, {"Key": "file2"}]}
    )
    response["Deleted"].should.have.length_of(2)
    response["Deleted"].should.contain({"Key": "file1"})
    response["Deleted"].should.contain({"Key": "file2"})
    client.delete_bucket(Bucket=bucket)
@mock_s3
@mock_config
def test_public_access_block():

View File

@@ -315,8 +315,8 @@ def test_delete_keys_with_invalid():
result = bucket.delete_keys(["abc", "file3"])
result.deleted.should.have.length_of(1)
result.errors.should.have.length_of(1)
result.deleted.should.have.length_of(2)
result.errors.should.have.length_of(0)
keys = bucket.get_all_keys()
keys.should.have.length_of(3)
keys[0].name.should.equal("file1")