Add test_multipart_duplicate_upload

Test to make sure we do not duplicate data when uploading the same part twice
jraby 2015-02-27 18:48:51 -05:00
parent c4793fc1f9
commit 4a14d8d3b3
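
For context, the behaviour this test pins down is that multipart parts are addressed by part number, so uploading the same part number twice should leave only one copy of that data in the assembled object. Below is a minimal sketch of that semantics using a hypothetical helper class, not moto's actual backend code:

# A minimal sketch (hypothetical, not moto's implementation) of the semantics
# the new test checks: parts are keyed by part number, so re-uploading the
# same part number replaces the earlier data instead of appending to it.

class FakeMultipartUpload(object):
    def __init__(self):
        self.parts = {}  # part number -> bytes

    def upload_part(self, part_number, data):
        # A second upload with the same part number overwrites the first.
        self.parts[part_number] = data

    def complete(self):
        # Assemble parts in part-number order; each number contributes once.
        return b''.join(self.parts[n] for n in sorted(self.parts))


upload = FakeMultipartUpload()
upload.upload_part(1, b'0' * 5)
upload.upload_part(1, b'0' * 5)  # same part uploaded twice
upload.upload_part(2, b'1' * 3)
assert upload.complete() == b'0' * 5 + b'1' * 3
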


@@ -208,7 +208,23 @@ def test_multipart_invalid_order():
    bucket.complete_multipart_upload.when.called_with(
        multipart.key_name, multipart.id, xml).should.throw(S3ResponseError)


@mock_s3
@reduced_min_part_size
def test_multipart_duplicate_upload():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    multipart = bucket.initiate_multipart_upload("the-key")
    part1 = b'0' * REDUCED_PART_SIZE
    multipart.upload_part_from_file(BytesIO(part1), 1)
    # same part again
    multipart.upload_part_from_file(BytesIO(part1), 1)
    part2 = b'1' * 1024
    multipart.upload_part_from_file(BytesIO(part2), 2)
    multipart.complete_upload()
    # We should get only one copy of part 1.
    bucket.get_key("the-key").get_contents_as_string().should.equal(part1 + part2)


@mock_s3
def test_list_multiparts():
    # Create Bucket so that test can run