support listing all multipart uploads

Konstantinos Koukopoulos 2014-04-02 19:03:40 +03:00
parent 51b3a0507d
commit d4602b9cae
3 changed files with 62 additions and 1 deletion

View File

@@ -219,6 +219,10 @@ class S3Backend(BaseBackend):
        bucket = self.buckets[bucket_name]
        return bucket.multiparts[multipart_id].list_parts()

    def get_all_multiparts(self, bucket_name):
        bucket = self.buckets[bucket_name]
        return bucket.multiparts

    def set_part(self, bucket_name, multipart_id, part_id, value):
        bucket = self.buckets[bucket_name]
        multipart = bucket.multiparts[multipart_id]

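get_all_multiparts returns the bucket's multiparts dict keyed by upload id, so callers that want the upload objects themselves (as the response handler below does) iterate over its values. A minimal sketch against the backend, assuming the s3_backend singleton and an initiate_multipart helper as exposed by moto's s3 models at this point:

from moto.s3.models import s3_backend

s3_backend.create_bucket('mybucket')
upload = s3_backend.initiate_multipart('mybucket', 'one-key')  # assumed helper name

multiparts = s3_backend.get_all_multiparts('mybucket')
# maps upload id -> FakeMultipart, i.e. {upload.id: upload} here
assert multiparts[upload.id].key_name == 'one-key'
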
View File

@@ -35,7 +35,7 @@ class ResponseObject(object):
    def _bucket_response(self, request, full_url, headers):
        parsed_url = urlparse(full_url)
        querystring = parse_qs(parsed_url.query)
        querystring = parse_qs(parsed_url.query, keep_blank_values=True)
        method = request.method

        bucket_name = self.bucket_name_from_url(full_url)
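
The keep_blank_values change is what lets the new branch below see the flag at all: boto issues ListMultipartUploads as GET /?uploads, i.e. a query parameter with an empty value, and parse_qs drops blank values by default, so 'uploads' in querystring would never be true. A quick sketch of the difference (Python 2 urlparse, matching the code here):

from urlparse import parse_qs  # urllib.parse.parse_qs on Python 3

parse_qs('uploads')                          # {} -- the blank value is dropped
parse_qs('uploads', keep_blank_values=True)  # {'uploads': ['']}
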
@@ -64,6 +64,16 @@ class ResponseObject(object):
            return 404, headers, ""

    def _bucket_response_get(self, bucket_name, querystring, headers):
        if 'uploads' in querystring:
            for unsup in ('delimiter', 'prefix', 'max-uploads'):
                if unsup in querystring:
                    raise NotImplementedError("Listing multipart uploads with {} has not been implemented yet.".format(unsup))
            multiparts = list(self.backend.get_all_multiparts(bucket_name).itervalues())
            template = Template(S3_ALL_MULTIPARTS)
            return 200, headers, template.render(
                bucket_name=bucket_name,
                uploads=multiparts)

        bucket = self.backend.get_bucket(bucket_name)
        if bucket:
            prefix = querystring.get('prefix', [None])[0]
@@ -460,3 +470,29 @@ S3_MULTIPART_COMPLETE_TOO_SMALL_ERROR = """<?xml version="1.0" encoding="UTF-8"?
  <RequestId>asdfasdfsdafds</RequestId>
  <HostId>sdfgdsfgdsfgdfsdsfgdfs</HostId>
</Error>"""

S3_ALL_MULTIPARTS = """<?xml version="1.0" encoding="UTF-8"?>
<ListMultipartUploadsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
  <Bucket>{{ bucket_name }}</Bucket>
  <KeyMarker></KeyMarker>
  <UploadIdMarker></UploadIdMarker>
  <MaxUploads>1000</MaxUploads>
  <IsTruncated>False</IsTruncated>
  {% for upload in uploads %}
  <Upload>
    <Key>{{ upload.key_name }}</Key>
    <UploadId>{{ upload.id }}</UploadId>
    <Initiator>
      <ID>arn:aws:iam::111122223333:user/user1-11111a31-17b5-4fb7-9df5-b111111f13de</ID>
      <DisplayName>user1-11111a31-17b5-4fb7-9df5-b111111f13de</DisplayName>
    </Initiator>
    <Owner>
      <ID>75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a</ID>
      <DisplayName>OwnerDisplayName</DisplayName>
    </Owner>
    <StorageClass>STANDARD</StorageClass>
    <Initiated>2010-11-10T20:48:33.000Z</Initiated>
  </Upload>
  {% endfor %}
</ListMultipartUploadsResult>
"""

View File

@@ -127,6 +127,27 @@ def test_multipart_etag():
        '"140f92a6df9f9e415f74a1463bcee9bb-2"')


@mock_s3
def test_list_multiparts():
    # Create Bucket so that test can run
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket('mybucket')

    multipart1 = bucket.initiate_multipart_upload("one-key")
    multipart2 = bucket.initiate_multipart_upload("two-key")

    uploads = bucket.get_all_multipart_uploads()
    uploads.should.have.length_of(2)
    dict([(u.key_name, u.id) for u in uploads]).should.equal(
        {'one-key': multipart1.id, 'two-key': multipart2.id})

    multipart2.cancel_upload()

    uploads = bucket.get_all_multipart_uploads()
    uploads.should.have.length_of(1)
    uploads[0].key_name.should.equal("one-key")

    multipart1.cancel_upload()

    uploads = bucket.get_all_multipart_uploads()
    uploads.should.be.empty


@mock_s3
def test_missing_key():
    conn = boto.connect_s3('the_key', 'the_secret')