Fix issue with large S3 values. Closes #11.

Steve Pulec 2013-05-06 23:33:59 -04:00
parent 000006e49e
commit caf73557cd
3 changed files with 34 additions and 1 deletion


@@ -11,6 +11,10 @@ class FakeKey(object):
         self.value = value
         self.last_modified = datetime.datetime.now()
 
+    def append_to_value(self, value):
+        self.value += value
+        self.last_modified = datetime.datetime.now()
+
     @property
     def etag(self):
         value_md5 = md5.new()
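For reference, a minimal sketch of the new method in isolation. This is a trimmed-down stand-in for illustration, not moto's full FakeKey, whose constructor takes more arguments:

import datetime

class FakeKey(object):
    def __init__(self, value):
        self.value = value
        self.last_modified = datetime.datetime.now()

    def append_to_value(self, value):
        # Appending mutates the stored value in place and refreshes the
        # timestamp, so a key assembled from streamed chunks reads as
        # freshly written.
        self.value += value
        self.last_modified = datetime.datetime.now()

key = FakeKey("foo")
key.append_to_value("bar")
assert key.value == "foobar"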
@@ -81,6 +85,13 @@ class S3Backend(BaseBackend):
         return new_key
 
+    def append_to_key(self, bucket_name, key_name, value):
+        key_name = clean_key_name(key_name)
+
+        key = self.get_key(bucket_name, key_name)
+        key.append_to_value(value)
+        return key
+
     def get_key(self, bucket_name, key_name):
         key_name = clean_key_name(key_name)
         bucket = self.get_bucket(bucket_name)
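Roughly how the backend pieces fit together, as a hedged sketch. The s3_backend import path and the create_bucket call are assumptions about moto's layout, not shown in this diff:

from moto.s3.models import s3_backend  # assumed import path

s3_backend.create_bucket("foobar")                    # assumed existing API
s3_backend.set_key("foobar", "the-key", "foo")        # first chunk
s3_backend.append_to_key("foobar", "the-key", "bar")  # later chunk
assert s3_backend.get_key("foobar", "the-key").value == "foobar"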


@@ -106,7 +106,18 @@ def _key_response(request, full_url, headers):
             s3_backend.copy_key(src_bucket, src_key, bucket_name, key_name)
             template = Template(S3_OBJECT_COPY_RESPONSE)
             return template.render(key=src_key)
-        new_key = s3_backend.set_key(bucket_name, key_name, body)
+        streaming_request = hasattr(request, 'streaming') and request.streaming
+        closing_connection = headers.get('connection') == 'close'
+        if closing_connection and streaming_request:
+            # Closing the connection of a streaming request. No more data
+            new_key = s3_backend.get_key(bucket_name, key_name)
+        elif streaming_request:
+            # Streaming request, more data
+            new_key = s3_backend.append_to_key(bucket_name, key_name, body)
+        else:
+            # Initial data
+            new_key = s3_backend.set_key(bucket_name, key_name, body)
+            request.streaming = True
         template = Template(S3_OBJECT_RESPONSE)
         headers.update(new_key.response_dict)
         return 200, headers, template.render(key=new_key)
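This dispatch assumes a large value arrives as a series of streamed PUTs on one connection: the first request has no streaming flag yet (so it creates the key and marks the request), middle requests append more data, and the final request arrives with connection: close, at which point the key is already complete. A self-contained mirror of that branching, with a plain dict standing in for the backend (stand-in names, not moto's API):

def handle_streaming_put(request, headers, body, store, key_name):
    streaming_request = hasattr(request, 'streaming') and request.streaming
    closing_connection = headers.get('connection') == 'close'
    if closing_connection and streaming_request:
        pass                      # final request: key already complete
    elif streaming_request:
        store[key_name] += body   # middle request: append the chunk
    else:
        store[key_name] = body    # first request: create the key
        request.streaming = True  # route later requests to the append branch
    return store[key_name]

class Request(object):
    pass

store, request = {}, Request()
handle_streaming_put(request, {}, 'foo', store, 'the-key')
handle_streaming_put(request, {}, 'bar', store, 'the-key')
assert handle_streaming_put(request, {'connection': 'close'}, '',
                            store, 'the-key') == 'foobar'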


@@ -78,6 +78,17 @@ def test_empty_key_set_on_existing_key():
     bucket.get_key("the-key").get_contents_as_string().should.equal('')
 
+@mock_s3
+def test_large_key_save():
+    conn = boto.connect_s3('the_key', 'the_secret')
+    bucket = conn.create_bucket("foobar")
+
+    key = Key(bucket)
+    key.key = "the-key"
+    key.set_contents_from_string("foobar" * 100000)
+
+    bucket.get_key("the-key").get_contents_as_string().should.equal('foobar' * 100000)
+
 @mock_s3
 def test_copy_key():
     conn = boto.connect_s3('the_key', 'the_secret')