fix s3 key list and missing key response

Steve Pulec 2013-02-27 01:12:11 -05:00
parent 91b61c7be5
commit b0d89bb7b7
3 changed files with 121 additions and 20 deletions

View File

@@ -16,6 +16,10 @@ class FakeKey(object):
        value_md5.update(self.value)
        return '"{0}"'.format(value_md5.hexdigest())

    @property
    def size(self):
        return len(self.value)
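
For non-multipart uploads, S3's ETag is the MD5 hex digest of the body wrapped in quotes, which is exactly the convention the etag property above mimics and what boto checks after an upload. A minimal sketch:

```python
import hashlib

# The quoted-MD5 convention FakeKey.etag follows: for simple
# (non-multipart) puts, S3's ETag is the MD5 hex digest of the body.
body = b"some value"
etag = '"{0}"'.format(hashlib.md5(body).hexdigest())
```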

class FakeBucket(object):
    def __init__(self, name):
@@ -60,6 +64,29 @@ class S3Backend(BaseBackend):
        bucket = self.buckets[bucket_name]
        return bucket.keys.get(key_name)

    def prefix_query(self, bucket, prefix):
        key_results = set()
        folder_results = set()
        if prefix:
            for key_name, key in bucket.keys.iteritems():
                if key_name.startswith(prefix):
                    # Slice the prefix off; lstrip() strips a *character set*,
                    # not a leading substring, and mangles key names.
                    key_without_prefix = key_name[len(prefix):]
                    if '/' in key_without_prefix:
                        folder_results.add("{0}{1}".format(prefix, key_without_prefix.split("/")[0]))
                    else:
                        key_results.add(key)
        else:
            for key_name, key in bucket.keys.iteritems():
                if '/' in key_name:
                    folder_results.add(key_name.split("/")[0])
                else:
                    key_results.add(key)

        key_results = sorted(key_results, key=lambda key: key.name)
        folder_results = sorted(folder_results)
        return key_results, folder_results
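
To make the grouping concrete, here is a hedged sketch of driving prefix_query directly. StubKey and StubBucket are hypothetical stand-ins for FakeKey and FakeBucket, and the import path is assumed from the `from .models import s3_backend` seen in responses.py; this is Python 2, matching the module:

```python
from moto.s3.models import s3_backend  # assumed import path

# Hypothetical stand-ins, just enough surface for prefix_query.
class StubKey(object):
    def __init__(self, name):
        self.name = name

class StubBucket(object):
    def __init__(self, names):
        self.keys = dict((name, StubKey(name)) for name in names)

bucket = StubBucket(["a-key", "the-key",
                     "folder/some-stuff", "folder/more-folder/foobar"])

keys, folders = s3_backend.prefix_query(bucket, "folder/")
# keys    -> [the key named "folder/some-stuff"]
# folders -> ["folder/more-folder"]
```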
    def delete_key(self, bucket_name, key_name):
        bucket = self.buckets[bucket_name]
        return bucket.keys.pop(key_name)
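
For orientation, a hedged sketch of the backend round trip these methods support. The import path is an assumption, and create_bucket is inferred from the bucket PUT handling and the tests rather than shown in this diff:

```python
from moto.s3.models import s3_backend  # assumed import path

s3_backend.create_bucket("foobar")              # assumed; implied by the tests
s3_backend.set_key("foobar", "the-key", "some value")
key = s3_backend.get_key("foobar", "the-key")   # -> FakeKey with .etag and .size
missing = s3_backend.get_key("foobar", "nope")  # -> None, which drives the new 404
s3_backend.delete_key("foobar", "the-key")
```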

View File

@@ -1,3 +1,5 @@
from urlparse import parse_qs
from jinja2 import Template
from .models import s3_backend
@@ -15,14 +17,18 @@ def all_buckets(uri, body, method):
def bucket_response(uri, body, headers):
    hostname = uri.hostname
    method = uri.method
    querystring = parse_qs(uri.query)

    bucket_name = bucket_name_from_hostname(hostname)

    if method == 'GET':
        bucket = s3_backend.get_bucket(bucket_name)
        if bucket:
            prefix = querystring.get('prefix', [None])[0]
            result_keys, result_folders = s3_backend.prefix_query(bucket, prefix)
            template = Template(S3_BUCKET_GET_RESPONSE)
            return template.render(bucket=bucket)
            return template.render(bucket=bucket, prefix=prefix,
                result_keys=result_keys, result_folders=result_folders)
        else:
            return "", dict(status=404)
    elif method == 'PUT':
@@ -58,17 +64,21 @@ def key_response(uri_info, body, headers):
    if method == 'GET':
        key = s3_backend.get_key(bucket_name, key_name)
        return key.value
        if key:
            return key.value
        else:
            return "", dict(status=404)

    if method == 'PUT':
        if 'x-amz-copy-source' in headers:
            # Copy key. Split on the first slash only, so source keys that
            # themselves contain slashes keep their full path.
            src_bucket, src_key = headers.get("x-amz-copy-source").split("/", 1)
            s3_backend.copy_key(src_bucket, src_key, bucket_name, key_name)
            return S3_OBJECT_COPY_RESPONSE
            template = Template(S3_OBJECT_COPY_RESPONSE)
            # Render with the copied key object; the bare src_key string
            # has no etag attribute.
            return template.render(key=s3_backend.get_key(bucket_name, key_name))
        if body:
            new_key = s3_backend.set_key(bucket_name, key_name, body)
            return S3_OBJECT_RESPONSE, dict(etag=new_key.etag)
            template = Template(S3_OBJECT_RESPONSE)
            return template.render(key=new_key), dict(etag=new_key.etag)

        key = s3_backend.get_key(bucket_name, key_name)
        if key:
            return "", dict(etag=key.etag)
@@ -103,15 +113,33 @@ S3_ALL_BUCKETS = """<ListAllMyBucketsResult xmlns="http://s3.amazonaws.com/doc/2
</Buckets>
</ListAllMyBucketsResult>"""
S3_BUCKET_GET_RESPONSE = """<ListBucket xmlns="http://doc.s3.amazonaws.com/2006-03-01">\
<Bucket>{{ bucket.name }}</Bucket>\
<Prefix>notes/</Prefix>\
<Delimiter>/</Delimiter>\
<MaxKeys>1000</MaxKeys>\
<AWSAccessKeyId>AKIAIOSFODNN7EXAMPLE</AWSAccessKeyId>\
<Timestamp>2006-03-01T12:00:00.183Z</Timestamp>\
<Signature>Iuyz3d3P0aTou39dzbqaEXAMPLE=</Signature>\
</ListBucket>"""
S3_BUCKET_GET_RESPONSE = """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Name>{{ bucket.name }}</Name>
<Prefix>{{ prefix }}</Prefix>
<MaxKeys>1000</MaxKeys>
<Delimiter>/</Delimiter>
<IsTruncated>false</IsTruncated>
{% for key in result_keys %}
<Contents>
<Key>{{ key.name }}</Key>
<LastModified>2006-01-01T12:00:00.000Z</LastModified>
<ETag>{{ key.etag }}</ETag>
<Size>{{ key.size }}</Size>
<StorageClass>STANDARD</StorageClass>
<Owner>
<ID>75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a</ID>
<DisplayName>webfile</DisplayName>
</Owner>
<StorageClass>STANDARD</StorageClass>
</Contents>
{% endfor %}
{% for folder in result_folders %}
<CommonPrefixes>
<Prefix>{{ folder }}</Prefix>
</CommonPrefixes>
{% endfor %}
</ListBucketResult>"""
S3_BUCKET_CREATE_RESPONSE = """<CreateBucketResponse xmlns="http://s3.amazonaws.com/doc/2006-03-01">
<CreateBucketResponse>
@@ -151,14 +179,14 @@ S3_DELETE_OBJECT_SUCCESS = """<DeleteObjectResponse xmlns="http://s3.amazonaws.c
S3_OBJECT_RESPONSE = """<PutObjectResponse xmlns="http://s3.amazonaws.com/doc/2006-03-01">
  <PutObjectResponse>
    <ETag>&quot;asdlfkdalsjfsalfkjsadlfjsdjkk&quot;</ETag>
    <ETag>{{ key.etag }}</ETag>
    <LastModified>2006-03-01T12:00:00.183Z</LastModified>
  </PutObjectResponse>
</PutObjectResponse>"""

S3_OBJECT_COPY_RESPONSE = """<CopyObjectResponse xmlns="http://doc.s3.amazonaws.com/2006-03-01">
  <CopyObjectResponse>
    <ETag>"asdfadsfdsafjsadfdafsadf"</ETag>
    <ETag>{{ key.etag }}</ETag>
    <LastModified>2008-02-18T13:54:10.183Z</LastModified>
  </CopyObjectResponse>
</CopyObjectResponse>"""

View File

@@ -1,9 +1,11 @@
import urllib2
import boto
from boto.exception import S3ResponseError
from boto.s3.key import Key
import requests
import sure
from sure import expect
from moto import mock_s3
@@ -31,7 +33,7 @@ def test_my_model_save():
    model_instance = MyModel('steve', 'is awesome')
    model_instance.save()

    assert conn.get_bucket('mybucket').get_key('steve').get_contents_as_string() == 'is awesome'
    expect(conn.get_bucket('mybucket').get_key('steve').get_contents_as_string()).should.equal('is awesome')
@mock_s3
@@ -41,6 +43,14 @@ def test_missing_key():
    bucket.get_key("the-key").should.equal(None)


@mock_s3
def test_missing_key_urllib2():
    conn = boto.connect_s3('the_key', 'the_secret')
    conn.create_bucket("foobar")

    urllib2.urlopen.when.called_with("http://foobar.s3.amazonaws.com/the-key").should.throw(urllib2.HTTPError)


@mock_s3
def test_copy_key():
    conn = boto.connect_s3('the_key', 'the_secret')
@@ -55,6 +65,42 @@ def test_copy_key():
    bucket.get_key("new-key").get_contents_as_string().should.equal("some value")


@mock_s3
def test_get_all_keys():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")

    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string("some value")

    key2 = Key(bucket)
    key2.key = "folder/some-stuff"
    key2.set_contents_from_string("some value")

    key3 = Key(bucket)
    key3.key = "folder/more-folder/foobar"
    key3.set_contents_from_string("some value")

    key4 = Key(bucket)
    key4.key = "a-key"
    key4.set_contents_from_string("some value")

    keys = bucket.get_all_keys()
    keys.should.have.length_of(3)

    keys[0].name.should.equal("a-key")
    keys[1].name.should.equal("the-key")
    # The two keys under "folder/" collapse into a single common prefix.
    keys[2].name.should.equal("folder")

    keys = bucket.get_all_keys(prefix="folder/")
    keys.should.have.length_of(2)

    keys[0].name.should.equal("folder/some-stuff")
    keys[1].name.should.equal("folder/more-folder")
@mock_s3
def test_missing_bucket():
    conn = boto.connect_s3('the_key', 'the_secret')
@@ -86,8 +132,8 @@ def test_bucket_deletion():
@mock_s3
def test_get_all_buckets():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    bucket = conn.create_bucket("foobar2")
    conn.create_bucket("foobar")
    conn.create_bucket("foobar2")
    buckets = conn.get_all_buckets()

    buckets.should.have.length_of(2)