Merge pull request #471 from spulec/combined-s3

Combined s3
This commit is contained in:
Steve Pulec 2016-01-17 16:32:08 -05:00
commit 0535f445be
9 changed files with 77 additions and 71 deletions

View File

@@ -15,7 +15,6 @@ from moto.kms import kms_backend
from moto.rds import rds_backend
from moto.redshift import redshift_backend
from moto.s3 import s3_backend
from moto.s3bucket_path import s3bucket_path_backend
from moto.ses import ses_backend
from moto.sns import sns_backend
from moto.sqs import sqs_backend
@@ -39,7 +38,7 @@ BACKENDS = {
'redshift': redshift_backend,
'rds': rds_backend,
's3': s3_backend,
's3bucket_path': s3bucket_path_backend,
's3bucket_path': s3_backend,
'ses': ses_backend,
'sns': sns_backend,
'sqs': sqs_backend,

View File

@@ -8,6 +8,9 @@ import xmltodict
from moto.core.responses import _TemplateEnvironmentMixin
from moto.s3bucket_path.utils import bucket_name_from_url as bucketpath_bucket_name_from_url, parse_key_name as bucketpath_parse_key_name, is_delete_keys as bucketpath_is_delete_keys
from .exceptions import BucketAlreadyExists, S3ClientError, InvalidPartOrder
from .models import s3_backend, get_canned_acl, FakeGrantee, FakeGrant, FakeAcl
from .utils import bucket_name_from_url, metadata_from_headers
@@ -21,19 +24,14 @@ def parse_key_name(pth):
return pth.lstrip("/")
def is_delete_keys(path, bucket_name):
return path == u'/?delete'
class ResponseObject(_TemplateEnvironmentMixin):
def __init__(self, backend, bucket_name_from_url, parse_key_name,
is_delete_keys=None):
def __init__(self, backend):
super(ResponseObject, self).__init__()
self.backend = backend
self.bucket_name_from_url = bucket_name_from_url
self.parse_key_name = parse_key_name
if is_delete_keys:
self.is_delete_keys = is_delete_keys
@staticmethod
def is_delete_keys(path, bucket_name):
return path == u'/?delete'
def all_buckets(self):
# No bucket specified. Listing all buckets
@@ -41,6 +39,40 @@ class ResponseObject(_TemplateEnvironmentMixin):
template = self.response_template(S3_ALL_BUCKETS)
return template.render(buckets=all_buckets)
def subdomain_based_buckets(self, request):
host = request.headers['host']
if host.startswith("localhost"):
# For localhost, default to path-based buckets
return False
return host != 's3.amazonaws.com' and not re.match("s3.(.*).amazonaws.com", host)
def is_delete_keys(self, request, path, bucket_name):
if self.subdomain_based_buckets(request):
return is_delete_keys(path, bucket_name)
else:
return bucketpath_is_delete_keys(path, bucket_name)
def parse_bucket_name_from_url(self, request, url):
if self.subdomain_based_buckets(request):
return bucket_name_from_url(url)
else:
return bucketpath_bucket_name_from_url(url)
def parse_key_name(self, request, url):
if self.subdomain_based_buckets(request):
return parse_key_name(url)
else:
return bucketpath_parse_key_name(url)
def ambiguous_response(self, request, full_url, headers):
# Depending on which calling format the client is using, we don't know
# if this is a bucket or key request so we have to check
if self.subdomain_based_buckets(request):
return self.key_response(request, full_url, headers)
else:
# Using path-based buckets
return self.bucket_response(request, full_url, headers)
def bucket_response(self, request, full_url, headers):
try:
response = self._bucket_response(request, full_url, headers)
@@ -62,7 +94,7 @@ class ResponseObject(_TemplateEnvironmentMixin):
if region_match:
region_name = region_match.groups()[0]
bucket_name = self.bucket_name_from_url(full_url)
bucket_name = self.parse_bucket_name_from_url(request, full_url)
if not bucket_name:
# If no bucket specified, list all buckets
return self.all_buckets()
@@ -232,7 +264,7 @@ class ResponseObject(_TemplateEnvironmentMixin):
return 409, headers, template.render(bucket=removed_bucket)
def _bucket_response_post(self, request, bucket_name, headers):
if self.is_delete_keys(request.path, bucket_name):
if self.is_delete_keys(request, request.path, bucket_name):
return self._bucket_response_delete_keys(request, bucket_name, headers)
# POST to bucket-url should create file from form
@@ -320,8 +352,8 @@ class ResponseObject(_TemplateEnvironmentMixin):
query = parse_qs(parsed_url.query, keep_blank_values=True)
method = request.method
key_name = self.parse_key_name(parsed_url.path)
bucket_name = self.bucket_name_from_url(full_url)
key_name = self.parse_key_name(request, parsed_url.path)
bucket_name = self.parse_bucket_name_from_url(request, full_url)
if hasattr(request, 'body'):
# Boto
@@ -526,7 +558,7 @@ class ResponseObject(_TemplateEnvironmentMixin):
else:
raise NotImplementedError("Method POST had only been implemented for multipart uploads and restore operations, so far")
S3ResponseInstance = ResponseObject(s3_backend, bucket_name_from_url, parse_key_name)
S3ResponseInstance = ResponseObject(s3_backend)
S3_ALL_BUCKETS = """<ListAllMyBucketsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01">
<Owner>

View File

@@ -2,10 +2,17 @@ from __future__ import unicode_literals
from .responses import S3ResponseInstance
url_bases = [
"https?://s3(.*).amazonaws.com",
"https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3(.*).amazonaws.com"
]
url_paths = {
# subdomain bucket
'{0}/$': S3ResponseInstance.bucket_response,
'{0}/(?P<key_name>.+)': S3ResponseInstance.key_response,
# subdomain key of path-based bucket
'{0}/(?P<key_or_bucket_name>.+)': S3ResponseInstance.ambiguous_response,
# path-based bucket + key
'{0}/(?P<bucket_name_path>[a-zA-Z0-9\-_./]+)/(?P<key_name>.+)': S3ResponseInstance.key_response,
}

View File

@@ -1,3 +1,4 @@
from __future__ import unicode_literals
from .models import s3bucket_path_backend
mock_s3bucket_path = s3bucket_path_backend.decorator
from moto import mock_s3
mock_s3bucket_path = mock_s3

View File

@@ -1,8 +0,0 @@
from __future__ import unicode_literals
from moto.s3.models import S3Backend
class S3BucketPathBackend(S3Backend):
pass
s3bucket_path_backend = S3BucketPathBackend()

View File

@@ -1,22 +0,0 @@
from __future__ import unicode_literals
from .models import s3bucket_path_backend
from .utils import bucket_name_from_url
from moto.s3.responses import ResponseObject
def parse_key_name(pth):
return "/".join(pth.rstrip("/").split("/")[2:])
def is_delete_keys(path, bucket_name):
return path == u'/' + bucket_name + u'/?delete'
S3BucketPathResponseInstance = ResponseObject(
s3bucket_path_backend,
bucket_name_from_url,
parse_key_name,
is_delete_keys,
)

View File

@@ -1,21 +0,0 @@
from __future__ import unicode_literals
from .responses import S3BucketPathResponseInstance as ro
url_bases = [
"https?://s3(.*).amazonaws.com"
]
def bucket_response2(*args):
return ro.bucket_response(*args)
def bucket_response3(*args):
return ro.bucket_response(*args)
url_paths = {
'{0}/$': bucket_response3,
'{0}/(?P<bucket_name>[a-zA-Z0-9\-_.]+)$': ro.bucket_response,
'{0}/(?P<bucket_name>[a-zA-Z0-9\-_.]+)/$': bucket_response2,
'{0}/(?P<bucket_name>[a-zA-Z0-9\-_./]+)/(?P<key_name>.+)': ro.key_response
}

View File

@@ -9,3 +9,11 @@ def bucket_name_from_url(url):
if len(l) == 0 or l[0] == "":
return None
return l[0]
def parse_key_name(path):
return "/".join(path.rstrip("/").split("/")[2:])
def is_delete_keys(path, bucket_name):
return path == u'/' + bucket_name + u'/?delete'

View File

@@ -961,6 +961,16 @@ def test_boto3_bucket_create():
s3.Object('blah', 'hello.txt').get()['Body'].read().decode("utf-8").should.equal("some text")
@mock_s3
def test_boto3_bucket_create_eu_central():
s3 = boto3.resource('s3', region_name='eu-central-1')
s3.create_bucket(Bucket="blah")
s3.Object('blah', 'hello.txt').put(Body="some text")
s3.Object('blah', 'hello.txt').get()['Body'].read().decode("utf-8").should.equal("some text")
@mock_s3
def test_boto3_head_object():
s3 = boto3.resource('s3', region_name='us-east-1')