Merge pull request #1460 from okomestudio/s3-region-from-url

Support both virtual-hosted and path-style URLs for region name parsing
Steve Pulec 2018-03-06 21:57:57 -05:00 committed by GitHub
commit 326698f3fa
3 changed files with 41 additions and 7 deletions
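
In short: the S3 region is now parsed from the request URL whether the bucket is addressed path-style (s3-<region>.amazonaws.com/<bucket> or s3.<region>.amazonaws.com/<bucket>) or virtual-hosted-style (<bucket>.s3-<region>.amazonaws.com), with us-east-1 as the fallback when the host carries no region. A quick sketch of the new behavior, assuming a moto checkout that includes this commit; the import path and expected values are taken from the test added at the bottom of the diff:

from moto.s3.utils import parse_region_from_url

# Path-style URL: the region sits in the host and the bucket in the path.
assert parse_region_from_url('https://s3.us-west-2.amazonaws.com/bucket') == 'us-west-2'
# Virtual-hosted-style URL: the bucket is a host label and the region follows 's3-'.
assert parse_region_from_url('https://bucket.s3-us-west-2.amazonaws.com') == 'us-west-2'
# No region in the host: fall back to the default region.
assert parse_region_from_url('https://bucket.s3.amazonaws.com') == 'us-east-1'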

moto/s3/responses.py

@@ -18,10 +18,10 @@ from .exceptions import BucketAlreadyExists, S3ClientError, MissingBucket, Missi
     MalformedACLError
 from .models import s3_backend, get_canned_acl, FakeGrantee, FakeGrant, FakeAcl, FakeKey, FakeTagging, FakeTagSet, \
     FakeTag
-from .utils import bucket_name_from_url, metadata_from_headers
+from .utils import bucket_name_from_url, metadata_from_headers, parse_region_from_url
 from xml.dom import minidom
 
-REGION_URL_REGEX = r'\.s3-(.+?)\.amazonaws\.com'
+
 DEFAULT_REGION_NAME = 'us-east-1'
@@ -128,10 +128,7 @@ class ResponseObject(_TemplateEnvironmentMixin):
         parsed_url = urlparse(full_url)
         querystring = parse_qs(parsed_url.query, keep_blank_values=True)
         method = request.method
-        region_name = DEFAULT_REGION_NAME
-        region_match = re.search(REGION_URL_REGEX, full_url)
-        if region_match:
-            region_name = region_match.groups()[0]
+        region_name = parse_region_from_url(full_url)
 
         bucket_name = self.parse_bucket_name_from_url(request, full_url)
         if not bucket_name:

moto/s3/utils.py

@@ -1,4 +1,5 @@
 from __future__ import unicode_literals
+import logging
 
 from boto.s3.key import Key
 import re
@@ -6,6 +7,10 @@ import six
 from six.moves.urllib.parse import urlparse, unquote
 import sys
 
+
+log = logging.getLogger(__name__)
+
+
 bucket_name_regex = re.compile("(.+).s3(.*).amazonaws.com")
@@ -27,6 +32,20 @@ def bucket_name_from_url(url):
     return None
 
 
+REGION_URL_REGEX = re.compile(
+    r'^https?://(s3[-\.](?P<region1>.+)\.amazonaws\.com/(.+)|'
+    r'(.+)\.s3-(?P<region2>.+)\.amazonaws\.com)/?')
+
+
+def parse_region_from_url(url):
+    match = REGION_URL_REGEX.search(url)
+    if match:
+        region = match.group('region1') or match.group('region2')
+    else:
+        region = 'us-east-1'
+    return region
+
+
 def metadata_from_headers(headers):
     metadata = {}
     meta_regex = re.compile(
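
The two named groups in the new pattern map onto the two URL styles: region1 captures the region of a path-style host (s3-<region> or s3.<region>, with the bucket in the path), while region2 captures the region of a virtual-hosted-style host (<bucket>.s3-<region>). A small self-contained sketch of which branch fires, with the pattern copied verbatim from the hunk above:

import re

REGION_URL_REGEX = re.compile(
    r'^https?://(s3[-\.](?P<region1>.+)\.amazonaws\.com/(.+)|'
    r'(.+)\.s3-(?P<region2>.+)\.amazonaws\.com)/?')

# Path-style: the first alternative matches, so only 'region1' is populated.
print(REGION_URL_REGEX.search('https://s3-us-west-2.amazonaws.com/bucket').groupdict())
# -> {'region1': 'us-west-2', 'region2': None}

# Virtual-hosted-style: the second alternative matches, so only 'region2' is populated.
print(REGION_URL_REGEX.search('https://bucket.s3-us-west-2.amazonaws.com').groupdict())
# -> {'region1': None, 'region2': 'us-west-2'}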

tests/test_s3/test_s3_utils.py

@@ -1,6 +1,6 @@
 from __future__ import unicode_literals
 from sure import expect
-from moto.s3.utils import bucket_name_from_url, _VersionedKeyStore
+from moto.s3.utils import bucket_name_from_url, _VersionedKeyStore, parse_region_from_url
 
 
 def test_base_url():
@@ -53,3 +53,21 @@ def test_versioned_key_store():
     d.setlist('key', [[1], [2]])
     d['key'].should.have.length_of(1)
     d.getlist('key').should.be.equal([[1], [2]])
+
+
+def test_parse_region_from_url():
+    expected = 'us-west-2'
+    for url in ['http://s3-us-west-2.amazonaws.com/bucket',
+                'http://s3.us-west-2.amazonaws.com/bucket',
+                'http://bucket.s3-us-west-2.amazonaws.com',
+                'https://s3-us-west-2.amazonaws.com/bucket',
+                'https://s3.us-west-2.amazonaws.com/bucket',
+                'https://bucket.s3-us-west-2.amazonaws.com']:
+        parse_region_from_url(url).should.equal(expected)
+
+    expected = 'us-east-1'
+    for url in ['http://s3.amazonaws.com/bucket',
+                'http://bucket.s3.amazonaws.com',
+                'https://s3.amazonaws.com/bucket',
+                'https://bucket.s3.amazonaws.com']:
+        parse_region_from_url(url).should.equal(expected)