Merge pull request #1460 from okomestudio/s3-region-from-url
Support both virtual-hosted and path-style URLs for region name parsing
This commit is contained in:
commit
326698f3fa
@ -18,10 +18,10 @@ from .exceptions import BucketAlreadyExists, S3ClientError, MissingBucket, Missi
|
|||||||
MalformedACLError
|
MalformedACLError
|
||||||
from .models import s3_backend, get_canned_acl, FakeGrantee, FakeGrant, FakeAcl, FakeKey, FakeTagging, FakeTagSet, \
|
from .models import s3_backend, get_canned_acl, FakeGrantee, FakeGrant, FakeAcl, FakeKey, FakeTagging, FakeTagSet, \
|
||||||
FakeTag
|
FakeTag
|
||||||
from .utils import bucket_name_from_url, metadata_from_headers
|
from .utils import bucket_name_from_url, metadata_from_headers, parse_region_from_url
|
||||||
from xml.dom import minidom
|
from xml.dom import minidom
|
||||||
|
|
||||||
REGION_URL_REGEX = r'\.s3-(.+?)\.amazonaws\.com'
|
|
||||||
# Region assumed for a request whose URL does not name one explicitly
# (matches the behavior of real S3 endpoints without a region label).
DEFAULT_REGION_NAME = 'us-east-1'
|
||||||
|
|
||||||
|
|
||||||
@ -128,10 +128,7 @@ class ResponseObject(_TemplateEnvironmentMixin):
|
|||||||
parsed_url = urlparse(full_url)
|
parsed_url = urlparse(full_url)
|
||||||
querystring = parse_qs(parsed_url.query, keep_blank_values=True)
|
querystring = parse_qs(parsed_url.query, keep_blank_values=True)
|
||||||
method = request.method
|
method = request.method
|
||||||
region_name = DEFAULT_REGION_NAME
|
region_name = parse_region_from_url(full_url)
|
||||||
region_match = re.search(REGION_URL_REGEX, full_url)
|
|
||||||
if region_match:
|
|
||||||
region_name = region_match.groups()[0]
|
|
||||||
|
|
||||||
bucket_name = self.parse_bucket_name_from_url(request, full_url)
|
bucket_name = self.parse_bucket_name_from_url(request, full_url)
|
||||||
if not bucket_name:
|
if not bucket_name:
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
import logging
|
||||||
|
|
||||||
from boto.s3.key import Key
|
from boto.s3.key import Key
|
||||||
import re
|
import re
|
||||||
@ -6,6 +7,10 @@ import six
|
|||||||
from six.moves.urllib.parse import urlparse, unquote
|
from six.moves.urllib.parse import urlparse, unquote
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
# Extracts the bucket name (group 1) from a virtual-hosted-style S3 host such
# as "mybucket.s3.amazonaws.com" or "mybucket.s3-us-west-2.amazonaws.com".
# The dots are escaped so they only match literal '.' — the previous pattern
# ("(.+).s3(.*).amazonaws.com") let '.' match ANY character, so hosts like
# "bucketXs3Xamazonaws?com" were wrongly accepted.
bucket_name_regex = re.compile(r"(.+)\.s3(.*)\.amazonaws\.com")
|
||||||
|
|
||||||
|
|
||||||
@ -27,6 +32,20 @@ def bucket_name_from_url(url):
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
# Matches the region embedded in an S3 endpoint URL.  Two host styles exist:
#   path-style:           https://s3-us-west-2.amazonaws.com/bucket/...
#                         https://s3.us-west-2.amazonaws.com/bucket/...
#   virtual-hosted-style: https://bucket.s3-us-west-2.amazonaws.com/...
#                         https://bucket.s3.us-west-2.amazonaws.com/...
# Fix: the virtual-hosted alternative previously required 's3-' only, so the
# dot-separated form 'bucket.s3.<region>.amazonaws.com' silently fell back to
# the default region; 's3[-.]' now accepts both separators in both styles.
REGION_URL_REGEX = re.compile(
    r'^https?://(s3[-.](?P<region1>.+)\.amazonaws\.com/(.+)|'
    r'(.+)\.s3[-.](?P<region2>.+)\.amazonaws\.com)/?')


def parse_region_from_url(url):
    """Return the AWS region name encoded in an S3 endpoint *url*.

    Handles both path-style and virtual-hosted-style endpoints, with the
    region separated from the ``s3`` label by either ``-`` or ``.``.  URLs
    that carry no explicit region (e.g. ``https://s3.amazonaws.com/bucket``)
    default to ``'us-east-1'``.
    """
    match = REGION_URL_REGEX.search(url)
    if match:
        # Exactly one of the two named groups is populated for any match.
        return match.group('region1') or match.group('region2')
    return 'us-east-1'
|
||||||
|
|
||||||
|
|
||||||
def metadata_from_headers(headers):
|
def metadata_from_headers(headers):
|
||||||
metadata = {}
|
metadata = {}
|
||||||
meta_regex = re.compile(
|
meta_regex = re.compile(
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
from sure import expect
|
from sure import expect
|
||||||
from moto.s3.utils import bucket_name_from_url, _VersionedKeyStore
|
from moto.s3.utils import bucket_name_from_url, _VersionedKeyStore, parse_region_from_url
|
||||||
|
|
||||||
|
|
||||||
def test_base_url():
|
def test_base_url():
|
||||||
@ -53,3 +53,21 @@ def test_versioned_key_store():
|
|||||||
d.setlist('key', [[1], [2]])
|
d.setlist('key', [[1], [2]])
|
||||||
d['key'].should.have.length_of(1)
|
d['key'].should.have.length_of(1)
|
||||||
d.getlist('key').should.be.equal([[1], [2]])
|
d.getlist('key').should.be.equal([[1], [2]])
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_region_from_url():
    """URLs with an explicit region parse to it; region-less URLs default."""
    cases = {
        'us-west-2': (
            'http://s3-us-west-2.amazonaws.com/bucket',
            'http://s3.us-west-2.amazonaws.com/bucket',
            'http://bucket.s3-us-west-2.amazonaws.com',
            'https://s3-us-west-2.amazonaws.com/bucket',
            'https://s3.us-west-2.amazonaws.com/bucket',
            'https://bucket.s3-us-west-2.amazonaws.com',
        ),
        'us-east-1': (
            'http://s3.amazonaws.com/bucket',
            'http://bucket.s3.amazonaws.com',
            'https://s3.amazonaws.com/bucket',
            'https://bucket.s3.amazonaws.com',
        ),
    }
    for expected_region, urls in cases.items():
        for endpoint_url in urls:
            parse_region_from_url(endpoint_url).should.equal(expected_region)
|
||||||
|
Loading…
Reference in New Issue
Block a user