2018-01-30 16:10:43 -08:00
|
|
|
import logging
|
2014-12-07 12:43:14 -05:00
|
|
|
|
2013-02-18 21:22:03 -05:00
|
|
|
import re
|
2021-07-26 07:40:39 +01:00
|
|
|
from urllib.parse import urlparse, unquote, quote
|
2020-07-12 18:29:10 +01:00
|
|
|
from requests.structures import CaseInsensitiveDict
|
2022-10-26 21:36:02 +00:00
|
|
|
from typing import Union, Tuple
|
2014-12-07 12:43:14 -05:00
|
|
|
import sys
|
2021-03-26 17:51:19 +01:00
|
|
|
from moto.settings import S3_IGNORE_SUBDOMAIN_BUCKETNAME
|
2013-02-18 21:22:03 -05:00
|
|
|
|
2018-01-30 16:10:43 -08:00
|
|
|
|
|
|
|
log = logging.getLogger(__name__)


# Matches virtual-host-style S3 hosts such as "bucket.s3.amazonaws.com" or
# "bucket.s3.us-east-1.amazonaws.com".  Group 1 is the bucket name; group 2
# is the (possibly empty) region portion between "s3" and "amazonaws.com".
# NOTE: the final dot is escaped — the previous pattern (`amazonaws.com`)
# let `.` match any character, so e.g. "bucket.s3.amazonawsXcom" matched.
bucket_name_regex = re.compile(r"(.+)\.s3(.*)\.amazonaws\.com")

# Headers S3 allows clients to set directly on an object, in addition to the
# "x-amz-meta-*" user-metadata namespace (see metadata_from_headers).
user_settable_fields = {
    "content-md5",
    "content-language",
    "content-type",
    "content-encoding",
    "cache-control",
    "expires",
    "content-disposition",
    "x-robots-tag",
}
|
2013-02-18 21:22:03 -05:00
|
|
|
|
|
|
|
|
2013-05-03 19:33:13 -04:00
|
|
|
def bucket_name_from_url(url):
    """Return the bucket name encoded in ``url``'s host, or ``None``.

    Handles AWS hosts (``<bucket>.s3[.<region>].amazonaws.com``) as well as
    custom domains where the bucket is the leading subdomain.
    """
    if S3_IGNORE_SUBDOMAIN_BUCKETNAME:
        # Subdomain-style bucket names are explicitly disabled via settings.
        return None

    host = urlparse(url).netloc
    if host[:4] == "www.":
        host = host[4:]

    if "amazonaws.com" not in host:
        # Custom domain: the bucket is the first subdomain, if there is one.
        return host.split(".")[0] if "." in host else None

    match = bucket_name_regex.search(host)
    return match.group(1) if match else None
|
2013-04-13 19:00:37 -04:00
|
|
|
|
|
|
|
|
2020-03-18 13:02:07 +00:00
|
|
|
# 'owi-common-cf', 'snippets/test.json' = bucket_and_name_from_url('s3://owi-common-cf/snippets/test.json')
|
2022-10-26 21:36:02 +00:00
|
|
|
def bucket_and_name_from_url(url: str) -> Union[Tuple[str, str], Tuple[None, None]]:
    """Split an ``s3://bucket/key`` URL into ``(bucket, key)``.

    >>> bucket_and_name_from_url("s3://owi-common-cf/snippets/test.json")
    ('owi-common-cf', 'snippets/test.json')

    Returns ``(None, None)`` for anything that is not an ``s3://`` URL.
    A URL with no key part (``s3://bucket``) yields an empty key instead of
    raising ``ValueError`` as the previous ``str.index``-based version did.
    """
    prefix = "s3://"
    if not url.startswith(prefix):
        return None, None
    # partition splits on the first "/" after the bucket name; when there is
    # no "/" at all, the remainder (the key) is simply "".
    bucket_name, _, key = url[len(prefix) :].partition("/")
    return bucket_name, key
|
|
|
|
|
|
|
|
|
2018-01-30 16:10:43 -08:00
|
|
|
# Matches both path-style ("https://s3.<region>.amazonaws.com/<bucket>") and
# virtual-host-style ("https://<bucket>.s3.<region>.amazonaws.com") endpoint
# URLs.  Exactly one of the named groups "region1"/"region2" captures the
# region; `[-\.]` accepts both the legacy "s3-<region>" and the modern
# "s3.<region>" separators.
REGION_URL_REGEX = re.compile(
    # path-style: region follows the "s3" label, bucket is in the path
    r"^https?://(s3[-\.](?P<region1>.+)\.amazonaws\.com/(.+)|"
    # virtual-host-style: bucket precedes "s3", region follows it
    r"(.+)\.s3[-\.](?P<region2>.+)\.amazonaws\.com)/?"
)
|
2018-01-30 16:10:43 -08:00
|
|
|
|
|
|
|
|
2022-03-31 05:47:29 -05:00
|
|
|
def parse_region_from_url(url, use_default_region=True):
    """Extract the AWS region embedded in an S3 endpoint URL.

    Falls back to ``us-east-1`` when the URL carries no region information,
    or to ``None`` if ``use_default_region`` is false.
    """
    found = REGION_URL_REGEX.search(url)
    if found is None:
        return "us-east-1" if use_default_region else None
    # Only one of the two alternatives can have matched.
    return found.group("region1") or found.group("region2")
|
|
|
|
|
|
|
|
|
2014-12-07 12:43:14 -05:00
|
|
|
def metadata_from_headers(headers):
    """Collect S3 object metadata from a set of request headers.

    Picks up every ``x-amz-meta-*`` header (key lower-cased, prefix kept)
    plus the fixed set of directly-settable headers in
    ``user_settable_fields``.

    :param headers: mapping of header name -> value; values may be lists
        (werkzeug-style multi-value headers), in which case the first
        element is used.
    :return: ``CaseInsensitiveDict`` of metadata key -> value.
    """
    metadata = CaseInsensitiveDict()
    meta_regex = re.compile(r"^x-amz-meta-([a-zA-Z0-9\-_.]+)$", flags=re.IGNORECASE)
    for header in headers.keys():
        # Non-string header keys (e.g. bytes) cannot carry S3 metadata.
        if not isinstance(header, str):
            continue
        result = meta_regex.match(header)
        meta_key = None
        if result:
            # User-defined metadata: store under the full lower-cased name.
            meta_key = result.group(0).lower()
        elif header.lower() in user_settable_fields:
            # Check for special metadata that doesn't start with x-amz-meta
            meta_key = header
        if meta_key:
            value = headers[header]
            # Multi-value containers hand back a list; keep the first entry.
            # (isinstance replaces the previous `type(...) == list` check.)
            metadata[meta_key] = value[0] if isinstance(value, list) else value
    return metadata
|
|
|
|
|
|
|
|
|
2019-09-24 17:07:58 -05:00
|
|
|
def clean_key_name(key_name):
    """Percent-decode ``key_name`` (inverse of ``undo_clean_key_name``)."""
    decoded = unquote(key_name)
    return decoded
|
|
|
|
|
|
|
|
|
|
|
|
def undo_clean_key_name(key_name):
    """Percent-encode ``key_name`` (inverse of ``clean_key_name``)."""
    encoded = quote(key_name)
    return encoded
|
2014-06-27 15:37:51 -06:00
|
|
|
|
|
|
|
|
|
|
|
class _VersionedKeyStore(dict):

    """A simplified/modified version of Django's `MultiValueDict` taken from:
    https://github.com/django/django/blob/70576740b0bb5289873f5a9a9a4e1a26b2c330e5/django/utils/datastructures.py#L282

    Each key maps to a *list* of versions; plain ``[]`` access returns the
    newest version, while the underlying list is reachable via ``getlist``.
    Note that ``items``/``values``/``lists`` deliberately shadow the normal
    ``dict`` methods with generator-returning aliases (see the bottom of the
    class).
    """

    def __sgetitem__(self, key):
        # Raw dict lookup: returns the full version list for `key`.
        return super().__getitem__(key)

    def pop(self, key):
        """Remove `key`, disposing every stored version first.

        NOTE(review): assumes every stored value has a ``dispose()`` method
        (moto's FakeKey does) — confirm before storing other types.
        Unlike ``dict.pop`` there is no `default`; a missing key raises
        KeyError, and nothing is returned.
        """
        for version in self.getlist(key, []):
            version.dispose()
        super().pop(key)

    def __getitem__(self, key):
        # The most recently stored version is the last list element.
        return self.__sgetitem__(key)[-1]

    def __setitem__(self, key, value):
        # Append to the existing version list, or start a new one.
        try:
            current = self.__sgetitem__(key)
            current.append(value)
        except (KeyError, IndexError):
            current = [value]

        super().__setitem__(key, current)

    def get(self, key, default=None):
        """Return the newest version for `key`, or `default` if absent."""
        try:
            return self[key]
        except (KeyError, IndexError):
            pass
        return default

    def getlist(self, key, default=None):
        """Return the full version list for `key`, or `default` if absent."""
        try:
            return self.__sgetitem__(key)
        except (KeyError, IndexError):
            pass
        return default

    def setlist(self, key, list_):
        """Replace `key`'s version list wholesale.

        Scalars are wrapped in a one-element list; tuples are copied to a
        list so later appends work.
        """
        if isinstance(list_, tuple):
            list_ = list(list_)
        elif not isinstance(list_, list):
            list_ = [list_]

        super().__setitem__(key, list_)

    def _iteritems(self):
        # Yields (key, newest-version) pairs.
        for key in self._self_iterable():
            yield key, self[key]

    def _itervalues(self):
        # Yields the newest version of each key.
        for key in self._self_iterable():
            yield self[key]

    def _iterlists(self):
        # Yields (key, full-version-list) pairs.
        for key in self._self_iterable():
            yield key, self.getlist(key)

    def item_size(self):
        """Approximate memory footprint: shallow size of each version list.

        NOTE: ``sys.getsizeof`` is shallow — it does not include the sizes
        of the version objects themselves.
        """
        size = 0
        for val in self._self_iterable().values():
            size += sys.getsizeof(val)
        return size

    def _self_iterable(self):
        # to enable concurrency, return a copy, to avoid "dictionary changed size during iteration"
        # TODO: look into replacing with a locking mechanism, potentially
        return dict(self)

    # Shadow the dict protocol methods with the generator-returning variants
    # above; callers therefore always see latest-version semantics.
    items = iteritems = _iteritems
    lists = iterlists = _iterlists
    values = itervalues = _itervalues
|