2022-09-28 09:35:12 +00:00
|
|
|
from functools import lru_cache
|
2015-11-27 19:14:40 +00:00
|
|
|
|
|
|
|
import datetime
|
2013-02-24 03:26:46 +00:00
|
|
|
import inspect
|
2013-03-05 13:14:43 +00:00
|
|
|
import re
|
2019-11-20 08:27:46 +00:00
|
|
|
from botocore.exceptions import ClientError
|
2021-12-24 21:02:45 +00:00
|
|
|
from boto3 import Session
|
|
|
|
from moto.settings import allow_unknown_region
|
2022-06-04 11:30:16 +00:00
|
|
|
from threading import RLock
|
2021-07-26 06:40:39 +00:00
|
|
|
from urllib.parse import urlparse
|
2022-08-13 09:49:43 +00:00
|
|
|
from uuid import uuid4
|
2017-10-17 00:06:22 +00:00
|
|
|
|
|
|
|
|
2013-02-24 03:26:46 +00:00
|
|
|
def camelcase_to_underscores(argument):
    """Convert a camelcase param like ``theNewAttribute`` to the equivalent
    python underscore variable like ``the_new_attribute``."""
    if not argument:
        return argument
    pieces = []
    previous_title = True
    last = len(argument) - 1
    for position, current in enumerate(argument):
        # A character past the end of the string counts as title-cased,
        # mirroring the original IndexError fallback.
        following_title = argument[position + 1].istitle() if position < last else True
        is_title = current.istitle()
        # An underscore goes in front of a capital that either starts a new
        # word (previous char was lowercase) or ends an acronym run (next
        # char is lowercase) — but never before the first character.
        boundary = is_title and (not following_title or not previous_title)
        if position and boundary:
            pieces.append("_")
        previous_title = is_title
        if not current.isspace():  # whitespace is dropped entirely
            pieces.append(current.lower())
    return "".join(pieces)
|
|
|
|
|
|
|
|
|
2015-11-23 13:09:31 +00:00
|
|
|
def underscores_to_camelcase(argument):
    """Convert an underscored param like ``the_new_attribute`` to the
    equivalent camelcase version like ``theNewAttribute``.  Note that the
    first letter is NOT capitalized by this function."""
    chars = []
    capitalize_next = False
    for current in argument:
        if current == "_":
            # Underscores themselves are dropped; they only flag that the
            # next character should be upper-cased.
            capitalize_next = True
            continue
        chars.append(current.upper() if capitalize_next else current)
        capitalize_next = False
    return "".join(chars)
|
|
|
|
|
|
|
|
|
2020-10-12 19:53:30 +00:00
|
|
|
def pascal_to_camelcase(argument):
    """Convert a PascalCase param to the camelCase equivalent.

    :param str argument: the PascalCase string; an empty string is returned
        unchanged instead of raising ``IndexError``.
    :return: the same string with its first character lower-cased.
    """
    # Slicing (rather than argument[0]) keeps this safe for empty input.
    return argument[:1].lower() + argument[1:]
|
|
|
|
|
|
|
|
|
2021-03-05 10:42:07 +00:00
|
|
|
def camelcase_to_pascal(argument):
    """Convert a camelCase param to the PascalCase equivalent.

    :param str argument: the camelCase string; an empty string is returned
        unchanged instead of raising ``IndexError``.
    :return: the same string with its first character upper-cased.
    """
    # Slicing (rather than argument[0]) keeps this safe for empty input.
    return argument[:1].upper() + argument[1:]
|
|
|
|
|
|
|
|
|
2013-02-24 03:26:46 +00:00
|
|
|
def method_names_from_class(clazz):
    """Return the names of all plain functions defined on *clazz*,
    sorted alphabetically (the order ``inspect.getmembers`` yields)."""
    members = inspect.getmembers(clazz, predicate=inspect.isfunction)
    return [name for name, _member in members]
|
2013-02-24 16:06:42 +00:00
|
|
|
|
|
|
|
|
2013-03-05 13:14:43 +00:00
|
|
|
def convert_regex_to_flask_path(url_path):
    """
    Converts a regex matching url to one that can be used with flask
    """
    # Flask routes carry no end-of-string anchors.
    url_path = url_path.replace("$", "")

    def _named_group_to_converter(match):
        group_name, group_pattern = match.groups()
        return '<regex("{0}"):{1}>'.format(group_pattern, group_name)

    # Turn every named regex group into a Flask converter placeholder.
    url_path = re.sub(r"\(\?P<(.*?)>(.*?)\)", _named_group_to_converter, url_path)

    if url_path.endswith("/?"):
        # Flask does own handling of trailing slashes
        url_path = url_path.rstrip("/?")
    return url_path
|
|
|
|
|
|
|
|
|
2022-01-18 15:18:57 +00:00
|
|
|
class convert_to_flask_response(object):
    """Wraps a moto dispatcher callback so that it can serve as a Flask view
    function, translating the callback's result into a Flask ``Response``."""

    def __init__(self, callback):
        self.callback = callback

    @property
    def __name__(self):
        # For instance methods, use class and method names. Otherwise
        # use module and method name
        if inspect.ismethod(self.callback):
            prefix = self.callback.__self__.__class__.__name__
        else:
            prefix = self.callback.__module__
        return "{0}.{1}".format(prefix, self.callback.__name__)

    def __call__(self, args=None, **kwargs):
        from flask import request, Response
        from moto.moto_api import recorder

        try:
            recorder._record_request(request)
            result = self.callback(request, request.url, dict(request.headers))
        except ClientError as exc:
            result = 400, {}, exc.response["Error"]["Message"]

        # Callbacks either return a (status, headers, body) triple or a
        # bare body, which implies a 200 with no extra headers.
        if len(result) == 3:
            status_code, header_dict, body = result
        else:
            status_code, header_dict, body = 200, {}, result

        flask_response = Response(response=body, status=status_code, headers=header_dict)
        if request.method == "HEAD" and "content-length" in header_dict:
            # Flask strips the body for HEAD; restore the declared length.
            flask_response.headers["Content-Length"] = header_dict["content-length"]
        return flask_response
|
2013-05-24 21:22:34 +00:00
|
|
|
|
|
|
|
|
2017-02-16 03:35:45 +00:00
|
|
|
class convert_flask_to_responses_response(object):
    """Adapts a Flask-style callback so its result can be consumed by the
    ``responses`` library: normalizes byte headers, returns a plain triple."""

    def __init__(self, callback):
        self.callback = callback

    @property
    def __name__(self):
        # For instance methods, use class and method names. Otherwise
        # use module and method name
        if inspect.ismethod(self.callback):
            prefix = self.callback.__self__.__class__.__name__
        else:
            prefix = self.callback.__module__
        return "{0}.{1}".format(prefix, self.callback.__name__)

    def __call__(self, request, *args, **kwargs):
        # Header values may arrive as bytes; normalize them to str in place.
        for header_name, header_value in request.headers.items():
            if isinstance(header_value, bytes):
                request.headers[header_name] = header_value.decode("utf-8")

        status, headers, body = self.callback(request, request.url, request.headers)
        return status, headers, body
|
|
|
|
|
|
|
|
|
2014-11-30 04:34:40 +00:00
|
|
|
def iso_8601_datetime_with_milliseconds(datetime):
    """Format a datetime as ISO-8601 with millisecond precision,
    e.g. ``2015-01-01T12:00:00.123Z``.  (The parameter shadows the
    ``datetime`` module name; callers pass datetime objects.)"""
    milliseconds = datetime.microsecond // 1000
    return "{}.{:03d}Z".format(datetime.strftime("%Y-%m-%dT%H:%M:%S"), milliseconds)
|
2013-05-24 21:22:34 +00:00
|
|
|
|
|
|
|
|
2021-08-16 07:52:19 +00:00
|
|
|
# Even Python does not support nanoseconds, other languages like Go do (needed for Terraform)
def iso_8601_datetime_with_nanoseconds(datetime):
    """Format a datetime as ISO-8601 with pseudo-nanosecond precision by
    zero-padding the microseconds, e.g. ``...T12:00:00.123456000Z``."""
    return "{}000Z".format(datetime.strftime("%Y-%m-%dT%H:%M:%S.%f"))
|
|
|
|
|
|
|
|
|
2017-04-14 01:39:00 +00:00
|
|
|
def iso_8601_datetime_without_milliseconds(datetime):
    """Format a datetime as ``%Y-%m-%dT%H:%M:%SZ``; ``None`` passes through."""
    if datetime is None:
        return None
    return datetime.strftime("%Y-%m-%dT%H:%M:%SZ")
|
2017-04-14 01:39:00 +00:00
|
|
|
|
|
|
|
|
2020-07-26 15:00:15 +00:00
|
|
|
def iso_8601_datetime_without_milliseconds_s3(datetime):
    """Format a datetime with a fixed ``.000`` fractional part (S3 style);
    ``None`` passes through."""
    if datetime is None:
        return None
    return datetime.strftime("%Y-%m-%dT%H:%M:%S.000Z")
|
2020-07-26 15:00:15 +00:00
|
|
|
|
|
|
|
|
2019-10-31 15:44:26 +00:00
|
|
|
# strftime/strptime pattern for RFC 1123 (HTTP-date) timestamps,
# e.g. "Wed, 01 Jan 2020 00:00:00 GMT"
RFC1123 = "%a, %d %b %Y %H:%M:%S GMT"
|
2017-05-19 22:59:25 +00:00
|
|
|
|
|
|
|
|
2013-05-24 21:22:34 +00:00
|
|
|
def rfc_1123_datetime(datetime):
    """Render *datetime* in RFC 1123 (HTTP-date) format."""
    return datetime.strftime(RFC1123)
|
2015-11-27 19:14:40 +00:00
|
|
|
|
|
|
|
|
2021-12-01 23:06:58 +00:00
|
|
|
def str_to_rfc_1123_datetime(value):
    """Parse an RFC 1123 (HTTP-date) string back into a ``datetime``."""
    return datetime.datetime.strptime(value, RFC1123)
|
2017-05-19 22:59:25 +00:00
|
|
|
|
|
|
|
|
2015-11-27 19:14:40 +00:00
|
|
|
def unix_time(dt=None):
    """Seconds between *dt* (default: the current UTC time) and the Unix
    epoch, as a float with microsecond resolution."""
    moment = dt or datetime.datetime.utcnow()
    since_epoch = moment - datetime.datetime.utcfromtimestamp(0)
    return since_epoch.days * 86400 + since_epoch.seconds + since_epoch.microseconds / 1e6
|
|
|
|
|
|
|
|
|
|
|
|
def unix_time_millis(dt=None):
    """Milliseconds since the Unix epoch for *dt* (default: now, UTC)."""
    return unix_time(dt) * 1000.0
|
2017-10-17 00:06:22 +00:00
|
|
|
|
|
|
|
|
2018-09-06 22:15:27 +00:00
|
|
|
def path_url(url):
    """Return the path portion of *url*, plus the query string if present.
    An empty path becomes ``"/"``."""
    parts = urlparse(url)
    path = parts.path or "/"
    if parts.query:
        path = "{}?{}".format(path, parts.query)
    return path
|
2019-12-10 01:38:26 +00:00
|
|
|
|
|
|
|
|
2019-09-04 14:55:34 +00:00
|
|
|
def tags_from_query_string(
    querystring_dict, prefix="Tag", key_suffix="Key", value_suffix="Value"
):
    """Extract a tag dict from a parsed querystring whose parameters follow
    the ``{prefix}.{n}.{key_suffix}`` / ``{prefix}.{n}.{value_suffix}`` shape.

    Values in *querystring_dict* are expected to be lists (as produced by
    querystring parsing); a key with no matching value maps to ``None``.
    """
    tags = {}
    for param_name in querystring_dict.keys():
        if not (param_name.startswith(prefix) and param_name.endswith(key_suffix)):
            continue
        # Pull the numeric index out of e.g. "Tag.3.Key".
        index = param_name.replace(prefix + ".", "").replace("." + key_suffix, "")
        tag_key = querystring_dict.get(f"{prefix}.{index}.{key_suffix}")[0]
        value_param = f"{prefix}.{index}.{value_suffix}"
        if value_param in querystring_dict:
            tags[tag_key] = querystring_dict.get(value_param)[0]
        else:
            tags[tag_key] = None
    return tags
|
2020-07-29 10:44:02 +00:00
|
|
|
|
|
|
|
|
|
|
|
def tags_from_cloudformation_tags_list(tags_list):
    """Return tags in dict form from cloudformation resource tags form (list of dicts)"""
    return {entry["Key"]: entry["Value"] for entry in tags_list}
|
2020-10-12 19:53:30 +00:00
|
|
|
|
|
|
|
|
|
|
|
def remap_nested_keys(root, key_transform):
    """Recursively apply *key_transform* to every dict key in *root*.

    Lists and tuples are traversed (and returned as lists); dicts are
    rebuilt with transformed keys; any other value is returned untouched.
    A common use in moto is changing the casing of dict keys, e.g.
    ``remap_nested_keys({'KeyName': 'Value'}, camelcase_to_underscores)``
    yields ``{'key_name': 'Value'}``.

    Args:
        root: The target data to traverse. Supports iterables like
            :class:`list`, :class:`tuple`, and :class:`dict`.
        key_transform (callable): This function is called on every
            dictionary key found in *root*.
    """
    if isinstance(root, (list, tuple)):
        return [remap_nested_keys(entry, key_transform) for entry in root]
    if isinstance(root, dict):
        return {
            key_transform(key): remap_nested_keys(value, key_transform)
            for key, value in root.items()
        }
    return root
|
2021-06-30 07:15:45 +00:00
|
|
|
|
|
|
|
|
|
|
|
def merge_dicts(dict1, dict2, remove_nulls=False):
    """Given two arbitrarily nested dictionaries, merge the second dict into the first.

    :param dict dict1: the dictionary to be updated (modified in place).
    :param dict dict2: a dictionary of keys/values to be merged into dict1.
    :param bool remove_nulls: If true, updated values equal to None or an empty
        dictionary will be removed from dict1.
    """
    for key in dict2:
        if isinstance(dict2[key], dict):
            # Only recurse when both sides hold a dict; otherwise (key
            # missing, or dict1 holds a non-dict) overwrite.  Previously a
            # non-dict value in dict1 caused a TypeError during recursion.
            if isinstance(dict1.get(key), dict):
                merge_dicts(dict1[key], dict2[key], remove_nulls)
            else:
                dict1[key] = dict2[key]
            if remove_nulls and dict1.get(key) == {}:
                dict1.pop(key, None)
        else:
            dict1[key] = dict2[key]
            if remove_nulls and dict1[key] is None:
                dict1.pop(key)
|
2021-09-30 15:28:13 +00:00
|
|
|
|
|
|
|
|
2021-11-16 12:24:14 +00:00
|
|
|
def aws_api_matches(pattern, string):
    """
    AWS API can match a value based on a glob, or an exact match
    """
    # Translate the AWS glob into a regex: every '*' not escaped with a
    # backslash becomes '.*', and every un-escaped '?' becomes '.?'
    # (negative lookbehind skips the escaped forms).
    regex = re.sub(r"(?<!\\)\*", r".*", pattern)
    regex = re.sub(r"(?<!\\)\?", r".?", regex)

    # The AWS API anchors the pattern at both ends.
    return bool(re.match(f"^{regex}$", str(string)))
|
2021-12-24 21:02:45 +00:00
|
|
|
|
|
|
|
|
2022-03-31 10:47:29 +00:00
|
|
|
def extract_region_from_aws_authorization(string):
    """Pull the region out of a SigV4 ``Authorization`` header value;
    return ``None`` when no credential scope can be found."""
    header = string or ""
    extracted = re.sub(r".*Credential=[^/]+/[^/]+/([^/]+)/.*", r"\1", header)
    # re.sub returns its input unchanged when the pattern does not match.
    return None if extracted == header else extracted
|
|
|
|
|
|
|
|
|
2022-06-04 11:30:16 +00:00
|
|
|
# Guards lazy creation of per-account / per-region backends across threads.
backend_lock = RLock()
|
|
|
|
|
|
|
|
|
2022-06-09 17:40:22 +00:00
|
|
|
class AccountSpecificBackend(dict):
    """
    Dictionary storing the data for a service in a specific account.
    Data access pattern:
      account_specific_backend[region: str] = backend: BaseBackend
    """

    def __init__(
        self, service_name, account_id, backend, use_boto3_regions, additional_regions
    ):
        self.service_name = service_name
        self.account_id = account_id
        # Backend class to instantiate lazily, once per region.
        self.backend = backend
        # Regions for which a backend may be created on first access.
        self.regions = []
        if use_boto3_regions:
            sess = Session()
            # Collect regions from all partitions boto3 knows about.
            self.regions.extend(sess.get_available_regions(service_name))
            self.regions.extend(
                sess.get_available_regions(service_name, partition_name="aws-us-gov")
            )
            self.regions.extend(
                sess.get_available_regions(service_name, partition_name="aws-cn")
            )
        self.regions.extend(additional_regions or [])
        # Unique id backing __hash__/__eq__ so the lru_cache on
        # __getitem__ can key on this instance.
        self._id = str(uuid4())

    def __hash__(self):
        # Identity-style hash via the per-instance uuid (dict subclasses are
        # otherwise unhashable-by-content); required for lru_cache below.
        return hash(self._id)

    def __eq__(self, other):
        return (
            other
            and isinstance(other, AccountSpecificBackend)
            and other._id == self._id
        )

    def __ne__(self, other):
        return not self.__eq__(other)

    def reset(self):
        # Reset every region-specific backend created so far.
        for region_specific_backend in self.values():
            region_specific_backend.reset()

    def __contains__(self, region):
        # A region "exists" if it is known (creatable) or already created.
        return region in self.regions or region in self.keys()

    @lru_cache()
    def __getitem__(self, region_name):
        # NOTE: lru_cache keys on (self, region_name); the __hash__/__eq__
        # above make that safe, though it keeps instances alive for the
        # cache's lifetime.
        if region_name in self.keys():
            return super().__getitem__(region_name)
        # Create the backend for a specific region
        with backend_lock:
            # Re-check under the lock so concurrent callers create at most
            # one backend per region.
            if region_name in self.regions and region_name not in self.keys():
                super().__setitem__(
                    region_name, self.backend(region_name, account_id=self.account_id)
                )
            if region_name not in self.regions and allow_unknown_region():
                super().__setitem__(
                    region_name, self.backend(region_name, account_id=self.account_id)
                )
        return super().__getitem__(region_name)
|
|
|
|
|
|
|
|
|
2021-12-24 21:02:45 +00:00
|
|
|
class BackendDict(dict):
    """
    Data Structure to store everything related to a specific service.
    Format:
      [account_id: str]: AccountSpecificBackend
      [account_id: str][region: str] = BaseBackend
    """

    def __init__(
        self, backend, service_name, use_boto3_regions=True, additional_regions=None
    ):
        # Backend class handed down to each AccountSpecificBackend.
        self.backend = backend
        self.service_name = service_name
        self._use_boto3_regions = use_boto3_regions
        self._additional_regions = additional_regions
        # Unique id backing __hash__/__eq__ so the lru_cache on
        # __getitem__ can key on this instance.
        self._id = str(uuid4())

    def __hash__(self):
        # Required for the LRUcache to work.
        # service_name is enough to determine uniqueness - other properties are dependent
        return hash(self._id)

    def __eq__(self, other):
        return other and isinstance(other, BackendDict) and other._id == self._id

    def __ne__(self, other):
        return not self.__eq__(other)

    @lru_cache()
    def __getitem__(self, account_id) -> AccountSpecificBackend:
        # Lazily create the per-account backend on first access; cached so
        # repeated lookups skip the lock entirely.
        self._create_account_specific_backend(account_id)
        return super().__getitem__(account_id)

    def _create_account_specific_backend(self, account_id) -> None:
        # Serialized via backend_lock so concurrent requests for the same
        # account create exactly one AccountSpecificBackend.
        with backend_lock:
            if account_id not in self.keys():
                self[account_id] = AccountSpecificBackend(
                    service_name=self.service_name,
                    account_id=account_id,
                    backend=self.backend,
                    use_boto3_regions=self._use_boto3_regions,
                    additional_regions=self._additional_regions,
                )
|