Back to Black
This commit is contained in:
parent ea489bce6c
commit 5697ff87a8

docs/conf.py (150 changed lines)
@@ -20,12 +20,12 @@ import shlex
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-#sys.path.insert(0, os.path.abspath('.'))
+# sys.path.insert(0, os.path.abspath('.'))

 # -- General configuration ------------------------------------------------

 # If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
+# needs_sphinx = '1.0'

 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -33,23 +33,23 @@ import shlex
 extensions = []

 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]

 # The suffix(es) of source filenames.
 # You can specify multiple suffix as a list of string:
 # source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
+source_suffix = ".rst"

 # The encoding of source files.
-#source_encoding = 'utf-8-sig'
+# source_encoding = 'utf-8-sig'

 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"

 # General information about the project.
-project = 'Moto'
-copyright = '2015, Steve Pulec'
-author = 'Steve Pulec'
+project = "Moto"
+copyright = "2015, Steve Pulec"
+author = "Steve Pulec"

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -57,6 +57,7 @@ author = 'Steve Pulec'
 #
 # The short X.Y version.
 import moto
+
 version = moto.__version__
 # The full version, including alpha/beta/rc tags.
 release = moto.__version__
@@ -70,37 +71,37 @@ language = None

 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:
-#today = ''
+# today = ''
 # Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
+# today_fmt = '%B %d, %Y'

 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
-exclude_patterns = ['_build']
+exclude_patterns = ["_build"]

 # The reST default role (used for this markup: `text`) to use for all
 # documents.
-#default_role = None
+# default_role = None

 # If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
+# add_function_parentheses = True

 # If true, the current module name will be prepended to all description
 # unit titles (such as .. function::).
-#add_module_names = True
+# add_module_names = True

 # If true, sectionauthor and moduleauthor directives will be shown in the
 # output. They are ignored by default.
-#show_authors = False
+# show_authors = False

 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"

 # A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
+# modindex_common_prefix = []

 # If true, keep warnings as "system message" paragraphs in the built documents.
-#keep_warnings = False
+# keep_warnings = False

 # If true, `todo` and `todoList` produce output, else they produce nothing.
 todo_include_todos = False
@@ -110,156 +111,149 @@ todo_include_todos = False

 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
-html_theme = 'sphinx_rtd_theme'
+html_theme = "sphinx_rtd_theme"

 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
 # documentation.
-#html_theme_options = {}
+# html_theme_options = {}

 # Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = []
+# html_theme_path = []

 # The name for this set of Sphinx documents. If None, it defaults to
 # "<project> v<release> documentation".
-#html_title = None
+# html_title = None

 # A shorter title for the navigation bar. Default is the same as html_title.
-#html_short_title = None
+# html_short_title = None

 # The name of an image file (relative to this directory) to place at the top
 # of the sidebar.
-#html_logo = None
+# html_logo = None

 # The name of an image file (within the static path) to use as favicon of the
 # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
 # pixels large.
-#html_favicon = None
+# html_favicon = None

 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]

 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory. These files are copied
 # directly to the root of the documentation.
-#html_extra_path = []
+# html_extra_path = []

 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
+# html_last_updated_fmt = '%b %d, %Y'

 # If true, SmartyPants will be used to convert quotes and dashes to
 # typographically correct entities.
-#html_use_smartypants = True
+# html_use_smartypants = True

 # Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
+# html_sidebars = {}

 # Additional templates that should be rendered to pages, maps page names to
 # template names.
-#html_additional_pages = {}
+# html_additional_pages = {}

 # If false, no module index is generated.
-#html_domain_indices = True
+# html_domain_indices = True

 # If false, no index is generated.
-#html_use_index = True
+# html_use_index = True

 # If true, the index is split into individual pages for each letter.
-#html_split_index = False
+# html_split_index = False

 # If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
+# html_show_sourcelink = True

 # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
+# html_show_sphinx = True

 # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
+# html_show_copyright = True

 # If true, an OpenSearch description file will be output, and all pages will
 # contain a <link> tag referring to it. The value of this option must be the
 # base URL from which the finished HTML is served.
-#html_use_opensearch = ''
+# html_use_opensearch = ''

 # This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
+# html_file_suffix = None

 # Language to be used for generating the HTML full-text search index.
 # Sphinx supports the following languages:
 # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
 # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr'
-#html_search_language = 'en'
+# html_search_language = 'en'

 # A dictionary with options for the search language support, empty by default.
 # Now only 'ja' uses this config value
-#html_search_options = {'type': 'default'}
+# html_search_options = {'type': 'default'}

 # The name of a javascript file (relative to the configuration directory) that
 # implements a search results scorer. If empty, the default will be used.
-#html_search_scorer = 'scorer.js'
+# html_search_scorer = 'scorer.js'

 # Output file base name for HTML help builder.
-htmlhelp_basename = 'Motodoc'
+htmlhelp_basename = "Motodoc"

 # -- Options for LaTeX output ---------------------------------------------

 latex_elements = {
-    # The paper size ('letterpaper' or 'a4paper').
-    #'papersize': 'letterpaper',
-
-    # The font size ('10pt', '11pt' or '12pt').
-    #'pointsize': '10pt',
-
-    # Additional stuff for the LaTeX preamble.
-    #'preamble': '',
-
-    # Latex figure (float) alignment
-    #'figure_align': 'htbp',
+    # The paper size ('letterpaper' or 'a4paper').
+    #'papersize': 'letterpaper',
+    # The font size ('10pt', '11pt' or '12pt').
+    #'pointsize': '10pt',
+    # Additional stuff for the LaTeX preamble.
+    #'preamble': '',
+    # Latex figure (float) alignment
+    #'figure_align': 'htbp',
 }

 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title,
 # author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'Moto.tex', 'Moto Documentation',
-     'Steve Pulec', 'manual'),
+    (master_doc, "Moto.tex", "Moto Documentation", "Steve Pulec", "manual"),
 ]

 # The name of an image file (relative to this directory) to place at the top of
 # the title page.
-#latex_logo = None
+# latex_logo = None

 # For "manual" documents, if this is true, then toplevel headings are parts,
 # not chapters.
-#latex_use_parts = False
+# latex_use_parts = False

 # If true, show page references after internal links.
-#latex_show_pagerefs = False
+# latex_show_pagerefs = False

 # If true, show URL addresses after external links.
-#latex_show_urls = False
+# latex_show_urls = False

 # Documents to append as an appendix to all manuals.
-#latex_appendices = []
+# latex_appendices = []

 # If false, no module index is generated.
-#latex_domain_indices = True
+# latex_domain_indices = True


 # -- Options for manual page output ---------------------------------------

 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [
-    (master_doc, 'moto', 'Moto Documentation',
-     [author], 1)
-]
+man_pages = [(master_doc, "moto", "Moto Documentation", [author], 1)]

 # If true, show URL addresses after external links.
-#man_show_urls = False
+# man_show_urls = False


 # -- Options for Texinfo output -------------------------------------------
@@ -268,19 +262,25 @@ man_pages = [
 # (source start file, target name, title, author,
 # dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'Moto', 'Moto Documentation',
-     author, 'Moto', 'One line description of project.',
-     'Miscellaneous'),
+    (
+        master_doc,
+        "Moto",
+        "Moto Documentation",
+        author,
+        "Moto",
+        "One line description of project.",
+        "Miscellaneous",
+    ),
 ]

 # Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
+# texinfo_appendices = []

 # If false, no module index is generated.
-#texinfo_domain_indices = True
+# texinfo_domain_indices = True

 # How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
+# texinfo_show_urls = 'footnote'

 # If true, do not generate a @detailmenu in the "Top" node's menu.
-#texinfo_no_detailmenu = False
+# texinfo_no_detailmenu = False

@@ -72,7 +72,10 @@ class ApplicationAutoscalingBackend(BaseBackend):
         return applicationautoscaling_backends[self.region]

     def describe_scalable_targets(
-        self, namespace, r_ids=None, dimension=None,
+        self,
+        namespace,
+        r_ids=None,
+        dimension=None,
     ):
         """ Describe scalable targets. """
         if r_ids is None:
@@ -110,8 +113,8 @@ class ApplicationAutoscalingBackend(BaseBackend):
         return r_id in self.targets.get(dimension, [])

     def _ecs_service_exists_for_target(self, r_id):
-        """ Raises a ValidationException if an ECS service does not exist
-        for the specified resource ID.
+        """Raises a ValidationException if an ECS service does not exist
+        for the specified resource ID.
         """
         resource_type, cluster, service = r_id.split("/")
         result = self.ecs_backend.describe_services(cluster, [service])

@@ -21,8 +21,10 @@ class ApplicationAutoScalingResponse(BaseResponse):
         scalable_dimension = self._get_param("ScalableDimension")
         max_results = self._get_int_param("MaxResults", 50)
         marker = self._get_param("NextToken")
-        all_scalable_targets = self.applicationautoscaling_backend.describe_scalable_targets(
-            service_namespace, resource_ids, scalable_dimension
+        all_scalable_targets = (
+            self.applicationautoscaling_backend.describe_scalable_targets(
+                service_namespace, resource_ids, scalable_dimension
+            )
         )
         start = int(marker) + 1 if marker else 0
         next_token = None
@@ -96,8 +98,8 @@ class ApplicationAutoScalingResponse(BaseResponse):
         return json.dumps({})

     def _validate_params(self):
-        """ Validate parameters.
-        TODO Integrate this validation with the validation in models.py
+        """Validate parameters.
+        TODO Integrate this validation with the validation in models.py
         """
         namespace = self._get_param("ServiceNamespace")
         dimension = self._get_param("ScalableDimension")

@@ -82,7 +82,12 @@ class AthenaResponse(BaseResponse):

     def error(self, msg, status):
         return (
-            json.dumps({"__type": "InvalidRequestException", "Message": msg,}),
+            json.dumps(
+                {
+                    "__type": "InvalidRequestException",
+                    "Message": msg,
+                }
+            ),
             dict(status=status),
         )


@@ -863,7 +863,7 @@ class AutoScalingBackend(BaseBackend):
         self.set_desired_capacity(group_name, desired_capacity)

     def change_capacity_percent(self, group_name, scaling_adjustment):
-        """ http://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide/as-scale-based-on-demand.html
+        """http://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide/as-scale-based-on-demand.html
         If PercentChangeInCapacity returns a value between 0 and 1,
         Auto Scaling will round it off to 1. If the PercentChangeInCapacity
         returns a value greater than 1, Auto Scaling will round it off to the

@@ -42,8 +42,8 @@ class AutoScalingResponse(BaseResponse):

     def describe_launch_configurations(self):
         names = self._get_multi_param("LaunchConfigurationNames.member")
-        all_launch_configurations = self.autoscaling_backend.describe_launch_configurations(
-            names
+        all_launch_configurations = (
+            self.autoscaling_backend.describe_launch_configurations(names)
         )
         marker = self._get_param("NextToken")
         all_names = [lc.name for lc in all_launch_configurations]
@@ -153,8 +153,8 @@ class AutoScalingResponse(BaseResponse):
     @amzn_request_id
     def describe_load_balancer_target_groups(self):
         group_name = self._get_param("AutoScalingGroupName")
-        target_group_arns = self.autoscaling_backend.describe_load_balancer_target_groups(
-            group_name
+        target_group_arns = (
+            self.autoscaling_backend.describe_load_balancer_target_groups(group_name)
         )
         template = self.response_template(DESCRIBE_LOAD_BALANCER_TARGET_GROUPS)
         return template.render(target_group_arns=target_group_arns)

@@ -254,7 +254,8 @@ def generate_resource_name(resource_type, stack_name, logical_id):


 def parse_resource(
-    resource_json, resources_map,
+    resource_json,
+    resources_map,
 ):
     resource_type = resource_json["Type"]
     resource_class = resource_class_from_type(resource_type)
@@ -275,7 +276,9 @@ def parse_resource(


 def parse_resource_and_generate_name(
-    logical_id, resource_json, resources_map,
+    logical_id,
+    resource_json,
+    resources_map,
 ):
     resource_tuple = parse_resource(resource_json, resources_map)
     if not resource_tuple:
@@ -695,7 +698,10 @@ class ResourceMap(collections_abc.Mapping):
             ]

             parse_and_delete_resource(
-                resource_name, resource_json, self, self._region_name,
+                resource_name,
+                resource_json,
+                self,
+                self._region_name,
             )

             self._parsed_resources.pop(parsed_resource.logical_resource_id)

@@ -41,8 +41,7 @@ def random_suffix():


 def yaml_tag_constructor(loader, tag, node):
-    """convert shorthand intrinsic function to full name
-    """
+    """convert shorthand intrinsic function to full name"""

     def _f(loader, tag, node):
         if tag == "!GetAtt":

@@ -412,7 +412,9 @@ class CognitoIdpResponse(BaseResponse):
         username = self._get_param("Username")
         confirmation_code = self._get_param("ConfirmationCode")
         cognitoidp_backends[self.region].confirm_sign_up(
-            client_id=client_id, username=username, confirmation_code=confirmation_code,
+            client_id=client_id,
+            username=username,
+            confirmation_code=confirmation_code,
         )
         return ""


@@ -101,8 +101,10 @@ class InvalidDeliveryChannelNameException(JsonRESTError):
     code = 400

     def __init__(self, name):
-        message = "The delivery channel name '{name}' is not valid, blank string.".format(
-            name=name
+        message = (
+            "The delivery channel name '{name}' is not valid, blank string.".format(
+                name=name
+            )
         )
         super(InvalidDeliveryChannelNameException, self).__init__(
             "InvalidDeliveryChannelNameException", message
@@ -287,8 +289,10 @@ class InvalidTagCharacters(JsonRESTError):
     code = 400

     def __init__(self, tag, param="tags.X.member.key"):
-        message = "1 validation error detected: Value '{}' at '{}' failed to satisfy ".format(
-            tag, param
+        message = (
+            "1 validation error detected: Value '{}' at '{}' failed to satisfy ".format(
+                tag, param
+            )
         )
         message += "constraint: Member must satisfy regular expression pattern: [\\\\p{L}\\\\p{Z}\\\\p{N}_.:/=+\\\\-@]+"

@@ -395,8 +395,10 @@ class OrganizationConformancePack(ConfigEmptyDictable):
         self.delivery_s3_key_prefix = delivery_s3_key_prefix
         self.excluded_accounts = excluded_accounts or []
         self.last_update_time = datetime2int(datetime.utcnow())
-        self.organization_conformance_pack_arn = "arn:aws:config:{0}:{1}:organization-conformance-pack/{2}".format(
-            region, DEFAULT_ACCOUNT_ID, self._unique_pack_name
+        self.organization_conformance_pack_arn = (
+            "arn:aws:config:{0}:{1}:organization-conformance-pack/{2}".format(
+                region, DEFAULT_ACCOUNT_ID, self._unique_pack_name
+            )
         )
         self.organization_conformance_pack_name = name

@@ -1006,9 +1008,9 @@ class ConfigBackend(BaseBackend):
     def get_resource_config_history(self, resource_type, id, backend_region):
         """Returns the configuration of an item in the AWS Config format of the resource for the current regional backend.

-            NOTE: This is --NOT-- returning history as it is not supported in moto at this time. (PR's welcome!)
-            As such, the later_time, earlier_time, limit, and next_token are ignored as this will only
-            return 1 item. (If no items, it raises an exception)
+        NOTE: This is --NOT-- returning history as it is not supported in moto at this time. (PR's welcome!)
+        As such, the later_time, earlier_time, limit, and next_token are ignored as this will only
+        return 1 item. (If no items, it raises an exception)
         """
         # If the type isn't implemented then we won't find the item:
         if resource_type not in RESOURCE_MAP:
@@ -1090,10 +1092,10 @@ class ConfigBackend(BaseBackend):
     ):
         """Returns the configuration of an item in the AWS Config format of the resource for the current regional backend.

-            As far a moto goes -- the only real difference between this function and the `batch_get_resource_config` function is that
-            this will require a Config Aggregator be set up a priori and can search based on resource regions.
+        As far a moto goes -- the only real difference between this function and the `batch_get_resource_config` function is that
+        this will require a Config Aggregator be set up a priori and can search based on resource regions.

-            Note: moto will IGNORE the resource account ID in the search query.
+        Note: moto will IGNORE the resource account ID in the search query.
         """
         if not self.config_aggregators.get(aggregator_name):
             raise NoSuchConfigurationAggregatorException()

@@ -190,8 +190,10 @@ class ConfigResponse(BaseResponse):

     def get_organization_conformance_pack_detailed_status(self):
         # 'Filters' parameter is not implemented yet
-        statuses = self.config_backend.get_organization_conformance_pack_detailed_status(
-            self._get_param("OrganizationConformancePackName")
+        statuses = (
+            self.config_backend.get_organization_conformance_pack_detailed_status(
+                self._get_param("OrganizationConformancePackName")
+            )
         )

         return json.dumps(statuses)

@@ -62,9 +62,9 @@ def _decode_dict(d):

 class DynamicDictLoader(DictLoader):
     """
-      Note: There's a bug in jinja2 pre-2.7.3 DictLoader where caching does not work.
-      Including the fixed (current) method version here to ensure performance benefit
-      even for those using older jinja versions.
+    Note: There's a bug in jinja2 pre-2.7.3 DictLoader where caching does not work.
+    Including the fixed (current) method version here to ensure performance benefit
+    even for those using older jinja versions.
     """

     def get_source(self, environment, template):

@@ -16,7 +16,7 @@ REQUEST_ID_LONG = string.digits + string.ascii_uppercase


 def camelcase_to_underscores(argument):
-    """ Converts a camelcase param like theNewAttribute to the equivalent
+    """Converts a camelcase param like theNewAttribute to the equivalent
     python underscore variable like the_new_attribute"""
     result = ""
     prev_char_title = True
@@ -42,9 +42,9 @@ def camelcase_to_underscores(argument):


 def underscores_to_camelcase(argument):
-    """ Converts a camelcase param like the_new_attribute to the equivalent
+    """Converts a camelcase param like the_new_attribute to the equivalent
     camelcase version like theNewAttribute. Note that the first letter is
-    NOT capitalized by this function """
+    NOT capitalized by this function"""
     result = ""
     previous_was_underscore = False
     for char in argument:
@@ -350,11 +350,15 @@ def tags_from_query_string(
         tag_index = key.replace(prefix + ".", "").replace("." + key_suffix, "")
         tag_key = querystring_dict.get(
             "{prefix}.{index}.{key_suffix}".format(
-                prefix=prefix, index=tag_index, key_suffix=key_suffix,
+                prefix=prefix,
+                index=tag_index,
+                key_suffix=key_suffix,
             )
         )[0]
         tag_value_key = "{prefix}.{index}.{value_suffix}".format(
-            prefix=prefix, index=tag_index, value_suffix=value_suffix,
+            prefix=prefix,
+            index=tag_index,
+            value_suffix=value_suffix,
         )
         if tag_value_key in querystring_dict:
             response_values[tag_key] = querystring_dict.get(tag_value_key)[0]

@@ -1052,7 +1052,8 @@ class DynamoDBBackend(BaseBackend):
             )

             gsis_by_name[gsi_to_create["IndexName"]] = GlobalSecondaryIndex.create(
-                gsi_to_create, table.table_key_attrs,
+                gsi_to_create,
+                table.table_key_attrs,
             )

         # in python 3.6, dict.values() returns a dict_values object, but we expect it to be a list in other

@@ -340,7 +340,9 @@ class InvalidDependantParameterError(EC2ClientError):
         super(InvalidDependantParameterError, self).__init__(
             "InvalidParameter",
             "{0} can't be empty if {1} is {2}.".format(
-                dependant_parameter, parameter, parameter_value,
+                dependant_parameter,
+                parameter,
+                parameter_value,
             ),
         )

@@ -350,7 +352,9 @@ class InvalidDependantParameterTypeError(EC2ClientError):
         super(InvalidDependantParameterTypeError, self).__init__(
             "InvalidParameter",
             "{0} type must be {1} if {2} is provided.".format(
-                dependant_parameter, parameter_value, parameter,
+                dependant_parameter,
+                parameter_value,
+                parameter,
             ),
         )

@@ -358,7 +362,8 @@ class InvalidDependantParameterTypeError(EC2ClientError):

 class InvalidAggregationIntervalParameterError(EC2ClientError):
     def __init__(self, parameter):
         super(InvalidAggregationIntervalParameterError, self).__init__(
-            "InvalidParameter", "Invalid {0}".format(parameter),
+            "InvalidParameter",
+            "Invalid {0}".format(parameter),
         )


@@ -1059,7 +1059,7 @@ class InstanceBackend(object):
         return instance

     def get_reservations_by_instance_ids(self, instance_ids, filters=None):
-        """ Go through all of the reservations and filter to only return those
+        """Go through all of the reservations and filter to only return those
         associated with the given instance_ids.
         """
         reservations = []
@@ -1358,9 +1358,9 @@ class Ami(TaggedEC2Resource):

         elif source_ami:
             """
-              http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/CopyingAMIs.html
-              "We don't copy launch permissions, user-defined tags, or Amazon S3 bucket permissions from the source AMI to the new AMI."
-              ~ 2014.09.29
+            http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/CopyingAMIs.html
+            "We don't copy launch permissions, user-defined tags, or Amazon S3 bucket permissions from the source AMI to the new AMI."
+            ~ 2014.09.29
             """
             self.virtualization_type = source_ami.virtualization_type
             self.architecture = source_ami.architecture
@@ -1491,7 +1491,12 @@ class AmiBackend(object):
         # Limit by owner ids
         if owners:
             # support filtering by Owners=['self']
-            owners = list(map(lambda o: OWNER_ID if o == "self" else o, owners,))
+            owners = list(
+                map(
+                    lambda o: OWNER_ID if o == "self" else o,
+                    owners,
+                )
+            )
             images = [ami for ami in images if ami.owner_id in owners]

         # Generic filters
@@ -1518,9 +1523,9 @@ class AmiBackend(object):
         # If anything is invalid, nothing is added. (No partial success.)
         if user_ids:
             """
-              AWS docs:
-              "The AWS account ID is a 12-digit number, such as 123456789012, that you use to construct Amazon Resource Names (ARNs)."
-              http://docs.aws.amazon.com/general/latest/gr/acct-identifiers.html
+            AWS docs:
+            "The AWS account ID is a 12-digit number, such as 123456789012, that you use to construct Amazon Resource Names (ARNs)."
+            http://docs.aws.amazon.com/general/latest/gr/acct-identifiers.html
             """
             for user_id in user_ids:
                 if len(user_id) != 12 or not user_id.isdigit():
@@ -3705,13 +3710,17 @@ class FlowLogsBackend(object):
     ):
         if log_group_name is None and log_destination is None:
             raise InvalidDependantParameterError(
-                "LogDestination", "LogGroupName", "not provided",
+                "LogDestination",
+                "LogGroupName",
+                "not provided",
             )

         if log_destination_type == "s3":
             if log_group_name is not None:
                 raise InvalidDependantParameterTypeError(
-                    "LogDestination", "cloud-watch-logs", "LogGroupName",
+                    "LogDestination",
+                    "cloud-watch-logs",
+                    "LogGroupName",
                 )
         elif log_destination_type == "cloud-watch-logs":
             if deliver_logs_permission_arn is None:
@@ -3859,7 +3868,8 @@ class FlowLogsBackend(object):

         if non_existing:
             raise InvalidFlowLogIdError(
-                len(flow_log_ids), " ".join(x for x in flow_log_ids),
+                len(flow_log_ids),
+                " ".join(x for x in flow_log_ids),
             )
         return True


@@ -70,8 +70,8 @@ class VPCs(BaseResponse):

     def enable_vpc_classic_link_dns_support(self):
         vpc_id = self._get_param("VpcId")
-        classic_link_dns_supported = self.ec2_backend.enable_vpc_classic_link_dns_support(
-            vpc_id=vpc_id
+        classic_link_dns_supported = (
+            self.ec2_backend.enable_vpc_classic_link_dns_support(vpc_id=vpc_id)
         )
         doc_date = self._get_doc_date()
         template = self.response_template(ENABLE_VPC_CLASSIC_LINK_DNS_SUPPORT_RESPONSE)
@@ -81,8 +81,8 @@ class VPCs(BaseResponse):

     def disable_vpc_classic_link_dns_support(self):
         vpc_id = self._get_param("VpcId")
-        classic_link_dns_supported = self.ec2_backend.disable_vpc_classic_link_dns_support(
-            vpc_id=vpc_id
+        classic_link_dns_supported = (
+            self.ec2_backend.disable_vpc_classic_link_dns_support(vpc_id=vpc_id)
         )
         doc_date = self._get_doc_date()
         template = self.response_template(DISABLE_VPC_CLASSIC_LINK_DNS_SUPPORT_RESPONSE)

@@ -38,5 +38,6 @@ class ClusterNotFoundException(JsonRESTError):

     def __init__(self):
         super(ClusterNotFoundException, self).__init__(
-            error_type="ClientException", message="Cluster not found",
+            error_type="ClientException",
+            message="Cluster not found",
         )

@@ -431,8 +431,10 @@ class ContainerInstance(BaseObject):
                 "type": "STRINGSET",
             },
         ]
-        self.container_instance_arn = "arn:aws:ecs:{0}:012345678910:container-instance/{1}".format(
-            region_name, str(uuid.uuid4())
+        self.container_instance_arn = (
+            "arn:aws:ecs:{0}:012345678910:container-instance/{1}".format(
+                region_name, str(uuid.uuid4())
+            )
         )
         self.pending_tasks_count = 0
         self.remaining_resources = [

@@ -8,7 +8,11 @@ from .exceptions import InvalidParameterValueError, ResourceNotFoundException

 class FakeEnvironment(BaseModel):
     def __init__(
-        self, application, environment_name, solution_stack_name, tags,
+        self,
+        application,
+        environment_name,
+        solution_stack_name,
+        tags,
     ):
         self.application = weakref.proxy(
             application
@@ -49,7 +53,10 @@ class FakeApplication(BaseModel):
         self.environments = dict()

     def create_environment(
-        self, environment_name, solution_stack_name, tags,
+        self,
+        environment_name,
+        solution_stack_name,
+        tags,
     ):
         if environment_name in self.environments:
             raise InvalidParameterValueError
@@ -86,7 +93,10 @@ class EBBackend(BaseBackend):
             raise InvalidParameterValueError(
                 "Application {} already exists.".format(application_name)
             )
-        new_app = FakeApplication(backend=self, application_name=application_name,)
+        new_app = FakeApplication(
+            backend=self,
+            application_name=application_name,
+        )
         self.applications[application_name] = new_app
         return new_app


@@ -18,11 +18,16 @@ class EBResponse(BaseResponse):
         )

         template = self.response_template(EB_CREATE_APPLICATION)
-        return template.render(region_name=self.backend.region, application=app,)
+        return template.render(
+            region_name=self.backend.region,
+            application=app,
+        )

     def describe_applications(self):
         template = self.response_template(EB_DESCRIBE_APPLICATIONS)
-        return template.render(applications=self.backend.applications.values(),)
+        return template.render(
+            applications=self.backend.applications.values(),
+        )

     def create_environment(self):
         application_name = self._get_param("ApplicationName")
@@ -42,13 +47,18 @@ class EBResponse(BaseResponse):
         )

         template = self.response_template(EB_CREATE_ENVIRONMENT)
-        return template.render(environment=env, region=self.backend.region,)
+        return template.render(
+            environment=env,
+            region=self.backend.region,
+        )

     def describe_environments(self):
         envs = self.backend.describe_environments()

         template = self.response_template(EB_DESCRIBE_ENVIRONMENTS)
-        return template.render(environments=envs,)
+        return template.render(
+            environments=envs,
+        )

     def list_available_solution_stacks(self):
         return EB_LIST_AVAILABLE_SOLUTION_STACKS
@@ -68,7 +78,10 @@ class EBResponse(BaseResponse):
         tags = self.backend.list_tags_for_resource(resource_arn)

         template = self.response_template(EB_LIST_TAGS_FOR_RESOURCE)
-        return template.render(tags=tags, arn=resource_arn,)
+        return template.render(
+            tags=tags,
+            arn=resource_arn,
+        )


 EB_CREATE_APPLICATION = """

@@ -43,14 +43,14 @@ def steps_from_query_string(querystring_dict):

 class Unflattener:
     @staticmethod
     def unflatten_complex_params(input_dict, param_name):
-        """ Function to unflatten (portions of) dicts with complex keys. The moto request parser flattens the incoming
+        """Function to unflatten (portions of) dicts with complex keys. The moto request parser flattens the incoming
         request bodies, which is generally helpful, but for nested dicts/lists can result in a hard-to-manage
         parameter exposion. This function allows one to selectively unflatten a set of dict keys, replacing them
         with a deep dist/list structure named identically to the root component in the complex name.

         Complex keys are composed of multiple components
         separated by periods. Components may be prefixed with _, which is stripped. Lists indexes are represented
-        with two components, 'member' and the index number. """
+        with two components, 'member' and the index number."""
         items_to_process = {}
         for k in input_dict.keys():
             if k.startswith(param_name):

@@ -125,10 +125,12 @@ class AssumedRoleAccessKey(object):

     @property
     def arn(self):
-        return "arn:aws:sts::{account_id}:assumed-role/{role_name}/{session_name}".format(
-            account_id=ACCOUNT_ID,
-            role_name=self._owner_role_name,
-            session_name=self._session_name,
+        return (
+            "arn:aws:sts::{account_id}:assumed-role/{role_name}/{session_name}".format(
+                account_id=ACCOUNT_ID,
+                role_name=self._owner_role_name,
+                session_name=self._session_name,
+            )
         )

     def create_credentials(self):

@@ -88,8 +88,10 @@ class InvalidTagCharacters(RESTError):
     code = 400

     def __init__(self, tag, param="tags.X.member.key"):
-        message = "1 validation error detected: Value '{}' at '{}' failed to satisfy ".format(
-            tag, param
+        message = (
+            "1 validation error detected: Value '{}' at '{}' failed to satisfy ".format(
+                tag, param
+            )
         )
         message += "constraint: Member must satisfy regular expression pattern: [\\p{L}\\p{Z}\\p{N}_.:/=+\\-@]+"


@@ -362,7 +362,12 @@ class InlinePolicy(CloudFormationModel):
         self.update(policy_name, policy_document, group_names, role_names, user_names)

     def update(
-        self, policy_name, policy_document, group_names, role_names, user_names,
+        self,
+        policy_name,
+        policy_document,
+        group_names,
+        role_names,
+        user_names,
     ):
         self.policy_name = policy_name
         self.policy_document = (
@@ -404,7 +409,11 @@ class InlinePolicy(CloudFormationModel):

     @classmethod
     def update_from_cloudformation_json(
-        cls, original_resource, new_resource_name, cloudformation_json, region_name,
+        cls,
+        original_resource,
+        new_resource_name,
+        cloudformation_json,
+        region_name,
     ):
         properties = cloudformation_json["Properties"]

@@ -807,11 +816,18 @@ class AccessKey(CloudFormationModel):
         user_name = properties.get("UserName")
         status = properties.get("Status", "Active")

-        return iam_backend.create_access_key(user_name, status=status,)
+        return iam_backend.create_access_key(
+            user_name,
+            status=status,
+        )

     @classmethod
     def update_from_cloudformation_json(
-        cls, original_resource, new_resource_name, cloudformation_json, region_name,
+        cls,
+        original_resource,
+        new_resource_name,
+        cloudformation_json,
+        region_name,
     ):
         properties = cloudformation_json["Properties"]

@@ -1139,7 +1155,11 @@ class User(CloudFormationModel):

     @classmethod
     def update_from_cloudformation_json(
-        cls, original_resource, new_resource_name, cloudformation_json, region_name,
+        cls,
+        original_resource,
+        new_resource_name,
+        cloudformation_json,
+        region_name,
     ):
         properties = cloudformation_json["Properties"]

@@ -2557,7 +2577,11 @@ class IAMBackend(BaseBackend):
         inline_policy = self.get_inline_policy(resource_name)
         inline_policy.unapply_policy(self)
         inline_policy.update(
-            policy_name, policy_document, group_names, role_names, user_names,
+            policy_name,
+            policy_document,
+            group_names,
+            role_names,
+            user_names,
         )
         inline_policy.apply_policy(self)
         return inline_policy

@@ -343,8 +343,10 @@ class IAMPolicyDocumentValidator:
         resource_partitions = resource.partition(":")

         if resource_partitions[1] == "":
-            self._resource_error = 'Resource {resource} must be in ARN format or "*".'.format(
-                resource=resource
+            self._resource_error = (
+                'Resource {resource} must be in ARN format or "*".'.format(
+                    resource=resource
+                )
             )
             return

@@ -390,15 +392,14 @@ class IAMPolicyDocumentValidator:

         service = resource_partitions[0]

-        if service in SERVICE_TYPE_REGION_INFORMATION_ERROR_ASSOCIATIONS.keys() and not resource_partitions[
-            2
-        ].startswith(
-            ":"
+        if (
+            service in SERVICE_TYPE_REGION_INFORMATION_ERROR_ASSOCIATIONS.keys()
+            and not resource_partitions[2].startswith(":")
         ):
-            self._resource_error = SERVICE_TYPE_REGION_INFORMATION_ERROR_ASSOCIATIONS[
-                service
-            ].format(
-                resource=resource
+            self._resource_error = (
+                SERVICE_TYPE_REGION_INFORMATION_ERROR_ASSOCIATIONS[service].format(
+                    resource=resource
+                )
             )
             return

@@ -520,8 +521,8 @@ class IAMPolicyDocumentValidator:
                 assert 0 <= int(time_zone_minutes) <= 59
             else:
                 seconds_with_decimal_fraction = time_parts[2]
-                seconds_with_decimal_fraction_partition = seconds_with_decimal_fraction.partition(
-                    "."
+                seconds_with_decimal_fraction_partition = (
+                    seconds_with_decimal_fraction.partition(".")
                 )
                 seconds = seconds_with_decimal_fraction_partition[0]
                 assert 0 <= int(seconds) <= 59

@@ -340,7 +340,8 @@ class IoTResponse(BaseResponse):
         status = self._get_param("status")

         cert = self.iot_backend.register_certificate_without_ca(
-            certificate_pem=certificate_pem, status=status,
+            certificate_pem=certificate_pem,
+            status=status,
         )
         return json.dumps(
             dict(certificateId=cert.certificate_id, certificateArn=cert.arn)

@@ -114,8 +114,7 @@ class FakeShadow(BaseModel):
         }

     def to_dict(self, include_delta=True):
-        """returning nothing except for just top-level keys for now.
-        """
+        """returning nothing except for just top-level keys for now."""
         if self.deleted:
             return {"timestamp": self.timestamp, "version": self.version}
         delta = self.parse_payload(self.desired, self.reported)

@@ -261,7 +261,11 @@ class Stream(CloudFormationModel):

     @classmethod
     def update_from_cloudformation_json(
-        cls, original_resource, new_resource_name, cloudformation_json, region_name,
+        cls,
+        original_resource,
+        new_resource_name,
+        cloudformation_json,
+        region_name,
     ):
         properties = cloudformation_json["Properties"]


@@ -20,5 +20,6 @@ class ResourceInUseException(KinesisvideoClientError):
     def __init__(self, message):
         self.code = 400
         super(ResourceInUseException, self).__init__(
-            "ResourceInUseException", message,
+            "ResourceInUseException",
+            message,
         )

@@ -32,7 +32,8 @@ class KinesisVideoResponse(BaseResponse):
         stream_name = self._get_param("StreamName")
         stream_arn = self._get_param("StreamARN")
         stream_info = self.kinesisvideo_backend.describe_stream(
-            stream_name=stream_name, stream_arn=stream_arn,
+            stream_name=stream_name,
+            stream_arn=stream_arn,
         )
         return json.dumps(dict(StreamInfo=stream_info))

@@ -51,7 +52,8 @@ class KinesisVideoResponse(BaseResponse):
         stream_arn = self._get_param("StreamARN")
         current_version = self._get_param("CurrentVersion")
         self.kinesisvideo_backend.delete_stream(
-            stream_arn=stream_arn, current_version=current_version,
+            stream_arn=stream_arn,
+            current_version=current_version,
         )
         return json.dumps(dict())

@@ -60,6 +62,8 @@ class KinesisVideoResponse(BaseResponse):
         stream_arn = self._get_param("StreamARN")
         api_name = self._get_param("APIName")
         data_endpoint = self.kinesisvideo_backend.get_data_endpoint(
-            stream_name=stream_name, stream_arn=stream_arn, api_name=api_name,
+            stream_name=stream_name,
+            stream_arn=stream_arn,
+            api_name=api_name,
         )
         return json.dumps(dict(DataEndpoint=data_endpoint))

@@ -23,16 +23,18 @@ class KinesisVideoArchivedMediaResponse(BaseResponse):
         max_media_playlist_fragment_results = self._get_param(
             "MaxMediaPlaylistFragmentResults"
         )
-        hls_streaming_session_url = self.kinesisvideoarchivedmedia_backend.get_hls_streaming_session_url(
-            stream_name=stream_name,
-            stream_arn=stream_arn,
-            playback_mode=playback_mode,
-            hls_fragment_selector=hls_fragment_selector,
-            container_format=container_format,
-            discontinuity_mode=discontinuity_mode,
-            display_fragment_timestamp=display_fragment_timestamp,
-            expires=expires,
-            max_media_playlist_fragment_results=max_media_playlist_fragment_results,
+        hls_streaming_session_url = (
+            self.kinesisvideoarchivedmedia_backend.get_hls_streaming_session_url(
+                stream_name=stream_name,
+                stream_arn=stream_arn,
+                playback_mode=playback_mode,
+                hls_fragment_selector=hls_fragment_selector,
+                container_format=container_format,
+                discontinuity_mode=discontinuity_mode,
+                display_fragment_timestamp=display_fragment_timestamp,
+                expires=expires,
+                max_media_playlist_fragment_results=max_media_playlist_fragment_results,
+            )
         )
         return json.dumps(dict(HLSStreamingSessionURL=hls_streaming_session_url))

@@ -45,15 +47,17 @@ class KinesisVideoArchivedMediaResponse(BaseResponse):
         dash_fragment_selector = self._get_param("DASHFragmentSelector")
         expires = self._get_int_param("Expires")
         max_manifest_fragment_results = self._get_param("MaxManifestFragmentResults")
-        dash_streaming_session_url = self.kinesisvideoarchivedmedia_backend.get_dash_streaming_session_url(
-            stream_name=stream_name,
-            stream_arn=stream_arn,
-            playback_mode=playback_mode,
-            display_fragment_timestamp=display_fragment_timestamp,
-            display_fragment_number=display_fragment_number,
-            dash_fragment_selector=dash_fragment_selector,
-            expires=expires,
-            max_manifest_fragment_results=max_manifest_fragment_results,
+        dash_streaming_session_url = (
+            self.kinesisvideoarchivedmedia_backend.get_dash_streaming_session_url(
+                stream_name=stream_name,
+                stream_arn=stream_arn,
+                playback_mode=playback_mode,
+                display_fragment_timestamp=display_fragment_timestamp,
+                display_fragment_number=display_fragment_number,
+                dash_fragment_selector=dash_fragment_selector,
+                expires=expires,
+                max_manifest_fragment_results=max_manifest_fragment_results,
+            )
         )
         return json.dumps(dict(DASHStreamingSessionURL=dash_streaming_session_url))


@@ -352,7 +352,11 @@ class ManagedBlockchainInvitation(BaseModel):

 class ManagedBlockchainMember(BaseModel):
     def __init__(
-        self, id, networkid, member_configuration, region,
+        self,
+        id,
+        networkid,
+        member_configuration,
+        region,
     ):
         self.creationdate = datetime.datetime.utcnow()
         self.id = id
@@ -583,7 +587,11 @@ class ManagedBlockchainBackend(BaseBackend):
         return self.networks.get(network_id)

     def create_proposal(
-        self, networkid, memberid, actions, description=None,
+        self,
+        networkid,
+        memberid,
+        actions,
+        description=None,
     ):
         # Check if network exists
         if networkid not in self.networks:
@@ -783,7 +791,10 @@ class ManagedBlockchainBackend(BaseBackend):
         self.invitations.get(invitationid).reject_invitation()

     def create_member(
-        self, invitationid, networkid, member_configuration,
+        self,
+        invitationid,
+        networkid,
+        member_configuration,
     ):
         # Check if network exists
         if networkid not in self.networks:
@@ -988,7 +999,8 @@ class ManagedBlockchainBackend(BaseBackend):
         chkregionpreregex = self.region_name + "[a-z]"
         if re.match(chkregionpreregex, availabilityzone, re.IGNORECASE) is None:
             raise InvalidRequestException(
-                "CreateNode", "Availability Zone is not valid",
+                "CreateNode",
+                "Availability Zone is not valid",
             )

         node_id = get_node_id()

@@ -134,7 +134,10 @@ class ManagedBlockchainResponse(BaseResponse):
         description = json_body.get("Description", None)

         response = self.backend.create_proposal(
-            network_id, memberid, actions, description,
+            network_id,
+            memberid,
+            actions,
+            description,
         )
         return 200, headers, json.dumps(response)

@@ -198,7 +201,10 @@ class ManagedBlockchainResponse(BaseResponse):
         vote = json_body["Vote"]

         self.backend.vote_on_proposal(
-            network_id, proposal_id, votermemberid, vote,
+            network_id,
+            proposal_id,
+            votermemberid,
+            vote,
         )
         return 200, headers, ""

@@ -278,7 +284,9 @@ class ManagedBlockchainResponse(BaseResponse):
         member_configuration = json_body["MemberConfiguration"]

         response = self.backend.create_member(
-            invitationid, network_id, member_configuration,
+            invitationid,
+            network_id,
+            member_configuration,
         )
         return 200, headers, json.dumps(response)

@@ -317,7 +325,9 @@ class ManagedBlockchainResponse(BaseResponse):
     def _memberid_response_patch(self, network_id, member_id, json_body, headers):
         logpublishingconfiguration = json_body["LogPublishingConfiguration"]
         self.backend.update_member(
-            network_id, member_id, logpublishingconfiguration,
+            network_id,
+            member_id,
+            logpublishingconfiguration,
         )
         return 200, headers, ""

@@ -417,7 +427,10 @@ class ManagedBlockchainResponse(BaseResponse):
     ):
         logpublishingconfiguration = json_body
         self.backend.update_node(
-            network_id, member_id, node_id, logpublishingconfiguration,
+            network_id,
+            member_id,
+            node_id,
+            logpublishingconfiguration,
         )
         return 200, headers, ""


@@ -785,7 +785,8 @@ class OrganizationsBackend(BaseBackend):
         )

         admin = next(
-            (admin for admin in self.admins if admin.account.id == account_id), None,
+            (admin for admin in self.admins if admin.account.id == account_id),
+            None,
         )
         if admin is None:
             account = next(
@@ -841,7 +842,8 @@ class OrganizationsBackend(BaseBackend):
             )
         elif re.match(account_id_regex, target_id):
             account = next(
-                (account for account in self.accounts if account.id == target_id), None,
+                (account for account in self.accounts if account.id == target_id),
+                None,
             )
             if account is not None:
                 if account in account.attached_policies:

@@ -269,13 +269,13 @@ class fakesock(object):
         _sock=None,
     ):
         """
-          Matches both the Python 2 API:
-              def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, _sock=None):
-              https://github.com/python/cpython/blob/2.7/Lib/socket.py
+        Matches both the Python 2 API:
+            def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, _sock=None):
+            https://github.com/python/cpython/blob/2.7/Lib/socket.py

-          and the Python 3 API:
-              def __init__(self, family=-1, type=-1, proto=-1, fileno=None):
-              https://github.com/python/cpython/blob/3.5/Lib/socket.py
+        and the Python 3 API:
+            def __init__(self, family=-1, type=-1, proto=-1, fileno=None):
+            https://github.com/python/cpython/blob/3.5/Lib/socket.py
         """
         if httpretty.allow_net_connect:
             if PY3:

@@ -88,8 +88,10 @@ class ResourceShare(BaseModel):
             )

             if root_id:
-                ous = self.organizations_backend.list_organizational_units_for_parent(
-                    ParentId=root_id
+                ous = (
+                    self.organizations_backend.list_organizational_units_for_parent(
+                        ParentId=root_id
+                    )
                 )
                 if any(principal == ou["Arn"] for ou in ous["OrganizationalUnits"]):
                     continue

@@ -523,7 +523,10 @@ class LifecycleAndFilter(BaseModel):

         for key, value in self.tags.items():
             data.append(
-                {"type": "LifecycleTagPredicate", "tag": {"key": key, "value": value},}
+                {
+                    "type": "LifecycleTagPredicate",
+                    "tag": {"key": key, "value": value},
+                }
             )

         return data
@@ -1129,7 +1132,11 @@ class FakeBucket(CloudFormationModel):

     @classmethod
     def update_from_cloudformation_json(
-        cls, original_resource, new_resource_name, cloudformation_json, region_name,
+        cls,
+        original_resource,
+        new_resource_name,
+        cloudformation_json,
+        region_name,
     ):
         properties = cloudformation_json["Properties"]

@@ -1469,7 +1476,8 @@ class S3Backend(BaseBackend):
             raise MissingKey(key_name)
         self.tagger.delete_all_tags_for_resource(key.arn)
         self.tagger.tag_resource(
-            key.arn, [{"Key": k, "Value": v} for (k, v) in tags.items()],
+            key.arn,
+            [{"Key": k, "Value": v} for (k, v) in tags.items()],
         )
         return key

@@ -1481,7 +1489,8 @@ class S3Backend(BaseBackend):
         bucket = self.get_bucket(bucket_name)
         self.tagger.delete_all_tags_for_resource(bucket.arn)
         self.tagger.tag_resource(
-            bucket.arn, [{"Key": key, "Value": value} for key, value in tags.items()],
+            bucket.arn,
+            [{"Key": key, "Value": value} for key, value in tags.items()],
         )

     def delete_bucket_tagging(self, bucket_name):

@@ -406,8 +406,8 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             template = self.response_template(S3_BUCKET_CORS_RESPONSE)
             return template.render(cors=cors)
         elif "notification" in querystring:
-            notification_configuration = self.backend.get_bucket_notification_configuration(
-                bucket_name
+            notification_configuration = (
+                self.backend.get_bucket_notification_configuration(bucket_name)
             )
             if not notification_configuration:
                 return 200, {}, ""

@@ -98,7 +98,7 @@ def undo_clean_key_name(key_name):


 class _VersionedKeyStore(dict):

-    """ A simplified/modified version of Django's `MultiValueDict` taken from:
+    """A simplified/modified version of Django's `MultiValueDict` taken from:
     https://github.com/django/django/blob/70576740b0bb5289873f5a9a9a4e1a26b2c330e5/django/utils/datastructures.py#L282
     """

@@ -517,8 +517,10 @@ class FakeSageMakerNotebookInstanceLifecycleConfig(BaseObject):
         self.creation_time = self.last_modified_time = datetime.now().strftime(
             "%Y-%m-%d %H:%M:%S"
         )
-        self.notebook_instance_lifecycle_config_arn = FakeSageMakerNotebookInstanceLifecycleConfig.arn_formatter(
-            self.notebook_instance_lifecycle_config_name, self.region_name
+        self.notebook_instance_lifecycle_config_arn = (
+            FakeSageMakerNotebookInstanceLifecycleConfig.arn_formatter(
+                self.notebook_instance_lifecycle_config_name, self.region_name
+            )
         )

     @staticmethod
@@ -580,7 +582,11 @@ class SageMakerModelBackend(BaseBackend):
             message = "Could not find model '{}'.".format(
                 Model.arn_for_model_name(model_name, self.region_name)
             )
-            raise ValidationError(message=message)
+            raise RESTError(
+                error_type="ValidationException",
+                message=message,
+                template="error_json",
+            )

     def list_models(self):
         models = []
@@ -790,7 +796,10 @@ class SageMakerModelBackend(BaseBackend):
             raise ValidationError(message=message)

     def create_endpoint(
-        self, endpoint_name, endpoint_config_name, tags,
+        self,
+        endpoint_name,
+        endpoint_config_name,
+        tags,
     ):
         try:
             endpoint_config = self.describe_endpoint_config(endpoint_config_name)

@@ -243,12 +243,14 @@ class SageMakerResponse(BaseResponse):
     @amzn_request_id
     def create_notebook_instance_lifecycle_config(self):
         try:
-            lifecycle_configuration = self.sagemaker_backend.create_notebook_instance_lifecycle_config(
-                notebook_instance_lifecycle_config_name=self._get_param(
-                    "NotebookInstanceLifecycleConfigName"
-                ),
-                on_create=self._get_param("OnCreate"),
-                on_start=self._get_param("OnStart"),
+            lifecycle_configuration = (
+                self.sagemaker_backend.create_notebook_instance_lifecycle_config(
+                    notebook_instance_lifecycle_config_name=self._get_param(
+                        "NotebookInstanceLifecycleConfigName"
+                    ),
+                    on_create=self._get_param("OnCreate"),
+                    on_start=self._get_param("OnStart"),
+                )
             )
             response = {
                 "NotebookInstanceLifecycleConfigArn": lifecycle_configuration.notebook_instance_lifecycle_config_arn,

@ -340,12 +340,14 @@ class PlatformEndpoint(BaseModel):

@property
def arn(self):
return "arn:aws:sns:{region}:{AccountId}:endpoint/{platform}/{name}/{id}".format(
region=self.region,
AccountId=DEFAULT_ACCOUNT_ID,
platform=self.application.platform,
name=self.application.name,
id=self.id,
return (
"arn:aws:sns:{region}:{AccountId}:endpoint/{platform}/{name}/{id}".format(
region=self.region,
AccountId=DEFAULT_ACCOUNT_ID,
platform=self.application.platform,
name=self.application.name,
id=self.id,
)
)

def publish(self, message):
@ -354,7 +354,9 @@ class SQSResponse(BaseResponse):
queue_name = self._get_queue_name()
message_attributes = self._get_multi_param("message_attributes")
if not message_attributes:
message_attributes = extract_input_message_attributes(self.querystring,)
message_attributes = extract_input_message_attributes(
self.querystring,
)

queue = self.sqs_backend.get_queue(queue_name)
@ -718,8 +720,10 @@ ERROR_TOO_LONG_RESPONSE = """<ErrorResponse xmlns="http://queue.amazonaws.com/do
<RequestId>6fde8d1e-52cd-4581-8cd9-c512f4c64223</RequestId>
</ErrorResponse>"""

ERROR_MAX_VISIBILITY_TIMEOUT_RESPONSE = "Invalid request, maximum visibility timeout is {0}".format(
MAXIMUM_VISIBILTY_TIMEOUT
ERROR_MAX_VISIBILITY_TIMEOUT_RESPONSE = (
"Invalid request, maximum visibility timeout is {0}".format(
MAXIMUM_VISIBILTY_TIMEOUT
)
)

ERROR_INEXISTENT_QUEUE = """<ErrorResponse xmlns="http://queue.amazonaws.com/doc/2012-11-05/">
@ -148,7 +148,9 @@ class StateMachine(CloudFormationModel):
tags = cfn_to_api_tags(properties.get("Tags", []))
sf_backend = stepfunction_backends[region_name]
state_machine = sf_backend.update_state_machine(
original_resource.arn, definition=definition, role_arn=role_arn,
original_resource.arn,
definition=definition,
role_arn=role_arn,
)
state_machine.add_tags(tags)
return state_machine
@ -48,10 +48,12 @@ class AssumedRole(BaseModel):

@property
def arn(self):
return "arn:aws:sts::{account_id}:assumed-role/{role_name}/{session_name}".format(
account_id=ACCOUNT_ID,
role_name=self.role_arn.split("/")[-1],
session_name=self.session_name,
return (
"arn:aws:sts::{account_id}:assumed-role/{role_name}/{session_name}".format(
account_id=ACCOUNT_ID,
role_name=self.role_arn.split("/")[-1],
session_name=self.session_name,
)
)
@ -153,7 +153,11 @@ class FakeMedicalTranscriptionJob(BaseObject):

class FakeMedicalVocabulary(BaseObject):
def __init__(
self, region_name, vocabulary_name, language_code, vocabulary_file_uri,
self,
region_name,
vocabulary_name,
language_code,
vocabulary_file_uri,
):
self._region_name = region_name
self.vocabulary_name = vocabulary_name
@ -3,35 +3,64 @@ import json

# Taken from free tier list when creating an instance
instances = [
'ami-760aaa0f', 'ami-bb9a6bc2', 'ami-35e92e4c', 'ami-785db401', 'ami-b7e93bce', 'ami-dca37ea5', 'ami-999844e0',
'ami-9b32e8e2', 'ami-f8e54081', 'ami-bceb39c5', 'ami-03cf127a', 'ami-1ecc1e67', 'ami-c2ff2dbb', 'ami-12c6146b',
'ami-d1cb19a8', 'ami-61db0918', 'ami-56ec3e2f', 'ami-84ee3cfd', 'ami-86ee3cff', 'ami-f0e83a89', 'ami-1f12c066',
'ami-afee3cd6', 'ami-1812c061', 'ami-77ed3f0e', 'ami-3bf32142', 'ami-6ef02217', 'ami-f4cf1d8d', 'ami-3df32144',
'ami-c6f321bf', 'ami-24f3215d', 'ami-fa7cdd89', 'ami-1e749f67', 'ami-a9cc1ed0', 'ami-8104a4f8'
"ami-760aaa0f",
"ami-bb9a6bc2",
"ami-35e92e4c",
"ami-785db401",
"ami-b7e93bce",
"ami-dca37ea5",
"ami-999844e0",
"ami-9b32e8e2",
"ami-f8e54081",
"ami-bceb39c5",
"ami-03cf127a",
"ami-1ecc1e67",
"ami-c2ff2dbb",
"ami-12c6146b",
"ami-d1cb19a8",
"ami-61db0918",
"ami-56ec3e2f",
"ami-84ee3cfd",
"ami-86ee3cff",
"ami-f0e83a89",
"ami-1f12c066",
"ami-afee3cd6",
"ami-1812c061",
"ami-77ed3f0e",
"ami-3bf32142",
"ami-6ef02217",
"ami-f4cf1d8d",
"ami-3df32144",
"ami-c6f321bf",
"ami-24f3215d",
"ami-fa7cdd89",
"ami-1e749f67",
"ami-a9cc1ed0",
"ami-8104a4f8",
]

client = boto3.client('ec2', region_name='eu-west-1')
client = boto3.client("ec2", region_name="eu-west-1")

test = client.describe_images(ImageIds=instances)

result = []
for image in test['Images']:
for image in test["Images"]:
try:
tmp = {
'ami_id': image['ImageId'],
'name': image['Name'],
'description': image['Description'],
'owner_id': image['OwnerId'],
'public': image['Public'],
'virtualization_type': image['VirtualizationType'],
'architecture': image['Architecture'],
'state': image['State'],
'platform': image.get('Platform'),
'image_type': image['ImageType'],
'hypervisor': image['Hypervisor'],
'root_device_name': image['RootDeviceName'],
'root_device_type': image['RootDeviceType'],
'sriov': image.get('SriovNetSupport', 'simple')
"ami_id": image["ImageId"],
"name": image["Name"],
"description": image["Description"],
"owner_id": image["OwnerId"],
"public": image["Public"],
"virtualization_type": image["VirtualizationType"],
"architecture": image["Architecture"],
"state": image["State"],
"platform": image.get("Platform"),
"image_type": image["ImageType"],
"hypervisor": image["Hypervisor"],
"root_device_name": image["RootDeviceName"],
"root_device_type": image["RootDeviceType"],
"sriov": image.get("SriovNetSupport", "simple"),
}
result.append(tmp)
except Exception as err:
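The AMI list above is Black's standard treatment of an over-long collection: quotes are normalized to double quotes and, because the literal cannot fit within the line limit, every element gets its own line plus a trailing "magic" comma. A minimal sketch, assuming black is installed:

import black

src = "instances = ['ami-760aaa0f', 'ami-bb9a6bc2', 'ami-35e92e4c', 'ami-785db401', 'ami-b7e93bce', 'ami-dca37ea5', 'ami-999844e0']\n"
# Prints the list exploded one element per line, double-quoted,
# with a trailing comma after the last element.
print(black.format_str(src, mode=black.Mode()))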
@ -7,12 +7,18 @@ import boto3


script_dir = os.path.dirname(os.path.abspath(__file__))
alternative_service_names = {'lambda': 'awslambda', 'dynamodb': 'dynamodb2'}
alternative_service_names = {"lambda": "awslambda", "dynamodb": "dynamodb2"}


def get_moto_implementation(service_name):
service_name = service_name.replace("-", "") if "-" in service_name else service_name
alt_service_name = alternative_service_names[service_name] if service_name in alternative_service_names else service_name
service_name = (
service_name.replace("-", "") if "-" in service_name else service_name
)
alt_service_name = (
alternative_service_names[service_name]
if service_name in alternative_service_names
else service_name
)
if hasattr(moto, "mock_{}".format(alt_service_name)):
mock = getattr(moto, "mock_{}".format(alt_service_name))
elif hasattr(moto, "mock_{}".format(service_name)):
@ -31,11 +37,13 @@ def calculate_implementation_coverage():
coverage = {}
for service_name in service_names:
moto_client = get_moto_implementation(service_name)
real_client = boto3.client(service_name, region_name='us-east-1')
real_client = boto3.client(service_name, region_name="us-east-1")
implemented = []
not_implemented = []

operation_names = [xform_name(op) for op in real_client.meta.service_model.operation_names]
operation_names = [
xform_name(op) for op in real_client.meta.service_model.operation_names
]
for op in operation_names:
if moto_client and op in dir(moto_client):
implemented.append(op)
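For readers unfamiliar with it, xform_name is botocore's public CamelCase-to-snake_case helper; the coverage script uses it to map API operation names onto the method names a moto backend would expose. A quick illustration (requires botocore):

from botocore import xform_name

assert xform_name("DescribeAutoScalingGroups") == "describe_auto_scaling_groups"
assert xform_name("ListBuckets") == "list_buckets"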
@ -43,20 +51,22 @@ def calculate_implementation_coverage():
not_implemented.append(op)

coverage[service_name] = {
'implemented': implemented,
'not_implemented': not_implemented,
"implemented": implemented,
"not_implemented": not_implemented,
}
return coverage


def print_implementation_coverage(coverage):
for service_name in sorted(coverage):
implemented = coverage.get(service_name)['implemented']
not_implemented = coverage.get(service_name)['not_implemented']
implemented = coverage.get(service_name)["implemented"]
not_implemented = coverage.get(service_name)["not_implemented"]
operations = sorted(implemented + not_implemented)

if implemented and not_implemented:
percentage_implemented = int(100.0 * len(implemented) / (len(implemented) + len(not_implemented)))
percentage_implemented = int(
100.0 * len(implemented) / (len(implemented) + len(not_implemented))
)
elif implemented:
percentage_implemented = 100
else:
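A worked example of the percentage computed above (the operation names here are made up for illustration): with three implemented operations and one missing, the report records int(100.0 * 3 / 4) == 75.

implemented = ["create_thing", "delete_thing", "list_things"]
not_implemented = ["update_thing"]
# Integer truncation matches the script's int(...) call.
percentage_implemented = int(100.0 * len(implemented) / (len(implemented) + len(not_implemented)))
assert percentage_implemented == 75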
@ -84,12 +94,14 @@ def write_implementation_coverage_to_file(coverage):
print("Writing to {}".format(implementation_coverage_file))
with open(implementation_coverage_file, "w+") as file:
for service_name in sorted(coverage):
implemented = coverage.get(service_name)['implemented']
not_implemented = coverage.get(service_name)['not_implemented']
implemented = coverage.get(service_name)["implemented"]
not_implemented = coverage.get(service_name)["not_implemented"]
operations = sorted(implemented + not_implemented)

if implemented and not_implemented:
percentage_implemented = int(100.0 * len(implemented) / (len(implemented) + len(not_implemented)))
percentage_implemented = int(
100.0 * len(implemented) / (len(implemented) + len(not_implemented))
)
elif implemented:
percentage_implemented = 100
else:
@ -98,7 +110,9 @@ def write_implementation_coverage_to_file(coverage):
file.write("\n")
file.write("## {}\n".format(service_name))
file.write("<details>\n")
file.write("<summary>{}% implemented</summary>\n\n".format(percentage_implemented))
file.write(
"<summary>{}% implemented</summary>\n\n".format(percentage_implemented)
)
for op in operations:
if op in implemented:
file.write("- [X] {}\n".format(op))
@ -107,7 +121,7 @@ def write_implementation_coverage_to_file(coverage):
file.write("</details>\n")


if __name__ == '__main__':
if __name__ == "__main__":
cov = calculate_implementation_coverage()
write_implementation_coverage_to_file(cov)
print_implementation_coverage(cov)
@ -17,9 +17,7 @@ from lxml import etree

import click
import jinja2
from prompt_toolkit import (
prompt
)
from prompt_toolkit import prompt
from prompt_toolkit.completion import WordCompleter
from prompt_toolkit.shortcuts import print_formatted_text
@ -29,35 +27,35 @@ import boto3

from moto.core.responses import BaseResponse
from moto.core import BaseBackend
from implementation_coverage import (
get_moto_implementation
)
from implementation_coverage import get_moto_implementation
from inflection import singularize

TEMPLATE_DIR = os.path.join(os.path.dirname(__file__), './template')
TEMPLATE_DIR = os.path.join(os.path.dirname(__file__), "./template")

INPUT_IGNORED_IN_BACKEND = ['Marker', 'PageSize']
OUTPUT_IGNORED_IN_BACKEND = ['NextMarker']
INPUT_IGNORED_IN_BACKEND = ["Marker", "PageSize"]
OUTPUT_IGNORED_IN_BACKEND = ["NextMarker"]


def print_progress(title, body, color):
click.secho(u'\t{}\t'.format(title), fg=color, nl=False)
click.secho(u"\t{}\t".format(title), fg=color, nl=False)
click.echo(body)


def select_service_and_operation():
service_names = Session().get_available_services()
service_completer = WordCompleter(service_names)
service_name = prompt(u'Select service: ', completer=service_completer)
service_name = prompt(u"Select service: ", completer=service_completer)
if service_name not in service_names:
click.secho(u'{} is not valid service'.format(service_name), fg='red')
click.secho(u"{} is not valid service".format(service_name), fg="red")
raise click.Abort()
moto_client = get_moto_implementation(service_name)
real_client = boto3.client(service_name, region_name='us-east-1')
real_client = boto3.client(service_name, region_name="us-east-1")
implemented = []
not_implemented = []

operation_names = [xform_name(op) for op in real_client.meta.service_model.operation_names]
operation_names = [
xform_name(op) for op in real_client.meta.service_model.operation_names
]
for op in operation_names:
if moto_client and op in dir(moto_client):
implemented.append(op)
@ -65,133 +63,148 @@ def select_service_and_operation():
not_implemented.append(op)
operation_completer = WordCompleter(operation_names)

click.echo('==Current Implementation Status==')
click.echo("==Current Implementation Status==")
for operation_name in operation_names:
check = 'X' if operation_name in implemented else ' '
click.secho('[{}] {}'.format(check, operation_name))
click.echo('=================================')
operation_name = prompt(u'Select Operation: ', completer=operation_completer)
check = "X" if operation_name in implemented else " "
click.secho("[{}] {}".format(check, operation_name))
click.echo("=================================")
operation_name = prompt(u"Select Operation: ", completer=operation_completer)

if operation_name not in operation_names:
click.secho('{} is not valid operation'.format(operation_name), fg='red')
click.secho("{} is not valid operation".format(operation_name), fg="red")
raise click.Abort()

if operation_name in implemented:
click.secho('{} is already implemented'.format(operation_name), fg='red')
click.secho("{} is already implemented".format(operation_name), fg="red")
raise click.Abort()
return service_name, operation_name


def get_escaped_service(service):
return service.replace('-', '')
return service.replace("-", "")


def get_lib_dir(service):
return os.path.join('moto', get_escaped_service(service))
return os.path.join("moto", get_escaped_service(service))


def get_test_dir(service):
return os.path.join('tests', 'test_{}'.format(get_escaped_service(service)))
return os.path.join("tests", "test_{}".format(get_escaped_service(service)))


def render_template(tmpl_dir, tmpl_filename, context, service, alt_filename=None):
is_test = True if 'test' in tmpl_dir else False
rendered = jinja2.Environment(
loader=jinja2.FileSystemLoader(tmpl_dir)
).get_template(tmpl_filename).render(context)
is_test = True if "test" in tmpl_dir else False
rendered = (
jinja2.Environment(loader=jinja2.FileSystemLoader(tmpl_dir))
.get_template(tmpl_filename)
.render(context)
)

dirname = get_test_dir(service) if is_test else get_lib_dir(service)
filename = alt_filename or os.path.splitext(tmpl_filename)[0]
filepath = os.path.join(dirname, filename)

if os.path.exists(filepath):
print_progress('skip creating', filepath, 'yellow')
print_progress("skip creating", filepath, "yellow")
else:
print_progress('creating', filepath, 'green')
with open(filepath, 'w') as f:
print_progress("creating", filepath, "green")
with open(filepath, "w") as f:
f.write(rendered)


def append_mock_to_init_py(service):
path = os.path.join(os.path.dirname(__file__), '..', 'moto', '__init__.py')
path = os.path.join(os.path.dirname(__file__), "..", "moto", "__init__.py")
with open(path) as f:
lines = [_.replace('\n', '') for _ in f.readlines()]
lines = [_.replace("\n", "") for _ in f.readlines()]

if any(_ for _ in lines if re.match('^mock_{}.*lazy_load(.*)$'.format(service), _)):
if any(_ for _ in lines if re.match("^mock_{}.*lazy_load(.*)$".format(service), _)):
return
filtered_lines = [_ for _ in lines if re.match('^mock_.*lazy_load(.*)$', _)]
filtered_lines = [_ for _ in lines if re.match("^mock_.*lazy_load(.*)$", _)]
last_import_line_index = lines.index(filtered_lines[-1])

new_line = 'mock_{} = lazy_load(".{}", "mock_{}")'.format(get_escaped_service(service), get_escaped_service(service), get_escaped_service(service))
new_line = 'mock_{} = lazy_load(".{}", "mock_{}")'.format(
get_escaped_service(service),
get_escaped_service(service),
get_escaped_service(service),
)
lines.insert(last_import_line_index + 1, new_line)

body = '\n'.join(lines) + '\n'
with open(path, 'w') as f:
body = "\n".join(lines) + "\n"
with open(path, "w") as f:
f.write(body)


def append_mock_dict_to_backends_py(service):
path = os.path.join(os.path.dirname(__file__), '..', 'moto', 'backends.py')
path = os.path.join(os.path.dirname(__file__), "..", "moto", "backends.py")
with open(path) as f:
lines = [_.replace('\n', '') for _ in f.readlines()]
lines = [_.replace("\n", "") for _ in f.readlines()]

if any(_ for _ in lines if re.match(".*\"{}\": {}_backends.*".format(service, service), _)):
if any(
_
for _ in lines
if re.match('.*"{}": {}_backends.*'.format(service, service), _)
):
return
filtered_lines = [_ for _ in lines if re.match(".*\".*\":.*_backends.*", _)]
filtered_lines = [_ for _ in lines if re.match('.*".*":.*_backends.*', _)]
last_elem_line_index = lines.index(filtered_lines[-1])

new_line = " \"{}\": (\"{}\", \"{}_backends\"),".format(service, get_escaped_service(service), get_escaped_service(service))
new_line = ' "{}": ("{}", "{}_backends"),'.format(
service, get_escaped_service(service), get_escaped_service(service)
)
prev_line = lines[last_elem_line_index]
if not prev_line.endswith('{') and not prev_line.endswith(','):
lines[last_elem_line_index] += ','
if not prev_line.endswith("{") and not prev_line.endswith(","):
lines[last_elem_line_index] += ","
lines.insert(last_elem_line_index + 1, new_line)

body = '\n'.join(lines) + '\n'
with open(path, 'w') as f:
body = "\n".join(lines) + "\n"
with open(path, "w") as f:
f.write(body)


def initialize_service(service, operation, api_protocol):
"""create lib and test dirs if not exist
"""
"""create lib and test dirs if not exist"""
lib_dir = get_lib_dir(service)
test_dir = get_test_dir(service)

print_progress('Initializing service', service, 'green')
print_progress("Initializing service", service, "green")

client = boto3.client(service)
service_class = client.__class__.__name__
endpoint_prefix = client._service_model.endpoint_prefix

tmpl_context = {
'service': service,
'service_class': service_class,
'endpoint_prefix': endpoint_prefix,
'api_protocol': api_protocol,
'escaped_service': get_escaped_service(service)
"service": service,
"service_class": service_class,
"endpoint_prefix": endpoint_prefix,
"api_protocol": api_protocol,
"escaped_service": get_escaped_service(service),
}

# initialize service directory
if os.path.exists(lib_dir):
print_progress('skip creating', lib_dir, 'yellow')
print_progress("skip creating", lib_dir, "yellow")
else:
print_progress('creating', lib_dir, 'green')
print_progress("creating", lib_dir, "green")
os.makedirs(lib_dir)

tmpl_dir = os.path.join(TEMPLATE_DIR, 'lib')
tmpl_dir = os.path.join(TEMPLATE_DIR, "lib")
for tmpl_filename in os.listdir(tmpl_dir):
render_template(
tmpl_dir, tmpl_filename, tmpl_context, service
)
render_template(tmpl_dir, tmpl_filename, tmpl_context, service)

# initialize test directory
if os.path.exists(test_dir):
print_progress('skip creating', test_dir, 'yellow')
print_progress("skip creating", test_dir, "yellow")
else:
print_progress('creating', test_dir, 'green')
print_progress("creating", test_dir, "green")
os.makedirs(test_dir)
tmpl_dir = os.path.join(TEMPLATE_DIR, 'test')
tmpl_dir = os.path.join(TEMPLATE_DIR, "test")
for tmpl_filename in os.listdir(tmpl_dir):
alt_filename = 'test_{}.py'.format(get_escaped_service(service)) if tmpl_filename == 'test_service.py.j2' else None
render_template(
tmpl_dir, tmpl_filename, tmpl_context, service, alt_filename
alt_filename = (
"test_{}.py".format(get_escaped_service(service))
if tmpl_filename == "test_service.py.j2"
else None
)
render_template(tmpl_dir, tmpl_filename, tmpl_context, service, alt_filename)

# append mock to init files
append_mock_to_init_py(service)
@ -199,22 +212,24 @@ def initialize_service(service, operation, api_protocol):


def to_upper_camel_case(s):
return ''.join([_.title() for _ in s.split('_')])
return "".join([_.title() for _ in s.split("_")])


def to_lower_camel_case(s):
words = s.split('_')
return ''.join(words[:1] + [_.title() for _ in words[1:]])
words = s.split("_")
return "".join(words[:1] + [_.title() for _ in words[1:]])


def to_snake_case(s):
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', s)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", s)
return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower()
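The two-pass regex in to_snake_case is easy to misread, so here is a small self-contained check of the same helper (the sample operation names are illustrative only):

import re

def to_snake_case(s):
    # First pass splits boundaries like "eRest" -> "e_Rest"; the second
    # handles remaining lower/digit-to-upper boundaries, then lowercases.
    s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", s)
    return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower()

assert to_snake_case("CreateRestApi") == "create_rest_api"
assert to_snake_case("DescribeDBInstances") == "describe_db_instances"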
def get_operation_name_in_keys(operation_name, operation_keys):
index = [_.lower() for _ in operation_keys].index(operation_name.lower())
return operation_keys[index]


def get_function_in_responses(service, operation, protocol):
"""refers to definition of API in botocore, and autogenerates function
You can see example of elbv2 from link below.
@ -224,44 +239,56 @@ def get_function_in_responses(service, operation, protocol):

aws_operation_name = get_operation_name_in_keys(
to_upper_camel_case(operation),
list(client._service_model._service_description['operations'].keys())
list(client._service_model._service_description["operations"].keys()),
)

op_model = client._service_model.operation_model(aws_operation_name)
if not hasattr(op_model.output_shape, 'members'):
if not hasattr(op_model.output_shape, "members"):
outputs = {}
else:
outputs = op_model.output_shape.members
inputs = op_model.input_shape.members
input_names = [to_snake_case(_) for _ in inputs.keys() if _ not in INPUT_IGNORED_IN_BACKEND]
output_names = [to_snake_case(_) for _ in outputs.keys() if _ not in OUTPUT_IGNORED_IN_BACKEND]
body = '\ndef {}(self):\n'.format(operation)
input_names = [
to_snake_case(_) for _ in inputs.keys() if _ not in INPUT_IGNORED_IN_BACKEND
]
output_names = [
to_snake_case(_) for _ in outputs.keys() if _ not in OUTPUT_IGNORED_IN_BACKEND
]
body = "\ndef {}(self):\n".format(operation)

for input_name, input_type in inputs.items():
type_name = input_type.type_name
if type_name == 'integer':
if type_name == "integer":
arg_line_tmpl = ' {} = self._get_int_param("{}")\n'
elif type_name == 'list':
elif type_name == "list":
arg_line_tmpl = ' {} = self._get_list_prefix("{}.member")\n'
else:
arg_line_tmpl = ' {} = self._get_param("{}")\n'
body += arg_line_tmpl.format(to_snake_case(input_name), input_name)
if output_names:
body += ' {} = self.{}_backend.{}(\n'.format(', '.join(output_names), get_escaped_service(service), operation)
else:
body += ' self.{}_backend.{}(\n'.format(get_escaped_service(service), operation)
for input_name in input_names:
body += ' {}={},\n'.format(input_name, input_name)

body += ' )\n'
if protocol == 'query':
body += ' template = self.response_template({}_TEMPLATE)\n'.format(operation.upper())
body += ' return template.render({})\n'.format(
', '.join(['{}={}'.format(_, _) for _ in output_names])
body += " {} = self.{}_backend.{}(\n".format(
", ".join(output_names), get_escaped_service(service), operation
)
else:
body += " self.{}_backend.{}(\n".format(
get_escaped_service(service), operation
)
for input_name in input_names:
body += " {}={},\n".format(input_name, input_name)

body += " )\n"
if protocol == "query":
body += " template = self.response_template({}_TEMPLATE)\n".format(
operation.upper()
)
body += " return template.render({})\n".format(
", ".join(["{}={}".format(_, _) for _ in output_names])
)
elif protocol in ["json", "rest-json"]:
body += " # TODO: adjust response\n"
body += " return json.dumps(dict({}))\n".format(
", ".join(["{}={}".format(to_lower_camel_case(_), _) for _ in output_names])
)
elif protocol in ['json', 'rest-json']:
body += ' # TODO: adjust response\n'
body += ' return json.dumps(dict({}))\n'.format(', '.join(['{}={}'.format(to_lower_camel_case(_), _) for _ in output_names]))
return body
@ -273,44 +300,55 @@ def get_function_in_models(service, operation):
client = boto3.client(service)
aws_operation_name = get_operation_name_in_keys(
to_upper_camel_case(operation),
list(client._service_model._service_description['operations'].keys())
list(client._service_model._service_description["operations"].keys()),
)
op_model = client._service_model.operation_model(aws_operation_name)
inputs = op_model.input_shape.members
if not hasattr(op_model.output_shape, 'members'):
if not hasattr(op_model.output_shape, "members"):
outputs = {}
else:
outputs = op_model.output_shape.members
input_names = [to_snake_case(_) for _ in inputs.keys() if _ not in INPUT_IGNORED_IN_BACKEND]
output_names = [to_snake_case(_) for _ in outputs.keys() if _ not in OUTPUT_IGNORED_IN_BACKEND]
input_names = [
to_snake_case(_) for _ in inputs.keys() if _ not in INPUT_IGNORED_IN_BACKEND
]
output_names = [
to_snake_case(_) for _ in outputs.keys() if _ not in OUTPUT_IGNORED_IN_BACKEND
]
if input_names:
body = 'def {}(self, {}):\n'.format(operation, ', '.join(input_names))
body = "def {}(self, {}):\n".format(operation, ", ".join(input_names))
else:
body = 'def {}(self)\n'
body += ' # implement here\n'
body += ' return {}\n\n'.format(', '.join(output_names))
body = "def {}(self)\n"
body += " # implement here\n"
body += " return {}\n\n".format(", ".join(output_names))

return body


def _get_subtree(name, shape, replace_list, name_prefix=[]):
class_name = shape.__class__.__name__
if class_name in ('StringShape', 'Shape'):
if class_name in ("StringShape", "Shape"):
t = etree.Element(name)
if name_prefix:
t.text = '{{ %s.%s }}' % (name_prefix[-1], to_snake_case(name))
t.text = "{{ %s.%s }}" % (name_prefix[-1], to_snake_case(name))
else:
t.text = '{{ %s }}' % to_snake_case(name)
t.text = "{{ %s }}" % to_snake_case(name)
return t
elif class_name in ('ListShape', ):
elif class_name in ("ListShape",):
replace_list.append((name, name_prefix))
t = etree.Element(name)
t_member = etree.Element('member')
t_member = etree.Element("member")
t.append(t_member)
for nested_name, nested_shape in shape.member.members.items():
t_member.append(_get_subtree(nested_name, nested_shape, replace_list, name_prefix + [singularize(name.lower())]))
t_member.append(
_get_subtree(
nested_name,
nested_shape,
replace_list,
name_prefix + [singularize(name.lower())],
)
)
return t
raise ValueError('Not supported Shape')
raise ValueError("Not supported Shape")
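A tiny standalone sketch of the technique _get_subtree builds on, assuming only that lxml is installed: construct the response skeleton as an element tree whose leaf text holds Jinja placeholders, then serialize it. The element names below are invented for the example:

from lxml import etree

root = etree.Element("DescribeWidgetsResponse")
widgets = etree.SubElement(root, "Widgets")
member = etree.SubElement(widgets, "member")
etree.SubElement(member, "Name").text = "{{ widget.name }}"
# pretty_print yields one tag per line, which is what lets the
# surrounding code splice "{% for ... %}" markers between lines.
print(etree.tostring(root, pretty_print=True).decode("utf-8"))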
def get_response_query_template(service, operation):
@ -323,22 +361,22 @@ def get_response_query_template(service, operation):
client = boto3.client(service)
aws_operation_name = get_operation_name_in_keys(
to_upper_camel_case(operation),
list(client._service_model._service_description['operations'].keys())
list(client._service_model._service_description["operations"].keys()),
)

op_model = client._service_model.operation_model(aws_operation_name)
result_wrapper = op_model.output_shape.serialization['resultWrapper']
response_wrapper = result_wrapper.replace('Result', 'Response')
result_wrapper = op_model.output_shape.serialization["resultWrapper"]
response_wrapper = result_wrapper.replace("Result", "Response")
metadata = op_model.metadata
xml_namespace = metadata['xmlNamespace']
xml_namespace = metadata["xmlNamespace"]

# build xml tree
t_root = etree.Element(response_wrapper, xmlns=xml_namespace)
t_root = etree.Element(response_wrapper, xmlns=xml_namespace)

# build metadata
t_metadata = etree.Element('ResponseMetadata')
t_request_id = etree.Element('RequestId')
t_request_id.text = '1549581b-12b7-11e3-895e-1334aEXAMPLE'
t_metadata = etree.Element("ResponseMetadata")
t_request_id = etree.Element("RequestId")
t_request_id.text = "1549581b-12b7-11e3-895e-1334aEXAMPLE"
t_metadata.append(t_request_id)
t_root.append(t_metadata)
@ -349,68 +387,73 @@ def get_response_query_template(service, operation):
for output_name, output_shape in outputs.items():
t_result.append(_get_subtree(output_name, output_shape, replace_list))
t_root.append(t_result)
xml_body = etree.tostring(t_root, pretty_print=True).decode('utf-8')
xml_body = etree.tostring(t_root, pretty_print=True).decode("utf-8")
xml_body_lines = xml_body.splitlines()
for replace in replace_list:
name = replace[0]
prefix = replace[1]
singular_name = singularize(name)

start_tag = '<%s>' % name
iter_name = '{}.{}'.format(prefix[-1], name.lower())if prefix else name.lower()
loop_start = '{%% for %s in %s %%}' % (singular_name.lower(), iter_name)
end_tag = '</%s>' % name
loop_end = '{{ endfor }}'
start_tag = "<%s>" % name
iter_name = "{}.{}".format(prefix[-1], name.lower()) if prefix else name.lower()
loop_start = "{%% for %s in %s %%}" % (singular_name.lower(), iter_name)
end_tag = "</%s>" % name
loop_end = "{{ endfor }}"

start_tag_indexes = [i for i, l in enumerate(xml_body_lines) if start_tag in l]
if len(start_tag_indexes) != 1:
raise Exception('tag %s not found in response body' % start_tag)
raise Exception("tag %s not found in response body" % start_tag)
start_tag_index = start_tag_indexes[0]
xml_body_lines.insert(start_tag_index + 1, loop_start)

end_tag_indexes = [i for i, l in enumerate(xml_body_lines) if end_tag in l]
if len(end_tag_indexes) != 1:
raise Exception('tag %s not found in response body' % end_tag)
raise Exception("tag %s not found in response body" % end_tag)
end_tag_index = end_tag_indexes[0]
xml_body_lines.insert(end_tag_index, loop_end)
xml_body = '\n'.join(xml_body_lines)
xml_body = "\n".join(xml_body_lines)
body = '\n{}_TEMPLATE = """{}"""'.format(operation.upper(), xml_body)
return body


def insert_code_to_class(path, base_class, new_code):
with open(path) as f:
lines = [_.replace('\n', '') for _ in f.readlines()]
mod_path = os.path.splitext(path)[0].replace('/', '.')
lines = [_.replace("\n", "") for _ in f.readlines()]
mod_path = os.path.splitext(path)[0].replace("/", ".")
mod = importlib.import_module(mod_path)
clsmembers = inspect.getmembers(mod, inspect.isclass)
_response_cls = [_[1] for _ in clsmembers if issubclass(_[1], base_class) and _[1] != base_class]
_response_cls = [
_[1] for _ in clsmembers if issubclass(_[1], base_class) and _[1] != base_class
]
if len(_response_cls) != 1:
raise Exception('unknown error, number of clsmembers is not 1')
raise Exception("unknown error, number of clsmembers is not 1")
response_cls = _response_cls[0]
code_lines, line_no = inspect.getsourcelines(response_cls)
end_line_no = line_no + len(code_lines)

func_lines = [' ' * 4 + _ for _ in new_code.splitlines()]
func_lines = [" " * 4 + _ for _ in new_code.splitlines()]

lines = lines[:end_line_no] + func_lines + lines[end_line_no:]

body = '\n'.join(lines) + '\n'
with open(path, 'w') as f:
body = "\n".join(lines) + "\n"
with open(path, "w") as f:
f.write(body)


def insert_url(service, operation, api_protocol):
client = boto3.client(service)
service_class = client.__class__.__name__
aws_operation_name = get_operation_name_in_keys(
to_upper_camel_case(operation),
list(client._service_model._service_description['operations'].keys())
list(client._service_model._service_description["operations"].keys()),
)
uri = client._service_model.operation_model(aws_operation_name).http['requestUri']
uri = client._service_model.operation_model(aws_operation_name).http["requestUri"]

path = os.path.join(os.path.dirname(__file__), '..', 'moto', get_escaped_service(service), 'urls.py')
path = os.path.join(
os.path.dirname(__file__), "..", "moto", get_escaped_service(service), "urls.py"
)
with open(path) as f:
lines = [_.replace('\n', '') for _ in f.readlines()]
lines = [_.replace("\n", "") for _ in f.readlines()]

if any(_ for _ in lines if re.match(uri, _)):
return
@ -418,50 +461,49 @@ def insert_url(service, operation, api_protocol):
url_paths_found = False
last_elem_line_index = -1
for i, line in enumerate(lines):
if line.startswith('url_paths'):
if line.startswith("url_paths"):
url_paths_found = True
if url_paths_found and line.startswith('}'):
if url_paths_found and line.startswith("}"):
last_elem_line_index = i - 1

prev_line = lines[last_elem_line_index]
if not prev_line.endswith('{') and not prev_line.endswith(','):
lines[last_elem_line_index] += ','
if not prev_line.endswith("{") and not prev_line.endswith(","):
lines[last_elem_line_index] += ","

# generate url pattern
if api_protocol == 'rest-json':
if api_protocol == "rest-json":
new_line = " '{0}/.*$': response.dispatch,"
else:
new_line = " '{0}%s$': %sResponse.dispatch," % (
uri, service_class
)
new_line = " '{0}%s$': %sResponse.dispatch," % (uri, service_class)
if new_line in lines:
return
lines.insert(last_elem_line_index + 1, new_line)

body = '\n'.join(lines) + '\n'
with open(path, 'w') as f:
body = "\n".join(lines) + "\n"
with open(path, "w") as f:
f.write(body)


def insert_codes(service, operation, api_protocol):
func_in_responses = get_function_in_responses(service, operation, api_protocol)
func_in_models = get_function_in_models(service, operation)
# edit responses.py
responses_path = 'moto/{}/responses.py'.format(get_escaped_service(service))
print_progress('inserting code', responses_path, 'green')
responses_path = "moto/{}/responses.py".format(get_escaped_service(service))
print_progress("inserting code", responses_path, "green")
insert_code_to_class(responses_path, BaseResponse, func_in_responses)

# insert template
if api_protocol == 'query':
if api_protocol == "query":
template = get_response_query_template(service, operation)
with open(responses_path) as f:
lines = [_[:-1] for _ in f.readlines()]
lines += template.splitlines()
with open(responses_path, 'w') as f:
f.write('\n'.join(lines))
with open(responses_path, "w") as f:
f.write("\n".join(lines))

# edit models.py
models_path = 'moto/{}/models.py'.format(get_escaped_service(service))
print_progress('inserting code', models_path, 'green')
models_path = "moto/{}/models.py".format(get_escaped_service(service))
print_progress("inserting code", models_path, "green")
insert_code_to_class(models_path, BaseBackend, func_in_models)

# edit urls.py
@ -471,15 +513,20 @@ def insert_codes(service, operation, api_protocol):
@click.command()
def main():
service, operation = select_service_and_operation()
api_protocol = boto3.client(service)._service_model.metadata['protocol']
api_protocol = boto3.client(service)._service_model.metadata["protocol"]
initialize_service(service, operation, api_protocol)

if api_protocol in ['query', 'json', 'rest-json']:
if api_protocol in ["query", "json", "rest-json"]:
insert_codes(service, operation, api_protocol)
else:
print_progress('skip inserting code', 'api protocol "{}" is not supported'.format(api_protocol), 'yellow')
print_progress(
"skip inserting code",
'api protocol "{}" is not supported'.format(api_protocol),
"yellow",
)

click.echo('You will still need to add the mock into "__init__.py"'.format(service))

if __name__ == '__main__':

if __name__ == "__main__":
main()
@ -23,42 +23,53 @@ def json_serial(obj):
raise TypeError("Type not serializable")


client = boto3.client('iam')
client = boto3.client("iam")

policies = {}

paginator = client.get_paginator('list_policies')
paginator = client.get_paginator("list_policies")
try:
response_iterator = paginator.paginate(Scope='AWS')
response_iterator = paginator.paginate(Scope="AWS")
for response in response_iterator:
for policy in response['Policies']:
policies[policy['PolicyName']] = policy
for policy in response["Policies"]:
policies[policy["PolicyName"]] = policy
except NoCredentialsError:
print("USAGE:")
print("Put your AWS credentials into ~/.aws/credentials and run:")
print(__file__)
print("")
print("Or specify them on the command line:")
print("AWS_ACCESS_KEY_ID=your_personal_access_key AWS_SECRET_ACCESS_KEY=your_personal_secret {}".format(__file__))
print(
"AWS_ACCESS_KEY_ID=your_personal_access_key AWS_SECRET_ACCESS_KEY=your_personal_secret {}".format(
__file__
)
)
print("")
sys.exit(1)

for policy_name in policies:
response = client.get_policy_version(
PolicyArn=policies[policy_name]['Arn'],
VersionId=policies[policy_name]['DefaultVersionId'])
for key in response['PolicyVersion']:
if key != "CreateDate":  # the policy's CreateDate should not be overwritten by its version's CreateDate
policies[policy_name][key] = response['PolicyVersion'][key]
PolicyArn=policies[policy_name]["Arn"],
VersionId=policies[policy_name]["DefaultVersionId"],
)
for key in response["PolicyVersion"]:
if (
key != "CreateDate"
):  # the policy's CreateDate should not be overwritten by its version's CreateDate
policies[policy_name][key] = response["PolicyVersion"][key]

with open(output_file, 'w') as f:
triple_quote = '\"\"\"'
with open(output_file, "w") as f:
triple_quote = '"""'

f.write("# Imported via `make aws_managed_policies`\n")
f.write('aws_managed_policies_data = {}\n'.format(triple_quote))
f.write(json.dumps(policies,
sort_keys=True,
indent=4,
separators=(',', ': '),
default=json_serial))
f.write('{}\n'.format(triple_quote))
f.write("aws_managed_policies_data = {}\n".format(triple_quote))
f.write(
json.dumps(
policies,
sort_keys=True,
indent=4,
separators=(",", ": "),
default=json_serial,
)
)
f.write("{}\n".format(triple_quote))
70
setup.py
@ -13,20 +13,22 @@ PY2 = sys.version_info[0] == 2
# Borrowed from pip at https://github.com/pypa/pip/blob/62c27dee45625e1b63d1e023b0656310f276e050/setup.py#L11-L15
here = os.path.abspath(os.path.dirname(__file__))


def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
with open(os.path.join(here, *parts), 'r') as fp:
with open(os.path.join(here, *parts), "r") as fp:
return fp.read()


def get_version():
version_file = read('moto', '__init__.py')
version_match = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]',
version_file, re.MULTILINE)
version_file = read("moto", "__init__.py")
version_match = re.search(
r'^__version__ = [\'"]([^\'"]*)[\'"]', version_file, re.MULTILINE
)
if version_match:
return version_match.group(1)
raise RuntimeError('Unable to find version string.')
raise RuntimeError("Unable to find version string.")


install_requires = [
@ -77,7 +79,9 @@ else:

_dep_PyYAML = "PyYAML>=5.1"
_dep_python_jose = "python-jose[cryptography]>=3.1.0,<4.0.0"
_dep_python_jose_ecdsa_pin = "ecdsa<0.15"  # https://github.com/spulec/moto/pull/3263#discussion_r477404984
_dep_python_jose_ecdsa_pin = (
"ecdsa<0.15"  # https://github.com/spulec/moto/pull/3263#discussion_r477404984
)
_dep_docker = "docker>=2.5.1"
_dep_jsondiff = "jsondiff>=1.1.2"
_dep_aws_xray_sdk = "aws-xray-sdk!=0.96,>=0.93"
@ -98,31 +102,31 @@ all_extra_deps = [
_dep_sshpubkeys_py2,
_dep_sshpubkeys_py3,
]
all_server_deps = all_extra_deps + ['flask', 'flask-cors']
all_server_deps = all_extra_deps + ["flask", "flask-cors"]

# TODO: do we want to add ALL services here?
# i.e. even those without extra dependencies.
# Would be good for future-compatibility, I guess.
extras_per_service = {
'apigateway': [_dep_python_jose, _dep_python_jose_ecdsa_pin],
'awslambda': [_dep_docker],
'batch': [_dep_docker],
'cloudformation': [_dep_docker, _dep_PyYAML, _dep_cfn_lint],
'cognitoidp': [_dep_python_jose, _dep_python_jose_ecdsa_pin],
'dynamodb2': [_dep_docker],
'dynamodbstreams': [_dep_docker],
"apigateway": [_dep_python_jose, _dep_python_jose_ecdsa_pin],
"awslambda": [_dep_docker],
"batch": [_dep_docker],
"cloudformation": [_dep_docker, _dep_PyYAML, _dep_cfn_lint],
"cognitoidp": [_dep_python_jose, _dep_python_jose_ecdsa_pin],
"dynamodb2": [_dep_docker],
"dynamodbstreams": [_dep_docker],
"ec2": [_dep_docker, _dep_sshpubkeys_py2, _dep_sshpubkeys_py3],
'iotdata': [_dep_jsondiff],
's3': [_dep_PyYAML],
'ses': [_dep_docker],
'sns': [_dep_docker],
'sqs': [_dep_docker],
'ssm': [_dep_docker, _dep_PyYAML, _dep_cfn_lint],
'xray': [_dep_aws_xray_sdk],
"iotdata": [_dep_jsondiff],
"s3": [_dep_PyYAML],
"ses": [_dep_docker],
"sns": [_dep_docker],
"sqs": [_dep_docker],
"ssm": [_dep_docker, _dep_PyYAML, _dep_cfn_lint],
"xray": [_dep_aws_xray_sdk],
}
extras_require = {
'all': all_extra_deps,
'server': all_server_deps,
"all": all_extra_deps,
"server": all_server_deps,
}

extras_require.update(extras_per_service)
@ -136,18 +140,18 @@ else:


setup(
name='moto',
name="moto",
version=get_version(),
description='A library that allows your python tests to easily'
' mock out the boto library',
long_description=read('README.md'),
long_description_content_type='text/markdown',
author='Steve Pulec',
author_email='spulec@gmail.com',
url='https://github.com/spulec/moto',
description="A library that allows your python tests to easily"
" mock out the boto library",
long_description=read("README.md"),
long_description_content_type="text/markdown",
author="Steve Pulec",
author_email="spulec@gmail.com",
url="https://github.com/spulec/moto",
entry_points={
'console_scripts': [
'moto_server = moto.server:main',
"console_scripts": [
"moto_server = moto.server:main",
],
},
packages=find_packages(exclude=("tests", "tests.*")),
@ -22,8 +22,10 @@ SERVER_CRT = _GET_RESOURCE("star_moto_com.pem")
SERVER_COMMON_NAME = "*.moto.com"
SERVER_CRT_BAD = _GET_RESOURCE("star_moto_com-bad.pem")
SERVER_KEY = _GET_RESOURCE("star_moto_com.key")
BAD_ARN = "arn:aws:acm:us-east-2:{}:certificate/_0000000-0000-0000-0000-000000000000".format(
ACCOUNT_ID
BAD_ARN = (
"arn:aws:acm:us-east-2:{}:certificate/_0000000-0000-0000-0000-000000000000".format(
ACCOUNT_ID
)
)
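The BAD_ARN hunk is a good minimal reproduction of how Black parenthesizes an assignment whose right-hand side cannot fit within the line limit. A sketch, assuming black is installed; the output should match the new BAD_ARN form above, though exact wrapping can vary slightly between Black releases:

import black

# ACCOUNT_ID appears only as source text inside the string, so it does
# not need to be defined for formatting to run.
src = 'BAD_ARN = "arn:aws:acm:us-east-2:{}:certificate/_0000000-0000-0000-0000-000000000000".format(ACCOUNT_ID)\n'
print(black.format_str(src, mode=black.Mode()))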
@ -56,7 +58,10 @@ def test_import_certificate_with_tags():
Certificate=SERVER_CRT,
PrivateKey=SERVER_KEY,
CertificateChain=CA_CRT,
Tags=[{"Key": "Environment", "Value": "QA"}, {"Key": "KeyOnly"},],
Tags=[
{"Key": "Environment", "Value": "QA"},
{"Key": "KeyOnly"},
],
)
arn = resp["CertificateArn"]
@ -368,7 +373,10 @@ def test_request_certificate_with_tags():
DomainName="google.com",
IdempotencyToken=token,
SubjectAlternativeNames=["google.com", "www.google.com", "mail.google.com"],
Tags=[{"Key": "Environment", "Value": "Prod"}, {"Key": "KeyOnly"},],
Tags=[
{"Key": "Environment", "Value": "Prod"},
{"Key": "KeyOnly"},
],
)
arn_2 = resp["CertificateArn"]
@ -398,7 +406,8 @@ def test_operations_with_invalid_tags():
# request certificate with invalid tags
with assert_raises(ClientError) as ex:
client.request_certificate(
DomainName="example.com", Tags=[{"Key": "X" * 200, "Value": "Valid"}],
DomainName="example.com",
Tags=[{"Key": "X" * 200, "Value": "Valid"}],
)
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.exception.response["Error"]["Message"].should.contain(
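These test hunks all trace back to Black's "magic trailing comma": when a call already ends with a trailing comma, recent Black releases refuse to collapse it onto one line and instead put one argument per line. A sketch, assuming black is installed:

import black

src = 'client.request_certificate(DomainName="example.com", Tags=[{"Key": "X", "Value": "Valid"}],)\n'
# The trailing comma before ")" forces the exploded, one-argument-per-line
# layout seen throughout the test files in this commit.
print(black.format_str(src, mode=black.Mode()))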
@ -105,7 +105,9 @@ def test_create_rest_api_valid_apikeysources():

# 1. test creating rest api with HEADER apiKeySource
response = client.create_rest_api(
name="my_api", description="this is my api", apiKeySource="HEADER",
name="my_api",
description="this is my api",
apiKeySource="HEADER",
)
api_id = response["id"]
@ -114,7 +116,9 @@ def test_create_rest_api_valid_apikeysources():

# 2. test creating rest api with AUTHORIZER apiKeySource
response = client.create_rest_api(
name="my_api2", description="this is my api", apiKeySource="AUTHORIZER",
name="my_api2",
description="this is my api",
apiKeySource="AUTHORIZER",
)
api_id = response["id"]
@ -149,7 +153,9 @@ def test_create_rest_api_valid_endpointconfigurations():

response = client.get_rest_api(restApiId=api_id)
response["endpointConfiguration"].should.equal(
{"types": ["PRIVATE"],}
{
"types": ["PRIVATE"],
}
)

# 2. test creating rest api with REGIONAL endpointConfiguration
@ -162,7 +168,9 @@ def test_create_rest_api_valid_endpointconfigurations():

response = client.get_rest_api(restApiId=api_id)
response["endpointConfiguration"].should.equal(
{"types": ["REGIONAL"],}
{
"types": ["REGIONAL"],
}
)

# 3. test creating rest api with EDGE endpointConfiguration
@ -175,7 +183,9 @@ def test_create_rest_api_valid_endpointconfigurations():

response = client.get_rest_api(restApiId=api_id)
response["endpointConfiguration"].should.equal(
{"types": ["EDGE"],}
{
"types": ["EDGE"],
}
)
@ -221,7 +231,11 @@ def test_create_resource():
root_resource["ResponseMetadata"].pop("HTTPHeaders", None)
root_resource["ResponseMetadata"].pop("RetryAttempts", None)
root_resource.should.equal(
{"path": "/", "id": root_id, "ResponseMetadata": {"HTTPStatusCode": 200},}
{
"path": "/",
"id": root_id,
"ResponseMetadata": {"HTTPStatusCode": 200},
}
)

client.create_resource(restApiId=api_id, parentId=root_id, pathPart="users")
@ -1669,9 +1683,7 @@ def test_get_domain_name():
with pytest.raises(ClientError) as ex:
client.get_domain_name(domainName=domain_name)

ex.value.response["Error"]["Message"].should.equal(
"Invalid Domain Name specified"
)
ex.value.response["Error"]["Message"].should.equal("Invalid Domain Name specified")
ex.value.response["Error"]["Code"].should.equal("NotFoundException")
# adding a domain name
client.create_domain_name(domainName=domain_name)
@ -1708,9 +1720,7 @@ def test_create_model():
description=description,
contentType=content_type,
)
ex.value.response["Error"]["Message"].should.equal(
"Invalid Rest API Id specified"
)
ex.value.response["Error"]["Message"].should.equal("Invalid Rest API Id specified")
ex.value.response["Error"]["Code"].should.equal("NotFoundException")

with pytest.raises(ClientError) as ex:
@ -1772,9 +1782,7 @@ def test_get_model_by_name():

with pytest.raises(ClientError) as ex:
client.get_model(restApiId=dummy_rest_api_id, modelName=model_name)
ex.value.response["Error"]["Message"].should.equal(
"Invalid Rest API Id specified"
)
ex.value.response["Error"]["Message"].should.equal("Invalid Rest API Id specified")
ex.value.response["Error"]["Code"].should.equal("NotFoundException")
@ -1786,9 +1794,7 @@ def test_get_model_with_invalid_name():
# test with an invalid model name
with pytest.raises(ClientError) as ex:
client.get_model(restApiId=rest_api_id, modelName="fake")
ex.value.response["Error"]["Message"].should.equal(
"Invalid Model Name specified"
)
ex.value.response["Error"]["Message"].should.equal("Invalid Model Name specified")
ex.value.response["Error"]["Code"].should.equal("NotFoundException")
@ -1828,8 +1834,10 @@ def test_http_proxying_integration():
stage_name = "staging"
client.create_deployment(restApiId=api_id, stageName=stage_name)

deploy_url = "https://{api_id}.execute-api.{region_name}.amazonaws.com/{stage_name}".format(
api_id=api_id, region_name=region_name, stage_name=stage_name
deploy_url = (
"https://{api_id}.execute-api.{region_name}.amazonaws.com/{stage_name}".format(
api_id=api_id, region_name=region_name, stage_name=stage_name
)
)

if not settings.TEST_SERVER_MODE:
@ -49,7 +49,8 @@ def test_describe_scalable_targets_with_invalid_scalable_dimension_should_return

with pytest.raises(ClientError) as err:
response = client.describe_scalable_targets(
ServiceNamespace=DEFAULT_SERVICE_NAMESPACE, ScalableDimension="foo",
ServiceNamespace=DEFAULT_SERVICE_NAMESPACE,
ScalableDimension="foo",
)
err.response["Error"]["Code"].should.equal("ValidationException")
err.response["Error"]["Message"].split(":")[0].should.look_like(
@ -64,7 +65,8 @@ def test_describe_scalable_targets_with_invalid_service_namespace_should_return_

with pytest.raises(ClientError) as err:
response = client.describe_scalable_targets(
ServiceNamespace="foo", ScalableDimension=DEFAULT_SCALABLE_DIMENSION,
ServiceNamespace="foo",
ScalableDimension=DEFAULT_SCALABLE_DIMENSION,
)
err.response["Error"]["Code"].should.equal("ValidationException")
err.response["Error"]["Message"].split(":")[0].should.look_like(
@ -79,7 +81,8 @@ def test_describe_scalable_targets_with_multiple_invalid_parameters_should_retur

with pytest.raises(ClientError) as err:
response = client.describe_scalable_targets(
ServiceNamespace="foo", ScalableDimension="bar",
ServiceNamespace="foo",
ScalableDimension="bar",
)
err.response["Error"]["Code"].should.equal("ValidationException")
err.response["Error"]["Message"].split(":")[0].should.look_like(
@ -178,7 +178,9 @@ def test_create_named_query():

# craete named query
res = client.create_named_query(
Name="query-name", Database="target_db", QueryString="SELECT * FROM table1",
Name="query-name",
Database="target_db",
QueryString="SELECT * FROM table1",
)

assert "NamedQueryId" in res
@ -215,6 +217,8 @@ def create_basic_workgroup(client, name):
Name=name,
Description="Test work group",
Configuration={
"ResultConfiguration": {"OutputLocation": "s3://bucket-name/prefix/",}
"ResultConfiguration": {
"OutputLocation": "s3://bucket-name/prefix/",
}
},
)
@ -97,8 +97,8 @@ def test_create_autoscaling_group():

@mock_autoscaling_deprecated
def test_create_autoscaling_groups_defaults():
""" Test with the minimum inputs and check that all of the proper defaults
are assigned for the other attributes """
"""Test with the minimum inputs and check that all of the proper defaults
are assigned for the other attributes"""

mocked_networking = setup_networking_deprecated()
conn = boto.connect_autoscale()
@ -961,7 +961,8 @@ def test_describe_autoscaling_groups_boto3_launch_config():
mocked_networking = setup_networking()
client = boto3.client("autoscaling", region_name="us-east-1")
client.create_launch_configuration(
LaunchConfigurationName="test_launch_configuration", InstanceType="t2.micro",
LaunchConfigurationName="test_launch_configuration",
InstanceType="t2.micro",
)
client.create_auto_scaling_group(
AutoScalingGroupName="test_asg",
@ -1040,7 +1041,8 @@ def test_describe_autoscaling_instances_boto3_launch_config():
|
||||
mocked_networking = setup_networking()
|
||||
client = boto3.client("autoscaling", region_name="us-east-1")
|
||||
client.create_launch_configuration(
|
||||
LaunchConfigurationName="test_launch_configuration", InstanceType="t2.micro",
|
||||
LaunchConfigurationName="test_launch_configuration",
|
||||
InstanceType="t2.micro",
|
||||
)
|
||||
client.create_auto_scaling_group(
|
||||
AutoScalingGroupName="test_asg",
|
||||
@ -2154,7 +2156,8 @@ def test_standby_exit_standby():
|
||||
response["AutoScalingInstances"][0]["LifecycleState"].should.equal("Standby")
|
||||
|
||||
response = client.exit_standby(
|
||||
AutoScalingGroupName="test_asg", InstanceIds=[instance_to_standby_exit_standby],
|
||||
AutoScalingGroupName="test_asg",
|
||||
InstanceIds=[instance_to_standby_exit_standby],
|
||||
)
|
||||
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
|
||||
|
||||
|
@ -32,7 +32,8 @@ Outputs:
|
||||
""".strip()
|
||||
|
||||
cf_client.create_stack(
|
||||
StackName=stack_name, TemplateBody=cf_template,
|
||||
StackName=stack_name,
|
||||
TemplateBody=cf_template,
|
||||
)
|
||||
stack = cf_client.describe_stacks(StackName=stack_name)["Stacks"][0]
|
||||
stack["Outputs"][0]["OutputValue"].should.be.equal("test_launch_configuration")
|
||||
@ -56,7 +57,8 @@ Outputs:
|
||||
""".strip()
|
||||
|
||||
cf_client.update_stack(
|
||||
StackName=stack_name, TemplateBody=cf_template,
|
||||
StackName=stack_name,
|
||||
TemplateBody=cf_template,
|
||||
)
|
||||
stack = cf_client.describe_stacks(StackName=stack_name)["Stacks"][0]
|
||||
stack["Outputs"][0]["OutputValue"].should.be.equal("test_launch_configuration")
|
||||
@ -76,7 +78,8 @@ def test_autoscaling_group_from_launch_config():
|
||||
client = boto3.client("autoscaling", region_name="us-east-1")
|
||||
|
||||
client.create_launch_configuration(
|
||||
LaunchConfigurationName="test_launch_configuration", InstanceType="t2.micro",
|
||||
LaunchConfigurationName="test_launch_configuration",
|
||||
InstanceType="t2.micro",
|
||||
)
|
||||
stack_name = "test-auto-scaling-group"
|
||||
|
||||
|
@ -152,8 +152,8 @@ def test_create_launch_configuration_using_ip_association_should_default_to_fals
|
||||
|
||||
@mock_autoscaling_deprecated
|
||||
def test_create_launch_configuration_defaults():
|
||||
""" Test with the minimum inputs and check that all of the proper defaults
|
||||
are assigned for the other attributes """
|
||||
"""Test with the minimum inputs and check that all of the proper defaults
|
||||
are assigned for the other attributes"""
|
||||
conn = boto.connect_autoscale()
|
||||
config = LaunchConfiguration(
|
||||
name="tester", image_id="ami-abcd1234", instance_type="m1.small"
|
||||
|
@ -170,7 +170,7 @@ def test_execute_policy_percent_change_in_capacity():
|
||||
|
||||
@mock_autoscaling_deprecated
|
||||
def test_execute_policy_small_percent_change_in_capacity():
|
||||
""" http://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide/as-scale-based-on-demand.html
|
||||
"""http://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide/as-scale-based-on-demand.html
|
||||
If PercentChangeInCapacity returns a value between 0 and 1,
|
||||
Auto Scaling will round it off to 1."""
|
||||
setup_autoscale_group()
|
||||
|
@ -204,7 +204,9 @@ def test_invoke_dryrun_function():
|
||||
Runtime="python2.7",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file1(),},
|
||||
Code={
|
||||
"ZipFile": get_test_zip_file1(),
|
||||
},
|
||||
Description="test lambda function",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
@ -1275,7 +1277,8 @@ def wait_for_log_msg(expected_msg, log_group):
|
||||
|
||||
for log_stream in log_streams:
|
||||
result = logs_conn.get_log_events(
|
||||
logGroupName=log_group, logStreamName=log_stream["logStreamName"],
|
||||
logGroupName=log_group,
|
||||
logStreamName=log_stream["logStreamName"],
|
||||
)
|
||||
received_messages.extend(
|
||||
[event["message"] for event in result.get("events")]
|
||||
@ -1713,7 +1716,9 @@ def test_remove_function_permission():
|
||||
)
|
||||
|
||||
remove = conn.remove_permission(
|
||||
FunctionName="testFunction", StatementId="1", Qualifier="2",
|
||||
FunctionName="testFunction",
|
||||
StatementId="1",
|
||||
Qualifier="2",
|
||||
)
|
||||
remove["ResponseMetadata"]["HTTPStatusCode"].should.equal(204)
|
||||
policy = conn.get_policy(FunctionName="testFunction", Qualifier="2")["Policy"]
|
||||
|
@ -23,7 +23,9 @@ depends_on_template_list = {
|
||||
},
|
||||
"LaunchConfig": {
|
||||
"Type": "AWS::AutoScaling::LaunchConfiguration",
|
||||
"Properties": {"LaunchConfigurationName": "test-launch-config",},
|
||||
"Properties": {
|
||||
"LaunchConfigurationName": "test-launch-config",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
@ -45,7 +47,9 @@ depends_on_template_string = {
|
||||
},
|
||||
"LaunchConfig": {
|
||||
"Type": "AWS::AutoScaling::LaunchConfiguration",
|
||||
"Properties": {"LaunchConfigurationName": "test-launch-config",},
|
||||
"Properties": {
|
||||
"LaunchConfigurationName": "test-launch-config",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -1369,10 +1369,12 @@ def test_non_json_redrive_policy():
|
||||
def test_boto3_create_duplicate_stack():
|
||||
cf_conn = boto3.client("cloudformation", region_name="us-east-1")
|
||||
cf_conn.create_stack(
|
||||
StackName="test_stack", TemplateBody=dummy_template_json,
|
||||
StackName="test_stack",
|
||||
TemplateBody=dummy_template_json,
|
||||
)
|
||||
|
||||
with pytest.raises(ClientError):
|
||||
cf_conn.create_stack(
|
||||
StackName="test_stack", TemplateBody=dummy_template_json,
|
||||
StackName="test_stack",
|
||||
TemplateBody=dummy_template_json,
|
||||
)
|
||||
|
@ -2325,7 +2325,10 @@ def test_stack_dynamodb_resources_integration():
|
||||
dynamodb_client = boto3.client("dynamodb", region_name="us-east-1")
|
||||
table_desc = dynamodb_client.describe_table(TableName="myTableName")["Table"]
|
||||
table_desc["StreamSpecification"].should.equal(
|
||||
{"StreamEnabled": True, "StreamViewType": "KEYS_ONLY",}
|
||||
{
|
||||
"StreamEnabled": True,
|
||||
"StreamViewType": "KEYS_ONLY",
|
||||
}
|
||||
)
|
||||
|
||||
dynamodb_conn = boto3.resource("dynamodb", region_name="us-east-1")
|
||||
@ -2779,7 +2782,9 @@ def test_stack_events_get_attribute_integration():
|
||||
@mock_dynamodb2
|
||||
def test_dynamodb_table_creation():
|
||||
CFN_TEMPLATE = {
|
||||
"Outputs": {"MyTableName": {"Value": {"Ref": "MyTable"}},},
|
||||
"Outputs": {
|
||||
"MyTableName": {"Value": {"Ref": "MyTable"}},
|
||||
},
|
||||
"Resources": {
|
||||
"MyTable": {
|
||||
"Type": "AWS::DynamoDB::Table",
|
||||
|
@ -326,7 +326,9 @@ def test_update_pipeline():
|
||||
"S3Bucket": "different-bucket",
|
||||
"S3ObjectKey": "test-object",
|
||||
},
|
||||
"outputArtifacts": [{"name": "artifact"},],
|
||||
"outputArtifacts": [
|
||||
{"name": "artifact"},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
@ -435,7 +437,9 @@ def test_update_pipeline_errors():
|
||||
"S3Bucket": "test-bucket",
|
||||
"S3ObjectKey": "test-object",
|
||||
},
|
||||
"outputArtifacts": [{"name": "artifact"},],
|
||||
"outputArtifacts": [
|
||||
{"name": "artifact"},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
@ -696,7 +700,9 @@ def create_basic_codepipeline(client, name):
|
||||
"S3Bucket": "test-bucket",
|
||||
"S3ObjectKey": "test-object",
|
||||
},
|
||||
"outputArtifacts": [{"name": "artifact"},],
|
||||
"outputArtifacts": [
|
||||
{"name": "artifact"},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
|
@ -1272,15 +1272,20 @@ def user_authentication_flow(conn):
)["UserPoolClient"]["ClientId"]

conn.sign_up(
ClientId=client_id, Username=username, Password=password,
ClientId=client_id,
Username=username,
Password=password,
)

client_secret = conn.describe_user_pool_client(
UserPoolId=user_pool_id, ClientId=client_id,
UserPoolId=user_pool_id,
ClientId=client_id,
)["UserPoolClient"]["ClientSecret"]

conn.confirm_sign_up(
ClientId=client_id, Username=username, ConfirmationCode="123456",
ClientId=client_id,
Username=username,
ConfirmationCode="123456",
)

# generating secret hash
@ -1318,18 +1323,25 @@ def user_authentication_flow(conn):
)

conn.verify_software_token(
AccessToken=result["AuthenticationResult"]["AccessToken"], UserCode="123456",
AccessToken=result["AuthenticationResult"]["AccessToken"],
UserCode="123456",
)

conn.set_user_mfa_preference(
AccessToken=result["AuthenticationResult"]["AccessToken"],
SoftwareTokenMfaSettings={"Enabled": True, "PreferredMfa": True,},
SoftwareTokenMfaSettings={
"Enabled": True,
"PreferredMfa": True,
},
)

result = conn.initiate_auth(
ClientId=client_id,
AuthFlow="REFRESH_TOKEN",
AuthParameters={"SECRET_HASH": secret_hash, "REFRESH_TOKEN": refresh_token,},
AuthParameters={
"SECRET_HASH": secret_hash,
"REFRESH_TOKEN": refresh_token,
},
)

result["AuthenticationResult"]["IdToken"].should_not.be.none
@ -1583,7 +1595,8 @@ def test_sign_up():
conn = boto3.client("cognito-idp", "us-west-2")
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
client_id = conn.create_user_pool_client(
UserPoolId=user_pool_id, ClientName=str(uuid.uuid4()),
UserPoolId=user_pool_id,
ClientName=str(uuid.uuid4()),
)["UserPoolClient"]["ClientId"]
username = str(uuid.uuid4())
password = str(uuid.uuid4())
@ -1599,12 +1612,16 @@ def test_confirm_sign_up():
password = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
client_id = conn.create_user_pool_client(
UserPoolId=user_pool_id, ClientName=str(uuid.uuid4()), GenerateSecret=True,
UserPoolId=user_pool_id,
ClientName=str(uuid.uuid4()),
GenerateSecret=True,
)["UserPoolClient"]["ClientId"]
conn.sign_up(ClientId=client_id, Username=username, Password=password)

conn.confirm_sign_up(
ClientId=client_id, Username=username, ConfirmationCode="123456",
ClientId=client_id,
Username=username,
ConfirmationCode="123456",
)

result = conn.admin_get_user(UserPoolId=user_pool_id, Username=username)
@ -1618,14 +1635,19 @@ def test_initiate_auth_USER_SRP_AUTH():
password = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
client_id = conn.create_user_pool_client(
UserPoolId=user_pool_id, ClientName=str(uuid.uuid4()), GenerateSecret=True,
UserPoolId=user_pool_id,
ClientName=str(uuid.uuid4()),
GenerateSecret=True,
)["UserPoolClient"]["ClientId"]
conn.sign_up(ClientId=client_id, Username=username, Password=password)
client_secret = conn.describe_user_pool_client(
UserPoolId=user_pool_id, ClientId=client_id,
UserPoolId=user_pool_id,
ClientId=client_id,
)["UserPoolClient"]["ClientSecret"]
conn.confirm_sign_up(
ClientId=client_id, Username=username, ConfirmationCode="123456",
ClientId=client_id,
Username=username,
ConfirmationCode="123456",
)

key = bytes(str(client_secret).encode("latin-1"))
@ -1669,11 +1691,14 @@ def test_initiate_auth_for_unconfirmed_user():
password = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
client_id = conn.create_user_pool_client(
UserPoolId=user_pool_id, ClientName=str(uuid.uuid4()), GenerateSecret=True,
UserPoolId=user_pool_id,
ClientName=str(uuid.uuid4()),
GenerateSecret=True,
)["UserPoolClient"]["ClientId"]
conn.sign_up(ClientId=client_id, Username=username, Password=password)
client_secret = conn.describe_user_pool_client(
UserPoolId=user_pool_id, ClientId=client_id,
UserPoolId=user_pool_id,
ClientId=client_id,
)["UserPoolClient"]["ClientSecret"]

key = bytes(str(client_secret).encode("latin-1"))
@ -1705,14 +1730,19 @@ def test_initiate_auth_with_invalid_secret_hash():
password = str(uuid.uuid4())
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
client_id = conn.create_user_pool_client(
UserPoolId=user_pool_id, ClientName=str(uuid.uuid4()), GenerateSecret=True,
UserPoolId=user_pool_id,
ClientName=str(uuid.uuid4()),
GenerateSecret=True,
)["UserPoolClient"]["ClientId"]
conn.sign_up(ClientId=client_id, Username=username, Password=password)
client_secret = conn.describe_user_pool_client(
UserPoolId=user_pool_id, ClientId=client_id,
UserPoolId=user_pool_id,
ClientId=client_id,
)["UserPoolClient"]["ClientSecret"]
conn.confirm_sign_up(
ClientId=client_id, Username=username, ConfirmationCode="123456",
ClientId=client_id,
Username=username,
ConfirmationCode="123456",
)

invalid_secret_hash = str(uuid.uuid4())
@ -76,9 +76,7 @@ def test_put_configuration_recorder():
"recordingGroup": bg,
}
)
assert (
ce.value.response["Error"]["Code"] == "InvalidRecordingGroupException"
)
assert ce.value.response["Error"]["Code"] == "InvalidRecordingGroupException"
assert (
ce.value.response["Error"]["Message"]
== "The recording group provided is not valid"
@ -255,8 +253,7 @@ def test_put_configuration_aggregator():
],
)
assert (
"You must choose one of these options"
in ce.value.response["Error"]["Message"]
"You must choose one of these options" in ce.value.response["Error"]["Message"]
)
assert ce.value.response["Error"]["Code"] == "InvalidParameterValueException"

@ -270,8 +267,7 @@ def test_put_configuration_aggregator():
},
)
assert (
"You must choose one of these options"
in ce.value.response["Error"]["Message"]
"You must choose one of these options" in ce.value.response["Error"]["Message"]
)
assert ce.value.response["Error"]["Code"] == "InvalidParameterValueException"

@ -475,8 +471,7 @@ def test_describe_configuration_aggregators():
in ce.value.response["Error"]["Message"]
)
assert (
ce.value.response["Error"]["Code"]
== "NoSuchConfigurationAggregatorException"
ce.value.response["Error"]["Code"] == "NoSuchConfigurationAggregatorException"
)

# Error describe with more than 1 item in the list:
@ -489,8 +484,7 @@ def test_describe_configuration_aggregators():
in ce.value.response["Error"]["Message"]
)
assert (
ce.value.response["Error"]["Code"]
== "NoSuchConfigurationAggregatorException"
ce.value.response["Error"]["Code"] == "NoSuchConfigurationAggregatorException"
)

# Get the normal list:
@ -553,9 +547,7 @@ def test_describe_configuration_aggregators():
# Test with an invalid filter:
with pytest.raises(ClientError) as ce:
client.describe_configuration_aggregators(NextToken="WRONG")
assert (
"The nextToken provided is invalid" == ce.value.response["Error"]["Message"]
)
assert "The nextToken provided is invalid" == ce.value.response["Error"]["Message"]
assert ce.value.response["Error"]["Code"] == "InvalidNextTokenException"


@ -710,9 +702,7 @@ def test_describe_aggregation_authorizations():
# Test with an invalid filter:
with pytest.raises(ClientError) as ce:
client.describe_aggregation_authorizations(NextToken="WRONG")
assert (
"The nextToken provided is invalid" == ce.value.response["Error"]["Message"]
)
assert "The nextToken provided is invalid" == ce.value.response["Error"]["Message"]
assert ce.value.response["Error"]["Code"] == "InvalidNextTokenException"


@ -758,8 +748,7 @@ def test_delete_configuration_aggregator():
in ce.value.response["Error"]["Message"]
)
assert (
ce.value.response["Error"]["Code"]
== "NoSuchConfigurationAggregatorException"
ce.value.response["Error"]["Code"] == "NoSuchConfigurationAggregatorException"
)


@ -798,9 +787,7 @@ def test_describe_configurations():
# Specify an incorrect name:
with pytest.raises(ClientError) as ce:
client.describe_configuration_recorders(ConfigurationRecorderNames=["wrong"])
assert (
ce.value.response["Error"]["Code"] == "NoSuchConfigurationRecorderException"
)
assert ce.value.response["Error"]["Code"] == "NoSuchConfigurationRecorderException"
assert "wrong" in ce.value.response["Error"]["Message"]

# And with both a good and wrong name:
@ -808,9 +795,7 @@ def test_describe_configurations():
client.describe_configuration_recorders(
ConfigurationRecorderNames=["testrecorder", "wrong"]
)
assert (
ce.value.response["Error"]["Code"] == "NoSuchConfigurationRecorderException"
)
assert ce.value.response["Error"]["Code"] == "NoSuchConfigurationRecorderException"
assert "wrong" in ce.value.response["Error"]["Message"]


@ -847,9 +832,7 @@ def test_delivery_channels():
# Try without a name supplied:
with pytest.raises(ClientError) as ce:
client.put_delivery_channel(DeliveryChannel={})
assert (
ce.value.response["Error"]["Code"] == "InvalidDeliveryChannelNameException"
)
assert ce.value.response["Error"]["Code"] == "InvalidDeliveryChannelNameException"
assert "is not valid, blank string." in ce.value.response["Error"]["Message"]

# Try with a really long name:
@ -1034,9 +1017,7 @@ def test_start_configuration_recorder():
# Without a config recorder:
with pytest.raises(ClientError) as ce:
client.start_configuration_recorder(ConfigurationRecorderName="testrecorder")
assert (
ce.value.response["Error"]["Code"] == "NoSuchConfigurationRecorderException"
)
assert ce.value.response["Error"]["Code"] == "NoSuchConfigurationRecorderException"

# Make the config recorder;
client.put_configuration_recorder(
@ -1054,9 +1035,7 @@ def test_start_configuration_recorder():
# Without a delivery channel:
with pytest.raises(ClientError) as ce:
client.start_configuration_recorder(ConfigurationRecorderName="testrecorder")
assert (
ce.value.response["Error"]["Code"] == "NoAvailableDeliveryChannelException"
)
assert ce.value.response["Error"]["Code"] == "NoAvailableDeliveryChannelException"

# Make the delivery channel:
client.put_delivery_channel(
@ -1092,9 +1071,7 @@ def test_stop_configuration_recorder():
# Without a config recorder:
with pytest.raises(ClientError) as ce:
client.stop_configuration_recorder(ConfigurationRecorderName="testrecorder")
assert (
ce.value.response["Error"]["Code"] == "NoSuchConfigurationRecorderException"
)
assert ce.value.response["Error"]["Code"] == "NoSuchConfigurationRecorderException"

# Make the config recorder;
client.put_configuration_recorder(
@ -1184,9 +1161,7 @@ def test_describe_configuration_recorder_status():
client.describe_configuration_recorder_status(
ConfigurationRecorderNames=["testrecorder", "wrong"]
)
assert (
ce.value.response["Error"]["Code"] == "NoSuchConfigurationRecorderException"
)
assert ce.value.response["Error"]["Code"] == "NoSuchConfigurationRecorderException"
assert "wrong" in ce.value.response["Error"]["Message"]


@ -1213,9 +1188,7 @@ def test_delete_configuration_recorder():
# Try again -- it should be deleted:
with pytest.raises(ClientError) as ce:
client.delete_configuration_recorder(ConfigurationRecorderName="testrecorder")
assert (
ce.value.response["Error"]["Code"] == "NoSuchConfigurationRecorderException"
)
assert ce.value.response["Error"]["Code"] == "NoSuchConfigurationRecorderException"


@mock_config
@ -1243,8 +1216,7 @@ def test_delete_delivery_channel():
with pytest.raises(ClientError) as ce:
client.delete_delivery_channel(DeliveryChannelName="testchannel")
assert (
ce.value.response["Error"]["Code"]
== "LastDeliveryChannelDeleteFailedException"
ce.value.response["Error"]["Code"] == "LastDeliveryChannelDeleteFailedException"
)
assert (
"because there is a running configuration recorder."
@ -1267,7 +1239,7 @@ def test_delete_delivery_channel():
@mock_s3
def test_list_discovered_resource():
"""NOTE: We are only really testing the Config part. For each individual service, please add tests
for that individual service's "list_config_service_resources" function.
for that individual service's "list_config_service_resources" function.
"""
client = boto3.client("config", region_name="us-west-2")

@ -1373,7 +1345,7 @@ def test_list_discovered_resource():
@mock_s3
def test_list_aggregate_discovered_resource():
"""NOTE: We are only really testing the Config part. For each individual service, please add tests
for that individual service's "list_config_service_resources" function.
for that individual service's "list_config_service_resources" function.
"""
client = boto3.client("config", region_name="us-west-2")

@ -1517,7 +1489,7 @@ def test_get_resource_config_history():
@mock_s3
def test_get_resource_config_history():
"""NOTE: We are only really testing the Config part. For each individual service, please add tests
for that individual service's "get_config_resource" function.
for that individual service's "get_config_resource" function.
"""
client = boto3.client("config", region_name="us-west-2")

@ -1576,7 +1548,7 @@ def test_get_resource_config_history():
@mock_s3
def test_batch_get_resource_config():
"""NOTE: We are only really testing the Config part. For each individual service, please add tests
for that individual service's "get_config_resource" function.
for that individual service's "get_config_resource" function.
"""
client = boto3.client("config", region_name="us-west-2")

@ -1640,7 +1612,7 @@ def test_batch_get_resource_config():
@mock_s3
def test_batch_get_aggregate_resource_config():
"""NOTE: We are only really testing the Config part. For each individual service, please add tests
for that individual service's "get_config_resource" function.
for that individual service's "get_config_resource" function.
"""
from moto.config.models import DEFAULT_ACCOUNT_ID

@ -1873,7 +1845,12 @@ def test_put_evaluations():
response["ResponseMetadata"].pop("HTTPHeaders", None)
response["ResponseMetadata"].pop("RetryAttempts", None)
response.should.equal(
{"FailedEvaluations": [], "ResponseMetadata": {"HTTPStatusCode": 200,},}
{
"FailedEvaluations": [],
"ResponseMetadata": {
"HTTPStatusCode": 200,
},
}
)

@ -325,7 +325,9 @@ def test_access_denied_for_run_instances():
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
ex.value.response["Error"]["Message"].should.equal(
"User: arn:aws:iam::{account_id}:user/{user_name} is not authorized to perform: {operation}".format(
account_id=ACCOUNT_ID, user_name=user_name, operation="ec2:RunInstances",
account_id=ACCOUNT_ID,
user_name=user_name,
operation="ec2:RunInstances",
)
)

@ -1347,9 +1347,13 @@ def test_get_item_returns_consumed_capacity():
def test_put_empty_item():
dynamodb = boto3.resource("dynamodb", region_name="us-east-1")
dynamodb.create_table(
AttributeDefinitions=[{"AttributeName": "structure_id", "AttributeType": "S"},],
AttributeDefinitions=[
{"AttributeName": "structure_id", "AttributeType": "S"},
],
TableName="test",
KeySchema=[{"AttributeName": "structure_id", "KeyType": "HASH"},],
KeySchema=[
{"AttributeName": "structure_id", "KeyType": "HASH"},
],
ProvisionedThroughput={"ReadCapacityUnits": 123, "WriteCapacityUnits": 123},
)
table = dynamodb.Table("test")
@ -1366,9 +1370,13 @@ def test_put_empty_item():
def test_put_item_nonexisting_hash_key():
dynamodb = boto3.resource("dynamodb", region_name="us-east-1")
dynamodb.create_table(
AttributeDefinitions=[{"AttributeName": "structure_id", "AttributeType": "S"},],
AttributeDefinitions=[
{"AttributeName": "structure_id", "AttributeType": "S"},
],
TableName="test",
KeySchema=[{"AttributeName": "structure_id", "KeyType": "HASH"},],
KeySchema=[
{"AttributeName": "structure_id", "KeyType": "HASH"},
],
ProvisionedThroughput={"ReadCapacityUnits": 123, "WriteCapacityUnits": 123},
)
table = dynamodb.Table("test")
@ -2287,7 +2295,10 @@ def test_update_item_on_map():
table.update_item(
Key={"forum_name": "the-key", "subject": "123"},
UpdateExpression="SET body.#nested.#data = :tb",
ExpressionAttributeNames={"#nested": "nested", "#data": "data",},
ExpressionAttributeNames={
"#nested": "nested",
"#data": "data",
},
ExpressionAttributeValues={":tb": "new_value"},
)
# Running this against AWS DDB gives an exception so make sure it also fails.:
@ -3951,19 +3962,30 @@ def test_update_supports_nested_update_if_nested_value_not_exists():

table = dynamodb.Table(name)
table.put_item(
Item={"user_id": "1234", "friends": {"5678": {"name": "friend_5678"}},},
Item={
"user_id": "1234",
"friends": {"5678": {"name": "friend_5678"}},
},
)
table.update_item(
Key={"user_id": "1234"},
ExpressionAttributeNames={"#friends": "friends", "#friendid": "0000",},
ExpressionAttributeValues={":friend": {"name": "friend_0000"},},
ExpressionAttributeNames={
"#friends": "friends",
"#friendid": "0000",
},
ExpressionAttributeValues={
":friend": {"name": "friend_0000"},
},
UpdateExpression="SET #friends.#friendid = :friend",
ReturnValues="UPDATED_NEW",
)
item = table.get_item(Key={"user_id": "1234"})["Item"]
assert item == {
"user_id": "1234",
"friends": {"5678": {"name": "friend_5678"}, "0000": {"name": "friend_0000"},},
"friends": {
"5678": {"name": "friend_5678"},
"0000": {"name": "friend_0000"},
},
}

@ -4057,9 +4079,7 @@ def test_update_catches_invalid_list_append_operation():

# Verify correct error is returned
str(ex.value).should.match("Parameter validation failed:")
str(ex.value).should.match(
"Invalid type for parameter ExpressionAttributeValues."
)
str(ex.value).should.match("Invalid type for parameter ExpressionAttributeValues.")


def _create_user_table():
@ -4188,11 +4208,17 @@ def test_invalid_transact_get_items():
)
table = dynamodb.Table("test1")
table.put_item(
Item={"id": "1", "val": "1",}
Item={
"id": "1",
"val": "1",
}
)

table.put_item(
Item={"id": "1", "val": "2",}
Item={
"id": "1",
"val": "2",
}
)

client = boto3.client("dynamodb", region_name="us-east-1")
@ -4214,16 +4240,28 @@ def test_invalid_transact_get_items():
with pytest.raises(ClientError) as ex:
client.transact_get_items(
TransactItems=[
{"Get": {"Key": {"id": {"S": "1"},}, "TableName": "test1"}},
{"Get": {"Key": {"id": {"S": "1"},}, "TableName": "non_exists_table"}},
{
"Get": {
"Key": {
"id": {"S": "1"},
},
"TableName": "test1",
}
},
{
"Get": {
"Key": {
"id": {"S": "1"},
},
"TableName": "non_exists_table",
}
},
]
)

ex.value.response["Error"]["Code"].should.equal("ResourceNotFoundException")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"Requested resource not found"
)
ex.value.response["Error"]["Message"].should.equal("Requested resource not found")


@mock_dynamodb2
@ -4243,11 +4281,17 @@ def test_valid_transact_get_items():
)
table1 = dynamodb.Table("test1")
table1.put_item(
Item={"id": "1", "sort_key": "1",}
Item={
"id": "1",
"sort_key": "1",
}
)

table1.put_item(
Item={"id": "1", "sort_key": "2",}
Item={
"id": "1",
"sort_key": "2",
}
)

dynamodb.create_table(
@ -4264,7 +4308,10 @@ def test_valid_transact_get_items():
)
table2 = dynamodb.Table("test2")
table2.put_item(
Item={"id": "1", "sort_key": "1",}
Item={
"id": "1",
"sort_key": "1",
}
)

client = boto3.client("dynamodb", region_name="us-east-1")
@ -4378,7 +4425,10 @@ def test_valid_transact_get_items():
"TableName": "test1",
"CapacityUnits": 4.0,
"ReadCapacityUnits": 4.0,
"Table": {"CapacityUnits": 4.0, "ReadCapacityUnits": 4.0,},
"Table": {
"CapacityUnits": 4.0,
"ReadCapacityUnits": 4.0,
},
}
)

@ -4387,7 +4437,10 @@ def test_valid_transact_get_items():
"TableName": "test2",
"CapacityUnits": 2.0,
"ReadCapacityUnits": 2.0,
"Table": {"CapacityUnits": 2.0, "ReadCapacityUnits": 2.0,},
"Table": {
"CapacityUnits": 2.0,
"ReadCapacityUnits": 2.0,
},
}
)

@ -4403,7 +4456,9 @@ def test_gsi_verify_negative_number_order():
{"AttributeName": "gsiK1PartitionKey", "KeyType": "HASH"},
{"AttributeName": "gsiK1SortKey", "KeyType": "RANGE"},
],
"Projection": {"ProjectionType": "KEYS_ONLY",},
"Projection": {
"ProjectionType": "KEYS_ONLY",
},
}
],
"AttributeDefinitions": [
@ -4454,7 +4509,9 @@ def test_gsi_verify_negative_number_order():
def test_transact_write_items_put():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
"AttributeDefinitions": [
{"AttributeName": "id", "AttributeType": "S"},
],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
@ -4465,7 +4522,10 @@ def test_transact_write_items_put():
TransactItems=[
{
"Put": {
"Item": {"id": {"S": "foo{}".format(str(i))}, "foo": {"S": "bar"},},
"Item": {
"id": {"S": "foo{}".format(str(i))},
"foo": {"S": "bar"},
},
"TableName": "test-table",
}
}
@ -4481,14 +4541,19 @@ def test_transact_write_items_put_conditional_expressions():
def test_transact_write_items_put_conditional_expressions():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
"AttributeDefinitions": [
{"AttributeName": "id", "AttributeType": "S"},
],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
TableName="test-table", BillingMode="PAY_PER_REQUEST", **table_schema
)
dynamodb.put_item(
TableName="test-table", Item={"id": {"S": "foo2"},},
TableName="test-table",
Item={
"id": {"S": "foo2"},
},
)
# Put multiple items
with pytest.raises(ClientError) as ex:
@ -4526,7 +4591,9 @@ def test_transact_write_items_put_conditional_expressions():
def test_transact_write_items_conditioncheck_passes():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
"AttributeDefinitions": [
{"AttributeName": "id", "AttributeType": "S"},
],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
@ -4534,7 +4601,10 @@ def test_transact_write_items_conditioncheck_passes():
)
# Insert an item without email address
dynamodb.put_item(
TableName="test-table", Item={"id": {"S": "foo"},},
TableName="test-table",
Item={
"id": {"S": "foo"},
},
)
# Put an email address, after verifying it doesn't exist yet
dynamodb.transact_write_items(
@ -4568,7 +4638,9 @@ def test_transact_write_items_conditioncheck_passes():
def test_transact_write_items_conditioncheck_fails():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
"AttributeDefinitions": [
{"AttributeName": "id", "AttributeType": "S"},
],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
@ -4617,7 +4689,9 @@ def test_transact_write_items_conditioncheck_fails():
def test_transact_write_items_delete():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
"AttributeDefinitions": [
{"AttributeName": "id", "AttributeType": "S"},
],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
@ -4625,12 +4699,20 @@ def test_transact_write_items_delete():
)
# Insert an item
dynamodb.put_item(
TableName="test-table", Item={"id": {"S": "foo"},},
TableName="test-table",
Item={
"id": {"S": "foo"},
},
)
# Delete the item
dynamodb.transact_write_items(
TransactItems=[
{"Delete": {"Key": {"id": {"S": "foo"}}, "TableName": "test-table",}}
{
"Delete": {
"Key": {"id": {"S": "foo"}},
"TableName": "test-table",
}
}
]
)
# Assert the item is deleted
@ -4642,7 +4724,9 @@ def test_transact_write_items_delete():
def test_transact_write_items_delete_with_successful_condition_expression():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
"AttributeDefinitions": [
{"AttributeName": "id", "AttributeType": "S"},
],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
@ -4650,14 +4734,19 @@ def test_transact_write_items_delete_with_successful_condition_expression():
)
# Insert an item without email address
dynamodb.put_item(
TableName="test-table", Item={"id": {"S": "foo"},},
TableName="test-table",
Item={
"id": {"S": "foo"},
},
)
# ConditionExpression will pass - no email address has been specified yet
dynamodb.transact_write_items(
TransactItems=[
{
"Delete": {
"Key": {"id": {"S": "foo"},},
"Key": {
"id": {"S": "foo"},
},
"TableName": "test-table",
"ConditionExpression": "attribute_not_exists(#e)",
"ExpressionAttributeNames": {"#e": "email_address"},
@ -4674,7 +4763,9 @@ def test_transact_write_items_delete_with_failed_condition_expression():
def test_transact_write_items_delete_with_failed_condition_expression():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
"AttributeDefinitions": [
{"AttributeName": "id", "AttributeType": "S"},
],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
@ -4692,7 +4783,9 @@ def test_transact_write_items_delete_with_failed_condition_expression():
TransactItems=[
{
"Delete": {
"Key": {"id": {"S": "foo"},},
"Key": {
"id": {"S": "foo"},
},
"TableName": "test-table",
"ConditionExpression": "attribute_not_exists(#e)",
"ExpressionAttributeNames": {"#e": "email_address"},
@ -4713,7 +4806,9 @@ def test_transact_write_items_delete_with_failed_condition_expression():
def test_transact_write_items_update():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
"AttributeDefinitions": [
{"AttributeName": "id", "AttributeType": "S"},
],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
@ -4745,7 +4840,9 @@ def test_transact_write_items_update():
def test_transact_write_items_update_with_failed_condition_expression():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
"AttributeDefinitions": [
{"AttributeName": "id", "AttributeType": "S"},
],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
@ -4935,12 +5032,18 @@ def create_simple_table_and_return_client():
dynamodb.create_table(
TableName="moto-test",
KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"},],
AttributeDefinitions=[
{"AttributeName": "id", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1},
)
dynamodb.put_item(
TableName="moto-test",
Item={"id": {"S": "1"}, "myNum": {"N": "1"}, "MyStr": {"S": "1"},},
Item={
"id": {"S": "1"},
"myNum": {"N": "1"},
"MyStr": {"S": "1"},
},
)
return dynamodb

@ -5004,7 +5107,11 @@ def test_update_expression_with_plus_in_attribute_name():

dynamodb.put_item(
TableName="moto-test",
Item={"id": {"S": "1"}, "my+Num": {"S": "1"}, "MyStr": {"S": "aaa"},},
Item={
"id": {"S": "1"},
"my+Num": {"S": "1"},
"MyStr": {"S": "aaa"},
},
)
try:
dynamodb.update_item(
@ -5031,7 +5138,11 @@ def test_update_expression_with_minus_in_attribute_name():

dynamodb.put_item(
TableName="moto-test",
Item={"id": {"S": "1"}, "my-Num": {"S": "1"}, "MyStr": {"S": "aaa"},},
Item={
"id": {"S": "1"},
"my-Num": {"S": "1"},
"MyStr": {"S": "aaa"},
},
)
try:
dynamodb.update_item(
@ -5058,7 +5169,11 @@ def test_update_expression_with_space_in_attribute_name():

dynamodb.put_item(
TableName="moto-test",
Item={"id": {"S": "1"}, "my Num": {"S": "1"}, "MyStr": {"S": "aaa"},},
Item={
"id": {"S": "1"},
"my Num": {"S": "1"},
"MyStr": {"S": "aaa"},
},
)

try:
@ -5241,7 +5356,8 @@ def test_update_item_atomic_counter_from_zero():
key = {"t_id": {"S": "item1"}}

ddb_mock.put_item(
TableName=table, Item=key,
TableName=table,
Item=key,
)

ddb_mock.update_item(
@ -5267,7 +5383,8 @@ def test_update_item_add_to_non_existent_set():
)
key = {"t_id": {"S": "item1"}}
ddb_mock.put_item(
TableName=table, Item=key,
TableName=table,
Item=key,
)

ddb_mock.update_item(
@ -5292,7 +5409,8 @@ def test_update_item_add_to_non_existent_number_set():
)
key = {"t_id": {"S": "item1"}}
ddb_mock.put_item(
TableName=table, Item=key,
TableName=table,
Item=key,
)

ddb_mock.update_item(
@ -5309,7 +5427,9 @@ def test_update_item_add_to_non_existent_number_set():
def test_transact_write_items_fails_with_transaction_canceled_exception():
table_schema = {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [{"AttributeName": "id", "AttributeType": "S"},],
"AttributeDefinitions": [
{"AttributeName": "id", "AttributeType": "S"},
],
}
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
dynamodb.create_table(
@ -5361,7 +5481,9 @@ def test_gsi_projection_type_keys_only():
{"AttributeName": "gsiK1PartitionKey", "KeyType": "HASH"},
{"AttributeName": "gsiK1SortKey", "KeyType": "RANGE"},
],
"Projection": {"ProjectionType": "KEYS_ONLY",},
"Projection": {
"ProjectionType": "KEYS_ONLY",
},
}
],
"AttributeDefinitions": [
@ -5414,7 +5536,9 @@ def test_lsi_projection_type_keys_only():
{"AttributeName": "partitionKey", "KeyType": "HASH"},
{"AttributeName": "lsiK1SortKey", "KeyType": "RANGE"},
],
"Projection": {"ProjectionType": "KEYS_ONLY",},
"Projection": {
"ProjectionType": "KEYS_ONLY",
},
}
],
"AttributeDefinitions": [
@ -5439,7 +5563,8 @@ def test_lsi_projection_type_keys_only():
table.put_item(Item=item)

items = table.query(
KeyConditionExpression=Key("partitionKey").eq("pk-1"), IndexName="LSI",
KeyConditionExpression=Key("partitionKey").eq("pk-1"),
IndexName="LSI",
)["Items"]
items.should.have.length_of(1)
# Item should only include GSI Keys and Table Keys, as per the ProjectionType

@ -211,7 +211,11 @@ def test_execution_of_remove_in_map():
"itemlist": {
"L": [
{"M": {"foo00": {"S": "bar1"}, "foo01": {"S": "bar2"}}},
{"M": {"foo10": {"S": "bar1"},}},
{
"M": {
"foo10": {"S": "bar1"},
}
},
]
}
}
@ -260,7 +264,9 @@ def test_execution_of_remove_in_list():
"itemmap": {
"M": {
"itemlist": {
"L": [{"M": {"foo00": {"S": "bar1"}, "foo01": {"S": "bar2"}}},]
"L": [
{"M": {"foo00": {"S": "bar1"}, "foo01": {"S": "bar2"}}},
]
}
}
},
@ -277,7 +283,10 @@ def test_execution_of_delete_element_from_set():
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "foo2"}, "s": {"SS": ["value1", "value2", "value3"]},},
attrs={
"id": {"S": "foo2"},
"s": {"SS": ["value1", "value2", "value3"]},
},
)
validated_ast = UpdateExpressionValidator(
update_expression_ast,
@ -291,7 +300,10 @@ def test_execution_of_delete_element_from_set():
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "foo2"}, "s": {"SS": ["value1", "value3"]},},
attrs={
"id": {"S": "foo2"},
"s": {"SS": ["value1", "value3"]},
},
)
assert expected_item == item

@ -304,7 +316,10 @@ def test_execution_of_add_number():
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "foo2"}, "s": {"N": "5"},},
attrs={
"id": {"S": "foo2"},
"s": {"N": "5"},
},
)
validated_ast = UpdateExpressionValidator(
update_expression_ast,
@ -331,7 +346,10 @@ def test_execution_of_add_set_to_a_number():
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "foo2"}, "s": {"N": "5"},},
attrs={
"id": {"S": "foo2"},
"s": {"N": "5"},
},
)
try:
validated_ast = UpdateExpressionValidator(
@ -362,7 +380,10 @@ def test_execution_of_add_to_a_set():
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "foo2"}, "s": {"SS": ["value1", "value2", "value3"]},},
attrs={
"id": {"S": "foo2"},
"s": {"SS": ["value1", "value2", "value3"]},
},
)
validated_ast = UpdateExpressionValidator(
update_expression_ast,
@ -386,13 +407,34 @@ def test_execution_of_add_to_a_set():

@parameterized(
[
({":value": {"S": "10"}}, "STRING",),
({":value": {"N": "10"}}, "NUMBER",),
({":value": {"B": "10"}}, "BINARY",),
({":value": {"BOOL": True}}, "BOOLEAN",),
({":value": {"NULL": True}}, "NULL",),
({":value": {"M": {"el0": {"S": "10"}}}}, "MAP",),
({":value": {"L": []}}, "LIST",),
(
{":value": {"S": "10"}},
"STRING",
),
(
{":value": {"N": "10"}},
"NUMBER",
),
(
{":value": {"B": "10"}},
"BINARY",
),
(
{":value": {"BOOL": True}},
"BOOLEAN",
),
(
{":value": {"NULL": True}},
"NULL",
),
(
{":value": {"M": {"el0": {"S": "10"}}}},
"MAP",
),
(
{":value": {"L": []}},
"LIST",
),
]
)
def test_execution_of__delete_element_from_set_invalid_value(
@ -406,7 +448,10 @@ def test_execution_of__delete_element_from_set_invalid_value(
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "foo2"}, "s": {"SS": ["value1", "value2", "value3"]},},
attrs={
"id": {"S": "foo2"},
"s": {"SS": ["value1", "value2", "value3"]},
},
)
try:
validated_ast = UpdateExpressionValidator(
@ -431,7 +476,10 @@ def test_execution_of_delete_element_from_a_string_attribute():
hash_key_type="TYPE",
range_key=None,
range_key_type=None,
attrs={"id": {"S": "foo2"}, "s": {"S": "5"},},
attrs={
"id": {"S": "foo2"},
"s": {"S": "5"},
},
)
try:
validated_ast = UpdateExpressionValidator(

@ -42,7 +42,10 @@ def test_validation_of_update_expression_with_keyword():
|
||||
|
||||
|
||||
@parameterized(
|
||||
["SET a = #b + :val2", "SET a = :val2 + #b",]
|
||||
[
|
||||
"SET a = #b + :val2",
|
||||
"SET a = :val2 + #b",
|
||||
]
|
||||
)
|
||||
def test_validation_of_a_set_statement_with_incorrect_passed_value(update_expression):
|
||||
"""
|
||||
@ -99,7 +102,10 @@ def test_validation_of_update_expression_with_attribute_that_does_not_exist_in_i
|
||||
|
||||
|
||||
@parameterized(
|
||||
["SET a = #c", "SET a = #c + #d",]
|
||||
[
|
||||
"SET a = #c",
|
||||
"SET a = #c + #d",
|
||||
]
|
||||
)
|
||||
def test_validation_of_update_expression_with_attribute_name_that_is_not_defined(
|
||||
update_expression,
|
||||
|
@ -616,9 +616,9 @@ def test_ami_describe_executable_users_and_filter():
|
||||
@mock_ec2_deprecated
|
||||
def test_ami_attribute_user_and_group_permissions():
|
||||
"""
|
||||
Boto supports adding/removing both users and groups at the same time.
|
||||
Just spot-check this -- input variations, idempotency, etc are validated
|
||||
via user-specific and group-specific tests above.
|
||||
Boto supports adding/removing both users and groups at the same time.
|
||||
Just spot-check this -- input variations, idempotency, etc are validated
|
||||
via user-specific and group-specific tests above.
|
||||
"""
|
||||
conn = boto.connect_ec2("the_key", "the_secret")
|
||||
reservation = conn.run_instances("ami-1234abcd")
|
||||
|
@ -144,7 +144,9 @@ def test_create_flow_log_create():
|
||||
|
||||
bucket = s3.create_bucket(
|
||||
Bucket="test-flow-logs",
|
||||
CreateBucketConfiguration={"LocationConstraint": "us-west-1",},
|
||||
CreateBucketConfiguration={
|
||||
"LocationConstraint": "us-west-1",
|
||||
},
|
||||
)
|
||||
|
||||
response = client.create_flow_logs(
|
||||
|
@ -211,16 +211,16 @@ def test_instance_detach_volume_wrong_path():
|
||||
ImageId="ami-d3adb33f",
|
||||
MinCount=1,
|
||||
MaxCount=1,
|
||||
BlockDeviceMappings=[{"DeviceName": "/dev/sda1", "Ebs": {"VolumeSize": 50}},],
|
||||
BlockDeviceMappings=[
|
||||
{"DeviceName": "/dev/sda1", "Ebs": {"VolumeSize": 50}},
|
||||
],
|
||||
)
|
||||
instance = result[0]
|
||||
for volume in instance.volumes.all():
|
||||
with pytest.raises(ClientError) as ex:
|
||||
instance.detach_volume(VolumeId=volume.volume_id, Device="/dev/sdf")
|
||||
|
||||
ex.value.response["Error"]["Code"].should.equal(
|
||||
"InvalidAttachment.NotFound"
|
||||
)
|
||||
ex.value.response["Error"]["Code"].should.equal("InvalidAttachment.NotFound")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"The volume {0} is not attached to instance {1} as device {2}".format(
|
||||
@ -1585,7 +1585,9 @@ def test_create_instance_ebs_optimized():
|
||||
instance.ebs_optimized.should.be(False)
|
||||
|
||||
instance = ec2_resource.create_instances(
|
||||
ImageId="ami-12345678", MaxCount=1, MinCount=1,
|
||||
ImageId="ami-12345678",
|
||||
MaxCount=1,
|
||||
MinCount=1,
|
||||
)[0]
|
||||
instance.load()
|
||||
instance.ebs_optimized.should.be(False)
|
||||
|
@ -235,8 +235,8 @@ def test_route_table_associations():
|
||||
@mock_ec2_deprecated
|
||||
def test_route_table_replace_route_table_association():
|
||||
"""
|
||||
Note: Boto has deprecated replace_route_table_association (which returns status)
|
||||
and now uses replace_route_table_association_with_assoc (which returns association ID).
|
||||
Note: Boto has deprecated replace_route_table_association (which returns status)
|
||||
and now uses replace_route_table_association_with_assoc (which returns association ID).
|
||||
"""
|
||||
conn = boto.connect_vpc("the_key", "the_secret")
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
|
@ -661,7 +661,11 @@ def test_run_instances_should_attach_to_default_subnet():
|
||||
client = boto3.client("ec2", region_name="us-west-1")
|
||||
ec2.create_security_group(GroupName="sg01", Description="Test security group sg01")
|
||||
# run_instances
|
||||
instances = client.run_instances(MinCount=1, MaxCount=1, SecurityGroups=["sg01"],)
|
||||
instances = client.run_instances(
|
||||
MinCount=1,
|
||||
MaxCount=1,
|
||||
SecurityGroups=["sg01"],
|
||||
)
|
||||
# Assert subnet is created appropriately
|
||||
subnets = client.describe_subnets()["Subnets"]
|
||||
default_subnet_id = subnets[0]["SubnetId"]
|
||||
|
@ -60,7 +60,9 @@ def test_create_vpn_connection_with_vpn_gateway():
|
||||
|
||||
vpn_gateway = client.create_vpn_gateway(Type="ipsec.1").get("VpnGateway", {})
|
||||
customer_gateway = client.create_customer_gateway(
|
||||
Type="ipsec.1", PublicIp="205.251.242.54", BgpAsn=65534,
|
||||
Type="ipsec.1",
|
||||
PublicIp="205.251.242.54",
|
||||
BgpAsn=65534,
|
||||
).get("CustomerGateway", {})
|
||||
vpn_connection = client.create_vpn_connection(
|
||||
Type="ipsec.1",
|
||||
|
@ -2531,7 +2531,9 @@ def test_describe_task_sets():
|
||||
assert "tags" not in task_sets[0]
|
||||
|
||||
task_sets = client.describe_task_sets(
|
||||
cluster=cluster_name, service=service_name, include=["TAGS"],
|
||||
cluster=cluster_name,
|
||||
service=service_name,
|
||||
include=["TAGS"],
|
||||
)["taskSets"]
|
||||
|
||||
cluster_arn = client.describe_clusters(clusters=[cluster_name])["clusters"][0][
|
||||
@ -2591,29 +2593,39 @@ def test_delete_task_set():
|
||||
)
|
||||
|
||||
task_set = client.create_task_set(
|
        cluster=cluster_name, service=service_name, taskDefinition=task_def_name,
        cluster=cluster_name,
        service=service_name,
        taskDefinition=task_def_name,
    )["taskSet"]

    task_sets = client.describe_task_sets(
        cluster=cluster_name, service=service_name, taskSets=[task_set["taskSetArn"]],
        cluster=cluster_name,
        service=service_name,
        taskSets=[task_set["taskSetArn"]],
    )["taskSets"]

    assert len(task_sets) == 1

    response = client.delete_task_set(
        cluster=cluster_name, service=service_name, taskSet=task_set["taskSetArn"],
        cluster=cluster_name,
        service=service_name,
        taskSet=task_set["taskSetArn"],
    )
    assert response["taskSet"]["taskSetArn"] == task_set["taskSetArn"]

    task_sets = client.describe_task_sets(
        cluster=cluster_name, service=service_name, taskSets=[task_set["taskSetArn"]],
        cluster=cluster_name,
        service=service_name,
        taskSets=[task_set["taskSetArn"]],
    )["taskSets"]

    assert len(task_sets) == 0

    with pytest.raises(ClientError):
        _ = client.delete_task_set(
            cluster=cluster_name, service=service_name, taskSet=task_set["taskSetArn"],
            cluster=cluster_name,
            service=service_name,
            taskSet=task_set["taskSetArn"],
        )

@ -2649,7 +2661,9 @@ def test_update_service_primary_task_set():
    )

    task_set = client.create_task_set(
        cluster=cluster_name, service=service_name, taskDefinition=task_def_name,
        cluster=cluster_name,
        service=service_name,
        taskDefinition=task_def_name,
    )["taskSet"]

    service = client.describe_services(cluster=cluster_name, services=[service_name],)[
@ -2669,7 +2683,9 @@ def test_update_service_primary_task_set():
    assert service["taskDefinition"] == service["taskSets"][0]["taskDefinition"]

    another_task_set = client.create_task_set(
        cluster=cluster_name, service=service_name, taskDefinition=task_def_name,
        cluster=cluster_name,
        service=service_name,
        taskDefinition=task_def_name,
    )["taskSet"]
    service = client.describe_services(cluster=cluster_name, services=[service_name],)[
        "services"
@ -2721,11 +2737,15 @@ def test_update_task_set():
    )

    task_set = client.create_task_set(
        cluster=cluster_name, service=service_name, taskDefinition=task_def_name,
        cluster=cluster_name,
        service=service_name,
        taskDefinition=task_def_name,
    )["taskSet"]

    another_task_set = client.create_task_set(
        cluster=cluster_name, service=service_name, taskDefinition=task_def_name,
        cluster=cluster_name,
        service=service_name,
        taskDefinition=task_def_name,
    )["taskSet"]
    assert another_task_set["scale"]["unit"] == "PERCENT"
    assert another_task_set["scale"]["value"] == 100.0
@ -2738,7 +2758,9 @@ def test_update_task_set():
    )

    updated_task_set = client.describe_task_sets(
        cluster=cluster_name, service=service_name, taskSets=[task_set["taskSetArn"]],
        cluster=cluster_name,
        service=service_name,
        taskSets=[task_set["taskSetArn"]],
    )["taskSets"][0]
    assert updated_task_set["scale"]["value"] == 25.0
    assert updated_task_set["scale"]["unit"] == "PERCENT"
@ -2784,11 +2806,13 @@ def test_list_tasks_with_filters():
    }

    _ = ecs.register_task_definition(
        family="test_task_def_1", containerDefinitions=[test_container_def],
        family="test_task_def_1",
        containerDefinitions=[test_container_def],
    )

    _ = ecs.register_task_definition(
        family="test_task_def_2", containerDefinitions=[test_container_def],
        family="test_task_def_2",
        containerDefinitions=[test_container_def],
    )

    _ = ecs.start_task(
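Most of the hunks above (and below) come from a single Black rule, the "magic trailing comma": these tests already had a trailing comma inside the call parentheses, and when Black sees one it refuses to collapse the call onto one line, exploding it to one argument per line instead. A minimal sketch of that behavior, assuming the black package is installed (black.format_str is its public programmatic entry point; the commented output is what default 88-column mode produces):

    import black

    # A trailing comma forces the exploded layout, even though the call would fit:
    print(black.format_str("f(a, b, c,)\n", mode=black.FileMode()))
    # f(
    #     a,
    #     b,
    #     c,
    # )

    # Without the trailing comma, the short call stays on one line:
    print(black.format_str("f(a, b, c)\n", mode=black.FileMode()))
    # f(a, b, c)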
@ -9,24 +9,30 @@ from moto import mock_elasticbeanstalk
def test_create_application():
    # Create Elastic Beanstalk Application
    conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
    app = conn.create_application(ApplicationName="myapp",)
    app = conn.create_application(
        ApplicationName="myapp",
    )
    app["Application"]["ApplicationName"].should.equal("myapp")


@mock_elasticbeanstalk
def test_create_application_dup():
    conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
    conn.create_application(ApplicationName="myapp",)
    conn.create_application.when.called_with(ApplicationName="myapp",).should.throw(
        ClientError
    conn.create_application(
        ApplicationName="myapp",
    )
    conn.create_application.when.called_with(
        ApplicationName="myapp",
    ).should.throw(ClientError)


@mock_elasticbeanstalk
def test_describe_applications():
    # Create Elastic Beanstalk Application
    conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
    conn.create_application(ApplicationName="myapp",)
    conn.create_application(
        ApplicationName="myapp",
    )

    apps = conn.describe_applications()
    len(apps["Applications"]).should.equal(1)
@ -37,8 +43,13 @@ def test_describe_applications():
def test_create_environment():
    # Create Elastic Beanstalk Environment
    conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
    app = conn.create_application(ApplicationName="myapp",)
    env = conn.create_environment(ApplicationName="myapp", EnvironmentName="myenv",)
    app = conn.create_application(
        ApplicationName="myapp",
    )
    env = conn.create_environment(
        ApplicationName="myapp",
        EnvironmentName="myenv",
    )
    env["EnvironmentName"].should.equal("myenv")


@ -46,9 +57,12 @@ def test_create_environment():
def test_describe_environments():
    # List Elastic Beanstalk Envs
    conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
    conn.create_application(ApplicationName="myapp",)
    conn.create_application(
        ApplicationName="myapp",
    )
    conn.create_environment(
        ApplicationName="myapp", EnvironmentName="myenv",
        ApplicationName="myapp",
        EnvironmentName="myenv",
    )

    envs = conn.describe_environments()
@ -75,7 +89,9 @@ def tags_list_to_dict(tag_list):
@mock_elasticbeanstalk
def test_create_environment_tags():
    conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
    conn.create_application(ApplicationName="myapp",)
    conn.create_application(
        ApplicationName="myapp",
    )
    env_tags = {"initial key": "initial value"}
    env = conn.create_environment(
        ApplicationName="myapp",
@ -83,7 +99,9 @@ def test_create_environment_tags():
        Tags=tags_dict_to_list(env_tags),
    )

    tags = conn.list_tags_for_resource(ResourceArn=env["EnvironmentArn"],)
    tags = conn.list_tags_for_resource(
        ResourceArn=env["EnvironmentArn"],
    )
    tags["ResourceArn"].should.equal(env["EnvironmentArn"])
    tags_list_to_dict(tags["ResourceTags"]).should.equal(env_tags)

@ -91,7 +109,9 @@ def test_create_environment_tags():
@mock_elasticbeanstalk
def test_update_tags():
    conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
    conn.create_application(ApplicationName="myapp",)
    conn.create_application(
        ApplicationName="myapp",
    )
    env_tags = {
        "initial key": "initial value",
        "to remove": "delete me",
@ -117,7 +137,9 @@ def test_update_tags():
    total_env_tags.update(extra_env_tags)
    del total_env_tags["to remove"]

    tags = conn.list_tags_for_resource(ResourceArn=env["EnvironmentArn"],)
    tags = conn.list_tags_for_resource(
        ResourceArn=env["EnvironmentArn"],
    )
    tags["ResourceArn"].should.equal(env["EnvironmentArn"])
    tags_list_to_dict(tags["ResourceTags"]).should.equal(total_env_tags)

@ -184,8 +184,9 @@ def test_apply_security_groups_to_load_balancer():
    response = client.apply_security_groups_to_load_balancer(
        LoadBalancerName="my-lb", SecurityGroups=["not-really-a-security-group"]
    )
    assert "One or more of the specified security groups do not exist." \
        in str(error.value)
    assert "One or more of the specified security groups do not exist." in str(
        error.value
    )


@mock_elb_deprecated

@ -524,8 +524,10 @@ def test_run_job_flow_with_instance_groups_with_autoscaling():
        if "AutoScalingPolicy" in y:
            x["AutoScalingPolicy"]["Status"]["State"].should.equal("ATTACHED")
            returned_policy = deepcopy(x["AutoScalingPolicy"])
            auto_scaling_policy_with_cluster_id = _patch_cluster_id_placeholder_in_autoscaling_policy(
                y["AutoScalingPolicy"], cluster_id
            auto_scaling_policy_with_cluster_id = (
                _patch_cluster_id_placeholder_in_autoscaling_policy(
                    y["AutoScalingPolicy"], cluster_id
                )
            )
            del returned_policy["Status"]
            returned_policy.should.equal(auto_scaling_policy_with_cluster_id)
@ -551,8 +553,10 @@ def test_put_remove_auto_scaling_policy():
        AutoScalingPolicy=auto_scaling_policy,
    )

    auto_scaling_policy_with_cluster_id = _patch_cluster_id_placeholder_in_autoscaling_policy(
        auto_scaling_policy, cluster_id
    auto_scaling_policy_with_cluster_id = (
        _patch_cluster_id_placeholder_in_autoscaling_policy(
            auto_scaling_policy, cluster_id
        )
    )
    del resp["AutoScalingPolicy"]["Status"]
    resp["AutoScalingPolicy"].should.equal(auto_scaling_policy_with_cluster_id)
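The EMR hunks just above show a second Black pattern: when the assignment target plus the call name no longer fit in 88 columns, splitting at the call's own parentheses would still leave an overlong first line, so Black wraps the whole right-hand side in a new pair of parentheses. A sketch under the same assumption that black is installed; the long helper name is taken from the hunks above:

    import black

    src = (
        "auto_scaling_policy_with_cluster_id = "
        "_patch_cluster_id_placeholder_in_autoscaling_policy(policy, cluster_id)\n"
    )
    print(black.format_str(src, mode=black.FileMode()))
    # auto_scaling_policy_with_cluster_id = (
    #     _patch_cluster_id_placeholder_in_autoscaling_policy(policy, cluster_id)
    # )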
@ -223,9 +223,7 @@ def test_get_table_not_exits():
        helpers.get_table(client, database_name, "myfirsttable")

    exc.value.response["Error"]["Code"].should.equal("EntityNotFoundException")
    exc.value.response["Error"]["Message"].should.match(
        "Table myfirsttable not found"
    )
    exc.value.response["Error"]["Message"].should.match("Table myfirsttable not found")


@mock_glue
@ -207,7 +207,9 @@ def test_remove_role_from_instance_profile():
def test_delete_instance_profile():
    conn = boto3.client("iam", region_name="us-east-1")
    conn.create_role(
        RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/"
        RoleName="my-role",
        AssumeRolePolicyDocument="some policy",
        Path="/my-path/",
    )
    conn.create_instance_profile(InstanceProfileName="my-profile")
    conn.add_role_to_instance_profile(
@ -257,7 +259,9 @@ def test_delete_role():

    # Test deletion failure with a managed policy
    conn.create_role(
        RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/"
        RoleName="my-role",
        AssumeRolePolicyDocument="some policy",
        Path="/my-path/",
    )
    response = conn.create_policy(
        PolicyName="my-managed-policy", PolicyDocument=MOCK_POLICY
@ -273,10 +277,14 @@ def test_delete_role():

    # Test deletion failure with an inline policy
    conn.create_role(
        RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/"
        RoleName="my-role",
        AssumeRolePolicyDocument="some policy",
        Path="/my-path/",
    )
    conn.put_role_policy(
        RoleName="my-role", PolicyName="my-role-policy", PolicyDocument=MOCK_POLICY
        RoleName="my-role",
        PolicyName="my-role-policy",
        PolicyDocument=MOCK_POLICY,
    )
    with pytest.raises(conn.exceptions.DeleteConflictException):
        conn.delete_role(RoleName="my-role")
@ -287,7 +295,9 @@ def test_delete_role():

    # Test deletion failure with attachment to an instance profile
    conn.create_role(
        RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/"
        RoleName="my-role",
        AssumeRolePolicyDocument="some policy",
        Path="/my-path/",
    )
    conn.create_instance_profile(InstanceProfileName="my-profile")
    conn.add_role_to_instance_profile(
@ -304,7 +314,9 @@ def test_delete_role():

    # Test deletion with no conflicts
    conn.create_role(
        RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/"
        RoleName="my-role",
        AssumeRolePolicyDocument="some policy",
        Path="/my-path/",
    )
    conn.delete_role(RoleName="my-role")
    with pytest.raises(conn.exceptions.NoSuchEntityException):
@ -331,7 +343,9 @@ def test_list_instance_profiles_for_role():
    conn = boto.connect_iam()

    conn.create_role(
        role_name="my-role", assume_role_policy_document="some policy", path="my-path"
        role_name="my-role",
        assume_role_policy_document="some policy",
        path="my-path",
    )
    conn.create_role(
        role_name="my-role2",
@ -343,7 +357,8 @@ def test_list_instance_profiles_for_role():
    profile_path_list = ["my-path", "my-path2"]
    for profile_count in range(0, 2):
        conn.create_instance_profile(
            profile_name_list[profile_count], path=profile_path_list[profile_count]
            profile_name_list[profile_count],
            path=profile_path_list[profile_count],
        )

    for profile_count in range(0, 2):
@ -409,7 +424,9 @@ def test_put_role_policy():
def test_get_role_policy():
    conn = boto3.client("iam", region_name="us-east-1")
    conn.create_role(
        RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="my-path"
        RoleName="my-role",
        AssumeRolePolicyDocument="some policy",
        Path="my-path",
    )
    with pytest.raises(conn.exceptions.NoSuchEntityException):
        conn.get_role_policy(RoleName="my-role", PolicyName="does-not-exist")
@ -898,19 +915,19 @@ def test_get_all_access_keys():
    conn = boto.connect_iam()
    conn.create_user("my-user")
    response = conn.get_all_access_keys("my-user")
    assert \
    assert (
        response["list_access_keys_response"]["list_access_keys_result"][
            "access_key_metadata"
        ] == []
        ]
        == []
    )
    conn.create_access_key("my-user")
    response = conn.get_all_access_keys("my-user")
    assert \
        sorted(
            response["list_access_keys_response"]["list_access_keys_result"][
                "access_key_metadata"
            ][0].keys()
        ) == \
        sorted(["status", "create_date", "user_name", "access_key_id"])
    assert sorted(
        response["list_access_keys_response"]["list_access_keys_result"][
            "access_key_metadata"
        ][0].keys()
    ) == sorted(["status", "create_date", "user_name", "access_key_id"])


@mock_iam
@ -921,9 +938,9 @@ def test_list_access_keys():
    assert response["AccessKeyMetadata"] == []
    access_key = conn.create_access_key(UserName="my-user")["AccessKey"]
    response = conn.list_access_keys(UserName="my-user")
    assert \
        sorted(response["AccessKeyMetadata"][0].keys()) == \
        sorted(["Status", "CreateDate", "UserName", "AccessKeyId"]
    assert sorted(response["AccessKeyMetadata"][0].keys()) == sorted(
        ["Status", "CreateDate", "UserName", "AccessKeyId"]
    )
    conn = boto3.client(
        "iam",
        region_name="us-east-1",
@ -931,9 +948,9 @@ def test_list_access_keys():
        aws_secret_access_key=access_key["SecretAccessKey"],
    )
    response = conn.list_access_keys()
    assert \
        sorted(response["AccessKeyMetadata"][0].keys()) == \
        sorted(["Status", "CreateDate", "UserName", "AccessKeyId"])
    assert sorted(response["AccessKeyMetadata"][0].keys()) == sorted(
        ["Status", "CreateDate", "UserName", "AccessKeyId"]
    )


@mock_iam_deprecated()
@ -1022,7 +1039,8 @@ def test_create_virtual_mfa_device_errors():
    client.create_virtual_mfa_device.when.called_with(
        VirtualMFADeviceName="test-device"
    ).should.throw(
        ClientError, "MFADevice entity at the same path and name already exists."
        ClientError,
        "MFADevice entity at the same path and name already exists.",
    )

    client.create_virtual_mfa_device.when.called_with(
@ -1211,7 +1229,9 @@ def test_delete_user():
    # Test deletion failure with an inline policy
    conn.create_user(UserName="my-user")
    conn.put_user_policy(
        UserName="my-user", PolicyName="my-user-policy", PolicyDocument=MOCK_POLICY
        UserName="my-user",
        PolicyName="my-user-policy",
        PolicyDocument=MOCK_POLICY,
    )
    with pytest.raises(conn.exceptions.DeleteConflictException):
        conn.delete_user(UserName="my-user")
@ -1396,7 +1416,9 @@ def test_managed_policy():

    role_name = "my-role"
    conn.create_role(
        role_name, assume_role_policy_document={"policy": "test"}, path="my-path"
        role_name,
        assume_role_policy_document={"policy": "test"},
        path="my-path",
    )
    for policy_name in [
        "AmazonElasticMapReduceRole",
@ -1423,7 +1445,8 @@ def test_managed_policy():
    ].should.have.length_of(2)

    conn.detach_role_policy(
        "arn:aws:iam::aws:policy/service-role/AmazonElasticMapReduceRole", role_name
        "arn:aws:iam::aws:policy/service-role/AmazonElasticMapReduceRole",
        role_name,
    )
    rows = conn.list_policies(only_attached=True)["list_policies_response"][
        "list_policies_result"
@ -1444,7 +1467,8 @@ def test_managed_policy():

    with pytest.raises(BotoServerError):
        conn.detach_role_policy(
            "arn:aws:iam::aws:policy/service-role/AmazonElasticMapReduceRole", role_name
            "arn:aws:iam::aws:policy/service-role/AmazonElasticMapReduceRole",
            role_name,
        )

    with pytest.raises(BotoServerError):
@ -1562,7 +1586,9 @@ def test_get_ssh_public_key():

    with pytest.raises(ClientError):
        client.get_ssh_public_key(
            UserName=username, SSHPublicKeyId="xxnon-existent-keyxx", Encoding="SSH"
            UserName=username,
            SSHPublicKeyId="xxnon-existent-keyxx",
            Encoding="SSH",
        )

    resp = client.upload_ssh_public_key(UserName=username, SSHPublicKeyBody=public_key)
@ -1603,7 +1629,9 @@ def test_update_ssh_public_key():

    with pytest.raises(ClientError):
        client.update_ssh_public_key(
            UserName=username, SSHPublicKeyId="xxnon-existent-keyxx", Status="Inactive"
            UserName=username,
            SSHPublicKeyId="xxnon-existent-keyxx",
            Status="Inactive",
        )

    resp = client.upload_ssh_public_key(UserName=username, SSHPublicKeyBody=public_key)
@ -1681,7 +1709,9 @@ def test_get_account_authorization_details():
        UserName="testUser", PolicyName="testPolicy", PolicyDocument=test_policy
    )
    conn.put_group_policy(
        GroupName="testGroup", PolicyName="testPolicy", PolicyDocument=test_policy
        GroupName="testGroup",
        PolicyName="testPolicy",
        PolicyDocument=test_policy,
    )

    conn.attach_user_policy(
@ -1981,7 +2011,9 @@ def test_create_role_with_tags():
        map(lambda x: {"Key": str(x), "Value": str(x)}, range(0, 51))
    )
    conn.create_role(
        RoleName="my-role3", AssumeRolePolicyDocument="{}", Tags=too_many_tags
        RoleName="my-role3",
        AssumeRolePolicyDocument="{}",
        Tags=too_many_tags,
    )
    assert (
        "failed to satisfy constraint: Member must have length less than or equal to 50."
@ -2247,7 +2279,9 @@ def test_update_role_description():
        conn.delete_role(RoleName="my-role")

    conn.create_role(
        RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/"
        RoleName="my-role",
        AssumeRolePolicyDocument="some policy",
        Path="/my-path/",
    )
    response = conn.update_role_description(RoleName="my-role", Description="test")

@ -2262,7 +2296,9 @@ def test_update_role():
        conn.delete_role(RoleName="my-role")

    conn.create_role(
        RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/"
        RoleName="my-role",
        AssumeRolePolicyDocument="some policy",
        Path="/my-path/",
    )
    response = conn.update_role_description(RoleName="my-role", Description="test")
    assert response["Role"]["RoleName"] == "my-role"
@ -2276,7 +2312,9 @@ def test_update_role():
        conn.delete_role(RoleName="my-role")

    conn.create_role(
        RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/"
        RoleName="my-role",
        AssumeRolePolicyDocument="some policy",
        Path="/my-path/",
    )
    response = conn.update_role(RoleName="my-role", Description="test")
    assert len(response.keys()) == 1
@ -2317,7 +2355,9 @@ def test_list_entities_for_policy():

    conn = boto3.client("iam", region_name="us-east-1")
    conn.create_role(
        RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/"
        RoleName="my-role",
        AssumeRolePolicyDocument="some policy",
        Path="/my-path/",
    )
    conn.create_user(Path="/", UserName="testUser")
    conn.create_group(Path="/", GroupName="testGroup")
@ -2333,7 +2373,9 @@ def test_list_entities_for_policy():
        UserName="testUser", PolicyName="testPolicy", PolicyDocument=test_policy
    )
    conn.put_group_policy(
        GroupName="testGroup", PolicyName="testPolicy", PolicyDocument=test_policy
        GroupName="testGroup",
        PolicyName="testPolicy",
        PolicyDocument=test_policy,
    )

    conn.attach_user_policy(
@ -2396,7 +2438,9 @@ def test_list_entities_for_policy():
def test_create_role_no_path():
    conn = boto3.client("iam", region_name="us-east-1")
    resp = conn.create_role(
        RoleName="my-role", AssumeRolePolicyDocument="some policy", Description="test"
        RoleName="my-role",
        AssumeRolePolicyDocument="some policy",
        Description="test",
    )
    resp.get("Role").get("Arn").should.equal(
        "arn:aws:iam::{}:role/my-role".format(ACCOUNT_ID)
@ -2452,7 +2496,9 @@ def test_create_role_with_same_name_should_fail():
    iam = boto3.client("iam", region_name="us-east-1")
    test_role_name = str(uuid4())
    iam.create_role(
        RoleName=test_role_name, AssumeRolePolicyDocument="policy", Description="test"
        RoleName=test_role_name,
        AssumeRolePolicyDocument="policy",
        Description="test",
    )
    # Create the role again, and verify that it fails
    with pytest.raises(ClientError) as err:
@ -2539,14 +2585,24 @@ def test_create_open_id_connect_provider_errors():

    client.create_open_id_connect_provider.when.called_with(
        Url="http://example.org",
        ThumbprintList=["a" * 40, "b" * 40, "c" * 40, "d" * 40, "e" * 40, "f" * 40],
        ThumbprintList=[
            "a" * 40,
            "b" * 40,
            "c" * 40,
            "d" * 40,
            "e" * 40,
            "f" * 40,
        ],
    ).should.throw(ClientError, "Thumbprint list must contain fewer than 5 entries.")

    too_many_client_ids = ["{}".format(i) for i in range(101)]
    client.create_open_id_connect_provider.when.called_with(
        Url="http://example.org", ThumbprintList=[], ClientIDList=too_many_client_ids
        Url="http://example.org",
        ThumbprintList=[],
        ClientIDList=too_many_client_ids,
    ).should.throw(
        ClientError, "Cannot exceed quota for ClientIdsPerOpenIdConnectProvider: 100"
        ClientError,
        "Cannot exceed quota for ClientIdsPerOpenIdConnectProvider: 100",
    )

    too_long_url = "b" * 256
@ -2587,7 +2643,8 @@ def test_delete_open_id_connect_provider():
    client.get_open_id_connect_provider.when.called_with(
        OpenIDConnectProviderArn=open_id_arn
    ).should.throw(
        ClientError, "OpenIDConnect Provider not found for arn {}".format(open_id_arn)
        ClientError,
        "OpenIDConnect Provider not found for arn {}".format(open_id_arn),
    )

    # deleting a non existing provider should be successful
@ -2679,7 +2736,9 @@ def test_update_account_password_policy_errors():
    client = boto3.client("iam", region_name="us-east-1")

    client.update_account_password_policy.when.called_with(
        MaxPasswordAge=1096, MinimumPasswordLength=129, PasswordReusePrevention=25
        MaxPasswordAge=1096,
        MinimumPasswordLength=129,
        PasswordReusePrevention=25,
    ).should.throw(
        ClientError,
        "3 validation errors detected: "
@ -2757,7 +2816,8 @@ def test_delete_account_password_policy_errors():
    client = boto3.client("iam", region_name="us-east-1")

    client.delete_account_password_policy.when.called_with().should.throw(
        ClientError, "The account policy with name PasswordPolicy cannot be found."
        ClientError,
        "The account policy with name PasswordPolicy cannot be found.",
    )


@ -2885,7 +2945,8 @@ def test_list_user_tags():
    conn = boto3.client("iam", region_name="us-east-1")
    conn.create_user(UserName="kenny-bania")
    conn.create_user(
        UserName="jackie-chiles", Tags=[{"Key": "Sue-Allen", "Value": "Oh-Henry"}]
        UserName="jackie-chiles",
        Tags=[{"Key": "Sue-Allen", "Value": "Oh-Henry"}],
    )
    conn.create_user(
        UserName="cosmo",
@ -2904,7 +2965,10 @@ def test_list_user_tags():

    response = conn.list_user_tags(UserName="cosmo")
    response["Tags"].should.equal(
        [{"Key": "Stan", "Value": "The Caddy"}, {"Key": "like-a", "Value": "glove"}]
        [
            {"Key": "Stan", "Value": "The Caddy"},
            {"Key": "like-a", "Value": "glove"},
        ]
    )
    response["IsTruncated"].should_not.be.ok

@ -2947,7 +3011,8 @@ def test_delete_account_password_policy_errors():
    client = boto3.client("iam", region_name="us-east-1")

    client.delete_account_password_policy.when.called_with().should.throw(
        ClientError, "The account policy with name PasswordPolicy cannot be found."
        ClientError,
        "The account policy with name PasswordPolicy cannot be found.",
    )


@ -2976,7 +3041,10 @@ def test_role_list_config_discovered_resources():
            max_session_duration=3600,
        )
        roles.append(
            {"id": this_role.id, "name": this_role.name,}
            {
                "id": this_role.id,
                "name": this_role.name,
            }
        )

    assert len(roles) == num_roles
@ -3034,7 +3102,11 @@ def test_role_config_dict():
    basic_assume_role = {
        "Version": "2012-10-17",
        "Statement": [
            {"Effect": "Allow", "Principal": {"AWS": "*"}, "Action": "sts:AssumeRole"}
            {
                "Effect": "Allow",
                "Principal": {"AWS": "*"},
                "Action": "sts:AssumeRole",
            }
        ],
    }

@ -3351,7 +3423,9 @@ def test_role_config_client():
    # Test non-aggregated pagination
    assert (
        config_client.list_discovered_resources(
            resourceType="AWS::IAM::Role", limit=1, nextToken=result["nextToken"]
            resourceType="AWS::IAM::Role",
            limit=1,
            nextToken=result["nextToken"],
        )["resourceIdentifiers"][0]["resourceId"]
    ) != first_result

@ -3387,14 +3461,18 @@ def test_role_config_client():
    # Test non-aggregated resource name/id filter
    assert (
        config_client.list_discovered_resources(
            resourceType="AWS::IAM::Role", resourceName=roles[1]["name"], limit=1,
            resourceType="AWS::IAM::Role",
            resourceName=roles[1]["name"],
            limit=1,
        )["resourceIdentifiers"][0]["resourceName"]
        == roles[1]["name"]
    )

    assert (
        config_client.list_discovered_resources(
            resourceType="AWS::IAM::Role", resourceIds=[roles[0]["id"]], limit=1,
            resourceType="AWS::IAM::Role",
            resourceIds=[roles[0]["id"]],
            limit=1,
        )["resourceIdentifiers"][0]["resourceName"]
        == roles[0]["name"]
    )
@ -3440,13 +3518,17 @@ def test_role_config_client():
    # Test non-aggregated resource name/id filter
    assert (
        config_client.list_discovered_resources(
            resourceType="AWS::IAM::Role", resourceName=roles[1]["name"], limit=1,
            resourceType="AWS::IAM::Role",
            resourceName=roles[1]["name"],
            limit=1,
        )["resourceIdentifiers"][0]["resourceName"]
        == roles[1]["name"]
    )
    assert (
        config_client.list_discovered_resources(
            resourceType="AWS::IAM::Role", resourceIds=[roles[0]["id"]], limit=1,
            resourceType="AWS::IAM::Role",
            resourceIds=[roles[0]["id"]],
            limit=1,
        )["resourceIdentifiers"][0]["resourceName"]
        == roles[0]["name"]
    )
@ -3556,7 +3638,10 @@ def test_policy_list_config_discovered_resources():
            policy_name="policy{}".format(ix),
        )
        policies.append(
            {"id": this_policy.id, "name": this_policy.name,}
            {
                "id": this_policy.id,
                "name": this_policy.name,
            }
        )

    assert len(policies) == num_policies
@ -3781,7 +3866,9 @@ def test_policy_config_client():
    # Test non-aggregated pagination
    assert (
        config_client.list_discovered_resources(
            resourceType="AWS::IAM::Policy", limit=1, nextToken=result["nextToken"]
            resourceType="AWS::IAM::Policy",
            limit=1,
            nextToken=result["nextToken"],
        )["resourceIdentifiers"][0]["resourceId"]
    ) != first_result

@ -3818,14 +3905,18 @@ def test_policy_config_client():
    # Test non-aggregated resource name/id filter
    assert (
        config_client.list_discovered_resources(
            resourceType="AWS::IAM::Policy", resourceName=policies[1]["name"], limit=1,
            resourceType="AWS::IAM::Policy",
            resourceName=policies[1]["name"],
            limit=1,
        )["resourceIdentifiers"][0]["resourceName"]
        == policies[1]["name"]
    )

    assert (
        config_client.list_discovered_resources(
            resourceType="AWS::IAM::Policy", resourceIds=[policies[0]["id"]], limit=1,
            resourceType="AWS::IAM::Policy",
            resourceIds=[policies[0]["id"]],
            limit=1,
        )["resourceIdentifiers"][0]["resourceName"]
        == policies[0]["name"]
    )
@ -3906,7 +3997,10 @@ def test_policy_config_client():
    assert (
        config_client.batch_get_resource_config(
            resourceKeys=[
                {"resourceType": "AWS::IAM::Policy", "resourceId": policies[7]["id"]}
                {
                    "resourceType": "AWS::IAM::Policy",
                    "resourceId": policies[7]["id"],
                }
            ]
        )["baseConfigurationItems"][0]["resourceName"]
        == policies[7]["name"]

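A third pattern dominates the IAM hunks: Black never emits backslash continuations. A long assert written with backslashes is rewrapped either by putting "invisible" parentheses around the whole test and breaking before the comparison operator, or, when the right operand is itself a call, by splitting inside that call's parentheses so the == stays on the first line. Both shapes appear above; a sketch, again assuming black is installed (the exact break points depend on the names' lengths):

    import black

    # Wrapped in invisible parentheses, broken before "==":
    long_eq = "assert response_from_a_very_long_helper_function_name_here == another_equally_long_expected_value_name\n"
    print(black.format_str(long_eq, mode=black.FileMode()))

    # Split inside the right-hand call instead, keeping "== sorted(" on line one:
    call_eq = 'assert sorted(response["AccessKeyMetadata"][0].keys()) == sorted(["Status", "CreateDate", "UserName", "AccessKeyId"])\n'
    print(black.format_str(call_eq, mode=black.FileMode()))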
@ -939,9 +939,7 @@ class TestListThingGroup:
        resp["thingGroups"].should.have.length_of(0)
        with pytest.raises(ClientError) as e:
            client.list_thing_groups(parentGroup="inexistant-group-name")
        e.value.response["Error"]["Code"].should.equal(
            "ResourceNotFoundException"
        )
        e.value.response["Error"]["Code"].should.equal("ResourceNotFoundException")

    @mock_iot
    def test_should_list_all_groups_filtered_by_parent_non_recursively(self):
@ -1019,7 +1017,9 @@ def test_delete_thing_group():
    group_name_1a = "my-group-name-1a"
    group_name_2a = "my-group-name-2a"
    tree_dict = {
        group_name_1a: {group_name_2a: {},},
        group_name_1a: {
            group_name_2a: {},
        },
    }
    group_catalog = generate_thing_group_tree(client, tree_dict)

@ -24,7 +24,9 @@ def test_get_hls_streaming_session_url():
        region_name=region_name,
        endpoint_url=data_endpoint,
    )
    res = client.get_hls_streaming_session_url(StreamName=stream_name,)
    res = client.get_hls_streaming_session_url(
        StreamName=stream_name,
    )
    reg_exp = "^{}/hls/v1/getHLSMasterPlaylist.m3u8\?SessionToken\=.+$".format(
        data_endpoint
    )
@ -48,7 +50,9 @@ def test_get_dash_streaming_session_url():
        region_name=region_name,
        endpoint_url=data_endpoint,
    )
    res = client.get_dash_streaming_session_url(StreamName=stream_name,)
    res = client.get_dash_streaming_session_url(
        StreamName=stream_name,
    )
    reg_exp = "^{}/dash/v1/getDASHManifest.mpd\?SessionToken\=.+$".format(data_endpoint)
    res.should.have.key("DASHStreamingSessionURL").which.should.match(reg_exp)

@ -575,8 +575,10 @@ def test__delete_alias__raises_if_alias_is_not_found():
    with pytest.raises(NotFoundException) as err:
        kms.delete_alias(alias_name)

    expected_message_match = r"Alias arn:aws:kms:{region}:[0-9]{{12}}:{alias_name} is not found.".format(
        region=region, alias_name=alias_name
    expected_message_match = (
        r"Alias arn:aws:kms:{region}:[0-9]{{12}}:{alias_name} is not found.".format(
            region=region, alias_name=alias_name
        )
    )
    ex = err.value
    ex.body["__type"].should.equal("NotFoundException")

@ -55,14 +55,20 @@ def test_create_key():
    key["KeyMetadata"]["Origin"].should.equal("AWS_KMS")
    key["KeyMetadata"].should_not.have.key("SigningAlgorithms")

    key = conn.create_key(KeyUsage="ENCRYPT_DECRYPT", CustomerMasterKeySpec="RSA_2048",)
    key = conn.create_key(
        KeyUsage="ENCRYPT_DECRYPT",
        CustomerMasterKeySpec="RSA_2048",
    )

    sorted(key["KeyMetadata"]["EncryptionAlgorithms"]).should.equal(
        ["RSAES_OAEP_SHA_1", "RSAES_OAEP_SHA_256"]
    )
    key["KeyMetadata"].should_not.have.key("SigningAlgorithms")

    key = conn.create_key(KeyUsage="SIGN_VERIFY", CustomerMasterKeySpec="RSA_2048",)
    key = conn.create_key(
        KeyUsage="SIGN_VERIFY",
        CustomerMasterKeySpec="RSA_2048",
    )

    key["KeyMetadata"].should_not.have.key("EncryptionAlgorithms")
    sorted(key["KeyMetadata"]["SigningAlgorithms"]).should.equal(
@ -77,21 +83,24 @@ def test_create_key():
    )

    key = conn.create_key(
        KeyUsage="SIGN_VERIFY", CustomerMasterKeySpec="ECC_SECG_P256K1",
        KeyUsage="SIGN_VERIFY",
        CustomerMasterKeySpec="ECC_SECG_P256K1",
    )

    key["KeyMetadata"].should_not.have.key("EncryptionAlgorithms")
    key["KeyMetadata"]["SigningAlgorithms"].should.equal(["ECDSA_SHA_256"])

    key = conn.create_key(
        KeyUsage="SIGN_VERIFY", CustomerMasterKeySpec="ECC_NIST_P384",
        KeyUsage="SIGN_VERIFY",
        CustomerMasterKeySpec="ECC_NIST_P384",
    )

    key["KeyMetadata"].should_not.have.key("EncryptionAlgorithms")
    key["KeyMetadata"]["SigningAlgorithms"].should.equal(["ECDSA_SHA_384"])

    key = conn.create_key(
        KeyUsage="SIGN_VERIFY", CustomerMasterKeySpec="ECC_NIST_P521",
        KeyUsage="SIGN_VERIFY",
        CustomerMasterKeySpec="ECC_NIST_P521",
    )

    key["KeyMetadata"].should_not.have.key("EncryptionAlgorithms")
@ -101,7 +110,10 @@ def test_create_key():
@mock_kms
def test_describe_key():
    client = boto3.client("kms", region_name="us-east-1")
    response = client.create_key(Description="my key", KeyUsage="ENCRYPT_DECRYPT",)
    response = client.create_key(
        Description="my key",
        KeyUsage="ENCRYPT_DECRYPT",
    )
    key_id = response["KeyMetadata"]["KeyId"]

    response = client.describe_key(KeyId=key_id)
@ -205,7 +205,8 @@ def test_delete_subscription_filter_errors():

    # when
    client_logs.delete_subscription_filter(
        logGroupName="/test", filterName="test",
        logGroupName="/test",
        filterName="test",
    )

    # then
@ -243,7 +244,8 @@ def test_delete_subscription_filter_errors():
    # when
    with pytest.raises(ClientError) as e:
        client_logs.delete_subscription_filter(
            logGroupName="not-existing-log-group", filterName="test",
            logGroupName="not-existing-log-group",
            filterName="test",
        )

    # then
@ -258,7 +260,8 @@ def test_delete_subscription_filter_errors():
    # when
    with pytest.raises(ClientError) as e:
        client_logs.delete_subscription_filter(
            logGroupName="/test", filterName="wrong-filter-name",
            logGroupName="/test",
            filterName="wrong-filter-name",
        )

    # then
@ -342,7 +345,9 @@ def _get_role_name(region_name):
        return iam.get_role(RoleName="test-role")["Role"]["Arn"]
    except ClientError:
        return iam.create_role(
            RoleName="test-role", AssumeRolePolicyDocument="test policy", Path="/",
            RoleName="test-role",
            AssumeRolePolicyDocument="test policy",
            Path="/",
        )["Role"]["Arn"]


@ -372,7 +377,8 @@ def _wait_for_log_msg(client, log_group_name, expected_msg_part):

    for log_stream in log_streams:
        result = client.get_log_events(
            logGroupName=log_group_name, logStreamName=log_stream["logStreamName"],
            logGroupName=log_group_name,
            logStreamName=log_stream["logStreamName"],
        )
        received_messages.extend(
            [event["message"] for event in result.get("events")]

@ -448,7 +448,9 @@ def test_describe_subscription_filters_errors():

    # when
    with pytest.raises(ClientError) as e:
        client.describe_subscription_filters(logGroupName="not-existing-log-group",)
        client.describe_subscription_filters(
            logGroupName="not-existing-log-group",
        )

    # then
    ex = e.value

@ -183,7 +183,8 @@ def test_create_another_member_withopts():

    # But cannot get
    response = conn.get_member.when.called_with(
        NetworkId=network_id, MemberId=member_id2,
        NetworkId=network_id,
        MemberId=member_id2,
    ).should.throw(Exception, "Member {0} not found".format(member_id2))

    # Delete member 1
@ -255,7 +256,9 @@ def test_invite_and_remove_member():

    # Create proposal (invite and remove member)
    response = conn.create_proposal(
        NetworkId=network_id, MemberId=member_id, Actions=both_policy_actions,
        NetworkId=network_id,
        MemberId=member_id,
        Actions=both_policy_actions,
    )
    proposal_id2 = response["ProposalId"]

@ -368,7 +371,10 @@ def test_create_too_many_members():
        MemberConfiguration=helpers.create_member_configuration(
            "testmember6", "admin", "Admin12345", False, "Test Member 6"
        ),
    ).should.throw(Exception, "is the maximum number of members allowed in a",)
    ).should.throw(
        Exception,
        "is the maximum number of members allowed in a",
    )


@mock_managedblockchain
@ -594,7 +600,8 @@ def test_get_member_badmember():
    network_id = response["NetworkId"]

    response = conn.get_member.when.called_with(
        NetworkId=network_id, MemberId="m-ABCDEFGHIJKLMNOP0123456789",
        NetworkId=network_id,
        MemberId="m-ABCDEFGHIJKLMNOP0123456789",
    ).should.throw(Exception, "Member m-ABCDEFGHIJKLMNOP0123456789 not found")


@ -624,7 +631,8 @@ def test_delete_member_badmember():
    network_id = response["NetworkId"]

    response = conn.delete_member.when.called_with(
        NetworkId=network_id, MemberId="m-ABCDEFGHIJKLMNOP0123456789",
        NetworkId=network_id,
        MemberId="m-ABCDEFGHIJKLMNOP0123456789",
    ).should.throw(Exception, "Member m-ABCDEFGHIJKLMNOP0123456789 not found")


@ -58,7 +58,9 @@ def test_create_node():

    # Delete node
    conn.delete_node(
        NetworkId=network_id, MemberId=member_id, NodeId=node_id,
        NetworkId=network_id,
        MemberId=member_id,
        NodeId=node_id,
    )

    # Find node in full list
@ -77,7 +79,9 @@ def test_create_node():

    # But cannot get
    response = conn.get_node.when.called_with(
        NetworkId=network_id, MemberId=member_id, NodeId=node_id,
        NetworkId=network_id,
        MemberId=member_id,
        NodeId=node_id,
    ).should.throw(Exception, "Node {0} not found".format(node_id))


@ -103,7 +107,9 @@ def test_create_node_standard_edition():
    logconfigbad = dict(helpers.default_nodeconfiguration)
    logconfigbad["InstanceType"] = "bc.t3.large"
    response = conn.create_node(
        NetworkId=network_id, MemberId=member_id, NodeConfiguration=logconfigbad,
        NetworkId=network_id,
        MemberId=member_id,
        NodeConfiguration=logconfigbad,
    )
    node_id = response["NodeId"]

@ -146,7 +152,8 @@ def test_create_node_standard_edition():

    # Should now be an exception
    response = conn.list_nodes.when.called_with(
        NetworkId=network_id, MemberId=member_id,
        NetworkId=network_id,
        MemberId=member_id,
    ).should.throw(Exception, "Member {0} not found".format(member_id))


@ -192,7 +199,8 @@ def test_create_too_many_nodes():
        MemberId=member_id,
        NodeConfiguration=helpers.default_nodeconfiguration,
    ).should.throw(
        Exception, "Maximum number of nodes exceeded in member {0}".format(member_id),
        Exception,
        "Maximum number of nodes exceeded in member {0}".format(member_id),
    )


@ -249,14 +257,18 @@ def test_create_node_badnodeconfig():
    logconfigbad = dict(helpers.default_nodeconfiguration)
    logconfigbad["InstanceType"] = "foo"
    response = conn.create_node.when.called_with(
        NetworkId=network_id, MemberId=member_id, NodeConfiguration=logconfigbad,
        NetworkId=network_id,
        MemberId=member_id,
        NodeConfiguration=logconfigbad,
    ).should.throw(Exception, "Requested instance foo isn't supported.")

    # Incorrect instance type for edition
    logconfigbad = dict(helpers.default_nodeconfiguration)
    logconfigbad["InstanceType"] = "bc.t3.large"
    response = conn.create_node.when.called_with(
        NetworkId=network_id, MemberId=member_id, NodeConfiguration=logconfigbad,
        NetworkId=network_id,
        MemberId=member_id,
        NodeConfiguration=logconfigbad,
    ).should.throw(
        Exception,
        "Instance type bc.t3.large is not supported with STARTER Edition networks",
@ -266,7 +278,9 @@ def test_create_node_badnodeconfig():
    logconfigbad = dict(helpers.default_nodeconfiguration)
    logconfigbad["AvailabilityZone"] = "us-east-11"
    response = conn.create_node.when.called_with(
        NetworkId=network_id, MemberId=member_id, NodeConfiguration=logconfigbad,
        NetworkId=network_id,
        MemberId=member_id,
        NodeConfiguration=logconfigbad,
    ).should.throw(Exception, "Availability Zone is not valid")


@ -296,7 +310,8 @@ def test_list_nodes_badmember():
    network_id = response["NetworkId"]

    response = conn.list_nodes.when.called_with(
        NetworkId=network_id, MemberId="m-ABCDEFGHIJKLMNOP0123456789",
        NetworkId=network_id,
        MemberId="m-ABCDEFGHIJKLMNOP0123456789",
    ).should.throw(Exception, "Member m-ABCDEFGHIJKLMNOP0123456789 not found")


@ -131,7 +131,9 @@ def test_create_proposal_badinvitationacctid():
    member_id = response["MemberId"]

    response = conn.create_proposal.when.called_with(
        NetworkId=network_id, MemberId=member_id, Actions=actions,
        NetworkId=network_id,
        MemberId=member_id,
        Actions=actions,
    ).should.throw(Exception, "Account ID format specified in proposal is not valid")


@ -155,7 +157,9 @@ def test_create_proposal_badremovalmemid():
    member_id = response["MemberId"]

    response = conn.create_proposal.when.called_with(
        NetworkId=network_id, MemberId=member_id, Actions=actions,
        NetworkId=network_id,
        MemberId=member_id,
        Actions=actions,
    ).should.throw(Exception, "Member ID format specified in proposal is not valid")


@ -194,5 +198,6 @@ def test_get_proposal_badproposal():
    network_id = response["NetworkId"]

    response = conn.get_proposal.when.called_with(
        NetworkId=network_id, ProposalId="p-ABCDEFGHIJKLMNOP0123456789",
        NetworkId=network_id,
        ProposalId="p-ABCDEFGHIJKLMNOP0123456789",
    ).should.throw(Exception, "Proposal p-ABCDEFGHIJKLMNOP0123456789 not found")

@ -666,5 +666,6 @@ def test_list_proposal_votes_badproposal():
    member_id = response["MemberId"]

    response = conn.list_proposal_votes.when.called_with(
        NetworkId=network_id, ProposalId="p-ABCDEFGHIJKLMNOP0123456789",
        NetworkId=network_id,
        ProposalId="p-ABCDEFGHIJKLMNOP0123456789",
    ).should.throw(Exception, "Proposal p-ABCDEFGHIJKLMNOP0123456789 not found")

@ -931,7 +931,10 @@ def test_tag_resource_errors():

    with pytest.raises(ClientError) as e:
        client.tag_resource(
            ResourceId="000000000000", Tags=[{"Key": "key", "Value": "value"},],
            ResourceId="000000000000",
            Tags=[
                {"Key": "key", "Value": "value"},
            ],
        )
    ex = e.value
    ex.operation_name.should.equal("TagResource")

@ -48,8 +48,8 @@ else:


def reduced_min_part_size(f):
    """ speed up tests by temporarily making the multipart minimum part size
    small
    """speed up tests by temporarily making the multipart minimum part size
    small
    """
    orig_size = s3model.UPLOAD_PART_MIN_SIZE

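The hunk above is not line wrapping but Black's docstring normalization: the space after the opening triple quotes is stripped (along with trailing whitespace), while the text itself is left alone. Docstring formatting landed in Black around release 20.8b0, so this commit was presumably made with at least that version. A sketch, assuming black is installed:

    import black

    src = 'def f():\n    """ speed up tests by shrinking the minimum part size """\n'
    print(black.format_str(src, mode=black.FileMode()))
    # def f():
    #     """speed up tests by shrinking the minimum part size"""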
@ -1207,8 +1207,7 @@ if not settings.TEST_SERVER_MODE:
        with pytest.raises(ClientError) as ce:
            client.get_public_access_block(AccountId=ACCOUNT_ID)
        assert (
            ce.value.response["Error"]["Code"]
            == "NoSuchPublicAccessBlockConfiguration"
            ce.value.response["Error"]["Code"] == "NoSuchPublicAccessBlockConfiguration"
        )

        # Put a with an invalid account ID:
@ -1265,8 +1264,7 @@ if not settings.TEST_SERVER_MODE:
        with pytest.raises(ClientError) as ce:
            client.get_public_access_block(AccountId=ACCOUNT_ID)
        assert (
            ce.value.response["Error"]["Code"]
            == "NoSuchPublicAccessBlockConfiguration"
            ce.value.response["Error"]["Code"] == "NoSuchPublicAccessBlockConfiguration"
        )

    @mock_s3
@ -1465,9 +1463,7 @@ if not settings.TEST_SERVER_MODE:
            config_client.get_resource_config_history(
                resourceType="AWS::S3::AccountPublicAccessBlock", resourceId=ACCOUNT_ID
            )
        assert (
            ce.value.response["Error"]["Code"] == "ResourceNotDiscoveredException"
        )
        assert ce.value.response["Error"]["Code"] == "ResourceNotDiscoveredException"
        # aggregate
        result = config_client.batch_get_resource_config(
            resourceKeys=[
@ -2402,7 +2398,9 @@ def test_boto3_get_object_if_match():

    with pytest.raises(botocore.exceptions.ClientError) as err:
        s3.get_object(
            Bucket=bucket_name, Key=key, IfMatch='"hello"',
            Bucket=bucket_name,
            Key=key,
            IfMatch='"hello"',
        )
    e = err.value
    e.response["Error"]["Code"].should.equal("PreconditionFailed")
@ -2421,7 +2419,9 @@ def test_boto3_get_object_if_none_match():

    with pytest.raises(botocore.exceptions.ClientError) as err:
        s3.get_object(
            Bucket=bucket_name, Key=key, IfNoneMatch=etag,
            Bucket=bucket_name,
            Key=key,
            IfNoneMatch=etag,
        )
    e = err.value
    e.response["Error"].should.equal({"Code": "304", "Message": "Not Modified"})
@ -2479,7 +2479,9 @@ def test_boto3_head_object_if_match():

    with pytest.raises(botocore.exceptions.ClientError) as err:
        s3.head_object(
            Bucket=bucket_name, Key=key, IfMatch='"hello"',
            Bucket=bucket_name,
            Key=key,
            IfMatch='"hello"',
        )
    e = err.value
    e.response["Error"].should.equal({"Code": "412", "Message": "Precondition Failed"})
@ -2497,7 +2499,9 @@ def test_boto3_head_object_if_none_match():

    with pytest.raises(botocore.exceptions.ClientError) as err:
        s3.head_object(
            Bucket=bucket_name, Key=key, IfNoneMatch=etag,
            Bucket=bucket_name,
            Key=key,
            IfNoneMatch=etag,
        )
    e = err.value
    e.response["Error"].should.equal({"Code": "304", "Message": "Not Modified"})
@ -3200,9 +3204,7 @@ def test_put_bucket_notification_errors():
        )

    assert err.value.response["Error"]["Code"] == "InvalidArgument"
    assert (
        err.value.response["Error"]["Message"] == "The ARN is not well formed"
    )
    assert err.value.response["Error"]["Message"] == "The ARN is not well formed"

    # Region not the same as the bucket:
    with pytest.raises(ClientError) as err:
@ -4075,9 +4077,7 @@ def test_public_access_block():
    with pytest.raises(ClientError) as ce:
        client.get_public_access_block(Bucket="mybucket")

    assert (
        ce.value.response["Error"]["Code"] == "NoSuchPublicAccessBlockConfiguration"
    )
    assert ce.value.response["Error"]["Code"] == "NoSuchPublicAccessBlockConfiguration"
    assert (
        ce.value.response["Error"]["Message"]
        == "The public access block configuration was not found"
@ -4157,9 +4157,7 @@ def test_public_access_block():

    with pytest.raises(ClientError) as ce:
        client.get_public_access_block(Bucket="mybucket")
    assert (
        ce.value.response["Error"]["Code"] == "NoSuchPublicAccessBlockConfiguration"
    )
    assert ce.value.response["Error"]["Code"] == "NoSuchPublicAccessBlockConfiguration"


@mock_s3

@ -14,7 +14,12 @@ def test_s3_bucket_cloudformation_basic():

    template = {
        "AWSTemplateFormatVersion": "2010-09-09",
        "Resources": {"testInstance": {"Type": "AWS::S3::Bucket", "Properties": {},}},
        "Resources": {
            "testInstance": {
                "Type": "AWS::S3::Bucket",
                "Properties": {},
            }
        },
        "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
    }
    template_json = json.dumps(template)

@ -88,11 +88,15 @@ def test_delete_endpoint_config():
    resp = sagemaker.delete_endpoint_config(EndpointConfigName=endpoint_config_name)
    with pytest.raises(ClientError) as e:
        sagemaker.describe_endpoint_config(EndpointConfigName=endpoint_config_name)
    assert e.value.response["Error"]["Message"].startswith("Could not find endpoint configuration")
    assert e.value.response["Error"]["Message"].startswith(
        "Could not find endpoint configuration"
    )

    with pytest.raises(ClientError) as e:
        sagemaker.delete_endpoint_config(EndpointConfigName=endpoint_config_name)
    assert e.value.response["Error"]["Message"].startswith( "Could not find endpoint configuration")
    assert e.value.response["Error"]["Message"].startswith(
        "Could not find endpoint configuration"
    )


@mock_sagemaker
@ -134,7 +138,9 @@ def test_create_endpoint():
        sagemaker.create_endpoint(
            EndpointName=endpoint_name, EndpointConfigName="NonexistentEndpointConfig"
        )
    assert e.value.response["Error"]["Message"].startswith("Could not find endpoint configuration")
    assert e.value.response["Error"]["Message"].startswith(
        "Could not find endpoint configuration"
    )

    model_name = "MyModel"
    _create_model(sagemaker, model_name)

@ -49,8 +49,9 @@ def test_create_notebook_instance_minimal_params():
    assert resp["NotebookInstanceArn"].endswith(args["NotebookInstanceName"])
    assert resp["NotebookInstanceName"] == NAME_PARAM
    assert resp["NotebookInstanceStatus"] == "InService"
    assert resp["Url"] == \
        "{}.notebook.{}.sagemaker.aws".format(NAME_PARAM, TEST_REGION_NAME)
    assert resp["Url"] == "{}.notebook.{}.sagemaker.aws".format(
        NAME_PARAM, TEST_REGION_NAME
    )
    assert resp["InstanceType"] == INSTANCE_TYPE_PARAM
    assert resp["RoleArn"] == FAKE_ROLE_ARN
    assert isinstance(resp["LastModifiedTime"], datetime.datetime)
@ -99,8 +100,9 @@ def test_create_notebook_instance_params():
    assert resp["NotebookInstanceArn"].endswith(args["NotebookInstanceName"])
    assert resp["NotebookInstanceName"] == NAME_PARAM
    assert resp["NotebookInstanceStatus"] == "InService"
    assert resp["Url"] == \
        "{}.notebook.{}.sagemaker.aws".format(NAME_PARAM, TEST_REGION_NAME)
    assert resp["Url"] == "{}.notebook.{}.sagemaker.aws".format(
        NAME_PARAM, TEST_REGION_NAME
    )
    assert resp["InstanceType"] == INSTANCE_TYPE_PARAM
    assert resp["RoleArn"] == FAKE_ROLE_ARN
    assert isinstance(resp["LastModifiedTime"], datetime.datetime)
@ -111,8 +113,7 @@ def test_create_notebook_instance_params():
    assert resp["SubnetId"] == FAKE_SUBNET_ID
    assert resp["SecurityGroups"] == FAKE_SECURITY_GROUP_IDS
    assert resp["KmsKeyId"] == FAKE_KMS_KEY_ID
    assert resp["NotebookInstanceLifecycleConfigName"] == \
        FAKE_LIFECYCLE_CONFIG_NAME
    assert resp["NotebookInstanceLifecycleConfigName"] == FAKE_LIFECYCLE_CONFIG_NAME
    assert resp["AcceleratorTypes"] == ACCELERATOR_TYPES_PARAM
    assert resp["DefaultCodeRepository"] == FAKE_DEFAULT_CODE_REPO
    assert resp["AdditionalCodeRepositories"] == FAKE_ADDL_CODE_REPOS
@ -135,9 +136,11 @@ def test_create_notebook_instance_bad_volume_size():
    }
    with pytest.raises(ParamValidationError) as ex:
        sagemaker.create_notebook_instance(**args)
    assert \
        ex.value.args[0] == \
        "Parameter validation failed:\nInvalid range for parameter VolumeSizeInGB, value: {}, valid range: 5-inf".format(vol_size)
    assert ex.value.args[
        0
    ] == "Parameter validation failed:\nInvalid range for parameter VolumeSizeInGB, value: {}, valid range: 5-inf".format(
        vol_size
    )


@mock_sagemaker
@ -238,17 +241,15 @@ def test_notebook_instance_lifecycle_config():
        OnCreate=on_create,
        OnStart=on_start,
    )
    assert \
        e.value.response["Error"]["Message"].endswith(
            "Notebook Instance Lifecycle Config already exists.)"
        )
    assert e.value.response["Error"]["Message"].endswith(
        "Notebook Instance Lifecycle Config already exists.)"
    )

    resp = sagemaker.describe_notebook_instance_lifecycle_config(
        NotebookInstanceLifecycleConfigName=name,
    )
    assert resp["NotebookInstanceLifecycleConfigName"] == name
    assert \
        resp["NotebookInstanceLifecycleConfigArn"].startswith("arn:aws:sagemaker")
    assert resp["NotebookInstanceLifecycleConfigArn"].startswith("arn:aws:sagemaker")
    assert resp["NotebookInstanceLifecycleConfigArn"].endswith(name)
    assert resp["OnStart"] == on_start
    assert resp["OnCreate"] == on_create
@ -263,16 +264,14 @@ def test_notebook_instance_lifecycle_config():
        sagemaker.describe_notebook_instance_lifecycle_config(
            NotebookInstanceLifecycleConfigName=name,
        )
    assert \
        e.value.response["Error"]["Message"].endswith(
            "Notebook Instance Lifecycle Config does not exist.)"
        )
    assert e.value.response["Error"]["Message"].endswith(
        "Notebook Instance Lifecycle Config does not exist.)"
    )

    with pytest.raises(ClientError) as e:
        sagemaker.delete_notebook_instance_lifecycle_config(
            NotebookInstanceLifecycleConfigName=name,
        )
    assert \
        e.value.response["Error"]["Message"].endswith(
            "Notebook Instance Lifecycle Config does not exist.)"
        )
    assert e.value.response["Error"]["Message"].endswith(
        "Notebook Instance Lifecycle Config does not exist.)"
    )

@ -82,20 +82,21 @@ def test_create_training_job():
        r"^arn:aws:sagemaker:.*:.*:training-job/{}$".format(training_job_name)
    )
    assert resp["ModelArtifacts"]["S3ModelArtifacts"].startswith(
        params["OutputDataConfig"]["S3OutputPath"]
    )
        params["OutputDataConfig"]["S3OutputPath"]
    )
    assert training_job_name in (resp["ModelArtifacts"]["S3ModelArtifacts"])
    assert \
        resp["ModelArtifacts"]["S3ModelArtifacts"].endswith("output/model.tar.gz")
    assert resp["ModelArtifacts"]["S3ModelArtifacts"].endswith("output/model.tar.gz")
    assert resp["TrainingJobStatus"] == "Completed"
    assert resp["SecondaryStatus"] == "Completed"
    assert resp["HyperParameters"] == params["HyperParameters"]
    assert \
        resp["AlgorithmSpecification"]["TrainingImage"] == \
        params["AlgorithmSpecification"]["TrainingImage"]
    assert \
        resp["AlgorithmSpecification"]["TrainingInputMode"] == \
        params["AlgorithmSpecification"]["TrainingInputMode"]
    assert (
        resp["AlgorithmSpecification"]["TrainingImage"]
        == params["AlgorithmSpecification"]["TrainingImage"]
    )
    assert (
        resp["AlgorithmSpecification"]["TrainingInputMode"]
        == params["AlgorithmSpecification"]["TrainingInputMode"]
    )
    assert "MetricDefinitions" in resp["AlgorithmSpecification"]
    assert "Name" in resp["AlgorithmSpecification"]["MetricDefinitions"][0]
    assert "Regex" in resp["AlgorithmSpecification"]["MetricDefinitions"][0]

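Everything in this commit is meant to be formatting-only, and Black enforces that itself: by default it re-parses the reformatted source and checks that the AST is equivalent to the original's (with a special case that tolerates the docstring whitespace changes shown above). For any non-docstring hunk, the same check can be sketched by hand, assuming black is installed; ast.dump omits line and column information by default, so equal dumps mean the two sources are the same program modulo layout:

    import ast
    import black

    old = 'conn.create_application(ApplicationName="myapp",)\n'
    new = black.format_str(old, mode=black.FileMode())

    # Equal dumps: the reformat changed layout, not behavior.
    assert ast.dump(ast.parse(old)) == ast.dump(ast.parse(new))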
Some files were not shown because too many files have changed in this diff.