Merge pull request #3361 from mcepl/denose

Port test suite from nose to pytest

commit 9db62d32bf

Makefile (6 changed lines)
@@ -7,7 +7,7 @@ ifeq ($(TEST_SERVER_MODE), true)
# exclude test_kinesisvideoarchivedmedia
# because testing with moto_server is difficult with data-endpoint
TEST_EXCLUDE := --exclude='test_iot.*' --exclude="test_kinesisvideoarchivedmedia.*"
TEST_EXCLUDE := -k 'not (test_iot or test_kinesisvideoarchivedmedia)'
else
TEST_EXCLUDE :=
endif
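The hunk above swaps nose's regex-based --exclude options for a single pytest keyword expression. As an illustration only (not part of the diff), the same selection can be driven from Python through pytest's own entry point; the paths and the expression are copied from the Makefile, everything else is hypothetical:

import pytest

# Sketch: run the suite verbosely while deselecting the two problematic test modules.
pytest.main(["-sv", "-k", "not (test_iot or test_kinesisvideoarchivedmedia)", "tests/"])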
@@ -23,13 +23,13 @@ lint:
test-only:
rm -f .coverage
rm -rf cover
@nosetests -sv --with-coverage --cover-html ./tests/ $(TEST_EXCLUDE)
@pytest -sv --cov=moto --cov-report html ./tests/ $(TEST_EXCLUDE)

test: lint test-only

test_server:
@TEST_SERVER_MODE=true nosetests -sv --with-coverage --cover-html ./tests/
@TEST_SERVER_MODE=true pytest -sv --cov=moto --cov-report html ./tests/

aws_managed_policies:
scripts/update_managed_policies.py
docs/conf.py (150 changed lines)
@@ -20,12 +20,12 @@ import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# sys.path.insert(0, os.path.abspath('.'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom

@@ -33,23 +33,23 @@ import shlex
extensions = []

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
source_suffix = ".rst"

# The encoding of source files.
#source_encoding = 'utf-8-sig'
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'
master_doc = "index"

# General information about the project.
project = 'Moto'
copyright = '2015, Steve Pulec'
author = 'Steve Pulec'
project = "Moto"
copyright = "2015, Steve Pulec"
author = "Steve Pulec"

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the

@@ -57,6 +57,7 @@ author = 'Steve Pulec'
#
# The short X.Y version.
import moto

version = moto.__version__
# The full version, including alpha/beta/rc tags.
release = moto.__version__

@@ -70,37 +71,37 @@ language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
exclude_patterns = ["_build"]

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
pygments_style = "sphinx"

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# keep_warnings = False

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

@@ -110,156 +111,149 @@ todo_include_todos = False
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
html_theme = "sphinx_rtd_theme"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_static_path = ["_static"]

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True
# html_domain_indices = True

# If false, no index is generated.
#html_use_index = True
# html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False
# html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# html_file_suffix = None

# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr'
#html_search_language = 'en'
# html_search_language = 'en'

# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# html_search_options = {'type': 'default'}

# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# html_search_scorer = 'scorer.js'

# Output file base name for HTML help builder.
htmlhelp_basename = 'Motodoc'
htmlhelp_basename = "Motodoc"

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Moto.tex', 'Moto Documentation',
'Steve Pulec', 'manual'),
(master_doc, "Moto.tex", "Moto Documentation", "Steve Pulec", "manual"),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []
# latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True
# latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'moto', 'Moto Documentation',
[author], 1)
]
man_pages = [(master_doc, "moto", "Moto Documentation", [author], 1)]

# If true, show URL addresses after external links.
#man_show_urls = False
# man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

@@ -268,19 +262,25 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Moto', 'Moto Documentation',
author, 'Moto', 'One line description of project.',
'Miscellaneous'),
(
master_doc,
"Moto",
"Moto Documentation",
author,
"Moto",
"One line description of project.",
"Miscellaneous",
),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True
# texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# texinfo_no_detailmenu = False
@@ -110,8 +110,8 @@ class ApplicationAutoscalingBackend(BaseBackend):
return r_id in self.targets.get(dimension, [])

def _ecs_service_exists_for_target(self, r_id):
""" Raises a ValidationException if an ECS service does not exist
for the specified resource ID.
"""Raises a ValidationException if an ECS service does not exist
for the specified resource ID.
"""
resource_type, cluster, service = r_id.split("/")
result = self.ecs_backend.describe_services(cluster, [service])

@@ -96,8 +96,8 @@ class ApplicationAutoScalingResponse(BaseResponse):
return json.dumps({})

def _validate_params(self):
""" Validate parameters.
TODO Integrate this validation with the validation in models.py
"""Validate parameters.
TODO Integrate this validation with the validation in models.py
"""
namespace = self._get_param("ServiceNamespace")
dimension = self._get_param("ScalableDimension")

@@ -863,7 +863,7 @@ class AutoScalingBackend(BaseBackend):
self.set_desired_capacity(group_name, desired_capacity)

def change_capacity_percent(self, group_name, scaling_adjustment):
""" http://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide/as-scale-based-on-demand.html
"""http://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide/as-scale-based-on-demand.html
If PercentChangeInCapacity returns a value between 0 and 1,
Auto Scaling will round it off to 1. If the PercentChangeInCapacity
returns a value greater than 1, Auto Scaling will round it off to the
@@ -41,8 +41,7 @@ def random_suffix():

def yaml_tag_constructor(loader, tag, node):
"""convert shorthand intrinsic function to full name
"""
"""convert shorthand intrinsic function to full name"""

def _f(loader, tag, node):
if tag == "!GetAtt":
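Background for the hunk above, illustration only (not part of the diff): the CloudFormation shorthand tags that yaml_tag_constructor handles are assumed to expand to their long-form intrinsic functions, e.g. for "!GetAtt" (the "MyBucket"/"Arn" names are hypothetical):

# Template shorthand:   Value: !GetAtt MyBucket.Arn
# Assumed long form after expansion:
expanded = {"Value": {"Fn::GetAtt": ["MyBucket", "Arn"]}}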
@@ -1006,9 +1006,9 @@ class ConfigBackend(BaseBackend):
def get_resource_config_history(self, resource_type, id, backend_region):
"""Returns the configuration of an item in the AWS Config format of the resource for the current regional backend.

NOTE: This is --NOT-- returning history as it is not supported in moto at this time. (PR's welcome!)
As such, the later_time, earlier_time, limit, and next_token are ignored as this will only
return 1 item. (If no items, it raises an exception)
NOTE: This is --NOT-- returning history as it is not supported in moto at this time. (PR's welcome!)
As such, the later_time, earlier_time, limit, and next_token are ignored as this will only
return 1 item. (If no items, it raises an exception)
"""
# If the type isn't implemented then we won't find the item:
if resource_type not in RESOURCE_MAP:

@@ -1090,10 +1090,10 @@ class ConfigBackend(BaseBackend):
):
"""Returns the configuration of an item in the AWS Config format of the resource for the current regional backend.

As far a moto goes -- the only real difference between this function and the `batch_get_resource_config` function is that
this will require a Config Aggregator be set up a priori and can search based on resource regions.
As far a moto goes -- the only real difference between this function and the `batch_get_resource_config` function is that
this will require a Config Aggregator be set up a priori and can search based on resource regions.

Note: moto will IGNORE the resource account ID in the search query.
Note: moto will IGNORE the resource account ID in the search query.
"""
if not self.config_aggregators.get(aggregator_name):
raise NoSuchConfigurationAggregatorException()

@@ -62,9 +62,9 @@ def _decode_dict(d):

class DynamicDictLoader(DictLoader):
"""
Note: There's a bug in jinja2 pre-2.7.3 DictLoader where caching does not work.
Including the fixed (current) method version here to ensure performance benefit
even for those using older jinja versions.
Note: There's a bug in jinja2 pre-2.7.3 DictLoader where caching does not work.
Including the fixed (current) method version here to ensure performance benefit
even for those using older jinja versions.
"""

def get_source(self, environment, template):
@@ -16,7 +16,7 @@ REQUEST_ID_LONG = string.digits + string.ascii_uppercase

def camelcase_to_underscores(argument):
""" Converts a camelcase param like theNewAttribute to the equivalent
"""Converts a camelcase param like theNewAttribute to the equivalent
python underscore variable like the_new_attribute"""
result = ""
prev_char_title = True

@@ -42,9 +42,9 @@ def camelcase_to_underscores(argument):

def underscores_to_camelcase(argument):
""" Converts a camelcase param like the_new_attribute to the equivalent
"""Converts a camelcase param like the_new_attribute to the equivalent
camelcase version like theNewAttribute. Note that the first letter is
NOT capitalized by this function """
NOT capitalized by this function"""
result = ""
previous_was_underscore = False
for char in argument:
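Illustration only (not part of the diff): the behaviour described by the two docstrings above, written as plain assertions.

assert camelcase_to_underscores("theNewAttribute") == "the_new_attribute"
assert underscores_to_camelcase("the_new_attribute") == "theNewAttribute"  # first letter is not capitalized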
@@ -1059,7 +1059,7 @@ class InstanceBackend(object):
return instance

def get_reservations_by_instance_ids(self, instance_ids, filters=None):
""" Go through all of the reservations and filter to only return those
"""Go through all of the reservations and filter to only return those
associated with the given instance_ids.
"""
reservations = []

@@ -1358,9 +1358,9 @@ class Ami(TaggedEC2Resource):
elif source_ami:
"""
http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/CopyingAMIs.html
"We don't copy launch permissions, user-defined tags, or Amazon S3 bucket permissions from the source AMI to the new AMI."
~ 2014.09.29
http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/CopyingAMIs.html
"We don't copy launch permissions, user-defined tags, or Amazon S3 bucket permissions from the source AMI to the new AMI."
~ 2014.09.29
"""
self.virtualization_type = source_ami.virtualization_type
self.architecture = source_ami.architecture

@@ -1518,9 +1518,9 @@ class AmiBackend(object):
# If anything is invalid, nothing is added. (No partial success.)
if user_ids:
"""
AWS docs:
"The AWS account ID is a 12-digit number, such as 123456789012, that you use to construct Amazon Resource Names (ARNs)."
http://docs.aws.amazon.com/general/latest/gr/acct-identifiers.html
AWS docs:
"The AWS account ID is a 12-digit number, such as 123456789012, that you use to construct Amazon Resource Names (ARNs)."
http://docs.aws.amazon.com/general/latest/gr/acct-identifiers.html
"""
for user_id in user_ids:
if len(user_id) != 12 or not user_id.isdigit():
@@ -43,14 +43,14 @@ def steps_from_query_string(querystring_dict):
class Unflattener:
@staticmethod
def unflatten_complex_params(input_dict, param_name):
""" Function to unflatten (portions of) dicts with complex keys. The moto request parser flattens the incoming
"""Function to unflatten (portions of) dicts with complex keys. The moto request parser flattens the incoming
request bodies, which is generally helpful, but for nested dicts/lists can result in a hard-to-manage
parameter exposion. This function allows one to selectively unflatten a set of dict keys, replacing them
with a deep dist/list structure named identically to the root component in the complex name.

Complex keys are composed of multiple components
separated by periods. Components may be prefixed with _, which is stripped. Lists indexes are represented
with two components, 'member' and the index number. """
with two components, 'member' and the index number."""
items_to_process = {}
for k in input_dict.keys():
if k.startswith(param_name):
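Illustration only (not part of the diff): the kind of transformation the docstring above describes, using hypothetical key names. Flattened keys are dotted, and list indexes use a 'member' component plus an index:

flattened = {
    "Steps.member.1.Name": "Setup",
    "Steps.member.1.ActionOnFailure": "CONTINUE",
    "Steps.member.2.Name": "Run",
}
# After unflattening the "Steps" parameter, the structure would look roughly like:
unflattened = {"Steps": [{"Name": "Setup", "ActionOnFailure": "CONTINUE"}, {"Name": "Run"}]}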
@@ -114,8 +114,7 @@ class FakeShadow(BaseModel):
}

def to_dict(self, include_delta=True):
"""returning nothing except for just top-level keys for now.
"""
"""returning nothing except for just top-level keys for now."""
if self.deleted:
return {"timestamp": self.timestamp, "version": self.version}
delta = self.parse_payload(self.desired, self.reported)

@@ -269,13 +269,13 @@ class fakesock(object):
_sock=None,
):
"""
Matches both the Python 2 API:
def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, _sock=None):
https://github.com/python/cpython/blob/2.7/Lib/socket.py
Matches both the Python 2 API:
def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, _sock=None):
https://github.com/python/cpython/blob/2.7/Lib/socket.py

and the Python 3 API:
def __init__(self, family=-1, type=-1, proto=-1, fileno=None):
https://github.com/python/cpython/blob/3.5/Lib/socket.py
and the Python 3 API:
def __init__(self, family=-1, type=-1, proto=-1, fileno=None):
https://github.com/python/cpython/blob/3.5/Lib/socket.py
"""
if httpretty.allow_net_connect:
if PY3:

@@ -98,7 +98,7 @@ def undo_clean_key_name(key_name):

class _VersionedKeyStore(dict):

""" A simplified/modified version of Django's `MultiValueDict` taken from:
"""A simplified/modified version of Django's `MultiValueDict` taken from:
https://github.com/django/django/blob/70576740b0bb5289873f5a9a9a4e1a26b2c330e5/django/utils/datastructures.py#L282
"""

@@ -580,7 +580,9 @@ class SageMakerModelBackend(BaseBackend):
message = "Could not find model '{}'.".format(
Model.arn_for_model_name(model_name, self.region_name)
)
raise ValidationError(message=message)
raise RESTError(
error_type="ValidationException", message=message, template="error_json",
)

def list_models(self):
models = []
@@ -1,4 +1,4 @@
nose
pytest
pytest-cov
sure==1.4.11
freezegun
parameterized>=0.7.0
@@ -3,35 +3,64 @@ import json

# Taken from free tier list when creating an instance
instances = [
'ami-760aaa0f', 'ami-bb9a6bc2', 'ami-35e92e4c', 'ami-785db401', 'ami-b7e93bce', 'ami-dca37ea5', 'ami-999844e0',
'ami-9b32e8e2', 'ami-f8e54081', 'ami-bceb39c5', 'ami-03cf127a', 'ami-1ecc1e67', 'ami-c2ff2dbb', 'ami-12c6146b',
'ami-d1cb19a8', 'ami-61db0918', 'ami-56ec3e2f', 'ami-84ee3cfd', 'ami-86ee3cff', 'ami-f0e83a89', 'ami-1f12c066',
'ami-afee3cd6', 'ami-1812c061', 'ami-77ed3f0e', 'ami-3bf32142', 'ami-6ef02217', 'ami-f4cf1d8d', 'ami-3df32144',
'ami-c6f321bf', 'ami-24f3215d', 'ami-fa7cdd89', 'ami-1e749f67', 'ami-a9cc1ed0', 'ami-8104a4f8'
"ami-760aaa0f",
"ami-bb9a6bc2",
"ami-35e92e4c",
"ami-785db401",
"ami-b7e93bce",
"ami-dca37ea5",
"ami-999844e0",
"ami-9b32e8e2",
"ami-f8e54081",
"ami-bceb39c5",
"ami-03cf127a",
"ami-1ecc1e67",
"ami-c2ff2dbb",
"ami-12c6146b",
"ami-d1cb19a8",
"ami-61db0918",
"ami-56ec3e2f",
"ami-84ee3cfd",
"ami-86ee3cff",
"ami-f0e83a89",
"ami-1f12c066",
"ami-afee3cd6",
"ami-1812c061",
"ami-77ed3f0e",
"ami-3bf32142",
"ami-6ef02217",
"ami-f4cf1d8d",
"ami-3df32144",
"ami-c6f321bf",
"ami-24f3215d",
"ami-fa7cdd89",
"ami-1e749f67",
"ami-a9cc1ed0",
"ami-8104a4f8",
]

client = boto3.client('ec2', region_name='eu-west-1')
client = boto3.client("ec2", region_name="eu-west-1")

test = client.describe_images(ImageIds=instances)

result = []
for image in test['Images']:
for image in test["Images"]:
try:
tmp = {
'ami_id': image['ImageId'],
'name': image['Name'],
'description': image['Description'],
'owner_id': image['OwnerId'],
'public': image['Public'],
'virtualization_type': image['VirtualizationType'],
'architecture': image['Architecture'],
'state': image['State'],
'platform': image.get('Platform'),
'image_type': image['ImageType'],
'hypervisor': image['Hypervisor'],
'root_device_name': image['RootDeviceName'],
'root_device_type': image['RootDeviceType'],
'sriov': image.get('SriovNetSupport', 'simple')
"ami_id": image["ImageId"],
"name": image["Name"],
"description": image["Description"],
"owner_id": image["OwnerId"],
"public": image["Public"],
"virtualization_type": image["VirtualizationType"],
"architecture": image["Architecture"],
"state": image["State"],
"platform": image.get("Platform"),
"image_type": image["ImageType"],
"hypervisor": image["Hypervisor"],
"root_device_name": image["RootDeviceName"],
"root_device_type": image["RootDeviceType"],
"sriov": image.get("SriovNetSupport", "simple"),
}
result.append(tmp)
except Exception as err:
@@ -7,12 +7,18 @@ import boto3

script_dir = os.path.dirname(os.path.abspath(__file__))
alternative_service_names = {'lambda': 'awslambda', 'dynamodb': 'dynamodb2'}
alternative_service_names = {"lambda": "awslambda", "dynamodb": "dynamodb2"}

def get_moto_implementation(service_name):
service_name = service_name.replace("-", "") if "-" in service_name else service_name
alt_service_name = alternative_service_names[service_name] if service_name in alternative_service_names else service_name
service_name = (
service_name.replace("-", "") if "-" in service_name else service_name
)
alt_service_name = (
alternative_service_names[service_name]
if service_name in alternative_service_names
else service_name
)
if hasattr(moto, "mock_{}".format(alt_service_name)):
mock = getattr(moto, "mock_{}".format(alt_service_name))
elif hasattr(moto, "mock_{}".format(service_name)):
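Illustration only (not part of the diff): with the alternative_service_names mapping above, a boto3 service name such as "lambda" resolves to the moto module name "awslambda", so the lookup tries moto.mock_awslambda first; unmapped names pass through unchanged.

assert alternative_service_names.get("lambda", "lambda") == "awslambda"
assert alternative_service_names.get("s3", "s3") == "s3"  # no alternative registered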
@@ -31,11 +37,13 @@ def calculate_implementation_coverage():
coverage = {}
for service_name in service_names:
moto_client = get_moto_implementation(service_name)
real_client = boto3.client(service_name, region_name='us-east-1')
real_client = boto3.client(service_name, region_name="us-east-1")
implemented = []
not_implemented = []

operation_names = [xform_name(op) for op in real_client.meta.service_model.operation_names]
operation_names = [
xform_name(op) for op in real_client.meta.service_model.operation_names
]
for op in operation_names:
if moto_client and op in dir(moto_client):
implemented.append(op)

@@ -43,20 +51,22 @@ def calculate_implementation_coverage():
not_implemented.append(op)

coverage[service_name] = {
'implemented': implemented,
'not_implemented': not_implemented,
"implemented": implemented,
"not_implemented": not_implemented,
}
return coverage

def print_implementation_coverage(coverage):
for service_name in sorted(coverage):
implemented = coverage.get(service_name)['implemented']
not_implemented = coverage.get(service_name)['not_implemented']
implemented = coverage.get(service_name)["implemented"]
not_implemented = coverage.get(service_name)["not_implemented"]
operations = sorted(implemented + not_implemented)

if implemented and not_implemented:
percentage_implemented = int(100.0 * len(implemented) / (len(implemented) + len(not_implemented)))
percentage_implemented = int(
100.0 * len(implemented) / (len(implemented) + len(not_implemented))
)
elif implemented:
percentage_implemented = 100
else:

@@ -84,12 +94,14 @@ def write_implementation_coverage_to_file(coverage):
print("Writing to {}".format(implementation_coverage_file))
with open(implementation_coverage_file, "w+") as file:
for service_name in sorted(coverage):
implemented = coverage.get(service_name)['implemented']
not_implemented = coverage.get(service_name)['not_implemented']
implemented = coverage.get(service_name)["implemented"]
not_implemented = coverage.get(service_name)["not_implemented"]
operations = sorted(implemented + not_implemented)

if implemented and not_implemented:
percentage_implemented = int(100.0 * len(implemented) / (len(implemented) + len(not_implemented)))
percentage_implemented = int(
100.0 * len(implemented) / (len(implemented) + len(not_implemented))
)
elif implemented:
percentage_implemented = 100
else:
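Worked instance of the percentage formula above, with illustrative counts only:

implemented_count, not_implemented_count = 3, 7
int(100.0 * implemented_count / (implemented_count + not_implemented_count))  # -> 30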
@@ -98,7 +110,9 @@ def write_implementation_coverage_to_file(coverage):
file.write("\n")
file.write("## {}\n".format(service_name))
file.write("<details>\n")
file.write("<summary>{}% implemented</summary>\n\n".format(percentage_implemented))
file.write(
"<summary>{}% implemented</summary>\n\n".format(percentage_implemented)
)
for op in operations:
if op in implemented:
file.write("- [X] {}\n".format(op))

@@ -107,7 +121,7 @@ def write_implementation_coverage_to_file(coverage):
file.write("</details>\n")

if __name__ == '__main__':
if __name__ == "__main__":
cov = calculate_implementation_coverage()
write_implementation_coverage_to_file(cov)
print_implementation_coverage(cov)
@@ -17,9 +17,7 @@ from lxml import etree

import click
import jinja2
from prompt_toolkit import (
prompt
)
from prompt_toolkit import prompt
from prompt_toolkit.completion import WordCompleter
from prompt_toolkit.shortcuts import print_formatted_text

@@ -29,35 +27,35 @@ import boto3

from moto.core.responses import BaseResponse
from moto.core import BaseBackend
from implementation_coverage import (
get_moto_implementation
)
from implementation_coverage import get_moto_implementation
from inflection import singularize

TEMPLATE_DIR = os.path.join(os.path.dirname(__file__), './template')
TEMPLATE_DIR = os.path.join(os.path.dirname(__file__), "./template")

INPUT_IGNORED_IN_BACKEND = ['Marker', 'PageSize']
OUTPUT_IGNORED_IN_BACKEND = ['NextMarker']
INPUT_IGNORED_IN_BACKEND = ["Marker", "PageSize"]
OUTPUT_IGNORED_IN_BACKEND = ["NextMarker"]

def print_progress(title, body, color):
click.secho(u'\t{}\t'.format(title), fg=color, nl=False)
click.secho(u"\t{}\t".format(title), fg=color, nl=False)
click.echo(body)

def select_service_and_operation():
service_names = Session().get_available_services()
service_completer = WordCompleter(service_names)
service_name = prompt(u'Select service: ', completer=service_completer)
service_name = prompt(u"Select service: ", completer=service_completer)
if service_name not in service_names:
click.secho(u'{} is not valid service'.format(service_name), fg='red')
click.secho(u"{} is not valid service".format(service_name), fg="red")
raise click.Abort()
moto_client = get_moto_implementation(service_name)
real_client = boto3.client(service_name, region_name='us-east-1')
real_client = boto3.client(service_name, region_name="us-east-1")
implemented = []
not_implemented = []

operation_names = [xform_name(op) for op in real_client.meta.service_model.operation_names]
operation_names = [
xform_name(op) for op in real_client.meta.service_model.operation_names
]
for op in operation_names:
if moto_client and op in dir(moto_client):
implemented.append(op)

@@ -65,133 +63,148 @@ def select_service_and_operation():
not_implemented.append(op)
operation_completer = WordCompleter(operation_names)

click.echo('==Current Implementation Status==')
click.echo("==Current Implementation Status==")
for operation_name in operation_names:
check = 'X' if operation_name in implemented else ' '
click.secho('[{}] {}'.format(check, operation_name))
click.echo('=================================')
operation_name = prompt(u'Select Operation: ', completer=operation_completer)
check = "X" if operation_name in implemented else " "
click.secho("[{}] {}".format(check, operation_name))
click.echo("=================================")
operation_name = prompt(u"Select Operation: ", completer=operation_completer)

if operation_name not in operation_names:
click.secho('{} is not valid operation'.format(operation_name), fg='red')
click.secho("{} is not valid operation".format(operation_name), fg="red")
raise click.Abort()

if operation_name in implemented:
click.secho('{} is already implemented'.format(operation_name), fg='red')
click.secho("{} is already implemented".format(operation_name), fg="red")
raise click.Abort()
return service_name, operation_name

def get_escaped_service(service):
return service.replace('-', '')
return service.replace("-", "")

def get_lib_dir(service):
return os.path.join('moto', get_escaped_service(service))
return os.path.join("moto", get_escaped_service(service))

def get_test_dir(service):
return os.path.join('tests', 'test_{}'.format(get_escaped_service(service)))
return os.path.join("tests", "test_{}".format(get_escaped_service(service)))

def render_template(tmpl_dir, tmpl_filename, context, service, alt_filename=None):
is_test = True if 'test' in tmpl_dir else False
rendered = jinja2.Environment(
loader=jinja2.FileSystemLoader(tmpl_dir)
).get_template(tmpl_filename).render(context)
is_test = True if "test" in tmpl_dir else False
rendered = (
jinja2.Environment(loader=jinja2.FileSystemLoader(tmpl_dir))
.get_template(tmpl_filename)
.render(context)
)

dirname = get_test_dir(service) if is_test else get_lib_dir(service)
filename = alt_filename or os.path.splitext(tmpl_filename)[0]
filepath = os.path.join(dirname, filename)

if os.path.exists(filepath):
print_progress('skip creating', filepath, 'yellow')
print_progress("skip creating", filepath, "yellow")
else:
print_progress('creating', filepath, 'green')
with open(filepath, 'w') as f:
print_progress("creating", filepath, "green")
with open(filepath, "w") as f:
f.write(rendered)

def append_mock_to_init_py(service):
path = os.path.join(os.path.dirname(__file__), '..', 'moto', '__init__.py')
path = os.path.join(os.path.dirname(__file__), "..", "moto", "__init__.py")
with open(path) as f:
lines = [_.replace('\n', '') for _ in f.readlines()]
lines = [_.replace("\n", "") for _ in f.readlines()]

if any(_ for _ in lines if re.match('^mock_{}.*lazy_load(.*)$'.format(service), _)):
if any(_ for _ in lines if re.match("^mock_{}.*lazy_load(.*)$".format(service), _)):
return
filtered_lines = [_ for _ in lines if re.match('^mock_.*lazy_load(.*)$', _)]
filtered_lines = [_ for _ in lines if re.match("^mock_.*lazy_load(.*)$", _)]
last_import_line_index = lines.index(filtered_lines[-1])

new_line = 'mock_{} = lazy_load(".{}", "mock_{}")'.format(get_escaped_service(service), get_escaped_service(service), get_escaped_service(service))
new_line = 'mock_{} = lazy_load(".{}", "mock_{}")'.format(
get_escaped_service(service),
get_escaped_service(service),
get_escaped_service(service),
)
lines.insert(last_import_line_index + 1, new_line)

body = '\n'.join(lines) + '\n'
with open(path, 'w') as f:
body = "\n".join(lines) + "\n"
with open(path, "w") as f:
f.write(body)

def append_mock_dict_to_backends_py(service):
path = os.path.join(os.path.dirname(__file__), '..', 'moto', 'backends.py')
path = os.path.join(os.path.dirname(__file__), "..", "moto", "backends.py")
with open(path) as f:
lines = [_.replace('\n', '') for _ in f.readlines()]
lines = [_.replace("\n", "") for _ in f.readlines()]

if any(_ for _ in lines if re.match(".*\"{}\": {}_backends.*".format(service, service), _)):
if any(
_
for _ in lines
if re.match('.*"{}": {}_backends.*'.format(service, service), _)
):
return
filtered_lines = [_ for _ in lines if re.match(".*\".*\":.*_backends.*", _)]
filtered_lines = [_ for _ in lines if re.match('.*".*":.*_backends.*', _)]
last_elem_line_index = lines.index(filtered_lines[-1])

new_line = " \"{}\": (\"{}\", \"{}_backends\"),".format(service, get_escaped_service(service), get_escaped_service(service))
new_line = ' "{}": ("{}", "{}_backends"),'.format(
service, get_escaped_service(service), get_escaped_service(service)
)
prev_line = lines[last_elem_line_index]
if not prev_line.endswith('{') and not prev_line.endswith(','):
lines[last_elem_line_index] += ','
if not prev_line.endswith("{") and not prev_line.endswith(","):
lines[last_elem_line_index] += ","
lines.insert(last_elem_line_index + 1, new_line)

body = '\n'.join(lines) + '\n'
with open(path, 'w') as f:
body = "\n".join(lines) + "\n"
with open(path, "w") as f:
f.write(body)

def initialize_service(service, operation, api_protocol):
"""create lib and test dirs if not exist
"""
"""create lib and test dirs if not exist"""
lib_dir = get_lib_dir(service)
test_dir = get_test_dir(service)

print_progress('Initializing service', service, 'green')
print_progress("Initializing service", service, "green")

client = boto3.client(service)
service_class = client.__class__.__name__
endpoint_prefix = client._service_model.endpoint_prefix

tmpl_context = {
'service': service,
'service_class': service_class,
'endpoint_prefix': endpoint_prefix,
'api_protocol': api_protocol,
'escaped_service': get_escaped_service(service)
"service": service,
"service_class": service_class,
"endpoint_prefix": endpoint_prefix,
"api_protocol": api_protocol,
"escaped_service": get_escaped_service(service),
}

# initialize service directory
if os.path.exists(lib_dir):
print_progress('skip creating', lib_dir, 'yellow')
print_progress("skip creating", lib_dir, "yellow")
else:
print_progress('creating', lib_dir, 'green')
print_progress("creating", lib_dir, "green")
os.makedirs(lib_dir)

tmpl_dir = os.path.join(TEMPLATE_DIR, 'lib')
tmpl_dir = os.path.join(TEMPLATE_DIR, "lib")
for tmpl_filename in os.listdir(tmpl_dir):
render_template(
tmpl_dir, tmpl_filename, tmpl_context, service
)
render_template(tmpl_dir, tmpl_filename, tmpl_context, service)

# initialize test directory
if os.path.exists(test_dir):
print_progress('skip creating', test_dir, 'yellow')
print_progress("skip creating", test_dir, "yellow")
else:
print_progress('creating', test_dir, 'green')
print_progress("creating", test_dir, "green")
os.makedirs(test_dir)
tmpl_dir = os.path.join(TEMPLATE_DIR, 'test')
tmpl_dir = os.path.join(TEMPLATE_DIR, "test")
for tmpl_filename in os.listdir(tmpl_dir):
alt_filename = 'test_{}.py'.format(get_escaped_service(service)) if tmpl_filename == 'test_service.py.j2' else None
render_template(
tmpl_dir, tmpl_filename, tmpl_context, service, alt_filename
alt_filename = (
"test_{}.py".format(get_escaped_service(service))
if tmpl_filename == "test_service.py.j2"
else None
)
render_template(tmpl_dir, tmpl_filename, tmpl_context, service, alt_filename)

# append mock to init files
append_mock_to_init_py(service)
@@ -199,22 +212,24 @@ def initialize_service(service, operation, api_protocol):

def to_upper_camel_case(s):
return ''.join([_.title() for _ in s.split('_')])
return "".join([_.title() for _ in s.split("_")])

def to_lower_camel_case(s):
words = s.split('_')
return ''.join(words[:1] + [_.title() for _ in words[1:]])
words = s.split("_")
return "".join(words[:1] + [_.title() for _ in words[1:]])

def to_snake_case(s):
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', s)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", s)
return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower()

def get_operation_name_in_keys(operation_name, operation_keys):
index = [_.lower() for _ in operation_keys].index(operation_name.lower())
return operation_keys[index]

def get_function_in_responses(service, operation, protocol):
"""refers to definition of API in botocore, and autogenerates function
You can see example of elbv2 from link below.
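Illustration only (not part of the diff): expected outputs of the case-conversion helpers in the hunk above, based on their implementations.

to_upper_camel_case("describe_instances")  # -> "DescribeInstances"
to_lower_camel_case("describe_instances")  # -> "describeInstances"
to_snake_case("DescribeInstances")         # -> "describe_instances"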
@@ -224,44 +239,56 @@ def get_function_in_responses(service, operation, protocol):

aws_operation_name = get_operation_name_in_keys(
to_upper_camel_case(operation),
list(client._service_model._service_description['operations'].keys())
list(client._service_model._service_description["operations"].keys()),
)

op_model = client._service_model.operation_model(aws_operation_name)
if not hasattr(op_model.output_shape, 'members'):
if not hasattr(op_model.output_shape, "members"):
outputs = {}
else:
outputs = op_model.output_shape.members
inputs = op_model.input_shape.members
input_names = [to_snake_case(_) for _ in inputs.keys() if _ not in INPUT_IGNORED_IN_BACKEND]
output_names = [to_snake_case(_) for _ in outputs.keys() if _ not in OUTPUT_IGNORED_IN_BACKEND]
body = '\ndef {}(self):\n'.format(operation)
input_names = [
to_snake_case(_) for _ in inputs.keys() if _ not in INPUT_IGNORED_IN_BACKEND
]
output_names = [
to_snake_case(_) for _ in outputs.keys() if _ not in OUTPUT_IGNORED_IN_BACKEND
]
body = "\ndef {}(self):\n".format(operation)

for input_name, input_type in inputs.items():
type_name = input_type.type_name
if type_name == 'integer':
if type_name == "integer":
arg_line_tmpl = ' {} = self._get_int_param("{}")\n'
elif type_name == 'list':
elif type_name == "list":
arg_line_tmpl = ' {} = self._get_list_prefix("{}.member")\n'
else:
arg_line_tmpl = ' {} = self._get_param("{}")\n'
body += arg_line_tmpl.format(to_snake_case(input_name), input_name)
if output_names:
body += ' {} = self.{}_backend.{}(\n'.format(', '.join(output_names), get_escaped_service(service), operation)
else:
body += ' self.{}_backend.{}(\n'.format(get_escaped_service(service), operation)
for input_name in input_names:
body += ' {}={},\n'.format(input_name, input_name)
body += ' )\n'
if protocol == 'query':
body += ' template = self.response_template({}_TEMPLATE)\n'.format(operation.upper())
body += ' return template.render({})\n'.format(
', '.join(['{}={}'.format(_, _) for _ in output_names])
body += " {} = self.{}_backend.{}(\n".format(
", ".join(output_names), get_escaped_service(service), operation
)
else:
body += " self.{}_backend.{}(\n".format(
get_escaped_service(service), operation
)
for input_name in input_names:
body += " {}={},\n".format(input_name, input_name)
body += " )\n"
if protocol == "query":
body += " template = self.response_template({}_TEMPLATE)\n".format(
operation.upper()
)
body += " return template.render({})\n".format(
", ".join(["{}={}".format(_, _) for _ in output_names])
)
elif protocol in ["json", "rest-json"]:
body += " # TODO: adjust response\n"
body += " return json.dumps(dict({}))\n".format(
", ".join(["{}={}".format(to_lower_camel_case(_), _) for _ in output_names])
)
elif protocol in ['json', 'rest-json']:
body += ' # TODO: adjust response\n'
body += ' return json.dumps(dict({}))\n'.format(', '.join(['{}={}'.format(to_lower_camel_case(_), _) for _ in output_names]))
return body
@@ -273,44 +300,55 @@ def get_function_in_models(service, operation):
client = boto3.client(service)
aws_operation_name = get_operation_name_in_keys(
to_upper_camel_case(operation),
list(client._service_model._service_description['operations'].keys())
list(client._service_model._service_description["operations"].keys()),
)
op_model = client._service_model.operation_model(aws_operation_name)
inputs = op_model.input_shape.members
if not hasattr(op_model.output_shape, 'members'):
if not hasattr(op_model.output_shape, "members"):
outputs = {}
else:
outputs = op_model.output_shape.members
input_names = [to_snake_case(_) for _ in inputs.keys() if _ not in INPUT_IGNORED_IN_BACKEND]
output_names = [to_snake_case(_) for _ in outputs.keys() if _ not in OUTPUT_IGNORED_IN_BACKEND]
input_names = [
to_snake_case(_) for _ in inputs.keys() if _ not in INPUT_IGNORED_IN_BACKEND
]
output_names = [
to_snake_case(_) for _ in outputs.keys() if _ not in OUTPUT_IGNORED_IN_BACKEND
]
if input_names:
body = 'def {}(self, {}):\n'.format(operation, ', '.join(input_names))
body = "def {}(self, {}):\n".format(operation, ", ".join(input_names))
else:
body = 'def {}(self)\n'
body += ' # implement here\n'
body += ' return {}\n\n'.format(', '.join(output_names))
body = "def {}(self)\n"
body += " # implement here\n"
body += " return {}\n\n".format(", ".join(output_names))

return body

def _get_subtree(name, shape, replace_list, name_prefix=[]):
class_name = shape.__class__.__name__
if class_name in ('StringShape', 'Shape'):
if class_name in ("StringShape", "Shape"):
t = etree.Element(name)
if name_prefix:
t.text = '{{ %s.%s }}' % (name_prefix[-1], to_snake_case(name))
t.text = "{{ %s.%s }}" % (name_prefix[-1], to_snake_case(name))
else:
t.text = '{{ %s }}' % to_snake_case(name)
t.text = "{{ %s }}" % to_snake_case(name)
return t
elif class_name in ('ListShape', ):
elif class_name in ("ListShape",):
replace_list.append((name, name_prefix))
t = etree.Element(name)
t_member = etree.Element('member')
t_member = etree.Element("member")
t.append(t_member)
for nested_name, nested_shape in shape.member.members.items():
t_member.append(_get_subtree(nested_name, nested_shape, replace_list, name_prefix + [singularize(name.lower())]))
t_member.append(
_get_subtree(
nested_name,
nested_shape,
replace_list,
name_prefix + [singularize(name.lower())],
)
)
return t
raise ValueError('Not supported Shape')
raise ValueError("Not supported Shape")

def get_response_query_template(service, operation):
@@ -323,22 +361,22 @@ def get_response_query_template(service, operation):
client = boto3.client(service)
aws_operation_name = get_operation_name_in_keys(
to_upper_camel_case(operation),
list(client._service_model._service_description['operations'].keys())
list(client._service_model._service_description["operations"].keys()),
)

op_model = client._service_model.operation_model(aws_operation_name)
result_wrapper = op_model.output_shape.serialization['resultWrapper']
response_wrapper = result_wrapper.replace('Result', 'Response')
result_wrapper = op_model.output_shape.serialization["resultWrapper"]
response_wrapper = result_wrapper.replace("Result", "Response")
metadata = op_model.metadata
xml_namespace = metadata['xmlNamespace']
xml_namespace = metadata["xmlNamespace"]

# build xml tree
t_root = etree.Element(response_wrapper, xmlns=xml_namespace)
t_root = etree.Element(response_wrapper, xmlns=xml_namespace)

# build metadata
t_metadata = etree.Element('ResponseMetadata')
t_request_id = etree.Element('RequestId')
t_request_id.text = '1549581b-12b7-11e3-895e-1334aEXAMPLE'
t_metadata = etree.Element("ResponseMetadata")
t_request_id = etree.Element("RequestId")
t_request_id.text = "1549581b-12b7-11e3-895e-1334aEXAMPLE"
t_metadata.append(t_request_id)
t_root.append(t_metadata)
@@ -349,68 +387,73 @@ def get_response_query_template(service, operation):
for output_name, output_shape in outputs.items():
t_result.append(_get_subtree(output_name, output_shape, replace_list))
t_root.append(t_result)
xml_body = etree.tostring(t_root, pretty_print=True).decode('utf-8')
xml_body = etree.tostring(t_root, pretty_print=True).decode("utf-8")
xml_body_lines = xml_body.splitlines()
for replace in replace_list:
name = replace[0]
prefix = replace[1]
singular_name = singularize(name)

start_tag = '<%s>' % name
iter_name = '{}.{}'.format(prefix[-1], name.lower())if prefix else name.lower()
loop_start = '{%% for %s in %s %%}' % (singular_name.lower(), iter_name)
end_tag = '</%s>' % name
loop_end = '{{ endfor }}'
start_tag = "<%s>" % name
iter_name = "{}.{}".format(prefix[-1], name.lower()) if prefix else name.lower()
loop_start = "{%% for %s in %s %%}" % (singular_name.lower(), iter_name)
end_tag = "</%s>" % name
loop_end = "{{ endfor }}"

start_tag_indexes = [i for i, l in enumerate(xml_body_lines) if start_tag in l]
if len(start_tag_indexes) != 1:
raise Exception('tag %s not found in response body' % start_tag)
raise Exception("tag %s not found in response body" % start_tag)
start_tag_index = start_tag_indexes[0]
xml_body_lines.insert(start_tag_index + 1, loop_start)

end_tag_indexes = [i for i, l in enumerate(xml_body_lines) if end_tag in l]
if len(end_tag_indexes) != 1:
raise Exception('tag %s not found in response body' % end_tag)
raise Exception("tag %s not found in response body" % end_tag)
end_tag_index = end_tag_indexes[0]
xml_body_lines.insert(end_tag_index, loop_end)
xml_body = '\n'.join(xml_body_lines)
xml_body = "\n".join(xml_body_lines)
body = '\n{}_TEMPLATE = """{}"""'.format(operation.upper(), xml_body)
return body

def insert_code_to_class(path, base_class, new_code):
with open(path) as f:
lines = [_.replace('\n', '') for _ in f.readlines()]
mod_path = os.path.splitext(path)[0].replace('/', '.')
lines = [_.replace("\n", "") for _ in f.readlines()]
mod_path = os.path.splitext(path)[0].replace("/", ".")
mod = importlib.import_module(mod_path)
clsmembers = inspect.getmembers(mod, inspect.isclass)
_response_cls = [_[1] for _ in clsmembers if issubclass(_[1], base_class) and _[1] != base_class]
_response_cls = [
_[1] for _ in clsmembers if issubclass(_[1], base_class) and _[1] != base_class
]
if len(_response_cls) != 1:
raise Exception('unknown error, number of clsmembers is not 1')
raise Exception("unknown error, number of clsmembers is not 1")
response_cls = _response_cls[0]
code_lines, line_no = inspect.getsourcelines(response_cls)
end_line_no = line_no + len(code_lines)

func_lines = [' ' * 4 + _ for _ in new_code.splitlines()]
func_lines = [" " * 4 + _ for _ in new_code.splitlines()]

lines = lines[:end_line_no] + func_lines + lines[end_line_no:]

body = '\n'.join(lines) + '\n'
with open(path, 'w') as f:
body = "\n".join(lines) + "\n"
with open(path, "w") as f:
f.write(body)

def insert_url(service, operation, api_protocol):
client = boto3.client(service)
service_class = client.__class__.__name__
aws_operation_name = get_operation_name_in_keys(
to_upper_camel_case(operation),
list(client._service_model._service_description['operations'].keys())
list(client._service_model._service_description["operations"].keys()),
)
uri = client._service_model.operation_model(aws_operation_name).http['requestUri']
uri = client._service_model.operation_model(aws_operation_name).http["requestUri"]

path = os.path.join(os.path.dirname(__file__), '..', 'moto', get_escaped_service(service), 'urls.py')
path = os.path.join(
os.path.dirname(__file__), "..", "moto", get_escaped_service(service), "urls.py"
)
with open(path) as f:
lines = [_.replace('\n', '') for _ in f.readlines()]
lines = [_.replace("\n", "") for _ in f.readlines()]

if any(_ for _ in lines if re.match(uri, _)):
return
@ -418,50 +461,49 @@ def insert_url(service, operation, api_protocol):
|
||||
url_paths_found = False
|
||||
last_elem_line_index = -1
|
||||
for i, line in enumerate(lines):
|
||||
if line.startswith('url_paths'):
|
||||
if line.startswith("url_paths"):
|
||||
url_paths_found = True
|
||||
if url_paths_found and line.startswith('}'):
|
||||
if url_paths_found and line.startswith("}"):
|
||||
last_elem_line_index = i - 1
|
||||
|
||||
prev_line = lines[last_elem_line_index]
|
||||
if not prev_line.endswith('{') and not prev_line.endswith(','):
|
||||
lines[last_elem_line_index] += ','
|
||||
if not prev_line.endswith("{") and not prev_line.endswith(","):
|
||||
lines[last_elem_line_index] += ","
|
||||
|
||||
# generate url pattern
|
||||
if api_protocol == 'rest-json':
|
||||
if api_protocol == "rest-json":
|
||||
new_line = " '{0}/.*$': response.dispatch,"
|
||||
else:
|
||||
new_line = " '{0}%s$': %sResponse.dispatch," % (
|
||||
uri, service_class
|
||||
)
|
||||
new_line = " '{0}%s$': %sResponse.dispatch," % (uri, service_class)
|
||||
if new_line in lines:
|
||||
return
|
||||
lines.insert(last_elem_line_index + 1, new_line)
|
||||
|
||||
body = '\n'.join(lines) + '\n'
|
||||
with open(path, 'w') as f:
|
||||
body = "\n".join(lines) + "\n"
|
||||
with open(path, "w") as f:
|
||||
f.write(body)
|
||||
|
||||
|
||||
def insert_codes(service, operation, api_protocol):
|
||||
func_in_responses = get_function_in_responses(service, operation, api_protocol)
|
||||
func_in_models = get_function_in_models(service, operation)
|
||||
# edit responses.py
|
||||
responses_path = 'moto/{}/responses.py'.format(get_escaped_service(service))
|
||||
print_progress('inserting code', responses_path, 'green')
|
||||
responses_path = "moto/{}/responses.py".format(get_escaped_service(service))
|
||||
print_progress("inserting code", responses_path, "green")
|
||||
insert_code_to_class(responses_path, BaseResponse, func_in_responses)
|
||||
|
||||
# insert template
|
||||
if api_protocol == 'query':
|
||||
if api_protocol == "query":
|
||||
template = get_response_query_template(service, operation)
|
||||
with open(responses_path) as f:
|
||||
lines = [_[:-1] for _ in f.readlines()]
|
||||
lines += template.splitlines()
|
||||
with open(responses_path, 'w') as f:
|
||||
f.write('\n'.join(lines))
|
||||
with open(responses_path, "w") as f:
|
||||
f.write("\n".join(lines))
|
||||
|
||||
# edit models.py
|
||||
models_path = 'moto/{}/models.py'.format(get_escaped_service(service))
|
||||
print_progress('inserting code', models_path, 'green')
|
||||
models_path = "moto/{}/models.py".format(get_escaped_service(service))
|
||||
print_progress("inserting code", models_path, "green")
|
||||
insert_code_to_class(models_path, BaseBackend, func_in_models)
|
||||
|
||||
# edit urls.py
|
||||
@ -471,15 +513,20 @@ def insert_codes(service, operation, api_protocol):
|
||||
@click.command()
|
||||
def main():
|
||||
service, operation = select_service_and_operation()
|
||||
api_protocol = boto3.client(service)._service_model.metadata['protocol']
|
||||
api_protocol = boto3.client(service)._service_model.metadata["protocol"]
|
||||
initialize_service(service, operation, api_protocol)
|
||||
|
||||
if api_protocol in ['query', 'json', 'rest-json']:
|
||||
if api_protocol in ["query", "json", "rest-json"]:
|
||||
insert_codes(service, operation, api_protocol)
|
||||
else:
|
||||
print_progress('skip inserting code', 'api protocol "{}" is not supported'.format(api_protocol), 'yellow')
|
||||
print_progress(
|
||||
"skip inserting code",
|
||||
'api protocol "{}" is not supported'.format(api_protocol),
|
||||
"yellow",
|
||||
)
|
||||
|
||||
click.echo('You will still need to add the mock into "__init__.py"'.format(service))
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
@ -23,42 +23,53 @@ def json_serial(obj):
|
||||
raise TypeError("Type not serializable")
|
||||
|
||||
|
||||
client = boto3.client('iam')
|
||||
client = boto3.client("iam")
|
||||
|
||||
policies = {}
|
||||
|
||||
paginator = client.get_paginator('list_policies')
|
||||
paginator = client.get_paginator("list_policies")
|
||||
try:
|
||||
response_iterator = paginator.paginate(Scope='AWS')
|
||||
response_iterator = paginator.paginate(Scope="AWS")
|
||||
for response in response_iterator:
|
||||
for policy in response['Policies']:
|
||||
policies[policy['PolicyName']] = policy
|
||||
for policy in response["Policies"]:
|
||||
policies[policy["PolicyName"]] = policy
|
||||
except NoCredentialsError:
|
||||
print("USAGE:")
|
||||
print("Put your AWS credentials into ~/.aws/credentials and run:")
|
||||
print(__file__)
|
||||
print("")
|
||||
print("Or specify them on the command line:")
|
||||
print("AWS_ACCESS_KEY_ID=your_personal_access_key AWS_SECRET_ACCESS_KEY=your_personal_secret {}".format(__file__))
|
||||
print(
|
||||
"AWS_ACCESS_KEY_ID=your_personal_access_key AWS_SECRET_ACCESS_KEY=your_personal_secret {}".format(
|
||||
__file__
|
||||
)
|
||||
)
|
||||
print("")
|
||||
sys.exit(1)
|
||||
|
||||
for policy_name in policies:
|
||||
response = client.get_policy_version(
|
||||
PolicyArn=policies[policy_name]['Arn'],
|
||||
VersionId=policies[policy_name]['DefaultVersionId'])
|
||||
for key in response['PolicyVersion']:
|
||||
if key != "CreateDate": # the policy's CreateDate should not be overwritten by its version's CreateDate
|
||||
policies[policy_name][key] = response['PolicyVersion'][key]
|
||||
PolicyArn=policies[policy_name]["Arn"],
|
||||
VersionId=policies[policy_name]["DefaultVersionId"],
|
||||
)
|
||||
for key in response["PolicyVersion"]:
|
||||
if (
|
||||
key != "CreateDate"
|
||||
): # the policy's CreateDate should not be overwritten by its version's CreateDate
|
||||
policies[policy_name][key] = response["PolicyVersion"][key]
|
||||
|
||||
with open(output_file, 'w') as f:
|
||||
triple_quote = '\"\"\"'
|
||||
with open(output_file, "w") as f:
|
||||
triple_quote = '"""'
|
||||
|
||||
f.write("# Imported via `make aws_managed_policies`\n")
|
||||
f.write('aws_managed_policies_data = {}\n'.format(triple_quote))
|
||||
f.write(json.dumps(policies,
|
||||
sort_keys=True,
|
||||
indent=4,
|
||||
separators=(',', ': '),
|
||||
default=json_serial))
|
||||
f.write('{}\n'.format(triple_quote))
|
||||
f.write("aws_managed_policies_data = {}\n".format(triple_quote))
|
||||
f.write(
|
||||
json.dumps(
|
||||
policies,
|
||||
sort_keys=True,
|
||||
indent=4,
|
||||
separators=(",", ": "),
|
||||
default=json_serial,
|
||||
)
|
||||
)
|
||||
f.write("{}\n".format(triple_quote))
|
||||
|

setup.cfg
@ -1,8 +1,6 @@
[nosetests]
verbosity=1
detailed-errors=1
with-coverage=1
cover-package=moto

[bdist_wheel]
universal=1

[tool:pytest]
markers =
    network: marks tests which require network connection
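The `network` marker registered above is how the suite can now tag tests that genuinely need outbound connectivity, and how CI can deselect them. A minimal sketch of both sides, assuming a hypothetical test and URL (not taken from this commit):

    import pytest
    import requests

    @pytest.mark.network
    def test_fetch_public_page():
        # hypothetical test that really does hit the network
        assert requests.get("https://example.com").status_code == 200

Such tests can then be skipped wholesale with `pytest -m "not network"`; registering the marker under `markers =` also keeps `--strict-markers` runs from rejecting it as unknown.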
70
setup.py
70
setup.py
@ -13,20 +13,22 @@ PY2 = sys.version_info[0] == 2
|
||||
# Borrowed from pip at https://github.com/pypa/pip/blob/62c27dee45625e1b63d1e023b0656310f276e050/setup.py#L11-L15
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
|
||||
def read(*parts):
|
||||
# intentionally *not* adding an encoding option to open, See:
|
||||
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
|
||||
with open(os.path.join(here, *parts), 'r') as fp:
|
||||
with open(os.path.join(here, *parts), "r") as fp:
|
||||
return fp.read()
|
||||
|
||||
|
||||
def get_version():
|
||||
version_file = read('moto', '__init__.py')
|
||||
version_match = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]',
|
||||
version_file, re.MULTILINE)
|
||||
version_file = read("moto", "__init__.py")
|
||||
version_match = re.search(
|
||||
r'^__version__ = [\'"]([^\'"]*)[\'"]', version_file, re.MULTILINE
|
||||
)
|
||||
if version_match:
|
||||
return version_match.group(1)
|
||||
raise RuntimeError('Unable to find version string.')
|
||||
raise RuntimeError("Unable to find version string.")
|
||||
|
||||
|
||||
install_requires = [
|
||||
@ -77,7 +79,9 @@ else:
|
||||
|
||||
_dep_PyYAML = "PyYAML>=5.1"
|
||||
_dep_python_jose = "python-jose[cryptography]>=3.1.0,<4.0.0"
|
||||
_dep_python_jose_ecdsa_pin = "ecdsa<0.15" # https://github.com/spulec/moto/pull/3263#discussion_r477404984
|
||||
_dep_python_jose_ecdsa_pin = (
|
||||
"ecdsa<0.15" # https://github.com/spulec/moto/pull/3263#discussion_r477404984
|
||||
)
|
||||
_dep_docker = "docker>=2.5.1"
|
||||
_dep_jsondiff = "jsondiff>=1.1.2"
|
||||
_dep_aws_xray_sdk = "aws-xray-sdk!=0.96,>=0.93"
|
||||
@ -98,31 +102,31 @@ all_extra_deps = [
|
||||
_dep_sshpubkeys_py2,
|
||||
_dep_sshpubkeys_py3,
|
||||
]
|
||||
all_server_deps = all_extra_deps + ['flask', 'flask-cors']
|
||||
all_server_deps = all_extra_deps + ["flask", "flask-cors"]
|
||||
|
||||
# TODO: do we want to add ALL services here?
|
||||
# i.e. even those without extra dependencies.
|
||||
# Would be good for future-compatibility, I guess.
|
||||
extras_per_service = {
|
||||
'apigateway': [_dep_python_jose, _dep_python_jose_ecdsa_pin],
|
||||
'awslambda': [_dep_docker],
|
||||
'batch': [_dep_docker],
|
||||
'cloudformation': [_dep_docker, _dep_PyYAML, _dep_cfn_lint],
|
||||
'cognitoidp': [_dep_python_jose, _dep_python_jose_ecdsa_pin],
|
||||
'dynamodb2': [_dep_docker],
|
||||
'dynamodbstreams': [_dep_docker],
|
||||
"apigateway": [_dep_python_jose, _dep_python_jose_ecdsa_pin],
|
||||
"awslambda": [_dep_docker],
|
||||
"batch": [_dep_docker],
|
||||
"cloudformation": [_dep_docker, _dep_PyYAML, _dep_cfn_lint],
|
||||
"cognitoidp": [_dep_python_jose, _dep_python_jose_ecdsa_pin],
|
||||
"dynamodb2": [_dep_docker],
|
||||
"dynamodbstreams": [_dep_docker],
|
||||
"ec2": [_dep_docker, _dep_sshpubkeys_py2, _dep_sshpubkeys_py3],
|
||||
'iotdata': [_dep_jsondiff],
|
||||
's3': [_dep_PyYAML],
|
||||
'ses': [_dep_docker],
|
||||
'sns': [_dep_docker],
|
||||
'sqs': [_dep_docker],
|
||||
'ssm': [_dep_docker, _dep_PyYAML, _dep_cfn_lint],
|
||||
'xray': [_dep_aws_xray_sdk],
|
||||
"iotdata": [_dep_jsondiff],
|
||||
"s3": [_dep_PyYAML],
|
||||
"ses": [_dep_docker],
|
||||
"sns": [_dep_docker],
|
||||
"sqs": [_dep_docker],
|
||||
"ssm": [_dep_docker, _dep_PyYAML, _dep_cfn_lint],
|
||||
"xray": [_dep_aws_xray_sdk],
|
||||
}
|
||||
extras_require = {
|
||||
'all': all_extra_deps,
|
||||
'server': all_server_deps,
|
||||
"all": all_extra_deps,
|
||||
"server": all_server_deps,
|
||||
}
|
||||
|
||||
extras_require.update(extras_per_service)
|
||||
@ -136,18 +140,18 @@ else:
|
||||
|
||||
|
||||
setup(
|
||||
name='moto',
|
||||
name="moto",
|
||||
version=get_version(),
|
||||
description='A library that allows your python tests to easily'
|
||||
' mock out the boto library',
|
||||
long_description=read('README.md'),
|
||||
long_description_content_type='text/markdown',
|
||||
author='Steve Pulec',
|
||||
author_email='spulec@gmail.com',
|
||||
url='https://github.com/spulec/moto',
|
||||
description="A library that allows your python tests to easily"
|
||||
" mock out the boto library",
|
||||
long_description=read("README.md"),
|
||||
long_description_content_type="text/markdown",
|
||||
author="Steve Pulec",
|
||||
author_email="spulec@gmail.com",
|
||||
url="https://github.com/spulec/moto",
|
||||
entry_points={
|
||||
'console_scripts': [
|
||||
'moto_server = moto.server:main',
|
||||
"console_scripts": [
|
||||
"moto_server = moto.server:main",
|
||||
],
|
||||
},
|
||||
packages=find_packages(exclude=("tests", "tests.*")),

@ -6,4 +6,3 @@ import logging
logging.getLogger("boto").setLevel(logging.CRITICAL)
logging.getLogger("boto3").setLevel(logging.CRITICAL)
logging.getLogger("botocore").setLevel(logging.CRITICAL)
logging.getLogger("nose").setLevel(logging.CRITICAL)
@ -1,41 +0,0 @@
from __future__ import unicode_literals

"""
Patch courtesy of:
https://marmida.com/blog/index.php/2012/08/08/monkey-patching-assert_raises/
"""

# code for monkey-patching
import nose.tools

# let's fix nose.tools.assert_raises (which is really unittest.assertRaises)
# so that it always supports context management

# in order for these changes to be available to other modules, you'll need
# to guarantee this module is imported by your fixture before either nose or
# unittest are imported

try:
    nose.tools.assert_raises(Exception)
except TypeError:
    # this version of assert_raises doesn't support the 1-arg version
    class AssertRaisesContext(object):
        def __init__(self, expected):
            self.expected = expected

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_val, tb):
            self.exception = exc_val
            if issubclass(exc_type, self.expected):
                return True
            nose.tools.assert_equal(exc_type, self.expected)
            # if you get to this line, the last assertion must have passed
            # suppress the propagation of this exception
            return True

    def assert_raises_context(exc_type):
        return AssertRaisesContext(exc_type)

    nose.tools.assert_raises = assert_raises_context
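This monkey-patch module can be deleted outright because `pytest.raises` natively supports the context-manager form the patch was bolting onto nose, and it hands back an `ExceptionInfo` object instead of the ad-hoc context above. A rough stand-in for what the patched `assert_raises` used to provide, written against plain pytest (toy function, illustrative only):

    import pytest

    def divide(a, b):
        # toy function used only for this illustration
        return a / b

    def test_divide_by_zero():
        with pytest.raises(ZeroDivisionError) as exc_info:
            divide(1, 0)
        # exc_info.type is the exception class, exc_info.value the instance
        assert exc_info.type is ZeroDivisionError
        assert "division" in str(exc_info.value)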

@ -1,6 +1,6 @@
from __future__ import unicode_literals
import boto
from nose.plugins.skip import SkipTest
from unittest import SkipTest
import six

tests/test_acm/__init__.py (new file)
@ -0,0 +1 @@
# This file is intentionally left blank.

tests/test_acm/resources/__init__.py (new file)
@ -0,0 +1 @@
# This file is intentionally left blank.

@ -1,18 +1,16 @@
from __future__ import unicode_literals

import os
import boto3
from freezegun import freeze_time
import sure # noqa
import uuid

import boto3
import pytest
import sure # noqa
from botocore.exceptions import ClientError

from freezegun import freeze_time
from moto import mock_acm, settings
from moto.core import ACCOUNT_ID

from nose import SkipTest
from nose.tools import assert_raises
from unittest import SkipTest

RESOURCE_FOLDER = os.path.join(os.path.dirname(__file__), "resources")
_GET_RESOURCE = lambda x: open(os.path.join(RESOURCE_FOLDER, x), "rb").read()
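One mechanical consequence of dropping `nose.tools.assert_raises` shows up all through the hunks below: the old context object exposed the caught exception as `.exception`, whereas pytest's `ExceptionInfo` exposes it as `.value`, so every `ex.exception.response[...]` assertion becomes `ex.value.response[...]`. Sketched abstractly (the helper below is hypothetical, not part of this commit):

    import pytest
    from botocore.exceptions import ClientError

    def assert_error_code(do_call, expected_code):
        # hypothetical helper: do_call is any callable expected to raise ClientError
        with pytest.raises(ClientError) as ex:
            do_call()
        assert ex.value.response["Error"]["Code"] == expected_code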
@ -396,17 +394,17 @@ def test_operations_with_invalid_tags():
|
||||
client = boto3.client("acm", region_name="eu-central-1")
|
||||
|
||||
# request certificate with invalid tags
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.request_certificate(
|
||||
DomainName="example.com", Tags=[{"Key": "X" * 200, "Value": "Valid"}],
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
|
||||
ex.exception.response["Error"]["Message"].should.contain(
|
||||
ex.value.response["Error"]["Code"].should.equal("ValidationException")
|
||||
ex.value.response["Error"]["Message"].should.contain(
|
||||
"Member must have length less than or equal to 128"
|
||||
)
|
||||
|
||||
# import certificate with invalid tags
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.import_certificate(
|
||||
Certificate=SERVER_CRT,
|
||||
PrivateKey=SERVER_KEY,
|
||||
@ -417,31 +415,31 @@ def test_operations_with_invalid_tags():
|
||||
],
|
||||
)
|
||||
|
||||
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
|
||||
ex.exception.response["Error"]["Message"].should.contain(
|
||||
ex.value.response["Error"]["Code"].should.equal("ValidationException")
|
||||
ex.value.response["Error"]["Message"].should.contain(
|
||||
"Member must have length less than or equal to 256"
|
||||
)
|
||||
|
||||
arn = _import_cert(client)
|
||||
|
||||
# add invalid tags to existing certificate
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.add_tags_to_certificate(
|
||||
CertificateArn=arn,
|
||||
Tags=[{"Key": "aws:xxx", "Value": "Valid"}, {"Key": "key2"}],
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
|
||||
ex.exception.response["Error"]["Message"].should.contain(
|
||||
ex.value.response["Error"]["Code"].should.equal("ValidationException")
|
||||
ex.value.response["Error"]["Message"].should.contain(
|
||||
"AWS internal tags cannot be changed with this API"
|
||||
)
|
||||
|
||||
# try removing invalid tags from existing certificate
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.remove_tags_from_certificate(
|
||||
CertificateArn=arn, Tags=[{"Key": "aws:xxx", "Value": "Valid"}]
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
|
||||
ex.exception.response["Error"]["Message"].should.contain(
|
||||
ex.value.response["Error"]["Code"].should.equal("ValidationException")
|
||||
ex.value.response["Error"]["Message"].should.contain(
|
||||
"AWS internal tags cannot be changed with this API"
|
||||
)
|
||||
|
||||
@ -452,13 +450,13 @@ def test_add_too_many_tags():
|
||||
arn = _import_cert(client)
|
||||
|
||||
# Add 51 tags
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.add_tags_to_certificate(
|
||||
CertificateArn=arn,
|
||||
Tags=[{"Key": "a-%d" % i, "Value": "abcd"} for i in range(1, 52)],
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("TooManyTagsException")
|
||||
ex.exception.response["Error"]["Message"].should.contain("contains too many Tags")
|
||||
ex.value.response["Error"]["Code"].should.equal("TooManyTagsException")
|
||||
ex.value.response["Error"]["Message"].should.contain("contains too many Tags")
|
||||
client.list_tags_for_certificate(CertificateArn=arn)["Tags"].should.have.empty
|
||||
|
||||
# Add 49 tags first, then try to add 2 more.
|
||||
@ -469,15 +467,15 @@ def test_add_too_many_tags():
|
||||
client.list_tags_for_certificate(CertificateArn=arn)["Tags"].should.have.length_of(
|
||||
49
|
||||
)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.add_tags_to_certificate(
|
||||
CertificateArn=arn,
|
||||
Tags=[{"Key": "x-1", "Value": "xyz"}, {"Key": "x-2", "Value": "xyz"}],
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("TooManyTagsException")
|
||||
ex.exception.response["Error"]["Message"].should.contain("contains too many Tags")
|
||||
ex.exception.response["Error"]["Message"].count("pqrs").should.equal(49)
|
||||
ex.exception.response["Error"]["Message"].count("xyz").should.equal(2)
|
||||
ex.value.response["Error"]["Code"].should.equal("TooManyTagsException")
|
||||
ex.value.response["Error"]["Message"].should.contain("contains too many Tags")
|
||||
ex.value.response["Error"]["Message"].count("pqrs").should.equal(49)
|
||||
ex.value.response["Error"]["Message"].count("xyz").should.equal(2)
|
||||
client.list_tags_for_certificate(CertificateArn=arn)["Tags"].should.have.length_of(
|
||||
49
|
||||
)
|
||||
@ -495,20 +493,21 @@ def test_request_certificate_no_san():
|
||||
|
||||
|
||||
# Also tests the SAN code
|
||||
@freeze_time("2012-01-01 12:00:00", as_arg=True)
|
||||
@mock_acm
|
||||
def test_request_certificate_issued_status(frozen_time):
|
||||
def test_request_certificate_issued_status():
|
||||
# After requesting a certificate, it should then auto-validate after 1 minute
|
||||
# Some sneaky programming for that ;-)
|
||||
client = boto3.client("acm", region_name="eu-central-1")
|
||||
|
||||
resp = client.request_certificate(
|
||||
DomainName="google.com",
|
||||
SubjectAlternativeNames=["google.com", "www.google.com", "mail.google.com"],
|
||||
)
|
||||
with freeze_time("2012-01-01 12:00:00"):
|
||||
resp = client.request_certificate(
|
||||
DomainName="google.com",
|
||||
SubjectAlternativeNames=["google.com", "www.google.com", "mail.google.com"],
|
||||
)
|
||||
arn = resp["CertificateArn"]
|
||||
|
||||
resp = client.describe_certificate(CertificateArn=arn)
|
||||
with freeze_time("2012-01-01 12:00:00"):
|
||||
resp = client.describe_certificate(CertificateArn=arn)
|
||||
resp["Certificate"]["CertificateArn"].should.equal(arn)
|
||||
resp["Certificate"]["DomainName"].should.equal("google.com")
|
||||
resp["Certificate"]["Issuer"].should.equal("Amazon")
|
||||
@ -518,21 +517,21 @@ def test_request_certificate_issued_status(frozen_time):
|
||||
len(resp["Certificate"]["SubjectAlternativeNames"]).should.equal(3)
|
||||
|
||||
# validation will be pending for 1 minute.
|
||||
resp = client.describe_certificate(CertificateArn=arn)
|
||||
with freeze_time("2012-01-01 12:00:00"):
|
||||
resp = client.describe_certificate(CertificateArn=arn)
|
||||
resp["Certificate"]["CertificateArn"].should.equal(arn)
|
||||
resp["Certificate"]["Status"].should.equal("PENDING_VALIDATION")
|
||||
|
||||
if not settings.TEST_SERVER_MODE:
|
||||
# Move time to get it issued.
|
||||
frozen_time.move_to("2012-01-01 12:02:00")
|
||||
resp = client.describe_certificate(CertificateArn=arn)
|
||||
with freeze_time("2012-01-01 12:02:00"):
|
||||
resp = client.describe_certificate(CertificateArn=arn)
|
||||
resp["Certificate"]["CertificateArn"].should.equal(arn)
|
||||
resp["Certificate"]["Status"].should.equal("ISSUED")
|
||||
|
||||
|
||||
@freeze_time("2012-01-01 12:00:00", as_arg=True)
|
||||
@mock_acm
|
||||
def test_request_certificate_with_mutiple_times(frozen_time):
|
||||
def test_request_certificate_with_mutiple_times():
|
||||
if settings.TEST_SERVER_MODE:
|
||||
raise SkipTest("Cant manipulate time in server mode")
|
||||
|
||||
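A side note on the skip here: `unittest.SkipTest` raised from inside a test body is recognised by pytest and reported as a skip, which is why swapping the nose import for the standard-library one is enough; `pytest.skip()` is the more idiomatic spelling of the same thing. Illustrative only:

    from unittest import SkipTest

    import pytest

    def test_skip_via_unittest():
        raise SkipTest("Cant manipulate time in server mode")

    def test_skip_via_pytest():
        pytest.skip("Cant manipulate time in server mode")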
@ -540,11 +539,12 @@ def test_request_certificate_with_mutiple_times(frozen_time):
|
||||
# Some sneaky programming for that ;-)
|
||||
client = boto3.client("acm", region_name="eu-central-1")
|
||||
|
||||
resp = client.request_certificate(
|
||||
IdempotencyToken="test_token",
|
||||
DomainName="google.com",
|
||||
SubjectAlternativeNames=["google.com", "www.google.com", "mail.google.com"],
|
||||
)
|
||||
with freeze_time("2012-01-01 12:00:00"):
|
||||
resp = client.request_certificate(
|
||||
IdempotencyToken="test_token",
|
||||
DomainName="google.com",
|
||||
SubjectAlternativeNames=["google.com", "www.google.com", "mail.google.com"],
|
||||
)
|
||||
original_arn = resp["CertificateArn"]
|
||||
|
||||
# Should be able to request a certificate multiple times in an hour
|
||||
@ -554,21 +554,25 @@ def test_request_certificate_with_mutiple_times(frozen_time):
|
||||
"2012-01-01 12:30:00",
|
||||
"2012-01-01 12:45:00",
|
||||
):
|
||||
frozen_time.move_to(time_intervals)
|
||||
with freeze_time(time_intervals):
|
||||
resp = client.request_certificate(
|
||||
IdempotencyToken="test_token",
|
||||
DomainName="google.com",
|
||||
SubjectAlternativeNames=[
|
||||
"google.com",
|
||||
"www.google.com",
|
||||
"mail.google.com",
|
||||
],
|
||||
)
|
||||
arn = resp["CertificateArn"]
|
||||
arn.should.equal(original_arn)
|
||||
|
||||
# Move time
|
||||
with freeze_time("2012-01-01 13:01:00"):
|
||||
resp = client.request_certificate(
|
||||
IdempotencyToken="test_token",
|
||||
DomainName="google.com",
|
||||
SubjectAlternativeNames=["google.com", "www.google.com", "mail.google.com"],
|
||||
)
|
||||
arn = resp["CertificateArn"]
|
||||
arn.should.equal(original_arn)
|
||||
|
||||
# Move time
|
||||
frozen_time.move_to("2012-01-01 13:01:00")
|
||||
resp = client.request_certificate(
|
||||
IdempotencyToken="test_token",
|
||||
DomainName="google.com",
|
||||
SubjectAlternativeNames=["google.com", "www.google.com", "mail.google.com"],
|
||||
)
|
||||
arn = resp["CertificateArn"]
|
||||
arn.should_not.equal(original_arn)
|
||||
|
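The freezegun usage above also changes shape: instead of decorating the whole test with `@freeze_time(..., as_arg=True)` and nudging the injected handle with `frozen_time.move_to(...)`, each client call is wrapped in its own `with freeze_time(...)` block, so every request is pinned to an explicit timestamp. A minimal sketch of the two styles with a toy assertion (not moto code):

    from datetime import datetime
    from freezegun import freeze_time

    # old style: freezegun injects a frozen-time handle as the first argument
    @freeze_time("2012-01-01 12:00:00", as_arg=True)
    def check_with_handle(frozen_time):
        assert datetime.now().minute == 0
        frozen_time.move_to("2012-01-01 12:02:00")
        assert datetime.now().minute == 2

    # new style: each block pins its own clock
    def check_with_contexts():
        with freeze_time("2012-01-01 12:00:00"):
            assert datetime.now().minute == 0
        with freeze_time("2012-01-01 12:02:00"):
            assert datetime.now().minute == 2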

tests/test_apigateway/__init__.py (new file)
@ -0,0 +1 @@
# This file is intentionally left blank.
@ -11,7 +11,7 @@ from botocore.exceptions import ClientError
|
||||
import responses
|
||||
from moto import mock_apigateway, mock_cognitoidp, settings
|
||||
from moto.core import ACCOUNT_ID
|
||||
from nose.tools import assert_raises
|
||||
import pytest
|
||||
|
||||
|
||||
@freeze_time("2015-01-01")
|
||||
@ -90,13 +90,13 @@ def test_create_rest_api_with_policy():
|
||||
def test_create_rest_api_invalid_apikeysource():
|
||||
client = boto3.client("apigateway", region_name="us-west-2")
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_rest_api(
|
||||
name="my_api",
|
||||
description="this is my api",
|
||||
apiKeySource="not a valid api key source",
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
|
||||
ex.value.response["Error"]["Code"].should.equal("ValidationException")
|
||||
|
||||
|
||||
@mock_apigateway
|
||||
@ -126,13 +126,13 @@ def test_create_rest_api_valid_apikeysources():
|
||||
def test_create_rest_api_invalid_endpointconfiguration():
|
||||
client = boto3.client("apigateway", region_name="us-west-2")
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_rest_api(
|
||||
name="my_api",
|
||||
description="this is my api",
|
||||
endpointConfiguration={"types": ["INVALID"]},
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
|
||||
ex.value.response["Error"]["Code"].should.equal("ValidationException")
|
||||
|
||||
|
||||
@mock_apigateway
|
||||
@ -194,10 +194,10 @@ def test_create_resource__validate_name():
|
||||
valid_names = ["users", "{user_id}", "{proxy+}", "user_09", "good-dog"]
|
||||
# All invalid names should throw an exception
|
||||
for name in invalid_names:
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_resource(restApiId=api_id, parentId=root_id, pathPart=name)
|
||||
ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"Resource's path part only allow a-zA-Z0-9._- and curly braces at the beginning and the end and an optional plus sign before the closing brace."
|
||||
)
|
||||
# All valid names should go through
|
||||
@ -1194,10 +1194,10 @@ def test_create_deployment_requires_REST_methods():
|
||||
response = client.create_rest_api(name="my_api", description="this is my api")
|
||||
api_id = response["id"]
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_deployment(restApiId=api_id, stageName=stage_name)["id"]
|
||||
ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"The REST API doesn't contain any methods"
|
||||
)
|
||||
|
||||
@ -1217,10 +1217,10 @@ def test_create_deployment_requires_REST_method_integrations():
|
||||
restApiId=api_id, resourceId=root_id, httpMethod="GET", authorizationType="NONE"
|
||||
)
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_deployment(restApiId=api_id, stageName=stage_name)["id"]
|
||||
ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"No integration defined for method"
|
||||
)
|
||||
|
||||
@ -1273,12 +1273,12 @@ def test_put_integration_response_requires_responseTemplate():
|
||||
integrationHttpMethod="POST",
|
||||
)
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.put_integration_response(
|
||||
restApiId=api_id, resourceId=root_id, httpMethod="GET", statusCode="200"
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.exception.response["Error"]["Message"].should.equal("Invalid request input")
|
||||
ex.value.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.value.response["Error"]["Message"].should.equal("Invalid request input")
|
||||
# Works fine if responseTemplate is defined
|
||||
client.put_integration_response(
|
||||
restApiId=api_id,
|
||||
@ -1314,13 +1314,13 @@ def test_put_integration_response_with_response_template():
|
||||
integrationHttpMethod="POST",
|
||||
)
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.put_integration_response(
|
||||
restApiId=api_id, resourceId=root_id, httpMethod="GET", statusCode="200"
|
||||
)
|
||||
|
||||
ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.exception.response["Error"]["Message"].should.equal("Invalid request input")
|
||||
ex.value.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.value.response["Error"]["Message"].should.equal("Invalid request input")
|
||||
|
||||
client.put_integration_response(
|
||||
restApiId=api_id,
|
||||
@ -1372,7 +1372,7 @@ def test_put_integration_validation():
|
||||
|
||||
for type in types_requiring_integration_method:
|
||||
# Ensure that integrations of these types fail if no integrationHttpMethod is provided
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.put_integration(
|
||||
restApiId=api_id,
|
||||
resourceId=root_id,
|
||||
@ -1380,8 +1380,8 @@ def test_put_integration_validation():
|
||||
type=type,
|
||||
uri="http://httpbin.org/robots.txt",
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"Enumeration value for HttpMethod must be non-empty"
|
||||
)
|
||||
for type in types_not_requiring_integration_method:
|
||||
@ -1428,7 +1428,7 @@ def test_put_integration_validation():
|
||||
)
|
||||
for type in ["AWS_PROXY"]:
|
||||
# Ensure that aws_proxy does not support S3
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.put_integration(
|
||||
restApiId=api_id,
|
||||
resourceId=root_id,
|
||||
@ -1440,13 +1440,13 @@ def test_put_integration_validation():
|
||||
uri="arn:aws:apigateway:us-west-2:s3:path/b/k",
|
||||
integrationHttpMethod="POST",
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"Integrations of type 'AWS_PROXY' currently only supports Lambda function and Firehose stream invocations."
|
||||
)
|
||||
for type in aws_types:
|
||||
# Ensure that the Role ARN is for the current account
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.put_integration(
|
||||
restApiId=api_id,
|
||||
resourceId=root_id,
|
||||
@ -1456,13 +1456,13 @@ def test_put_integration_validation():
|
||||
uri="arn:aws:apigateway:us-west-2:s3:path/b/k",
|
||||
integrationHttpMethod="POST",
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("AccessDeniedException")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("AccessDeniedException")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"Cross-account pass role is not allowed."
|
||||
)
|
||||
for type in ["AWS"]:
|
||||
# Ensure that the Role ARN is specified for aws integrations
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.put_integration(
|
||||
restApiId=api_id,
|
||||
resourceId=root_id,
|
||||
@ -1471,13 +1471,13 @@ def test_put_integration_validation():
|
||||
uri="arn:aws:apigateway:us-west-2:s3:path/b/k",
|
||||
integrationHttpMethod="POST",
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"Role ARN must be specified for AWS integrations"
|
||||
)
|
||||
for type in http_types:
|
||||
# Ensure that the URI is valid HTTP
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.put_integration(
|
||||
restApiId=api_id,
|
||||
resourceId=root_id,
|
||||
@ -1486,13 +1486,13 @@ def test_put_integration_validation():
|
||||
uri="non-valid-http",
|
||||
integrationHttpMethod="POST",
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"Invalid HTTP endpoint specified for URI"
|
||||
)
|
||||
for type in aws_types:
|
||||
# Ensure that the URI is an ARN
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.put_integration(
|
||||
restApiId=api_id,
|
||||
resourceId=root_id,
|
||||
@ -1501,13 +1501,13 @@ def test_put_integration_validation():
|
||||
uri="non-valid-arn",
|
||||
integrationHttpMethod="POST",
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"Invalid ARN specified in the request"
|
||||
)
|
||||
for type in aws_types:
|
||||
# Ensure that the URI is a valid ARN
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.put_integration(
|
||||
restApiId=api_id,
|
||||
resourceId=root_id,
|
||||
@ -1516,8 +1516,8 @@ def test_put_integration_validation():
|
||||
uri="arn:aws:iam::0000000000:role/service-role/asdf",
|
||||
integrationHttpMethod="POST",
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"AWS ARN for integration must contain path or action"
|
||||
)
|
||||
|
||||
@ -1632,11 +1632,11 @@ def test_create_domain_names():
|
||||
response["domainName"].should.equal(domain_name)
|
||||
response["certificateName"].should.equal(test_certificate_name)
|
||||
# without domain name it should throw BadRequestException
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_domain_name(domainName="")
|
||||
|
||||
ex.exception.response["Error"]["Message"].should.equal("No Domain Name specified")
|
||||
ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.value.response["Error"]["Message"].should.equal("No Domain Name specified")
|
||||
ex.value.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
|
||||
|
||||
@mock_apigateway
|
||||
@ -1666,13 +1666,11 @@ def test_get_domain_name():
|
||||
client = boto3.client("apigateway", region_name="us-west-2")
|
||||
domain_name = "testDomain"
|
||||
# quering an invalid domain name which is not present
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.get_domain_name(domainName=domain_name)
|
||||
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
"Invalid Domain Name specified"
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("NotFoundException")
|
||||
ex.value.response["Error"]["Message"].should.equal("Invalid Domain Name specified")
|
||||
ex.value.response["Error"]["Code"].should.equal("NotFoundException")
|
||||
# adding a domain name
|
||||
client.create_domain_name(domainName=domain_name)
|
||||
# retrieving the data of added domain name.
|
||||
@ -1701,19 +1699,17 @@ def test_create_model():
|
||||
response["description"].should.equal(description)
|
||||
|
||||
# with an invalid rest_api_id it should throw NotFoundException
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_model(
|
||||
restApiId=dummy_rest_api_id,
|
||||
name=model_name,
|
||||
description=description,
|
||||
contentType=content_type,
|
||||
)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
"Invalid Rest API Id specified"
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("NotFoundException")
|
||||
ex.value.response["Error"]["Message"].should.equal("Invalid Rest API Id specified")
|
||||
ex.value.response["Error"]["Code"].should.equal("NotFoundException")
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_model(
|
||||
restApiId=rest_api_id,
|
||||
name="",
|
||||
@ -1721,8 +1717,8 @@ def test_create_model():
|
||||
contentType=content_type,
|
||||
)
|
||||
|
||||
ex.exception.response["Error"]["Message"].should.equal("No Model Name specified")
|
||||
ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
ex.value.response["Error"]["Message"].should.equal("No Model Name specified")
|
||||
ex.value.response["Error"]["Code"].should.equal("BadRequestException")
|
||||
|
||||
|
||||
@mock_apigateway
|
||||
@ -1770,12 +1766,10 @@ def test_get_model_by_name():
|
||||
result["name"] = model_name
|
||||
result["description"] = description
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.get_model(restApiId=dummy_rest_api_id, modelName=model_name)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
"Invalid Rest API Id specified"
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("NotFoundException")
|
||||
ex.value.response["Error"]["Message"].should.equal("Invalid Rest API Id specified")
|
||||
ex.value.response["Error"]["Code"].should.equal("NotFoundException")
|
||||
|
||||
|
||||
@mock_apigateway
|
||||
@ -1784,12 +1778,10 @@ def test_get_model_with_invalid_name():
|
||||
response = client.create_rest_api(name="my_api", description="this is my api")
|
||||
rest_api_id = response["id"]
|
||||
# test with an invalid model name
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.get_model(restApiId=rest_api_id, modelName="fake")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
"Invalid Model Name specified"
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("NotFoundException")
|
||||
ex.value.response["Error"]["Message"].should.equal("Invalid Model Name specified")
|
||||
ex.value.response["Error"]["Code"].should.equal("NotFoundException")
|
||||
|
||||
|
||||
@mock_apigateway
|
||||
@ -1868,11 +1860,11 @@ def test_create_api_headers():
|
||||
payload = {"value": apikey_value, "name": apikey_name}
|
||||
|
||||
client.create_api_key(**payload)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_api_key(**payload)
|
||||
ex.exception.response["Error"]["Code"].should.equal("ConflictException")
|
||||
ex.value.response["Error"]["Code"].should.equal("ConflictException")
|
||||
if not settings.TEST_SERVER_MODE:
|
||||
ex.exception.response["ResponseMetadata"]["HTTPHeaders"].should.equal({})
|
||||
ex.value.response["ResponseMetadata"]["HTTPHeaders"].should.equal({})
|
||||
|
||||
|
||||
@mock_apigateway
|
||||
@ -1939,10 +1931,10 @@ def test_usage_plans():
|
||||
len(response["items"]).should.equal(0)
|
||||
|
||||
# # Try to get info about a non existing usage
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.get_usage_plan(usagePlanId="not_existing")
|
||||
ex.exception.response["Error"]["Code"].should.equal("NotFoundException")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("NotFoundException")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"Invalid Usage Plan ID specified"
|
||||
)
|
||||
|
||||
@ -2030,26 +2022,26 @@ def test_usage_plan_keys():
|
||||
len(response["items"]).should.equal(0)
|
||||
|
||||
# Try to get info about a non existing api key
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.get_usage_plan_key(usagePlanId=usage_plan_id, keyId="not_existing_key")
|
||||
ex.exception.response["Error"]["Code"].should.equal("NotFoundException")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("NotFoundException")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"Invalid API Key identifier specified"
|
||||
)
|
||||
|
||||
# Try to get info about an existing api key that has not jet added to a valid usage plan
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.get_usage_plan_key(usagePlanId=usage_plan_id, keyId=key_id)
|
||||
ex.exception.response["Error"]["Code"].should.equal("NotFoundException")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("NotFoundException")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"Invalid Usage Plan ID specified"
|
||||
)
|
||||
|
||||
# Try to get info about an existing api key that has not jet added to a valid usage plan
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.get_usage_plan_key(usagePlanId="not_existing_plan_id", keyId=key_id)
|
||||
ex.exception.response["Error"]["Code"].should.equal("NotFoundException")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("NotFoundException")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"Invalid Usage Plan ID specified"
|
||||
)
|
||||
|
||||
|
@ -1,10 +1,10 @@
|
||||
from __future__ import unicode_literals
|
||||
import botocore
|
||||
|
||||
import boto3
|
||||
import botocore
|
||||
import pytest
|
||||
import sure # noqa
|
||||
from nose.tools import assert_raises
|
||||
from moto import mock_applicationautoscaling, mock_ecs
|
||||
from moto.applicationautoscaling.exceptions import AWSValidationException
|
||||
|
||||
DEFAULT_REGION = "us-east-1"
|
||||
DEFAULT_ECS_CLUSTER = "default"
|
||||
@ -334,7 +334,7 @@ def test_put_scaling_policy():
|
||||
},
|
||||
}
|
||||
|
||||
with assert_raises(client.exceptions.ValidationException) as e:
|
||||
with pytest.raises(client.exceptions.ValidationException) as e:
|
||||
client.put_scaling_policy(
|
||||
PolicyName=policy_name,
|
||||
ServiceNamespace=namespace,
|
||||
@ -343,7 +343,7 @@ def test_put_scaling_policy():
|
||||
PolicyType="ABCDEFG",
|
||||
TargetTrackingScalingPolicyConfiguration=policy_body,
|
||||
)
|
||||
e.exception.response["Error"]["Message"].should.match(
|
||||
e.value.response["Error"]["Message"].should.match(
|
||||
r"Unknown policy type .* specified."
|
||||
)
|
||||
|
||||
@ -443,14 +443,14 @@ def test_delete_scaling_policies():
|
||||
},
|
||||
}
|
||||
|
||||
with assert_raises(client.exceptions.ValidationException) as e:
|
||||
with pytest.raises(client.exceptions.ValidationException) as e:
|
||||
client.delete_scaling_policy(
|
||||
PolicyName=policy_name,
|
||||
ServiceNamespace=namespace,
|
||||
ResourceId=resource_id,
|
||||
ScalableDimension=scalable_dimension,
|
||||
)
|
||||
e.exception.response["Error"]["Message"].should.match(r"No scaling policy found .*")
|
||||
e.value.response["Error"]["Message"].should.match(r"No scaling policy found .*")
|
||||
|
||||
response = client.put_scaling_policy(
|
||||
PolicyName=policy_name,
|
||||
@ -507,12 +507,10 @@ def test_deregister_scalable_target():
|
||||
response = client.describe_scalable_targets(ServiceNamespace=namespace)
|
||||
len(response["ScalableTargets"]).should.equal(0)
|
||||
|
||||
with assert_raises(client.exceptions.ValidationException) as e:
|
||||
with pytest.raises(client.exceptions.ValidationException) as e:
|
||||
client.deregister_scalable_target(
|
||||
ServiceNamespace=namespace,
|
||||
ResourceId=resource_id,
|
||||
ScalableDimension=scalable_dimension,
|
||||
)
|
||||
e.exception.response["Error"]["Message"].should.match(
|
||||
r"No scalable target found .*"
|
||||
)
|
||||
e.value.response["Error"]["Message"].should.match(r"No scalable target found .*")
|
||||
|
@ -4,10 +4,9 @@ from moto import mock_applicationautoscaling, mock_ecs
|
||||
from moto.applicationautoscaling import models
|
||||
from moto.applicationautoscaling.exceptions import AWSValidationException
|
||||
from botocore.exceptions import ParamValidationError
|
||||
from nose.tools import assert_raises
|
||||
import pytest
|
||||
import sure # noqa
|
||||
from botocore.exceptions import ClientError
|
||||
from parameterized import parameterized
|
||||
from .test_applicationautoscaling import register_scalable_target
|
||||
|
||||
DEFAULT_REGION = "us-east-1"
|
||||
@ -25,21 +24,21 @@ DEFAULT_ROLE_ARN = "test:arn"
|
||||
@mock_applicationautoscaling
|
||||
def test_describe_scalable_targets_no_params_should_raise_param_validation_errors():
|
||||
client = boto3.client("application-autoscaling", region_name=DEFAULT_REGION)
|
||||
with assert_raises(ParamValidationError):
|
||||
with pytest.raises(ParamValidationError):
|
||||
client.describe_scalable_targets()
|
||||
|
||||
|
||||
@mock_applicationautoscaling
|
||||
def test_register_scalable_target_no_params_should_raise_param_validation_errors():
|
||||
client = boto3.client("application-autoscaling", region_name=DEFAULT_REGION)
|
||||
with assert_raises(ParamValidationError):
|
||||
with pytest.raises(ParamValidationError):
|
||||
client.register_scalable_target()
|
||||
|
||||
|
||||
@mock_applicationautoscaling
|
||||
def test_register_scalable_target_with_none_service_namespace_should_raise_param_validation_errors():
|
||||
client = boto3.client("application-autoscaling", region_name=DEFAULT_REGION)
|
||||
with assert_raises(ParamValidationError):
|
||||
with pytest.raises(ParamValidationError):
|
||||
register_scalable_target(client, ServiceNamespace=None)
|
||||
|
||||
|
||||
@ -47,7 +46,7 @@ def test_register_scalable_target_with_none_service_namespace_should_raise_param
|
||||
def test_describe_scalable_targets_with_invalid_scalable_dimension_should_return_validation_exception():
|
||||
client = boto3.client("application-autoscaling", region_name=DEFAULT_REGION)
|
||||
|
||||
with assert_raises(ClientError) as err:
|
||||
with pytest.raises(ClientError) as err:
|
||||
response = client.describe_scalable_targets(
|
||||
ServiceNamespace=DEFAULT_SERVICE_NAMESPACE, ScalableDimension="foo",
|
||||
)
|
||||
@ -62,7 +61,7 @@ def test_describe_scalable_targets_with_invalid_scalable_dimension_should_return
|
||||
def test_describe_scalable_targets_with_invalid_service_namespace_should_return_validation_exception():
|
||||
client = boto3.client("application-autoscaling", region_name=DEFAULT_REGION)
|
||||
|
||||
with assert_raises(ClientError) as err:
|
||||
with pytest.raises(ClientError) as err:
|
||||
response = client.describe_scalable_targets(
|
||||
ServiceNamespace="foo", ScalableDimension=DEFAULT_SCALABLE_DIMENSION,
|
||||
)
|
||||
@ -77,7 +76,7 @@ def test_describe_scalable_targets_with_invalid_service_namespace_should_return_
|
||||
def test_describe_scalable_targets_with_multiple_invalid_parameters_should_return_validation_exception():
|
||||
client = boto3.client("application-autoscaling", region_name=DEFAULT_REGION)
|
||||
|
||||
with assert_raises(ClientError) as err:
|
||||
with pytest.raises(ClientError) as err:
|
||||
response = client.describe_scalable_targets(
|
||||
ServiceNamespace="foo", ScalableDimension="bar",
|
||||
)
|
||||
@ -94,7 +93,7 @@ def test_register_scalable_target_ecs_with_non_existent_service_should_return_va
|
||||
client = boto3.client("application-autoscaling", region_name=DEFAULT_REGION)
|
||||
resource_id = "service/{}/foo".format(DEFAULT_ECS_CLUSTER)
|
||||
|
||||
with assert_raises(ClientError) as err:
|
||||
with pytest.raises(ClientError) as err:
|
||||
register_scalable_target(client, ServiceNamespace="ecs", ResourceId=resource_id)
|
||||
err.response["Error"]["Code"].should.equal("ValidationException")
|
||||
err.response["Error"]["Message"].should.equal(
|
||||
@ -103,12 +102,13 @@ def test_register_scalable_target_ecs_with_non_existent_service_should_return_va
err.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)


@parameterized(
@pytest.mark.parametrize(
    "namespace,r_id,dimension,expected",
    [
        ("ecs", "service/default/test-svc", "ecs:service:DesiredCount", True),
        ("ecs", "banana/default/test-svc", "ecs:service:DesiredCount", False),
        ("rds", "service/default/test-svc", "ecs:service:DesiredCount", False),
    ]
    ],
)
def test_target_params_are_valid_success(namespace, r_id, dimension, expected):
    if expected is True:
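`parameterized` gives way to the built-in `pytest.mark.parametrize`, which takes a comma-separated string of argument names plus a list of value tuples and generates one test case per tuple while keeping the same call signature. A stripped-down sketch of the same shape, with a toy check standing in for the moto helper:

    import pytest

    @pytest.mark.parametrize(
        "namespace,resource_id,expected",
        [
            ("ecs", "service/default/test-svc", True),
            ("ecs", "banana/default/test-svc", False),
            ("rds", "service/default/test-svc", False),
        ],
    )
    def test_resource_id_shape(namespace, resource_id, expected):
        # toy validity check, only for illustration
        looks_valid = namespace == "ecs" and resource_id.startswith("service/")
        assert looks_valid is expected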
@ -116,7 +116,7 @@ def test_target_params_are_valid_success(namespace, r_id, dimension, expected):
|
||||
expected
|
||||
)
|
||||
else:
|
||||
with assert_raises(AWSValidationException):
|
||||
with pytest.raises(AWSValidationException):
|
||||
models._target_params_are_valid(namespace, r_id, dimension)
|
||||
|
||||
|
||||
|
tests/test_athena/__init__.py (new file)
@ -0,0 +1 @@
# This file is intentionally left blank.
@ -1,7 +1,7 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from botocore.exceptions import ClientError
|
||||
from nose.tools import assert_raises
|
||||
import pytest
|
||||
import boto3
|
||||
import sure # noqa
|
||||
|
||||
@ -104,15 +104,15 @@ def test_start_query_execution():
|
||||
def test_start_query_validate_workgroup():
|
||||
client = boto3.client("athena", region_name="us-east-1")
|
||||
|
||||
with assert_raises(ClientError) as err:
|
||||
with pytest.raises(ClientError) as err:
|
||||
client.start_query_execution(
|
||||
QueryString="query1",
|
||||
QueryExecutionContext={"Database": "string"},
|
||||
ResultConfiguration={"OutputLocation": "string"},
|
||||
WorkGroup="unknown_workgroup",
|
||||
)
|
||||
err.exception.response["Error"]["Code"].should.equal("InvalidRequestException")
|
||||
err.exception.response["Error"]["Message"].should.equal("WorkGroup does not exist")
|
||||
err.value.response["Error"]["Code"].should.equal("InvalidRequestException")
|
||||
err.value.response["Error"]["Message"].should.equal("WorkGroup does not exist")
|
||||
|
||||
|
||||
@mock_athena
|
||||
|
tests/test_autoscaling/__init__.py (new file)
@ -0,0 +1 @@
# This file is intentionally left blank.

@ -8,7 +8,7 @@ from boto.ec2.autoscale import Tag
import boto.ec2.elb
import sure # noqa
from botocore.exceptions import ClientError
from nose.tools import assert_raises
import pytest

from moto import (
    mock_autoscaling,
@ -21,7 +21,7 @@ from moto import (
)
from tests.helpers import requires_boto_gte

from utils import (
from .utils import (
    setup_networking,
    setup_networking_deprecated,
    setup_instance_with_networking,
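The switch from `from utils import (...)` to `from .utils import (...)` goes hand in hand with the empty `__init__.py` files added to the test directories: each tests/test_* folder becomes a proper package, so shared helpers resolve as package-relative imports instead of depending on the test directory itself being on `sys.path`, and same-named modules in different test folders are less likely to collide during pytest collection. In outline:

    # tests/test_autoscaling/test_example.py (hypothetical module, for illustration)
    from .utils import setup_networking  # resolves against the tests.test_autoscaling package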
@ -97,8 +97,8 @@ def test_create_autoscaling_group():
|
||||
|
||||
@mock_autoscaling_deprecated
|
||||
def test_create_autoscaling_groups_defaults():
|
||||
""" Test with the minimum inputs and check that all of the proper defaults
|
||||
are assigned for the other attributes """
|
||||
"""Test with the minimum inputs and check that all of the proper defaults
|
||||
are assigned for the other attributes"""
|
||||
|
||||
mocked_networking = setup_networking_deprecated()
|
||||
conn = boto.connect_autoscale()
|
||||
@ -781,7 +781,7 @@ def test_create_autoscaling_group_from_invalid_instance_id():
|
||||
|
||||
mocked_networking = setup_networking()
|
||||
client = boto3.client("autoscaling", region_name="us-east-1")
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_auto_scaling_group(
|
||||
AutoScalingGroupName="test_asg",
|
||||
InstanceId=invalid_instance_id,
|
||||
@ -791,9 +791,9 @@ def test_create_autoscaling_group_from_invalid_instance_id():
|
||||
VPCZoneIdentifier=mocked_networking["subnet1"],
|
||||
NewInstancesProtectedFromScaleIn=False,
|
||||
)
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.exception.response["Error"]["Code"].should.equal("ValidationError")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.value.response["Error"]["Code"].should.equal("ValidationError")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"Instance [{0}] is invalid.".format(invalid_instance_id)
|
||||
)
|
||||
|
||||
@ -842,7 +842,7 @@ def test_create_autoscaling_group_no_template_ref():
|
||||
)["LaunchTemplate"]
|
||||
client = boto3.client("autoscaling", region_name="us-east-1")
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_auto_scaling_group(
|
||||
AutoScalingGroupName="test_asg",
|
||||
LaunchTemplate={"Version": str(template["LatestVersionNumber"])},
|
||||
@ -852,9 +852,9 @@ def test_create_autoscaling_group_no_template_ref():
|
||||
VPCZoneIdentifier=mocked_networking["subnet1"],
|
||||
NewInstancesProtectedFromScaleIn=False,
|
||||
)
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.exception.response["Error"]["Code"].should.equal("ValidationError")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.value.response["Error"]["Code"].should.equal("ValidationError")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"Valid requests must contain either launchTemplateId or LaunchTemplateName"
|
||||
)
|
||||
|
||||
@ -874,7 +874,7 @@ def test_create_autoscaling_group_multiple_template_ref():
|
||||
)["LaunchTemplate"]
|
||||
client = boto3.client("autoscaling", region_name="us-east-1")
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_auto_scaling_group(
|
||||
AutoScalingGroupName="test_asg",
|
||||
LaunchTemplate={
|
||||
@ -888,9 +888,9 @@ def test_create_autoscaling_group_multiple_template_ref():
|
||||
VPCZoneIdentifier=mocked_networking["subnet1"],
|
||||
NewInstancesProtectedFromScaleIn=False,
|
||||
)
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.exception.response["Error"]["Code"].should.equal("ValidationError")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.value.response["Error"]["Code"].should.equal("ValidationError")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"Valid requests must contain either launchTemplateId or LaunchTemplateName"
|
||||
)
|
||||
|
||||
@ -899,7 +899,7 @@ def test_create_autoscaling_group_multiple_template_ref():
|
||||
def test_create_autoscaling_group_boto3_no_launch_configuration():
|
||||
mocked_networking = setup_networking()
|
||||
client = boto3.client("autoscaling", region_name="us-east-1")
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_auto_scaling_group(
|
||||
AutoScalingGroupName="test_asg",
|
||||
MinSize=0,
|
||||
@ -908,9 +908,9 @@ def test_create_autoscaling_group_boto3_no_launch_configuration():
|
||||
VPCZoneIdentifier=mocked_networking["subnet1"],
|
||||
NewInstancesProtectedFromScaleIn=False,
|
||||
)
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.exception.response["Error"]["Code"].should.equal("ValidationError")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.value.response["Error"]["Code"].should.equal("ValidationError")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"Valid requests must contain either LaunchTemplate, LaunchConfigurationName, "
|
||||
"InstanceId or MixedInstancesPolicy parameter."
|
||||
)
|
||||
@ -934,7 +934,7 @@ def test_create_autoscaling_group_boto3_multiple_launch_configurations():
|
||||
LaunchConfigurationName="test_launch_configuration"
|
||||
)
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_auto_scaling_group(
|
||||
AutoScalingGroupName="test_asg",
|
||||
LaunchConfigurationName="test_launch_configuration",
|
||||
@ -948,9 +948,9 @@ def test_create_autoscaling_group_boto3_multiple_launch_configurations():
|
||||
VPCZoneIdentifier=mocked_networking["subnet1"],
|
||||
NewInstancesProtectedFromScaleIn=False,
|
||||
)
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.exception.response["Error"]["Code"].should.equal("ValidationError")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.value.response["Error"]["Code"].should.equal("ValidationError")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"Valid requests must contain either LaunchTemplate, LaunchConfigurationName, "
|
||||
"InstanceId or MixedInstancesPolicy parameter."
|
||||
)
|
||||
|
@ -7,7 +7,7 @@ from moto import (
mock_ec2,
)

from utils import setup_networking
from .utils import setup_networking


@mock_autoscaling

@ -4,7 +4,7 @@ import boto3
import sure  # noqa
from moto import mock_autoscaling, mock_ec2, mock_elbv2

from utils import setup_networking
from .utils import setup_networking


@mock_elbv2

@ -152,8 +152,8 @@ def test_create_launch_configuration_using_ip_association_should_default_to_fals

@mock_autoscaling_deprecated
def test_create_launch_configuration_defaults():
""" Test with the minimum inputs and check that all of the proper defaults
are assigned for the other attributes """
"""Test with the minimum inputs and check that all of the proper defaults
are assigned for the other attributes"""
conn = boto.connect_autoscale()
config = LaunchConfiguration(
name="tester", image_id="ami-abcd1234", instance_type="m1.small"

@ -7,7 +7,7 @@ import sure  # noqa

from moto import mock_autoscaling_deprecated

from utils import setup_networking_deprecated
from .utils import setup_networking_deprecated


def setup_autoscale_group():
@ -170,7 +170,7 @@ def test_execute_policy_percent_change_in_capacity():

@mock_autoscaling_deprecated
def test_execute_policy_small_percent_change_in_capacity():
""" http://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide/as-scale-based-on-demand.html
"""http://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide/as-scale-based-on-demand.html
If PercentChangeInCapacity returns a value between 0 and 1,
Auto Scaling will round it off to 1."""
setup_autoscale_group()

@ -0,0 +1 @@
# This file is intentionally left blank.
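Note: the new, intentionally blank __init__.py turns the test directory into a regular package, which is what lets the helper imports above switch from the implicit `from utils import ...` to the explicit relative form. A sketch of the resulting layout — the filenames are inferred from the hunks above, and the helper contents are elided:

    tests/test_autoscaling/
        __init__.py              # added by this change, intentionally blank
        utils.py                 # provides setup_networking() / setup_networking_deprecated()
        test_autoscaling.py      # from .utils import setup_networking
        test_policies.py         # from .utils import setup_networking_deprecated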
@ -4,7 +4,7 @@ import sure  # noqa
import zipfile
from botocore.exceptions import ClientError
from moto import mock_cloudformation, mock_iam, mock_lambda, mock_s3, mock_sqs
from nose.tools import assert_raises
import pytest
from string import Template
from uuid import uuid4

@ -109,9 +109,9 @@ def test_lambda_can_be_deleted_by_cloudformation():
# Delete Stack
cf.delete_stack(StackName=stack["StackId"])
# Verify function was deleted
with assert_raises(ClientError) as e:
with pytest.raises(ClientError) as e:
lmbda.get_function(FunctionName=created_fn_name)
e.exception.response["Error"]["Code"].should.equal("ResourceNotFoundException")
e.value.response["Error"]["Code"].should.equal("ResourceNotFoundException")


@mock_cloudformation
@ -24,7 +24,7 @@ from moto import (
mock_sqs,
)
from moto.sts.models import ACCOUNT_ID
from nose.tools import assert_raises
import pytest
from botocore.exceptions import ClientError

_lambda_region = "us-west-2"
@ -93,6 +93,7 @@ def test_list_functions():
|
||||
result["Functions"].should.have.length_of(0)
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@mock_lambda
|
||||
def test_invoke_requestresponse_function():
|
||||
conn = boto3.client("lambda", _lambda_region)
|
||||
@ -137,6 +138,7 @@ def test_invoke_requestresponse_function():
|
||||
assert "LogResult" not in success_result
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@mock_lambda
|
||||
def test_invoke_requestresponse_function_with_arn():
|
||||
from moto.awslambda.models import ACCOUNT_ID
|
||||
@ -169,6 +171,7 @@ def test_invoke_requestresponse_function_with_arn():
|
||||
json.loads(payload).should.equal(in_data)
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@mock_lambda
|
||||
def test_invoke_event_function():
|
||||
conn = boto3.client("lambda", _lambda_region)
|
||||
@ -196,6 +199,7 @@ def test_invoke_event_function():
|
||||
json.loads(success_result["Payload"].read().decode("utf-8")).should.equal(in_data)
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@mock_lambda
|
||||
def test_invoke_dryrun_function():
|
||||
conn = boto3.client("lambda", _lambda_region)
|
||||
@ -258,6 +262,7 @@ if settings.TEST_SERVER_MODE:
|
||||
actual_payload.should.equal(expected_payload)
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@mock_logs
|
||||
@mock_sns
|
||||
@mock_ec2
|
||||
@ -497,7 +502,7 @@ def test_get_function():
|
||||
)
|
||||
|
||||
# Test get function when can't find function name
|
||||
with assert_raises(conn.exceptions.ResourceNotFoundException):
|
||||
with pytest.raises(conn.exceptions.ResourceNotFoundException):
|
||||
conn.get_function(FunctionName="junk", Qualifier="$LATEST")
|
||||
|
||||
|
||||
@ -729,6 +734,7 @@ def test_list_create_list_get_delete_list():
|
||||
conn.list_functions()["Functions"].should.have.length_of(0)
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@mock_lambda
|
||||
def test_invoke_lambda_error():
|
||||
lambda_fx = """
|
||||
@ -844,6 +850,7 @@ def test_tags_not_found():
|
||||
).should.throw(botocore.client.ClientError)
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@mock_lambda
|
||||
def test_invoke_async_function():
|
||||
conn = boto3.client("lambda", _lambda_region)
|
||||
@ -1115,6 +1122,7 @@ def test_create_event_source_mapping():
|
||||
assert response["State"] == "Enabled"
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@mock_logs
|
||||
@mock_lambda
|
||||
@mock_sqs
|
||||
@ -1156,6 +1164,7 @@ def test_invoke_function_from_sqs():
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@mock_logs
|
||||
@mock_lambda
|
||||
@mock_dynamodb2
|
||||
@ -1204,6 +1213,7 @@ def test_invoke_function_from_dynamodb_put():
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@mock_logs
|
||||
@mock_lambda
|
||||
@mock_dynamodb2
|
||||
@ -1286,6 +1296,7 @@ def wait_for_log_msg(expected_msg, log_group):
|
||||
return False, received_messages
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@mock_logs
|
||||
@mock_lambda
|
||||
@mock_sqs
|
||||
@ -1662,7 +1673,7 @@ def test_update_function_s3():
|
||||
@mock_lambda
|
||||
def test_create_function_with_invalid_arn():
|
||||
err = create_invalid_lambda("test-iam-role")
|
||||
err.exception.response["Error"]["Message"].should.equal(
|
||||
err.value.response["Error"]["Message"].should.equal(
|
||||
r"1 validation error detected: Value 'test-iam-role' at 'role' failed to satisfy constraint: Member must satisfy regular expression pattern: arn:(aws[a-zA-Z-]*)?:iam::(\d{12}):role/?[a-zA-Z_0-9+=,.@\-_/]+"
|
||||
)
|
||||
|
||||
@ -1670,7 +1681,7 @@ def test_create_function_with_invalid_arn():
|
||||
@mock_lambda
|
||||
def test_create_function_with_arn_from_different_account():
|
||||
err = create_invalid_lambda("arn:aws:iam::000000000000:role/example_role")
|
||||
err.exception.response["Error"]["Message"].should.equal(
|
||||
err.value.response["Error"]["Message"].should.equal(
|
||||
"Cross-account pass role is not allowed."
|
||||
)
|
||||
|
||||
@ -1680,7 +1691,7 @@ def test_create_function_with_unknown_arn():
|
||||
err = create_invalid_lambda(
|
||||
"arn:aws:iam::" + str(ACCOUNT_ID) + ":role/service-role/unknown_role"
|
||||
)
|
||||
err.exception.response["Error"]["Message"].should.equal(
|
||||
err.value.response["Error"]["Message"].should.equal(
|
||||
"The role defined for the function cannot be assumed by Lambda."
|
||||
)
|
||||
|
||||
@ -1800,7 +1811,7 @@ def test_get_function_concurrency():
|
||||
def create_invalid_lambda(role):
|
||||
conn = boto3.client("lambda", _lambda_region)
|
||||
zip_content = get_test_zip_file1()
|
||||
with assert_raises(ClientError) as err:
|
||||
with pytest.raises(ClientError) as err:
|
||||
conn.create_function(
|
||||
FunctionName="testFunction",
|
||||
Runtime="python2.7",
|
||||
|
1
tests/test_batch/__init__.py
Normal file
@ -0,0 +1 @@
# This file is intentionally left blank.
@ -6,10 +6,7 @@ import boto3
from botocore.exceptions import ClientError
import sure  # noqa
from moto import mock_batch, mock_iam, mock_ec2, mock_ecs, mock_logs

import functools
import nose

import pytest

DEFAULT_REGION = "eu-central-1"

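Note: with nose gone, the tests that genuinely reach the network are tagged @pytest.mark.network (the decorator added to test_submit_job just below, and to the lambda invoke tests earlier) so they can be deselected with -m "not network". A hedged sketch of declaring and using such a marker — the setup.cfg registration shown is an assumption about where a project registers it, not something visible in this diff:

    # setup.cfg (assumed location for the marker registration)
    # [tool:pytest]
    # markers =
    #     network: marks tests that require outbound network access

    import pytest

    @pytest.mark.network
    def test_needs_the_network():
        # run the quick suite with:  pytest -m "not network"
        assert True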
@ -689,6 +686,7 @@ def test_submit_job_by_name():
|
||||
@mock_ecs
|
||||
@mock_iam
|
||||
@mock_batch
|
||||
@pytest.mark.network
|
||||
def test_submit_job():
|
||||
ec2_client, iam_client, ecs_client, logs_client, batch_client = _get_clients()
|
||||
vpc_id, subnet_id, sg_id, iam_arn = _setup(ec2_client, iam_client)
|
||||
@ -740,7 +738,9 @@ def test_submit_job():
else:
raise RuntimeError("Batch job timed out")

resp = logs_client.describe_log_streams(logGroupName="/aws/batch/job")
resp = logs_client.describe_log_streams(
logGroupName="/aws/batch/job", logStreamNamePrefix="sayhellotomylittlefriend"
)
len(resp["logStreams"]).should.equal(1)
ls_name = resp["logStreams"][0]["logStreamName"]

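Note: passing logStreamNamePrefix narrows the describe_log_streams call to the streams written by this job, so the length-1 assertion no longer depends on what other Batch tests wrote to the shared /aws/batch/job group. A small sketch of the filtered call against moto's logs mock (the stream name is taken from the hunk above, the rest is illustrative):

    import boto3
    from moto import mock_logs

    @mock_logs
    def show_prefix_filter():
        logs = boto3.client("logs", region_name="eu-central-1")
        logs.create_log_group(logGroupName="/aws/batch/job")
        logs.create_log_stream(
            logGroupName="/aws/batch/job",
            logStreamName="sayhellotomylittlefriend/default/attempt-1",
        )
        # Only streams whose names start with the prefix are returned.
        resp = logs.describe_log_streams(
            logGroupName="/aws/batch/job", logStreamNamePrefix="sayhellotomylittlefriend"
        )
        assert len(resp["logStreams"]) == 1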
@ -755,6 +755,7 @@ def test_submit_job():
|
||||
@mock_ecs
|
||||
@mock_iam
|
||||
@mock_batch
|
||||
@pytest.mark.network
|
||||
def test_list_jobs():
|
||||
ec2_client, iam_client, ecs_client, logs_client, batch_client = _get_clients()
|
||||
vpc_id, subnet_id, sg_id, iam_arn = _setup(ec2_client, iam_client)
|
||||
|
@ -14,7 +14,6 @@ from moto import (
|
||||
mock_cloudformation,
|
||||
)
|
||||
import functools
|
||||
import nose
|
||||
import json
|
||||
|
||||
DEFAULT_REGION = "eu-central-1"
|
||||
|
@ -12,9 +12,7 @@ import boto.cloudformation
|
||||
from boto.exception import BotoServerError
|
||||
import sure # noqa
|
||||
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises # noqa
|
||||
from nose.tools import assert_raises
|
||||
import pytest
|
||||
from moto.core import ACCOUNT_ID
|
||||
|
||||
from moto import (
|
||||
@ -319,7 +317,7 @@ def test_delete_stack_by_id():
|
||||
conn.describe_stacks().should.have.length_of(1)
|
||||
conn.delete_stack(stack_id)
|
||||
conn.describe_stacks().should.have.length_of(0)
|
||||
with assert_raises(BotoServerError):
|
||||
with pytest.raises(BotoServerError):
|
||||
conn.describe_stacks("test_stack")
|
||||
|
||||
conn.describe_stacks(stack_id).should.have.length_of(1)
|
||||
@ -338,7 +336,7 @@ def test_delete_stack_with_resource_missing_delete_attr():
|
||||
@mock_cloudformation_deprecated
|
||||
def test_bad_describe_stack():
|
||||
conn = boto.connect_cloudformation()
|
||||
with assert_raises(BotoServerError):
|
||||
with pytest.raises(BotoServerError):
|
||||
conn.describe_stacks("bad_stack")
|
||||
|
||||
|
||||
@ -519,10 +517,10 @@ def test_update_stack_when_rolled_back():
|
||||
stack_id
|
||||
].status = "ROLLBACK_COMPLETE"
|
||||
|
||||
with assert_raises(BotoServerError) as err:
|
||||
with pytest.raises(BotoServerError) as err:
|
||||
conn.update_stack("test_stack", dummy_template_json)
|
||||
|
||||
ex = err.exception
|
||||
ex = err.value
|
||||
ex.body.should.match(r"is in ROLLBACK_COMPLETE state and can not be updated")
|
||||
ex.error_code.should.equal("ValidationError")
|
||||
ex.reason.should.equal("Bad Request")
|
||||
|
@ -9,8 +9,7 @@ import boto3
|
||||
from botocore.exceptions import ClientError
|
||||
import sure # noqa
|
||||
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
from nose.tools import assert_raises
|
||||
import pytest
|
||||
|
||||
from moto import mock_cloudformation, mock_s3, mock_sqs, mock_ec2
|
||||
from moto.core import ACCOUNT_ID
|
||||
@ -548,7 +547,7 @@ def test_boto3_list_stack_set_operations():
|
||||
@mock_cloudformation
|
||||
def test_boto3_bad_list_stack_resources():
|
||||
cf_conn = boto3.client("cloudformation", region_name="us-east-1")
|
||||
with assert_raises(ClientError):
|
||||
with pytest.raises(ClientError):
|
||||
cf_conn.list_stack_resources(StackName="test_stack_set")
|
||||
|
||||
|
||||
@ -1180,7 +1179,7 @@ def test_describe_updated_stack():
|
||||
@mock_cloudformation
|
||||
def test_bad_describe_stack():
|
||||
cf_conn = boto3.client("cloudformation", region_name="us-east-1")
|
||||
with assert_raises(ClientError):
|
||||
with pytest.raises(ClientError):
|
||||
cf_conn.describe_stacks(StackName="non_existent_stack")
|
||||
|
||||
|
||||
@ -1332,7 +1331,7 @@ def test_delete_stack_with_export():
|
||||
def test_export_names_must_be_unique():
|
||||
cf = boto3.resource("cloudformation", region_name="us-east-1")
|
||||
cf.create_stack(StackName="test_stack", TemplateBody=dummy_output_template_json)
|
||||
with assert_raises(ClientError):
|
||||
with pytest.raises(ClientError):
|
||||
cf.create_stack(StackName="test_stack", TemplateBody=dummy_output_template_json)
|
||||
|
||||
|
||||
@ -1373,7 +1372,7 @@ def test_boto3_create_duplicate_stack():
|
||||
StackName="test_stack", TemplateBody=dummy_template_json,
|
||||
)
|
||||
|
||||
with assert_raises(ClientError):
|
||||
with pytest.raises(ClientError):
|
||||
cf_conn.create_stack(
|
||||
StackName="test_stack", TemplateBody=dummy_template_json,
|
||||
)
|
||||
|
@ -3,7 +3,6 @@ import json
|
||||
import yaml
|
||||
import os
|
||||
import boto3
|
||||
from nose.tools import raises
|
||||
import botocore
|
||||
import sure # noqa
|
||||
|
||||
|
1
tests/test_cloudwatch/__init__.py
Normal file
@ -0,0 +1 @@
# This file is intentionally left blank.
@ -4,7 +4,7 @@ import boto3
|
||||
from botocore.exceptions import ClientError
|
||||
from datetime import datetime, timedelta
|
||||
from freezegun import freeze_time
|
||||
from nose.tools import assert_raises
|
||||
import pytest
|
||||
from uuid import uuid4
|
||||
import pytz
|
||||
import sure # noqa
|
||||
@ -111,18 +111,18 @@ def test_delete_invalid_alarm():
|
||||
)
|
||||
|
||||
# trying to delete an alarm which is not created along with valid alarm.
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
cloudwatch.delete_alarms(AlarmNames=["InvalidAlarmName", "testalarm1"])
|
||||
e.exception.response["Error"]["Code"].should.equal("ResourceNotFound")
|
||||
e.value.response["Error"]["Code"].should.equal("ResourceNotFound")
|
||||
|
||||
resp = cloudwatch.describe_alarms(AlarmNames=["testalarm1"])
|
||||
# making sure other alarms are not deleted in case of an error.
|
||||
len(resp["MetricAlarms"]).should.equal(1)
|
||||
|
||||
# test to check if the error raises if only one invalid alarm is tried to delete.
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
cloudwatch.delete_alarms(AlarmNames=["InvalidAlarmName"])
|
||||
e.exception.response["Error"]["Code"].should.equal("ResourceNotFound")
|
||||
e.value.response["Error"]["Code"].should.equal("ResourceNotFound")
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
@ -423,9 +423,9 @@ def test_list_metrics_paginated():
|
||||
# Verify that only a single page of metrics is returned
|
||||
cloudwatch.list_metrics()["Metrics"].should.be.empty
|
||||
# Verify we can't pass a random NextToken
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
cloudwatch.list_metrics(NextToken=str(uuid4()))
|
||||
e.exception.response["Error"]["Message"].should.equal(
|
||||
e.value.response["Error"]["Message"].should.equal(
|
||||
"Request parameter NextToken is invalid"
|
||||
)
|
||||
# Add a boatload of metrics
|
||||
@ -452,9 +452,9 @@ def test_list_metrics_paginated():
|
||||
len(third_page["Metrics"]).should.equal(100)
|
||||
third_page.shouldnt.contain("NextToken")
|
||||
# Verify that we can't reuse an existing token
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
cloudwatch.list_metrics(NextToken=first_page["NextToken"])
|
||||
e.exception.response["Error"]["Message"].should.equal(
|
||||
e.value.response["Error"]["Message"].should.equal(
|
||||
"Request parameter NextToken is invalid"
|
||||
)
|
||||
|
||||
|
@ -4,7 +4,7 @@ import sure # noqa
|
||||
from moto import mock_codecommit
|
||||
from moto.core import ACCOUNT_ID
|
||||
from botocore.exceptions import ClientError
|
||||
from nose.tools import assert_raises
|
||||
import pytest
|
||||
|
||||
|
||||
@mock_codecommit
|
||||
@ -81,12 +81,12 @@ def test_create_repository_repository_name_exists():
|
||||
|
||||
client.create_repository(repositoryName="repository_two")
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.create_repository(
|
||||
repositoryName="repository_two",
|
||||
repositoryDescription="description repo two",
|
||||
)
|
||||
ex = e.exception
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("CreateRepository")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("RepositoryNameExistsException")
|
||||
@ -99,9 +99,9 @@ def test_create_repository_repository_name_exists():
|
||||
def test_create_repository_invalid_repository_name():
|
||||
client = boto3.client("codecommit", region_name="eu-central-1")
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.create_repository(repositoryName="in_123_valid_@#$_characters")
|
||||
ex = e.exception
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("CreateRepository")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("InvalidRepositoryNameException")
|
||||
@ -156,9 +156,9 @@ def test_get_repository():
|
||||
|
||||
client = boto3.client("codecommit", region_name="us-east-1")
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.get_repository(repositoryName=repository_name)
|
||||
ex = e.exception
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("GetRepository")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("RepositoryDoesNotExistException")
|
||||
@ -171,9 +171,9 @@ def test_get_repository():
|
||||
def test_get_repository_invalid_repository_name():
|
||||
client = boto3.client("codecommit", region_name="eu-central-1")
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.get_repository(repositoryName="repository_one-@#@")
|
||||
ex = e.exception
|
||||
ex = e.value
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("InvalidRepositoryNameException")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
@ -207,9 +207,9 @@ def test_delete_repository():
|
||||
def test_delete_repository_invalid_repository_name():
|
||||
client = boto3.client("codecommit", region_name="us-east-1")
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.delete_repository(repositoryName="_rep@ository_one")
|
||||
ex = e.exception
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("DeleteRepository")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("InvalidRepositoryNameException")
|
||||
|
@ -4,7 +4,7 @@ from datetime import datetime
|
||||
import boto3
|
||||
import sure # noqa
|
||||
from botocore.exceptions import ClientError
|
||||
from nose.tools import assert_raises
|
||||
import pytest
|
||||
|
||||
from moto import mock_codepipeline, mock_iam
|
||||
|
||||
@ -77,9 +77,9 @@ def test_create_pipeline_errors():
|
||||
client_iam = boto3.client("iam", region_name="us-east-1")
|
||||
create_basic_codepipeline(client, "test-pipeline")
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
create_basic_codepipeline(client, "test-pipeline")
|
||||
ex = e.exception
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("CreatePipeline")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("InvalidStructureException")
|
||||
@ -87,7 +87,7 @@ def test_create_pipeline_errors():
|
||||
"A pipeline with the name 'test-pipeline' already exists in account '123456789012'"
|
||||
)
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.create_pipeline(
|
||||
pipeline={
|
||||
"name": "invalid-pipeline",
|
||||
@ -115,7 +115,7 @@ def test_create_pipeline_errors():
|
||||
],
|
||||
}
|
||||
)
|
||||
ex = e.exception
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("CreatePipeline")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("InvalidStructureException")
|
||||
@ -139,7 +139,7 @@ def test_create_pipeline_errors():
|
||||
),
|
||||
)["Role"]["Arn"]
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.create_pipeline(
|
||||
pipeline={
|
||||
"name": "invalid-pipeline",
|
||||
@ -167,7 +167,7 @@ def test_create_pipeline_errors():
|
||||
],
|
||||
}
|
||||
)
|
||||
ex = e.exception
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("CreatePipeline")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("InvalidStructureException")
|
||||
@ -175,7 +175,7 @@ def test_create_pipeline_errors():
|
||||
"CodePipeline is not authorized to perform AssumeRole on role arn:aws:iam::123456789012:role/wrong-role"
|
||||
)
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.create_pipeline(
|
||||
pipeline={
|
||||
"name": "invalid-pipeline",
|
||||
@ -203,7 +203,7 @@ def test_create_pipeline_errors():
|
||||
],
|
||||
}
|
||||
)
|
||||
ex = e.exception
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("CreatePipeline")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("InvalidStructureException")
|
||||
@ -282,9 +282,9 @@ def test_get_pipeline():
|
||||
def test_get_pipeline_errors():
|
||||
client = boto3.client("codepipeline", region_name="us-east-1")
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.get_pipeline(name="not-existing")
|
||||
ex = e.exception
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("GetPipeline")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("PipelineNotFoundException")
|
||||
@ -410,7 +410,7 @@ def test_update_pipeline():
|
||||
def test_update_pipeline_errors():
|
||||
client = boto3.client("codepipeline", region_name="us-east-1")
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.update_pipeline(
|
||||
pipeline={
|
||||
"name": "not-existing",
|
||||
@ -456,7 +456,7 @@ def test_update_pipeline_errors():
|
||||
],
|
||||
}
|
||||
)
|
||||
ex = e.exception
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("UpdatePipeline")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("ResourceNotFoundException")
|
||||
@ -517,11 +517,11 @@ def test_list_tags_for_resource():
|
||||
def test_list_tags_for_resource_errors():
|
||||
client = boto3.client("codepipeline", region_name="us-east-1")
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.list_tags_for_resource(
|
||||
resourceArn="arn:aws:codepipeline:us-east-1:123456789012:not-existing"
|
||||
)
|
||||
ex = e.exception
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("ListTagsForResource")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("ResourceNotFoundException")
|
||||
@ -555,12 +555,12 @@ def test_tag_resource_errors():
|
||||
name = "test-pipeline"
|
||||
create_basic_codepipeline(client, name)
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.tag_resource(
|
||||
resourceArn="arn:aws:codepipeline:us-east-1:123456789012:not-existing",
|
||||
tags=[{"key": "key-2", "value": "value-2"}],
|
||||
)
|
||||
ex = e.exception
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("TagResource")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("ResourceNotFoundException")
|
||||
@ -568,12 +568,12 @@ def test_tag_resource_errors():
|
||||
"The account with id '123456789012' does not include a pipeline with the name 'not-existing'"
|
||||
)
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.tag_resource(
|
||||
resourceArn="arn:aws:codepipeline:us-east-1:123456789012:{}".format(name),
|
||||
tags=[{"key": "aws:key", "value": "value"}],
|
||||
)
|
||||
ex = e.exception
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("TagResource")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("InvalidTagsException")
|
||||
@ -583,7 +583,7 @@ def test_tag_resource_errors():
|
||||
"msg=[Caller is an end user and not allowed to mutate system tags]"
|
||||
)
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.tag_resource(
|
||||
resourceArn="arn:aws:codepipeline:us-east-1:123456789012:{}".format(name),
|
||||
tags=[
|
||||
@ -591,7 +591,7 @@ def test_tag_resource_errors():
|
||||
for i in range(50)
|
||||
],
|
||||
)
|
||||
ex = e.exception
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("TagResource")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("TooManyTagsException")
|
||||
@ -634,12 +634,12 @@ def test_untag_resource():
|
||||
def test_untag_resource_errors():
|
||||
client = boto3.client("codepipeline", region_name="us-east-1")
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.untag_resource(
|
||||
resourceArn="arn:aws:codepipeline:us-east-1:123456789012:not-existing",
|
||||
tagKeys=["key"],
|
||||
)
|
||||
ex = e.exception
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("UntagResource")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("ResourceNotFoundException")
|
||||
|
1
tests/test_cognitoidentity/__init__.py
Normal file
@ -0,0 +1 @@
# This file is intentionally left blank.
@ -3,7 +3,7 @@ from __future__ import unicode_literals
|
||||
import boto3
|
||||
import sure # noqa
|
||||
from botocore.exceptions import ClientError
|
||||
from nose.tools import assert_raises
|
||||
import pytest
|
||||
|
||||
from moto import mock_cognitoidentity
|
||||
from moto.cognitoidentity.utils import get_random_identity_id
|
||||
@ -75,12 +75,12 @@ def test_describe_identity_pool():
|
||||
def test_describe_identity_pool_with_invalid_id_raises_error():
|
||||
conn = boto3.client("cognito-identity", "us-west-2")
|
||||
|
||||
with assert_raises(ClientError) as cm:
|
||||
with pytest.raises(ClientError) as cm:
|
||||
conn.describe_identity_pool(IdentityPoolId="us-west-2_non-existent")
|
||||
|
||||
cm.exception.operation_name.should.equal("DescribeIdentityPool")
|
||||
cm.exception.response["Error"]["Code"].should.equal("ResourceNotFoundException")
|
||||
cm.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
cm.value.operation_name.should.equal("DescribeIdentityPool")
|
||||
cm.value.response["Error"]["Code"].should.equal("ResourceNotFoundException")
|
||||
cm.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
|
||||
|
||||
# testing a helper function
|
||||
|
1
tests/test_cognitoidp/__init__.py
Normal file
@ -0,0 +1 @@
# This file is intentionally left blank.
@ -17,7 +17,7 @@ import boto3
|
||||
import sure # noqa
|
||||
from botocore.exceptions import ClientError
|
||||
from jose import jws, jwk, jwt
|
||||
from nose.tools import assert_raises
|
||||
import pytest
|
||||
|
||||
from moto import mock_cognitoidp, settings
|
||||
from moto.cognitoidp.utils import create_id
|
||||
@ -603,14 +603,14 @@ def test_update_identity_provider_no_user_pool():
|
||||
|
||||
new_value = str(uuid.uuid4())
|
||||
|
||||
with assert_raises(conn.exceptions.ResourceNotFoundException) as cm:
|
||||
with pytest.raises(conn.exceptions.ResourceNotFoundException) as cm:
|
||||
conn.update_identity_provider(
|
||||
UserPoolId="foo", ProviderName="bar", ProviderDetails={"thing": new_value}
|
||||
)
|
||||
|
||||
cm.exception.operation_name.should.equal("UpdateIdentityProvider")
|
||||
cm.exception.response["Error"]["Code"].should.equal("ResourceNotFoundException")
|
||||
cm.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
cm.value.operation_name.should.equal("UpdateIdentityProvider")
|
||||
cm.value.response["Error"]["Code"].should.equal("ResourceNotFoundException")
|
||||
cm.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
|
||||
|
||||
@mock_cognitoidp
|
||||
@ -623,16 +623,16 @@ def test_update_identity_provider_no_identity_provider():
|
||||
new_value = str(uuid.uuid4())
|
||||
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||
|
||||
with assert_raises(conn.exceptions.ResourceNotFoundException) as cm:
|
||||
with pytest.raises(conn.exceptions.ResourceNotFoundException) as cm:
|
||||
conn.update_identity_provider(
|
||||
UserPoolId=user_pool_id,
|
||||
ProviderName="foo",
|
||||
ProviderDetails={"thing": new_value},
|
||||
)
|
||||
|
||||
cm.exception.operation_name.should.equal("UpdateIdentityProvider")
|
||||
cm.exception.response["Error"]["Code"].should.equal("ResourceNotFoundException")
|
||||
cm.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
cm.value.operation_name.should.equal("UpdateIdentityProvider")
|
||||
cm.value.response["Error"]["Code"].should.equal("ResourceNotFoundException")
|
||||
cm.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
|
||||
|
||||
@mock_cognitoidp
|
||||
@ -699,11 +699,11 @@ def test_create_group_with_duplicate_name_raises_error():
|
||||
|
||||
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||
|
||||
with assert_raises(ClientError) as cm:
|
||||
with pytest.raises(ClientError) as cm:
|
||||
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||
cm.exception.operation_name.should.equal("CreateGroup")
|
||||
cm.exception.response["Error"]["Code"].should.equal("GroupExistsException")
|
||||
cm.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
cm.value.operation_name.should.equal("CreateGroup")
|
||||
cm.value.response["Error"]["Code"].should.equal("GroupExistsException")
|
||||
cm.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
|
||||
|
||||
@mock_cognitoidp
|
||||
@ -747,9 +747,9 @@ def test_delete_group():
|
||||
result = conn.delete_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||
list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected
|
||||
|
||||
with assert_raises(ClientError) as cm:
|
||||
with pytest.raises(ClientError) as cm:
|
||||
conn.get_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||
cm.exception.response["Error"]["Code"].should.equal("ResourceNotFoundException")
|
||||
cm.value.response["Error"]["Code"].should.equal("ResourceNotFoundException")
|
||||
|
||||
|
||||
@mock_cognitoidp
|
||||
@ -1565,17 +1565,17 @@ def test_resource_server():
|
||||
res["ResourceServer"]["Name"].should.equal(name)
|
||||
res["ResourceServer"]["Scopes"].should.equal(scopes)
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_resource_server(
|
||||
UserPoolId=user_pool_id, Identifier=identifier, Name=name, Scopes=scopes
|
||||
)
|
||||
|
||||
ex.exception.operation_name.should.equal("CreateResourceServer")
|
||||
ex.exception.response["Error"]["Code"].should.equal("InvalidParameterException")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.operation_name.should.equal("CreateResourceServer")
|
||||
ex.value.response["Error"]["Code"].should.equal("InvalidParameterException")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"%s already exists in user pool %s." % (identifier, user_pool_id)
|
||||
)
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
|
||||
|
||||
@mock_cognitoidp
|
||||
|
1
tests/test_config/__init__.py
Normal file
@ -0,0 +1 @@
# This file is intentionally left blank.

File diff suppressed because it is too large
1
tests/test_core/__init__.py
Normal file
@ -0,0 +1 @@
# This file is intentionally left blank.
@ -4,9 +4,7 @@ import boto3
import sure  # noqa
from botocore.exceptions import ClientError

# Ensure 'assert_raises' context manager support for Python 2.6
import tests.backport_assert_raises
from nose.tools import assert_raises
import pytest

from moto import mock_iam, mock_ec2, mock_s3, mock_sts, mock_elbv2, mock_rds2
from moto.core import set_initial_no_auth_action_count
@ -179,11 +177,11 @@ def test_invalid_client_token_id():
|
||||
aws_access_key_id="invalid",
|
||||
aws_secret_access_key="invalid",
|
||||
)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.get_user()
|
||||
ex.exception.response["Error"]["Code"].should.equal("InvalidClientTokenId")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("InvalidClientTokenId")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"The security token included in the request is invalid."
|
||||
)
|
||||
|
||||
@ -197,11 +195,11 @@ def test_auth_failure():
|
||||
aws_access_key_id="invalid",
|
||||
aws_secret_access_key="invalid",
|
||||
)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.describe_instances()
|
||||
ex.exception.response["Error"]["Code"].should.equal("AuthFailure")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(401)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("AuthFailure")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(401)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"AWS was not able to validate the provided access credentials"
|
||||
)
|
||||
|
||||
@ -216,11 +214,11 @@ def test_signature_does_not_match():
|
||||
aws_access_key_id=access_key["AccessKeyId"],
|
||||
aws_secret_access_key="invalid",
|
||||
)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.get_user()
|
||||
ex.exception.response["Error"]["Code"].should.equal("SignatureDoesNotMatch")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("SignatureDoesNotMatch")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"The request signature we calculated does not match the signature you provided. Check your AWS Secret Access Key and signing method. Consult the service documentation for details."
|
||||
)
|
||||
|
||||
@ -235,11 +233,11 @@ def test_auth_failure_with_valid_access_key_id():
|
||||
aws_access_key_id=access_key["AccessKeyId"],
|
||||
aws_secret_access_key="invalid",
|
||||
)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.describe_instances()
|
||||
ex.exception.response["Error"]["Code"].should.equal("AuthFailure")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(401)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("AuthFailure")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(401)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"AWS was not able to validate the provided access credentials"
|
||||
)
|
||||
|
||||
@ -255,11 +253,11 @@ def test_access_denied_with_no_policy():
|
||||
aws_access_key_id=access_key["AccessKeyId"],
|
||||
aws_secret_access_key=access_key["SecretAccessKey"],
|
||||
)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.describe_instances()
|
||||
ex.exception.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"User: arn:aws:iam::{account_id}:user/{user_name} is not authorized to perform: {operation}".format(
|
||||
account_id=ACCOUNT_ID,
|
||||
user_name=user_name,
|
||||
@ -285,11 +283,11 @@ def test_access_denied_with_not_allowing_policy():
|
||||
aws_access_key_id=access_key["AccessKeyId"],
|
||||
aws_secret_access_key=access_key["SecretAccessKey"],
|
||||
)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.describe_instances()
|
||||
ex.exception.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"User: arn:aws:iam::{account_id}:user/{user_name} is not authorized to perform: {operation}".format(
|
||||
account_id=ACCOUNT_ID,
|
||||
user_name=user_name,
|
||||
@ -321,11 +319,11 @@ def test_access_denied_for_run_instances():
|
||||
aws_access_key_id=access_key["AccessKeyId"],
|
||||
aws_secret_access_key=access_key["SecretAccessKey"],
|
||||
)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.run_instances(MaxCount=1, MinCount=1)
|
||||
ex.exception.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"User: arn:aws:iam::{account_id}:user/{user_name} is not authorized to perform: {operation}".format(
|
||||
account_id=ACCOUNT_ID, user_name=user_name, operation="ec2:RunInstances",
|
||||
)
|
||||
@ -352,11 +350,11 @@ def test_access_denied_with_denying_policy():
|
||||
aws_access_key_id=access_key["AccessKeyId"],
|
||||
aws_secret_access_key=access_key["SecretAccessKey"],
|
||||
)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_vpc(CidrBlock="10.0.0.0/16")
|
||||
ex.exception.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"User: arn:aws:iam::{account_id}:user/{user_name} is not authorized to perform: {operation}".format(
|
||||
account_id=ACCOUNT_ID, user_name=user_name, operation="ec2:CreateVpc"
|
||||
)
|
||||
@ -452,11 +450,11 @@ def test_s3_access_denied_with_denying_attached_group_policy():
|
||||
aws_access_key_id=access_key["AccessKeyId"],
|
||||
aws_secret_access_key=access_key["SecretAccessKey"],
|
||||
)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.list_buckets()
|
||||
ex.exception.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.exception.response["Error"]["Message"].should.equal("Access Denied")
|
||||
ex.value.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.value.response["Error"]["Message"].should.equal("Access Denied")
|
||||
|
||||
|
||||
@set_initial_no_auth_action_count(6)
|
||||
@ -486,11 +484,11 @@ def test_s3_access_denied_with_denying_inline_group_policy():
|
||||
aws_secret_access_key=access_key["SecretAccessKey"],
|
||||
)
|
||||
client.create_bucket(Bucket=bucket_name)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.get_object(Bucket=bucket_name, Key="sdfsdf")
|
||||
ex.exception.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.exception.response["Error"]["Message"].should.equal("Access Denied")
|
||||
ex.value.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.value.response["Error"]["Message"].should.equal("Access Denied")
|
||||
|
||||
|
||||
@set_initial_no_auth_action_count(10)
|
||||
@ -532,11 +530,11 @@ def test_access_denied_with_many_irrelevant_policies():
|
||||
aws_access_key_id=access_key["AccessKeyId"],
|
||||
aws_secret_access_key=access_key["SecretAccessKey"],
|
||||
)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_key_pair(KeyName="TestKey")
|
||||
ex.exception.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"User: arn:aws:iam::{account_id}:user/{user_name} is not authorized to perform: {operation}".format(
|
||||
account_id=ACCOUNT_ID, user_name=user_name, operation="ec2:CreateKeyPair"
|
||||
)
|
||||
@ -631,15 +629,15 @@ def test_access_denied_with_temporary_credentials():
|
||||
aws_secret_access_key=credentials["SecretAccessKey"],
|
||||
aws_session_token=credentials["SessionToken"],
|
||||
)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_db_instance(
|
||||
DBInstanceIdentifier="test-db-instance",
|
||||
DBInstanceClass="db.t3",
|
||||
Engine="aurora-postgresql",
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"User: arn:aws:sts::{account_id}:assumed-role/{role_name}/{session_name} is not authorized to perform: {operation}".format(
|
||||
account_id=ACCOUNT_ID,
|
||||
role_name=role_name,
|
||||
@ -678,11 +676,11 @@ def test_s3_invalid_access_key_id():
|
||||
aws_access_key_id="invalid",
|
||||
aws_secret_access_key="invalid",
|
||||
)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.list_buckets()
|
||||
ex.exception.response["Error"]["Code"].should.equal("InvalidAccessKeyId")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("InvalidAccessKeyId")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"The AWS Access Key Id you provided does not exist in our records."
|
||||
)
|
||||
|
||||
@ -700,11 +698,11 @@ def test_s3_signature_does_not_match():
|
||||
aws_secret_access_key="invalid",
|
||||
)
|
||||
client.create_bucket(Bucket=bucket_name)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.put_object(Bucket=bucket_name, Key="abc")
|
||||
ex.exception.response["Error"]["Code"].should.equal("SignatureDoesNotMatch")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("SignatureDoesNotMatch")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"The request signature we calculated does not match the signature you provided. Check your key and signing method."
|
||||
)
|
||||
|
||||
@ -736,11 +734,11 @@ def test_s3_access_denied_not_action():
|
||||
aws_secret_access_key=access_key["SecretAccessKey"],
|
||||
)
|
||||
client.create_bucket(Bucket=bucket_name)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.delete_object(Bucket=bucket_name, Key="sdfsdf")
|
||||
ex.exception.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.exception.response["Error"]["Message"].should.equal("Access Denied")
|
||||
ex.value.response["Error"]["Code"].should.equal("AccessDenied")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
|
||||
ex.value.response["Error"]["Message"].should.equal("Access Denied")
|
||||
|
||||
|
||||
@set_initial_no_auth_action_count(4)
|
||||
@ -776,10 +774,10 @@ def test_s3_invalid_token_with_temporary_credentials():
|
||||
aws_session_token="invalid",
|
||||
)
|
||||
client.create_bucket(Bucket=bucket_name)
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.list_bucket_metrics_configurations(Bucket=bucket_name)
|
||||
ex.exception.response["Error"]["Code"].should.equal("InvalidToken")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("InvalidToken")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"The provided token is malformed or otherwise invalid."
|
||||
)
|
||||
|
@ -4,8 +4,7 @@ from boto.exception import EC2ResponseError
import sure  # noqa
import unittest

import tests.backport_assert_raises  # noqa
from nose.tools import assert_raises
import pytest

from moto import mock_ec2_deprecated, mock_s3_deprecated

@ -25,23 +24,25 @@ def test_basic_decorator():
|
||||
list(conn.get_all_instances()).should.equal([])
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
def test_context_manager():
|
||||
conn = boto.connect_ec2("the_key", "the_secret")
|
||||
with assert_raises(EC2ResponseError):
|
||||
with pytest.raises(EC2ResponseError):
|
||||
conn.get_all_instances()
|
||||
|
||||
with mock_ec2_deprecated():
|
||||
conn = boto.connect_ec2("the_key", "the_secret")
|
||||
list(conn.get_all_instances()).should.equal([])
|
||||
|
||||
with assert_raises(EC2ResponseError):
|
||||
with pytest.raises(EC2ResponseError):
|
||||
conn = boto.connect_ec2("the_key", "the_secret")
|
||||
conn.get_all_instances()
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
def test_decorator_start_and_stop():
|
||||
conn = boto.connect_ec2("the_key", "the_secret")
|
||||
with assert_raises(EC2ResponseError):
|
||||
with pytest.raises(EC2ResponseError):
|
||||
conn.get_all_instances()
|
||||
|
||||
mock = mock_ec2_deprecated()
|
||||
@ -50,7 +51,7 @@ def test_decorator_start_and_stop():
|
||||
list(conn.get_all_instances()).should.equal([])
|
||||
mock.stop()
|
||||
|
||||
with assert_raises(EC2ResponseError):
|
||||
with pytest.raises(EC2ResponseError):
|
||||
conn.get_all_instances()
|
||||
|
||||
|
||||
|
@ -1,6 +1,6 @@
from __future__ import unicode_literals
import sure  # noqa
from nose.tools import assert_raises
import pytest
import requests

from moto import mock_ec2, settings

@ -1,6 +1,6 @@
from __future__ import unicode_literals
import sure  # noqa
from nose.tools import assert_raises
import pytest
import requests

import boto3

@ -1,4 +1,5 @@
import requests
import pytest
import sure  # noqa

import boto3
@ -6,6 +7,7 @@ from moto import mock_sqs, settings


@mock_sqs
@pytest.mark.network
def test_passthrough_requests():
conn = boto3.client("sqs", region_name="us-west-1")
conn.create_queue(QueueName="queue1")
1
tests/test_datapipeline/__init__.py
Normal file
@ -0,0 +1 @@
# This file is intentionally left blank.
@ -4,11 +4,11 @@ import boto
|
||||
import boto3
|
||||
from botocore.exceptions import ClientError
|
||||
from moto import mock_datasync
|
||||
from nose.tools import assert_raises
|
||||
import pytest
|
||||
|
||||
|
||||
def create_locations(client, create_smb=False, create_s3=False):
|
||||
"""
|
||||
"""
|
||||
Convenience function for creating locations.
|
||||
Locations must exist before tasks can be created.
|
||||
"""
|
||||
@ -101,7 +101,7 @@ def test_describe_location_wrong():
|
||||
Password="",
|
||||
AgentArns=agent_arns,
|
||||
)
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
response = client.describe_location_s3(LocationArn=response["LocationArn"])
|
||||
|
||||
|
||||
@ -139,7 +139,7 @@ def test_delete_location():
response = client.list_locations()
assert len(response["Locations"]) == 0

with assert_raises(ClientError) as e:
with pytest.raises(ClientError):
response = client.delete_location(LocationArn=location_arn)

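Note: alongside the assert_raises → pytest.raises switch, these datasync hunks also drop the `as e` binding wherever the exception payload is never inspected; pytest.raises works fine as a bare context manager. Sketch of both forms (the failing helper is hypothetical):

    import pytest
    from botocore.exceptions import ClientError

    def delete_missing_location():
        raise ClientError({"Error": {"Code": "InvalidRequestException"}}, "DeleteLocation")

    def test_delete_missing_location():
        # Only asserting that the call fails: no need to bind the ExceptionInfo.
        with pytest.raises(ClientError):
            delete_missing_location()

        # Bind it only when the error payload is actually checked.
        with pytest.raises(ClientError) as e:
            delete_missing_location()
        assert e.value.response["Error"]["Code"] == "InvalidRequestException"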
@ -159,11 +159,11 @@ def test_create_task_fail():
|
||||
""" Test that Locations must exist before a Task can be created """
|
||||
client = boto3.client("datasync", region_name="us-east-1")
|
||||
locations = create_locations(client, create_smb=True, create_s3=True)
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
response = client.create_task(
|
||||
SourceLocationArn="1", DestinationLocationArn=locations["s3_arn"]
|
||||
)
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
response = client.create_task(
|
||||
SourceLocationArn=locations["smb_arn"], DestinationLocationArn="2"
|
||||
)
|
||||
@ -220,7 +220,7 @@ def test_describe_task():
|
||||
def test_describe_task_not_exist():
|
||||
client = boto3.client("datasync", region_name="us-east-1")
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.describe_task(TaskArn="abc")
|
||||
|
||||
|
||||
@ -262,7 +262,7 @@ def test_update_task():
|
||||
assert response["Name"] == updated_name
|
||||
assert response["Options"] == updated_options
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError):
|
||||
client.update_task(TaskArn="doesnt_exist")
|
||||
|
||||
|
||||
@ -286,7 +286,7 @@ def test_delete_task():
|
||||
response = client.list_tasks()
|
||||
assert len(response["Tasks"]) == 0
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError):
|
||||
response = client.delete_task(TaskArn=task_arn)
|
||||
|
||||
|
||||
@ -328,7 +328,7 @@ def test_start_task_execution_twice():
|
||||
assert "TaskExecutionArn" in response
|
||||
task_execution_arn = response["TaskExecutionArn"]
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
response = client.start_task_execution(TaskArn=task_arn)
|
||||
|
||||
|
||||
@ -392,7 +392,7 @@ def test_describe_task_execution():
|
||||
def test_describe_task_execution_not_exist():
|
||||
client = boto3.client("datasync", region_name="us-east-1")
|
||||
|
||||
with assert_raises(ClientError) as e:
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.describe_task_execution(TaskExecutionArn="abc")
|
||||
|
||||
|
||||
|
1
tests/test_dynamodb/__init__.py
Normal file
@ -0,0 +1 @@
# This file is intentionally left blank.
@ -4,8 +4,7 @@ import boto
import boto.dynamodb
import sure  # noqa
import requests
import tests.backport_assert_raises
from nose.tools import assert_raises
import pytest

from moto import mock_dynamodb, mock_dynamodb_deprecated
from moto.dynamodb import dynamodb_backend
@ -38,7 +37,7 @@ def test_list_tables_layer_1():
@mock_dynamodb_deprecated
def test_describe_missing_table():
conn = boto.connect_dynamodb("the_key", "the_secret")
with assert_raises(DynamoDBResponseError):
with pytest.raises(DynamoDBResponseError):
conn.describe_table("messages")

1
tests/test_dynamodb2/__init__.py
Normal file
@ -0,0 +1 @@
# This file is intentionally left blank.
@ -17,7 +17,7 @@ from tests.helpers import requires_boto_gte
import moto.dynamodb2.comparisons
import moto.dynamodb2.models

from nose.tools import assert_raises
import pytest

try:
import boto.dynamodb2
@ -72,7 +72,7 @@ def test_describe_missing_table():
conn = boto.dynamodb2.connect_to_region(
"us-west-2", aws_access_key_id="ak", aws_secret_access_key="sk"
)
with assert_raises(JSONResponseError):
with pytest.raises(JSONResponseError):
conn.describe_table("messages")


@ -201,7 +201,7 @@ def test_item_add_empty_string_exception():
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
conn.put_item(
TableName=name,
Item={
@ -213,9 +213,9 @@ def test_item_add_empty_string_exception():
},
)

ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("ValidationException")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"One or more parameter values were invalid: An AttributeValue may not contain an empty string"
)

@ -248,7 +248,7 @@ def test_update_item_with_empty_string_exception():
},
)

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
conn.update_item(
TableName=name,
Key={"forum_name": {"S": "LOLCat Forum"}},
@ -256,9 +256,9 @@ def test_update_item_with_empty_string_exception():
ExpressionAttributeValues={":Body": {"S": ""}},
)

ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("ValidationException")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"One or more parameter values were invalid: An AttributeValue may not contain an empty string"
)

@ -1354,12 +1354,12 @@ def test_put_empty_item():
)
table = dynamodb.Table("test")

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
table.put_item(Item={})
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Message"].should.equal(
"One or more parameter values were invalid: Missing the key structure_id in the item"
)
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.value.response["Error"]["Code"].should.equal("ValidationException")


@mock_dynamodb2
@ -1373,12 +1373,12 @@ def test_put_item_nonexisting_hash_key():
)
table = dynamodb.Table("test")

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
table.put_item(Item={"a_terribly_misguided_id_attribute": "abcdef"})
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Message"].should.equal(
"One or more parameter values were invalid: Missing the key structure_id in the item"
)
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.value.response["Error"]["Code"].should.equal("ValidationException")


@mock_dynamodb2
@ -1398,12 +1398,12 @@ def test_put_item_nonexisting_range_key():
)
table = dynamodb.Table("test")

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
table.put_item(Item={"structure_id": "abcdef"})
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Message"].should.equal(
"One or more parameter values were invalid: Missing the key added_at in the item"
)
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.value.response["Error"]["Code"].should.equal("ValidationException")


def test_filter_expression():
@ -1980,7 +1980,7 @@ def test_delete_item():
assert response["Count"] == 2

# Test ReturnValues validation
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
table.delete_item(
Key={"client": "client1", "app": "app1"}, ReturnValues="ALL_NEW"
)
@ -2085,11 +2085,11 @@ def test_describe_continuous_backups_errors():
client = boto3.client("dynamodb", region_name="us-east-1")

# when
with assert_raises(Exception) as e:
with pytest.raises(Exception) as e:
client.describe_continuous_backups(TableName="not-existing-table")

# then
ex = e.exception
ex = e.value
ex.operation_name.should.equal("DescribeContinuousBackups")
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.response["Error"]["Code"].should.contain("TableNotFoundException")
@ -2171,14 +2171,14 @@ def test_update_continuous_backups_errors():
client = boto3.client("dynamodb", region_name="us-east-1")

# when
with assert_raises(Exception) as e:
with pytest.raises(Exception) as e:
client.update_continuous_backups(
TableName="not-existing-table",
PointInTimeRecoverySpecification={"PointInTimeRecoveryEnabled": True},
)

# then
ex = e.exception
ex = e.value
ex.operation_name.should.equal("UpdateContinuousBackups")
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.response["Error"]["Code"].should.contain("TableNotFoundException")
@ -2291,7 +2291,7 @@ def test_update_item_on_map():
ExpressionAttributeValues={":tb": "new_value"},
)
# Running this against AWS DDB gives an exception so make sure it also fails.:
with assert_raises(client.exceptions.ClientError):
with pytest.raises(client.exceptions.ClientError):
# botocore.exceptions.ClientError: An error occurred (ValidationException) when calling the UpdateItem
# operation: The document path provided in the update expression is invalid for update
table.update_item(
@ -2321,7 +2321,7 @@ def test_update_item_on_map():
)

# Test nested value for a nonexistent attribute throws a ClientError.
with assert_raises(client.exceptions.ClientError):
with pytest.raises(client.exceptions.ClientError):
table.update_item(
Key={"forum_name": "the-key", "subject": "123"},
UpdateExpression="SET nonexistent.#nested = :tb",
@ -2409,7 +2409,7 @@ def test_update_return_attributes():
r = update("col1", "val5", "NONE")
assert r["Attributes"] == {}

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
r = update("col1", "val6", "WRONG")


@ -2438,15 +2438,15 @@ def test_put_return_attributes():
)
assert r["Attributes"] == {"id": {"S": "foo"}, "col1": {"S": "val1"}}

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
dynamodb.put_item(
TableName="moto-test",
Item={"id": {"S": "foo"}, "col1": {"S": "val3"}},
ReturnValues="ALL_NEW",
)
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("ValidationException")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"Return values set to invalid value"
)

@ -2675,7 +2675,7 @@ def test_condition_expressions():
},
)

with assert_raises(client.exceptions.ConditionalCheckFailedException):
with pytest.raises(client.exceptions.ConditionalCheckFailedException):
client.put_item(
TableName="test1",
Item={
@ -2691,7 +2691,7 @@ def test_condition_expressions():
},
)

with assert_raises(client.exceptions.ConditionalCheckFailedException):
with pytest.raises(client.exceptions.ConditionalCheckFailedException):
client.put_item(
TableName="test1",
Item={
@ -2707,7 +2707,7 @@ def test_condition_expressions():
},
)

with assert_raises(client.exceptions.ConditionalCheckFailedException):
with pytest.raises(client.exceptions.ConditionalCheckFailedException):
client.put_item(
TableName="test1",
Item={
@ -2735,7 +2735,7 @@ def test_condition_expressions():
ExpressionAttributeValues={":match": {"S": "match"}},
)

with assert_raises(client.exceptions.ConditionalCheckFailedException):
with pytest.raises(client.exceptions.ConditionalCheckFailedException):
client.update_item(
TableName="test1",
Key={"client": {"S": "client1"}, "app": {"S": "app1"}},
@ -2745,7 +2745,7 @@ def test_condition_expressions():
ExpressionAttributeNames={"#existing": "existing", "#match": "match"},
)

with assert_raises(client.exceptions.ConditionalCheckFailedException):
with pytest.raises(client.exceptions.ConditionalCheckFailedException):
client.delete_item(
TableName="test1",
Key={"client": {"S": "client1"}, "app": {"S": "app1"}},
@ -2830,7 +2830,7 @@ def test_condition_expression__attr_doesnt_exist():
update_if_attr_doesnt_exist()

# Second time should fail
with assert_raises(client.exceptions.ConditionalCheckFailedException):
with pytest.raises(client.exceptions.ConditionalCheckFailedException):
update_if_attr_doesnt_exist()


@ -2870,7 +2870,7 @@ def test_condition_expression__and_order():

# ensure that the RHS of the AND expression is not evaluated if the LHS
# returns true (as it would result an error)
with assert_raises(client.exceptions.ConditionalCheckFailedException):
with pytest.raises(client.exceptions.ConditionalCheckFailedException):
client.update_item(
TableName="test",
Key={"forum_name": {"S": "the-key"}},
@ -2966,12 +2966,12 @@ def test_scan_by_non_exists_index():
],
)

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
dynamodb.scan(TableName="test", IndexName="non_exists_index")

ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("ValidationException")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"The table does not have the specified index: non_exists_index"
)

@ -3001,15 +3001,15 @@ def test_query_by_non_exists_index():
],
)

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
dynamodb.query(
TableName="test",
IndexName="non_exists_index",
KeyConditionExpression="CarModel=M",
)

ex.exception.response["Error"]["Code"].should.equal("ResourceNotFoundException")
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("ResourceNotFoundException")
ex.value.response["Error"]["Message"].should.equal(
"Invalid index: non_exists_index for table: test. Available indexes are: test_gsi"
)

@ -3041,7 +3041,7 @@ def test_batch_items_returns_all():
@mock_dynamodb2
def test_batch_items_throws_exception_when_requesting_100_items_for_single_table():
dynamodb = _create_user_table()
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
dynamodb.batch_get_item(
RequestItems={
"users": {
@ -3052,8 +3052,8 @@ def test_batch_items_throws_exception_when_requesting_100_items_for_single_table
}
}
)
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
msg = ex.exception.response["Error"]["Message"]
ex.value.response["Error"]["Code"].should.equal("ValidationException")
msg = ex.value.response["Error"]["Message"]
msg.should.contain("1 validation error detected: Value")
msg.should.contain(
"at 'requestItems.users.member.keys' failed to satisfy constraint: Member must have length less than or equal to 100"
@ -3063,7 +3063,7 @@ def test_batch_items_throws_exception_when_requesting_100_items_for_single_table
@mock_dynamodb2
def test_batch_items_throws_exception_when_requesting_100_items_across_all_tables():
dynamodb = _create_user_table()
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
dynamodb.batch_get_item(
RequestItems={
"users": {
@ -3080,8 +3080,8 @@ def test_batch_items_throws_exception_when_requesting_100_items_across_all_table
},
}
)
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("ValidationException")
ex.value.response["Error"]["Message"].should.equal(
"Too many items requested for the BatchGetItem call"
)

@ -3160,7 +3160,7 @@ def test_batch_items_with_basic_projection_expression_and_attr_expression_names(
@mock_dynamodb2
def test_batch_items_should_throw_exception_for_duplicate_request():
client = _create_user_table()
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
client.batch_get_item(
RequestItems={
"users": {
@ -3172,8 +3172,8 @@ def test_batch_items_should_throw_exception_for_duplicate_request():
}
}
)
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("ValidationException")
ex.value.response["Error"]["Message"].should.equal(
"Provided list of item keys contains duplicates"
)

@ -3186,7 +3186,7 @@ def test_index_with_unknown_attributes_should_fail():
"Some index key attributes are not defined in AttributeDefinitions."
)

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
dynamodb.create_table(
AttributeDefinitions=[
{"AttributeName": "customer_nr", "AttributeType": "S"},
@ -3210,8 +3210,8 @@ def test_index_with_unknown_attributes_should_fail():
BillingMode="PAY_PER_REQUEST",
)

ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.exception.response["Error"]["Message"].should.contain(expected_exception)
ex.value.response["Error"]["Code"].should.equal("ValidationException")
ex.value.response["Error"]["Message"].should.contain(expected_exception)


@mock_dynamodb2
@ -3366,7 +3366,7 @@ def test_update_list_index__set_index_of_a_string():
client.put_item(
TableName=table_name, Item={"id": {"S": "foo2"}, "itemstr": {"S": "somestring"}}
)
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
client.update_item(
TableName=table_name,
Key={"id": {"S": "foo2"}},
@ -3377,8 +3377,8 @@ def test_update_list_index__set_index_of_a_string():
"Item"
]

ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("ValidationException")
ex.value.response["Error"]["Message"].should.equal(
"The document path provided in the update expression is invalid for update"
)

@ -3615,19 +3615,19 @@ def test_item_size_is_under_400KB():


def assert_failure_due_to_item_size(func, **kwargs):
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
func(**kwargs)
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("ValidationException")
ex.value.response["Error"]["Message"].should.equal(
"Item size has exceeded the maximum allowed size"
)


def assert_failure_due_to_item_size_to_update(func, **kwargs):
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
func(**kwargs)
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("ValidationException")
ex.value.response["Error"]["Message"].should.equal(
"Item size to update has exceeded the maximum allowed size"
)

@ -3654,10 +3654,10 @@ def test_hash_key_cannot_use_begins_with_operations():
batch.put_item(Item=item)

table = dynamodb.Table("test-table")
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
table.query(KeyConditionExpression=Key("key").begins_with("prefix-"))
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("ValidationException")
ex.value.response["Error"]["Message"].should.equal(
"Query key condition not supported"
)

@ -4047,7 +4047,7 @@ def test_update_catches_invalid_list_append_operation():
)

# Update item using invalid list_append expression
with assert_raises(ParamValidationError) as ex:
with pytest.raises(ParamValidationError) as ex:
client.update_item(
TableName="TestTable",
Key={"SHA256": {"S": "sha-of-file"}},
@ -4056,10 +4056,8 @@ def test_update_catches_invalid_list_append_operation():
)

# Verify correct error is returned
str(ex.exception).should.match("Parameter validation failed:")
str(ex.exception).should.match(
"Invalid type for parameter ExpressionAttributeValues."
)
str(ex.value).should.match("Parameter validation failed:")
str(ex.value).should.match("Invalid type for parameter ExpressionAttributeValues.")


def _create_user_table():
@ -4166,12 +4164,12 @@ def test_query_catches_when_no_filters():
)
table = dynamo.Table("origin-rbu-dev")

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
table.query(TableName="original-rbu-dev")

ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("ValidationException")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"Either KeyConditions or QueryFilter should be present"
)

@ -4197,7 +4195,7 @@ def test_invalid_transact_get_items():

client = boto3.client("dynamodb", region_name="us-east-1")

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
client.transact_get_items(
TransactItems=[
{"Get": {"Key": {"id": {"S": "1"}}, "TableName": "test1"}}
@ -4205,25 +4203,23 @@ def test_invalid_transact_get_items():
]
)

ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.match(
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.match(
r"failed to satisfy constraint: Member must have length less than or equal to 25",
re.I,
)

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
client.transact_get_items(
TransactItems=[
{"Get": {"Key": {"id": {"S": "1"},}, "TableName": "test1"}},
{"Get": {"Key": {"id": {"S": "1"},}, "TableName": "non_exists_table"}},
{"Get": {"Key": {"id": {"S": "1"},}, "TableName": "test1",}},
{"Get": {"Key": {"id": {"S": "1"},}, "TableName": "non_exists_table",}},
]
)

ex.exception.response["Error"]["Code"].should.equal("ResourceNotFoundException")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
"Requested resource not found"
)
ex.value.response["Error"]["Code"].should.equal("ResourceNotFoundException")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal("Requested resource not found")


@mock_dynamodb2
@ -4491,7 +4487,7 @@ def test_transact_write_items_put_conditional_expressions():
TableName="test-table", Item={"id": {"S": "foo2"},},
)
# Put multiple items
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
dynamodb.transact_write_items(
TransactItems=[
{
@ -4514,8 +4510,8 @@ def test_transact_write_items_put_conditional_expressions():
]
)
# Assert the exception is correct
ex.exception.response["Error"]["Code"].should.equal("TransactionCanceledException")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Code"].should.equal("TransactionCanceledException")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
# Assert all are present
items = dynamodb.scan(TableName="test-table")["Items"]
items.should.have.length_of(1)
@ -4581,7 +4577,7 @@ def test_transact_write_items_conditioncheck_fails():
)
# Try to put an email address, but verify whether it exists
# ConditionCheck should fail
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
dynamodb.transact_write_items(
TransactItems=[
{
@ -4604,8 +4600,8 @@ def test_transact_write_items_conditioncheck_fails():
]
)
# Assert the exception is correct
ex.exception.response["Error"]["Code"].should.equal("TransactionCanceledException")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Code"].should.equal("TransactionCanceledException")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)

# Assert the original email address is still present
items = dynamodb.scan(TableName="test-table")["Items"]
@ -4687,7 +4683,7 @@ def test_transact_write_items_delete_with_failed_condition_expression():
)
# Try to delete an item that does not have an email address
# ConditionCheck should fail
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
dynamodb.transact_write_items(
TransactItems=[
{
@ -4701,8 +4697,8 @@ def test_transact_write_items_delete_with_failed_condition_expression():
]
)
# Assert the exception is correct
ex.exception.response["Error"]["Code"].should.equal("TransactionCanceledException")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Code"].should.equal("TransactionCanceledException")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
# Assert the original item is still present
items = dynamodb.scan(TableName="test-table")["Items"]
items.should.have.length_of(1)
@ -4758,7 +4754,7 @@ def test_transact_write_items_update_with_failed_condition_expression():
)
# Try to update an item that does not have an email address
# ConditionCheck should fail
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
dynamodb.transact_write_items(
TransactItems=[
{
@ -4774,8 +4770,8 @@ def test_transact_write_items_update_with_failed_condition_expression():
]
)
# Assert the exception is correct
ex.exception.response["Error"]["Code"].should.equal("TransactionCanceledException")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Code"].should.equal("TransactionCanceledException")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
# Assert the original item is still present
items = dynamodb.scan(TableName="test-table")["Items"]
items.should.have.length_of(1)
@ -5318,7 +5314,7 @@ def test_transact_write_items_fails_with_transaction_canceled_exception():
# Insert one item
dynamodb.put_item(TableName="test-table", Item={"id": {"S": "foo"}})
# Update two items, the one that exists and another that doesn't
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
dynamodb.transact_write_items(
TransactItems=[
{
@ -5343,9 +5339,9 @@ def test_transact_write_items_fails_with_transaction_canceled_exception():
},
]
)
ex.exception.response["Error"]["Code"].should.equal("TransactionCanceledException")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("TransactionCanceledException")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"Transaction cancelled, please refer cancellation reasons for specific reasons [None, ConditionalCheckFailed]"
)

@ -1,9 +1,10 @@
import pytest

from moto.dynamodb2.exceptions import IncorrectOperandType, IncorrectDataType
from moto.dynamodb2.models import Item, DynamoType
from moto.dynamodb2.parsing.executors import UpdateExpressionExecutor
from moto.dynamodb2.parsing.expressions import UpdateExpressionParser
from moto.dynamodb2.parsing.validators import UpdateExpressionValidator
from parameterized import parameterized


def test_execution_of_if_not_exists_not_existing_value():
@ -384,7 +385,8 @@ def test_execution_of_add_to_a_set():
assert expected_item == item


@parameterized(
@pytest.mark.parametrize(
"expression_attribute_values,unexpected_data_type",
[
({":value": {"S": "10"}}, "STRING",),
({":value": {"N": "10"}}, "NUMBER",),
@ -393,7 +395,7 @@ def test_execution_of_add_to_a_set():
({":value": {"NULL": True}}, "NULL",),
({":value": {"M": {"el0": {"S": "10"}}}}, "MAP",),
({":value": {"L": []}}, "LIST",),
]
],
)
def test_execution_of__delete_element_from_set_invalid_value(
expression_attribute_values, unexpected_data_type
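These hunks also swap the parameterized library for pytest's built-in parametrization: @parameterized(...) wrapping a bare list of cases becomes @pytest.mark.parametrize with an explicit argument-name string followed by the list, and the test function receives each case as named arguments. A small self-contained sketch of that decorator change, reusing values shown in the hunk above (the test body itself is illustrative):

import pytest


@pytest.mark.parametrize(
    "expression_attribute_values,unexpected_data_type",
    [
        ({":value": {"S": "10"}}, "STRING"),
        ({":value": {"N": "10"}}, "NUMBER"),
    ],
)
def test_parametrize_sketch(expression_attribute_values, unexpected_data_type):
    # each tuple in the list becomes one collected test case with these two arguments bound
    assert isinstance(expression_attribute_values, dict)
    assert isinstance(unexpected_data_type, str)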
@ -8,7 +8,7 @@ from boto3.dynamodb.conditions import Key
from botocore.exceptions import ClientError
import sure # noqa
from freezegun import freeze_time
from nose.tools import assert_raises
import pytest

from moto import mock_dynamodb2, mock_dynamodb2_deprecated
from boto.exception import JSONResponseError
@ -1353,10 +1353,10 @@ def test_update_item_with_expression():


def assert_failure_due_to_key_not_in_schema(func, **kwargs):
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
func(**kwargs)
ex.exception.response["Error"]["Code"].should.equal("ValidationException")
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("ValidationException")
ex.value.response["Error"]["Message"].should.equal(
"The provided key element does not match the schema"
)

@ -1,3 +1,5 @@
import pytest

from moto.dynamodb2.exceptions import (
AttributeIsReservedKeyword,
ExpressionAttributeValueNotDefined,
@ -10,12 +12,10 @@ from moto.dynamodb2.models import Item, DynamoType
from moto.dynamodb2.parsing.ast_nodes import (
NodeDepthLeftTypeFetcher,
UpdateExpressionSetAction,
UpdateExpressionValue,
DDBTypedValue,
)
from moto.dynamodb2.parsing.expressions import UpdateExpressionParser
from moto.dynamodb2.parsing.validators import UpdateExpressionValidator
from parameterized import parameterized


def test_validation_of_update_expression_with_keyword():
@ -41,8 +41,8 @@ def test_validation_of_update_expression_with_keyword():
assert e.keyword == "path"


@parameterized(
["SET a = #b + :val2", "SET a = :val2 + #b",]
@pytest.mark.parametrize(
"update_expression", ["SET a = #b + :val2", "SET a = :val2 + #b",]
)
def test_validation_of_a_set_statement_with_incorrect_passed_value(update_expression):
"""
@ -98,9 +98,7 @@ def test_validation_of_update_expression_with_attribute_that_does_not_exist_in_i
assert True


@parameterized(
["SET a = #c", "SET a = #c + #d",]
)
@pytest.mark.parametrize("update_expression", ["SET a = #c", "SET a = #c + #d",])
def test_validation_of_update_expression_with_attribute_name_that_is_not_defined(
update_expression,
):
1
tests/test_dynamodbstreams/__init__.py
Normal file
@ -0,0 +1 @@
# This file is intentionally left blank.
@ -1,6 +1,6 @@
from __future__ import unicode_literals, print_function

from nose.tools import assert_raises
import pytest

import boto3
from moto import mock_dynamodb2, mock_dynamodbstreams
@ -224,7 +224,7 @@ class TestEdges:
assert "LatestStreamLabel" in resp["TableDescription"]

# now try to enable it again
with assert_raises(conn.exceptions.ResourceInUseException):
with pytest.raises(conn.exceptions.ResourceInUseException):
resp = conn.update_table(
TableName="test-streams",
StreamSpecification={
@ -6,8 +6,8 @@ import boto3
from boto.exception import EC2ResponseError
from botocore.exceptions import ClientError

# Ensure 'assert_raises' context manager support for Python 2.6
from nose.tools import assert_raises
# Ensure 'pytest.raises' context manager support for Python 2.6
import pytest
import sure # noqa

from moto import mock_ec2_deprecated, mock_ec2
@ -27,13 +27,13 @@ def test_ami_create_and_delete():
reservation = conn.run_instances("ami-1234abcd")
instance = reservation.instances[0]

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
image_id = conn.create_image(
instance.id, "test-ami", "this is a test ami", dry_run=True
)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the CreateImage operation: Request would have succeeded, but DryRun flag is set"
)

@ -76,22 +76,22 @@ def test_ami_create_and_delete():
root_mapping.should_not.be.none

# Deregister
with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
success = conn.deregister_image(image_id, dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the DeregisterImage operation: Request would have succeeded, but DryRun flag is set"
)

success = conn.deregister_image(image_id)
success.should.be.true

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.deregister_image(image_id)
cm.exception.code.should.equal("InvalidAMIID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidAMIID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none


@requires_boto_gte("2.14.0")
@ -112,7 +112,7 @@ def test_ami_copy():

# Boto returns a 'CopyImage' object with an image_id attribute here. Use
# the image_id to fetch the full info.
with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
copy_image_ref = conn.copy_image(
source_image.region.name,
source_image.id,
@ -120,9 +120,9 @@ def test_ami_copy():
"this is a test copy ami",
dry_run=True,
)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the CopyImage operation: Request would have succeeded, but DryRun flag is set"
)

@ -152,28 +152,28 @@ def test_ami_copy():
)

# Copy from non-existent source ID.
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.copy_image(
source_image.region.name,
"ami-abcd1234",
"test-copy-ami",
"this is a test copy ami",
)
cm.exception.code.should.equal("InvalidAMIID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidAMIID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

# Copy from non-existent source region.
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
invalid_region = (
"us-east-1" if (source_image.region.name != "us-east-1") else "us-west-1"
)
conn.copy_image(
invalid_region, source_image.id, "test-copy-ami", "this is a test copy ami"
)
cm.exception.code.should.equal("InvalidAMIID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidAMIID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none


@mock_ec2
@ -208,11 +208,11 @@ def test_ami_tagging():
conn.create_image(instance.id, "test-ami", "this is a test ami")
image = conn.get_all_images()[0]

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
image.add_tag("a key", "some value", dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set"
)

@ -233,11 +233,11 @@ def test_ami_create_from_missing_instance():
conn = boto.connect_ec2("the_key", "the_secret")
args = ["i-abcdefg", "test-ami", "this is a test ami"]

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.create_image(*args)
cm.exception.code.should.equal("InvalidInstanceID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidInstanceID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none


@mock_ec2_deprecated
@ -353,22 +353,22 @@ def test_ami_filtering_via_tag():
def test_getting_missing_ami():
conn = boto.connect_ec2("the_key", "the_secret")

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.get_image("ami-missing")
cm.exception.code.should.equal("InvalidAMIID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidAMIID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none


@mock_ec2_deprecated
def test_getting_malformed_ami():
conn = boto.connect_ec2("the_key", "the_secret")

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.get_image("foo-missing")
cm.exception.code.should.equal("InvalidAMIID.Malformed")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidAMIID.Malformed")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none


@mock_ec2_deprecated
@ -399,11 +399,11 @@ def test_ami_attribute_group_permissions():
}

# Add 'all' group and confirm
with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
conn.modify_image_attribute(**dict(ADD_GROUP_ARGS, **{"dry_run": True}))
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the ModifyImageAttribute operation: Request would have succeeded, but DryRun flag is set"
)

@ -616,9 +616,9 @@ def test_ami_describe_executable_users_and_filter():
@mock_ec2_deprecated
def test_ami_attribute_user_and_group_permissions():
"""
Boto supports adding/removing both users and groups at the same time.
Just spot-check this -- input variations, idempotency, etc are validated
via user-specific and group-specific tests above.
Boto supports adding/removing both users and groups at the same time.
Just spot-check this -- input variations, idempotency, etc are validated
via user-specific and group-specific tests above.
"""
conn = boto.connect_ec2("the_key", "the_secret")
reservation = conn.run_instances("ami-1234abcd")
@ -678,86 +678,86 @@ def test_ami_attribute_error_cases():
image = conn.get_image(image_id)

# Error: Add with group != 'all'
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.modify_image_attribute(
image.id, attribute="launchPermission", operation="add", groups="everyone"
)
cm.exception.code.should.equal("InvalidAMIAttributeItemValue")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidAMIAttributeItemValue")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

# Error: Add with user ID that isn't an integer.
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.modify_image_attribute(
image.id,
attribute="launchPermission",
operation="add",
user_ids="12345678901A",
)
cm.exception.code.should.equal("InvalidAMIAttributeItemValue")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidAMIAttributeItemValue")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

# Error: Add with user ID that is > length 12.
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.modify_image_attribute(
image.id,
attribute="launchPermission",
operation="add",
user_ids="1234567890123",
)
cm.exception.code.should.equal("InvalidAMIAttributeItemValue")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidAMIAttributeItemValue")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

# Error: Add with user ID that is < length 12.
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.modify_image_attribute(
image.id,
attribute="launchPermission",
operation="add",
user_ids="12345678901",
)
cm.exception.code.should.equal("InvalidAMIAttributeItemValue")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidAMIAttributeItemValue")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

# Error: Add with one invalid user ID among other valid IDs, ensure no
# partial changes.
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.modify_image_attribute(
image.id,
attribute="launchPermission",
operation="add",
user_ids=["123456789011", "foo", "123456789022"],
)
cm.exception.code.should.equal("InvalidAMIAttributeItemValue")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidAMIAttributeItemValue")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

attributes = conn.get_image_attribute(image.id, attribute="launchPermission")
attributes.attrs.should.have.length_of(0)

# Error: Add with invalid image ID
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.modify_image_attribute(
"ami-abcd1234", attribute="launchPermission", operation="add", groups="all"
)
cm.exception.code.should.equal("InvalidAMIID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidAMIID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

# Error: Remove with invalid image ID
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.modify_image_attribute(
"ami-abcd1234",
attribute="launchPermission",
operation="remove",
groups="all",
)
cm.exception.code.should.equal("InvalidAMIID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidAMIID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none


@mock_ec2
@ -765,11 +765,11 @@ def test_ami_describe_non_existent():
ec2 = boto3.resource("ec2", region_name="us-west-1")
# Valid pattern but non-existent id
img = ec2.Image("ami-abcd1234")
with assert_raises(ClientError):
with pytest.raises(ClientError):
img.load()
# Invalid ami pattern
img = ec2.Image("not_an_ami_id")
with assert_raises(ClientError):
with pytest.raises(ClientError):
img.load()

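For the boto (non-boto3) EC2 tests, the caught exception is an EC2ResponseError rather than a botocore ClientError, so the ported assertions read its code, status, and request_id attributes directly; only the ExceptionInfo attribute changes, from cm.exception to cm.value. A short sketch of that shape, assuming moto's mock_ec2_deprecated and the boto connection style used in these files, with plain asserts standing in for the suite's sure-style checks:

import boto
import pytest
from boto.exception import EC2ResponseError
from moto import mock_ec2_deprecated


@mock_ec2_deprecated
def test_missing_ami_sketch():
    conn = boto.connect_ec2("the_key", "the_secret")
    with pytest.raises(EC2ResponseError) as cm:
        conn.get_image("ami-missing")
    # EC2ResponseError exposes code/status/request_id directly on the exception object
    assert cm.value.code == "InvalidAMIID.NotFound"
    assert cm.value.status == 400
    assert cm.value.request_id is not None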
@ -1,8 +1,7 @@
from __future__ import unicode_literals
import boto
import sure # noqa
from nose.tools import assert_raises
from nose.tools import assert_false
import pytest
from boto.exception import EC2ResponseError

from moto import mock_ec2_deprecated
@ -45,5 +44,5 @@ def test_delete_customer_gateways():
@mock_ec2_deprecated
def test_delete_customer_gateways_bad_id():
conn = boto.connect_vpc("the_key", "the_secret")
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.delete_customer_gateway("cgw-0123abcd")
@ -1,8 +1,7 @@
from __future__ import unicode_literals

# Ensure 'assert_raises' context manager support for Python 2.6
import tests.backport_assert_raises
from nose.tools import assert_raises
# Ensure 'pytest.raises' context manager support for Python 2.6
import pytest

import boto3
import boto
@ -33,11 +32,11 @@ def test_dhcp_options_associate_invalid_dhcp_id():
conn = boto.connect_vpc("the_key", "the_secret")
vpc = conn.create_vpc("10.0.0.0/16")

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.associate_dhcp_options("foo", vpc.id)
cm.exception.code.should.equal("InvalidDhcpOptionID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidDhcpOptionID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none


@mock_ec2_deprecated
@ -46,11 +45,11 @@ def test_dhcp_options_associate_invalid_vpc_id():
conn = boto.connect_vpc("the_key", "the_secret")
dhcp_options = conn.create_dhcp_options(SAMPLE_DOMAIN_NAME, SAMPLE_NAME_SERVERS)

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.associate_dhcp_options(dhcp_options.id, "foo")
cm.exception.code.should.equal("InvalidVpcID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidVpcID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none


@mock_ec2_deprecated
@ -64,19 +63,19 @@ def test_dhcp_options_delete_with_vpc():
rval = conn.associate_dhcp_options(dhcp_options_id, vpc.id)
rval.should.be.equal(True)

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.delete_dhcp_options(dhcp_options_id)
cm.exception.code.should.equal("DependencyViolation")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("DependencyViolation")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

vpc.delete()

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.get_all_dhcp_options([dhcp_options_id])
cm.exception.code.should.equal("InvalidDhcpOptionID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidDhcpOptionID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none


@mock_ec2_deprecated
@ -100,17 +99,17 @@ def test_create_dhcp_options_invalid_options():
conn = boto.connect_vpc("the_key", "the_secret")
servers = ["f", "f", "f", "f", "f"]

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.create_dhcp_options(ntp_servers=servers)
cm.exception.code.should.equal("InvalidParameterValue")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidParameterValue")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.create_dhcp_options(netbios_node_type="0")
cm.exception.code.should.equal("InvalidParameterValue")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidParameterValue")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none


@mock_ec2_deprecated
@ -131,11 +130,11 @@ def test_describe_dhcp_options_invalid_id():
"""get error on invalid dhcp_option_id lookup"""
conn = boto.connect_vpc("the_key", "the_secret")

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.get_all_dhcp_options(["1"])
cm.exception.code.should.equal("InvalidDhcpOptionID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidDhcpOptionID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none


@mock_ec2_deprecated
@ -149,11 +148,11 @@ def test_delete_dhcp_options():

conn.delete_dhcp_options(dhcp_option.id) # .should.be.equal(True)

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.get_all_dhcp_options([dhcp_option.id])
cm.exception.code.should.equal("InvalidDhcpOptionID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidDhcpOptionID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none


@mock_ec2_deprecated
@ -162,11 +161,11 @@ def test_delete_dhcp_options_invalid_id():

conn.create_dhcp_options()

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.delete_dhcp_options("dopt-abcd1234")
cm.exception.code.should.equal("InvalidDhcpOptionID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidDhcpOptionID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none


@mock_ec2_deprecated
@ -175,11 +174,11 @@ def test_delete_dhcp_options_malformed_id():

conn.create_dhcp_options()

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.delete_dhcp_options("foo-abcd1234")
cm.exception.code.should.equal("InvalidDhcpOptionsId.Malformed")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidDhcpOptionsId.Malformed")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none


@mock_ec2_deprecated
@@ -1,17 +1,15 @@
from __future__ import unicode_literals

# Ensure 'assert_raises' context manager support for Python 2.6
import tests.backport_assert_raises
from nose.tools import assert_raises

from moto.ec2 import ec2_backends
import boto
import boto3
from botocore.exceptions import ClientError
from boto.exception import EC2ResponseError
import sure  # noqa

from moto import mock_ec2_deprecated, mock_ec2
# Ensure 'pytest.raises' context manager support for Python 2.6
import pytest
import sure  # noqa
from boto.exception import EC2ResponseError
from botocore.exceptions import ClientError
from moto import mock_ec2, mock_ec2_deprecated
from moto.ec2 import ec2_backends
from moto.ec2.models import OWNER_ID
from moto.kms import mock_kms

@@ -31,11 +29,11 @@ def test_create_and_delete_volume():

volume = current_volume[0]

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
volume.delete(dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the DeleteVolume operation: Request would have succeeded, but DryRun flag is set"
)

@@ -46,11 +44,11 @@ def test_create_and_delete_volume():
my_volume.should.have.length_of(0)

# Deleting something that was already deleted should throw an error
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
volume.delete()
cm.exception.code.should.equal("InvalidVolume.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidVolume.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

@mock_ec2_deprecated
@@ -72,11 +70,11 @@ def test_delete_attached_volume():

# attempt to delete volume
# assert raises VolumeInUseError
with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
volume.delete()
ex.exception.error_code.should.equal("VolumeInUse")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("VolumeInUse")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"Volume {0} is currently attached to {1}".format(volume.id, instance.id)
)

@@ -95,11 +93,11 @@ def test_delete_attached_volume():
@mock_ec2_deprecated
def test_create_encrypted_volume_dryrun():
conn = boto.ec2.connect_to_region("us-east-1")
with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
conn.create_volume(80, "us-east-1a", encrypted=True, dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the CreateVolume operation: Request would have succeeded, but DryRun flag is set"
)

@@ -109,11 +107,11 @@ def test_create_encrypted_volume():
conn = boto.ec2.connect_to_region("us-east-1")
volume = conn.create_volume(80, "us-east-1a", encrypted=True)

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
conn.create_volume(80, "us-east-1a", encrypted=True, dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the CreateVolume operation: Request would have succeeded, but DryRun flag is set"
)

@@ -134,11 +132,11 @@ def test_filter_volume_by_id():
vol2 = conn.get_all_volumes(volume_ids=[volume1.id, volume2.id])
vol2.should.have.length_of(2)

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.get_all_volumes(volume_ids=["vol-does_not_exist"])
cm.exception.code.should.equal("InvalidVolume.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidVolume.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

@mock_ec2_deprecated
@@ -259,11 +257,11 @@ def test_volume_attach_and_detach():
volume.update()
volume.volume_state().should.equal("available")

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
volume.attach(instance.id, "/dev/sdh", dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the AttachVolume operation: Request would have succeeded, but DryRun flag is set"
)

@@ -275,11 +273,11 @@ def test_volume_attach_and_detach():

volume.attach_data.instance_id.should.equal(instance.id)

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
volume.detach(dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the DetachVolume operation: Request would have succeeded, but DryRun flag is set"
)

@@ -288,23 +286,23 @@ def test_volume_attach_and_detach():
volume.update()
volume.volume_state().should.equal("available")

with assert_raises(EC2ResponseError) as cm1:
with pytest.raises(EC2ResponseError) as cm1:
volume.attach("i-1234abcd", "/dev/sdh")
cm1.exception.code.should.equal("InvalidInstanceID.NotFound")
cm1.exception.status.should.equal(400)
cm1.exception.request_id.should_not.be.none
cm1.value.code.should.equal("InvalidInstanceID.NotFound")
cm1.value.status.should.equal(400)
cm1.value.request_id.should_not.be.none

with assert_raises(EC2ResponseError) as cm2:
with pytest.raises(EC2ResponseError) as cm2:
conn.detach_volume(volume.id, instance.id, "/dev/sdh")
cm2.exception.code.should.equal("InvalidAttachment.NotFound")
cm2.exception.status.should.equal(400)
cm2.exception.request_id.should_not.be.none
cm2.value.code.should.equal("InvalidAttachment.NotFound")
cm2.value.status.should.equal(400)
cm2.value.request_id.should_not.be.none

with assert_raises(EC2ResponseError) as cm3:
with pytest.raises(EC2ResponseError) as cm3:
conn.detach_volume(volume.id, "i-1234abcd", "/dev/sdh")
cm3.exception.code.should.equal("InvalidInstanceID.NotFound")
cm3.exception.status.should.equal(400)
cm3.exception.request_id.should_not.be.none
cm3.value.code.should.equal("InvalidInstanceID.NotFound")
cm3.value.status.should.equal(400)
cm3.value.request_id.should_not.be.none

@mock_ec2_deprecated
@@ -312,11 +310,11 @@ def test_create_snapshot():
conn = boto.ec2.connect_to_region("us-east-1")
volume = conn.create_volume(80, "us-east-1a")

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
snapshot = volume.create_snapshot("a dryrun snapshot", dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the CreateSnapshot operation: Request would have succeeded, but DryRun flag is set"
)

@@ -340,11 +338,11 @@ def test_create_snapshot():
conn.get_all_snapshots().should.have.length_of(num_snapshots)

# Deleting something that was already deleted should throw an error
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
snapshot.delete()
cm.exception.code.should.equal("InvalidSnapshot.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidSnapshot.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

@mock_ec2_deprecated
@@ -382,11 +380,11 @@ def test_filter_snapshot_by_id():
s.volume_id.should.be.within([volume2.id, volume3.id])
s.region.name.should.equal(conn.region.name)

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.get_all_snapshots(snapshot_ids=["snap-does_not_exist"])
cm.exception.code.should.equal("InvalidSnapshot.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidSnapshot.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

@mock_ec2_deprecated
@@ -484,11 +482,11 @@ def test_snapshot_attribute():

# Add 'all' group and confirm

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
conn.modify_snapshot_attribute(**dict(ADD_GROUP_ARGS, **{"dry_run": True}))
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the ModifySnapshotAttribute operation: Request would have succeeded, but DryRun flag is set"
)

@@ -506,11 +504,11 @@ def test_snapshot_attribute():
)

# Remove 'all' group and confirm
with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
conn.modify_snapshot_attribute(**dict(REMOVE_GROUP_ARGS, **{"dry_run": True}))
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the ModifySnapshotAttribute operation: Request would have succeeded, but DryRun flag is set"
)

@@ -527,40 +525,40 @@ def test_snapshot_attribute():
).should_not.throw(EC2ResponseError)

# Error: Add with group != 'all'
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.modify_snapshot_attribute(
snapshot.id,
attribute="createVolumePermission",
operation="add",
groups="everyone",
)
cm.exception.code.should.equal("InvalidAMIAttributeItemValue")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidAMIAttributeItemValue")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

# Error: Add with invalid snapshot ID
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.modify_snapshot_attribute(
"snapshot-abcd1234",
attribute="createVolumePermission",
operation="add",
groups="all",
)
cm.exception.code.should.equal("InvalidSnapshot.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidSnapshot.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

# Error: Remove with invalid snapshot ID
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.modify_snapshot_attribute(
"snapshot-abcd1234",
attribute="createVolumePermission",
operation="remove",
groups="all",
)
cm.exception.code.should.equal("InvalidSnapshot.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidSnapshot.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

@mock_ec2
@@ -595,12 +593,12 @@ def test_modify_snapshot_attribute():
}

# Add 'all' group and confirm
with assert_raises(ClientError) as cm:
with pytest.raises(ClientError) as cm:
ec2_client.modify_snapshot_attribute(**dict(ADD_GROUP_ARGS, **{"DryRun": True}))

cm.exception.response["Error"]["Code"].should.equal("DryRunOperation")
cm.exception.response["ResponseMetadata"]["RequestId"].should_not.be.none
cm.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
cm.value.response["Error"]["Code"].should.equal("DryRunOperation")
cm.value.response["ResponseMetadata"]["RequestId"].should_not.be.none
cm.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)

ec2_client.modify_snapshot_attribute(**ADD_GROUP_ARGS)

@@ -620,13 +618,13 @@ def test_modify_snapshot_attribute():
], "This snapshot should have public group permissions."

# Remove 'all' group and confirm
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
ec2_client.modify_snapshot_attribute(
**dict(REMOVE_GROUP_ARGS, **{"DryRun": True})
)
cm.exception.response["Error"]["Code"].should.equal("DryRunOperation")
cm.exception.response["ResponseMetadata"]["RequestId"].should_not.be.none
cm.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
cm.value.response["Error"]["Code"].should.equal("DryRunOperation")
cm.value.response["ResponseMetadata"]["RequestId"].should_not.be.none
cm.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)

ec2_client.modify_snapshot_attribute(**REMOVE_GROUP_ARGS)

@@ -646,40 +644,40 @@ def test_modify_snapshot_attribute():
], "This snapshot should have no permissions."

# Error: Add with group != 'all'
with assert_raises(ClientError) as cm:
with pytest.raises(ClientError) as cm:
ec2_client.modify_snapshot_attribute(
SnapshotId=snapshot.id,
Attribute="createVolumePermission",
OperationType="add",
GroupNames=["everyone"],
)
cm.exception.response["Error"]["Code"].should.equal("InvalidAMIAttributeItemValue")
cm.exception.response["ResponseMetadata"]["RequestId"].should_not.be.none
cm.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
cm.value.response["Error"]["Code"].should.equal("InvalidAMIAttributeItemValue")
cm.value.response["ResponseMetadata"]["RequestId"].should_not.be.none
cm.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)

# Error: Add with invalid snapshot ID
with assert_raises(ClientError) as cm:
with pytest.raises(ClientError) as cm:
ec2_client.modify_snapshot_attribute(
SnapshotId="snapshot-abcd1234",
Attribute="createVolumePermission",
OperationType="add",
GroupNames=["all"],
)
cm.exception.response["Error"]["Code"].should.equal("InvalidSnapshot.NotFound")
cm.exception.response["ResponseMetadata"]["RequestId"].should_not.be.none
cm.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
cm.value.response["Error"]["Code"].should.equal("InvalidSnapshot.NotFound")
cm.value.response["ResponseMetadata"]["RequestId"].should_not.be.none
cm.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)

# Error: Remove with invalid snapshot ID
with assert_raises(ClientError) as cm:
with pytest.raises(ClientError) as cm:
ec2_client.modify_snapshot_attribute(
SnapshotId="snapshot-abcd1234",
Attribute="createVolumePermission",
OperationType="remove",
GroupNames=["all"],
)
cm.exception.response["Error"]["Code"].should.equal("InvalidSnapshot.NotFound")
cm.exception.response["ResponseMetadata"]["RequestId"].should_not.be.none
cm.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
cm.value.response["Error"]["Code"].should.equal("InvalidSnapshot.NotFound")
cm.value.response["ResponseMetadata"]["RequestId"].should_not.be.none
cm.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)

# Test adding user id
ec2_client.modify_snapshot_attribute(
@@ -740,11 +738,11 @@ def test_create_volume_from_snapshot():
volume = conn.create_volume(80, "us-east-1a")
snapshot = volume.create_snapshot("a test snapshot")

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
snapshot = volume.create_snapshot("a test snapshot", dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the CreateSnapshot operation: Request would have succeeded, but DryRun flag is set"
)

@@ -786,13 +784,13 @@ def test_modify_attribute_blockDeviceMapping():

instance = reservation.instances[0]

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
instance.modify_attribute(
"blockDeviceMapping", {"/dev/sda1": True}, dry_run=True
)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the ModifyInstanceAttribute operation: Request would have succeeded, but DryRun flag is set"
)

@@ -809,11 +807,11 @@ def test_volume_tag_escaping():
vol = conn.create_volume(10, "us-east-1a")
snapshot = conn.create_snapshot(vol.id, "Desc")

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
snapshot.add_tags({"key": "</closed>"}, dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set"
)
snaps = [snap for snap in conn.get_all_snapshots() if snap.id == snapshot.id]
@@ -879,25 +877,25 @@ def test_copy_snapshot():
getattr(source, attrib).should.equal(getattr(dest, attrib))

# Copy from non-existent source ID.
with assert_raises(ClientError) as cm:
with pytest.raises(ClientError) as cm:
create_snapshot_error = ec2_client.create_snapshot(VolumeId="vol-abcd1234")
cm.exception.response["Error"]["Code"].should.equal("InvalidVolume.NotFound")
cm.exception.response["Error"]["Message"].should.equal(
"The volume 'vol-abcd1234' does not exist."
)
cm.exception.response["ResponseMetadata"]["RequestId"].should_not.be.none
cm.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
cm.value.response["Error"]["Code"].should.equal("InvalidVolume.NotFound")
cm.value.response["Error"]["Message"].should.equal(
"The volume 'vol-abcd1234' does not exist."
)
cm.value.response["ResponseMetadata"]["RequestId"].should_not.be.none
cm.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)

# Copy from non-existent source region.
with assert_raises(ClientError) as cm:
with pytest.raises(ClientError) as cm:
copy_snapshot_response = dest_ec2_client.copy_snapshot(
SourceSnapshotId=create_snapshot_response["SnapshotId"],
SourceRegion="eu-west-2",
)
cm.exception.response["Error"]["Code"].should.equal("InvalidSnapshot.NotFound")
cm.exception.response["Error"]["Message"].should.be.none
cm.exception.response["ResponseMetadata"]["RequestId"].should_not.be.none
cm.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
cm.value.response["Error"]["Code"].should.equal("InvalidSnapshot.NotFound")
cm.value.response["Error"]["Message"].should.be.none
cm.value.response["ResponseMetadata"]["RequestId"].should_not.be.none
cm.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)

@mock_ec2
@@ -921,12 +919,12 @@ def test_search_for_many_snapshots():
@mock_ec2
def test_create_unencrypted_volume_with_kms_key_fails():
resource = boto3.resource("ec2", region_name="us-east-1")
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
resource.create_volume(
AvailabilityZone="us-east-1a", Encrypted=False, KmsKeyId="key", Size=10
)
ex.exception.response["Error"]["Code"].should.equal("InvalidParameterDependency")
ex.exception.response["Error"]["Message"].should.contain("KmsKeyId")
ex.value.response["Error"]["Code"].should.equal("InvalidParameterDependency")
ex.value.response["Error"]["Message"].should.contain("KmsKeyId")

@mock_kms
@@ -934,9 +932,9 @@ def test_create_unencrypted_volume_with_kms_key_fails():
def test_create_encrypted_volume_without_kms_key_should_use_default_key():
kms = boto3.client("kms", region_name="us-east-1")
# Default master key for EBS does not exist until needed.
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
kms.describe_key(KeyId="alias/aws/ebs")
ex.exception.response["Error"]["Code"].should.equal("NotFoundException")
ex.value.response["Error"]["Code"].should.equal("NotFoundException")
# Creating an encrypted volume should create (and use) the default key.
resource = boto3.resource("ec2", region_name="us-east-1")
volume = resource.create_volume(
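Note (not part of the commit): the assertion chains left untouched by these hunks - `.should.equal(...)`, `.should.have.length_of(...)`, `.should.contain(...)`, `.should_not.be.none` - come from the `sure` package, which the files import as `import sure  # noqa`. A tiny sketch of that style, assuming `sure` is installed and running on CPython where it can patch `.should` onto ordinary objects:

import sure  # noqa - importing sure is what adds the .should property

(2 + 2).should.equal(4)
"InvalidParameterDependency: KmsKeyId".should.contain("KmsKeyId")
[].should.have.length_of(0)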
@@ -1,8 +1,7 @@
from __future__ import unicode_literals

# Ensure 'assert_raises' context manager support for Python 2.6
import tests.backport_assert_raises
from nose.tools import assert_raises
# Ensure 'pytest.raises' context manager support for Python 2.6
import pytest

import boto
import boto3
@@ -21,11 +20,11 @@ def test_eip_allocate_classic():
"""Allocate/release Classic EIP"""
conn = boto.connect_ec2("the_key", "the_secret")

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
standard = conn.allocate_address(dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the AllocateAddress operation: Request would have succeeded, but DryRun flag is set"
)

@@ -35,11 +34,11 @@ def test_eip_allocate_classic():
standard.instance_id.should.be.none
standard.domain.should.be.equal("standard")

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
standard.release(dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the ReleaseAddress operation: Request would have succeeded, but DryRun flag is set"
)

@@ -52,11 +51,11 @@ def test_eip_allocate_vpc():
"""Allocate/release VPC EIP"""
conn = boto.connect_ec2("the_key", "the_secret")

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
vpc = conn.allocate_address(domain="vpc", dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the AllocateAddress operation: Request would have succeeded, but DryRun flag is set"
)

@@ -84,11 +83,11 @@ def test_eip_allocate_invalid_domain():
"""Allocate EIP invalid domain"""
conn = boto.connect_ec2("the_key", "the_secret")

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.allocate_address(domain="bogus")
cm.exception.code.should.equal("InvalidParameterValue")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidParameterValue")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

@mock_ec2_deprecated
@@ -102,19 +101,19 @@ def test_eip_associate_classic():
eip = conn.allocate_address()
eip.instance_id.should.be.none

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.associate_address(public_ip=eip.public_ip)
cm.exception.code.should.equal("MissingParameter")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("MissingParameter")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
conn.associate_address(
instance_id=instance.id, public_ip=eip.public_ip, dry_run=True
)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the AssociateAddress operation: Request would have succeeded, but DryRun flag is set"
)

@@ -123,11 +122,11 @@ def test_eip_associate_classic():
eip = conn.get_all_addresses(addresses=[eip.public_ip])[0]
eip.instance_id.should.be.equal(instance.id)

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
conn.disassociate_address(public_ip=eip.public_ip, dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the DisAssociateAddress operation: Request would have succeeded, but DryRun flag is set"
)

@@ -153,11 +152,11 @@ def test_eip_associate_vpc():
eip = conn.allocate_address(domain="vpc")
eip.instance_id.should.be.none

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.associate_address(allocation_id=eip.allocation_id)
cm.exception.code.should.equal("MissingParameter")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("MissingParameter")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

conn.associate_address(instance_id=instance.id, allocation_id=eip.allocation_id)
# no .update() on address ):
@@ -169,11 +168,11 @@ def test_eip_associate_vpc():
eip.instance_id.should.be.equal("")
eip.association_id.should.be.none

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
eip.release(dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the ReleaseAddress operation: Request would have succeeded, but DryRun flag is set"
)

@@ -241,11 +240,11 @@ def test_eip_associate_network_interface():
eip = conn.allocate_address(domain="vpc")
eip.network_interface_id.should.be.none

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.associate_address(network_interface_id=eni.id)
cm.exception.code.should.equal("MissingParameter")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("MissingParameter")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

conn.associate_address(network_interface_id=eni.id, allocation_id=eip.allocation_id)
# no .update() on address ):
@@ -276,13 +275,13 @@ def test_eip_reassociate():
conn.associate_address(instance_id=instance1.id, public_ip=eip.public_ip)

# Different ID detects resource association
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.associate_address(
instance_id=instance2.id, public_ip=eip.public_ip, allow_reassociation=False
)
cm.exception.code.should.equal("Resource.AlreadyAssociated")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("Resource.AlreadyAssociated")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

conn.associate_address.when.called_with(
instance_id=instance2.id, public_ip=eip.public_ip, allow_reassociation=True
@@ -312,11 +311,11 @@ def test_eip_reassociate_nic():
conn.associate_address(network_interface_id=eni1.id, public_ip=eip.public_ip)

# Different ID detects resource association
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.associate_address(network_interface_id=eni2.id, public_ip=eip.public_ip)
cm.exception.code.should.equal("Resource.AlreadyAssociated")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("Resource.AlreadyAssociated")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

conn.associate_address.when.called_with(
network_interface_id=eni2.id, public_ip=eip.public_ip, allow_reassociation=True
@@ -336,11 +335,11 @@ def test_eip_associate_invalid_args():

eip = conn.allocate_address()

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.associate_address(instance_id=instance.id)
cm.exception.code.should.equal("MissingParameter")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("MissingParameter")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

instance.terminate()

@@ -350,11 +349,11 @@ def test_eip_disassociate_bogus_association():
"""Disassociate bogus EIP"""
conn = boto.connect_ec2("the_key", "the_secret")

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.disassociate_address(association_id="bogus")
cm.exception.code.should.equal("InvalidAssociationID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidAssociationID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

@mock_ec2_deprecated
@@ -362,11 +361,11 @@ def test_eip_release_bogus_eip():
"""Release bogus EIP"""
conn = boto.connect_ec2("the_key", "the_secret")

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.release_address(allocation_id="bogus")
cm.exception.code.should.equal("InvalidAllocationID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidAllocationID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

@mock_ec2_deprecated
@@ -374,11 +373,11 @@ def test_eip_disassociate_arg_error():
"""Invalid arguments disassociate address"""
conn = boto.connect_ec2("the_key", "the_secret")

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.disassociate_address()
cm.exception.code.should.equal("MissingParameter")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("MissingParameter")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

@mock_ec2_deprecated
@@ -386,11 +385,11 @@ def test_eip_release_arg_error():
"""Invalid arguments release address"""
conn = boto.connect_ec2("the_key", "the_secret")

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.release_address()
cm.exception.code.should.equal("MissingParameter")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("MissingParameter")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

@mock_ec2_deprecated
@@ -438,11 +437,11 @@ def test_eip_describe_none():
"""Error when search for bogus IP"""
conn = boto.connect_ec2("the_key", "the_secret")

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.get_all_addresses(addresses=["256.256.256.256"])
cm.exception.code.should.equal("InvalidAddress.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidAddress.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

@mock_ec2
@@ -1,8 +1,7 @@
from __future__ import unicode_literals

# Ensure 'assert_raises' context manager support for Python 2.6
import tests.backport_assert_raises
from nose.tools import assert_raises
# Ensure 'pytest.raises' context manager support for Python 2.6
import pytest

import boto3
from botocore.exceptions import ClientError
@@ -21,11 +20,11 @@ def test_elastic_network_interfaces():
vpc = conn.create_vpc("10.0.0.0/16")
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
eni = conn.create_network_interface(subnet.id, dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the CreateNetworkInterface operation: Request would have succeeded, but DryRun flag is set"
)

@@ -38,11 +37,11 @@ def test_elastic_network_interfaces():
eni.private_ip_addresses.should.have.length_of(1)
eni.private_ip_addresses[0].private_ip_address.startswith("10.").should.be.true

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
conn.delete_network_interface(eni.id, dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the DeleteNetworkInterface operation: Request would have succeeded, but DryRun flag is set"
)

@@ -51,22 +50,22 @@ def test_elastic_network_interfaces():
all_enis = conn.get_all_network_interfaces()
all_enis.should.have.length_of(0)

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.delete_network_interface(eni.id)
cm.exception.error_code.should.equal("InvalidNetworkInterfaceID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.error_code.should.equal("InvalidNetworkInterfaceID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

@mock_ec2_deprecated
def test_elastic_network_interfaces_subnet_validation():
conn = boto.connect_vpc("the_key", "the_secret")

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.create_network_interface("subnet-abcd1234")
cm.exception.error_code.should.equal("InvalidSubnetID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.error_code.should.equal("InvalidSubnetID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

@mock_ec2_deprecated
@@ -133,13 +132,13 @@ def test_elastic_network_interfaces_modify_attribute():
eni.groups.should.have.length_of(1)
eni.groups[0].id.should.equal(security_group1.id)

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
conn.modify_network_interface_attribute(
eni.id, "groupset", [security_group2.id], dry_run=True
)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the ModifyNetworkInterface operation: Request would have succeeded, but DryRun flag is set"
)

@@ -228,11 +227,11 @@ def test_elastic_network_interfaces_get_by_tag_name():
SubnetId=subnet.id, PrivateIpAddress="10.0.10.5"
)

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
eni1.create_tags(Tags=[{"Key": "Name", "Value": "eni1"}], DryRun=True)
ex.exception.response["Error"]["Code"].should.equal("DryRunOperation")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("DryRunOperation")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set"
)
@@ -1,7 +1,6 @@
from __future__ import unicode_literals

import tests.backport_assert_raises  # noqa
from nose.tools import assert_raises
import pytest

import boto3

@@ -36,7 +35,7 @@ def test_create_flow_logs_s3():
CreateBucketConfiguration={"LocationConstraint": "us-west-1"},
)

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
client.create_flow_logs(
ResourceType="VPC",
ResourceIds=[vpc["VpcId"]],
@@ -45,9 +44,9 @@ def test_create_flow_logs_s3():
LogDestination="arn:aws:s3:::" + bucket.name,
DryRun=True,
)
ex.exception.response["Error"]["Code"].should.equal("DryRunOperation")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("DryRunOperation")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"An error occurred (DryRunOperation) when calling the CreateFlowLogs operation: Request would have succeeded, but DryRun flag is set"
)

@@ -87,7 +86,7 @@ def test_create_flow_logs_cloud_watch():
vpc = client.create_vpc(CidrBlock="10.0.0.0/16")["Vpc"]
logs_client.create_log_group(logGroupName="test-group")

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
client.create_flow_logs(
ResourceType="VPC",
ResourceIds=[vpc["VpcId"]],
@@ -97,9 +96,9 @@ def test_create_flow_logs_cloud_watch():
DeliverLogsPermissionArn="arn:aws:iam::" + ACCOUNT_ID + ":role/test-role",
DryRun=True,
)
ex.exception.response["Error"]["Code"].should.equal("DryRunOperation")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("DryRunOperation")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"An error occurred (DryRunOperation) when calling the CreateFlowLogs operation: Request would have succeeded, but DryRun flag is set"
)

@@ -243,19 +242,19 @@ def test_delete_flow_logs_delete_many():
def test_delete_flow_logs_non_existing():
client = boto3.client("ec2", region_name="us-west-1")

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
client.delete_flow_logs(FlowLogIds=["fl-1a2b3c4d"])
ex.exception.response["Error"]["Code"].should.equal("InvalidFlowLogId.NotFound")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("InvalidFlowLogId.NotFound")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"These flow log ids in the input list are not found: [TotalCount: 1] fl-1a2b3c4d"
)

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
client.delete_flow_logs(FlowLogIds=["fl-1a2b3c4d", "fl-2b3c4d5e"])
ex.exception.response["Error"]["Code"].should.equal("InvalidFlowLogId.NotFound")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("InvalidFlowLogId.NotFound")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"These flow log ids in the input list are not found: [TotalCount: 2] fl-1a2b3c4d fl-2b3c4d5e"
)

@@ -304,7 +303,7 @@ def test_create_flow_logs_invalid_parameters():
CreateBucketConfiguration={"LocationConstraint": "us-west-1"},
)

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
client.create_flow_logs(
ResourceType="VPC",
ResourceIds=[vpc["VpcId"]],
@@ -313,26 +312,26 @@ def test_create_flow_logs_invalid_parameters():
LogDestination="arn:aws:s3:::" + bucket.name,
MaxAggregationInterval=10,
)
ex.exception.response["Error"]["Code"].should.equal("InvalidParameter")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("InvalidParameter")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"Invalid Flow Log Max Aggregation Interval"
)

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
client.create_flow_logs(
ResourceType="VPC",
ResourceIds=[vpc["VpcId"]],
TrafficType="ALL",
LogDestinationType="s3",
)
ex.exception.response["Error"]["Code"].should.equal("InvalidParameter")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("InvalidParameter")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"LogDestination can't be empty if LogGroupName is not provided."
)

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
client.create_flow_logs(
ResourceType="VPC",
ResourceIds=[vpc["VpcId"]],
@@ -340,22 +339,22 @@ def test_create_flow_logs_invalid_parameters():
LogDestinationType="s3",
LogGroupName="test",
)
ex.exception.response["Error"]["Code"].should.equal("InvalidParameter")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("InvalidParameter")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"LogDestination type must be cloud-watch-logs if LogGroupName is provided."
)

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
client.create_flow_logs(
ResourceType="VPC",
ResourceIds=[vpc["VpcId"]],
TrafficType="ALL",
LogGroupName="test",
)
ex.exception.response["Error"]["Code"].should.equal("InvalidParameter")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("InvalidParameter")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"DeliverLogsPermissionArn can't be empty if LogDestinationType is cloud-watch-logs."
)

@@ -368,7 +367,7 @@ def test_create_flow_logs_invalid_parameters():
)["FlowLogIds"]
response.should.have.length_of(1)

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
client.create_flow_logs(
ResourceType="VPC",
ResourceIds=[vpc["VpcId"]],
@@ -376,9 +375,9 @@ def test_create_flow_logs_invalid_parameters():
LogDestinationType="s3",
LogDestination="arn:aws:s3:::" + bucket.name,
)
ex.exception.response["Error"]["Code"].should.equal("FlowLogAlreadyExists")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("FlowLogAlreadyExists")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"Error. There is an existing Flow Log with the same configuration and log destination."
)

@@ -391,7 +390,7 @@ def test_create_flow_logs_invalid_parameters():
)["FlowLogIds"]
response.should.have.length_of(1)

with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
client.create_flow_logs(
ResourceType="VPC",
ResourceIds=[vpc["VpcId"]],
@@ -399,9 +398,9 @@ def test_create_flow_logs_invalid_parameters():
LogGroupName="test-group",
DeliverLogsPermissionArn="arn:aws:iam::" + ACCOUNT_ID + ":role/test-role",
)
ex.exception.response["Error"]["Code"].should.equal("FlowLogAlreadyExists")
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("FlowLogAlreadyExists")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"Error. There is an existing Flow Log with the same configuration and log destination."
)
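Note (not part of the commit): the boto3-based hunks above follow the same conversion, just against botocore's ClientError, which keeps the parsed AWS response on `.response`; `ex.exception.response[...]` therefore becomes `ex.value.response[...]`. A minimal, self-contained sketch of that flavour, assuming only pytest and botocore are installed (the raising function below is a hypothetical stand-in, not moto code):

import pytest
from botocore.exceptions import ClientError


def fake_create_flow_logs():
    # Hypothetical stand-in for a client call that the mocked backend rejects.
    raise ClientError(
        {"Error": {"Code": "InvalidParameter", "Message": "Invalid Flow Log Max Aggregation Interval"}},
        "CreateFlowLogs",
    )


def test_client_error_pattern():
    with pytest.raises(ClientError) as ex:
        fake_create_flow_logs()
    # ExceptionInfo.value is the ClientError instance, so the botocore response
    # dict is available exactly as in the assertions above.
    assert ex.value.response["Error"]["Code"] == "InvalidParameter"
    assert "Aggregation" in ex.value.response["Error"]["Message"]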
@@ -1,8 +1,7 @@
from __future__ import unicode_literals

# Ensure 'assert_raises' context manager support for Python 2.6
import tests.backport_assert_raises
from nose.tools import assert_raises
# Ensure 'pytest.raises' context manager support for Python 2.6
import pytest

import boto
import boto3
@@ -25,11 +24,11 @@ def test_console_output():
def test_console_output_without_instance():
conn = boto.connect_ec2("the_key", "the_secret")

with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.get_console_output("i-1234abcd")
cm.exception.code.should.equal("InvalidInstanceID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidInstanceID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

@mock_ec2
@@ -1,10 +1,9 @@
from __future__ import unicode_literals

# Ensure 'assert_raises' context manager support for Python 2.6
# Ensure 'pytest.raises' context manager support for Python 2.6
from botocore.exceptions import ClientError

import tests.backport_assert_raises
from nose.tools import assert_raises
import pytest

import base64
import ipaddress
@@ -52,11 +51,11 @@ def test_add_servers():
def test_instance_launch_and_terminate():
conn = boto.ec2.connect_to_region("us-east-1")

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
reservation = conn.run_instances("ami-1234abcd", dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the RunInstance operation: Request would have succeeded, but DryRun flag is set"
)

@@ -87,11 +86,11 @@ def test_instance_launch_and_terminate():
volume.attach_data.instance_id.should.equal(instance.id)
volume.status.should.equal("in-use")

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
conn.terminate_instances([instance.id], dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the TerminateInstance operation: Request would have succeeded, but DryRun flag is set"
)

@@ -216,14 +215,12 @@ def test_instance_detach_volume_wrong_path():
)
instance = result[0]
for volume in instance.volumes.all():
with assert_raises(ClientError) as ex:
with pytest.raises(ClientError) as ex:
instance.detach_volume(VolumeId=volume.volume_id, Device="/dev/sdf")

ex.exception.response["Error"]["Code"].should.equal(
"InvalidAttachment.NotFound"
)
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.exception.response["Error"]["Message"].should.equal(
ex.value.response["Error"]["Code"].should.equal("InvalidAttachment.NotFound")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"The volume {0} is not attached to instance {1} as device {2}".format(
volume.volume_id, instance.instance_id, "/dev/sdf"
)
@@ -290,11 +287,11 @@ def test_get_instances_by_id():
instance_ids.should.equal([instance1.id, instance2.id])

# Call get_all_instances with a bad id should raise an error
with assert_raises(EC2ResponseError) as cm:
with pytest.raises(EC2ResponseError) as cm:
conn.get_all_instances(instance_ids=[instance1.id, "i-1234abcd"])
cm.exception.code.should.equal("InvalidInstanceID.NotFound")
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
cm.value.code.should.equal("InvalidInstanceID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none

@mock_ec2
@@ -743,11 +740,11 @@ def test_instance_start_and_stop():

instance_ids = [instance.id for instance in instances]

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
stopped_instances = conn.stop_instances(instance_ids, dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the StopInstance operation: Request would have succeeded, but DryRun flag is set"
)

@@ -756,11 +753,11 @@ def test_instance_start_and_stop():
for instance in stopped_instances:
instance.state.should.equal("stopping")

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
started_instances = conn.start_instances([instances[0].id], dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the StartInstance operation: Request would have succeeded, but DryRun flag is set"
)

@@ -774,11 +771,11 @@ def test_instance_reboot():
reservation = conn.run_instances("ami-1234abcd")
instance = reservation.instances[0]

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
instance.reboot(dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the RebootInstance operation: Request would have succeeded, but DryRun flag is set"
)

@@ -792,11 +789,11 @@ def test_instance_attribute_instance_type():
reservation = conn.run_instances("ami-1234abcd")
instance = reservation.instances[0]

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
instance.modify_attribute("instanceType", "m1.small", dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the ModifyInstanceType operation: Request would have succeeded, but DryRun flag is set"
)

@@ -820,11 +817,11 @@ def test_modify_instance_attribute_security_groups():
"test security group 2", "this is a test security group 2"
).id

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
instance.modify_attribute("groupSet", [sg_id, sg_id2], dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the ModifyInstanceSecurityGroups operation: Request would have succeeded, but DryRun flag is set"
)

@@ -843,11 +840,11 @@ def test_instance_attribute_user_data():
reservation = conn.run_instances("ami-1234abcd")
instance = reservation.instances[0]

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
instance.modify_attribute("userData", "this is my user data", dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the ModifyUserData operation: Request would have succeeded, but DryRun flag is set"
)

@@ -873,11 +870,11 @@ def test_instance_attribute_source_dest_check():

# Set to false (note: Boto converts bool to string, eg 'false')

with assert_raises(EC2ResponseError) as ex:
with pytest.raises(EC2ResponseError) as ex:
instance.modify_attribute("sourceDestCheck", False, dry_run=True)
ex.exception.error_code.should.equal("DryRunOperation")
ex.exception.status.should.equal(400)
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the ModifySourceDestCheck operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -919,11 +916,11 @@ def test_user_data_with_run_instance():
|
||||
def test_run_instance_with_security_group_name():
|
||||
conn = boto.connect_ec2("the_key", "the_secret")
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
group = conn.create_security_group("group1", "some description", dry_run=True)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the CreateSecurityGroup operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -1196,11 +1193,11 @@ def test_instance_with_nic_attach_detach():
|
||||
set([group.id for group in eni.groups]).should.equal(set([security_group2.id]))
|
||||
|
||||
# Attach
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
conn.attach_network_interface(eni.id, instance.id, device_index=1, dry_run=True)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the AttachNetworkInterface operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -1223,11 +1220,11 @@ def test_instance_with_nic_attach_detach():
|
||||
)
|
||||
|
||||
# Detach
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
conn.detach_network_interface(instance_eni.attachment.id, dry_run=True)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the DetachNetworkInterface operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -1242,11 +1239,11 @@ def test_instance_with_nic_attach_detach():
|
||||
set([group.id for group in eni.groups]).should.equal(set([security_group2.id]))
|
||||
|
||||
# Detach with invalid attachment ID
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.detach_network_interface("eni-attach-1234abcd")
|
||||
cm.exception.code.should.equal("InvalidAttachmentID.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidAttachmentID.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -1307,12 +1304,12 @@ def test_run_instance_with_block_device_mappings_missing_ebs():
|
||||
"InstanceType": "t1.micro",
|
||||
"BlockDeviceMappings": [{"DeviceName": "/dev/sda2"}],
|
||||
}
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
ec2_client.run_instances(**kwargs)
|
||||
|
||||
ex.exception.response["Error"]["Code"].should.equal("MissingParameter")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("MissingParameter")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"The request must contain the parameter ebs"
|
||||
)
|
||||
|
||||
@ -1331,12 +1328,12 @@ def test_run_instance_with_block_device_mappings_missing_size():
|
||||
{"DeviceName": "/dev/sda2", "Ebs": {"VolumeType": "standard"}}
|
||||
],
|
||||
}
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
ec2_client.run_instances(**kwargs)
|
||||
|
||||
ex.exception.response["Error"]["Code"].should.equal("MissingParameter")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("MissingParameter")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"The request must contain the parameter size or snapshotId"
|
||||
)
|
||||
|
||||
@ -1410,11 +1407,11 @@ def test_describe_instance_status_with_instance_filter_deprecated():
|
||||
all_status[0].id.should.equal(instance.id)
|
||||
|
||||
# Call get_all_instance_status with a bad id should raise an error
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.get_all_instance_status(instance_ids=[instance.id, "i-1234abcd"])
|
||||
cm.exception.code.should.equal("InvalidInstanceID.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidInstanceID.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2
|
||||
@ -1537,13 +1534,13 @@ def test_get_instance_by_security_group():
|
||||
|
||||
security_group = conn.create_security_group("test", "test")
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
conn.modify_instance_attribute(
|
||||
instance.id, "groupSet", [security_group.id], dry_run=True
|
||||
)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the ModifyInstanceSecurityGroups operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -1661,13 +1658,13 @@ def test_describe_instance_attribute():
|
||||
]
|
||||
|
||||
for invalid_instance_attribute in invalid_instance_attributes:
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.describe_instance_attribute(
|
||||
InstanceId=instance_id, Attribute=invalid_instance_attribute
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("InvalidParameterValue")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.value.response["Error"]["Code"].should.equal("InvalidParameterValue")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
message = "Value ({invalid_instance_attribute}) for parameter attribute is invalid. Unknown attribute.".format(
|
||||
invalid_instance_attribute=invalid_instance_attribute
|
||||
)
|
||||
ex.exception.response["Error"]["Message"].should.equal(message)
|
||||
ex.value.response["Error"]["Message"].should.equal(message)
|
||||
|
@ -1,8 +1,7 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
# Ensure 'pytest.raises' context manager support for Python 2.6
|
||||
import pytest
|
||||
|
||||
import re
|
||||
|
||||
@ -28,11 +27,11 @@ def test_igw_create():
|
||||
|
||||
conn.get_all_internet_gateways().should.have.length_of(0)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
igw = conn.create_internet_gateway(dry_run=True)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the CreateInternetGateway operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -51,11 +50,11 @@ def test_igw_attach():
|
||||
igw = conn.create_internet_gateway()
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
conn.attach_internet_gateway(igw.id, vpc.id, dry_run=True)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the AttachInternetGateway operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -71,11 +70,11 @@ def test_igw_attach_bad_vpc():
|
||||
conn = boto.connect_vpc("the_key", "the_secret")
|
||||
igw = conn.create_internet_gateway()
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.attach_internet_gateway(igw.id, BAD_VPC)
|
||||
cm.exception.code.should.equal("InvalidVpcID.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidVpcID.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -87,11 +86,11 @@ def test_igw_attach_twice():
|
||||
vpc2 = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw.id, vpc1.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.attach_internet_gateway(igw.id, vpc2.id)
|
||||
cm.exception.code.should.equal("Resource.AlreadyAssociated")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("Resource.AlreadyAssociated")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -102,11 +101,11 @@ def test_igw_detach():
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw.id, vpc.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
conn.detach_internet_gateway(igw.id, vpc.id, dry_run=True)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the DetachInternetGateway operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -124,11 +123,11 @@ def test_igw_detach_wrong_vpc():
|
||||
vpc2 = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw.id, vpc1.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.detach_internet_gateway(igw.id, vpc2.id)
|
||||
cm.exception.code.should.equal("Gateway.NotAttached")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("Gateway.NotAttached")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -139,11 +138,11 @@ def test_igw_detach_invalid_vpc():
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw.id, vpc.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.detach_internet_gateway(igw.id, BAD_VPC)
|
||||
cm.exception.code.should.equal("Gateway.NotAttached")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("Gateway.NotAttached")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -153,11 +152,11 @@ def test_igw_detach_unattached():
|
||||
igw = conn.create_internet_gateway()
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.detach_internet_gateway(igw.id, vpc.id)
|
||||
cm.exception.code.should.equal("Gateway.NotAttached")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("Gateway.NotAttached")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -169,11 +168,11 @@ def test_igw_delete():
|
||||
igw = conn.create_internet_gateway()
|
||||
conn.get_all_internet_gateways().should.have.length_of(1)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
conn.delete_internet_gateway(igw.id, dry_run=True)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the DeleteInternetGateway operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -189,11 +188,11 @@ def test_igw_delete_attached():
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw.id, vpc.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.delete_internet_gateway(igw.id)
|
||||
cm.exception.code.should.equal("DependencyViolation")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("DependencyViolation")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -209,11 +208,11 @@ def test_igw_desribe():
|
||||
def test_igw_describe_bad_id():
|
||||
""" internet gateway fail to fetch by bad id """
|
||||
conn = boto.connect_vpc("the_key", "the_secret")
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.get_all_internet_gateways([BAD_IGW])
|
||||
cm.exception.code.should.equal("InvalidInternetGatewayID.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidInternetGatewayID.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
|
@ -1,8 +1,7 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
# Ensure 'pytest.raises' context manager support for Python 2.6
|
||||
import pytest
|
||||
|
||||
import boto
|
||||
import sure # noqa
|
||||
@ -56,22 +55,22 @@ def test_key_pairs_empty():
|
||||
def test_key_pairs_invalid_id():
|
||||
conn = boto.connect_ec2("the_key", "the_secret")
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.get_all_key_pairs("foo")
|
||||
cm.exception.code.should.equal("InvalidKeyPair.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidKeyPair.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_create():
|
||||
conn = boto.connect_ec2("the_key", "the_secret")
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
conn.create_key_pair("foo", dry_run=True)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the CreateKeyPair operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -110,11 +109,11 @@ def test_key_pairs_create_exist():
|
||||
conn.create_key_pair("foo")
|
||||
assert len(conn.get_all_key_pairs()) == 1
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.create_key_pair("foo")
|
||||
cm.exception.code.should.equal("InvalidKeyPair.Duplicate")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidKeyPair.Duplicate")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -130,11 +129,11 @@ def test_key_pairs_delete_exist():
|
||||
conn = boto.connect_ec2("the_key", "the_secret")
|
||||
conn.create_key_pair("foo")
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
r = conn.delete_key_pair("foo", dry_run=True)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the DeleteKeyPair operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -147,11 +146,11 @@ def test_key_pairs_delete_exist():
|
||||
def test_key_pairs_import():
|
||||
conn = boto.connect_ec2("the_key", "the_secret")
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
conn.import_key_pair("foo", RSA_PUBLIC_KEY_OPENSSH, dry_run=True)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the ImportKeyPair operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -176,34 +175,34 @@ def test_key_pairs_import_exist():
|
||||
assert kp.name == "foo"
|
||||
assert len(conn.get_all_key_pairs()) == 1
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.create_key_pair("foo")
|
||||
cm.exception.code.should.equal("InvalidKeyPair.Duplicate")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidKeyPair.Duplicate")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_invalid():
|
||||
conn = boto.connect_ec2("the_key", "the_secret")
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
conn.import_key_pair("foo", b"")
|
||||
ex.exception.error_code.should.equal("InvalidKeyPair.Format")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal("Key is not in valid OpenSSH public key format")
|
||||
ex.value.error_code.should.equal("InvalidKeyPair.Format")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal("Key is not in valid OpenSSH public key format")
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
conn.import_key_pair("foo", b"garbage")
|
||||
ex.exception.error_code.should.equal("InvalidKeyPair.Format")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal("Key is not in valid OpenSSH public key format")
|
||||
ex.value.error_code.should.equal("InvalidKeyPair.Format")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal("Key is not in valid OpenSSH public key format")
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
conn.import_key_pair("foo", DSA_PUBLIC_KEY_OPENSSH)
|
||||
ex.exception.error_code.should.equal("InvalidKeyPair.Format")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal("Key is not in valid OpenSSH public key format")
|
||||
ex.value.error_code.should.equal("InvalidKeyPair.Format")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal("Key is not in valid OpenSSH public key format")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
|
@ -1,7 +1,7 @@
|
||||
import boto3
|
||||
import sure # noqa
|
||||
|
||||
from nose.tools import assert_raises
|
||||
import pytest
|
||||
from botocore.client import ClientError
|
||||
|
||||
from moto import mock_ec2
|
||||
@ -30,7 +30,7 @@ def test_launch_template_create():
|
||||
lt["DefaultVersionNumber"].should.equal(1)
|
||||
lt["LatestVersionNumber"].should.equal(1)
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
cli.create_launch_template(
|
||||
LaunchTemplateName="test-template",
|
||||
LaunchTemplateData={
|
||||
@ -43,7 +43,7 @@ def test_launch_template_create():
|
||||
},
|
||||
)
|
||||
|
||||
str(ex.exception).should.equal(
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (InvalidLaunchTemplateName.AlreadyExistsException) when calling the CreateLaunchTemplate operation: Launch template name already in use."
|
||||
)
|
||||
|
||||
|
@ -2,7 +2,7 @@ from __future__ import unicode_literals
|
||||
import boto
|
||||
import boto3
|
||||
import sure # noqa
|
||||
from nose.tools import assert_raises
|
||||
import pytest
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
from moto import mock_ec2_deprecated, mock_ec2
|
||||
@ -261,7 +261,7 @@ def test_duplicate_network_acl_entry():
|
||||
RuleNumber=rule_number,
|
||||
)
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
default_network_acl.create_entry(
|
||||
CidrBlock="10.0.0.0/0",
|
||||
Egress=egress,
|
||||
@ -269,7 +269,7 @@ def test_duplicate_network_acl_entry():
|
||||
RuleAction="deny",
|
||||
RuleNumber=rule_number,
|
||||
)
|
||||
str(ex.exception).should.equal(
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (NetworkAclEntryAlreadyExists) when calling the CreateNetworkAclEntry "
|
||||
"operation: The network acl entry identified by {} already exists.".format(
|
||||
rule_number
|
||||
@ -297,10 +297,10 @@ def test_describe_network_acls():
|
||||
resp2 = conn.describe_network_acls()["NetworkAcls"]
|
||||
resp2.should.have.length_of(3)
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
conn.describe_network_acls(NetworkAclIds=["1"])
|
||||
|
||||
str(ex.exception).should.equal(
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (InvalidRouteTableID.NotFound) when calling the "
|
||||
"DescribeNetworkAcls operation: The routeTable ID '1' does not exist"
|
||||
)
|
||||
|
@ -1,8 +1,7 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
# Ensure 'pytest.raises' context manager support for Python 2.6
|
||||
import pytest
|
||||
|
||||
import boto
|
||||
import boto3
|
||||
@ -61,22 +60,22 @@ def test_route_tables_additional():
|
||||
local_route.state.should.equal("active")
|
||||
local_route.destination_cidr_block.should.equal(vpc.cidr_block)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.delete_vpc(vpc.id)
|
||||
cm.exception.code.should.equal("DependencyViolation")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("DependencyViolation")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
conn.delete_route_table(route_table.id)
|
||||
|
||||
all_route_tables = conn.get_all_route_tables(filters={"vpc-id": vpc.id})
|
||||
all_route_tables.should.have.length_of(1)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.delete_route_table("rtb-1234abcd")
|
||||
cm.exception.code.should.equal("InvalidRouteTableID.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidRouteTableID.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -197,11 +196,11 @@ def test_route_table_associations():
|
||||
association_id_idempotent.should.equal(association_id)
|
||||
|
||||
# Error: Attempt delete associated route table.
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.delete_route_table(route_table.id)
|
||||
cm.exception.code.should.equal("DependencyViolation")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("DependencyViolation")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
# Disassociate
|
||||
conn.disassociate_route_table(association_id)
|
||||
@ -211,33 +210,33 @@ def test_route_table_associations():
|
||||
route_table.associations.should.have.length_of(0)
|
||||
|
||||
# Error: Disassociate with invalid association ID
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.disassociate_route_table(association_id)
|
||||
cm.exception.code.should.equal("InvalidAssociationID.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidAssociationID.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
# Error: Associate with invalid subnet ID
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.associate_route_table(route_table.id, "subnet-1234abcd")
|
||||
cm.exception.code.should.equal("InvalidSubnetID.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidSubnetID.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
# Error: Associate with invalid route table ID
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.associate_route_table("rtb-1234abcd", subnet.id)
|
||||
cm.exception.code.should.equal("InvalidRouteTableID.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidRouteTableID.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@requires_boto_gte("2.16.0")
|
||||
@mock_ec2_deprecated
|
||||
def test_route_table_replace_route_table_association():
|
||||
"""
|
||||
Note: Boto has deprecated replace_route_table_association (which returns status)
|
||||
and now uses replace_route_table_association_with_assoc (which returns association ID).
|
||||
Note: Boto has deprecated replace_route_table_association (which returns status)
|
||||
and now uses replace_route_table_association_with_assoc (which returns association ID).
|
||||
"""
|
||||
conn = boto.connect_vpc("the_key", "the_secret")
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
@ -293,20 +292,20 @@ def test_route_table_replace_route_table_association():
|
||||
association_id_idempotent.should.equal(association_id2)
|
||||
|
||||
# Error: Replace association with invalid association ID
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.replace_route_table_association_with_assoc(
|
||||
"rtbassoc-1234abcd", route_table1.id
|
||||
)
|
||||
cm.exception.code.should.equal("InvalidAssociationID.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidAssociationID.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
# Error: Replace association with invalid route table ID
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.replace_route_table_association_with_assoc(association_id2, "rtb-1234abcd")
|
||||
cm.exception.code.should.equal("InvalidRouteTableID.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidRouteTableID.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -389,11 +388,11 @@ def test_routes_additional():
|
||||
]
|
||||
new_routes.should.have.length_of(0)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.delete_route(main_route_table.id, ROUTE_CIDR)
|
||||
cm.exception.code.should.equal("InvalidRoute.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidRoute.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -442,11 +441,11 @@ def test_routes_replace():
|
||||
target_route.state.should.equal("active")
|
||||
target_route.destination_cidr_block.should.equal(ROUTE_CIDR)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.replace_route("rtb-1234abcd", ROUTE_CIDR, gateway_id=igw.id)
|
||||
cm.exception.code.should.equal("InvalidRouteTableID.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidRouteTableID.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@requires_boto_gte("2.19.0")
|
||||
@ -571,11 +570,11 @@ def test_create_route_with_invalid_destination_cidr_block_parameter():
|
||||
internet_gateway.reload()
|
||||
|
||||
destination_cidr_block = "1000.1.0.0/20"
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
route = route_table.create_route(
|
||||
DestinationCidrBlock=destination_cidr_block, GatewayId=internet_gateway.id
|
||||
)
|
||||
str(ex.exception).should.equal(
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (InvalidParameterValue) when calling the CreateRoute "
|
||||
"operation: Value ({}) for parameter destinationCidrBlock is invalid. This is not a valid CIDR block.".format(
|
||||
destination_cidr_block
|
||||
|
@ -3,9 +3,8 @@ from __future__ import unicode_literals
|
||||
import copy
|
||||
import json
|
||||
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises # noqa
|
||||
from nose.tools import assert_raises
|
||||
# Ensure 'pytest.raises' context manager support for Python 2.6
|
||||
import pytest
|
||||
|
||||
import boto3
|
||||
import boto
|
||||
@ -20,13 +19,13 @@ from moto import mock_ec2, mock_ec2_deprecated
|
||||
def test_create_and_describe_security_group():
|
||||
conn = boto.connect_ec2("the_key", "the_secret")
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
security_group = conn.create_security_group(
|
||||
"test security group", "this is a test security group", dry_run=True
|
||||
)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the CreateSecurityGroup operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -38,13 +37,13 @@ def test_create_and_describe_security_group():
|
||||
security_group.description.should.equal("this is a test security group")
|
||||
|
||||
# Trying to create another group with the same name should throw an error
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.create_security_group(
|
||||
"test security group", "this is a test security group"
|
||||
)
|
||||
cm.exception.code.should.equal("InvalidGroup.Duplicate")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidGroup.Duplicate")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
all_groups = conn.get_all_security_groups()
|
||||
# The default group gets created automatically
|
||||
@ -57,11 +56,11 @@ def test_create_and_describe_security_group():
|
||||
def test_create_security_group_without_description_raises_error():
|
||||
conn = boto.connect_ec2("the_key", "the_secret")
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.create_security_group("test security group", "")
|
||||
cm.exception.code.should.equal("MissingParameter")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("MissingParameter")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -87,13 +86,13 @@ def test_create_and_describe_vpc_security_group():
|
||||
|
||||
# Trying to create another group with the same name in the same VPC should
|
||||
# throw an error
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.create_security_group(
|
||||
"test security group", "this is a test security group", vpc_id
|
||||
)
|
||||
cm.exception.code.should.equal("InvalidGroup.Duplicate")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidGroup.Duplicate")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
all_groups = conn.get_all_security_groups(filters={"vpc_id": [vpc_id]})
|
||||
|
||||
@ -146,18 +145,18 @@ def test_deleting_security_groups():
|
||||
conn.get_all_security_groups().should.have.length_of(4)
|
||||
|
||||
# Deleting a group that doesn't exist should throw an error
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.delete_security_group("foobar")
|
||||
cm.exception.code.should.equal("InvalidGroup.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidGroup.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
# Delete by name
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
conn.delete_security_group("test2", dry_run=True)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the DeleteSecurityGroup operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -184,7 +183,7 @@ def test_authorize_ip_range_and_revoke():
|
||||
conn = boto.connect_ec2("the_key", "the_secret")
|
||||
security_group = conn.create_security_group("test", "test")
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
success = security_group.authorize(
|
||||
ip_protocol="tcp",
|
||||
from_port="22",
|
||||
@ -192,9 +191,9 @@ def test_authorize_ip_range_and_revoke():
|
||||
cidr_ip="123.123.123.123/32",
|
||||
dry_run=True,
|
||||
)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the GrantSecurityGroupIngress operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -208,19 +207,19 @@ def test_authorize_ip_range_and_revoke():
|
||||
security_group.rules[0].grants[0].cidr_ip.should.equal("123.123.123.123/32")
|
||||
|
||||
# Wrong Cidr should throw error
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
security_group.revoke(
|
||||
ip_protocol="tcp",
|
||||
from_port="22",
|
||||
to_port="2222",
|
||||
cidr_ip="123.123.123.122/32",
|
||||
)
|
||||
cm.exception.code.should.equal("InvalidPermission.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidPermission.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
# Actually revoke
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
security_group.revoke(
|
||||
ip_protocol="tcp",
|
||||
from_port="22",
|
||||
@ -228,9 +227,9 @@ def test_authorize_ip_range_and_revoke():
|
||||
cidr_ip="123.123.123.123/32",
|
||||
dry_run=True,
|
||||
)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the RevokeSecurityGroupIngress operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -246,7 +245,7 @@ def test_authorize_ip_range_and_revoke():
|
||||
"testegress", "testegress", vpc_id="vpc-3432589"
|
||||
)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
success = conn.authorize_security_group_egress(
|
||||
egress_security_group.id,
|
||||
"tcp",
|
||||
@ -255,9 +254,9 @@ def test_authorize_ip_range_and_revoke():
|
||||
cidr_ip="123.123.123.123/32",
|
||||
dry_run=True,
|
||||
)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the GrantSecurityGroupEgress operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -285,7 +284,7 @@ def test_authorize_ip_range_and_revoke():
|
||||
).should.throw(EC2ResponseError)
|
||||
|
||||
# Actually revoke
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
conn.revoke_security_group_egress(
|
||||
egress_security_group.id,
|
||||
"tcp",
|
||||
@ -294,9 +293,9 @@ def test_authorize_ip_range_and_revoke():
|
||||
cidr_ip="123.123.123.123/32",
|
||||
dry_run=True,
|
||||
)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the RevokeSecurityGroupEgress operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -335,13 +334,13 @@ def test_authorize_other_group_and_revoke():
|
||||
security_group.rules[0].grants[0].group_id.should.equal(other_security_group.id)
|
||||
|
||||
# Wrong source group should throw error
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
security_group.revoke(
|
||||
ip_protocol="tcp", from_port="22", to_port="2222", src_group=wrong_group
|
||||
)
|
||||
cm.exception.code.should.equal("InvalidPermission.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidPermission.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
# Actually revoke
|
||||
security_group.revoke(
|
||||
@ -440,11 +439,11 @@ def test_get_all_security_groups():
|
||||
resp.should.have.length_of(1)
|
||||
resp[0].id.should.equal(sg1.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.get_all_security_groups(groupnames=["does_not_exist"])
|
||||
cm.exception.code.should.equal("InvalidGroup.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidGroup.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
resp.should.have.length_of(1)
|
||||
resp[0].id.should.equal(sg1.id)
|
||||
@ -469,13 +468,13 @@ def test_get_all_security_groups():
|
||||
def test_authorize_bad_cidr_throws_invalid_parameter_value():
|
||||
conn = boto.connect_ec2("the_key", "the_secret")
|
||||
security_group = conn.create_security_group("test", "test")
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
security_group.authorize(
|
||||
ip_protocol="tcp", from_port="22", to_port="2222", cidr_ip="123.123.123.123"
|
||||
)
|
||||
cm.exception.code.should.equal("InvalidParameterValue")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidParameterValue")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -485,11 +484,11 @@ def test_security_group_tagging():
|
||||
|
||||
sg = conn.create_security_group("test-sg", "Test SG", vpc.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
sg.add_tag("Test", "Tag", dry_run=True)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -534,13 +533,13 @@ def test_sec_group_rule_limit():
|
||||
other_sg = ec2_conn.create_security_group("test_2", "test_other")
|
||||
|
||||
# INGRESS
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
ec2_conn.authorize_security_group(
|
||||
group_id=sg.id,
|
||||
ip_protocol="-1",
|
||||
cidr_ip=["{0}.0.0.0/0".format(i) for i in range(110)],
|
||||
)
|
||||
cm.exception.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
cm.value.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
|
||||
sg.rules.should.be.empty
|
||||
# authorize a rule targeting a different sec group (because this count too)
|
||||
@ -556,17 +555,17 @@ def test_sec_group_rule_limit():
|
||||
)
|
||||
success.should.be.true
|
||||
# verify that we cannot authorize past the limit for a CIDR IP
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
ec2_conn.authorize_security_group(
|
||||
group_id=sg.id, ip_protocol="-1", cidr_ip=["100.0.0.0/0"]
|
||||
)
|
||||
cm.exception.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
cm.value.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
# verify that we cannot authorize past the limit for a different sec group
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
ec2_conn.authorize_security_group(
|
||||
group_id=sg.id, ip_protocol="-1", src_security_group_group_id=other_sg.id
|
||||
)
|
||||
cm.exception.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
cm.value.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
|
||||
# EGRESS
|
||||
# authorize a rule targeting a different sec group (because this count too)
|
||||
@ -581,17 +580,17 @@ def test_sec_group_rule_limit():
|
||||
group_id=sg.id, ip_protocol="-1", cidr_ip="{0}.0.0.0/0".format(i)
|
||||
)
|
||||
# verify that we cannot authorize past the limit for a CIDR IP
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
ec2_conn.authorize_security_group_egress(
|
||||
group_id=sg.id, ip_protocol="-1", cidr_ip="101.0.0.0/0"
|
||||
)
|
||||
cm.exception.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
cm.value.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
# verify that we cannot authorize past the limit for a different sec group
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
ec2_conn.authorize_security_group_egress(
|
||||
group_id=sg.id, ip_protocol="-1", src_group_id=other_sg.id
|
||||
)
|
||||
cm.exception.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
cm.value.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -605,13 +604,13 @@ def test_sec_group_rule_limit_vpc():
|
||||
other_sg = ec2_conn.create_security_group("test_2", "test", vpc_id=vpc.id)
|
||||
|
||||
# INGRESS
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
ec2_conn.authorize_security_group(
|
||||
group_id=sg.id,
|
||||
ip_protocol="-1",
|
||||
cidr_ip=["{0}.0.0.0/0".format(i) for i in range(110)],
|
||||
)
|
||||
cm.exception.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
cm.value.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
|
||||
sg.rules.should.be.empty
|
||||
# authorize a rule targeting a different sec group (because this count too)
|
||||
@ -627,17 +626,17 @@ def test_sec_group_rule_limit_vpc():
|
||||
)
|
||||
# verify that we cannot authorize past the limit for a CIDR IP
|
||||
success.should.be.true
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
ec2_conn.authorize_security_group(
|
||||
group_id=sg.id, ip_protocol="-1", cidr_ip=["100.0.0.0/0"]
|
||||
)
|
||||
cm.exception.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
cm.value.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
# verify that we cannot authorize past the limit for a different sec group
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
ec2_conn.authorize_security_group(
|
||||
group_id=sg.id, ip_protocol="-1", src_security_group_group_id=other_sg.id
|
||||
)
|
||||
cm.exception.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
cm.value.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
|
||||
# EGRESS
|
||||
# authorize a rule targeting a different sec group (because this count too)
|
||||
@ -652,17 +651,17 @@ def test_sec_group_rule_limit_vpc():
|
||||
group_id=sg.id, ip_protocol="-1", cidr_ip="{0}.0.0.0/0".format(i)
|
||||
)
|
||||
# verify that we cannot authorize past the limit for a CIDR IP
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
ec2_conn.authorize_security_group_egress(
|
||||
group_id=sg.id, ip_protocol="-1", cidr_ip="50.0.0.0/0"
|
||||
)
|
||||
cm.exception.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
cm.value.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
# verify that we cannot authorize past the limit for a different sec group
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
ec2_conn.authorize_security_group_egress(
|
||||
group_id=sg.id, ip_protocol="-1", src_group_id=other_sg.id
|
||||
)
|
||||
cm.exception.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
cm.value.error_code.should.equal("RulesPerSecurityGroupLimitExceeded")
|
||||
|
||||
|
||||
"""
|
||||
@ -689,7 +688,7 @@ def test_add_same_rule_twice_throws_error():
|
||||
]
|
||||
sg.authorize_ingress(IpPermissions=ip_permissions)
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
sg.authorize_ingress(IpPermissions=ip_permissions)
|
||||
|
||||
|
||||
@ -761,15 +760,15 @@ def test_security_group_tagging_boto3():
|
||||
|
||||
sg = conn.create_security_group(GroupName="test-sg", Description="Test SG")
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
conn.create_tags(
|
||||
Resources=[sg["GroupId"]],
|
||||
Tags=[{"Key": "Test", "Value": "Tag"}],
|
||||
DryRun=True,
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("DryRunOperation")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("DryRunOperation")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -926,11 +925,11 @@ def test_get_all_security_groups_filter_with_same_vpc_id():
|
||||
)
|
||||
security_groups.should.have.length_of(1)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.get_all_security_groups(group_ids=["does_not_exist"])
|
||||
cm.exception.code.should.equal("InvalidGroup.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidGroup.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2
|
||||
|
@ -1,5 +1,5 @@
|
||||
from __future__ import unicode_literals
|
||||
from nose.tools import assert_raises
|
||||
import pytest
|
||||
import datetime
|
||||
|
||||
import boto
|
||||
@ -31,7 +31,7 @@ def test_request_spot_instances():
|
||||
start = iso_8601_datetime_with_milliseconds(start_dt)
|
||||
end = iso_8601_datetime_with_milliseconds(end_dt)
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
request = conn.request_spot_instances(
|
||||
SpotPrice="0.5",
|
||||
InstanceCount=1,
|
||||
@ -54,9 +54,9 @@ def test_request_spot_instances():
|
||||
},
|
||||
DryRun=True,
|
||||
)
|
||||
ex.exception.response["Error"]["Code"].should.equal("DryRunOperation")
|
||||
ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("DryRunOperation")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the RequestSpotInstance operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -155,11 +155,11 @@ def test_cancel_spot_instance_request():
|
||||
requests = conn.get_all_spot_instance_requests()
|
||||
requests.should.have.length_of(1)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
conn.cancel_spot_instance_requests([requests[0].id], dry_run=True)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the CancelSpotInstance operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
|
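The boto3-based tests follow the same shape, but the raised exception is a botocore `ClientError`, so the assertions read the structured response under `ex.value.response`. A condensed sketch of the DryRun check, assuming a boto3 EC2 client created inside a `@mock_ec2` test (the resource id and tag are illustrative):

import pytest
import sure  # noqa
from botocore.exceptions import ClientError

def check_dry_run_create_tags(client, resource_id):
    with pytest.raises(ClientError) as ex:
        client.create_tags(
            Resources=[resource_id],
            Tags=[{"Key": "Test", "Value": "Tag"}],
            DryRun=True,
        )
    # Error code and HTTP status come from the structured botocore response.
    ex.value.response["Error"]["Code"].should.equal("DryRunOperation")
    ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)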
@ -1,17 +1,16 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises # noqa
|
||||
from nose.tools import assert_raises
|
||||
|
||||
import boto3
|
||||
import boto
|
||||
import boto.vpc
|
||||
from boto.exception import EC2ResponseError
|
||||
from botocore.exceptions import ParamValidationError, ClientError
|
||||
import sure # noqa
|
||||
import random
|
||||
|
||||
import boto
|
||||
import boto3
|
||||
import boto.vpc
|
||||
|
||||
# Ensure 'pytest.raises' context manager support for Python 2.6
|
||||
import pytest
|
||||
import sure # noqa
|
||||
from boto.exception import EC2ResponseError
|
||||
from botocore.exceptions import ClientError, ParamValidationError
|
||||
from moto import mock_ec2, mock_ec2_deprecated
|
||||
|
||||
|
||||
@ -30,22 +29,22 @@ def test_subnets():
|
||||
all_subnets = conn.get_all_subnets()
|
||||
all_subnets.should.have.length_of(0 + len(ec2.get_all_zones()))
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.delete_subnet(subnet.id)
|
||||
cm.exception.code.should.equal("InvalidSubnetID.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidSubnetID.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_subnet_create_vpc_validation():
|
||||
conn = boto.connect_vpc("the_key", "the_secret")
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.create_subnet("vpc-abcd1234", "10.0.0.0/18")
|
||||
cm.exception.code.should.equal("InvalidVpcID.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidVpcID.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -202,7 +201,7 @@ def test_modify_subnet_attribute_validation():
|
||||
VpcId=vpc.id, CidrBlock="10.0.0.0/24", AvailabilityZone="us-west-1a"
|
||||
)
|
||||
|
||||
with assert_raises(ParamValidationError):
|
||||
with pytest.raises(ParamValidationError):
|
||||
client.modify_subnet_attribute(
|
||||
SubnetId=subnet.id, MapPublicIpOnLaunch={"Value": "invalid"}
|
||||
)
|
||||
@ -228,11 +227,11 @@ def test_subnet_get_by_id():
|
||||
subnetA.id.should.be.within(subnets_by_id)
|
||||
subnetB1.id.should.be.within(subnets_by_id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.get_all_subnets(subnet_ids=["subnet-does_not_exist"])
|
||||
cm.exception.code.should.equal("InvalidSubnetID.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidSubnetID.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -386,13 +385,13 @@ def test_create_subnet_with_invalid_availability_zone():
|
||||
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
|
||||
|
||||
subnet_availability_zone = "asfasfas"
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
subnet = client.create_subnet(
|
||||
VpcId=vpc.id,
|
||||
CidrBlock="10.0.0.0/24",
|
||||
AvailabilityZone=subnet_availability_zone,
|
||||
)
|
||||
assert str(ex.exception).startswith(
|
||||
assert str(ex.value).startswith(
|
||||
"An error occurred (InvalidParameterValue) when calling the CreateSubnet "
|
||||
"operation: Value ({}) for parameter availabilityZone is invalid. Subnets can currently only be created in the following availability zones: ".format(
|
||||
subnet_availability_zone
|
||||
@ -409,9 +408,9 @@ def test_create_subnet_with_invalid_cidr_range():
|
||||
vpc.is_default.shouldnt.be.ok
|
||||
|
||||
subnet_cidr_block = "10.1.0.0/20"
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
subnet = ec2.create_subnet(VpcId=vpc.id, CidrBlock=subnet_cidr_block)
|
||||
str(ex.exception).should.equal(
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (InvalidSubnet.Range) when calling the CreateSubnet "
|
||||
"operation: The CIDR '{}' is invalid.".format(subnet_cidr_block)
|
||||
)
|
||||
@ -427,9 +426,9 @@ def test_create_subnet_with_invalid_cidr_range_multiple_vpc_cidr_blocks():
|
||||
vpc.is_default.shouldnt.be.ok
|
||||
|
||||
subnet_cidr_block = "10.2.0.0/20"
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
subnet = ec2.create_subnet(VpcId=vpc.id, CidrBlock=subnet_cidr_block)
|
||||
str(ex.exception).should.equal(
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (InvalidSubnet.Range) when calling the CreateSubnet "
|
||||
"operation: The CIDR '{}' is invalid.".format(subnet_cidr_block)
|
||||
)
|
||||
@ -444,9 +443,9 @@ def test_create_subnet_with_invalid_cidr_block_parameter():
|
||||
vpc.is_default.shouldnt.be.ok
|
||||
|
||||
subnet_cidr_block = "1000.1.0.0/20"
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
subnet = ec2.create_subnet(VpcId=vpc.id, CidrBlock=subnet_cidr_block)
|
||||
str(ex.exception).should.equal(
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (InvalidParameterValue) when calling the CreateSubnet "
|
||||
"operation: Value ({}) for parameter cidrBlock is invalid. This is not a valid CIDR block.".format(
|
||||
subnet_cidr_block
|
||||
@ -503,10 +502,10 @@ def test_create_subnets_with_overlapping_cidr_blocks():
|
||||
vpc.is_default.shouldnt.be.ok
|
||||
|
||||
subnet_cidr_block = "10.0.0.0/24"
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
subnet1 = ec2.create_subnet(VpcId=vpc.id, CidrBlock=subnet_cidr_block)
|
||||
subnet2 = ec2.create_subnet(VpcId=vpc.id, CidrBlock=subnet_cidr_block)
|
||||
str(ex.exception).should.equal(
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (InvalidSubnet.Conflict) when calling the CreateSubnet "
|
||||
"operation: The CIDR '{}' conflicts with another subnet".format(
|
||||
subnet_cidr_block
|
||||
|
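Several of the subnet tests assert on the rendered error message rather than on the structured response; after the port that is `str(ex.value)` instead of `str(ex.exception)`. A trimmed sketch, assuming a boto3 EC2 resource and VPC from a `@mock_ec2` test (the substring checks stand in for the full-message comparison used in the real tests):

import pytest
from botocore.exceptions import ClientError

def check_invalid_cidr_block(ec2, vpc):
    subnet_cidr_block = "1000.1.0.0/20"  # deliberately malformed
    with pytest.raises(ClientError) as ex:
        ec2.create_subnet(VpcId=vpc.id, CidrBlock=subnet_cidr_block)
    # str(ex.value) renders the full botocore error message.
    assert "InvalidParameterValue" in str(ex.value)
    assert subnet_cidr_block in str(ex.value)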
@ -1,5 +1,5 @@
|
||||
from __future__ import unicode_literals
|
||||
from nose.tools import assert_raises
|
||||
import pytest
|
||||
|
||||
import itertools
|
||||
|
||||
@ -11,7 +11,7 @@ from boto.ec2.instance import Reservation
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2_deprecated, mock_ec2
|
||||
from nose.tools import assert_raises
|
||||
import pytest
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -20,11 +20,11 @@ def test_add_tag():
|
||||
reservation = conn.run_instances("ami-1234abcd")
|
||||
instance = reservation.instances[0]
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
instance.add_tag("a key", "some value", dry_run=True)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -51,11 +51,11 @@ def test_remove_tag():
|
||||
tag.name.should.equal("a key")
|
||||
tag.value.should.equal("some value")
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
instance.remove_tag("a key", dry_run=True)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the DeleteTags operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -106,11 +106,11 @@ def test_create_tags():
|
||||
"blank key": "",
|
||||
}
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
with pytest.raises(EC2ResponseError) as ex:
|
||||
conn.create_tags(instance.id, tag_dict, dry_run=True)
|
||||
ex.exception.error_code.should.equal("DryRunOperation")
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
ex.value.error_code.should.equal("DryRunOperation")
|
||||
ex.value.status.should.equal(400)
|
||||
ex.value.message.should.equal(
|
||||
"An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set"
|
||||
)
|
||||
|
||||
@ -131,18 +131,18 @@ def test_tag_limit_exceeded():
|
||||
for i in range(51):
|
||||
tag_dict["{0:02d}".format(i + 1)] = ""
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.create_tags(instance.id, tag_dict)
|
||||
cm.exception.code.should.equal("TagLimitExceeded")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("TagLimitExceeded")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
instance.add_tag("a key", "a value")
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.create_tags(instance.id, tag_dict)
|
||||
cm.exception.code.should.equal("TagLimitExceeded")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("TagLimitExceeded")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
tags = conn.get_all_tags()
|
||||
tag = tags[0]
|
||||
@ -157,27 +157,27 @@ def test_invalid_parameter_tag_null():
|
||||
reservation = conn.run_instances("ami-1234abcd")
|
||||
instance = reservation.instances[0]
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
instance.add_tag("a key", None)
|
||||
cm.exception.code.should.equal("InvalidParameterValue")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidParameterValue")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_invalid_id():
|
||||
conn = boto.connect_ec2("the_key", "the_secret")
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.create_tags("ami-blah", {"key": "tag"})
|
||||
cm.exception.code.should.equal("InvalidID")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidID")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.create_tags("blah-blah", {"key": "tag"})
|
||||
cm.exception.code.should.equal("InvalidID")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidID")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -449,10 +449,10 @@ def test_create_tag_empty_resource():
|
||||
# create ec2 client in us-west-1
|
||||
client = boto3.client("ec2", region_name="us-west-1")
|
||||
# create tag with empty resource
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.create_tags(Resources=[], Tags=[{"Key": "Value"}])
|
||||
ex.exception.response["Error"]["Code"].should.equal("MissingParameter")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("MissingParameter")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"The request must contain the parameter resourceIdSet"
|
||||
)
|
||||
|
||||
@ -462,10 +462,10 @@ def test_delete_tag_empty_resource():
|
||||
# create ec2 client in us-west-1
|
||||
client = boto3.client("ec2", region_name="us-west-1")
|
||||
# delete tag with empty resource
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
client.delete_tags(Resources=[], Tags=[{"Key": "Value"}])
|
||||
ex.exception.response["Error"]["Code"].should.equal("MissingParameter")
|
||||
ex.exception.response["Error"]["Message"].should.equal(
|
||||
ex.value.response["Error"]["Code"].should.equal("MissingParameter")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"The request must contain the parameter resourceIdSet"
|
||||
)
|
||||
|
||||
|
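The triplet of checks on the captured boto exception (code, status, request_id) recurs in nearly every legacy test above. A hypothetical helper, not part of this PR, could collapse the repetition; `assert_ec2_error` below is an illustration only:

import sure  # noqa

def assert_ec2_error(exc, expected_code, expected_status=400):
    # exc is the EC2ResponseError captured by pytest.raises(...) as cm.value.
    exc.code.should.equal(expected_code)
    exc.status.should.equal(expected_status)
    exc.request_id.should_not.be.none

# Usage inside a test:
#     with pytest.raises(EC2ResponseError) as cm:
#         conn.create_tags(instance.id, tag_dict)
#     assert_ec2_error(cm.value, "TagLimitExceeded")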
@ -1,8 +1,7 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
# Ensure 'pytest.raises' context manager support for Python 2.6
|
||||
import pytest
|
||||
from moto.ec2.exceptions import EC2ClientError
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
@ -49,11 +48,11 @@ def test_vpc_peering_connections_accept():
|
||||
vpc_pcx = conn.accept_vpc_peering_connection(vpc_pcx.id)
|
||||
vpc_pcx._status.code.should.equal("active")
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.reject_vpc_peering_connection(vpc_pcx.id)
|
||||
cm.exception.code.should.equal("InvalidStateTransition")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidStateTransition")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
all_vpc_pcxs = conn.get_all_vpc_peering_connections()
|
||||
all_vpc_pcxs.should.have.length_of(1)
|
||||
@ -69,11 +68,11 @@ def test_vpc_peering_connections_reject():
|
||||
verdict = conn.reject_vpc_peering_connection(vpc_pcx.id)
|
||||
verdict.should.equal(True)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.accept_vpc_peering_connection(vpc_pcx.id)
|
||||
cm.exception.code.should.equal("InvalidStateTransition")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidStateTransition")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
all_vpc_pcxs = conn.get_all_vpc_peering_connections()
|
||||
all_vpc_pcxs.should.have.length_of(1)
|
||||
@ -93,11 +92,11 @@ def test_vpc_peering_connections_delete():
|
||||
all_vpc_pcxs.should.have.length_of(1)
|
||||
all_vpc_pcxs[0]._status.code.should.equal("deleted")
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.delete_vpc_peering_connection("pcx-1234abcd")
|
||||
cm.exception.code.should.equal("InvalidVpcPeeringConnectionId.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidVpcPeeringConnectionId.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2
|
||||
@ -129,11 +128,11 @@ def test_vpc_peering_connections_cross_region_fail():
|
||||
ec2_apn1 = boto3.resource("ec2", region_name="ap-northeast-1")
|
||||
vpc_apn1 = ec2_apn1.create_vpc(CidrBlock="10.20.0.0/16")
|
||||
# create peering wrong region with no vpc
|
||||
with assert_raises(ClientError) as cm:
|
||||
with pytest.raises(ClientError) as cm:
|
||||
ec2_usw1.create_vpc_peering_connection(
|
||||
VpcId=vpc_usw1.id, PeerVpcId=vpc_apn1.id, PeerRegion="ap-northeast-2"
|
||||
)
|
||||
cm.exception.response["Error"]["Code"].should.equal("InvalidVpcID.NotFound")
|
||||
cm.value.response["Error"]["Code"].should.equal("InvalidVpcID.NotFound")
|
||||
|
||||
|
||||
@mock_ec2
|
||||
@ -253,15 +252,15 @@ def test_vpc_peering_connections_cross_region_accept_wrong_region():
|
||||
# accept wrong peering from us-west-1 which will raise error
|
||||
ec2_apn1 = boto3.client("ec2", region_name="ap-northeast-1")
|
||||
ec2_usw1 = boto3.client("ec2", region_name="us-west-1")
|
||||
with assert_raises(ClientError) as cm:
|
||||
with pytest.raises(ClientError) as cm:
|
||||
ec2_usw1.accept_vpc_peering_connection(VpcPeeringConnectionId=vpc_pcx_usw1.id)
|
||||
cm.exception.response["Error"]["Code"].should.equal("OperationNotPermitted")
|
||||
cm.value.response["Error"]["Code"].should.equal("OperationNotPermitted")
|
||||
exp_msg = (
|
||||
"Incorrect region ({0}) specified for this request.VPC "
|
||||
"peering connection {1} must be "
|
||||
"accepted in region {2}".format("us-west-1", vpc_pcx_usw1.id, "ap-northeast-1")
|
||||
)
|
||||
cm.exception.response["Error"]["Message"].should.equal(exp_msg)
|
||||
cm.value.response["Error"]["Message"].should.equal(exp_msg)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
@ -278,12 +277,12 @@ def test_vpc_peering_connections_cross_region_reject_wrong_region():
|
||||
# reject wrong peering from us-west-1 which will raise error
|
||||
ec2_apn1 = boto3.client("ec2", region_name="ap-northeast-1")
|
||||
ec2_usw1 = boto3.client("ec2", region_name="us-west-1")
|
||||
with assert_raises(ClientError) as cm:
|
||||
with pytest.raises(ClientError) as cm:
|
||||
ec2_usw1.reject_vpc_peering_connection(VpcPeeringConnectionId=vpc_pcx_usw1.id)
|
||||
cm.exception.response["Error"]["Code"].should.equal("OperationNotPermitted")
|
||||
cm.value.response["Error"]["Code"].should.equal("OperationNotPermitted")
|
||||
exp_msg = (
|
||||
"Incorrect region ({0}) specified for this request.VPC "
|
||||
"peering connection {1} must be accepted or "
|
||||
"rejected in region {2}".format("us-west-1", vpc_pcx_usw1.id, "ap-northeast-1")
|
||||
)
|
||||
cm.exception.response["Error"]["Message"].should.equal(exp_msg)
|
||||
cm.value.response["Error"]["Message"].should.equal(exp_msg)
|
||||
|
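The module headers all change along the same lines: the Python 2.6 `tests.backport_assert_raises` shim and the `nose.tools` import are dropped, `import pytest` is added, and the remaining imports are regrouped. A condensed before/after of a typical header, trimmed to the imports that actually move:

# Before (nose era):
#     import tests.backport_assert_raises  # noqa
#     from nose.tools import assert_raises
#
# After (pytest era):
import pytest
import sure  # noqa
from boto.exception import EC2ResponseError
from botocore.exceptions import ClientError
from moto import mock_ec2, mock_ec2_deprecated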
@ -1,8 +1,7 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises # noqa
|
||||
from nose.tools import assert_raises
|
||||
# Ensure 'pytest.raises' context manager support for Python 2.6
|
||||
import pytest
|
||||
from moto.ec2.exceptions import EC2ClientError
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
@ -31,11 +30,11 @@ def test_vpcs():
|
||||
all_vpcs = conn.get_all_vpcs()
|
||||
all_vpcs.should.have.length_of(1)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.delete_vpc("vpc-1234abcd")
|
||||
cm.exception.code.should.equal("InvalidVpcID.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidVpcID.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -114,11 +113,11 @@ def test_vpc_get_by_id():
|
||||
vpc1.id.should.be.within(vpc_ids)
|
||||
vpc2.id.should.be.within(vpc_ids)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
with pytest.raises(EC2ResponseError) as cm:
|
||||
conn.get_all_vpcs(vpc_ids=["vpc-does_not_exist"])
|
||||
cm.exception.code.should.equal("InvalidVpcID.NotFound")
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
cm.value.code.should.equal("InvalidVpcID.NotFound")
|
||||
cm.value.status.should.equal(400)
|
||||
cm.value.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -402,11 +401,11 @@ def test_associate_vpc_ipv4_cidr_block():
|
||||
)
|
||||
|
||||
# Check error on adding 6th association.
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
response = ec2.meta.client.associate_vpc_cidr_block(
|
||||
VpcId=vpc.id, CidrBlock="10.10.50.0/22"
|
||||
)
|
||||
str(ex.exception).should.equal(
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (CidrLimitExceeded) when calling the AssociateVpcCidrBlock "
|
||||
"operation: This network '{}' has met its maximum number of allowed CIDRs: 5".format(
|
||||
vpc.id
|
||||
@ -447,11 +446,11 @@ def test_disassociate_vpc_ipv4_cidr_block():
|
||||
)
|
||||
|
||||
# Error attempting to delete a non-existent CIDR_BLOCK association
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
response = ec2.meta.client.disassociate_vpc_cidr_block(
|
||||
AssociationId="vpc-cidr-assoc-BORING123"
|
||||
)
|
||||
str(ex.exception).should.equal(
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (InvalidVpcCidrBlockAssociationIdError.NotFound) when calling the "
|
||||
"DisassociateVpcCidrBlock operation: The vpc CIDR block association ID "
|
||||
"'vpc-cidr-assoc-BORING123' does not exist"
|
||||
@ -469,11 +468,11 @@ def test_disassociate_vpc_ipv4_cidr_block():
|
||||
{},
|
||||
)["AssociationId"]
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
response = ec2.meta.client.disassociate_vpc_cidr_block(
|
||||
AssociationId=vpc_base_cidr_assoc_id
|
||||
)
|
||||
str(ex.exception).should.equal(
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (OperationNotPermitted) when calling the DisassociateVpcCidrBlock operation: "
|
||||
"The vpc CIDR block with association ID {} may not be disassociated. It is the primary "
|
||||
"IPv4 CIDR block of the VPC".format(vpc_base_cidr_assoc_id)
|
||||
@ -549,11 +548,11 @@ def test_vpc_associate_ipv6_cidr_block():
|
||||
ipv6_cidr_block_association_set["AssociationId"].should.contain("vpc-cidr-assoc")
|
||||
|
||||
# Test Fail on adding 2nd IPV6 association - AWS only allows 1 at this time!
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
response = ec2.meta.client.associate_vpc_cidr_block(
|
||||
VpcId=vpc.id, AmazonProvidedIpv6CidrBlock=True
|
||||
)
|
||||
str(ex.exception).should.equal(
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (CidrLimitExceeded) when calling the AssociateVpcCidrBlock "
|
||||
"operation: This network '{}' has met its maximum number of allowed CIDRs: 1".format(
|
||||
vpc.id
|
||||
@ -657,9 +656,9 @@ def test_create_vpc_with_invalid_cidr_block_parameter():
|
||||
ec2 = boto3.resource("ec2", region_name="us-west-1")
|
||||
|
||||
vpc_cidr_block = "1000.1.0.0/20"
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
vpc = ec2.create_vpc(CidrBlock=vpc_cidr_block)
|
||||
str(ex.exception).should.equal(
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (InvalidParameterValue) when calling the CreateVpc "
|
||||
"operation: Value ({}) for parameter cidrBlock is invalid. This is not a valid CIDR block.".format(
|
||||
vpc_cidr_block
|
||||
@ -672,9 +671,9 @@ def test_create_vpc_with_invalid_cidr_range():
|
||||
ec2 = boto3.resource("ec2", region_name="us-west-1")
|
||||
|
||||
vpc_cidr_block = "10.1.0.0/29"
|
||||
with assert_raises(ClientError) as ex:
|
||||
with pytest.raises(ClientError) as ex:
|
||||
vpc = ec2.create_vpc(CidrBlock=vpc_cidr_block)
|
||||
str(ex.exception).should.equal(
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (InvalidVpc.Range) when calling the CreateVpc "
|
||||
"operation: The CIDR '{}' is invalid.".format(vpc_cidr_block)
|
||||
)
|
||||
|
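For message assertions, `pytest.raises` also accepts a `match` argument, which runs a regular-expression search against the stringified exception. The port keeps the explicit `str(ex.value)` comparisons, but the same VPC CIDR check could be written as below; this is an alternative idiom, not what the PR does (the CIDR value mirrors the test above):

import re

import pytest
from botocore.exceptions import ClientError

def check_invalid_vpc_cidr_range(ec2):
    # match= applies re.search to str(exception); escape the parentheses.
    with pytest.raises(ClientError, match=re.escape("(InvalidVpc.Range)")):
        ec2.create_vpc(CidrBlock="10.1.0.0/29")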
@ -1,11 +1,11 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import boto
|
||||
import boto3
|
||||
from nose.tools import assert_raises
|
||||
import pytest
|
||||
import sure # noqa
|
||||
from boto.exception import EC2ResponseError
|
||||
|
||||
from moto import mock_ec2_deprecated, mock_ec2
|
||||
from moto import mock_ec2, mock_ec2_deprecated
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@ -35,7 +35,7 @@ def test_delete_vpn_connections():
|
||||
@mock_ec2_deprecated
|
||||
def test_delete_vpn_connections_bad_id():
|
||||
conn = boto.connect_vpc("the_key", "the_secret")
|
||||
with assert_raises(EC2ResponseError):
|
||||
with pytest.raises(EC2ResponseError):
|
||||
conn.delete_vpn_connection("vpn-0123abcd")
|
||||
|
||||
|
||||
|
1
tests/test_ecr/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
# This file is intentionally left blank.
|
@ -15,7 +15,7 @@ from botocore.exceptions import ClientError, ParamValidationError
|
||||
from dateutil.tz import tzlocal
|
||||
|
||||
from moto import mock_ecr
|
||||
from nose import SkipTest
|
||||
from unittest import SkipTest
|
||||
|
||||
|
||||
def _create_image_digest(contents=None):
|
||||
|
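With nose gone, `SkipTest` now comes from the standard library; pytest reports a raised `unittest.SkipTest` as a skipped test, so the guard logic is unchanged. A minimal sketch of such a guard, assuming a TEST_SERVER_MODE-style environment flag (the exact condition used by the real ECR tests may differ):

import os
from unittest import SkipTest

def maybe_skip_in_server_mode():
    # Raising unittest.SkipTest inside a test makes pytest skip it.
    if os.environ.get("TEST_SERVER_MODE", "false").lower() == "true":
        raise SkipTest("Cannot test this path against moto_server")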
1
tests/test_ecs/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
# This file is intentionally left blank.
|
Some files were not shown because too many files have changed in this diff.